Added scripts to scrape PDFs, perform table operations, and convert coordinates between different formats
parent a4bf20cdb5
commit c80a2dd40e
@@ -3,7 +3,12 @@
import argparse
import os, sys
import math

import scipy.interpolate

from fgtools.utils.interpolator import Interpolator
from fgtools.utils import range

class Case:
	def __init__(self, textcase):
@@ -25,7 +30,12 @@ class Case:
		textcase.pop(0) # get rid of the Solver case line
		textcase.pop(0) # get rid of another legend line
		lastiterindex = textcase.index("Skin Friction Drag Break Out:") - 1
		self.CL, self.CDo, self.CDi, self.CDtot, self.CS, self.LD, self.E, self.CFx, self.CFy, self.CFz, self.CMx, self.CMy, self.CMz, self.CDtrefftz, self.TQS = map(float, textcase.pop(lastiterindex).split()[4:]) # we don't need Mach, AoA, Beta again
		line = textcase.pop(lastiterindex).split()[4:]
		if len(line) == 16:
			self.CL, self.CDo, self.CDi, self.CDtot, self.CDt, self.CDtot_t, self.CS, self.LD, self.E, self.CFx, self.CFy, self.CFz, self.CMx, self.CMy, self.CMz, self.TQS = map(float, line)
		else:
			self.CL, self.CDo, self.CDi, self.CDtot, self.CS, self.LD, self.E, self.CFx, self.CFy, self.CFz, self.CMx, self.CMy, self.CMz, self.CDtrefftz, self.TQS = map(float, line)

	def __str__(self):
		return "Case(" + ", ".join(str(k) + " = " + str(v) for k, v in vars(self).items()) + ")"
@@ -33,7 +43,7 @@ class Case:
	def __repr__(self):
		return self.__str__()

def get_cases(path):
def get_cases(path, mach):
	path = os.path.abspath(path)

	if not path.endswith(".history"):
@@ -56,13 +66,21 @@ def get_cases(path):

	for textcase in textcases:
		case = Case(textcase)
		if case.RollRate or case.YawRate or case.PitchRate or case.Mach != mach:
			continue
		if not case.AoA in cases:
			cases[case.AoA] = {}
		cases[case.AoA][case.Beta] = case

	mostbetas = len(max(cases.values(), key=lambda d: len(d)))
	for AoA in list(cases.keys()):
		if len(cases[AoA]) < mostbetas:
			del cases[AoA]
			continue

	return cases

def print_table(cases, coeff, indent, precision):
def get_raw_coeffs(cases, coeff):
	coeffs = {}
	for AoA in cases:
		for Beta in cases[AoA]:
@@ -73,13 +91,37 @@ def print_table(cases, coeff, indent, precision):
			if not AoA in coeffs:
				coeffs[AoA] = {}
			coeffs[AoA][Beta] = getattr(cases[AoA][Beta], coeff)
	return coeffs

def get_interpolated_coeffs(cases, coeff, alphas, betas, symmetrize):
	coeffs = get_raw_coeffs(cases, coeff)
	ralphas = list(coeffs.keys())
	rbetas = list(coeffs[ralphas[0]].keys())
	values = list([list(coeffs[ralpha].values()) for ralpha in ralphas])
	interp = scipy.interpolate.RectBivariateSpline(ralphas, rbetas, values)
	#interp = scipy.interpolate.interp2d(rbetas, ralphas, values, fill_value=None)
	coeffs = {}
	for i, alpha in enumerate(alphas):
		if not alpha in coeffs:
			coeffs[alpha] = {}
		for j, beta in enumerate(betas):
			if symmetrize:
				s = interp(alpha, beta, grid=False)
				coeffs[alpha][beta] = math.copysign((abs(interp(alpha, beta, grid=False)) + abs(interp(alpha, -beta, grid=False))) / 2, s)
			else:
				coeffs[alpha][beta] = interp(alpha, beta, grid=False)
				#coeffs[alpha][beta] = interp([beta], [alpha])
	return coeffs

def print_table(coeffs, indent, precision, use_wing_alpha):
	print("<table>")
	print(indent + '<independentVar lookup="row">aero/alpha-deg</independentVar>')
	print(indent + '<independentVar lookup="column">aero/beta-deg</independentVar>')
	print(indent + '<independentVar lookup="row">aero/alpha-' + ("wing-" if use_wing_alpha else "") + 'deg</independentVar>')
	if len(coeffs[list(coeffs.keys())[0]]) > 1:
		print(indent + '<independentVar lookup="column">aero/beta-deg</independentVar>')
	#print(indent + '<independentVar lookup="table">velocities/mach</independentVar>')
	print(indent + "<tableData>")
	print(indent + indent + indent + indent + (indent + indent).join(map(str, coeffs[list(coeffs.keys())[0]].keys())))
	if len(coeffs[list(coeffs.keys())[0]]) > 1:
		print(indent + indent + indent + indent + (indent + indent).join(map(str, coeffs[list(coeffs.keys())[0]].keys())))
	for AoA in coeffs:
		print(indent + indent + str(AoA), end="")
		for Beta in coeffs[AoA]:
@@ -111,13 +153,78 @@ if __name__ == "__main__":
		default=6
	)

	argp.add_argument(
		"--alpha-min",
		help="Lowest alpha table lookup value",
		type=float,
		default=None
	)
	argp.add_argument(
		"--alpha-max",
		help="Highest alpha table lookup value",
		type=float,
		default=None
	)
	argp.add_argument(
		"--alpha-step",
		help="Alpha table lookup value step size",
		type=float,
		default=None
	)

	argp.add_argument(
		"--beta-min",
		help="Lowest beta table lookup value",
		type=float,
		default=None
	)
	argp.add_argument(
		"--beta-max",
		help="Highest beta table lookup value",
		type=float,
		default=None
	)
	argp.add_argument(
		"--beta-step",
		help="Beta table lookup value step size",
		type=float,
		default=None
	)

	argp.add_argument(
		"-s", "--symmetrize",
		help="Symmetrize table around the sideslip axis",
		action="store_true",
	)

	argp.add_argument(
		"-w", "--use-wing-alpha",
		help="Use aero/alpha-wing-deg instead of aero/alpha-deg for the alpha lookup property",
		action="store_true"
	)

	argp.add_argument(
		"input_file",
		help="VSPAERO .history file",
	)

	argp.add_argument(
		"-m", "--mach",
		help="Mach number for which to output the table - must exist in the .history file!",
		type=float,
		required=True
	)

	args = argp.parse_args()

	cases = get_cases(args.input_file)
	print_table(cases, args.coeff, args.indentation, args.precision)
	cases = get_cases(args.input_file, args.mach)
	if None not in (args.alpha_min, args.alpha_max, args.alpha_step, args.beta_min, args.beta_max, args.beta_step):
		alphas = list(range(args.alpha_min, args.alpha_max, args.alpha_step))
		alphas.append(args.alpha_max)
		betas = list(range(args.beta_min, args.beta_max, args.beta_step))
		betas.append(args.beta_max)
		coeffs = get_interpolated_coeffs(cases, args.coeff, alphas, betas, args.symmetrize)
	else:
		coeffs = get_raw_coeffs(cases, args.coeff)
	print_table(coeffs, args.indentation, args.precision, args.use_wing_alpha)
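
For orientation, a minimal sketch of how the new options drive the pipeline above; the file name, Mach number, coefficient name and grid values are placeholders, not taken from the commit:

# Hypothetical driver equivalent to the new __main__ logic
# ("wing.history", 0.2 and "CL" are placeholder values).
cases = get_cases("wing.history", mach=0.2)

# lookup grid normally requested via --alpha-min/-max/-step and --beta-min/-max/-step
alphas = [-10, -5, 0, 5, 10, 15, 20]
betas = [-10, -5, 0, 5, 10]

# --symmetrize averages |C(alpha, beta)| and |C(alpha, -beta)| and keeps the
# sign of C(alpha, beta), so the printed table is mirrored about beta = 0
coeffs = get_interpolated_coeffs(cases, "CL", alphas, betas, symmetrize=True)
print_table(coeffs, indent="\t", precision=6, use_wing_alpha=False)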
@@ -125,3 +125,16 @@ def wrap_period(n, min, max):

	return n

def range(stop, start=None, step=1):
	if start:
		stop, start = start, stop
	else:
		start = 0
	yield round(start, 14)
	i = 0
	r = start
	while r < stop:
		i += 1
		r = start + i * step
		yield round(r, 14)
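
A quick sketch of how this float-capable range replacement behaves, assuming it lives where the table script imports it from (values illustrative, not from the commit):

# Minimal usage sketch of the generator above.
from fgtools.utils import range as frange

list(frange(5))             # [0, 1, 2, 3, 4, 5] - the stop value is yielded when exactly reachable
list(frange(-10, 15, 2.5))  # [-10.0, -7.5, ..., 12.5, 15.0]
list(frange(0, 10, 3))      # [0, 3, 6, 9, 12] - runs one step past stop when stop is not exactly reachable
# round(..., 14) trims accumulated float noise such as 7.500000000000001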
@@ -33,9 +33,13 @@ class Interpolator:
			raise ValueError(f"Interpolator.interpolate: cannot interpolate on a table with less than two data points")

		# only sort if not already sorted to increase performance for large tables
		if not self._sorted and sort:
			self._indexes.sort()
			self._values.sort()
		if sort and not self._sorted:
			items = sorted(list(zip(self._indexes, self._values)), key=lambda t: t[0])
			self._indexes = []
			self._values = []
			for item in items:
				self._indexes.append(item[0])
				self._values.append(item[1])
			self._sorted = True

		if index in self._indexes:
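
The change above replaces two independent sorts with one sort over (index, value) pairs; a tiny illustration of why that matters (example data made up):

# Sorting the two lists independently scrambles the index/value pairing:
indexes = [2.0, 0.0, 1.0]
values  = ["b", "z", "a"]         # pairs: (2.0, "b"), (0.0, "z"), (1.0, "a")
sorted(indexes), sorted(values)   # ([0.0, 1.0, 2.0], ['a', 'b', 'z']) - 0.0 now maps to 'a' instead of 'z'

# Sorting the zipped pairs keeps each index attached to its value:
sorted(zip(indexes, values), key=lambda t: t[0])
# [(0.0, 'z'), (1.0, 'a'), (2.0, 'b')]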
misc/scrape-emanualonline-pdf.py (new file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-

import os
import argparse
import re
import requests

from fgtools.utils import constants

pattern = r'(?<=src=")https:\/\/static-repo.emanualonline.com\/.+\.jpg(?=")'

def download_pages(url, output):
	html = requests.get(url).text
	urls = re.findall(pattern, html)
	urltemplate = "/".join(urls[0].split("/")[:-2] + ["%d", "%d.jpg"])

	paths = []
	i = 1
	while True:
		page = requests.get(urltemplate % (i, i))
		i += 1
		if page.status_code != 200:
			break

		path = os.path.join(constants.CACHEDIR, os.path.split(output)[-1] + f"-{i}.jpg")
		paths.append(path)
		with open(path, "wb") as f:
			f.write(page.content)

	return paths

def write_pdf(paths, output):
	print(f"Joining {len(paths)} JPG files into {output} … ", end="")
	newpaths = " ".join([f'"{path}"' for path in paths])
	os.system(f'img2pdf {newpaths} --output "{output}"')
	print("done.")
	print("Deleting JPG files … ", end="")
	for path in paths:
		os.remove(path)
	print("done")

if __name__ == "__main__":
	argp = argparse.ArgumentParser()

	argp.add_argument(
		"url",
		help="URL to emanualonline.com PDF offer"
	)

	argp.add_argument(
		"-o", "--output",
		help="Output file",
		required=True
	)

	args = argp.parse_args()

	os.makedirs(os.path.join(*os.path.split(os.path.relpath(args.output))[:-1]) or ".", exist_ok=True)

	paths = download_pages(args.url, args.output)
	write_pdf(paths, args.output)
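
The key trick in download_pages is deriving a URL template for every page from the first scraped image URL; a small illustration with a made-up URL:

# Illustration of the URL-template derivation (the URL below is made up).
first = "https://static-repo.emanualonline.com/repo/abc123/1/1.jpg"
urltemplate = "/".join(first.split("/")[:-2] + ["%d", "%d.jpg"])
# -> "https://static-repo.emanualonline.com/repo/abc123/%d/%d.jpg"
urltemplate % (7, 7)
# -> "https://static-repo.emanualonline.com/repo/abc123/7/7.jpg"
# Pages are then fetched for i = 1, 2, 3, ... until a request returns non-200.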
misc/scrape-scribd.py (new file, 112 lines)
@@ -0,0 +1,112 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-

import os
import argparse
import re
import requests

from PIL import Image

from bs4 import BeautifulSoup

from fgtools.utils import constants
json_pattern = r'(?<=content-url: ")https:\/\/html.scribdassets.com\/.+\.jsonp(?=")'
img_pattern = r'<img .+?\/>'

class JSPage:
	def __init__(self, number, width, height, url):
		self.number = number
		self.width = width
		self.height = height
		self.url = url

	def get_image(self):
		text = requests.get(self.url).text
		images = list(map(lambda s: BeautifulSoup(s.replace("\\", ""), features="lxml").body.find("img"), re.findall(img_pattern, text)))
		print(len(images))
		src_image = Image.open(requests.get(images[0]["orig"], stream=True).raw)
		pil_image = Image.new("RGB", (self.width, self.height))
		pil_image.paste((255, 255, 255), (0, 0, pil_image.size[0], pil_image.size[1]))
		for image in images:
			style = {}
			for item in image["style"].split(";"):
				item = item.split(":")
				style[item[0]] = item[1].replace("px", "")

			clip = style["clip"]
			clip = {k: int(v) for k, v in zip(("top", "right", "bottom", "left"), clip[clip.find("(") + 1:-1].split(" "))}
			cropped_src_image = src_image.copy().crop((clip["left"], clip["top"], clip["right"], clip["bottom"]))
			pil_image.paste(cropped_src_image, (int(style["left"]) + clip["left"], int(style["top"]) + clip["top"]))

		return pil_image

def parse_pages_script(script):
	lines = list(map(str.strip, script.split("\n")[1:-1]))
	number = 0
	width = 0
	height = 0
	url = ""
	pages = []
	for line in lines:
		if "pageNum" in line:
			number = int(line.split(": ")[1][:-1])
		elif "origWidth" in line:
			width = int(line.split(": ")[1][:-1])
		elif "origHeight" in line:
			height = int(line.split(": ")[1][:-1])
		elif "contentUrl" in line:
			url = line.split(": ")[1][1:-1]

		if number and width and height and url:
			page = JSPage(number, width, height, url)
			pages.append(page)
			number = width = height = 0
			url = ""

	return pages

def download_pages(url, output):
	html = BeautifulSoup(requests.get(url).text, features="lxml")
	pages_script = html.body.find("div", attrs={"class": "outer_page_container"}).find("script", attrs={"type": "text/javascript"})
	pages = sorted(parse_pages_script(str(pages_script)), key=lambda p: p.number)

	paths = []
	for page in pages:
		path = os.path.join(constants.CACHEDIR, os.path.split(output)[-1] + f"-{page.number}.jpg")
		paths.append(path)
		page.get_image().save(path, "JPEG")

	return paths

def write_pdf(paths, output):
	print(f"Joining {len(paths)} JPG files into {output} … ", end="")
	newpaths = " ".join([f'"{path}"' for path in paths])
	os.system(f'img2pdf {newpaths} --output "{output}"')
	print("done.")
	print("Deleting JPG files … ", end="")
	for path in paths:
		os.remove(path)
	print("done")

if __name__ == "__main__":
	argp = argparse.ArgumentParser()

	argp.add_argument(
		"url",
		help="URL to Scribd web page"
	)

	argp.add_argument(
		"-o", "--output",
		help="Output file",
		required=True
	)

	args = argp.parse_args()

	os.makedirs(os.path.join(*os.path.split(os.path.relpath(args.output))[:-1]) or ".", exist_ok=True)

	paths = download_pages(args.url, args.output)
	write_pdf(paths, args.output)
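
JSPage.get_image rebuilds each page by cropping slices out of one source sprite and pasting them onto a blank canvas, driven by the inline CSS of each <img> tag; a small illustration of the style/clip parsing (the style string is made up):

# Illustration of the style/clip parsing in JSPage.get_image (made-up style string).
style_attr = "left:50px;top:100px;clip:rect(0px 200px 150px 0px)"

style = {}
for item in style_attr.split(";"):
	item = item.split(":")
	style[item[0]] = item[1].replace("px", "")
# style == {"left": "50", "top": "100", "clip": "rect(0 200 150 0)"}

clip = style["clip"]
clip = {k: int(v) for k, v in zip(("top", "right", "bottom", "left"), clip[clip.find("(") + 1:-1].split(" "))}
# clip == {"top": 0, "right": 200, "bottom": 150, "left": 0}
# The slice is cropped to (left, top, right, bottom) from the sprite and pasted
# at (left offset + clip left, top offset + clip top) on the page canvas.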
misc/tabletool.py (new file, 146 lines)
@@ -0,0 +1,146 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-

import argparse

class TableRow:
	def __init__(self, cols):
		self.values = [None] * cols

	def __len__(self):
		return len(self.values)

	def __setitem__(self, index, value):
		if index >= len(self.values):
			raise IndexError(f"column index {index} out of bounds for row with {len(self.values)} columns")
		self.values[index] = value

	def __getitem__(self, index):
		if index >= len(self.values):
			raise IndexError(f"column index {index} out of bounds for row with {len(self.values)} columns")
		return self.values[index]

	def __imul__(self, other):
		if len(self.values) != len(other.values):
			raise IndexError(f"attempting to perform *= on two table rows with different column counts")
		for i in range(len(self)):
			self[i] *= other[i]
		return self

	def __isub__(self, other):
		if len(self.values) != len(other.values):
			raise IndexError(f"attempting to perform -= on two table rows with different column counts")
		for i in range(len(self)):
			self[i] -= other[i]
		return self

	def __iadd__(self, other):
		if len(self.values) != len(other.values):
			raise IndexError(f"attempting to perform += on two table rows with different column counts")
		for i in range(len(self)):
			self[i] += other[i]
		return self

class Table:
	def __init__(self, rows, cols):
		self.rows = [TableRow(cols) for i in range(rows)]

	def __getitem__(self, index):
		if index >= len(self.rows):
			raise IndexError(f"row index {index} out of bounds for table with {len(self.rows)} rows")
		return self.rows[index]

	def __imul__(self, other):
		if len(self.rows) != len(other.rows):
			raise IndexError(f"attempting to perform *= on two tables of different row count")
		for selfrow, otherrow in zip(self, other):
			selfrow *= otherrow
		return self

	def __isub__(self, other):
		if len(self.rows) != len(other.rows):
			raise IndexError(f"attempting to perform -= on two tables of different row count")
		for selfrow, otherrow in zip(self, other):
			selfrow -= otherrow
		return self

	def __iadd__(self, other):
		if len(self.rows) != len(other.rows):
			raise IndexError(f"attempting to perform += on two tables of different row count")
		for selfrow, otherrow in zip(self, other):
			selfrow += otherrow
		return self

def add_table(tables):
	lines = []
	EOT = False
	EOF = False
	while not EOT and not EOF:
		line = input().strip()
		if line.lower() == "eot":
			EOT = True
		elif line.lower() == "eof":
			EOF = True
		else:
			lines.append(list(filter(None, line.split())))

	if len(lines) > 0:
		tables.append(lines)

	return EOF

def parse_tables(tables):
	newtables = []
	for table in tables:
		t = Table(len(table), max(map(len, table)))
		for i, row in enumerate(table):
			for j, col in enumerate(table[i]):
				t[i][j] = float(table[i][j])
		newtables.append(t)
	return newtables

def perform_operation(tables, op):
	first, *tables = tables
	for table in tables:
		if op == "product":
			first *= table
		elif op == "difference":
			first -= table
		elif op == "sum":
			first += table
	return first

def print_table(table, precision):
	for row in table:
		print("\t".join(map(lambda f: str(round(f, precision)), row)))

if __name__ == "__main__":
	argp = argparse.ArgumentParser(description="perform various operations on one or more tables")

	argp.add_argument(
		"-o", "--operation",
		help="which operation to perform on the inputted tables",
		required=True,
		choices=["product", "difference", "sum"]
	)

	argp.add_argument(
		"-p", "--precision",
		help="Number of decimal places the numbers in the outputted table should have",
		default=6,
		type=int
	)

	args = argp.parse_args()

	print("Input as many tables as you want, but at least two - input 'EOF' (without the quotes) when you are done")
	print("For each table, input as many table rows as you want - input 'EOT' (without the quotes) to end a table")
	print("For each table row, the columns can be separated by any amount of tabs or spaces")
	tables = []
	EOF = False
	while not EOF:
		EOF = add_table(tables)
	tables = parse_tables(tables)
	table = perform_operation(tables, args.operation)
	print_table(table, args.precision)
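
A short sketch of the in-place table arithmetic the classes above implement, built programmatically instead of via stdin (numbers made up):

# Programmatic illustration of Table/TableRow arithmetic (made-up numbers).
a = Table(2, 2)
b = Table(2, 2)
a[0][0], a[0][1], a[1][0], a[1][1] = 1.0, 2.0, 3.0, 4.0
b[0][0], b[0][1], b[1][0], b[1][1] = 0.5, 0.5, 0.5, 0.5

a *= b              # element-wise product, what --operation product does pairwise
print_table(a, 2)
# 0.5	1.0
# 1.5	2.0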
@@ -105,10 +105,10 @@ class Runway:
		return unit_convert.m2ft(self.get_length_m())

	def get_heading1_deg(self):
		return self.coord2.angle(self.coord1)
		return self.coord1.angle(self.coord2)

	def get_heading2_deg(self):
		return self.coord1.angle(self.coord2)
		return self.coord2.angle(self.coord1)

	def __repr__(self):
		return f""" <runway>

@@ -457,7 +457,6 @@ def write_ils_files(ils_d, output, elevpipe, overwrite):

	i += 1
	print()
	print(i)

if __name__ == "__main__":
	argp = argparse.ArgumentParser(description="Convert apt.dat files to groundnet.xml files")
scenery/coord_convert.py (new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python
#-*- coding:utf-8 -*-

import argparse

def format_skyvector(lon, lat):
	lond, lonm = divmod(abs(lon), 1)
	lonm = lonm * 60
	latd, latm = divmod(abs(lat), 1)
	latm = latm * 60
	ew = "EW"[int(lon < 0)]
	ns = "NS"[int(lat < 0)]
	return f"{int(latd):02d}{int(latm * 100):04d}{ns}{int(lond):03d}{int(lonm * 100):04d}{ew}"

if __name__ == "__main__":
	argp = argparse.ArgumentParser(description="Convert GPS coordinates between different formats")

	argp.add_argument(
		"--lon",
		help="Input longitude",
		required=True,
		type=float
	)

	argp.add_argument(
		"--lat",
		help="Input latitude",
		required=True,
		type=float
	)

	argp.add_argument(
		"-f", "--format",
		help="Output format",
		required=True,
		choices=["dmd", "dms", "skyvector"]
	)

	args = argp.parse_args()

	if args.format == "skyvector":
		result = format_skyvector(args.lon, args.lat)
	else:
		result = "Output format is not implemented yet"
	print(result)
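
A worked example of the SkyVector encoding above (coordinates chosen arbitrarily, with binary-exact fractions so the arithmetic is unambiguous):

# format_skyvector packs degrees and hundredths of minutes into DDMMmm + hemisphere,
# then DDDMMmm + hemisphere.
format_skyvector(lon=9.25, lat=53.5)
# latitude  53.5  -> 53 deg, 30.00 min -> "533000N"
# longitude 9.25  ->  9 deg, 15.00 min -> "0091500E"
# -> "533000N0091500E"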