Clean up nested ifs, separate downloader to def:
This commit is contained in:
parent 59f61f7ae3
commit 42425d4681
124 get_specs.py
@@ -10,6 +10,18 @@ import json
 import subprocess
 
 bartext = ""
+failed = []
 
+def check_internet(url='https://belden.com', timeout=5):
+    try:
+        # Make a GET request to the specified URL
+        response = requests.get(url, timeout=timeout)
+
+        # If the request succeeds, return True
+        return True
+    except requests.ConnectionError:
+        # If a connection error occurs, return False
+        return False
+
 def try_download_datasheet(partnum, output_dir): # Guess datasheet URL
     global bartext
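Nothing in this commit calls the new check_internet() helper yet. A minimal sketch of how it could gate the downloads from the bottom of get_specs.py, assuming check_internet() and get_multi() as defined in this commit; the wiring itself is illustrative, not part of the change:

    if __name__ == "__main__":
        # Hypothetical guard (not in this commit): skip the run entirely if
        # belden.com is unreachable instead of letting each download time out.
        if not check_internet():
            print("No connection to belden.com; skipping datasheet downloads.")
        else:
            get_multi(["10GXS12", "2412"])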
@@ -131,22 +143,29 @@ def touch(path):
     with open(path, 'a'):
         os.utime(path, None)
 
-if __name__ == "__main__":
-
-    partnums = ["10GXS12", "RST 5L-RKT 5L-949",
-                "10GXS13",
-                "10GXW12",
-                "10GXW13",
-                "2412",
-                "2413",
-                "OSP6AU",
-                "FI4D024P9",
-                "FISD012R9",
-                "FDSD012A9",
-                "FSSL024NG",
-                "FISX006W0",
-                ]
-
+def get_multi(partnums):
     with alive_bar(len(partnums) * 2, dual_line=True, calibrate=30, bar="classic2", spinner="classic") as bar:
 
+        def __use_cached_datasheet(partnum, path, output_dir):
+            print("Using cached datasheet for " + partnum, end='')
+            bar.text = "Using cached datasheet for " + partnum
+            bar(skipped=True)
+            print("Parsing Datasheet contents of " + partnum, end='')
+            bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
+            read_datasheet.parse(path, output_dir)
+            bar(skipped=False)
+
+        def __downloaded_datasheet(partnum, path, output_dir):
+            print("Downloaded " + path, end='')
+            bar.text = "Downloaded " + path
+            bar(skipped=False)
+            print("Parsing Datasheet contents of " + partnum, end='')
+            bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
+            read_datasheet.parse(path, output_dir)
+            bar(skipped=False)
+
         for partnum in partnums:
            output_dir = "cables/" + partnum
            path = output_dir + "/datasheet.pdf"
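The two double-underscore helpers are defined inside get_multi() so they can read bar from the enclosing alive_bar() block instead of taking it as a parameter. A small self-contained sketch of that closure pattern; FakeBar is a hypothetical stand-in, not the alive_progress API:

    class FakeBar:
        # Stand-in for the progress bar object captured by the helpers.
        def __init__(self):
            self.text = ""
        def __call__(self, skipped=False):
            print("advance (skipped=" + str(skipped) + "): " + self.text)

    def process_all(items):
        bar = FakeBar()

        def handle(item):
            # Closure: bar comes from process_all's scope, no parameter needed,
            # same idea as __use_cached_datasheet / __downloaded_datasheet.
            bar.text = "handling " + item
            bar(skipped=False)

        for item in items:
            handle(item)

    process_all(["10GXS12", "2412"])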
@@ -163,59 +182,62 @@ if __name__ == "__main__":
                     if download_image(search_result["image"], output_dir):
                         print("Downloaded hi-res part image for " + partnum)
                         touch(output_dir + "/found_part_hires")
                     else:
                         print("Using cached hi-res part image for " + partnum)
 
                     # Download datasheet from provided URL if needed
                     if os.path.exists(path) and os.path.getsize(path) > 1:
-                        print("Using cached " + path, end='')
-                        bar.text = "Using cached " + path
-                        bar(skipped=True)
-                        print("Parsing Datasheet contents of " + partnum, end='')
-                        bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                        read_datasheet.parse(path, output_dir)
-                        bar(skipped=False)
+                        __use_cached_datasheet(partnum, path, output_dir)
 
                     elif download_datasheet(search_result["datasheet"], output_dir) is not False:
-                        print("Downloaded " + path, end='')
-                        bar.text = "Downloaded " + path
-                        bar(skipped=False)
-                        print("Parsing Datasheet contents of " + partnum, end='')
-                        bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                        read_datasheet.parse(path, output_dir)
-                        bar(skipped=False)
-
+                        __downloaded_datasheet(partnum, path, output_dir)
 
                 elif os.path.exists(path) and os.path.getsize(path) > 1:
-                    print("Using cached " + path, end='')
-                    bar.text = "Using cached " + path
-                    bar(skipped=True)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
+                    __use_cached_datasheet(partnum, path, output_dir)
 
                 # If search fails, and we don't already have the datasheet, guess datasheet URL and skip the hires image download
                 elif try_download_datasheet(partnum, output_dir) is not False:
-                    print("Downloaded " + path, end='')
-                    bar.text = "Downloaded " + path
-                    bar(skipped=False)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
+                    __downloaded_datasheet(partnum, path, output_dir)
 
                 # Failed to download with search or guess :(
                 else:
                     print("Failed to download datasheet for part " + partnum, end='')
                     bar.text = "Failed to download datasheet for part " + partnum
+                    failed.append(partnum)
                     bar(skipped=True)
                     bar(skipped=True)
 
             # We already have a hi-res image and the datasheet - perfect!
             else:
                 if os.path.exists(path) and os.path.getsize(path) > 1:
-                    print("Using cached " + path, end='')
-                    bar.text = "Using cached " + path
-                    bar(skipped=True)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
                     print("Using cached hi-res part image for " + partnum)
+                    __use_cached_datasheet(partnum, path, output_dir)
+
+    if len(failed) > 0:
+        print("Failed to download:")
+        for partnum in failed:
+            print(partnum)
+        return False # Go to manual review upload page
+    else:
+        return True # All cables downloaded; we are good to go
+
+
+
+
+if __name__ == "__main__":
+    partnums = ["10GXS12", "RST 5L-RKT 5L-949",
+                "10GXS13",
+                "10GXW12",
+                "10GXW13",
+                "2412",
+                "2413",
+                "OSP6AU",
+                "FI4D024P9",
+                "FISD012R9",
+                "FDSD012A9",
+                "FSSL024NG",
+                "FISX006W0",
+                "FISX00103"
+                ]
+    get_multi(partnums)
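get_multi() now reports success: it returns True when every part was processed and False when anything ended up in failed. A hypothetical caller in another module, assuming get_specs.py is importable as get_specs; the manual-review step only exists as a comment in this commit:

    import get_specs

    if not get_specs.get_multi(["10GXS12", "2412"]):
        # Mirrors the "Go to manual review upload page" comment in get_multi().
        print("Some parts failed; collect them for manual review.")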