Clean up nested ifs, separate downloader to def:
parent 59f61f7ae3
commit 42425d4681
get_specs.py (126 changed lines)
@@ -10,7 +10,19 @@ import json
 import subprocess
 
 bartext = ""
+failed = []
 
+def check_internet(url='https://belden.com', timeout=5):
+    try:
+        # Make a GET request to the specified URL
+        response = requests.get(url, timeout=timeout)
+
+        # If the request succeeds, return True
+        return True
+    except requests.ConnectionError:
+        # If a connection error occurs, return False
+        return False
+
 def try_download_datasheet(partnum, output_dir): # Guess datasheet URL
     global bartext
 
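For context, a minimal sketch of how the new check_internet() helper might be called before any downloads start (hypothetical caller, not part of this commit):

    # Hypothetical usage; check_internet() is the helper added above in get_specs.py.
    from get_specs import check_internet

    if not check_internet():
        # belden.com is unreachable, so every download below would fail anyway.
        print("No internet connection to belden.com - aborting.")
        raise SystemExit(1)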
@@ -130,23 +142,30 @@ def query_search(partnum):
 def touch(path):
     with open(path, 'a'):
         os.utime(path, None)
 
-if __name__ == "__main__":
-    partnums = ["10GXS12", "RST 5L-RKT 5L-949",
-                "10GXS13",
-                "10GXW12",
-                "10GXW13",
-                "2412",
-                "2413",
-                "OSP6AU",
-                "FI4D024P9",
-                "FISD012R9",
-                "FDSD012A9",
-                "FSSL024NG",
-                "FISX006W0",
-                ]
+def get_multi(partnums):
     with alive_bar(len(partnums) * 2, dual_line=True, calibrate=30, bar="classic2", spinner="classic") as bar:
+
+        def __use_cached_datasheet(partnum, path, output_dir):
+            print("Using cached datasheet for " + partnum, end='')
+            bar.text = "Using cached datasheet for " + partnum
+            bar(skipped=True)
+            print("Parsing Datasheet contents of " + partnum, end='')
+            bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
+            read_datasheet.parse(path, output_dir)
+            bar(skipped=False)
+
+        def __downloaded_datasheet(partnum, path, output_dir):
+            print("Downloaded " + path, end='')
+            bar.text = "Downloaded " + path
+            bar(skipped=False)
+            print("Parsing Datasheet contents of " + partnum, end='')
+            bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
+            read_datasheet.parse(path, output_dir)
+            bar(skipped=False)
+
         for partnum in partnums:
             output_dir = "cables/" + partnum
             path = output_dir + "/datasheet.pdf"
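One detail worth noting in this hunk: __use_cached_datasheet and __downloaded_datasheet are defined inside the with alive_bar(...) block so they can close over bar instead of taking it as a parameter. A minimal, repo-independent sketch of that same closure pattern (assuming the alive-progress package):

    from alive_progress import alive_bar

    def process(items):
        with alive_bar(len(items), dual_line=True) as bar:
            def report(item):
                # `bar` comes from the enclosing with-block, not from an argument
                bar.text = "Handling " + item
                bar()
            for item in items:
                report(item)

    process(["10GXS12", "2412"])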
@@ -163,59 +182,62 @@ if __name__ == "__main__":
                     if download_image(search_result["image"], output_dir):
                         print("Downloaded hi-res part image for " + partnum)
                         touch(output_dir + "/found_part_hires")
+                    else:
+                        print("Using cached hi-res part image for " + partnum)
 
                     # Download datasheet from provided URL if needed
                     if os.path.exists(path) and os.path.getsize(path) > 1:
-                        print("Using cached " + path, end='')
-                        bar.text = "Using cached " + path
-                        bar(skipped=True)
-                        print("Parsing Datasheet contents of " + partnum, end='')
-                        bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                        read_datasheet.parse(path, output_dir)
-                        bar(skipped=False)
+                        __use_cached_datasheet(partnum, path, output_dir)
 
                     elif download_datasheet(search_result["datasheet"], output_dir) is not False:
-                        print("Downloaded " + path, end='')
-                        bar.text = "Downloaded " + path
-                        bar(skipped=False)
-                        print("Parsing Datasheet contents of " + partnum, end='')
-                        bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                        read_datasheet.parse(path, output_dir)
-                        bar(skipped=False)
+                        __downloaded_datasheet(partnum, path, output_dir)
 
                 elif os.path.exists(path) and os.path.getsize(path) > 1:
-                    print("Using cached " + path, end='')
-                    bar.text = "Using cached " + path
-                    bar(skipped=True)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
+                    __use_cached_datasheet(partnum, path, output_dir)
 
                 # If search fails, and we don't already have the datasheet, guess datasheet URL and skip the hires image download
                 elif try_download_datasheet(partnum, output_dir) is not False:
-                    print("Downloaded " + path, end='')
-                    bar.text = "Downloaded " + path
-                    bar(skipped=False)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
+                    __downloaded_datasheet(partnum, path, output_dir)
 
                 # Failed to download with search or guess :(
                 else:
                     print("Failed to download datasheet for part " + partnum, end='')
                     bar.text = "Failed to download datasheet for part " + partnum
+                    failed.append(partnum)
                     bar(skipped=True)
                     bar(skipped=True)
 
             # We already have a hi-res image and the datasheet - perfect!
             else:
-                if os.path.exists(path) and os.path.getsize(path) > 1:
-                    print("Using cached " + path, end='')
-                    bar.text = "Using cached " + path
-                    bar(skipped=True)
-                    print("Parsing Datasheet contents of " + partnum, end='')
-                    bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
-                    read_datasheet.parse(path, output_dir)
-                    bar(skipped=False)
+                print("Using cached hi-res part image for " + partnum)
+                __use_cached_datasheet(partnum, path, output_dir)
+
+    if len(failed) > 0:
+        print("Failed to download:")
+        for partnum in failed:
+            print(partnum)
+        return False # Go to manual review upload page
+    else:
+        return True # All cables downloaded; we are good to go
+
+
+if __name__ == "__main__":
+    partnums = ["10GXS12", "RST 5L-RKT 5L-949",
+                "10GXS13",
+                "10GXW12",
+                "10GXW13",
+                "2412",
+                "2413",
+                "OSP6AU",
+                "FI4D024P9",
+                "FISD012R9",
+                "FDSD012A9",
+                "FSSL024NG",
+                "FISX006W0",
+                "FISX00103"
+                ]
+    get_multi(partnums)
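Since get_multi() now returns True when every part is fetched and False when anything fails, a caller (hypothetical, not part of this diff) could branch on that result:

    # Hypothetical caller; only get_multi() comes from this commit.
    from get_specs import get_multi

    if not get_multi(["10GXS12", "2412"]):
        # get_multi() has already printed the failed part numbers;
        # route the user to whatever manual-review/upload step exists.
        print("Some parts need manual review.")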