Create main runner app with async multithreading

This commit is contained in:
Cole Deck 2024-01-17 16:06:15 -06:00
parent 33671683ea
commit 01526524d4
8 changed files with 263800 additions and 55 deletions

3
config.yml Normal file
View File

@ -0,0 +1,3 @@
arm:
ip: 192.168.1.145

View File

@ -8,6 +8,7 @@ import requests
#import time
import json
import subprocess
from util import fprint
bartext = ""
failed = []
@ -33,16 +34,16 @@ def query_search(partnum):
search_url = "https://www.belden.com/coveo/rest/search"
search_data ='{ "q": "' + str(partnum) + '", "sortCriteria": "relevancy", "numberOfResults": "250", "sortCriteria": "@catalogitemwebdisplaypriority ascending", "searchHub": "products-only-search", "pipeline": "Site Search", "maximumAge": "900000", "tab": "products-search", "locale": "en" }'
#"aq": "", "cq": "((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\\"Coveo_web_index - rg-nc-prod-sitecore-prod\\")) OR (@source==(\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\",\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\"))", "firstResult": "0", "categoryFacets": "[{\\"field\\":\\"@catalogitemcategories\\",\\"path\\":[],\\"injectionDepth\\":1000,\\"maximumNumberOfValues\\":6,\\"delimitingCharacter\\":\\"|\\"}]", "facetOptions": "{}", "groupBy": "" }'
#print(search_data)
print(json.loads(search_data))
#fprint(search_data)
fprint(json.loads(search_data))
#search_data = '{ "q": "' + str(partnum) + '" }'
print(search_data)
fprint(search_data)
headers = headers = {
'Authorization': f'Bearer {token}',
'Content-Type': 'application/json'
}
with requests.post(search_url, headers=headers, data=search_data) as r:
print(r.text)"""
fprint(r.text)"""
# TODO: Reimplement in python
# Bash script uses some crazy json formatting that I could not figure out
@ -52,7 +53,7 @@ def query_search(partnum):
command = ["./query-search.sh", partnum]
result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
if result.returncode != 0: # error
print("No results found in search database for " + partnum + ". No hi-res part image available.", result.stderr)
fprint("No results found in search database for " + partnum + ". No hi-res part image available.", result.stderr)
return False
else:
data_out = json.loads(result.stdout)
@ -72,7 +73,7 @@ def get_multi(partnums):
sanitized_name = partnum.replace(" ", "")
url = "https://catalog.belden.com/techdata/EN/" + sanitized_name + "_techdata.pdf"
#print(url)
#fprint(url)
try:
with requests.get(url, stream=True) as r:
#r.raise_for_status()
@ -89,10 +90,10 @@ def get_multi(partnums):
bartext = bartext + "."
bar.text = bartext
f.write(chunk)
#print("")
#fprint("")
return output_dir + "/datasheet.pdf"
except KeyboardInterrupt:
print("Quitting!")
fprint("Quitting!")
os.remove(output_dir + "/datasheet.pdf")
sys.exit()
@ -100,7 +101,7 @@ def get_multi(partnums):
def _download_datasheet(url, output_dir): # Download datasheet with known URL
global bartext
#print(url)
#fprint(url)
try:
with requests.get(url, stream=True) as r:
#r.raise_for_status()
@ -117,10 +118,10 @@ def get_multi(partnums):
bartext = bartext + "."
bar.text = bartext
f.write(chunk)
#print("")
#fprint("")
return output_dir + "/datasheet.pdf"
except KeyboardInterrupt:
print("Quitting!")
fprint("Quitting!")
os.remove(output_dir + "/datasheet.pdf")
sys.exit()
@ -128,7 +129,7 @@ def get_multi(partnums):
def _download_image(url, output_dir): # Download datasheet with known URL
global bartext
#print(url)
#fprint(url)
try:
with requests.get(url, stream=True) as r:
#r.raise_for_status()
@ -143,27 +144,27 @@ def get_multi(partnums):
bartext = bartext + "."
bar.text = bartext
f.write(chunk)
#print("")
#fprint("")
return output_dir + "/part-hires." + url.split(".")[-1]
except KeyboardInterrupt:
print("Quitting!")
fprint("Quitting!")
os.remove(partnum + "/datasheet.pdf")
sys.exit()
def __use_cached_datasheet(partnum, path, output_dir):
print("Using cached datasheet for " + partnum, end='')
fprint("Using cached datasheet for " + partnum, end='')
bar.text = "Using cached datasheet for " + partnum
bar(skipped=True)
print("Parsing Datasheet contents of " + partnum, end='')
fprint("Parsing Datasheet contents of " + partnum, end='')
bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
read_datasheet.parse(path, output_dir)
bar(skipped=False)
def __downloaded_datasheet(partnum, path, output_dir):
print("Downloaded " + path, end='')
fprint("Downloaded " + path, end='')
bar.text = "Downloaded " + path
bar(skipped=False)
print("Parsing Datasheet contents of " + partnum, end='')
fprint("Parsing Datasheet contents of " + partnum, end='')
bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
read_datasheet.parse(path, output_dir)
bar(skipped=False)
@ -182,10 +183,10 @@ def get_multi(partnums):
# Download high resolution part image if available and needed
if not os.path.exists(output_dir + "/found_part_hires"):
if _download_image(search_result["image"], output_dir):
print("Downloaded hi-res part image for " + partnum)
fprint("Downloaded hi-res part image for " + partnum)
touch(output_dir + "/found_part_hires")
else:
print("Using cached hi-res part image for " + partnum)
fprint("Using cached hi-res part image for " + partnum)
# Download datasheet from provided URL if needed
if os.path.exists(path) and os.path.getsize(path) > 1:
@ -203,7 +204,7 @@ def get_multi(partnums):
# Failed to download with search or guess :(
else:
print("Failed to download datasheet for part " + partnum, end='')
fprint("Failed to download datasheet for part " + partnum, end='')
bar.text = "Failed to download datasheet for part " + partnum
failed.append(partnum)
bar(skipped=True)
@ -211,13 +212,13 @@ def get_multi(partnums):
# We already have a hi-res image and the datasheet - perfect!
else:
print("Using cached hi-res part image for " + partnum)
fprint("Using cached hi-res part image for " + partnum)
__use_cached_datasheet(partnum, path, output_dir)
if len(failed) > 0:
print("Failed to download:")
fprint("Failed to download:")
for partnum in failed:
print(partnum)
fprint(partnum)
return False # Go to manual review upload page
else:
return True # All cables downloaded; we are good to go

263405
output.log Normal file

File diff suppressed because one or more lines are too long

View File

@ -8,13 +8,14 @@ import numpy as np
from PIL import Image
import io
import json
from util import fprint
def parse(filename, output_dir):
# Extract table data
tables = camelot.read_pdf(filename, pages="1-end", flavor='lattice', backend="poppler", split_text=False, line_scale=100, process_background=True, resolution=600, interations=1, layout_kwargs={'detect_vertical': False, 'char_margin': 0.5}, shift_text=['r', 't'])
#print("Total tables extracted:", tables.n)
#fprint("Total tables extracted:", tables.n)
n = 0
pagenum = 0
reader = PdfReader(filename)
@ -27,10 +28,10 @@ def parse(filename, output_dir):
table.df.replace(np.nan, '', inplace=True)
if not table.df.empty:
#print("\nTable " + str(n))
#fprint("\nTable " + str(n))
# Extract table names
table_start = table.cells[0][0].lt[1] # Read top-left cell's top-left coordinate
#print(table_start)
#fprint(table_start)
ymin = table_start
ymax = table_start + 10
if pagenum != table.page - 1:
@ -46,20 +47,20 @@ def parse(filename, output_dir):
text_body = "".join(parts).strip('\n')
if len(text_body) == 0:
text_body = str(n)
#print(text_body)
#fprint(text_body)
table_list[text_body] = table.df
#table.to_html("table" + str(n) + ".html")
#print(table.df)
#fprint(table.df)
#camelot.plot(table, kind='grid').savefig("test" + str(n) + ".png")
n=n+1
#camelot.plot(tables[0], kind='grid').savefig("test.png")
#tables.export(output_dir + '/techdata.json', f='json')
# print(table_list)
# fprint(table_list)
# Extract Basic details - part name & description, image, etc
reader = PdfReader(filename)
@ -68,7 +69,7 @@ def parse(filename, output_dir):
skip = False
for image_file_object in page.images:
if image_file_object.name == "img0.png" and skip == False:
#print(Image.open(io.BytesIO(image_file_object.data)).mode)
#fprint(Image.open(io.BytesIO(image_file_object.data)).mode)
if Image.open(io.BytesIO(image_file_object.data)).mode == "P":
skip = True
continue
@ -137,20 +138,20 @@ def parse(filename, output_dir):
# multi-page table check
if table_name.isdigit() and len(tables) > 1:
print(table_name)
print(previous_table)
fprint(table_name)
fprint(previous_table)
main_key = previous_table
cont_key = table_name
print(tables)
fprint(tables)
if vertical == False:
main_keys = list(tables[main_key].keys())
for i, (cont_key, cont_values) in enumerate(tables[cont_key].items()):
if i < len(main_keys):
print(tables[main_key][main_keys[i]])
fprint(tables[main_key][main_keys[i]])
tables[main_key][main_keys[i]] = (tables[main_key][main_keys[i]] + (cont_key,) + cont_values)
del tables[table_name]
@ -163,7 +164,7 @@ def parse(filename, output_dir):
previous_table = table_name
print(tables)
fprint(tables)
with open(output_dir + "/tables.json", 'w') as json_file:
json.dump(tables, json_file)

View File

@ -5,6 +5,7 @@ pypdf2==2.12.1
alive-progress
requests
git+https://github.com/Byeongdulee/python-urx.git
pyyaml
# Development
matplotlib

147
run.py Executable file
View File

@ -0,0 +1,147 @@
#!/usr/bin/env python3
import get_specs
import traceback
import logging
import yaml
from multiprocessing import Process, Manager, Pool, TimeoutError, active_children, log_to_stderr
from multiprocessing.pool import Pool
import multiprocessing
from time import sleep
from util import fprint
from util import run_cmd
import sys
import ur5_control
import os
import signal
# Runtime state shared between the main loop and the pool callbacks below.
config = None        # parsed config.yml contents; populated in __main__
keeprunning = True   # main-loop flag; never cleared (shutdown goes through killall)
arm_ready = False    # set by arm_start_callback once ur5_control.init finishes
led_ready = False    # set by led_start_callback (LED init currently disabled)
cam_ready = False    # set by cam_start_callback; was missing, causing a NameError in setup()
killme = None        # multiprocessing.Manager Value created in __main__; >0 requests shutdown
#pool = None
def arm_start_callback(_res):
    """Pool callback: flag the UR5 arm subsystem as initialized."""
    global arm_ready
    arm_ready = True
def led_start_callback(_res):
    """Pool callback: flag the LED controller as initialized."""
    global led_ready
    led_ready = True
def cam_start_callback(_res):
    """Pool callback: flag the camera subsystem as initialized."""
    global cam_ready
    cam_ready = True
def wait_for(val, name):
    """Block until *val* is truthy.

    val  -- either a plain value or a zero-argument callable.  Pass a
            callable (e.g. ``lambda: arm_ready``) when waiting on a module
            global: a plain bool is captured *by value* at call time, so
            the loop can never observe a callback flipping the flag.
            Callable support is the new, working path; the legacy plain-value
            path is kept for backward compatibility.
    name -- human-readable label for the progress message.
    """
    if callable(val):
        if not val():
            fprint("waiting for " + name + " to complete...")
            while not val():
                sleep(0.1)
    elif val is False:
        # Legacy behavior preserved: a literal False can never change here,
        # so this loops forever -- callers should migrate to callables.
        fprint("waiting for " + name + " to complete...")
        while val is False:
            sleep(0.1)
def setup(pool):
    """Kick off subsystem initialization on *pool* and block until the UR5
    arm reports ready.  Returns True once startup is complete."""
    global config
    fprint("Starting Jukebox control system...")
    pool.apply_async(ur5_control.init, (config["arm"]["ip"],), callback=arm_start_callback)
    #pool.apply_async(led_control.init, callback=led_start_callback)
    #pool.apply_async(cam_control.init, callback=led_start_callback)
    # BUG FIX: wait_for(arm_ready, ...) passed the flag *by value*, so the
    # wait could never see arm_start_callback flip it -- poll the module
    # global directly instead.
    if not arm_ready:
        fprint("waiting for UR5 initialization to complete...")
        while not arm_ready:
            sleep(0.1)
    # The LED/camera initializers above are commented out, so their ready
    # flags would never be set; waiting on them deadlocked startup (and
    # cam_ready was not defined at module scope at all).  Re-enable these
    # waits together with the initializers.
    #wait_for(led_ready, "LED controller initialization")
    #wait_for(cam_ready, "Camera initialization")
    return True
def mainloop(pool):
    """One pass of the main control loop: honor shutdown requests, then
    advance the iteration counter."""
    global config, counter, killme
    if killme.value > 0:  # another process asked us to shut everything down
        killall()
    counter += 1
class Logger(object):
    """Tee stream: appends everything written to *filename* while echoing to
    the stdout that was current at construction time.

    NOTE(review): duplicate of util.Logger -- consider importing that one
    instead of keeping two copies.
    """
    def __init__(self, filename="output.log"):
        self.log = open(filename, "a")
        self.terminal = sys.stdout

    def write(self, message):
        self.log.write(message)
        #close(filename)
        #self.log = open(filename, "a")
        try:
            self.terminal.write(message)
        except Exception:  # was a bare except; terminal may be gone in a child process
            sleep(0)

    def flush(self):
        # No-op flush so callers expecting a file-like stream keep working.
        print("", end="")
def killall():
    """Kill every live child process, then hard-kill this process itself."""
    for child in active_children():
        child.kill()
    fprint("All child processes killed")
    os.kill(os.getpid(), 9)  # dirty kill of self
def killall_signal(a, b):
    # SIGINT handler registered in __main__; (a, b) are the standard
    # (signum, frame) handler arguments, both unused here.
    killall()
def error(msg, *args):
    """Forward *msg* to the multiprocessing module's shared logger."""
    logger = multiprocessing.get_logger()
    return logger.error(msg, *args)
class LogExceptions(object):
    """Callable wrapper for pool workers: logs the full traceback of any
    exception (multiprocessing otherwise swallows it) before re-raising so
    the pool can still clean up."""

    def __init__(self, callable):
        self.__callable = callable

    def __call__(self, *args, **kwargs):
        try:
            return self.__callable(*args, **kwargs)
        except Exception:
            # Record the traceback via the shared multiprocessing logger,
            # then let the original exception propagate to the pool.
            error(traceback.format_exc())
            raise
class LoggingPool(Pool):
    """Pool variant whose apply_async wraps the target in LogExceptions so
    worker tracebacks are logged instead of silently lost."""

    def apply_async(self, func, args=(), kwds={}, callback=None):
        wrapped = LogExceptions(func)
        return Pool.apply_async(self, wrapped, args, kwds, callback)
if __name__ == "__main__":
    #sys.stdout = Logger(filename="output.log")
    #sys.stderr = Logger(filename="output.log")
    # Mirror multiprocessing's internal log records to stderr for debugging.
    log_to_stderr(logging.DEBUG)
    with open('config.yml', 'r') as fileread:
        #global config
        # Load settings (e.g. config["arm"]["ip"] used by setup()).
        config = yaml.safe_load(fileread)
    with Manager() as manager:
        pool = LoggingPool(processes=10)  # worker pool that logs worker tracebacks
        counter = 0
        # Shared shutdown flag: any process can set it >0; mainloop() then
        # calls killall().
        killme = manager.Value('d', 0)
        signal.signal(signal.SIGINT, killall_signal)  # Ctrl-C tears everything down
        if setup(pool):
            # keeprunning is never cleared; exit happens via killall().
            while(keeprunning):
                mainloop(pool)

View File

@ -7,18 +7,37 @@ import time
import logging
from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper
import sys
from util import fprint
rob = urx.Robot("192.168.1.145")
robotiqgrip = Robotiq_Two_Finger_Gripper(rob)
rob = None
rob.set_tcp((0, 0, 0.15, 0, 0, 0))
rob.set_payload(4, (0, 0, 0.1))
#rob.set_payload(2, (0, 0, 0.1))
time.sleep(0.2)
def init(ip):
global rob
#sys.stdout = Logger()
fprint("Starting UR5 power up...")
# power up robot here
# wait for power up (this function runs async)
# trigger auto-initialize
# wait for auto-initialize
# init urx
fprint("Connecting to arm at " + ip)
rob = urx.Robot(ip)
robotiqgrip = Robotiq_Two_Finger_Gripper(rob)
rob.set_tcp((0, 0, 0.15, 0, 0, 0))
rob.set_payload(2, (0, 0, 0.1))
#rob.set_payload(2, (0, 0, 0.1))
time.sleep(0.2)
fprint("UR5 ready.")
def set_pos_abs(x, y, z, xb, yb, zb):
global rob
new_orientation = m3d.Transform()
new_orientation.orient.rotate_xb(xb) # Replace rx with the desired rotation around X-axis
new_orientation.orient.rotate_yb(yb) # Replace ry with the desired rotation around Y-axis
@ -35,7 +54,7 @@ def set_pos_abs(x, y, z, xb, yb, zb):
rob.set_pose(new_trans, acc=5.0, vel=5.0, command="movej") # apply the new pose
def set_pos_rel_rot_abs(x, y, z, xb, yb, zb):
global rob
new_orientation = m3d.Transform()
new_orientation.orient.rotate_xb(xb) # Replace rx with the desired rotation around X-axis
new_orientation.orient.rotate_yb(yb) # Replace ry with the desired rotation around Y-axis
@ -53,15 +72,20 @@ def set_pos_rel_rot_abs(x, y, z, xb, yb, zb):
rob.set_pose(new_trans, acc=0.1, vel=0.4, command="movej") # apply the new pose
#rob.movej((0, 0, 0, 0, 0, 0), 0.1, 0.2)
#rob.movel((x, y, z, rx, ry, rz), a, v)
print("Current tool pose is: ", rob.getl())
#set_pos_rel_rot_abs(0, 0, -0.2, math.pi, 0, -math.pi)
set_pos_abs(0.3, -0.2, 0.5, math.pi, 0, -math.pi)
set_pos_abs(0, 0.2, 0.6, math.pi, 0, -math.pi)
set_pos_abs(-0.5, -0.2, 0.4, math.pi, 0, -math.pi)
#set_pos_rel_rot_abs(0, 0, 0, math.pi, 0, -math.pi)
print("Current tool pose is: ", rob.getl())
sys.exit(0)
rob.stop()
if __name__ == "__main__":
#rob.movej((0, 0, 0, 0, 0, 0), 0.1, 0.2)
#rob.movel((x, y, z, rx, ry, rz), a, v)
init("192.168.1.145")
fprint("Current tool pose is: ", rob.getl())
#set_pos_rel_rot_abs(0, 0, -0.2, math.pi, 0, -math.pi)
set_pos_abs(0.3, -0.2, 0.5, math.pi, 0, -math.pi)
set_pos_abs(0, 0.2, 0.6, math.pi, 0, -math.pi)
set_pos_abs(-0.5, -0.2, 0.4, math.pi, 0, -math.pi)
#set_pos_rel_rot_abs(0, 0, 0, math.pi, 0, -math.pi)
fprint("Current tool pose is: ", rob.getl())
sys.exit(0)
rob.stop()

163
util.py Executable file
View File

@ -0,0 +1,163 @@
import inspect
import sys
import subprocess
import os
from sys import platform
import time as t
from time import sleep
import uuid
import csv
# Platform flags derived from sys.platform.
win32 = platform == "win32"
linux = platform == "linux" or platform == "linux2"
macos = platform == "darwin"
# Module-level state; settings is presumably replaced with a shared Manager
# dict via setup_child(sets=...) -- TODO confirm against callers.
datafile = ""
logMsg = ""
logCont = ""
settings = None
if win32:
    sysid = hex(uuid.getnode())  # MAC-derived machine identifier
    # Python is running as Administrator (so netstat can get filename, to block, etc),
    # so we use this to see who is actually logged in
    # it's very hacky
    startupinfo = subprocess.STARTUPINFO()
    #if not getattr(sys, "frozen", False):
    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW # hide powershell window
    # WMIC reports the interactively logged-in user even when elevated.
    res = subprocess.check_output(["WMIC", "ComputerSystem", "GET", "UserName"], universal_newlines=True, startupinfo=startupinfo)
    _, username = res.strip().rsplit("\n", 1)
    # NOTE(review): username is "DOMAIN\user", so rsplit("\\", 1) yields
    # (domain, user) -- userid/sysdom look swapped here; confirm intent.
    userid, sysdom = username.rsplit("\\", 1)
if linux or macos:
    sysid = hex(uuid.getnode())  # MAC-derived machine identifier
    #fprint(sysid)
    # First column of `who` output = first logged-in user.
    res = subprocess.check_output(["who",], universal_newlines=True)
    userid = res.strip().split(" ")[0]
    #sysdom = subprocess.check_output(["hostname",], universal_newlines=True).strip()
    #fprint(sysdom)
#fprint("d")
def time():
    """Current Unix time, truncated to a whole number of seconds."""
    now = t.time()
    return int(now)
def kill(pid):
    """Send SIGKILL (signal 9) to *pid*, logging the attempt and outcome.

    PIDs <= 4 are skipped so idle/system processes are never touched.
    Failures are reported via fprint rather than raised.
    """
    setup_child()
    try:
        if pid > 4:
            fprint("Killing PID " + str(pid), settings)
            os.kill(int(pid), 9)
            fprint("Signal 9 sent to PID " + str(pid), settings)
    except Exception:  # was a bare except; still best-effort, but no longer
        # swallows KeyboardInterrupt/SystemExit
        fprint("Unable to kill " + str(pid), settings)
def fprint(msg, settings = None):
    """Print *msg* prefixed with '[module:function]:' of the caller.

    If *settings* (a shared Manager dict) is provided, the formatted line is
    also appended to its "logMsg" list so a parent process can collect child
    output.  Any failure falls back to progressively simpler prefixes so the
    message is never lost.
    """
    #if not getattr(sys, "frozen", False):
    setup_child()
    try:
        frm = inspect.stack()[1]
        mod = inspect.getmodule(frm[0])
        logMsg = '[' + mod.__name__ + ":" + frm.function + ']:' + str(msg)
        print(logMsg)
        if (settings is not None):
            # Manager dict proxies don't see in-place list mutation:
            # copy out, append, reassign.
            tmpList = settings["logMsg"]
            tmpList.append(logMsg)
            settings["logMsg"] = tmpList
    except Exception as e:
        # Frame introspection can fail (e.g. frozen builds, missing module).
        try:
            print('[????:' + frm.function + ']:', str(msg))
            print('[util:fprint]: ' + str(e))
        except Exception:  # was a bare except; frm may be unbound if
            # inspect.stack() itself failed above
            print('[????]:', str(msg))
def find_data_file(filename):
    """Resolve *filename* relative to the application directory: the
    executable's directory when frozen (e.g. cx_Freeze), otherwise the
    directory containing this source file."""
    if getattr(sys, "frozen", False):
        base = os.path.dirname(sys.executable)
    else:
        base = os.path.dirname(__file__)
    return os.path.join(base, filename)
def run_cmd(cmd):
    """Run *cmd* in the platform's shell (PowerShell on Windows, sh on
    Linux/macOS) and return the CompletedProcess with captured output."""
    if win32:
        startupinfo = subprocess.STARTUPINFO()
        # Keep the spawned PowerShell window hidden.
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        fprint("running PS command: " + cmd, settings)
        completed = subprocess.run(["powershell", "-Command", cmd], capture_output=True, startupinfo=startupinfo)
        fprint("ran PS command successfully", settings)
        return completed
    if linux or macos:
        fprint("running sh command: " + cmd, settings)
        completed = subprocess.run(["sh", "-c", cmd], capture_output=True)
        fprint("ran sh command successfully", settings)
        return completed
def setup_child(sets=None):
    """Prepare a worker process: tee stdout/stderr into output.log and adopt
    the shared settings dict if one is passed."""
    # BUG FIX: without this, `settings = sets` created a useless local and
    # the module-level settings was never updated.
    global settings
    if not getattr(sys, "frozen", False):
        sys.stdout = Logger(filename=find_data_file("output.log"))
        sys.stderr = Logger(filename=find_data_file("output.log"))
    if sets is not None:
        settings = sets
class Logger(object):
    """Tee stream: appends everything written to *filename* while echoing to
    the stdout that was current at construction time.  Installed over
    sys.stdout/sys.stderr by setup_child()."""
    def __init__(self, filename="output.log"):
        self.log = open(filename, "a")
        self.terminal = sys.stdout

    def write(self, message):
        self.log.write(message)
        #close(filename)
        #self.log = open(filename, "a")
        try:
            self.terminal.write(message)
        except Exception:  # was a bare except; terminal may be gone in a child process
            sleep(0)

    def flush(self):
        # No-op flush so callers expecting a file-like stream keep working.
        print("", end="")
def write_stats(stats):
    """Overwrite stats.csv with the fixed header row followed by *stats*."""
    fprint("Writing stats", settings)
    # NOTE: header spelling ("recieved") kept byte-for-byte for compatibility
    # with existing stats.csv files and read_stats().
    rows = [
        ["connections blocked", "connections allowed", "data uploaded", "data recieved", "block ratio"],
        stats,
    ]
    with open(find_data_file("stats.csv"), "w", newline="") as f:
        csv.writer(f).writerows(rows)
    fprint("Done writing stats", settings)
def read_stats():
    """Read the last data row of stats.csv (written by write_stats).

    Returns a list whose leading entries are ints (counters) and whose last
    entry is a float (the block ratio).  Returns an empty list when the file
    is empty or contains only the header row (previously an IndexError).
    """
    with open(find_data_file("stats.csv"), newline='') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',', quotechar='|')
        header = True
        fprint(csvreader, settings)
        data = list()
        for line in csvreader:
            fprint(line, settings)
            if header:
                header = False  # skip the column-name row
                continue
            data = line  # keep only the last data row
        if not data:
            # BUG FIX: empty/header-only file used to crash on data[-1] below.
            return data
        for idx in range(len(data) - 1):
            data[idx] = int(data[idx])
        data[len(data) - 1] = float(data[len(data) - 1])
        return data