29 Commits

Author SHA1 Message Date
5ef8795eb4 Merge branch 'main' into dthomas_meilisearch
# Conflicts:
#	.gitignore
#	read_datasheet.py
2024-03-12 16:13:41 -05:00
a63faba2aa Add checks to updating filterable attributes to avoid hitting weird edge cases 2024-03-12 16:08:47 -05:00
dd0ac46662 Improve search & parsing algorithm, easier to source venv files 2024-03-11 23:52:26 -05:00
1a07501d53 Edited test site and added ping 2024-03-09 20:13:43 -06:00
0b97079cfd Created Notes, Edited websocket_test.html 2024-03-08 21:35:21 -06:00
a6d557d1c6 Changes to install files for Ubuntu and added *.png to .gitignore 2024-03-08 20:31:22 -06:00
5a11acfa42 Add basic LED animations 2024-03-08 19:45:45 -06:00
0f2c19e811 Merge remote-tracking branch 'origin/dthomas_meilisearch' into dthomas_meilisearch 2024-03-08 19:13:03 -06:00
b18355fc14 nuke database.py 2024-03-08 19:12:41 -06:00
6921d5c4b4 Added ipywidgets requirement 2024-03-06 11:00:24 -06:00
b861a61f07 Merge branch 'main' of https://git.myitr.org/Jukebox/jukebox-software 2024-03-05 16:30:26 -06:00
925ceb4b5a Commented jupytr notebook 2024-03-05 16:30:19 -06:00
dd2559130d Commented jupytr notebook 2024-03-05 16:27:48 -06:00
1fa7654da5 removed temp file 2024-03-05 16:21:07 -06:00
051cc1d003 Merge branch 'main' of https://git.myitr.org/Jukebox/jukebox-software 2024-03-05 16:20:11 -06:00
e1af00e1db Inverse kinematics complete 2024-03-05 16:20:05 -06:00
aadb6ba24d add search functions to JukeboxSearch 2024-03-01 21:24:37 -06:00
4561b1c1a3 fix error when index does not exist 2024-03-01 20:37:22 -06:00
6edd0b4ef0 fix map datatype 2024-03-01 20:37:02 -06:00
2c242aac29 Merge branch 'main' into dthomas_meilisearch 2024-03-01 19:26:57 -06:00
af6ffe451d Fix get_specs 2024-03-01 19:26:47 -06:00
b585f8cdb7 Merge branch 'main' into dthomas_meilisearch 2024-03-01 19:25:30 -06:00
50bf835d13 Update parsing and stuff 2024-03-01 19:25:01 -06:00
f12d8a8062 add print statement 2024-03-01 19:24:47 -06:00
fc9ff4c8b2 split lists if they contain more than 2 commas 2024-03-01 19:13:28 -06:00
e903150fd4 Add functions for connecting to Meilisearch and adding documents 2024-02-20 10:33:01 -06:00
d0ea696274 reorganize gitignore and add comments 2024-02-20 10:15:56 -06:00
eea8c9f5fa Merge branch 'main' into dthomas_meilisearch 2024-02-20 10:04:33 -06:00
fe5de4e54c Adjust datasheet parsing for meilisearch, add dockerfile 2024-02-17 23:08:21 -06:00
22 changed files with 1817 additions and 276 deletions

13
.gitignore vendored
View File

@ -1,9 +1,18 @@
# python
venv venv
__pycache__ __pycache__
# cable data folder(s)
cables cables
cables-sample.zip
# meilisearch (mainly where I've put the data volume for the container)
meili_data
# IDE things
.vscode .vscode
output.log .idea
# videos
*.webm *.webm
output.mp4 output.mp4
# log files
output.log output.log
cables-sample.zip # images
*.png

11
Dockerfile Normal file
View File

@ -0,0 +1,11 @@
FROM python:latest
RUN apt-get update && apt-get install -y libgl1-mesa-glx ghostscript && apt-get clean && rm -rf /var/lib/apt/lists
COPY . .
#COPY config-server.yml config.yml
RUN pip3 install -r requirements.txt
CMD ["python3", "run.py"]
EXPOSE 5000
EXPOSE 8000
EXPOSE 9000

View File

@ -32,13 +32,13 @@ led:
ledstart: 288 ledstart: 288
ledend: 431 ledend: 431
mode: rgb mode: rgb
- universe: 1 - universe: 4
ip: 192.168.68.130 ip: 192.168.5.40
ledstart: 432 ledstart: 432
ledend: 575 ledend: 575
mode: rgb mode: rgb
- universe: 4 - universe: 1
ip: 192.168.68.131 ip: 192.168.5.4
ledstart: 576 ledstart: 576
ledend: 719 ledend: 719
mode: rgb mode: rgb
@ -159,7 +159,7 @@ led:
size: 24 size: 24
diameter: 63.5 diameter: 63.5
angle: 0 angle: 0
pos: [-65.936, 114.3] pos: [-65.991, 114.3]
- type: circle - type: circle
start: 336 start: 336
size: 24 size: 24
@ -420,6 +420,116 @@ led:
size: 70 size: 70
length: 600 length: 600
angle: 270 # down angle: 270 # down
pos: [300, 300] pos: [375, 300]
global_position_offset: [0,0] # default coordinate spce below as center of arm at 0,0 - adjust if necessary
animation_time: 40
position_map:
- index: 0
pos: [-152.4, 263.965]
- index: 1
pos: [-76.2, 263.965]
- index: 2
pos: [0, 263.965]
- index: 3
pos: [76.2, 263.965]
- index: 4
pos: [152.4, 263.965]
- index: 5
pos: [-190.5, 197.973]
- index: 6
pos: [-114.3, 197.973]
- index: 7
pos: [-38.1, 197.973]
- index: 8
pos: [38.1, 197.973]
- index: 9
pos: [114.3, 197.973]
- index: 10
pos: [190.5, 197.973]
- index: 11
pos: [-228.6, 131.982]
- index: 12
pos: [-152.4, 131.982]
- index: 13
pos: [-76.2, 131.982]
- index: 14
pos: [0, 131.982]
- index: 15
pos: [76.2, 131.982]
- index: 16
pos: [152.4, 131.982]
- index: 17
pos: [228.6, 131.982]
- index: 18
pos: [-266.7, 65.991]
- index: 19
pos: [-190.5, 65.991]
- index: 20
pos: [-114.3, 65.991]
- index: 21
pos: [114.3, 65.991]
- index: 22
pos: [190.5, 65.991]
- index: 23
pos: [266.7, 65.991]
- index: 24
pos: [-304.8, 0]
- index: 25
pos: [-228.6, 0]
- index: 26
pos: [-152.4, 0]
- index: 27
pos: [152.4, 0]
- index: 28
pos: [228.6, 0]
- index: 29
pos: [304.8, 0]
- index: 30
pos: [-266.7, -65.991]
- index: 31
pos: [-190.5, -65.991]
- index: 32
pos: [-114.3, -65.991]
- index: 33
pos: [114.3, -65.991]
- index: 34
pos: [190.5, -65.991]
- index: 35
pos: [266.7, -65.991]
- index: 36
pos: [-228.6, -131.982]
- index: 37
pos: [-152.4, -131.982]
- index: 38
pos: [-76.2, -131.982]
- index: 39
pos: [0, -131.982]
- index: 40
pos: [76.2, -131.982]
- index: 41
pos: [152.4, -131.982]
- index: 42
pos: [228.6, -131.982]
- index: 43
pos: [-190.5, -197.973]
- index: 44
pos: [-114.3, -197.973]
- index: 45
pos: [-38.1, -197.973]
- index: 46
pos: [38.1, -197.973]
- index: 47
pos: [114.3, -197.973]
- index: 48
pos: [190.5, -197.973]
- index: 49
pos: [-152.4, -263.965]
- index: 50
pos: [-76.2, -263.965]
- index: 51
pos: [0, -263.965]
- index: 52
pos: [76.2, -263.965]
- index: 53
pos: [152.4, -263.965]

View File

@ -1,140 +0,0 @@
"""This module contains functionality for interacting with a PostgreSQL database. It will automatically handle error
conditions (i.e. missing columns) without terminating the entire program. Use the :py:class:`DBConnector` class to
handle database interactions, either as a standalone object or in a context manager."""
from __future__ import annotations
import os
import psycopg2
from psycopg2 import DatabaseError, OperationalError
from psycopg2.errors import UndefinedColumn
DB_ADDRESS = os.getenv('DB_ADDRESS', 'localhost')
DB_PORT = os.getenv('DB_PORT', 5432)
DB_USER = os.getenv('DB_USER', 'postgres')
DB_PASSWORD = os.getenv('DB_PASSWORD', '')
DB_NAME = os.getenv('DB_NAME', 'postgres')
DB_TABLE = os.getenv('DB_TABLE', 'cables')
class DBConnector:
"""Context managed database class. Use with statements to automatically open and close the database connection, like
so:
.. code-block:: python
with DBConnector() as db:
db.read()
"""
def _db_start(self):
"""Setup the database connection and cursor."""
try:
self.conn = psycopg2.connect(
f"host={DB_ADDRESS} port={DB_PORT} dbname={DB_NAME} user={DB_USER} password={DB_PASSWORD}")
self.cur = self.conn.cursor()
except OperationalError as e:
raise e
def _db_stop(self):
"""Close the cursor and connection."""
self.cur.close()
self.conn.close()
def __init__(self):
self._db_start()
def __del__(self):
self._db_stop()
def __enter__(self):
self._db_start()
def __exit__(self):
self._db_stop()
def _get_cols(self) -> set[str]:
"""Get the list of columns in the database.
:return: A list of column names."""
query = f"select COLUMN_NAME from information_schema.columns where table_name={DB_TABLE}"
rows = {x["COLUMN_NAME"] for x in self._query(query)}
return rows
def _column_parity(self, columns: list[str] | set[str]) -> set[str]:
"""If the listed columns are not in the database, add them.
:param columns: The columns we expect are in the database.
:return: The list of columns in the database after querying."""
cols = set(columns)
existing = self._get_cols()
needs = cols.difference(existing.intersection(cols))
if len(needs) > 0:
query = f"ALTER TABLE {DB_TABLE} {', '.join([f'ADD COLUMN {c}' for c in needs])}"
self._query(query)
existing = self._get_cols()
return existing
def _query(self, sql) -> list[dict]:
"""Basic function for running queries.
:param sql: SQL query as plaintext.
:return: Results of the query, or an empty list if none."""
result = []
try:
self.cur.execute(sql)
result = self._read_dict()
except DatabaseError as e:
print(f"ERROR {e.pgcode}: {e.pgerror}\n"
f"Caused by query: {sql}")
finally:
return result
def _read_dict(self) -> list[dict]:
"""Read the cursor as a list of dictionaries. psycopg2 defaults to using a list of tuples, so we want to convert
each row into a dictionary before we return it."""
cols = [i.name for i in self.cur.description]
results = []
for row in self.cur:
row_dict = {}
for i in range(0, len(row)):
if row[i]:
row_dict = {**row_dict, cols[i]: row[i]}
results.append(row_dict)
return results
def read(self, **kwargs) -> list[dict]:
"""Read rows from a database that match the specified filters.
:param kwargs: Column constraints; i.e. what value to filter by in what column.
:returns: A list of dictionaries of all matching rows, or an empty list if no match."""
args = []
for kw in kwargs.keys():
args.append(f"{kw} ILIKE {kwargs['kw']}")
query = f"SELECT * FROM {DB_TABLE}"
if len(args) > 0:
query += f" WHERE {' AND '.join(args)}"
return self._query(query)
def write(self, **kwargs) -> dict:
"""Write a row to the database.
:param kwargs: Values to write for each database; specify each column separately!
:returns: The row you just added."""
self._column_parity(set(kwargs.keys()))
values = []
for val in kwargs.keys():
values.append(kwargs[val])
query = f"INSERT INTO {DB_TABLE} ({', '.join(kwargs.keys())}) VALUES ({', '.join(values)})"
self._query(query)
return kwargs
def write_all(self, items: list[dict]) -> list[dict]:
"""Write multiple rows to the database.
:param items: Rows to write, as a list of dictionaries.
:returns: The rows that were added successfully."""
successes = []
for i in items:
res0 = self.write(**i)
if res0:
successes.append(res0)
return successes

View File

@ -5,7 +5,7 @@ import sys
import read_datasheet import read_datasheet
from alive_progress import alive_bar from alive_progress import alive_bar
import requests import requests
#import time import time
import json import json
import subprocess import subprocess
from util import fprint from util import fprint
@ -26,38 +26,123 @@ def check_internet(url='https://belden.com', timeout=5):
def query_search(partnum): def query_search(partnum, source):
"""token_url = "https://www.belden.com/coveo/rest/token?t=" + str(int(time.time())) if source == "Belden":
with requests.get(token_url) as r: token_url = "https://www.belden.com/coveo/rest/token?t=" + str(int(time.time()))
out = json.loads(r.content) with requests.get(token_url) as r:
token = out["token"] out = json.loads(r.content)
search_url = "https://www.belden.com/coveo/rest/search" token = out["token"]
search_data ='{ "q": "' + str(partnum) + '", "sortCriteria": "relevancy", "numberOfResults": "250", "sortCriteria": "@catalogitemwebdisplaypriority ascending", "searchHub": "products-only-search", "pipeline": "Site Search", "maximumAge": "900000", "tab": "products-search", "locale": "en" }' search_url = "https://www.belden.com/coveo/rest/search"
#"aq": "", "cq": "((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\\"Coveo_web_index - rg-nc-prod-sitecore-prod\\")) OR (@source==(\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\",\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\"))", "firstResult": "0", "categoryFacets": "[{\\"field\\":\\"@catalogitemcategories\\",\\"path\\":[],\\"injectionDepth\\":1000,\\"maximumNumberOfValues\\":6,\\"delimitingCharacter\\":\\"|\\"}]", "facetOptions": "{}", "groupBy": "" }'
#fprint(search_data)
fprint(json.loads(search_data))
#search_data = '{ "q": "' + str(partnum) + '" }'
fprint(search_data)
headers = headers = {
'Authorization': f'Bearer {token}',
'Content-Type': 'application/json'
}
with requests.post(search_url, headers=headers, data=search_data) as r:
fprint(r.text)"""
# TODO: Reimplement in python
# Bash script uses some crazy json formatting that I could not figure out
# Despite the fact that I wrote it
# So I'll just leave it, becuase it works.
command = ["./query-search.sh", partnum] # Ridiculous search parameters extracted from website. Do not touch
result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) search_data = r"""{ "q": "{QUERY}", "sortCriteria": "relevancy", "numberOfResults": "250", "sortCriteria": "@catalogitemwebdisplaypriority ascending", "searchHub": "products-only-search", "pipeline": "Site Search", "maximumAge": "900000", "tab": "products-search", "locale": "en", "aq": "(NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B)) ((@syssource==\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\" @catalogitemprimarycategorypublished==true)) ((@catalogitemregionavailable=Global) (@z95xlanguage==en))", "cq": "((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\"Coveo_web_index - rg-nc-prod-sitecore-prod\")) OR (@source==(\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\",\"website_001002_Category_index-rg-nc-prod-sitecore-prod\"))", "firstResult": "0" }, "categoryFacets": "[{\"field\":\"@catalogitemcategories\",\"path\":[],\"injectionDepth\":1000,\"maximumNumberOfValues\":6,\"delimitingCharacter\":\"|\"}]", "facetOptions": "{}", "groupBy": " [{\"field\":\"@contenttype\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[\"Products\"],\"queryOverride\":\"{QUERY}\",\"advancedQueryOverride\":\"(NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B)) ((((((((@z95xpath=3324AF2D58F64C0FB725521052F679D2 @z95xid<>3324AF2D58F64C0FB725521052F679D2) ((@z95xpath=C292F3A37B3A4E6BAB345DF87ADDE516 @z95xid<>C292F3A37B3A4E6BAB345DF87ADDE516) @z95xtemplate==E4EFEB787BDC4B1A908EFC64D56CB2A4)) OR ((@z95xpath=723501A864754FEEB8AE377E4C710271 @z95xid<>723501A864754FEEB8AE377E4C710271) ((@z95xpath=600114EAB0E5407A84AAA9F0985B6575 @z95xid<>600114EAB0E5407A84AAA9F0985B6575) @z95xtemplate==2BE4FD6B3B2C49EBBD9E1F6C92238B05))) OR (@syssource==\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\" 
@catalogitemprimarycategorypublished==true)) OR ((@z95xpath=3324AF2D58F64C0FB725521052F679D2 @z95xid<>3324AF2D58F64C0FB725521052F679D2) @z95xpath<>C292F3A37B3A4E6BAB345DF87ADDE516)) OR @syssource==\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\") NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B))) ((@catalogitemregionavailable=Global) (@z95xlanguage==en) OR (@contenttype=(Blogs,Resources,Other)) (NOT @ez120xcludefromcoveo==1))\",\"constantQueryOverride\":\"((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\\"Coveo_web_index - rg-nc-prod-sitecore-prod\\")) OR (@source==(\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\",\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\"))\"},{\"field\":\"@catalogitembrand\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@catalogitemenvironment\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@catalogitemregionalavailability\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@prez45xtez120xt\",\"maximumNumberOfValues\":5,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@tags\",\"maximumNumberOfValues\":4,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetassettype\",\"maximumNumberOfValues\":3,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetbrand\",\"maximumNumberOfValues\":3,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true
,\"allowedValues\":[]},{\"field\":\"@facetmarket\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetsolution\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetsearchcontentpagetype\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]}]" }"""
if result.returncode != 0: # error search_data = search_data.replace(r"{QUERY}", partnum)
fprint("No results found in search database for " + partnum + ". No hi-res part image available.", result.stderr) #"aq": "", "cq": "((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\\"Coveo_web_index - rg-nc-prod-sitecore-prod\\")) OR (@source==(\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\",\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\"))", "firstResult": "0", "categoryFacets": "[{\\"field\\":\\"@catalogitemcategories\\",\\"path\\":[],\\"injectionDepth\\":1000,\\"maximumNumberOfValues\\":6,\\"delimitingCharacter\\":\\"|\\"}]", "facetOptions": "{}", "groupBy": "" }'
#fprint(search_data)
#fprint(json.loads(search_data))
#search_data = '{ "q": "' + str(partnum) + '" }'
#fprint(search_data)
headers = headers = {
'Authorization': f'Bearer {token}',
'Content-Type': 'application/json'
}
try:
with requests.post(search_url, headers=headers, data=search_data) as r:
a = r.text
a = json.loads(a)
idx = -1
name = ""
for partid in range(len(a["results"])):
name = a["results"][partid]["title"]
if name != partnum:
if name.find(partnum) >= 0:
idx = partid
break
elif partnum.find(name) >= 0:
idx = partid
break
else:
idx = partid
break
if idx < 0:
fprint("Could not find part in API: " + partnum)
return False
fprint("Search result found: result " + str(idx) + ", for ID " + name)
#urlname = a["results"][0]["raw"]["catalogitemurlname"]
img = a["results"][idx]["raw"]["catalogitemimageurl"]
img = img[0:img.index("?")]
uri = a["results"][idx]["raw"]["clickableuri"]
dsid = a["results"][idx]["raw"]["catalogitemdatasheetid"]
brand = a["results"][idx]["raw"]["catalogitembrand"]
desc = a["results"][idx]["raw"]["catalogitemlongdesc"]
shortdesc = a["results"][idx]["raw"]["catalogitemshortdesc"]
a = json.dumps(a["results"][idx], indent=2)
#print(a, urlname, img, uri, dsurl)
out = dict()
out["url"] = "https://www.belden.com/products/" + uri
out["datasheet"] = "https://catalog.belden.com/techdata/EN/" + dsid + "_techdata.pdf"
out["brand"] = brand
out["name"] = shortdesc
out["description"] = desc
out["image"] = "https://www.belden.com" + img
out["partnum"] = name
#print(out)
return out
except:
print("falied to search with API. Falling back to datasheet lookup.")
return False
# Original bash script
# superceded by above
if source == "Belden_shell":
command = ["./query-search.sh", partnum]
result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
if result.returncode != 0: # error
fprint("No results found in search database for " + partnum + ". No hi-res part image available.", result.stderr)
return False
else:
data_out = json.loads(result.stdout)
return data_out
elif source == "Alphawire":
alphaurl = "https://www.alphawire.com//sxa/search/results/?l=en&s={4A774076-6068-460C-9CC6-A2D8E85E407F}&itemid={BF82F58C-EFD9-4D8B-AE3E-097DD12CF7DA}&sig=&autoFireSearch=true&productpartnumber=*" + partnum + "*&v={B22CD56D-AB95-4048-8AA1-5BBDF2F2D17F}&p=10&e=0&o=ProductPartNumber%2CAscending"
r = requests.get(url=alphaurl)
data = r.json()
output = dict()
#print(data)
try:
if data["Count"] > 0:
#print(data["Results"][0]["Url"])
for result in data["Results"]:
if result["Url"].split("/")[-1] == partnum:
#print(partnum)
#print(result["Html"])
try:
imgidx = result["Html"].index("<img src=") + 10
imgidx2 = result["Html"].index("?", imgidx)
output["image"] = result["Html"][imgidx:imgidx2]
if output["image"].index("http") != 0:
output["image"] = ""
print("No cable image found.")
except:
print("No cable image found.")
dsidx = result["Html"].index("<a href=\"/disteAPI/") + 9
dsidx2 = result["Html"].index(partnum, dsidx) + len(partnum)
output["datasheet"] = "https://www.alphawire.com" + result["Html"][dsidx:dsidx2]
output["partnum"] = partnum
#"test".index()
#print(output)
return output
except:
return False
return False return False
else:
data_out = json.loads(result.stdout)
return data_out
def touch(path): def touch(path):
with open(path, 'a'): with open(path, 'a'):
@ -65,7 +150,7 @@ def touch(path):
def get_multi(partnums): def get_multi(partnums, delay=0.25):
with alive_bar(len(partnums) * 2, dual_line=True, calibrate=30, bar="classic2", spinner="classic") as bar: with alive_bar(len(partnums) * 2, dual_line=True, calibrate=30, bar="classic2", spinner="classic") as bar:
def _try_download_datasheet(partnum, output_dir): # Guess datasheet URL def _try_download_datasheet(partnum, output_dir): # Guess datasheet URL
@ -126,7 +211,7 @@ def get_multi(partnums):
sys.exit() sys.exit()
def _download_image(url, output_dir): # Download datasheet with known URL def _download_image(url, output_dir): # Download image with known URL
global bartext global bartext
#fprint(url) #fprint(url)
@ -151,25 +236,32 @@ def get_multi(partnums):
os.remove(partnum + "/datasheet.pdf") os.remove(partnum + "/datasheet.pdf")
sys.exit() sys.exit()
def __use_cached_datasheet(partnum, path, output_dir): def __use_cached_datasheet(partnum, path, output_dir, dstype):
fprint("Using cached datasheet for " + partnum) fprint("Using cached datasheet for " + partnum)
bar.text = "Using cached datasheet for " + partnum bar.text = "Using cached datasheet for " + partnum
bar(skipped=True) bar(skipped=True)
fprint("Parsing Datasheet contents of " + partnum) if not os.path.exists(output_dir + "/parsed"):
bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
read_datasheet.parse(path, output_dir, partnum) fprint("Parsing Datasheet contents of " + partnum)
bar(skipped=False) bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
read_datasheet.parse(path, output_dir, partnum, dstype)
bar(skipped=False)
else:
fprint("Datasheet already parsed for " + partnum)
bar.text = "Datasheet already parsed for " + partnum + ".pdf"
bar(skipped=True)
def __downloaded_datasheet(partnum, path, output_dir): def __downloaded_datasheet(partnum, path, output_dir, dstype):
fprint("Downloaded " + path) fprint("Downloaded " + path)
bar.text = "Downloaded " + path bar.text = "Downloaded " + path
bar(skipped=False) bar(skipped=False)
fprint("Parsing Datasheet contents of " + partnum) fprint("Parsing Datasheet contents of " + partnum)
bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..." bar.text = "Parsing Datasheet contents of " + partnum + ".pdf..."
read_datasheet.parse(path, output_dir, partnum) read_datasheet.parse(path, output_dir, partnum, dstype)
bar(skipped=False) bar(skipped=False)
for partnum in partnums: def run_search(partnum):
output_dir = "cables/" + partnum output_dir = "cables/" + partnum
path = output_dir + "/datasheet.pdf" path = output_dir + "/datasheet.pdf"
bartext = "Downloading files for part " + partnum bartext = "Downloading files for part " + partnum
@ -177,10 +269,16 @@ def get_multi(partnums):
# #
if (not os.path.exists(output_dir + "/found_part_hires")) or not (os.path.exists(path) and os.path.getsize(path) > 1): if (not os.path.exists(output_dir + "/found_part_hires")) or not (os.path.exists(path) and os.path.getsize(path) > 1):
# Use query # Use query
search_result = query_search(partnum.replace(" ", "")) search_result = query_search(partnum, dstype)
# Try to use belden.com search # Try to use belden.com search
if search_result is not False: if search_result is not False:
# Download high resolution part image if available and needed # Download high resolution part image if available and needed
partnum = search_result["partnum"]
output_dir = "cables/" + partnum
path = output_dir + "/datasheet.pdf"
bartext = "Downloading files for part " + partnum
bar.text = bartext
if not os.path.exists(output_dir + "/found_part_hires"): if not os.path.exists(output_dir + "/found_part_hires"):
if _download_image(search_result["image"], output_dir): if _download_image(search_result["image"], output_dir):
fprint("Downloaded hi-res part image for " + partnum) fprint("Downloaded hi-res part image for " + partnum)
@ -190,31 +288,62 @@ def get_multi(partnums):
# Download datasheet from provided URL if needed # Download datasheet from provided URL if needed
if os.path.exists(path) and os.path.getsize(path) > 1: if os.path.exists(path) and os.path.getsize(path) > 1:
__use_cached_datasheet(partnum, path, output_dir) __use_cached_datasheet(partnum, path, output_dir, dstype)
elif _download_datasheet(search_result["datasheet"], output_dir) is not False: elif _download_datasheet(search_result["datasheet"], output_dir) is not False:
__downloaded_datasheet(partnum, path, output_dir) __downloaded_datasheet(partnum, path, output_dir, dstype)
elif os.path.exists(path) and os.path.getsize(path) > 1: elif os.path.exists(path) and os.path.getsize(path) > 1:
__use_cached_datasheet(partnum, path, output_dir) __use_cached_datasheet(partnum, path, output_dir, dstype)
# If search fails, and we don't already have the datasheet, guess datasheet URL and skip the hires image download # If search fails, and we don't already have the datasheet, guess datasheet URL and skip the hires image download
elif _try_download_datasheet(partnum, output_dir) is not False: elif _try_download_datasheet(partnum, output_dir) is not False:
__downloaded_datasheet(partnum, path, output_dir) __downloaded_datasheet(partnum, path, output_dir, dstype)
# Failed to download with search or guess :( # Failed to download with search or guess :(
else: else:
return False
return True
# We already have a hi-res image and the datasheet - perfect!
else:
fprint("Using cached hi-res part image for " + partnum)
__use_cached_datasheet(partnum, path, output_dir, dstype)
return True
for fullpartnum in partnums:
if fullpartnum[0:2] == "BL": # catalog.belden.com entry
partnum = fullpartnum[2:]
dstype = "Belden"
elif fullpartnum[0:2] == "AW":
partnum = fullpartnum[2:]
dstype = "Alphawire"
else:
dstype = "Belden" # guess
partnum = fullpartnum
if not run_search(partnum):
success = False
if len(partnum.split(" ")) > 1:
for name in partnum.split(" "):
fprint("Retrying with alternate name: " + name)
if(run_search(name)):
success = True
break
time.sleep(delay)
if not success:
namestripped = partnum.strip(" ")
fprint("Retrying with alternate name: " + namestripped)
if(run_search(namestripped)):
success = True
time.sleep(delay)
if not success:
fprint("Failed to download datasheet for part " + partnum) fprint("Failed to download datasheet for part " + partnum)
bar.text = "Failed to download datasheet for part " + partnum bar.text = "Failed to download datasheet for part " + partnum
failed.append(partnum) failed.append(partnum)
bar(skipped=True) bar(skipped=True)
bar(skipped=True) bar(skipped=True)
time.sleep(delay)
# We already have a hi-res image and the datasheet - perfect!
else:
fprint("Using cached hi-res part image for " + partnum)
__use_cached_datasheet(partnum, path, output_dir)
if len(failed) > 0: if len(failed) > 0:
fprint("Failed to download:") fprint("Failed to download:")
for partnum in failed: for partnum in failed:
@ -227,21 +356,73 @@ def get_multi(partnums):
if __name__ == "__main__": if __name__ == "__main__":
partnums = ["10GXS12", "RST 5L-RKT 5L-949", # partnums = ["BLFISX012W0", "BL7958A", "BL10GXS12", "BLRST 5L-RKT 5L-949",
"10GXS13", # "BL10GXS13",
"10GXW12", # "BL10GXW12",
"10GXW13", # "BL10GXW13",
"2412", # "BL2412",
"2413", # "BL2413",
"OSP6AU", # "BLOSP6AU",
"FI4D024P9", # "BLFI4D024P9",
"FISD012R9", # "BLFISD012R9",
"FDSD012A9", # "BLFDSD012A9",
"FSSL024NG", # "BLFSSL024NG",
"FISX006W0", # "BLFISX006W0",
"FISX00103", # "BLFISX00103",
"C6D1100007" # "BLC6D1100007"
# ]
partnums = [
# Actual cables in Jukebox
"AW86104CY",
"AW3050",
"AW6714",
"AW1172C",
"AW2211/4",
"BLTF-1LF-006-RS5N",
"BLTF-SD9-006-RI5N",
"BLTT-SLG-024-HTNN",
"BLFISX012W0",
"BLFI4X012W0",
"BLSPE101 006Q",
"BLSPE102 006Q",
"BL7922A 010Q",
"BL7958A 008Q",
"BLIOP6U 010Q",
"BL10GXW13 D15Q",
"BL10GXW53 D15Q",
"BL29501F 010Q",
"BL29512 010Q",
"BL3106A 010Q",
"BL9841 060Q",
"BL3105A 010Q",
"BL3092A 010Q",
"BL8760 060Q",
"BL6300UE 008Q",
"BL6300FE 009Q",
"BLRA500P 006Q",
# Some ones I picked, including some invalid ones
"BL10GXS12",
"BLRST 5L-RKT 5L-949",
"BL10GXS13",
"BL10GXW12",
"BL10GXW13",
"BL2412",
"BL2413",
"BLOSP6AU",
"BLFI4D024P9",
"BLFISD012R9",
"BLFDSD012A9",
"BLFSSL024NG",
"BLFISX006W0",
"BLFISX00103",
"BLC6D1100007"
] ]
get_multi(partnums) #query_search("86104CY", "Alphawire")
get_multi(partnums, 0.25)
#query_search("10GXS13", "Belden")

View File

@ -1,4 +1,5 @@
#!/bin/sh #!/bin/sh
# change this to #!/bin/bash for windows
if ! [ -d "venv" ]; then if ! [ -d "venv" ]; then
./venv-setup.sh ./venv-setup.sh

359
inv_kin_testing.ipynb Normal file

File diff suppressed because one or more lines are too long

View File

@ -22,6 +22,13 @@ leds_size = None
leds_normalized = None leds_normalized = None
controllers = None controllers = None
data = None data = None
exactdata = None
rings = None
ringstatus = None
mode = "Startup"
firstrun = True
changecount = 0
animation_time = 0
start = uptime() start = uptime()
def ping(host): def ping(host):
@ -48,30 +55,50 @@ def map():
global leds_size global leds_size
global leds_normalized global leds_normalized
global controllers global controllers
global rings
global ringstatus
global animation_time
with open('config.yml', 'r') as fileread: with open('config.yml', 'r') as fileread:
#global config #global config
config = yaml.safe_load(fileread) config = yaml.safe_load(fileread)
animation_time = config["animation_time"]
leds = list() leds = list()
leds_size = list() leds_size = list()
controllers = list() controllers = list()
rings = list(range(len(config["position_map"])))
ringstatus = list(range(len(config["position_map"])))
#print(rings)
#fprint(config["led"]["map"]) #fprint(config["led"]["map"])
generate_map = False
map = list()
for shape in config["led"]["map"]: for shape in config["led"]["map"]:
if shape["type"] == "circle": if shape["type"] == "circle":
if generate_map:
map.append((shape["pos"][1],shape["pos"][0]))
#fprint(shape["pos"]) #fprint(shape["pos"])
anglediv = 360.0 / shape["size"] anglediv = 360.0 / shape["size"]
angle = 0 angle = 0
radius = shape["diameter"] / 2 radius = shape["diameter"] / 2
lednum = shape["start"] lednum = shape["start"]
for item in config['position_map']:
# Check if the current item's position matches the target position
#print(item['pos'],(shape["pos"][1],shape["pos"][0]))
if tuple(item['pos']) == (shape["pos"][1],shape["pos"][0]):
rings[item["index"]] = (shape["pos"][1],shape["pos"][0],lednum,lednum+shape["size"]) # rings[index] = x, y, startpos, endpos
ringstatus[item["index"]] = [None, None]
break
if len(leds) < lednum + shape["size"]: if len(leds) < lednum + shape["size"]:
for x in range(lednum + shape["size"] - len(leds)): for x in range(lednum + shape["size"] - len(leds)):
leds.append(None) leds.append(None)
leds_size.append(None) leds_size.append(None)
while angle < 359.999: while angle < 359.999:
tmpangle = angle + shape["angle"] tmpangle = angle + shape["angle"]
x = math.cos(tmpangle * (math.pi / 180.0)) * radius + shape["pos"][0] x = math.cos(tmpangle * (math.pi / 180.0)) * radius + shape["pos"][1] # flip by 90 degress when we changed layout
y = math.sin(tmpangle * (math.pi / 180.0)) * radius + shape["pos"][1] y = math.sin(tmpangle * (math.pi / 180.0)) * radius + shape["pos"][0]
leds[lednum] = (x,y) leds[lednum] = (x,y)
lednum = lednum + 1 lednum = lednum + 1
angle = angle + anglediv angle = angle + anglediv
@ -97,7 +124,23 @@ def map():
dist += distdiv dist += distdiv
lednum = lednum + 1 lednum = lednum + 1
if generate_map:
map = sorted(map, key=lambda x: (-x[1], x[0]))
print(map)
import matplotlib.pyplot as plt
plt.axis('equal')
x, y = zip(*map)
plt.scatter(x, y, s=12)
#plt.plot(x, y, marker='o')
#plt.scatter(*zip(*leds), s=3)
for i, (x_pos, y_pos) in enumerate(map):
plt.text(x_pos, y_pos, str(i), color="red", fontsize=12)
plt.savefig("map2.png", dpi=600, bbox_inches="tight")
data = {"map": [{"index": i, "pos": str(list(pos))} for i, pos in enumerate(map)]}
yaml_str = yaml.dump(data, default_flow_style=False)
print(yaml_str)
print(rings)
flag = 0 flag = 0
for x in leds: for x in leds:
if x is None: if x is None:
@ -148,9 +191,10 @@ def init():
global leds_size global leds_size
global controllers global controllers
global data global data
global exactdata
sender = sacn.sACNsender(fps=config["led"]["fps"], universeDiscovery=False) sender = sacn.sACNsender(fps=config["led"]["fps"], universeDiscovery=False)
sender.start() # start the sending thread sender.start() # start the sending thread
for x in range(len(controllers)): """for x in range(len(controllers)):
print("Waiting for the controller at", controllers[x][2], "to be online...", end="") print("Waiting for the controller at", controllers[x][2], "to be online...", end="")
count = 0 count = 0
while not ping(controllers[x][2]): while not ping(controllers[x][2]):
@ -159,7 +203,7 @@ def init():
fprint(" ERROR: controller still offline after " + str(count) + " seconds, continuing...") fprint(" ERROR: controller still offline after " + str(count) + " seconds, continuing...")
break break
if count < config["led"]["timeout"]: if count < config["led"]["timeout"]:
fprint(" done") fprint(" done")"""
for x in range(len(controllers)): for x in range(len(controllers)):
print("Activating controller", x, "at", controllers[x][2], "with", controllers[x][1]-controllers[x][0], "LEDs.") print("Activating controller", x, "at", controllers[x][2], "with", controllers[x][1]-controllers[x][0], "LEDs.")
sender.activate_output(x+1) # start sending out data sender.activate_output(x+1) # start sending out data
@ -168,22 +212,26 @@ def init():
# initialize global pixel data list # initialize global pixel data list
data = list() data = list()
exactdata = list()
for x in range(len(leds)): for x in range(len(leds)):
if leds_size[x] == 3: if leds_size[x] == 3:
exactdata.append(None)
data.append((20,20,127)) data.append((20,20,127))
elif leds_size[x] == 4: elif leds_size[x] == 4:
exactdata.append(None)
data.append((50,50,255,0)) data.append((50,50,255,0))
else: else:
exactdata.append(None)
data.append((0,0,0)) data.append((0,0,0))
sendall(data) sendall(data)
#time.sleep(50000) #time.sleep(50000)
fprint("Running start-up test sequence...") fprint("Running start-up test sequence...")
for y in range(1): for y in range(1):
for x in range(len(leds)): for x in range(len(leds)):
setpixel(5,5,5,x) setpixel(0,60,144,x)
sendall(data) sendall(data)
#time.sleep(2) #time.sleep(2)
#alloffsmooth() alloffsmooth()
def sendall(datain): def sendall(datain):
# send all LED data to all controllers # send all LED data to all controllers
@ -260,6 +308,209 @@ def setpixelnow(r, g, b, num):
setpixel(r,g,b,num) setpixel(r,g,b,num)
senduniverse(data, num) senduniverse(data, num)
def setmode(stmode, r=0,g=0,b=0):
    """Switch the global animation mode; flag firstrun when it actually changes.

    A stmode of None is a no-op (used by callers that only want to tick the
    current mode). r/g/b are accepted for call-site compatibility but unused.
    """
    global mode
    global firstrun
    if stmode is None:
        return
    if stmode != mode:
        firstrun = True
    mode = stmode
def setring(r,g,b,idx):
    """Paint every LED belonging to ring `idx` with the color (r, g, b)."""
    first, last = rings[idx][2], rings[idx][3]  # start/end pixel indices of this ring
    for n in range(first, last):
        setpixel(r, g, b, n)
    #global data
    #senduniverse(data, ring[2])
def runmodes(ring = -1, speed = 1):
    """Advance the current LED mode's animation by one frame, then push the frame.

    Modes form a small state machine:
    Startup -> Startup2, StartupCheck, GrabA -> GrabB -> idle, GrabC -> idle.

    :param ring: ring index used by the Grab* modes
    :param speed: accepted but unused in this implementation
    """
    global mode
    global firstrun
    global changecount
    fprint("Mode: " + str(mode))
    if mode == "Startup":
        # loading animation. cable check
        if firstrun:
            # initialize the sweep: budget of animation_time*3 frames,
            # and mark every ring as ready with a fresh fade budget
            changecount = animation_time * 3
            firstrun = False
            for x in range(len(ringstatus)):
                ringstatus[x] = [True, animation_time]
        if changecount > 0:
            fprint(changecount)
            # sequential ("ordered") fade of the whole strip toward (0,50,100)
            changecount = fadeorder(0,len(leds), changecount, 0,50,100)
        else:
            setmode("Startup2")
    elif mode == "Startup2":
        if firstrun:
            firstrun = False
        else:
            # ready rings hold (0,50,100); not-ready rings fade toward red
            for x in range(len(ringstatus)):
                if ringstatus[x][0]:
                    setring(0, 50, 100, x)
                else:
                    ringstatus[x][1] = fadeall(rings[x][2],rings[x][3], ringstatus[x][1], 100,0,0) # not ready
    elif mode == "StartupCheck":
        if firstrun:
            firstrun = False
            # reset every ring to not-ready with a fresh fade budget
            for x in range(len(ringstatus)):
                ringstatus[x] = [False, animation_time]
        else:
            # ready rings fade toward (0,50,100); not-ready rings hold red
            for x in range(len(ringstatus)):
                if ringstatus[x][0]:
                    ringstatus[x][1] = fadeall(rings[x][2],rings[x][3], ringstatus[x][1], 0,50,100) # ready
                else:
                    setring(100, 0, 0, x)
    elif mode == "GrabA":
        # fade the target ring to red, then advance to GrabB
        if firstrun:
            firstrun = False
            changecount = animation_time # 100hz
        if changecount > 0:
            changecount = fadeall(rings[ring][2],rings[ring][3], changecount, 100,0,0)
        else:
            setring(100,0,0,ring)
            setmode("GrabB")
    elif mode == "GrabB":
        # sequential fade of the target ring to green, then go idle
        if firstrun:
            firstrun = False
            changecount = animation_time # 100hz
        if changecount > 0:
            changecount = fadeorder(rings[ring][2],rings[ring][3], changecount, 0,100,0)
        else:
            setring(0,100,0,ring)
            setmode("idle")
    elif mode == "GrabC":
        # fade the target ring back to (0,50,100), then go idle
        if firstrun:
            firstrun = False
            changecount = animation_time # 100hz
        if changecount > 0:
            changecount = fadeall(rings[ring][2],rings[ring][3], changecount, 0,50,100)
        else:
            setring(0,50,100,ring)
            setmode("idle")
    elif mode == "idle":
        time.sleep(0)
    sendall(data)  # push the frame to the LED controllers every call
def fadeall(idxa,idxb,sizerem,r,g,b):
    """Fade LEDs idxa..idxb-1 one step toward (r, g, b) over `sizerem` remaining steps.

    Fractional color state is kept in the global `exactdata` so repeated rounding
    cannot stall the fade; the displayed value goes through setpixel().

    :param idxa: first LED index (inclusive)
    :param idxb: last LED index (exclusive)
    :param sizerem: remaining animation steps; <1 means "already done"
    :returns: the decremented step count (0 when finished)
    """
    if sizerem < 1:
        return 0
    global exactdata
    total = 0  # accumulated per-step delta magnitude, used to finish early when nothing changes
    for x in range(idxa,idxb):
        if exactdata[x] is None:
            exactdata[x] = data[x]  # seed fractional state from the currently displayed color
        old = exactdata[x]
        # per-channel step toward the target; accumulate the *delta* magnitude
        dr = (r - old[0])/sizerem
        total += abs(dr)
        dr += old[0]
        dg = (g - old[1])/sizerem
        total += abs(dg)
        dg += old[1]
        db = (b - old[2])/sizerem
        # BUG FIX: previously abs(db) was added AFTER db += old[2], so the accumulator
        # counted the absolute blue value instead of the delta (unlike r and g),
        # which prevented the early-out below from ever triggering.
        total += abs(db)
        db += old[2]
        exactdata[x] = (dr, dg, db)
        setpixel(dr, dg, db, x)
        if sizerem == 1:
            exactdata[x] = None  # fade complete; drop fractional state
    if total == 0 and sizerem > 2:
        sizerem = 2  # nothing changed this step; wrap up quickly
    return sizerem - 1
def fadeorder(idxa,idxb,sizerem,r,g,b):
    """Fade LEDs idxa..idxb-1 toward (r, g, b) sequentially (a moving front),
    rather than all at once like fadeall().

    Each call computes the total remaining color change over the range, divides
    it by `sizerem` to get this frame's budget, and spends that budget on the
    earliest LEDs that still differ from the target — so LEDs reach the target
    in index order. Fractional state lives in the global `exactdata`.

    :param idxa: first LED index (inclusive)
    :param idxb: last LED index (exclusive)
    :param sizerem: remaining animation steps; <1 means "already done"
    :returns: the decremented step count (0 when finished)
    """
    if sizerem < 1:
        return 0
    global exactdata
    # total remaining delta per channel, summed across the whole range
    drs = 0
    dgs = 0
    dbs = 0
    sum = 0
    for x in range(idxa,idxb):
        if exactdata[x] is None:
            exactdata[x] = data[x]  # seed fractional state from the displayed color
        old = exactdata[x]
        dr = (r - old[0])
        dg = (g - old[1])
        db = (b - old[2])
        drs += dr
        dgs += dg
        dbs += db
    # this frame's budget: an even share of the remaining total change
    drs /= sizerem
    dgs /= sizerem
    dbs /= sizerem
    sum += abs(drs) + abs(dgs) + abs(dbs)  # zero when everything already matches the target
    print(drs,dgs,dbs)
    for x in range(idxa,idxb):
        old = exactdata[x]
        new = list(old)
        # spend the budget front-to-back: saturate this LED at the target and
        # carry only the leftover budget on to the next LED
        if drs > 0:
            if old[0] + drs > r:
                new[0] = r
                drs -= r - old[0]
            else:
                new[0] = old[0] + drs
                drs = 0
        if dgs > 0:
            if old[1] + dgs > g:
                new[1] = g
                dgs -= g - old[1]
            else:
                new[1] = old[1] + dgs
                dgs = 0
        if dbs > 0:
            if old[2] + dbs > b:
                new[2] = b
                dbs -= b - old[2]
            else:
                new[2] = old[2] + dbs
                dbs = 0
        # mirror cases for fading downward (negative deltas)
        if drs < 0:
            if old[0] + drs < r:
                new[0] = r
                drs -= r - old[0]
            else:
                new[0] = old[0] + drs
                drs = 0
        if dgs < 0:
            if old[1] + dgs < g:
                new[1] = g
                dgs -= g - old[1]
            else:
                new[1] = old[1] + dgs
                dgs = 0
        if dbs < 0:
            if old[2] + dbs < b:
                new[2] = b
                dbs -= b - old[2]
            else:
                new[2] = old[2] + dbs
                dbs = 0
        # only touch pixels while there is still budget left to spend
        if drs != 0 or dgs != 0 or dbs != 0:
            exactdata[x] = new
            setpixel(new[0],new[1],new[2],x)
        if sizerem == 1:
            exactdata[x] = None  # fade complete; drop fractional state
    if sum == 0 and sizerem > 2:
        sizerem = 2  # already at target; wrap up quickly
    return sizerem - 1
def setpixel(r, g, b, num): def setpixel(r, g, b, num):
global data global data
global leds_size global leds_size
@ -290,7 +541,7 @@ def close():
time.sleep(0.5) time.sleep(0.5)
sender.stop() sender.stop()
def mapimage(image, fps=60): def mapimage(image, fps=90):
global start global start
while uptime() - start < 1/fps: while uptime() - start < 1/fps:
time.sleep(0.00001) time.sleep(0.00001)
@ -328,16 +579,105 @@ def mapimage(image, fps=60):
global data global data
fastsendall(data) fastsendall(data)
def mainloop(stmode, ring = -1, fps = 100, preview = False):
    """Run one frame of the LED state machine, rate-limited to `fps`.

    :param stmode: mode to switch into, or None to keep ticking the current mode
    :param ring: ring index forwarded to the mode animations
    :param fps: frame-rate cap
    :param preview: when True, also render the frame with drawdata()
    """
    global start
    # wait (with tiny sleeps) until the frame interval has elapsed
    while uptime() - start < 1/fps:
        time.sleep(0.00001)
    fprint(1 / (uptime() - start))  # log the effective frame rate
    start = uptime()
    # BUG FIX: was `if mode is not None`, which tested the *current* mode and
    # skipped the very first requested mode change while `mode` was still None.
    # setmode() already ignores a None stmode.
    if stmode is not None:
        setmode(stmode)
    runmodes(ring)
    if preview:
        drawdata()
def drawdata():
    """Render the current LED colors at their physical positions and save map3.png."""
    xs = [pos[0] for pos in leds]
    ys = [pos[1] for pos in leds]
    # matplotlib wants colors in the 0..1 range
    normalized = [(c[0]/255, c[1]/255, c[2]/255) for c in data]
    # Plot the points
    plt.scatter(xs, ys, c=normalized)
    # Optional: add grid, title, and labels
    plt.grid(True)
    plt.title('Colored Points')
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.show()
    plt.savefig("map3.png", dpi=50, bbox_inches="tight")
    plt.clf()
def startup_animation(show):
    """Play the power-on LED sequence: loading sweep, then per-ring status check.

    :param show: when True, preview every frame with drawdata()
    """
    stmode = "Startup"
    mainloop(stmode, preview=show)
    while mode == "Startup":
        mainloop(None, preview=show)
    # mark every ring not-ready; use the real ring count instead of the old
    # hard-coded range(54) so config changes don't break the animation
    for x in range(len(ringstatus)):
        ringstatus[x][0] = False
    mainloop(None, preview=show)
    for x in range(animation_time):
        mainloop(None, preview=show)
    clear_animations()
    stmode = "StartupCheck"
    mainloop(stmode, preview=show)
    clear_animations()
def clear_animations():
    """Drop all fractional fade state so the next animation starts from the displayed colors."""
    for idx in range(len(leds)):
        exactdata[idx] = None
def do_animation(stmode, ring=-1):
    # Start an animation mode and block until it reaches "idle".
    # NOTE(review): relies on the module-level `show` assigned in __main__ — confirm
    mainloop(stmode, ring, preview=show)
    wait_for_animation(ring)
def start_animation(stmode, ring=-1):
    # Kick off an animation mode without waiting for it to finish.
    # NOTE(review): relies on the module-level `show` assigned in __main__ — confirm
    mainloop(stmode, ring, preview=show)
def wait_for_animation(ring=-1):
    # Keep ticking frames until the state machine settles into "idle".
    # NOTE(review): relies on the module-level `show` assigned in __main__ — confirm
    while mode != "idle":
        mainloop(None, ring, preview=show)
if __name__ == "__main__": if __name__ == "__main__":
init() init()
cap = cv2.VideoCapture('output.mp4') import matplotlib.pyplot as plt
"""cap = cv2.VideoCapture('badapple.mp4')
while cap.isOpened(): while cap.isOpened():
ret, frame = cap.read() ret, frame = cap.read()
if not ret: if not ret:
break break
mapimage(frame) mapimage(frame, fps=30)"""
show = True
ring = 1
startup_animation(show)
for x in range(54):
ringstatus[x][0] = True
mainloop(None, preview=show)
for x in range(animation_time):
mainloop(None, preview=show)
do_animation("GrabA", 1)
time.sleep(1) do_animation("GrabA", 5)
start_animation("GrabC", 1)
wait_for_animation(1)
do_animation("GrabC", 5)
close() close()
#sys.exit(0) #sys.exit(0)
# blue : default
# green : target
# yellow : crosshair
# red : missing
# uninitialized : red/purple?

BIN
map.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 381 KiB

After

Width:  |  Height:  |  Size: 372 KiB

BIN
map2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 184 KiB

BIN
map3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

View File

@ -2,6 +2,8 @@
# Parse Belden catalog techdata datasheets # Parse Belden catalog techdata datasheets
import pandas as pd
pd.set_option('future.no_silent_downcasting', True)
from PyPDF2 import PdfReader from PyPDF2 import PdfReader
import camelot import camelot
import numpy as np import numpy as np
@ -9,8 +11,15 @@ from PIL import Image
import io import io
import json import json
from util import fprint from util import fprint
import uuid
from util import run_cmd
import os
def parse(filename, output_dir, partnum): def touch(path):
with open(path, 'a'):
os.utime(path, None)
def parse(filename, output_dir, partnum, dstype):
# Extract table data # Extract table data
@ -21,7 +30,9 @@ def parse(filename, output_dir, partnum):
reader = PdfReader(filename) reader = PdfReader(filename)
page = reader.pages[0] page = reader.pages[0]
table_list = {} table_list = {}
for table in tables: for table in tables:
table.df.infer_objects(copy=False)
table.df.replace('', np.nan, inplace=True) table.df.replace('', np.nan, inplace=True)
table.df.dropna(inplace=True, how="all") table.df.dropna(inplace=True, how="all")
table.df.dropna(inplace=True, axis="columns", how="all") table.df.dropna(inplace=True, axis="columns", how="all")
@ -87,6 +98,7 @@ def parse(filename, output_dir, partnum):
# Table parsing and reordring # Table parsing and reordring
tables = dict() tables = dict()
torename = dict()
previous_table = "" previous_table = ""
for table_name in table_list.keys(): for table_name in table_list.keys():
# determine shape: horizontal or vertical # determine shape: horizontal or vertical
@ -118,7 +130,8 @@ def parse(filename, output_dir, partnum):
for table_name_2 in table_list.keys(): for table_name_2 in table_list.keys():
if table_name_2.find(table.iloc[-1, 0]) >= 0: if table_name_2.find(table.iloc[-1, 0]) >= 0:
# Name taken from table directly above - this table does not have a name # Name taken from table directly above - this table does not have a name
table_list["Specs " + str(len(tables))] = table_list.pop(table_name_2, None) # rename table to arbitrary altername name torename[table_name_2] = "Specs " + str(len(tables))
#table_list["Specs " + str(len(tables))] = table_list[table_name_2] # rename table to arbitrary altername name
break break
if vertical: if vertical:
@ -137,46 +150,117 @@ def parse(filename, output_dir, partnum):
# multi-page table check # multi-page table check
if table_name.isdigit() and len(tables) > 1: if dstype == "Belden":
fprint(table_name) if table_name.isdigit() and len(tables) > 1:
fprint(previous_table) #fprint(table_name)
#fprint(previous_table)
main_key = previous_table
cont_key = table_name main_key = previous_table
fprint(tables) cont_key = table_name
if vertical == False: #fprint(tables)
main_keys = list(tables[main_key].keys()) if vertical == False:
for i, (cont_key, cont_values) in enumerate(tables[cont_key].items()): main_keys = list(tables[main_key].keys())
if i < len(main_keys): for i, (cont_key, cont_values) in enumerate(tables[cont_key].items()):
fprint(tables[main_key][main_keys[i]]) if i < len(main_keys):
tables[main_key][main_keys[i]] = (tables[main_key][main_keys[i]] + (cont_key,) + cont_values) #fprint(tables[main_key][main_keys[i]])
tables[main_key][main_keys[i]] = (tuple(tables[main_key][main_keys[i]]) + (cont_key,) + cont_values)
del tables[table_name]
del tables[table_name]
else:
for key in tables[cont_key].keys(): else:
tables[main_key][key] = tables[cont_key][key] for key in tables[cont_key].keys():
del tables[table_name] tables[main_key][key] = tables[cont_key][key]
del tables[table_name]
previous_table = table_name previous_table = table_name
# remove renamed tables
fprint(tables) for table_name in torename.keys():
tables[torename[table_name]] = tables[table_name]
del tables[table_name]
# remove multi-line values that occasionally squeak through
def replace_newlines_in_dict(d):
for key, value in d.items():
if isinstance(value, str):
# Replace \n with " " if the value is a string
d[key] = value.replace('\n', ' ')
elif isinstance(value, dict):
# Recursively call the function if the value is another dictionary
replace_newlines_in_dict(value)
return d
tables = replace_newlines_in_dict(tables)
# summary # summary
tables["partnum"] = partnum output_table = dict()
with open(output_dir + "/tables.json", 'w') as json_file: output_table["partnum"] = partnum
json.dump(tables, json_file) id = str(uuid.uuid4())
output_table["id"] = id
#output_table["position"] = id
#output_table["brand"] = brand
output_table["fullspecs"] = tables
output_table["searchspecs"] = {"partnum": partnum, **flatten(tables)}
output_table["searchspecs"]["id"] = id
#print(output_table)
run_cmd("rm \"" + output_dir + "\"/*.json") # not reliable!
with open(output_dir + "/" + output_table["searchspecs"]["id"] + ".json", 'w') as json_file:
json.dump(output_table["searchspecs"], json_file)
touch(output_dir + "/parsed")
return output_table
def flatten(tables):
def convert_to_number(s):
try:
# First, try converting to an integer.
return int(s)
except ValueError:
# If that fails, try converting to a float.
try:
return float(s)
except ValueError:
# If it fails again, return the original string.
return s
out = dict()
#print("{")
for table in tables.keys():
for key in tables[table].keys():
if len(key) < 64:
keyname = key
else:
keyname = key[0:64]
return tables fullkeyname = (table + ": " + keyname).replace(".","")
if type(tables[table][key]) is not tuple:
out[fullkeyname] = convert_to_number(tables[table][key])
#print("\"" + keyname + "\":", "\"" + str(out[fullkeyname]) + "\",")
elif len(tables[table][key]) == 1:
out[fullkeyname] = convert_to_number(tables[table][key][0])
#print("\"" + keyname + "\":", "\"" + str(out[fullkeyname]) + "\",")
# if the item has at least two commas in it, split it
if tables[table][key].count(',') > 0:
out[fullkeyname] = list(map(lambda x: x.strip(), tables[table][key].split(",")))
#print("\"" + keyname + "\":", "\"" + str(out[fullkeyname]) + "\",")
# if the item has at least two commas in it, split it
if tables[table][key].count(',') > 0:
out[fullkeyname] = list(map(lambda x: x.strip(), tables[table][key].split(",")))
print("\"" + keyname + "\":", "\"" + str(out[fullkeyname]) + "\",")
#print("}")
return out

View File

@ -12,6 +12,9 @@ selenium
sacn sacn
uptime uptime
websockets websockets
numpy
scipy
ipywidgets
# Development # Development
matplotlib matplotlib

26
run.py
View File

@ -89,6 +89,9 @@ def start_server_socket():
while True: while True:
#print("HI") #print("HI")
# Handeling Server Requests Loop, will run forever
if not from_server_queue.empty(): if not from_server_queue.empty():
client_id, message = from_server_queue.get() client_id, message = from_server_queue.get()
fprint(f"Message from client {client_id}: {message}") fprint(f"Message from client {client_id}: {message}")
@ -113,7 +116,7 @@ def start_server_socket():
case "log": case "log":
fprint("log message") fprint("log message")
if call == "send": if call == "send":
fprint("webapp: " + data) fprint("webapp: " + str(data), sendqueue=to_server_queue)
elif call == "request": elif call == "request":
fprint("") fprint("")
@ -124,6 +127,21 @@ def start_server_socket():
elif call == "request": elif call == "request":
fprint("") fprint("")
case "ping":
fprint("Pong!!!")
# Lucas' notes
# Add a ping pong :) response/handler
# Add a get cable response/handler
# this will tell the robot arm to move
# Call for turning off everything
# TODO Helper for converting Python Dictionaries to JSON
# make function: pythonData --> { { "type": "...", "call": "...", "data": pythonData } }
# to send: to_server_queue.put(("*", "JSON STRING HERE")) # replace * with UUID of client to send to one specific location
case "cable_details": case "cable_details":
fprint("cable_details message") fprint("cable_details message")
if call == "send": if call == "send":
@ -235,7 +253,7 @@ def setup_server(pool):
if camera_ready is False: if camera_ready is False:
fprint("waiting for " + "Camera initilization" + " to complete...", sendqueue=to_server_queue) fprint("waiting for " + "Camera initilization" + " to complete...", sendqueue=to_server_queue)
camera = process_video.qr_reader(config["cameras"]["banner"]["ip"], config["cameras"]["banner"]["port"]) # camera = process_video.qr_reader(config["cameras"]["banner"]["ip"], config["cameras"]["banner"]["port"])
fprint("Camera initialized.", sendqueue=to_server_queue) fprint("Camera initialized.", sendqueue=to_server_queue)
@ -262,8 +280,8 @@ def mainloop_server(pool):
killall() killall()
counter = counter + 1 counter = counter + 1
fprint("Looking for QR code...") # fprint("Looking for QR code...")
print(camera.read_qr(30)) # print(camera.read_qr(30))
def run_loading_app(): def run_loading_app():

117
search.py
View File

@ -0,0 +1,117 @@
"""Interactions with the Meilisearch API for adding and searching cables."""
from meilisearch import Client
from meilisearch.task import TaskInfo
from meilisearch.errors import MeilisearchApiError
import json
DEFAULT_URL = "http://localhost:7700"
DEFAULT_APIKEY = "fluffybunnyrabbit" # I WOULD RECOMMEND SOMETHING MORE SECURE
DEFAULT_INDEX = "cables"
DEFAULT_FILTERABLE_ATTRS = ["partnum", "uuid", "position"] # default filterable attributes
class JukeboxSearch:
    """Class for interacting with the Meilisearch API."""

    def __init__(self,
                 url: str = None,
                 api_key: str = None,
                 index: str = None,
                 filterable_attrs: list = None):
        """Connect to Meilisearch and perform first-run tasks as necessary.

        :param url: Address of the Meilisearch server. Defaults to ``http://localhost:7700`` if unspecified.
        :param api_key: API key used to authenticate with Meilisearch. It is highly recommended to set this as something
          secure if you can access this endpoint publicly, but you can ignore this and set Meilisearch's default API key
          to ``fluffybunnyrabbit``.
        :param index: The name of the index to configure. Defaults to ``cables`` if unspecified.
        :param filterable_attrs: List of all the attributes we want to filter by."""
        # connect to Meilisearch
        url = url or DEFAULT_URL
        api_key = api_key or DEFAULT_APIKEY
        filterable_attrs = filterable_attrs or DEFAULT_FILTERABLE_ATTRS
        self.index = index or DEFAULT_INDEX
        self.client = Client(url, api_key)
        # create the index if it does not exist already
        try:
            self.client.get_index(self.index)
        except MeilisearchApiError:
            self.client.create_index(self.index)
        # make a variable to easily reference the index
        self.idxref = self.client.index(self.index)
        # update filterable attributes if needed
        self.update_filterables(filterable_attrs)

    def add_document(self, document: dict) -> TaskInfo:
        """Add a cable to the Meilisearch index.

        :param document: Dictionary containing all the cable data.
        :returns: A TaskInfo object for the addition of the new document."""
        return self.idxref.add_documents(document)

    def add_documents(self, documents: list):
        """Add a list of cables to the Meilisearch index.

        :param documents: List of dictionaries containing all the cable data.
        :returns: A TaskInfo object for the last document added, or None for an empty list."""
        taskinfo = None
        for doc in documents:
            taskinfo = self.add_document(doc)
        return taskinfo

    def update_filterables(self, filterables: list):
        """Update filterable attributes and wait for the database to fully reindex.
        Skips the update (and the costly reindex) when the requested attributes
        already match what the index has.

        :param filterables: List of all filterable attributes"""
        existing_filterables = self.idxref.get_filterable_attributes()
        # BUG FIX: the old one-way set difference missed newly ADDED attributes
        # (existing subset-of requested produced an empty difference and skipped
        # the update); compare the two sets for equality instead.
        if set(existing_filterables) != set(filterables):
            taskref = self.idxref.update_filterable_attributes(filterables)
            # BUG FIX: wait_for_task expects the task uid, not the index uid
            self.client.wait_for_task(taskref.task_uid)

    def search(self, query: str, filters: str = None):
        """Execute a search query on the Meilisearch index.

        :param query: Search query
        :param filters: A meilisearch compatible filter statement.
        :returns: The search results dict. Actual results are in a list under "hits", but there are other nice values that are useful in the root element."""
        if filters:
            return self.idxref.search(query, {"filter": filters})
        return self.idxref.search(query)

    def _filter_one(self, filter: str):
        """Get the first item to match a filter.

        :param filter: A meilisearch compatible filter statement.
        :returns: A dict containing the first result; if no results found, an empty dict."""
        q = self.search("", filter)
        if q["estimatedTotalHits"] != 0:
            # BUG FIX: was `return ["hits"][0]`, which returned the literal string "hits"
            return q["hits"][0]
        return dict()

    def get_position(self, position: str):
        """Get a part by position.

        :param position: The position to search for."""
        return self._filter_one(f"position = {position}")

    def get_uuid(self, uuid: str):
        """Get a specific UUID.

        :param uuid: The UUID to search for."""
        return self._filter_one(f"uuid = {uuid}")

    def get_partnum(self, partnum: str):
        """Get a specific part number.

        :param partnum: The part number to search for."""
        return self._filter_one(f"partnum = {partnum}")
# entrypoint
if __name__ == "__main__":
jbs = JukeboxSearch()

1
source.fish Normal file
View File

@ -0,0 +1 @@
source venv/bin/activate.fish

1
source.sh Normal file
View File

@ -0,0 +1 @@
source venv/bin/activate

106
test.py Normal file
View File

@ -0,0 +1,106 @@
print("\u001b[37m")
class Ring:
    """Toy model of a 24-LED ring used to experiment with Python dunder methods."""
    def __init__(self) -> None:
        # 24 LED value slots, a numeric id, and a dirty flag that flips once
        # any LED is changed through __setitem__
        self.leds = [0] * 24
        self.id = 0
        self.dirty = False
    def __iter__(self) -> iter:
        # iterate over the LED values
        yield from self.leds
    def __repr__(self) -> str:
        return f"Ring<id={self.id}, led_state={' '.join(list(map(lambda x: str(x+1), self.leds)))}, dirty={self.dirty}>"
    def __add__(self, other):
        # NOTE(review): mutates self in place (extends past 24 LEDs) and returns
        # self rather than a new Ring — unusual semantics for __add__
        self.leds.extend(other)
        return self
    def __bool__(self):
        # a Ring is truthy once any LED has been modified
        return self.dirty
    def __getitem__(self, index):
        return self.leds[index]
    def __setitem__(self, index, value):
        # mark dirty only when the stored value actually changes
        ivalue = self.leds[index]
        if ivalue != value:
            self.dirty = True
        self.leds[index] = value
    def __getattr__(self, name):
        # resolve spelled-out numbers ("twenty two") to LED values; requires the
        # third-party word2num package
        import word2num
        name = int(word2num.word2num(name))
        print(name)
        if 0 <= name < len(self.leds):
            return self.leds[name]
        # NOTE(review): implicitly returns None for out-of-range names — confirm intent
a = Ring()
print(a)
b = Ring()
b.leds[2] = 3
print(a + b)
b.active = True
if b:
print("Bexist")
c = [a, b, b, a, a]
d = list(filter(lambda x: bool(x), c))
print(d)
for i, ring in enumerate(c):
ring[0] = i
print(ring)
print(a, b)
print(f"\u001b[32m{a}")
print(f"\u001b[37ma")
print(getattr(a, "twenty two"))
# eval(f"getattr(a,\"{input()}\")")
# a = r"wow this string is cursed; for example \n"
# SEARCHDATA=r"""{ "q": "{QUERY}", "sortCriteria": "relevancy", "numberOfResults": "250", "sortCriteria": "@catalogitemwebdisplaypriority ascending", "searchHub": "products-only-search", "pipeline": "Site Search", "maximumAge": "900000", "tab": "products-search", "locale": "en", "aq": "(NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B)) ((@syssource==\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\" @catalogitemprimarycategorypublished==true)) ((@catalogitemregionavailable=Global) (@z95xlanguage==en))", "cq": "((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\"Coveo_web_index - rg-nc-prod-sitecore-prod\")) OR (@source==(\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\",\"website_001002_Category_index-rg-nc-prod-sitecore-prod\"))", "firstResult": "0" }, "categoryFacets": "[{\"field\":\"@catalogitemcategories\",\"path\":[],\"injectionDepth\":1000,\"maximumNumberOfValues\":6,\"delimitingCharacter\":\"|\"}]", "facetOptions": "{}", "groupBy": " [{\"field\":\"@contenttype\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[\"Products\"],\"queryOverride\":\"{QUERY}\",\"advancedQueryOverride\":\"(NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B)) ((((((((@z95xpath=3324AF2D58F64C0FB725521052F679D2 @z95xid<>3324AF2D58F64C0FB725521052F679D2) ((@z95xpath=C292F3A37B3A4E6BAB345DF87ADDE516 @z95xid<>C292F3A37B3A4E6BAB345DF87ADDE516) @z95xtemplate==E4EFEB787BDC4B1A908EFC64D56CB2A4)) OR ((@z95xpath=723501A864754FEEB8AE377E4C710271 @z95xid<>723501A864754FEEB8AE377E4C710271) ((@z95xpath=600114EAB0E5407A84AAA9F0985B6575 @z95xid<>600114EAB0E5407A84AAA9F0985B6575) @z95xtemplate==2BE4FD6B3B2C49EBBD9E1F6C92238B05))) OR (@syssource==\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\" @catalogitemprimarycategorypublished==true)) OR ((@z95xpath=3324AF2D58F64C0FB725521052F679D2 
@z95xid<>3324AF2D58F64C0FB725521052F679D2) @z95xpath<>C292F3A37B3A4E6BAB345DF87ADDE516)) OR @syssource==\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\") NOT @z95xtemplate==(ADB6CA4F03EF4F47B9AC9CE2BA53FF97,FE5DD82648C6436DB87A7C4210C7413B))) ((@catalogitemregionavailable=Global) (@z95xlanguage==en) OR (@contenttype=(Blogs,Resources,Other)) (NOT @ez120xcludefromcoveo==1))\",\"constantQueryOverride\":\"((@z95xlanguage==en) (@z95xlatestversion==1) (@source==\\"Coveo_web_index - rg-nc-prod-sitecore-prod\\")) OR (@source==(\\"website_001002_catalog_index-rg-nc-prod-sitecore-prod\\",\\"website_001002_Category_index-rg-nc-prod-sitecore-prod\\"))\"},{\"field\":\"@catalogitembrand\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@catalogitemenvironment\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@catalogitemregionalavailability\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@prez45xtez120xt\",\"maximumNumberOfValues\":5,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@tags\",\"maximumNumberOfValues\":4,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetassettype\",\"maximumNumberOfValues\":3,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetbrand\",\"maximumNumberOfValues\":3,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetmarket\",\"maximumNumberOfValues\":6,\"sortCriteria
\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetsolution\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]},{\"field\":\"@facetsearchcontentpagetype\",\"maximumNumberOfValues\":6,\"sortCriteria\":\"occurrences\",\"injectionDepth\":1000,\"completeFacetWithStandardValues\":true,\"allowedValues\":[]}]" }"""
# QUERY = "AAAAAAAAAAAA"
# b = SEARCHDATA.replace(r"{QUERY}", QUERY)
q = [i * 2 for i in range(10)]
d = {a : b for a,b in enumerate(q)}
print(q)
print(d)
def stalin_sort(a):
    """'Sort' a sequence by replacing every element with the arithmetic mean
    (all elements equal is trivially sorted)."""
    mean = sum(a) / len(a)
    return [mean] * len(a)
def mao_sort(a):
    """'Sort' in place by deleting any element that is followed by a smaller one;
    returns the (now ascending) remainder of the list."""
    i = 0
    while i + 1 < len(a):
        if a[i] > a[i + 1]:
            del a[i]  # drop the offender; stay at i to re-check the new pair
        else:
            i += 1
    return a
print(stalin_sort(list(range(10))))
print(mao_sort([1, 3, 2, 4, 5, 8, 7, 6, 9]))
# i l

View File

@ -1,6 +1,8 @@
import urx import urx
import math3d as m3d import math3d as m3d
from scipy.optimize import fsolve
import math import math
import numpy as np
import time import time
import os import os
import logging import logging
@ -8,6 +10,9 @@ from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper
import sys import sys
from util import fprint from util import fprint
rob = None rob = None
@ -45,7 +50,7 @@ def init(ip):
time.sleep(0.2) time.sleep(0.2)
fprint("UR5 ready.") fprint("UR5 ready.")
def set_pos_abs(x, y, z, xb, yb, zb): def set_pos_abs(x, y, z, xb, yb, zb, threshold=None):
global rob global rob
new_orientation = m3d.Transform() new_orientation = m3d.Transform()
new_orientation.orient.rotate_xb(xb) # Replace rx with the desired rotation around X-axis new_orientation.orient.rotate_xb(xb) # Replace rx with the desired rotation around X-axis
@ -60,7 +65,7 @@ def set_pos_abs(x, y, z, xb, yb, zb):
new_trans.pos.y = y new_trans.pos.y = y
new_trans.pos.z = z new_trans.pos.z = z
#rob.speedj(0.2, 0.5, 99999) #rob.speedj(0.2, 0.5, 99999)
rob.set_pose(new_trans, acc=2, vel=2, command="movej") # apply the new pose rob.set_pose(new_trans, acc=2, vel=2, command="movej", threshold=threshold) # apply the new pose
def set_pos_rel_rot_abs(x, y, z, xb, yb, zb): def set_pos_rel_rot_abs(x, y, z, xb, yb, zb):
global rob global rob
@ -80,21 +85,241 @@ def set_pos_rel_rot_abs(x, y, z, xb, yb, zb):
#rob.speedj(0.2, 0.5, 99999) #rob.speedj(0.2, 0.5, 99999)
rob.set_pose(new_trans, acc=0.1, vel=0.4, command="movej") # apply the new pose rob.set_pose(new_trans, acc=0.1, vel=0.4, command="movej") # apply the new pose
def set_pos_abs_rot_rel(x, y, z, xb, yb, zb):
    """Move the tool to absolute position (x, y, z) with an orientation built
    from rotations xb/yb/zb.

    NOTE(review): despite the "_rot_rel" name, the orientation is constructed on
    a fresh m3d.Transform (identity), i.e. an absolute orientation just like
    set_pos_abs — confirm the intended relative-rotation behavior.
    """
    global rob
    new_orientation = m3d.Transform()
    new_orientation.orient.rotate_xb(xb) # Replace rx with the desired rotation around X-axis
    new_orientation.orient.rotate_yb(yb) # Replace ry with the desired rotation around Y-axis
    new_orientation.orient.rotate_zb(zb) # Replace rz with the desired rotation around Z-axis
    # Get the current pose
    trans = rob.getl()
    # Apply the new orientation while keeping the current position
    new_trans = m3d.Transform(new_orientation.orient, m3d.Vector(trans[0:3]))
    new_trans.pos.x = x
    new_trans.pos.y = y
    new_trans.pos.z = z
    #rob.speedj(0.2, 0.5, 99999)
    rob.set_pose(new_trans, acc=0.1, vel=0.4, command="movej") # apply the new pose
def is_safe_move(start_pose, end_pose, r=0.25):
    """Check whether the straight XY path between two poses stays clear of a
    keep-out circle of radius r centered at the origin (the robot base).

    The path is treated as the infinite line y = m*x + b; it misses the circle
    when b**2 > r**2 * (1 + m**2). For a vertical path the axes are swapped so
    the slope stays finite.

    :param start_pose: sequence whose [0]/[1] entries are the start x/y
    :param end_pose: sequence whose [0]/[1] entries are the end x/y
    :param r: keep-out radius around the base
    :returns: True when the path line does not intersect the keep-out circle
    """
    start_x, start_y = (start_pose[0], start_pose[1])
    end_x, end_y = (end_pose[0], end_pose[1])
    try:
        m = (end_y-start_y)/(end_x-start_x)
        b = start_y - m*start_x
        # print('m = y/x =', m)
        # print('b =', b)
    except ZeroDivisionError:  # BUG FIX: was a bare except; only the vertical-path case should fall through
        m = (end_x-start_x)/(end_y-start_y)
        b = start_x - m*start_y
        # print('m = x/y =', m)
        # print('b =', b)
    return r**2 - b**2 + m**2 * r**2 < 0
def cartesian_to_polar(x, y):
    """Convert a cartesian point (x, y) to polar form.

    Returns:
        (radius, angle): angle in radians in (-pi, pi], via arctan2.
    """
    radius = np.sqrt(x**2 + y**2)
    angle = np.arctan2(y, x)
    return radius, angle
def polar_to_cartesian(r, theta):
    """Convert polar coordinates (r, theta-in-radians) back to cartesian (x, y)."""
    return r * np.cos(theta), r * np.sin(theta)
def move_to_polar(start_pos, end_pos):
    """Move the TCP from start_pos to end_pos along a spiral arc around
    the robot base, interpolating radius, angle and height.

    Args:
        start_pos: pose-like sequence; [0], [1] are x, y and [2] is z.
        end_pos: pose-like sequence with the same layout.

    Returns:
        The list of interpolated rx values, one per waypoint.

    NOTE(review): rx_intermediate is computed (and returned) but the
    waypoints below copy curr_pos[3:] for orientation, so tool rotation
    is not actually interpolated — confirm intent.
    """
    global rob
    # Convert both endpoints to polar coordinates about the base.
    start_r, start_theta = cartesian_to_polar(start_pos[0], start_pos[1])
    end_r, end_theta = cartesian_to_polar(end_pos[0], end_pos[1])
    # Interpolate for xy (spiral arc)
    n_points = 30
    r_intermediate = np.linspace(start_r, end_r, n_points)
    theta_intermediate = np.linspace(start_theta, end_theta, n_points)
    # Interpolate for z (height)
    start_z = start_pos[2]
    end_z = end_pos[2]
    z_intermediate = np.linspace(start_z, end_z, n_points)
    # Interpolate for rz (keep tool rotation fixed relative to robot):
    # step the current rz by the per-waypoint angular increment.
    curr_rot = rob.getl()
    theta_delta = theta_intermediate[1]-theta_intermediate[0]
    rx_intermediate = [curr_rot[5] + theta_delta*i for i in range(n_points)]
    # curr_rot = rob.getj()
    # start_rz = curr_rot[5]
    # rot = end_theta - start_theta
    # end_base_joint = curr_rot[0]-start_theta + rot
    # end_rz = curr_rot[0] + rot
    # # rob.movel([*polar_to_cartesian(end_r, end_theta), *rob.getl()[2:]], acc=2, vel=2)
    # print('start_theta = ', math.degrees(start_theta))
    # print('end_theta = ', math.degrees(curr_rot[0]-start_theta+rot))
    # print('start_rz =', math.degrees(start_rz))
    # print('rot =', math.degrees(rot))
    # print('end_rz =', math.degrees(end_rz))
    # rz_intermediate = np.linspace(start_rz, end_rz, n_points)
    # Convert back to cartesian coordinates; orientation fields are copied
    # from the current pose (the `rx` bound in the zip is unused here).
    curr_pos = rob.getl()
    intermediate_points = [[*polar_to_cartesian(r, theta), z, *curr_pos[3:]]
                           for r, theta, z, rx in zip(r_intermediate,
                                                      theta_intermediate,
                                                      z_intermediate,
                                                      rx_intermediate)]
    # Move robot: blend through all waypoints in one batched linear move.
    rob.movels(intermediate_points, acc=2, vel=2, radius=0.1)
    return rx_intermediate
def move_to_home():
    """Send the arm to its predefined joint-space home position."""
    global rob
    # Joint targets, one per joint. NOTE(review): the original comment said
    # "degrees" but these magnitudes look like radians — confirm.
    home_joints = [0.10421807948612624,
                   -2.206111555015423,
                   1.710679229503537,
                   -1.075834511928354,
                   -1.569301366430687,
                   1.675098295930943]
    # Joint-space move to home.
    rob.movej(home_joints, acc=2, vel=2)
def normalize_degree(theta):
    """Wrap the angle ``theta`` (radians) modulo pi, keeping the sign.

    Non-negative inputs map into [0, pi); negative inputs have pi
    subtracted after the modulo, preserving their negative sign.
    NOTE(review): the original comment claimed a -1.5*pi..1.5*pi range,
    which this arithmetic does not produce — confirm intent.
    """
    wrapped = theta % math.pi
    if theta < 0:
        # Keep negative angles negative after Python's non-negative modulo.
        wrapped -= math.pi
    return wrapped
def get_joints_from_xyz_rel(x, y, z, initial_guess = (math.pi/2, math.pi/2, 0), limbs=(.422864, .359041, .092124)):
    """Inverse kinematics: solve base/shoulder/elbow/wrist angles that put
    the tool at (x, y, z), in the solver's own joint convention.

    Args:
        x, y, z: target tool position.
        initial_guess: starting point (shoulder, elbow, wrist) for fsolve.
        limbs: lengths of the three arm segments.

    Returns:
        (base, shoulder, elbow, wrist), each passed through normalize_degree.
    """
    # Get polar coordinates of x,y pair — the base joint absorbs theta,
    # reducing the problem to a planar 3-link chain in (r, z).
    r, theta = cartesian_to_polar(x, y)
    # Get length of each limb
    l1, l2, l3 = limbs
    # Residual function for the planar chain; fsolve drives all three
    # components to zero (so the third row pins a-b-c to 0).
    def inv_kin_r_z(p):
        a, b, c = p
        # NOTE(review): the z row subtracts the l3 term while the r row
        # adds it — sign convention assumed intentional; confirm.
        return (l1*math.cos(a) + l2*math.cos(a-b) + l3*math.cos(a-b-c) - r, # r
                l1*math.sin(a) + l2*math.sin(a-b) - l3*math.sin(a-b-c) - z, # z
                a-b-c) # wrist angle
    # Normalize angles
    base, shoulder, elbow, wrist = [normalize_degree(deg) for deg in [theta, *fsolve(inv_kin_r_z, initial_guess)]]
    # Return result
    return base, shoulder, elbow, wrist
def get_joints_from_xyz_abs(x, y, z):
    """Compute absolute joint targets for tool position (x, y, z).

    Runs the relative IK solver, then maps each solved angle onto the
    robot's joint conventions with a per-joint sign flip and offset.
    Returns [base, shoulder, elbow, wrist].
    """
    solved = get_joints_from_xyz_rel(x, y, z)
    # Per-joint corrections, in order: base, shoulder, elbow, wrist.
    signs = [1, -1, 1, 1]
    offsets = [0, 0, 0, -math.pi/2]
    # offset + angle * sign for each joint.
    return [offset + angle * sign
            for angle, offset, sign in zip(solved, offsets, signs)]
if __name__ == "__main__": if __name__ == "__main__":
#rob.movej((0, 0, 0, 0, 0, 0), 0.1, 0.2) #rob.movej((0, 0, 0, 0, 0, 0), 0.1, 0.2)
#rob.movel((x, y, z, rx, ry, rz), a, v) #rob.movel((x, y, z, rx, ry, rz), a, v)
init("192.168.1.145") init("192.168.1.145")
fprint("Current tool pose is: ", rob.getl()) print("Current tool pose is: ", rob.getl())
#set_pos_rel_rot_abs(0, 0, -0.2, math.pi, 0, -math.pi) move_to_home()
set_pos_abs(0.3, -0.2, 0.5, math.pi, 0, -math.pi)
set_pos_abs(0, 0.2, 0.6, math.pi, 0, -math.pi) home_pose = [-0.4999999077032916,
set_pos_abs(-0.5, -0.2, 0.4, math.pi, 0, -math.pi) -0.2000072960336574,
#set_pos_rel_rot_abs(0, 0, 0, math.pi, 0, -math.pi) 0.40002172976662786,
fprint("Current tool pose is: ", rob.getl()) 0,
-3.14152741295329,
0]
# time.sleep(.5)
p1 = [0,
0.6,
.4,
0.2226,
3.1126,
0.0510]
p2 = [0.171,
-0.115,
0.2,
0.2226,
3.1126,
0.0510]
curr_pos = rob.getl()
# up/down,
# tool rotation
# tool angle (shouldn't need)
# rob.set_pos(p1[0:3], acc=0.5, vel=0.5)
# set_pos_abs(*home_pose)
angles = get_joints_from_xyz_abs(0.3, 0.3, 0.3)
rob.movej([*angles, *rob.getj()[4:]], acc=1, vel=1)
angles = get_joints_from_xyz_abs(-0.3, -0.3, 0.7)
rob.movej([*angles, *rob.getj()[4:]], acc=1, vel=1)
angles = get_joints_from_xyz_abs(-0.3, 0.4, 0.2)
rob.movej([*angles, *rob.getj()[4:]], acc=1, vel=1)
# set_pos_abs(*p1)
# move = move_to_polar(p1, p2)
# for p in move:
# print(math.degrees(p))
# print("Safe? :", is_safe_move(p1, p2))
# #set_pos_rel_rot_abs(0, 0, -0.2, math.pi, 0, -math.pi)
# set_pos_abs(0.3, -0.2, 0.5, math.pi, 0, -math.pi)
# set_pos_abs(0, 0.2, 0.6, math.pi, 0, -math.pi)
# set_pos_abs(-0.5, -0.2, 0.4, math.pi, 0, -math.pi)
# #set_pos_rel_rot_abs(0, 0, 0, math.pi, 0, -math.pi)
# print("Current tool pose is: ", rob.getl())
# print("getj(): ", rob.getj())
# move_to_home()
rob.stop() rob.stop()
os.kill(os.getpid(), 9) # dirty kill of self os.kill(os.getpid(), 9) # dirty kill of self
sys.exit(0) sys.exit(0)

View File

@ -123,7 +123,7 @@ class Logger(object):
self.terminal = sys.stdout self.terminal = sys.stdout
def write(self, message): def write(self, message):
self.log.write(message) #self.log.write(message)
#close(filename) #close(filename)
#self.log = open(filename, "a") #self.log = open(filename, "a")
try: try:

View File

@ -1,6 +1,7 @@
#!/bin/sh #!/bin/sh
# change this to #!/bin/bash if /bin/sh is not bash (`source` below is a bashism), e.g. under Git Bash on Windows
python -m venv ./venv python3 -m venv ./venv
source ./venv/bin/activate source ./venv/bin/activate
pip install --upgrade pip pip install --upgrade pip

View File

@ -1,15 +1,61 @@
<!DOCTYPE html> <!DOCTYPE html>
<html> <html>
<head> <head>
<title>WebSocket Test</title> <title>WebSocket Test</title>
<style>
footer {
background-color: #333;
color: #fff;
text-align: center;
padding: 10px 0;
position: fixed;
bottom: 0;
width: 100%;
display: flex;
justify-content: space-around;
}
.service-box {
width: 150px;
padding: 10px;
border-radius: 5px;
}
.service-up {
background-color: green;
}
.service-down {
background-color: red;
}
</style>
<script> <script>
document.addEventListener("DOMContentLoaded", function() {
// class Service {
// constructor(name, status) {
// this.name = name
// this.status = status
// }
// }
var updatedTime = new Date();
// Initial status of services
// var serviceA = new Service("234234", 'down');
// var serviceBStatus = 'down';
// var serviceCStatus = 'down';
document.addEventListener("DOMContentLoaded", function () {
// Create WebSocket connection. // Create WebSocket connection.
const socket = new WebSocket('ws://localhost:9000'); const socket = new WebSocket('ws://localhost:9000');
// Connection opened // Connection opened
socket.addEventListener('open', function (event) { socket.addEventListener('open', function (event) {
console.log("Connected to WebSocket server"); console.log("Connected to WebSocket server");
updatedTime = new Date();
}); });
// Listen for messages // Listen for messages
@ -19,6 +65,7 @@
let message = document.createElement('li'); let message = document.createElement('li');
message.textContent = "Received: " + event.data; message.textContent = "Received: " + event.data;
messages.appendChild(message); messages.appendChild(message);
updatedTime = new Date();
}); });
// Send a message to the server // Send a message to the server
@ -28,15 +75,82 @@
console.log('Message sent', message); console.log('Message sent', message);
} }
// Sends a small log message so we can verify the server is reachable
// and able to respond.
function ping() {
    const payload = `{ "call": "send", "type": "log", "data": "This is a ping!!" }`;
    socket.send(payload);
    console.log('Message sent', payload);
}
// Ping every 1.5 seconds.
setInterval(ping, 1500);
// setInterval(() => {
// updateServiceStatus('serviceA', 'down');
// }, 2000);
// Bind send message function to button click // Bind send message function to button click
document.getElementById('sendMessage').addEventListener('click', sendMessage); document.getElementById('sendMessage').addEventListener('click', sendMessage);
}); });
</script> </script>
</head> </head>
<body> <body>
<h2>WebSocket Test</h2> <h2>WebSocket Test</h2>
<input type="text" id="messageInput" placeholder="Type a message..."> <textarea rows="4" cols="50" id="messageInput" placeholder="Type a message..."> </textarea>
<button id="sendMessage">Send Message</button> <button id="sendMessage">Send Message</button>
<p>Example JSON</p>
<p>{ "type": "cable_map", "call": "request", "data": { } }</p>
<p>{ "type": "log", "call": "send", "data": "123123" }</p>
<p>Messages/Logs</p>
<ul id="messages"></ul> <ul id="messages"></ul>
<footer>
<!-- <div id="serviceA" class="service-box"></div>
<div id="serviceB" class="service-box"></div>
<div id="serviceC" class="service-box"></div> -->
<div id="clock"></div>
</footer>
<script>
// // Function to update service status
// function updateServiceStatus(service) {
// // serviceId, status
// var serviceElement = document.getElementById(service.serviceId);
// // updateClock();
// if (service.status === 'up') {
// serviceElement.innerHTML = '<h3>' + service.serviceId + '</h3><p>Running</p>';
// serviceElement.classList.remove('service-down');
// serviceElement.classList.add('service-up');
// } else {
// serviceElement.innerHTML = '<h3>' + service.serviceId + '</h3><p>Down</p>';
// serviceElement.classList.remove('service-up');
// serviceElement.classList.add('service-down');
// }
// }
// // Update service statuses
// updateServiceStatus('node.js (for this page)', serviceAStatus);
// updateServiceStatus('Python WebSocket', serviceBStatus);
// updateServiceStatus('serviceC', serviceCStatus);
// Show how many milliseconds have passed since the last websocket update.
function updateClock() {
    const elapsed = new Date().getTime() - updatedTime.getTime();
    document.getElementById('clock').textContent = 'Milliseconds Since Update: ' + elapsed.toString().padStart(6, '0');
}
// Refresh the counter every 100 ms (the old comment said "every second").
setInterval(updateClock, 100);
</script>
</body> </body>
</html>
</html>