diff --git a/exporter/SynthesisFusionAddin/README.md b/exporter/SynthesisFusionAddin/README.md index 8f40acf4cd..4fc4933373 100644 --- a/exporter/SynthesisFusionAddin/README.md +++ b/exporter/SynthesisFusionAddin/README.md @@ -1,7 +1,9 @@ # Synthesis Exporter + This is a Addin for Autodesk® Fusion™ that will export a [Mirabuf](https://github.com/HiceS/mirabuf) usable by the Synthesis simulator. ## Features + - [x] Materials - [x] Apperances - [x] Instances @@ -30,16 +32,17 @@ We use `VSCode` Primarily, download it to interact with our code or use your own --- ### How to Build + Run + 1. See root [`README`](/README.md) on how to run `init` script 2. Open `Autodesk Fusion` 3. Select `UTILITIES` from the top bar 4. Click `ADD-INS` Button 5. Click `Add-Ins` tab at the top of Scripts and Add-Ins dialog -6. Press + Button under **My Add-Ins** +6. Press + Button under **My Add-Ins** 7. Navigate to the containing folder for this Addin and click open at bottom - _clone-directory_/synthesis/exporters/SynthesisFusionAddin 8. Synthesis should be an option - select it and click run at the bottom of the dialog 9. 
There should now be a button that says Synthesis in your utilities menu - - If there is no button there may be a problem - see below for [checking log file](#debug-non-start) + - If there is no button there may be a problem - see below for [checking log file](#debug-non-start) --- @@ -50,8 +53,8 @@ We use `VSCode` Primarily, download it to interact with our code or use your own Most of the runtime for the addin is saved under the `logs` directory in this folder - Open `logs/synthesis.log` - - If nothing appears something went very wrong (make a issue on this github) - - If something appears and you cannot solve it feel free to make an issue anyway and include the file + - If nothing appears something went very wrong (make an issue on this GitHub) + - If something appears and you cannot solve it feel free to make an issue anyway and include the file #### General Debugging @@ -59,12 +62,12 @@ Most of the runtime for the addin is saved under the `logs` directory in this fo 2. Select `UTILITIES` from the top bar 3. Click `ADD-INS` Button 4. Click `Add-Ins` tab at the top of Scripts and Add-Ins dialog -5. Press + Button under **My Add-Ins** +5. Press + Button under **My Add-Ins** 6. Navigate to the containing folder for this Addin and click open at bottom - _clone-directory_/synthesis/exporters/SynthesisFusionAddin 7. Synthesis should be an option - select it and click `Debug` at the bottom of the dialog - - This is in a dropdown with the Run Button + - This is in a dropdown with the Run Button 8. This should open VSCode - Now run with `FN+5` - - Now you may add break points or debug at will + - Now you may add break points or debug at will --- @@ -84,4 +87,44 @@ We format using a Python formatter called `black` [![Code style: black](https:// - use `isort .` followed by `black .` to format all relevant exporter python files. - or, alternatively, run `python ./tools/format.py` to do this for you!
-__Note: black will always ignore files in the proto/proto_out folder since google formats those__ +**Note: black will always ignore files in the proto/proto_out folder since google formats those** + +### Docstring standard + +This standard is inconsistently applied, and that's ok + +```python +def foo(bar: fizz="flower") -> Result[walrus, None]: + """ + Turns a fizz into a walrus + + Parameters: + bar - The fizz to be transformed (default = "flower") ; fizz standards are subject to change, old fizzes may no longer be valid + + Returns: + Success - The new walrus + Failure - None if the summoning fails ; the cause of failure will be printed, not returned + + Notes: + - Only works as expected if the bar arg isn't a palindrome or an anagram of coffee. Otherwise unexpected (but still valid) walruses may be returned + - Please do not name your fizz "rizz" either, it hurts the walrus's feelings + + TODO: Consult witch about inconsistent alchemical methods + """ + # More alchemical fizz -> walrus code + some_walrus = bar + "_coffee" + return some_walrus + +``` + +Note that not this much detail is necessary when writing function documentation; notes, defaults, and a differentiation between success and failure aren't always necessary. + +#### Where to list potential causes of failure?
+ +It depends on how many you can list + +- 1: In the failure return case +- 2-3: In the notes section +- 4+: In a dedicated "potential causes of failure section" between the "returns" and "notes" sections + +Additionally, printing the error instead of returning it is bad practice diff --git a/exporter/SynthesisFusionAddin/Synthesis.py b/exporter/SynthesisFusionAddin/Synthesis.py index 37403d9f82..e3c86f0086 100644 --- a/exporter/SynthesisFusionAddin/Synthesis.py +++ b/exporter/SynthesisFusionAddin/Synthesis.py @@ -1,3 +1,5 @@ +# DO NOT CHANGE ORDER, OR ADD IMPORTS BEFORE UNTIL END COMMENT + import logging import os import traceback @@ -5,8 +7,6 @@ import adsk.core -from .src.APS import APS -from .src.configure import setAnalytics, unload_config from .src.general_imports import APP_NAME, DESCRIPTION, INTERNAL_ID, gm, root_logger from .src.Types.OString import OString from .src.UI import ( @@ -20,6 +20,13 @@ ) from .src.UI.Toolbar import Toolbar +# END OF RESTRICTION + +# Transition: AARD-1721 +# Should attempt to fix this ordering scheme within AARD-1741 +from .src.APS import APS # isort:skip +from .src.configure import setAnalytics, unload_config # isort:skip + def run(_): """## Entry point to application from Fusion. 
@@ -136,7 +143,7 @@ def register_ui() -> None: work_panel, Helper.check_solid_open, ShowAPSAuthCommand.ShowAPSAuthCommandCreatedHandler, - description=f"APS TEST", + description=f"APS", command=True, ) diff --git a/exporter/SynthesisFusionAddin/proto/deps.py b/exporter/SynthesisFusionAddin/proto/deps.py index ea6da5a406..2744952d90 100644 --- a/exporter/SynthesisFusionAddin/proto/deps.py +++ b/exporter/SynthesisFusionAddin/proto/deps.py @@ -6,7 +6,7 @@ import adsk.core import adsk.fusion -from src.general_imports import INTERNAL_ID +from src.strings import INTERNAL_ID system = platform.system() @@ -167,13 +167,24 @@ def _checkDeps() -> bool: return False -try: - import logging.handlers +""" +Checks for, and installs if need be, the dependencies needed by the Synthesis Exporter. Will error if it cannot install the dependencies +correctly. This should crash the exporter, since most of the exporter needs these dependencies to function in +the first place. +""" - import google.protobuf - import pkg_resources - from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 -except ImportError or ModuleNotFoundError: - installCross(["protobuf==4.23.3"]) - from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 +def installDependencies(): + try: + import logging.handlers + + import google.protobuf + import pkg_resources + from requests import get, post + + from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 + except ImportError or ModuleNotFoundError: + installCross(["protobuf==4.23.3", "result==0.17.0"]) + from requests import get, post + + from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 diff --git a/exporter/SynthesisFusionAddin/src/APS/APS.py b/exporter/SynthesisFusionAddin/src/APS/APS.py index e31964e561..22543438e1 100644 --- a/exporter/SynthesisFusionAddin/src/APS/APS.py +++ b/exporter/SynthesisFusionAddin/src/APS/APS.py @@ -7,15 +7,11 @@ import urllib.parse import urllib.request from dataclasses 
import dataclass +from typing import Any -from ..general_imports import ( - APP_NAME, - DESCRIPTION, - INTERNAL_ID, - gm, - my_addin_path, - root_logger, -) +import requests + +from ..general_imports import INTERNAL_ID, gm, my_addin_path CLIENT_ID = "GCxaewcLjsYlK8ud7Ka9AKf9dPwMR3e4GlybyfhAK2zvl3tU" auth_path = os.path.abspath(os.path.join(my_addin_path, "..", ".aps_auth")) @@ -59,53 +55,51 @@ def _res_json(res): def getCodeChallenge() -> str | None: - endpoint = "http://localhost:80/api/aps/challenge/" + endpoint = "https://synthesis.autodesk.com/api/aps/challenge/" res = urllib.request.urlopen(endpoint) data = _res_json(res) return data["challenge"] -def getAuth() -> APSAuth: +def getAuth() -> APSAuth | None: global APS_AUTH if APS_AUTH is not None: return APS_AUTH try: + curr_time = time.time() with open(auth_path, "rb") as f: - p = pickle.load(f) - APS_AUTH = APSAuth( - access_token=p["access_token"], - refresh_token=p["refresh_token"], - expires_in=p["expires_in"], - expires_at=int(p["expires_in"] * 1000), - token_type=p["token_type"], - ) - except: - raise Exception("Need to sign in!") + p: APSAuth = pickle.load(f) + logging.getLogger(f"{INTERNAL_ID}").info(msg=f"{json.dumps(p.__dict__)}") + APS_AUTH = p + except Exception as arg: + gm.ui.messageBox(f"ERROR:\n{arg}", "Please Sign In") + return None curr_time = int(time.time() * 1000) if curr_time >= APS_AUTH.expires_at: refreshAuthToken() if APS_USER_INFO is None: - loadUserInfo() + _ = loadUserInfo() return APS_AUTH def convertAuthToken(code: str): global APS_AUTH - authUrl = f'http://localhost:80/api/aps/code/?code={code}&redirect_uri={urllib.parse.quote_plus("http://localhost:80/api/aps/exporter/")}' + authUrl = f'https://synthesis.autodesk.com/api/aps/code/?code={code}&redirect_uri={urllib.parse.quote_plus("https://synthesis.autodesk.com/api/aps/exporter/")}' res = urllib.request.urlopen(authUrl) data = _res_json(res)["response"] + curr_time = time.time() APS_AUTH = APSAuth( 
access_token=data["access_token"], refresh_token=data["refresh_token"], expires_in=data["expires_in"], - expires_at=int(data["expires_in"] * 1000), + expires_at=int(curr_time + data["expires_in"] * 1000), token_type=data["token_type"], ) with open(auth_path, "wb") as f: - pickle.dump(data, f) + pickle.dump(APS_AUTH, f) f.close() - loadUserInfo() + _ = loadUserInfo() def removeAuth(): @@ -124,7 +118,7 @@ def refreshAuthToken(): "client_id": CLIENT_ID, "grant_type": "refresh_token", "refresh_token": APS_AUTH.refresh_token, - "scope": "data:read", + "scope": "data:create data:write data:search data:read", } ).encode("utf-8") req = urllib.request.Request("https://developer.api.autodesk.com/authentication/v2/token", data=body) @@ -133,13 +127,17 @@ def refreshAuthToken(): try: res = urllib.request.urlopen(req) data = _res_json(res) + curr_time = time.time() APS_AUTH = APSAuth( access_token=data["access_token"], refresh_token=data["refresh_token"], expires_in=data["expires_in"], - expires_at=int(data["expires_in"] * 1000), + expires_at=int(curr_time + data["expires_in"] * 1000), token_type=data["token_type"], ) + with open(auth_path, "wb") as f: + pickle.dump(APS_AUTH, f) + f.close() except urllib.request.HTTPError as e: removeAuth() logging.getLogger(f"{INTERNAL_ID}").error(f"Refresh Error:\n{e.code} - {e.reason}") @@ -182,3 +180,536 @@ def getUserInfo() -> APSUserInfo | None: if APS_USER_INFO is not None: return APS_USER_INFO return loadUserInfo() + + +def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_display_name: str) -> str | None: + """ + creates a folder on an APS project + + params: + auth - auth token + project - project blueprint; might be changed to just the project id + folder - the blueprint for the new folder + + returns: + success - the href of the new folder ; might be changed to the id in the future + failure - none if the API request fails ; the failure text will be printed + """ + headers = {"Authorization": f"Bearer {auth}", 
"Content-Type": "application/vnd.api+json"} + data: dict[str, Any] = { + "jsonapi": {"version": "1.0"}, + "data": { + "type": "folders", + "attributes": { + "name": folder_display_name, + "extension": {"type": "folders:autodesk.core:Folder", "version": "1.0"}, + }, + "relationships": {"parent": {"data": {"type": "folders", "id": f"{parent_folder_id}"}}}, + }, + } + + res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders", headers=headers, json=data + ) + if not res.ok: + gm.ui.messageBox(f"Failed to create new folder: {res.text}", "ERROR") + return None + json: dict[str, Any] = res.json() + id: str = json["data"]["id"] + return id + + +def file_path_to_file_name(file_path: str) -> str: + return file_path.split("/").pop() + + +def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_contents: str) -> str | None: + """ + uploads mirabuf file to a specific folder in an APS project + the folder and project must be created and valid + if the file has already been created, it will use the APS versioning API to upload it as a new file version + + parameters: + project - the project reference object, used for it's id ; may be changed to project_id in the future + folder - the folder reference object, used for it's id ; may be changed to folder_id in the future + file_path - the path to the file on your machine, to be uploaded to APS + + returns: + success - if the file already exists, the new version id, otherwise, None + failure - none ; the cause of the failure will be printed + + potential causes of failure: + - invalid auth + - incorrectly formatted requests + - API update + - API down + + notes: + - this function is janky as hell, it should bubble errors up but I'm super lazy + - check appropriate called function ~~if~~ when this function fails + + todo: Change so a folder is not needed, and the entire project is checked for files + """ + + # data:create + global APS_AUTH + if APS_AUTH is None: + 
gm.ui.messageBox("You must login to upload designs to APS", "USER ERROR") + auth = APS_AUTH.access_token + # Get token from APS API later + + new_folder_id = get_item_id(auth, project_id, folder_id, "MirabufDir", "folders") + if new_folder_id is None: + folder_id = create_folder(auth, project_id, folder_id, "MirabufDir") + else: + folder_id = new_folder_id + (lineage_id, file_id, file_version) = get_file_id(auth, project_id, folder_id, file_name) + + """ + Create APS Storage Location + """ + object_id = create_storage_location(auth, project_id, folder_id, file_name) + if object_id is None: + gm.ui.messageBox("UPLOAD ERROR", "Object id is none; check create storage location") + return None + (prefix, object_key) = str(object_id).split("/", 1) + bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod + + """ + Create Signed URL For APS Upload + """ + generate_signed_url_result = generate_signed_url(auth, bucket_key, object_key) + if generate_signed_url_result is None: + return None + + (upload_key, signed_url) = generate_signed_url_result + if upload_file(signed_url, file_contents) is None: + return None + + """ + Finish Upload and Initialize File Version + """ + if complete_upload(auth, upload_key, object_key, bucket_key) is None: + return None + if file_id != "": + update_file_version( + auth, project_id, folder_id, lineage_id, file_id, file_name, file_contents, file_version, object_id + ) + else: + _lineage_info = create_first_file_version(auth, str(object_id), project_id, str(folder_id), file_name) + return "" + + +def get_hub_id(auth: str, hub_name: str) -> str | None: + """ + gets a user's hub based on a hub name + + params: + auth - authorization token + hub_name - the name of the desired hub + + returns: + success - the hub's id or none if the hub doesn't exist + failure - the API text if there's an error + """ + + headers = {"Authorization": f"Bearer {auth}"} + hub_list_res = 
requests.get("https://developer.api.autodesk.com/project/v1/hubs", headers=headers) + if not hub_list_res.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {hub_list_res.text}") + return None + hub_list: list[dict[str, Any]] = hub_list_res.json() + for hub in hub_list: + if hub["attributes"]["name"] == hub_name: + id: str = hub["id"] + return id + return "" + + +def get_project_id(auth: str, hub_id: str, project_name: str) -> str | None: + """ + gets a project in a hub with a project name + + params: + auth - authorization token + hub_id - the id of the hub + project_name - the name of the desired project + + returns: + success - the project's id or none if the project doesn't exist + failure - the API text if there's an error + + notes: + - a hub_id can be derived from it's name with the get_hub_id function + """ + + headers = {"Authorization": f"Bearer {auth}"} + project_list_res = requests.get( + f"https://developer.api.autodesk.com/project/v1/hubs/{hub_id}/projects", headers=headers + ) + if not project_list_res.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {project_list_res.text}") + return None + project_list: list[dict[str, Any]] = project_list_res.json() + for project in project_list: + if project["attributes"]["name"] == project_name: + id: str = project["id"] + return id + return "" + + +def get_item_id(auth: str, project_id: str, parent_folder_id: str, folder_name: str, item_type: str) -> str | None: + headers = {"Authorization": f"Bearer {auth}"} + res = requests.get( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders/{parent_folder_id}/contents", + headers=headers, + ) + if not res.ok: + gm.ui.messageBox(f"Failed to get item: {res.text}") + return None + data: list[dict[str, Any]] = res.json()["data"] + if len(data) == 0: + return "" + for item in data: + if item["type"] == item_type and item["attributes"]["name"] == folder_name: + return item["id"] + return None + + +def 
update_file_version( + auth: str, + project_id: str, + folder_id: str, + lineage_id: str, + file_id: str, + file_name: str, + file_contents: str, + curr_file_version: str, + object_id: str, +) -> str | None: + """ + updates an existing file in an APS folder + + params: + auth - authorization token + project - the project reference object that the file is contain within + folder - the folder reference object that the file is contained within + file_id - the id of the file in APS + file_name - the name of the file in APS ; ex. test.mira + + returns: + success - the new version_id + failure - none + + potential causes of failure: + - invalid auth + - file doesn't exist in that position / with that id / name ; fix: get_file_id() or smth + - version one of the file hasn't been created ; fix: create_first_file_version() + """ + + # object_id = create_storage_location(auth, project_id, folder_id, file_name) + # if object_id is None: + # return None + # + # (prefix, object_key) = str(object_id).split("/", 1) + # bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod + # (upload_key, signed_url) = generate_signed_url(auth, bucket_key, object_key) + # + # if upload_file(signed_url, file_contents) is None: + # return None + + # if complete_upload(auth, upload_key, object_key, bucket_key) is None: + # return None + + # gm.ui.messageBox(f"file_name:{file_name}\nlineage_id:{lineage_id}\nfile_id:{file_id}\ncurr_file_version:{curr_file_version}\nobject_id:{object_id}", "REUPLOAD ARGS") + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + } + + attributes = {"name": file_name, "extension": {"type": "versions:autodesk.core:File", "version": f"1.0"}} + + relationships: dict[str, Any] = { + "item": { + "data": { + "type": "items", + "id": lineage_id, + } + }, + "storage": { + "data": { + "type": "objects", + "id": object_id, + } + }, + } + + data = { + "jsonapi": {"version": "1.0"}, + "data": {"type": 
"versions", "attributes": attributes, "relationships": relationships}, + } + update_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/versions", headers=headers, json=data + ) + if not update_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR:\n{update_res.text}", "Updating file to new version failed") + return None + gm.ui.messageBox( + f"Successfully updated file {file_name} to version {int(curr_file_version) + 1} on APS", "UPLOAD SUCCESS" + ) + new_id: str = update_res.json()["data"]["id"] + return new_id + + +def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> tuple[str, str, str] | None: + """ + gets the file id given a file name + + params: + auth - authorization token + project - the project reference object that the file is contain within + folder - the folder reference object that the file is contained within + file_name - the name of the file in APS ; ex. test.mira + + returns: + success - the id of the file and it's current version, or an empty tuple string if the file doesn't exist + failure - none + + potential causes of failure: + - incorrect auth + + notes: + - checking if a file exists is an intended use-case + """ + + headers: dict[str, str] = {"Authorization": f"Bearer {auth}"} + + params = {"filter[attributes.name]": file_name} + + file_res = requests.get( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders/{folder_id}/search", + headers=headers, + params=params, + ) + if file_res.status_code == 404: + return ("", "", "") + elif not file_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {file_res.text}", "Failed to get file") + return None + file_json: list[dict[str, Any]] = file_res.json() + if len(file_json["data"]) == 0: + return ("", "", "") + id: str = str(file_json["data"][0]["id"]) + lineage: str = str(file_json["data"][0]["relationships"]["item"]["data"]["id"]) + version: str = str(file_json["data"][0]["attributes"]["versionNumber"]) + return (lineage, id, 
version) + + +def create_storage_location(auth: str, project_id: str, folder_id: str, file_name: str) -> str | None: + """ + creates a storage location (a bucket) + the bucket can be used to upload a file to + every file must have a reserved storage location + I believe at the moment, object, bucket, and storage location are all used semi-interchangeably by APS documentation + + params: + auth - authorization token + project - a project reference object used for project id ; may be changed to project_id later + folder - a folder reference object used for project id ; may be changed to folder_id later + file_name - the name of the file to be later stored in the bucket + + returns: + success - the object_id of the bucket, which can be split into a bucket_key and upload_key + failure - the API failure text + + notes: + - fails if the project doesn't exist or auth is invalid + - the folder must be inside the project, the storage location will be inside the folder + """ + + data = { + "jsonapi": {"version": "1.0"}, + "data": { + "type": "objects", + "attributes": {"name": file_name}, + "relationships": {"target": {"data": {"type": "folders", "id": f"{folder_id}"}}}, + }, + } + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + } + storage_location_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/storage", json=data, headers=headers + ) + if not storage_location_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {storage_location_res.text}", f"Failed to create storage location") + return None + storage_location_json: dict[str, Any] = storage_location_res.json() + object_id: str = storage_location_json["data"]["id"] + return object_id + + +def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> tuple[str, str] | None: + """ + generates a signed_url for a bucket, given a bucket_key and object_key + + params: + auth - authorization token + bucket_key - the key of the bucket that 
the file will be stored in + object_key - the key of the object that the file will be stored in + + returns: + success - the upload_key and the signed_url + failure - the API error + + notes: + - fails if auth, the bucket, or object keys are invalid + - both params are returned by the create_storage_location function + """ + + headers = { + "Authorization": f"Bearer {auth}", + } + signed_url_res = requests.get( + f"https://developer.api.autodesk.com/oss/v2/buckets/{bucket_key}/objects/{object_key}/signeds3upload", + headers=headers, + ) + if not signed_url_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {signed_url_res.text}", "Failed to get signed url") + return None + signed_url_json: dict[str, str] = signed_url_res.json() + return (signed_url_json["uploadKey"], signed_url_json["urls"][0]) + + +def upload_file(signed_url: str, file_contents: str) -> str | None: + """ + uploads a file to APS given a signed_url a path to the file on your machine + + params: + signed_url - the url to used to upload the file to a specific bucket ; returned by the generate_signed_url function + file_path - the path of the file to be uploaded + + returns: + success - none + failure - the API error + + notes: + - fails if the auth or the signed URL are invalid + """ + upload_response = requests.put(url=signed_url, data=file_contents) + if not upload_response.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to upload to signed url: {upload_response.text}") + return None + return "" + + +def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str) -> str | None: + """ + completes and verifies the APS file upload given the upload_key + + params: + auth - authorization token + upload_key - the key to verify the upload, returned by generate_signed_url function + bucket_key - the key of the bucket that the file was uploaded to, returned by the create_storage_location function + + returns: + success - none + failure - the API error + """ + + headers = { + "Authorization": 
f"Bearer {auth}", + "Content-Type": "application/json", + } + data = {"uploadKey": upload_key} + + completed_res = requests.post( + f"https://developer.api.autodesk.com/oss/v2/buckets/{bucket_key}/objects/{object_key}/signeds3upload", + json=data, + headers=headers, + ) + if not completed_res.ok: + gm.ui.messageBox( + f"UPLOAD ERROR: {completed_res.text}\n{completed_res.status_code}", "Failed to complete upload" + ) + return None + return "" + + +def create_first_file_version( + auth: str, object_id: str, project_id: str, folder_id: str, file_name: str +) -> tuple[str, str] | None: + """ + initializes versioning for a file + + params: + auth - authorization token + project_id - the id of the project the file was uploaded to + object_id - the id of the object the file was uploaded to + folder_id - the id of the folder the file was uploaded to + file_name - the name of the file + + returns: + success - the lineage id of the versioning history of the file and the href to the new version + failure - none + + potential causes of failure + - incorrect auth + - the named file's upload was never completed + - invalid project, object, or folder id + + notes: + - super complex request, probably not written correctly, likely a dev error + """ + + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + "Accept": "application/vnd.api+json", + } + + included_attributes = {"name": file_name, "extension": {"type": "versions:autodesk.core:File", "version": "1.0"}} + + attributes = { + "displayName": file_name, + "extension": { + "type": "items:autodesk.core:File", + "version": "1.0", + }, + } + + relationships = { + "tip": {"data": {"type": "versions", "id": "1"}}, + "parent": {"data": {"type": "folders", "id": folder_id}}, + } + + included = [ + { + "type": "versions", + "id": "1", + "attributes": included_attributes, + "relationships": {"storage": {"data": {"type": "objects", "id": object_id}}}, + }, + ] + + data = { + "jsonapi": 
{"version": "1.0"}, + "data": {"type": "items", "attributes": attributes, "relationships": relationships}, + "included": included, + } + + first_version_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/items", json=data, headers=headers + ) + if not first_version_res.ok: + gm.ui.messageBox(f"Failed to create first file version: {first_version_res.text}", "UPLOAD ERROR") + return None + first_version_json: dict[str, Any] = first_version_res.json() + + lineage_id: str = first_version_json["data"]["id"] + href: str = first_version_json["links"]["self"]["href"] + + gm.ui.messageBox(f"Successful Upload of {file_name} to APS", "UPLOAD SUCCESS") + + return (lineage_id, href) diff --git a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py index 31ed4cd0c4..df5d21c9c1 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py +++ b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py @@ -22,6 +22,7 @@ SignalType = Enum("SignalType", ["PWM", "CAN", "PASSIVE"]) ExportMode = Enum("ExportMode", ["ROBOT", "FIELD"]) # Dynamic / Static export PreferredUnits = Enum("PreferredUnits", ["METRIC", "IMPERIAL"]) +ExportLocation = Enum("ExportLocation", ["UPLOAD", "DOWNLOAD"]) @dataclass @@ -95,6 +96,8 @@ class ExporterOptions: compressOutput: bool = field(default=True) exportAsPart: bool = field(default=False) + exportLocation: ExportLocation = field(default=ExportLocation.UPLOAD) + hierarchy: ModelHierarchy = field(default=ModelHierarchy.FusionAssembly) visualQuality: TriangleMeshQualityOptions = field(default=TriangleMeshQualityOptions.LowQualityTriangleMesh) physicalDepth: PhysicalDepth = field(default=PhysicalDepth.AllOccurrence) diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py index a925f5b119..0fe8a6dafb 100644 --- 
a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py @@ -7,9 +7,10 @@ from proto.proto_out import assembly_pb2, types_pb2 +from ...APS.APS import upload_mirabuf # This line causes everything to break from ...general_imports import * from ...UI.Camera import captureThumbnail, clearIconCache -from ..ExporterOptions import ExporterOptions, ExportMode +from ..ExporterOptions import ExporterOptions, ExportLocation, ExportMode from . import Components, JointHierarchy, Joints, Materials, PDMessage from .Utilities import * @@ -158,28 +159,41 @@ def export(self) -> bool: self.pdMessage.currentMessage = "Compressing File..." self.pdMessage.update() - # check if entire path exists and create if not since gzip doesn't do that. - path = pathlib.Path(self.exporterOptions.fileLocation).parent - path.mkdir(parents=True, exist_ok=True) - ### Print out assembly as JSON # miraJson = MessageToJson(assembly_out) # miraJsonFile = open(f'', 'wb') # miraJsonFile.write(str.encode(miraJson)) # miraJsonFile.close() - if self.exporterOptions.compressOutput: - self.logger.debug("Compressing file") - with gzip.open(self.exporterOptions.fileLocation, "wb", 9) as f: - self.pdMessage.currentMessage = "Saving File..." 
- self.pdMessage.update() - f.write(assembly_out.SerializeToString()) + # Upload Mirabuf File to APS + if self.exporterOptions.exportLocation == ExportLocation.UPLOAD: + self.logger.debug("Uploading file to APS") + project = app.data.activeProject + if not project.isValid: + gm.ui.messageBox("Project is invalid", "") + return False # add throw later + project_id = project.id + folder_id = project.rootFolder.id + file_name = f"{self.exporterOptions.fileLocation}.mira" + if upload_mirabuf(project_id, folder_id, file_name, assembly_out.SerializeToString()) is None: + gm.ui.messageBox("FAILED TO UPLOAD FILE TO APS", "ERROR") # add throw later + # Download Mirabuf File else: - f = open(self.exporterOptions.fileLocation, "wb") - f.write(assembly_out.SerializeToString()) - f.close() + # check if entire path exists and create if not since gzip doesn't do that. + path = pathlib.Path(self.exporterOptions.fileLocation).parent + path.mkdir(parents=True, exist_ok=True) + if self.exporterOptions.compressOutput: + self.logger.debug("Compressing file") + with gzip.open(self.exporterOptions.fileLocation, "wb", 9) as f: + self.pdMessage.currentMessage = "Saving File..." 
+ self.pdMessage.update() + f.write(assembly_out.SerializeToString()) + else: + f = open(self.exporterOptions.fileLocation, "wb") + f.write(assembly_out.SerializeToString()) + f.close() - progressDialog.hide() + _ = progressDialog.hide() if DEBUG: part_defs = assembly_out.data.parts.part_definitions diff --git a/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py b/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py index febb617ec4..806cd65bff 100644 --- a/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py +++ b/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py @@ -4,7 +4,8 @@ import logging import os -import platform + +# import platform import traceback from enum import Enum @@ -17,6 +18,7 @@ from ..general_imports import * from ..Parser.ExporterOptions import ( ExporterOptions, + ExportLocation, ExportMode, Gamepiece, Joint, @@ -223,6 +225,21 @@ def notify(self, args): dropdownExportMode.tooltip = "Export Mode" dropdownExportMode.tooltipDescription = "
Does this object move dynamically?" + # ~~~~~~~~~~~~~~~~ EXPORT LOCATION ~~~~~~~~~~~~~~~~~~ + + dropdownExportLocation = inputs.addDropDownCommandInput( + "location", "Export Location", dropDownStyle=adsk.core.DropDownStyles.LabeledIconDropDownStyle + ) + + upload: bool = exporterOptions.exportLocation == ExportLocation.UPLOAD + dropdownExportLocation.listItems.add("Upload", upload) + dropdownExportLocation.listItems.add("Download", not upload) + + dropdownExportLocation.tooltip = "Export Location" + dropdownExportLocation.tooltipDescription = ( + "
Do you want to upload this mirabuf file to APS, or download it to your local machine?" + ) + # ~~~~~~~~~~~~~~~~ WEIGHT CONFIGURATION ~~~~~~~~~~~~~~~~ """ Table for weight config. @@ -622,11 +639,13 @@ def notify(self, args): """ Creates the advanced tab, which is the parent container for internal command inputs """ - advancedSettings = INPUTS_ROOT.addTabCommandInput("advanced_settings", "Advanced") + advancedSettings: adsk.core.TabCommandInput = INPUTS_ROOT.addTabCommandInput( + "advanced_settings", "Advanced" + ) advancedSettings.tooltip = ( "Additional Advanced Settings to change how your model will be translated into Unity." ) - a_input = advancedSettings.children + a_input: adsk.core.CommandInputs = advancedSettings.children # ~~~~~~~~~~~~~~~~ EXPORTER SETTINGS ~~~~~~~~~~~~~~~~ """ @@ -661,55 +680,38 @@ def notify(self, args): """ Physics settings group command """ - physicsSettings = a_input.addGroupCommandInput("physics_settings", "Physics Settings") + physicsSettings: adsk.core.GroupCommandInput = a_input.addGroupCommandInput( + "physics_settings", "Physics Settings" + ) - physicsSettings.isExpanded = False + physicsSettings.isExpanded = True physicsSettings.isEnabled = True - physicsSettings.tooltip = "tooltip" # TODO: update tooltip - physics_settings = physicsSettings.children - - # AARD-1687 - # Should also be commented out / removed? - # This would cause problems elsewhere but I can't tell i f - # this is even being used. 
- frictionOverrideTable = self.createTableInput( - "friction_override_table", - "", - physics_settings, - 2, - "1:2", - 1, - columnSpacing=25, - ) - frictionOverrideTable.tablePresentationStyle = 2 - # frictionOverrideTable.isFullWidth = True + physicsSettings.tooltip = "Settings relating to the custom physics of the robot, like the wheel friction" + physics_settings: adsk.core.CommandInputs = physicsSettings.children - frictionOverride = self.createBooleanInput( + frictionOverrideInput = self.createBooleanInput( "friction_override", - "", + "Friction Override", physics_settings, - checked=False, + checked=True, # object is missing attribute tooltip="Manually override the default friction values on the bodies in the assembly.", enabled=True, isCheckBox=False, ) - frictionOverride.resourceFolder = IconPaths.stringIcons["friction_override-enabled"] - frictionOverride.isFullWidth = True + frictionOverrideInput.resourceFolder = IconPaths.stringIcons["friction_override-enabled"] + frictionOverrideInput.isFullWidth = True valueList = [1] for i in range(20): valueList.append(i / 20) - frictionCoeff = physics_settings.addFloatSliderListCommandInput( - "friction_coeff_override", "Friction Coefficient", "", valueList + frictionCoeffSlider: adsk.core.FloatSliderCommandInput = physics_settings.addFloatSliderListCommandInput( + "friction_override_coeff", "Friction Coefficient", "", valueList ) - frictionCoeff.isVisible = False - frictionCoeff.valueOne = 0.5 - frictionCoeff.tooltip = "Friction coefficient of field element." - frictionCoeff.tooltipDescription = "Friction coefficients range from 0 (ice) to 1 (rubber)." - - frictionOverrideTable.addCommandInput(frictionOverride, 0, 0) - frictionOverrideTable.addCommandInput(frictionCoeff, 0, 1) + frictionCoeffSlider.isVisible = True + frictionCoeffSlider.valueOne = 0.5 + frictionCoeffSlider.tooltip = "Friction coefficient of field element." 
+ frictionCoeffSlider.tooltipDescription = "Friction coefficients range from 0 (ice) to 1 (rubber)." # ~~~~~~~~~~~~~~~~ JOINT SETTINGS ~~~~~~~~~~~~~~~~ """ @@ -1008,35 +1010,31 @@ def notify(self, args): self.log.error("Could not execute configuration due to failure") return - export_as_part_boolean = ( - eventArgs.command.commandInputs.itemById("advanced_settings") - .children.itemById("exporter_settings") - .children.itemById("export_as_part") - ).value - processedFileName = gm.app.activeDocument.name.replace(" ", "_") dropdownExportMode = INPUTS_ROOT.itemById("mode") if dropdownExportMode.selectedItem.index == 0: isRobot = True elif dropdownExportMode.selectedItem.index == 1: isRobot = False + dropdownExportLocation = INPUTS_ROOT.itemById("location") + if dropdownExportLocation.selectedItem.index == 1: # Download + if isRobot: + savepath = FileDialogConfig.SaveFileDialog( + defaultPath=exporterOptions.fileLocation, + ext="Synthesis File (*.synth)", + ) + else: + savepath = FileDialogConfig.SaveFileDialog(defaultPath=exporterOptions.fileLocation) - if isRobot: - savepath = FileDialogConfig.SaveFileDialog( - defaultPath=exporterOptions.fileLocation, - ext="Synthesis File (*.synth)", - ) - else: - savepath = FileDialogConfig.SaveFileDialog(defaultPath=exporterOptions.fileLocation) - - if savepath == False: - # save was canceled - return - - updatedPath = pathlib.Path(savepath).parent - if updatedPath != self.current.filePath: - self.current.filePath = str(updatedPath) + if savepath == False: + # save was canceled + return + updatedPath = pathlib.Path(savepath).parent + if updatedPath != self.current.filePath: + self.current.filePath = str(updatedPath) + else: + savepath = processedFileName adsk.doEvents() # get active document design = gm.app.activeDocument.design @@ -1047,7 +1045,8 @@ def notify(self, args): _exportJoints = [] # all selected joints, formatted for parseOptions _exportGamepieces = [] # TODO work on the code to populate Gamepiece _robotWeight = float 
- _mode = ExportMode.ROBOT + _mode: ExportMode + _location: ExportLocation """ Loops through all rows in the wheel table to extract all the input values @@ -1176,6 +1175,18 @@ def notify(self, args): elif dropdownExportMode.selectedItem.index == 1: _mode = ExportMode.FIELD + """ + Export Location + """ + dropdownExportLocation = INPUTS_ROOT.itemById("location") + if dropdownExportLocation.selectedItem.index == 0: + _location = ExportLocation.UPLOAD + elif dropdownExportLocation.selectedItem.index == 1: + _location = ExportLocation.DOWNLOAD + + """ + Advanced Settings + """ global compress compress = ( eventArgs.command.commandInputs.itemById("advanced_settings") @@ -1183,6 +1194,12 @@ def notify(self, args): .children.itemById("compress") ).value + export_as_part_boolean = ( + eventArgs.command.commandInputs.itemById("advanced_settings") + .children.itemById("exporter_settings") + .children.itemById("export_as_part") + ).value + exporterOptions = ExporterOptions( savepath, name, @@ -1194,11 +1211,12 @@ def notify(self, args): preferredUnits=selectedUnits, robotWeight=_robotWeight, exportMode=_mode, + exportLocation=_location, compressOutput=compress, exportAsPart=export_as_part_boolean, ) - Parser(exporterOptions).export() + _: bool = Parser(exporterOptions).export() exporterOptions.writeToDesign() except: if gm.ui: @@ -1637,7 +1655,7 @@ def notify(self, args): inputs = cmdInput.commandInputs onSelect = gm.handlers[3] - frictionCoeff = INPUTS_ROOT.itemById("friction_coeff_override") + frictionCoeff = INPUTS_ROOT.itemById("friction_override_coeff") wheelSelect = inputs.itemById("wheel_select") jointSelect = inputs.itemById("joint_select") diff --git a/exporter/SynthesisFusionAddin/src/UI/ShowAPSAuthCommand.py b/exporter/SynthesisFusionAddin/src/UI/ShowAPSAuthCommand.py index bacc6e094b..c6dc93038e 100644 --- a/exporter/SynthesisFusionAddin/src/UI/ShowAPSAuthCommand.py +++ b/exporter/SynthesisFusionAddin/src/UI/ShowAPSAuthCommand.py @@ -31,7 +31,7 @@ def notify(self, 
args): global palette palette = gm.ui.palettes.itemById("authPalette") if not palette: - callbackUrl = "http://localhost:80/api/aps/exporter/" + callbackUrl = "https://synthesis.autodesk.com/api/aps/exporter/" challenge = getCodeChallenge() if challenge is None: logging.getLogger(f"{INTERNAL_ID}").error( @@ -42,7 +42,7 @@ def notify(self, args): "response_type": "code", "client_id": CLIENT_ID, "redirect_uri": urllib.parse.quote_plus(callbackUrl), - "scope": "data:read", + "scope": "data:create data:write data:search data:read", "nonce": time.time(), "prompt": "login", "code_challenge": challenge, @@ -139,7 +139,7 @@ def notify(self, args): convertAuthToken(data["code"]) except: - gm.ui.messageBox("Failed:\n".format(traceback.format_exc())) - logging.getLogger(f"{INTERNAL_ID}").error("Failed:\n".format(traceback.format_exc())) + gm.ui.messageBox("Failed:{}\n".format(traceback.format_exc())) + logging.getLogger(f"{INTERNAL_ID}").error("Failed:{}\n".format(traceback.format_exc())) if palette: palette.deleteMe() diff --git a/exporter/SynthesisFusionAddin/src/general_imports.py b/exporter/SynthesisFusionAddin/src/general_imports.py index 042e4618ea..05269e92c8 100644 --- a/exporter/SynthesisFusionAddin/src/general_imports.py +++ b/exporter/SynthesisFusionAddin/src/general_imports.py @@ -39,9 +39,10 @@ from proto import deps + deps.installDependencies() + except: logging.getLogger(f"{INTERNAL_ID}.import_manager").error("Failed\n{}".format(traceback.format_exc())) - try: # simple analytics endpoint # A_EP = AnalyticsEndpoint("UA-188467590-1", 1) diff --git a/fission/src/Synthesis.tsx b/fission/src/Synthesis.tsx index f33440122c..c6c7048b12 100644 --- a/fission/src/Synthesis.tsx +++ b/fission/src/Synthesis.tsx @@ -56,13 +56,21 @@ import ImportLocalMirabufModal from "@/modals/mirabuf/ImportLocalMirabufModal.ts import APS from "./aps/APS.ts" import ImportMirabufPanel from "@/ui/panels/mirabuf/ImportMirabufPanel.tsx" import Skybox from "./ui/components/Skybox.tsx" +import 
ProgressNotifications from "./ui/components/ProgressNotification.tsx" +import { ProgressHandle } from "./ui/components/ProgressNotificationData.ts" import ConfigureRobotModal from "./ui/modals/configuring/ConfigureRobotModal.tsx" import ResetAllInputsModal from "./ui/modals/configuring/ResetAllInputsModal.tsx" import ZoneConfigPanel from "./ui/panels/configuring/scoring/ZoneConfigPanel.tsx" import SceneOverlay from "./ui/components/SceneOverlay.tsx" +import WPILibWSWorker from "@/systems/simulation/wpilib_brain/WPILibWSWorker.ts?worker" +import WSViewPanel from "./ui/panels/WSViewPanel.tsx" +import Lazy from "./util/Lazy.ts" + const DEFAULT_MIRA_PATH = "/api/mira/Robots/Team 2471 (2018)_v7.mira" +const worker = new Lazy(() => new WPILibWSWorker()) + function Synthesis() { const urlParams = new URLSearchParams(document.location.search) const has_code = urlParams.has("code") @@ -92,6 +100,8 @@ function Synthesis() { World.InitWorld() + worker.getValue() + let mira_path = DEFAULT_MIRA_PATH if (urlParams.has("mira")) { @@ -100,12 +110,17 @@ function Synthesis() { } const setup = async () => { + const setupProgress = new ProgressHandle("Spawning Default Robot") + setupProgress.Update("Checking cache...", 0.1) + const info = await MirabufCachingService.CacheRemote(mira_path, MiraType.ROBOT) .catch(_ => MirabufCachingService.CacheRemote(DEFAULT_MIRA_PATH, MiraType.ROBOT)) .catch(console.error) const miraAssembly = await MirabufCachingService.Get(info!.id, MiraType.ROBOT) + setupProgress.Update("Parsing assembly...", 0.5) + await (async () => { if (!miraAssembly || !(miraAssembly instanceof mirabuf.Assembly)) { return @@ -114,11 +129,16 @@ function Synthesis() { const parser = new MirabufParser(miraAssembly) if (parser.maxErrorSeverity >= ParseErrorSeverity.Unimportable) { console.error(`Assembly Parser produced significant errors for '${miraAssembly.info!.name!}'`) + setupProgress.Fail("Failed to parse assembly") return } + setupProgress.Update("Creating scene object...", 
0.9) + const mirabufSceneObject = new MirabufSceneObject(new MirabufInstance(parser), miraAssembly.info!.name!) World.SceneRenderer.RegisterSceneObject(mirabufSceneObject) + + setupProgress.Done() })() } @@ -180,6 +200,7 @@ function Synthesis() { {modalElement} )} + @@ -245,6 +266,7 @@ const initialPanels: ReactElement[] = [ , , , + , ] export default Synthesis diff --git a/fission/src/mirabuf/MirabufInstance.ts b/fission/src/mirabuf/MirabufInstance.ts index fe4aaa6d2e..174cbcbff3 100644 --- a/fission/src/mirabuf/MirabufInstance.ts +++ b/fission/src/mirabuf/MirabufInstance.ts @@ -2,6 +2,7 @@ import * as THREE from "three" import { mirabuf } from "../proto/mirabuf" import MirabufParser, { ParseErrorSeverity } from "./MirabufParser.ts" import World from "@/systems/World.ts" +import { ProgressHandle } from "@/ui/components/ProgressNotificationData.ts" type MirabufPartInstanceGUID = string @@ -107,7 +108,7 @@ class MirabufInstance { return this._batches } - public constructor(parser: MirabufParser, materialStyle?: MaterialStyle) { + public constructor(parser: MirabufParser, materialStyle?: MaterialStyle, progressHandle?: ProgressHandle) { if (parser.errors.some(x => x[0] >= ParseErrorSeverity.Unimportable)) { throw new Error("Parser has significant errors...") } @@ -117,7 +118,10 @@ class MirabufInstance { this._meshes = new Map() this._batches = new Array() + progressHandle?.Update("Loading materials...", 0.4) this.LoadMaterials(materialStyle ?? 
MaterialStyle.Regular) + + progressHandle?.Update("Creating meshes...", 0.5) this.CreateMeshes() } @@ -236,8 +240,6 @@ class MirabufInstance { const batchedMesh = new THREE.BatchedMesh(count.maxInstances, count.maxVertices, count.maxIndices) this._batches.push(batchedMesh) - console.debug(`${count.maxInstances}, ${count.maxVertices}, ${count.maxIndices}`) - batchedMesh.material = material batchedMesh.castShadow = true batchedMesh.receiveShadow = true @@ -253,8 +255,6 @@ class MirabufInstance { batchedMesh.setMatrixAt(geoId, mat) - console.debug(geoId) - let bodies = this._meshes.get(instance.info!.GUID!) if (!bodies) { bodies = new Array<[THREE.BatchedMesh, number]>() diff --git a/fission/src/mirabuf/MirabufParser.ts b/fission/src/mirabuf/MirabufParser.ts index 8565a00335..4c4e18060f 100644 --- a/fission/src/mirabuf/MirabufParser.ts +++ b/fission/src/mirabuf/MirabufParser.ts @@ -1,6 +1,7 @@ import * as THREE from "three" import { mirabuf } from "@/proto/mirabuf" import { MirabufTransform_ThreeMatrix4 } from "@/util/TypeConversions" +import { ProgressHandle } from "@/ui/components/ProgressNotificationData" export type RigidNodeId = string @@ -72,11 +73,13 @@ class MirabufParser { return this._rootNode } - public constructor(assembly: mirabuf.Assembly) { + public constructor(assembly: mirabuf.Assembly, progressHandle?: ProgressHandle) { this._assembly = assembly this._errors = new Array() this._globalTransforms = new Map() + progressHandle?.Update("Parsing assembly...", 0.3) + this.GenerateTreeValues() this.LoadGlobalTransforms() diff --git a/fission/src/mirabuf/MirabufSceneObject.ts b/fission/src/mirabuf/MirabufSceneObject.ts index 478a0fd362..4c4cdd1414 100644 --- a/fission/src/mirabuf/MirabufSceneObject.ts +++ b/fission/src/mirabuf/MirabufSceneObject.ts @@ -18,6 +18,7 @@ import { MiraType } from "./MirabufLoader" import IntakeSensorSceneObject from "./IntakeSensorSceneObject" import EjectableSceneObject from "./EjectableSceneObject" import { SceneOverlayTag } from 
"@/ui/components/SceneOverlayEvents" +import { ProgressHandle } from "@/ui/components/ProgressNotificationData" const DEBUG_BODIES = false @@ -83,12 +84,14 @@ class MirabufSceneObject extends SceneObject { return this._mirabufInstance.parser.rootNode } - public constructor(mirabufInstance: MirabufInstance, assemblyName: string) { + public constructor(mirabufInstance: MirabufInstance, assemblyName: string, progressHandle?: ProgressHandle) { super() this._mirabufInstance = mirabufInstance this._assemblyName = assemblyName + progressHandle?.Update("Creating mechanism...", 0.9) + this._mechanism = World.PhysicsSystem.CreateMechanismFromParser(this._mirabufInstance.parser) if (this._mechanism.layerReserve) { this._physicsLayerReserve = this._mechanism.layerReserve @@ -400,14 +403,17 @@ class MirabufSceneObject extends SceneObject { } } -export async function CreateMirabuf(assembly: mirabuf.Assembly): Promise { - const parser = new MirabufParser(assembly) +export async function CreateMirabuf( + assembly: mirabuf.Assembly, + progressHandle?: ProgressHandle +): Promise { + const parser = new MirabufParser(assembly, progressHandle) if (parser.maxErrorSeverity >= ParseErrorSeverity.Unimportable) { console.error(`Assembly Parser produced significant errors for '${assembly.info!.name!}'`) return } - return new MirabufSceneObject(new MirabufInstance(parser), assembly.info!.name!) 
+ return new MirabufSceneObject(new MirabufInstance(parser), assembly.info!.name!, progressHandle) } /** diff --git a/fission/src/systems/simulation/wpilib_brain/WPILibBrain.ts b/fission/src/systems/simulation/wpilib_brain/WPILibBrain.ts new file mode 100644 index 0000000000..43127ece74 --- /dev/null +++ b/fission/src/systems/simulation/wpilib_brain/WPILibBrain.ts @@ -0,0 +1,233 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import Mechanism from "@/systems/physics/Mechanism" +import Brain from "../Brain" + +import WPILibWSWorker from "./WPILibWSWorker?worker" + +const worker = new WPILibWSWorker() + +const PWM_SPEED = "" ? FieldType.Both : FieldType.Read + case ">": + return FieldType.Write + default: + return FieldType.Unknown + } +} + +export const simMap = new Map>() + +export class SimGeneric { + private constructor() {} + + public static Get(simType: SimType, device: string, field: string, defaultValue?: T): T | undefined { + const fieldType = GetFieldType(field) + if (fieldType != FieldType.Read && fieldType != FieldType.Both) { + console.warn(`Field '${field}' is not a read or both field type`) + return undefined + } + + const map = simMap.get(simType) + if (!map) { + console.warn(`No '${simType}' devices found`) + return undefined + } + + const data = map.get(device) + if (!data) { + console.warn(`No '${simType}' device '${device}' found`) + return undefined + } + + return (data[field] as T | undefined) ?? 
defaultValue + } + + public static Set(simType: SimType, device: string, field: string, value: T): boolean { + const fieldType = GetFieldType(field) + if (fieldType != FieldType.Write && fieldType != FieldType.Both) { + console.warn(`Field '${field}' is not a write or both field type`) + return false + } + + const map = simMap.get(simType) + if (!map) { + console.warn(`No '${simType}' devices found`) + return false + } + + const data = map.get(device) + if (!data) { + console.warn(`No '${simType}' device '${device}' found`) + return false + } + + const selectedData: any = {} + selectedData[field] = value + + data[field] = value + worker.postMessage({ + command: "update", + data: { + type: simType, + device: device, + data: selectedData, + }, + }) + + window.dispatchEvent(new SimMapUpdateEvent(true)) + return true + } +} + +export class SimPWM { + private constructor() {} + + public static GetSpeed(device: string): number | undefined { + return SimGeneric.Get("PWM", device, PWM_SPEED, 0.0) + } + + public static GetPosition(device: string): number | undefined { + return SimGeneric.Get("PWM", device, PWM_POSITION, 0.0) + } +} + +export class SimCANMotor { + private constructor() {} + + public static GetDutyCycle(device: string): number | undefined { + return SimGeneric.Get("CANMotor", device, CANMOTOR_DUTY_CYCLE, 0.0) + } + + public static SetSupplyVoltage(device: string, voltage: number): boolean { + return SimGeneric.Set("CANMotor", device, CANMOTOR_SUPPLY_VOLTAGE, voltage) + } +} + +export class SimCANEncoder { + private constructor() {} + + public static SetRawInputPosition(device: string, rawInputPosition: number): boolean { + return SimGeneric.Set("CANEncoder", device, CANENCODER_RAW_INPUT_POSITION, rawInputPosition) + } +} + +worker.addEventListener("message", (eventData: MessageEvent) => { + let data: any | undefined + try { + if (typeof eventData.data == "object") { + data = eventData.data + } else { + data = JSON.parse(eventData.data) + } + } catch (e) { + 
console.warn(`Failed to parse data:\n${JSON.stringify(eventData.data)}`) + } + + if (!data || !data.type) { + console.log("No data, bailing out") + return + } + + // console.debug(data) + + const device = data.device + const updateData = data.data + + switch (data.type) { + case "PWM": + console.debug("pwm") + UpdateSimMap("PWM", device, updateData) + break + case "Solenoid": + console.debug("solenoid") + UpdateSimMap("Solenoid", device, updateData) + break + case "SimDevice": + console.debug("simdevice") + UpdateSimMap("SimDevice", device, updateData) + break + case "CANMotor": + console.debug("canmotor") + UpdateSimMap("CANMotor", device, updateData) + break + case "CANEncoder": + console.debug("canencoder") + UpdateSimMap("CANEncoder", device, updateData) + break + default: + // console.debug(`Unrecognized Message:\n${data}`) + break + } +}) + +function UpdateSimMap(type: SimType, device: string, updateData: any) { + let typeMap = simMap.get(type) + if (!typeMap) { + typeMap = new Map() + simMap.set(type, typeMap) + } + + let currentData = typeMap.get(device) + if (!currentData) { + currentData = {} + typeMap.set(device, currentData) + } + Object.entries(updateData).forEach(kvp => (currentData[kvp[0]] = kvp[1])) + + window.dispatchEvent(new SimMapUpdateEvent(false)) +} + +class WPILibBrain extends Brain { + constructor(mech: Mechanism) { + super(mech) + } + + public Update(_: number): void {} + + public Enable(): void { + worker.postMessage({ command: "connect" }) + } + + public Disable(): void { + worker.postMessage({ command: "disconnect" }) + } +} + +export class SimMapUpdateEvent extends Event { + public static readonly TYPE: string = "ws/sim-map-update" + + private _internalUpdate: boolean + + public get internalUpdate(): boolean { + return this._internalUpdate + } + + public constructor(internalUpdate: boolean) { + super(SimMapUpdateEvent.TYPE) + + this._internalUpdate = internalUpdate + } +} + +export default WPILibBrain diff --git 
a/fission/src/systems/simulation/wpilib_brain/WPILibWSWorker.ts b/fission/src/systems/simulation/wpilib_brain/WPILibWSWorker.ts new file mode 100644 index 0000000000..836dbd0d4f --- /dev/null +++ b/fission/src/systems/simulation/wpilib_brain/WPILibWSWorker.ts @@ -0,0 +1,65 @@ +import { Mutex } from "async-mutex" + +let socket: WebSocket | undefined = undefined + +const connectMutex = new Mutex() + +async function tryConnect(port: number | undefined): Promise { + await connectMutex + .runExclusive(() => { + if ((socket?.readyState ?? WebSocket.CLOSED) == WebSocket.OPEN) { + return + } + + socket = new WebSocket(`ws://localhost:${port ?? 3300}/wpilibws`) + + socket.addEventListener("open", () => { + console.log("WS Opened") + self.postMessage({ status: "open" }) + }) + socket.addEventListener("error", () => { + console.log("WS Could not open") + self.postMessage({ status: "error" }) + }) + + socket.addEventListener("message", onMessage) + }) + .then(() => console.debug("Mutex released")) +} + +async function tryDisconnect(): Promise { + await connectMutex.runExclusive(() => { + if (!socket) { + return + } + + socket?.close() + socket = undefined + }) +} + +function onMessage(event: MessageEvent) { + // console.log(`${JSON.stringify(JSON.parse(event.data), null, '\t')}`) + self.postMessage(event.data) +} + +self.addEventListener("message", e => { + switch (e.data.command) { + case "connect": + tryConnect(e.data.port) + break + case "disconnect": + tryDisconnect() + break + case "update": + if (socket) { + socket.send(JSON.stringify(e.data.data)) + } + break + default: + console.warn(`Unrecognized command '${e.data.command}'`) + break + } +}) + +console.log("Worker started") diff --git a/fission/src/test/PhysicsSystem.test.ts b/fission/src/test/PhysicsSystem.test.ts index f60447705c..ed2ea62bc5 100644 --- a/fission/src/test/PhysicsSystem.test.ts +++ b/fission/src/test/PhysicsSystem.test.ts @@ -91,7 +91,9 @@ describe("Mirabuf Physics Loading", () => { const assembly = 
await MirabufCachingService.CacheRemote( "/api/mira/Robots/Team 2471 (2018)_v7.mira", MiraType.ROBOT - ).then(x => MirabufCachingService.Get(x!.id, MiraType.ROBOT)) + ).then(x => { + return MirabufCachingService.Get(x!.id, MiraType.ROBOT) + }) const parser = new MirabufParser(assembly!) const physSystem = new PhysicsSystem() diff --git a/fission/src/ui/components/MainHUD.tsx b/fission/src/ui/components/MainHUD.tsx index 717df814de..5b88f3efbb 100644 --- a/fission/src/ui/components/MainHUD.tsx +++ b/fission/src/ui/components/MainHUD.tsx @@ -2,7 +2,7 @@ import React, { useEffect, useState } from "react" import { BsCodeSquare } from "react-icons/bs" import { FaCar, FaGear, FaMagnifyingGlass, FaPlus } from "react-icons/fa6" import { BiMenuAltLeft } from "react-icons/bi" -import { GrFormClose } from "react-icons/gr" +import { GrConnect, GrFormClose } from "react-icons/gr" import { GiSteeringWheel } from "react-icons/gi" import { HiDownload } from "react-icons/hi" import { IoBasketball, IoBug, IoGameControllerOutline, IoPeople, IoRefresh, IoTimer } from "react-icons/io5" @@ -12,6 +12,7 @@ import { motion } from "framer-motion" import logo from "@/assets/autodesk_logo.png" import { ToastType, useToastContext } from "@/ui/ToastContext" import { Random } from "@/util/Random" +import WPILibBrain from "@/systems/simulation/wpilib_brain/WPILibBrain" import APS, { APS_USER_INFO_UPDATE_EVENT } from "@/aps/APS" import { UserIcon } from "./UserIcon" import World from "@/systems/World" @@ -145,6 +146,7 @@ const MainHUD: React.FC = () => { } }} /> + } onClick={() => openPanel("ws-view")} />
{ }} /> } onClick={() => openModal("drivetrain")} /> + } + onClick={() => { + // worker?.postMessage({ command: 'connect' }); + const miraObjs = [...World.SceneRenderer.sceneObjects.entries()].filter( + x => x[1] instanceof MirabufSceneObject + ) + console.log(`Number of mirabuf scene objects: ${miraObjs.length}`) + if (miraObjs.length > 0) { + const mechanism = (miraObjs[0][1] as MirabufSceneObject).mechanism + const simLayer = World.SimulationSystem.GetSimulationLayer(mechanism) + simLayer?.SetBrain(new WPILibBrain(mechanism)) + } + }} + /> } diff --git a/fission/src/ui/components/ProgressNotification.tsx b/fission/src/ui/components/ProgressNotification.tsx new file mode 100644 index 0000000000..bd55cf0eea --- /dev/null +++ b/fission/src/ui/components/ProgressNotification.tsx @@ -0,0 +1,155 @@ +import { styled, Typography } from "@mui/material" +import { Box } from "@mui/system" +import { useEffect, useReducer, useState } from "react" +import { ProgressHandle, ProgressHandleStatus, ProgressEvent } from "./ProgressNotificationData" +import { easeOutQuad } from "@/util/EasingFunctions" + +interface ProgressData { + lastValue: number + currentValue: number + lastUpdate: number +} + +const handleMap = new Map() + +const TypoStyled = styled(Typography)(_ => ({ + fontFamily: "Artifakt", + textAlign: "center", +})) + +interface NotificationProps { + handle: ProgressHandle +} + +function Interp(elapse: number, progressData: ProgressData) { + const [value, setValue] = useState(0) + + useEffect(() => { + const update = () => { + // Get the portion of the completed elapse timer, passed into an easing function. 
+ const n = Math.min(1.0, Math.max(0.0, (Date.now() - progressData.lastUpdate) / elapse)) + // Convert the result of the easing function [0, 1] to a lerp from last value to current value + const v = progressData.lastValue + (progressData.currentValue - progressData.lastValue) * easeOutQuad(n) + + setValue(v) + } + + const interval = setInterval(update, 5) + const timeout = setTimeout(() => clearInterval(interval), elapse) + + return () => { + clearTimeout(timeout) + clearInterval(interval) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [progressData]) + + return value +} + +function ProgressNotification({ handle }: NotificationProps) { + const [progressData, setProgressData] = useState({ + lastValue: 0, + currentValue: 0, + lastUpdate: Date.now(), + }) + + const interpProgress = Interp(500, progressData) + + useEffect(() => { + setProgressData({ lastValue: progressData.currentValue, currentValue: handle.progress, lastUpdate: Date.now() }) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [handle.progress]) + + return ( + + + + {handle.title} + + {handle.message.length > 0 ? {handle.message} : <>} + + + + ) +} + +function ProgressNotifications() { + const [progressElements, updateProgressElements] = useReducer(() => { + return handleMap.size > 0 + ? [...handleMap.entries()].map(([_, handle]) => ( + + )) + : undefined + }, undefined) + + useEffect(() => { + const onHandleUpdate = (e: ProgressEvent) => { + const handle = e.handle + if (handle.status > 0) { + setTimeout(() => handleMap.delete(handle.handleId) && updateProgressElements(), 2000) + } + handleMap.set(handle.handleId, handle) + updateProgressElements() + } + + ProgressEvent.AddListener(onHandleUpdate) + return () => { + ProgressEvent.RemoveListener(onHandleUpdate) + } + }, [updateProgressElements]) + + return ( + + {progressElements ?? 
<>} + + ) +} + +export default ProgressNotifications diff --git a/fission/src/ui/components/ProgressNotificationData.ts b/fission/src/ui/components/ProgressNotificationData.ts new file mode 100644 index 0000000000..54963d58d7 --- /dev/null +++ b/fission/src/ui/components/ProgressNotificationData.ts @@ -0,0 +1,73 @@ +let nextHandleId = 0 + +export enum ProgressHandleStatus { + inProgress = 0, + Done = 1, + Error = 2, +} + +export class ProgressHandle { + private _handleId: number + private _title: string + public message: string = "" + public progress: number = 0.0 + public status: ProgressHandleStatus = ProgressHandleStatus.inProgress + + public get handleId() { + return this._handleId + } + public get title() { + return this._title + } + + public constructor(title: string) { + this._handleId = nextHandleId++ + this._title = title + + this.Push() + } + + public Update(message: string, progress: number, status?: ProgressHandleStatus) { + this.message = message + this.progress = progress + status && (this.status = status) + + this.Push() + } + + public Fail(message?: string) { + this.Update(message ?? "Failed", 1, ProgressHandleStatus.Error) + } + + public Done(message?: string) { + this.Update(message ?? 
"Done", 1, ProgressHandleStatus.Done) + } + + public Push() { + ProgressEvent.Dispatch(this) + } +} + +export class ProgressEvent extends Event { + public static readonly EVENT_KEY = "ProgressEvent" + + public handle: ProgressHandle + + private constructor(handle: ProgressHandle) { + super(ProgressEvent.EVENT_KEY) + + this.handle = handle + } + + public static Dispatch(handle: ProgressHandle) { + window.dispatchEvent(new ProgressEvent(handle)) + } + + public static AddListener(func: (e: ProgressEvent) => void) { + window.addEventListener(this.EVENT_KEY, func as (e: Event) => void) + } + + public static RemoveListener(func: (e: ProgressEvent) => void) { + window.removeEventListener(this.EVENT_KEY, func as (e: Event) => void) + } +} diff --git a/fission/src/ui/panels/WSViewPanel.tsx b/fission/src/ui/panels/WSViewPanel.tsx new file mode 100644 index 0000000000..2168b2f203 --- /dev/null +++ b/fission/src/ui/panels/WSViewPanel.tsx @@ -0,0 +1,218 @@ +import Panel, { PanelPropsImpl } from "@/components/Panel" +import { SimMapUpdateEvent, SimGeneric, simMap, SimType } from "@/systems/simulation/wpilib_brain/WPILibBrain" +import { + Box, + Stack, + styled, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Typography, +} from "@mui/material" +import { useCallback, useEffect, useMemo, useState } from "react" +import { GrConnect } from "react-icons/gr" +import Dropdown from "../components/Dropdown" +import Input from "../components/Input" +import Button from "../components/Button" + +type ValueType = "string" | "number" | "object" | "boolean" + +const TypoStyled = styled(Typography)({ + fontFamily: "Artifakt Legend", + fontWeight: 300, + color: "white", +}) + +function generateTableBody() { + return ( + + {simMap.has("PWM") ? ( + [...simMap.get("PWM")!.entries()] + .filter(x => x[1][" { + return ( + + + PWM + + + {x[0]} + + + {JSON.stringify(x[1])} + + + ) + }) + ) : ( + <> + )} + {simMap.has("SimDevice") ? 
( + [...simMap.get("SimDevice")!.entries()].map(x => { + return ( + + + SimDevice + + + {x[0]} + + + {JSON.stringify(x[1])} + + + ) + }) + ) : ( + <> + )} + {simMap.has("CANMotor") ? ( + [...simMap.get("CANMotor")!.entries()].map(x => { + return ( + + + CAN Motor + + + {x[0]} + + + {JSON.stringify(x[1])} + + + ) + }) + ) : ( + <> + )} + {simMap.has("CANEncoder") ? ( + [...simMap.get("CANEncoder")!.entries()].map(x => { + return ( + + + CAN Encoder + + + {x[0]} + + + {JSON.stringify(x[1])} + + + ) + }) + ) : ( + <> + )} + + ) +} + +function setGeneric(simType: SimType, device: string, field: string, value: string, valueType: ValueType) { + switch (valueType) { + case "number": + SimGeneric.Set(simType, device, field, parseFloat(value)) + break + case "object": + SimGeneric.Set(simType, device, field, JSON.parse(value)) + break + case "boolean": + SimGeneric.Set(simType, device, field, value.toLowerCase() == "true") + break + default: + SimGeneric.Set(simType, device, field, value) + break + } +} + +const WSViewPanel: React.FC = ({ panelId }) => { + const [tb, setTb] = useState(generateTableBody()) + + const [selectedType, setSelectedType] = useState() + const [selectedDevice, setSelectedDevice] = useState() + const [field, setField] = useState("") + const [value, setValue] = useState("") + const [selectedValueType, setSelectedValueType] = useState("string") + + const deviceSelect = useMemo(() => { + if (!selectedType || !simMap.has(selectedType)) { + return <> + } + + return setSelectedDevice(v)} /> + }, [selectedType]) + + useEffect(() => { + setSelectedDevice(undefined) + }, [selectedType]) + + const onSimMapUpdate = useCallback((_: Event) => { + setTb(generateTableBody()) + }, []) + + useEffect(() => { + window.addEventListener(SimMapUpdateEvent.TYPE, onSimMapUpdate) + + return () => { + window.removeEventListener(SimMapUpdateEvent.TYPE, onSimMapUpdate) + } + }, [onSimMapUpdate]) + + return ( + } panelId={panelId} openLocation="right" sidePadding={4}> + + + + + + 
Type + + + Device + + + Data + + + + {tb} +
+
+ + setSelectedType(v as unknown as SimType)} + /> + {deviceSelect} + {selectedDevice ? ( + + setField(v)} /> + setValue(v)} /> + setSelectedValueType(v as ValueType)} + /> +