diff --git a/exporter/SynthesisFusionAddin/README.md b/exporter/SynthesisFusionAddin/README.md index 8f40acf4cd..4fc4933373 100644 --- a/exporter/SynthesisFusionAddin/README.md +++ b/exporter/SynthesisFusionAddin/README.md @@ -1,7 +1,9 @@ # Synthesis Exporter + This is a Addin for Autodesk® Fusion™ that will export a [Mirabuf](https://github.com/HiceS/mirabuf) usable by the Synthesis simulator. ## Features + - [x] Materials - [x] Apperances - [x] Instances @@ -30,16 +32,17 @@ We use `VSCode` Primarily, download it to interact with our code or use your own --- ### How to Build + Run + 1. See root [`README`](/README.md) on how to run `init` script 2. Open `Autodesk Fusion` 3. Select `UTILITIES` from the top bar 4. Click `ADD-INS` Button 5. Click `Add-Ins` tab at the top of Scripts and Add-Ins dialog -6. Press + Button under **My Add-Ins** +6. Press + Button under **My Add-Ins** 7. Navigate to the containing folder for this Addin and click open at bottom - _clone-directory_/synthesis/exporters/SynthesisFusionAddin 8. Synthesis should be an option - select it and click run at the bottom of the dialog 9. 
There should now be a button that says Synthesis in your utilities menu - - If there is no button there may be a problem - see below for [checking log file](#debug-non-start) + - If there is no button there may be a problem - see below for [checking log file](#debug-non-start) --- @@ -50,8 +53,8 @@ We use `VSCode` Primarily, download it to interact with our code or use your own Most of the runtime for the addin is saved under the `logs` directory in this folder - Open `logs/synthesis.log` - - If nothing appears something went very wrong (make a issue on this github) - - If something appears and you cannot solve it feel free to make an issue anyway and include the file + - If nothing appears something went very wrong (make a issue on this github) + - If something appears and you cannot solve it feel free to make an issue anyway and include the file #### General Debugging @@ -59,12 +62,12 @@ Most of the runtime for the addin is saved under the `logs` directory in this fo 2. Select `UTILITIES` from the top bar 3. Click `ADD-INS` Button 4. Click `Add-Ins` tab at the top of Scripts and Add-Ins dialog -5. Press + Button under **My Add-Ins** +5. Press + Button under **My Add-Ins** 6. Navigate to the containing folder for this Addin and click open at bottom - _clone-directory_/synthesis/exporters/SynthesisFusionAddin 7. Synthesis should be an option - select it and click `Debug` at the bottom of the dialog - - This is in a dropdown with the Run Button + - This is in a dropdown with the Run Button 8. This should open VSCode - Now run with `FN+5` - - Now you may add break points or debug at will + - Now you may add break points or debug at will --- @@ -84,4 +87,44 @@ We format using a Python formatter called `black` [![Code style: black](https:// - use `isort .` followed by `black .` to format all relevant exporter python files. - or, alternatively, run `python ./tools/format.py` to do this for you! 
-__Note: black will always ignore files in the proto/proto_out folder since google formats those__ +**Note: black will always ignore files in the proto/proto_out folder since google formats those** + +### Docstring standard + +This standard is inconsistently applied, and that's ok + +```python +def foo(bar: fizz="flower") -> Result[walrus, None]: + """ + Turns a fizz into a walrus + + Parameters: + bar - The fizz to be transformed (default = "flower") ; fizz standards are subject to change, old fizzes may no longer be valid + + Returns: + Success - The new walrus + Failure - None if the summoning fails ; the cause of failure will be printed, not returned + + Notes: + - Only works as expected if the bar arg isn't a palindrome or an anagram of coffee. Otherwise unexpected (but still valid) walruses may be returned + - Please do not name your fizz "rizz" either, it hurts the walrus's feelings + + TODO: Consult witch about inconsistent alchemical methods + """ + # More alchemical fizz -> walrus code + some_walrus = bar + "_coffee" + return some_walrus + +``` + +Note that not this much detail is necessary when writing function documentation, notes, defaults, and a differentiation between success and failure aren't always necessary. + +#### Where to list potential causes of failure? 
+ +It depends on how many you can list + +- 1: In the failure return case +- 2-3: In the notes section +- 4+: In a dedicated "potential causes of failure" section between the "returns" and "notes" sections + +Additionally, printing the error instead of returning it is bad practice diff --git a/exporter/SynthesisFusionAddin/Synthesis.py b/exporter/SynthesisFusionAddin/Synthesis.py index 37403d9f82..e3c86f0086 100644 --- a/exporter/SynthesisFusionAddin/Synthesis.py +++ b/exporter/SynthesisFusionAddin/Synthesis.py @@ -1,3 +1,5 @@ +# DO NOT CHANGE ORDER, OR ADD IMPORTS BEFORE UNTIL END COMMENT + import logging import os import traceback @@ -5,8 +7,6 @@ import adsk.core -from .src.APS import APS -from .src.configure import setAnalytics, unload_config from .src.general_imports import APP_NAME, DESCRIPTION, INTERNAL_ID, gm, root_logger from .src.Types.OString import OString from .src.UI import ( @@ -20,6 +20,13 @@ ) from .src.UI.Toolbar import Toolbar +# END OF RESTRICTION + +# Transition: AARD-1721 +# Should attempt to fix this ordering scheme within AARD-1741 +from .src.APS import APS # isort:skip +from .src.configure import setAnalytics, unload_config # isort:skip + def run(_): """## Entry point to application from Fusion.
@@ -136,7 +143,7 @@ def register_ui() -> None: work_panel, Helper.check_solid_open, ShowAPSAuthCommand.ShowAPSAuthCommandCreatedHandler, - description=f"APS TEST", + description=f"APS", command=True, ) diff --git a/exporter/SynthesisFusionAddin/proto/deps.py b/exporter/SynthesisFusionAddin/proto/deps.py index ea6da5a406..2744952d90 100644 --- a/exporter/SynthesisFusionAddin/proto/deps.py +++ b/exporter/SynthesisFusionAddin/proto/deps.py @@ -6,7 +6,7 @@ import adsk.core import adsk.fusion -from src.general_imports import INTERNAL_ID +from src.strings import INTERNAL_ID system = platform.system() @@ -167,13 +167,24 @@ def _checkDeps() -> bool: return False -try: - import logging.handlers +""" +Checks for, and installs if need be, the dependencies needed by the Synthesis Exporter. Will error if it cannot install the dependencies +correctly. This should crash the exporter, since most of the exporter needs these dependencies to function in +the first place. +""" - import google.protobuf - import pkg_resources - from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 -except ImportError or ModuleNotFoundError: - installCross(["protobuf==4.23.3"]) - from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 +def installDependencies(): + try: + import logging.handlers + + import google.protobuf + import pkg_resources + from requests import get, post + + from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 + except ImportError or ModuleNotFoundError: + installCross(["protobuf==4.23.3", "result==0.17.0"]) + from requests import get, post + + from .proto_out import assembly_pb2, joint_pb2, material_pb2, types_pb2 diff --git a/exporter/SynthesisFusionAddin/src/APS/APS.py b/exporter/SynthesisFusionAddin/src/APS/APS.py index e31964e561..22543438e1 100644 --- a/exporter/SynthesisFusionAddin/src/APS/APS.py +++ b/exporter/SynthesisFusionAddin/src/APS/APS.py @@ -7,15 +7,11 @@ import urllib.parse import urllib.request from dataclasses 
import dataclass +from typing import Any -from ..general_imports import ( - APP_NAME, - DESCRIPTION, - INTERNAL_ID, - gm, - my_addin_path, - root_logger, -) +import requests + +from ..general_imports import INTERNAL_ID, gm, my_addin_path CLIENT_ID = "GCxaewcLjsYlK8ud7Ka9AKf9dPwMR3e4GlybyfhAK2zvl3tU" auth_path = os.path.abspath(os.path.join(my_addin_path, "..", ".aps_auth")) @@ -59,53 +55,51 @@ def _res_json(res): def getCodeChallenge() -> str | None: - endpoint = "http://localhost:80/api/aps/challenge/" + endpoint = "https://synthesis.autodesk.com/api/aps/challenge/" res = urllib.request.urlopen(endpoint) data = _res_json(res) return data["challenge"] -def getAuth() -> APSAuth: +def getAuth() -> APSAuth | None: global APS_AUTH if APS_AUTH is not None: return APS_AUTH try: + curr_time = time.time() with open(auth_path, "rb") as f: - p = pickle.load(f) - APS_AUTH = APSAuth( - access_token=p["access_token"], - refresh_token=p["refresh_token"], - expires_in=p["expires_in"], - expires_at=int(p["expires_in"] * 1000), - token_type=p["token_type"], - ) - except: - raise Exception("Need to sign in!") + p: APSAuth = pickle.load(f) + logging.getLogger(f"{INTERNAL_ID}").info(msg=f"{json.dumps(p.__dict__)}") + APS_AUTH = p + except Exception as arg: + gm.ui.messageBox(f"ERROR:\n{arg}", "Please Sign In") + return None curr_time = int(time.time() * 1000) if curr_time >= APS_AUTH.expires_at: refreshAuthToken() if APS_USER_INFO is None: - loadUserInfo() + _ = loadUserInfo() return APS_AUTH def convertAuthToken(code: str): global APS_AUTH - authUrl = f'http://localhost:80/api/aps/code/?code={code}&redirect_uri={urllib.parse.quote_plus("http://localhost:80/api/aps/exporter/")}' + authUrl = f'https://synthesis.autodesk.com/api/aps/code/?code={code}&redirect_uri={urllib.parse.quote_plus("https://synthesis.autodesk.com/api/aps/exporter/")}' res = urllib.request.urlopen(authUrl) data = _res_json(res)["response"] + curr_time = time.time() APS_AUTH = APSAuth( 
access_token=data["access_token"], refresh_token=data["refresh_token"], expires_in=data["expires_in"], - expires_at=int(data["expires_in"] * 1000), + expires_at=int(curr_time + data["expires_in"] * 1000), token_type=data["token_type"], ) with open(auth_path, "wb") as f: - pickle.dump(data, f) + pickle.dump(APS_AUTH, f) f.close() - loadUserInfo() + _ = loadUserInfo() def removeAuth(): @@ -124,7 +118,7 @@ def refreshAuthToken(): "client_id": CLIENT_ID, "grant_type": "refresh_token", "refresh_token": APS_AUTH.refresh_token, - "scope": "data:read", + "scope": "data:create data:write data:search data:read", } ).encode("utf-8") req = urllib.request.Request("https://developer.api.autodesk.com/authentication/v2/token", data=body) @@ -133,13 +127,17 @@ def refreshAuthToken(): try: res = urllib.request.urlopen(req) data = _res_json(res) + curr_time = time.time() APS_AUTH = APSAuth( access_token=data["access_token"], refresh_token=data["refresh_token"], expires_in=data["expires_in"], - expires_at=int(data["expires_in"] * 1000), + expires_at=int(curr_time + data["expires_in"] * 1000), token_type=data["token_type"], ) + with open(auth_path, "wb") as f: + pickle.dump(APS_AUTH, f) + f.close() except urllib.request.HTTPError as e: removeAuth() logging.getLogger(f"{INTERNAL_ID}").error(f"Refresh Error:\n{e.code} - {e.reason}") @@ -182,3 +180,536 @@ def getUserInfo() -> APSUserInfo | None: if APS_USER_INFO is not None: return APS_USER_INFO return loadUserInfo() + + +def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_display_name: str) -> str | None: + """ + creates a folder on an APS project + + params: + auth - auth token + project - project blueprint; might be changed to just the project id + folder - the blueprint for the new folder + + returns: + success - the href of the new folder ; might be changed to the id in the future + failure - none if the API request fails ; the failure text will be printed + """ + headers = {"Authorization": f"Bearer {auth}", 
"Content-Type": "application/vnd.api+json"} + data: dict[str, Any] = { + "jsonapi": {"version": "1.0"}, + "data": { + "type": "folders", + "attributes": { + "name": folder_display_name, + "extension": {"type": "folders:autodesk.core:Folder", "version": "1.0"}, + }, + "relationships": {"parent": {"data": {"type": "folders", "id": f"{parent_folder_id}"}}}, + }, + } + + res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders", headers=headers, json=data + ) + if not res.ok: + gm.ui.messageBox(f"Failed to create new folder: {res.text}", "ERROR") + return None + json: dict[str, Any] = res.json() + id: str = json["data"]["id"] + return id + + +def file_path_to_file_name(file_path: str) -> str: + return file_path.split("/").pop() + + +def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_contents: str) -> str | None: + """ + uploads mirabuf file to a specific folder in an APS project + the folder and project must be created and valid + if the file has already been created, it will use the APS versioning API to upload it as a new file version + + parameters: + project - the project reference object, used for it's id ; may be changed to project_id in the future + folder - the folder reference object, used for it's id ; may be changed to folder_id in the future + file_path - the path to the file on your machine, to be uploaded to APS + + returns: + success - if the file already exists, the new version id, otherwise, None + failure - none ; the cause of the failure will be printed + + potential causes of failure: + - invalid auth + - incorrectly formatted requests + - API update + - API down + + notes: + - this function is janky as hell, it should bubble errors up but I'm super lazy + - check appropriate called function ~~if~~ when this function fails + + todo: Change so a folder is not needed, and the entire project is checked for files + """ + + # data:create + global APS_AUTH + if APS_AUTH is None: + 
gm.ui.messageBox("You must login to upload designs to APS", "USER ERROR") + auth = APS_AUTH.access_token + # Get token from APS API later + + new_folder_id = get_item_id(auth, project_id, folder_id, "MirabufDir", "folders") + if new_folder_id is None: + folder_id = create_folder(auth, project_id, folder_id, "MirabufDir") + else: + folder_id = new_folder_id + (lineage_id, file_id, file_version) = get_file_id(auth, project_id, folder_id, file_name) + + """ + Create APS Storage Location + """ + object_id = create_storage_location(auth, project_id, folder_id, file_name) + if object_id is None: + gm.ui.messageBox("UPLOAD ERROR", "Object id is none; check create storage location") + return None + (prefix, object_key) = str(object_id).split("/", 1) + bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod + + """ + Create Signed URL For APS Upload + """ + generate_signed_url_result = generate_signed_url(auth, bucket_key, object_key) + if generate_signed_url_result is None: + return None + + (upload_key, signed_url) = generate_signed_url_result + if upload_file(signed_url, file_contents) is None: + return None + + """ + Finish Upload and Initialize File Version + """ + if complete_upload(auth, upload_key, object_key, bucket_key) is None: + return None + if file_id != "": + update_file_version( + auth, project_id, folder_id, lineage_id, file_id, file_name, file_contents, file_version, object_id + ) + else: + _lineage_info = create_first_file_version(auth, str(object_id), project_id, str(folder_id), file_name) + return "" + + +def get_hub_id(auth: str, hub_name: str) -> str | None: + """ + gets a user's hub based on a hub name + + params: + auth - authorization token + hub_name - the name of the desired hub + + returns: + success - the hub's id or none if the hub doesn't exist + failure - the API text if there's an error + """ + + headers = {"Authorization": f"Bearer {auth}"} + hub_list_res = 
requests.get("https://developer.api.autodesk.com/project/v1/hubs", headers=headers) + if not hub_list_res.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {hub_list_res.text}") + return None + hub_list: list[dict[str, Any]] = hub_list_res.json() + for hub in hub_list: + if hub["attributes"]["name"] == hub_name: + id: str = hub["id"] + return id + return "" + + +def get_project_id(auth: str, hub_id: str, project_name: str) -> str | None: + """ + gets a project in a hub with a project name + + params: + auth - authorization token + hub_id - the id of the hub + project_name - the name of the desired project + + returns: + success - the project's id or none if the project doesn't exist + failure - the API text if there's an error + + notes: + - a hub_id can be derived from it's name with the get_hub_id function + """ + + headers = {"Authorization": f"Bearer {auth}"} + project_list_res = requests.get( + f"https://developer.api.autodesk.com/project/v1/hubs/{hub_id}/projects", headers=headers + ) + if not project_list_res.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {project_list_res.text}") + return None + project_list: list[dict[str, Any]] = project_list_res.json() + for project in project_list: + if project["attributes"]["name"] == project_name: + id: str = project["id"] + return id + return "" + + +def get_item_id(auth: str, project_id: str, parent_folder_id: str, folder_name: str, item_type: str) -> str | None: + headers = {"Authorization": f"Bearer {auth}"} + res = requests.get( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders/{parent_folder_id}/contents", + headers=headers, + ) + if not res.ok: + gm.ui.messageBox(f"Failed to get item: {res.text}") + return None + data: list[dict[str, Any]] = res.json()["data"] + if len(data) == 0: + return "" + for item in data: + if item["type"] == item_type and item["attributes"]["name"] == folder_name: + return item["id"] + return None + + +def 
update_file_version( + auth: str, + project_id: str, + folder_id: str, + lineage_id: str, + file_id: str, + file_name: str, + file_contents: str, + curr_file_version: str, + object_id: str, +) -> str | None: + """ + updates an existing file in an APS folder + + params: + auth - authorization token + project - the project reference object that the file is contain within + folder - the folder reference object that the file is contained within + file_id - the id of the file in APS + file_name - the name of the file in APS ; ex. test.mira + + returns: + success - the new version_id + failure - none + + potential causes of failure: + - invalid auth + - file doesn't exist in that position / with that id / name ; fix: get_file_id() or smth + - version one of the file hasn't been created ; fix: create_first_file_version() + """ + + # object_id = create_storage_location(auth, project_id, folder_id, file_name) + # if object_id is None: + # return None + # + # (prefix, object_key) = str(object_id).split("/", 1) + # bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod + # (upload_key, signed_url) = generate_signed_url(auth, bucket_key, object_key) + # + # if upload_file(signed_url, file_contents) is None: + # return None + + # if complete_upload(auth, upload_key, object_key, bucket_key) is None: + # return None + + # gm.ui.messageBox(f"file_name:{file_name}\nlineage_id:{lineage_id}\nfile_id:{file_id}\ncurr_file_version:{curr_file_version}\nobject_id:{object_id}", "REUPLOAD ARGS") + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + } + + attributes = {"name": file_name, "extension": {"type": "versions:autodesk.core:File", "version": f"1.0"}} + + relationships: dict[str, Any] = { + "item": { + "data": { + "type": "items", + "id": lineage_id, + } + }, + "storage": { + "data": { + "type": "objects", + "id": object_id, + } + }, + } + + data = { + "jsonapi": {"version": "1.0"}, + "data": {"type": 
"versions", "attributes": attributes, "relationships": relationships}, + } + update_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/versions", headers=headers, json=data + ) + if not update_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR:\n{update_res.text}", "Updating file to new version failed") + return None + gm.ui.messageBox( + f"Successfully updated file {file_name} to version {int(curr_file_version) + 1} on APS", "UPLOAD SUCCESS" + ) + new_id: str = update_res.json()["data"]["id"] + return new_id + + +def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> tuple[str, str, str] | None: + """ + gets the file id given a file name + + params: + auth - authorization token + project - the project reference object that the file is contain within + folder - the folder reference object that the file is contained within + file_name - the name of the file in APS ; ex. test.mira + + returns: + success - the id of the file and it's current version, or an empty tuple string if the file doesn't exist + failure - none + + potential causes of failure: + - incorrect auth + + notes: + - checking if a file exists is an intended use-case + """ + + headers: dict[str, str] = {"Authorization": f"Bearer {auth}"} + + params = {"filter[attributes.name]": file_name} + + file_res = requests.get( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders/{folder_id}/search", + headers=headers, + params=params, + ) + if file_res.status_code == 404: + return ("", "", "") + elif not file_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {file_res.text}", "Failed to get file") + return None + file_json: list[dict[str, Any]] = file_res.json() + if len(file_json["data"]) == 0: + return ("", "", "") + id: str = str(file_json["data"][0]["id"]) + lineage: str = str(file_json["data"][0]["relationships"]["item"]["data"]["id"]) + version: str = str(file_json["data"][0]["attributes"]["versionNumber"]) + return (lineage, id, 
version) + + +def create_storage_location(auth: str, project_id: str, folder_id: str, file_name: str) -> str | None: + """ + creates a storage location (a bucket) + the bucket can be used to upload a file to + every file must have a reserved storage location + I believe at the moment, object, bucket, and storage location are all used semi-interchangeably by APS documentation + + params: + auth - authorization token + project - a project reference object used for project id ; may be changed to project_id later + folder - a folder reference object used for project id ; may be changed to folder_id later + file_name - the name of the file to be later stored in the bucket + + returns: + success - the object_id of the bucket, which can be split into a bucket_key and upload_key + failure - the API failure text + + notes: + - fails if the project doesn't exist or auth is invalid + - the folder must be inside the project, the storage location will be inside the folder + """ + + data = { + "jsonapi": {"version": "1.0"}, + "data": { + "type": "objects", + "attributes": {"name": file_name}, + "relationships": {"target": {"data": {"type": "folders", "id": f"{folder_id}"}}}, + }, + } + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + } + storage_location_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/storage", json=data, headers=headers + ) + if not storage_location_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {storage_location_res.text}", f"Failed to create storage location") + return None + storage_location_json: dict[str, Any] = storage_location_res.json() + object_id: str = storage_location_json["data"]["id"] + return object_id + + +def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> tuple[str, str] | None: + """ + generates a signed_url for a bucket, given a bucket_key and object_key + + params: + auth - authorization token + bucket_key - the key of the bucket that 
the file will be stored in + object_key - the key of the object that the file will be stored in + + returns: + success - the upload_key and the signed_url + failure - the API error + + notes: + - fails if auth, the bucket, or object keys are invalid + - both params are returned by the create_storage_location function + """ + + headers = { + "Authorization": f"Bearer {auth}", + } + signed_url_res = requests.get( + f"https://developer.api.autodesk.com/oss/v2/buckets/{bucket_key}/objects/{object_key}/signeds3upload", + headers=headers, + ) + if not signed_url_res.ok: + gm.ui.messageBox(f"UPLOAD ERROR: {signed_url_res.text}", "Failed to get signed url") + return None + signed_url_json: dict[str, str] = signed_url_res.json() + return (signed_url_json["uploadKey"], signed_url_json["urls"][0]) + + +def upload_file(signed_url: str, file_contents: str) -> str | None: + """ + uploads a file to APS given a signed_url a path to the file on your machine + + params: + signed_url - the url to used to upload the file to a specific bucket ; returned by the generate_signed_url function + file_path - the path of the file to be uploaded + + returns: + success - none + failure - the API error + + notes: + - fails if the auth or the signed URL are invalid + """ + upload_response = requests.put(url=signed_url, data=file_contents) + if not upload_response.ok: + gm.ui.messageBox("UPLOAD ERROR", f"Failed to upload to signed url: {upload_response.text}") + return None + return "" + + +def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str) -> str | None: + """ + completes and verifies the APS file upload given the upload_key + + params: + auth - authorization token + upload_key - the key to verify the upload, returned by generate_signed_url function + bucket_key - the key of the bucket that the file was uploaded to, returned by the create_storage_location function + + returns: + success - none + failure - the API error + """ + + headers = { + "Authorization": 
f"Bearer {auth}", + "Content-Type": "application/json", + } + data = {"uploadKey": upload_key} + + completed_res = requests.post( + f"https://developer.api.autodesk.com/oss/v2/buckets/{bucket_key}/objects/{object_key}/signeds3upload", + json=data, + headers=headers, + ) + if not completed_res.ok: + gm.ui.messageBox( + f"UPLOAD ERROR: {completed_res.text}\n{completed_res.status_code}", "Failed to complete upload" + ) + return None + return "" + + +def create_first_file_version( + auth: str, object_id: str, project_id: str, folder_id: str, file_name: str +) -> tuple[str, str] | None: + """ + initializes versioning for a file + + params: + auth - authorization token + project_id - the id of the project the file was uploaded to + object_id - the id of the object the file was uploaded to + folder_id - the id of the folder the file was uploaded to + file_name - the name of the file + + returns: + success - the lineage id of the versioning history of the file and the href to the new version + failure - none + + potential causes of failure + - incorrect auth + - the named file's upload was never completed + - invalid project, object, or folder id + + notes: + - super complex request, probably not written correctly, likely a dev error + """ + + headers = { + "Authorization": f"Bearer {auth}", + "Content-Type": "application/vnd.api+json", + "Accept": "application/vnd.api+json", + } + + included_attributes = {"name": file_name, "extension": {"type": "versions:autodesk.core:File", "version": "1.0"}} + + attributes = { + "displayName": file_name, + "extension": { + "type": "items:autodesk.core:File", + "version": "1.0", + }, + } + + relationships = { + "tip": {"data": {"type": "versions", "id": "1"}}, + "parent": {"data": {"type": "folders", "id": folder_id}}, + } + + included = [ + { + "type": "versions", + "id": "1", + "attributes": included_attributes, + "relationships": {"storage": {"data": {"type": "objects", "id": object_id}}}, + }, + ] + + data = { + "jsonapi": 
{"version": "1.0"}, + "data": {"type": "items", "attributes": attributes, "relationships": relationships}, + "included": included, + } + + first_version_res = requests.post( + f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/items", json=data, headers=headers + ) + if not first_version_res.ok: + gm.ui.messageBox(f"Failed to create first file version: {first_version_res.text}", "UPLOAD ERROR") + return None + first_version_json: dict[str, Any] = first_version_res.json() + + lineage_id: str = first_version_json["data"]["id"] + href: str = first_version_json["links"]["self"]["href"] + + gm.ui.messageBox(f"Successful Upload of {file_name} to APS", "UPLOAD SUCCESS") + + return (lineage_id, href) diff --git a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py index 31ed4cd0c4..df5d21c9c1 100644 --- a/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py +++ b/exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py @@ -22,6 +22,7 @@ SignalType = Enum("SignalType", ["PWM", "CAN", "PASSIVE"]) ExportMode = Enum("ExportMode", ["ROBOT", "FIELD"]) # Dynamic / Static export PreferredUnits = Enum("PreferredUnits", ["METRIC", "IMPERIAL"]) +ExportLocation = Enum("ExportLocation", ["UPLOAD", "DOWNLOAD"]) @dataclass @@ -95,6 +96,8 @@ class ExporterOptions: compressOutput: bool = field(default=True) exportAsPart: bool = field(default=False) + exportLocation: ExportLocation = field(default=ExportLocation.UPLOAD) + hierarchy: ModelHierarchy = field(default=ModelHierarchy.FusionAssembly) visualQuality: TriangleMeshQualityOptions = field(default=TriangleMeshQualityOptions.LowQualityTriangleMesh) physicalDepth: PhysicalDepth = field(default=PhysicalDepth.AllOccurrence) diff --git a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py index a925f5b119..0fe8a6dafb 100644 --- 
a/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py +++ b/exporter/SynthesisFusionAddin/src/Parser/SynthesisParser/Parser.py @@ -7,9 +7,10 @@ from proto.proto_out import assembly_pb2, types_pb2 +from ...APS.APS import upload_mirabuf # This line causes everything to break from ...general_imports import * from ...UI.Camera import captureThumbnail, clearIconCache -from ..ExporterOptions import ExporterOptions, ExportMode +from ..ExporterOptions import ExporterOptions, ExportLocation, ExportMode from . import Components, JointHierarchy, Joints, Materials, PDMessage from .Utilities import * @@ -158,28 +159,41 @@ def export(self) -> bool: self.pdMessage.currentMessage = "Compressing File..." self.pdMessage.update() - # check if entire path exists and create if not since gzip doesn't do that. - path = pathlib.Path(self.exporterOptions.fileLocation).parent - path.mkdir(parents=True, exist_ok=True) - ### Print out assembly as JSON # miraJson = MessageToJson(assembly_out) # miraJsonFile = open(f'', 'wb') # miraJsonFile.write(str.encode(miraJson)) # miraJsonFile.close() - if self.exporterOptions.compressOutput: - self.logger.debug("Compressing file") - with gzip.open(self.exporterOptions.fileLocation, "wb", 9) as f: - self.pdMessage.currentMessage = "Saving File..." 
- self.pdMessage.update() - f.write(assembly_out.SerializeToString()) + # Upload Mirabuf File to APS + if self.exporterOptions.exportLocation == ExportLocation.UPLOAD: + self.logger.debug("Uploading file to APS") + project = app.data.activeProject + if not project.isValid: + gm.ui.messageBox("Project is invalid", "") + return False # add throw later + project_id = project.id + folder_id = project.rootFolder.id + file_name = f"{self.exporterOptions.fileLocation}.mira" + if upload_mirabuf(project_id, folder_id, file_name, assembly_out.SerializeToString()) is None: + gm.ui.messageBox("FAILED TO UPLOAD FILE TO APS", "ERROR") # add throw later + # Download Mirabuf File else: - f = open(self.exporterOptions.fileLocation, "wb") - f.write(assembly_out.SerializeToString()) - f.close() + # check if entire path exists and create if not since gzip doesn't do that. + path = pathlib.Path(self.exporterOptions.fileLocation).parent + path.mkdir(parents=True, exist_ok=True) + if self.exporterOptions.compressOutput: + self.logger.debug("Compressing file") + with gzip.open(self.exporterOptions.fileLocation, "wb", 9) as f: + self.pdMessage.currentMessage = "Saving File..." 
+ self.pdMessage.update() + f.write(assembly_out.SerializeToString()) + else: + f = open(self.exporterOptions.fileLocation, "wb") + f.write(assembly_out.SerializeToString()) + f.close() - progressDialog.hide() + _ = progressDialog.hide() if DEBUG: part_defs = assembly_out.data.parts.part_definitions diff --git a/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py b/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py index febb617ec4..806cd65bff 100644 --- a/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py +++ b/exporter/SynthesisFusionAddin/src/UI/ConfigCommand.py @@ -4,7 +4,8 @@ import logging import os -import platform + +# import platform import traceback from enum import Enum @@ -17,6 +18,7 @@ from ..general_imports import * from ..Parser.ExporterOptions import ( ExporterOptions, + ExportLocation, ExportMode, Gamepiece, Joint, @@ -223,6 +225,21 @@ def notify(self, args): dropdownExportMode.tooltip = "Export Mode" dropdownExportMode.tooltipDescription = "
This runs after the mode specific periodic functions, but before
+ * LiveWindow and SmartDashboard integrated updating.
+ */
+void Robot::RobotPeriodic() {}
+
+/**
+ * This autonomous (along with the chooser code above) shows how to select
+ * between different autonomous modes using the dashboard. The sendable chooser
+ * code works with the Java SmartDashboard. If you prefer the LabVIEW Dashboard,
+ * remove all of the chooser code and uncomment the GetString line to get the
+ * auto name from the text box below the Gyro.
+ *
+ * You can add additional auto modes by adding additional comparisons to the
+ * if-else structure below with additional strings. If using the SendableChooser
+ * make sure to add them to the chooser code above as well.
+ */
+void Robot::AutonomousInit() {
+ m_autoSelected = m_chooser.GetSelected();
+ // m_autoSelected = SmartDashboard::GetString("Auto Selector",
+ // kAutoNameDefault);
+ fmt::print("Auto selected: {}\n", m_autoSelected);
+
+ if (m_autoSelected == kAutoNameCustom) {
+ // Custom Auto goes here
+ } else {
+ // Default Auto goes here
+ }
+}
+
+void Robot::AutonomousPeriodic() {
+ if (m_autoSelected == kAutoNameCustom) {
+ // Custom Auto goes here
+ } else {
+ // Default Auto goes here
+ }
+}
+
+void Robot::TeleopInit() {}
+
+void Robot::TeleopPeriodic() {}
+
+void Robot::DisabledInit() {}
+
+void Robot::DisabledPeriodic() {}
+
+void Robot::TestInit() {}
+
+void Robot::TestPeriodic() {}
+
+void Robot::SimulationInit() {}
+
+void Robot::SimulationPeriodic() {}
+
+#ifndef RUNNING_FRC_TESTS
+int main() {
+ return frc::StartRobot If you change your main robot class, change the parameter type.
+ */
+ public static void main(String... args) {
+ RobotBase.startRobot(Robot::new);
+ }
+}
diff --git a/simulation/samples/JavaSample/src/main/java/frc/robot/Robot.java b/simulation/samples/JavaSample/src/main/java/frc/robot/Robot.java
new file mode 100644
index 0000000000..8a9aa5024b
--- /dev/null
+++ b/simulation/samples/JavaSample/src/main/java/frc/robot/Robot.java
@@ -0,0 +1,131 @@
+// Copyright (c) FIRST and other WPILib contributors.
+// Open Source Software; you can modify and/or share it under the terms of
+// the WPILib BSD license file in the root directory of this project.
+
+package frc.robot;
+
+import com.ctre.phoenix6.hardware.TalonFX;
+import com.revrobotics.CANSparkBase.IdleMode;
+// import com.revrobotics.CANSparkMax;
+import com.revrobotics.CANSparkLowLevel.MotorType;
+
+import edu.wpi.first.wpilibj.TimedRobot;
+import edu.wpi.first.wpilibj.motorcontrol.Spark;
+import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
+import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
+
+import com.autodesk.synthesis.revrobotics.CANSparkMax;
+
+/**
+ * The VM is configured to automatically run this class, and to call the functions corresponding to
+ * each mode, as described in the TimedRobot documentation. If you change the name of this class or
+ * the package after creating this project, you must also update the build.gradle file in the
+ * project.
+ */
+public class Robot extends TimedRobot {
+ private static final String kDefaultAuto = "Default";
+ private static final String kCustomAuto = "My Auto";
+ private String m_autoSelected;
+ private final SendableChooser This runs after the mode specific periodic functions, but before LiveWindow and
+ * SmartDashboard integrated updating.
+ */
+ @Override
+ public void robotPeriodic() {}
+
+ /**
+ * This autonomous (along with the chooser code above) shows how to select between different
+ * autonomous modes using the dashboard. The sendable chooser code works with the Java
+ * SmartDashboard. If you prefer the LabVIEW Dashboard, remove all of the chooser code and
+ * uncomment the getString line to get the auto name from the text box below the Gyro
+ *
+ * You can add additional auto modes by adding additional comparisons to the switch structure
+ * below with additional strings. If using the SendableChooser make sure to add them to the
+ * chooser code above as well.
+ */
+ @Override
+ public void autonomousInit() {
+ m_autoSelected = m_chooser.getSelected();
+ // m_autoSelected = SmartDashboard.getString("Auto Selector", kDefaultAuto);
+ System.out.println("Auto selected: " + m_autoSelected);
+ }
+
+ /** This function is called periodically during autonomous. */
+ @Override
+ public void autonomousPeriodic() {
+
+ m_Spark.set(0.5);
+ m_SparkMax.set(1.0);
+ m_Talon.set(-1.0);
+
+ switch (m_autoSelected) {
+ case kCustomAuto:
+ // Put custom auto code here
+ break;
+ case kDefaultAuto:
+ default:
+ // Put default auto code here
+ break;
+ }
+ }
+
+ /** This function is called once when teleop is enabled. */
+ @Override
+ public void teleopInit() {}
+
+ /** This function is called periodically during operator control. */
+ @Override
+ public void teleopPeriodic() {
+ m_Spark.set(0.25);
+ m_SparkMax.set(0.75);
+ m_Talon.set(-0.5);
+ }
+
+ /** This function is called once when the robot is disabled. */
+ @Override
+ public void disabledInit() {
+ m_Spark.set(0.0);
+ m_SparkMax.set(0.0);
+ m_Talon.set(0.0);
+ }
+
+ /** This function is called periodically when disabled. */
+ @Override
+ public void disabledPeriodic() {}
+
+ /** This function is called once when test mode is enabled. */
+ @Override
+ public void testInit() {}
+
+ /** This function is called periodically during test mode. */
+ @Override
+ public void testPeriodic() {}
+
+ /** This function is called once when the robot is first started up. */
+ @Override
+ public void simulationInit() {}
+
+ /** This function is called periodically whilst in simulation. */
+ @Override
+ public void simulationPeriodic() {}
+}