From 957862785a8dcd15d0e70bff1423752ff000d26e Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Wed, 18 Oct 2023 16:44:57 -0400
Subject: [PATCH 01/44] Add pre-commit and disable most warnings

---
 .pre-commit-config.yaml |  9 +++++++++
 .pylintrc               | 10 ++++++++++
 2 files changed, 19 insertions(+)
 create mode 100644 .pre-commit-config.yaml
 create mode 100644 .pylintrc

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..aa2f3d73
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,9 @@
+repos:
+  - repo: local
+    hooks:
+      - id: pylint
+        name: pylint
+        entry: python -m pylint
+        language: system
+        types: [python]
+        args: [--rcfile=.pylintrc]
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..9ac1e731
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,10 @@
+[MASTER]
+
+max-line-length = 120
+
+disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121,
+          W0614, W0401, W1202, C0117, W0718, R0205, R0402, R0914, R1725, R1735, C0411, W0237, W0702, W0223, W0613,
+          W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510,
+          E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113,
+          W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904,
+          W0101, C0302, E0110, W0603, R1701, W0106, R1721
\ No newline at end of file

From 141f50b8eaa395fb6be6cfae513e8656ba2a7455 Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Wed, 18 Oct 2023 16:56:20 -0400
Subject: [PATCH 02/44] Fix whitespace

---
 .github/workflows/codeql.yml          |  6 +--
 .github/workflows/main.yml            | 12 ++---
 .pre-commit-config.yaml               | 13 +++++
 .pylintrc                             |  4 +-
 pip_version                           |  2 +-
 pros/cli/click_classes.py             |  2 +-
 pros/cli/common.py                    |  2 +-
 pros/cli/conductor.py                 |  5 +-
 pros/cli/misc_commands.py             |  4 +-
 pros/cli/terminal.py                  |  4 +-
 pros/cli/upload.py                    | 14 +++---
 pros/conductor/conductor.py           |  4 +-
 pros/conductor/depots.md              |  8 +--
 pros/conductor/project/__init__.py    |  2 +-
 pros/ga/analytics.py                  | 10 ++--
 pros/serial/ports/exceptions.py       |  2 -
 pros/serial/ports/v5_wireless_port.py | 72 +++++++++++++--------------
 version                               |  2 +-
 win_version                           |  2 +-
 19 files changed, 90 insertions(+), 80 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index a903ec32..3ea5f6b1 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -48,11 +48,11 @@ jobs:
         # If you wish to specify custom queries, you can do so here or in a config file.
         # By default, queries listed here will override any specified in a config file.
         # Prefix the list here with "+" to use these queries and those in the config file.
-
+
         # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
         # queries: security-extended,security-and-quality
-
+
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
@@ -61,7 +61,7 @@
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
-    # If the Autobuild fails above, remove it and uncomment the following three lines.
+    # If the Autobuild fails above, remove it and uncomment the following three lines.
     # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
     # - run: |
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1d597153..b956f490 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -31,24 +31,24 @@ jobs:
     - uses: actions/checkout@v3.1.0
       with:
         fetch-depth: 0
-
+
     - name: Setup Python
       uses: actions/setup-python@v4.3.0
       with:
         python-version: 3.9
         cache: 'pip'
       if: matrix.os != 'macos-latest'
-
+
     - name: Setup Python MacOS
       run: |
        wget https://www.python.org/ftp/python/3.10.11/python-3.10.11-macos11.pkg
        sudo installer -verbose -pkg ./python-3.10.11-macos11.pkg -target /
        echo "/Library/Frameworks/Python.framework/Versions/3.10/bin" >> $GITHUB_PATH
       if: matrix.os == 'macos-latest'
-
+
     - name: Install Requirements
       run: python3 -m pip install --upgrade pip && pip3 install wheel && pip3 install -r requirements.txt && pip3 uninstall -y typing
-
+
     - name: Build Wheel
       run: python3 setup.py bdist_wheel
       if: matrix.os == 'ubuntu-latest'
@@ -59,7 +59,7 @@
         name: pros-cli-wheel-${{needs.update_build_number.outputs.output1}}
         path: dist/*
       if: matrix.os == 'ubuntu-latest'
-
+
     - name: Run Pyinstaller
       run: |
        python3 version.py
@@ -78,7 +78,7 @@
        pyinstaller --onefile pros/cli/compile_commands/intercept-cc.py --name=intercept-cc --target-arch=universal2
        pyinstaller --onefile pros/cli/compile_commands/intercept-cc.py --name=intercept-c++ --target-arch=universal2
       if: matrix.os == 'macos-latest'
-
+
     - name: Package Everything Up
       shell: bash
       run: |
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aa2f3d73..c22efffd 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,17 @@
 repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: mixed-line-ending
+        args: [--fix=lf]
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-vcs-permalinks
+      - id: check-merge-conflict
+      - id: check-case-conflict
+      - id: check-ast
+      - id: trailing-whitespace
+      - id: requirements-txt-fixer
   - repo: local
     hooks:
       - id: pylint
diff --git a/.pylintrc b/.pylintrc
index 9ac1e731..16e8f317 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -4,7 +4,7 @@ max-line-length = 120
 
 disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121,
           W0614, W0401, W1202, C0117, W0718, R0205, R0402, R0914, R1725, R1735, C0411, W0237, W0702, W0223, W0613,
-          W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510,
+          W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510,
           E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113,
           W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904,
-          W0101, C0302, E0110, W0603, R1701, W0106, R1721
\ No newline at end of file
+          W0101, C0302, E0110, W0603, R1701, W0106, R1721
diff --git a/pip_version b/pip_version
index a423d421..4d9d11cf 100644
--- a/pip_version
+++ b/pip_version
@@ -1 +1 @@
-3.4.2
\ No newline at end of file
+3.4.2
diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py
index cb5a82c0..8272f488
100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -162,4 +162,4 @@ def invoke(self, *args, **kwargs): if (isProject): #check if there is a project curr_proj = p() click.echo("PROS-Kernel Version: {}".format(curr_proj.kernel)) - raise e \ No newline at end of file + raise e diff --git a/pros/cli/common.py b/pros/cli/common.py index e666877d..b62c2d5b 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -139,7 +139,7 @@ def callback(ctx: click.Context, param: click.Parameter, value: bool): if value: echo("Not sending analytics for this command.\n") analytics.useAnalytics = False - pass + pass decorator = click.option('--no-analytics', expose_value=False, is_flag=True, default=False, is_eager=True, help="Don't send analytics for this command.", callback=callback, cls=PROSOption, hidden=True)(f) decorator.__name__ = f.__name__ diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 79e098f1..3b4c8257 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -222,7 +222,7 @@ def new_project(ctx: click.Context, path: str, target: str, version: str, if version.lower() == 'latest' or not version: version = '>0' if not force_system and c.Project.find_project(path) is not None: - logger(__name__).error('A project already exists in this location at ' + c.Project.find_project(path) + + logger(__name__).error('A project already exists in this location at ' + c.Project.find_project(path) + '! Delete it first. Are you creating a project in an existing one?', extra={'sentry': False}) ctx.exit(-1) try: @@ -311,7 +311,7 @@ def info_project(project: c.Project, ls_upgrades): Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ - analytics.send("info-project") + analytics.send("info-project") from pros.conductor.project import ProjectReport report = ProjectReport(project) _conductor = c.Conductor() @@ -366,4 +366,3 @@ def query_depots(url: bool): _conductor = c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") ui.echo('\n'.join(_conductor.query_depots(url))+"\n") - \ No newline at end of file diff --git a/pros/cli/misc_commands.py b/pros/cli/misc_commands.py index 8566456a..d212a2fc 100644 --- a/pros/cli/misc_commands.py +++ b/pros/cli/misc_commands.py @@ -19,9 +19,9 @@ def upgrade(force_check, no_install): """ with ui.Notification(): ui.echo('The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', color='yellow') - + return # Dead code below - + analytics.send("upgrade") from pros.upgrade import UpgradeManager manager = UpgradeManager() diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index 2f05f2fe..a44b89d5 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -42,7 +42,7 @@ def terminal(port: str, backend: str, **kwargs): may be preferred when "share" doesn't perform adequately. Note: share backend is not yet implemented. 
- """ + """ analytics.send("terminal") from pros.serial.devices.vex.v5_user_device import V5UserDevice from pros.serial.terminal import Terminal @@ -91,7 +91,7 @@ def __init__(self, file): self.log = open(file, 'a') def write(self, data): self.terminal.write(data) - self.log.write(data) + self.log.write(data) def flush(self): pass def end(self): diff --git a/pros/cli/upload.py b/pros/cli/upload.py index 545609a4..e0c74b9b 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -22,7 +22,7 @@ def upload_cli(): cls=PROSDeprecated, replacement='after') @click.option('--run-screen/--execute', 'run_screen', default=None, help='Display run program screen on the brain after upload.', cls=PROSDeprecated, replacement='after') -@click.option('-af', '--after', type=click.Choice(['run','screen','none']), default=None, help='Action to perform on the brain after upload.', +@click.option('-af', '--after', type=click.Choice(['run','screen','none']), default=None, help='Action to perform on the brain after upload.', cls=PROSOption, group='V5 Options') @click.option('--quirk', type=int, default=0) @click.option('--name', 'remote_name', type=str, default=None, required=False, help='Remote program name.', @@ -37,9 +37,9 @@ def upload_cli(): cls=PROSOption, group='V5 Options', hidden=True) @click.option('--compress-bin/--no-compress-bin', 'compress_bin', cls=PROSOption, group='V5 Options', default=True, help='Compress the program binary before uploading.') -@click.option('--description', default="Made with PROS", type=str, cls=PROSOption, group='V5 Options', +@click.option('--description', default="Made with PROS", type=str, cls=PROSOption, group='V5 Options', help='Change the description displayed for the program.') -@click.option('--name', default=None, type=str, cls=PROSOption, group='V5 Options', +@click.option('--name', default=None, type=str, cls=PROSOption, group='V5 Options', help='Change the name of the program.') @default_options @@ -119,12 +119,12 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg kwargs['remote_name'] = os.path.splitext(os.path.basename(path))[0] kwargs['remote_name'] = kwargs['remote_name'].replace('@', '_') kwargs['slot'] -= 1 - + action_to_kwarg = { - 'run' : vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, - 'screen' : vex.V5Device.FTCompleteOptions.RUN_SCREEN, + 'run' : vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, + 'screen' : vex.V5Device.FTCompleteOptions.RUN_SCREEN, 'none' : vex.V5Device.FTCompleteOptions.DONT_RUN - } + } after_upload_default = 'screen' #Determine which FTCompleteOption to assign to run_after if kwargs['after']==None: diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index b8e50416..1080371d 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -177,7 +177,7 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: results.extend(online_results) logger(__name__).debug('Saving Conductor config after checking for remote updates') self.save() # Save self since there may have been some updates from the depots - + return list(results) def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: @@ -340,6 +340,6 @@ def add_depot(self, name: str, url: str): def remove_depot(self, name: str): del self.depots[name] self.save() - + def query_depots(self, url: bool): return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] diff --git a/pros/conductor/depots.md 
b/pros/conductor/depots.md index 33a92336..f4efcf3f 100644 --- a/pros/conductor/depots.md +++ b/pros/conductor/depots.md @@ -13,7 +13,7 @@ $ pros conduct add-depot test "https://pros.cs.purdue.edu/v5/_static/beta/testin `pros conduct remove-depot ` Example: -```bash +```bash $ pros conduct remove-depot test > Removed depot test ``` @@ -28,11 +28,11 @@ Examples: ```bash $ pros conduct query-depots --url > Available Depots: -> +> > kernel-beta-mainline -- https://raw.githubusercontent.com/purduesigbots/pros-mainline/master/beta/kernel-beta-mainline.json > pros-mainline -- https://purduesigbots.github.io/pros-mainline/pros-mainline.json > test -- https://pros.cs.purdue.edu/v5/_static/beta/testing-mainline.json -> +> ``` ```bash $ pros conduct query-depots @@ -41,5 +41,5 @@ $ pros conduct query-depots > kernel-beta-mainline > pros-mainline > test -> +> ``` diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index 76e4d192..486610ca 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -293,7 +293,7 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil if not os.environ.get('PROS_TOOLCHAIN'): ui.logger(__name__).warn("PROS toolchain not found! Please ensure the toolchain is installed correctly and your environment variables are set properly.\n") ui.logger(__name__).error(f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n",extra={'sentry':False}) - if not suppress_output: + if not suppress_output: pipe.close() sys.exit() if not suppress_output: diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index 6f786105..7a9c76af 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -53,9 +53,9 @@ def send(self,action): 'ni': 0 } - session = FuturesSession() + session = FuturesSession() - #Send payload to GA servers + #Send payload to GA servers future = session.post(url=url, data=payload, headers={'User-Agent': agent}, @@ -71,13 +71,13 @@ def set_use(self, value: bool): self.useAnalytics = value self.cli_config.ga['enabled'] = self.useAnalytics self.cli_config.save() - + def process_requests(self): responses = [] for future in as_completed(self.pendingRequests): try: response = future.result() - + if not response.status_code==200: print("Something went wrong while sending analytics!") print(response) @@ -92,4 +92,4 @@ def process_requests(self): return responses -analytics = Analytics() \ No newline at end of file +analytics = Analytics() diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index cd3f0bca..1a869f38 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -26,5 +26,3 @@ def __str__(self): return f"Port not found: Could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " \ f"firmware utilities; moving to a different USB port; {extra}or " \ f"restarting the device." 
-
-
diff --git a/pros/serial/ports/v5_wireless_port.py b/pros/serial/ports/v5_wireless_port.py
index 80d4717d..dc25c259 100644
--- a/pros/serial/ports/v5_wireless_port.py
+++ b/pros/serial/ports/v5_wireless_port.py
@@ -1,36 +1,36 @@
-from typing import *
-
-from pros.serial.devices.vex.v5_device import V5Device
-from pros.serial.ports import BasePort, DirectPort
-
-
-class V5WirelessPort(BasePort):
-    def __init__(self, port):
-        self.buffer: bytearray = bytearray()
-
-        self.port_instance = DirectPort(port)
-        self.device = V5Device(self.port_instance)
-        self.download_channel = self.device.DownloadChannel(self.device)
-        self.download_channel.__enter__()
-
-    def destroy(self):
-        self.port_instance.destroy()
-        self.download_channel.__exit__()
-
-    def config(self, command: str, argument: Any):
-        return self.port_instance.config(command, argument)
-
-    # TODO: buffer input? technically this is done by the user_fifo_write cmd blocking until whole input is written?
-    def write(self, data: bytes):
-        self.device.user_fifo_write(data)
-
-    def read(self, n_bytes: int = 0) -> bytes:
-        if n_bytes > len(self.buffer):
-            self.buffer.extend(self.device.user_fifo_read())
-        ret = self.buffer[:n_bytes]
-        self.buffer = self.buffer[n_bytes:]
-        return ret
-
-    @property
-    def name(self) -> str:
-        return self.port_instance.name
+from typing import *
+
+from pros.serial.devices.vex.v5_device import V5Device
+from pros.serial.ports import BasePort, DirectPort
+
+
+class V5WirelessPort(BasePort):
+    def __init__(self, port):
+        self.buffer: bytearray = bytearray()
+
+        self.port_instance = DirectPort(port)
+        self.device = V5Device(self.port_instance)
+        self.download_channel = self.device.DownloadChannel(self.device)
+        self.download_channel.__enter__()
+
+    def destroy(self):
+        self.port_instance.destroy()
+        self.download_channel.__exit__()
+
+    def config(self, command: str, argument: Any):
+        return self.port_instance.config(command, argument)
+
+    # TODO: buffer input? technically this is done by the user_fifo_write cmd blocking until whole input is written?
+    def write(self, data: bytes):
+        self.device.user_fifo_write(data)
+
+    def read(self, n_bytes: int = 0) -> bytes:
+        if n_bytes > len(self.buffer):
+            self.buffer.extend(self.device.user_fifo_read())
+        ret = self.buffer[:n_bytes]
+        self.buffer = self.buffer[n_bytes:]
+        return ret
+
+    @property
+    def name(self) -> str:
+        return self.port_instance.name
diff --git a/version b/version
index a423d421..4d9d11cf 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-3.4.2
\ No newline at end of file
+3.4.2
diff --git a/win_version b/win_version
index 0ccc3dcd..e44b972e 100644
--- a/win_version
+++ b/win_version
@@ -1 +1 @@
-3.4.2.0
\ No newline at end of file
+3.4.2.0

From 63b2dbed7f177eb0ac1d2b949a7ce1d2e43ad2be Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Wed, 18 Oct 2023 16:56:43 -0400
Subject: [PATCH 03/44] Sort alphabetically

---
 requirements.txt | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 0cbde8f3..6d3b9813 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,16 +1,16 @@
+cachetools
 click>=6,<7
+cobs
+colorama
+jsonpickle
+observable
+pyinstaller
+pypng==0.0.20
 pyserial
-cachetools
+pyzmq
 requests
 requests-futures
-tabulate
-jsonpickle
-semantic_version
-colorama
-pyzmq
-cobs
 scan-build==2.0.13
+semantic_version
 sentry-sdk
-observable
-pypng==0.0.20
-pyinstaller
\ No newline at end of file
+tabulate

From f015da39b003911694e979b333b33d50d5d5ea04 Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Wed, 18 Oct 2023 17:51:03 -0400
Subject: [PATCH 04/44] Fixed some errors

---
 .pylintrc                                    | 5 ++---
 pros/common/utils.py                         | 2 +-
 pros/conductor/project/ProjectTransaction.py | 1 -
 pros/conductor/project/__init__.py           | 2 +-
 pros/ga/analytics.py                         | 2 +-
 pros/serial/devices/vex/crc.py               | 2 +-
 pros/serial/devices/vex/v5_device.py         | 5 ++---
 pros/serial/ports/__init__.py                | 2 +-
 8 files changed, 9 insertions(+), 12 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 16e8f317..5a21d69f 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -1,10 +1,9 @@
 [MASTER]
 
 max-line-length = 120
-
-disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121,
+disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121,
           W0614, W0401, W1202, C0117, W0718, R0205, R0402, R0914, R1725, R1735, C0411, W0237, W0702, W0223, W0613,
           W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510,
           E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113,
           W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904,
-          W0101, C0302, E0110, W0603, R1701, W0106, R1721
+          W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601,
diff --git a/pros/common/utils.py b/pros/common/utils.py
index 294da89f..d74d9a2d 100644
--- a/pros/common/utils.py
+++ b/pros/common/utils.py
@@ -35,7 +35,7 @@ def get_version():
     module = pros.cli.main.__name__
     for dist in pkg_resources.working_set:
         scripts = dist.get_entry_map().get('console_scripts') or {}
-        for script_name, entry_point in iter(scripts.items()):
+        for _, entry_point in iter(scripts.items()):
             if entry_point.module_name == module:
                 ver = dist.version
                 if ver is not None:
diff --git a/pros/conductor/project/ProjectTransaction.py b/pros/conductor/project/ProjectTransaction.py
index
14034d42..edae1330 100644 --- a/pros/conductor/project/ProjectTransaction.py +++ b/pros/conductor/project/ProjectTransaction.py @@ -36,7 +36,6 @@ def execute(self, conductor: c.Conductor, project: c.Project): raise e else: ui.logger(__name__).warning(str(e)) - return None def describe(self, conductor: c.Conductor, project: c.Project): action = project.get_template_actions(conductor.resolve_template(self.template)) diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index 486610ca..a0777595 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -411,7 +411,7 @@ def find_project(path: str, recurse_times: int = 10): if os.path.isfile(path): path = os.path.dirname(path) if os.path.isdir(path): - for n in range(recurse_times): + for _ in range(recurse_times): if path is not None and os.path.isdir(path): files = [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f)) and f.lower() == 'project.pros'] diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index 7a9c76af..247e6b31 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -62,7 +62,7 @@ def send(self,action): timeout=5.0) self.pendingRequests.append(future) - except Exception as e: + except Exception: from pros.cli.common import logger logger(__name__).warning("Unable to send analytics. Do you have a stable internet connection?", extra={'sentry': False}) diff --git a/pros/serial/devices/vex/crc.py b/pros/serial/devices/vex/crc.py index f53bee5d..2e4270d7 100644 --- a/pros/serial/devices/vex/crc.py +++ b/pros/serial/devices/vex/crc.py @@ -9,7 +9,7 @@ def __init__(self, size: int, polynomial: int): for i in range(256): crc_accumulator = i << (self._size - 8) - for j in range(8): + for _ in range(8): if crc_accumulator & (1 << (self._size - 1)): crc_accumulator = (crc_accumulator << 1) ^ self._polynomial else: diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index a19d0777..124897e0 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -268,7 +268,6 @@ def upload_project(self, project: Project, **kwargs): def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigParser = None, **kwargs): project_ini = ConfigParser() - from semantic_version import Spec default_icon = 'USER902x.bmp' if Spec('>=1.0.0-22').match(self.status['cpu0_version']) else 'USER999x.bmp' project_ini['project'] = { 'version': str(kwargs.get('ide_version') or get_version()), @@ -612,7 +611,7 @@ def read_ini(self, remote_name: str) -> Optional[ConfigParser]: rx_io.seek(0, 0) config.read_string(rx_io.read().decode('ascii')) return config - except VEXCommError as e: + except VEXCommError: return None @retries @@ -918,7 +917,7 @@ def kv_write(self, kv: str, payload: Union[Iterable, bytes, bytearray, str]): payload = payload.encode(encoding='ascii') tx_fmt =f'<{len(encoded_kv)}s{len(payload)}s' tx_payload = struct.pack(tx_fmt, encoded_kv, payload) - ret = self._txrx_ext_packet(0x2f, tx_payload, 1, check_length=False, check_ack=True) + self._txrx_ext_packet(0x2f, tx_payload, 1, check_length=False, check_ack=True) logger(__name__).debug('Completed ext 0x2f command') return payload diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index be344a79..4850b2b9 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -1,7 +1,7 @@ from functools import lru_cache from pros.common import logger -from serial.tools import list_ports as list_ports 
+from serial.tools import list_ports from .base_port import BasePort, PortConnectionException, PortException from .direct_port import DirectPort From 491f4e68531b9c5308e801fcea83e6dac8db99fc Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Wed, 25 Oct 2023 15:42:16 -0400 Subject: [PATCH 05/44] Add pre-commit and pylint --- requirements.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index 6d3b9813..ae307540 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,9 @@ cobs colorama jsonpickle observable +pre-commit pyinstaller +pylint pypng==0.0.20 pyserial pyzmq From a94380595044e0f054f8510506025fcd2ae2ca7e Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Wed, 25 Oct 2023 15:52:54 -0400 Subject: [PATCH 06/44] Add pylint to github actions --- .github/workflows/main.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1d597153..74eba702 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,6 +5,27 @@ on: pull_request: jobs: + pylint: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + pip install -r requirements.txt + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') + update_build_number: runs-on: ubuntu-latest outputs: From 9346d25e484351aa258f274ba5a6e7b0f320fc17 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Wed, 25 Oct 2023 15:57:22 -0400 Subject: [PATCH 07/44] Add rcfile --- .pylintrc | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 .pylintrc diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000..5a21d69f --- /dev/null +++ b/.pylintrc @@ -0,0 +1,9 @@ +[MASTER] + +max-line-length = 120 +disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120, E1123, C0209, R1710, W0621, C0121, + W0614, W0401, W1202, C0117, W0718, R0205, R0402, R0914, R1725, R1735, C0411, W0237, W0702, W0223, W0613, + W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510, + E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113, + W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904, + W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, From f251a16c79f84644cd7958c99b6423a31ae81923 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Wed, 25 Oct 2023 16:02:14 -0400 Subject: [PATCH 08/44] Disable cyclic import --- .pylintrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pylintrc b/.pylintrc index 5a21d69f..4514f984 100644 --- a/.pylintrc +++ b/.pylintrc @@ -6,4 +6,4 @@ disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120 W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510, E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, 
W3101, W0640, C0201, W1113, W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904, - W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, + W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, R0401 From 682a4917e194fc0d7f760b8629d2efaf7bd587b0 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:17:20 -0500 Subject: [PATCH 09/44] Add pre-commit and pylint --- requirements.txt | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/requirements.txt b/requirements.txt index 48d35bc3..9bc7673c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,12 +1,14 @@ -click>=8 -rich-click -pyserial -pyzmq -requests -requests-futures -scan-build==2.0.13 -semantic_version -sentry-sdk -observable -pypng==0.0.20 -pyinstaller +click>=8 +observable +pre-commit +pyinstaller +pylint +pypng==0.0.20 +pyserial +pyzmq +requests +requests-futures +rich-click +scan-build==2.0.13 +semantic_version +sentry-sdk From 5eaccfb792fc663353f54e81b0e08efd085ba823 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:18:28 -0500 Subject: [PATCH 10/44] Fix whitespace --- pip_version | 2 +- pros/conductor/conductor.py | 712 ++++++++++++++++++------------------ version | 2 +- win_version | 2 +- 4 files changed, 359 insertions(+), 359 deletions(-) diff --git a/pip_version b/pip_version index 8a0feb98..6cb9d3dd 100644 --- a/pip_version +++ b/pip_version @@ -1 +1 @@ -3.4.3 \ No newline at end of file +3.4.3 diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index c2766f74..86ef05cd 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -1,356 +1,356 @@ -import os.path -import shutil -from enum import Enum -from pathlib import Path -from typing import * - -import click -from semantic_version import Spec, Version - -from pros.common import * -from pros.conductor.project import TemplateAction -from pros.conductor.project.template_resolution import InvalidTemplateException -from pros.config import Config -from .depots import Depot, HttpDepot -from .project import Project -from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template - -MAINLINE_NAME = 'pros-mainline' -MAINLINE_URL = 'https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json' -EARLY_ACCESS_NAME = 'kernel-early-access-mainline' -EARLY_ACCESS_URL = 'https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json' - -""" -# TBD? 
Currently, EarlyAccess value is stored in config file -class ReleaseChannel(Enum): - Stable = 'stable' - Beta = 'beta' -""" - -class Conductor(Config): - """ - Provides entrances for all conductor-related tasks (fetching, applying, creating new projects) - """ - def __init__(self, file=None): - if not file: - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') - self.local_templates: Set[LocalTemplate] = set() - self.early_access_local_templates: Set[LocalTemplate] = set() - self.depots: Dict[str, Depot] = {} - self.default_target: str = 'v5' - self.default_libraries: Dict[str, List[str]] = None - self.early_access_libraries: Dict[str, List[str]] = None - self.use_early_access = False - self.warn_early_access = False - super(Conductor, self).__init__(file) - needs_saving = False - if MAINLINE_NAME not in self.depots or \ - not isinstance(self.depots[MAINLINE_NAME], HttpDepot) or \ - self.depots[MAINLINE_NAME].location != MAINLINE_URL: - self.depots[MAINLINE_NAME] = HttpDepot(MAINLINE_NAME, MAINLINE_URL) - needs_saving = True - # add early access depot as another remote depot - if EARLY_ACCESS_NAME not in self.depots or \ - not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) or \ - self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL: - self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) - needs_saving = True - if self.default_target is None: - self.default_target = 'v5' - needs_saving = True - if self.default_libraries is None: - self.default_libraries = { - 'v5': ['okapilib'], - 'cortex': [] - } - needs_saving = True - if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: - self.early_access_libraries = { - 'v5': ['liblvgl', 'okapilib'], - 'cortex': [] - } - needs_saving = True - if 'v5' not in self.default_libraries: - self.default_libraries['v5'] = [] - needs_saving = True - if 'cortex' not in self.default_libraries: - self.default_libraries['cortex'] = [] - needs_saving = True - if 'v5' not in self.early_access_libraries: - self.early_access_libraries['v5'] = [] - needs_saving = True - if 'cortex' not in self.early_access_libraries: - self.early_access_libraries['cortex'] = [] - needs_saving = True - if needs_saving: - self.save() - from pros.common.sentry import add_context - add_context(self) - - def get_depot(self, name: str) -> Optional[Depot]: - return self.depots.get(name) - - def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> LocalTemplate: - for t in list(self.local_templates): - if t.identifier == template.identifier: - self.purge_template(t) - - if 'destination' in kwargs: # this is deprecated, will work (maybe) but not desirable behavior - destination = kwargs.pop('destination') - else: - destination = os.path.join(self.directory, 'templates', template.identifier) - if os.path.isdir(destination): - shutil.rmtree(destination) - - template: Template = depot.fetch_template(template, destination, **kwargs) - click.secho(f'Fetched {template.identifier} from {depot.name} depot', dim=True) - local_template = LocalTemplate(orig=template, location=destination) - local_template.metadata['origin'] = depot.name - click.echo(f'Adding {local_template.identifier} to registry...', nl=False) - if depot.name == EARLY_ACCESS_NAME: # check for early access - self.early_access_local_templates.add(local_template) - else: - self.local_templates.add(local_template) - self.save() - if isinstance(template, ExternalTemplate) and template.directory == destination: - template.delete() - 
click.secho('Done', fg='green') - return local_template - - def purge_template(self, template: LocalTemplate): - if template.metadata['origin'] == EARLY_ACCESS_NAME: - if template not in self.early_access_local_templates: - logger(__name__).info(f"{template.identifier} was not in the Conductor's local early access templates cache.") - else: - self.early_access_local_templates.remove(template) - else: - if template not in self.local_templates: - logger(__name__).info(f"{template.identifier} was not in the Conductor's local templates cache.") - else: - self.local_templates.remove(template) - - if os.path.abspath(template.location).startswith( - os.path.abspath(os.path.join(self.directory, 'templates'))) \ - and os.path.isdir(template.location): - shutil.rmtree(template.location) - self.save() - - def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: bool = True, - allow_offline: bool = True, force_refresh: bool = False, - unique: bool = True, **kwargs) -> List[BaseTemplate]: - results = list() if not unique else set() - kernel_version = kwargs.get('kernel_version', None) - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) - if isinstance(identifier, str): - query = BaseTemplate.create_query(name=identifier, **kwargs) - else: - query = identifier - if allow_offline: - if self.use_early_access: - offline_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates)) - else: - offline_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates)) - - if unique: - results.update(offline_results) - else: - results.extend(offline_results) - if allow_online: - for depot in self.depots.values(): - # EarlyAccess depot will only be accessed when the --early-access flag is true - if depot.name != EARLY_ACCESS_NAME or (depot.name == EARLY_ACCESS_NAME and self.use_early_access): - remote_templates = depot.get_remote_templates(force_check=force_refresh, **kwargs) - online_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), - remote_templates)) - - if unique: - results.update(online_results) - else: - results.extend(online_results) - logger(__name__).debug('Saving Conductor config after checking for remote updates') - self.save() # Save self since there may have been some updates from the depots - - if len(results) == 0 and (kernel_version.split('.')[0] == '3' and not self.use_early_access): - raise dont_send( - InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}')) - - return list(results) - - def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: - if isinstance(identifier, str): - kwargs['name'] = identifier - elif isinstance(identifier, BaseTemplate): - kwargs['orig'] = identifier - query = BaseTemplate.create_query(**kwargs) - logger(__name__).info(f'Query: {query}') - logger(__name__).debug(query.__dict__) - templates = self.resolve_templates(query, **kwargs) - logger(__name__).info(f'Candidates: {", ".join([str(t) for t in templates])}') - if not any(templates): - return None - query.version = str(Spec(query.version or '>0').select([Version(t.version) for t in templates])) - v = Version(query.version) - v.prerelease = v.prerelease if len(v.prerelease) else ('',) - v.build = v.build if len(v.build) else ('',) - query.version = f'=={v}' - logger(__name__).info(f'Resolved to {query.identifier}') - 
templates = self.resolve_templates(query, **kwargs) - if not any(templates): - return None - # prefer local templates first - local_templates = [t for t in templates if isinstance(t, LocalTemplate)] - if any(local_templates): - # there's a local template satisfying the query - if len(local_templates) > 1: - # This should never happen! Conductor state must be invalid - raise Exception(f'Multiple local templates satisfy {query.identifier}!') - return local_templates[0] - - # prefer pros-mainline template second - mainline_templates = [t for t in templates if t.metadata['origin'] == 'pros-mainline'] - if any(mainline_templates): - return mainline_templates[0] - - # No preference, just FCFS - return templates[0] - - def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], **kwargs): - upgrade_ok = kwargs.get('upgrade_ok', True) - install_ok = kwargs.get('install_ok', True) - downgrade_ok = kwargs.get('downgrade_ok', True) - download_ok = kwargs.get('download_ok', True) - force = kwargs.get('force_apply', False) - - kwargs['target'] = project.target - if 'kernel' in project.templates: - # support_kernels for backwards compatibility, but kernel_version should be getting most of the exposure - kwargs['kernel_version'] = kwargs['supported_kernels'] = project.templates['kernel'].version - template = self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) - if template is None: - raise dont_send( - InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}')) - - # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 - if template.name == 'kernel': - isProject = Project.find_project("") - if isProject: - curr_proj = Project() - if curr_proj.kernel: - if template.version[0] == '4' and curr_proj.kernel[0] == '3': - confirm = ui.confirm(f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' - f'Do you still want to upgrade?') - if not confirm: - raise dont_send( - InvalidTemplateException(f'Not upgrading')) - if template.version[0] == '3' and curr_proj.kernel[0] == '4': - confirm = ui.confirm(f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' - f'Do you still want to downgrade?') - if not confirm: - raise dont_send( - InvalidTemplateException(f'Not downgrading')) - elif not self.use_early_access and template.version[0] == '3' and not self.warn_early_access: - confirm = ui.confirm(f'PROS 4 is now in early access. 
' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?') - self.warn_early_access = True - if confirm: # use pros 4 - self.use_early_access = True - kwargs['version'] = '>=0' - self.save() - # Recall the function with early access enabled - return self.apply_template(project, identifier, **kwargs) - - self.save() - if not isinstance(template, LocalTemplate): - with ui.Notification(): - template = self.fetch_template(self.get_depot(template.metadata['origin']), template, **kwargs) - assert isinstance(template, LocalTemplate) - - logger(__name__).info(str(project)) - valid_action = project.get_template_actions(template) - if valid_action == TemplateAction.NotApplicable: - raise dont_send( - InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) - ) - if force \ - or (valid_action == TemplateAction.Upgradable and upgrade_ok) \ - or (valid_action == TemplateAction.Installable and install_ok) \ - or (valid_action == TemplateAction.Downgradable and downgrade_ok): - project.apply_template(template, force_system=kwargs.pop('force_system', False), - force_user=kwargs.pop('force_user', False), - remove_empty_directories=kwargs.pop('remove_empty_directories', False)) - ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') - elif valid_action != TemplateAction.AlreadyInstalled: - raise dont_send( - InvalidTemplateException(f'Could not install {template.identifier} because it is {valid_action.name},' - f' and that is not allowed.', reason=valid_action) - ) - else: - ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') - - @staticmethod - def remove_template(project: Project, identifier: Union[str, BaseTemplate], remove_user: bool = True, - remove_empty_directories: bool = True): - ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') - if not project.resolve_template(identifier): - ui.echo(f"{identifier} is not an applicable template") - for template in project.resolve_template(identifier): - ui.echo(f'Uninstalling {template.identifier}') - project.remove_template(template, remove_user=remove_user, - remove_empty_directories=remove_empty_directories) - - def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) - if kwargs["version_source"]: # If true, then the user has not specified a version - if not self.use_early_access and self.warn_early_access: - ui.echo(f"PROS 4 is now in early access. " - f"If you would like to use it, use the --early-access flag.") - elif self.use_early_access: - ui.echo(f'Early access is enabled. 
Using PROS 4.') - elif self.use_early_access: - ui.echo(f'Early access is enabled.') - - if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): - raise dont_send(ValueError('Will not create a project in user home directory')) - for char in str(Path(path)): - if char in ['?', '<', '>', '*', '|', '^', '#', '%', '&', '$', '+', '!', '`', '\'', '=', - '@', '\'', '{', '}', '[', ']', '(', ')', '~'] or ord(char) > 127: - raise dont_send(ValueError(f'Invalid character found in directory name: \'{char}\'')) - - proj = Project(path=path, create=True) - if 'target' in kwargs: - proj.target = kwargs['target'] - if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace(): - proj.project_name = kwargs['project_name'] - else: - proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path))) - if 'version' in kwargs: - if kwargs['version'] == 'latest': - kwargs['version'] = '>=0' - self.apply_template(proj, identifier='kernel', **kwargs) - proj.save() - - if not no_default_libs: - libraries = self.early_access_libraries if self.use_early_access else self.default_libraries - for library in libraries[proj.target]: - try: - # remove kernel version so that latest template satisfying query is correctly selected - if 'version' in kwargs: - kwargs.pop('version') - self.apply_template(proj, library, **kwargs) - except Exception as e: - logger(__name__).exception(e) - return proj - - def add_depot(self, name: str, url: str): - self.depots[name] = HttpDepot(name, url) - self.save() - - def remove_depot(self, name: str): - del self.depots[name] - self.save() - - def query_depots(self, url: bool): - return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] +import os.path +import shutil +from enum import Enum +from pathlib import Path +from typing import * + +import click +from semantic_version import Spec, Version + +from pros.common import * +from pros.conductor.project import TemplateAction +from pros.conductor.project.template_resolution import InvalidTemplateException +from pros.config import Config +from .depots import Depot, HttpDepot +from .project import Project +from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template + +MAINLINE_NAME = 'pros-mainline' +MAINLINE_URL = 'https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json' +EARLY_ACCESS_NAME = 'kernel-early-access-mainline' +EARLY_ACCESS_URL = 'https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json' + +""" +# TBD? 
Currently, EarlyAccess value is stored in config file +class ReleaseChannel(Enum): + Stable = 'stable' + Beta = 'beta' +""" + +class Conductor(Config): + """ + Provides entrances for all conductor-related tasks (fetching, applying, creating new projects) + """ + def __init__(self, file=None): + if not file: + file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + self.local_templates: Set[LocalTemplate] = set() + self.early_access_local_templates: Set[LocalTemplate] = set() + self.depots: Dict[str, Depot] = {} + self.default_target: str = 'v5' + self.default_libraries: Dict[str, List[str]] = None + self.early_access_libraries: Dict[str, List[str]] = None + self.use_early_access = False + self.warn_early_access = False + super(Conductor, self).__init__(file) + needs_saving = False + if MAINLINE_NAME not in self.depots or \ + not isinstance(self.depots[MAINLINE_NAME], HttpDepot) or \ + self.depots[MAINLINE_NAME].location != MAINLINE_URL: + self.depots[MAINLINE_NAME] = HttpDepot(MAINLINE_NAME, MAINLINE_URL) + needs_saving = True + # add early access depot as another remote depot + if EARLY_ACCESS_NAME not in self.depots or \ + not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) or \ + self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL: + self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) + needs_saving = True + if self.default_target is None: + self.default_target = 'v5' + needs_saving = True + if self.default_libraries is None: + self.default_libraries = { + 'v5': ['okapilib'], + 'cortex': [] + } + needs_saving = True + if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: + self.early_access_libraries = { + 'v5': ['liblvgl', 'okapilib'], + 'cortex': [] + } + needs_saving = True + if 'v5' not in self.default_libraries: + self.default_libraries['v5'] = [] + needs_saving = True + if 'cortex' not in self.default_libraries: + self.default_libraries['cortex'] = [] + needs_saving = True + if 'v5' not in self.early_access_libraries: + self.early_access_libraries['v5'] = [] + needs_saving = True + if 'cortex' not in self.early_access_libraries: + self.early_access_libraries['cortex'] = [] + needs_saving = True + if needs_saving: + self.save() + from pros.common.sentry import add_context + add_context(self) + + def get_depot(self, name: str) -> Optional[Depot]: + return self.depots.get(name) + + def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> LocalTemplate: + for t in list(self.local_templates): + if t.identifier == template.identifier: + self.purge_template(t) + + if 'destination' in kwargs: # this is deprecated, will work (maybe) but not desirable behavior + destination = kwargs.pop('destination') + else: + destination = os.path.join(self.directory, 'templates', template.identifier) + if os.path.isdir(destination): + shutil.rmtree(destination) + + template: Template = depot.fetch_template(template, destination, **kwargs) + click.secho(f'Fetched {template.identifier} from {depot.name} depot', dim=True) + local_template = LocalTemplate(orig=template, location=destination) + local_template.metadata['origin'] = depot.name + click.echo(f'Adding {local_template.identifier} to registry...', nl=False) + if depot.name == EARLY_ACCESS_NAME: # check for early access + self.early_access_local_templates.add(local_template) + else: + self.local_templates.add(local_template) + self.save() + if isinstance(template, ExternalTemplate) and template.directory == destination: + template.delete() + 
click.secho('Done', fg='green') + return local_template + + def purge_template(self, template: LocalTemplate): + if template.metadata['origin'] == EARLY_ACCESS_NAME: + if template not in self.early_access_local_templates: + logger(__name__).info(f"{template.identifier} was not in the Conductor's local early access templates cache.") + else: + self.early_access_local_templates.remove(template) + else: + if template not in self.local_templates: + logger(__name__).info(f"{template.identifier} was not in the Conductor's local templates cache.") + else: + self.local_templates.remove(template) + + if os.path.abspath(template.location).startswith( + os.path.abspath(os.path.join(self.directory, 'templates'))) \ + and os.path.isdir(template.location): + shutil.rmtree(template.location) + self.save() + + def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: bool = True, + allow_offline: bool = True, force_refresh: bool = False, + unique: bool = True, **kwargs) -> List[BaseTemplate]: + results = list() if not unique else set() + kernel_version = kwargs.get('kernel_version', None) + if kwargs.get('early_access', None) is not None: + self.use_early_access = kwargs.get('early_access', False) + if isinstance(identifier, str): + query = BaseTemplate.create_query(name=identifier, **kwargs) + else: + query = identifier + if allow_offline: + if self.use_early_access: + offline_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates)) + else: + offline_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates)) + + if unique: + results.update(offline_results) + else: + results.extend(offline_results) + if allow_online: + for depot in self.depots.values(): + # EarlyAccess depot will only be accessed when the --early-access flag is true + if depot.name != EARLY_ACCESS_NAME or (depot.name == EARLY_ACCESS_NAME and self.use_early_access): + remote_templates = depot.get_remote_templates(force_check=force_refresh, **kwargs) + online_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), + remote_templates)) + + if unique: + results.update(online_results) + else: + results.extend(online_results) + logger(__name__).debug('Saving Conductor config after checking for remote updates') + self.save() # Save self since there may have been some updates from the depots + + if len(results) == 0 and (kernel_version.split('.')[0] == '3' and not self.use_early_access): + raise dont_send( + InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}')) + + return list(results) + + def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: + if isinstance(identifier, str): + kwargs['name'] = identifier + elif isinstance(identifier, BaseTemplate): + kwargs['orig'] = identifier + query = BaseTemplate.create_query(**kwargs) + logger(__name__).info(f'Query: {query}') + logger(__name__).debug(query.__dict__) + templates = self.resolve_templates(query, **kwargs) + logger(__name__).info(f'Candidates: {", ".join([str(t) for t in templates])}') + if not any(templates): + return None + query.version = str(Spec(query.version or '>0').select([Version(t.version) for t in templates])) + v = Version(query.version) + v.prerelease = v.prerelease if len(v.prerelease) else ('',) + v.build = v.build if len(v.build) else ('',) + query.version = f'=={v}' + logger(__name__).info(f'Resolved to {query.identifier}') + 
templates = self.resolve_templates(query, **kwargs) + if not any(templates): + return None + # prefer local templates first + local_templates = [t for t in templates if isinstance(t, LocalTemplate)] + if any(local_templates): + # there's a local template satisfying the query + if len(local_templates) > 1: + # This should never happen! Conductor state must be invalid + raise Exception(f'Multiple local templates satisfy {query.identifier}!') + return local_templates[0] + + # prefer pros-mainline template second + mainline_templates = [t for t in templates if t.metadata['origin'] == 'pros-mainline'] + if any(mainline_templates): + return mainline_templates[0] + + # No preference, just FCFS + return templates[0] + + def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], **kwargs): + upgrade_ok = kwargs.get('upgrade_ok', True) + install_ok = kwargs.get('install_ok', True) + downgrade_ok = kwargs.get('downgrade_ok', True) + download_ok = kwargs.get('download_ok', True) + force = kwargs.get('force_apply', False) + + kwargs['target'] = project.target + if 'kernel' in project.templates: + # support_kernels for backwards compatibility, but kernel_version should be getting most of the exposure + kwargs['kernel_version'] = kwargs['supported_kernels'] = project.templates['kernel'].version + template = self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) + if template is None: + raise dont_send( + InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}')) + + # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 + if template.name == 'kernel': + isProject = Project.find_project("") + if isProject: + curr_proj = Project() + if curr_proj.kernel: + if template.version[0] == '4' and curr_proj.kernel[0] == '3': + confirm = ui.confirm(f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' + f'Do you still want to upgrade?') + if not confirm: + raise dont_send( + InvalidTemplateException(f'Not upgrading')) + if template.version[0] == '3' and curr_proj.kernel[0] == '4': + confirm = ui.confirm(f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' + f'Do you still want to downgrade?') + if not confirm: + raise dont_send( + InvalidTemplateException(f'Not downgrading')) + elif not self.use_early_access and template.version[0] == '3' and not self.warn_early_access: + confirm = ui.confirm(f'PROS 4 is now in early access. 
' + f'Please use the --early-access flag if you would like to use it.\n' + f'Do you want to use PROS 4 instead?') + self.warn_early_access = True + if confirm: # use pros 4 + self.use_early_access = True + kwargs['version'] = '>=0' + self.save() + # Recall the function with early access enabled + return self.apply_template(project, identifier, **kwargs) + + self.save() + if not isinstance(template, LocalTemplate): + with ui.Notification(): + template = self.fetch_template(self.get_depot(template.metadata['origin']), template, **kwargs) + assert isinstance(template, LocalTemplate) + + logger(__name__).info(str(project)) + valid_action = project.get_template_actions(template) + if valid_action == TemplateAction.NotApplicable: + raise dont_send( + InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) + ) + if force \ + or (valid_action == TemplateAction.Upgradable and upgrade_ok) \ + or (valid_action == TemplateAction.Installable and install_ok) \ + or (valid_action == TemplateAction.Downgradable and downgrade_ok): + project.apply_template(template, force_system=kwargs.pop('force_system', False), + force_user=kwargs.pop('force_user', False), + remove_empty_directories=kwargs.pop('remove_empty_directories', False)) + ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') + elif valid_action != TemplateAction.AlreadyInstalled: + raise dont_send( + InvalidTemplateException(f'Could not install {template.identifier} because it is {valid_action.name},' + f' and that is not allowed.', reason=valid_action) + ) + else: + ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') + + @staticmethod + def remove_template(project: Project, identifier: Union[str, BaseTemplate], remove_user: bool = True, + remove_empty_directories: bool = True): + ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') + if not project.resolve_template(identifier): + ui.echo(f"{identifier} is not an applicable template") + for template in project.resolve_template(identifier): + ui.echo(f'Uninstalling {template.identifier}') + project.remove_template(template, remove_user=remove_user, + remove_empty_directories=remove_empty_directories) + + def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: + if kwargs.get('early_access', None) is not None: + self.use_early_access = kwargs.get('early_access', False) + if kwargs["version_source"]: # If true, then the user has not specified a version + if not self.use_early_access and self.warn_early_access: + ui.echo(f"PROS 4 is now in early access. " + f"If you would like to use it, use the --early-access flag.") + elif self.use_early_access: + ui.echo(f'Early access is enabled. 
Using PROS 4.') + elif self.use_early_access: + ui.echo(f'Early access is enabled.') + + if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): + raise dont_send(ValueError('Will not create a project in user home directory')) + for char in str(Path(path)): + if char in ['?', '<', '>', '*', '|', '^', '#', '%', '&', '$', '+', '!', '`', '\'', '=', + '@', '\'', '{', '}', '[', ']', '(', ')', '~'] or ord(char) > 127: + raise dont_send(ValueError(f'Invalid character found in directory name: \'{char}\'')) + + proj = Project(path=path, create=True) + if 'target' in kwargs: + proj.target = kwargs['target'] + if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace(): + proj.project_name = kwargs['project_name'] + else: + proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path))) + if 'version' in kwargs: + if kwargs['version'] == 'latest': + kwargs['version'] = '>=0' + self.apply_template(proj, identifier='kernel', **kwargs) + proj.save() + + if not no_default_libs: + libraries = self.early_access_libraries if self.use_early_access else self.default_libraries + for library in libraries[proj.target]: + try: + # remove kernel version so that latest template satisfying query is correctly selected + if 'version' in kwargs: + kwargs.pop('version') + self.apply_template(proj, library, **kwargs) + except Exception as e: + logger(__name__).exception(e) + return proj + + def add_depot(self, name: str, url: str): + self.depots[name] = HttpDepot(name, url) + self.save() + + def remove_depot(self, name: str): + del self.depots[name] + self.save() + + def query_depots(self, url: bool): + return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] diff --git a/version b/version index 8a0feb98..6cb9d3dd 100644 --- a/version +++ b/version @@ -1 +1 @@ -3.4.3 \ No newline at end of file +3.4.3 diff --git a/win_version b/win_version index 20cef1f4..03f59952 100644 --- a/win_version +++ b/win_version @@ -1 +1 @@ -3.4.3.0 \ No newline at end of file +3.4.3.0 From 29570d16dc72101bf5d4ce8b9b83c907aa326041 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:34:16 -0500 Subject: [PATCH 11/44] Readd removed requirements --- requirements.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/requirements.txt b/requirements.txt index 9bc7673c..7b20814b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,8 @@ +cachetools click>=8 +cobs +colorama +jsonpickle observable pre-commit pyinstaller @@ -12,3 +16,4 @@ rich-click scan-build==2.0.13 semantic_version sentry-sdk +tabulate From a0e62e2174d5f70b7aa5ab0021c8238c939b5254 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Thu, 1 Feb 2024 23:25:29 -0500 Subject: [PATCH 12/44] Add black formatter --- .pre-commit-config.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c22efffd..dc46dd72 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,11 @@ repos: - id: check-ast - id: trailing-whitespace - id: requirements-txt-fixer + - repo: https://github.com/psf/black + rev: 24.1.1 + hooks: + - id: black + args: ["--skip-string-normalization", "--line-length=120"] - repo: local hooks: - id: pylint From 7819ea63ccd698aeae3684b06242c32c2d646656 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> 
Date: Thu, 1 Feb 2024 23:28:29 -0500 Subject: [PATCH 13/44] Run black --- pros/cli/build.py | 18 +- pros/cli/click_classes.py | 23 +- pros/cli/common.py | 177 +++++--- pros/cli/conductor.py | 360 ++++++++++++----- pros/cli/conductor_utils.py | 31 +- pros/cli/interactive.py | 4 + pros/cli/main.py | 35 +- pros/cli/misc_commands.py | 27 +- pros/cli/terminal.py | 30 +- pros/cli/upload.py | 177 +++++--- pros/cli/user_script.py | 2 + pros/cli/v5_utils.py | 31 +- pros/common/sentry.py | 19 +- pros/common/ui/__init__.py | 105 +++-- pros/common/ui/interactive/application.py | 15 +- .../ui/interactive/components/__init__.py | 16 +- .../ui/interactive/components/button.py | 6 +- .../ui/interactive/components/component.py | 4 +- .../ui/interactive/components/container.py | 19 +- .../ui/interactive/components/input_groups.py | 5 +- .../common/ui/interactive/components/label.py | 6 +- pros/common/ui/interactive/observable.py | 15 +- .../ui/interactive/parameters/__init__.py | 10 +- .../parameters/validatable_parameter.py | 8 +- .../renderers/MachineOutputRenderer.py | 5 +- pros/common/ui/log.py | 2 +- pros/common/utils.py | 9 +- pros/conductor/conductor.py | 185 ++++++--- pros/conductor/depots/depot.py | 17 +- pros/conductor/depots/http_depot.py | 7 +- pros/conductor/depots/local_depot.py | 4 +- pros/conductor/interactive/NewProjectModal.py | 13 +- .../interactive/UpdateProjectModal.py | 33 +- pros/conductor/interactive/components.py | 6 +- pros/conductor/interactive/parameters.py | 19 +- pros/conductor/project/ProjectReport.py | 13 +- pros/conductor/project/ProjectTransaction.py | 23 +- pros/conductor/project/__init__.py | 173 +++++--- pros/config/cli_config.py | 2 + pros/config/config.py | 12 +- pros/ga/analytics.py | 41 +- pros/serial/devices/vex/cortex_device.py | 48 +-- pros/serial/devices/vex/crc.py | 4 +- pros/serial/devices/vex/message.py | 6 +- pros/serial/devices/vex/stm32_device.py | 29 +- pros/serial/devices/vex/v5_device.py | 380 ++++++++++++------ pros/serial/devices/vex/vex_device.py | 21 +- pros/serial/interactive/UploadProjectModal.py | 20 +- pros/serial/ports/__init__.py | 1 + pros/serial/ports/direct_port.py | 14 +- pros/serial/ports/exceptions.py | 14 +- pros/serial/ports/serial_share_bridge.py | 45 ++- pros/serial/ports/serial_share_port.py | 16 +- pros/serial/terminal/terminal.py | 38 +- .../instructions/download_instructions.py | 6 +- .../instructions/explorer_instructions.py | 1 + pros/upgrade/manifests/upgrade_manifest_v1.py | 7 +- pros/upgrade/manifests/upgrade_manifest_v2.py | 7 +- setup.py | 7 +- version.py | 17 +- 60 files changed, 1605 insertions(+), 783 deletions(-) diff --git a/pros/cli/build.py b/pros/cli/build.py index bd9fdcb8..b9089e43 100644 --- a/pros/cli/build.py +++ b/pros/cli/build.py @@ -13,7 +13,7 @@ def build_cli(): pass -@build_cli.command(aliases=['build','m']) +@build_cli.command(aliases=['build', 'm']) @project_option() @click.argument('build-args', nargs=-1) @default_options @@ -48,6 +48,7 @@ def make_upload(ctx, project: c.Project, build_args: List[str], **upload_args): def make_upload_terminal(ctx, project: c.Project, build_args, **upload_args): analytics.send("make-upload-terminal") from .terminal import terminal + ctx.invoke(make, project=project, build_args=build_args) ctx.invoke(upload, project=project, **upload_args) ctx.invoke(terminal, port=project.target, request_banner=False) @@ -55,21 +56,24 @@ def make_upload_terminal(ctx, project: c.Project, build_args, **upload_args): @build_cli.command('build-compile-commands', hidden=True) 
@project_option() -@click.option('--suppress-output/--show-output', 'suppress_output', default=False, show_default=True, - help='Suppress output') +@click.option( + '--suppress-output/--show-output', 'suppress_output', default=False, show_default=True, help='Suppress output' +) @click.option('--compile-commands', type=click.File('w'), default=None) @click.option('--sandbox', default=False, is_flag=True) @click.argument('build-args', nargs=-1) @default_options -def build_compile_commands(project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, - build_args: List[str]): +def build_compile_commands( + project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, build_args: List[str] +): """ Build a compile_commands.json compatible with cquery :return: """ analytics.send("build-compile-commands") - exit_code = project.make_scan_build(build_args, cdb_file=compile_commands, suppress_output=suppress_output, - sandbox=sandbox) + exit_code = project.make_scan_build( + build_args, cdb_file=compile_commands, suppress_output=suppress_output, sandbox=sandbox + ) if exit_code != 0: logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) raise click.ClickException('Failed to build') diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py index 27dbf73f..2c017556 100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -19,8 +19,8 @@ def __init__(self, *args, hidden: bool = False, **kwargs): def format_commands(self, ctx, formatter): """Extra format methods for multi methods that adds all the commands - after the options. - """ + after the options. + """ if not hasattr(self, 'list_commands'): return rows = [] @@ -61,6 +61,7 @@ def format_options(self, ctx, formatter): self.format_commands(ctx, formatter) + class PROSCommand(PROSFormatted, click.Command): pass @@ -81,25 +82,27 @@ def get_help_record(self, ctx): return return super().get_help_record(ctx) + class PROSDeprecated(click.Option): def __init__(self, *args, replacement: str = None, **kwargs): kwargs['help'] = "This option has been deprecated." - if not replacement==None: + if not replacement == None: kwargs['help'] += " Its replacement is '--{}'".format(replacement) super(PROSDeprecated, self).__init__(*args, **kwargs) self.group = "Deprecated" - self.optiontype = "flag" if str(self.type)=="BOOL" else "switch" + self.optiontype = "flag" if str(self.type) == "BOOL" else "switch" self.to_use = replacement - self.arg = args[0][len(args[0])-1] + self.arg = args[0][len(args[0]) - 1] self.msg = "The '{}' {} has been deprecated. Please use '--{}' instead." - if replacement==None: - self.msg = self.msg.split(".")[0]+"." + if replacement == None: + self.msg = self.msg.split(".")[0] + "." def type_cast_value(self, ctx, value): - if not value==self.default: - print("Warning! : "+self.msg.format(self.arg, self.optiontype, self.to_use)+"\n") + if not value == self.default: + print("Warning! 
: " + self.msg.format(self.arg, self.optiontype, self.to_use) + "\n") return value + class PROSGroup(PROSFormatted, click.Group): def __init__(self, *args, **kwargs): super(PROSGroup, self).__init__(*args, **kwargs) @@ -160,7 +163,7 @@ def invoke(self, *args, **kwargs): except ClickException as e: click.echo("PROS-CLI Version: {}".format(get_version())) isProject = p.find_project("") - if (isProject): #check if there is a project + if isProject: # check if there is a project curr_proj = p() click.echo("PROS-Kernel Version: {}".format(curr_proj.kernel)) raise e diff --git a/pros/cli/common.py b/pros/cli/common.py index 19fbd6ae..417c42fc 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -23,8 +23,16 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): logger(__name__).info('Verbose messages enabled') return value - return click.option('--verbose', help='Enable verbose output', is_flag=True, is_eager=True, expose_value=False, - callback=callback, cls=PROSOption, group='Standard Options')(f) + return click.option( + '--verbose', + help='Enable verbose output', + is_flag=True, + is_eager=True, + expose_value=False, + callback=callback, + cls=PROSOption, + group='Standard Options', + )(f) def debug_option(f: Union[click.Command, Callable]): @@ -45,8 +53,16 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): logger('pros').debug(f'CLI Version: {get_version()}') return value - return click.option('--debug', help='Enable debugging output', is_flag=True, is_eager=True, expose_value=False, - callback=callback, cls=PROSOption, group='Standard Options')(f) + return click.option( + '--debug', + help='Enable debugging output', + is_flag=True, + is_eager=True, + expose_value=False, + callback=callback, + cls=PROSOption, + group='Standard Options', + )(f) def logging_option(f: Union[click.Command, Callable]): @@ -63,9 +79,17 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): stdout_handler.setLevel(value) return value - return click.option('-l', '--log', help='Logging level', is_eager=True, expose_value=False, callback=callback, - type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), - cls=PROSOption, group='Standard Options')(f) + return click.option( + '-l', + '--log', + help='Logging level', + is_eager=True, + expose_value=False, + callback=callback, + type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), + cls=PROSOption, + group='Standard Options', + )(f) def logfile_option(f: Union[click.Command, Callable]): @@ -87,11 +111,17 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): stdout_handler.setLevel(logging.getLogger().level) # pin stdout_handler to its current log level logging.getLogger().setLevel(min(logging.getLogger().level, level)) - return click.option('--logfile', help='Log messages to a file', is_eager=True, expose_value=False, - callback=callback, default=(None, None), - type=click.Tuple( - [click.Path(), click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])] - ), cls=PROSOption, group='Standard Options')(f) + return click.option( + '--logfile', + help='Log messages to a file', + is_eager=True, + expose_value=False, + callback=callback, + default=(None, None), + type=click.Tuple([click.Path(), click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])]), + cls=PROSOption, + group='Standard Options', + )(f) def machine_output_option(f: Union[click.Command, Callable]): @@ -110,46 +140,82 @@ def callback(ctx: click.Context, param: 
click.Parameter, value: str): logging.getLogger(__name__).info('Debugging messages enabled') return value - decorator = click.option('--machine-output', expose_value=False, is_flag=True, default=False, is_eager=True, - help='Enable machine friendly output.', callback=callback, cls=PROSOption, hidden=True)(f) + decorator = click.option( + '--machine-output', + expose_value=False, + is_flag=True, + default=False, + is_eager=True, + help='Enable machine friendly output.', + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def no_sentry_option(f: Union[click.Command, Callable]): """ disables the sentry y/N prompt when an error/exception occurs """ + def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-sentry',value) + add_tag('no-sentry', value) if value: pros.common.sentry.disable_prompt() - decorator = click.option('--no-sentry', expose_value=False, is_flag=True, default=False, is_eager=True, - help="Disable sentry reporting prompt.", callback=callback, cls=PROSOption, hidden=True)(f) + + decorator = click.option( + '--no-sentry', + expose_value=False, + is_flag=True, + default=False, + is_eager=True, + help="Disable sentry reporting prompt.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def no_analytics(f: Union[click.Command, Callable]): """ Don't use analytics for this command """ + def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-analytics',value) + add_tag('no-analytics', value) if value: echo("Not sending analytics for this command.\n") analytics.useAnalytics = False pass - decorator = click.option('--no-analytics', expose_value=False, is_flag=True, default=False, is_eager=True, - help="Don't send analytics for this command.", callback=callback, cls=PROSOption, hidden=True)(f) + + decorator = click.option( + '--no-analytics', + expose_value=False, + is_flag=True, + default=False, + is_eager=True, + help="Don't send analytics for this command.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator + def default_options(f: Union[click.Command, Callable]): """ - combines verbosity, debug, machine output, no analytics, and no sentry options + combines verbosity, debug, machine output, no analytics, and no sentry options """ - decorator = debug_option(verbose_option(logging_option(logfile_option(machine_output_option(no_sentry_option(no_analytics(f))))))) + decorator = debug_option( + verbose_option(logging_option(logfile_option(machine_output_option(no_sentry_option(no_analytics(f)))))) + ) decorator.__name__ = f.__name__ return decorator @@ -164,15 +230,16 @@ def template_query(arg_name='query', required: bool = False): def callback(ctx: click.Context, param: click.Parameter, value: Tuple[str, ...]): import pros.conductor as c + value = list(value) spec = None if len(value) > 0 and not value[0].startswith('--'): spec = value.pop(0) if not spec and required: raise ValueError(f'A {arg_name} is required to perform this command') - query = c.BaseTemplate.create_query(spec, - **{value[i][2:]: value[i + 1] for i in - range(0, int(len(value) / 2) * 2, 2)}) + query = c.BaseTemplate.create_query( + spec, **{value[i][2:]: value[i + 1] for i in range(0, int(len(value) / 2) * 2, 2)} + ) logger(__name__).debug(query) return query @@ -187,20 +254,29 @@ def callback(ctx: click.Context, param: 
click.Parameter, value: str): if allow_none and value is None: return None import pros.conductor as c + project_path = c.Project.find_project(value) if project_path is None: if allow_none: return None else: - raise click.UsageError(f'{os.path.abspath(value or ".")} is not inside a PROS project. ' - f'Execute this command from within a PROS project or specify it ' - f'with --project project/path') + raise click.UsageError( + f'{os.path.abspath(value or ".")} is not inside a PROS project. ' + f'Execute this command from within a PROS project or specify it ' + f'with --project project/path' + ) return c.Project(project_path) def wrapper(f: Union[click.Command, Callable]): - return click.option(f'--{arg_name}', callback=callback, required=required, - default=default, type=click.Path(exists=True), show_default=True, - help='PROS Project directory or file')(f) + return click.option( + f'--{arg_name}', + callback=callback, + required=required, + default=default, + type=click.Path(exists=True), + show_default=True, + help='PROS Project directory or file', + )(f) return wrapper @@ -239,6 +315,7 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl wireless interaction. """ from pros.serial.devices.vex import find_v5_ports + # If a port is specified manually, we'll just assume it's # not a joystick. is_joystick = False @@ -247,17 +324,22 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl logger(__name__).debug('Ports: {}'.format(';'.join([str(p.__dict__) for p in ports]))) if len(ports) == 0: if not quiet: - logger(__name__).error('No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('v5'), - extra={'sentry': False}) + logger(__name__).error( + 'No {0} ports were found! If you think you have a {0} plugged in, ' + 'run this command again with the --debug flag'.format('v5'), + extra={'sentry': False}, + ) return None, False if len(ports) > 1: if not quiet: - port = click.prompt('Multiple {} ports were found. Please choose one: [{}]' - .format('v5', '|'.join([p.device for p in ports])), - default=ports[0].device, - show_default=False, - type=click.Choice([p.device for p in ports])) + port = click.prompt( + 'Multiple {} ports were found. Please choose one: [{}]'.format( + 'v5', '|'.join([p.device for p in ports]) + ), + default=ports[0].device, + show_default=False, + type=click.Choice([p.device for p in ports]), + ) assert port in [p.device for p in ports] else: return None, False @@ -270,19 +352,24 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[str]: from pros.serial.devices.vex import find_cortex_ports + if not port: ports = find_cortex_ports() if len(ports) == 0: if not quiet: - logger(__name__).error('No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('cortex'), - extra={'sentry': False}) + logger(__name__).error( + 'No {0} ports were found! If you think you have a {0} plugged in, ' + 'run this command again with the --debug flag'.format('cortex'), + extra={'sentry': False}, + ) return None if len(ports) > 1: if not quiet: - port = click.prompt('Multiple {} ports were found. Please choose one: '.format('cortex'), - default=ports[0].device, - type=click.Choice([p.device for p in ports])) + port = click.prompt( + 'Multiple {} ports were found. 
Please choose one: '.format('cortex'), + default=ports[0].device, + type=click.Choice([p.device for p in ports]), + ) assert port in [p.device for p in ports] else: return None diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 71bb8c32..8a8f32e7 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -25,8 +25,11 @@ def conductor(): pass -@conductor.command(aliases=['download'], short_help='Fetch/Download a remote template', - context_settings={'ignore_unknown_options': True}) +@conductor.command( + aliases=['download'], + short_help='Fetch/Download a remote template', + context_settings={'ignore_unknown_options': True}, +) @template_query(required=True) @default_options def fetch(query: c.BaseTemplate): @@ -55,8 +58,9 @@ def fetch(query: c.BaseTemplate): return -1 template_file = query.metadata['location'] - if template_file and (os.path.splitext(template_file)[1] in ['.zip'] or - os.path.exists(os.path.join(template_file, 'template.pros'))): + if template_file and ( + os.path.splitext(template_file)[1] in ['.zip'] or os.path.exists(os.path.join(template_file, 'template.pros')) + ): template = ExternalTemplate(template_file) query.metadata['location'] = template_file depot = c.LocalDepot() @@ -79,20 +83,49 @@ def fetch(query: c.BaseTemplate): @conductor.command(context_settings={'ignore_unknown_options': True}) @click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=True, help='Allow upgrading templates in a project') - @click.option('--install/--no-install', 'install_ok', default=True, help='Allow installing templates in a project') -@click.option('--download/--no-download', 'download_ok', default=True, - help='Allow downloading templates or only allow local templates') -@click.option('--upgrade-user-files/--no-upgrade-user-files', 'force_user', default=False, - help='Replace all user files in a template') -@click.option('--force', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--disable-early-access', '--early/--disable-early', '-ea/-dea', 'early_access', '--beta/--disable-beta', default=None, - help='Create a project using the PROS 4 kernel') +@click.option( + '--download/--no-download', + 'download_ok', + default=True, + help='Allow downloading templates or only allow local templates', +) +@click.option( + '--upgrade-user-files/--no-upgrade-user-files', + 'force_user', + default=False, + help='Replace all user files in a template', +) +@click.option( + '--force', + 'force_system', + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + '--force-apply', + 'force_apply', + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + '--remove-empty-dirs/--no-remove-empty-dirs', + 'remove_empty_directories', + is_flag=True, + default=True, + help='Remove empty directories when removing files', +) +@click.option( + '--early-access/--disable-early-access', + '--early/--disable-early', + '-ea/-dea', + 'early_access', + '--beta/--disable-beta', + default=None, + help='Create a project using the PROS 4 
kernel', +) @project_option() @template_query(required=True) @default_options @@ -109,14 +142,29 @@ def apply(project: c.Project, query: c.BaseTemplate, **kwargs): @conductor.command(aliases=['i', 'in'], context_settings={'ignore_unknown_options': True}) @click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=False) @click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') +@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@click.option( + '--force-system', + '-f', + 'force_system', + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + '--force-apply', + 'force_apply', + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + '--remove-empty-dirs/--no-remove-empty-dirs', + 'remove_empty_directories', + is_flag=True, + default=True, + help='Remove empty directories when removing files', +) @project_option() @template_query(required=True) @default_options @@ -134,16 +182,38 @@ def install(ctx: click.Context, **kwargs): @conductor.command(context_settings={'ignore_unknown_options': True}, aliases=['u']) @click.option('--install/--no-install', 'install_ok', default=False) @click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--disable-early-access', '--early/--disable-early', '-ea/-dea', 'early_access', '--beta/--disable-beta', default=None, - help='Create a project using the PROS 4 kernel') +@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@click.option( + '--force-system', + '-f', + 'force_system', + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + '--force-apply', + 'force_apply', + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + '--remove-empty-dirs/--no-remove-empty-dirs', + 'remove_empty_directories', + is_flag=True, + default=True, + help='Remove empty directories when removing files', +) +@click.option( + '--early-access/--disable-early-access', + '--early/--disable-early', + '-ea/-dea', + 
'early_access', + '--beta/--disable-beta', + default=None, + help='Create a project using the PROS 4 kernel', +) @project_option() @template_query(required=False) @default_options @@ -158,8 +228,9 @@ def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwa if not query.name: for template in project.templates.keys(): click.secho(f'Upgrading {template}', color='yellow') - q = c.BaseTemplate.create_query(name=template, target=project.target, - supported_kernels=project.templates['kernel'].version) + q = c.BaseTemplate.create_query( + name=template, target=project.target, supported_kernels=project.templates['kernel'].version + ) ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) else: ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) @@ -167,22 +238,33 @@ def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwa @conductor.command('uninstall') @click.option('--remove-user', is_flag=True, default=False, help='Also remove user files') -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') +@click.option( + '--remove-empty-dirs/--no-remove-empty-dirs', + 'remove_empty_directories', + is_flag=True, + default=True, + help='Remove empty directories when removing files', +) @click.option('--no-make-clean', is_flag=True, default=True, help='Do not run make clean after removing') @project_option() @template_query() @default_options -def uninstall_template(project: c.Project, query: c.BaseTemplate, remove_user: bool, - remove_empty_directories: bool = False, no_make_clean: bool = False): +def uninstall_template( + project: c.Project, + query: c.BaseTemplate, + remove_user: bool, + remove_empty_directories: bool = False, + no_make_clean: bool = False, +): """ Uninstall a template from a PROS project Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ analytics.send("uninstall-template") - c.Conductor().remove_template(project, query, remove_user=remove_user, - remove_empty_directories=remove_empty_directories) + c.Conductor().remove_template( + project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) if no_make_clean: with ui.Notification(): project.compile(["clean"]) @@ -192,25 +274,62 @@ def uninstall_template(project: c.Project, query: c.BaseTemplate, remove_user: b @click.argument('path', type=click.Path()) @click.argument('target', default=c.Conductor().default_target, type=click.Choice(['v5', 'cortex'])) @click.argument('version', default='latest') -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--no-default-libs', 'no_default_libs', default=False, is_flag=True, - help='Do not install any default libraries after creating the project.') -@click.option('--compile-after', is_flag=True, default=True, show_default=True, - help='Compile the project after creation') -@click.option('--build-cache', is_flag=True, default=None, show_default=False, - help='Build compile commands cache after creation. 
Overrides --compile-after if both are specified.') -@click.option('--early-access/--disable-early-access', '--early/--disable-early', '-ea/-dea', 'early_access', '--beta/--disable-beta', default=None, - help='Create a project using the PROS 4 kernel') +@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@click.option( + '--force-system', + '-f', + 'force_system', + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + '--force-refresh', + is_flag=True, + default=False, + show_default=True, + help='Force update all remote depots, ignoring automatic update checks', +) +@click.option( + '--no-default-libs', + 'no_default_libs', + default=False, + is_flag=True, + help='Do not install any default libraries after creating the project.', +) +@click.option( + '--compile-after', is_flag=True, default=True, show_default=True, help='Compile the project after creation' +) +@click.option( + '--build-cache', + is_flag=True, + default=None, + show_default=False, + help='Build compile commands cache after creation. Overrides --compile-after if both are specified.', +) +@click.option( + '--early-access/--disable-early-access', + '--early/--disable-early', + '-ea/-dea', + 'early_access', + '--beta/--disable-beta', + default=None, + help='Create a project using the PROS 4 kernel', +) @click.pass_context @default_options -def new_project(ctx: click.Context, path: str, target: str, version: str, - force_user: bool = False, force_system: bool = False, - no_default_libs: bool = False, compile_after: bool = True, build_cache: bool = None, **kwargs): +def new_project( + ctx: click.Context, + path: str, + target: str, + version: str, + force_user: bool = False, + force_system: bool = False, + no_default_libs: bool = False, + compile_after: bool = True, + build_cache: bool = None, + **kwargs, +): """ Create a new PROS project @@ -221,16 +340,27 @@ def new_project(ctx: click.Context, path: str, target: str, version: str, if version.lower() == 'latest' or not version: version = '>0' if not force_system and c.Project.find_project(path) is not None: - logger(__name__).error('A project already exists in this location at ' + c.Project.find_project(path) + - '! Delete it first. Are you creating a project in an existing one?', extra={'sentry': False}) + logger(__name__).error( + 'A project already exists in this location at ' + + c.Project.find_project(path) + + '! Delete it first. 
Are you creating a project in an existing one?', + extra={'sentry': False}, + ) ctx.exit(-1) try: _conductor = c.Conductor() if target is None: target = _conductor.default_target - project = _conductor.new_project(path, target=target, version=version, version_source=version_source, - force_user=force_user, force_system=force_system, - no_default_libs=no_default_libs, **kwargs) + project = _conductor.new_project( + path, + target=target, + version=version, + version_source=version_source, + force_user=force_user, + force_system=force_system, + no_default_libs=no_default_libs, + **kwargs, + ) ui.echo('New PROS Project was created:', output_machine=False) ctx.invoke(info_project, project=project) @@ -247,24 +377,54 @@ def new_project(ctx: click.Context, path: str, target: str, version: str, ctx.exit(-1) -@conductor.command('query-templates', - aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates'], - context_settings={'ignore_unknown_options': True}) -@click.option('--allow-offline/--no-offline', 'allow_offline', default=True, show_default=True, - help='(Dis)allow offline templates in the listing') -@click.option('--allow-online/--no-online', 'allow_online', default=True, show_default=True, - help='(Dis)allow online templates in the listing') -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--limit', type=int, default=15, - help='The maximum number of displayed results for each library') -@click.option('--early-access/--disable-early-access', '--early/--disable-early', '-ea/-dea', 'early_access', '--beta/--disable-beta', default=None, - help='View a list of early access templates') +@conductor.command( + 'query-templates', + aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates'], + context_settings={'ignore_unknown_options': True}, +) +@click.option( + '--allow-offline/--no-offline', + 'allow_offline', + default=True, + show_default=True, + help='(Dis)allow offline templates in the listing', +) +@click.option( + '--allow-online/--no-online', + 'allow_online', + default=True, + show_default=True, + help='(Dis)allow online templates in the listing', +) +@click.option( + '--force-refresh', + is_flag=True, + default=False, + show_default=True, + help='Force update all remote depots, ignoring automatic update checks', +) +@click.option('--limit', type=int, default=15, help='The maximum number of displayed results for each library') +@click.option( + '--early-access/--disable-early-access', + '--early/--disable-early', + '-ea/-dea', + 'early_access', + '--beta/--disable-beta', + default=None, + help='View a list of early access templates', +) @template_query(required=False) @click.pass_context @default_options -def query_templates(ctx, query: c.BaseTemplate, allow_offline: bool, allow_online: bool, force_refresh: bool, - limit: int, early_access: bool): +def query_templates( + ctx, + query: c.BaseTemplate, + allow_offline: bool, + allow_online: bool, + force_refresh: bool, + limit: int, + early_access: bool, +): """ Query local and remote templates based on a spec @@ -273,11 +433,21 @@ def query_templates(ctx, query: c.BaseTemplate, allow_offline: bool, allow_onlin analytics.send("query-templates") if limit < 0: limit = 15 - templates = c.Conductor().resolve_templates(query, allow_offline=allow_offline, allow_online=allow_online, - force_refresh=force_refresh, early_access=early_access) + 
templates = c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=early_access, + ) if early_access: - templates += c.Conductor().resolve_templates(query, allow_offline=allow_offline, allow_online=allow_online, - force_refresh=force_refresh, early_access=False) + templates += c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=False, + ) render_templates = {} for template in templates: @@ -291,10 +461,13 @@ def query_templates(ctx, query: c.BaseTemplate, allow_offline: bool, allow_onlin 'version': template.version, 'location': template.origin, 'target': template.target, - 'local': isinstance(template, c.LocalTemplate) + 'local': isinstance(template, c.LocalTemplate), } import semantic_version as semver - render_templates = sorted(render_templates.values(), key=lambda k: (k['name'], semver.Version(k['version']), k['local']), reverse=True) + + render_templates = sorted( + render_templates.values(), key=lambda k: (k['name'], semver.Version(k['version']), k['local']), reverse=True + ) # Impose the output limit for each library's templates output_templates = [] @@ -315,18 +488,23 @@ def info_project(project: c.Project, ls_upgrades): """ analytics.send("info-project") from pros.conductor.project import ProjectReport + report = ProjectReport(project) _conductor = c.Conductor() if ls_upgrades: for template in report.project['templates']: import semantic_version as semver - templates = _conductor.resolve_templates(c.BaseTemplate.create_query(name=template["name"], - version=f'>{template["version"]}', - target=project.target)) + + templates = _conductor.resolve_templates( + c.BaseTemplate.create_query( + name=template["name"], version=f'>{template["version"]}', target=project.target + ) + ) template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) ui.finalize('project-report', report) + @conductor.command('add-depot') @click.argument('name') @click.argument('url') @@ -342,6 +520,7 @@ def add_depot(name: str, url: str): ui.echo(f"Added depot {name} from {url}") + @conductor.command('remove-depot') @click.argument('name') @default_options @@ -356,6 +535,7 @@ def remove_depot(name: str): ui.echo(f"Removed depot {name}") + @conductor.command('query-depots') @click.option('--url', is_flag=True) @default_options @@ -367,4 +547,4 @@ def query_depots(url: bool): """ _conductor = c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo('\n'.join(_conductor.query_depots(url))+"\n") + ui.echo('\n'.join(_conductor.query_depots(url)) + "\n") diff --git a/pros/cli/conductor_utils.py b/pros/cli/conductor_utils.py index cb22cffc..fd25bef3 100644 --- a/pros/cli/conductor_utils.py +++ b/pros/cli/conductor_utils.py @@ -19,14 +19,23 @@ @click.argument('path', type=click.Path(exists=True)) @click.argument('name') @click.argument('version') -@click.option('--system', 'system_files', multiple=True, type=click.Path(), - help='Specify "system" files required by the template') -@click.option('--user', 'user_files', multiple=True, type=click.Path(), - help='Specify files that are intended to be modified by users') +@click.option( + '--system', 'system_files', multiple=True, type=click.Path(), help='Specify "system" files required by the template' +) +@click.option( + '--user', + 'user_files', + multiple=True, + type=click.Path(), + 
help='Specify files that are intended to be modified by users', +) @click.option('--kernels', 'supported_kernels', help='Specify supported kernels') @click.option('--target', type=click.Choice(['v5', 'cortex']), help='Specify the target platform (cortex or v5)') -@click.option('--destination', type=click.Path(), - help='Specify an alternate destination for the created ZIP file or template descriptor') +@click.option( + '--destination', + type=click.Path(), + help='Specify an alternate destination for the created ZIP file or template descriptor', +) @click.option('--zip/--no-zip', 'do_zip', default=True, help='Create a ZIP file or create a template descriptor.') @default_options @click.pass_context @@ -146,16 +155,18 @@ def filename_remap(file_path: str) -> str: template.save() -@conductor.command('purge-template', help='Purge template(s) from the local cache', - context_settings={'ignore_unknown_options': True}) +@conductor.command( + 'purge-template', help='Purge template(s) from the local cache', context_settings={'ignore_unknown_options': True} +) @click.option('-f', '--force', is_flag=True, default=False, help='Do not prompt for removal of multiple templates') @template_query(required=False) @default_options def purge_template(query: c.BaseTemplate, force): analytics.send("purge-template") if not query: - force = click.confirm('Are you sure you want to remove all cached templates? This action is non-reversable!', - abort=True) + force = click.confirm( + 'Are you sure you want to remove all cached templates? This action is non-reversable!', abort=True + ) cond = c.Conductor() templates = cond.resolve_templates(query, allow_online=False) beta_templates = cond.resolve_templates(query, allow_online=False, beta=True) diff --git a/pros/cli/interactive.py b/pros/cli/interactive.py index 634f1b2f..260706fb 100644 --- a/pros/cli/interactive.py +++ b/pros/cli/interactive.py @@ -5,6 +5,7 @@ from .common import PROSGroup, default_options, project_option, pros_root from pros.ga.analytics import analytics + @pros_root def interactive_cli(): pass @@ -22,6 +23,7 @@ def interactive(): def new_project(directory): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.conductor.interactive.NewProjectModal import NewProjectModal + app = NewProjectModal(directory=directory) MachineOutputRenderer(app).run() @@ -32,6 +34,7 @@ def new_project(directory): def update_project(project: Optional[c.Project]): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.conductor.interactive.UpdateProjectModal import UpdateProjectModal + app = UpdateProjectModal(project) MachineOutputRenderer(app).run() @@ -42,4 +45,5 @@ def update_project(project: Optional[c.Project]): def upload(project: Optional[c.Project]): from pros.common.ui.interactive.renderers import MachineOutputRenderer from pros.serial.interactive import UploadProjectModal + MachineOutputRenderer(UploadProjectModal(project)).run() diff --git a/pros/cli/main.py b/pros/cli/main.py index 679d9897..10b5077c 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -41,7 +41,7 @@ 'v5_utils', 'misc_commands', # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade 'interactive', - 'user_script' + 'user_script', ] if getattr(sys, 'frozen', False): @@ -64,8 +64,12 @@ def main(): ctx_obj = {} click_handler = pros.common.ui.log.PROSLogHandler(ctx_obj=ctx_obj) ctx_obj['click_handler'] = click_handler - formatter = pros.common.ui.log.PROSLogFormatter('%(levelname)s - %(name)s:%(funcName)s 
- %(message)s - pros-cli version:{version}' - .format(version = get_version()), ctx_obj) + formatter = pros.common.ui.log.PROSLogFormatter( + '%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}'.format( + version=get_version() + ), + ctx_obj, + ) click_handler.setFormatter(formatter) logging.basicConfig(level=logging.WARNING, handlers=[click_handler]) cli.main(prog_name='pros', obj=ctx_obj, windows_expand_args=False) @@ -95,7 +99,9 @@ def use_analytics(ctx: click.Context, param, value): elif str(value).lower().startswith("f"): touse = False else: - ui.echo('Invalid argument provided for \'--use-analytics\'. Try \'--use-analytics=False\' or \'--use-analytics=True\'') + ui.echo( + 'Invalid argument provided for \'--use-analytics\'. Try \'--use-analytics=False\' or \'--use-analytics=True\'' + ) ctx.exit(0) ctx.ensure_object(dict) analytics.set_use(touse) @@ -103,19 +109,26 @@ def use_analytics(ctx: click.Context, param, value): ctx.exit(0) -@click.command('pros', - cls=PROSCommandCollection, - sources=root_commands) +@click.command('pros', cls=PROSCommandCollection, sources=root_commands) @click.pass_context @default_options -@click.option('--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, - callback=version) -@click.option('--use-analytics', help='Set analytics usage (True/False).', type=str, expose_value=False, - is_eager=True, default=None, callback=use_analytics) +@click.option( + '--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, callback=version +) +@click.option( + '--use-analytics', + help='Set analytics usage (True/False).', + type=str, + expose_value=False, + is_eager=True, + default=None, + callback=use_analytics, +) def cli(ctx): pros.common.sentry.register() ctx.call_on_close(after_command) + def after_command(): analytics.process_requests() diff --git a/pros/cli/misc_commands.py b/pros/cli/misc_commands.py index d212a2fc..d2e62c52 100644 --- a/pros/cli/misc_commands.py +++ b/pros/cli/misc_commands.py @@ -2,34 +2,45 @@ from pros.cli.common import * from pros.ga.analytics import analytics + @pros_root def misc_commands_cli(): pass @misc_commands_cli.command() -@click.option('--force-check', default=False, is_flag=True, - help='Force check for updates, disregarding auto-check frequency') -@click.option('--no-install', default=False, is_flag=True, - help='Only check if a new version is available, do not attempt to install') +@click.option( + '--force-check', default=False, is_flag=True, help='Force check for updates, disregarding auto-check frequency' +) +@click.option( + '--no-install', + default=False, + is_flag=True, + help='Only check if a new version is available, do not attempt to install', +) @default_options def upgrade(force_check, no_install): """ Check for updates to the PROS CLI """ with ui.Notification(): - ui.echo('The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', color='yellow') + ui.echo( + 'The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', + color='yellow', + ) - return # Dead code below + return # Dead code below analytics.send("upgrade") from pros.upgrade import UpgradeManager + manager = UpgradeManager() manifest = manager.get_manifest(force_check) ui.logger(__name__).debug(repr(manifest)) if manager.has_stale_manifest: - ui.logger(__name__).error('Failed to get latest upgrade information. 
' - 'Try running with --debug for more information') + ui.logger(__name__).error( + 'Failed to get latest upgrade information. ' + 'Try running with --debug for more information' + ) return -1 if not manager.needs_upgrade: ui.finalize('upgradeInfo', 'PROS CLI is up to date') diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index a44b89d5..ff26c84f 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -13,6 +13,7 @@ from pros.serial.ports.v5_wireless_port import V5WirelessPort from pros.ga.analytics import analytics + @pros_root def terminal_cli(): pass @@ -21,17 +22,24 @@ def terminal_cli(): @terminal_cli.command() @default_options @click.argument('port', default='default') -@click.option('--backend', type=click.Choice(['share', 'solo']), default='solo', - help='Backend port of the terminal. See above for details') -@click.option('--raw', is_flag=True, default=False, - help='Don\'t process the data.') +@click.option( + '--backend', + type=click.Choice(['share', 'solo']), + default='solo', + help='Backend port of the terminal. See above for details', +) +@click.option('--raw', is_flag=True, default=False, help='Don\'t process the data.') @click.option('--hex', is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") -@click.option('--ports', nargs=2, type=int, default=(None, None), - help='Specify 2 ports for the "share" backend. The default option deterministically selects ports ' - 'based on the serial port name') +@click.option( + '--ports', + nargs=2, + type=int, + default=(None, None), + help='Specify 2 ports for the "share" backend. The default option deterministically selects ports ' + 'based on the serial port name', +) @click.option('--banner/--no-banner', 'request_banner', default=True) -@click.option('--output', nargs = 1, type=str, is_eager = True, help='Redirect terminal output to a file', default=None) - +@click.option('--output', nargs=1, type=str, is_eager=True, help='Redirect terminal output to a file', default=None) def terminal(port: str, backend: str, **kwargs): """ Open a terminal to a serial port @@ -46,6 +54,7 @@ def terminal(port: str, backend: str, **kwargs): analytics.send("terminal") from pros.serial.devices.vex.v5_user_device import V5UserDevice from pros.serial.terminal import Terminal + is_v5_user_joystick = False if port == 'default': project_path = c.Project.find_project(os.getcwd()) @@ -89,11 +98,14 @@ class TerminalOutput(object): def __init__(self, file): self.terminal = sys.stdout self.log = open(file, 'a') + def write(self, data): self.terminal.write(data) self.log.write(data) + def flush(self): pass + def end(self): self.log.close() diff --git a/pros/cli/upload.py b/pros/cli/upload.py index e0c74b9b..0cc22148 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -7,41 +7,112 @@ from .common import * from pros.ga.analytics import analytics + @pros_root def upload_cli(): pass @upload_cli.command(aliases=['u']) -@click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False, - help='Specify the target microcontroller. Overridden when a PROS project is specified.') +@click.option( + '--target', + type=click.Choice(['v5', 'cortex']), + default=None, + required=False, + help='Specify the target microcontroller. 
Overridden when a PROS project is specified.', +) @click.argument('path', type=click.Path(exists=True), default=None, required=False) @click.argument('port', type=str, default=None, required=False) @project_option(required=False, allow_none=True) -@click.option('--run-after/--no-run-after', 'run_after', default=None, help='Immediately run the uploaded program.', - cls=PROSDeprecated, replacement='after') -@click.option('--run-screen/--execute', 'run_screen', default=None, help='Display run program screen on the brain after upload.', - cls=PROSDeprecated, replacement='after') -@click.option('-af', '--after', type=click.Choice(['run','screen','none']), default=None, help='Action to perform on the brain after upload.', - cls=PROSOption, group='V5 Options') +@click.option( + '--run-after/--no-run-after', + 'run_after', + default=None, + help='Immediately run the uploaded program.', + cls=PROSDeprecated, + replacement='after', +) +@click.option( + '--run-screen/--execute', + 'run_screen', + default=None, + help='Display run program screen on the brain after upload.', + cls=PROSDeprecated, + replacement='after', +) +@click.option( + '-af', + '--after', + type=click.Choice(['run', 'screen', 'none']), + default=None, + help='Action to perform on the brain after upload.', + cls=PROSOption, + group='V5 Options', +) @click.option('--quirk', type=int, default=0) -@click.option('--name', 'remote_name', type=str, default=None, required=False, help='Remote program name.', - cls=PROSOption, group='V5 Options') -@click.option('--slot', default=None, type=click.IntRange(min=1, max=8), help='Program slot on the GUI.', - cls=PROSOption, group='V5 Options') -@click.option('--icon', type=click.Choice(['pros','pizza','planet','alien','ufo','robot','clawbot','question','X','power']), default='pros', - help="Change Program's icon on the V5 Brain", cls=PROSOption, group='V5 Options') -@click.option('--program-version', default=None, type=str, help='Specify version metadata for program.', - cls=PROSOption, group='V5 Options', hidden=True) -@click.option('--ini-config', type=click.Path(exists=True), default=None, help='Specify a program configuration file.', - cls=PROSOption, group='V5 Options', hidden=True) -@click.option('--compress-bin/--no-compress-bin', 'compress_bin', cls=PROSOption, group='V5 Options', default=True, - help='Compress the program binary before uploading.') -@click.option('--description', default="Made with PROS", type=str, cls=PROSOption, group='V5 Options', - help='Change the description displayed for the program.') -@click.option('--name', default=None, type=str, cls=PROSOption, group='V5 Options', - help='Change the name of the program.') - +@click.option( + '--name', + 'remote_name', + type=str, + default=None, + required=False, + help='Remote program name.', + cls=PROSOption, + group='V5 Options', +) +@click.option( + '--slot', + default=None, + type=click.IntRange(min=1, max=8), + help='Program slot on the GUI.', + cls=PROSOption, + group='V5 Options', +) +@click.option( + '--icon', + type=click.Choice(['pros', 'pizza', 'planet', 'alien', 'ufo', 'robot', 'clawbot', 'question', 'X', 'power']), + default='pros', + help="Change Program's icon on the V5 Brain", + cls=PROSOption, + group='V5 Options', +) +@click.option( + '--program-version', + default=None, + type=str, + help='Specify version metadata for program.', + cls=PROSOption, + group='V5 Options', + hidden=True, +) +@click.option( + '--ini-config', + type=click.Path(exists=True), + default=None, + help='Specify a program configuration 
file.', + cls=PROSOption, + group='V5 Options', + hidden=True, +) +@click.option( + '--compress-bin/--no-compress-bin', + 'compress_bin', + cls=PROSOption, + group='V5 Options', + default=True, + help='Compress the program binary before uploading.', +) +@click.option( + '--description', + default="Made with PROS", + type=str, + cls=PROSOption, + group='V5 Options', + help='Change the description displayed for the program.', +) +@click.option( + '--name', default=None, type=str, cls=PROSOption, group='V5 Options', help='Change the name of the program.' +) @default_options def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwargs): """ @@ -56,7 +127,8 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg analytics.send("upload") import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort - kwargs['ide_version'] = project.kernel if not project==None else "None" + + kwargs['ide_version'] = project.kernel if not project == None else "None" kwargs['ide'] = 'PROS' if path is None or os.path.isdir(path): if project is None: @@ -76,9 +148,9 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg kwargs.pop('slot') elif kwargs.get('slot', None) is None: kwargs['slot'] = 1 - if 'icon' in options and kwargs.get('icon','pros') == 'pros': + if 'icon' in options and kwargs.get('icon', 'pros') == 'pros': kwargs.pop('icon') - if 'after' in options and kwargs.get('after','screen') is None: + if 'after' in options and kwargs.get('after', 'screen') is None: kwargs.pop('after') options.update(kwargs) @@ -89,16 +161,16 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg if 'remote_name' not in kwargs: kwargs['remote_name'] = project.name name_to_file = { - 'pros' : 'USER902x.bmp', - 'pizza' : 'USER003x.bmp', - 'planet' : 'USER013x.bmp', - 'alien' : 'USER027x.bmp', - 'ufo' : 'USER029x.bmp', - 'clawbot' : 'USER010x.bmp', - 'robot' : 'USER011x.bmp', - 'question' : 'USER002x.bmp', - 'power' : 'USER012x.bmp', - 'X' : 'USER001x.bmp' + 'pros': 'USER902x.bmp', + 'pizza': 'USER003x.bmp', + 'planet': 'USER013x.bmp', + 'alien': 'USER027x.bmp', + 'ufo': 'USER029x.bmp', + 'clawbot': 'USER010x.bmp', + 'robot': 'USER011x.bmp', + 'question': 'USER002x.bmp', + 'power': 'USER012x.bmp', + 'X': 'USER001x.bmp', } kwargs['icon'] = name_to_file[kwargs['icon']] if 'target' not in kwargs or kwargs['target'] is None: @@ -114,25 +186,25 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg if not port: raise dont_send(click.UsageError('No port provided or located. 
Make sure to specify --target if needed.')) if kwargs['target'] == 'v5': - kwargs['remote_name'] = kwargs['name'] if kwargs.get("name",None) else kwargs['remote_name'] + kwargs['remote_name'] = kwargs['name'] if kwargs.get("name", None) else kwargs['remote_name'] if kwargs['remote_name'] is None: kwargs['remote_name'] = os.path.splitext(os.path.basename(path))[0] kwargs['remote_name'] = kwargs['remote_name'].replace('@', '_') kwargs['slot'] -= 1 action_to_kwarg = { - 'run' : vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, - 'screen' : vex.V5Device.FTCompleteOptions.RUN_SCREEN, - 'none' : vex.V5Device.FTCompleteOptions.DONT_RUN - } + 'run': vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, + 'screen': vex.V5Device.FTCompleteOptions.RUN_SCREEN, + 'none': vex.V5Device.FTCompleteOptions.DONT_RUN, + } after_upload_default = 'screen' - #Determine which FTCompleteOption to assign to run_after - if kwargs['after']==None: - kwargs['after']=after_upload_default + # Determine which FTCompleteOption to assign to run_after + if kwargs['after'] == None: + kwargs['after'] = after_upload_default if kwargs['run_after']: - kwargs['after']='run' - elif kwargs['run_screen']==False and not kwargs['run_after']: - kwargs['after']='none' + kwargs['after'] = 'run' + elif kwargs['run_screen'] == False and not kwargs['run_after']: + kwargs['after'] = 'none' kwargs['run_after'] = action_to_kwarg[kwargs['after']] kwargs.pop('run_screen') kwargs.pop('after') @@ -157,6 +229,7 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg logger(__name__).exception(e, exc_info=True) exit(1) + @upload_cli.command('lsusb', aliases=['ls-usb', 'ls-devices', 'lsdev', 'list-usb', 'list-devices']) @click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False) @default_options @@ -174,10 +247,7 @@ def __init__(self, header: str, ports: List[Any], machine_header: Optional[str] self.machine_header = machine_header or header def __getstate__(self): - return { - 'device_type': self.machine_header, - 'devices': self.ports - } + return {'device_type': self.machine_header, 'devices': self.ports} def __str__(self): if len(self.ports) == 0: @@ -206,5 +276,6 @@ def __str__(self): def make_upload_terminal(ctx, **upload_kwargs): analytics.send("upload-terminal") from .terminal import terminal + ctx.invoke(upload, **upload_kwargs) ctx.invoke(terminal, request_banner=False) diff --git a/pros/cli/user_script.py b/pros/cli/user_script.py index be0f8259..eac7db18 100644 --- a/pros/cli/user_script.py +++ b/pros/cli/user_script.py @@ -4,6 +4,7 @@ from .common import default_options, pros_root from pros.ga.analytics import analytics + @pros_root def user_script_cli(): pass @@ -19,6 +20,7 @@ def user_script(script_file): analytics.send("user-script") import os.path import importlib.util + package_name = os.path.splitext(os.path.split(script_file)[0])[0] package_path = os.path.abspath(script_file) ui.echo(f'Loading {package_name} from {package_path}') diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index 7c0809bd..933cae59 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -1,6 +1,7 @@ from .common import * from pros.ga.analytics import analytics + @pros_root def v5_utils_cli(): pass @@ -22,6 +23,7 @@ def status(port: str): analytics.send("status") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -50,6 +52,7 @@ def ls_files(port: str, vid: int, options: int): 
analytics.send("ls-files") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -75,14 +78,14 @@ def read_file(file_name: str, port: str, vid: int, source: str): analytics.send("read-file") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.read_file(file=click.get_binary_stream('stdout'), remote_file=file_name, - vid=vid, target=source) + device.read_file(file=click.get_binary_stream('stdout'), remote_file=file_name, vid=vid, target=source) @v5.command(hidden=True) @@ -101,6 +104,7 @@ def write_file(file, port: str, remote_file: str, **kwargs): analytics.send("write-file") from pros.serial.ports import DirectPort from pros.serial.devices.vex import V5Device + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -114,8 +118,13 @@ def write_file(file, port: str, remote_file: str, **kwargs): @click.argument('file_name') @click.argument('port', required=False, default=None) @click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--erase-all/--erase-only', 'erase_all', default=False, show_default=True, - help='Erase all files matching base name.') +@click.option( + '--erase-all/--erase-only', + 'erase_all', + default=False, + show_default=True, + help='Erase all files matching base name.', +) @default_options def rm_file(file_name: str, port: str, vid: int, erase_all: bool): """ @@ -124,6 +133,7 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): analytics.send("rm-file") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -145,6 +155,7 @@ def cat_metadata(file_name: str, port: str, vid: int): analytics.send("cat-metadata") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -153,6 +164,7 @@ def cat_metadata(file_name: str, port: str, vid: int): device = V5Device(ser) print(device.get_file_metadata_by_name(file_name, vid=vid)) + @v5.command('rm-program') @click.argument('slot') @click.argument('port', type=int, required=False, default=None) @@ -164,9 +176,10 @@ def rm_program(slot: int, port: str, vid: int): """ from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: - return - 1 + return -1 base_name = f'slot_{slot}' ser = DirectPort(port) @@ -174,6 +187,7 @@ def rm_program(slot: int, port: str, vid: int): device.erase_file(f'{base_name}.ini', vid=vid) device.erase_file(f'{base_name}.bin', vid=vid) + @v5.command('rm-all') @click.argument('port', required=False, default=None) @click.option('--vid', type=int, default=1, hidden=True, cls=PROSOption) @@ -185,6 +199,7 @@ def rm_all(port: str, vid: int): analytics.send("rm-all") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -210,8 +225,10 @@ def run(slot: str, port: str): analytics.send("run") from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + file = f'slot_{slot}.bin' import re + if not re.match(r'[\w\.]{1,24}', file): logger(__name__).error('file must be a valid V5 filename') return 1 @@ 
-234,6 +251,7 @@ def stop(port: str): """ from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + port = resolve_v5_port(port, 'system')[0] if not port: return -1 @@ -270,6 +288,7 @@ def capture(file_name: str, port: str, force: bool = False): # Sanity checking and default values for filenames if file_name is None: import time + time_s = time.strftime('%Y-%m-%d-%H%M%S') file_name = f'{time_s}_{width}x{height}_pros_capture.png' if file_name == '-': @@ -291,6 +310,7 @@ def capture(file_name: str, port: str, force: bool = False): print(f'Saved screen capture to {file_name}') + @v5.command(aliases=['sv', 'set'], short_help='Set a kernel variable on a connected V5 device') @click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) @click.argument('value', required=True, type=click.STRING, nargs=1) @@ -308,6 +328,7 @@ def set_variable(variable, value, port): actual_value = device.kv_write(variable, value).decode() print(f'Value of \'{variable}\' set to : {actual_value}') + @v5.command(aliases=['rv', 'get'], short_help='Read a kernel variable from a connected V5 device') @click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) @click.argument('port', type=str, default=None, required=False) diff --git a/pros/common/sentry.py b/pros/common/sentry.py index 6c0c8690..688f42e4 100644 --- a/pros/common/sentry.py +++ b/pros/common/sentry.py @@ -13,10 +13,12 @@ force_prompt_off = False SUPPRESSED_EXCEPTIONS = [PermissionError, click.Abort] + def disable_prompt(): global force_prompt_off force_prompt_off = True + def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: """ Asks the user for permission to send data to Sentry @@ -31,8 +33,10 @@ def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Opt if 'extra' in event and not event['extra'].get('sentry', True): ui.logger(__name__).debug('Not sending candidate event because event was tagged with extra.sentry = False') return - if 'exc_info' in hint and (not getattr(hint['exc_info'][1], 'sentry', True) or - any(isinstance(hint['exc_info'][1], t) for t in SUPPRESSED_EXCEPTIONS)): + if 'exc_info' in hint and ( + not getattr(hint['exc_info'][1], 'sentry', True) + or any(isinstance(hint['exc_info'][1], t) for t in SUPPRESSED_EXCEPTIONS) + ): ui.logger(__name__).debug('Not sending candidate event because exception was tagged with sentry = False') return @@ -48,10 +52,12 @@ def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Opt extra_text += event['logentry']['message'] + '\n' if 'exc_info' in hint: import traceback + extra_text += ''.join(traceback.format_exception(*hint['exc_info'], limit=4)) - event['tags']['confirmed'] = ui.confirm('We detected something went wrong! Do you want to send a report?', - log=extra_text) + event['tags']['confirmed'] = ui.confirm( + 'We detected something went wrong! 
Do you want to send a report?', log=extra_text + ) if event['tags']['confirmed']: ui.echo('Sending bug report.') @@ -77,6 +83,7 @@ class TemplateHandler(jsonpickle.handlers.BaseHandler): Override how templates get pickled by JSON pickle - we don't want to send all of the data about a template from an object """ + from pros.conductor.templates import BaseTemplate def flatten(self, obj: BaseTemplate, data): @@ -98,6 +105,7 @@ def restore(self, obj): jsonpickle.handlers.register(BaseTemplate, TemplateHandler, base=True) from sentry_sdk import configure_scope + with configure_scope() as scope: scope.set_extra((key or obj.__class__.__qualname__), jsonpickle.pickler.Pickler(unpicklable=False).flatten(obj)) @@ -116,6 +124,7 @@ def register(cfg: Optional['CliConfig'] = None): global cli_config, client if cfg is None: from pros.config.cli_config import cli_config as get_cli_config + cli_config = get_cli_config() else: cli_config = cfg @@ -131,7 +140,7 @@ def register(cfg: Optional['CliConfig'] = None): client = sentry.Client( 'https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033', before_send=prompt_to_send, - release=ui.get_version() + release=ui.get_version(), ) sentry.Hub.current.bind_client(client) diff --git a/pros/common/ui/__init__.py b/pros/common/ui/__init__.py index 24fcc71d..d79b1e0f 100644 --- a/pros/common/ui/__init__.py +++ b/pros/common/ui/__init__.py @@ -24,8 +24,15 @@ def _machine_notify(method: str, obj: Dict[str, Any], notify_value: Optional[int _machineoutput(obj) -def echo(text: Any, err: bool = False, nl: bool = True, notify_value: int = None, color: Any = None, - output_machine: bool = True, ctx: Optional[click.Context] = None): +def echo( + text: Any, + err: bool = False, + nl: bool = True, + notify_value: int = None, + color: Any = None, + output_machine: bool = True, + ctx: Optional[click.Context] = None, +): add_breadcrumb(message=text, category='echo') if ismachineoutput(ctx): if output_machine: @@ -34,9 +41,16 @@ def echo(text: Any, err: bool = False, nl: bool = True, notify_value: int = None return click.echo(str(text), nl=nl, err=err, color=color) -def confirm(text: str, default: bool = False, abort: bool = False, prompt_suffix: bool = ': ', - show_default: bool = True, err: bool = False, title: AnyStr = 'Please confirm:', - log: str = None): +def confirm( + text: str, + default: bool = False, + abort: bool = False, + prompt_suffix: bool = ': ', + show_default: bool = True, + err: bool = False, + title: AnyStr = 'Please confirm:', + log: str = None, +): add_breadcrumb(message=text, category='confirm') if ismachineoutput(): from pros.common.ui.interactive.ConfirmModal import ConfirmModal @@ -45,37 +59,66 @@ def confirm(text: str, default: bool = False, abort: bool = False, prompt_suffix app = ConfirmModal(text, abort, title, log) rv = MachineOutputRenderer(app).run() else: - rv = click.confirm(text, default=default, abort=abort, prompt_suffix=prompt_suffix, - show_default=show_default, err=err) + rv = click.confirm( + text, default=default, abort=abort, prompt_suffix=prompt_suffix, show_default=show_default, err=err + ) add_breadcrumb(message=f'User responded: {rv}') return rv -def prompt(text, default=None, hide_input=False, - confirmation_prompt=False, type=None, - value_proc=None, prompt_suffix=': ', - show_default=True, err=False): +def prompt( + text, + default=None, + hide_input=False, + confirmation_prompt=False, + type=None, + value_proc=None, + prompt_suffix=': ', + show_default=True, + err=False, +): if ismachineoutput(): # TODO pass else: - return 
click.prompt(text, default=default, hide_input=hide_input, confirmation_prompt=confirmation_prompt, - type=type, value_proc=value_proc, prompt_suffix=prompt_suffix, show_default=show_default, - err=err) - - -def progressbar(iterable: Iterable = None, length: int = None, label: str = None, show_eta: bool = True, - show_percent: bool = True, show_pos: bool = False, item_show_func: Callable = None, - fill_char: str = '#', empty_char: str = '-', bar_template: str = '%(label)s [%(bar)s] %(info)s', - info_sep: str = ' ', width: int = 36): + return click.prompt( + text, + default=default, + hide_input=hide_input, + confirmation_prompt=confirmation_prompt, + type=type, + value_proc=value_proc, + prompt_suffix=prompt_suffix, + show_default=show_default, + err=err, + ) + + +def progressbar( + iterable: Iterable = None, + length: int = None, + label: str = None, + show_eta: bool = True, + show_percent: bool = True, + show_pos: bool = False, + item_show_func: Callable = None, + fill_char: str = '#', + empty_char: str = '-', + bar_template: str = '%(label)s [%(bar)s] %(info)s', + info_sep: str = ' ', + width: int = 36, +): if ismachineoutput(): return _MachineOutputProgressBar(**locals()) else: return click.progressbar(**locals()) -def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, object, Tuple]]], - human_prefix: Optional[str] = None): +def finalize( + method: str, + data: Union[str, Dict, object, List[Union[str, Dict, object, Tuple]]], + human_prefix: Optional[str] = None, +): """ To all those who have to debug this... RIP """ @@ -96,9 +139,11 @@ def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, o if not isinstance(data[0], dict): data = [d.__dict__ for d in data] import tabulate + human_readable = tabulate.tabulate([d.values() for d in data], headers=data[0].keys()) elif isinstance(data[0], tuple): import tabulate + human_readable = tabulate.tabulate(data[1:], headers=data[0]) else: human_readable = data @@ -108,12 +153,7 @@ def finalize(method: str, data: Union[str, Dict, object, List[Union[str, Dict, o human_readable = data.__dict__ human_readable = (human_prefix or '') + str(human_readable) if ismachineoutput(): - _machineoutput({ - 'type': 'finalize', - 'method': method, - 'data': data, - 'human': human_readable - }) + _machineoutput({'type': 'finalize', 'method': method, 'data': data, 'human': human_readable}) else: echo(human_readable) @@ -170,21 +210,18 @@ def __init__(self, err: bool = False, ctx: Optional[click.Context] = None): self.start() def fileno(self): - """Return the write file descriptor of the pipe - """ + """Return the write file descriptor of the pipe""" return self.fdWrite def run(self): - """Run the thread, logging everything. - """ + """Run the thread, logging everything.""" for line in iter(self.pipeReader.readline, ''): echo(line.strip('\n'), ctx=self.click_ctx, err=self.is_err) self.pipeReader.close() def close(self): - """Close the write end of the pipe. 
- """ + """Close the write end of the pipe.""" os.close(self.fdWrite) diff --git a/pros/common/ui/interactive/application.py b/pros/common/ui/interactive/application.py index 0db8dfaf..df393471 100644 --- a/pros/common/ui/interactive/application.py +++ b/pros/common/ui/interactive/application.py @@ -75,7 +75,7 @@ def __getstate__(self): return dict( etype=Application.get_hierarchy(self.__class__), elements=[e.__getstate__() for e in self.build()], - uuid=self.uuid + uuid=self.uuid, ) @@ -84,6 +84,7 @@ class Modal(Application[P], Generic[P]): An Application which is typically displayed in a pop-up box. It has a title, description, continue button, and cancel button. """ + # title of the modal to be displayed title: AnyStr # optional description displayed underneath the Modal @@ -96,9 +97,15 @@ class Modal(Application[P], Generic[P]): # Cancel button text cancel_button: AnyStr - def __init__(self, title: AnyStr, description: Optional[AnyStr] = None, - will_abort: bool = True, confirm_button: AnyStr = 'Continue', cancel_button: AnyStr = 'Cancel', - can_confirm: Optional[bool] = None): + def __init__( + self, + title: AnyStr, + description: Optional[AnyStr] = None, + will_abort: bool = True, + confirm_button: AnyStr = 'Continue', + cancel_button: AnyStr = 'Cancel', + can_confirm: Optional[bool] = None, + ): super().__init__() self.title = title self.description = description diff --git a/pros/common/ui/interactive/components/__init__.py b/pros/common/ui/interactive/components/__init__.py index e470f931..abc969a8 100644 --- a/pros/common/ui/interactive/components/__init__.py +++ b/pros/common/ui/interactive/components/__init__.py @@ -6,5 +6,17 @@ from .input_groups import ButtonGroup, DropDownBox from .label import Label, Spinner, VerbatimLabel -__all__ = ['Component', 'Button', 'Container', 'InputBox', 'ButtonGroup', 'DropDownBox', 'Label', - 'DirectorySelector', 'FileSelector', 'Checkbox', 'Spinner', 'VerbatimLabel'] +__all__ = [ + 'Component', + 'Button', + 'Container', + 'InputBox', + 'ButtonGroup', + 'DropDownBox', + 'Label', + 'DirectorySelector', + 'FileSelector', + 'Checkbox', + 'Spinner', + 'VerbatimLabel', +] diff --git a/pros/common/ui/interactive/components/button.py b/pros/common/ui/interactive/components/button.py index a3716158..52f43004 100644 --- a/pros/common/ui/interactive/components/button.py +++ b/pros/common/ui/interactive/components/button.py @@ -17,8 +17,4 @@ def on_clicked(self, *handlers: Callable, **kwargs): return self.on('clicked', *handlers, **kwargs) def __getstate__(self) -> dict: - return dict( - **super(Button, self).__getstate__(), - text=self.text, - uuid=self.uuid - ) + return dict(**super(Button, self).__getstate__(), text=self.text, uuid=self.uuid) diff --git a/pros/common/ui/interactive/components/component.py b/pros/common/ui/interactive/components/component.py index 158fc0bc..3880e5a6 100644 --- a/pros/common/ui/interactive/components/component.py +++ b/pros/common/ui/interactive/components/component.py @@ -29,9 +29,7 @@ def get_hierarchy(cls, base: type) -> Optional[List[str]]: return None def __getstate__(self) -> Dict: - return dict( - etype=Component.get_hierarchy(self.__class__) - ) + return dict(etype=Component.get_hierarchy(self.__class__)) P = TypeVar('P', bound=Parameter) diff --git a/pros/common/ui/interactive/components/container.py b/pros/common/ui/interactive/components/container.py index 8b8615f4..61a2a68e 100644 --- a/pros/common/ui/interactive/components/container.py +++ b/pros/common/ui/interactive/components/container.py @@ -9,25 +9,24 
@@ class Container(Component): A Container has multiple Components, possibly a title, and possibly a description """ - def __init__(self, *elements: Component, - title: Optional[AnyStr] = None, description: Optional[AnyStr] = None, - collapsed: Union[BooleanParameter, bool] = False): + def __init__( + self, + *elements: Component, + title: Optional[AnyStr] = None, + description: Optional[AnyStr] = None, + collapsed: Union[BooleanParameter, bool] = False + ): self.title = title self.description = description self.elements = elements self.collapsed = BooleanParameter(collapsed) if isinstance(collapsed, bool) else collapsed def __getstate__(self): - extra_state = { - 'uuid': self.collapsed.uuid, - 'collapsed': self.collapsed.value - } + extra_state = {'uuid': self.collapsed.uuid, 'collapsed': self.collapsed.value} if self.title is not None: extra_state['title'] = self.title if self.description is not None: extra_state['description'] = self.description return dict( - **super(Container, self).__getstate__(), - **extra_state, - elements=[e.__getstate__() for e in self.elements] + **super(Container, self).__getstate__(), **extra_state, elements=[e.__getstate__() for e in self.elements] ) diff --git a/pros/common/ui/interactive/components/input_groups.py b/pros/common/ui/interactive/components/input_groups.py index 93171cfd..9e3c95bc 100644 --- a/pros/common/ui/interactive/components/input_groups.py +++ b/pros/common/ui/interactive/components/input_groups.py @@ -4,10 +4,7 @@ class DropDownBox(BasicParameterizedComponent[OptionParameter]): def __getstate__(self): - return dict( - **super(DropDownBox, self).__getstate__(), - options=self.parameter.options - ) + return dict(**super(DropDownBox, self).__getstate__(), options=self.parameter.options) class ButtonGroup(DropDownBox): diff --git a/pros/common/ui/interactive/components/label.py b/pros/common/ui/interactive/components/label.py index 8b060300..f4ac5592 100644 --- a/pros/common/ui/interactive/components/label.py +++ b/pros/common/ui/interactive/components/label.py @@ -8,16 +8,14 @@ def __init__(self, text: AnyStr): self.text = text def __getstate__(self): - return dict( - **super(Label, self).__getstate__(), - text=self.text - ) + return dict(**super(Label, self).__getstate__(), text=self.text) class VerbatimLabel(Label): """ Should be displayed with a monospace font """ + pass diff --git a/pros/common/ui/interactive/observable.py b/pros/common/ui/interactive/observable.py index ec8b0855..a45a4dda 100644 --- a/pros/common/ui/interactive/observable.py +++ b/pros/common/ui/interactive/observable.py @@ -27,9 +27,14 @@ def notify(cls, uuid, event, *args, **kwargs): else: logger(__name__).warning(f'Could not find an Observable to notify with UUID: {uuid}', sentry=True) - def on(self, event, *handlers, - bound_args: Tuple[Any, ...] = None, bound_kwargs: Dict[str, Any] = None, - asynchronous: bool = False) -> Callable: + def on( + self, + event, + *handlers, + bound_args: Tuple[Any, ...] = None, + bound_kwargs: Dict[str, Any] = None, + asynchronous: bool = False, + ) -> Callable: """ Sets up a callable to be called whenenver "event" is triggered :param event: Event to bind to. Most classes expose an e.g. 
"on_changed" wrapper which provides the correct @@ -49,16 +54,20 @@ def on(self, event, *handlers, bound_kwargs = {} if asynchronous: + def bind(h): def bound(*args, **kw): from threading import Thread from pros.common.utils import with_click_context + t = Thread(target=with_click_context(h), args=(*bound_args, *args), kwargs={**bound_kwargs, **kw}) t.start() return t return bound + else: + def bind(h): @wraps(h) def bound(*args, **kw): diff --git a/pros/common/ui/interactive/parameters/__init__.py b/pros/common/ui/interactive/parameters/__init__.py index 55c5dafe..8d397694 100644 --- a/pros/common/ui/interactive/parameters/__init__.py +++ b/pros/common/ui/interactive/parameters/__init__.py @@ -2,5 +2,11 @@ from .parameter import Parameter from .validatable_parameter import AlwaysInvalidParameter, ValidatableParameter -__all__ = ['Parameter', 'OptionParameter', 'BooleanParameter', 'ValidatableParameter', 'RangeParameter', - 'AlwaysInvalidParameter'] +__all__ = [ + 'Parameter', + 'OptionParameter', + 'BooleanParameter', + 'ValidatableParameter', + 'RangeParameter', + 'AlwaysInvalidParameter', +] diff --git a/pros/common/ui/interactive/parameters/validatable_parameter.py b/pros/common/ui/interactive/parameters/validatable_parameter.py index ceafd59f..208c3bb3 100644 --- a/pros/common/ui/interactive/parameters/validatable_parameter.py +++ b/pros/common/ui/interactive/parameters/validatable_parameter.py @@ -13,8 +13,12 @@ class ValidatableParameter(Parameter, Generic[T]): the callback get invoked. This event tag is "changed_validated" """ - def __init__(self, initial_value: T, allow_invalid_input: bool = True, - validate: Optional[Callable[[T], Union[bool, str]]] = None): + def __init__( + self, + initial_value: T, + allow_invalid_input: bool = True, + validate: Optional[Callable[[T], Union[bool, str]]] = None, + ): """ :param allow_invalid_input: Allow invalid input to be propagated to the `changed` event """ diff --git a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py index 4bb5eddb..35d614c5 100644 --- a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py +++ b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py @@ -93,10 +93,7 @@ def stop(self): self.wake_me() ui.logger(__name__).debug(f'Broadcasting stop {self.app}') - self._output({ - 'uuid': self.app.uuid, - 'should_exit': True - }) + self._output({'uuid': self.app.uuid, 'should_exit': True}) _remove_renderer(self) top_renderer = _current_renderer() diff --git a/pros/common/ui/log.py b/pros/common/ui/log.py index 8202ef95..bc37e0ad 100644 --- a/pros/common/ui/log.py +++ b/pros/common/ui/log.py @@ -28,7 +28,7 @@ def emit(self, record): 'type': 'log/message', 'level': record.levelname, 'message': formatter.formatMessage(record), - 'simpleMessage': record.message + 'simpleMessage': record.message, } if record.exc_info: obj['trace'] = formatter.formatException(record.exc_info) diff --git a/pros/common/utils.py b/pros/common/utils.py index d74d9a2d..937e5478 100644 --- a/pros/common/utils.py +++ b/pros/common/utils.py @@ -21,6 +21,7 @@ def get_version(): try: if getattr(sys, 'frozen', False): import _constants + ver = _constants.CLI_VERSION if ver is not None: return ver @@ -32,6 +33,7 @@ def get_version(): pass else: import pros.cli.main + module = pros.cli.main.__name__ for dist in pkg_resources.working_set: scripts = dist.get_entry_map().get('console_scripts') or {} @@ -91,6 +93,7 @@ def with_click_context(func): if not ctx or not 
isinstance(ctx, click.Context): return func else: + def _wrap(*args, **kwargs): with ctx: try: @@ -111,6 +114,7 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non """ import requests from pros.common.ui import progressbar + # from rfc6266_parser import parse_requests_response import re @@ -135,8 +139,9 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non os.makedirs(os.path.dirname(output_path), exist_ok=True) with open(output_path, mode='wb') as file: - with progressbar(length=int(response.headers['Content-Length']), - label=desc or f'Downloading {filename}') as pb: + with progressbar( + length=int(response.headers['Content-Length']), label=desc or f'Downloading {filename}' + ) as pb: for chunk in response.iter_content(256): file.write(chunk) pb.update(len(chunk)) diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index 83c3aab7..073123ff 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -27,10 +27,12 @@ class ReleaseChannel(Enum): Beta = 'beta' """ + class Conductor(Config): """ Provides entrances for all conductor-related tasks (fetching, applying, creating new projects) """ + def __init__(self, file=None): if not file: file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') @@ -44,31 +46,29 @@ def __init__(self, file=None): self.warn_early_access = False super(Conductor, self).__init__(file) needs_saving = False - if MAINLINE_NAME not in self.depots or \ - not isinstance(self.depots[MAINLINE_NAME], HttpDepot) or \ - self.depots[MAINLINE_NAME].location != MAINLINE_URL: + if ( + MAINLINE_NAME not in self.depots + or not isinstance(self.depots[MAINLINE_NAME], HttpDepot) + or self.depots[MAINLINE_NAME].location != MAINLINE_URL + ): self.depots[MAINLINE_NAME] = HttpDepot(MAINLINE_NAME, MAINLINE_URL) needs_saving = True # add early access depot as another remote depot - if EARLY_ACCESS_NAME not in self.depots or \ - not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) or \ - self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL: + if ( + EARLY_ACCESS_NAME not in self.depots + or not isinstance(self.depots[EARLY_ACCESS_NAME], HttpDepot) + or self.depots[EARLY_ACCESS_NAME].location != EARLY_ACCESS_URL + ): self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) needs_saving = True if self.default_target is None: self.default_target = 'v5' needs_saving = True if self.default_libraries is None: - self.default_libraries = { - 'v5': ['okapilib'], - 'cortex': [] - } + self.default_libraries = {'v5': ['okapilib'], 'cortex': []} needs_saving = True if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: - self.early_access_libraries = { - 'v5': ['liblvgl', 'okapilib'], - 'cortex': [] - } + self.early_access_libraries = {'v5': ['liblvgl', 'okapilib'], 'cortex': []} needs_saving = True if 'v5' not in self.default_libraries: self.default_libraries['v5'] = [] @@ -85,6 +85,7 @@ def __init__(self, file=None): if needs_saving: self.save() from pros.common.sentry import add_context + add_context(self) def get_depot(self, name: str) -> Optional[Depot]: @@ -107,7 +108,7 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca local_template = LocalTemplate(orig=template, location=destination) local_template.metadata['origin'] = depot.name click.echo(f'Adding {local_template.identifier} to registry...', nl=False) - if depot.name == EARLY_ACCESS_NAME: # check for early access + if depot.name == 
EARLY_ACCESS_NAME: # check for early access self.early_access_local_templates.add(local_template) else: self.local_templates.add(local_template) @@ -120,7 +121,9 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca def purge_template(self, template: LocalTemplate): if template.metadata['origin'] == EARLY_ACCESS_NAME: if template not in self.early_access_local_templates: - logger(__name__).info(f"{template.identifier} was not in the Conductor's local early access templates cache.") + logger(__name__).info( + f"{template.identifier} was not in the Conductor's local early access templates cache." + ) else: self.early_access_local_templates.remove(template) else: @@ -130,14 +133,20 @@ def purge_template(self, template: LocalTemplate): self.local_templates.remove(template) if os.path.abspath(template.location).startswith( - os.path.abspath(os.path.join(self.directory, 'templates'))) \ - and os.path.isdir(template.location): + os.path.abspath(os.path.join(self.directory, 'templates')) + ) and os.path.isdir(template.location): shutil.rmtree(template.location) self.save() - def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: bool = True, - allow_offline: bool = True, force_refresh: bool = False, - unique: bool = True, **kwargs) -> List[BaseTemplate]: + def resolve_templates( + self, + identifier: Union[str, BaseTemplate], + allow_online: bool = True, + allow_offline: bool = True, + force_refresh: bool = False, + unique: bool = True, + **kwargs, + ) -> List[BaseTemplate]: results = list() if not unique else set() kernel_version = kwargs.get('kernel_version', None) if kwargs.get('early_access', None) is not None: @@ -150,9 +159,15 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: offline_results = list() if self.use_early_access: - offline_results.extend(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates)) - - offline_results.extend(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates)) + offline_results.extend( + filter( + lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates + ) + ) + + offline_results.extend( + filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates) + ) if unique: results.update(offline_results) @@ -163,8 +178,9 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: # EarlyAccess depot will only be accessed when the --early-access flag is true if depot.name != EARLY_ACCESS_NAME or (depot.name == EARLY_ACCESS_NAME and self.use_early_access): remote_templates = depot.get_remote_templates(force_check=force_refresh, **kwargs) - online_results = list(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), - remote_templates)) + online_results = list( + filter(lambda t: t.satisfies(query, kernel_version=kernel_version), remote_templates) + ) if unique: results.update(online_results) @@ -175,7 +191,8 @@ def resolve_templates(self, identifier: Union[str, BaseTemplate], allow_online: if len(results) == 0 and not self.use_early_access: raise dont_send( - InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}')) + InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}') + ) return list(results) @@ -231,7 +248,8 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], template = 
self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) if template is None: raise dont_send( - InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}')) + InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}') + ) # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 if template.name == 'kernel': @@ -240,23 +258,27 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], curr_proj = Project() if curr_proj.kernel: if template.version[0] == '4' and curr_proj.kernel[0] == '3': - confirm = ui.confirm(f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' - f'Do you still want to upgrade?') + confirm = ui.confirm( + f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' + f'Do you still want to upgrade?' + ) if not confirm: - raise dont_send( - InvalidTemplateException(f'Not upgrading')) + raise dont_send(InvalidTemplateException(f'Not upgrading')) if template.version[0] == '3' and curr_proj.kernel[0] == '4': - confirm = ui.confirm(f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' - f'Do you still want to downgrade?') + confirm = ui.confirm( + f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' + f'Do you still want to downgrade?' + ) if not confirm: - raise dont_send( - InvalidTemplateException(f'Not downgrading')) + raise dont_send(InvalidTemplateException(f'Not downgrading')) elif not self.use_early_access and template.version[0] == '3' and not self.warn_early_access: - confirm = ui.confirm(f'PROS 4 is now in early access. ' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?') + confirm = ui.confirm( + f'PROS 4 is now in early access. ' + f'Please use the --early-access flag if you would like to use it.\n' + f'Do you want to use PROS 4 instead?' 
+ ) self.warn_early_access = True - if confirm: # use pros 4 + if confirm: # use pros 4 self.use_early_access = True kwargs['version'] = '>=0' self.save() @@ -275,40 +297,52 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], raise dont_send( InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) ) - if force \ - or (valid_action == TemplateAction.Upgradable and upgrade_ok) \ - or (valid_action == TemplateAction.Installable and install_ok) \ - or (valid_action == TemplateAction.Downgradable and downgrade_ok): - project.apply_template(template, force_system=kwargs.pop('force_system', False), - force_user=kwargs.pop('force_user', False), - remove_empty_directories=kwargs.pop('remove_empty_directories', False)) + if ( + force + or (valid_action == TemplateAction.Upgradable and upgrade_ok) + or (valid_action == TemplateAction.Installable and install_ok) + or (valid_action == TemplateAction.Downgradable and downgrade_ok) + ): + project.apply_template( + template, + force_system=kwargs.pop('force_system', False), + force_user=kwargs.pop('force_user', False), + remove_empty_directories=kwargs.pop('remove_empty_directories', False), + ) ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') elif valid_action != TemplateAction.AlreadyInstalled: raise dont_send( - InvalidTemplateException(f'Could not install {template.identifier} because it is {valid_action.name},' - f' and that is not allowed.', reason=valid_action) + InvalidTemplateException( + f'Could not install {template.identifier} because it is {valid_action.name},' + f' and that is not allowed.', + reason=valid_action, + ) ) else: ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') @staticmethod - def remove_template(project: Project, identifier: Union[str, BaseTemplate], remove_user: bool = True, - remove_empty_directories: bool = True): + def remove_template( + project: Project, + identifier: Union[str, BaseTemplate], + remove_user: bool = True, + remove_empty_directories: bool = True, + ): ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') if not project.resolve_template(identifier): ui.echo(f"{identifier} is not an applicable template") for template in project.resolve_template(identifier): ui.echo(f'Uninstalling {template.identifier}') - project.remove_template(template, remove_user=remove_user, - remove_empty_directories=remove_empty_directories) + project.remove_template( + template, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: if kwargs.get('early_access', None) is not None: self.use_early_access = kwargs.get('early_access', False) - if kwargs["version_source"]: # If true, then the user has not specified a version + if kwargs["version_source"]: # If true, then the user has not specified a version if not self.use_early_access and self.warn_early_access: - ui.echo(f"PROS 4 is now in early access. " - f"If you would like to use it, use the --early-access flag.") + ui.echo(f"PROS 4 is now in early access. " f"If you would like to use it, use the --early-access flag.") elif self.use_early_access: ui.echo(f'Early access is enabled. 
Using PROS 4.') elif self.use_early_access: @@ -317,8 +351,36 @@ def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Pro if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): raise dont_send(ValueError('Will not create a project in user home directory')) for char in str(Path(path)): - if char in ['?', '<', '>', '*', '|', '^', '#', '%', '&', '$', '+', '!', '`', '\'', '=', - '@', '\'', '{', '}', '[', ']', '(', ')', '~'] or ord(char) > 127: + if ( + char + in [ + '?', + '<', + '>', + '*', + '|', + '^', + '#', + '%', + '&', + '$', + '+', + '!', + '`', + '\'', + '=', + '@', + '\'', + '{', + '}', + '[', + ']', + '(', + ')', + '~', + ] + or ord(char) > 127 + ): raise dont_send(ValueError(f'Invalid character found in directory name: \'{char}\'')) proj = Project(path=path, create=True) @@ -335,7 +397,12 @@ def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Pro proj.save() if not no_default_libs: - libraries = self.early_access_libraries if self.use_early_access and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">")) else self.default_libraries + libraries = ( + self.early_access_libraries + if self.use_early_access + and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">")) + else self.default_libraries + ) for library in libraries[proj.target]: try: # remove kernel version so that latest template satisfying query is correctly selected diff --git a/pros/conductor/depots/depot.py b/pros/conductor/depots/depot.py index 364d312f..c4d5397a 100644 --- a/pros/conductor/depots/depot.py +++ b/pros/conductor/depots/depot.py @@ -8,9 +8,14 @@ class Depot(object): - def __init__(self, name: str, location: str, config: Dict[str, Any] = None, - update_frequency: timedelta = timedelta(minutes=1), - config_schema: Dict[str, Dict[str, Any]] = None): + def __init__( + self, + name: str, + location: str, + config: Dict[str, Any] = None, + update_frequency: timedelta = timedelta(minutes=1), + config_schema: Dict[str, Dict[str, Any]] = None, + ): self.name: str = name self.location: str = location self.config: Dict[str, Any] = config or {} @@ -28,8 +33,10 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> def get_remote_templates(self, auto_check_freq: Optional[timedelta] = None, force_check: bool = False, **kwargs): if auto_check_freq is None: auto_check_freq = getattr(self, 'update_frequency', cli_config().update_frequency) - logger(__name__).info(f'Last check of {self.name} was {self.last_remote_update} ' - f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).') + logger(__name__).info( + f'Last check of {self.name} was {self.last_remote_update} ' + f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).' + ) if force_check or datetime.now() - self.last_remote_update > auto_check_freq: with ui.Notification(): ui.echo(f'Updating {self.name}... 
', nl=False) diff --git a/pros/conductor/depots/http_depot.py b/pros/conductor/depots/http_depot.py index dc7e3a25..21e0c404 100644 --- a/pros/conductor/depots/http_depot.py +++ b/pros/conductor/depots/http_depot.py @@ -15,18 +15,18 @@ class HttpDepot(Depot): def __init__(self, name: str, location: str): # Note: If update_frequency = timedelta(minutes=1) isn't included as a parameter, # the beta depot won't be saved in conductor.json correctly - super().__init__(name, location, config_schema={}, update_frequency = timedelta(minutes=1)) + super().__init__(name, location, config_schema={}, update_frequency=timedelta(minutes=1)) def fetch_template(self, template: BaseTemplate, destination: str, **kwargs): import requests + assert 'location' in template.metadata url = template.metadata['location'] tf = download_file(url, ext='zip', desc=f'Downloading {template.identifier}') if tf is None: raise requests.ConnectionError(f'Could not obtain {url}') with zipfile.ZipFile(tf) as zf: - with ui.progressbar(length=len(zf.namelist()), - label=f'Extracting {template.identifier}') as pb: + with ui.progressbar(length=len(zf.namelist()), label=f'Extracting {template.identifier}') as pb: for file in zf.namelist(): zf.extract(file, path=destination) pb.update(1) @@ -35,6 +35,7 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs): def update_remote_templates(self, **_): import requests + response = requests.get(self.location) if response.status_code == 200: self.remote_templates = jsonpickle.decode(response.text) diff --git a/pros/conductor/depots/local_depot.py b/pros/conductor/depots/local_depot.py index 60bff121..f4223dd0 100644 --- a/pros/conductor/depots/local_depot.py +++ b/pros/conductor/depots/local_depot.py @@ -23,8 +23,7 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> template_file = os.path.join(location_dir, 'template.pros') elif zipfile.is_zipfile(location): with zipfile.ZipFile(location) as zf: - with click.progressbar(length=len(zf.namelist()), - label=f"Extracting {location}") as progress_bar: + with click.progressbar(length=len(zf.namelist()), label=f"Extracting {location}") as progress_bar: for file in zf.namelist(): zf.extract(file, path=destination) progress_bar.update(1) @@ -41,6 +40,7 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> if location_dir != destination: n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn]) with click.progressbar(length=n_files, label='Copying to local cache') as pb: + def my_copy(*args): pb.update(1) shutil.copy2(*args) diff --git a/pros/conductor/interactive/NewProjectModal.py b/pros/conductor/interactive/NewProjectModal.py index 9f71c76d..0e08ad53 100644 --- a/pros/conductor/interactive/NewProjectModal.py +++ b/pros/conductor/interactive/NewProjectModal.py @@ -17,8 +17,12 @@ class NewProjectModal(application.Modal[None]): project_name = parameters.Parameter(None) advanced_collapsed = parameters.BooleanParameter(True) - def __init__(self, ctx: Context = None, conductor: Optional[Conductor] = None, - directory=os.path.join(os.path.expanduser('~'), 'My PROS Project')): + def __init__( + self, + ctx: Context = None, + conductor: Optional[Conductor] = None, + directory=os.path.join(os.path.expanduser('~'), 'My PROS Project'), + ): super().__init__('Create a new project') self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() @@ -43,10 +47,11 @@ def confirm(self, *args, **kwargs): 
target=self.targets.value, version=self.kernel_versions.value, no_default_libs=not self.install_default_libraries.value, - project_name=self.project_name.value + project_name=self.project_name.value, ) from pros.conductor.project import ProjectReport + report = ProjectReport(project) ui.finalize('project-report', report) @@ -69,5 +74,5 @@ def build(self) -> Generator[components.Component, None, None]: components.DropDownBox('Kernel Version', self.kernel_versions), components.Checkbox('Install default libraries', self.install_default_libraries), title='Advanced', - collapsed=self.advanced_collapsed + collapsed=self.advanced_collapsed, ) diff --git a/pros/conductor/interactive/UpdateProjectModal.py b/pros/conductor/interactive/UpdateProjectModal.py index 9cb5124e..e20276a4 100644 --- a/pros/conductor/interactive/UpdateProjectModal.py +++ b/pros/conductor/interactive/UpdateProjectModal.py @@ -25,9 +25,7 @@ def is_processing(self, value: bool): def _generate_transaction(self) -> ProjectTransaction: transaction = ProjectTransaction(self.project, self.conductor) - apply_kwargs = dict( - force_apply=self.force_apply_parameter.value - ) + apply_kwargs = dict(force_apply=self.force_apply_parameter.value) if self.name.value != self.project.name: transaction.change_name(self.name.value) if self.project.template_is_applicable(self.current_kernel.value, **apply_kwargs): @@ -53,8 +51,9 @@ def remove_template(): self.new_templates.append(p) - def __init__(self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, - project: Optional[Project] = None): + def __init__( + self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, project: Optional[Project] = None + ): super().__init__('Update a project') self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() @@ -93,16 +92,18 @@ def project_changed(self, new_project: ExistingProjectParameter): None, options=sorted( {t for t in self.conductor.resolve_templates(self.project.templates['kernel'].as_query())}, - key=lambda v: Version(v.version), reverse=True - ) + key=lambda v: Version(v.version), + reverse=True, + ), ) self.current_templates = [ TemplateParameter( None, - options=sorted({ - t - for t in self.conductor.resolve_templates(t.as_query()) - }, key=lambda v: Version(v.version), reverse=True) + options=sorted( + {t for t in self.conductor.resolve_templates(t.as_query())}, + key=lambda v: Version(v.version), + reverse=True, + ), ) for t in self.project.templates.values() if t.name != 'kernel' @@ -131,17 +132,19 @@ def build(self) -> Generator[components.Component, None, None]: yield components.InputBox('Project Name', self.name) yield TemplateListingComponent(self.current_kernel, editable=dict(version=True), removable=False) yield components.Container( - *(TemplateListingComponent(t, editable=dict(version=True), removable=True) for t in - self.current_templates), + *( + TemplateListingComponent(t, editable=dict(version=True), removable=True) + for t in self.current_templates + ), *(TemplateListingComponent(t, editable=True, removable=True) for t in self.new_templates), self.add_template_button, title='Templates', - collapsed=self.templates_collapsed + collapsed=self.templates_collapsed, ) yield components.Container( components.Checkbox('Re-apply all templates', self.force_apply_parameter), title='Advanced', - collapsed=self.advanced_collapsed + collapsed=self.advanced_collapsed, ) yield components.Label('What will happen when you click "Continue":') yield 
components.VerbatimLabel(self._generate_transaction().describe()) diff --git a/pros/conductor/interactive/components.py b/pros/conductor/interactive/components.py index b5bfacb7..cc848fa5 100644 --- a/pros/conductor/interactive/components.py +++ b/pros/conductor/interactive/components.py @@ -26,9 +26,9 @@ def _generate_components(self) -> Generator[components.Component, None, None]: remove_button.on_clicked(lambda: self.template.trigger('removed')) yield remove_button - def __init__(self, template: TemplateParameter, - removable: bool = False, - editable: Union[Dict[str, bool], bool] = True): + def __init__( + self, template: TemplateParameter, removable: bool = False, editable: Union[Dict[str, bool], bool] = True + ): self.template = template self.removable = removable if isinstance(editable, bool): diff --git a/pros/conductor/interactive/parameters.py b/pros/conductor/interactive/parameters.py index 7b0da738..9f05b632 100644 --- a/pros/conductor/interactive/parameters.py +++ b/pros/conductor/interactive/parameters.py @@ -21,10 +21,12 @@ def validate(self, value: str) -> Union[bool, str]: blacklisted_directories = [] # TODO: Proper Windows support if sys.platform == 'win32': - blacklisted_directories.extend([ - os.environ.get('WINDIR', os.path.join('C:', 'Windows')), - os.environ.get('PROGRAMFILES', os.path.join('C:', 'Program Files')) - ]) + blacklisted_directories.extend( + [ + os.environ.get('WINDIR', os.path.join('C:', 'Windows')), + os.environ.get('PROGRAMFILES', os.path.join('C:', 'Program Files')), + ] + ) if any(value.startswith(d) for d in blacklisted_directories): return 'Cannot create project in a system directory' if Path(value).exists() and Path(value).samefile(os.path.expanduser('~')): @@ -64,7 +66,7 @@ def _update_versions(self): if self.name.value in self.options: self.version = p.OptionParameter( self.version.value if self.version else None, - list(sorted(self.options[self.name.value].keys(), reverse=True, key=lambda v: Version(v))) + list(sorted(self.options[self.name.value].keys(), reverse=True, key=lambda v: Version(v))), ) if self.version.value not in self.version.options: @@ -90,7 +92,7 @@ def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate] self.name: p.ValidatableParameter[str] = p.ValidatableParameter( self.value.name, allow_invalid_input, - validate=lambda v: True if v in self.options.keys() else f'Could not find a template named {v}' + validate=lambda v: True if v in self.options.keys() else f'Could not find a template named {v}', ) if not self.value.version and self.value.name in self.options: self.value.version = Spec('>0').select([Version(v) for v in self.options[self.value.name].keys()]) @@ -122,5 +124,6 @@ def removed_changed(): self.removed = not self.removed def is_valid(self, value: BaseTemplate = None): - return self.name.is_valid(value.name if value else None) and \ - self.version.is_valid(value.version if value else None) + return self.name.is_valid(value.name if value else None) and self.version.is_valid( + value.version if value else None + ) diff --git a/pros/conductor/project/ProjectReport.py b/pros/conductor/project/ProjectReport.py index 75d2ff3a..6af81707 100644 --- a/pros/conductor/project/ProjectReport.py +++ b/pros/conductor/project/ProjectReport.py @@ -7,14 +7,19 @@ def __init__(self, project: 'Project'): "target": project.target, "location": os.path.abspath(project.location), "name": project.name, - "templates": [{"name": t.name, "version": t.version, "origin": t.origin} for t in - project.templates.values()] + 
"templates": [ + {"name": t.name, "version": t.version, "origin": t.origin} for t in project.templates.values() + ], } def __str__(self): import tabulate - s = f'PROS Project for {self.project["target"]} at: {self.project["location"]}' \ - f' ({self.project["name"]})' if self.project["name"] else '' + + s = ( + f'PROS Project for {self.project["target"]} at: {self.project["location"]}' f' ({self.project["name"]})' + if self.project["name"] + else '' + ) s += '\n' rows = [t.values() for t in self.project["templates"]] headers = [h.capitalize() for h in self.project["templates"][0].keys()] diff --git a/pros/conductor/project/ProjectTransaction.py b/pros/conductor/project/ProjectTransaction.py index edae1330..8ea963b7 100644 --- a/pros/conductor/project/ProjectTransaction.py +++ b/pros/conductor/project/ProjectTransaction.py @@ -22,8 +22,9 @@ def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: class ApplyTemplateAction(Action): - def __init__(self, template: c.BaseTemplate, apply_kwargs: Dict[str, Any] = None, - suppress_already_installed: bool = False): + def __init__( + self, template: c.BaseTemplate, apply_kwargs: Dict[str, Any] = None, suppress_already_installed: bool = False + ): self.template = template self.apply_kwargs = apply_kwargs or {} self.suppress_already_installed = suppress_already_installed @@ -44,11 +45,15 @@ def describe(self, conductor: c.Conductor, project: c.Project): if action == TemplateAction.Installable: return f'{self.template.identifier} will installed to project.' if action == TemplateAction.Downgradable: - return f'Project will be downgraded to {self.template.identifier} from' \ + return ( + f'Project will be downgraded to {self.template.identifier} from' f' {project.templates[self.template.name].version}.' + ) if action == TemplateAction.Upgradable: - return f'Project will be upgraded to {self.template.identifier} from' \ + return ( + f'Project will be upgraded to {self.template.identifier} from' f' {project.templates[self.template.name].version}.' + ) if action == TemplateAction.AlreadyInstalled: if self.apply_kwargs.get('force_apply'): return f'{self.template.identifier} will be re-applied.' @@ -65,8 +70,9 @@ def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: class RemoveTemplateAction(Action): - def __init__(self, template: c.BaseTemplate, remove_kwargs: Dict[str, Any] = None, - suppress_not_removable: bool = False): + def __init__( + self, template: c.BaseTemplate, remove_kwargs: Dict[str, Any] = None, suppress_not_removable: bool = False + ): self.template = template self.remove_kwargs = remove_kwargs or {} self.suppress_not_removable = suppress_not_removable @@ -162,10 +168,7 @@ def change_name(self, new_name: str): def describe(self) -> str: if len(self.actions) > 0: - return '\n'.join( - f'- {a.describe(self.conductor, self.project)}' - for a in self.actions - ) + return '\n'.join(f'- {a.describe(self.conductor, self.project)}' for a in self.actions) else: return 'No actions necessary.' 
diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index 38885478..355ddda9 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -41,9 +41,13 @@ def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = super(Project, self).__init__(file, error_on_decode=raise_on_error) if 'kernel' in self.__dict__: # Add backwards compatibility with PROS CLI 2 projects by adding kernel as a pseudo-template - self.templates['kernel'] = Template(user_files=self.all_files, name='kernel', - version=self.__dict__['kernel'], target=self.target, - output='bin/output.bin') + self.templates['kernel'] = Template( + user_files=self.all_files, + name='kernel', + version=self.__dict__['kernel'], + target=self.target, + output='bin/output.bin', + ) @property def location(self) -> pathlib.Path: @@ -55,20 +59,23 @@ def path(self): @property def name(self): - return self.project_name or os.path.basename(self.location) \ - or os.path.basename(self.templates['kernel'].metadata['output']) \ - or 'pros' + return ( + self.project_name + or os.path.basename(self.location) + or os.path.basename(self.templates['kernel'].metadata['output']) + or 'pros' + ) @property def all_files(self) -> Set[str]: - return {os.path.relpath(p, self.location) for p in - glob.glob(f'{self.location}/**/*', recursive=True)} + return {os.path.relpath(p, self.location) for p in glob.glob(f'{self.location}/**/*', recursive=True)} def get_template_actions(self, template: BaseTemplate) -> TemplateAction: ui.logger(__name__).debug(template) if template.target != self.target: return TemplateAction.NotApplicable from semantic_version import Spec, Version + if template.name != 'kernel' and Version(self.kernel) not in Spec(template.supported_kernels or '>0'): if template.name in self.templates.keys(): return TemplateAction.AlreadyInstalled @@ -97,10 +104,16 @@ def template_is_upgradeable(self, query: BaseTemplate) -> bool: def template_is_applicable(self, query: BaseTemplate, force_apply: bool = False) -> bool: ui.logger(__name__).debug(query.target) return self.get_template_actions(query) in ( - TemplateAction.ForcedApplicable if force_apply else TemplateAction.UnforcedApplicable) - - def apply_template(self, template: LocalTemplate, force_system: bool = False, force_user: bool = False, - remove_empty_directories: bool = False): + TemplateAction.ForcedApplicable if force_apply else TemplateAction.UnforcedApplicable + ) + + def apply_template( + self, + template: LocalTemplate, + force_system: bool = False, + force_user: bool = False, + remove_empty_directories: bool = False, + ): """ Applies a template to a project :param remove_empty_directories: @@ -125,12 +138,16 @@ def apply_template(self, template: LocalTemplate, force_system: bool = False, fo # remove newly deprecated user files deprecated_user_files = installed_user_files.intersection(self.all_files) - set(template.user_files) if any(deprecated_user_files): - if force_user or confirm(f'The following user files have been deprecated: {deprecated_user_files}. ' - f'Do you want to update them?'): + if force_user or confirm( + f'The following user files have been deprecated: {deprecated_user_files}. ' + f'Do you want to update them?' + ): transaction.extend_rm(deprecated_user_files) else: - logger(__name__).warning(f'Deprecated user files may cause weird quirks. 
See migration guidelines from ' - f'{template.identifier}\'s release notes.') + logger(__name__).warning( + f'Deprecated user files may cause weird quirks. See migration guidelines from ' + f'{template.identifier}\'s release notes.' + ) # Carry forward deprecated user files into the template about to be applied so that user gets warned in # future. template.user_files.extend(deprecated_user_files) @@ -143,8 +160,13 @@ def new_user_filter(new_file: str) -> bool: src/opcontrol.c and src/opcontrol.cpp are friends because they have the same stem src/opcontrol.c and include/opcontrol.h are not because they are in different directories """ - return not any([(os.path.normpath(file) in transaction.effective_state) for file in template.user_files if - os.path.splitext(file)[0] == os.path.splitext(new_file)[0]]) + return not any( + [ + (os.path.normpath(file) in transaction.effective_state) + for file in template.user_files + if os.path.splitext(file)[0] == os.path.splitext(new_file)[0] + ] + ) if force_user: new_user_files = template.real_user_files @@ -153,8 +175,11 @@ def new_user_filter(new_file: str) -> bool: transaction.extend_add(new_user_files, template.location) if any([file in transaction.effective_state for file in template.system_files]) and not force_system: - confirm(f'Some required files for {template.identifier} already exist in the project. ' - f'Overwrite the existing files?', abort=True) + confirm( + f'Some required files for {template.identifier} already exist in the project. ' + f'Overwrite the existing files?', + abort=True, + ) transaction.extend_add(template.system_files, template.location) logger(__name__).debug(transaction) @@ -174,8 +199,9 @@ def remove_template(self, template: Template, remove_user: bool = False, remove_ if remove_user: transaction.extend_rm(real_template.real_user_files) logger(__name__).debug(transaction) - transaction.commit(label=f'Removing {template.identifier}...', - remove_empty_directories=remove_empty_directories) + transaction.commit( + label=f'Removing {template.identifier}...', remove_empty_directories=remove_empty_directories + ) del self.templates[real_template.name] self.save() @@ -195,8 +221,10 @@ def resolve_template(self, query: Union[str, BaseTemplate]) -> List[Template]: return [local_template for local_template in self.templates.values() if local_template.satisfies(query)] def __str__(self): - return f'Project: {self.location} ({self.name}) for {self.target} with ' \ + return ( + f'Project: {self.location} ({self.name}) for {self.target} with ' f'{", ".join([str(t) for t in self.templates.values()])}' + ) @property def kernel(self): @@ -216,6 +244,7 @@ def output(self): def make(self, build_args: List[str]): import subprocess + env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred if os.environ.get('PROS_TOOLCHAIN'): @@ -228,14 +257,24 @@ def make(self, build_args: List[str]): make_cmd = 'make' stdout_pipe = EchoPipe() stderr_pipe = EchoPipe(err=True) - process=None + process = None try: - process = subprocess.Popen(executable=make_cmd, args=[make_cmd, *build_args], cwd=self.directory, env=env, - stdout=stdout_pipe, stderr=stderr_pipe) + process = subprocess.Popen( + executable=make_cmd, + args=[make_cmd, *build_args], + cwd=self.directory, + env=env, + stdout=stdout_pipe, + stderr=stderr_pipe, + ) except Exception as e: if not os.environ.get('PROS_TOOLCHAIN'): - ui.logger(__name__).warn("PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n") - ui.logger(__name__).error(f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n",extra={'sentry':False}) + ui.logger(__name__).warn( + "PROS toolchain not found! Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" + ) + ui.logger(__name__).error( + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + ) stdout_pipe.close() stderr_pipe.close() sys.exit() @@ -244,8 +283,13 @@ def make(self, build_args: List[str]): process.wait() return process.returncode - def make_scan_build(self, build_args: Tuple[str], cdb_file: Optional[Union[str, io.IOBase]] = None, - suppress_output: bool = False, sandbox: bool = False): + def make_scan_build( + self, + build_args: Tuple[str], + cdb_file: Optional[Union[str, io.IOBase]] = None, + suppress_output: bool = False, + sandbox: bool = False, + ): from libscanbuild.compilation import Compilation, CompilationDatabase from libscanbuild.arguments import create_intercept_parser import itertools @@ -255,6 +299,7 @@ def make_scan_build(self, build_args: Tuple[str], cdb_file: Optional[Union[str, if sandbox: import tempfile + td = tempfile.TemporaryDirectory() td_path = td.name.replace("\\", "/") build_args = [*build_args, f'BINDIR={td_path}'] @@ -266,33 +311,44 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil :param args: the parsed and validated command line arguments :return: the exit status of build process. """ - from libscanbuild.intercept import setup_environment, run_build, exec_trace_files, parse_exec_trace, \ - compilations + from libscanbuild.intercept import ( + setup_environment, + run_build, + exec_trace_files, + parse_exec_trace, + compilations, + ) from libear import temporary_directory with temporary_directory(prefix='intercept-') as tmp_dir: # run the build command environment = setup_environment(args, tmp_dir) if os.environ.get('PROS_TOOLCHAIN'): - environment['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + \ - environment['PATH'] + environment['PATH'] = ( + os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + environment['PATH'] + ) if sys.platform == 'darwin': - environment['PATH'] = os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + \ - environment['PATH'] + environment['PATH'] = ( + os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment['PATH'] + ) if not suppress_output: pipe = EchoPipe() else: pipe = subprocess.DEVNULL logger(__name__).debug(self.directory) - exit_code=None + exit_code = None try: exit_code = run_build(args.build, env=environment, stdout=pipe, stderr=pipe, cwd=self.directory) except Exception as e: if not os.environ.get('PROS_TOOLCHAIN'): - ui.logger(__name__).warn("PROS toolchain not found! Please ensure the toolchain is installed correctly and your environment variables are set properly.\n") - ui.logger(__name__).error(f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n",extra={'sentry':False}) + ui.logger(__name__).warn( + "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" + ) + ui.logger(__name__).error( + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + ) if not suppress_output: pipe.close() sys.exit() @@ -310,9 +366,18 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil else: make_cmd = 'make' args = create_intercept_parser().parse_args( - ['--override-compiler', '--use-cc', 'arm-none-eabi-gcc', '--use-c++', 'arm-none-eabi-g++', make_cmd, - *build_args, - 'CC=intercept-cc', 'CXX=intercept-c++']) + [ + '--override-compiler', + '--use-cc', + 'arm-none-eabi-gcc', + '--use-c++', + 'arm-none-eabi-g++', + make_cmd, + *build_args, + 'CC=intercept-cc', + 'CXX=intercept-c++', + ] + ) exit_code, entries = libscanbuild_capture(args) if sandbox and td: @@ -327,8 +392,9 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred if os.environ.get('PROS_TOOLCHAIN'): env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] - cc_sysroot = subprocess.run([make_cmd, 'cc-sysroot'], env=env, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=self.directory) + cc_sysroot = subprocess.run( + [make_cmd, 'cc-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + ) lines = str(cc_sysroot.stderr.decode()).splitlines() + str(cc_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cc_sysroot_includes = [] @@ -342,8 +408,9 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil continue if copy: cc_sysroot_includes.append(f'-isystem{line}') - cxx_sysroot = subprocess.run([make_cmd, 'cxx-sysroot'], env=env, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, cwd=self.directory) + cxx_sysroot = subprocess.run( + [make_cmd, 'cxx-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + ) lines = str(cxx_sysroot.stderr.decode()).splitlines() + str(cxx_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cxx_sysroot_includes = [] @@ -362,8 +429,9 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil if not cdb_file: cdb_file = os.path.join(self.directory, 'compile_commands.json') if isinstance(cdb_file, str) and os.path.isfile(cdb_file): - old_entries = itertools.filterfalse(lambda entry: entry.source in new_sources, - CompilationDatabase.load(cdb_file)) + old_entries = itertools.filterfalse( + lambda entry: entry.source in new_sources, CompilationDatabase.load(cdb_file) + ) else: old_entries = [] @@ -395,6 +463,7 @@ def entry_map(entry: Compilation): if isinstance(cdb_file, str): cdb_file = open(cdb_file, 'w') import json + json.dump(json_entries, cdb_file, sort_keys=True, indent=4) return exit_code @@ -402,6 +471,7 @@ def entry_map(entry: Compilation): def compile(self, build_args: List[str], scan_build: Optional[bool] = None): if scan_build is None: from pros.config.cli_config import cli_config + scan_build = cli_config().use_build_compile_commands return self.make_scan_build(build_args) if scan_build else self.make(build_args) @@ -413,8 +483,11 @@ def find_project(path: str, recurse_times: int = 10): if os.path.isdir(path): for _ in range(recurse_times): if path is not None and os.path.isdir(path): - files = [f for f in os.listdir(path) - if os.path.isfile(os.path.join(path, f)) and f.lower() == 
'project.pros'] + files = [ + f + for f in os.listdir(path) + if os.path.isfile(os.path.join(path, f)) and f.lower() == 'project.pros' + ] if len(files) == 1: # found a project.pros file! logger(__name__).info(f'Found Project Path: {os.path.join(path, files[0])}') return os.path.join(path, files[0]) diff --git a/pros/config/cli_config.py b/pros/config/cli_config.py index 8c962047..79b3d1f0 100644 --- a/pros/config/cli_config.py +++ b/pros/config/cli_config.py @@ -6,6 +6,7 @@ import click import pros.common + # import pros.conductor.providers.github_releases as githubreleases from pros.config.config import Config @@ -41,6 +42,7 @@ def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifest pros.common.logger(__name__).info('Fetching upgrade manifest...') import requests import jsonpickle + r = requests.get('https://purduesigbots.github.io/pros-mainline/cli-updates.json') pros.common.logger(__name__).debug(r) if r.status_code == 200: diff --git a/pros/config/config.py b/pros/config/config.py index 984b668a..e0557a44 100644 --- a/pros/config/config.py +++ b/pros/config/config.py @@ -31,16 +31,17 @@ def __init__(self, file, error_on_decode=False): if isinstance(result, dict): if 'py/state' in result: class_name = '{}.{}'.format(self.__class__.__module__, self.__class__.__qualname__) - logger(__name__).debug( - 'Coercing {} to {}'.format(result['py/object'], class_name)) + logger(__name__).debug('Coercing {} to {}'.format(result['py/object'], class_name)) old_object = result['py/object'] try: result['py/object'] = class_name result = jsonpickle.unpickler.Unpickler().restore(result) except (json.decoder.JSONDecodeError, AttributeError) as e: logger(__name__).debug(e) - logger(__name__).warning(f'Couldn\'t coerce {file} ({old_object}) to ' - f'{class_name}. Using rudimentary coercion') + logger(__name__).warning( + f'Couldn\'t coerce {file} ({old_object}) to ' + f'{class_name}. 
Using rudimentary coercion' + ) self.__dict__.update(result['py/state']) else: self.__dict__.update(result) @@ -69,6 +70,7 @@ def __init__(self, file, error_on_decode=False): logger(__name__).debug('Failed to save {} ({})'.format(file, e)) from pros.common.sentry import add_context + add_context(self) def __getstate__(self): @@ -100,7 +102,7 @@ def save(self, file: str = None) -> None: logger(__name__).debug('Saved {}'.format(file)) def migrate(self, migration): - for (old, new) in migration.iteritems(): + for old, new in migration.iteritems(): if self.__dict__.get(old) is not None: self.__dict__[new] = self.__dict__[old] del self.__dict__[old] diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index 247e6b31..6ac1b2b8 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -13,32 +13,30 @@ PROS ANALYTICS CLASS """ -class Analytics(): + +class Analytics: def __init__(self): from pros.config.cli_config import cli_config as get_cli_config + self.cli_config = get_cli_config() - #If GA hasn't been setup yet (first time install/update) + # If GA hasn't been setup yet (first time install/update) if not self.cli_config.ga: - #Default values for GA - self.cli_config.ga = { - "enabled": "True", - "ga_id": "UA-84548828-8", - "u_id": str(uuid.uuid4()) - } + # Default values for GA + self.cli_config.ga = {"enabled": "True", "ga_id": "UA-84548828-8", "u_id": str(uuid.uuid4())} self.cli_config.save() self.sent = False - #Variables that the class will use + # Variables that the class will use self.gaID = self.cli_config.ga['ga_id'] self.useAnalytics = self.cli_config.ga['enabled'] self.uID = self.cli_config.ga['u_id'] self.pendingRequests = [] - def send(self,action): + def send(self, action): if not self.useAnalytics or self.sent: return - self.sent=True # Prevent Send from being called multiple times + self.sent = True # Prevent Send from being called multiple times try: - #Payload to be sent to GA, idk what some of them are but it works + # Payload to be sent to GA, idk what some of them are but it works payload = { 'v': 1, 'tid': self.gaID, @@ -50,24 +48,24 @@ def send(self,action): 'ea': action, 'el': 'CLI', 'ev': '1', - 'ni': 0 + 'ni': 0, } session = FuturesSession() - #Send payload to GA servers - future = session.post(url=url, - data=payload, - headers={'User-Agent': agent}, - timeout=5.0) + # Send payload to GA servers + future = session.post(url=url, data=payload, headers={'User-Agent': agent}, timeout=5.0) self.pendingRequests.append(future) except Exception: from pros.cli.common import logger - logger(__name__).warning("Unable to send analytics. Do you have a stable internet connection?", extra={'sentry': False}) + + logger(__name__).warning( + "Unable to send analytics. 
Do you have a stable internet connection?", extra={'sentry': False} + ) def set_use(self, value: bool): - #Sets if GA is being used or not + # Sets if GA is being used or not self.useAnalytics = value self.cli_config.ga['enabled'] = self.useAnalytics self.cli_config.save() @@ -78,7 +76,7 @@ def process_requests(self): try: response = future.result() - if not response.status_code==200: + if not response.status_code == 200: print("Something went wrong while sending analytics!") print(response) @@ -87,7 +85,6 @@ def process_requests(self): except Exception: print("Something went wrong while sending analytics!") - self.pendingRequests.clear() return responses diff --git a/pros/serial/devices/vex/cortex_device.py b/pros/serial/devices/vex/cortex_device.py index 02dbfe0f..792c3344 100644 --- a/pros/serial/devices/vex/cortex_device.py +++ b/pros/serial/devices/vex/cortex_device.py @@ -26,26 +26,28 @@ class SystemStatus(object): def __init__(self, data: Tuple[bytes, ...]): self.joystick_firmware = data[0:2] self.robot_firmware = data[2:4] - self.joystick_battery = float(data[4]) * .059 - self.robot_battery = float(data[5]) * .059 - self.backup_battery = float(data[6]) * .059 + self.joystick_battery = float(data[4]) * 0.059 + self.robot_battery = float(data[5]) * 0.059 + self.backup_battery = float(data[6]) * 0.059 self.flags = CortexDevice.SystemStatusFlags(data[7]) def __str__(self): - return f' Tether: {str(self.flags)}\n' \ - f' Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V ' \ - f'(Backup: {self.backup_battery:1.2f} V)\n' \ - f'Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ ' \ - f'{self.joystick_battery:1.2f} V' + return ( + f' Tether: {str(self.flags)}\n' + f' Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V ' + f'(Backup: {self.backup_battery:1.2f} V)\n' + f'Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ ' + f'{self.joystick_battery:1.2f} V' + ) class SystemStatusFlags(IntFlag): - DL_MODE = (1 << 0) - TETH_VN2 = (1 << 2) - FCS_CONNECT = (1 << 3) - TETH_USB = (1 << 4) - DIRECT_USB = (1 << 5) - FCS_AUTON = (1 << 6) - FCS_DISABLE = (1 << 7) + DL_MODE = 1 << 0 + TETH_VN2 = 1 << 2 + FCS_CONNECT = 1 << 3 + TETH_USB = 1 << 4 + DIRECT_USB = 1 << 5 + FCS_AUTON = 1 << 6 + FCS_DISABLE = 1 << 7 TETH_BITS = DL_MODE | TETH_VN2 | TETH_USB @@ -141,14 +143,14 @@ def _rx_ack(self, timeout: float = 0.01): def _txrx_ack_packet(self, command: int, timeout=0.1): """ - Goes through a send/receive cycle with a VEX device. - Transmits the command with the optional additional payload, then reads and parses the outer layer - of the response - :param command: Command to send the device - :param retries: Number of retries to attempt to parse the output before giving up and raising an error - :return: Returns a dictionary containing the received command field and the payload. Correctly computes - the payload length even if the extended command (0x56) is used (only applies to the V5). - """ + Goes through a send/receive cycle with a VEX device. + Transmits the command with the optional additional payload, then reads and parses the outer layer + of the response + :param command: Command to send the device + :param retries: Number of retries to attempt to parse the output before giving up and raising an error + :return: Returns a dictionary containing the received command field and the payload. 
Correctly computes + the payload length even if the extended command (0x56) is used (only applies to the V5). + """ tx = self._tx_packet(command) self._rx_ack(timeout=timeout) logger(__name__).debug('TX: {}'.format(bytes_to_str(tx))) diff --git a/pros/serial/devices/vex/crc.py b/pros/serial/devices/vex/crc.py index 2e4270d7..764a42c0 100644 --- a/pros/serial/devices/vex/crc.py +++ b/pros/serial/devices/vex/crc.py @@ -13,11 +13,11 @@ def __init__(self, size: int, polynomial: int): if crc_accumulator & (1 << (self._size - 1)): crc_accumulator = (crc_accumulator << 1) ^ self._polynomial else: - crc_accumulator = (crc_accumulator << 1) + crc_accumulator = crc_accumulator << 1 self._table.append(crc_accumulator) def compute(self, data: Iterable[int], accumulator: int = 0): for d in data: - i = ((accumulator >> (self._size - 8)) ^ d) & 0xff + i = ((accumulator >> (self._size - 8)) ^ d) & 0xFF accumulator = ((accumulator << 8) ^ self._table[i]) & ((1 << self._size) - 1) return accumulator diff --git a/pros/serial/devices/vex/message.py b/pros/serial/devices/vex/message.py index 8a45b0c4..f3ede25c 100644 --- a/pros/serial/devices/vex/message.py +++ b/pros/serial/devices/vex/message.py @@ -4,16 +4,14 @@ class Message(bytes): - def __new__(cls, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, - bookmarks: Dict[str, bytes] = None): + def __new__(cls, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, bookmarks: Dict[str, bytes] = None): if internal_rx is None: internal_rx = rx if isinstance(internal_rx, int): internal_rx = bytes([internal_rx]) return super().__new__(cls, internal_rx) - def __init__(self, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, - bookmarks: Dict[str, bytes] = None): + def __init__(self, rx: bytes, tx: bytes, internal_rx: Union[bytes, int] = None, bookmarks: Dict[str, bytes] = None): if internal_rx is None: internal_rx = rx if isinstance(internal_rx, int): diff --git a/pros/serial/devices/vex/stm32_device.py b/pros/serial/devices/vex/stm32_device.py index eecfdc47..36e2321e 100644 --- a/pros/serial/devices/vex/stm32_device.py +++ b/pros/serial/devices/vex/stm32_device.py @@ -19,7 +19,7 @@ class STM32Device(GenericDevice, SystemDevice): ACK_BYTE = 0x79 NACK_BYTE = 0xFF - NUM_PAGES = 0xff + NUM_PAGES = 0xFF PAGE_SIZE = 0x2000 def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: bool = True): @@ -29,7 +29,7 @@ def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: if do_negoitate: # self.port.write(b'\0' * 255) if must_initialize: - self._txrx_command(0x7f, checksum=False) + self._txrx_command(0x7F, checksum=False) try: self.get(n_retries=0) except: @@ -38,7 +38,7 @@ def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: self.port.rts = 0 for _ in itertools.repeat(None, times=3): time.sleep(0.01) - self._txrx_command(0x7f, checksum=False) + self._txrx_command(0x7F, checksum=False) time.sleep(0.01) self.get() @@ -47,7 +47,8 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft file.seek(0, 0) if file_len > (self.NUM_PAGES * self.PAGE_SIZE): raise VEXCommError( - f'File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)') + f'File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)' + ) if hasattr(file, 'name'): display_name = file.name @@ -117,11 +118,13 @@ def go(self, start_address: int): logger(__name__).info(f'STM32: Go 0x{start_address:x}') self._txrx_command(0x21) try: - 
self._txrx_command(struct.pack('>I', start_address), timeout=5.) + self._txrx_command(struct.pack('>I', start_address), timeout=5.0) except VEXCommError: - logger(__name__).warning('STM32 Bootloader did not acknowledge GO command. ' - 'The program may take a moment to begin running ' - 'or the device should be rebooted.') + logger(__name__).warning( + 'STM32 Bootloader did not acknowledge GO command. ' + 'The program may take a moment to begin running ' + 'or the device should be rebooted.' + ) @retries def write_memory(self, start_address: int, data: bytes): @@ -139,7 +142,7 @@ def erase_all(self): if not self.commands[6] == 0x43: raise VEXCommError('Standard erase not supported on this device (only extended erase)') self._txrx_command(0x43) - self._txrx_command(0xff) + self._txrx_command(0xFF) @retries def erase_memory(self, page_numbers: List[int]): @@ -156,8 +159,8 @@ def extended_erase(self, page_numbers: List[int]): logger(__name__).info(f'STM32: Extended Erase pages: {page_numbers}') if not self.commands[6] == 0x44: raise IOError('Extended erase not supported on this device (only standard erase)') - assert 0 < len(page_numbers) < 0xfff0 - assert all([0 <= p <= 0xffff for p in page_numbers]) + assert 0 < len(page_numbers) < 0xFFF0 + assert all([0 <= p <= 0xFFFF for p in page_numbers]) self._txrx_command(0x44) self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f'>{len(page_numbers)}H', *page_numbers)])) @@ -166,7 +169,7 @@ def extended_erase_special(self, command: int): logger(__name__).info(f'STM32: Extended special erase: {command:x}') if not self.commands[6] == 0x44: raise IOError('Extended erase not supported on this device (only standard erase)') - assert 0xfffd <= command <= 0xffff + assert 0xFFFD <= command <= 0xFFFF self._txrx_command(0x44) self._txrx_command(struct.pack('>H', command)) @@ -175,7 +178,7 @@ def _txrx_command(self, command: Union[int, bytes], timeout: float = 0.01, check if isinstance(command, bytes): message = command + (bytes([reduce(operator.xor, command, 0x00)]) if checksum else bytes([])) elif isinstance(command, int): - message = bytearray([command, ~command & 0xff] if checksum else [command]) + message = bytearray([command, ~command & 0xFF] if checksum else [command]) else: raise ValueError(f'Expected command to be bytes or int but got {type(command)}') logger(__name__).debug(f'STM32 TX: {bytes_to_str(message)}') diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index 24483eb0..e4820496 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -33,16 +33,22 @@ def find_v5_ports(p_type: str): def filter_vex_ports(p): - return p.vid is not None and p.vid in [0x2888, 0x0501] or \ - p.name is not None and ('VEX' in p.name or 'V5' in p.name) + return ( + p.vid is not None + and p.vid in [0x2888, 0x0501] + or p.name is not None + and ('VEX' in p.name or 'V5' in p.name) + ) def filter_v5_ports(p, locations, names): - return (p.location is not None and any([p.location.endswith(l) for l in locations])) or \ - (p.name is not None and any([n in p.name for n in names])) or \ - (p.description is not None and any([n in p.description for n in names])) + return ( + (p.location is not None and any([p.location.endswith(l) for l in locations])) + or (p.name is not None and any([n in p.name for n in names])) + or (p.description is not None and any([n in p.description for n in names])) + ) def filter_v5_ports_mac(p, device): - return (p.device is not None and p.device.endswith(device)) + 
return p.device is not None and p.device.endswith(device) ports = [p for p in list_all_comports() if filter_vex_ports(p)] @@ -152,18 +158,22 @@ class ControllerFlags(IntFlag): def __init__(self, data: tuple): from semantic_version import Version + self.system_version = Version('{}.{}.{}-{}.{}'.format(*data[0:5])) self.product = V5Device.SystemVersion.Product(data[5]) self.product_flags = self.flag_map[self.product](data[6]) def __str__(self): - return f'System Version: {self.system_version}\n' \ - f' Product: {self.product.name}\n' \ + return ( + f'System Version: {self.system_version}\n' + f' Product: {self.product.name}\n' f' Product Flags: {self.product_flags.value:x}' + ) class SystemStatus(object): def __init__(self, data: tuple): from semantic_version import Version + self.system_version = Version('{}.{}.{}-{}'.format(*data[0:4])) self.cpu0_version = Version('{}.{}.{}-{}'.format(*data[4:8])) self.cpu1_version = Version('{}.{}.{}-{}'.format(*data[8:12])) @@ -179,7 +189,7 @@ def __init__(self, port: BasePort): super().__init__(port) class DownloadChannel(object): - def __init__(self, device: 'V5Device', timeout: float = 5.): + def __init__(self, device: 'V5Device', timeout: float = 5.0): self.device = device self.timeout = timeout self.did_switch = False @@ -187,19 +197,21 @@ def __init__(self, device: 'V5Device', timeout: float = 5.): def __enter__(self): version = self.device.query_system_version() if version.product == V5Device.SystemVersion.Product.CONTROLLER: - self.device.default_timeout = 2. + self.device.default_timeout = 2.0 if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: raise VEXCommError('V5 Controller doesn\'t appear to be connected to a V5 Brain', version) ui.echo('Transferring V5 to download channel') self.device.ft_transfer_channel('download') self.did_switch = True logger(__name__).debug('Sleeping for a while to let V5 start channel transfer') - time.sleep(.25) # wait at least 250ms before starting to poll controller if it's connected yet + time.sleep(0.25) # wait at least 250ms before starting to poll controller if it's connected yet version = self.device.query_system_version() start_time = time.time() # ask controller every 250 ms if it's connected until it is - while V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags and \ - time.time() - start_time < self.timeout: + while ( + V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags + and time.time() - start_time < self.timeout + ): version = self.device.query_system_version() time.sleep(0.25) if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: @@ -227,14 +239,17 @@ def can_compress(self): @property def is_wireless(self): version = self.query_system_version() - return version.product == V5Device.SystemVersion.Product.CONTROLLER and \ - V5Device.SystemVersion.ControllerFlags.CONNECTED in version.product_flags + return ( + version.product == V5Device.SystemVersion.Product.CONTROLLER + and V5Device.SystemVersion.ControllerFlags.CONNECTED in version.product_flags + ) def generate_cold_hash(self, project: Project, extra: dict): keys = {k: t.version for k, t in project.templates.items()} keys.update(extra) from hashlib import md5 from base64 import b64encode + msg = str(sorted(keys, key=lambda t: t[0])).encode('ascii') name = b64encode(md5(msg).digest()).rstrip(b'=').decode('ascii') if Spec('<=1.0.0-27').match(self.status['cpu0_version']): @@ -248,8 +263,10 @@ def upload_project(self, project: Project, 
**kwargs): monolith_path = project.location.joinpath(project.output) if monolith_path.exists(): logger(__name__).debug(f'Monolith exists! ({monolith_path})') - if 'hot_output' in project.templates['kernel'].metadata and \ - 'cold_output' in project.templates['kernel'].metadata: + if ( + 'hot_output' in project.templates['kernel'].metadata + and 'cold_output' in project.templates['kernel'].metadata + ): hot_path = project.location.joinpath(project.templates['kernel'].metadata['hot_output']) cold_path = project.location.joinpath(project.templates['kernel'].metadata['cold_output']) upload_hot_cold = False @@ -271,7 +288,8 @@ def upload_project(self, project: Project, **kwargs): kwargs['linked_file'] = cold kwargs['linked_remote_name'] = self.generate_cold_hash(project, {}) kwargs['linked_file_addr'] = int( - project.templates['kernel'].metadata.get('cold_addr', 0x03800000)) + project.templates['kernel'].metadata.get('cold_addr', 0x03800000) + ) kwargs['addr'] = int(project.templates['kernel'].metadata.get('hot_addr', 0x07800000)) return self.write_program(hot, **kwargs) if not monolith_path.exists(): @@ -284,7 +302,7 @@ def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigP default_icon = 'USER902x.bmp' if Spec('>=1.0.0-22').match(self.status['cpu0_version']) else 'USER999x.bmp' project_ini['project'] = { 'version': str(kwargs.get('ide_version') or get_version()), - 'ide': str(kwargs.get('ide') or 'PROS') + 'ide': str(kwargs.get('ide') or 'PROS'), } project_ini['program'] = { 'version': kwargs.get('version', '0.0.0') or '0.0.0', @@ -292,7 +310,7 @@ def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigP 'slot': slot, 'icon': kwargs.get('icon', default_icon) or default_icon, 'description': kwargs.get('description', 'Created with PROS'), - 'date': datetime.now().isoformat() + 'date': datetime.now().isoformat(), } if ini: project_ini.update(ini) @@ -302,11 +320,22 @@ def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigP return ini_str.getvalue() @with_download_channel - def write_program(self, file: typing.BinaryIO, remote_name: str = None, ini: ConfigParser = None, slot: int = 0, - file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, - target: str = 'flash', quirk: int = 0, linked_file: Optional[typing.BinaryIO] = None, - linked_remote_name: Optional[str] = None, linked_file_addr: Optional[int] = None, - compress_bin: bool = True, **kwargs): + def write_program( + self, + file: typing.BinaryIO, + remote_name: str = None, + ini: ConfigParser = None, + slot: int = 0, + file_len: int = -1, + run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, + target: str = 'flash', + quirk: int = 0, + linked_file: Optional[typing.BinaryIO] = None, + linked_remote_name: Optional[str] = None, + linked_file_addr: Optional[int] = None, + compress_bin: bool = True, + **kwargs, + ): with ui.Notification(): action_string = f'Uploading program "{remote_name}"' finish_string = f'Finished uploading "{remote_name}"' @@ -319,8 +348,16 @@ def write_program(self, file: typing.BinaryIO, remote_name: str = None, ini: Con ui.echo(action_string) remote_base = f'slot_{slot + 1}' if target == 'ddr': - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', - target='ddr', run_after=run_after, linked_filename=linked_remote_name, **kwargs) + self.write_file( + file, + f'{remote_base}.bin', + file_len=file_len, + type='bin', + target='ddr', + run_after=run_after, + linked_filename=linked_remote_name, + 
**kwargs, + ) return if not isinstance(ini, ConfigParser): ini = ConfigParser() @@ -334,31 +371,53 @@ def write_program(self, file: typing.BinaryIO, remote_name: str = None, ini: Con logger(__name__).info(f'Created ini: {ini_file}') if linked_file is not None: - self.upload_library(linked_file, remote_name=linked_remote_name, addr=linked_file_addr, - compress=compress_bin, force_upload=kwargs.pop('force_upload_linked', False)) + self.upload_library( + linked_file, + remote_name=linked_remote_name, + addr=linked_file_addr, + compress=compress_bin, + force_upload=kwargs.pop('force_upload_linked', False), + ) bin_kwargs = {k: v for k, v in kwargs.items() if v in ['addr']} - if (quirk & 0xff) == 1: + if (quirk & 0xFF) == 1: # WRITE BIN FILE - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', run_after=run_after, - linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs) + self.write_file( + file, + f'{remote_base}.bin', + file_len=file_len, + type='bin', + run_after=run_after, + linked_filename=linked_remote_name, + compress=compress_bin, + **bin_kwargs, + **kwargs, + ) with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: # WRITE INI FILE self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) - elif (quirk & 0xff) == 0: + elif (quirk & 0xFF) == 0: # STOP PROGRAM self.execute_program_file('', run=False) with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: # WRITE INI FILE self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) # WRITE BIN FILE - self.write_file(file, f'{remote_base}.bin', file_len=file_len, type='bin', run_after=run_after, - linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs) + self.write_file( + file, + f'{remote_base}.bin', + file_len=file_len, + type='bin', + run_after=run_after, + linked_filename=linked_remote_name, + compress=compress_bin, + **bin_kwargs, + **kwargs, + ) else: raise ValueError(f'Unknown quirk option: {quirk}') ui.finalize('upload', f'{finish_string} to V5') - def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, - target_name: Optional[str] = None): + def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, target_name: Optional[str] = None): """ Uses algorithms, for loops, and if statements to determine what files should be removed @@ -379,10 +438,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, # assume all libraries unused_libraries = [ (vid, l['filename']) - for l - in [self.get_file_metadata_by_idx(i) - for i in range(0, self.get_dir_count(vid=vid)) - ] + for l in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid=vid))] ] if name is not None: if (vid, name) in unused_libraries: @@ -395,9 +451,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, programs: Dict[str, Dict] = { # need the linked file metadata, so we have to use the get_file_metadata_by_name command p['filename']: self.get_file_metadata_by_name(p['filename'], vid='user') - for p - in [self.get_file_metadata_by_idx(i) - for i in range(0, self.get_dir_count(vid='user'))] + for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid='user'))] if p['type'] == 'bin' } library_usage: Dict[Tuple[int, str], List[str]] = defaultdict(list) @@ -440,19 +494,23 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, if len(used_libraries) > 3: libraries = [ (linked_vid, linked_name, 
self.get_file_metadata_by_name(linked_name, vid=linked_vid)['timestamp']) - for linked_vid, linked_name - in used_libraries + for linked_vid, linked_name in used_libraries ] - library_usage_timestamps = sorted([ - ( - linked_vid, - linked_name, - # get the most recent timestamp of the library and all files linking to it - max(linked_timestamp, *[programs[p]['timestamp'] for p in library_usage[(linked_vid, linked_name)]]) - ) - for linked_vid, linked_name, linked_timestamp - in libraries - ], key=lambda t: t[2]) + library_usage_timestamps = sorted( + [ + ( + linked_vid, + linked_name, + # get the most recent timestamp of the library and all files linking to it + max( + linked_timestamp, + *[programs[p]['timestamp'] for p in library_usage[(linked_vid, linked_name)]], + ), + ) + for linked_vid, linked_name, linked_timestamp in libraries + ], + key=lambda t: t[2], + ) evicted_files: List[Union[str, Tuple[int, str]]] = [] evicted_file_list = '' for evicted_library in library_usage_timestamps[:3]: @@ -462,17 +520,27 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, evicted_file_list += ', '.join(library_usage[evicted_file_list[0:2]]) evicted_file_list = evicted_file_list[:2] # remove last ", " assert len(evicted_files) > 0 - if confirm(f'There are too many files on the V5. PROS can remove the following suggested old files: ' - f'{evicted_file_list}', - title='Confirm file eviction plan:'): + if confirm( + f'There are too many files on the V5. PROS can remove the following suggested old files: ' + f'{evicted_file_list}', + title='Confirm file eviction plan:', + ): for file in evicted_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: self.erase_file(file_name=file, erase_all=True, vid='user') - def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_len: int = -1, vid: int_str = 'pros', - force_upload: bool = False, compress: bool = True, **kwargs): + def upload_library( + self, + file: typing.BinaryIO, + remote_name: str = None, + file_len: int = -1, + vid: int_str = 'pros', + force_upload: bool = False, + compress: bool = True, + **kwargs, + ): """ Upload a file used for linking. Contains the logic to check if the file is already present in the filesystem and to prompt the user if we need to evict a library (and user programs). @@ -504,20 +572,32 @@ def upload_library(self, file: typing.BinaryIO, remote_name: str = None, file_le ui.echo('Library is already onboard V5') return else: - logger(__name__).warning(f'Library onboard doesn\'t match! ' - f'Length was {response["size"]} but expected {file_len} ' - f'CRC: was {response["crc"]:x} but expected {crc32:x}') + logger(__name__).warning( + f'Library onboard doesn\'t match! 
' + f'Length was {response["size"]} but expected {file_len} ' + f'CRC: was {response["crc"]:x} but expected {crc32:x}' + ) except VEXCommError as e: logger(__name__).debug(e) else: logger(__name__).info('Skipping already-uploaded checks') logger(__name__).debug('Going to worry about uploading the file now') - self.ensure_library_space(remote_name, vid, ) + self.ensure_library_space( + remote_name, + vid, + ) self.write_file(file, remote_name, file_len, vid=vid, **kwargs) - def read_file(self, file: typing.IO[bytes], remote_file: str, vid: int_str = 'user', target: int_str = 'flash', - addr: Optional[int] = None, file_len: Optional[int] = None): + def read_file( + self, + file: typing.IO[bytes], + remote_file: str, + vid: int_str = 'user', + target: int_str = 'flash', + addr: Optional[int] = None, + file_len: Optional[int] = None, + ): if isinstance(vid, str): vid = self.vid_map[vid.lower()] if addr is None: @@ -529,8 +609,12 @@ def read_file(self, file: typing.IO[bytes], remote_file: str, vid: int_str = 'us file_len = ft_meta['file_size'] if wireless and file_len > 0x25000: - confirm(f'You\'re about to download {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider downloading directly with a wire.', abort=True, default=False) + confirm( + f'You\'re about to download {file_len} bytes wirelessly. This could take some time, and you should ' + f'consider downloading directly with a wire.', + abort=True, + default=False, + ) max_packet_size = ft_meta['max_packet_size'] with ui.progressbar(length=file_len, label='Downloading {}'.format(remote_file)) as progress: @@ -543,9 +627,17 @@ def read_file(self, file: typing.IO[bytes], remote_file: str, vid: int_str = 'us logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) self.ft_complete() - def write_file(self, file: typing.BinaryIO, remote_file: str, file_len: int = -1, - run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, linked_filename: Optional[str] = None, - linked_vid: int_str = 'pros', compress: bool = False, **kwargs): + def write_file( + self, + file: typing.BinaryIO, + remote_file: str, + file_len: int = -1, + run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, + linked_filename: Optional[str] = None, + linked_vid: int_str = 'pros', + compress: bool = False, + **kwargs, + ): if file_len < 0: file_len = file.seek(0, 2) file.seek(0, 0) @@ -556,8 +648,12 @@ def write_file(self, file: typing.BinaryIO, remote_file: str, file_len: int = -1 file, file_len = compress_file(file, file_len) if self.is_wireless and file_len > 0x25000: - confirm(f'You\'re about to upload {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider uploading directly with a wire.', abort=True, default=False) + confirm( + f'You\'re about to upload {file_len} bytes wirelessly. 
This could take some time, and you should ' + f'consider uploading directly with a wire.', + abort=True, + default=False, + ) crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) addr = kwargs.get('addr', 0x03800000) @@ -602,9 +698,9 @@ def capture_screen(self) -> Tuple[List[List[int]], int, int]: for x in range(width - 1): if x < 480: px = rx[y * width + x] - data[y].append((px & 0xff0000) >> 16) - data[y].append((px & 0x00ff00) >> 8) - data[y].append(px & 0x0000ff) + data[y].append((px & 0xFF0000) >> 16) + data[y].append((px & 0x00FF00) >> 8) + data[y].append(px & 0x0000FF) return data, 480, height @@ -659,7 +755,7 @@ def ft_initialize(self, file_name: str, **kwargs) -> Dict[str, Any]: 'type': 'bin', 'timestamp': datetime.now(), 'version': 0x01_00_00_00, - 'name': file_name + 'name': file_name, } options.update({k: v for k, v in kwargs.items() if k in options and v is not None}) @@ -677,9 +773,20 @@ def ft_initialize(self, file_name: str, **kwargs) -> Dict[str, Any]: options['timestamp'] = int((options['timestamp'] - datetime(2000, 1, 1)).total_seconds()) logger(__name__).debug('Initializing file transfer w/: {}'.format(options)) - tx_payload = struct.pack("<4B3I4s2I24s", options['function'], options['target'], options['vid'], - options['options'], options['length'], options['addr'], options['crc'], - options['type'], options['timestamp'], options['version'], options['name']) + tx_payload = struct.pack( + "<4B3I4s2I24s", + options['function'], + options['target'], + options['vid'], + options['options'], + options['length'], + options['addr'], + options['crc'], + options['type'], + options['timestamp'], + options['version'], + options['name'], + ) rx = self._txrx_ext_struct(0x11, tx_payload, " int: + def get_dir_count(self, vid: int_str = 1, options: int = 0) -> int: logger(__name__).debug('Sending ext 0x16 command') if isinstance(vid, str): vid = self.vid_map[vid.lower()] @@ -747,8 +853,7 @@ def get_dir_count(self, vid: int_str = 1, options: int = 0) \ return ret @retries - def get_file_metadata_by_idx(self, file_idx: int, options: int = 0) \ - -> Dict[str, Any]: + def get_file_metadata_by_idx(self, file_idx: int, options: int = 0) -> Dict[str, Any]: logger(__name__).debug('Sending ext 0x17 command') tx_payload = struct.pack("<2B", file_idx, options) rx = self._txrx_ext_struct(0x17, tx_payload, " Dict[str, Any]: + def get_file_metadata_by_name(self, file_name: str, vid: int_str = 1, options: int = 0) -> Dict[str, Any]: logger(__name__).debug('Sending ext 0x19 command') if isinstance(vid, str): vid = self.vid_map[vid.lower()] @@ -795,22 +899,29 @@ def set_program_file_metadata(self, file_name: str, **kwargs): options = { 'vid': 'user', 'options': 0, - 'addr': 0xff_ff_ff_ff, + 'addr': 0xFF_FF_FF_FF, 'type': b'\xff\xff\xff\xff', - 'timestamp': 0xff_ff_ff_ff, - 'version': 0xff_ff_ff_ff + 'timestamp': 0xFF_FF_FF_FF, + 'version': 0xFF_FF_FF_FF, } # Dict[str, Any] options.update(**kwargs) if isinstance(options['vid'], str): options['vid'] = self.vid_map[options['vid'].lower()] if isinstance(options['timestamp'], datetime): - assert (isinstance(options['timestamp'], datetime)) + assert isinstance(options['timestamp'], datetime) options['timestamp'] = (options['timestamp'] - datetime(2000, 1, 1)).get_seconds() if isinstance(options['type'], str): options['type'] = options['type'].encode(encoding='ascii') - tx_payload = struct.pack("<2BI4s2I24s", options['vid'], options['options'], - options['addr'], options['type'], options['timestamp'], - options['version'], 
file_name.encode(encoding='ascii')) + tx_payload = struct.pack( + "<2BI4s2I24s", + options['vid'], + options['options'], + options['addr'], + options['type'], + options['timestamp'], + options['version'], + file_name.encode(encoding='ascii'), + ) ret = self._txrx_ext_packet(0x1A, tx_payload, 0) logger(__name__).debug('Completed ext 0x1A command') return ret @@ -821,7 +932,7 @@ def erase_file(self, file_name: str, erase_all: bool = False, vid: int_str = 'us if isinstance(vid, str): vid = self.vid_map[vid.lower()] options = 0 - options |= (0x80 if erase_all else 0) + options |= 0x80 if erase_all else 0 tx_payload = struct.pack('<2B24s', vid, options, file_name.encode(encoding='ascii')) recv = self._txrx_ext_packet(0x1B, tx_payload, 0) self.ft_complete() @@ -829,8 +940,7 @@ def erase_file(self, file_name: str, erase_all: bool = False, vid: int_str = 'us return recv @retries - def get_program_file_slot(self, file_name: str, vid: int = 1, options: int = 0) \ - -> Dict[str, Any]: + def get_program_file_slot(self, file_name: str, vid: int = 1, options: int = 0) -> Dict[str, Any]: logger(__name__).debug('Sending ext 0x1C command') tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) ret = self._txrx_ext_struct(0x1C, tx_payload, " SystemStatus: from semantic_version import Version + logger(__name__).debug('Sending ext 0x22 command') version = self.query_system_version() - if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec('<1.0.13')) or \ - (version.product == V5Device.SystemVersion.Product.CONTROLLER and version.system_version in Spec('<1.0.0-0.70')): + if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec('<1.0.13')) or ( + version.product == V5Device.SystemVersion.Product.CONTROLLER + and version.system_version in Spec('<1.0.0-0.70') + ): schema = ' bytearray: encoded_kv = f'{kv}\0'.encode(encoding='ascii') tx_payload = struct.pack(f'<{len(encoded_kv)}s', encoded_kv) # Because the length of the kernel variables is not known, use None to indicate we are recieving an unknown length. - ret = self._txrx_ext_packet(0x2e, tx_payload, 1, check_length=False, check_ack=True) + ret = self._txrx_ext_packet(0x2E, tx_payload, 1, check_length=False, check_ack=True) logger(__name__).debug('Completed ext 0x2e command') return ret @@ -918,25 +1031,28 @@ def kv_read(self, kv: str) -> bytearray: def kv_write(self, kv: str, payload: Union[Iterable, bytes, bytearray, str]): logger(__name__).debug('Sending ext 0x2f command') encoded_kv = f'{kv}\0'.encode(encoding='ascii') - kv_to_max_bytes = { - 'teamnumber': 7, - 'robotname': 16 - } + kv_to_max_bytes = {'teamnumber': 7, 'robotname': 16} if len(payload) > kv_to_max_bytes.get(kv, 254): print(f'Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).') # Trim down size of payload to fit within the 255 byte limit and add null terminator. 
- payload = payload[:kv_to_max_bytes.get(kv, 254)] + "\0" + payload = payload[: kv_to_max_bytes.get(kv, 254)] + "\0" if isinstance(payload, str): payload = payload.encode(encoding='ascii') tx_fmt = f'<{len(encoded_kv)}s{len(payload)}s' tx_payload = struct.pack(tx_fmt, encoded_kv, payload) - self._txrx_ext_packet(0x2f, tx_payload, 1, check_length=False, check_ack=True) + self._txrx_ext_packet(0x2F, tx_payload, 1, check_length=False, check_ack=True) logger(__name__).debug('Completed ext 0x2f command') return payload - def _txrx_ext_struct(self, command: int, tx_data: Union[Iterable, bytes, bytearray], - unpack_fmt: str, check_length: bool = True, check_ack: bool = True, - timeout: Optional[float] = None) -> Tuple: + def _txrx_ext_struct( + self, + command: int, + tx_data: Union[Iterable, bytes, bytearray], + unpack_fmt: str, + check_length: bool = True, + check_ack: bool = True, + timeout: Optional[float] = None, + ) -> Tuple: """ Transmits and receives an extended command to the V5, automatically unpacking the values according to unpack_fmt which gets passed into struct.unpack. The size of the payload is determined from the fmt string @@ -948,14 +1064,21 @@ def _txrx_ext_struct(self, command: int, tx_data: Union[Iterable, bytes, bytearr :param check_ack: If true, then checks the first byte of the extended payload as an AK byte :return: A tuple unpacked according to the unpack_fmt """ - rx = self._txrx_ext_packet(command, tx_data, struct.calcsize(unpack_fmt), - check_length=check_length, check_ack=check_ack, timeout=timeout) + rx = self._txrx_ext_packet( + command, + tx_data, + struct.calcsize(unpack_fmt), + check_length=check_length, + check_ack=check_ack, + timeout=timeout, + ) logger(__name__).debug('Unpacking with format: {}'.format(unpack_fmt)) return struct.unpack(unpack_fmt, rx) @classmethod - def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: bool = True, - check_length: bool = True) -> Message: + def _rx_ext_packet( + cls, msg: Message, command: int, rx_length: int, check_ack: bool = True, check_length: bool = True + ) -> Message: """ Parse a received packet :param msg: data to parse @@ -965,10 +1088,10 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b :param tx_payload: what was sent, used if an exception needs to be thrown :return: The payload of the extended message """ - assert (msg['command'] == 0x56) + assert msg['command'] == 0x56 if not cls.VEX_CRC16.compute(msg.rx) == 0: raise VEXCommError("CRC of message didn't match 0: {}".format(cls.VEX_CRC16.compute(msg.rx)), msg) - assert (msg['payload'][0] == command) + assert msg['payload'][0] == command msg = msg['payload'][1:-2] if check_ack: nacks = { @@ -985,7 +1108,7 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b 0xD8: "Data downloaded does not match initial length", 0xD9: "Directory entry does not exist", 0xDA: "Max user files, no more room for another user program", - 0xDB: "User file exists" + 0xDB: "User file exists", } if msg[0] in nacks.keys(): raise VEXCommError("Device NACK'd with reason: {}".format(nacks[msg[0]]), msg) @@ -998,12 +1121,19 @@ def _rx_ext_packet(cls, msg: Message, command: int, rx_length: int, check_ack: b raise VEXCommError(f'Received length is less than {rx_length} (got {len(msg)}).', msg) elif len(msg) > rx_length and check_length: ui.echo( - f'WARNING: Recieved length is more than {rx_length} (got {len(msg)}). 
Consider upgrading the PROS (CLI Version: {get_version()}).') + f'WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()}).' + ) return msg - def _txrx_ext_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray], - rx_length: int, check_length: bool = True, - check_ack: bool = True, timeout: Optional[float] = None) -> Message: + def _txrx_ext_packet( + self, + command: int, + tx_data: Union[Iterable, bytes, bytearray], + rx_length: int, + check_length: bool = True, + check_ack: bool = True, + timeout: Optional[float] = None, + ) -> Message: """ Transmits and receives an extended command to the V5. :param command: Extended command code @@ -1023,13 +1153,13 @@ def _form_extended_payload(cls, msg: int, payload: Union[Iterable, bytes, bytear if payload is None: payload = bytearray() payload_length = len(payload) - assert payload_length <= 0x7f_ff + assert payload_length <= 0x7F_FF if payload_length >= 0x80: - payload_length = [(payload_length >> 8) | 0x80, payload_length & 0xff] + payload_length = [(payload_length >> 8) | 0x80, payload_length & 0xFF] else: payload_length = [payload_length] packet = bytearray([msg, *payload_length, *payload]) crc = cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) - packet = bytearray([*packet, crc >> 8, crc & 0xff]) - assert (cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) == 0) + packet = bytearray([*packet, crc >> 8, crc & 0xFF]) + assert cls.VEX_CRC16.compute(bytes([*cls._form_simple_packet(0x56), *packet])) == 0 return packet diff --git a/pros/serial/devices/vex/vex_device.py b/pros/serial/devices/vex/vex_device.py index ff9862d4..5866afca 100644 --- a/pros/serial/devices/vex/vex_device.py +++ b/pros/serial/devices/vex/vex_device.py @@ -72,8 +72,10 @@ def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[i response_header_stack = bytearray(response_header) rx = bytearray() if not rx == bytearray(response_header): - raise IOError(f"Couldn't find the response header in the device response after {timeout} s. " - f"Got {rx.hex()} but was expecting {response_header.hex()}") + raise IOError( + f"Couldn't find the response header in the device response after {timeout} s. 
" + f"Got {rx.hex()} but was expecting {response_header.hex()}" + ) rx.extend(self.port.read(1)) command = rx[-1] rx.extend(self.port.read(1)) @@ -81,14 +83,10 @@ def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[i if command == 0x56 and (payload_length & 0x80) == 0x80: logger(__name__).debug('Found an extended message payload') rx.extend(self.port.read(1)) - payload_length = ((payload_length & 0x7f) << 8) + rx[-1] + payload_length = ((payload_length & 0x7F) << 8) + rx[-1] payload = self.port.read(payload_length) rx.extend(payload) - return { - 'command': command, - 'payload': payload, - 'raw': rx - } + return {'command': command, 'payload': payload, 'raw': rx} def _tx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None): tx = self._form_simple_packet(command) @@ -100,8 +98,9 @@ def _tx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, No self.port.flush() return tx - def _txrx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None, - timeout: Optional[float] = None) -> Message: + def _txrx_packet( + self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None, timeout: Optional[float] = None + ) -> Message: """ Goes through a send/receive cycle with a VEX device. Transmits the command with the optional additional payload, then reads and parses the outer layer @@ -121,4 +120,4 @@ def _txrx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, @staticmethod def _form_simple_packet(msg: int) -> bytearray: - return bytearray([0xc9, 0x36, 0xb8, 0x47, msg]) + return bytearray([0xC9, 0x36, 0xB8, 0x47, msg]) diff --git a/pros/serial/interactive/UploadProjectModal.py b/pros/serial/interactive/UploadProjectModal.py index f14dde7e..7fc3ad79 100644 --- a/pros/serial/interactive/UploadProjectModal.py +++ b/pros/serial/interactive/UploadProjectModal.py @@ -44,11 +44,11 @@ def update_slots(self): if self.port.is_valid() and bool(self.port.value): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort + device = V5Device(DirectPort(self.port.value)) slot_options = [ f'{slot}' + ('' if program is None else f' (Currently: {program})') - for slot, program in - device.used_slots().items() + for slot, program in device.used_slots().items() ] else: slot_options = [str(i) for i in range(1, 9)] @@ -67,9 +67,7 @@ def update_slots(self): else: # or just slot 1 selected = slot_options[0] - self.advanced_options['slot'] = parameters.OptionParameter( - selected, slot_options - ) + self.advanced_options['slot'] = parameters.OptionParameter(selected, slot_options) def update_comports(self): list_all_comports.cache_clear() @@ -108,9 +106,7 @@ def project_changed(self, new_project: ExistingProjectParameter): 'description': parameters.Parameter( self.project.upload_options.get('description', 'Created with PROS') ), - 'compress_bin': parameters.BooleanParameter( - self.project.upload_options.get('compress_bin', True) - ) + 'compress_bin': parameters.BooleanParameter(self.project.upload_options.get('compress_bin', True)), } self.update_slots() else: @@ -125,6 +121,7 @@ def project_changed(self, new_project: ExistingProjectParameter): def confirm(self, *args, **kwargs): from pros.cli.upload import upload from click import get_current_context + kwargs = {'path': None, 'project': self.project, 'port': self.port.value} savable_kwargs = {} if self.project.target == 'v5': @@ -145,9 +142,7 @@ def confirm(self, *args, **kwargs): @property def can_confirm(self): 
advanced_valid = all( - p.is_valid() - for p in self.advanced_options.values() - if isinstance(p, parameters.ValidatableParameter) + p.is_valid() for p in self.advanced_options.values() if isinstance(p, parameters.ValidatableParameter) ) return self.project is not None and self.port.is_valid() and advanced_valid @@ -167,4 +162,5 @@ def build(self) -> Generator[components.Component, None, None]: components.InputBox('Description', self.advanced_options['description']), components.Checkbox('Compress Binary', self.advanced_options['compress_bin']), title='Advanced V5 Options', - collapsed=self.advanced_options_collapsed) + collapsed=self.advanced_options_collapsed, + ) diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index 4850b2b9..a70468c1 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -5,6 +5,7 @@ from .base_port import BasePort, PortConnectionException, PortException from .direct_port import DirectPort + # from .v5_wireless_port import V5WirelessPort diff --git a/pros/serial/ports/direct_port.py b/pros/serial/ports/direct_port.py index fa225f54..5ac33f3d 100644 --- a/pros/serial/ports/direct_port.py +++ b/pros/serial/ports/direct_port.py @@ -11,22 +11,24 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial.Serial: try: logger(__name__).debug(f'Opening serial port {port_name}') - port = serial.Serial(port_name, baudrate=115200, bytesize=serial.EIGHTBITS, - parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE) + port = serial.Serial( + port_name, + baudrate=115200, + bytesize=serial.EIGHTBITS, + parity=serial.PARITY_NONE, + stopbits=serial.STOPBITS_ONE, + ) port.timeout = timeout port.inter_byte_timeout = 0.2 return port except serial.SerialException as e: - if any(msg in str(e) for msg in [ - 'Access is denied', 'Errno 16', 'Errno 13' - ]): + if any(msg in str(e) for msg in ['Access is denied', 'Errno 16', 'Errno 13']): tb = sys.exc_info()[2] raise dont_send(ConnectionRefusedException(port_name, e).with_traceback(tb)) else: raise dont_send(PortNotFoundException(port_name, e)) - class DirectPort(BasePort): def __init__(self, port_name: str, **kwargs): diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index 1a869f38..54eaf0ec 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -1,6 +1,7 @@ import os import serial + class ConnectionRefusedException(IOError): def __init__(self, port_name: str, reason: Exception): self.__cause__ = reason @@ -10,9 +11,12 @@ def __str__(self): extra = '' if os.name == 'posix': extra = 'adding yourself to dialout group ' - return f"could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " \ - f"firmware utilities; moving to a different USB port; {extra}or " \ + return ( + f"could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " + f"firmware utilities; moving to a different USB port; {extra}or " f"restarting the device." + ) + class PortNotFoundException(serial.SerialException): def __init__(self, port_name: str, reason: Exception): @@ -23,6 +27,8 @@ def __str__(self): extra = '' if os.name == 'posix': extra = 'adding yourself to dialout group ' - return f"Port not found: Could not open port '{self.port_name}'. 
Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " \ - f"firmware utilities; moving to a different USB port; {extra}or " \ + return ( + f"Port not found: Could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " + f"firmware utilities; moving to a different USB port; {extra}or " f"restarting the device." + ) diff --git a/pros/serial/ports/serial_share_bridge.py b/pros/serial/ports/serial_share_bridge.py index b632a5dc..186299dc 100644 --- a/pros/serial/ports/serial_share_bridge.py +++ b/pros/serial/ports/serial_share_bridge.py @@ -24,8 +24,13 @@ def get_to_device_port_num(serial_port_name: str) -> int: class SerialShareBridge(object): - def __init__(self, serial_port_name: str, base_addr: str = '127.0.0.1', - to_device_port_num: int = None, from_device_port_num: int = None): + def __init__( + self, + serial_port_name: str, + base_addr: str = '127.0.0.1', + to_device_port_num: int = None, + from_device_port_num: int = None, + ): self._serial_port_name = serial_port_name self._base_addr = base_addr if to_device_port_num is None: @@ -79,18 +84,21 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): log_file_name = os.path.join(get_pros_dir(), 'logs', 'serial-share-bridge.log') handler = logging.handlers.TimedRotatingFileHandler(log_file_name, backupCount=1) handler.setLevel(logging.DEBUG) - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})' \ - .format(self._serial_port_name) + fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})'.format( + self._serial_port_name + ) handler.setFormatter(logging.Formatter(fmt_str)) pros_logger.addHandler(handler) self.zmq_ctx = zmq.Context() # timeout is none, so blocks indefinitely. Helps reduce CPU usage when there's nothing being recv self.port = DirectPort(self._serial_port_name, timeout=None) - self.from_device_thread = threading.Thread(target=self._from_device_loop, name='From Device Reader', - daemon=False, args=(initialization_barrier,)) - self.to_device_thread = threading.Thread(target=self._to_device_loop, name='To Device Reader', - daemon=False, args=(initialization_barrier,)) + self.from_device_thread = threading.Thread( + target=self._from_device_loop, name='From Device Reader', daemon=False, args=(initialization_barrier,) + ) + self.to_device_thread = threading.Thread( + target=self._to_device_loop, name='To Device Reader', daemon=False, args=(initialization_barrier,) + ) self.dying = threading.Event() # type: threading.Event self.from_device_thread.start() self.to_device_thread.start() @@ -98,8 +106,11 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): while not self.dying.wait(10000): pass - logger(__name__).info('Main serial share bridge thread is dying. Everything else should be dead: {}'.format( - threading.active_count() - 1)) + logger(__name__).info( + 'Main serial share bridge thread is dying. Everything else should be dead: {}'.format( + threading.active_count() - 1 + ) + ) self.kill(do_join=True) except Exception as e: initialization_barrier.abort() @@ -132,14 +143,20 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): logger(__name__).error('Unexpected error handling {}'.format(bytes_to_str(msg[:-1]))) logger(__name__).exception(e) errors += 1 - logger(__name__).info('Current from device broadcasting error rate: {} errors. {} successful. 
{}%' - .format(errors, rxd, errors / (errors + rxd))) + logger(__name__).info( + 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + errors, rxd, errors / (errors + rxd) + ) + ) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) logger(__name__).warning('From Device Broadcaster is dying now.') - logger(__name__).info('Current from device broadcasting error rate: {} errors. {} successful. {}%' - .format(errors, rxd, errors / (errors + rxd))) + logger(__name__).info( + 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + errors, rxd, errors / (errors + rxd) + ) + ) try: self.kill(do_join=False) except: diff --git a/pros/serial/ports/serial_share_port.py b/pros/serial/ports/serial_share_port.py index f329ac7e..5f5691de 100644 --- a/pros/serial/ports/serial_share_port.py +++ b/pros/serial/ports/serial_share_port.py @@ -3,8 +3,14 @@ class SerialSharePort(BasePort): - def __init__(self, port_name: str, topic: bytes = b'sout', addr: str = '127.0.0.1', - to_device_port: int = None, from_device_port: int = None): + def __init__( + self, + port_name: str, + topic: bytes = b'sout', + addr: str = '127.0.0.1', + to_device_port: int = None, + from_device_port: int = None, + ): self.port_name = port_name self.topic = topic self._base_addr = addr @@ -26,12 +32,14 @@ def __init__(self, port_name: str, topic: bytes = b'sout', addr: str = '127.0.0. self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b'kdbg') self.from_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._from_port_num)) logger(__name__).info( - 'Connected from device as a subscriber on tcp://{}:{}'.format(self._base_addr, self._from_port_num)) + 'Connected from device as a subscriber on tcp://{}:{}'.format(self._base_addr, self._from_port_num) + ) self.to_device_sock = self.ctx.socket(zmq.PUB) # type: zmq.Socket self.to_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._to_port_num)) logger(__name__).info( - 'Connected to device as a publisher on tcp://{}:{}'.format(self._base_addr, self._to_port_num)) + 'Connected to device as a publisher on tcp://{}:{}'.format(self._base_addr, self._to_port_num) + ) self.alive = threading.Event() self.watchdog_thread = threading.Thread(target=self._kick_watchdog, name='Client Kicker') diff --git a/pros/serial/terminal/terminal.py b/pros/serial/terminal/terminal.py index a0c78264..b07b9769 100644 --- a/pros/serial/terminal/terminal.py +++ b/pros/serial/terminal/terminal.py @@ -64,7 +64,6 @@ def __exit__(self, *args, **kwargs): import msvcrt import ctypes - class Out(object): """file-like wrapper that uses os.write""" @@ -77,7 +76,6 @@ def flush(self): def write(self, s): os.write(self.fd, s) - class Console(ConsoleBase): def __init__(self): super(Console, self).__init__() @@ -104,7 +102,7 @@ def getkey(self): z = msvcrt.getwch() if z == chr(13): return chr(10) - elif z in (chr(0), chr(0x0e)): # functions keys, ignore + elif z in (chr(0), chr(0x0E)): # functions keys, ignore msvcrt.getwch() else: return z @@ -113,14 +111,13 @@ def cancel(self): # CancelIo, CancelSynchronousIo do not seem to work when using # getwch, so instead, send a key to the window with the console hwnd = ctypes.windll.kernel32.GetConsoleWindow() - ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0d, 0) + ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0D, 0) elif os.name == 'posix': import atexit import termios import select - class Console(ConsoleBase): def __init__(self): super(Console, self).__init__() @@ -131,8 
+128,7 @@ def __init__(self): self.old = termios.tcgetattr(self.fd) atexit.register(self.cleanup) if sys.version_info < (3, 0): - self.enc_stdin = codecs. \ - getreader(sys.stdin.encoding)(sys.stdin) + self.enc_stdin = codecs.getreader(sys.stdin.encoding)(sys.stdin) else: self.enc_stdin = sys.stdin @@ -144,13 +140,12 @@ def setup(self): termios.tcsetattr(self.fd, termios.TCSANOW, new) def getkey(self): - ready, _, _ = select.select([self.enc_stdin, self.pipe_r], [], - [], None) + ready, _, _ = select.select([self.enc_stdin, self.pipe_r], [], [], None) if self.pipe_r in ready: os.read(self.pipe_r, 1) return c = self.enc_stdin.read(1) - if c == chr(0x7f): + if c == chr(0x7F): c = chr(8) # map the BS key (which yields DEL) to backspace return c @@ -161,16 +156,15 @@ def cleanup(self): termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old) else: - raise NotImplementedError( - 'Sorry no implementation for your platform ({})' - ' available.'.format(sys.platform)) + raise NotImplementedError('Sorry no implementation for your platform ({})' ' available.'.format(sys.platform)) class Terminal(object): """This class is loosely based off of the pyserial miniterm""" - def __init__(self, port_instance: StreamDevice, transformations=(), - output_raw: bool = False, request_banner: bool = True): + def __init__( + self, port_instance: StreamDevice, transformations=(), output_raw: bool = False, request_banner: bool = True + ): self.device = port_instance self.device.subscribe(b'sout') self.device.subscribe(b'serr') @@ -189,8 +183,7 @@ def __init__(self, port_instance: StreamDevice, transformations=(), def _start_rx(self): self._reader_alive = True - self.receiver_thread = threading.Thread(target=self.reader, - name='serial-rx-term') + self.receiver_thread = threading.Thread(target=self.reader, name='serial-rx-term') self.receiver_thread.daemon = True self.receiver_thread.start() @@ -200,8 +193,7 @@ def _stop_rx(self): def _start_tx(self): self._transmitter_alive = True - self.transmitter_thread = threading.Thread(target=self.transmitter, - name='serial-tx-term') + self.transmitter_thread = threading.Thread(target=self.transmitter, name='serial-tx-term') self.transmitter_thread.daemon = True self.transmitter_thread.start() @@ -226,9 +218,11 @@ def reader(self): elif data[0] == b'serr': text = '{}{}{}'.format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL) elif data[0] == b'kdbg': - text = '{}\n\nKERNEL DEBUG:\t{}{}\n'.format(colorama.Back.GREEN + colorama.Style.BRIGHT, - decode_bytes_to_str(data[1]), - colorama.Style.RESET_ALL) + text = '{}\n\nKERNEL DEBUG:\t{}{}\n'.format( + colorama.Back.GREEN + colorama.Style.BRIGHT, + decode_bytes_to_str(data[1]), + colorama.Style.RESET_ALL, + ) elif data[0] != b'': text = '{}{}'.format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1])) else: diff --git a/pros/upgrade/instructions/download_instructions.py b/pros/upgrade/instructions/download_instructions.py index 48f8b49e..4432b383 100644 --- a/pros/upgrade/instructions/download_instructions.py +++ b/pros/upgrade/instructions/download_instructions.py @@ -9,6 +9,7 @@ class DownloadInstruction(UpgradeInstruction): """ Downloads a file """ + def __init__(self, url='', extension=None, download_description=None, success_explanation=None): self.url: str = url self.extension: Optional[str] = extension @@ -24,8 +25,9 @@ def perform_upgrade(self) -> UpgradeResult: return UpgradeResult(False, explanation=f'Failed to download required file. 
({e})', exception=e) if self.success_explanation: - explanation = self.success_explanation.replace('//FILE\\\\', file) \ - .replace('//SHORT\\\\', os.path.split(file)[1]) + explanation = self.success_explanation.replace('//FILE\\\\', file).replace( + '//SHORT\\\\', os.path.split(file)[1] + ) else: explanation = f'Downloaded {os.path.split(file)[1]}' return UpgradeResult(True, explanation=explanation, file=file, origin=self.url) diff --git a/pros/upgrade/instructions/explorer_instructions.py b/pros/upgrade/instructions/explorer_instructions.py index ae843ba3..d54666f1 100644 --- a/pros/upgrade/instructions/explorer_instructions.py +++ b/pros/upgrade/instructions/explorer_instructions.py @@ -11,6 +11,7 @@ def perform_upgrade(self) -> UpgradeResult: result = super().perform_upgrade() if result.successful: import click + click.launch(getattr(result, 'file')) return result diff --git a/pros/upgrade/manifests/upgrade_manifest_v1.py b/pros/upgrade/manifests/upgrade_manifest_v1.py index 51ba9346..102a06a5 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v1.py +++ b/pros/upgrade/manifests/upgrade_manifest_v1.py @@ -26,8 +26,10 @@ def describe_update(self) -> str: :return: """ if self.needs_upgrade: - return f'There is an update available! {self.version} is the latest version.\n' \ - f'Go to {self.info_url} to learn more.' + return ( + f'There is an update available! {self.version} is the latest version.\n' + f'Go to {self.info_url} to learn more.' + ) else: return f'You are up to date. ({self.version})' @@ -41,6 +43,7 @@ def can_perform_upgrade(self) -> bool: def perform_upgrade(self) -> UpgradeResult: logger(__name__).debug(self.__dict__) from click import launch + return UpgradeResult(launch(self.info_url) == 0) def describe_post_install(self, **kwargs) -> str: diff --git a/pros/upgrade/manifests/upgrade_manifest_v2.py b/pros/upgrade/manifests/upgrade_manifest_v2.py index b024aa3d..469f906b 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v2.py +++ b/pros/upgrade/manifests/upgrade_manifest_v2.py @@ -40,6 +40,7 @@ def platform(self) -> 'PlatformsV2': return self._platform if getattr(sys, 'frozen', False): import _constants + frozen_platform = getattr(_constants, 'FROZEN_PLATFORM_V1', None) if isinstance(frozen_platform, str): if frozen_platform.startswith('Windows86'): @@ -51,6 +52,7 @@ def platform(self) -> 'PlatformsV2': else: try: from pip._vendor import pkg_resources + results = [p for p in pkg_resources.working_set if p.project_name.startswith('pros-cli')] if any(results): self._platform = PlatformsV2.Pip @@ -72,7 +74,4 @@ def perform_upgrade(self) -> UpgradeResult: return instructions.perform_upgrade() def __repr__(self): - return repr({ - 'platform': self.platform, - **self.__dict__ - }) + return repr({'platform': self.platform, **self.__dict__}) diff --git a/setup.py b/setup.py index f26a9741..df2b69b8 100644 --- a/setup.py +++ b/setup.py @@ -13,10 +13,5 @@ author_email='pros_development@cs.purdue.edu', description='Command Line Interface for managing PROS projects', install_requires=install_reqs, - entry_points={ - 'console_scripts': [ - 'pros=pros.cli.main:main', - 'prosv5=pros.cli.main:main' - ] - } + entry_points={'console_scripts': ['pros=pros.cli.main:main', 'prosv5=pros.cli.main:main']}, ) diff --git a/version.py b/version.py index 39542079..658e3a4a 100644 --- a/version.py +++ b/version.py @@ -4,18 +4,23 @@ try: with open(os.devnull, 'w') as devnull: - v = subprocess.check_output(['git', 'describe', '--tags', '--dirty', '--abbrev'], stderr=stdout).decode().strip() + v = ( + 
subprocess.check_output(['git', 'describe', '--tags', '--dirty', '--abbrev'], stderr=stdout) + .decode() + .strip() + ) if '-' in v: - bv = v[:v.index('-')] - bv = bv[:bv.rindex('.') + 1] + str(int(bv[bv.rindex('.') + 1:]) + 1) + bv = v[: v.index('-')] + bv = bv[: bv.rindex('.') + 1] + str(int(bv[bv.rindex('.') + 1 :]) + 1) sempre = 'dirty' if v.endswith('-dirty') else 'commit' pippre = 'alpha' if v.endswith('-dirty') else 'pre' build = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip() - number_since = subprocess.check_output( - ['git', 'rev-list', v[:v.index('-')] + '..HEAD', '--count']).decode().strip() + number_since = ( + subprocess.check_output(['git', 'rev-list', v[: v.index('-')] + '..HEAD', '--count']).decode().strip() + ) semver = bv + '-' + sempre + '+' + build pipver = bv + pippre + number_since - winver = v[:v.index('-')] + '.' + number_since + winver = v[: v.index('-')] + '.' + number_since else: semver = v pipver = v From d13ceeec060358cba57b97cbfc435a923a246a0b Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Thu, 1 Feb 2024 23:58:23 -0500 Subject: [PATCH 14/44] Add isort --- .isort.cfg | 5 +++++ .pre-commit-config.yaml | 7 +++++++ 2 files changed, 12 insertions(+) create mode 100644 .isort.cfg diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000..d555e8fd --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,5 @@ +[settings] + +line_length = 120 + +profile = black diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dc46dd72..049cc229 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,13 @@ repos: - id: check-ast - id: trailing-whitespace - id: requirements-txt-fixer + - repo: https://github.com/pycqa/isort + rev: "5.13.2" + hooks: + - id: isort + name: isort (python) + types: [python] + args: [--settings-file=.isort.cfg] - repo: https://github.com/psf/black rev: 24.1.1 hooks: From 10c8f8dd72bd3b6bd146c6432c0c551d2beec1ff Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Thu, 1 Feb 2024 23:59:13 -0500 Subject: [PATCH 15/44] Run isort --- pros/cli/build.py | 3 +- pros/cli/click_classes.py | 5 +-- pros/cli/common.py | 5 +-- pros/cli/conductor_utils.py | 2 ++ pros/cli/interactive.py | 5 ++- pros/cli/main.py | 32 +++++++++---------- pros/cli/terminal.py | 9 +++--- pros/cli/upload.py | 4 +-- pros/cli/user_script.py | 5 +-- pros/cli/v5_utils.py | 11 ++++--- pros/common/sentry.py | 5 ++- .../ui/interactive/components/button.py | 2 +- .../ui/interactive/components/container.py | 1 + .../ui/interactive/components/input_groups.py | 1 + pros/common/ui/interactive/observable.py | 1 + .../renderers/MachineOutputRenderer.py | 3 +- pros/common/utils.py | 7 ++-- pros/conductor/__init__.py | 2 +- pros/conductor/conductor.py | 1 + pros/conductor/depots/depot.py | 1 + pros/conductor/depots/http_depot.py | 3 +- pros/conductor/depots/local_depot.py | 5 +-- pros/conductor/interactive/NewProjectModal.py | 1 + .../interactive/UpdateProjectModal.py | 1 + pros/conductor/interactive/__init__.py | 3 +- pros/conductor/project/__init__.py | 19 +++++------ pros/config/cli_config.py | 2 +- pros/config/config.py | 1 + pros/ga/analytics.py | 7 ++-- pros/serial/devices/__init__.py | 2 +- pros/serial/devices/vex/cortex_device.py | 4 +-- pros/serial/devices/vex/v5_device.py | 9 +++--- pros/serial/devices/vex/v5_user_device.py | 1 + pros/serial/devices/vex/vex_device.py | 3 +- 
pros/serial/interactive/UploadProjectModal.py | 3 +- pros/serial/ports/__init__.py | 3 +- pros/serial/ports/direct_port.py | 3 +- pros/serial/ports/exceptions.py | 1 + pros/serial/ports/serial_share_bridge.py | 3 +- pros/serial/terminal/terminal.py | 5 ++- pros/upgrade/instructions/__init__.py | 2 +- .../instructions/download_instructions.py | 1 + pros/upgrade/manifests/__init__.py | 2 +- pros/upgrade/manifests/upgrade_manifest_v1.py | 1 + pros/upgrade/manifests/upgrade_manifest_v2.py | 3 +- pros/upgrade/upgrade_manager.py | 10 +++--- setup.py | 3 +- 47 files changed, 123 insertions(+), 83 deletions(-) diff --git a/pros/cli/build.py b/pros/cli/build.py index b9089e43..25f2098b 100644 --- a/pros/cli/build.py +++ b/pros/cli/build.py @@ -3,8 +3,9 @@ import click import pros.conductor as c -from pros.ga.analytics import analytics from pros.cli.common import default_options, logger, project_option, pros_root, shadow_command +from pros.ga.analytics import analytics + from .upload import upload diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py index 2c017556..58aad9b5 100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -1,11 +1,12 @@ from collections import defaultdict from typing import * -from rich_click import RichCommand import click.decorators from click import ClickException -from pros.conductor.project import Project as p +from rich_click import RichCommand + from pros.common.utils import get_version +from pros.conductor.project import Project as p class PROSFormatted(RichCommand): diff --git a/pros/cli/common.py b/pros/cli/common.py index 417c42fc..30108507 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -1,9 +1,10 @@ import click.core from pros.common.sentry import add_tag -from pros.ga.analytics import analytics -from pros.common.utils import * from pros.common.ui import echo +from pros.common.utils import * +from pros.ga.analytics import analytics + from .click_classes import * diff --git a/pros/cli/conductor_utils.py b/pros/cli/conductor_utils.py index fd25bef3..7e148ed7 100644 --- a/pros/cli/conductor_utils.py +++ b/pros/cli/conductor_utils.py @@ -6,11 +6,13 @@ from typing import * import click + import pros.common.ui as ui import pros.conductor as c from pros.common.utils import logger from pros.conductor.templates import ExternalTemplate from pros.ga.analytics import analytics + from .common import default_options, template_query from .conductor import conductor diff --git a/pros/cli/interactive.py b/pros/cli/interactive.py index 260706fb..c36b72a2 100644 --- a/pros/cli/interactive.py +++ b/pros/cli/interactive.py @@ -1,10 +1,13 @@ import os from typing import * + import click + import pros.conductor as c -from .common import PROSGroup, default_options, project_option, pros_root from pros.ga.analytics import analytics +from .common import PROSGroup, default_options, project_option, pros_root + @pros_root def interactive_cli(): diff --git a/pros/cli/main.py b/pros/cli/main.py index 10b5077c..1f539b35 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -1,15 +1,21 @@ +import ctypes import logging - -# Setup analytics first because it is used by other files - import os.path - -import pros.common.sentry +import sys import click -import ctypes -import sys +import jsonpickle +import pros.cli.build +import pros.cli.conductor +import pros.cli.conductor_utils +import pros.cli.interactive +import pros.cli.misc_commands +import pros.cli.terminal +import pros.cli.upload +import pros.cli.user_script +import pros.cli.v5_utils +import 
pros.common.sentry import pros.common.ui as ui import pros.common.ui.log from pros.cli.click_classes import * @@ -17,16 +23,8 @@ from pros.common.utils import get_version, logger from pros.ga.analytics import analytics -import jsonpickle -import pros.cli.build -import pros.cli.conductor -import pros.cli.conductor_utils -import pros.cli.terminal -import pros.cli.upload -import pros.cli.v5_utils -import pros.cli.misc_commands -import pros.cli.interactive -import pros.cli.user_script +# Setup analytics first because it is used by other files + if sys.platform == 'win32': kernel32 = ctypes.windll.kernel32 diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index ff26c84f..5071110b 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -1,17 +1,18 @@ import os import signal +import sys import time import click -import sys import pros.conductor as c import pros.serial.devices as devices -from pros.serial.ports import DirectPort from pros.common.utils import logger -from .common import default_options, resolve_v5_port, resolve_cortex_port, pros_root -from pros.serial.ports.v5_wireless_port import V5WirelessPort from pros.ga.analytics import analytics +from pros.serial.ports import DirectPort +from pros.serial.ports.v5_wireless_port import V5WirelessPort + +from .common import default_options, pros_root, resolve_cortex_port, resolve_v5_port @pros_root diff --git a/pros/cli/upload.py b/pros/cli/upload.py index 0cc22148..f69c87bd 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -3,9 +3,9 @@ import pros.common.ui as ui import pros.conductor as c +from pros.ga.analytics import analytics from .common import * -from pros.ga.analytics import analytics @pros_root @@ -238,7 +238,7 @@ def ls_usb(target): List plugged in VEX Devices """ analytics.send("ls-usb") - from pros.serial.devices.vex import find_v5_ports, find_cortex_ports + from pros.serial.devices.vex import find_cortex_ports, find_v5_ports class PortReport(object): def __init__(self, header: str, ports: List[Any], machine_header: Optional[str] = None): diff --git a/pros/cli/user_script.py b/pros/cli/user_script.py index eac7db18..a9057496 100644 --- a/pros/cli/user_script.py +++ b/pros/cli/user_script.py @@ -1,9 +1,10 @@ import click from pros.common import ui -from .common import default_options, pros_root from pros.ga.analytics import analytics +from .common import default_options, pros_root + @pros_root def user_script_cli(): @@ -18,8 +19,8 @@ def user_script(script_file): Run a script file with the PROS CLI package """ analytics.send("user-script") - import os.path import importlib.util + import os.path package_name = os.path.splitext(os.path.split(script_file)[0])[0] package_path = os.path.abspath(script_file) diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index 933cae59..f69ecfa6 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -1,6 +1,7 @@ -from .common import * from pros.ga.analytics import analytics +from .common import * + @pros_root def v5_utils_cli(): @@ -102,8 +103,8 @@ def write_file(file, port: str, remote_file: str, **kwargs): Write a file to the V5. 
""" analytics.send("write-file") - from pros.serial.ports import DirectPort from pros.serial.devices.vex import V5Device + from pros.serial.ports import DirectPort port = resolve_v5_port(port, 'system')[0] if not port: @@ -269,10 +270,12 @@ def capture(file_name: str, port: str, force: bool = False): """ Take a screen capture of the display """ + import os + + import png + from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - import png - import os port = resolve_v5_port(port, 'system')[0] if not port: diff --git a/pros/common/sentry.py b/pros/common/sentry.py index 688f42e4..38207169 100644 --- a/pros/common/sentry.py +++ b/pros/common/sentry.py @@ -5,8 +5,9 @@ import pros.common.ui as ui if TYPE_CHECKING: - from sentry_sdk import Client, Hub, Scope # noqa: F401, flake8 issue with "if TYPE_CHECKING" import jsonpickle.handlers # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" + from sentry_sdk import Client, Hub, Scope # noqa: F401, flake8 issue with "if TYPE_CHECKING" + from pros.config.cli_config import CliConfig # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" cli_config: 'CliConfig' = None @@ -76,6 +77,7 @@ def add_context(obj: object, override_handlers: bool = True, key: str = None) -> """ import jsonpickle.handlers # noqa: F811, flake8 issue with "if TYPE_CHECKING" + from pros.conductor.templates import BaseTemplate class TemplateHandler(jsonpickle.handlers.BaseHandler): @@ -135,6 +137,7 @@ def register(cfg: Optional['CliConfig'] = None): return import sentry_sdk as sentry + from pros.upgrade import get_platformv2 client = sentry.Client( diff --git a/pros/common/ui/interactive/components/button.py b/pros/common/ui/interactive/components/button.py index 52f43004..dee126b5 100644 --- a/pros/common/ui/interactive/components/button.py +++ b/pros/common/ui/interactive/components/button.py @@ -1,7 +1,7 @@ from typing import * -from .component import Component from ..observable import Observable +from .component import Component class Button(Component, Observable): diff --git a/pros/common/ui/interactive/components/container.py b/pros/common/ui/interactive/components/container.py index 61a2a68e..6f251110 100644 --- a/pros/common/ui/interactive/components/container.py +++ b/pros/common/ui/interactive/components/container.py @@ -1,6 +1,7 @@ from typing import * from pros.common.ui.interactive.parameters import BooleanParameter + from .component import Component diff --git a/pros/common/ui/interactive/components/input_groups.py b/pros/common/ui/interactive/components/input_groups.py index 9e3c95bc..b8dad1a8 100644 --- a/pros/common/ui/interactive/components/input_groups.py +++ b/pros/common/ui/interactive/components/input_groups.py @@ -1,4 +1,5 @@ from pros.common.ui.interactive.parameters.misc_parameters import OptionParameter + from .component import BasicParameterizedComponent diff --git a/pros/common/ui/interactive/observable.py b/pros/common/ui/interactive/observable.py index a45a4dda..f2d14d92 100644 --- a/pros/common/ui/interactive/observable.py +++ b/pros/common/ui/interactive/observable.py @@ -58,6 +58,7 @@ def on( def bind(h): def bound(*args, **kw): from threading import Thread + from pros.common.utils import with_click_context t = Thread(target=with_click_context(h), args=(*bound_args, *args), kwargs={**bound_kwargs, **kw}) diff --git a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py index 35d614c5..91f88c8e 100644 --- 
a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py +++ b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py @@ -6,8 +6,9 @@ from pros.common import ui from pros.common.ui.interactive.observable import Observable -from .Renderer import Renderer + from ..application import Application +from .Renderer import Renderer current: List['MachineOutputRenderer'] = [] diff --git a/pros/common/utils.py b/pros/common/utils.py index 937e5478..56e0f08c 100644 --- a/pros/common/utils.py +++ b/pros/common/utils.py @@ -112,12 +112,13 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non :param desc: Description of file being downloaded (for progressbar) :return: The path of the downloaded file, or None if there was an error """ - import requests - from pros.common.ui import progressbar - # from rfc6266_parser import parse_requests_response import re + import requests + + from pros.common.ui import progressbar + response = requests.get(url, stream=True) if response.status_code == 200: filename: str = url.rsplit('/', 1)[-1] diff --git a/pros/conductor/__init__.py b/pros/conductor/__init__.py index 9d8c0406..51ac1e34 100644 --- a/pros/conductor/__init__.py +++ b/pros/conductor/__init__.py @@ -3,4 +3,4 @@ from .conductor import Conductor from .depots import Depot, LocalDepot from .project import Project -from .templates import BaseTemplate, Template, LocalTemplate +from .templates import BaseTemplate, LocalTemplate, Template diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index 073123ff..021e981d 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -11,6 +11,7 @@ from pros.conductor.project import TemplateAction from pros.conductor.project.template_resolution import InvalidTemplateException from pros.config import Config + from .depots import Depot, HttpDepot from .project import Project from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template diff --git a/pros/conductor/depots/depot.py b/pros/conductor/depots/depot.py index c4d5397a..a0787f43 100644 --- a/pros/conductor/depots/depot.py +++ b/pros/conductor/depots/depot.py @@ -4,6 +4,7 @@ import pros.common.ui as ui from pros.common import logger from pros.config.cli_config import cli_config + from ..templates import BaseTemplate, Template diff --git a/pros/conductor/depots/http_depot.py b/pros/conductor/depots/http_depot.py index 21e0c404..652b8c6e 100644 --- a/pros/conductor/depots/http_depot.py +++ b/pros/conductor/depots/http_depot.py @@ -7,8 +7,9 @@ import pros.common.ui as ui from pros.common import logger from pros.common.utils import download_file -from .depot import Depot + from ..templates import BaseTemplate, ExternalTemplate +from .depot import Depot class HttpDepot(Depot): diff --git a/pros/conductor/depots/local_depot.py b/pros/conductor/depots/local_depot.py index f4223dd0..181ed581 100644 --- a/pros/conductor/depots/local_depot.py +++ b/pros/conductor/depots/local_depot.py @@ -4,10 +4,11 @@ import click +from pros.common.utils import logger from pros.config import ConfigNotFoundException + +from ..templates import BaseTemplate, ExternalTemplate, Template from .depot import Depot -from ..templates import BaseTemplate, Template, ExternalTemplate -from pros.common.utils import logger class LocalDepot(Depot): diff --git a/pros/conductor/interactive/NewProjectModal.py b/pros/conductor/interactive/NewProjectModal.py index 0e08ad53..17132ff0 100644 --- a/pros/conductor/interactive/NewProjectModal.py +++ 
b/pros/conductor/interactive/NewProjectModal.py @@ -6,6 +6,7 @@ from pros.common import ui from pros.common.ui.interactive import application, components, parameters from pros.conductor import Conductor + from .parameters import NonExistentProjectParameter diff --git a/pros/conductor/interactive/UpdateProjectModal.py b/pros/conductor/interactive/UpdateProjectModal.py index e20276a4..fecb6cb8 100644 --- a/pros/conductor/interactive/UpdateProjectModal.py +++ b/pros/conductor/interactive/UpdateProjectModal.py @@ -8,6 +8,7 @@ from pros.common.ui.interactive import application, components, parameters from pros.conductor import BaseTemplate, Conductor, Project from pros.conductor.project.ProjectTransaction import ProjectTransaction + from .components import TemplateListingComponent from .parameters import ExistingProjectParameter, TemplateParameter diff --git a/pros/conductor/interactive/__init__.py b/pros/conductor/interactive/__init__.py index 89f1e51c..d915ee91 100644 --- a/pros/conductor/interactive/__init__.py +++ b/pros/conductor/interactive/__init__.py @@ -1,4 +1,3 @@ from .NewProjectModal import NewProjectModal -from .UpdateProjectModal import UpdateProjectModal - from .parameters import ExistingProjectParameter, NonExistentProjectParameter +from .UpdateProjectModal import UpdateProjectModal diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index 355ddda9..cb0509f7 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -10,9 +10,10 @@ from pros.common.ui import EchoPipe from pros.conductor.project.template_resolution import TemplateAction from pros.config.config import Config, ConfigNotFoundException -from .ProjectReport import ProjectReport + from ..templates import BaseTemplate, LocalTemplate, Template from ..transaction import Transaction +from .ProjectReport import ProjectReport class Project(Config): @@ -290,12 +291,12 @@ def make_scan_build( suppress_output: bool = False, sandbox: bool = False, ): - from libscanbuild.compilation import Compilation, CompilationDatabase - from libscanbuild.arguments import create_intercept_parser + import argparse import itertools - import subprocess - import argparse + + from libscanbuild.arguments import create_intercept_parser + from libscanbuild.compilation import Compilation, CompilationDatabase if sandbox: import tempfile @@ -311,14 +312,14 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil :param args: the parsed and validated command line arguments :return: the exit status of build process. 
""" + from libear import temporary_directory from libscanbuild.intercept import ( - setup_environment, - run_build, + compilations, exec_trace_files, parse_exec_trace, - compilations, + run_build, + setup_environment, ) - from libear import temporary_directory with temporary_directory(prefix='intercept-') as tmp_dir: # run the build command diff --git a/pros/config/cli_config.py b/pros/config/cli_config.py index 79b3d1f0..1600b146 100644 --- a/pros/config/cli_config.py +++ b/pros/config/cli_config.py @@ -40,8 +40,8 @@ def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifest if not force and not self.needs_online_fetch(self.cached_upgrade[0]): return self.cached_upgrade[1] pros.common.logger(__name__).info('Fetching upgrade manifest...') - import requests import jsonpickle + import requests r = requests.get('https://purduesigbots.github.io/pros-mainline/cli-updates.json') pros.common.logger(__name__).debug(r) diff --git a/pros/config/config.py b/pros/config/config.py index e0557a44..59cda986 100644 --- a/pros/config/config.py +++ b/pros/config/config.py @@ -1,6 +1,7 @@ import json.decoder import jsonpickle + from pros.common.utils import * diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index 6ac1b2b8..de7d806d 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -1,10 +1,11 @@ import json -from os import path +import random import uuid +from concurrent.futures import as_completed +from os import path + import requests from requests_futures.sessions import FuturesSession -import random -from concurrent.futures import as_completed url = 'https://www.google-analytics.com/collect' agent = 'pros-cli' diff --git a/pros/serial/devices/__init__.py b/pros/serial/devices/__init__.py index ac6cd8c0..f5d0e9b1 100644 --- a/pros/serial/devices/__init__.py +++ b/pros/serial/devices/__init__.py @@ -1,2 +1,2 @@ from .generic_device import GenericDevice -from .stream_device import StreamDevice, RawStreamDevice +from .stream_device import RawStreamDevice, StreamDevice diff --git a/pros/serial/devices/vex/cortex_device.py b/pros/serial/devices/vex/cortex_device.py index 792c3344..13a35d96 100644 --- a/pros/serial/devices/vex/cortex_device.py +++ b/pros/serial/devices/vex/cortex_device.py @@ -6,15 +6,15 @@ from typing import * from pros.common import ui -from pros.common.utils import retries, logger +from pros.common.utils import logger, retries from pros.conductor import Project from pros.serial import bytes_to_str from pros.serial.devices.vex import VEXCommError from pros.serial.devices.vex.stm32_device import STM32Device from pros.serial.ports import list_all_comports -from .vex_device import VEXDevice from ..system_device import SystemDevice +from .vex_device import VEXDevice def find_cortex_ports(): diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index e4820496..c0ee039c 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -1,10 +1,10 @@ import gzip import io +import platform import re import struct import time import typing -import platform from collections import defaultdict from configparser import ConfigParser from datetime import datetime, timedelta @@ -16,17 +16,18 @@ from semantic_version import Spec -from pros.common import ui from pros.common import * +from pros.common import ui from pros.common.utils import * from pros.conductor import Project from pros.serial import bytes_to_str, decode_bytes_to_str from pros.serial.ports import BasePort, list_all_comports + +from 
..system_device import SystemDevice from .comm_error import VEXCommError from .crc import CRC from .message import Message from .vex_device import VEXDevice -from ..system_device import SystemDevice int_str = Union[int, str] @@ -247,8 +248,8 @@ def is_wireless(self): def generate_cold_hash(self, project: Project, extra: dict): keys = {k: t.version for k, t in project.templates.items()} keys.update(extra) - from hashlib import md5 from base64 import b64encode + from hashlib import md5 msg = str(sorted(keys, key=lambda t: t[0])).encode('ascii') name = b64encode(md5(msg).digest()).rstrip(b'=').decode('ascii') diff --git a/pros/serial/devices/vex/v5_user_device.py b/pros/serial/devices/vex/v5_user_device.py index be40d6b4..95ce2fc6 100644 --- a/pros/serial/devices/vex/v5_user_device.py +++ b/pros/serial/devices/vex/v5_user_device.py @@ -1,6 +1,7 @@ from typing import * from cobs import cobs + from pros.common.utils import logger from pros.serial.devices.stream_device import StreamDevice from pros.serial.ports import BasePort diff --git a/pros/serial/devices/vex/vex_device.py b/pros/serial/devices/vex/vex_device.py index 5866afca..d234dd78 100644 --- a/pros/serial/devices/vex/vex_device.py +++ b/pros/serial/devices/vex/vex_device.py @@ -5,9 +5,10 @@ from pros.common import * from pros.serial import bytes_to_str from pros.serial.ports import BasePort + +from ..generic_device import GenericDevice from . import comm_error from .message import Message -from ..generic_device import GenericDevice def debug(msg): diff --git a/pros/serial/interactive/UploadProjectModal.py b/pros/serial/interactive/UploadProjectModal.py index 7fc3ad79..4336861b 100644 --- a/pros/serial/interactive/UploadProjectModal.py +++ b/pros/serial/interactive/UploadProjectModal.py @@ -119,9 +119,10 @@ def project_changed(self, new_project: ExistingProjectParameter): ui.logger(__name__).exception(e) def confirm(self, *args, **kwargs): - from pros.cli.upload import upload from click import get_current_context + from pros.cli.upload import upload + kwargs = {'path': None, 'project': self.project, 'port': self.port.value} savable_kwargs = {} if self.project.target == 'v5': diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index a70468c1..e1d15175 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -1,8 +1,9 @@ from functools import lru_cache -from pros.common import logger from serial.tools import list_ports +from pros.common import logger + from .base_port import BasePort, PortConnectionException, PortException from .direct_port import DirectPort diff --git a/pros/serial/ports/direct_port.py b/pros/serial/ports/direct_port.py index 5ac33f3d..0fb20ad3 100644 --- a/pros/serial/ports/direct_port.py +++ b/pros/serial/ports/direct_port.py @@ -3,8 +3,9 @@ import serial -from pros.common import logger, dont_send +from pros.common import dont_send, logger from pros.serial.ports.exceptions import ConnectionRefusedException, PortNotFoundException + from .base_port import BasePort, PortConnectionException diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index 54eaf0ec..71843fee 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -1,4 +1,5 @@ import os + import serial diff --git a/pros/serial/ports/serial_share_bridge.py b/pros/serial/ports/serial_share_bridge.py index 186299dc..cc35cd6a 100644 --- a/pros/serial/ports/serial_share_bridge.py +++ b/pros/serial/ports/serial_share_bridge.py @@ -5,10 +5,11 @@ import zmq from cobs 
import cobs + from pros.common.utils import * -from .direct_port import DirectPort from .. import bytes_to_str +from .direct_port import DirectPort def get_port_num(serial_port_name: str, hash: str) -> int: diff --git a/pros/serial/terminal/terminal.py b/pros/serial/terminal/terminal.py index b07b9769..05710568 100644 --- a/pros/serial/terminal/terminal.py +++ b/pros/serial/terminal/terminal.py @@ -11,7 +11,6 @@ from pros.serial.devices import StreamDevice from pros.serial.ports import PortConnectionException - # This file is a modification of the miniterm implementation on pyserial @@ -61,8 +60,8 @@ def __exit__(self, *args, **kwargs): if os.name == 'nt': # noqa - import msvcrt import ctypes + import msvcrt class Out(object): """file-like wrapper that uses os.write""" @@ -115,8 +114,8 @@ def cancel(self): elif os.name == 'posix': import atexit - import termios import select + import termios class Console(ConsoleBase): def __init__(self): diff --git a/pros/upgrade/instructions/__init__.py b/pros/upgrade/instructions/__init__.py index 26d62f32..003b586a 100644 --- a/pros/upgrade/instructions/__init__.py +++ b/pros/upgrade/instructions/__init__.py @@ -1,6 +1,6 @@ from .base_instructions import UpgradeInstruction, UpgradeResult -from .nothing_instructions import NothingInstruction from .download_instructions import DownloadInstruction from .explorer_instructions import ExplorerInstruction +from .nothing_instructions import NothingInstruction __all__ = ['UpgradeInstruction', 'UpgradeResult', 'NothingInstruction', 'ExplorerInstruction', 'DownloadInstruction'] diff --git a/pros/upgrade/instructions/download_instructions.py b/pros/upgrade/instructions/download_instructions.py index 4432b383..666456c6 100644 --- a/pros/upgrade/instructions/download_instructions.py +++ b/pros/upgrade/instructions/download_instructions.py @@ -2,6 +2,7 @@ from typing import * from pros.common.utils import download_file + from .base_instructions import UpgradeInstruction, UpgradeResult diff --git a/pros/upgrade/manifests/__init__.py b/pros/upgrade/manifests/__init__.py index 290f42c5..cc8fb43d 100644 --- a/pros/upgrade/manifests/__init__.py +++ b/pros/upgrade/manifests/__init__.py @@ -1,7 +1,7 @@ from typing import * from .upgrade_manifest_v1 import UpgradeManifestV1 -from .upgrade_manifest_v2 import UpgradeManifestV2, PlatformsV2 +from .upgrade_manifest_v2 import PlatformsV2, UpgradeManifestV2 # Order of files manifests = [UpgradeManifestV2, UpgradeManifestV1] # type: List[Type] diff --git a/pros/upgrade/manifests/upgrade_manifest_v1.py b/pros/upgrade/manifests/upgrade_manifest_v1.py index 102a06a5..33714ef0 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v1.py +++ b/pros/upgrade/manifests/upgrade_manifest_v1.py @@ -1,6 +1,7 @@ from semantic_version import Version from pros.common.utils import get_version, logger + from ..instructions import UpgradeResult diff --git a/pros/upgrade/manifests/upgrade_manifest_v2.py b/pros/upgrade/manifests/upgrade_manifest_v2.py index 469f906b..6d4f3581 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v2.py +++ b/pros/upgrade/manifests/upgrade_manifest_v2.py @@ -3,8 +3,9 @@ from typing import * from pros.common import logger + +from ..instructions import NothingInstruction, UpgradeInstruction, UpgradeResult from .upgrade_manifest_v1 import UpgradeManifestV1 -from ..instructions import UpgradeInstruction, UpgradeResult, NothingInstruction class PlatformsV2(Enum): diff --git a/pros/upgrade/upgrade_manager.py b/pros/upgrade/upgrade_manager.py index 3ddcf8eb..d0bc3baf 100644 --- 
a/pros/upgrade/upgrade_manager.py +++ b/pros/upgrade/upgrade_manager.py @@ -3,12 +3,13 @@ from enum import Enum from typing import * -from pros.common import logger import pros.common.ui as ui +from pros.common import logger from pros.config import Config from pros.config.cli_config import cli_config -from .manifests import * + from .instructions import UpgradeResult +from .manifests import * class ReleaseChannel(Enum): @@ -40,10 +41,11 @@ def get_manifest(self, force: bool = False) -> UpgradeManifestV1: return self._manifest ui.echo('Fetching upgrade manifest...') - import requests - import jsonpickle import json + import jsonpickle + import requests + channel_url = f'https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}' self._manifest = None diff --git a/setup.py b/setup.py index df2b69b8..b4cd07b8 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,7 @@ # setup.py for non-frozen builds -from setuptools import setup, find_packages +from setuptools import find_packages, setup + from install_requires import install_requires as install_reqs setup( From 6e284f20ff4ed0db8abaeb49f398e9b0e8bad60d Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 00:45:04 -0500 Subject: [PATCH 16/44] Add pre-commit to CI --- .github/workflows/pre-commit.yml | 13 +++++++++++++ .pre-commit-config.yaml | 2 ++ 2 files changed, 15 insertions(+) create mode 100644 .github/workflows/pre-commit.yml diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..cdbc0e02 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,13 @@ +name: pre-commit + +on: + pull_request: + push: + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: actions/setup-python@v5.0.0 + - uses: pre-commit/action@v3.0.0 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 049cc229..0458cf55 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,5 @@ +ci: + skip: [pylint] repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 From 340d037b952a7560b9d74e6f9fc5d934412047d6 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 00:50:20 -0500 Subject: [PATCH 17/44] Skip pylint --- .github/workflows/pre-commit.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index cdbc0e02..2706284a 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -11,3 +11,5 @@ jobs: - uses: actions/checkout@v4.1.1 - uses: actions/setup-python@v5.0.0 - uses: pre-commit/action@v3.0.0 + with: + extra_args: SKIP=pylint From 986af1ff159fe3fe12b2edd02279f05543c9d538 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 00:59:12 -0500 Subject: [PATCH 18/44] Use environment variable to skip pylint --- .github/workflows/pre-commit.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 2706284a..ab52efe4 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -4,6 +4,9 @@ on: pull_request: push: +env: + SKIP: pylint + jobs: pre-commit: runs-on: ubuntu-latest @@ -11,5 +14,3 @@ jobs: - uses: actions/checkout@v4.1.1 - uses: actions/setup-python@v5.0.0 - uses: 
pre-commit/action@v3.0.0 - with: - extra_args: SKIP=pylint From baa5f177141fc02604a05dc9f25f709a840aa996 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 01:00:48 -0500 Subject: [PATCH 19/44] Remove redundant skip --- .pre-commit-config.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0458cf55..049cc229 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,3 @@ -ci: - skip: [pylint] repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 From c57fd3c72ea81a246b43010426c1545d590d7061 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 01:15:49 -0500 Subject: [PATCH 20/44] Add pre-commit action --- .github/workflows/pre-commit.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/pre-commit.yml diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..65080b90 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,16 @@ +name: pre-commit + +on: + pull_request: + push: + +env: + SKIP: pylint + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: actions/setup-python@v5.0.0 + - uses: pre-commit/action@v3.0.0 From 975e51f4c994ab0d0821321682905870e766561e Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 01:16:17 -0500 Subject: [PATCH 21/44] Move pylint action to separate file --- .github/workflows/main.yml | 21 --------------------- .github/workflows/pylint.yml | 26 ++++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 21 deletions(-) create mode 100644 .github/workflows/pylint.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d004c816..76145e6b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,27 +5,6 @@ on: pull_request: jobs: - pylint: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ["3.10"] - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install pylint - pip install -r requirements.txt - - name: Analysing the code with pylint - run: | - pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') - update_build_number: runs-on: ubuntu-latest outputs: diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml new file mode 100644 index 00000000..5af7078b --- /dev/null +++ b/.github/workflows/pylint.yml @@ -0,0 +1,26 @@ +name: Pylint + +on: + pull_request: + push: + +jobs: + Pylint: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v4.1.1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5.0.0 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') From dd23c0fd7126ba0872deb210a862e8542ea6722a Mon Sep 17 00:00:00 2001 From: Mayank Patibandla 
<34776435+mayankpatibandla@users.noreply.github.com> Date: Fri, 2 Feb 2024 16:35:11 -0500 Subject: [PATCH 22/44] Normalize strings --- .pre-commit-config.yaml | 2 +- install_requires.py | 2 +- pros/cli/build.py | 30 +- pros/cli/click_classes.py | 30 +- pros/cli/common.py | 120 ++-- pros/cli/compile_commands/intercept-cc.py | 2 +- pros/cli/conductor.py | 298 ++++----- pros/cli/conductor_utils.py | 94 +-- pros/cli/interactive.py | 2 +- pros/cli/main.py | 56 +- pros/cli/misc_commands.py | 18 +- pros/cli/terminal.py | 56 +- pros/cli/upload.py | 244 +++---- pros/cli/user_script.py | 6 +- pros/cli/v5_utils.py | 184 +++--- pros/common/sentry.py | 72 +- pros/common/ui/__init__.py | 60 +- pros/common/ui/interactive/ConfirmModal.py | 4 +- pros/common/ui/interactive/application.py | 28 +- .../ui/interactive/components/__init__.py | 24 +- .../ui/interactive/components/button.py | 2 +- .../ui/interactive/components/component.py | 6 +- .../ui/interactive/components/container.py | 6 +- .../common/ui/interactive/components/input.py | 2 +- .../common/ui/interactive/components/label.py | 2 +- pros/common/ui/interactive/observable.py | 2 +- .../ui/interactive/parameters/__init__.py | 12 +- .../interactive/parameters/misc_parameters.py | 8 +- .../ui/interactive/parameters/parameter.py | 8 +- .../parameters/validatable_parameter.py | 8 +- .../renderers/MachineOutputRenderer.py | 38 +- .../ui/interactive/renderers/Renderer.py | 2 +- pros/common/ui/log.py | 16 +- pros/common/utils.py | 24 +- pros/conductor/__init__.py | 2 +- pros/conductor/conductor.py | 238 +++---- pros/conductor/depots/depot.py | 12 +- pros/conductor/depots/http_depot.py | 14 +- pros/conductor/depots/local_depot.py | 18 +- pros/conductor/interactive/NewProjectModal.py | 30 +- .../interactive/UpdateProjectModal.py | 24 +- pros/conductor/interactive/components.py | 16 +- pros/conductor/interactive/parameters.py | 46 +- pros/conductor/project/ProjectReport.py | 6 +- pros/conductor/project/ProjectTransaction.py | 52 +- pros/conductor/project/__init__.py | 182 +++--- pros/conductor/templates/base_template.py | 48 +- pros/conductor/templates/external_template.py | 8 +- pros/conductor/templates/local_template.py | 2 +- pros/conductor/transaction.py | 20 +- pros/config/cli_config.py | 18 +- pros/config/config.py | 42 +- pros/ga/analytics.py | 38 +- pros/serial/__init__.py | 10 +- pros/serial/devices/stream_device.py | 2 +- pros/serial/devices/vex/cortex_device.py | 58 +- pros/serial/devices/vex/message.py | 2 +- pros/serial/devices/vex/stm32_device.py | 70 +- pros/serial/devices/vex/v5_device.py | 618 +++++++++--------- pros/serial/devices/vex/v5_user_device.py | 10 +- pros/serial/devices/vex/vex_device.py | 22 +- pros/serial/interactive/UploadProjectModal.py | 76 +-- pros/serial/interactive/__init__.py | 2 +- pros/serial/ports/__init__.py | 2 +- pros/serial/ports/direct_port.py | 10 +- pros/serial/ports/exceptions.py | 12 +- pros/serial/ports/serial_share_bridge.py | 58 +- pros/serial/ports/serial_share_port.py | 32 +- pros/serial/terminal/terminal.py | 46 +- pros/upgrade/__init__.py | 2 +- pros/upgrade/instructions/__init__.py | 2 +- .../instructions/download_instructions.py | 12 +- .../instructions/explorer_instructions.py | 4 +- .../instructions/nothing_instructions.py | 2 +- pros/upgrade/manifests/__init__.py | 2 +- pros/upgrade/manifests/upgrade_manifest_v1.py | 8 +- pros/upgrade/manifests/upgrade_manifest_v2.py | 20 +- pros/upgrade/upgrade_manager.py | 22 +- setup.py | 16 +- version.py | 38 +- 80 files changed, 1721 insertions(+), 1721 
deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 049cc229..d38ebd27 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: rev: 24.1.1 hooks: - id: black - args: ["--skip-string-normalization", "--line-length=120"] + args: ["--line-length=120"] - repo: local hooks: - id: pylint diff --git a/install_requires.py b/install_requires.py index 6aad2a80..e77dd742 100644 --- a/install_requires.py +++ b/install_requires.py @@ -1,2 +1,2 @@ -with open('requirements.txt') as reqs: +with open("requirements.txt") as reqs: install_requires = [req.strip() for req in reqs.readlines()] diff --git a/pros/cli/build.py b/pros/cli/build.py index 25f2098b..43178b5b 100644 --- a/pros/cli/build.py +++ b/pros/cli/build.py @@ -14,9 +14,9 @@ def build_cli(): pass -@build_cli.command(aliases=['build', 'm']) +@build_cli.command(aliases=["build", "m"]) @project_option() -@click.argument('build-args', nargs=-1) +@click.argument("build-args", nargs=-1) @default_options def make(project: c.Project, build_args): """ @@ -25,13 +25,13 @@ def make(project: c.Project, build_args): analytics.send("make") exit_code = project.compile(build_args) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code -@build_cli.command('make-upload', aliases=['mu'], hidden=True) -@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)') +@build_cli.command("make-upload", aliases=["mu"], hidden=True) +@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g. compile target)") @shadow_command(upload) @project_option() @click.pass_context @@ -41,8 +41,8 @@ def make_upload(ctx, project: c.Project, build_args: List[str], **upload_args): ctx.invoke(upload, project=project, **upload_args) -@build_cli.command('make-upload-terminal', aliases=['mut'], hidden=True) -@click.option('build_args', '--make', '-m', multiple=True, help='Send arguments to make (e.g. compile target)') +@build_cli.command("make-upload-terminal", aliases=["mut"], hidden=True) +@click.option("build_args", "--make", "-m", multiple=True, help="Send arguments to make (e.g. 
compile target)") @shadow_command(upload) @project_option() @click.pass_context @@ -55,14 +55,14 @@ def make_upload_terminal(ctx, project: c.Project, build_args, **upload_args): ctx.invoke(terminal, port=project.target, request_banner=False) -@build_cli.command('build-compile-commands', hidden=True) +@build_cli.command("build-compile-commands", hidden=True) @project_option() @click.option( - '--suppress-output/--show-output', 'suppress_output', default=False, show_default=True, help='Suppress output' + "--suppress-output/--show-output", "suppress_output", default=False, show_default=True, help="Suppress output" ) -@click.option('--compile-commands', type=click.File('w'), default=None) -@click.option('--sandbox', default=False, is_flag=True) -@click.argument('build-args', nargs=-1) +@click.option("--compile-commands", type=click.File("w"), default=None) +@click.option("--sandbox", default=False, is_flag=True) +@click.argument("build-args", nargs=-1) @default_options def build_compile_commands( project: c.Project, suppress_output: bool, compile_commands, sandbox: bool, build_args: List[str] @@ -76,6 +76,6 @@ def build_compile_commands( build_args, cdb_file=compile_commands, suppress_output=suppress_output, sandbox=sandbox ) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code diff --git a/pros/cli/click_classes.py b/pros/cli/click_classes.py index 58aad9b5..6eeda9de 100644 --- a/pros/cli/click_classes.py +++ b/pros/cli/click_classes.py @@ -22,7 +22,7 @@ def format_commands(self, ctx, formatter): """Extra format methods for multi methods that adds all the commands after the options. """ - if not hasattr(self, 'list_commands'): + if not hasattr(self, "list_commands"): return rows = [] for subcommand in self.list_commands(ctx): @@ -30,14 +30,14 @@ def format_commands(self, ctx, formatter): # What is this, the tool lied about a command. 
Ignore it if cmd is None: continue - if hasattr(cmd, 'hidden') and cmd.hidden: + if hasattr(cmd, "hidden") and cmd.hidden: continue - help = cmd.short_help or '' + help = cmd.short_help or "" rows.append((subcommand, help)) if rows: - with formatter.section('Commands'): + with formatter.section("Commands"): formatter.write_dl(rows) def format_options(self, ctx, formatter): @@ -46,15 +46,15 @@ def format_options(self, ctx, formatter): for param in self.get_params(ctx): rv = param.get_help_record(ctx) if rv is not None: - if hasattr(param, 'group'): + if hasattr(param, "group"): opts[param.group].append(rv) else: - opts['Options'].append(rv) + opts["Options"].append(rv) - if len(opts['Options']) > 0: - with formatter.section('Options'): - formatter.write_dl(opts['Options']) - opts.pop('Options') + if len(opts["Options"]) > 0: + with formatter.section("Options"): + formatter.write_dl(opts["Options"]) + opts.pop("Options") for group, options in opts.items(): with formatter.section(group): @@ -79,16 +79,16 @@ def __init__(self, *args, hidden: bool = False, group: str = None, **kwargs): self.group = group def get_help_record(self, ctx): - if hasattr(self, 'hidden') and self.hidden: + if hasattr(self, "hidden") and self.hidden: return return super().get_help_record(ctx) class PROSDeprecated(click.Option): def __init__(self, *args, replacement: str = None, **kwargs): - kwargs['help'] = "This option has been deprecated." + kwargs["help"] = "This option has been deprecated." if not replacement == None: - kwargs['help'] += " Its replacement is '--{}'".format(replacement) + kwargs["help"] += " Its replacement is '--{}'".format(replacement) super(PROSDeprecated, self).__init__(*args, **kwargs) self.group = "Deprecated" self.optiontype = "flag" if str(self.type) == "BOOL" else "switch" @@ -116,7 +116,7 @@ def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ if len(args) == 0 else args[0] - cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop('cls', PROSCommand), **kwargs)(f) + cmd = super(PROSGroup, self).command(*args, cls=kwargs.pop("cls", PROSCommand), **kwargs)(f) self.add_command(cmd) return cmd @@ -128,7 +128,7 @@ def group(self, aliases=None, *args, **kwargs): def decorator(f): for alias in aliases: self.cmd_dict[alias] = f.__name__ - cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop('cls', PROSGroup), **kwargs)(f) + cmd = super(PROSGroup, self).group(*args, cls=kwargs.pop("cls", PROSGroup), **kwargs)(f) self.add_command(cmd) return cmd diff --git a/pros/cli/common.py b/pros/cli/common.py index 30108507..8894ca97 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -16,23 +16,23 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logger().setLevel(min(logger().level, logging.INFO)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.INFO) - logger(__name__).info('Verbose messages enabled') + logger(__name__).info("Verbose messages enabled") return value return click.option( - '--verbose', - help='Enable verbose output', + "--verbose", + help="Enable verbose output", is_flag=True, is_eager=True, expose_value=False, callback=callback, cls=PROSOption, - group='Standard Options', + 
group="Standard Options", )(f) @@ -44,25 +44,25 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) if value: logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') - if logger('pros').isEnabledFor(logging.DEBUG): - logger('pros').debug(f'CLI Version: {get_version()}') + logging.getLogger(__name__).info("Debugging messages enabled") + if logger("pros").isEnabledFor(logging.DEBUG): + logger("pros").debug(f"CLI Version: {get_version()}") return value return click.option( - '--debug', - help='Enable debugging output', + "--debug", + help="Enable debugging output", is_flag=True, is_eager=True, expose_value=False, callback=callback, cls=PROSOption, - group='Standard Options', + group="Standard Options", )(f) @@ -74,22 +74,22 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value, str): value = getattr(logging, value.upper(), None) if not isinstance(value, int): - raise ValueError('Invalid log level: {}'.format(value)) + raise ValueError("Invalid log level: {}".format(value)) logging.getLogger().setLevel(min(logger().level, value)) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(value) return value return click.option( - '-l', - '--log', - help='Logging level', + "-l", + "--log", + help="Logging level", is_eager=True, expose_value=False, callback=callback, - type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']), + type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]), cls=PROSOption, - group='Standard Options', + group="Standard Options", )(f) @@ -102,26 +102,26 @@ def callback(ctx: click.Context, param: click.core.Parameter, value: Any): if isinstance(value[1], str): level = getattr(logging, value[1].upper(), None) if not isinstance(level, int): - raise ValueError('Invalid log level: {}'.format(value[1])) - handler = logging.FileHandler(value[0], mode='w') - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s' + raise ValueError("Invalid log level: {}".format(value[1])) + handler = logging.FileHandler(value[0], mode="w") + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s" handler.setFormatter(logging.Formatter(fmt_str)) handler.setLevel(level) logging.getLogger().addHandler(handler) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.getLogger().level) # pin stdout_handler to its current log level logging.getLogger().setLevel(min(logging.getLogger().level, level)) return click.option( - '--logfile', - help='Log messages to a file', + "--logfile", + help="Log messages to a file", is_eager=True, expose_value=False, callback=callback, default=(None, None), - type=click.Tuple([click.Path(), click.Choice(['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])]), + type=click.Tuple([click.Path(), click.Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])]), cls=PROSOption, - group='Standard 
Options', + group="Standard Options", )(f) @@ -132,22 +132,22 @@ def machine_output_option(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: str): ctx.ensure_object(dict) - add_tag('machine-output', value) # goes in sentry report + add_tag("machine-output", value) # goes in sentry report if value: ctx.obj[param.name] = value logging.getLogger().setLevel(logging.DEBUG) - stdout_handler = ctx.obj['click_handler'] # type: logging.Handler + stdout_handler = ctx.obj["click_handler"] # type: logging.Handler stdout_handler.setLevel(logging.DEBUG) - logging.getLogger(__name__).info('Debugging messages enabled') + logging.getLogger(__name__).info("Debugging messages enabled") return value decorator = click.option( - '--machine-output', + "--machine-output", expose_value=False, is_flag=True, default=False, is_eager=True, - help='Enable machine friendly output.', + help="Enable machine friendly output.", callback=callback, cls=PROSOption, hidden=True, @@ -163,12 +163,12 @@ def no_sentry_option(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-sentry', value) + add_tag("no-sentry", value) if value: pros.common.sentry.disable_prompt() decorator = click.option( - '--no-sentry', + "--no-sentry", expose_value=False, is_flag=True, default=False, @@ -189,14 +189,14 @@ def no_analytics(f: Union[click.Command, Callable]): def callback(ctx: click.Context, param: click.Parameter, value: bool): ctx.ensure_object(dict) - add_tag('no-analytics', value) + add_tag("no-analytics", value) if value: echo("Not sending analytics for this command.\n") analytics.useAnalytics = False pass decorator = click.option( - '--no-analytics', + "--no-analytics", expose_value=False, is_flag=True, default=False, @@ -221,7 +221,7 @@ def default_options(f: Union[click.Command, Callable]): return decorator -def template_query(arg_name='query', required: bool = False): +def template_query(arg_name="query", required: bool = False): """ provides a wrapper for conductor commands which require an optional query @@ -234,10 +234,10 @@ def callback(ctx: click.Context, param: click.Parameter, value: Tuple[str, ...]) value = list(value) spec = None - if len(value) > 0 and not value[0].startswith('--'): + if len(value) > 0 and not value[0].startswith("--"): spec = value.pop(0) if not spec and required: - raise ValueError(f'A {arg_name} is required to perform this command') + raise ValueError(f"A {arg_name} is required to perform this command") query = c.BaseTemplate.create_query( spec, **{value[i][2:]: value[i + 1] for i in range(0, int(len(value) / 2) * 2, 2)} ) @@ -250,7 +250,7 @@ def wrapper(f: Union[click.Command, Callable]): return wrapper -def project_option(arg_name='project', required: bool = True, default: str = '.', allow_none: bool = False): +def project_option(arg_name="project", required: bool = True, default: str = ".", allow_none: bool = False): def callback(ctx: click.Context, param: click.Parameter, value: str): if allow_none and value is None: return None @@ -263,20 +263,20 @@ def callback(ctx: click.Context, param: click.Parameter, value: str): else: raise click.UsageError( f'{os.path.abspath(value or ".")} is not inside a PROS project. 
' - f'Execute this command from within a PROS project or specify it ' - f'with --project project/path' + f"Execute this command from within a PROS project or specify it " + f"with --project project/path" ) return c.Project(project_path) def wrapper(f: Union[click.Command, Callable]): return click.option( - f'--{arg_name}', + f"--{arg_name}", callback=callback, required=required, default=default, type=click.Path(exists=True), show_default=True, - help='PROS Project directory or file', + help="PROS Project directory or file", )(f) return wrapper @@ -287,7 +287,7 @@ def wrapper(f: Union[click.Command, Callable]): if isinstance(f, click.Command): f.params.extend(p for p in command.params if p.name not in [p.name for p in command.params]) else: - if not hasattr(f, '__click_params__'): + if not hasattr(f, "__click_params__"): f.__click_params__ = [] f.__click_params__.extend(p for p in command.params if p.name not in [p.name for p in f.__click_params__]) return f @@ -322,20 +322,20 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl is_joystick = False if not port: ports = find_v5_ports(type) - logger(__name__).debug('Ports: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Ports: {}".format(";".join([str(p.__dict__) for p in ports]))) if len(ports) == 0: if not quiet: logger(__name__).error( - 'No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('v5'), - extra={'sentry': False}, + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("v5"), + extra={"sentry": False}, ) return None, False if len(ports) > 1: if not quiet: port = click.prompt( - 'Multiple {} ports were found. Please choose one: [{}]'.format( - 'v5', '|'.join([p.device for p in ports]) + "Multiple {} ports were found. Please choose one: [{}]".format( + "v5", "|".join([p.device for p in ports]) ), default=ports[0].device, show_default=False, @@ -346,8 +346,8 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl return None, False else: port = ports[0].device - is_joystick = type == 'user' and 'Controller' in ports[0].description - logger(__name__).info('Automatically selected {}'.format(port)) + is_joystick = type == "user" and "Controller" in ports[0].description + logger(__name__).info("Automatically selected {}".format(port)) return port, is_joystick @@ -359,15 +359,15 @@ def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[st if len(ports) == 0: if not quiet: logger(__name__).error( - 'No {0} ports were found! If you think you have a {0} plugged in, ' - 'run this command again with the --debug flag'.format('cortex'), - extra={'sentry': False}, + "No {0} ports were found! If you think you have a {0} plugged in, " + "run this command again with the --debug flag".format("cortex"), + extra={"sentry": False}, ) return None if len(ports) > 1: if not quiet: port = click.prompt( - 'Multiple {} ports were found. Please choose one: '.format('cortex'), + "Multiple {} ports were found. 
Please choose one: ".format("cortex"), default=ports[0].device, type=click.Choice([p.device for p in ports]), ) @@ -376,5 +376,5 @@ def resolve_cortex_port(port: Optional[str], quiet: bool = False) -> Optional[st return None else: port = ports[0].device - logger(__name__).info('Automatically selected {}'.format(port)) + logger(__name__).info("Automatically selected {}".format(port)) return port diff --git a/pros/cli/compile_commands/intercept-cc.py b/pros/cli/compile_commands/intercept-cc.py index 66026e54..7c50b48d 100644 --- a/pros/cli/compile_commands/intercept-cc.py +++ b/pros/cli/compile_commands/intercept-cc.py @@ -1,4 +1,4 @@ from libscanbuild.intercept import intercept_compiler_wrapper -if __name__ == '__main__': +if __name__ == "__main__": intercept_compiler_wrapper() diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 8a8f32e7..e8ca8e6c 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -13,7 +13,7 @@ def conductor_cli(): pass -@conductor_cli.group(cls=PROSGroup, aliases=['cond', 'c', 'conduct'], short_help='Perform project management for PROS') +@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): """ @@ -26,9 +26,9 @@ def conductor(): @conductor.command( - aliases=['download'], - short_help='Fetch/Download a remote template', - context_settings={'ignore_unknown_options': True}, + aliases=["download"], + short_help="Fetch/Download a remote template", + context_settings={"ignore_unknown_options": True}, ) @template_query(required=True) @default_options @@ -51,80 +51,80 @@ def fetch(query: c.BaseTemplate): template_file = query.identifier elif os.path.exists(query.name) and query.version is None: template_file = query.name - elif query.metadata.get('origin', None) == 'local': - if 'location' not in query.metadata: - logger(__name__).error('--location option is required for the local depot. Specify --location ') - logger(__name__).debug(f'Query options provided: {query.metadata}') + elif query.metadata.get("origin", None) == "local": + if "location" not in query.metadata: + logger(__name__).error("--location option is required for the local depot. 
Specify --location ") + logger(__name__).debug(f"Query options provided: {query.metadata}") return -1 - template_file = query.metadata['location'] + template_file = query.metadata["location"] if template_file and ( - os.path.splitext(template_file)[1] in ['.zip'] or os.path.exists(os.path.join(template_file, 'template.pros')) + os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) ): template = ExternalTemplate(template_file) - query.metadata['location'] = template_file + query.metadata["location"] = template_file depot = c.LocalDepot() - logger(__name__).debug(f'Template file found: {template_file}') + logger(__name__).debug(f"Template file found: {template_file}") else: if template_file: - logger(__name__).debug(f'Template file exists but is not a valid template: {template_file}') + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") template = c.Conductor().resolve_template(query, allow_offline=False) - logger(__name__).debug(f'Template from resolved query: {template}') + logger(__name__).debug(f"Template from resolved query: {template}") if template is None: - logger(__name__).error(f'There are no templates matching {query}!') + logger(__name__).error(f"There are no templates matching {query}!") return -1 - depot = c.Conductor().get_depot(template.metadata['origin']) - logger(__name__).debug(f'Found depot: {depot}') + depot = c.Conductor().get_depot(template.metadata["origin"]) + logger(__name__).debug(f"Found depot: {depot}") # query.metadata contain all of the extra args that also go to the depot. There's no way for us to determine # whether the arguments are for the template or for the depot, so they share them - logger(__name__).debug(f'Additional depot and template args: {query.metadata}') + logger(__name__).debug(f"Additional depot and template args: {query.metadata}") c.Conductor().fetch_template(depot, template, **query.metadata) -@conductor.command(context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=True, help='Allow upgrading templates in a project') -@click.option('--install/--no-install', 'install_ok', default=True, help='Allow installing templates in a project') +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") @click.option( - '--download/--no-download', - 'download_ok', + "--download/--no-download", + "download_ok", default=True, - help='Allow downloading templates or only allow local templates', + help="Allow downloading templates or only allow local templates", ) @click.option( - '--upgrade-user-files/--no-upgrade-user-files', - 'force_user', + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", default=False, - help='Replace all user files in a template', + help="Replace all user files in a template", ) @click.option( - '--force', - 'force_system', + "--force", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 
'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @project_option() @template_query(required=True) @@ -139,31 +139,31 @@ def apply(project: c.Project, query: c.BaseTemplate, **kwargs): return c.Conductor().apply_template(project, identifier=query, **kwargs) -@conductor.command(aliases=['i', 'in'], context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @project_option() @template_query(required=True) @@ -179,40 +179,40 @@ def install(ctx: click.Context, **kwargs): return ctx.invoke(apply, install_ok=True, **kwargs) -@conductor.command(context_settings={'ignore_unknown_options': True}, aliases=['u']) -@click.option('--install/--no-install', 'install_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-apply', - 'force_apply', + "--force-apply", + "force_apply", default=False, is_flag=True, help="Force apply the template, disregarding if the template is already installed.", ) @click.option( - 
'--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @project_option() @template_query(required=False) @@ -227,25 +227,25 @@ def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwa analytics.send("upgrade-project") if not query.name: for template in project.templates.keys(): - click.secho(f'Upgrading {template}', color='yellow') + click.secho(f"Upgrading {template}", color="yellow") q = c.BaseTemplate.create_query( - name=template, target=project.target, supported_kernels=project.templates['kernel'].version + name=template, target=project.target, supported_kernels=project.templates["kernel"].version ) ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) else: ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) -@conductor.command('uninstall') -@click.option('--remove-user', is_flag=True, default=False, help='Also remove user files') +@conductor.command("uninstall") +@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") @click.option( - '--remove-empty-dirs/--no-remove-empty-dirs', - 'remove_empty_directories', + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", is_flag=True, default=True, - help='Remove empty directories when removing files', + help="Remove empty directories when removing files", ) -@click.option('--no-make-clean', is_flag=True, default=True, help='Do not run make clean after removing') +@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") @project_option() @template_query() @default_options @@ -270,51 +270,51 @@ def uninstall_template( project.compile(["clean"]) -@conductor.command('new-project', aliases=['new', 'create-project']) -@click.argument('path', type=click.Path()) -@click.argument('target', default=c.Conductor().default_target, type=click.Choice(['v5', 'cortex'])) -@click.argument('version', default='latest') -@click.option('--force-user', 'force_user', default=False, is_flag=True, help='Replace all user files in a template') +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") @click.option( - '--force-system', - '-f', - 'force_system', + "--force-system", + "-f", + "force_system", default=False, is_flag=True, help="Force all system files to be inserted into the project", ) @click.option( - '--force-refresh', + "--force-refresh", is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks', + help="Force update all remote depots, ignoring automatic update checks", ) 
@click.option( - '--no-default-libs', - 'no_default_libs', + "--no-default-libs", + "no_default_libs", default=False, is_flag=True, - help='Do not install any default libraries after creating the project.', + help="Do not install any default libraries after creating the project.", ) @click.option( - '--compile-after', is_flag=True, default=True, show_default=True, help='Compile the project after creation' + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" ) @click.option( - '--build-cache', + "--build-cache", is_flag=True, default=None, show_default=False, - help='Build compile commands cache after creation. Overrides --compile-after if both are specified.', + help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", ) @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='Create a project using the PROS 4 kernel', + help="Create a project using the PROS 4 kernel", ) @click.pass_context @default_options @@ -336,15 +336,15 @@ def new_project( Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more """ analytics.send("new-project") - version_source = version.lower() == 'latest' - if version.lower() == 'latest' or not version: - version = '>0' + version_source = version.lower() == "latest" + if version.lower() == "latest" or not version: + version = ">0" if not force_system and c.Project.find_project(path) is not None: logger(__name__).error( - 'A project already exists in this location at ' + "A project already exists in this location at " + c.Project.find_project(path) - + '! Delete it first. Are you creating a project in an existing one?', - extra={'sentry': False}, + + "! Delete it first. 
Are you creating a project in an existing one?", + extra={"sentry": False}, ) ctx.exit(-1) try: @@ -361,16 +361,16 @@ def new_project( no_default_libs=no_default_libs, **kwargs, ) - ui.echo('New PROS Project was created:', output_machine=False) + ui.echo("New PROS Project was created:", output_machine=False) ctx.invoke(info_project, project=project) if compile_after or build_cache: with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") exit_code = project.compile([], scan_build=build_cache) if exit_code != 0: - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") except Exception as e: pros.common.logger(__name__).exception(e) @@ -378,40 +378,40 @@ def new_project( @conductor.command( - 'query-templates', - aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates'], - context_settings={'ignore_unknown_options': True}, + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], + context_settings={"ignore_unknown_options": True}, ) @click.option( - '--allow-offline/--no-offline', - 'allow_offline', + "--allow-offline/--no-offline", + "allow_offline", default=True, show_default=True, - help='(Dis)allow offline templates in the listing', + help="(Dis)allow offline templates in the listing", ) @click.option( - '--allow-online/--no-online', - 'allow_online', + "--allow-online/--no-online", + "allow_online", default=True, show_default=True, - help='(Dis)allow online templates in the listing', + help="(Dis)allow online templates in the listing", ) @click.option( - '--force-refresh', + "--force-refresh", is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks', + help="Force update all remote depots, ignoring automatic update checks", ) -@click.option('--limit', type=int, default=15, help='The maximum number of displayed results for each library') +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") @click.option( - '--early-access/--disable-early-access', - '--early/--disable-early', - '-ea/-dea', - 'early_access', - '--beta/--disable-beta', + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", default=None, - help='View a list of early access templates', + help="View a list of early access templates", ) @template_query(required=False) @click.pass_context @@ -454,30 +454,30 @@ def query_templates( key = (template.identifier, template.origin) if key in render_templates: if isinstance(template, c.LocalTemplate): - render_templates[key]['local'] = True + render_templates[key]["local"] = True else: render_templates[key] = { - 'name': template.name, - 'version': template.version, - 'location': template.origin, - 'target': template.target, - 'local': isinstance(template, c.LocalTemplate), + "name": template.name, + "version": template.version, + "location": template.origin, + "target": template.target, + "local": isinstance(template, c.LocalTemplate), } import semantic_version as semver render_templates = sorted( - render_templates.values(), key=lambda k: (k['name'], semver.Version(k['version']), k['local']), reverse=True + 
render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True ) # Impose the output limit for each library's templates output_templates = [] - for _, g in groupby(render_templates, key=lambda t: t['name'] + t['target']): + for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): output_templates += list(g)[:limit] - ui.finalize('template-query', output_templates) + ui.finalize("template-query", output_templates) -@conductor.command('info-project') -@click.option('--ls-upgrades/--no-ls-upgrades', 'ls_upgrades', default=False) +@conductor.command("info-project") +@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) @project_option() @default_options def info_project(project: c.Project, ls_upgrades): @@ -492,7 +492,7 @@ def info_project(project: c.Project, ls_upgrades): report = ProjectReport(project) _conductor = c.Conductor() if ls_upgrades: - for template in report.project['templates']: + for template in report.project["templates"]: import semantic_version as semver templates = _conductor.resolve_templates( @@ -502,12 +502,12 @@ def info_project(project: c.Project, ls_upgrades): ) template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) - ui.finalize('project-report', report) + ui.finalize("project-report", report) -@conductor.command('add-depot') -@click.argument('name') -@click.argument('url') +@conductor.command("add-depot") +@click.argument("name") +@click.argument("url") @default_options def add_depot(name: str, url: str): """ @@ -521,8 +521,8 @@ def add_depot(name: str, url: str): ui.echo(f"Added depot {name} from {url}") -@conductor.command('remove-depot') -@click.argument('name') +@conductor.command("remove-depot") +@click.argument("name") @default_options def remove_depot(name: str): """ @@ -536,8 +536,8 @@ def remove_depot(name: str): ui.echo(f"Removed depot {name}") -@conductor.command('query-depots') -@click.option('--url', is_flag=True) +@conductor.command("query-depots") +@click.option("--url", is_flag=True) @default_options def query_depots(url: bool): """ @@ -547,4 +547,4 @@ def query_depots(url: bool): """ _conductor = c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo('\n'.join(_conductor.query_depots(url)) + "\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") diff --git a/pros/cli/conductor_utils.py b/pros/cli/conductor_utils.py index 7e148ed7..4f306030 100644 --- a/pros/cli/conductor_utils.py +++ b/pros/cli/conductor_utils.py @@ -17,28 +17,28 @@ from .conductor import conductor -@conductor.command('create-template', context_settings={'allow_extra_args': True, 'ignore_unknown_options': True}) -@click.argument('path', type=click.Path(exists=True)) -@click.argument('name') -@click.argument('version') +@conductor.command("create-template", context_settings={"allow_extra_args": True, "ignore_unknown_options": True}) +@click.argument("path", type=click.Path(exists=True)) +@click.argument("name") +@click.argument("version") @click.option( - '--system', 'system_files', multiple=True, type=click.Path(), help='Specify "system" files required by the template' + "--system", "system_files", multiple=True, type=click.Path(), help='Specify "system" files required by the template' ) @click.option( - '--user', - 'user_files', + "--user", + "user_files", multiple=True, type=click.Path(), - help='Specify files that are intended to be modified by users', + help="Specify files that are 
intended to be modified by users", ) -@click.option('--kernels', 'supported_kernels', help='Specify supported kernels') -@click.option('--target', type=click.Choice(['v5', 'cortex']), help='Specify the target platform (cortex or v5)') +@click.option("--kernels", "supported_kernels", help="Specify supported kernels") +@click.option("--target", type=click.Choice(["v5", "cortex"]), help="Specify the target platform (cortex or v5)") @click.option( - '--destination', + "--destination", type=click.Path(), - help='Specify an alternate destination for the created ZIP file or template descriptor', + help="Specify an alternate destination for the created ZIP file or template descriptor", ) -@click.option('--zip/--no-zip', 'do_zip', default=True, help='Create a ZIP file or create a template descriptor.') +@click.option("--zip/--no-zip", "do_zip", default=True, help="Create a ZIP file or create a template descriptor.") @default_options @click.pass_context def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): @@ -71,114 +71,114 @@ def create_template(ctx, path: str, destination: str, do_zip: bool, **kwargs): if project: project = c.Project(project) path = project.location - if not kwargs['supported_kernels'] and kwargs['name'] != 'kernel': - kwargs['supported_kernels'] = f'^{project.kernel}' - kwargs['target'] = project.target + if not kwargs["supported_kernels"] and kwargs["name"] != "kernel": + kwargs["supported_kernels"] = f"^{project.kernel}" + kwargs["target"] = project.target if not destination: if os.path.isdir(path): destination = path else: destination = os.path.dirname(path) - kwargs['system_files'] = list(kwargs['system_files']) - kwargs['user_files'] = list(kwargs['user_files']) - kwargs['metadata'] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} + kwargs["system_files"] = list(kwargs["system_files"]) + kwargs["user_files"] = list(kwargs["user_files"]) + kwargs["metadata"] = {ctx.args[i][2:]: ctx.args[i + 1] for i in range(0, int(len(ctx.args) / 2) * 2, 2)} def get_matching_files(globs: List[str]) -> Set[str]: matching_files: List[str] = [] _path = os.path.normpath(path) + os.path.sep for g in [g for g in globs if glob.has_magic(g)]: - files = glob.glob(f'{path}/{g}', recursive=True) + files = glob.glob(f"{path}/{g}", recursive=True) files = filter(lambda f: os.path.isfile(f), files) files = [os.path.normpath(os.path.normpath(f).split(_path)[-1]) for f in files] matching_files.extend(files) # matches things like src/opcontrol.{c,cpp} so that we can expand to src/opcontrol.c and src/opcontrol.cpp - pattern = re.compile(r'^([\w{}]+.){{((?:\w+,)*\w+)}}$'.format(os.path.sep.replace('\\', '\\\\'))) + pattern = re.compile(r"^([\w{}]+.){{((?:\w+,)*\w+)}}$".format(os.path.sep.replace("\\", "\\\\"))) for f in [os.path.normpath(f) for f in globs if not glob.has_magic(f)]: if re.match(pattern, f): matches = re.split(pattern, f) - logger(__name__).debug(f'Matches on {f}: {matches}') - matching_files.extend([f'{matches[1]}{ext}' for ext in matches[2].split(',')]) + logger(__name__).debug(f"Matches on {f}: {matches}") + matching_files.extend([f"{matches[1]}{ext}" for ext in matches[2].split(",")]) else: matching_files.append(f) matching_files: Set[str] = set(matching_files) return matching_files - matching_system_files: Set[str] = get_matching_files(kwargs['system_files']) - matching_user_files: Set[str] = get_matching_files(kwargs['user_files']) + matching_system_files: Set[str] = get_matching_files(kwargs["system_files"]) + 
matching_user_files: Set[str] = get_matching_files(kwargs["user_files"]) matching_system_files: Set[str] = matching_system_files - matching_user_files # exclude existing project.pros and template.pros from the template, # and name@*.zip so that we don't redundantly include ZIPs - exclude_files = {'project.pros', 'template.pros', *get_matching_files([f"{kwargs['name']}@*.zip"])} + exclude_files = {"project.pros", "template.pros", *get_matching_files([f"{kwargs['name']}@*.zip"])} if project: exclude_files = exclude_files.union(project.list_template_files()) matching_system_files = matching_system_files - exclude_files matching_user_files = matching_user_files - exclude_files def filename_remap(file_path: str) -> str: - if os.path.dirname(file_path) == 'bin': - return file_path.replace('bin', 'firmware', 1) + if os.path.dirname(file_path) == "bin": + return file_path.replace("bin", "firmware", 1) return file_path - kwargs['system_files'] = list(map(filename_remap, matching_system_files)) - kwargs['user_files'] = list(map(filename_remap, matching_user_files)) + kwargs["system_files"] = list(map(filename_remap, matching_system_files)) + kwargs["user_files"] = list(map(filename_remap, matching_user_files)) if do_zip: - if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != '.zip': - logger(__name__).error(f'{destination} must be a zip file or an existing directory.') + if not os.path.isdir(destination) and os.path.splitext(destination)[-1] != ".zip": + logger(__name__).error(f"{destination} must be a zip file or an existing directory.") return -1 with tempfile.TemporaryDirectory() as td: - template = ExternalTemplate(file=os.path.join(td, 'template.pros'), **kwargs) + template = ExternalTemplate(file=os.path.join(td, "template.pros"), **kwargs) template.save() if os.path.isdir(destination): - destination = os.path.join(destination, f'{template.identifier}.zip') - with zipfile.ZipFile(destination, mode='w') as z: - z.write(template.save_file, arcname='template.pros') + destination = os.path.join(destination, f"{template.identifier}.zip") + with zipfile.ZipFile(destination, mode="w") as z: + z.write(template.save_file, arcname="template.pros") for file in matching_user_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'U: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"U: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) for file in matching_system_files: source_path = os.path.join(path, file) dest_file = filename_remap(file) if os.path.exists(source_path): - ui.echo(f'S: {file}' + (f' -> {dest_file}' if file != dest_file else '')) - z.write(f'{path}/{file}', arcname=dest_file) + ui.echo(f"S: {file}" + (f" -> {dest_file}" if file != dest_file else "")) + z.write(f"{path}/{file}", arcname=dest_file) else: if os.path.isdir(destination): - destination = os.path.join(destination, 'template.pros') + destination = os.path.join(destination, "template.pros") template = ExternalTemplate(file=destination, **kwargs) template.save() @conductor.command( - 'purge-template', help='Purge template(s) from the local cache', context_settings={'ignore_unknown_options': True} + "purge-template", help="Purge template(s) from the local cache", context_settings={"ignore_unknown_options": True} ) -@click.option('-f', '--force', is_flag=True, default=False, help='Do not prompt for removal of multiple 
templates') +@click.option("-f", "--force", is_flag=True, default=False, help="Do not prompt for removal of multiple templates") @template_query(required=False) @default_options def purge_template(query: c.BaseTemplate, force): analytics.send("purge-template") if not query: force = click.confirm( - 'Are you sure you want to remove all cached templates? This action is non-reversable!', abort=True + "Are you sure you want to remove all cached templates? This action is non-reversable!", abort=True ) cond = c.Conductor() templates = cond.resolve_templates(query, allow_online=False) beta_templates = cond.resolve_templates(query, allow_online=False, beta=True) if len(templates) == 0: - click.echo('No matching templates were found matching the spec.') + click.echo("No matching templates were found matching the spec.") return 0 t_list = [t.identifier for t in templates] + [t.identifier for t in beta_templates] - click.echo(f'The following template(s) will be removed {t_list}') + click.echo(f"The following template(s) will be removed {t_list}") if len(templates) > 1 and not force: - click.confirm(f'Are you sure you want to remove multiple templates?', abort=True) + click.confirm(f"Are you sure you want to remove multiple templates?", abort=True) for template in templates: if isinstance(template, c.LocalTemplate): cond.purge_template(template) diff --git a/pros/cli/interactive.py b/pros/cli/interactive.py index c36b72a2..465f716f 100644 --- a/pros/cli/interactive.py +++ b/pros/cli/interactive.py @@ -21,7 +21,7 @@ def interactive(): @interactive.command() -@click.option('--directory', default=os.path.join(os.path.expanduser('~'), 'My PROS Project')) +@click.option("--directory", default=os.path.join(os.path.expanduser("~"), "My PROS Project")) @default_options def new_project(directory): from pros.common.ui.interactive.renderers import MachineOutputRenderer diff --git a/pros/cli/main.py b/pros/cli/main.py index 1f539b35..2209ab1a 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -26,53 +26,53 @@ # Setup analytics first because it is used by other files -if sys.platform == 'win32': +if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) root_sources = [ - 'build', - 'conductor', - 'conductor_utils', - 'terminal', - 'upload', - 'v5_utils', - 'misc_commands', # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade - 'interactive', - 'user_script', + "build", + "conductor", + "conductor_utils", + "terminal", + "upload", + "v5_utils", + "misc_commands", # misc_commands must be after upload so that "pros u" is an alias for upload, not upgrade + "interactive", + "user_script", ] -if getattr(sys, 'frozen', False): +if getattr(sys, "frozen", False): exe_file = sys.executable else: exe_file = __file__ -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): - root_sources.append('test') +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): + root_sources.append("test") -if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, '.git')): +if os.path.exists(os.path.join(os.path.dirname(exe_file), os.pardir, os.pardir, ".git")): import pros.cli.test for root_source in root_sources: - __import__(f'pros.cli.{root_source}') + __import__(f"pros.cli.{root_source}") def main(): try: ctx_obj = {} click_handler = pros.common.ui.log.PROSLogHandler(ctx_obj=ctx_obj) - ctx_obj['click_handler'] = click_handler + 
ctx_obj["click_handler"] = click_handler formatter = pros.common.ui.log.PROSLogFormatter( - '%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}'.format( + "%(levelname)s - %(name)s:%(funcName)s - %(message)s - pros-cli version:{version}".format( version=get_version() ), ctx_obj, ) click_handler.setFormatter(formatter) logging.basicConfig(level=logging.WARNING, handlers=[click_handler]) - cli.main(prog_name='pros', obj=ctx_obj, windows_expand_args=False) + cli.main(prog_name="pros", obj=ctx_obj, windows_expand_args=False) except KeyboardInterrupt: - click.echo('Aborted!') + click.echo("Aborted!") except Exception as e: logger(__name__).exception(e) @@ -81,10 +81,10 @@ def version(ctx: click.Context, param, value): if not value: return ctx.ensure_object(dict) - if ctx.obj.get('machine_output', False): + if ctx.obj.get("machine_output", False): ui.echo(get_version()) else: - ui.echo('pros, version {}'.format(get_version())) + ui.echo("pros, version {}".format(get_version())) ctx.exit(0) @@ -98,24 +98,24 @@ def use_analytics(ctx: click.Context, param, value): touse = False else: ui.echo( - 'Invalid argument provided for \'--use-analytics\'. Try \'--use-analytics=False\' or \'--use-analytics=True\'' + "Invalid argument provided for '--use-analytics'. Try '--use-analytics=False' or '--use-analytics=True'" ) ctx.exit(0) ctx.ensure_object(dict) analytics.set_use(touse) - ui.echo('Analytics set to : {}'.format(analytics.useAnalytics)) + ui.echo("Analytics set to : {}".format(analytics.useAnalytics)) ctx.exit(0) -@click.command('pros', cls=PROSCommandCollection, sources=root_commands) +@click.command("pros", cls=PROSCommandCollection, sources=root_commands) @click.pass_context @default_options @click.option( - '--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, callback=version + "--version", help="Displays version and exits.", is_flag=True, expose_value=False, is_eager=True, callback=version ) @click.option( - '--use-analytics', - help='Set analytics usage (True/False).', + "--use-analytics", + help="Set analytics usage (True/False).", type=str, expose_value=False, is_eager=True, @@ -131,5 +131,5 @@ def after_command(): analytics.process_requests() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pros/cli/misc_commands.py b/pros/cli/misc_commands.py index d2e62c52..36fa6dd5 100644 --- a/pros/cli/misc_commands.py +++ b/pros/cli/misc_commands.py @@ -10,13 +10,13 @@ def misc_commands_cli(): @misc_commands_cli.command() @click.option( - '--force-check', default=False, is_flag=True, help='Force check for updates, disregarding auto-check frequency' + "--force-check", default=False, is_flag=True, help="Force check for updates, disregarding auto-check frequency" ) @click.option( - '--no-install', + "--no-install", default=False, is_flag=True, - help='Only check if a new version is available, do not attempt to install', + help="Only check if a new version is available, do not attempt to install", ) @default_options def upgrade(force_check, no_install): @@ -26,7 +26,7 @@ def upgrade(force_check, no_install): with ui.Notification(): ui.echo( 'The "pros upgrade" command is currently non-functioning. Did you mean to run "pros c upgrade"?', - color='yellow', + color="yellow", ) return # Dead code below @@ -39,15 +39,15 @@ def upgrade(force_check, no_install): ui.logger(__name__).debug(repr(manifest)) if manager.has_stale_manifest: ui.logger(__name__).error( - 'Failed to get latest upgrade information. 
' + 'Try running with --debug for more information' + "Failed to get latest upgrade information. " + "Try running with --debug for more information" ) return -1 if not manager.needs_upgrade: - ui.finalize('upgradeInfo', 'PROS CLI is up to date') + ui.finalize("upgradeInfo", "PROS CLI is up to date") else: - ui.finalize('upgradeInfo', manifest) + ui.finalize("upgradeInfo", manifest) if not no_install: if not manager.can_perform_upgrade: - ui.logger(__name__).error(f'This manifest cannot perform the upgrade.') + ui.logger(__name__).error(f"This manifest cannot perform the upgrade.") return -3 - ui.finalize('upgradeComplete', manager.perform_upgrade()) + ui.finalize("upgradeComplete", manager.perform_upgrade()) diff --git a/pros/cli/terminal.py b/pros/cli/terminal.py index 5071110b..ac6f25a0 100644 --- a/pros/cli/terminal.py +++ b/pros/cli/terminal.py @@ -22,25 +22,25 @@ def terminal_cli(): @terminal_cli.command() @default_options -@click.argument('port', default='default') +@click.argument("port", default="default") @click.option( - '--backend', - type=click.Choice(['share', 'solo']), - default='solo', - help='Backend port of the terminal. See above for details', + "--backend", + type=click.Choice(["share", "solo"]), + default="solo", + help="Backend port of the terminal. See above for details", ) -@click.option('--raw', is_flag=True, default=False, help='Don\'t process the data.') -@click.option('--hex', is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") +@click.option("--raw", is_flag=True, default=False, help="Don't process the data.") +@click.option("--hex", is_flag=True, default=False, help="Display data as hexadecimal values. Unaffected by --raw") @click.option( - '--ports', + "--ports", nargs=2, type=int, default=(None, None), help='Specify 2 ports for the "share" backend. 
The default option deterministically selects ports ' - 'based on the serial port name', + "based on the serial port name", ) -@click.option('--banner/--no-banner', 'request_banner', default=True) -@click.option('--output', nargs=1, type=str, is_eager=True, help='Redirect terminal output to a file', default=None) +@click.option("--banner/--no-banner", "request_banner", default=True) +@click.option("--output", nargs=1, type=str, is_eager=True, help="Redirect terminal output to a file", default=None) def terminal(port: str, backend: str, **kwargs): """ Open a terminal to a serial port @@ -57,31 +57,31 @@ def terminal(port: str, backend: str, **kwargs): from pros.serial.terminal import Terminal is_v5_user_joystick = False - if port == 'default': + if port == "default": project_path = c.Project.find_project(os.getcwd()) if project_path is None: - v5_port, is_v5_user_joystick = resolve_v5_port(None, 'user', quiet=True) + v5_port, is_v5_user_joystick = resolve_v5_port(None, "user", quiet=True) cortex_port = resolve_cortex_port(None, quiet=True) if ((v5_port is None) ^ (cortex_port is None)) or (v5_port is not None and v5_port == cortex_port): port = v5_port or cortex_port else: - raise click.UsageError('You must be in a PROS project directory to enable default port selecting') + raise click.UsageError("You must be in a PROS project directory to enable default port selecting") else: project = c.Project(project_path) port = project.target - if port == 'v5': + if port == "v5": port = None - port, is_v5_user_joystick = resolve_v5_port(port, 'user') - elif port == 'cortex': + port, is_v5_user_joystick = resolve_v5_port(port, "user") + elif port == "cortex": port = None port = resolve_cortex_port(port) - kwargs['raw'] = True + kwargs["raw"] = True if not port: return -1 - if backend == 'share': - raise NotImplementedError('Share backend is not yet implemented') + if backend == "share": + raise NotImplementedError("Share backend is not yet implemented") # ser = SerialSharePort(port) elif is_v5_user_joystick: logger(__name__).debug("it's a v5 joystick") @@ -89,16 +89,16 @@ def terminal(port: str, backend: str, **kwargs): else: logger(__name__).debug("not a v5 joystick") ser = DirectPort(port) - if kwargs.get('raw', False): + if kwargs.get("raw", False): device = devices.RawStreamDevice(ser) else: device = devices.vex.V5UserDevice(ser) - term = Terminal(device, request_banner=kwargs.pop('request_banner', True)) + term = Terminal(device, request_banner=kwargs.pop("request_banner", True)) class TerminalOutput(object): def __init__(self, file): self.terminal = sys.stdout - self.log = open(file, 'a') + self.log = open(file, "a") def write(self, data): self.terminal.write(data) @@ -111,12 +111,12 @@ def end(self): self.log.close() output = None - if kwargs.get('output', None): - output_file = kwargs['output'] - output = TerminalOutput(f'{output_file}') + if kwargs.get("output", None): + output_file = kwargs["output"] + output = TerminalOutput(f"{output_file}") term.console.output = output sys.stdout = output - logger(__name__).info(f'Redirecting Terminal Output to File: {output_file}') + logger(__name__).info(f"Redirecting Terminal Output to File: {output_file}") else: sys.stdout = sys.__stdout__ @@ -130,4 +130,4 @@ def end(self): if output: output.end() term.join() - logger(__name__).info('CLI Main Thread Dying') + logger(__name__).info("CLI Main Thread Dying") diff --git a/pros/cli/upload.py b/pros/cli/upload.py index f69c87bd..1ae3fcc3 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -13,105 
+13,105 @@ def upload_cli(): pass -@upload_cli.command(aliases=['u']) +@upload_cli.command(aliases=["u"]) @click.option( - '--target', - type=click.Choice(['v5', 'cortex']), + "--target", + type=click.Choice(["v5", "cortex"]), default=None, required=False, - help='Specify the target microcontroller. Overridden when a PROS project is specified.', + help="Specify the target microcontroller. Overridden when a PROS project is specified.", ) -@click.argument('path', type=click.Path(exists=True), default=None, required=False) -@click.argument('port', type=str, default=None, required=False) +@click.argument("path", type=click.Path(exists=True), default=None, required=False) +@click.argument("port", type=str, default=None, required=False) @project_option(required=False, allow_none=True) @click.option( - '--run-after/--no-run-after', - 'run_after', + "--run-after/--no-run-after", + "run_after", default=None, - help='Immediately run the uploaded program.', + help="Immediately run the uploaded program.", cls=PROSDeprecated, - replacement='after', + replacement="after", ) @click.option( - '--run-screen/--execute', - 'run_screen', + "--run-screen/--execute", + "run_screen", default=None, - help='Display run program screen on the brain after upload.', + help="Display run program screen on the brain after upload.", cls=PROSDeprecated, - replacement='after', + replacement="after", ) @click.option( - '-af', - '--after', - type=click.Choice(['run', 'screen', 'none']), + "-af", + "--after", + type=click.Choice(["run", "screen", "none"]), default=None, - help='Action to perform on the brain after upload.', + help="Action to perform on the brain after upload.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) -@click.option('--quirk', type=int, default=0) +@click.option("--quirk", type=int, default=0) @click.option( - '--name', - 'remote_name', + "--name", + "remote_name", type=str, default=None, required=False, - help='Remote program name.', + help="Remote program name.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--slot', + "--slot", default=None, type=click.IntRange(min=1, max=8), - help='Program slot on the GUI.', + help="Program slot on the GUI.", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--icon', - type=click.Choice(['pros', 'pizza', 'planet', 'alien', 'ufo', 'robot', 'clawbot', 'question', 'X', 'power']), - default='pros', + "--icon", + type=click.Choice(["pros", "pizza", "planet", "alien", "ufo", "robot", "clawbot", "question", "X", "power"]), + default="pros", help="Change Program's icon on the V5 Brain", cls=PROSOption, - group='V5 Options', + group="V5 Options", ) @click.option( - '--program-version', + "--program-version", default=None, type=str, - help='Specify version metadata for program.', + help="Specify version metadata for program.", cls=PROSOption, - group='V5 Options', + group="V5 Options", hidden=True, ) @click.option( - '--ini-config', + "--ini-config", type=click.Path(exists=True), default=None, - help='Specify a program configuration file.', + help="Specify a program configuration file.", cls=PROSOption, - group='V5 Options', + group="V5 Options", hidden=True, ) @click.option( - '--compress-bin/--no-compress-bin', - 'compress_bin', + "--compress-bin/--no-compress-bin", + "compress_bin", cls=PROSOption, - group='V5 Options', + group="V5 Options", default=True, - help='Compress the program binary before uploading.', + help="Compress the program binary before uploading.", ) @click.option( - 
'--description', + "--description", default="Made with PROS", type=str, cls=PROSOption, - group='V5 Options', - help='Change the description displayed for the program.', + group="V5 Options", + help="Change the description displayed for the program.", ) @click.option( - '--name', default=None, type=str, cls=PROSOption, group='V5 Options', help='Change the name of the program.' + "--name", default=None, type=str, cls=PROSOption, group="V5 Options", help="Change the name of the program." ) @default_options def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwargs): @@ -128,110 +128,110 @@ def upload(path: Optional[str], project: Optional[c.Project], port: str, **kwarg import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort - kwargs['ide_version'] = project.kernel if not project == None else "None" - kwargs['ide'] = 'PROS' + kwargs["ide_version"] = project.kernel if not project == None else "None" + kwargs["ide"] = "PROS" if path is None or os.path.isdir(path): if project is None: project_path = c.Project.find_project(path or os.getcwd()) if project_path is None: - raise click.UsageError('Specify a file to upload or set the cwd inside a PROS project') + raise click.UsageError("Specify a file to upload or set the cwd inside a PROS project") project = c.Project(project_path) path = os.path.join(project.location, project.output) - if project.target == 'v5' and not kwargs['remote_name']: - kwargs['remote_name'] = project.name + if project.target == "v5" and not kwargs["remote_name"]: + kwargs["remote_name"] = project.name # apply upload_options as a template options = dict(**project.upload_options) - if 'port' in options and port is None: - port = options.get('port', None) - if 'slot' in options and kwargs.get('slot', None) is None: - kwargs.pop('slot') - elif kwargs.get('slot', None) is None: - kwargs['slot'] = 1 - if 'icon' in options and kwargs.get('icon', 'pros') == 'pros': - kwargs.pop('icon') - if 'after' in options and kwargs.get('after', 'screen') is None: - kwargs.pop('after') + if "port" in options and port is None: + port = options.get("port", None) + if "slot" in options and kwargs.get("slot", None) is None: + kwargs.pop("slot") + elif kwargs.get("slot", None) is None: + kwargs["slot"] = 1 + if "icon" in options and kwargs.get("icon", "pros") == "pros": + kwargs.pop("icon") + if "after" in options and kwargs.get("after", "screen") is None: + kwargs.pop("after") options.update(kwargs) kwargs = options - kwargs['target'] = project.target # enforce target because uploading to the wrong uC is VERY bad - if 'program-version' in kwargs: - kwargs['version'] = kwargs['program-version'] - if 'remote_name' not in kwargs: - kwargs['remote_name'] = project.name + kwargs["target"] = project.target # enforce target because uploading to the wrong uC is VERY bad + if "program-version" in kwargs: + kwargs["version"] = kwargs["program-version"] + if "remote_name" not in kwargs: + kwargs["remote_name"] = project.name name_to_file = { - 'pros': 'USER902x.bmp', - 'pizza': 'USER003x.bmp', - 'planet': 'USER013x.bmp', - 'alien': 'USER027x.bmp', - 'ufo': 'USER029x.bmp', - 'clawbot': 'USER010x.bmp', - 'robot': 'USER011x.bmp', - 'question': 'USER002x.bmp', - 'power': 'USER012x.bmp', - 'X': 'USER001x.bmp', + "pros": "USER902x.bmp", + "pizza": "USER003x.bmp", + "planet": "USER013x.bmp", + "alien": "USER027x.bmp", + "ufo": "USER029x.bmp", + "clawbot": "USER010x.bmp", + "robot": "USER011x.bmp", + "question": "USER002x.bmp", + "power": "USER012x.bmp", + "X": 
"USER001x.bmp", } - kwargs['icon'] = name_to_file[kwargs['icon']] - if 'target' not in kwargs or kwargs['target'] is None: - logger(__name__).debug(f'Target not specified. Arguments provided: {kwargs}') - raise click.UsageError('Target not specified. specify a project (using the file argument) or target manually') - if kwargs['target'] == 'v5': - port = resolve_v5_port(port, 'system')[0] - elif kwargs['target'] == 'cortex': + kwargs["icon"] = name_to_file[kwargs["icon"]] + if "target" not in kwargs or kwargs["target"] is None: + logger(__name__).debug(f"Target not specified. Arguments provided: {kwargs}") + raise click.UsageError("Target not specified. specify a project (using the file argument) or target manually") + if kwargs["target"] == "v5": + port = resolve_v5_port(port, "system")[0] + elif kwargs["target"] == "cortex": port = resolve_cortex_port(port) else: logger(__name__).debug(f"Invalid target provided: {kwargs['target']}") logger(__name__).debug('Target should be one of ("v5" or "cortex").') if not port: - raise dont_send(click.UsageError('No port provided or located. Make sure to specify --target if needed.')) - if kwargs['target'] == 'v5': - kwargs['remote_name'] = kwargs['name'] if kwargs.get("name", None) else kwargs['remote_name'] - if kwargs['remote_name'] is None: - kwargs['remote_name'] = os.path.splitext(os.path.basename(path))[0] - kwargs['remote_name'] = kwargs['remote_name'].replace('@', '_') - kwargs['slot'] -= 1 + raise dont_send(click.UsageError("No port provided or located. Make sure to specify --target if needed.")) + if kwargs["target"] == "v5": + kwargs["remote_name"] = kwargs["name"] if kwargs.get("name", None) else kwargs["remote_name"] + if kwargs["remote_name"] is None: + kwargs["remote_name"] = os.path.splitext(os.path.basename(path))[0] + kwargs["remote_name"] = kwargs["remote_name"].replace("@", "_") + kwargs["slot"] -= 1 action_to_kwarg = { - 'run': vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, - 'screen': vex.V5Device.FTCompleteOptions.RUN_SCREEN, - 'none': vex.V5Device.FTCompleteOptions.DONT_RUN, + "run": vex.V5Device.FTCompleteOptions.RUN_IMMEDIATELY, + "screen": vex.V5Device.FTCompleteOptions.RUN_SCREEN, + "none": vex.V5Device.FTCompleteOptions.DONT_RUN, } - after_upload_default = 'screen' + after_upload_default = "screen" # Determine which FTCompleteOption to assign to run_after - if kwargs['after'] == None: - kwargs['after'] = after_upload_default - if kwargs['run_after']: - kwargs['after'] = 'run' - elif kwargs['run_screen'] == False and not kwargs['run_after']: - kwargs['after'] = 'none' - kwargs['run_after'] = action_to_kwarg[kwargs['after']] - kwargs.pop('run_screen') - kwargs.pop('after') - elif kwargs['target'] == 'cortex': + if kwargs["after"] == None: + kwargs["after"] = after_upload_default + if kwargs["run_after"]: + kwargs["after"] = "run" + elif kwargs["run_screen"] == False and not kwargs["run_after"]: + kwargs["after"] = "none" + kwargs["run_after"] = action_to_kwarg[kwargs["after"]] + kwargs.pop("run_screen") + kwargs.pop("after") + elif kwargs["target"] == "cortex": pass - logger(__name__).debug('Arguments: {}'.format(str(kwargs))) + logger(__name__).debug("Arguments: {}".format(str(kwargs))) # Do the actual uploading! 
try: ser = DirectPort(port) device = None - if kwargs['target'] == 'v5': + if kwargs["target"] == "v5": device = vex.V5Device(ser) - elif kwargs['target'] == 'cortex': + elif kwargs["target"] == "cortex": device = vex.CortexDevice(ser).get_connected_device() if project is not None: device.upload_project(project, **kwargs) else: - with click.open_file(path, mode='rb') as pf: + with click.open_file(path, mode="rb") as pf: device.write_program(pf, **kwargs) except Exception as e: logger(__name__).exception(e, exc_info=True) exit(1) -@upload_cli.command('lsusb', aliases=['ls-usb', 'ls-devices', 'lsdev', 'list-usb', 'list-devices']) -@click.option('--target', type=click.Choice(['v5', 'cortex']), default=None, required=False) +@upload_cli.command("lsusb", aliases=["ls-usb", "ls-devices", "lsdev", "list-usb", "list-devices"]) +@click.option("--target", type=click.Choice(["v5", "cortex"]), default=None, required=False) @default_options def ls_usb(target): """ @@ -243,34 +243,34 @@ def ls_usb(target): class PortReport(object): def __init__(self, header: str, ports: List[Any], machine_header: Optional[str] = None): self.header = header - self.ports = [{'device': p.device, 'desc': p.description} for p in ports] + self.ports = [{"device": p.device, "desc": p.description} for p in ports] self.machine_header = machine_header or header def __getstate__(self): - return {'device_type': self.machine_header, 'devices': self.ports} + return {"device_type": self.machine_header, "devices": self.ports} def __str__(self): if len(self.ports) == 0: - return f'There are no connected {self.header}' + return f"There are no connected {self.header}" else: port_str = "\n".join([f"{p['device']} - {p['desc']}" for p in self.ports]) - return f'{self.header}:\n{port_str}' + return f"{self.header}:\n{port_str}" result = [] - if target == 'v5' or target is None: - ports = find_v5_ports('system') - result.append(PortReport('VEX EDR V5 System Ports', ports, 'v5/system')) + if target == "v5" or target is None: + ports = find_v5_ports("system") + result.append(PortReport("VEX EDR V5 System Ports", ports, "v5/system")) - ports = find_v5_ports('User') - result.append(PortReport('VEX EDR V5 User ports', ports, 'v5/user')) - if target == 'cortex' or target is None: + ports = find_v5_ports("User") + result.append(PortReport("VEX EDR V5 User ports", ports, "v5/user")) + if target == "cortex" or target is None: ports = find_cortex_ports() - result.append(PortReport('VEX EDR Cortex Microcontroller Ports', ports, 'cortex')) + result.append(PortReport("VEX EDR Cortex Microcontroller Ports", ports, "cortex")) - ui.finalize('lsusb', result) + ui.finalize("lsusb", result) -@upload_cli.command('upload-terminal', aliases=['ut'], hidden=True) +@upload_cli.command("upload-terminal", aliases=["ut"], hidden=True) @shadow_command(upload) @click.pass_context def make_upload_terminal(ctx, **upload_kwargs): diff --git a/pros/cli/user_script.py b/pros/cli/user_script.py index a9057496..e2e26d30 100644 --- a/pros/cli/user_script.py +++ b/pros/cli/user_script.py @@ -11,8 +11,8 @@ def user_script_cli(): pass -@user_script_cli.command(short_help='Run user script files', hidden=True) -@click.argument('script_file') +@user_script_cli.command(short_help="Run user script files", hidden=True) +@click.argument("script_file") @default_options def user_script(script_file): """ @@ -24,6 +24,6 @@ def user_script(script_file): package_name = os.path.splitext(os.path.split(script_file)[0])[0] package_path = os.path.abspath(script_file) - ui.echo(f'Loading {package_name} 
from {package_path}') + ui.echo(f"Loading {package_name} from {package_path}") spec = importlib.util.spec_from_file_location(package_name, package_path) spec.loader.load_module() diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index f69ecfa6..efb51208 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -8,14 +8,14 @@ def v5_utils_cli(): pass -@v5_utils_cli.group(cls=PROSGroup, help='Utilities for managing the VEX V5') +@v5_utils_cli.group(cls=PROSGroup, help="Utilities for managing the VEX V5") @default_options def v5(): pass @v5.command() -@click.argument('port', required=False, default=None) +@click.argument("port", required=False, default=None) @default_options def status(port: str): """ @@ -25,7 +25,7 @@ def status(port: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -34,17 +34,17 @@ def status(port: str): if ismachineoutput(): print(device.status) else: - print('Connected to V5 on {}'.format(port)) - print('System version:', device.status['system_version']) - print('CPU0 F/W version:', device.status['cpu0_version']) - print('CPU1 SDK version:', device.status['cpu1_version']) - print('System ID: 0x{:x}'.format(device.status['system_id'])) + print("Connected to V5 on {}".format(port)) + print("System version:", device.status["system_version"]) + print("CPU0 F/W version:", device.status["cpu0_version"]) + print("CPU1 SDK version:", device.status["cpu1_version"]) + print("System ID: 0x{:x}".format(device.status["system_id"])) -@v5.command('ls-files') -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--options', type=int, default=0, cls=PROSOption, hidden=True) -@click.argument('port', required=False, default=None) +@v5.command("ls-files") +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--options", type=int, default=0, cls=PROSOption, hidden=True) +@click.argument("port", required=False, default=None) @default_options def ls_files(port: str, vid: int, options: int): """ @@ -54,7 +54,7 @@ def ls_files(port: str, vid: int, options: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -66,11 +66,11 @@ def ls_files(port: str, vid: int, options: int): @v5.command(hidden=True) -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.argument('outfile', required=False, default=click.get_binary_stream('stdout'), type=click.File('wb')) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--source', type=click.Choice(['ddr', 'flash']), default='flash', cls=PROSOption, hidden=True) +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.argument("outfile", required=False, default=click.get_binary_stream("stdout"), type=click.File("wb")) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--source", type=click.Choice(["ddr", "flash"]), default="flash", cls=PROSOption, hidden=True) @default_options def read_file(file_name: str, port: str, vid: int, source: str): """ @@ -80,23 +80,23 @@ def read_file(file_name: str, port: str, vid: int, source: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = 
resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.read_file(file=click.get_binary_stream('stdout'), remote_file=file_name, vid=vid, target=source) + device.read_file(file=click.get_binary_stream("stdout"), remote_file=file_name, vid=vid, target=source) @v5.command(hidden=True) -@click.argument('file', type=click.File('rb')) -@click.argument('port', required=False, default=None) -@click.option('--addr', type=int, default=0x03800000, required=False) -@click.option('--remote-file', required=False, default=None) -@click.option('--run-after/--no-run-after', 'run_after', default=False) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) -@click.option('--target', type=click.Choice(['ddr', 'flash']), default='flash') +@click.argument("file", type=click.File("rb")) +@click.argument("port", required=False, default=None) +@click.option("--addr", type=int, default=0x03800000, required=False) +@click.option("--remote-file", required=False, default=None) +@click.option("--run-after/--no-run-after", "run_after", default=False) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) +@click.option("--target", type=click.Choice(["ddr", "flash"]), default="flash") @default_options def write_file(file, port: str, remote_file: str, **kwargs): """ @@ -106,7 +106,7 @@ def write_file(file, port: str, remote_file: str, **kwargs): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -115,16 +115,16 @@ def write_file(file, port: str, remote_file: str, **kwargs): device.write_file(file=file, remote_file=remote_file or os.path.basename(file.name), **kwargs) -@v5.command('rm-file') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("rm-file") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @click.option( - '--erase-all/--erase-only', - 'erase_all', + "--erase-all/--erase-only", + "erase_all", default=False, show_default=True, - help='Erase all files matching base name.', + help="Erase all files matching base name.", ) @default_options def rm_file(file_name: str, port: str, vid: int, erase_all: bool): @@ -135,7 +135,7 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -144,10 +144,10 @@ def rm_file(file_name: str, port: str, vid: int, erase_all: bool): device.erase_file(file_name, vid=vid, erase_all=erase_all) -@v5.command('cat-metadata') -@click.argument('file_name') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("cat-metadata") +@click.argument("file_name") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def cat_metadata(file_name: str, port: str, vid: int): """ @@ -157,7 +157,7 @@ def cat_metadata(file_name: str, port: str, vid: int): from pros.serial.devices.vex import V5Device from 
pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -166,10 +166,10 @@ def cat_metadata(file_name: str, port: str, vid: int): print(device.get_file_metadata_by_name(file_name, vid=vid)) -@v5.command('rm-program') -@click.argument('slot') -@click.argument('port', type=int, required=False, default=None) -@click.option('--vid', type=int, default=1, cls=PROSOption, hidden=True) +@v5.command("rm-program") +@click.argument("slot") +@click.argument("port", type=int, required=False, default=None) +@click.option("--vid", type=int, default=1, cls=PROSOption, hidden=True) @default_options def rm_program(slot: int, port: str, vid: int): """ @@ -178,20 +178,20 @@ def rm_program(slot: int, port: str, vid: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 - base_name = f'slot_{slot}' + base_name = f"slot_{slot}" ser = DirectPort(port) device = V5Device(ser) - device.erase_file(f'{base_name}.ini', vid=vid) - device.erase_file(f'{base_name}.bin', vid=vid) + device.erase_file(f"{base_name}.ini", vid=vid) + device.erase_file(f"{base_name}.bin", vid=vid) -@v5.command('rm-all') -@click.argument('port', required=False, default=None) -@click.option('--vid', type=int, default=1, hidden=True, cls=PROSOption) +@v5.command("rm-all") +@click.argument("port", required=False, default=None) +@click.option("--vid", type=int, default=1, hidden=True, cls=PROSOption) @default_options def rm_all(port: str, vid: int): """ @@ -201,7 +201,7 @@ def rm_all(port: str, vid: int): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 @@ -210,14 +210,14 @@ def rm_all(port: str, vid: int): c = device.get_dir_count(vid=vid) files = [] for i in range(0, c): - files.append(device.get_file_metadata_by_idx(i)['filename']) + files.append(device.get_file_metadata_by_idx(i)["filename"]) for file in files: device.erase_file(file, vid=vid) -@v5.command(short_help='Run a V5 Program') -@click.argument('slot', required=False, default=1, type=click.IntRange(1, 8)) -@click.argument('port', required=False, default=None) +@v5.command(short_help="Run a V5 Program") +@click.argument("slot", required=False, default=1, type=click.IntRange(1, 8)) +@click.argument("port", required=False, default=None) @default_options def run(slot: str, port: str): """ @@ -227,13 +227,13 @@ def run(slot: str, port: str): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - file = f'slot_{slot}.bin' + file = f"slot_{slot}.bin" import re - if not re.match(r'[\w\.]{1,24}', file): - logger(__name__).error('file must be a valid V5 filename') + if not re.match(r"[\w\.]{1,24}", file): + logger(__name__).error("file must be a valid V5 filename") return 1 - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -241,8 +241,8 @@ def run(slot: str, port: str): device.execute_program_file(file, run=True) -@v5.command(short_help='Stop a V5 Program') -@click.argument('port', required=False, default=None) +@v5.command(short_help="Stop a V5 Program") +@click.argument("port", required=False, default=None) @default_options def stop(port: str): """ @@ -253,18 +253,18 @@ def stop(port: str): 
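Each v5 utility command touched above repeats the same setup before issuing its device call: resolve the V5 system port, wrap it in a DirectPort, and construct a V5Device. A condensed sketch of that shared boilerplate follows; the helper name open_v5_device is invented for illustration, and the resolve_v5_port import path is assumed from the surrounding CLI modules (pros/cli/common.py).

# Illustrative helper capturing the port-resolution pattern repeated in the commands above.
from pros.cli.common import resolve_v5_port
from pros.serial.devices.vex import V5Device
from pros.serial.ports import DirectPort

def open_v5_device(port=None):
    # resolve_v5_port returns a (port_name, is_user_joystick) tuple;
    # port_name is falsy when no V5 system port could be located.
    port = resolve_v5_port(port, "system")[0]
    if not port:
        return None
    return V5Device(DirectPort(port))

# With it, a command body like `status` reduces to roughly:
#     device = open_v5_device(port)
#     print(device.status if device else "No V5 port found")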
from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) device = V5Device(ser) - device.execute_program_file('', run=False) + device.execute_program_file("", run=False) -@v5.command(short_help='Take a screen capture of the display') -@click.argument('file_name', required=False, default=None) -@click.argument('port', required=False, default=None) -@click.option('--force', is_flag=True, type=bool, default=False) +@v5.command(short_help="Take a screen capture of the display") +@click.argument("file_name", required=False, default=None) +@click.argument("port", required=False, default=None) +@click.option("--force", is_flag=True, type=bool, default=False) @default_options def capture(file_name: str, port: str, force: bool = False): """ @@ -277,7 +277,7 @@ def capture(file_name: str, port: str, force: bool = False): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if not port: return -1 ser = DirectPort(port) @@ -285,65 +285,65 @@ def capture(file_name: str, port: str, force: bool = False): i_data, width, height = device.capture_screen() if i_data is None: - print('Failed to capture screen from connected brain.') + print("Failed to capture screen from connected brain.") return -1 # Sanity checking and default values for filenames if file_name is None: import time - time_s = time.strftime('%Y-%m-%d-%H%M%S') - file_name = f'{time_s}_{width}x{height}_pros_capture.png' - if file_name == '-': + time_s = time.strftime("%Y-%m-%d-%H%M%S") + file_name = f"{time_s}_{width}x{height}_pros_capture.png" + if file_name == "-": # Send the data to stdout to allow for piping - print(i_data, end='') + print(i_data, end="") return - if not file_name.endswith('.png'): - file_name += '.png' + if not file_name.endswith(".png"): + file_name += ".png" if not force and os.path.exists(file_name): - print(f'{file_name} already exists. Refusing to overwrite!') - print('Re-run this command with the --force argument to overwrite existing files.') + print(f"{file_name} already exists. 
Refusing to overwrite!") + print("Re-run this command with the --force argument to overwrite existing files.") return -1 - with open(file_name, 'wb') as file_: + with open(file_name, "wb") as file_: w = png.Writer(width, height, greyscale=False) w.write(file_, i_data) - print(f'Saved screen capture to {file_name}') + print(f"Saved screen capture to {file_name}") -@v5.command(aliases=['sv', 'set'], short_help='Set a kernel variable on a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('value', required=True, type=click.STRING, nargs=1) -@click.argument('port', type=str, default=None, required=False) +@v5.command(aliases=["sv", "set"], short_help="Set a kernel variable on a connected V5 device") +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("value", required=True, type=click.STRING, nargs=1) +@click.argument("port", type=str, default=None, required=False) @default_options def set_variable(variable, value, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) actual_value = device.kv_write(variable, value).decode() - print(f'Value of \'{variable}\' set to : {actual_value}') + print(f"Value of '{variable}' set to : {actual_value}") -@v5.command(aliases=['rv', 'get'], short_help='Read a kernel variable from a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('port', type=str, default=None, required=False) +@v5.command(aliases=["rv", "get"], short_help="Read a kernel variable from a connected V5 device") +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("port", type=str, default=None, required=False) @default_options def read_variable(variable, port): import pros.serial.devices.vex as vex from pros.serial.ports import DirectPort # Get the connected v5 device - port = resolve_v5_port(port, 'system')[0] + port = resolve_v5_port(port, "system")[0] if port == None: return device = vex.V5Device(DirectPort(port)) value = device.kv_read(variable).decode() - print(f'Value of \'{variable}\' is : {value}') + print(f"Value of '{variable}' is : {value}") diff --git a/pros/common/sentry.py b/pros/common/sentry.py index 38207169..57032478 100644 --- a/pros/common/sentry.py +++ b/pros/common/sentry.py @@ -10,7 +10,7 @@ from pros.config.cli_config import CliConfig # noqa: F401, flake8 issue, flake8 issue with "if TYPE_CHECKING" -cli_config: 'CliConfig' = None +cli_config: "CliConfig" = None force_prompt_off = False SUPPRESSED_EXCEPTIONS = [PermissionError, click.Abort] @@ -29,43 +29,43 @@ def prompt_to_send(event: Dict[str, Any], hint: Optional[Dict[str, Any]]) -> Opt if cli_config is None or (cli_config.offer_sentry is not None and not cli_config.offer_sentry): return if force_prompt_off: - ui.logger(__name__).debug('Sentry prompt was forced off through click option') + ui.logger(__name__).debug("Sentry prompt was forced off through click option") return - if 'extra' in event and not event['extra'].get('sentry', True): - ui.logger(__name__).debug('Not sending candidate event because event was tagged with extra.sentry = False') + if "extra" in event and not event["extra"].get("sentry", True): + 
ui.logger(__name__).debug("Not sending candidate event because event was tagged with extra.sentry = False") return - if 'exc_info' in hint and ( - not getattr(hint['exc_info'][1], 'sentry', True) - or any(isinstance(hint['exc_info'][1], t) for t in SUPPRESSED_EXCEPTIONS) + if "exc_info" in hint and ( + not getattr(hint["exc_info"][1], "sentry", True) + or any(isinstance(hint["exc_info"][1], t) for t in SUPPRESSED_EXCEPTIONS) ): - ui.logger(__name__).debug('Not sending candidate event because exception was tagged with sentry = False') + ui.logger(__name__).debug("Not sending candidate event because exception was tagged with sentry = False") return - if not event['tags']: - event['tags'] = dict() - - extra_text = '' - if 'message' in event: - extra_text += event['message'] + '\n' - if 'culprit' in event: - extra_text += event['culprit'] + '\n' - if 'logentry' in event and 'message' in event['logentry']: - extra_text += event['logentry']['message'] + '\n' - if 'exc_info' in hint: + if not event["tags"]: + event["tags"] = dict() + + extra_text = "" + if "message" in event: + extra_text += event["message"] + "\n" + if "culprit" in event: + extra_text += event["culprit"] + "\n" + if "logentry" in event and "message" in event["logentry"]: + extra_text += event["logentry"]["message"] + "\n" + if "exc_info" in hint: import traceback - extra_text += ''.join(traceback.format_exception(*hint['exc_info'], limit=4)) + extra_text += "".join(traceback.format_exception(*hint["exc_info"], limit=4)) - event['tags']['confirmed'] = ui.confirm( - 'We detected something went wrong! Do you want to send a report?', log=extra_text + event["tags"]["confirmed"] = ui.confirm( + "We detected something went wrong! Do you want to send a report?", log=extra_text ) - if event['tags']['confirmed']: - ui.echo('Sending bug report.') + if event["tags"]["confirmed"]: + ui.echo("Sending bug report.") ui.echo(f'Want to get updates? 
Visit https://pros.cs.purdue.edu/report.html?event={event["event_id"]}') return event else: - ui.echo('Not sending bug report.') + ui.echo("Not sending bug report.") def add_context(obj: object, override_handlers: bool = True, key: str = None) -> None: @@ -90,14 +90,14 @@ class TemplateHandler(jsonpickle.handlers.BaseHandler): def flatten(self, obj: BaseTemplate, data): rv = { - 'name': obj.name, - 'version': obj.version, - 'target': obj.target, + "name": obj.name, + "version": obj.version, + "target": obj.target, } - if hasattr(obj, 'location'): - rv['location'] = obj.location - if hasattr(obj, 'origin'): - rv['origin'] = obj.origin + if hasattr(obj, "location"): + rv["location"] = obj.location + if hasattr(obj, "origin"): + rv["origin"] = obj.origin return rv def restore(self, obj): @@ -122,7 +122,7 @@ def add_tag(key: str, value: str): scope.set_tag(key, value) -def register(cfg: Optional['CliConfig'] = None): +def register(cfg: Optional["CliConfig"] = None): global cli_config, client if cfg is None: from pros.config.cli_config import cli_config as get_cli_config @@ -141,14 +141,14 @@ def register(cfg: Optional['CliConfig'] = None): from pros.upgrade import get_platformv2 client = sentry.Client( - 'https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033', + "https://00bd27dcded6436cad5c8b2941d6a9d6@sentry.io/1226033", before_send=prompt_to_send, release=ui.get_version(), ) sentry.Hub.current.bind_client(client) with sentry.configure_scope() as scope: - scope.set_tag('platformv2', get_platformv2().name) + scope.set_tag("platformv2", get_platformv2().name) -__all__ = ['add_context', 'register', 'add_tag'] +__all__ = ["add_context", "register", "add_tag"] diff --git a/pros/common/ui/__init__.py b/pros/common/ui/__init__.py index d79b1e0f..4b4963e4 100644 --- a/pros/common/ui/__init__.py +++ b/pros/common/ui/__init__.py @@ -12,15 +12,15 @@ def _machineoutput(obj: Dict[str, Any]): - click.echo(f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}') + click.echo(f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}") def _machine_notify(method: str, obj: Dict[str, Any], notify_value: Optional[int]): if notify_value is None: global _current_notify_value notify_value = _current_notify_value - obj['type'] = f'notify/{method}' - obj['notify_value'] = notify_value + obj["type"] = f"notify/{method}" + obj["notify_value"] = notify_value _machineoutput(obj) @@ -33,10 +33,10 @@ def echo( output_machine: bool = True, ctx: Optional[click.Context] = None, ): - add_breadcrumb(message=text, category='echo') + add_breadcrumb(message=text, category="echo") if ismachineoutput(ctx): if output_machine: - return _machine_notify('echo', {'text': str(text) + ('\n' if nl else '')}, notify_value) + return _machine_notify("echo", {"text": str(text) + ("\n" if nl else "")}, notify_value) else: return click.echo(str(text), nl=nl, err=err, color=color) @@ -45,13 +45,13 @@ def confirm( text: str, default: bool = False, abort: bool = False, - prompt_suffix: bool = ': ', + prompt_suffix: bool = ": ", show_default: bool = True, err: bool = False, - title: AnyStr = 'Please confirm:', + title: AnyStr = "Please confirm:", log: str = None, ): - add_breadcrumb(message=text, category='confirm') + add_breadcrumb(message=text, category="confirm") if ismachineoutput(): from pros.common.ui.interactive.ConfirmModal import ConfirmModal from pros.common.ui.interactive.renderers import MachineOutputRenderer @@ -62,7 +62,7 @@ def confirm( rv = click.confirm( text, default=default, 
abort=abort, prompt_suffix=prompt_suffix, show_default=show_default, err=err ) - add_breadcrumb(message=f'User responded: {rv}') + add_breadcrumb(message=f"User responded: {rv}") return rv @@ -73,7 +73,7 @@ def prompt( confirmation_prompt=False, type=None, value_proc=None, - prompt_suffix=': ', + prompt_suffix=": ", show_default=True, err=False, ): @@ -102,10 +102,10 @@ def progressbar( show_percent: bool = True, show_pos: bool = False, item_show_func: Callable = None, - fill_char: str = '#', - empty_char: str = '-', - bar_template: str = '%(label)s [%(bar)s] %(info)s', - info_sep: str = ' ', + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", width: int = 36, ): if ismachineoutput(): @@ -129,12 +129,12 @@ def finalize( human_readable = data elif isinstance(data, List): if len(data) == 0: - human_readable = '' + human_readable = "" elif isinstance(data[0], str): - human_readable = '\n'.join(data) + human_readable = "\n".join(data) elif isinstance(data[0], dict) or isinstance(data[0], object): - if hasattr(data[0], '__str__'): - human_readable = '\n'.join([str(d) for d in data]) + if hasattr(data[0], "__str__"): + human_readable = "\n".join([str(d) for d in data]) else: if not isinstance(data[0], dict): data = [d.__dict__ for d in data] @@ -147,13 +147,13 @@ def finalize( human_readable = tabulate.tabulate(data[1:], headers=data[0]) else: human_readable = data - elif hasattr(data, '__str__'): + elif hasattr(data, "__str__"): human_readable = str(data) else: human_readable = data.__dict__ - human_readable = (human_prefix or '') + str(human_readable) + human_readable = (human_prefix or "") + str(human_readable) if ismachineoutput(): - _machineoutput({'type': 'finalize', 'method': method, 'data': data, 'human': human_readable}) + _machineoutput({"type": "finalize", "method": method, "data": data, "human": human_readable}) else: echo(human_readable) @@ -161,8 +161,8 @@ def finalize( class _MachineOutputProgressBar(_click_ProgressBar): def __init__(self, *args, **kwargs): global _current_notify_value - kwargs['file'] = open(os.devnull, 'w', encoding='UTF-8') - self.notify_value = kwargs.pop('notify_value', _current_notify_value) + kwargs["file"] = open(os.devnull, "w", encoding="UTF-8") + self.notify_value = kwargs.pop("notify_value", _current_notify_value) super(_MachineOutputProgressBar, self).__init__(*args, **kwargs) def __del__(self): @@ -170,10 +170,10 @@ def __del__(self): def render_progress(self): super(_MachineOutputProgressBar, self).render_progress() - obj = {'text': self.label, 'pct': self.pct} + obj = {"text": self.label, "pct": self.pct} if self.show_eta and self.eta_known and not self.finished: - obj['eta'] = self.eta - _machine_notify('progress', obj, self.notify_value) + obj["eta"] = self.eta + _machine_notify("progress", obj, self.notify_value) class Notification(object): @@ -206,7 +206,7 @@ def __init__(self, err: bool = False, ctx: Optional[click.Context] = None): threading.Thread.__init__(self) self.daemon = False self.fdRead, self.fdWrite = os.pipe() - self.pipeReader = os.fdopen(self.fdRead, encoding='UTF-8') + self.pipeReader = os.fdopen(self.fdRead, encoding="UTF-8") self.start() def fileno(self): @@ -215,8 +215,8 @@ def fileno(self): def run(self): """Run the thread, logging everything.""" - for line in iter(self.pipeReader.readline, ''): - echo(line.strip('\n'), ctx=self.click_ctx, err=self.is_err) + for line in iter(self.pipeReader.readline, ""): + echo(line.strip("\n"), ctx=self.click_ctx, 
err=self.is_err) self.pipeReader.close() @@ -225,4 +225,4 @@ def close(self): os.close(self.fdWrite) -__all__ = ['finalize', 'echo', 'confirm', 'prompt', 'progressbar', 'EchoPipe'] +__all__ = ["finalize", "echo", "confirm", "prompt", "progressbar", "EchoPipe"] diff --git a/pros/common/ui/interactive/ConfirmModal.py b/pros/common/ui/interactive/ConfirmModal.py index d4c59235..f444ec87 100644 --- a/pros/common/ui/interactive/ConfirmModal.py +++ b/pros/common/ui/interactive/ConfirmModal.py @@ -10,8 +10,8 @@ class ConfirmModal(application.Modal[bool]): In --machine-output mode, this Modal is run instead of a textual confirmation request (e.g. click.confirm()) """ - def __init__(self, text: str, abort: bool = False, title: AnyStr = 'Please confirm:', log: Optional[AnyStr] = None): - super().__init__(title, will_abort=abort, confirm_button='Yes', cancel_button='No', description=text) + def __init__(self, text: str, abort: bool = False, title: AnyStr = "Please confirm:", log: Optional[AnyStr] = None): + super().__init__(title, will_abort=abort, confirm_button="Yes", cancel_button="No", description=text) self.log = log def confirm(self): diff --git a/pros/common/ui/interactive/application.py b/pros/common/ui/interactive/application.py index df393471..fb8bd85f 100644 --- a/pros/common/ui/interactive/application.py +++ b/pros/common/ui/interactive/application.py @@ -3,7 +3,7 @@ from .components import Component from .observable import Observable -P = TypeVar('P') +P = TypeVar("P") class Application(Observable, Generic[P]): @@ -22,7 +22,7 @@ def __del__(self): self.exit() def on_exit(self, *handlers: Callable): - return super(Application, self).on('end', *handlers) + return super(Application, self).on("end", *handlers) def exit(self, **kwargs): """ @@ -31,24 +31,24 @@ def exit(self, **kwargs): :arg return: set the return value before triggering exit. 
This value would be the value returned by Renderer.run(Application) """ - if 'return' in kwargs: - self.set_return(kwargs['return']) - self.trigger('end') + if "return" in kwargs: + self.set_return(kwargs["return"]) + self.trigger("end") def on_redraw(self, *handlers: Callable, **kwargs) -> Callable: - return super(Application, self).on('redraw', *handlers, **kwargs) + return super(Application, self).on("redraw", *handlers, **kwargs) def redraw(self) -> None: - self.trigger('redraw') + self.trigger("redraw") def set_return(self, value: P) -> None: """ Set the return value of Renderer.run(Application) """ - self.trigger('return', value) + self.trigger("return", value) def on_return_set(self, *handlers: Callable, **kwargs): - return super(Application, self).on('return', *handlers, **kwargs) + return super(Application, self).on("return", *handlers, **kwargs) @classmethod def get_hierarchy(cls, base: type) -> Optional[List[str]]: @@ -102,8 +102,8 @@ def __init__( title: AnyStr, description: Optional[AnyStr] = None, will_abort: bool = True, - confirm_button: AnyStr = 'Continue', - cancel_button: AnyStr = 'Cancel', + confirm_button: AnyStr = "Continue", + cancel_button: AnyStr = "Cancel", can_confirm: Optional[bool] = None, ): super().__init__() @@ -114,13 +114,13 @@ def __init__( self.cancel_button = cancel_button self._can_confirm = can_confirm - self.on('confirm', self._confirm) + self.on("confirm", self._confirm) def on_cancel(): nonlocal self self.cancel() - self.on('cancel', on_cancel) + self.on("cancel", on_cancel) def confirm(self, *args, **kwargs): raise NotImplementedError() @@ -140,7 +140,7 @@ def build(self) -> Generator[Component, None, None]: def __getstate__(self): extra_state = {} if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( **super(Modal, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/__init__.py b/pros/common/ui/interactive/components/__init__.py index abc969a8..419bc371 100644 --- a/pros/common/ui/interactive/components/__init__.py +++ b/pros/common/ui/interactive/components/__init__.py @@ -7,16 +7,16 @@ from .label import Label, Spinner, VerbatimLabel __all__ = [ - 'Component', - 'Button', - 'Container', - 'InputBox', - 'ButtonGroup', - 'DropDownBox', - 'Label', - 'DirectorySelector', - 'FileSelector', - 'Checkbox', - 'Spinner', - 'VerbatimLabel', + "Component", + "Button", + "Container", + "InputBox", + "ButtonGroup", + "DropDownBox", + "Label", + "DirectorySelector", + "FileSelector", + "Checkbox", + "Spinner", + "VerbatimLabel", ] diff --git a/pros/common/ui/interactive/components/button.py b/pros/common/ui/interactive/components/button.py index dee126b5..184b930c 100644 --- a/pros/common/ui/interactive/components/button.py +++ b/pros/common/ui/interactive/components/button.py @@ -14,7 +14,7 @@ def __init__(self, text: AnyStr): self.text = text def on_clicked(self, *handlers: Callable, **kwargs): - return self.on('clicked', *handlers, **kwargs) + return self.on("clicked", *handlers, **kwargs) def __getstate__(self) -> dict: return dict(**super(Button, self).__getstate__(), text=self.text, uuid=self.uuid) diff --git a/pros/common/ui/interactive/components/component.py b/pros/common/ui/interactive/components/component.py index 3880e5a6..500454f1 100644 --- a/pros/common/ui/interactive/components/component.py +++ b/pros/common/ui/interactive/components/component.py @@ -32,7 +32,7 @@ def __getstate__(self) -> Dict: return 
dict(etype=Component.get_hierarchy(self.__class__)) -P = TypeVar('P', bound=Parameter) +P = TypeVar("P", bound=Parameter) class ParameterizedComponent(Component, Generic[P]): @@ -46,10 +46,10 @@ def __init__(self, parameter: P): def __getstate__(self): extra_state = {} if isinstance(self.parameter, ValidatableParameter): - extra_state['valid'] = self.parameter.is_valid() + extra_state["valid"] = self.parameter.is_valid() reason = self.parameter.is_valid_reason() if reason: - extra_state['valid_reason'] = self.parameter.is_valid_reason() + extra_state["valid_reason"] = self.parameter.is_valid_reason() return dict( **super(ParameterizedComponent, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/container.py b/pros/common/ui/interactive/components/container.py index 6f251110..b153c1ac 100644 --- a/pros/common/ui/interactive/components/container.py +++ b/pros/common/ui/interactive/components/container.py @@ -23,11 +23,11 @@ def __init__( self.collapsed = BooleanParameter(collapsed) if isinstance(collapsed, bool) else collapsed def __getstate__(self): - extra_state = {'uuid': self.collapsed.uuid, 'collapsed': self.collapsed.value} + extra_state = {"uuid": self.collapsed.uuid, "collapsed": self.collapsed.value} if self.title is not None: - extra_state['title'] = self.title + extra_state["title"] = self.title if self.description is not None: - extra_state['description'] = self.description + extra_state["description"] = self.description return dict( **super(Container, self).__getstate__(), **extra_state, elements=[e.__getstate__() for e in self.elements] ) diff --git a/pros/common/ui/interactive/components/input.py b/pros/common/ui/interactive/components/input.py index 8d35b5e8..8a9a071a 100644 --- a/pros/common/ui/interactive/components/input.py +++ b/pros/common/ui/interactive/components/input.py @@ -15,7 +15,7 @@ def __init__(self, label: AnyStr, parameter: P, placeholder: Optional = None): def __getstate__(self) -> dict: extra_state = {} if self.placeholder is not None: - extra_state['placeholder'] = self.placeholder + extra_state["placeholder"] = self.placeholder return dict( **super(InputBox, self).__getstate__(), **extra_state, diff --git a/pros/common/ui/interactive/components/label.py b/pros/common/ui/interactive/components/label.py index f4ac5592..df06ec95 100644 --- a/pros/common/ui/interactive/components/label.py +++ b/pros/common/ui/interactive/components/label.py @@ -25,4 +25,4 @@ class Spinner(Label): """ def __init__(self): - super(Spinner, self).__init__('Loading...') + super(Spinner, self).__init__("Loading...") diff --git a/pros/common/ui/interactive/observable.py b/pros/common/ui/interactive/observable.py index f2d14d92..61e00178 100644 --- a/pros/common/ui/interactive/observable.py +++ b/pros/common/ui/interactive/observable.py @@ -25,7 +25,7 @@ def notify(cls, uuid, event, *args, **kwargs): if uuid in _uuid_table: _uuid_table[uuid].trigger(event, *args, **kwargs) else: - logger(__name__).warning(f'Could not find an Observable to notify with UUID: {uuid}', sentry=True) + logger(__name__).warning(f"Could not find an Observable to notify with UUID: {uuid}", sentry=True) def on( self, diff --git a/pros/common/ui/interactive/parameters/__init__.py b/pros/common/ui/interactive/parameters/__init__.py index 8d397694..9185027b 100644 --- a/pros/common/ui/interactive/parameters/__init__.py +++ b/pros/common/ui/interactive/parameters/__init__.py @@ -3,10 +3,10 @@ from .validatable_parameter import AlwaysInvalidParameter, ValidatableParameter 
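The Parameter and ValidatableParameter hunks further down mostly re-quote the Observable event names ("update", "changed", "changed_validated"), so a tiny usage sketch may help keep those events straight. The variable name and printed text below are made up; Parameter is re-exported by the parameters __init__ being edited here, and the final trigger call simulates the "update" event a renderer would deliver.

# Minimal usage sketch of the Parameter change events touched in this patch.
from pros.common.ui.interactive.parameters import Parameter

slot = Parameter(1)                                        # initial value
slot.on_changed(lambda p: print("slot is now", p.value))   # fires on "changed"
slot.trigger("update", 5)                                  # update() stores 5, then triggers "changed"
# -> prints: slot is now 5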
__all__ = [ - 'Parameter', - 'OptionParameter', - 'BooleanParameter', - 'ValidatableParameter', - 'RangeParameter', - 'AlwaysInvalidParameter', + "Parameter", + "OptionParameter", + "BooleanParameter", + "ValidatableParameter", + "RangeParameter", + "AlwaysInvalidParameter", ] diff --git a/pros/common/ui/interactive/parameters/misc_parameters.py b/pros/common/ui/interactive/parameters/misc_parameters.py index f19edba9..64cc2a38 100644 --- a/pros/common/ui/interactive/parameters/misc_parameters.py +++ b/pros/common/ui/interactive/parameters/misc_parameters.py @@ -3,7 +3,7 @@ from pros.common.ui.interactive.parameters.parameter import Parameter from pros.common.ui.interactive.parameters.validatable_parameter import ValidatableParameter -T = TypeVar('T') +T = TypeVar("T") class OptionParameter(ValidatableParameter, Generic[T]): @@ -17,8 +17,8 @@ def validate(self, value: Any): class BooleanParameter(Parameter[bool]): def update(self, new_value): - true_prefixes = ['T', 'Y'] - true_matches = ['1'] + true_prefixes = ["T", "Y"] + true_matches = ["1"] v = str(new_value).upper() is_true = v in true_matches or any(v.startswith(p) for p in true_prefixes) super(BooleanParameter, self).update(is_true) @@ -33,7 +33,7 @@ def validate(self, value: T): if self.range[0] <= value <= self.range[1]: return True else: - return f'{value} is not within [{self.range[0]}, {self.range[1]}]' + return f"{value} is not within [{self.range[0]}, {self.range[1]}]" def update(self, new_value): super(RangeParameter, self).update(int(new_value)) diff --git a/pros/common/ui/interactive/parameters/parameter.py b/pros/common/ui/interactive/parameters/parameter.py index 1c11eb5e..c1412e9a 100644 --- a/pros/common/ui/interactive/parameters/parameter.py +++ b/pros/common/ui/interactive/parameters/parameter.py @@ -2,7 +2,7 @@ from pros.common.ui.interactive.observable import Observable -T = TypeVar('T') +T = TypeVar("T") class Parameter(Observable, Generic[T]): @@ -17,11 +17,11 @@ def __init__(self, initial_value: T): super().__init__() self.value = initial_value - self.on('update', self.update) + self.on("update", self.update) def update(self, new_value): self.value = new_value - self.trigger('changed', self) + self.trigger("changed", self) def on_changed(self, *handlers: Callable, **kwargs): - return self.on('changed', *handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) diff --git a/pros/common/ui/interactive/parameters/validatable_parameter.py b/pros/common/ui/interactive/parameters/validatable_parameter.py index 208c3bb3..e631a045 100644 --- a/pros/common/ui/interactive/parameters/validatable_parameter.py +++ b/pros/common/ui/interactive/parameters/validatable_parameter.py @@ -2,7 +2,7 @@ from pros.common.ui.interactive.parameters.parameter import Parameter -T = TypeVar('T') +T = TypeVar("T") class ValidatableParameter(Parameter, Generic[T]): @@ -44,19 +44,19 @@ def update(self, new_value): if self.allow_invalid_input or self.is_valid(new_value): super(ValidatableParameter, self).update(new_value) if self.is_valid(): - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) def on_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value validly changes """ - return self.on('changed_validated', *handlers, **kwargs) + return self.on("changed_validated", *handlers, **kwargs) def on_any_changed(self, *handlers: Callable, **kwargs): """ Subscribe to event whenever value changes (regardless of whether or not new value is valid) """ - return self.on('changed', 
*handlers, **kwargs) + return self.on("changed", *handlers, **kwargs) class AlwaysInvalidParameter(ValidatableParameter[T], Generic[T]): diff --git a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py index 91f88c8e..5b348617 100644 --- a/pros/common/ui/interactive/renderers/MachineOutputRenderer.py +++ b/pros/common/ui/interactive/renderers/MachineOutputRenderer.py @@ -10,32 +10,32 @@ from ..application import Application from .Renderer import Renderer -current: List['MachineOutputRenderer'] = [] +current: List["MachineOutputRenderer"] = [] -def _push_renderer(renderer: 'MachineOutputRenderer'): +def _push_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current stack.append(renderer) -def _remove_renderer(renderer: 'MachineOutputRenderer'): +def _remove_renderer(renderer: "MachineOutputRenderer"): global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current if renderer in stack: stack.remove(renderer) -def _current_renderer() -> Optional['MachineOutputRenderer']: +def _current_renderer() -> Optional["MachineOutputRenderer"]: global current - stack: List['MachineOutputRenderer'] = current + stack: List["MachineOutputRenderer"] = current return stack[-1] if len(stack) > 0 else None -P = TypeVar('P') +P = TypeVar("P") class MachineOutputRenderer(Renderer[P], Generic[P]): @@ -55,7 +55,7 @@ def on_redraw(): @staticmethod def get_line(): - line = click.get_text_stream('stdin').readline().strip() + line = click.get_text_stream("stdin").readline().strip() return line.strip() if line is not None else None def run(self) -> P: @@ -73,8 +73,8 @@ def run(self) -> P: try: value = json.loads(line) - if 'uuid' in value and 'event' in value: - Observable.notify(value['uuid'], value['event'], *value.get('args', []), **value.get('kwargs', {})) + if "uuid" in value and "event" in value: + Observable.notify(value["uuid"], value["event"], *value.get("args", []), **value.get("kwargs", {})) except json.JSONDecodeError as e: ui.logger(__name__).exception(e) except BaseException as e: @@ -85,16 +85,16 @@ def run(self) -> P: return self.run_rv def stop(self): - ui.logger(__name__).debug(f'Stopping {self.app}') + ui.logger(__name__).debug(f"Stopping {self.app}") self.alive = False if current_thread() != self.thread: - ui.logger(__name__).debug(f'Interrupting render thread of {self.app}') + ui.logger(__name__).debug(f"Interrupting render thread of {self.app}") while not self.stop_sem.acquire(timeout=0.1): self.wake_me() - ui.logger(__name__).debug(f'Broadcasting stop {self.app}') - self._output({'uuid': self.app.uuid, 'should_exit': True}) + ui.logger(__name__).debug(f"Broadcasting stop {self.app}") + self._output({"uuid": self.app.uuid, "should_exit": True}) _remove_renderer(self) top_renderer = _current_renderer() @@ -105,15 +105,15 @@ def wake_me(self): """ Hack to wake up input thread to know to shut down """ - ui.logger(__name__).debug(f'Broadcasting WAKEME for {self.app}') + ui.logger(__name__).debug(f"Broadcasting WAKEME for {self.app}") if ui.ismachineoutput(): - ui._machineoutput({'type': 'wakeme'}) + ui._machineoutput({"type": "wakeme"}) else: - ui.echo('Wake up the renderer!') + ui.echo("Wake up the renderer!") @staticmethod def _output(data: dict): - data['type'] = 'input/interactive' + data["type"] = "input/interactive" if ui.ismachineoutput(): ui._machineoutput(data) else: diff 
--git a/pros/common/ui/interactive/renderers/Renderer.py b/pros/common/ui/interactive/renderers/Renderer.py index 40f17a0e..2bbebf2a 100644 --- a/pros/common/ui/interactive/renderers/Renderer.py +++ b/pros/common/ui/interactive/renderers/Renderer.py @@ -2,7 +2,7 @@ from ..application import Application -P = TypeVar('P') +P = TypeVar("P") class Renderer(Generic[P]): diff --git a/pros/common/ui/log.py b/pros/common/ui/log.py index bc37e0ad..05ec8a85 100644 --- a/pros/common/ui/log.py +++ b/pros/common/ui/log.py @@ -21,18 +21,18 @@ def __init__(self, *args, ctx_obj=None, **kwargs): def emit(self, record): try: - if self.ctx_obj.get('machine_output', False): + if self.ctx_obj.get("machine_output", False): formatter = self.formatter or logging.Formatter() record.message = record.getMessage() obj = { - 'type': 'log/message', - 'level': record.levelname, - 'message': formatter.formatMessage(record), - 'simpleMessage': record.message, + "type": "log/message", + "level": record.levelname, + "message": formatter.formatMessage(record), + "simpleMessage": record.message, } if record.exc_info: - obj['trace'] = formatter.formatException(record.exc_info) - msg = f'Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}' + obj["trace"] = formatter.formatException(record.exc_info) + msg = f"Uc&42BWAaQ{jsonpickle.dumps(obj, unpicklable=False, backend=_machine_pickler)}" else: msg = self.format(record) click.echo(msg) @@ -47,6 +47,6 @@ class PROSLogFormatter(logging.Formatter): def formatException(self, ei): if not isdebug(): - return '\n'.join(super().formatException(ei).split('\n')[-3:]) + return "\n".join(super().formatException(ei).split("\n")[-3:]) else: return super().formatException(ei) diff --git a/pros/common/utils.py b/pros/common/utils.py index 56e0f08c..2c771846 100644 --- a/pros/common/utils.py +++ b/pros/common/utils.py @@ -13,13 +13,13 @@ @lru_cache(1) def get_version(): try: - ver = open(os.path.join(os.path.dirname(__file__), '..', '..', 'version')).read().strip() + ver = open(os.path.join(os.path.dirname(__file__), "..", "..", "version")).read().strip() if ver is not None: return ver except: pass try: - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): import _constants ver = _constants.CLI_VERSION @@ -36,13 +36,13 @@ def get_version(): module = pros.cli.main.__name__ for dist in pkg_resources.working_set: - scripts = dist.get_entry_map().get('console_scripts') or {} + scripts = dist.get_entry_map().get("console_scripts") or {} for _, entry_point in iter(scripts.items()): if entry_point.module_name == module: ver = dist.version if ver is not None: return ver - raise RuntimeError('Could not determine version') + raise RuntimeError("Could not determine version") def retries(func, retry: int = 3): @@ -79,13 +79,13 @@ def ismachineoutput(ctx: click.Context = None) -> bool: if isinstance(ctx, click.Context): ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - return ctx.obj.get('machine_output', False) + return ctx.obj.get("machine_output", False) else: return False def get_pros_dir(): - return click.get_app_dir('PROS') + return click.get_app_dir("PROS") def with_click_context(func): @@ -121,9 +121,9 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non response = requests.get(url, stream=True) if response.status_code == 200: - filename: str = url.rsplit('/', 1)[-1] - if 'Content-Disposition' in response.headers.keys(): - filename = re.findall("filename=(.+)", response.headers['Content-Disposition'])[0] + filename: 
str = url.rsplit("/", 1)[-1] + if "Content-Disposition" in response.headers.keys(): + filename = re.findall("filename=(.+)", response.headers["Content-Disposition"])[0] # try: # disposition = parse_requests_response(response) # if isinstance(ext, str): @@ -132,16 +132,16 @@ def download_file(url: str, ext: Optional[str] = None, desc: Optional[str] = Non # filename = disposition.filename_unsafe # except RuntimeError: # pass - output_path = os.path.join(get_pros_dir(), 'download', filename) + output_path = os.path.join(get_pros_dir(), "download", filename) if os.path.exists(output_path): os.remove(output_path) elif not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path), exist_ok=True) - with open(output_path, mode='wb') as file: + with open(output_path, mode="wb") as file: with progressbar( - length=int(response.headers['Content-Length']), label=desc or f'Downloading {filename}' + length=int(response.headers["Content-Length"]), label=desc or f"Downloading {filename}" ) as pb: for chunk in response.iter_content(256): file.write(chunk) diff --git a/pros/conductor/__init__.py b/pros/conductor/__init__.py index 51ac1e34..e866d1b1 100644 --- a/pros/conductor/__init__.py +++ b/pros/conductor/__init__.py @@ -1,4 +1,4 @@ -__all__ = ['BaseTemplate', 'Template', 'LocalTemplate', 'Depot', 'LocalDepot', 'Project', 'Conductor'] +__all__ = ["BaseTemplate", "Template", "LocalTemplate", "Depot", "LocalDepot", "Project", "Conductor"] from .conductor import Conductor from .depots import Depot, LocalDepot diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py index 021e981d..5826cc2a 100644 --- a/pros/conductor/conductor.py +++ b/pros/conductor/conductor.py @@ -16,10 +16,10 @@ from .project import Project from .templates import BaseTemplate, ExternalTemplate, LocalTemplate, Template -MAINLINE_NAME = 'pros-mainline' -MAINLINE_URL = 'https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json' -EARLY_ACCESS_NAME = 'kernel-early-access-mainline' -EARLY_ACCESS_URL = 'https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json' +MAINLINE_NAME = "pros-mainline" +MAINLINE_URL = "https://pros.cs.purdue.edu/v5/_static/releases/pros-mainline.json" +EARLY_ACCESS_NAME = "kernel-early-access-mainline" +EARLY_ACCESS_URL = "https://pros.cs.purdue.edu/v5/_static/beta/beta-pros-mainline.json" """ # TBD? 
Currently, EarlyAccess value is stored in config file @@ -36,11 +36,11 @@ class Conductor(Config): def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + file = os.path.join(click.get_app_dir("PROS"), "conductor.pros") self.local_templates: Set[LocalTemplate] = set() self.early_access_local_templates: Set[LocalTemplate] = set() self.depots: Dict[str, Depot] = {} - self.default_target: str = 'v5' + self.default_target: str = "v5" self.default_libraries: Dict[str, List[str]] = None self.early_access_libraries: Dict[str, List[str]] = None self.use_early_access = False @@ -63,25 +63,25 @@ def __init__(self, file=None): self.depots[EARLY_ACCESS_NAME] = HttpDepot(EARLY_ACCESS_NAME, EARLY_ACCESS_URL) needs_saving = True if self.default_target is None: - self.default_target = 'v5' + self.default_target = "v5" needs_saving = True if self.default_libraries is None: - self.default_libraries = {'v5': ['okapilib'], 'cortex': []} + self.default_libraries = {"v5": ["okapilib"], "cortex": []} needs_saving = True - if self.early_access_libraries is None or len(self.early_access_libraries['v5']) != 2: - self.early_access_libraries = {'v5': ['liblvgl', 'okapilib'], 'cortex': []} + if self.early_access_libraries is None or len(self.early_access_libraries["v5"]) != 2: + self.early_access_libraries = {"v5": ["liblvgl", "okapilib"], "cortex": []} needs_saving = True - if 'v5' not in self.default_libraries: - self.default_libraries['v5'] = [] + if "v5" not in self.default_libraries: + self.default_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.default_libraries: - self.default_libraries['cortex'] = [] + if "cortex" not in self.default_libraries: + self.default_libraries["cortex"] = [] needs_saving = True - if 'v5' not in self.early_access_libraries: - self.early_access_libraries['v5'] = [] + if "v5" not in self.early_access_libraries: + self.early_access_libraries["v5"] = [] needs_saving = True - if 'cortex' not in self.early_access_libraries: - self.early_access_libraries['cortex'] = [] + if "cortex" not in self.early_access_libraries: + self.early_access_libraries["cortex"] = [] needs_saving = True if needs_saving: self.save() @@ -97,18 +97,18 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca if t.identifier == template.identifier: self.purge_template(t) - if 'destination' in kwargs: # this is deprecated, will work (maybe) but not desirable behavior - destination = kwargs.pop('destination') + if "destination" in kwargs: # this is deprecated, will work (maybe) but not desirable behavior + destination = kwargs.pop("destination") else: - destination = os.path.join(self.directory, 'templates', template.identifier) + destination = os.path.join(self.directory, "templates", template.identifier) if os.path.isdir(destination): shutil.rmtree(destination) template: Template = depot.fetch_template(template, destination, **kwargs) - click.secho(f'Fetched {template.identifier} from {depot.name} depot', dim=True) + click.secho(f"Fetched {template.identifier} from {depot.name} depot", dim=True) local_template = LocalTemplate(orig=template, location=destination) - local_template.metadata['origin'] = depot.name - click.echo(f'Adding {local_template.identifier} to registry...', nl=False) + local_template.metadata["origin"] = depot.name + click.echo(f"Adding {local_template.identifier} to registry...", nl=False) if depot.name == EARLY_ACCESS_NAME: # check for early access 
self.early_access_local_templates.add(local_template) else: @@ -116,11 +116,11 @@ def fetch_template(self, depot: Depot, template: BaseTemplate, **kwargs) -> Loca self.save() if isinstance(template, ExternalTemplate) and template.directory == destination: template.delete() - click.secho('Done', fg='green') + click.secho("Done", fg="green") return local_template def purge_template(self, template: LocalTemplate): - if template.metadata['origin'] == EARLY_ACCESS_NAME: + if template.metadata["origin"] == EARLY_ACCESS_NAME: if template not in self.early_access_local_templates: logger(__name__).info( f"{template.identifier} was not in the Conductor's local early access templates cache." @@ -134,7 +134,7 @@ def purge_template(self, template: LocalTemplate): self.local_templates.remove(template) if os.path.abspath(template.location).startswith( - os.path.abspath(os.path.join(self.directory, 'templates')) + os.path.abspath(os.path.join(self.directory, "templates")) ) and os.path.isdir(template.location): shutil.rmtree(template.location) self.save() @@ -149,9 +149,9 @@ def resolve_templates( **kwargs, ) -> List[BaseTemplate]: results = list() if not unique else set() - kernel_version = kwargs.get('kernel_version', None) - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) + kernel_version = kwargs.get("kernel_version", None) + if kwargs.get("early_access", None) is not None: + self.use_early_access = kwargs.get("early_access", False) if isinstance(identifier, str): query = BaseTemplate.create_query(name=identifier, **kwargs) else: @@ -187,34 +187,34 @@ def resolve_templates( results.update(online_results) else: results.extend(online_results) - logger(__name__).debug('Saving Conductor config after checking for remote updates') + logger(__name__).debug("Saving Conductor config after checking for remote updates") self.save() # Save self since there may have been some updates from the depots if len(results) == 0 and not self.use_early_access: raise dont_send( - InvalidTemplateException(f'{identifier.name} does not support kernel version {kernel_version}') + InvalidTemplateException(f"{identifier.name} does not support kernel version {kernel_version}") ) return list(results) def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Optional[BaseTemplate]: if isinstance(identifier, str): - kwargs['name'] = identifier + kwargs["name"] = identifier elif isinstance(identifier, BaseTemplate): - kwargs['orig'] = identifier + kwargs["orig"] = identifier query = BaseTemplate.create_query(**kwargs) - logger(__name__).info(f'Query: {query}') + logger(__name__).info(f"Query: {query}") logger(__name__).debug(query.__dict__) templates = self.resolve_templates(query, **kwargs) logger(__name__).info(f'Candidates: {", ".join([str(t) for t in templates])}') if not any(templates): return None - query.version = str(Spec(query.version or '>0').select([Version(t.version) for t in templates])) + query.version = str(Spec(query.version or ">0").select([Version(t.version) for t in templates])) v = Version(query.version) - v.prerelease = v.prerelease if len(v.prerelease) else ('',) - v.build = v.build if len(v.build) else ('',) - query.version = f'=={v}' - logger(__name__).info(f'Resolved to {query.identifier}') + v.prerelease = v.prerelease if len(v.prerelease) else ("",) + v.build = v.build if len(v.build) else ("",) + query.version = f"=={v}" + logger(__name__).info(f"Resolved to {query.identifier}") templates = self.resolve_templates(query, 
**kwargs) if not any(templates): return None @@ -224,11 +224,11 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op # there's a local template satisfying the query if len(local_templates) > 1: # This should never happen! Conductor state must be invalid - raise Exception(f'Multiple local templates satisfy {query.identifier}!') + raise Exception(f"Multiple local templates satisfy {query.identifier}!") return local_templates[0] # prefer pros-mainline template second - mainline_templates = [t for t in templates if t.metadata['origin'] == 'pros-mainline'] + mainline_templates = [t for t in templates if t.metadata["origin"] == "pros-mainline"] if any(mainline_templates): return mainline_templates[0] @@ -236,52 +236,52 @@ def resolve_template(self, identifier: Union[str, BaseTemplate], **kwargs) -> Op return templates[0] def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], **kwargs): - upgrade_ok = kwargs.get('upgrade_ok', True) - install_ok = kwargs.get('install_ok', True) - downgrade_ok = kwargs.get('downgrade_ok', True) - download_ok = kwargs.get('download_ok', True) - force = kwargs.get('force_apply', False) - - kwargs['target'] = project.target - if 'kernel' in project.templates: + upgrade_ok = kwargs.get("upgrade_ok", True) + install_ok = kwargs.get("install_ok", True) + downgrade_ok = kwargs.get("downgrade_ok", True) + download_ok = kwargs.get("download_ok", True) + force = kwargs.get("force_apply", False) + + kwargs["target"] = project.target + if "kernel" in project.templates: # support_kernels for backwards compatibility, but kernel_version should be getting most of the exposure - kwargs['kernel_version'] = kwargs['supported_kernels'] = project.templates['kernel'].version + kwargs["kernel_version"] = kwargs["supported_kernels"] = project.templates["kernel"].version template = self.resolve_template(identifier=identifier, allow_online=download_ok, **kwargs) if template is None: raise dont_send( - InvalidTemplateException(f'Could not find a template satisfying {identifier} for {project.target}') + InvalidTemplateException(f"Could not find a template satisfying {identifier} for {project.target}") ) # warn and prompt user if upgrading to PROS 4 or downgrading to PROS 3 - if template.name == 'kernel': + if template.name == "kernel": isProject = Project.find_project("") if isProject: curr_proj = Project() if curr_proj.kernel: - if template.version[0] == '4' and curr_proj.kernel[0] == '3': + if template.version[0] == "4" and curr_proj.kernel[0] == "3": confirm = ui.confirm( - f'Warning! Upgrading project to PROS 4 will cause breaking changes. ' - f'Do you still want to upgrade?' + f"Warning! Upgrading project to PROS 4 will cause breaking changes. " + f"Do you still want to upgrade?" ) if not confirm: - raise dont_send(InvalidTemplateException(f'Not upgrading')) - if template.version[0] == '3' and curr_proj.kernel[0] == '4': + raise dont_send(InvalidTemplateException(f"Not upgrading")) + if template.version[0] == "3" and curr_proj.kernel[0] == "4": confirm = ui.confirm( - f'Warning! Downgrading project to PROS 3 will cause breaking changes. ' - f'Do you still want to downgrade?' + f"Warning! Downgrading project to PROS 3 will cause breaking changes. " + f"Do you still want to downgrade?" 
) if not confirm: - raise dont_send(InvalidTemplateException(f'Not downgrading')) - elif not self.use_early_access and template.version[0] == '3' and not self.warn_early_access: + raise dont_send(InvalidTemplateException(f"Not downgrading")) + elif not self.use_early_access and template.version[0] == "3" and not self.warn_early_access: confirm = ui.confirm( - f'PROS 4 is now in early access. ' - f'Please use the --early-access flag if you would like to use it.\n' - f'Do you want to use PROS 4 instead?' + f"PROS 4 is now in early access. " + f"Please use the --early-access flag if you would like to use it.\n" + f"Do you want to use PROS 4 instead?" ) self.warn_early_access = True if confirm: # use pros 4 self.use_early_access = True - kwargs['version'] = '>=0' + kwargs["version"] = ">=0" self.save() # Recall the function with early access enabled return self.apply_template(project, identifier, **kwargs) @@ -289,14 +289,14 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], self.save() if not isinstance(template, LocalTemplate): with ui.Notification(): - template = self.fetch_template(self.get_depot(template.metadata['origin']), template, **kwargs) + template = self.fetch_template(self.get_depot(template.metadata["origin"]), template, **kwargs) assert isinstance(template, LocalTemplate) logger(__name__).info(str(project)) valid_action = project.get_template_actions(template) if valid_action == TemplateAction.NotApplicable: raise dont_send( - InvalidTemplateException(f'{template.identifier} is not applicable to {project}', reason=valid_action) + InvalidTemplateException(f"{template.identifier} is not applicable to {project}", reason=valid_action) ) if ( force @@ -306,21 +306,21 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate], ): project.apply_template( template, - force_system=kwargs.pop('force_system', False), - force_user=kwargs.pop('force_user', False), - remove_empty_directories=kwargs.pop('remove_empty_directories', False), + force_system=kwargs.pop("force_system", False), + force_user=kwargs.pop("force_user", False), + remove_empty_directories=kwargs.pop("remove_empty_directories", False), ) - ui.finalize('apply', f'Finished applying {template.identifier} to {project.location}') + ui.finalize("apply", f"Finished applying {template.identifier} to {project.location}") elif valid_action != TemplateAction.AlreadyInstalled: raise dont_send( InvalidTemplateException( - f'Could not install {template.identifier} because it is {valid_action.name},' - f' and that is not allowed.', + f"Could not install {template.identifier} because it is {valid_action.name}," + f" and that is not allowed.", reason=valid_action, ) ) else: - ui.finalize('apply', f'{template.identifier} is already installed in {project.location}') + ui.finalize("apply", f"{template.identifier} is already installed in {project.location}") @staticmethod def remove_template( @@ -329,72 +329,72 @@ def remove_template( remove_user: bool = True, remove_empty_directories: bool = True, ): - ui.logger(__name__).debug(f'Uninstalling templates matching {identifier}') + ui.logger(__name__).debug(f"Uninstalling templates matching {identifier}") if not project.resolve_template(identifier): ui.echo(f"{identifier} is not an applicable template") for template in project.resolve_template(identifier): - ui.echo(f'Uninstalling {template.identifier}') + ui.echo(f"Uninstalling {template.identifier}") project.remove_template( template, remove_user=remove_user, 
remove_empty_directories=remove_empty_directories ) def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project: - if kwargs.get('early_access', None) is not None: - self.use_early_access = kwargs.get('early_access', False) + if kwargs.get("early_access", None) is not None: + self.use_early_access = kwargs.get("early_access", False) if kwargs["version_source"]: # If true, then the user has not specified a version if not self.use_early_access and self.warn_early_access: ui.echo(f"PROS 4 is now in early access. " f"If you would like to use it, use the --early-access flag.") elif self.use_early_access: - ui.echo(f'Early access is enabled. Using PROS 4.') + ui.echo(f"Early access is enabled. Using PROS 4.") elif self.use_early_access: - ui.echo(f'Early access is enabled.') + ui.echo(f"Early access is enabled.") - if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')): - raise dont_send(ValueError('Will not create a project in user home directory')) + if Path(path).exists() and Path(path).samefile(os.path.expanduser("~")): + raise dont_send(ValueError("Will not create a project in user home directory")) for char in str(Path(path)): if ( char in [ - '?', - '<', - '>', - '*', - '|', - '^', - '#', - '%', - '&', - '$', - '+', - '!', - '`', - '\'', - '=', - '@', - '\'', - '{', - '}', - '[', - ']', - '(', - ')', - '~', + "?", + "<", + ">", + "*", + "|", + "^", + "#", + "%", + "&", + "$", + "+", + "!", + "`", + "'", + "=", + "@", + "'", + "{", + "}", + "[", + "]", + "(", + ")", + "~", ] or ord(char) > 127 ): - raise dont_send(ValueError(f'Invalid character found in directory name: \'{char}\'')) + raise dont_send(ValueError(f"Invalid character found in directory name: '{char}'")) proj = Project(path=path, create=True) - if 'target' in kwargs: - proj.target = kwargs['target'] - if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace(): - proj.project_name = kwargs['project_name'] + if "target" in kwargs: + proj.target = kwargs["target"] + if "project_name" in kwargs and kwargs["project_name"] and not kwargs["project_name"].isspace(): + proj.project_name = kwargs["project_name"] else: proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path))) - if 'version' in kwargs: - if kwargs['version'] == 'latest': - kwargs['version'] = '>=0' - self.apply_template(proj, identifier='kernel', **kwargs) + if "version" in kwargs: + if kwargs["version"] == "latest": + kwargs["version"] = ">=0" + self.apply_template(proj, identifier="kernel", **kwargs) proj.save() if not no_default_libs: @@ -407,8 +407,8 @@ def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Pro for library in libraries[proj.target]: try: # remove kernel version so that latest template satisfying query is correctly selected - if 'version' in kwargs: - kwargs.pop('version') + if "version" in kwargs: + kwargs.pop("version") self.apply_template(proj, library, **kwargs) except Exception as e: logger(__name__).exception(e) @@ -423,4 +423,4 @@ def remove_depot(self, name: str): self.save() def query_depots(self, url: bool): - return [name + ((' -- ' + depot.location) if url else '') for name, depot in self.depots.items()] + return [name + ((" -- " + depot.location) if url else "") for name, depot in self.depots.items()] diff --git a/pros/conductor/depots/depot.py b/pros/conductor/depots/depot.py index a0787f43..33b349f2 100644 --- a/pros/conductor/depots/depot.py +++ b/pros/conductor/depots/depot.py @@ -33,16 +33,16 @@ def 
fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> def get_remote_templates(self, auto_check_freq: Optional[timedelta] = None, force_check: bool = False, **kwargs): if auto_check_freq is None: - auto_check_freq = getattr(self, 'update_frequency', cli_config().update_frequency) + auto_check_freq = getattr(self, "update_frequency", cli_config().update_frequency) logger(__name__).info( - f'Last check of {self.name} was {self.last_remote_update} ' - f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).' + f"Last check of {self.name} was {self.last_remote_update} " + f"({datetime.now() - self.last_remote_update} vs {auto_check_freq})." ) if force_check or datetime.now() - self.last_remote_update > auto_check_freq: with ui.Notification(): - ui.echo(f'Updating {self.name}... ', nl=False) + ui.echo(f"Updating {self.name}... ", nl=False) self.update_remote_templates(**kwargs) - ui.echo('Done', color='green') + ui.echo("Done", color="green") for t in self.remote_templates: - t.metadata['origin'] = self.name + t.metadata["origin"] = self.name return self.remote_templates diff --git a/pros/conductor/depots/http_depot.py b/pros/conductor/depots/http_depot.py index 652b8c6e..feda9472 100644 --- a/pros/conductor/depots/http_depot.py +++ b/pros/conductor/depots/http_depot.py @@ -21,18 +21,18 @@ def __init__(self, name: str, location: str): def fetch_template(self, template: BaseTemplate, destination: str, **kwargs): import requests - assert 'location' in template.metadata - url = template.metadata['location'] - tf = download_file(url, ext='zip', desc=f'Downloading {template.identifier}') + assert "location" in template.metadata + url = template.metadata["location"] + tf = download_file(url, ext="zip", desc=f"Downloading {template.identifier}") if tf is None: - raise requests.ConnectionError(f'Could not obtain {url}') + raise requests.ConnectionError(f"Could not obtain {url}") with zipfile.ZipFile(tf) as zf: - with ui.progressbar(length=len(zf.namelist()), label=f'Extracting {template.identifier}') as pb: + with ui.progressbar(length=len(zf.namelist()), label=f"Extracting {template.identifier}") as pb: for file in zf.namelist(): zf.extract(file, path=destination) pb.update(1) os.remove(tf) - return ExternalTemplate(file=os.path.join(destination, 'template.pros')) + return ExternalTemplate(file=os.path.join(destination, "template.pros")) def update_remote_templates(self, **_): import requests @@ -41,5 +41,5 @@ def update_remote_templates(self, **_): if response.status_code == 200: self.remote_templates = jsonpickle.decode(response.text) else: - logger(__name__).warning(f'Unable to access {self.name} ({self.location}): {response.status_code}') + logger(__name__).warning(f"Unable to access {self.name} ({self.location}): {response.status_code}") self.last_remote_update = datetime.now() diff --git a/pros/conductor/depots/local_depot.py b/pros/conductor/depots/local_depot.py index 181ed581..0dbdb9a6 100644 --- a/pros/conductor/depots/local_depot.py +++ b/pros/conductor/depots/local_depot.py @@ -13,22 +13,22 @@ class LocalDepot(Depot): def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template: - if 'location' not in kwargs: + if "location" not in kwargs: logger(__name__).debug(f"Template not specified. 
Provided arguments: {kwargs}") - raise KeyError('Location of local template must be specified.') - location = kwargs['location'] + raise KeyError("Location of local template must be specified.") + location = kwargs["location"] if os.path.isdir(location): location_dir = location - if not os.path.isfile(os.path.join(location_dir, 'template.pros')): - raise ConfigNotFoundException(f'A template.pros file was not found in {location_dir}.') - template_file = os.path.join(location_dir, 'template.pros') + if not os.path.isfile(os.path.join(location_dir, "template.pros")): + raise ConfigNotFoundException(f"A template.pros file was not found in {location_dir}.") + template_file = os.path.join(location_dir, "template.pros") elif zipfile.is_zipfile(location): with zipfile.ZipFile(location) as zf: with click.progressbar(length=len(zf.namelist()), label=f"Extracting {location}") as progress_bar: for file in zf.namelist(): zf.extract(file, path=destination) progress_bar.update(1) - template_file = os.path.join(destination, 'template.pros') + template_file = os.path.join(destination, "template.pros") location_dir = destination elif os.path.isfile(location): location_dir = os.path.dirname(location) @@ -40,7 +40,7 @@ def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> raise ValueError(f"The specified location was not a file or directory ({location}).") if location_dir != destination: n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn]) - with click.progressbar(length=n_files, label='Copying to local cache') as pb: + with click.progressbar(length=n_files, label="Copying to local cache") as pb: def my_copy(*args): pb.update(1) @@ -50,4 +50,4 @@ def my_copy(*args): return ExternalTemplate(file=template_file) def __init__(self): - super().__init__('local', 'local') + super().__init__("local", "local") diff --git a/pros/conductor/interactive/NewProjectModal.py b/pros/conductor/interactive/NewProjectModal.py index 17132ff0..552f0b73 100644 --- a/pros/conductor/interactive/NewProjectModal.py +++ b/pros/conductor/interactive/NewProjectModal.py @@ -11,8 +11,8 @@ class NewProjectModal(application.Modal[None]): - targets = parameters.OptionParameter('v5', ['v5', 'cortex']) - kernel_versions = parameters.OptionParameter('latest', ['latest']) + targets = parameters.OptionParameter("v5", ["v5", "cortex"]) + kernel_versions = parameters.OptionParameter("latest", ["latest"]) install_default_libraries = parameters.BooleanParameter(True) project_name = parameters.Parameter(None) @@ -22,9 +22,9 @@ def __init__( self, ctx: Context = None, conductor: Optional[Conductor] = None, - directory=os.path.join(os.path.expanduser('~'), 'My PROS Project'), + directory=os.path.join(os.path.expanduser("~"), "My PROS Project"), ): - super().__init__('Create a new project') + super().__init__("Create a new project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self.directory = NonExistentProjectParameter(directory) @@ -33,11 +33,11 @@ def __init__( cb(self.targets) def target_changed(self, new_target): - templates = self.conductor.resolve_templates('kernel', target=new_target.value) + templates = self.conductor.resolve_templates("kernel", target=new_target.value) if len(templates) == 0: - self.kernel_versions.options = ['latest'] + self.kernel_versions.options = ["latest"] else: - self.kernel_versions.options = ['latest'] + sorted({t.version for t in templates}, reverse=True) + self.kernel_versions.options = ["latest"] + 
sorted({t.version for t in templates}, reverse=True) self.redraw() def confirm(self, *args, **kwargs): @@ -54,10 +54,10 @@ def confirm(self, *args, **kwargs): from pros.conductor.project import ProjectReport report = ProjectReport(project) - ui.finalize('project-report', report) + ui.finalize("project-report", report) with ui.Notification(): - ui.echo('Building project...') + ui.echo("Building project...") project.compile([]) @property @@ -65,15 +65,15 @@ def can_confirm(self): return self.directory.is_valid() and self.targets.is_valid() and self.kernel_versions.is_valid() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.directory) - yield components.ButtonGroup('Target', self.targets) + yield components.DirectorySelector("Project Directory", self.directory) + yield components.ButtonGroup("Target", self.targets) project_name_placeholder = os.path.basename(os.path.normpath(os.path.abspath(self.directory.value))) yield components.Container( - components.InputBox('Project Name', self.project_name, placeholder=project_name_placeholder), - components.DropDownBox('Kernel Version', self.kernel_versions), - components.Checkbox('Install default libraries', self.install_default_libraries), - title='Advanced', + components.InputBox("Project Name", self.project_name, placeholder=project_name_placeholder), + components.DropDownBox("Kernel Version", self.kernel_versions), + components.Checkbox("Install default libraries", self.install_default_libraries), + title="Advanced", collapsed=self.advanced_collapsed, ) diff --git a/pros/conductor/interactive/UpdateProjectModal.py b/pros/conductor/interactive/UpdateProjectModal.py index fecb6cb8..4e3943cd 100644 --- a/pros/conductor/interactive/UpdateProjectModal.py +++ b/pros/conductor/interactive/UpdateProjectModal.py @@ -46,7 +46,7 @@ def _add_template(self): ui.logger(__name__).debug(options) p = TemplateParameter(None, options) - @p.on('removed') + @p.on("removed") def remove_template(): self.new_templates.remove(p) @@ -55,14 +55,14 @@ def remove_template(): def __init__( self, ctx: Optional[Context] = None, conductor: Optional[Conductor] = None, project: Optional[Project] = None ): - super().__init__('Update a project') + super().__init__("Update a project") self.conductor = conductor or Conductor() self.click_ctx = ctx or get_current_context() self._is_processing = False self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) self.name = parameters.Parameter(None) @@ -74,7 +74,7 @@ def __init__( self.templates_collapsed = parameters.BooleanParameter(False) self.advanced_collapsed = parameters.BooleanParameter(True) - self.add_template_button = components.Button('Add Template') + self.add_template_button = components.Button("Add Template") self.add_template_button.on_clicked(self._add_template) @@ -92,7 +92,7 @@ def project_changed(self, new_project: ExistingProjectParameter): self.current_kernel = TemplateParameter( None, options=sorted( - {t for t in self.conductor.resolve_templates(self.project.templates['kernel'].as_query())}, + {t for t in self.conductor.resolve_templates(self.project.templates["kernel"].as_query())}, key=lambda v: Version(v.version), reverse=True, ), @@ -107,7 +107,7 @@ def project_changed(self, new_project: ExistingProjectParameter): 
), ) for t in self.project.templates.values() - if t.name != 'kernel' + if t.name != "kernel" ] self.new_templates = [] @@ -124,13 +124,13 @@ def can_confirm(self): return self.project and self._generate_transaction().can_execute() def build(self) -> Generator[components.Component, None, None]: - yield components.DirectorySelector('Project Directory', self.project_path) + yield components.DirectorySelector("Project Directory", self.project_path) if self.is_processing: yield components.Spinner() elif self.project_path.is_valid(): assert self.project is not None - yield components.Label(f'Modify your {self.project.target} project.') - yield components.InputBox('Project Name', self.name) + yield components.Label(f"Modify your {self.project.target} project.") + yield components.InputBox("Project Name", self.name) yield TemplateListingComponent(self.current_kernel, editable=dict(version=True), removable=False) yield components.Container( *( @@ -139,12 +139,12 @@ def build(self) -> Generator[components.Component, None, None]: ), *(TemplateListingComponent(t, editable=True, removable=True) for t in self.new_templates), self.add_template_button, - title='Templates', + title="Templates", collapsed=self.templates_collapsed, ) yield components.Container( - components.Checkbox('Re-apply all templates', self.force_apply_parameter), - title='Advanced', + components.Checkbox("Re-apply all templates", self.force_apply_parameter), + title="Advanced", collapsed=self.advanced_collapsed, ) yield components.Label('What will happen when you click "Continue":') diff --git a/pros/conductor/interactive/components.py b/pros/conductor/interactive/components.py index cc848fa5..e44b702a 100644 --- a/pros/conductor/interactive/components.py +++ b/pros/conductor/interactive/components.py @@ -7,23 +7,23 @@ class TemplateListingComponent(components.Container): def _generate_components(self) -> Generator[components.Component, None, None]: - if not self.editable['name'] and not self.editable['version']: + if not self.editable["name"] and not self.editable["version"]: yield components.Label(self.template.value.identifier) else: - if self.editable['name']: - yield components.InputBox('Name', self.template.name) + if self.editable["name"]: + yield components.InputBox("Name", self.template.name) else: yield components.Label(self.template.value.name) - if self.editable['version']: + if self.editable["version"]: if isinstance(self.template.version, parameters.OptionParameter): - yield components.DropDownBox('Version', self.template.version) + yield components.DropDownBox("Version", self.template.version) else: - yield components.InputBox('Version', self.template.version) + yield components.InputBox("Version", self.template.version) else: yield components.Label(self.template.value.version) if self.removable: - remove_button = components.Button('Don\'t remove' if self.template.removed else 'Remove') - remove_button.on_clicked(lambda: self.template.trigger('removed')) + remove_button = components.Button("Don't remove" if self.template.removed else "Remove") + remove_button.on_clicked(lambda: self.template.trigger("removed")) yield remove_button def __init__( diff --git a/pros/conductor/interactive/parameters.py b/pros/conductor/interactive/parameters.py index 9f05b632..486a0719 100644 --- a/pros/conductor/interactive/parameters.py +++ b/pros/conductor/interactive/parameters.py @@ -13,24 +13,24 @@ class NonExistentProjectParameter(p.ValidatableParameter[str]): def validate(self, value: str) -> Union[bool, str]: value = 
os.path.abspath(value) if os.path.isfile(value): - return 'Path is a file' + return "Path is a file" if os.path.isdir(value) and not os.access(value, os.W_OK): - return 'Do not have write permission to path' + return "Do not have write permission to path" if Project.find_project(value) is not None: - return 'Project path already exists, delete it first' + return "Project path already exists, delete it first" blacklisted_directories = [] # TODO: Proper Windows support - if sys.platform == 'win32': + if sys.platform == "win32": blacklisted_directories.extend( [ - os.environ.get('WINDIR', os.path.join('C:', 'Windows')), - os.environ.get('PROGRAMFILES', os.path.join('C:', 'Program Files')), + os.environ.get("WINDIR", os.path.join("C:", "Windows")), + os.environ.get("PROGRAMFILES", os.path.join("C:", "Program Files")), ] ) if any(value.startswith(d) for d in blacklisted_directories): - return 'Cannot create project in a system directory' - if Path(value).exists() and Path(value).samefile(os.path.expanduser('~')): - return 'Should not create a project in home directory' + return "Cannot create project in a system directory" + if Path(value).exists() and Path(value).samefile(os.path.expanduser("~")): + return "Should not create a project in home directory" if not os.path.exists(value): parent = os.path.split(value)[0] while parent and not os.path.exists(parent): @@ -39,13 +39,13 @@ def validate(self, value: str) -> Union[bool, str]: break parent = temp_value if not parent: - return 'Cannot create directory because root does not exist' + return "Cannot create directory because root does not exist" if not os.path.exists(parent): - return f'Cannot create directory because {parent} does not exist' + return f"Cannot create directory because {parent} does not exist" if not os.path.isdir(parent): - return f'Cannot create directory because {parent} is a file' + return f"Cannot create directory because {parent} is a file" if not os.access(parent, os.W_OK | os.X_OK): - return f'Cannot create directory because missing write permissions to {parent}' + return f"Cannot create directory because missing write permissions to {parent}" return True @@ -58,7 +58,7 @@ def update(self, new_value): def validate(self, value: str): project = Project.find_project(value) - return project is not None or 'Path is not inside a PROS project' + return project is not None or "Path is not inside a PROS project" class TemplateParameter(p.ValidatableParameter[BaseTemplate]): @@ -73,29 +73,29 @@ def _update_versions(self): self.version.value = self.version.options[0] self.value = self.options[self.name.value][self.version.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.version = p.AlwaysInvalidParameter(self.value.version) def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate], allow_invalid_input: bool = True): if not template and len(options) == 0: - raise ValueError('At least template or versions must be defined for a TemplateParameter') + raise ValueError("At least template or versions must be defined for a TemplateParameter") self.options = {t.name: {_t.version: _t for _t in options if t.name == _t.name} for t in options} if not template: first_template = list(self.options.values())[0] - template = first_template[str(Spec('>0').select([Version(v) for v in first_template.keys()]))] + template = first_template[str(Spec(">0").select([Version(v) for v in first_template.keys()]))] super().__init__(template, allow_invalid_input) self.name: 
p.ValidatableParameter[str] = p.ValidatableParameter( self.value.name, allow_invalid_input, - validate=lambda v: True if v in self.options.keys() else f'Could not find a template named {v}', + validate=lambda v: True if v in self.options.keys() else f"Could not find a template named {v}", ) if not self.value.version and self.value.name in self.options: - self.value.version = Spec('>0').select([Version(v) for v in self.options[self.value.name].keys()]) + self.value.version = Spec(">0").select([Version(v) for v in self.options[self.value.name].keys()]) self.version = None self._update_versions() @@ -103,23 +103,23 @@ def __init__(self, template: Optional[BaseTemplate], options: List[BaseTemplate] @self.name.on_any_changed def name_any_changed(v: p.ValidatableParameter): self._update_versions() - self.trigger('changed', self) + self.trigger("changed", self) @self.version.on_any_changed def version_any_changed(v: p.ValidatableParameter): if v.value in self.options[self.name.value].keys(): self.value = self.options[self.name.value][v.value] - self.trigger('changed_validated', self) + self.trigger("changed_validated", self) else: self.value.version = v.value - self.trigger('changed', self) + self.trigger("changed", self) # self.name.on_changed(lambda v: self.trigger('changed_validated', self)) # self.version.on_changed(lambda v: self.trigger('changed_validated', self)) self.removed = False - @self.on('removed') + @self.on("removed") def removed_changed(): self.removed = not self.removed diff --git a/pros/conductor/project/ProjectReport.py b/pros/conductor/project/ProjectReport.py index 6af81707..683964d9 100644 --- a/pros/conductor/project/ProjectReport.py +++ b/pros/conductor/project/ProjectReport.py @@ -2,7 +2,7 @@ class ProjectReport(object): - def __init__(self, project: 'Project'): + def __init__(self, project: "Project"): self.project = { "target": project.target, "location": os.path.abspath(project.location), @@ -18,9 +18,9 @@ def __str__(self): s = ( f'PROS Project for {self.project["target"]} at: {self.project["location"]}' f' ({self.project["name"]})' if self.project["name"] - else '' + else "" ) - s += '\n' + s += "\n" rows = [t.values() for t in self.project["templates"]] headers = [h.capitalize() for h in self.project["templates"][0].keys()] s += tabulate.tabulate(rows, headers=headers) diff --git a/pros/conductor/project/ProjectTransaction.py b/pros/conductor/project/ProjectTransaction.py index 8ea963b7..973cedd5 100644 --- a/pros/conductor/project/ProjectTransaction.py +++ b/pros/conductor/project/ProjectTransaction.py @@ -41,31 +41,31 @@ def execute(self, conductor: c.Conductor, project: c.Project): def describe(self, conductor: c.Conductor, project: c.Project): action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.NotApplicable: - return f'{self.template.identifier} cannot be applied to project!' + return f"{self.template.identifier} cannot be applied to project!" if action == TemplateAction.Installable: - return f'{self.template.identifier} will installed to project.' + return f"{self.template.identifier} will installed to project." if action == TemplateAction.Downgradable: return ( - f'Project will be downgraded to {self.template.identifier} from' - f' {project.templates[self.template.name].version}.' + f"Project will be downgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}." 
) if action == TemplateAction.Upgradable: return ( - f'Project will be upgraded to {self.template.identifier} from' - f' {project.templates[self.template.name].version}.' + f"Project will be upgraded to {self.template.identifier} from" + f" {project.templates[self.template.name].version}." ) if action == TemplateAction.AlreadyInstalled: - if self.apply_kwargs.get('force_apply'): - return f'{self.template.identifier} will be re-applied.' + if self.apply_kwargs.get("force_apply"): + return f"{self.template.identifier} will be re-applied." elif self.suppress_already_installed: - return f'{self.template.identifier} will not be re-applied.' + return f"{self.template.identifier} will not be re-applied." else: - return f'{self.template.identifier} cannot be applied to project because it is already installed.' + return f"{self.template.identifier} cannot be applied to project because it is already installed." def can_execute(self, conductor: c.Conductor, project: c.Project) -> bool: action = project.get_template_actions(conductor.resolve_template(self.template)) if action == TemplateAction.AlreadyInstalled: - return self.apply_kwargs.get('force_apply') or self.suppress_already_installed + return self.apply_kwargs.get("force_apply") or self.suppress_already_installed return action in [TemplateAction.Installable, TemplateAction.Downgradable, TemplateAction.Upgradable] @@ -87,7 +87,7 @@ def execute(self, conductor: c.Conductor, project: c.Project): ui.logger(__name__).warning(str(e)) def describe(self, conductor: c.Conductor, project: c.Project) -> str: - return f'{self.template.identifier} will be removed' + return f"{self.template.identifier} will be removed" def can_execute(self, conductor: c.Conductor, project: c.Project): return True @@ -119,15 +119,15 @@ def add_action(self, action: Action) -> None: def execute(self): if len(self.actions) == 0: - ui.logger(__name__).warning('No actions necessary.') + ui.logger(__name__).warning("No actions necessary.") return location = self.project.location - tfd, tfn = tempfile.mkstemp(prefix='pros-project-', suffix=f'-{self.project.name}.zip', text='w+b') - with os.fdopen(tfd, 'w+b') as tf: - with zipfile.ZipFile(tf, mode='w') as zf: - files, length = it.tee(location.glob('**/*'), 2) + tfd, tfn = tempfile.mkstemp(prefix="pros-project-", suffix=f"-{self.project.name}.zip", text="w+b") + with os.fdopen(tfd, "w+b") as tf: + with zipfile.ZipFile(tf, mode="w") as zf: + files, length = it.tee(location.glob("**/*"), 2) length = len(list(length)) - with ui.progressbar(files, length=length, label=f'Backing up {self.project.name} to {tfn}') as pb: + with ui.progressbar(files, length=length, label=f"Backing up {self.project.name} to {tfn}") as pb: for file in pb: zf.write(file, arcname=file.relative_to(location)) @@ -136,21 +136,21 @@ def execute(self): for action in self.actions: ui.logger(__name__).debug(action.describe(self.conductor, self.project)) rv = action.execute(self.conductor, self.project) - ui.logger(__name__).debug(f'{action} returned {rv}') + ui.logger(__name__).debug(f"{action} returned {rv}") if rv is not None and not rv: - raise ValueError('Action did not complete successfully') - ui.echo('All actions performed successfully') + raise ValueError("Action did not complete successfully") + ui.echo("All actions performed successfully") except Exception as e: - ui.logger(__name__).warning(f'Failed to perform transaction, restoring project to previous state') + ui.logger(__name__).warning(f"Failed to perform transaction, restoring project to previous state") 
with zipfile.ZipFile(tfn) as zf: - with ui.progressbar(zf.namelist(), label=f'Restoring {self.project.name} from {tfn}') as pb: + with ui.progressbar(zf.namelist(), label=f"Restoring {self.project.name} from {tfn}") as pb: for file in pb: zf.extract(file, path=location) ui.logger(__name__).exception(e) finally: - ui.echo(f'Removing {tfn}') + ui.echo(f"Removing {tfn}") os.remove(tfn) def apply_template(self, template: c.BaseTemplate, suppress_already_installed: bool = False, **kwargs): @@ -168,9 +168,9 @@ def change_name(self, new_name: str): def describe(self) -> str: if len(self.actions) > 0: - return '\n'.join(f'- {a.describe(self.conductor, self.project)}' for a in self.actions) + return "\n".join(f"- {a.describe(self.conductor, self.project)}" for a in self.actions) else: - return 'No actions necessary.' + return "No actions necessary." def can_execute(self) -> bool: return all(a.can_execute(self.conductor, self.project) for a in self.actions) diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py index cb0509f7..4d262587 100644 --- a/pros/conductor/project/__init__.py +++ b/pros/conductor/project/__init__.py @@ -17,7 +17,7 @@ class Project(Config): - def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = True, defaults: dict = None): + def __init__(self, path: str = ".", create: bool = False, raise_on_error: bool = True, defaults: dict = None): """ Instantiates a PROS project configuration :param path: A path to the project, may be the actual project.pros file, any child directory of the project, @@ -27,27 +27,27 @@ def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = :param raise_on_error: :param defaults: """ - file = Project.find_project(path or '.') + file = Project.find_project(path or ".") if file is None and create: - file = os.path.join(path, 'project.pros') if not os.path.basename(path) == 'project.pros' else path + file = os.path.join(path, "project.pros") if not os.path.basename(path) == "project.pros" else path elif file is None and raise_on_error: - raise ConfigNotFoundException('A project config was not found for {}'.format(path)) + raise ConfigNotFoundException("A project config was not found for {}".format(path)) if defaults is None: defaults = {} - self.target: str = defaults.get('target', 'cortex').lower() # VEX Hardware target (V5/Cortex) - self.templates: Dict[str, Template] = defaults.get('templates', {}) - self.upload_options: Dict = defaults.get('upload_options', {}) - self.project_name: str = defaults.get('project_name', None) + self.target: str = defaults.get("target", "cortex").lower() # VEX Hardware target (V5/Cortex) + self.templates: Dict[str, Template] = defaults.get("templates", {}) + self.upload_options: Dict = defaults.get("upload_options", {}) + self.project_name: str = defaults.get("project_name", None) super(Project, self).__init__(file, error_on_decode=raise_on_error) - if 'kernel' in self.__dict__: + if "kernel" in self.__dict__: # Add backwards compatibility with PROS CLI 2 projects by adding kernel as a pseudo-template - self.templates['kernel'] = Template( + self.templates["kernel"] = Template( user_files=self.all_files, - name='kernel', - version=self.__dict__['kernel'], + name="kernel", + version=self.__dict__["kernel"], target=self.target, - output='bin/output.bin', + output="bin/output.bin", ) @property @@ -63,13 +63,13 @@ def name(self): return ( self.project_name or os.path.basename(self.location) - or 
os.path.basename(self.templates['kernel'].metadata['output']) - or 'pros' + or os.path.basename(self.templates["kernel"].metadata["output"]) + or "pros" ) @property def all_files(self) -> Set[str]: - return {os.path.relpath(p, self.location) for p in glob.glob(f'{self.location}/**/*', recursive=True)} + return {os.path.relpath(p, self.location) for p in glob.glob(f"{self.location}/**/*", recursive=True)} def get_template_actions(self, template: BaseTemplate) -> TemplateAction: ui.logger(__name__).debug(template) @@ -77,7 +77,7 @@ def get_template_actions(self, template: BaseTemplate) -> TemplateAction: return TemplateAction.NotApplicable from semantic_version import Spec, Version - if template.name != 'kernel' and Version(self.kernel) not in Spec(template.supported_kernels or '>0'): + if template.name != "kernel" and Version(self.kernel) not in Spec(template.supported_kernels or ">0"): if template.name in self.templates.keys(): return TemplateAction.AlreadyInstalled return TemplateAction.NotApplicable @@ -128,7 +128,7 @@ def apply_template( installed_user_files = set() for lib_name, lib in self.templates.items(): if lib_name == template.name or lib.name == template.name: - logger(__name__).debug(f'{lib} is already installed') + logger(__name__).debug(f"{lib} is already installed") logger(__name__).debug(lib.system_files) logger(__name__).debug(lib.user_files) transaction.extend_rm(lib.system_files) @@ -140,14 +140,14 @@ def apply_template( deprecated_user_files = installed_user_files.intersection(self.all_files) - set(template.user_files) if any(deprecated_user_files): if force_user or confirm( - f'The following user files have been deprecated: {deprecated_user_files}. ' - f'Do you want to update them?' + f"The following user files have been deprecated: {deprecated_user_files}. " + f"Do you want to update them?" ): transaction.extend_rm(deprecated_user_files) else: logger(__name__).warning( - f'Deprecated user files may cause weird quirks. See migration guidelines from ' - f'{template.identifier}\'s release notes.' + f"Deprecated user files may cause weird quirks. See migration guidelines from " + f"{template.identifier}'s release notes." ) # Carry forward deprecated user files into the template about to be applied so that user gets warned in # future. @@ -177,22 +177,22 @@ def new_user_filter(new_file: str) -> bool: if any([file in transaction.effective_state for file in template.system_files]) and not force_system: confirm( - f'Some required files for {template.identifier} already exist in the project. ' - f'Overwrite the existing files?', + f"Some required files for {template.identifier} already exist in the project. " + f"Overwrite the existing files?", abort=True, ) transaction.extend_add(template.system_files, template.location) logger(__name__).debug(transaction) - transaction.commit(label=f'Applying {template.identifier}', remove_empty_directories=remove_empty_directories) + transaction.commit(label=f"Applying {template.identifier}", remove_empty_directories=remove_empty_directories) self.templates[template.name] = template self.save() def remove_template(self, template: Template, remove_user: bool = False, remove_empty_directories: bool = True): if not self.template_is_installed(template): - raise ValueError(f'{template.identifier} is not installed on this project.') - if template.name == 'kernel': - raise ValueError(f'Cannot remove the kernel template. 
Maybe create a new project?') + raise ValueError(f"{template.identifier} is not installed on this project.") + if template.name == "kernel": + raise ValueError(f"Cannot remove the kernel template. Maybe create a new project?") real_template = LocalTemplate(orig=template, location=self.location) transaction = Transaction(self.location, set(self.all_files)) @@ -201,7 +201,7 @@ def remove_template(self, template: Template, remove_user: bool = False, remove_ transaction.extend_rm(real_template.real_user_files) logger(__name__).debug(transaction) transaction.commit( - label=f'Removing {template.identifier}...', remove_empty_directories=remove_empty_directories + label=f"Removing {template.identifier}...", remove_empty_directories=remove_empty_directories ) del self.templates[real_template.name] self.save() @@ -223,39 +223,39 @@ def resolve_template(self, query: Union[str, BaseTemplate]) -> List[Template]: def __str__(self): return ( - f'Project: {self.location} ({self.name}) for {self.target} with ' + f"Project: {self.location} ({self.name}) for {self.target} with " f'{", ".join([str(t) for t in self.templates.values()])}' ) @property def kernel(self): - if 'kernel' in self.templates: - return self.templates['kernel'].version - elif hasattr(self.__dict__, 'kernel'): - return self.__dict__['kernel'] - return '' + if "kernel" in self.templates: + return self.templates["kernel"].version + elif hasattr(self.__dict__, "kernel"): + return self.__dict__["kernel"] + return "" @property def output(self): - if 'kernel' in self.templates: - return self.templates['kernel'].metadata['output'] - elif hasattr(self.__dict__, 'output'): - return self.__dict__['output'] - return 'bin/output.bin' + if "kernel" in self.templates: + return self.templates["kernel"].metadata["output"] + elif hasattr(self.__dict__, "output"): + return self.__dict__["output"] + return "bin/output.bin" def make(self, build_args: List[str]): import subprocess env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" stdout_pipe = EchoPipe() stderr_pipe = EchoPipe(err=True) process = None @@ -269,12 +269,12 @@ def make(self, build_args: List[str]): stderr=stderr_pipe, ) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): + if not os.environ.get("PROS_TOOLCHAIN"): ui.logger(__name__).warn( "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" ) ui.logger(__name__).error( - f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} ) stdout_pipe.close() stderr_pipe.close() @@ -303,7 +303,7 @@ def make_scan_build( td = tempfile.TemporaryDirectory() td_path = td.name.replace("\\", "/") - build_args = [*build_args, f'BINDIR={td_path}'] + build_args = [*build_args, f"BINDIR={td_path}"] def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compilation]]: """ @@ -321,17 +321,17 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil setup_environment, ) - with temporary_directory(prefix='intercept-') as tmp_dir: + with temporary_directory(prefix="intercept-") as tmp_dir: # run the build command environment = setup_environment(args, tmp_dir) - if os.environ.get('PROS_TOOLCHAIN'): - environment['PATH'] = ( - os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + environment['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + environment["PATH"] = ( + os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + environment["PATH"] ) - if sys.platform == 'darwin': - environment['PATH'] = ( - os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment['PATH'] + if sys.platform == "darwin": + environment["PATH"] = ( + os.path.dirname(os.path.abspath(sys.executable)) + os.pathsep + environment["PATH"] ) if not suppress_output: @@ -343,12 +343,12 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil try: exit_code = run_build(args.build, env=environment, stdout=pipe, stderr=pipe, cwd=self.directory) except Exception as e: - if not os.environ.get('PROS_TOOLCHAIN'): + if not os.environ.get("PROS_TOOLCHAIN"): ui.logger(__name__).warn( "PROS toolchain not found! 
Please ensure the toolchain is installed correctly and your environment variables are set properly.\n" ) ui.logger(__name__).error( - f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={'sentry': False} + f"ERROR WHILE CALLING '{make_cmd}' WITH EXCEPTION: {str(e)}\n", extra={"sentry": False} ) if not suppress_output: pipe.close() @@ -362,21 +362,21 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil return exit_code, iter(set(current)) # call make.exe if on Windows - if os.name == 'nt' and os.environ.get('PROS_TOOLCHAIN'): - make_cmd = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin', 'make.exe') + if os.name == "nt" and os.environ.get("PROS_TOOLCHAIN"): + make_cmd = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin", "make.exe") else: - make_cmd = 'make' + make_cmd = "make" args = create_intercept_parser().parse_args( [ - '--override-compiler', - '--use-cc', - 'arm-none-eabi-gcc', - '--use-c++', - 'arm-none-eabi-g++', + "--override-compiler", + "--use-cc", + "arm-none-eabi-gcc", + "--use-c++", + "arm-none-eabi-g++", make_cmd, *build_args, - 'CC=intercept-cc', - 'CXX=intercept-c++', + "CC=intercept-cc", + "CXX=intercept-c++", ] ) exit_code, entries = libscanbuild_capture(args) @@ -388,47 +388,47 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil if not any(any_entries): return exit_code if not suppress_output: - ui.echo('Capturing metadata for PROS Editor...') + ui.echo("Capturing metadata for PROS Editor...") env = os.environ.copy() # Add PROS toolchain to the beginning of PATH to ensure PROS binaries are preferred - if os.environ.get('PROS_TOOLCHAIN'): - env['PATH'] = os.path.join(os.environ.get('PROS_TOOLCHAIN'), 'bin') + os.pathsep + env['PATH'] + if os.environ.get("PROS_TOOLCHAIN"): + env["PATH"] = os.path.join(os.environ.get("PROS_TOOLCHAIN"), "bin") + os.pathsep + env["PATH"] cc_sysroot = subprocess.run( - [make_cmd, 'cc-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + [make_cmd, "cc-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory ) lines = str(cc_sysroot.stderr.decode()).splitlines() + str(cc_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cc_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cc_sysroot_includes.append(f'-isystem{line}') + cc_sysroot_includes.append(f"-isystem{line}") cxx_sysroot = subprocess.run( - [make_cmd, 'cxx-sysroot'], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory + [make_cmd, "cxx-sysroot"], env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.directory ) lines = str(cxx_sysroot.stderr.decode()).splitlines() + str(cxx_sysroot.stdout.decode()).splitlines() lines = [l.strip() for l in lines] cxx_sysroot_includes = [] copy = False for line in lines: - if line == '#include <...> search starts here:': + if line == "#include <...> search starts here:": copy = True continue - if line == 'End of search list.': + if line == "End of search list.": copy = False continue if copy: - cxx_sysroot_includes.append(f'-isystem{line}') + cxx_sysroot_includes.append(f"-isystem{line}") new_entries, entries = itertools.tee(entries, 2) new_sources = set([e.source for e in entries]) if not cdb_file: - cdb_file = 
os.path.join(self.directory, 'compile_commands.json') + cdb_file = os.path.join(self.directory, "compile_commands.json") if isinstance(cdb_file, str) and os.path.isfile(cdb_file): old_entries = itertools.filterfalse( lambda entry: entry.source in new_sources, CompilationDatabase.load(cdb_file) @@ -436,19 +436,19 @@ def libscanbuild_capture(args: argparse.Namespace) -> Tuple[int, Iterable[Compil else: old_entries = [] - extra_flags = ['-target', 'armv7ar-none-none-eabi'] - logger(__name__).debug('cc_sysroot_includes') + extra_flags = ["-target", "armv7ar-none-none-eabi"] + logger(__name__).debug("cc_sysroot_includes") logger(__name__).debug(cc_sysroot_includes) - logger(__name__).debug('cxx_sysroot_includes') + logger(__name__).debug("cxx_sysroot_includes") logger(__name__).debug(cxx_sysroot_includes) - if sys.platform == 'win32': + if sys.platform == "win32": extra_flags.extend(["-fno-ms-extensions", "-fno-ms-compatibility", "-fno-delayed-template-parsing"]) def new_entry_map(entry): - if entry.compiler == 'c': + if entry.compiler == "c": entry.flags = extra_flags + cc_sysroot_includes + entry.flags - elif entry.compiler == 'c++': + elif entry.compiler == "c++": entry.flags = extra_flags + cxx_sysroot_includes + entry.flags return entry @@ -456,13 +456,13 @@ def new_entry_map(entry): def entry_map(entry: Compilation): json_entry = entry.as_db_entry() - json_entry['arguments'][0] = 'clang' if entry.compiler == 'c' else 'clang++' + json_entry["arguments"][0] = "clang" if entry.compiler == "c" else "clang++" return json_entry entries = itertools.chain(old_entries, new_entries) json_entries = list(map(entry_map, entries)) if isinstance(cdb_file, str): - cdb_file = open(cdb_file, 'w') + cdb_file = open(cdb_file, "w") import json json.dump(json_entries, cdb_file, sort_keys=True, indent=4) @@ -478,7 +478,7 @@ def compile(self, build_args: List[str], scan_build: Optional[bool] = None): @staticmethod def find_project(path: str, recurse_times: int = 10): - path = os.path.abspath(path or '.') + path = os.path.abspath(path or ".") if os.path.isfile(path): path = os.path.dirname(path) if os.path.isdir(path): @@ -487,10 +487,10 @@ def find_project(path: str, recurse_times: int = 10): files = [ f for f in os.listdir(path) - if os.path.isfile(os.path.join(path, f)) and f.lower() == 'project.pros' + if os.path.isfile(os.path.join(path, f)) and f.lower() == "project.pros" ] if len(files) == 1: # found a project.pros file! 
- logger(__name__).info(f'Found Project Path: {os.path.join(path, files[0])}') + logger(__name__).info(f"Found Project Path: {os.path.join(path, files[0])}") return os.path.join(path, files[0]) path = os.path.dirname(path) else: @@ -498,4 +498,4 @@ def find_project(path: str, recurse_times: int = 10): return None -__all__ = ['Project', 'ProjectReport'] +__all__ = ["Project", "ProjectReport"] diff --git a/pros/conductor/templates/base_template.py b/pros/conductor/templates/base_template.py index 95a19064..2eb7d6ad 100644 --- a/pros/conductor/templates/base_template.py +++ b/pros/conductor/templates/base_template.py @@ -12,20 +12,20 @@ def __init__(self, **kwargs): self.supported_kernels: str = None self.target: str = None self.metadata: Dict[str, Any] = {} - if 'orig' in kwargs: - self.__dict__.update({k: v for k, v in kwargs.pop('orig').__dict__.items() if k in self.__dict__}) + if "orig" in kwargs: + self.__dict__.update({k: v for k, v in kwargs.pop("orig").__dict__.items() if k in self.__dict__}) self.__dict__.update({k: v for k, v in kwargs.items() if k in self.__dict__}) self.metadata.update({k: v for k, v in kwargs.items() if k not in self.__dict__}) - if 'depot' in self.metadata and 'origin' not in self.metadata: - self.metadata['origin'] = self.metadata.pop('depot') - if 'd' in self.metadata and 'depot' not in self.metadata: - self.metadata['depot'] = self.metadata.pop('d') - if 'l' in self.metadata and 'location' not in self.metadata: - self.metadata['location'] = self.metadata.pop('l') - if self.name == 'pros': - self.name = 'kernel' + if "depot" in self.metadata and "origin" not in self.metadata: + self.metadata["origin"] = self.metadata.pop("depot") + if "d" in self.metadata and "depot" not in self.metadata: + self.metadata["depot"] = self.metadata.pop("d") + if "l" in self.metadata and "location" not in self.metadata: + self.metadata["location"] = self.metadata.pop("l") + if self.name == "pros": + self.name = "kernel" - def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = None) -> bool: + def satisfies(self, query: "BaseTemplate", kernel_version: Union[str, Version] = None) -> bool: if query.name and self.name != query.name: return False if query.target and self.target != query.target: @@ -47,7 +47,7 @@ def satisfies(self, query: 'BaseTemplate', kernel_version: Union[str, Version] = def __str__(self): fields = [self.metadata.get("origin", None), self.target, self.__class__.__name__] additional = ", ".join(map(str, filter(bool, fields))) - return f'{self.identifier} ({additional})' + return f"{self.identifier} ({additional})" def __gt__(self, other): if isinstance(other, BaseTemplate): @@ -65,30 +65,30 @@ def __eq__(self, other): def __hash__(self): return self.identifier.__hash__() - def as_query(self, version='>0', metadata=False, **kwargs): + def as_query(self, version=">0", metadata=False, **kwargs): if isinstance(metadata, bool) and not metadata: metadata = dict() return BaseTemplate(orig=self, version=version, metadata=metadata, **kwargs) @property def identifier(self): - return f'{self.name}@{self.version}' + return f"{self.name}@{self.version}" @property def origin(self): - return self.metadata.get('origin', 'Unknown') + return self.metadata.get("origin", "Unknown") @classmethod - def create_query(cls, name: str = None, **kwargs) -> 'BaseTemplate': + def create_query(cls, name: str = None, **kwargs) -> "BaseTemplate": if not isinstance(name, str): return cls(**kwargs) - if name.count('@') > 1: - raise ValueError(f'Malformed identifier: 
{name}') - if '@' in name: - name, kwargs['version'] = name.split('@') - if kwargs.get('version', 'latest') == 'latest': - kwargs['version'] = '>=0' - if name == 'kernal': + if name.count("@") > 1: + raise ValueError(f"Malformed identifier: {name}") + if "@" in name: + name, kwargs["version"] = name.split("@") + if kwargs.get("version", "latest") == "latest": + kwargs["version"] = ">=0" + if name == "kernal": ui.echo("Assuming 'kernal' is the British spelling of kernel.") - name = 'kernel' + name = "kernel" return cls(name=name, **kwargs) diff --git a/pros/conductor/templates/external_template.py b/pros/conductor/templates/external_template.py index ce08662e..870ef28d 100644 --- a/pros/conductor/templates/external_template.py +++ b/pros/conductor/templates/external_template.py @@ -10,18 +10,18 @@ class ExternalTemplate(Config, Template): def __init__(self, file: str, **kwargs): if os.path.isdir(file): - file = os.path.join(file, 'template.pros') + file = os.path.join(file, "template.pros") elif zipfile.is_zipfile(file): self.tf = tempfile.NamedTemporaryFile(delete=False) with zipfile.ZipFile(file) as zf: - with zf.open('template.pros') as zt: + with zf.open("template.pros") as zt: self.tf.write(zt.read()) self.tf.seek(0, 0) file = self.tf.name - error_on_decode = kwargs.pop('error_on_decode', False) + error_on_decode = kwargs.pop("error_on_decode", False) Template.__init__(self, **kwargs) Config.__init__(self, file, error_on_decode=error_on_decode) def __del__(self): - if hasattr(self, 'tr'): + if hasattr(self, "tr"): del self.tf diff --git a/pros/conductor/templates/local_template.py b/pros/conductor/templates/local_template.py index 53d66e73..d3be5b34 100644 --- a/pros/conductor/templates/local_template.py +++ b/pros/conductor/templates/local_template.py @@ -4,7 +4,7 @@ def _fix_path(*paths: str) -> str: - return os.path.normpath(os.path.join(*paths).replace('\\', '/')) + return os.path.normpath(os.path.join(*paths).replace("\\", "/")) class LocalTemplate(Template): diff --git a/pros/conductor/transaction.py b/pros/conductor/transaction.py index 0fcb05d7..4b7ba650 100644 --- a/pros/conductor/transaction.py +++ b/pros/conductor/transaction.py @@ -19,7 +19,7 @@ def extend_add(self, paths: Iterable[str], src: str): self.add(path, src) def add(self, path: str, src: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._add_files.add(path) self.effective_state.add(path) self._add_srcs[path] = src @@ -31,7 +31,7 @@ def extend_rm(self, paths: Iterable[str]): self.rm(path) def rm(self, path: str): - path = os.path.normpath(path.replace('\\', '/')) + path = os.path.normpath(path.replace("\\", "/")) self._rm_files.add(path) if path in self.effective_state: self.effective_state.remove(path) @@ -39,18 +39,18 @@ def rm(self, path: str): self._add_files.remove(path) self._add_srcs.pop(path) - def commit(self, label: str = 'Committing transaction', remove_empty_directories: bool = True): + def commit(self, label: str = "Committing transaction", remove_empty_directories: bool = True): with ui.progressbar(length=len(self._rm_files) + len(self._add_files), label=label) as pb: - for file in sorted(self._rm_files, key=lambda p: p.count('/') + p.count('\\'), reverse=True): + for file in sorted(self._rm_files, key=lambda p: p.count("/") + p.count("\\"), reverse=True): file_path = os.path.join(self.location, file) if os.path.isfile(file_path): - logger(__name__).info(f'Removing {file}') + logger(__name__).info(f"Removing {file}") 
os.remove(os.path.join(self.location, file)) else: - logger(__name__).info(f'Not removing nonexistent {file}') + logger(__name__).info(f"Not removing nonexistent {file}") pardir = os.path.abspath(os.path.join(file_path, os.pardir)) while remove_empty_directories and len(os.listdir(pardir)) == 0: - logger(__name__).info(f'Removing {os.path.relpath(pardir, self.location)}') + logger(__name__).info(f"Removing {os.path.relpath(pardir, self.location)}") os.rmdir(pardir) pardir = os.path.abspath(os.path.join(pardir, os.pardir)) if pardir == self.location: @@ -63,13 +63,13 @@ def commit(self, label: str = 'Committing transaction', remove_empty_directories destination = os.path.join(self.location, file) if os.path.isfile(source): if not os.path.isdir(os.path.dirname(destination)): - logger(__name__).debug(f'Creating directories: f{destination}') + logger(__name__).debug(f"Creating directories: f{destination}") os.makedirs(os.path.dirname(destination), exist_ok=True) - logger(__name__).info(f'Adding {file}') + logger(__name__).info(f"Adding {file}") shutil.copy(os.path.join(self._add_srcs[file], file), os.path.join(self.location, file)) else: logger(__name__).info(f"Not copying {file} because {source} doesn't exist.") pb.update(1) def __str__(self): - return f'Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}' + return f"Transaction Object: ADD: {self._add_files}\tRM: {self._rm_files}\tLocation: {self.location}" diff --git a/pros/config/cli_config.py b/pros/config/cli_config.py index 1600b146..45f18a33 100644 --- a/pros/config/cli_config.py +++ b/pros/config/cli_config.py @@ -17,7 +17,7 @@ class CliConfig(Config): def __init__(self, file=None): if not file: - file = os.path.join(click.get_app_dir('PROS'), 'cli.pros') + file = os.path.join(click.get_app_dir("PROS"), "cli.pros") self.update_frequency: timedelta = timedelta(hours=1) self.override_use_build_compile_commands: Optional[bool] = None self.offer_sentry: Optional[bool] = None @@ -31,19 +31,19 @@ def needs_online_fetch(self, last_fetch: datetime) -> bool: def use_build_compile_commands(self): if self.override_use_build_compile_commands is not None: return self.override_use_build_compile_commands - paths = [os.path.join('~', '.pros-atom'), os.path.join('~', '.pros-editor')] + paths = [os.path.join("~", ".pros-atom"), os.path.join("~", ".pros-editor")] return any([os.path.exists(os.path.expanduser(p)) for p in paths]) - def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifestV1']: + def get_upgrade_manifest(self, force: bool = False) -> Optional["UpgradeManifestV1"]: from pros.upgrade.manifests.upgrade_manifest_v1 import UpgradeManifestV1 # noqa: F811 if not force and not self.needs_online_fetch(self.cached_upgrade[0]): return self.cached_upgrade[1] - pros.common.logger(__name__).info('Fetching upgrade manifest...') + pros.common.logger(__name__).info("Fetching upgrade manifest...") import jsonpickle import requests - r = requests.get('https://purduesigbots.github.io/pros-mainline/cli-updates.json') + r = requests.get("https://purduesigbots.github.io/pros-mainline/cli-updates.json") pros.common.logger(__name__).debug(r) if r.status_code == 200: try: @@ -55,7 +55,7 @@ def get_upgrade_manifest(self, force: bool = False) -> Optional['UpgradeManifest self.save() return self.cached_upgrade[1] else: - pros.common.logger(__name__).warning(f'Failed to fetch CLI updates because status code: {r.status_code}') + pros.common.logger(__name__).warning(f"Failed to fetch CLI updates because 
status code: {r.status_code}") pros.common.logger(__name__).debug(r) return None @@ -66,6 +66,6 @@ def cli_config() -> CliConfig: return CliConfig() ctx.ensure_object(dict) assert isinstance(ctx.obj, dict) - if not hasattr(ctx.obj, 'cli_config') or not isinstance(ctx.obj['cli_config'], CliConfig): - ctx.obj['cli_config'] = CliConfig() - return ctx.obj['cli_config'] + if not hasattr(ctx.obj, "cli_config") or not isinstance(ctx.obj["cli_config"], CliConfig): + ctx.obj["cli_config"] = CliConfig() + return ctx.obj["cli_config"] diff --git a/pros/config/config.py b/pros/config/config.py index 59cda986..b0bb111e 100644 --- a/pros/config/config.py +++ b/pros/config/config.py @@ -17,40 +17,40 @@ class Config(object): """ def __init__(self, file, error_on_decode=False): - logger(__name__).debug('Opening {} ({})'.format(file, self.__class__.__name__)) + logger(__name__).debug("Opening {} ({})".format(file, self.__class__.__name__)) self.save_file = file # __ignored property has any fields which shouldn't be included the pickled config file - self.__ignored = self.__dict__.get('_Config__ignored', []) - self.__ignored.append('save_file') - self.__ignored.append('_Config__ignored') + self.__ignored = self.__dict__.get("_Config__ignored", []) + self.__ignored.append("save_file") + self.__ignored.append("_Config__ignored") if file: # If the file already exists, update this new config with the values in the file if os.path.isfile(file): - with open(file, 'r') as f: + with open(file, "r") as f: try: result = jsonpickle.decode(f.read()) if isinstance(result, dict): - if 'py/state' in result: - class_name = '{}.{}'.format(self.__class__.__module__, self.__class__.__qualname__) - logger(__name__).debug('Coercing {} to {}'.format(result['py/object'], class_name)) - old_object = result['py/object'] + if "py/state" in result: + class_name = "{}.{}".format(self.__class__.__module__, self.__class__.__qualname__) + logger(__name__).debug("Coercing {} to {}".format(result["py/object"], class_name)) + old_object = result["py/object"] try: - result['py/object'] = class_name + result["py/object"] = class_name result = jsonpickle.unpickler.Unpickler().restore(result) except (json.decoder.JSONDecodeError, AttributeError) as e: logger(__name__).debug(e) logger(__name__).warning( - f'Couldn\'t coerce {file} ({old_object}) to ' - f'{class_name}. Using rudimentary coercion' + f"Couldn't coerce {file} ({old_object}) to " + f"{class_name}. 
Using rudimentary coercion" ) - self.__dict__.update(result['py/state']) + self.__dict__.update(result["py/state"]) else: self.__dict__.update(result) elif isinstance(result, object): self.__dict__.update(result.__dict__) except (json.decoder.JSONDecodeError, AttributeError, UnicodeDecodeError) as e: if error_on_decode: - logger(__name__).error(f'Error parsing {file}') + logger(__name__).error(f"Error parsing {file}") logger(__name__).exception(e) raise e else: @@ -58,7 +58,7 @@ def __init__(self, file, error_on_decode=False): pass # obvious elif os.path.isdir(file): - raise ValueError('{} must be a file, not a directory'.format(file)) + raise ValueError("{} must be a file, not a directory".format(file)) # The file didn't exist when we created, so we'll save the default values else: try: @@ -68,7 +68,7 @@ def __init__(self, file, error_on_decode=False): logger(__name__).exception(e) raise e else: - logger(__name__).debug('Failed to save {} ({})'.format(file, e)) + logger(__name__).debug("Failed to save {} ({})".format(file, e)) from pros.common.sentry import add_context @@ -76,7 +76,7 @@ def __init__(self, file, error_on_decode=False): def __getstate__(self): state = self.__dict__.copy() - if '_Config__ignored' in self.__dict__: + if "_Config__ignored" in self.__dict__: for key in [k for k in self.__ignored if k in state]: del state[key] return state @@ -85,7 +85,7 @@ def __setstate__(self, state): self.__dict__.update(state) def __str__(self): - jsonpickle.set_encoder_options('json', sort_keys=True) + jsonpickle.set_encoder_options("json", sort_keys=True) return jsonpickle.encode(self) def delete(self): @@ -95,12 +95,12 @@ def delete(self): def save(self, file: str = None) -> None: if file is None: file = self.save_file - jsonpickle.set_encoder_options('json', sort_keys=True, indent=4) + jsonpickle.set_encoder_options("json", sort_keys=True, indent=4) if os.path.dirname(file): os.makedirs(os.path.dirname(file), exist_ok=True) - with open(file, 'w') as f: + with open(file, "w") as f: f.write(jsonpickle.encode(self)) - logger(__name__).debug('Saved {}'.format(file)) + logger(__name__).debug("Saved {}".format(file)) def migrate(self, migration): for old, new in migration.iteritems(): diff --git a/pros/ga/analytics.py b/pros/ga/analytics.py index de7d806d..40202be1 100644 --- a/pros/ga/analytics.py +++ b/pros/ga/analytics.py @@ -7,8 +7,8 @@ import requests from requests_futures.sessions import FuturesSession -url = 'https://www.google-analytics.com/collect' -agent = 'pros-cli' +url = "https://www.google-analytics.com/collect" +agent = "pros-cli" """ PROS ANALYTICS CLASS @@ -27,9 +27,9 @@ def __init__(self): self.cli_config.save() self.sent = False # Variables that the class will use - self.gaID = self.cli_config.ga['ga_id'] - self.useAnalytics = self.cli_config.ga['enabled'] - self.uID = self.cli_config.ga['u_id'] + self.gaID = self.cli_config.ga["ga_id"] + self.useAnalytics = self.cli_config.ga["enabled"] + self.uID = self.cli_config.ga["u_id"] self.pendingRequests = [] def send(self, action): @@ -39,36 +39,36 @@ def send(self, action): try: # Payload to be sent to GA, idk what some of them are but it works payload = { - 'v': 1, - 'tid': self.gaID, - 'aip': 1, - 'z': random.random(), - 'cid': self.uID, - 't': 'event', - 'ec': 'action', - 'ea': action, - 'el': 'CLI', - 'ev': '1', - 'ni': 0, + "v": 1, + "tid": self.gaID, + "aip": 1, + "z": random.random(), + "cid": self.uID, + "t": "event", + "ec": "action", + "ea": action, + "el": "CLI", + "ev": "1", + "ni": 0, } session = FuturesSession() # 
Send payload to GA servers - future = session.post(url=url, data=payload, headers={'User-Agent': agent}, timeout=5.0) + future = session.post(url=url, data=payload, headers={"User-Agent": agent}, timeout=5.0) self.pendingRequests.append(future) except Exception: from pros.cli.common import logger logger(__name__).warning( - "Unable to send analytics. Do you have a stable internet connection?", extra={'sentry': False} + "Unable to send analytics. Do you have a stable internet connection?", extra={"sentry": False} ) def set_use(self, value: bool): # Sets if GA is being used or not self.useAnalytics = value - self.cli_config.ga['enabled'] = self.useAnalytics + self.cli_config.ga["enabled"] = self.useAnalytics self.cli_config.save() def process_requests(self): diff --git a/pros/serial/__init__.py b/pros/serial/__init__.py index 0177d021..36e16fa2 100644 --- a/pros/serial/__init__.py +++ b/pros/serial/__init__.py @@ -4,11 +4,11 @@ def bytes_to_str(arr): if isinstance(arr, str): arr = bytes(arr) - if hasattr(arr, '__iter__'): - return ''.join('{:02X} '.format(x) for x in arr).strip() + if hasattr(arr, "__iter__"): + return "".join("{:02X} ".format(x) for x in arr).strip() else: # actually just a single byte - return '0x{:02X}'.format(arr) + return "0x{:02X}".format(arr) -def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = 'utf-8', errors: str = 'strict') -> str: - return data.split(b'\0', 1)[0].decode(encoding=encoding, errors=errors) +def decode_bytes_to_str(data: Union[bytes, bytearray], encoding: str = "utf-8", errors: str = "strict") -> str: + return data.split(b"\0", 1)[0].decode(encoding=encoding, errors=errors) diff --git a/pros/serial/devices/stream_device.py b/pros/serial/devices/stream_device.py index 2649af97..a285619c 100644 --- a/pros/serial/devices/stream_device.py +++ b/pros/serial/devices/stream_device.py @@ -42,7 +42,7 @@ def promiscuous(self, value: bool): pass def read(self) -> Tuple[bytes, bytes]: - return b'', self.port.read_all() + return b"", self.port.read_all() def write(self, data: Union[bytes, str]): self.port.write(data) diff --git a/pros/serial/devices/vex/cortex_device.py b/pros/serial/devices/vex/cortex_device.py index 13a35d96..189b3e86 100644 --- a/pros/serial/devices/vex/cortex_device.py +++ b/pros/serial/devices/vex/cortex_device.py @@ -33,11 +33,11 @@ def __init__(self, data: Tuple[bytes, ...]): def __str__(self): return ( - f' Tether: {str(self.flags)}\n' - f' Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V ' - f'(Backup: {self.backup_battery:1.2f} V)\n' - f'Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ ' - f'{self.joystick_battery:1.2f} V' + f" Tether: {str(self.flags)}\n" + f" Cortex: F/W {self.robot_firmware[0]}.{self.robot_firmware[1]} w/ {self.robot_battery:1.2f} V " + f"(Backup: {self.backup_battery:1.2f} V)\n" + f"Joystick: F/W {self.joystick_firmware[0]}.{self.robot_firmware[1]} w/ " + f"{self.joystick_battery:1.2f} V" ) class SystemStatusFlags(IntFlag): @@ -56,26 +56,26 @@ def andeq(a, b): return (a & b) == b if not self.value & self.TETH_BITS: - s = 'Serial w/VEXnet 1.0 Keys' + s = "Serial w/VEXnet 1.0 Keys" elif andeq(self.value, 0x01): - s = 'Serial w/VEXnet 1.0 Keys (turbo)' + s = "Serial w/VEXnet 1.0 Keys (turbo)" elif andeq(self.value, 0x04): - s = 'Serial w/VEXnet 2.0 Keys' + s = "Serial w/VEXnet 2.0 Keys" elif andeq(self.value, 0x05): - s = 'Serial w/VEXnet 2.0 Keys (download mode)' + s = "Serial w/VEXnet 2.0 Keys (download mode)" elif andeq(self.value, 
0x10): - s = 'Serial w/ a USB Cable' + s = "Serial w/ a USB Cable" elif andeq(self.value, 0x20): - s = 'Directly w/ a USB Cable' + s = "Directly w/ a USB Cable" else: - s = 'Unknown' + s = "Unknown" if andeq(self.value, self.FCS_CONNECT): - s += ' - FCS Connected' + s += " - FCS Connected" return s def get_connected_device(self) -> SystemDevice: - logger(__name__).info('Interrogating Cortex...') + logger(__name__).info("Interrogating Cortex...") stm32 = STM32Device(self.port, do_negoitate=False) try: stm32.get(n_retries=1) @@ -84,21 +84,21 @@ def get_connected_device(self) -> SystemDevice: return self def upload_project(self, project: Project, **kwargs): - assert project.target == 'cortex' + assert project.target == "cortex" output_path = project.path.joinpath(project.output) if not output_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with output_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! Have you built your project?")) + with output_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def write_program(self, file: typing.BinaryIO, **kwargs): - action_string = '' - if hasattr(file, 'name'): - action_string += f' {Path(file.name).name}' - action_string += f' to Cortex on {self.port}' - ui.echo(f'Uploading {action_string}') + action_string = "" + if hasattr(file, "name"): + action_string += f" {Path(file.name).name}" + action_string += f" to Cortex on {self.port}" + ui.echo(f"Uploading {action_string}") - logger(__name__).info('Writing program to Cortex') + logger(__name__).info("Writing program to Cortex") status = self.query_system() logger(__name__).info(status) if not status.flags | self.SystemStatusFlags.TETH_USB and not status.flags | self.SystemStatusFlags.DL_MODE: @@ -107,25 +107,25 @@ def write_program(self, file: typing.BinaryIO, **kwargs): bootloader = self.expose_bootloader() rv = bootloader.write_program(file, **kwargs) - ui.finalize('upload', f'Finished uploading {action_string}') + ui.finalize("upload", f"Finished uploading {action_string}") return rv @retries def query_system(self) -> SystemStatus: - logger(__name__).info('Querying system information') + logger(__name__).info("Querying system information") rx = self._txrx_simple_struct(0x21, "<8B2x") status = CortexDevice.SystemStatus(rx) - ui.finalize('cortex-status', status) + ui.finalize("cortex-status", status) return status @retries def send_to_download_channel(self): - logger(__name__).info('Sending to download channel') + logger(__name__).info("Sending to download channel") self._txrx_ack_packet(0x35, timeout=1.0) @retries def expose_bootloader(self): - logger(__name__).info('Exposing bootloader') + logger(__name__).info("Exposing bootloader") for _ in itertools.repeat(None, 5): self._tx_packet(0x25) time.sleep(0.1) @@ -153,4 +153,4 @@ def _txrx_ack_packet(self, command: int, timeout=0.1): """ tx = self._tx_packet(command) self._rx_ack(timeout=timeout) - logger(__name__).debug('TX: {}'.format(bytes_to_str(tx))) + logger(__name__).debug("TX: {}".format(bytes_to_str(tx))) diff --git a/pros/serial/devices/vex/message.py b/pros/serial/devices/vex/message.py index f3ede25c..24139ff7 100644 --- a/pros/serial/devices/vex/message.py +++ b/pros/serial/devices/vex/message.py @@ -33,4 +33,4 @@ def __setitem__(self, key, value): self.bookmarks[key] = value def __str__(self): - return 'TX:{}\tRX:{}'.format(bytes_to_str(self.tx), bytes_to_str(self.rx)) + return "TX:{}\tRX:{}".format(bytes_to_str(self.tx), 
bytes_to_str(self.rx)) diff --git a/pros/serial/devices/vex/stm32_device.py b/pros/serial/devices/vex/stm32_device.py index 36e2321e..0907ef0f 100644 --- a/pros/serial/devices/vex/stm32_device.py +++ b/pros/serial/devices/vex/stm32_device.py @@ -33,7 +33,7 @@ def __init__(self, port: BasePort, must_initialize: bool = False, do_negoitate: try: self.get(n_retries=0) except: - logger(__name__).info('Sending bootloader initialization') + logger(__name__).info("Sending bootloader initialization") time.sleep(0.01) self.port.rts = 0 for _ in itertools.repeat(None, times=3): @@ -47,13 +47,13 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft file.seek(0, 0) if file_len > (self.NUM_PAGES * self.PAGE_SIZE): raise VEXCommError( - f'File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)' + f"File is too big to be uploaded (max file size: {self.NUM_PAGES * self.PAGE_SIZE} bytes)" ) - if hasattr(file, 'name'): + if hasattr(file, "name"): display_name = file.name else: - display_name = '(memory)' + display_name = "(memory)" if not preserve_fs: self.erase_all() @@ -61,7 +61,7 @@ def write_program(self, file: typing.BinaryIO, preserve_fs: bool = False, go_aft self.erase_memory(list(range(0, int(file_len / self.PAGE_SIZE) + 1))) address = 0x08000000 - with ui.progressbar(length=file_len, label=f'Uploading {display_name}') as progress: + with ui.progressbar(length=file_len, label=f"Uploading {display_name}") as progress: for i in range(0, file_len, 256): write_size = 256 if i + 256 > file_len: @@ -78,77 +78,77 @@ def scan_prosfs(self): @retries def get(self): - logger(__name__).info('STM32: Get') + logger(__name__).info("STM32: Get") self._txrx_command(0x00) n_bytes = self.port.read(1)[0] assert n_bytes == 11 data = self.port.read(n_bytes + 1) - logger(__name__).info(f'STM32 Bootloader version 0x{data[0]:x}') + logger(__name__).info(f"STM32 Bootloader version 0x{data[0]:x}") self.commands = data[1:] - logger(__name__).debug(f'STM32 Bootloader commands are: {bytes_to_str(data[1:])}') + logger(__name__).debug(f"STM32 Bootloader commands are: {bytes_to_str(data[1:])}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_read_protection_status(self): - logger(__name__).info('STM32: Get ID & Read Protection Status') + logger(__name__).info("STM32: Get ID & Read Protection Status") self._txrx_command(0x01) data = self.port.read(3) - logger(__name__).debug(f'STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}') + logger(__name__).debug(f"STM32 Bootloader Get Version & Read Protection Status is: {bytes_to_str(data)}") assert self.port.read(1)[0] == self.ACK_BYTE @retries def get_id(self): - logger(__name__).info('STM32: Get PID') + logger(__name__).info("STM32: Get PID") self._txrx_command(0x02) n_bytes = self.port.read(1)[0] pid = self.port.read(n_bytes + 1) - logger(__name__).debug(f'STM32 Bootloader PID is {pid}') + logger(__name__).debug(f"STM32 Bootloader PID is {pid}") @retries def read_memory(self, address: int, n_bytes: int): - logger(__name__).info(f'STM32: Read {n_bytes} fromo 0x{address:x}') + logger(__name__).info(f"STM32: Read {n_bytes} fromo 0x{address:x}") assert 255 >= n_bytes > 0 self._txrx_command(0x11) - self._txrx_command(struct.pack('>I', address)) + self._txrx_command(struct.pack(">I", address)) self._txrx_command(n_bytes) return self.port.read(n_bytes) @retries def go(self, start_address: int): - logger(__name__).info(f'STM32: Go 0x{start_address:x}') + logger(__name__).info(f"STM32: Go 
0x{start_address:x}") self._txrx_command(0x21) try: - self._txrx_command(struct.pack('>I', start_address), timeout=5.0) + self._txrx_command(struct.pack(">I", start_address), timeout=5.0) except VEXCommError: logger(__name__).warning( - 'STM32 Bootloader did not acknowledge GO command. ' - 'The program may take a moment to begin running ' - 'or the device should be rebooted.' + "STM32 Bootloader did not acknowledge GO command. " + "The program may take a moment to begin running " + "or the device should be rebooted." ) @retries def write_memory(self, start_address: int, data: bytes): - logger(__name__).info(f'STM32: Write {len(data)} to 0x{start_address:x}') + logger(__name__).info(f"STM32: Write {len(data)} to 0x{start_address:x}") assert 0 < len(data) <= 256 if len(data) % 4 != 0: - data = data + (b'\0' * (4 - (len(data) % 4))) + data = data + (b"\0" * (4 - (len(data) % 4))) self._txrx_command(0x31) - self._txrx_command(struct.pack('>I', start_address)) + self._txrx_command(struct.pack(">I", start_address)) self._txrx_command(bytes([len(data) - 1, *data])) @retries def erase_all(self): - logger(__name__).info('STM32: Erase all pages') + logger(__name__).info("STM32: Erase all pages") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") self._txrx_command(0x43) self._txrx_command(0xFF) @retries def erase_memory(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Erase pages: {page_numbers}") if not self.commands[6] == 0x43: - raise VEXCommError('Standard erase not supported on this device (only extended erase)') + raise VEXCommError("Standard erase not supported on this device (only extended erase)") assert 0 < len(page_numbers) <= 255 assert all([0 <= p <= 255 for p in page_numbers]) self._txrx_command(0x43) @@ -156,22 +156,22 @@ def erase_memory(self, page_numbers: List[int]): @retries def extended_erase(self, page_numbers: List[int]): - logger(__name__).info(f'STM32: Extended Erase pages: {page_numbers}') + logger(__name__).info(f"STM32: Extended Erase pages: {page_numbers}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') + raise IOError("Extended erase not supported on this device (only standard erase)") assert 0 < len(page_numbers) < 0xFFF0 assert all([0 <= p <= 0xFFFF for p in page_numbers]) self._txrx_command(0x44) - self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f'>{len(page_numbers)}H', *page_numbers)])) + self._txrx_command(bytes([len(page_numbers) - 1, *struct.pack(f">{len(page_numbers)}H", *page_numbers)])) @retries def extended_erase_special(self, command: int): - logger(__name__).info(f'STM32: Extended special erase: {command:x}') + logger(__name__).info(f"STM32: Extended special erase: {command:x}") if not self.commands[6] == 0x44: - raise IOError('Extended erase not supported on this device (only standard erase)') + raise IOError("Extended erase not supported on this device (only standard erase)") assert 0xFFFD <= command <= 0xFFFF self._txrx_command(0x44) - self._txrx_command(struct.pack('>H', command)) + self._txrx_command(struct.pack(">H", command)) def _txrx_command(self, command: Union[int, bytes], timeout: float = 0.01, checksum: bool = True): self.port.read_all() @@ -180,15 +180,15 @@ def _txrx_command(self, command: Union[int, bytes], timeout: float 
= 0.01, check elif isinstance(command, int): message = bytearray([command, ~command & 0xFF] if checksum else [command]) else: - raise ValueError(f'Expected command to be bytes or int but got {type(command)}') - logger(__name__).debug(f'STM32 TX: {bytes_to_str(message)}') + raise ValueError(f"Expected command to be bytes or int but got {type(command)}") + logger(__name__).debug(f"STM32 TX: {bytes_to_str(message)}") self.port.write(message) self.port.flush() start_time = time.time() while time.time() - start_time < timeout: data = self.port.read(1) if data and len(data) == 1: - logger(__name__).debug(f'STM32 RX: {data[0]} =?= {self.ACK_BYTE}') + logger(__name__).debug(f"STM32 RX: {data[0]} =?= {self.ACK_BYTE}") if data[0] == self.ACK_BYTE: return raise VEXCommError(f"Device never ACK'd to {command}", command) diff --git a/pros/serial/devices/vex/v5_device.py b/pros/serial/devices/vex/v5_device.py index c0ee039c..1054f24e 100644 --- a/pros/serial/devices/vex/v5_device.py +++ b/pros/serial/devices/vex/v5_device.py @@ -38,7 +38,7 @@ def filter_vex_ports(p): p.vid is not None and p.vid in [0x2888, 0x0501] or p.name is not None - and ('VEX' in p.name or 'V5' in p.name) + and ("VEX" in p.name or "V5" in p.name) ) def filter_v5_ports(p, locations, names): @@ -55,14 +55,14 @@ def filter_v5_ports_mac(p, device): # Initially try filtering based off of location or the name of the device. # Special logic for macOS - if platform.system() == 'Darwin': - user_ports = [p for p in ports if filter_v5_ports_mac(p, '3')] - system_ports = [p for p in ports if filter_v5_ports_mac(p, '1')] - joystick_ports = [p for p in ports if filter_v5_ports_mac(p, '2')] + if platform.system() == "Darwin": + user_ports = [p for p in ports if filter_v5_ports_mac(p, "3")] + system_ports = [p for p in ports if filter_v5_ports_mac(p, "1")] + joystick_ports = [p for p in ports if filter_v5_ports_mac(p, "2")] else: - user_ports = [p for p in ports if filter_v5_ports(p, ['2'], ['User'])] - system_ports = [p for p in ports if filter_v5_ports(p, ['0'], ['System', 'Communications'])] - joystick_ports = [p for p in ports if filter_v5_ports(p, ['1'], ['Controller'])] + user_ports = [p for p in ports if filter_v5_ports(p, ["2"], ["User"])] + system_ports = [p for p in ports if filter_v5_ports(p, ["0"], ["System", "Communications"])] + joystick_ports = [p for p in ports if filter_v5_ports(p, ["1"], ["Controller"])] # Fallback for when a brain port's location is not detected properly if len(user_ports) != len(system_ports): @@ -72,31 +72,31 @@ def filter_v5_ports_mac(p, device): user_ports = [p for p in ports if p not in system_ports and p not in joystick_ports] if len(user_ports) == len(system_ports) and len(user_ports) > 0: - if p_type.lower() == 'user': + if p_type.lower() == "user": return user_ports - elif p_type.lower() == 'system': + elif p_type.lower() == "system": return system_ports + joystick_ports else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # None of the typical filters worked, so if there are only two ports, then the lower one is always* # the USER? 
port (*always = I haven't found a guarantee) if len(ports) == 2: # natural sort based on: https://stackoverflow.com/a/16090640 def natural_key(chunk: str): - return [int(text) if text.isdigit() else text.lower() for text in re.split(r'(\d+)', chunk)] + return [int(text) if text.isdigit() else text.lower() for text in re.split(r"(\d+)", chunk)] ports = sorted(ports, key=lambda p: natural_key(p.device)) - if p_type.lower() == 'user': + if p_type.lower() == "user": return [ports[1]] - elif p_type.lower() == 'system': + elif p_type.lower() == "system": # check if ports contain the word Brain in the description and return that port for port in ports: if "Brain" in port.description: return [port] return [ports[0], *joystick_ports] else: - raise ValueError(f'Invalid port type specified: {p_type}') + raise ValueError(f"Invalid port type specified: {p_type}") # these can now also be used as user ports if len(joystick_ports) > 0: # and p_type.lower() == 'system': return joystick_ports @@ -116,10 +116,10 @@ def wrapped(device, *args, **kwargs): return wrapped -def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> Tuple[BinaryIO, int]: +def compress_file(file: BinaryIO, file_len: int, label="Compressing binary") -> Tuple[BinaryIO, int]: buf = io.BytesIO() with ui.progressbar(length=file_len, label=label) as progress: - with gzip.GzipFile(fileobj=buf, mode='wb', mtime=0) as f: + with gzip.GzipFile(fileobj=buf, mode="wb", mtime=0) as f: while True: data = file.read(16 * 1024) if not data: @@ -133,8 +133,8 @@ def compress_file(file: BinaryIO, file_len: int, label='Compressing binary') -> class V5Device(VEXDevice, SystemDevice): - vid_map = {'user': 1, 'system': 15, 'rms': 16, 'pros': 24, 'mw': 32} # type: Dict[str, int] - channel_map = {'pit': 0, 'download': 1} # type: Dict[str, int] + vid_map = {"user": 1, "system": 15, "rms": 16, "pros": 24, "mw": 32} # type: Dict[str, int] + channel_map = {"pit": 0, "download": 1} # type: Dict[str, int] class FTCompleteOptions(IntEnum): DONT_RUN = 0 @@ -160,24 +160,24 @@ class ControllerFlags(IntFlag): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}.{}'.format(*data[0:5])) + self.system_version = Version("{}.{}.{}-{}.{}".format(*data[0:5])) self.product = V5Device.SystemVersion.Product(data[5]) self.product_flags = self.flag_map[self.product](data[6]) def __str__(self): return ( - f'System Version: {self.system_version}\n' - f' Product: {self.product.name}\n' - f' Product Flags: {self.product_flags.value:x}' + f"System Version: {self.system_version}\n" + f" Product: {self.product.name}\n" + f" Product Flags: {self.product_flags.value:x}" ) class SystemStatus(object): def __init__(self, data: tuple): from semantic_version import Version - self.system_version = Version('{}.{}.{}-{}'.format(*data[0:4])) - self.cpu0_version = Version('{}.{}.{}-{}'.format(*data[4:8])) - self.cpu1_version = Version('{}.{}.{}-{}'.format(*data[8:12])) + self.system_version = Version("{}.{}.{}-{}".format(*data[0:4])) + self.cpu0_version = Version("{}.{}.{}-{}".format(*data[4:8])) + self.cpu1_version = Version("{}.{}.{}-{}".format(*data[8:12])) self.touch_version = data[12] self.system_id = data[13] @@ -186,11 +186,11 @@ def __getitem__(self, item): def __init__(self, port: BasePort): self._status = None - self._serial_cache = b'' + self._serial_cache = b"" super().__init__(port) class DownloadChannel(object): - def __init__(self, device: 'V5Device', timeout: float = 5.0): + def __init__(self, device: 
"V5Device", timeout: float = 5.0): self.device = device self.timeout = timeout self.did_switch = False @@ -200,11 +200,11 @@ def __enter__(self): if version.product == V5Device.SystemVersion.Product.CONTROLLER: self.device.default_timeout = 2.0 if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('V5 Controller doesn\'t appear to be connected to a V5 Brain', version) - ui.echo('Transferring V5 to download channel') - self.device.ft_transfer_channel('download') + raise VEXCommError("V5 Controller doesn't appear to be connected to a V5 Brain", version) + ui.echo("Transferring V5 to download channel") + self.device.ft_transfer_channel("download") self.did_switch = True - logger(__name__).debug('Sleeping for a while to let V5 start channel transfer') + logger(__name__).debug("Sleeping for a while to let V5 start channel transfer") time.sleep(0.25) # wait at least 250ms before starting to poll controller if it's connected yet version = self.device.query_system_version() start_time = time.time() @@ -216,16 +216,16 @@ def __enter__(self): version = self.device.query_system_version() time.sleep(0.25) if V5Device.SystemVersion.ControllerFlags.CONNECTED not in version.product_flags: - raise VEXCommError('Could not transfer V5 Controller to download channel', version) - logger(__name__).info('V5 should been transferred to higher bandwidth download channel') + raise VEXCommError("Could not transfer V5 Controller to download channel", version) + logger(__name__).info("V5 should been transferred to higher bandwidth download channel") return self else: return self def __exit__(self, *exc): if self.did_switch: - self.device.ft_transfer_channel('pit') - ui.echo('V5 has been transferred back to pit channel') + self.device.ft_transfer_channel("pit") + ui.echo("V5 has been transferred back to pit channel") @property def status(self): @@ -235,7 +235,7 @@ def status(self): @property def can_compress(self): - return self.status['system_version'] in Spec('>=1.0.5') + return self.status["system_version"] in Spec(">=1.0.5") @property def is_wireless(self): @@ -251,73 +251,73 @@ def generate_cold_hash(self, project: Project, extra: dict): from base64 import b64encode from hashlib import md5 - msg = str(sorted(keys, key=lambda t: t[0])).encode('ascii') - name = b64encode(md5(msg).digest()).rstrip(b'=').decode('ascii') - if Spec('<=1.0.0-27').match(self.status['cpu0_version']): + msg = str(sorted(keys, key=lambda t: t[0])).encode("ascii") + name = b64encode(md5(msg).digest()).rstrip(b"=").decode("ascii") + if Spec("<=1.0.0-27").match(self.status["cpu0_version"]): # Bug prevents linked files from being > 18 characters long. # 17 characters is probably good enough for hash, so no need to fail out name = name[:17] return name def upload_project(self, project: Project, **kwargs): - assert project.target == 'v5' + assert project.target == "v5" monolith_path = project.location.joinpath(project.output) if monolith_path.exists(): - logger(__name__).debug(f'Monolith exists! ({monolith_path})') + logger(__name__).debug(f"Monolith exists! 
({monolith_path})") if ( - 'hot_output' in project.templates['kernel'].metadata - and 'cold_output' in project.templates['kernel'].metadata + "hot_output" in project.templates["kernel"].metadata + and "cold_output" in project.templates["kernel"].metadata ): - hot_path = project.location.joinpath(project.templates['kernel'].metadata['hot_output']) - cold_path = project.location.joinpath(project.templates['kernel'].metadata['cold_output']) + hot_path = project.location.joinpath(project.templates["kernel"].metadata["hot_output"]) + cold_path = project.location.joinpath(project.templates["kernel"].metadata["cold_output"]) upload_hot_cold = False if hot_path.exists() and cold_path.exists(): - logger(__name__).debug(f'Hot and cold files exist! ({hot_path}; {cold_path})') + logger(__name__).debug(f"Hot and cold files exist! ({hot_path}; {cold_path})") if monolith_path.exists(): monolith_mtime = monolith_path.stat().st_mtime hot_mtime = hot_path.stat().st_mtime - logger(__name__).debug(f'Monolith last modified: {monolith_mtime}') - logger(__name__).debug(f'Hot last modified: {hot_mtime}') + logger(__name__).debug(f"Monolith last modified: {monolith_mtime}") + logger(__name__).debug(f"Hot last modified: {hot_mtime}") if hot_mtime > monolith_mtime: upload_hot_cold = True - logger(__name__).debug('Hot file is newer than monolith!') + logger(__name__).debug("Hot file is newer than monolith!") else: upload_hot_cold = True if upload_hot_cold: - with hot_path.open(mode='rb') as hot: - with cold_path.open(mode='rb') as cold: - kwargs['linked_file'] = cold - kwargs['linked_remote_name'] = self.generate_cold_hash(project, {}) - kwargs['linked_file_addr'] = int( - project.templates['kernel'].metadata.get('cold_addr', 0x03800000) + with hot_path.open(mode="rb") as hot: + with cold_path.open(mode="rb") as cold: + kwargs["linked_file"] = cold + kwargs["linked_remote_name"] = self.generate_cold_hash(project, {}) + kwargs["linked_file_addr"] = int( + project.templates["kernel"].metadata.get("cold_addr", 0x03800000) ) - kwargs['addr'] = int(project.templates['kernel'].metadata.get('hot_addr', 0x07800000)) + kwargs["addr"] = int(project.templates["kernel"].metadata.get("hot_addr", 0x07800000)) return self.write_program(hot, **kwargs) if not monolith_path.exists(): - raise ui.dont_send(Exception('No output files were found! Have you built your project?')) - with monolith_path.open(mode='rb') as pf: + raise ui.dont_send(Exception("No output files were found! 
Have you built your project?")) + with monolith_path.open(mode="rb") as pf: return self.write_program(pf, **kwargs) def generate_ini_file(self, remote_name: str = None, slot: int = 0, ini: ConfigParser = None, **kwargs): project_ini = ConfigParser() - default_icon = 'USER902x.bmp' if Spec('>=1.0.0-22').match(self.status['cpu0_version']) else 'USER999x.bmp' - project_ini['project'] = { - 'version': str(kwargs.get('ide_version') or get_version()), - 'ide': str(kwargs.get('ide') or 'PROS'), + default_icon = "USER902x.bmp" if Spec(">=1.0.0-22").match(self.status["cpu0_version"]) else "USER999x.bmp" + project_ini["project"] = { + "version": str(kwargs.get("ide_version") or get_version()), + "ide": str(kwargs.get("ide") or "PROS"), } - project_ini['program'] = { - 'version': kwargs.get('version', '0.0.0') or '0.0.0', - 'name': remote_name, - 'slot': slot, - 'icon': kwargs.get('icon', default_icon) or default_icon, - 'description': kwargs.get('description', 'Created with PROS'), - 'date': datetime.now().isoformat(), + project_ini["program"] = { + "version": kwargs.get("version", "0.0.0") or "0.0.0", + "name": remote_name, + "slot": slot, + "icon": kwargs.get("icon", default_icon) or default_icon, + "description": kwargs.get("description", "Created with PROS"), + "date": datetime.now().isoformat(), } if ini: project_ini.update(ini) with StringIO() as ini_str: project_ini.write(ini_str) - logger(__name__).info(f'Created ini: {ini_str.getvalue()}') + logger(__name__).info(f"Created ini: {ini_str.getvalue()}") return ini_str.getvalue() @with_download_channel @@ -329,7 +329,7 @@ def write_program( slot: int = 0, file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, - target: str = 'flash', + target: str = "flash", quirk: int = 0, linked_file: Optional[typing.BinaryIO] = None, linked_remote_name: Optional[str] = None, @@ -340,21 +340,21 @@ def write_program( with ui.Notification(): action_string = f'Uploading program "{remote_name}"' finish_string = f'Finished uploading "{remote_name}"' - if hasattr(file, 'name'): - action_string += f' ({remote_name if remote_name else Path(file.name).name})' - finish_string += f' ({remote_name if remote_name else Path(file.name).name})' - action_string += f' to V5 slot {slot + 1} on {self.port}' + if hasattr(file, "name"): + action_string += f" ({remote_name if remote_name else Path(file.name).name})" + finish_string += f" ({remote_name if remote_name else Path(file.name).name})" + action_string += f" to V5 slot {slot + 1} on {self.port}" if compress_bin: - action_string += ' (compressed)' + action_string += " (compressed)" ui.echo(action_string) - remote_base = f'slot_{slot + 1}' - if target == 'ddr': + remote_base = f"slot_{slot + 1}" + if target == "ddr": self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', - target='ddr', + type="bin", + target="ddr", run_after=run_after, linked_filename=linked_remote_name, **kwargs, @@ -365,11 +365,11 @@ def write_program( if not remote_name: remote_name = file.name if len(remote_name) > 23: - logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:20])) + logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:20])) remote_name = remote_name[:23] ini_file = self.generate_ini_file(remote_name=remote_name, slot=slot, ini=ini, **kwargs) - logger(__name__).info(f'Created ini: {ini_file}') + logger(__name__).info(f"Created ini: {ini_file}") if linked_file is not None: self.upload_library( @@ -377,37 
+377,37 @@ def write_program( remote_name=linked_remote_name, addr=linked_file_addr, compress=compress_bin, - force_upload=kwargs.pop('force_upload_linked', False), + force_upload=kwargs.pop("force_upload_linked", False), ) - bin_kwargs = {k: v for k, v in kwargs.items() if v in ['addr']} + bin_kwargs = {k: v for k, v in kwargs.items() if v in ["addr"]} if (quirk & 0xFF) == 1: # WRITE BIN FILE self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', + type="bin", run_after=run_after, linked_filename=linked_remote_name, compress=compress_bin, **bin_kwargs, **kwargs, ) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) elif (quirk & 0xFF) == 0: # STOP PROGRAM - self.execute_program_file('', run=False) - with BytesIO(ini_file.encode(encoding='ascii')) as ini_bin: + self.execute_program_file("", run=False) + with BytesIO(ini_file.encode(encoding="ascii")) as ini_bin: # WRITE INI FILE - self.write_file(ini_bin, f'{remote_base}.ini', type='ini', **kwargs) + self.write_file(ini_bin, f"{remote_base}.ini", type="ini", **kwargs) # WRITE BIN FILE self.write_file( file, - f'{remote_base}.bin', + f"{remote_base}.bin", file_len=file_len, - type='bin', + type="bin", run_after=run_after, linked_filename=linked_remote_name, compress=compress_bin, @@ -415,8 +415,8 @@ def write_program( **kwargs, ) else: - raise ValueError(f'Unknown quirk option: {quirk}') - ui.finalize('upload', f'{finish_string} to V5') + raise ValueError(f"Unknown quirk option: {quirk}") + ui.finalize("upload", f"{finish_string} to V5") def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, target_name: Optional[str] = None): """ @@ -438,7 +438,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, vid = self.vid_map[vid.lower()] # assume all libraries unused_libraries = [ - (vid, l['filename']) + (vid, l["filename"]) for l in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid=vid))] ] if name is not None: @@ -451,50 +451,50 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, programs: Dict[str, Dict] = { # need the linked file metadata, so we have to use the get_file_metadata_by_name command - p['filename']: self.get_file_metadata_by_name(p['filename'], vid='user') - for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid='user'))] - if p['type'] == 'bin' + p["filename"]: self.get_file_metadata_by_name(p["filename"], vid="user") + for p in [self.get_file_metadata_by_idx(i) for i in range(0, self.get_dir_count(vid="user"))] + if p["type"] == "bin" } library_usage: Dict[Tuple[int, str], List[str]] = defaultdict(list) for program_name, metadata in programs.items(): - library_usage[(metadata['linked_vid'], metadata['linked_filename'])].append(program_name) + library_usage[(metadata["linked_vid"], metadata["linked_filename"])].append(program_name) orphaned_files: List[Union[str, Tuple[int, str]]] = [] for link, program_names in library_usage.items(): linked_vid, linked_name = link if name is not None and linked_vid == vid and linked_name == name: - logger(__name__).debug(f'{program_names} will be removed because the library will be replaced') + logger(__name__).debug(f"{program_names} will be removed because the library will be 
replaced") orphaned_files.extend(program_names) elif linked_vid != 0: # linked_vid == 0 means there's no link. Can't be orphaned if there's no link if link in unused_libraries: # the library is being used - logger(__name__).debug(f'{link} is being used') + logger(__name__).debug(f"{link} is being used") unused_libraries.remove(link) used_libraries.append(link) else: try: self.get_file_metadata_by_name(linked_name, vid=linked_vid) - logger(__name__).debug(f'{link} exists') + logger(__name__).debug(f"{link} exists") used_libraries.extend(link) except VEXCommError as e: logger(__name__).debug(dont_send(e)) - logger(__name__).debug(f'{program_names} will be removed because {link} does not exist') + logger(__name__).debug(f"{program_names} will be removed because {link} does not exist") orphaned_files.extend(program_names) orphaned_files.extend(unused_libraries) if target_name is not None and target_name in orphaned_files: # the file will be overwritten anyway orphaned_files.remove(target_name) if len(orphaned_files) > 0: - logger(__name__).warning(f'Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})') + logger(__name__).warning(f"Removing {len(orphaned_files)} orphaned file(s) ({orphaned_files})") for file in orphaned_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') + self.erase_file(file_name=file, erase_all=True, vid="user") if len(used_libraries) > 3: libraries = [ - (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)['timestamp']) + (linked_vid, linked_name, self.get_file_metadata_by_name(linked_name, vid=linked_vid)["timestamp"]) for linked_vid, linked_name in used_libraries ] library_usage_timestamps = sorted( @@ -505,7 +505,7 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, # get the most recent timestamp of the library and all files linking to it max( linked_timestamp, - *[programs[p]['timestamp'] for p in library_usage[(linked_vid, linked_name)]], + *[programs[p]["timestamp"] for p in library_usage[(linked_vid, linked_name)]], ), ) for linked_vid, linked_name, linked_timestamp in libraries @@ -513,31 +513,31 @@ def ensure_library_space(self, name: Optional[str] = None, vid: int_str = None, key=lambda t: t[2], ) evicted_files: List[Union[str, Tuple[int, str]]] = [] - evicted_file_list = '' + evicted_file_list = "" for evicted_library in library_usage_timestamps[:3]: evicted_files.append(evicted_library[0:2]) evicted_files.extend(library_usage[evicted_library[0:2]]) - evicted_file_list += evicted_library[1] + ', ' - evicted_file_list += ', '.join(library_usage[evicted_file_list[0:2]]) + evicted_file_list += evicted_library[1] + ", " + evicted_file_list += ", ".join(library_usage[evicted_file_list[0:2]]) evicted_file_list = evicted_file_list[:2] # remove last ", " assert len(evicted_files) > 0 if confirm( - f'There are too many files on the V5. PROS can remove the following suggested old files: ' - f'{evicted_file_list}', - title='Confirm file eviction plan:', + f"There are too many files on the V5. 
PROS can remove the following suggested old files: " + f"{evicted_file_list}", + title="Confirm file eviction plan:", ): for file in evicted_files: if isinstance(file, tuple): self.erase_file(file_name=file[1], vid=file[0]) else: - self.erase_file(file_name=file, erase_all=True, vid='user') + self.erase_file(file_name=file, erase_all=True, vid="user") def upload_library( self, file: typing.BinaryIO, remote_name: str = None, file_len: int = -1, - vid: int_str = 'pros', + vid: int_str = "pros", force_upload: bool = False, compress: bool = True, **kwargs, @@ -551,7 +551,7 @@ def upload_library( if not remote_name: remote_name = file.name if len(remote_name) > 23: - logger(__name__).info('Truncating remote name to {} for length.'.format(remote_name[:23])) + logger(__name__).info("Truncating remote name to {} for length.".format(remote_name[:23])) remote_name = remote_name[:23] if file_len < 0: @@ -559,7 +559,7 @@ def upload_library( file.seek(0, 0) if compress and self.can_compress: - file, file_len = compress_file(file, file_len, label='Compressing library') + file, file_len = compress_file(file, file_len, label="Compressing library") crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) @@ -568,22 +568,22 @@ def upload_library( try: response = self.get_file_metadata_by_name(remote_name, vid) logger(__name__).debug(response) - logger(__name__).debug({'file len': file_len, 'crc': crc32}) - if response['size'] == file_len and response['crc'] == crc32: - ui.echo('Library is already onboard V5') + logger(__name__).debug({"file len": file_len, "crc": crc32}) + if response["size"] == file_len and response["crc"] == crc32: + ui.echo("Library is already onboard V5") return else: logger(__name__).warning( - f'Library onboard doesn\'t match! ' + f"Library onboard doesn't match! " f'Length was {response["size"]} but expected {file_len} ' f'CRC: was {response["crc"]:x} but expected {crc32:x}' ) except VEXCommError as e: logger(__name__).debug(e) else: - logger(__name__).info('Skipping already-uploaded checks') + logger(__name__).info("Skipping already-uploaded checks") - logger(__name__).debug('Going to worry about uploading the file now') + logger(__name__).debug("Going to worry about uploading the file now") self.ensure_library_space( remote_name, vid, @@ -594,8 +594,8 @@ def read_file( self, file: typing.IO[bytes], remote_file: str, - vid: int_str = 'user', - target: int_str = 'flash', + vid: int_str = "user", + target: int_str = "flash", addr: Optional[int] = None, file_len: Optional[int] = None, ): @@ -603,29 +603,29 @@ def read_file( vid = self.vid_map[vid.lower()] if addr is None: metadata = self.get_file_metadata_by_name(remote_file, vid=vid) - addr = metadata['addr'] + addr = metadata["addr"] wireless = self.is_wireless - ft_meta = self.ft_initialize(remote_file, function='download', vid=vid, target=target, addr=addr) + ft_meta = self.ft_initialize(remote_file, function="download", vid=vid, target=target, addr=addr) if file_len is None: - file_len = ft_meta['file_size'] + file_len = ft_meta["file_size"] if wireless and file_len > 0x25000: confirm( - f'You\'re about to download {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider downloading directly with a wire.', + f"You're about to download {file_len} bytes wirelessly. 
This could take some time, and you should " + f"consider downloading directly with a wire.", abort=True, default=False, ) - max_packet_size = ft_meta['max_packet_size'] - with ui.progressbar(length=file_len, label='Downloading {}'.format(remote_file)) as progress: + max_packet_size = ft_meta["max_packet_size"] + with ui.progressbar(length=file_len, label="Downloading {}".format(remote_file)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i file.write(self.ft_read(addr + i, packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) self.ft_complete() def write_file( @@ -635,7 +635,7 @@ def write_file( file_len: int = -1, run_after: FTCompleteOptions = FTCompleteOptions.DONT_RUN, linked_filename: Optional[str] = None, - linked_vid: int_str = 'pros', + linked_vid: int_str = "pros", compress: bool = False, **kwargs, ): @@ -643,43 +643,43 @@ def write_file( file_len = file.seek(0, 2) file.seek(0, 0) display_name = remote_file - if hasattr(file, 'name'): - display_name = f'{remote_file} ({Path(file.name).name})' + if hasattr(file, "name"): + display_name = f"{remote_file} ({Path(file.name).name})" if compress and self.can_compress: file, file_len = compress_file(file, file_len) if self.is_wireless and file_len > 0x25000: confirm( - f'You\'re about to upload {file_len} bytes wirelessly. This could take some time, and you should ' - f'consider uploading directly with a wire.', + f"You're about to upload {file_len} bytes wirelessly. This could take some time, and you should " + f"consider uploading directly with a wire.", abort=True, default=False, ) crc32 = self.VEX_CRC32.compute(file.read(file_len)) file.seek(0, 0) - addr = kwargs.get('addr', 0x03800000) - logger(__name__).info('Transferring {} ({} bytes) to the V5 from {}'.format(remote_file, file_len, file)) - ft_meta = self.ft_initialize(remote_file, function='upload', length=file_len, crc=crc32, **kwargs) + addr = kwargs.get("addr", 0x03800000) + logger(__name__).info("Transferring {} ({} bytes) to the V5 from {}".format(remote_file, file_len, file)) + ft_meta = self.ft_initialize(remote_file, function="upload", length=file_len, crc=crc32, **kwargs) if linked_filename is not None: - logger(__name__).debug('Setting file link') + logger(__name__).debug("Setting file link") self.ft_set_link(linked_filename, vid=linked_vid) - assert ft_meta['file_size'] >= file_len + assert ft_meta["file_size"] >= file_len if len(remote_file) > 24: - logger(__name__).info('Truncating {} to {} due to length'.format(remote_file, remote_file[:24])) + logger(__name__).info("Truncating {} to {} due to length".format(remote_file, remote_file[:24])) remote_file = remote_file[:24] - max_packet_size = int(ft_meta['max_packet_size'] / 2) - with ui.progressbar(length=file_len, label='Uploading {}'.format(display_name)) as progress: + max_packet_size = int(ft_meta["max_packet_size"] / 2) + with ui.progressbar(length=file_len, label="Uploading {}".format(display_name)) as progress: for i in range(0, file_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > file_len: packet_size = file_len - i - logger(__name__).debug('Writing {} bytes at 0x{:02X}'.format(packet_size, addr + i)) + logger(__name__).debug("Writing {} bytes at 0x{:02X}".format(packet_size, addr + i)) self.ft_write(addr + i, 
file.read(packet_size)) progress.update(packet_size) - logger(__name__).debug('Completed {} of {} bytes'.format(i + packet_size, file_len)) - logger(__name__).debug('Data transfer complete, sending ft complete') - if compress and self.status['system_version'] in Spec('>=1.0.5'): - logger(__name__).info('Closing gzip file') + logger(__name__).debug("Completed {} of {} bytes".format(i + packet_size, file_len)) + logger(__name__).debug("Data transfer complete, sending ft complete") + if compress and self.status["system_version"] in Spec(">=1.0.5"): + logger(__name__).info("Closing gzip file") file.close() self.ft_complete(options=run_after) @@ -690,9 +690,9 @@ def capture_screen(self) -> Tuple[List[List[int]], int, int]: file_size = width * height * 4 # ARGB rx_io = BytesIO() - self.read_file(rx_io, '', vid='system', target='screen', addr=0, file_len=file_size) + self.read_file(rx_io, "", vid="system", target="screen", addr=0, file_len=file_size) rx = rx_io.getvalue() - rx = struct.unpack('<{}I'.format(len(rx) // 4), rx) + rx = struct.unpack("<{}I".format(len(rx) // 4), rx) data = [[] for _ in range(height)] for y in range(height): @@ -709,8 +709,8 @@ def used_slots(self) -> Dict[int, Optional[str]]: with ui.Notification(): rv = {} for slot in range(1, 9): - ini = self.read_ini(f'slot_{slot}.ini') - rv[slot] = ini['program']['name'] if ini is not None else None + ini = self.read_ini(f"slot_{slot}.ini") + rv[slot] = ini["program"]["name"] if ini is not None else None return rv def read_ini(self, remote_name: str) -> Optional[ConfigParser]: @@ -719,96 +719,96 @@ def read_ini(self, remote_name: str) -> Optional[ConfigParser]: self.read_file(rx_io, remote_name) config = ConfigParser() rx_io.seek(0, 0) - config.read_string(rx_io.read().decode('ascii')) + config.read_string(rx_io.read().decode("ascii")) return config except VEXCommError: return None @retries def query_system_version(self) -> SystemVersion: - logger(__name__).debug('Sending simple 0xA408 command') - ret = self._txrx_simple_struct(0xA4, '>8B') - logger(__name__).debug('Completed simple 0xA408 command') + logger(__name__).debug("Sending simple 0xA408 command") + ret = self._txrx_simple_struct(0xA4, ">8B") + logger(__name__).debug("Completed simple 0xA408 command") return V5Device.SystemVersion(ret) @retries def ft_transfer_channel(self, channel: int_str): - logger(__name__).debug(f'Transferring to {channel} channel') - logger(__name__).debug('Sending ext 0x10 command') + logger(__name__).debug(f"Transferring to {channel} channel") + logger(__name__).debug("Sending ext 0x10 command") if isinstance(channel, str): channel = self.channel_map[channel] assert isinstance(channel, int) and 0 <= channel <= 1 - self._txrx_ext_packet(0x10, struct.pack('<2B', 1, channel), rx_length=0) - logger(__name__).debug('Completed ext 0x10 command') + self._txrx_ext_packet(0x10, struct.pack("<2B", 1, channel), rx_length=0) + logger(__name__).debug("Completed ext 0x10 command") @retries def ft_initialize(self, file_name: str, **kwargs) -> Dict[str, Any]: - logger(__name__).debug('Sending ext 0x11 command') + logger(__name__).debug("Sending ext 0x11 command") options = { - 'function': 'upload', - 'target': 'flash', - 'vid': 'user', - 'overwrite': True, - 'options': 0, - 'length': 0, - 'addr': 0x03800000, - 'crc': 0, - 'type': 'bin', - 'timestamp': datetime.now(), - 'version': 0x01_00_00_00, - 'name': file_name, + "function": "upload", + "target": "flash", + "vid": "user", + "overwrite": True, + "options": 0, + "length": 0, + "addr": 0x03800000, + "crc": 0, + 
"type": "bin", + "timestamp": datetime.now(), + "version": 0x01_00_00_00, + "name": file_name, } options.update({k: v for k, v in kwargs.items() if k in options and v is not None}) - if isinstance(options['function'], str): - options['function'] = {'upload': 1, 'download': 2}[options['function'].lower()] - if isinstance(options['target'], str): - options['target'] = {'ddr': 0, 'flash': 1, 'screen': 2}[options['target'].lower()] - if isinstance(options['vid'], str): - options['vid'] = self.vid_map[options['vid'].lower()] - if isinstance(options['type'], str): - options['type'] = options['type'].encode(encoding='ascii') - if isinstance(options['name'], str): - options['name'] = options['name'].encode(encoding='ascii') - options['options'] |= 1 if options['overwrite'] else 0 - options['timestamp'] = int((options['timestamp'] - datetime(2000, 1, 1)).total_seconds()) - - logger(__name__).debug('Initializing file transfer w/: {}'.format(options)) + if isinstance(options["function"], str): + options["function"] = {"upload": 1, "download": 2}[options["function"].lower()] + if isinstance(options["target"], str): + options["target"] = {"ddr": 0, "flash": 1, "screen": 2}[options["target"].lower()] + if isinstance(options["vid"], str): + options["vid"] = self.vid_map[options["vid"].lower()] + if isinstance(options["type"], str): + options["type"] = options["type"].encode(encoding="ascii") + if isinstance(options["name"], str): + options["name"] = options["name"].encode(encoding="ascii") + options["options"] |= 1 if options["overwrite"] else 0 + options["timestamp"] = int((options["timestamp"] - datetime(2000, 1, 1)).total_seconds()) + + logger(__name__).debug("Initializing file transfer w/: {}".format(options)) tx_payload = struct.pack( "<4B3I4s2I24s", - options['function'], - options['target'], - options['vid'], - options['options'], - options['length'], - options['addr'], - options['crc'], - options['type'], - options['timestamp'], - options['version'], - options['name'], + options["function"], + options["target"], + options["vid"], + options["options"], + options["length"], + options["addr"], + options["crc"], + options["type"], + options["timestamp"], + options["version"], + options["name"], ) - rx = self._txrx_ext_struct(0x11, tx_payload, " bytearray: - logger(__name__).debug('Sending ext 0x14 command') + logger(__name__).debug("Sending ext 0x14 command") actual_n_bytes = n_bytes + (0 if n_bytes % 4 == 0 else 4 - n_bytes % 4) ui.logger(__name__).debug(dict(actual_n_bytes=actual_n_bytes, addr=addr)) tx_payload = struct.pack(" int: - logger(__name__).debug('Sending ext 0x16 command') + logger(__name__).debug("Sending ext 0x16 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] tx_payload = struct.pack("<2B", vid, options) ret = self._txrx_ext_struct(0x16, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x17 command') + logger(__name__).debug("Sending ext 0x17 command") tx_payload = struct.pack("<2B", file_idx, options) rx = self._txrx_ext_struct(0x17, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x19 command') + logger(__name__).debug("Sending ext 0x19 command") if isinstance(vid, str): vid = self.vid_map[vid.lower()] - ui.logger(__name__).debug(f'Options: {dict(vid=vid, file_name=file_name)}') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + ui.logger(__name__).debug(f"Options: {dict(vid=vid, file_name=file_name)}") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) 
rx = self._txrx_ext_struct(0x19, tx_payload, " Dict[str, Any]: - logger(__name__).debug('Sending ext 0x1C command') - tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding='ascii')) + logger(__name__).debug("Sending ext 0x1C command") + tx_payload = struct.pack("<2B24s", vid, options, file_name.encode(encoding="ascii")) ret = self._txrx_ext_struct(0x1C, tx_payload, " SystemStatus: from semantic_version import Version - logger(__name__).debug('Sending ext 0x22 command') + logger(__name__).debug("Sending ext 0x22 command") version = self.query_system_version() - if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec('<1.0.13')) or ( + if (version.product == V5Device.SystemVersion.Product.BRAIN and version.system_version in Spec("<1.0.13")) or ( version.product == V5Device.SystemVersion.Product.CONTROLLER - and version.system_version in Spec('<1.0.0-0.70') + and version.system_version in Spec("<1.0.0-0.70") ): - schema = ' bytes: # read/write are the same command, behavior dictated by specifying # length-to-read as 0xFF and providing additional payload bytes to write or # specifying a length-to-read and no additional data to read. - logger(__name__).debug('Sending ext 0x27 command (read)') + logger(__name__).debug("Sending ext 0x27 command (read)") # specifying a length to read (0x40 bytes) with no additional payload data. - tx_payload = struct.pack("<2B", self.channel_map['download'], 0x40) + tx_payload = struct.pack("<2B", self.channel_map["download"], 0x40) # RX length isn't always 0x40 (end of buffer reached), so don't check_length. self._serial_cache += self._txrx_ext_packet(0x27, tx_payload, 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (read)') + logger(__name__).debug("Completed ext 0x27 command (read)") # if _serial_cache doesn't have a \x00, pretend we didn't read anything. - if b'\x00' not in self._serial_cache: - return b'' + if b"\x00" not in self._serial_cache: + return b"" # _serial_cache has a \x00, split off the beginning part and hand it down. - parts = self._serial_cache.split(b'\x00') - ret = parts[0] + b'\x00' - self._serial_cache = b'\x00'.join(parts[1:]) + parts = self._serial_cache.split(b"\x00") + ret = parts[0] + b"\x00" + self._serial_cache = b"\x00".join(parts[1:]) return ret @@ -997,16 +997,16 @@ def user_fifo_read(self) -> bytes: def user_fifo_write(self, payload: Union[Iterable, bytes, bytearray, str]): # Not currently implemented return - logger(__name__).debug('Sending ext 0x27 command (write)') + logger(__name__).debug("Sending ext 0x27 command (write)") max_packet_size = 224 pl_len = len(payload) for i in range(0, pl_len, max_packet_size): packet_size = max_packet_size if i + max_packet_size > pl_len: packet_size = pl_len - i - logger(__name__).debug(f'Writing {packet_size} bytes to user FIFO') - self._txrx_ext_packet(0x27, b'\x01\x00' + payload[i:packet_size], 0, check_length=False)[1:] - logger(__name__).debug('Completed ext 0x27 command (write)') + logger(__name__).debug(f"Writing {packet_size} bytes to user FIFO") + self._txrx_ext_packet(0x27, b"\x01\x00" + payload[i:packet_size], 0, check_length=False)[1:] + logger(__name__).debug("Completed ext 0x27 command (write)") @retries def sc_init(self) -> None: @@ -1014,35 +1014,35 @@ def sc_init(self) -> None: Send command to initialize screen capture """ # This will only copy data in memory, not send! 
- logger(__name__).debug('Sending ext 0x28 command') - self._txrx_ext_struct(0x28, [], '') - logger(__name__).debug('Completed ext 0x28 command') + logger(__name__).debug("Sending ext 0x28 command") + self._txrx_ext_struct(0x28, [], "") + logger(__name__).debug("Completed ext 0x28 command") @retries def kv_read(self, kv: str) -> bytearray: - logger(__name__).debug('Sending ext 0x2e command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - tx_payload = struct.pack(f'<{len(encoded_kv)}s', encoded_kv) + logger(__name__).debug("Sending ext 0x2e command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + tx_payload = struct.pack(f"<{len(encoded_kv)}s", encoded_kv) # Because the length of the kernel variables is not known, use None to indicate we are recieving an unknown length. ret = self._txrx_ext_packet(0x2E, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2e command') + logger(__name__).debug("Completed ext 0x2e command") return ret @retries def kv_write(self, kv: str, payload: Union[Iterable, bytes, bytearray, str]): - logger(__name__).debug('Sending ext 0x2f command') - encoded_kv = f'{kv}\0'.encode(encoding='ascii') - kv_to_max_bytes = {'teamnumber': 7, 'robotname': 16} + logger(__name__).debug("Sending ext 0x2f command") + encoded_kv = f"{kv}\0".encode(encoding="ascii") + kv_to_max_bytes = {"teamnumber": 7, "robotname": 16} if len(payload) > kv_to_max_bytes.get(kv, 254): - print(f'Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).') + print(f"Truncating input to meet maximum value length ({kv_to_max_bytes[kv]} characters).") # Trim down size of payload to fit within the 255 byte limit and add null terminator. payload = payload[: kv_to_max_bytes.get(kv, 254)] + "\0" if isinstance(payload, str): - payload = payload.encode(encoding='ascii') - tx_fmt = f'<{len(encoded_kv)}s{len(payload)}s' + payload = payload.encode(encoding="ascii") + tx_fmt = f"<{len(encoded_kv)}s{len(payload)}s" tx_payload = struct.pack(tx_fmt, encoded_kv, payload) self._txrx_ext_packet(0x2F, tx_payload, 1, check_length=False, check_ack=True) - logger(__name__).debug('Completed ext 0x2f command') + logger(__name__).debug("Completed ext 0x2f command") return payload def _txrx_ext_struct( @@ -1073,7 +1073,7 @@ def _txrx_ext_struct( check_ack=check_ack, timeout=timeout, ) - logger(__name__).debug('Unpacking with format: {}'.format(unpack_fmt)) + logger(__name__).debug("Unpacking with format: {}".format(unpack_fmt)) return struct.unpack(unpack_fmt, rx) @classmethod @@ -1089,11 +1089,11 @@ def _rx_ext_packet( :param tx_payload: what was sent, used if an exception needs to be thrown :return: The payload of the extended message """ - assert msg['command'] == 0x56 + assert msg["command"] == 0x56 if not cls.VEX_CRC16.compute(msg.rx) == 0: raise VEXCommError("CRC of message didn't match 0: {}".format(cls.VEX_CRC16.compute(msg.rx)), msg) - assert msg['payload'][0] == command - msg = msg['payload'][1:-2] + assert msg["payload"][0] == command + msg = msg["payload"][1:-2] if check_ack: nacks = { 0xFF: "General NACK", @@ -1117,12 +1117,12 @@ def _rx_ext_packet( raise VEXCommError("Device didn't ACK", msg) msg = msg[1:] if len(msg) > 0: - logger(cls).debug('Set msg window to {}'.format(bytes_to_str(msg))) + logger(cls).debug("Set msg window to {}".format(bytes_to_str(msg))) if len(msg) < rx_length and check_length: - raise VEXCommError(f'Received length is less than {rx_length} (got {len(msg)}).', msg) + raise VEXCommError(f"Received length is less than 
{rx_length} (got {len(msg)}).", msg) elif len(msg) > rx_length and check_length: ui.echo( - f'WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()}).' + f"WARNING: Recieved length is more than {rx_length} (got {len(msg)}). Consider upgrading the PROS (CLI Version: {get_version()})." ) return msg diff --git a/pros/serial/devices/vex/v5_user_device.py b/pros/serial/devices/vex/v5_user_device.py index 95ce2fc6..f6d88c9e 100644 --- a/pros/serial/devices/vex/v5_user_device.py +++ b/pros/serial/devices/vex/v5_user_device.py @@ -30,21 +30,21 @@ def promiscuous(self, value: bool): def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.port.write(data) def read(self) -> Tuple[bytes, bytes]: msg = None, None while msg[0] is None or (msg[0] not in self.topics and not self._accept_all): - while b'\0' not in self.buffer: + while b"\0" not in self.buffer: self.buffer.extend(self.port.read(1)) self.buffer.extend(self.port.read(-1)) - assert b'\0' in self.buffer - msg, self.buffer = self.buffer.split(b'\0', 1) + assert b"\0" in self.buffer + msg, self.buffer = self.buffer.split(b"\0", 1) try: msg = cobs.decode(msg) except cobs.DecodeError: - logger(__name__).warning(f'Could not decode bytes: {msg.hex()}') + logger(__name__).warning(f"Could not decode bytes: {msg.hex()}") assert len(msg) >= 4 msg = bytes(msg[:4]), bytes(msg[4:]) return msg diff --git a/pros/serial/devices/vex/vex_device.py b/pros/serial/devices/vex/vex_device.py index d234dd78..261415e6 100644 --- a/pros/serial/devices/vex/vex_device.py +++ b/pros/serial/devices/vex/vex_device.py @@ -29,7 +29,7 @@ def query_system(self) -> bytearray: Verify that a VEX device is connected. 
Returned payload varies by product :return: Payload response """ - logger(__name__).debug('Sending simple 0x21 command') + logger(__name__).debug("Sending simple 0x21 command") return self._txrx_simple_packet(0x21, 0x0A) def _txrx_simple_struct(self, command: int, unpack_fmt: str, timeout: Optional[float] = None) -> Tuple: @@ -46,11 +46,11 @@ def _txrx_simple_packet(self, command: int, rx_len: int, timeout: Optional[float :return: They payload of the message, or raises and exception if there was an issue """ msg = self._txrx_packet(command, timeout=timeout) - if msg['command'] != command: - raise comm_error.VEXCommError('Received command does not match sent command.', msg) - if len(msg['payload']) != rx_len: + if msg["command"] != command: + raise comm_error.VEXCommError("Received command does not match sent command.", msg) + if len(msg["payload"]) != rx_len: raise comm_error.VEXCommError("Received data doesn't match expected length", msg) - return msg['payload'] + return msg["payload"] def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[int, bytes, bytearray], Any]]: # Optimized to read as quickly as possible w/o delay @@ -82,18 +82,18 @@ def _rx_packet(self, timeout: Optional[float] = None) -> Dict[str, Union[Union[i rx.extend(self.port.read(1)) payload_length = rx[-1] if command == 0x56 and (payload_length & 0x80) == 0x80: - logger(__name__).debug('Found an extended message payload') + logger(__name__).debug("Found an extended message payload") rx.extend(self.port.read(1)) payload_length = ((payload_length & 0x7F) << 8) + rx[-1] payload = self.port.read(payload_length) rx.extend(payload) - return {'command': command, 'payload': payload, 'raw': rx} + return {"command": command, "payload": payload, "raw": rx} def _tx_packet(self, command: int, tx_data: Union[Iterable, bytes, bytearray, None] = None): tx = self._form_simple_packet(command) if tx_data is not None: tx = bytes([*tx, *tx_data]) - logger(__name__).debug(f'{self.__class__.__name__} TX: {bytes_to_str(tx)}') + logger(__name__).debug(f"{self.__class__.__name__} TX: {bytes_to_str(tx)}") self.port.read_all() self.port.write(tx) self.port.flush() @@ -113,10 +113,10 @@ def _txrx_packet( """ tx = self._tx_packet(command, tx_data) rx = self._rx_packet(timeout=timeout) - msg = Message(rx['raw'], tx) + msg = Message(rx["raw"], tx) logger(__name__).debug(msg) - msg['payload'] = Message(rx['raw'], tx, internal_rx=rx['payload']) - msg['command'] = rx['command'] + msg["payload"] = Message(rx["raw"], tx, internal_rx=rx["payload"]) + msg["command"] = rx["command"] return msg @staticmethod diff --git a/pros/serial/interactive/UploadProjectModal.py b/pros/serial/interactive/UploadProjectModal.py index 4336861b..7b0b7702 100644 --- a/pros/serial/interactive/UploadProjectModal.py +++ b/pros/serial/interactive/UploadProjectModal.py @@ -14,14 +14,14 @@ class UploadProjectModal(application.Modal[None]): def __init__(self, project: Optional[Project]): - super(UploadProjectModal, self).__init__('Upload Project', confirm_button='Upload') + super(UploadProjectModal, self).__init__("Upload Project", confirm_button="Upload") self.project: Optional[Project] = project self.project_path = ExistingProjectParameter( - str(project.location) if project else os.path.join(os.path.expanduser('~'), 'My PROS Project') + str(project.location) if project else os.path.join(os.path.expanduser("~"), "My PROS Project") ) - self.port = parameters.OptionParameter('', ['']) + self.port = parameters.OptionParameter("", [""]) self.save_settings = 
parameters.BooleanParameter(True) self.advanced_options: Dict[str, parameters.Parameter] = {} self.advanced_options_collapsed = parameters.BooleanParameter(True) @@ -40,51 +40,51 @@ def cleanup_poll_comports_thread(): cb(self.project_path) def update_slots(self): - assert self.project.target == 'v5' + assert self.project.target == "v5" if self.port.is_valid() and bool(self.port.value): from pros.serial.devices.vex import V5Device from pros.serial.ports import DirectPort device = V5Device(DirectPort(self.port.value)) slot_options = [ - f'{slot}' + ('' if program is None else f' (Currently: {program})') + f"{slot}" + ("" if program is None else f" (Currently: {program})") for slot, program in device.used_slots().items() ] else: slot_options = [str(i) for i in range(1, 9)] - project_name = self.advanced_options['name'].value - if 'slot' in self.project.upload_options: + project_name = self.advanced_options["name"].value + if "slot" in self.project.upload_options: # first, see if the project has it specified in its upload options - selected = slot_options[self.project.upload_options['slot'] - 1] + selected = slot_options[self.project.upload_options["slot"] - 1] else: # otherwise, try to do a name match - matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f'{project_name})')] + matched_slots = [i for i, slot in enumerate(slot_options) if slot.endswith(f"{project_name})")] if len(matched_slots) > 0: selected = slot_options[matched_slots[0]] - elif 'slot' in self.advanced_options: + elif "slot" in self.advanced_options: # or whatever the last value was - selected = slot_options[int(self.advanced_options['slot'].value[0]) - 1] + selected = slot_options[int(self.advanced_options["slot"].value[0]) - 1] else: # or just slot 1 selected = slot_options[0] - self.advanced_options['slot'] = parameters.OptionParameter(selected, slot_options) + self.advanced_options["slot"] = parameters.OptionParameter(selected, slot_options) def update_comports(self): list_all_comports.cache_clear() if isinstance(self.project, Project): options = {} - if self.project.target == 'v5': - options = {p.device for p in find_v5_ports('system')} - elif self.project.target == 'cortex': + if self.project.target == "v5": + options = {p.device for p in find_v5_ports("system")} + elif self.project.target == "cortex": options = [p.device for p in find_cortex_ports()] if options != {*self.port.options}: self.port.options = list(options) if self.port.value not in options: - self.port.update(self.port.options[0] if len(self.port.options) > 0 else 'No ports found') - ui.logger(__name__).debug('Updating ports') + self.port.update(self.port.options[0] if len(self.port.options) > 0 else "No ports found") + ui.logger(__name__).debug("Updating ports") - if self.project and self.project.target == 'v5': + if self.project and self.project.target == "v5": self.update_slots() self.redraw() @@ -100,13 +100,13 @@ def project_changed(self, new_project: ExistingProjectParameter): assert self.project is not None - if self.project.target == 'v5': + if self.project.target == "v5": self.advanced_options = { - 'name': parameters.Parameter(self.project.upload_options.get('remote_name', self.project.name)), - 'description': parameters.Parameter( - self.project.upload_options.get('description', 'Created with PROS') + "name": parameters.Parameter(self.project.upload_options.get("remote_name", self.project.name)), + "description": parameters.Parameter( + self.project.upload_options.get("description", "Created with PROS") ), - 
'compress_bin': parameters.BooleanParameter(self.project.upload_options.get('compress_bin', True)), + "compress_bin": parameters.BooleanParameter(self.project.upload_options.get("compress_bin", True)), } self.update_slots() else: @@ -123,14 +123,14 @@ def confirm(self, *args, **kwargs): from pros.cli.upload import upload - kwargs = {'path': None, 'project': self.project, 'port': self.port.value} + kwargs = {"path": None, "project": self.project, "port": self.port.value} savable_kwargs = {} - if self.project.target == 'v5': - savable_kwargs['remote_name'] = self.advanced_options['name'].value + if self.project.target == "v5": + savable_kwargs["remote_name"] = self.advanced_options["name"].value # XXX: the first character is the slot number - savable_kwargs['slot'] = int(self.advanced_options['slot'].value[0]) - savable_kwargs['description'] = self.advanced_options['description'].value - savable_kwargs['compress_bin'] = self.advanced_options['compress_bin'].value + savable_kwargs["slot"] = int(self.advanced_options["slot"].value[0]) + savable_kwargs["description"] = self.advanced_options["description"].value + savable_kwargs["compress_bin"] = self.advanced_options["compress_bin"].value if self.save_settings.value: self.project.upload_options.update(savable_kwargs) @@ -152,16 +152,16 @@ def build(self) -> Generator[components.Component, None, None]: self.poll_comports_thread = Thread(target=with_click_context(self.poll_comports)) self.poll_comports_thread.start() - yield components.DirectorySelector('Project Directory', self.project_path) - yield components.DropDownBox('Port', self.port) - yield components.Checkbox('Save upload settings', self.save_settings) + yield components.DirectorySelector("Project Directory", self.project_path) + yield components.DropDownBox("Port", self.port) + yield components.Checkbox("Save upload settings", self.save_settings) - if isinstance(self.project, Project) and self.project.target == 'v5': + if isinstance(self.project, Project) and self.project.target == "v5": yield components.Container( - components.InputBox('Program Name', self.advanced_options['name']), - components.DropDownBox('Slot', self.advanced_options['slot']), - components.InputBox('Description', self.advanced_options['description']), - components.Checkbox('Compress Binary', self.advanced_options['compress_bin']), - title='Advanced V5 Options', + components.InputBox("Program Name", self.advanced_options["name"]), + components.DropDownBox("Slot", self.advanced_options["slot"]), + components.InputBox("Description", self.advanced_options["description"]), + components.Checkbox("Compress Binary", self.advanced_options["compress_bin"]), + title="Advanced V5 Options", collapsed=self.advanced_options_collapsed, ) diff --git a/pros/serial/interactive/__init__.py b/pros/serial/interactive/__init__.py index aa7f4062..ec961c20 100644 --- a/pros/serial/interactive/__init__.py +++ b/pros/serial/interactive/__init__.py @@ -1,3 +1,3 @@ from .UploadProjectModal import UploadProjectModal -__all__ = ['UploadProjectModal'] +__all__ = ["UploadProjectModal"] diff --git a/pros/serial/ports/__init__.py b/pros/serial/ports/__init__.py index e1d15175..a880d536 100644 --- a/pros/serial/ports/__init__.py +++ b/pros/serial/ports/__init__.py @@ -13,5 +13,5 @@ @lru_cache() def list_all_comports(): ports = list_ports.comports() - logger(__name__).debug('Connected: {}'.format(';'.join([str(p.__dict__) for p in ports]))) + logger(__name__).debug("Connected: {}".format(";".join([str(p.__dict__) for p in ports]))) return ports diff 
--git a/pros/serial/ports/direct_port.py b/pros/serial/ports/direct_port.py index 0fb20ad3..d18d36e2 100644 --- a/pros/serial/ports/direct_port.py +++ b/pros/serial/ports/direct_port.py @@ -11,7 +11,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial.Serial: try: - logger(__name__).debug(f'Opening serial port {port_name}') + logger(__name__).debug(f"Opening serial port {port_name}") port = serial.Serial( port_name, baudrate=115200, @@ -23,7 +23,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial port.inter_byte_timeout = 0.2 return port except serial.SerialException as e: - if any(msg in str(e) for msg in ['Access is denied', 'Errno 16', 'Errno 13']): + if any(msg in str(e) for msg in ["Access is denied", "Errno 16", "Errno 13"]): tb = sys.exc_info()[2] raise dont_send(ConnectionRefusedException(port_name, e).with_traceback(tb)) else: @@ -33,7 +33,7 @@ def create_serial_port(port_name: str, timeout: Optional[float] = 1.0) -> serial class DirectPort(BasePort): def __init__(self, port_name: str, **kwargs): - self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop('timeout', 1.0)) + self.serial: serial.Serial = create_serial_port(port_name=port_name, timeout=kwargs.pop("timeout", 1.0)) self.buffer: bytearray = bytearray() def read(self, n_bytes: int = 0) -> bytes: @@ -58,14 +58,14 @@ def read(self, n_bytes: int = 0) -> bytes: def write(self, data: Union[str, bytes]): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") self.serial.write(data) def flush(self): self.serial.flush() def destroy(self): - logger(__name__).debug(f'Destroying {self.__class__.__name__} to {self.serial.name}') + logger(__name__).debug(f"Destroying {self.__class__.__name__} to {self.serial.name}") self.serial.close() @property diff --git a/pros/serial/ports/exceptions.py b/pros/serial/ports/exceptions.py index 71843fee..44e63f30 100644 --- a/pros/serial/ports/exceptions.py +++ b/pros/serial/ports/exceptions.py @@ -9,9 +9,9 @@ def __init__(self, port_name: str, reason: Exception): self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " return ( f"could not open port '{self.port_name}'. Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " f"firmware utilities; moving to a different USB port; {extra}or " @@ -25,9 +25,9 @@ def __init__(self, port_name: str, reason: Exception): self.port_name = port_name def __str__(self): - extra = '' - if os.name == 'posix': - extra = 'adding yourself to dialout group ' + extra = "" + if os.name == "posix": + extra = "adding yourself to dialout group " return ( f"Port not found: Could not open port '{self.port_name}'. 
Try closing any other VEX IDEs such as VEXCode, Robot Mesh Studio, or " f"firmware utilities; moving to a different USB port; {extra}or " diff --git a/pros/serial/ports/serial_share_bridge.py b/pros/serial/ports/serial_share_bridge.py index cc35cd6a..b06827c6 100644 --- a/pros/serial/ports/serial_share_bridge.py +++ b/pros/serial/ports/serial_share_bridge.py @@ -13,22 +13,22 @@ def get_port_num(serial_port_name: str, hash: str) -> int: - return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding='ascii')) + return sum("Powered by PROS: {}-{}".format(serial_port_name, hash).encode(encoding="ascii")) def get_from_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'from') + return get_port_num(serial_port_name, "from") def get_to_device_port_num(serial_port_name: str) -> int: - return get_port_num(serial_port_name, 'to') + return get_port_num(serial_port_name, "to") class SerialShareBridge(object): def __init__( self, serial_port_name: str, - base_addr: str = '127.0.0.1', + base_addr: str = "127.0.0.1", to_device_port_num: int = None, from_device_port_num: int = None, ): @@ -56,16 +56,16 @@ def from_device_port_num(self): def start(self): # this function is still in the parent process - mp_ctx = multiprocessing.get_context('spawn') + mp_ctx = multiprocessing.get_context("spawn") barrier = multiprocessing.Barrier(3) - task = mp_ctx.Process(target=self._start, name='Serial Share Bridge', args=(barrier,)) + task = mp_ctx.Process(target=self._start, name="Serial Share Bridge", args=(barrier,)) task.daemon = False task.start() barrier.wait(1) return task def kill(self, do_join: bool = False): - logger(__name__).info('Killing serial share server due to watchdog') + logger(__name__).info("Killing serial share server due to watchdog") self.dying.set() self.port.destroy() if not self.zmq_ctx.closed: @@ -78,14 +78,14 @@ def kill(self, do_join: bool = False): def _start(self, initialization_barrier: multiprocessing.Barrier): try: - log_dir = os.path.join(get_pros_dir(), 'logs') + log_dir = os.path.join(get_pros_dir(), "logs") os.makedirs(log_dir, exist_ok=True) pros_logger = logging.getLogger(pros.__name__) pros_logger.setLevel(logging.DEBUG) - log_file_name = os.path.join(get_pros_dir(), 'logs', 'serial-share-bridge.log') + log_file_name = os.path.join(get_pros_dir(), "logs", "serial-share-bridge.log") handler = logging.handlers.TimedRotatingFileHandler(log_file_name, backupCount=1) handler.setLevel(logging.DEBUG) - fmt_str = '%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})'.format( + fmt_str = "%(name)s.%(funcName)s:%(levelname)s - %(asctime)s - %(message)s (%(process)d) ({})".format( self._serial_port_name ) handler.setFormatter(logging.Formatter(fmt_str)) @@ -95,10 +95,10 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): # timeout is none, so blocks indefinitely. 
Helps reduce CPU usage when there's nothing being recv self.port = DirectPort(self._serial_port_name, timeout=None) self.from_device_thread = threading.Thread( - target=self._from_device_loop, name='From Device Reader', daemon=False, args=(initialization_barrier,) + target=self._from_device_loop, name="From Device Reader", daemon=False, args=(initialization_barrier,) ) self.to_device_thread = threading.Thread( - target=self._to_device_loop, name='To Device Reader', daemon=False, args=(initialization_barrier,) + target=self._to_device_loop, name="To Device Reader", daemon=False, args=(initialization_barrier,) ) self.dying = threading.Event() # type: threading.Event self.from_device_thread.start() @@ -108,7 +108,7 @@ def _start(self, initialization_barrier: multiprocessing.Barrier): pass logger(__name__).info( - 'Main serial share bridge thread is dying. Everything else should be dead: {}'.format( + "Main serial share bridge thread is dying. Everything else should be dead: {}".format( threading.active_count() - 1 ) ) @@ -122,9 +122,9 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): rxd = 0 try: from_ser_sock = self.zmq_ctx.socket(zmq.PUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._from_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._from_port_num) from_ser_sock.bind(addr) - logger(__name__).info('Bound from device broadcaster as a publisher to {}'.format(addr)) + logger(__name__).info("Bound from device broadcaster as a publisher to {}".format(addr)) initialization_barrier.wait() buffer = bytearray() while not self.dying.is_set(): @@ -133,28 +133,28 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): # then read everything available buffer.extend(self.port.read(1)) buffer.extend(self.port.read(-1)) - while b'\0' in buffer and not self.dying.is_set(): - msg, buffer = buffer.split(b'\0', 1) + while b"\0" in buffer and not self.dying.is_set(): + msg, buffer = buffer.split(b"\0", 1) msg = cobs.decode(msg) from_ser_sock.send_multipart((msg[:4], msg[4:])) rxd += 1 time.sleep(0) except Exception as e: # TODO: when getting a COBS decode error, rebroadcast the bytes on sout - logger(__name__).error('Unexpected error handling {}'.format(bytes_to_str(msg[:-1]))) + logger(__name__).error("Unexpected error handling {}".format(bytes_to_str(msg[:-1]))) logger(__name__).exception(e) errors += 1 logger(__name__).info( - 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + "Current from device broadcasting error rate: {} errors. {} successful. {}%".format( errors, rxd, errors / (errors + rxd) ) ) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('From Device Broadcaster is dying now.') + logger(__name__).warning("From Device Broadcaster is dying now.") logger(__name__).info( - 'Current from device broadcasting error rate: {} errors. {} successful. {}%'.format( + "Current from device broadcasting error rate: {} errors. {} successful. 
{}%".format( errors, rxd, errors / (errors + rxd) ) ) @@ -166,10 +166,10 @@ def _from_device_loop(self, initialization_barrier: multiprocessing.Barrier): def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): try: to_ser_sock = self.zmq_ctx.socket(zmq.SUB) - addr = 'tcp://{}:{}'.format(self._base_addr, self._to_port_num) + addr = "tcp://{}:{}".format(self._base_addr, self._to_port_num) to_ser_sock.bind(addr) - to_ser_sock.setsockopt(zmq.SUBSCRIBE, b'') - logger(__name__).info('Bound to device broadcaster as a subscriber to {}'.format(addr)) + to_ser_sock.setsockopt(zmq.SUBSCRIBE, b"") + logger(__name__).info("Bound to device broadcaster as a subscriber to {}".format(addr)) watchdog = threading.Timer(10, self.kill) initialization_barrier.wait() watchdog.start() @@ -177,18 +177,18 @@ def _to_device_loop(self, initialization_barrier: multiprocessing.Barrier): msg = to_ser_sock.recv_multipart() if not msg or self.dying.is_set(): continue - if msg[0] == b'kick': - logger(__name__).debug('Kicking watchdog on server {}'.format(threading.current_thread())) + if msg[0] == b"kick": + logger(__name__).debug("Kicking watchdog on server {}".format(threading.current_thread())) watchdog.cancel() watchdog = threading.Timer(msg[1][1] if len(msg) > 1 and len(msg[1]) > 0 else 5, self.kill) watchdog.start() - elif msg[0] == b'send': - logger(self).debug('Writing {} to {}'.format(bytes_to_str(msg[1]), self.port.port_name)) + elif msg[0] == b"send": + logger(self).debug("Writing {} to {}".format(bytes_to_str(msg[1]), self.port.port_name)) self.port.write(msg[1]) except Exception as e: initialization_barrier.abort() logger(__name__).exception(e) - logger(__name__).warning('To Device Broadcaster is dying now.') + logger(__name__).warning("To Device Broadcaster is dying now.") try: self.kill(do_join=False) except: diff --git a/pros/serial/ports/serial_share_port.py b/pros/serial/ports/serial_share_port.py index 5f5691de..1a9df09a 100644 --- a/pros/serial/ports/serial_share_port.py +++ b/pros/serial/ports/serial_share_port.py @@ -6,8 +6,8 @@ class SerialSharePort(BasePort): def __init__( self, port_name: str, - topic: bytes = b'sout', - addr: str = '127.0.0.1', + topic: bytes = b"sout", + addr: str = "127.0.0.1", to_device_port: int = None, from_device_port: int = None, ): @@ -29,20 +29,20 @@ def __init__( self.from_device_sock = self.ctx.socket(zmq.SUB) # type: zmq.Socket self.from_device_sock.setsockopt(zmq.SUBSCRIBE, self.topic) - self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b'kdbg') - self.from_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._from_port_num)) + self.from_device_sock.setsockopt(zmq.SUBSCRIBE, b"kdbg") + self.from_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._from_port_num)) logger(__name__).info( - 'Connected from device as a subscriber on tcp://{}:{}'.format(self._base_addr, self._from_port_num) + "Connected from device as a subscriber on tcp://{}:{}".format(self._base_addr, self._from_port_num) ) self.to_device_sock = self.ctx.socket(zmq.PUB) # type: zmq.Socket - self.to_device_sock.connect('tcp://{}:{}'.format(self._base_addr, self._to_port_num)) + self.to_device_sock.connect("tcp://{}:{}".format(self._base_addr, self._to_port_num)) logger(__name__).info( - 'Connected to device as a publisher on tcp://{}:{}'.format(self._base_addr, self._to_port_num) + "Connected to device as a publisher on tcp://{}:{}".format(self._base_addr, self._to_port_num) ) self.alive = threading.Event() - self.watchdog_thread = 
threading.Thread(target=self._kick_watchdog, name='Client Kicker') + self.watchdog_thread = threading.Thread(target=self._kick_watchdog, name="Client Kicker") self.watchdog_thread.start() def read(self, n_bytes: int = -1): @@ -58,22 +58,22 @@ def read_packet(self): def write(self, data: AnyStr): if isinstance(data, str): - data = data.encode(encoding='ascii') + data = data.encode(encoding="ascii") assert isinstance(data, bytes) - self.to_device_sock.send_multipart([b'send', data]) + self.to_device_sock.send_multipart([b"send", data]) def subscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRe', *topic])) + self.write(bytearray([*b"pRe", *topic])) self.from_device_sock.subscribe(topic=topic) def unsubscribe(self, topic: bytes): assert len(topic) == 4 - self.write(bytearray([*b'pRd', *topic])) + self.write(bytearray([*b"pRd", *topic])) self.from_device_sock.unsubscribe(topic=topic) def destroy(self): - logger(__name__).info('Destroying {}'.format(self)) + logger(__name__).info("Destroying {}".format(self)) self.alive.set() if self.watchdog_thread.is_alive(): self.watchdog_thread.join() @@ -85,7 +85,7 @@ def destroy(self): def _kick_watchdog(self): time.sleep(0.5) while not self.alive.is_set(): - logger(__name__).debug('Kicking server from {}'.format(threading.current_thread())) - self.to_device_sock.send_multipart([b'kick']) + logger(__name__).debug("Kicking server from {}".format(threading.current_thread())) + self.to_device_sock.send_multipart([b"kick"]) self.alive.wait(2.5) - logger(__name__).info('Watchdog kicker is dying') + logger(__name__).info("Watchdog kicker is dying") diff --git a/pros/serial/terminal/terminal.py b/pros/serial/terminal/terminal.py index 05710568..4bd805f7 100644 --- a/pros/serial/terminal/terminal.py +++ b/pros/serial/terminal/terminal.py @@ -59,7 +59,7 @@ def __exit__(self, *args, **kwargs): self.setup() -if os.name == 'nt': # noqa +if os.name == "nt": # noqa import ctypes import msvcrt @@ -112,7 +112,7 @@ def cancel(self): hwnd = ctypes.windll.kernel32.GetConsoleWindow() ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0D, 0) -elif os.name == 'posix': +elif os.name == "posix": import atexit import select import termios @@ -155,7 +155,7 @@ def cleanup(self): termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old) else: - raise NotImplementedError('Sorry no implementation for your platform ({})' ' available.'.format(sys.platform)) + raise NotImplementedError("Sorry no implementation for your platform ({})" " available.".format(sys.platform)) class Terminal(object): @@ -165,8 +165,8 @@ def __init__( self, port_instance: StreamDevice, transformations=(), output_raw: bool = False, request_banner: bool = True ): self.device = port_instance - self.device.subscribe(b'sout') - self.device.subscribe(b'serr') + self.device.subscribe(b"sout") + self.device.subscribe(b"serr") self.transformations = transformations self._reader_alive = None self.receiver_thread = None # type: threading.Thread @@ -182,7 +182,7 @@ def __init__( def _start_rx(self): self._reader_alive = True - self.receiver_thread = threading.Thread(target=self.reader, name='serial-rx-term') + self.receiver_thread = threading.Thread(target=self.reader, name="serial-rx-term") self.receiver_thread.daemon = True self.receiver_thread.start() @@ -192,7 +192,7 @@ def _stop_rx(self): def _start_tx(self): self._transmitter_alive = True - self.transmitter_thread = threading.Thread(target=self.transmitter, name='serial-tx-term') + self.transmitter_thread = 
threading.Thread(target=self.transmitter, name="serial-tx-term") self.transmitter_thread.daemon = True self.transmitter_thread.start() @@ -204,7 +204,7 @@ def _stop_tx(self): def reader(self): if self.request_banner: try: - self.device.write(b'pRb') + self.device.write(b"pRb") except Exception as e: logger(__name__).exception(e) try: @@ -212,25 +212,25 @@ def reader(self): data = self.device.read() if not data: continue - if data[0] == b'sout': + if data[0] == b"sout": text = decode_bytes_to_str(data[1]) - elif data[0] == b'serr': - text = '{}{}{}'.format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL) - elif data[0] == b'kdbg': - text = '{}\n\nKERNEL DEBUG:\t{}{}\n'.format( + elif data[0] == b"serr": + text = "{}{}{}".format(colorama.Fore.RED, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL) + elif data[0] == b"kdbg": + text = "{}\n\nKERNEL DEBUG:\t{}{}\n".format( colorama.Back.GREEN + colorama.Style.BRIGHT, decode_bytes_to_str(data[1]), colorama.Style.RESET_ALL, ) - elif data[0] != b'': - text = '{}{}'.format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1])) + elif data[0] != b"": + text = "{}{}".format(decode_bytes_to_str(data[0]), decode_bytes_to_str(data[1])) else: text = "{}".format(decode_bytes_to_str(data[1])) self.console.write(text) except UnicodeError as e: logger(__name__).exception(e) except PortConnectionException: - logger(__name__).warning(f'Connection to {self.device.name} broken') + logger(__name__).warning(f"Connection to {self.device.name} broken") if not self.alive.is_set(): self.stop() except Exception as e: @@ -239,7 +239,7 @@ def reader(self): else: logger(__name__).debug(e) self.stop() - logger(__name__).info('Terminal receiver dying') + logger(__name__).info("Terminal receiver dying") def transmitter(self): try: @@ -247,14 +247,14 @@ def transmitter(self): try: c = self.console.getkey() except KeyboardInterrupt: - c = '\x03' + c = "\x03" if self.alive.is_set(): break - if c == '\x03' or not self.no_sigint: + if c == "\x03" or not self.no_sigint: self.stop() break else: - self.device.write(c.encode(encoding='utf-8')) + self.device.write(c.encode(encoding="utf-8")) self.console.write(c) except Exception as e: if not self.alive.is_set(): @@ -262,7 +262,7 @@ def transmitter(self): else: logger(__name__).debug(e) self.stop() - logger(__name__).info('Terminal transmitter dying') + logger(__name__).info("Terminal transmitter dying") def catch_sigint(self): self.no_sigint = False @@ -277,13 +277,13 @@ def start(self): def stop(self, *args): self.console.cleanup() if not self.alive.is_set(): - logger(__name__).warning('Stopping terminal') + logger(__name__).warning("Stopping terminal") self.alive.set() self.device.destroy() if threading.current_thread() != self.transmitter_thread and self.transmitter_thread.is_alive(): self.console.cleanup() self.console.cancel() - logger(__name__).info('All done!') + logger(__name__).info("All done!") def join(self): try: diff --git a/pros/upgrade/__init__.py b/pros/upgrade/__init__.py index 9794ad32..4c546227 100644 --- a/pros/upgrade/__init__.py +++ b/pros/upgrade/__init__.py @@ -5,4 +5,4 @@ def get_platformv2(): return UpgradeManifestV2().platform -__all__ = ['UpgradeManager', 'get_platformv2'] +__all__ = ["UpgradeManager", "get_platformv2"] diff --git a/pros/upgrade/instructions/__init__.py b/pros/upgrade/instructions/__init__.py index 003b586a..452e2915 100644 --- a/pros/upgrade/instructions/__init__.py +++ b/pros/upgrade/instructions/__init__.py @@ -3,4 +3,4 @@ from .explorer_instructions 
import ExplorerInstruction from .nothing_instructions import NothingInstruction -__all__ = ['UpgradeInstruction', 'UpgradeResult', 'NothingInstruction', 'ExplorerInstruction', 'DownloadInstruction'] +__all__ = ["UpgradeInstruction", "UpgradeResult", "NothingInstruction", "ExplorerInstruction", "DownloadInstruction"] diff --git a/pros/upgrade/instructions/download_instructions.py b/pros/upgrade/instructions/download_instructions.py index 666456c6..7a428c8c 100644 --- a/pros/upgrade/instructions/download_instructions.py +++ b/pros/upgrade/instructions/download_instructions.py @@ -11,7 +11,7 @@ class DownloadInstruction(UpgradeInstruction): Downloads a file """ - def __init__(self, url='', extension=None, download_description=None, success_explanation=None): + def __init__(self, url="", extension=None, download_description=None, success_explanation=None): self.url: str = url self.extension: Optional[str] = extension self.download_description: Optional[str] = download_description @@ -23,15 +23,15 @@ def perform_upgrade(self) -> UpgradeResult: file = download_file(self.url, ext=self.extension, desc=self.download_description) assert file except (AssertionError, IOError) as e: - return UpgradeResult(False, explanation=f'Failed to download required file. ({e})', exception=e) + return UpgradeResult(False, explanation=f"Failed to download required file. ({e})", exception=e) if self.success_explanation: - explanation = self.success_explanation.replace('//FILE\\\\', file).replace( - '//SHORT\\\\', os.path.split(file)[1] + explanation = self.success_explanation.replace("//FILE\\\\", file).replace( + "//SHORT\\\\", os.path.split(file)[1] ) else: - explanation = f'Downloaded {os.path.split(file)[1]}' + explanation = f"Downloaded {os.path.split(file)[1]}" return UpgradeResult(True, explanation=explanation, file=file, origin=self.url) def __str__(self) -> str: - return 'Download required file.' + return "Download required file." diff --git a/pros/upgrade/instructions/explorer_instructions.py b/pros/upgrade/instructions/explorer_instructions.py index d54666f1..c54748b9 100644 --- a/pros/upgrade/instructions/explorer_instructions.py +++ b/pros/upgrade/instructions/explorer_instructions.py @@ -12,8 +12,8 @@ def perform_upgrade(self) -> UpgradeResult: if result.successful: import click - click.launch(getattr(result, 'file')) + click.launch(getattr(result, "file")) return result def __str__(self) -> str: - return 'Download required file.' + return "Download required file." diff --git a/pros/upgrade/instructions/nothing_instructions.py b/pros/upgrade/instructions/nothing_instructions.py index a3619173..1c11df8c 100644 --- a/pros/upgrade/instructions/nothing_instructions.py +++ b/pros/upgrade/instructions/nothing_instructions.py @@ -3,7 +3,7 @@ class NothingInstruction(UpgradeInstruction): def __str__(self) -> str: - return 'No automated instructions. View release notes for installation instructions.' + return "No automated instructions. View release notes for installation instructions." 
def perform_upgrade(self) -> UpgradeResult: return UpgradeResult(True) diff --git a/pros/upgrade/manifests/__init__.py b/pros/upgrade/manifests/__init__.py index cc8fb43d..4e58eb16 100644 --- a/pros/upgrade/manifests/__init__.py +++ b/pros/upgrade/manifests/__init__.py @@ -5,4 +5,4 @@ # Order of files manifests = [UpgradeManifestV2, UpgradeManifestV1] # type: List[Type] -__all__ = ['UpgradeManifestV1', 'UpgradeManifestV2', 'manifests', 'PlatformsV2'] +__all__ = ["UpgradeManifestV1", "UpgradeManifestV2", "manifests", "PlatformsV2"] diff --git a/pros/upgrade/manifests/upgrade_manifest_v1.py b/pros/upgrade/manifests/upgrade_manifest_v1.py index 33714ef0..f0187d4a 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v1.py +++ b/pros/upgrade/manifests/upgrade_manifest_v1.py @@ -28,11 +28,11 @@ def describe_update(self) -> str: """ if self.needs_upgrade: return ( - f'There is an update available! {self.version} is the latest version.\n' - f'Go to {self.info_url} to learn more.' + f"There is an update available! {self.version} is the latest version.\n" + f"Go to {self.info_url} to learn more." ) else: - return f'You are up to date. ({self.version})' + return f"You are up to date. ({self.version})" def __str__(self): return self.describe_update() @@ -48,4 +48,4 @@ def perform_upgrade(self) -> UpgradeResult: return UpgradeResult(launch(self.info_url) == 0) def describe_post_install(self, **kwargs) -> str: - return f'Download the latest version from {self.info_url}' + return f"Download the latest version from {self.info_url}" diff --git a/pros/upgrade/manifests/upgrade_manifest_v2.py b/pros/upgrade/manifests/upgrade_manifest_v2.py index 6d4f3581..00ee656f 100644 --- a/pros/upgrade/manifests/upgrade_manifest_v2.py +++ b/pros/upgrade/manifests/upgrade_manifest_v2.py @@ -27,34 +27,34 @@ def __init__(self): super().__init__() self.platform_instructions: Dict[PlatformsV2, UpgradeInstruction] = {} - self._platform: 'PlatformsV2' = None + self._platform: "PlatformsV2" = None self._last_file: Optional[str] = None @property - def platform(self) -> 'PlatformsV2': + def platform(self) -> "PlatformsV2": """ Attempts to detect the current platform type :return: The detected platform type, or Unknown """ if self._platform is not None: return self._platform - if getattr(sys, 'frozen', False): + if getattr(sys, "frozen", False): import _constants - frozen_platform = getattr(_constants, 'FROZEN_PLATFORM_V1', None) + frozen_platform = getattr(_constants, "FROZEN_PLATFORM_V1", None) if isinstance(frozen_platform, str): - if frozen_platform.startswith('Windows86'): + if frozen_platform.startswith("Windows86"): self._platform = PlatformsV2.Windows86 - elif frozen_platform.startswith('Windows64'): + elif frozen_platform.startswith("Windows64"): self._platform = PlatformsV2.Windows64 - elif frozen_platform.startswith('MacOS'): + elif frozen_platform.startswith("MacOS"): self._platform = PlatformsV2.MacOS else: try: from pip._vendor import pkg_resources - results = [p for p in pkg_resources.working_set if p.project_name.startswith('pros-cli')] + results = [p for p in pkg_resources.working_set if p.project_name.startswith("pros-cli")] if any(results): self._platform = PlatformsV2.Pip except ImportError: @@ -70,9 +70,9 @@ def can_perform_upgrade(self) -> bool: def perform_upgrade(self) -> UpgradeResult: instructions: UpgradeInstruction = self.platform_instructions.get(self.platform, NothingInstruction()) logger(__name__).debug(self.__dict__) - logger(__name__).debug(f'Platform: {self.platform}') + 
logger(__name__).debug(f"Platform: {self.platform}") logger(__name__).debug(instructions.__dict__) return instructions.perform_upgrade() def __repr__(self): - return repr({'platform': self.platform, **self.__dict__}) + return repr({"platform": self.platform, **self.__dict__}) diff --git a/pros/upgrade/upgrade_manager.py b/pros/upgrade/upgrade_manager.py index d0bc3baf..efd39464 100644 --- a/pros/upgrade/upgrade_manager.py +++ b/pros/upgrade/upgrade_manager.py @@ -13,14 +13,14 @@ class ReleaseChannel(Enum): - Stable = 'stable' - Beta = 'beta' + Stable = "stable" + Beta = "beta" class UpgradeManager(Config): def __init__(self, file=None): if file is None: - file = os.path.join(cli_config().directory, 'upgrade.pros.json') + file = os.path.join(cli_config().directory, "upgrade.pros.json") self._last_check: datetime = datetime.min self._manifest: Optional[UpgradeManifestV1] = None self.release_channel: ReleaseChannel = ReleaseChannel.Stable @@ -30,23 +30,23 @@ def __init__(self, file=None): @property def has_stale_manifest(self): if self._manifest is None: - logger(__name__).debug('Upgrade manager\'s manifest is nonexistent') + logger(__name__).debug("Upgrade manager's manifest is nonexistent") if datetime.now() - self._last_check > cli_config().update_frequency: - logger(__name__).debug(f'Upgrade manager\'s last check occured at {self._last_check}.') - logger(__name__).debug(f'Was longer ago than update frequency ({cli_config().update_frequency}) allows.') + logger(__name__).debug(f"Upgrade manager's last check occured at {self._last_check}.") + logger(__name__).debug(f"Was longer ago than update frequency ({cli_config().update_frequency}) allows.") return (self._manifest is None) or (datetime.now() - self._last_check > cli_config().update_frequency) def get_manifest(self, force: bool = False) -> UpgradeManifestV1: if not force and not self.has_stale_manifest: return self._manifest - ui.echo('Fetching upgrade manifest...') + ui.echo("Fetching upgrade manifest...") import json import jsonpickle import requests - channel_url = f'https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}' + channel_url = f"https://purduesigbots.github.io/pros-mainline/{self.release_channel.value}" self._manifest = None manifest_urls = [f"{channel_url}/{manifest.__name__}.json" for manifest in manifests] @@ -60,13 +60,13 @@ def get_manifest(self, force: bool = False) -> UpgradeManifestV1: self.save() break except json.decoder.JSONDecodeError as e: - logger(__name__).warning(f'Failed to decode {manifest_url}') + logger(__name__).warning(f"Failed to decode {manifest_url}") logger(__name__).debug(e) else: - logger(__name__).debug(f'Failed to get {manifest_url} ({resp.status_code})') + logger(__name__).debug(f"Failed to get {manifest_url} ({resp.status_code})") if not self._manifest: manifest_list = "\n".join(manifest_urls) - logger(__name__).warning(f'Could not access any upgrade manifests from any of:\n{manifest_list}') + logger(__name__).warning(f"Could not access any upgrade manifests from any of:\n{manifest_list}") return self._manifest @property diff --git a/setup.py b/setup.py index b4cd07b8..6785b4e2 100644 --- a/setup.py +++ b/setup.py @@ -5,14 +5,14 @@ from install_requires import install_requires as install_reqs setup( - name='pros-cli', - version=open('pip_version').read().strip(), + name="pros-cli", + version=open("pip_version").read().strip(), packages=find_packages(), - url='https://github.com/purduesigbots/pros-cli', - license='MPL-2.0', - author='Purdue ACM SIGBots', - 
author_email='pros_development@cs.purdue.edu', - description='Command Line Interface for managing PROS projects', + url="https://github.com/purduesigbots/pros-cli", + license="MPL-2.0", + author="Purdue ACM SIGBots", + author_email="pros_development@cs.purdue.edu", + description="Command Line Interface for managing PROS projects", install_requires=install_reqs, - entry_points={'console_scripts': ['pros=pros.cli.main:main', 'prosv5=pros.cli.main:main']}, + entry_points={"console_scripts": ["pros=pros.cli.main:main", "prosv5=pros.cli.main:main"]}, ) diff --git a/version.py b/version.py index 658e3a4a..0523dbdf 100644 --- a/version.py +++ b/version.py @@ -3,38 +3,38 @@ from sys import stdout try: - with open(os.devnull, 'w') as devnull: + with open(os.devnull, "w") as devnull: v = ( - subprocess.check_output(['git', 'describe', '--tags', '--dirty', '--abbrev'], stderr=stdout) + subprocess.check_output(["git", "describe", "--tags", "--dirty", "--abbrev"], stderr=stdout) .decode() .strip() ) - if '-' in v: - bv = v[: v.index('-')] - bv = bv[: bv.rindex('.') + 1] + str(int(bv[bv.rindex('.') + 1 :]) + 1) - sempre = 'dirty' if v.endswith('-dirty') else 'commit' - pippre = 'alpha' if v.endswith('-dirty') else 'pre' - build = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip() + if "-" in v: + bv = v[: v.index("-")] + bv = bv[: bv.rindex(".") + 1] + str(int(bv[bv.rindex(".") + 1 :]) + 1) + sempre = "dirty" if v.endswith("-dirty") else "commit" + pippre = "alpha" if v.endswith("-dirty") else "pre" + build = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode().strip() number_since = ( - subprocess.check_output(['git', 'rev-list', v[: v.index('-')] + '..HEAD', '--count']).decode().strip() + subprocess.check_output(["git", "rev-list", v[: v.index("-")] + "..HEAD", "--count"]).decode().strip() ) - semver = bv + '-' + sempre + '+' + build + semver = bv + "-" + sempre + "+" + build pipver = bv + pippre + number_since - winver = v[: v.index('-')] + '.' + number_since + winver = v[: v.index("-")] + "." 
+ number_since else: semver = v pipver = v - winver = v + '.0' + winver = v + ".0" - with open('version', 'w') as f: - print('Semantic version is ' + semver) + with open("version", "w") as f: + print("Semantic version is " + semver) f.write(semver) - with open('pip_version', 'w') as f: - print('PIP version is ' + pipver) + with open("pip_version", "w") as f: + print("PIP version is " + pipver) f.write(pipver) - with open('win_version', 'w') as f: - print('Windows version is ' + winver) + with open("win_version", "w") as f: + print("Windows version is " + winver) f.write(winver) except Exception as e: - print('Error calling git') + print("Error calling git") print(e) From ea4aab76af7ff35923de76d78eabaf3e13a10ed7 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 02:16:27 -0500 Subject: [PATCH 23/44] Update black --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d38ebd27..a5d2a7b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,11 +16,11 @@ repos: rev: "5.13.2" hooks: - id: isort - name: isort (python) + name: isort types: [python] args: [--settings-file=.isort.cfg] - repo: https://github.com/psf/black - rev: 24.1.1 + rev: 24.2.0 hooks: - id: black args: ["--line-length=120"] From b1fa6d39a47d60679cf8b1a4523655caa9f73fba Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 02:49:59 -0500 Subject: [PATCH 24/44] Add formatter actions --- .github/workflows/formatter.yml | 23 +++++++++++++++++++++++ .github/workflows/pre-commit.yml | 2 +- 2 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/formatter.yml diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml new file mode 100644 index 00000000..0fede87a --- /dev/null +++ b/.github/workflows/formatter.yml @@ -0,0 +1,23 @@ +name: formatter + +on: + pull_request: + push: + +jobs: + isort: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: isort/isort-action@v1.1.0 + with: + configuration: "--settings-file=.isort.cfg" + requirements-files: "requirements.txt" + black: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: psf/black@stable + with: + options: "--line-length=120" + version: "24.2.0" diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 65080b90..2309a25e 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -5,7 +5,7 @@ on: push: env: - SKIP: pylint + SKIP: pylint,black,isort jobs: pre-commit: From 4a201e73322cddbec7cb401cb7c88056a21f1231 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 02:54:08 -0500 Subject: [PATCH 25/44] Spelling --- .github/workflows/formatter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index 0fede87a..4f7f2deb 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -1,4 +1,4 @@ -name: formatter +name: Formatter on: pull_request: From b878ca0d8eb0925e878ce9a98241a510a6c56c84 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 02:55:25 -0500 Subject: [PATCH 26/44] Testing action --- pros/cli/conductor.py | 2 
-- 1 file changed, 2 deletions(-) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index e8ca8e6c..352d3503 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -534,8 +534,6 @@ def remove_depot(name: str): _conductor.remove_depot(name) ui.echo(f"Removed depot {name}") - - @conductor.command("query-depots") @click.option("--url", is_flag=True) @default_options From 8cc43ad3f5effa281e61ed334a7dda696658eedc Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 03:02:38 -0500 Subject: [PATCH 27/44] Testing action --- .github/workflows/formatter.yml | 2 +- pros/cli/conductor.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index 4f7f2deb..bacc67c4 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -19,5 +19,5 @@ jobs: - uses: actions/checkout@v4.1.1 - uses: psf/black@stable with: - options: "--line-length=120" + options: "--line-length=120 --check --diff" version: "24.2.0" diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 352d3503..6fe62f87 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,11 +1,12 @@ import os.path -from itertools import groupby + +from pros.cli.common import * import pros.common.ui as ui import pros.conductor as c -from pros.cli.common import * from pros.conductor.templates import ExternalTemplate from pros.ga.analytics import analytics +from itertools import groupby @pros_root From 90e847d616763f6a85508f20f8d0b6e86a4f76f4 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 03:07:24 -0500 Subject: [PATCH 28/44] Testing action --- .github/workflows/formatter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index bacc67c4..86b65499 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -11,7 +11,7 @@ jobs: - uses: actions/checkout@v4.1.1 - uses: isort/isort-action@v1.1.0 with: - configuration: "--settings-file=.isort.cfg" + configuration: "--settings-file=.isort.cfg --check-only --diff" requirements-files: "requirements.txt" black: runs-on: ubuntu-latest From 7573f599d6f97d48dc31f2dfc7ce25b63c678917 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 03:09:35 -0500 Subject: [PATCH 29/44] Testing action --- .github/workflows/pre-commit.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 2309a25e..65080b90 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -5,7 +5,7 @@ on: push: env: - SKIP: pylint,black,isort + SKIP: pylint jobs: pre-commit: From 846f5561033fa34865d4f9e2e9880b12ee318609 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 03:15:28 -0500 Subject: [PATCH 30/44] Fix action --- .github/workflows/formatter.yml | 23 ----------------------- pros/cli/conductor.py | 7 ++++--- 2 files changed, 4 insertions(+), 26 deletions(-) delete mode 100644 .github/workflows/formatter.yml diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml deleted file mode 100644 index 86b65499..00000000 --- a/.github/workflows/formatter.yml +++ /dev/null @@ -1,23 
+0,0 @@ -name: Formatter - -on: - pull_request: - push: - -jobs: - isort: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - - uses: isort/isort-action@v1.1.0 - with: - configuration: "--settings-file=.isort.cfg --check-only --diff" - requirements-files: "requirements.txt" - black: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - - uses: psf/black@stable - with: - options: "--line-length=120 --check --diff" - version: "24.2.0" diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 6fe62f87..e8ca8e6c 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,12 +1,11 @@ import os.path +from itertools import groupby - -from pros.cli.common import * import pros.common.ui as ui import pros.conductor as c +from pros.cli.common import * from pros.conductor.templates import ExternalTemplate from pros.ga.analytics import analytics -from itertools import groupby @pros_root @@ -535,6 +534,8 @@ def remove_depot(name: str): _conductor.remove_depot(name) ui.echo(f"Removed depot {name}") + + @conductor.command("query-depots") @click.option("--url", is_flag=True) @default_options From bf7c3fb4833afd1b397cee93d540ee2d3a4b6509 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 21:34:29 -0500 Subject: [PATCH 31/44] Add auto commit --- .github/workflows/formatter.yml | 35 +++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .github/workflows/formatter.yml diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml new file mode 100644 index 00000000..62100fb4 --- /dev/null +++ b/.github/workflows/formatter.yml @@ -0,0 +1,35 @@ +name: Formatter + +on: + pull_request: + push: + +jobs: + isort: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4.1.1 + with: + ref: ${{ github.head_ref }} + - uses: isort/isort-action@v1.1.0 + with: + configuration: "--settings-file=.isort.cfg --check-only --diff" + requirements-files: "requirements.txt" + - uses: stefanzweifel/git-auto-commit-action@v5.0.0 + with: + commit_message: "Run isort" + black: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + with: + ref: ${{ github.head_ref }} + - uses: psf/black@stable + with: + options: "--line-length=120 --check --diff" + version: "24.2.0" + - uses: stefanzweifel/git-auto-commit-action@v5.0.0 + with: + commit_message: "Run black" From 68581bc031ab6b91db206182d3dd94abc236e742 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 21:36:16 -0500 Subject: [PATCH 32/44] Testing action --- pros/cli/conductor.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index e8ca8e6c..2fc0b54d 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,18 +1,17 @@ +from pros.cli.common import * import os.path from itertools import groupby +from pros.ga.analytics import analytics -import pros.common.ui as ui import pros.conductor as c -from pros.cli.common import * + from pros.conductor.templates import ExternalTemplate -from pros.ga.analytics import analytics +import pros.common.ui as ui @pros_root def conductor_cli(): pass - - @conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): From b714982c662f86415306a7affa53fc9adafe28ea Mon Sep 17 00:00:00 2001 From: 
Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 21:37:56 -0500 Subject: [PATCH 33/44] Testing action --- .github/workflows/formatter.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index 62100fb4..adfc0ca1 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -15,7 +15,7 @@ jobs: ref: ${{ github.head_ref }} - uses: isort/isort-action@v1.1.0 with: - configuration: "--settings-file=.isort.cfg --check-only --diff" + configuration: "--settings-file=.isort.cfg" requirements-files: "requirements.txt" - uses: stefanzweifel/git-auto-commit-action@v5.0.0 with: @@ -28,7 +28,7 @@ jobs: ref: ${{ github.head_ref }} - uses: psf/black@stable with: - options: "--line-length=120 --check --diff" + options: "--line-length=120" version: "24.2.0" - uses: stefanzweifel/git-auto-commit-action@v5.0.0 with: From f1a794ca6f18612adb0877a502e44a61b193b9e4 Mon Sep 17 00:00:00 2001 From: mayankpatibandla Date: Wed, 21 Feb 2024 02:38:27 +0000 Subject: [PATCH 34/44] Run black --- pros/cli/conductor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 2fc0b54d..9fde0dfc 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -12,6 +12,8 @@ @pros_root def conductor_cli(): pass + + @conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): From 004ca6bba426a10303dd6b24af0fd57641268d8d Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 21:48:05 -0500 Subject: [PATCH 35/44] Run synchronously --- .github/workflows/formatter.yml | 13 +- pros/cli/conductor.py | 1100 +++++++++++++++---------------- 2 files changed, 551 insertions(+), 562 deletions(-) diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index adfc0ca1..86bd3a76 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -5,7 +5,7 @@ on: push: jobs: - isort: + Formatter: runs-on: ubuntu-latest permissions: contents: write @@ -17,19 +17,10 @@ jobs: with: configuration: "--settings-file=.isort.cfg" requirements-files: "requirements.txt" - - uses: stefanzweifel/git-auto-commit-action@v5.0.0 - with: - commit_message: "Run isort" - black: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - with: - ref: ${{ github.head_ref }} - uses: psf/black@stable with: options: "--line-length=120" version: "24.2.0" - uses: stefanzweifel/git-auto-commit-action@v5.0.0 with: - commit_message: "Run black" + commit_message: "Format code with isort and black" diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index 9fde0dfc..bbca04c6 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,551 +1,549 @@ -from pros.cli.common import * -import os.path -from itertools import groupby -from pros.ga.analytics import analytics - -import pros.conductor as c - -from pros.conductor.templates import ExternalTemplate -import pros.common.ui as ui - - -@pros_root -def conductor_cli(): - pass - - -@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") -@default_options -def conductor(): - """ - Conductor is PROS's project management facility. It is responsible for obtaining - templates for which to create projects from. 
- - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - pass - - -@conductor.command( - aliases=["download"], - short_help="Fetch/Download a remote template", - context_settings={"ignore_unknown_options": True}, -) -@template_query(required=True) -@default_options -def fetch(query: c.BaseTemplate): - """ - Fetch/download a template from a depot. - - Only a template spec is required. A template spec is the name and version - of the template formatted as name@version (libblrs@1.0.0). Semantic version - ranges are accepted (e.g., libblrs@^1.0.0). The version parameter is also - optional (e.g., libblrs) - - Additional parameters are available according to the depot. - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("fetch-template") - template_file = None - if os.path.exists(query.identifier): - template_file = query.identifier - elif os.path.exists(query.name) and query.version is None: - template_file = query.name - elif query.metadata.get("origin", None) == "local": - if "location" not in query.metadata: - logger(__name__).error("--location option is required for the local depot. Specify --location ") - logger(__name__).debug(f"Query options provided: {query.metadata}") - return -1 - template_file = query.metadata["location"] - - if template_file and ( - os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) - ): - template = ExternalTemplate(template_file) - query.metadata["location"] = template_file - depot = c.LocalDepot() - logger(__name__).debug(f"Template file found: {template_file}") - else: - if template_file: - logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") - template = c.Conductor().resolve_template(query, allow_offline=False) - logger(__name__).debug(f"Template from resolved query: {template}") - if template is None: - logger(__name__).error(f"There are no templates matching {query}!") - return -1 - depot = c.Conductor().get_depot(template.metadata["origin"]) - logger(__name__).debug(f"Found depot: {depot}") - # query.metadata contain all of the extra args that also go to the depot. 
There's no way for us to determine - # whether the arguments are for the template or for the depot, so they share them - logger(__name__).debug(f"Additional depot and template args: {query.metadata}") - c.Conductor().fetch_template(depot, template, **query.metadata) - - -@conductor.command(context_settings={"ignore_unknown_options": True}) -@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") -@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") -@click.option( - "--download/--no-download", - "download_ok", - default=True, - help="Allow downloading templates or only allow local templates", -) -@click.option( - "--upgrade-user-files/--no-upgrade-user-files", - "force_user", - default=False, - help="Replace all user files in a template", -) -@click.option( - "--force", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@project_option() -@template_query(required=True) -@default_options -def apply(project: c.Project, query: c.BaseTemplate, **kwargs): - """ - Upgrade or install a template to a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("apply-template") - return c.Conductor().apply_template(project, identifier=query, **kwargs) - - -@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) -@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) -@click.option("--download/--no-download", "download_ok", default=True) -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@project_option() -@template_query(required=True) -@default_options -@click.pass_context -def install(ctx: click.Context, **kwargs): - """ - Install a library into a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("install-template") - return ctx.invoke(apply, install_ok=True, **kwargs) - - -@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) -@click.option("--install/--no-install", "install_ok", default=False) -@click.option("--download/--no-download", "download_ok", default=True) -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a 
template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@project_option() -@template_query(required=False) -@default_options -@click.pass_context -def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwargs): - """ - Upgrade a PROS project or one of its libraries - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("upgrade-project") - if not query.name: - for template in project.templates.keys(): - click.secho(f"Upgrading {template}", color="yellow") - q = c.BaseTemplate.create_query( - name=template, target=project.target, supported_kernels=project.templates["kernel"].version - ) - ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) - else: - ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) - - -@conductor.command("uninstall") -@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") -@project_option() -@template_query() -@default_options -def uninstall_template( - project: c.Project, - query: c.BaseTemplate, - remove_user: bool, - remove_empty_directories: bool = False, - no_make_clean: bool = False, -): - """ - Uninstall a template from a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("uninstall-template") - c.Conductor().remove_template( - project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories - ) - if no_make_clean: - with ui.Notification(): - project.compile(["clean"]) - - -@conductor.command("new-project", aliases=["new", "create-project"]) -@click.argument("path", type=click.Path()) -@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) -@click.argument("version", default="latest") -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-refresh", - is_flag=True, - default=False, - show_default=True, - help="Force update all remote depots, ignoring automatic update checks", -) -@click.option( - "--no-default-libs", - "no_default_libs", - default=False, - is_flag=True, - help="Do not install any default libraries after creating the project.", -) -@click.option( - "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" -) 
-@click.option( - "--build-cache", - is_flag=True, - default=None, - show_default=False, - help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@click.pass_context -@default_options -def new_project( - ctx: click.Context, - path: str, - target: str, - version: str, - force_user: bool = False, - force_system: bool = False, - no_default_libs: bool = False, - compile_after: bool = True, - build_cache: bool = None, - **kwargs, -): - """ - Create a new PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("new-project") - version_source = version.lower() == "latest" - if version.lower() == "latest" or not version: - version = ">0" - if not force_system and c.Project.find_project(path) is not None: - logger(__name__).error( - "A project already exists in this location at " - + c.Project.find_project(path) - + "! Delete it first. Are you creating a project in an existing one?", - extra={"sentry": False}, - ) - ctx.exit(-1) - try: - _conductor = c.Conductor() - if target is None: - target = _conductor.default_target - project = _conductor.new_project( - path, - target=target, - version=version, - version_source=version_source, - force_user=force_user, - force_system=force_system, - no_default_libs=no_default_libs, - **kwargs, - ) - ui.echo("New PROS Project was created:", output_machine=False) - ctx.invoke(info_project, project=project) - - if compile_after or build_cache: - with ui.Notification(): - ui.echo("Building project...") - exit_code = project.compile([], scan_build=build_cache) - if exit_code != 0: - logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) - raise click.ClickException("Failed to build") - - except Exception as e: - pros.common.logger(__name__).exception(e) - ctx.exit(-1) - - -@conductor.command( - "query-templates", - aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], - context_settings={"ignore_unknown_options": True}, -) -@click.option( - "--allow-offline/--no-offline", - "allow_offline", - default=True, - show_default=True, - help="(Dis)allow offline templates in the listing", -) -@click.option( - "--allow-online/--no-online", - "allow_online", - default=True, - show_default=True, - help="(Dis)allow online templates in the listing", -) -@click.option( - "--force-refresh", - is_flag=True, - default=False, - show_default=True, - help="Force update all remote depots, ignoring automatic update checks", -) -@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="View a list of early access templates", -) -@template_query(required=False) -@click.pass_context -@default_options -def query_templates( - ctx, - query: c.BaseTemplate, - allow_offline: bool, - allow_online: bool, - force_refresh: bool, - limit: int, - early_access: bool, -): - """ - Query local and remote templates based on a spec - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("query-templates") - if limit < 0: - limit = 15 - templates = 
c.Conductor().resolve_templates( - query, - allow_offline=allow_offline, - allow_online=allow_online, - force_refresh=force_refresh, - early_access=early_access, - ) - if early_access: - templates += c.Conductor().resolve_templates( - query, - allow_offline=allow_offline, - allow_online=allow_online, - force_refresh=force_refresh, - early_access=False, - ) - - render_templates = {} - for template in templates: - key = (template.identifier, template.origin) - if key in render_templates: - if isinstance(template, c.LocalTemplate): - render_templates[key]["local"] = True - else: - render_templates[key] = { - "name": template.name, - "version": template.version, - "location": template.origin, - "target": template.target, - "local": isinstance(template, c.LocalTemplate), - } - import semantic_version as semver - - render_templates = sorted( - render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True - ) - - # Impose the output limit for each library's templates - output_templates = [] - for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): - output_templates += list(g)[:limit] - ui.finalize("template-query", output_templates) - - -@conductor.command("info-project") -@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) -@project_option() -@default_options -def info_project(project: c.Project, ls_upgrades): - """ - Display information about a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("info-project") - from pros.conductor.project import ProjectReport - - report = ProjectReport(project) - _conductor = c.Conductor() - if ls_upgrades: - for template in report.project["templates"]: - import semantic_version as semver - - templates = _conductor.resolve_templates( - c.BaseTemplate.create_query( - name=template["name"], version=f'>{template["version"]}', target=project.target - ) - ) - template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) - - ui.finalize("project-report", report) - - -@conductor.command("add-depot") -@click.argument("name") -@click.argument("url") -@default_options -def add_depot(name: str, url: str): - """ - Add a depot - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - _conductor.add_depot(name, url) - - ui.echo(f"Added depot {name} from {url}") - - -@conductor.command("remove-depot") -@click.argument("name") -@default_options -def remove_depot(name: str): - """ - Remove a depot - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - _conductor.remove_depot(name) - - ui.echo(f"Removed depot {name}") - - -@conductor.command("query-depots") -@click.option("--url", is_flag=True) -@default_options -def query_depots(url: bool): - """ - Gets all the stored depots - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo("\n".join(_conductor.query_depots(url)) + "\n") +from pros.cli.common import * +import os.path +from itertools import groupby +from pros.ga.analytics import analytics + +import pros.conductor as c + +from pros.conductor.templates import ExternalTemplate +import pros.common.ui as ui + + +@pros_root +def conductor_cli(): + pass +@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], 
short_help="Perform project management for PROS") +@default_options +def conductor(): + """ + Conductor is PROS's project management facility. It is responsible for obtaining + templates for which to create projects from. + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + pass + + +@conductor.command( + aliases=["download"], + short_help="Fetch/Download a remote template", + context_settings={"ignore_unknown_options": True}, +) +@template_query(required=True) +@default_options +def fetch(query: c.BaseTemplate): + """ + Fetch/download a template from a depot. + + Only a template spec is required. A template spec is the name and version + of the template formatted as name@version (libblrs@1.0.0). Semantic version + ranges are accepted (e.g., libblrs@^1.0.0). The version parameter is also + optional (e.g., libblrs) + + Additional parameters are available according to the depot. + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("fetch-template") + template_file = None + if os.path.exists(query.identifier): + template_file = query.identifier + elif os.path.exists(query.name) and query.version is None: + template_file = query.name + elif query.metadata.get("origin", None) == "local": + if "location" not in query.metadata: + logger(__name__).error("--location option is required for the local depot. Specify --location ") + logger(__name__).debug(f"Query options provided: {query.metadata}") + return -1 + template_file = query.metadata["location"] + + if template_file and ( + os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) + ): + template = ExternalTemplate(template_file) + query.metadata["location"] = template_file + depot = c.LocalDepot() + logger(__name__).debug(f"Template file found: {template_file}") + else: + if template_file: + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") + template = c.Conductor().resolve_template(query, allow_offline=False) + logger(__name__).debug(f"Template from resolved query: {template}") + if template is None: + logger(__name__).error(f"There are no templates matching {query}!") + return -1 + depot = c.Conductor().get_depot(template.metadata["origin"]) + logger(__name__).debug(f"Found depot: {depot}") + # query.metadata contain all of the extra args that also go to the depot. 
There's no way for us to determine + # whether the arguments are for the template or for the depot, so they share them + logger(__name__).debug(f"Additional depot and template args: {query.metadata}") + c.Conductor().fetch_template(depot, template, **query.metadata) + + +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") +@click.option( + "--download/--no-download", + "download_ok", + default=True, + help="Allow downloading templates or only allow local templates", +) +@click.option( + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", + default=False, + help="Replace all user files in a template", +) +@click.option( + "--force", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@project_option() +@template_query(required=True) +@default_options +def apply(project: c.Project, query: c.BaseTemplate, **kwargs): + """ + Upgrade or install a template to a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("apply-template") + return c.Conductor().apply_template(project, identifier=query, **kwargs) + + +@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@project_option() +@template_query(required=True) +@default_options +@click.pass_context +def install(ctx: click.Context, **kwargs): + """ + Install a library into a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("install-template") + return ctx.invoke(apply, install_ok=True, **kwargs) + + +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a 
template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@project_option() +@template_query(required=False) +@default_options +@click.pass_context +def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwargs): + """ + Upgrade a PROS project or one of its libraries + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("upgrade-project") + if not query.name: + for template in project.templates.keys(): + click.secho(f"Upgrading {template}", color="yellow") + q = c.BaseTemplate.create_query( + name=template, target=project.target, supported_kernels=project.templates["kernel"].version + ) + ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) + else: + ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) + + +@conductor.command("uninstall") +@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") +@project_option() +@template_query() +@default_options +def uninstall_template( + project: c.Project, + query: c.BaseTemplate, + remove_user: bool, + remove_empty_directories: bool = False, + no_make_clean: bool = False, +): + """ + Uninstall a template from a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("uninstall-template") + c.Conductor().remove_template( + project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) + if no_make_clean: + with ui.Notification(): + project.compile(["clean"]) + + +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option( + "--no-default-libs", + "no_default_libs", + default=False, + is_flag=True, + help="Do not install any default libraries after creating the project.", +) +@click.option( + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" +) 
+@click.option( + "--build-cache", + is_flag=True, + default=None, + show_default=False, + help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@click.pass_context +@default_options +def new_project( + ctx: click.Context, + path: str, + target: str, + version: str, + force_user: bool = False, + force_system: bool = False, + no_default_libs: bool = False, + compile_after: bool = True, + build_cache: bool = None, + **kwargs, +): + """ + Create a new PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("new-project") + version_source = version.lower() == "latest" + if version.lower() == "latest" or not version: + version = ">0" + if not force_system and c.Project.find_project(path) is not None: + logger(__name__).error( + "A project already exists in this location at " + + c.Project.find_project(path) + + "! Delete it first. Are you creating a project in an existing one?", + extra={"sentry": False}, + ) + ctx.exit(-1) + try: + _conductor = c.Conductor() + if target is None: + target = _conductor.default_target + project = _conductor.new_project( + path, + target=target, + version=version, + version_source=version_source, + force_user=force_user, + force_system=force_system, + no_default_libs=no_default_libs, + **kwargs, + ) + ui.echo("New PROS Project was created:", output_machine=False) + ctx.invoke(info_project, project=project) + + if compile_after or build_cache: + with ui.Notification(): + ui.echo("Building project...") + exit_code = project.compile([], scan_build=build_cache) + if exit_code != 0: + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") + + except Exception as e: + pros.common.logger(__name__).exception(e) + ctx.exit(-1) + + +@conductor.command( + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], + context_settings={"ignore_unknown_options": True}, +) +@click.option( + "--allow-offline/--no-offline", + "allow_offline", + default=True, + show_default=True, + help="(Dis)allow offline templates in the listing", +) +@click.option( + "--allow-online/--no-online", + "allow_online", + default=True, + show_default=True, + help="(Dis)allow online templates in the listing", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="View a list of early access templates", +) +@template_query(required=False) +@click.pass_context +@default_options +def query_templates( + ctx, + query: c.BaseTemplate, + allow_offline: bool, + allow_online: bool, + force_refresh: bool, + limit: int, + early_access: bool, +): + """ + Query local and remote templates based on a spec + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("query-templates") + if limit < 0: + limit = 15 + templates = 
c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=early_access, + ) + if early_access: + templates += c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=False, + ) + + render_templates = {} + for template in templates: + key = (template.identifier, template.origin) + if key in render_templates: + if isinstance(template, c.LocalTemplate): + render_templates[key]["local"] = True + else: + render_templates[key] = { + "name": template.name, + "version": template.version, + "location": template.origin, + "target": template.target, + "local": isinstance(template, c.LocalTemplate), + } + import semantic_version as semver + + render_templates = sorted( + render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True + ) + + # Impose the output limit for each library's templates + output_templates = [] + for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): + output_templates += list(g)[:limit] + ui.finalize("template-query", output_templates) + + +@conductor.command("info-project") +@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) +@project_option() +@default_options +def info_project(project: c.Project, ls_upgrades): + """ + Display information about a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("info-project") + from pros.conductor.project import ProjectReport + + report = ProjectReport(project) + _conductor = c.Conductor() + if ls_upgrades: + for template in report.project["templates"]: + import semantic_version as semver + + templates = _conductor.resolve_templates( + c.BaseTemplate.create_query( + name=template["name"], version=f'>{template["version"]}', target=project.target + ) + ) + template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) + + ui.finalize("project-report", report) + + +@conductor.command("add-depot") +@click.argument("name") +@click.argument("url") +@default_options +def add_depot(name: str, url: str): + """ + Add a depot + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + _conductor.add_depot(name, url) + + ui.echo(f"Added depot {name} from {url}") + + +@conductor.command("remove-depot") +@click.argument("name") +@default_options +def remove_depot(name: str): + """ + Remove a depot + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + _conductor.remove_depot(name) + + ui.echo(f"Removed depot {name}") + + +@conductor.command("query-depots") +@click.option("--url", is_flag=True) +@default_options +def query_depots(url: bool): + """ + Gets all the stored depots + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") From 0c9c79613910b22e28021fb169ade1a85b93df25 Mon Sep 17 00:00:00 2001 From: mayankpatibandla Date: Wed, 21 Feb 2024 02:48:39 +0000 Subject: [PATCH 36/44] Format code with isort and black --- pros/cli/conductor.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index bbca04c6..f3cc6d3c 
100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,17 +1,18 @@ -from pros.cli.common import * import os.path from itertools import groupby -from pros.ga.analytics import analytics +import pros.common.ui as ui import pros.conductor as c - +from pros.cli.common import * from pros.conductor.templates import ExternalTemplate -import pros.common.ui as ui +from pros.ga.analytics import analytics @pros_root def conductor_cli(): pass + + @conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): From 6b72a7a00e2d4cf1cbf778f6940c75c2ef760289 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 22:23:52 -0500 Subject: [PATCH 37/44] Testing action --- .github/workflows/pre-commit.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 65080b90..9b06d134 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -5,7 +5,7 @@ on: push: env: - SKIP: pylint + SKIP: pylint,isort,black jobs: pre-commit: From e54887f2d3eb199817744203298f93c42c200cbe Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 22:27:11 -0500 Subject: [PATCH 38/44] Testing action --- pros/cli/conductor.py | 1098 ++++++++++++++++++++--------------------- 1 file changed, 548 insertions(+), 550 deletions(-) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index f3cc6d3c..c6cf6cba 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,550 +1,548 @@ -import os.path -from itertools import groupby - -import pros.common.ui as ui -import pros.conductor as c -from pros.cli.common import * -from pros.conductor.templates import ExternalTemplate -from pros.ga.analytics import analytics - - -@pros_root -def conductor_cli(): - pass - - -@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") -@default_options -def conductor(): - """ - Conductor is PROS's project management facility. It is responsible for obtaining - templates for which to create projects from. - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - pass - - -@conductor.command( - aliases=["download"], - short_help="Fetch/Download a remote template", - context_settings={"ignore_unknown_options": True}, -) -@template_query(required=True) -@default_options -def fetch(query: c.BaseTemplate): - """ - Fetch/download a template from a depot. - - Only a template spec is required. A template spec is the name and version - of the template formatted as name@version (libblrs@1.0.0). Semantic version - ranges are accepted (e.g., libblrs@^1.0.0). The version parameter is also - optional (e.g., libblrs) - - Additional parameters are available according to the depot. - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("fetch-template") - template_file = None - if os.path.exists(query.identifier): - template_file = query.identifier - elif os.path.exists(query.name) and query.version is None: - template_file = query.name - elif query.metadata.get("origin", None) == "local": - if "location" not in query.metadata: - logger(__name__).error("--location option is required for the local depot. 
Specify --location ") - logger(__name__).debug(f"Query options provided: {query.metadata}") - return -1 - template_file = query.metadata["location"] - - if template_file and ( - os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) - ): - template = ExternalTemplate(template_file) - query.metadata["location"] = template_file - depot = c.LocalDepot() - logger(__name__).debug(f"Template file found: {template_file}") - else: - if template_file: - logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") - template = c.Conductor().resolve_template(query, allow_offline=False) - logger(__name__).debug(f"Template from resolved query: {template}") - if template is None: - logger(__name__).error(f"There are no templates matching {query}!") - return -1 - depot = c.Conductor().get_depot(template.metadata["origin"]) - logger(__name__).debug(f"Found depot: {depot}") - # query.metadata contain all of the extra args that also go to the depot. There's no way for us to determine - # whether the arguments are for the template or for the depot, so they share them - logger(__name__).debug(f"Additional depot and template args: {query.metadata}") - c.Conductor().fetch_template(depot, template, **query.metadata) - - -@conductor.command(context_settings={"ignore_unknown_options": True}) -@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") -@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") -@click.option( - "--download/--no-download", - "download_ok", - default=True, - help="Allow downloading templates or only allow local templates", -) -@click.option( - "--upgrade-user-files/--no-upgrade-user-files", - "force_user", - default=False, - help="Replace all user files in a template", -) -@click.option( - "--force", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@project_option() -@template_query(required=True) -@default_options -def apply(project: c.Project, query: c.BaseTemplate, **kwargs): - """ - Upgrade or install a template to a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("apply-template") - return c.Conductor().apply_template(project, identifier=query, **kwargs) - - -@conductor.command(aliases=["i", "in"], context_settings={"ignore_unknown_options": True}) -@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) -@click.option("--download/--no-download", "download_ok", default=True) -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - 
"--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@project_option() -@template_query(required=True) -@default_options -@click.pass_context -def install(ctx: click.Context, **kwargs): - """ - Install a library into a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("install-template") - return ctx.invoke(apply, install_ok=True, **kwargs) - - -@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) -@click.option("--install/--no-install", "install_ok", default=False) -@click.option("--download/--no-download", "download_ok", default=True) -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-apply", - "force_apply", - default=False, - is_flag=True, - help="Force apply the template, disregarding if the template is already installed.", -) -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@project_option() -@template_query(required=False) -@default_options -@click.pass_context -def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwargs): - """ - Upgrade a PROS project or one of its libraries - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("upgrade-project") - if not query.name: - for template in project.templates.keys(): - click.secho(f"Upgrading {template}", color="yellow") - q = c.BaseTemplate.create_query( - name=template, target=project.target, supported_kernels=project.templates["kernel"].version - ) - ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) - else: - ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) - - -@conductor.command("uninstall") -@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") -@click.option( - "--remove-empty-dirs/--no-remove-empty-dirs", - "remove_empty_directories", - is_flag=True, - default=True, - help="Remove empty directories when removing files", -) -@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") -@project_option() -@template_query() -@default_options -def uninstall_template( - project: c.Project, - query: c.BaseTemplate, - remove_user: bool, - remove_empty_directories: bool = False, - no_make_clean: bool = False, -): - """ - Uninstall a template from a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("uninstall-template") - c.Conductor().remove_template( - project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories - ) - if no_make_clean: - with ui.Notification(): - 
project.compile(["clean"]) - - -@conductor.command("new-project", aliases=["new", "create-project"]) -@click.argument("path", type=click.Path()) -@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) -@click.argument("version", default="latest") -@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") -@click.option( - "--force-system", - "-f", - "force_system", - default=False, - is_flag=True, - help="Force all system files to be inserted into the project", -) -@click.option( - "--force-refresh", - is_flag=True, - default=False, - show_default=True, - help="Force update all remote depots, ignoring automatic update checks", -) -@click.option( - "--no-default-libs", - "no_default_libs", - default=False, - is_flag=True, - help="Do not install any default libraries after creating the project.", -) -@click.option( - "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" -) -@click.option( - "--build-cache", - is_flag=True, - default=None, - show_default=False, - help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", -) -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="Create a project using the PROS 4 kernel", -) -@click.pass_context -@default_options -def new_project( - ctx: click.Context, - path: str, - target: str, - version: str, - force_user: bool = False, - force_system: bool = False, - no_default_libs: bool = False, - compile_after: bool = True, - build_cache: bool = None, - **kwargs, -): - """ - Create a new PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("new-project") - version_source = version.lower() == "latest" - if version.lower() == "latest" or not version: - version = ">0" - if not force_system and c.Project.find_project(path) is not None: - logger(__name__).error( - "A project already exists in this location at " - + c.Project.find_project(path) - + "! Delete it first. 
Are you creating a project in an existing one?", - extra={"sentry": False}, - ) - ctx.exit(-1) - try: - _conductor = c.Conductor() - if target is None: - target = _conductor.default_target - project = _conductor.new_project( - path, - target=target, - version=version, - version_source=version_source, - force_user=force_user, - force_system=force_system, - no_default_libs=no_default_libs, - **kwargs, - ) - ui.echo("New PROS Project was created:", output_machine=False) - ctx.invoke(info_project, project=project) - - if compile_after or build_cache: - with ui.Notification(): - ui.echo("Building project...") - exit_code = project.compile([], scan_build=build_cache) - if exit_code != 0: - logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) - raise click.ClickException("Failed to build") - - except Exception as e: - pros.common.logger(__name__).exception(e) - ctx.exit(-1) - - -@conductor.command( - "query-templates", - aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], - context_settings={"ignore_unknown_options": True}, -) -@click.option( - "--allow-offline/--no-offline", - "allow_offline", - default=True, - show_default=True, - help="(Dis)allow offline templates in the listing", -) -@click.option( - "--allow-online/--no-online", - "allow_online", - default=True, - show_default=True, - help="(Dis)allow online templates in the listing", -) -@click.option( - "--force-refresh", - is_flag=True, - default=False, - show_default=True, - help="Force update all remote depots, ignoring automatic update checks", -) -@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") -@click.option( - "--early-access/--disable-early-access", - "--early/--disable-early", - "-ea/-dea", - "early_access", - "--beta/--disable-beta", - default=None, - help="View a list of early access templates", -) -@template_query(required=False) -@click.pass_context -@default_options -def query_templates( - ctx, - query: c.BaseTemplate, - allow_offline: bool, - allow_online: bool, - force_refresh: bool, - limit: int, - early_access: bool, -): - """ - Query local and remote templates based on a spec - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("query-templates") - if limit < 0: - limit = 15 - templates = c.Conductor().resolve_templates( - query, - allow_offline=allow_offline, - allow_online=allow_online, - force_refresh=force_refresh, - early_access=early_access, - ) - if early_access: - templates += c.Conductor().resolve_templates( - query, - allow_offline=allow_offline, - allow_online=allow_online, - force_refresh=force_refresh, - early_access=False, - ) - - render_templates = {} - for template in templates: - key = (template.identifier, template.origin) - if key in render_templates: - if isinstance(template, c.LocalTemplate): - render_templates[key]["local"] = True - else: - render_templates[key] = { - "name": template.name, - "version": template.version, - "location": template.origin, - "target": template.target, - "local": isinstance(template, c.LocalTemplate), - } - import semantic_version as semver - - render_templates = sorted( - render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True - ) - - # Impose the output limit for each library's templates - output_templates = [] - for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): - output_templates += 
list(g)[:limit] - ui.finalize("template-query", output_templates) - - -@conductor.command("info-project") -@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) -@project_option() -@default_options -def info_project(project: c.Project, ls_upgrades): - """ - Display information about a PROS project - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - analytics.send("info-project") - from pros.conductor.project import ProjectReport - - report = ProjectReport(project) - _conductor = c.Conductor() - if ls_upgrades: - for template in report.project["templates"]: - import semantic_version as semver - - templates = _conductor.resolve_templates( - c.BaseTemplate.create_query( - name=template["name"], version=f'>{template["version"]}', target=project.target - ) - ) - template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) - - ui.finalize("project-report", report) - - -@conductor.command("add-depot") -@click.argument("name") -@click.argument("url") -@default_options -def add_depot(name: str, url: str): - """ - Add a depot - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - _conductor.add_depot(name, url) - - ui.echo(f"Added depot {name} from {url}") - - -@conductor.command("remove-depot") -@click.argument("name") -@default_options -def remove_depot(name: str): - """ - Remove a depot - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - _conductor.remove_depot(name) - - ui.echo(f"Removed depot {name}") - - -@conductor.command("query-depots") -@click.option("--url", is_flag=True) -@default_options -def query_depots(url: bool): - """ - Gets all the stored depots - - Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more - """ - _conductor = c.Conductor() - ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo("\n".join(_conductor.query_depots(url)) + "\n") +import os.path +from itertools import groupby +from pros.ga.analytics import analytics + +import pros.conductor as c +from pros.cli.common import * +from pros.conductor.templates import ExternalTemplate + +import pros.common.ui as ui + +@pros_root +def conductor_cli(): + pass +@conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") +@default_options +def conductor(): + """ + Conductor is PROS's project management facility. It is responsible for obtaining + templates for which to create projects from. + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + pass + + +@conductor.command( + aliases=["download"], + short_help="Fetch/Download a remote template", + context_settings={"ignore_unknown_options": True}, +) +@template_query(required=True) +@default_options +def fetch(query: c.BaseTemplate): + """ + Fetch/download a template from a depot. + + Only a template spec is required. A template spec is the name and version + of the template formatted as name@version (libblrs@1.0.0). Semantic version + ranges are accepted (e.g., libblrs@^1.0.0). The version parameter is also + optional (e.g., libblrs) + + Additional parameters are available according to the depot. 
+ + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("fetch-template") + template_file = None + if os.path.exists(query.identifier): + template_file = query.identifier + elif os.path.exists(query.name) and query.version is None: + template_file = query.name + elif query.metadata.get("origin", None) == "local": + if "location" not in query.metadata: + logger(__name__).error("--location option is required for the local depot. Specify --location ") + logger(__name__).debug(f"Query options provided: {query.metadata}") + return -1 + template_file = query.metadata["location"] + + if template_file and ( + os.path.splitext(template_file)[1] in [".zip"] or os.path.exists(os.path.join(template_file, "template.pros")) + ): + template = ExternalTemplate(template_file) + query.metadata["location"] = template_file + depot = c.LocalDepot() + logger(__name__).debug(f"Template file found: {template_file}") + else: + if template_file: + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") + template = c.Conductor().resolve_template(query, allow_offline=False) + logger(__name__).debug(f"Template from resolved query: {template}") + if template is None: + logger(__name__).error(f"There are no templates matching {query}!") + return -1 + depot = c.Conductor().get_depot(template.metadata["origin"]) + logger(__name__).debug(f"Found depot: {depot}") + # query.metadata contain all of the extra args that also go to the depot. There's no way for us to determine + # whether the arguments are for the template or for the depot, so they share them + logger(__name__).debug(f"Additional depot and template args: {query.metadata}") + c.Conductor().fetch_template(depot, template, **query.metadata) + + +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") +@click.option( + "--download/--no-download", + "download_ok", + default=True, + help="Allow downloading templates or only allow local templates", +) +@click.option( + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", + default=False, + help="Replace all user files in a template", +) +@click.option( + "--force", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@project_option() +@template_query(required=True) +@default_options +def apply(project: c.Project, query: c.BaseTemplate, **kwargs): + """ + Upgrade or install a template to a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("apply-template") + return c.Conductor().apply_template(project, identifier=query, **kwargs) + + +@conductor.command(aliases=["i", "in"], 
context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@project_option() +@template_query(required=True) +@default_options +@click.pass_context +def install(ctx: click.Context, **kwargs): + """ + Install a library into a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("install-template") + return ctx.invoke(apply, install_ok=True, **kwargs) + + +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@project_option() +@template_query(required=False) +@default_options +@click.pass_context +def upgrade(ctx: click.Context, project: c.Project, query: c.BaseTemplate, **kwargs): + """ + Upgrade a PROS project or one of its libraries + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("upgrade-project") + if not query.name: + for template in project.templates.keys(): + click.secho(f"Upgrading {template}", color="yellow") + q = c.BaseTemplate.create_query( + name=template, target=project.target, supported_kernels=project.templates["kernel"].version + ) + ctx.invoke(apply, upgrade_ok=True, project=project, query=q, **kwargs) + else: + ctx.invoke(apply, project=project, query=query, upgrade_ok=True, **kwargs) + + +@conductor.command("uninstall") +@click.option("--remove-user", is_flag=True, default=False, help="Also remove user files") +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option("--no-make-clean", is_flag=True, default=True, help="Do not run make clean after removing") +@project_option() +@template_query() +@default_options +def uninstall_template( + project: 
c.Project, + query: c.BaseTemplate, + remove_user: bool, + remove_empty_directories: bool = False, + no_make_clean: bool = False, +): + """ + Uninstall a template from a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("uninstall-template") + c.Conductor().remove_template( + project, query, remove_user=remove_user, remove_empty_directories=remove_empty_directories + ) + if no_make_clean: + with ui.Notification(): + project.compile(["clean"]) + + +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option( + "--no-default-libs", + "no_default_libs", + default=False, + is_flag=True, + help="Do not install any default libraries after creating the project.", +) +@click.option( + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" +) +@click.option( + "--build-cache", + is_flag=True, + default=None, + show_default=False, + help="Build compile commands cache after creation. Overrides --compile-after if both are specified.", +) +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) +@click.pass_context +@default_options +def new_project( + ctx: click.Context, + path: str, + target: str, + version: str, + force_user: bool = False, + force_system: bool = False, + no_default_libs: bool = False, + compile_after: bool = True, + build_cache: bool = None, + **kwargs, +): + """ + Create a new PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("new-project") + version_source = version.lower() == "latest" + if version.lower() == "latest" or not version: + version = ">0" + if not force_system and c.Project.find_project(path) is not None: + logger(__name__).error( + "A project already exists in this location at " + + c.Project.find_project(path) + + "! Delete it first. 
Are you creating a project in an existing one?", + extra={"sentry": False}, + ) + ctx.exit(-1) + try: + _conductor = c.Conductor() + if target is None: + target = _conductor.default_target + project = _conductor.new_project( + path, + target=target, + version=version, + version_source=version_source, + force_user=force_user, + force_system=force_system, + no_default_libs=no_default_libs, + **kwargs, + ) + ui.echo("New PROS Project was created:", output_machine=False) + ctx.invoke(info_project, project=project) + + if compile_after or build_cache: + with ui.Notification(): + ui.echo("Building project...") + exit_code = project.compile([], scan_build=build_cache) + if exit_code != 0: + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") + + except Exception as e: + pros.common.logger(__name__).exception(e) + ctx.exit(-1) + + +@conductor.command( + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates"], + context_settings={"ignore_unknown_options": True}, +) +@click.option( + "--allow-offline/--no-offline", + "allow_offline", + default=True, + show_default=True, + help="(Dis)allow offline templates in the listing", +) +@click.option( + "--allow-online/--no-online", + "allow_online", + default=True, + show_default=True, + help="(Dis)allow online templates in the listing", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") +@click.option( + "--early-access/--disable-early-access", + "--early/--disable-early", + "-ea/-dea", + "early_access", + "--beta/--disable-beta", + default=None, + help="View a list of early access templates", +) +@template_query(required=False) +@click.pass_context +@default_options +def query_templates( + ctx, + query: c.BaseTemplate, + allow_offline: bool, + allow_online: bool, + force_refresh: bool, + limit: int, + early_access: bool, +): + """ + Query local and remote templates based on a spec + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("query-templates") + if limit < 0: + limit = 15 + templates = c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=early_access, + ) + if early_access: + templates += c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=False, + ) + + render_templates = {} + for template in templates: + key = (template.identifier, template.origin) + if key in render_templates: + if isinstance(template, c.LocalTemplate): + render_templates[key]["local"] = True + else: + render_templates[key] = { + "name": template.name, + "version": template.version, + "location": template.origin, + "target": template.target, + "local": isinstance(template, c.LocalTemplate), + } + import semantic_version as semver + + render_templates = sorted( + render_templates.values(), key=lambda k: (k["name"], semver.Version(k["version"]), k["local"]), reverse=True + ) + + # Impose the output limit for each library's templates + output_templates = [] + for _, g in groupby(render_templates, key=lambda t: t["name"] + t["target"]): + output_templates += 
list(g)[:limit] + ui.finalize("template-query", output_templates) + + +@conductor.command("info-project") +@click.option("--ls-upgrades/--no-ls-upgrades", "ls_upgrades", default=False) +@project_option() +@default_options +def info_project(project: c.Project, ls_upgrades): + """ + Display information about a PROS project + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + analytics.send("info-project") + from pros.conductor.project import ProjectReport + + report = ProjectReport(project) + _conductor = c.Conductor() + if ls_upgrades: + for template in report.project["templates"]: + import semantic_version as semver + + templates = _conductor.resolve_templates( + c.BaseTemplate.create_query( + name=template["name"], version=f'>{template["version"]}', target=project.target + ) + ) + template["upgrades"] = sorted({t.version for t in templates}, key=lambda v: semver.Version(v), reverse=True) + + ui.finalize("project-report", report) + + +@conductor.command("add-depot") +@click.argument("name") +@click.argument("url") +@default_options +def add_depot(name: str, url: str): + """ + Add a depot + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + _conductor.add_depot(name, url) + + ui.echo(f"Added depot {name} from {url}") + + +@conductor.command("remove-depot") +@click.argument("name") +@default_options +def remove_depot(name: str): + """ + Remove a depot + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + _conductor.remove_depot(name) + + ui.echo(f"Removed depot {name}") + + +@conductor.command("query-depots") +@click.option("--url", is_flag=True) +@default_options +def query_depots(url: bool): + """ + Gets all the stored depots + + Visit https://pros.cs.purdue.edu/v5/cli/conductor.html to learn more + """ + _conductor = c.Conductor() + ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") From 3cb2f74980d5dddae0744468359de20b935b1787 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 20 Feb 2024 22:33:31 -0500 Subject: [PATCH 39/44] Consolidate --- .github/workflows/formatter.yml | 26 --------------- .github/workflows/lint-format.yml | 54 +++++++++++++++++++++++++++++++ .github/workflows/pre-commit.yml | 16 --------- .github/workflows/pylint.yml | 26 --------------- 4 files changed, 54 insertions(+), 68 deletions(-) delete mode 100644 .github/workflows/formatter.yml create mode 100644 .github/workflows/lint-format.yml delete mode 100644 .github/workflows/pre-commit.yml delete mode 100644 .github/workflows/pylint.yml diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml deleted file mode 100644 index 86bd3a76..00000000 --- a/.github/workflows/formatter.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Formatter - -on: - pull_request: - push: - -jobs: - Formatter: - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - uses: actions/checkout@v4.1.1 - with: - ref: ${{ github.head_ref }} - - uses: isort/isort-action@v1.1.0 - with: - configuration: "--settings-file=.isort.cfg" - requirements-files: "requirements.txt" - - uses: psf/black@stable - with: - options: "--line-length=120" - version: "24.2.0" - - uses: stefanzweifel/git-auto-commit-action@v5.0.0 - with: - commit_message: "Format code with isort and black" diff --git a/.github/workflows/lint-format.yml 
b/.github/workflows/lint-format.yml new file mode 100644 index 00000000..36360ded --- /dev/null +++ b/.github/workflows/lint-format.yml @@ -0,0 +1,54 @@ +name: Formatter + +on: + pull_request: + push: + +env: + SKIP: pylint,isort,black + +jobs: + Formatter: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4.1.1 + with: + ref: ${{ github.head_ref }} + - uses: isort/isort-action@v1.1.0 + with: + configuration: "--settings-file=.isort.cfg" + requirements-files: "requirements.txt" + - uses: psf/black@stable + with: + options: "--line-length=120" + version: "24.2.0" + - uses: stefanzweifel/git-auto-commit-action@v5.0.0 + with: + commit_message: "Format code with isort and black" + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4.1.1 + - uses: actions/setup-python@v5.0.0 + - uses: pre-commit/action@v3.0.0 + Pylint: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v4.1.1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5.0.0 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml deleted file mode 100644 index 9b06d134..00000000 --- a/.github/workflows/pre-commit.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: pre-commit - -on: - pull_request: - push: - -env: - SKIP: pylint,isort,black - -jobs: - pre-commit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - - uses: actions/setup-python@v5.0.0 - - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml deleted file mode 100644 index 5af7078b..00000000 --- a/.github/workflows/pylint.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Pylint - -on: - pull_request: - push: - -jobs: - Pylint: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: ["3.10"] - steps: - - uses: actions/checkout@v4.1.1 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5.0.0 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Analysing the code with pylint - run: | - pylint $(git ls-files '*.py') --rcfile=$(git ls-files '.pylintrc') From d378befbb39e961cb45ca967e81daf4ef71b49dc Mon Sep 17 00:00:00 2001 From: mayankpatibandla Date: Wed, 21 Feb 2024 03:34:07 +0000 Subject: [PATCH 40/44] Format code with isort and black --- pros/cli/conductor.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index c6cf6cba..e8ca8e6c 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -1,16 +1,18 @@ import os.path from itertools import groupby -from pros.ga.analytics import analytics +import pros.common.ui as ui import pros.conductor as c from pros.cli.common import * from pros.conductor.templates import ExternalTemplate +from pros.ga.analytics import analytics -import pros.common.ui as ui @pros_root def conductor_cli(): pass + + @conductor_cli.group(cls=PROSGroup, aliases=["cond", "c", "conduct"], short_help="Perform project management for PROS") @default_options def conductor(): From 
4b2db8bbacb367907426d400f78927a517cf06ab Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Mon, 18 Mar 2024 23:52:19 -0400 Subject: [PATCH 41/44] Update black --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a5d2a7b6..5fc5f6a4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: types: [python] args: [--settings-file=.isort.cfg] - repo: https://github.com/psf/black - rev: 24.2.0 + rev: 24.3.0 hooks: - id: black args: ["--line-length=120"] From 5b80149b618518b34ff44124b05c5187c87155ce Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 19 Mar 2024 01:06:12 -0400 Subject: [PATCH 42/44] Update black in workflow --- .github/workflows/lint-format.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-format.yml b/.github/workflows/lint-format.yml index 36360ded..709c5f70 100644 --- a/.github/workflows/lint-format.yml +++ b/.github/workflows/lint-format.yml @@ -23,7 +23,7 @@ jobs: - uses: psf/black@stable with: options: "--line-length=120" - version: "24.2.0" + version: "24.3.0" - uses: stefanzweifel/git-auto-commit-action@v5.0.0 with: commit_message: "Format code with isort and black" From b5ee8a464385ca0ae029690abb636e719c29e037 Mon Sep 17 00:00:00 2001 From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:46:26 -0400 Subject: [PATCH 43/44] Formatter --- pros/cli/build.py | 12 +- pros/cli/common.py | 37 ++-- pros/cli/conductor.py | 262 +++++++++++++++++++++-------- pros/cli/main.py | 52 +++--- pros/cli/upload.py | 6 +- pros/cli/v5_utils.py | 22 ++- pros/conductor/conductor.py | 111 ++++++------ pros/conductor/project/__init__.py | 17 +- 8 files changed, 340 insertions(+), 179 deletions(-) diff --git a/pros/cli/build.py b/pros/cli/build.py index 845bd0d2..fcf512d9 100644 --- a/pros/cli/build.py +++ b/pros/cli/build.py @@ -27,12 +27,12 @@ def make(project: c.Project, build_args): analytics.send("make") exit_code = project.compile(build_args) if exit_code != 0: - if sys.platform == 'win32': + if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code @@ -82,10 +82,10 @@ def build_compile_commands( build_args, cdb_file=compile_commands, suppress_output=suppress_output, sandbox=sandbox ) if exit_code != 0: - if sys.platform == 'win32': + if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) - logger(__name__).error(f'Failed to make project: Exit Code {exit_code}', extra={'sentry': False}) - raise click.ClickException('Failed to build') + logger(__name__).error(f"Failed to make project: Exit Code {exit_code}", extra={"sentry": False}) + raise click.ClickException("Failed to build") return exit_code diff --git a/pros/cli/common.py b/pros/cli/common.py index f66c0a7d..c3dc8a9e 100644 --- a/pros/cli/common.py +++ b/pros/cli/common.py @@ -166,8 +166,18 @@ def callback(ctx: click.Context, param: click.Parameter, value: bool): 
add_tag("no-sentry", value) if value: pros.common.sentry.disable_prompt() - decorator = click.option('--no-sentry', expose_value=False, is_flag=True, default=True, is_eager=True, - help="Disable sentry reporting prompt.", callback=callback, cls=PROSOption, hidden=True)(f) + + decorator = click.option( + "--no-sentry", + expose_value=False, + is_flag=True, + default=True, + is_eager=True, + help="Disable sentry reporting prompt.", + callback=callback, + cls=PROSOption, + hidden=True, + )(f) decorator.__name__ = f.__name__ return decorator @@ -251,9 +261,11 @@ def callback(ctx: click.Context, param: click.Parameter, value: str): if allow_none: return None elif required: - raise click.UsageError(f'{os.path.abspath(value or ".")} is not inside a PROS project. ' - f'Execute this command from within a PROS project or specify it ' - f'with --project project/path') + raise click.UsageError( + f'{os.path.abspath(value or ".")} is not inside a PROS project. ' + f"Execute this command from within a PROS project or specify it " + f"with --project project/path" + ) else: return None @@ -324,12 +336,15 @@ def resolve_v5_port(port: Optional[str], type: str, quiet: bool = False) -> Tupl return None, False if len(ports) > 1: if not quiet: - brain_id = click.prompt('Multiple {} Brains were found. Please choose one to upload the program: [{}]' - .format('v5', ' | '.join([p.product.split(' ')[-1] for p in ports])), - default=ports[0].product.split(' ')[-1], - show_default=False, - type=click.Choice([p.description.split(' ')[-1] for p in ports])) - port = [p.device for p in ports if p.description.split(' ')[-1] == brain_id][0] + brain_id = click.prompt( + "Multiple {} Brains were found. Please choose one to upload the program: [{}]".format( + "v5", " | ".join([p.product.split(" ")[-1] for p in ports]) + ), + default=ports[0].product.split(" ")[-1], + show_default=False, + type=click.Choice([p.description.split(" ")[-1] for p in ports]), + ) + port = [p.device for p in ports if p.description.split(" ")[-1] == brain_id][0] assert port in [p.device for p in ports] else: diff --git a/pros/cli/conductor.py b/pros/cli/conductor.py index a9786c70..8a06487e 100644 --- a/pros/cli/conductor.py +++ b/pros/cli/conductor.py @@ -67,9 +67,9 @@ def fetch(query: c.BaseTemplate): logger(__name__).debug(f"Template file found: {template_file}") else: if template_file: - logger(__name__).debug(f'Template file exists but is not a valid template: {template_file}') + logger(__name__).debug(f"Template file exists but is not a valid template: {template_file}") else: - logger(__name__).error(f'Template not found: {query.name}') + logger(__name__).error(f"Template not found: {query.name}") return -1 template = c.Conductor().resolve_template(query, allow_offline=False) logger(__name__).debug(f"Template from resolved query: {template}") @@ -84,22 +84,51 @@ def fetch(query: c.BaseTemplate): c.Conductor().fetch_template(depot, template, **query.metadata) -@conductor.command(context_settings={'ignore_unknown_options': True}) -@click.option('--upgrade/--no-upgrade', 'upgrade_ok', default=True, help='Allow upgrading templates in a project') - -@click.option('--install/--no-install', 'install_ok', default=True, help='Allow installing templates in a project') -@click.option('--download/--no-download', 'download_ok', default=True, - help='Allow downloading templates or only allow local templates') -@click.option('--upgrade-user-files/--no-upgrade-user-files', 'force_user', default=False, - help='Replace all user files in a template') 
-@click.option('--force', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command(context_settings={"ignore_unknown_options": True}) +@click.option("--upgrade/--no-upgrade", "upgrade_ok", default=True, help="Allow upgrading templates in a project") +@click.option("--install/--no-install", "install_ok", default=True, help="Allow installing templates in a project") +@click.option( + "--download/--no-download", + "download_ok", + default=True, + help="Allow downloading templates or only allow local templates", +) +@click.option( + "--upgrade-user-files/--no-upgrade-user-files", + "force_user", + default=False, + help="Replace all user files in a template", +) +@click.option( + "--force", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) @project_option() @template_query(required=True) @default_options @@ -153,19 +182,41 @@ def install(ctx: click.Context, **kwargs): return ctx.invoke(apply, install_ok=True, **kwargs) -@conductor.command(context_settings={'ignore_unknown_options': True}, aliases=['u']) -@click.option('--install/--no-install', 'install_ok', default=False) -@click.option('--download/--no-download', 'download_ok', default=True) -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-apply', 'force_apply', default=False, is_flag=True, - help="Force apply the template, disregarding if the template is already installed.") -@click.option('--remove-empty-dirs/--no-remove-empty-dirs', 'remove_empty_directories', is_flag=True, default=True, - help='Remove empty directories when removing files') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command(context_settings={"ignore_unknown_options": True}, aliases=["u"]) +@click.option("--install/--no-install", "install_ok", default=False) +@click.option("--download/--no-download", "download_ok", default=True) +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", 
+ "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-apply", + "force_apply", + default=False, + is_flag=True, + help="Force apply the template, disregarding if the template is already installed.", +) +@click.option( + "--remove-empty-dirs/--no-remove-empty-dirs", + "remove_empty_directories", + is_flag=True, + default=True, + help="Remove empty directories when removing files", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) @project_option() @template_query(required=False) @default_options @@ -222,24 +273,52 @@ def uninstall_template( project.compile(["clean"]) -@conductor.command('new-project', aliases=['new', 'create-project']) -@click.argument('path', type=click.Path()) -@click.argument('target', default=c.Conductor().default_target, type=click.Choice(['v5', 'cortex'])) -@click.argument('version', default='latest') -@click.option('--force-user', 'force_user', default=False, is_flag=True, - help='Replace all user files in a template') -@click.option('--force-system', '-f', 'force_system', default=False, is_flag=True, - help="Force all system files to be inserted into the project") -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--no-default-libs', 'no_default_libs', default=False, is_flag=True, - help='Do not install any default libraries after creating the project.') -@click.option('--compile-after', is_flag=True, default=True, show_default=True, - help='Compile the project after creation') -@click.option('--build-cache', is_flag=True, default=None, show_default=False, - help='Build compile commands cache after creation. Overrides --compile-after if both are specified.') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='Create a project using the PROS 4 kernel') +@conductor.command("new-project", aliases=["new", "create-project"]) +@click.argument("path", type=click.Path()) +@click.argument("target", default=c.Conductor().default_target, type=click.Choice(["v5", "cortex"])) +@click.argument("version", default="latest") +@click.option("--force-user", "force_user", default=False, is_flag=True, help="Replace all user files in a template") +@click.option( + "--force-system", + "-f", + "force_system", + default=False, + is_flag=True, + help="Force all system files to be inserted into the project", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option( + "--no-default-libs", + "no_default_libs", + default=False, + is_flag=True, + help="Do not install any default libraries after creating the project.", +) +@click.option( + "--compile-after", is_flag=True, default=True, show_default=True, help="Compile the project after creation" +) +@click.option( + "--build-cache", + is_flag=True, + default=None, + show_default=False, + help="Build compile commands cache after creation. 
Overrides --compile-after if both are specified.", +) +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="Create a project using the PROS 4 kernel", +) @click.pass_context @default_options def new_project( @@ -301,25 +380,56 @@ def new_project( ctx.exit(-1) -@conductor.command('query-templates', - aliases=['search-templates', 'ls-templates', 'lstemplates', 'querytemplates', 'searchtemplates', 'q'], - context_settings={'ignore_unknown_options': True}) -@click.option('--allow-offline/--no-offline', 'allow_offline', default=True, show_default=True, - help='(Dis)allow offline templates in the listing') -@click.option('--allow-online/--no-online', 'allow_online', default=True, show_default=True, - help='(Dis)allow online templates in the listing') -@click.option('--force-refresh', is_flag=True, default=False, show_default=True, - help='Force update all remote depots, ignoring automatic update checks') -@click.option('--limit', type=int, default=15, - help='The maximum number of displayed results for each library') -@click.option('--early-access/--no-early-access', '--early/--no-early', '-ea/-nea', 'early_access', '--beta/--no-beta', default=None, - help='View a list of early access templates') +@conductor.command( + "query-templates", + aliases=["search-templates", "ls-templates", "lstemplates", "querytemplates", "searchtemplates", "q"], + context_settings={"ignore_unknown_options": True}, +) +@click.option( + "--allow-offline/--no-offline", + "allow_offline", + default=True, + show_default=True, + help="(Dis)allow offline templates in the listing", +) +@click.option( + "--allow-online/--no-online", + "allow_online", + default=True, + show_default=True, + help="(Dis)allow online templates in the listing", +) +@click.option( + "--force-refresh", + is_flag=True, + default=False, + show_default=True, + help="Force update all remote depots, ignoring automatic update checks", +) +@click.option("--limit", type=int, default=15, help="The maximum number of displayed results for each library") +@click.option( + "--early-access/--no-early-access", + "--early/--no-early", + "-ea/-nea", + "early_access", + "--beta/--no-beta", + default=None, + help="View a list of early access templates", +) @template_query(required=False) @project_option(required=False) @click.pass_context @default_options -def query_templates(ctx, project: Optional[c.Project], query: c.BaseTemplate, allow_offline: bool, allow_online: bool, force_refresh: bool, - limit: int, early_access: bool): +def query_templates( + ctx, + project: Optional[c.Project], + query: c.BaseTemplate, + allow_offline: bool, + allow_online: bool, + force_refresh: bool, + limit: int, + early_access: bool, +): """ Query local and remote templates based on a spec @@ -330,8 +440,13 @@ def query_templates(ctx, project: Optional[c.Project], query: c.BaseTemplate, al limit = 15 if early_access is None and project is not None: early_access = project.use_early_access - templates = c.Conductor().resolve_templates(query, allow_offline=allow_offline, allow_online=allow_online, - force_refresh=force_refresh, early_access=early_access) + templates = c.Conductor().resolve_templates( + query, + allow_offline=allow_offline, + allow_online=allow_online, + force_refresh=force_refresh, + early_access=early_access, + ) render_templates = {} for template in templates: key = (template.identifier, template.origin) @@ -430,10 +545,11 @@ def query_depots(url: bool): """ _conductor = 
c.Conductor() ui.echo(f"Available Depots{' (Add --url for the url)' if not url else ''}:\n") - ui.echo('\n'.join(_conductor.query_depots(url))+"\n") + ui.echo("\n".join(_conductor.query_depots(url)) + "\n") + -@conductor.command('reset') -@click.option('--force', is_flag=True, default=False, help='Force reset') +@conductor.command("reset") +@click.option("--force", is_flag=True, default=False, help="Force reset") @default_options def reset(force: bool): """ @@ -443,12 +559,14 @@ def reset(force: bool): """ if not force: - if not ui.confirm("This will remove all depots and templates. You will be unable to create a new PROS project if you do not have internet connection. Are you sure you want to continue?"): + if not ui.confirm( + "This will remove all depots and templates. You will be unable to create a new PROS project if you do not have internet connection. Are you sure you want to continue?" + ): ui.echo("Aborting") return - + # Delete conductor.pros - file = os.path.join(click.get_app_dir('PROS'), 'conductor.pros') + file = os.path.join(click.get_app_dir("PROS"), "conductor.pros") if os.path.exists(file): os.remove(file) diff --git a/pros/cli/main.py b/pros/cli/main.py index 9d83fd9c..70a982dd 100644 --- a/pros/cli/main.py +++ b/pros/cli/main.py @@ -18,24 +18,12 @@ import pros.common.sentry import pros.common.ui as ui import pros.common.ui.log +import pros.conductor as c from pros.cli.click_classes import * from pros.cli.common import default_options, root_commands from pros.common.utils import get_version, logger from pros.ga.analytics import analytics -import jsonpickle -import pros.cli.build -import pros.cli.conductor -import pros.cli.conductor_utils -import pros.cli.terminal -import pros.cli.upload -import pros.cli.v5_utils -import pros.cli.misc_commands -import pros.cli.interactive -import pros.cli.user_script -import pros.conductor as c - - if sys.platform == "win32": kernel32 = ctypes.windll.kernel32 kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7) @@ -113,9 +101,10 @@ def use_analytics(ctx: click.Context, param, value): ctx.exit(0) ctx.ensure_object(dict) analytics.set_use(touse) - ui.echo(f'Analytics usage set to: {analytics.useAnalytics}') + ui.echo(f"Analytics usage set to: {analytics.useAnalytics}") ctx.exit(0) - + + def use_early_access(ctx: click.Context, param, value): if value is None: return @@ -126,22 +115,39 @@ def use_early_access(ctx: click.Context, param, value): elif value.startswith("f") or value in ["0", "no", "n"]: conductor.use_early_access = False else: - ui.echo('Invalid argument provided for \'--use-early-access\'. Try \'--use-early-access=False\' or \'--use-early-access=True\'') + ui.echo( + "Invalid argument provided for '--use-early-access'. 
Try '--use-early-access=False' or '--use-early-access=True'" + ) ctx.exit(0) conductor.save() - ui.echo(f'Early access set to: {conductor.use_early_access}') + ui.echo(f"Early access set to: {conductor.use_early_access}") ctx.exit(0) @click.command("pros", cls=PROSCommandCollection, sources=root_commands) @click.pass_context @default_options -@click.option('--version', help='Displays version and exits.', is_flag=True, expose_value=False, is_eager=True, - callback=version) -@click.option('--use-analytics', help='Set analytics usage (True/False).', type=str, expose_value=False, - is_eager=True, default=None, callback=use_analytics) -@click.option('--use-early-access', type=str, expose_value=False, is_eager=True, default=None, - help='Create projects with PROS 4 kernel by default', callback=use_early_access) +@click.option( + "--version", help="Displays version and exits.", is_flag=True, expose_value=False, is_eager=True, callback=version +) +@click.option( + "--use-analytics", + help="Set analytics usage (True/False).", + type=str, + expose_value=False, + is_eager=True, + default=None, + callback=use_analytics, +) +@click.option( + "--use-early-access", + type=str, + expose_value=False, + is_eager=True, + default=None, + help="Create projects with PROS 4 kernel by default", + callback=use_early_access, +) def cli(ctx): pros.common.sentry.register() ctx.call_on_close(after_command) diff --git a/pros/cli/upload.py b/pros/cli/upload.py index 972acc76..9712bf8f 100644 --- a/pros/cli/upload.py +++ b/pros/cli/upload.py @@ -261,9 +261,9 @@ def __str__(self): ports = find_v5_ports("system") result.append(PortReport("VEX EDR V5 System Ports", ports, "v5/system")) - ports = find_v5_ports('User') - result.append(PortReport('VEX EDR V5 User Ports', ports, 'v5/user')) - if target == 'cortex' or target is None: + ports = find_v5_ports("User") + result.append(PortReport("VEX EDR V5 User Ports", ports, "v5/user")) + if target == "cortex" or target is None: ports = find_cortex_ports() result.append(PortReport("VEX EDR Cortex Microcontroller Ports", ports, "cortex")) diff --git a/pros/cli/v5_utils.py b/pros/cli/v5_utils.py index 47c3b0e0..f22cbe74 100644 --- a/pros/cli/v5_utils.py +++ b/pros/cli/v5_utils.py @@ -313,10 +313,13 @@ def capture(file_name: str, port: str, force: bool = False): print(f"Saved screen capture to {file_name}") -@v5.command('set-variable', aliases=['sv', 'set', 'set_variable'], short_help='Set a kernel variable on a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), required=True) -@click.argument('value', required=True, type=click.STRING, nargs=1) -@click.argument('port', type=str, default=None, required=False) + +@v5.command( + "set-variable", aliases=["sv", "set", "set_variable"], short_help="Set a kernel variable on a connected V5 device" +) +@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True) +@click.argument("value", required=True, type=click.STRING, nargs=1) +@click.argument("port", type=str, default=None, required=False) @default_options def set_variable(variable, value, port): import pros.serial.devices.vex as vex @@ -330,9 +333,14 @@ def set_variable(variable, value, port): actual_value = device.kv_write(variable, value).decode() print(f"Value of '{variable}' set to : {actual_value}") -@v5.command('read-variable', aliases=['rv', 'get', 'read_variable'], short_help='Read a kernel variable from a connected V5 device') -@click.argument('variable', type=click.Choice(['teamnumber', 'robotname']), 
required=True)
-@click.argument('port', type=str, default=None, required=False)
+
+@v5.command(
+    "read-variable",
+    aliases=["rv", "get", "read_variable"],
+    short_help="Read a kernel variable from a connected V5 device",
+)
+@click.argument("variable", type=click.Choice(["teamnumber", "robotname"]), required=True)
+@click.argument("port", type=str, default=None, required=False)
 @default_options
 def read_variable(variable, port):
     import pros.serial.devices.vex as vex
diff --git a/pros/conductor/conductor.py b/pros/conductor/conductor.py
index cad827a1..151e29f8 100644
--- a/pros/conductor/conductor.py
+++ b/pros/conductor/conductor.py
@@ -1,11 +1,11 @@
 import errno
 import os.path
+import re
 import shutil
+import sys
 from enum import Enum
 from pathlib import Path
-import sys
 from typing import *
-import re

 import click
 from semantic_version import Spec, Version
@@ -31,40 +31,35 @@ class ReleaseChannel(Enum):
     Beta = 'beta'
 """
+
 def is_pathname_valid(pathname: str) -> bool:
-    '''
+    """
     A more detailed check for path validity than regex.
     https://stackoverflow.com/a/34102855/11177720
-    '''
+    """
     try:
         if not isinstance(pathname, str) or not pathname:
             return False
-
+
         _, pathname = os.path.splitdrive(pathname)
-
-        root_dirname = os.environ.get('HOMEDRIVE', 'C:') \
-            if sys.platform == 'win32' else os.path.sep
+
+        root_dirname = os.environ.get("HOMEDRIVE", "C:") if sys.platform == "win32" else os.path.sep
         assert os.path.isdir(root_dirname)
-
+
         root_dirname = root_dirname.rstrip(os.path.sep) + os.path.sep
         for pathname_part in pathname.split(os.path.sep):
             try:
                 os.lstat(root_dirname + pathname_part)
             except OSError as exc:
-                if hasattr(exc, 'winerror'):
-                    if exc.winerror == 123: # ERROR_INVALID_NAME, python doesn't have this constant
+                if hasattr(exc, "winerror"):
+                    if exc.winerror == 123:  # ERROR_INVALID_NAME, python doesn't have this constant
                         return False
                 elif exc.errno in {errno.ENAMETOOLONG, errno.ERANGE}:
                     return False
-
+
         # Check for emojis
         # https://stackoverflow.com/a/62898106/11177720
-        ranges = [
-            (ord(u'\U0001F300'), ord(u"\U0001FAF6")), # 127744, 129782
-            (126980, 127569),
-            (169, 174),
-            (8205, 12953)
-        ]
+        ranges = [(ord("\U0001F300"), ord("\U0001FAF6")), (126980, 127569), (169, 174), (8205, 12953)]  # 127744, 129782
         for a_char in pathname:
             char_code = ord(a_char)
             for range_min, range_max in ranges:
@@ -75,6 +70,7 @@ def is_pathname_valid(pathname: str) -> bool:
     else:
         return True

+
 class Conductor(Config):
     """
     Provides entrances for all conductor-related tasks (fetching, applying, creating new projects)
@@ -195,9 +191,9 @@ def resolve_templates(
         **kwargs,
     ) -> List[BaseTemplate]:
         results = list() if not unique else set()
-        kernel_version = kwargs.get('kernel_version', None)
-        if kwargs.get('early_access', None) is not None:
-            use_early_access = kwargs.get('early_access', False)
+        kernel_version = kwargs.get("kernel_version", None)
+        if kwargs.get("early_access", None) is not None:
+            use_early_access = kwargs.get("early_access", False)
         else:
             use_early_access = self.use_early_access
         if isinstance(identifier, str):
@@ -208,7 +204,11 @@ def resolve_templates(
         offline_results = list()
         if use_early_access:
-            offline_results.extend(filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates))
+            offline_results.extend(
+                filter(
+                    lambda t: t.satisfies(query, kernel_version=kernel_version), self.early_access_local_templates
+                )
+            )

         offline_results.extend(
             filter(lambda t: t.satisfies(query, kernel_version=kernel_version), self.local_templates)
@@ -315,18 +315,19 @@ def apply_template(self, project: Project, identifier: Union[str, BaseTemplate],
                 f"Do you still want to downgrade?"
             )
             if not confirm:
-                raise dont_send(
-                    InvalidTemplateException(f'Not downgrading'))
-        elif not project.use_early_access and template.version[0] == '3' and not self.warn_early_access:
-            confirm = ui.confirm(f'PROS 4 is now in early access. '
-                                 f'Please use the --early-access flag if you would like to use it.\n'
-                                 f'Do you want to use PROS 4 instead?')
+                raise dont_send(InvalidTemplateException(f"Not downgrading"))
+        elif not project.use_early_access and template.version[0] == "3" and not self.warn_early_access:
+            confirm = ui.confirm(
+                f"PROS 4 is now in early access. "
+                f"Please use the --early-access flag if you would like to use it.\n"
+                f"Do you want to use PROS 4 instead?"
+            )
             self.warn_early_access = True
-            if confirm: # use pros 4
+            if confirm:  # use pros 4
                 project.use_early_access = True
                 project.save()
-                kwargs['version'] = '>=0'
-                kwargs['early_access'] = True
+                kwargs["version"] = ">=0"
+                kwargs["early_access"] = True
                 # Recall the function with early access enabled
                 return self.apply_template(project, identifier, **kwargs)
@@ -383,39 +384,40 @@ def remove_template(
         )

     def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Project:
-        if kwargs.get('early_access', None) is not None:
-            use_early_access = kwargs.get('early_access', False)
+        if kwargs.get("early_access", None) is not None:
+            use_early_access = kwargs.get("early_access", False)
         else:
             use_early_access = self.use_early_access
         kwargs["early_access"] = use_early_access
-        if kwargs["version_source"]: # If true, then the user has not specified a version
+        if kwargs["version_source"]:  # If true, then the user has not specified a version
             if not use_early_access and self.warn_early_access:
-                ui.echo(f"PROS 4 is now in early access. "
-                        f"If you would like to use it, use the --early-access flag.")
+                ui.echo(f"PROS 4 is now in early access. " f"If you would like to use it, use the --early-access flag.")
             elif not use_early_access and not self.warn_early_access:
-                confirm = ui.confirm(f'PROS 4 is now in early access. '
-                                     f'Please use the --early-access flag if you would like to use it.\n'
-                                     f'Do you want to use PROS 4 instead?')
+                confirm = ui.confirm(
+                    f"PROS 4 is now in early access. "
+                    f"Please use the --early-access flag if you would like to use it.\n"
+                    f"Do you want to use PROS 4 instead?"
+                )
                 self.warn_early_access = True
                 if confirm:
                     use_early_access = True
-                    kwargs['early_access'] = True
+                    kwargs["early_access"] = True
             elif use_early_access:
-                ui.echo(f'Early access is enabled. Using PROS 4.')
+                ui.echo(f"Early access is enabled. Using PROS 4.")
         elif use_early_access:
-            ui.echo(f'Early access is enabled.')
+            ui.echo(f"Early access is enabled.")

         if not is_pathname_valid(str(Path(path).absolute())):
-            raise dont_send(ValueError('Project path contains invalid characters.'))
-
-        if Path(path).exists() and Path(path).samefile(os.path.expanduser('~')):
-            raise dont_send(ValueError('Will not create a project in user home directory'))
-
+            raise dont_send(ValueError("Project path contains invalid characters."))
+
+        if Path(path).exists() and Path(path).samefile(os.path.expanduser("~")):
+            raise dont_send(ValueError("Will not create a project in user home directory"))
+
         proj = Project(path=path, create=True, early_access=use_early_access)
-        if 'target' in kwargs:
-            proj.target = kwargs['target']
-        if 'project_name' in kwargs and kwargs['project_name'] and not kwargs['project_name'].isspace():
-            proj.project_name = kwargs['project_name']
+        if "target" in kwargs:
+            proj.target = kwargs["target"]
+        if "project_name" in kwargs and kwargs["project_name"] and not kwargs["project_name"].isspace():
+            proj.project_name = kwargs["project_name"]
         else:
             proj.project_name = os.path.basename(os.path.normpath(os.path.abspath(path)))
         if "version" in kwargs:
@@ -425,7 +427,12 @@ def new_project(self, path: str, no_default_libs: bool = False, **kwargs) -> Pro
             proj.save()

         if not no_default_libs:
-            libraries = self.early_access_libraries if proj.use_early_access and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">")) else self.default_libraries
+            libraries = (
+                self.early_access_libraries
+                if proj.use_early_access
+                and (kwargs.get("version", ">").startswith("4") or kwargs.get("version", ">").startswith(">"))
+                else self.default_libraries
+            )
             for library in libraries[proj.target]:
                 try:
                     # remove kernel version so that latest template satisfying query is correctly selected
diff --git a/pros/conductor/project/__init__.py b/pros/conductor/project/__init__.py
index 603ee6ee..773e1c60 100644
--- a/pros/conductor/project/__init__.py
+++ b/pros/conductor/project/__init__.py
@@ -17,7 +17,14 @@ class Project(Config):
-    def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool = True, defaults: dict = None, early_access: bool = False):
+    def __init__(
+        self,
+        path: str = ".",
+        create: bool = False,
+        raise_on_error: bool = True,
+        defaults: dict = None,
+        early_access: bool = False,
+    ):
         """
         Instantiates a PROS project configuration
         :param path: A path to the project, may be the actual project.pros file, any child directory of the project,
@@ -35,10 +42,10 @@ def __init__(self, path: str = '.', create: bool = False, raise_on_error: bool =
         if defaults is None:
             defaults = {}
-        self.target: str = defaults.get('target', 'cortex').lower() # VEX Hardware target (V5/Cortex)
-        self.templates: Dict[str, Template] = defaults.get('templates', {})
-        self.upload_options: Dict = defaults.get('upload_options', {})
-        self.project_name: str = defaults.get('project_name', None)
+        self.target: str = defaults.get("target", "cortex").lower()  # VEX Hardware target (V5/Cortex)
+        self.templates: Dict[str, Template] = defaults.get("templates", {})
+        self.upload_options: Dict = defaults.get("upload_options", {})
+        self.project_name: str = defaults.get("project_name", None)
         self.use_early_access = early_access
         super(Project, self).__init__(file, error_on_decode=raise_on_error)
         if "kernel" in self.__dict__:

From 30a68e2f56fd7ae5b5b83fdc953643fc7237adab Mon Sep 17 00:00:00 2001
From: Mayank Patibandla <34776435+mayankpatibandla@users.noreply.github.com>
Date: Tue, 19 Mar 2024 17:50:50 -0400
Subject: [PATCH 44/44] Disable R1731 and W0612

---
 .pylintrc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pylintrc b/.pylintrc
index 4514f984..18de6cf1 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -6,4 +6,4 @@ disable = C0114, C0115, C0116, R0903, C0415, R1705, R0913, W1203, R1729, E1120
           W0108, R0912, R0911, W0511, E1136, R0902, W0611, C0412, C0103, C0301, R1732, R0915, W1514, R1718, W1510,
           E0602, W1309, C0325, E1101, R1714, R0916, W0719, R1734, E1133, W1201, W0107, W3101, W0640, C0201, W1113,
           W0246, W0622, W0221, E1111, R1720, W0221, R1723, E0102, W0201, E0203, E0401, W0602, W0212, W0707, R0904,
-          W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, R0401
+          W0101, C0302, E0110, W0603, R1701, W0106, R1721, W0601, R0401, R1731, W0612
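For reference, the two newly disabled checks are assumed here to be pylint's consider-using-max-builtin (R1731) and unused-variable (W0612). A minimal, hypothetical sketch of code that would trip both checks (not taken from the PROS codebase):

    def largest_count(counts):
        total = sum(counts)  # W0612: 'total' is assigned but never used
        largest = 0
        for count in counts:
            # R1731: pylint would suggest 'largest = max(largest, count)' here
            if count > largest:
                largest = count
        return largest

Adding R1731 and W0612 to the disable list keeps the pylint pre-commit hook from failing on code like this.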