From 7c37649f0704e76eb71ad9c0cfe5157257f069fd Mon Sep 17 00:00:00 2001
From: Meng Zhang
Date: Tue, 28 Nov 2023 12:20:21 +0800
Subject: [PATCH] chore(lint): add autofix-python

---
 .github/workflows/autofix-python.yml | 31 ++++++++++++++++++++++++++++
 python/tabby-eval/modal/predict.py   | 15 ++++++--------
 2 files changed, 37 insertions(+), 9 deletions(-)
 create mode 100644 .github/workflows/autofix-python.yml

diff --git a/.github/workflows/autofix-python.yml b/.github/workflows/autofix-python.yml
new file mode 100644
index 000000000000..d70326d3e559
--- /dev/null
+++ b/.github/workflows/autofix-python.yml
@@ -0,0 +1,31 @@
+name: autofix.ci (python)
+
+on:
+  pull_request:
+    branches: [ "main" ]
+    paths:
+      - 'python/**'
+
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref_name }}
+
+  # If this is enabled it will cancel current running and start latest
+  cancel-in-progress: true
+
+jobs:
+  autofix:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          submodules: recursive
+
+      - uses: chartboost/ruff-action@v1
+        with:
+          src: "./python"
+          args: --fix
+
+      - uses: autofix-ci/action@d3e591514b99d0fca6779455ff8338516663f7cc
diff --git a/python/tabby-eval/modal/predict.py b/python/tabby-eval/modal/predict.py
index 9a78635e7763..8f288400cd65 100644
--- a/python/tabby-eval/modal/predict.py
+++ b/python/tabby-eval/modal/predict.py
@@ -1,14 +1,11 @@
 import asyncio
 import json
-import modal
 import os
 
 import pandas as pd
 
-from collections import namedtuple
 from datetime import datetime
-from modal import Image, Mount, Secret, Stub, asgi_app, gpu, method
-from pathlib import Path
-from typing import Union, List, Optional, Any, Tuple
+from modal import Image, Stub, gpu, method
+from typing import List, Optional, Tuple
 
 GPU_CONFIG = gpu.A10G()
@@ -61,7 +58,8 @@ class Model:
 
     def __enter__(self):
        import socket
-        import subprocess, os
+        import subprocess
+        import os
        import time

        from tabby_python_client import Client
@@ -108,12 +106,11 @@ async def health(self):
 
     @method()
     async def complete(self, language: str, index: int, prompt: str) -> Tuple[int, Optional[str], Optional[str]]:
+        from tabby_python_client import errors
         from tabby_python_client.api.v1 import completion
         from tabby_python_client.models import (
             CompletionRequest,
             DebugOptions,
-            CompletionResponse,
-            Segments,
         )
         from tabby_python_client.types import Response
 
@@ -127,7 +124,7 @@ async def complete(self, language: str, index: int, prompt: str) -> Tuple[int, O
             client=self.client, json_body=request
         )
 
-        if resp.parsed != None:
+        if resp.parsed is not None:
             return index, resp.parsed.choices[0].text, None
         else:
             return index, None, f"<{resp.status_code}>"
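
Note for reviewers (not part of the patch itself): the `!= None` to `is not None` change in predict.py is the comparison style that ruff's E711 rule enforces, and the ruff step in the new workflow is roughly equivalent to running `ruff --fix ./python` locally. Below is a minimal standalone sketch of that pattern; the `first_choice_text` helper and plain-dict `parsed` value are hypothetical stand-ins, not the real tabby_python_client response type.

    from typing import Optional

    def first_choice_text(parsed: Optional[dict]) -> Optional[str]:
        # Compare against None with "is not" (ruff/pycodestyle E711);
        # "!=" dispatches to __eq__, which a class may override.
        if parsed is not None:
            return parsed["choices"][0]["text"]
        return None

    print(first_choice_text({"choices": [{"text": "hello"}]}))  # -> hello
    print(first_choice_text(None))                              # -> None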