apply ruff reformatting in tasks
bowenliang123 committed Aug 23, 2024
1 parent 3ace01c commit ba1721a
Showing 29 changed files with 546 additions and 508 deletions.
1 change: 0 additions & 1 deletion api/pyproject.toml
@@ -75,7 +75,6 @@ exclude = [
     "models/**/*.py",
     "migrations/**/*",
     "services/**/*.py",
-    "tasks/**/*.py",
     "tests/**/*.py",
     "configs/**/*.py",
 ]
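Dropping "tasks/**/*.py" from ruff's exclude list is the trigger for this whole commit: every file under api/tasks below now falls within the formatter's scope. A minimal sketch of reproducing the reformatting locally, assuming a ruff release with the format subcommand is on PATH and the working directory is api/ (the exact invocation is not recorded in the commit):

    # Re-run the formatter over the newly un-excluded package; ruff picks up
    # the exclude list from pyproject.toml in the current directory.
    import subprocess

    subprocess.run(["ruff", "format", "tasks/"], check=True)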
32 changes: 18 additions & 14 deletions api/tasks/add_document_to_index_task.py
@@ -14,32 +14,33 @@
 from models.dataset import DocumentSegment
 
 
-@shared_task(queue='dataset')
+@shared_task(queue="dataset")
 def add_document_to_index_task(dataset_document_id: str):
     """
     Async Add document to index
     :param document_id:
     Usage: add_document_to_index.delay(document_id)
     """
-    logging.info(click.style('Start add document to index: {}'.format(dataset_document_id), fg='green'))
+    logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
     start_at = time.perf_counter()
 
     dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == dataset_document_id).first()
     if not dataset_document:
-        raise NotFound('Document not found')
+        raise NotFound("Document not found")
 
-    if dataset_document.indexing_status != 'completed':
+    if dataset_document.indexing_status != "completed":
         return
 
-    indexing_cache_key = 'document_{}_indexing'.format(dataset_document.id)
+    indexing_cache_key = "document_{}_indexing".format(dataset_document.id)
 
     try:
-        segments = db.session.query(DocumentSegment).filter(
-            DocumentSegment.document_id == dataset_document.id,
-            DocumentSegment.enabled == True
-        ) \
-            .order_by(DocumentSegment.position.asc()).all()
+        segments = (
+            db.session.query(DocumentSegment)
+            .filter(DocumentSegment.document_id == dataset_document.id, DocumentSegment.enabled == True)
+            .order_by(DocumentSegment.position.asc())
+            .all()
+        )
 
         documents = []
         for segment in segments:
@@ -50,28 +51,31 @@ def add_document_to_index_task(dataset_document_id: str):
                     "doc_hash": segment.index_node_hash,
                     "document_id": segment.document_id,
                     "dataset_id": segment.dataset_id,
-                }
+                },
             )
 
             documents.append(document)
 
         dataset = dataset_document.dataset
 
         if not dataset:
-            raise Exception('Document has no dataset')
+            raise Exception("Document has no dataset")
 
         index_type = dataset.doc_form
         index_processor = IndexProcessorFactory(index_type).init_index_processor()
         index_processor.load(dataset, documents)
 
         end_at = time.perf_counter()
         logging.info(
-            click.style('Document added to index: {} latency: {}'.format(dataset_document.id, end_at - start_at), fg='green'))
+            click.style(
+                "Document added to index: {} latency: {}".format(dataset_document.id, end_at - start_at), fg="green"
+            )
+        )
     except Exception as e:
         logging.exception("add document to index failed")
         dataset_document.enabled = False
         dataset_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
-        dataset_document.status = 'error'
+        dataset_document.status = "error"
         dataset_document.error = str(e)
         db.session.commit()
     finally:
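A note on the hunk above: ruff format rewrites layout only (quote style, line wrapping, trailing commas), never semantics, so the DocumentSegment.enabled == True comparison survives even though ruff's linter rule E712 ordinarily flags == True. In a SQLAlchemy filter the explicit comparison is deliberate, because it builds a SQL boolean expression rather than evaluating a Python truth value. A self-contained sketch with a hypothetical model (not from this repository):

    from sqlalchemy import Boolean, Column, Integer, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()


    class Segment(Base):  # hypothetical stand-in for DocumentSegment
        __tablename__ = "segments"
        id = Column(Integer, primary_key=True)
        enabled = Column(Boolean)


    # `Segment.enabled == True` is overloaded to emit SQL ("segments.enabled = true");
    # the E712 rewrite to `is True`, correct for plain Python, would break the query.
    stmt = select(Segment).where(Segment.enabled == True)  # noqa: E712
    print(stmt)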
31 changes: 14 additions & 17 deletions api/tasks/annotation/add_annotation_to_index_task.py
@@ -10,9 +10,10 @@
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def add_annotation_to_index_task(annotation_id: str, question: str, tenant_id: str, app_id: str,
-                                 collection_binding_id: str):
+@shared_task(queue="dataset")
+def add_annotation_to_index_task(
+    annotation_id: str, question: str, tenant_id: str, app_id: str, collection_binding_id: str
+):
     """
     Add annotation to index.
     :param annotation_id: annotation id
@@ -23,38 +24,34 @@ def add_annotation_to_index_task(annotation_id: str, question: str, tenant_id: s
     Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
     """
-    logging.info(click.style('Start build index for annotation: {}'.format(annotation_id), fg='green'))
+    logging.info(click.style("Start build index for annotation: {}".format(annotation_id), fg="green"))
     start_at = time.perf_counter()
 
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-            collection_binding_id,
-            'annotation'
+            collection_binding_id, "annotation"
         )
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
+            indexing_technique="high_quality",
             embedding_model_provider=dataset_collection_binding.provider_name,
             embedding_model=dataset_collection_binding.model_name,
-            collection_binding_id=dataset_collection_binding.id
+            collection_binding_id=dataset_collection_binding.id,
         )
 
         document = Document(
-            page_content=question,
-            metadata={
-                "annotation_id": annotation_id,
-                "app_id": app_id,
-                "doc_id": annotation_id
-            }
+            page_content=question, metadata={"annotation_id": annotation_id, "app_id": app_id, "doc_id": annotation_id}
         )
-        vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+        vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
         vector.create([document], duplicate_check=True)
 
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                'Build index successful for annotation: {} latency: {}'.format(annotation_id, end_at - start_at),
-                fg='green'))
+                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                fg="green",
+            )
+        )
     except Exception:
         logging.exception("Build index for annotation failed")
61 changes: 27 additions & 34 deletions api/tasks/annotation/batch_import_annotations_task.py
@@ -14,9 +14,8 @@
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str,
-                                  user_id: str):
+@shared_task(queue="dataset")
+def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str, user_id: str):
     """
     Add annotation to index.
     :param job_id: job_id
@@ -26,72 +25,66 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
     :param user_id: user_id
     """
-    logging.info(click.style('Start batch import annotation: {}'.format(job_id), fg='green'))
+    logging.info(click.style("Start batch import annotation: {}".format(job_id), fg="green"))
     start_at = time.perf_counter()
-    indexing_cache_key = 'app_annotation_batch_import_{}'.format(str(job_id))
+    indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
     # get app info
-    app = db.session.query(App).filter(
-        App.id == app_id,
-        App.tenant_id == tenant_id,
-        App.status == 'normal'
-    ).first()
+    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
 
     if app:
         try:
             documents = []
             for content in content_list:
                 annotation = MessageAnnotation(
-                    app_id=app.id,
-                    content=content['answer'],
-                    question=content['question'],
-                    account_id=user_id
+                    app_id=app.id, content=content["answer"], question=content["question"], account_id=user_id
                 )
                 db.session.add(annotation)
                 db.session.flush()
 
                 document = Document(
-                    page_content=content['question'],
-                    metadata={
-                        "annotation_id": annotation.id,
-                        "app_id": app_id,
-                        "doc_id": annotation.id
-                    }
+                    page_content=content["question"],
+                    metadata={"annotation_id": annotation.id, "app_id": app_id, "doc_id": annotation.id},
                 )
                 documents.append(document)
             # if annotation reply is enabled , batch add annotations' index
-            app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
-                AppAnnotationSetting.app_id == app_id
-            ).first()
+            app_annotation_setting = (
+                db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+            )
 
             if app_annotation_setting:
-                dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-                    app_annotation_setting.collection_binding_id,
-                    'annotation'
+                dataset_collection_binding = (
+                    DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
+                        app_annotation_setting.collection_binding_id, "annotation"
+                    )
                 )
                 if not dataset_collection_binding:
                     raise NotFound("App annotation setting not found")
                 dataset = Dataset(
                     id=app_id,
                     tenant_id=tenant_id,
-                    indexing_technique='high_quality',
+                    indexing_technique="high_quality",
                     embedding_model_provider=dataset_collection_binding.provider_name,
                     embedding_model=dataset_collection_binding.model_name,
-                    collection_binding_id=dataset_collection_binding.id
+                    collection_binding_id=dataset_collection_binding.id,
                 )
 
-                vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
+                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
                 vector.create(documents, duplicate_check=True)
 
             db.session.commit()
-            redis_client.setex(indexing_cache_key, 600, 'completed')
+            redis_client.setex(indexing_cache_key, 600, "completed")
             end_at = time.perf_counter()
             logging.info(
                 click.style(
-                    'Build index successful for batch import annotation: {} latency: {}'.format(job_id, end_at - start_at),
-                    fg='green'))
+                    "Build index successful for batch import annotation: {} latency: {}".format(
+                        job_id, end_at - start_at
+                    ),
+                    fg="green",
+                )
+            )
         except Exception as e:
             db.session.rollback()
-            redis_client.setex(indexing_cache_key, 600, 'error')
-            indexing_error_msg_key = 'app_annotation_batch_import_error_msg_{}'.format(str(job_id))
+            redis_client.setex(indexing_cache_key, 600, "error")
+            indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
             redis_client.setex(indexing_error_msg_key, 600, str(e))
             logging.exception("Build index for batch import annotations failed")
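Beyond the formatting, this task's status protocol is visible in the hunk: it writes "completed" or "error" to a 600-second Redis key, with the error message stored under a parallel key. A hypothetical caller-side poll, assuming the redis-py client and the key names used above (nothing like this is part of the commit):

    import time

    from redis import Redis  # redis-py

    redis_client = Redis()


    def wait_for_batch_import(job_id: str, timeout: float = 30.0) -> str:
        """Poll the status key the task writes; the key expires after 600 s."""
        status_key = "app_annotation_batch_import_{}".format(job_id)
        error_key = "app_annotation_batch_import_error_msg_{}".format(job_id)
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            status = redis_client.get(status_key)
            if status == b"completed":
                return "completed"
            if status == b"error":
                # The task stores str(e) under the parallel error key.
                raise RuntimeError(redis_client.get(error_key))
            time.sleep(1.0)
        return "waiting"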
23 changes: 10 additions & 13 deletions api/tasks/annotation/delete_annotation_index_task.py
@@ -9,36 +9,33 @@
 from services.dataset_service import DatasetCollectionBindingService
 
 
-@shared_task(queue='dataset')
-def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str,
-                                 collection_binding_id: str):
+@shared_task(queue="dataset")
+def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str, collection_binding_id: str):
     """
     Async delete annotation index task
     """
-    logging.info(click.style('Start delete app annotation index: {}'.format(app_id), fg='green'))
+    logging.info(click.style("Start delete app annotation index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     try:
         dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
-            collection_binding_id,
-            'annotation'
+            collection_binding_id, "annotation"
         )
 
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
-            collection_binding_id=dataset_collection_binding.id
+            indexing_technique="high_quality",
+            collection_binding_id=dataset_collection_binding.id,
         )
 
         try:
-            vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
-            vector.delete_by_metadata_field('annotation_id', annotation_id)
+            vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
+            vector.delete_by_metadata_field("annotation_id", annotation_id)
         except Exception:
             logging.exception("Delete annotation index failed when annotation deleted.")
         end_at = time.perf_counter()
         logging.info(
-            click.style('App annotations index deleted : {} latency: {}'.format(app_id, end_at - start_at),
-                        fg='green'))
+            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
+        )
     except Exception as e:
         logging.exception("Annotation deleted index failed:{}".format(str(e)))

39 changes: 17 additions & 22 deletions api/tasks/annotation/disable_annotation_reply_task.py
@@ -12,62 +12,57 @@
 from models.model import App, AppAnnotationSetting, MessageAnnotation
 
 
-@shared_task(queue='dataset')
+@shared_task(queue="dataset")
 def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
     """
     Async enable annotation reply task
     """
-    logging.info(click.style('Start delete app annotations index: {}'.format(app_id), fg='green'))
+    logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green"))
     start_at = time.perf_counter()
     # get app info
-    app = db.session.query(App).filter(
-        App.id == app_id,
-        App.tenant_id == tenant_id,
-        App.status == 'normal'
-    ).first()
+    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
     annotations_count = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id).count()
     if not app:
         raise NotFound("App not found")
 
-    app_annotation_setting = db.session.query(AppAnnotationSetting).filter(
-        AppAnnotationSetting.app_id == app_id
-    ).first()
+    app_annotation_setting = (
+        db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
+    )
 
     if not app_annotation_setting:
         raise NotFound("App annotation setting not found")
 
-    disable_app_annotation_key = 'disable_app_annotation_{}'.format(str(app_id))
-    disable_app_annotation_job_key = 'disable_app_annotation_job_{}'.format(str(job_id))
+    disable_app_annotation_key = "disable_app_annotation_{}".format(str(app_id))
+    disable_app_annotation_job_key = "disable_app_annotation_job_{}".format(str(job_id))
 
     try:
-
         dataset = Dataset(
             id=app_id,
             tenant_id=tenant_id,
-            indexing_technique='high_quality',
-            collection_binding_id=app_annotation_setting.collection_binding_id
+            indexing_technique="high_quality",
+            collection_binding_id=app_annotation_setting.collection_binding_id,
        )
 
         try:
             if annotations_count > 0:
-                vector = Vector(dataset, attributes=['doc_id', 'annotation_id', 'app_id'])
-                vector.delete_by_metadata_field('app_id', app_id)
+                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
+                vector.delete_by_metadata_field("app_id", app_id)
         except Exception:
             logging.exception("Delete annotation index failed when annotation deleted.")
-        redis_client.setex(disable_app_annotation_job_key, 600, 'completed')
+        redis_client.setex(disable_app_annotation_job_key, 600, "completed")
 
         # delete annotation setting
         db.session.delete(app_annotation_setting)
         db.session.commit()
 
         end_at = time.perf_counter()
         logging.info(
-            click.style('App annotations index deleted : {} latency: {}'.format(app_id, end_at - start_at),
-                        fg='green'))
+            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
+        )
     except Exception as e:
         logging.exception("Annotation batch deleted index failed:{}".format(str(e)))
-        redis_client.setex(disable_app_annotation_job_key, 600, 'error')
-        disable_app_annotation_error_key = 'disable_app_annotation_error_{}'.format(str(job_id))
+        redis_client.setex(disable_app_annotation_job_key, 600, "error")
+        disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id))
         redis_client.setex(disable_app_annotation_error_key, 600, str(e))
     finally:
         redis_client.delete(disable_app_annotation_key)
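Last, note the finally block: redis_client.delete(disable_app_annotation_key) releases what is effectively a per-app lock, which the enqueuing side presumably takes before dispatching the task. A hypothetical dispatch-side sketch (the key name comes from the task above; the SET NX/TTL choice is an assumption, mirroring the task's 600-second windows):

    from redis import Redis  # redis-py

    from tasks.annotation.disable_annotation_reply_task import disable_annotation_reply_task

    redis_client = Redis()


    def dispatch_disable_annotation(job_id: str, app_id: str, tenant_id: str) -> None:
        # SET NX acts as a cheap mutex; the TTL keeps a crashed worker from
        # wedging the app forever, since the task only deletes the key on exit.
        lock_key = "disable_app_annotation_{}".format(app_id)
        if not redis_client.set(lock_key, 1, nx=True, ex=600):
            raise RuntimeError("An annotation-disable job is already running for this app")
        disable_annotation_reply_task.delay(job_id, app_id, tenant_id)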