Skip to content

Commit

Permalink
Fix Python 3.9 compatibility: replace PEP 604 union syntax (`X | Y`) with `typing.Optional`/`typing.Union` in runtime-evaluated annotations
Browse files Browse the repository at this point in the history
  • Loading branch information
gpetretto committed Nov 14, 2023
1 parent 20b9c33 commit 7751ed3
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 41 deletions.
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ requires-python = ">=3.9"
dependencies =[
"jobflow[strict]",
"pydantic>=2.0.1",
"fireworks",
"fabric",
"tomlkit",
"qtoolkit",
Expand Down
6 changes: 3 additions & 3 deletions src/jobflow_remote/cli/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,14 +143,14 @@ def delete(
if not confirmed:
raise typer.Exit(0)

to_delete = [fi.db_ids[0] for fi in flows_info]
to_delete = [fi.flow_id for fi in flows_info]
with loading_spinner(False) as progress:
progress.add_task(description="Deleting...", total=None)

jc.delete_flows(db_ids=to_delete)
jc.delete_flows(flow_ids=to_delete)

out_console.print(
f"Deleted Flow(s) with db_id: {', '.join(str(i) for i in to_delete)}"
f"Deleted Flow(s) with id: {', '.join(str(i) for i in to_delete)}"
)


Expand Down
72 changes: 35 additions & 37 deletions src/jobflow_remote/jobs/data.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
from __future__ import annotations

from collections import defaultdict
from datetime import datetime, timezone
from enum import Enum
from functools import cached_property
from typing import Optional, Union

from jobflow import Flow, Job, JobStore
from monty.json import jsanitize
Expand All @@ -19,11 +18,11 @@

def get_initial_job_doc_dict(
job: Job,
parents: list[str] | None,
parents: Optional[list[str]],
db_id: int,
worker: str,
exec_config: ExecutionConfig | None,
resources: dict | QResources | None,
exec_config: Optional[ExecutionConfig],
resources: Optional[Union[dict, QResources]],
):
from monty.json import jsanitize

Expand All @@ -50,7 +49,6 @@ def get_initial_job_doc_dict(


def get_initial_flow_doc_dict(flow: Flow, job_dicts: list[dict]):

jobs = [j["uuid"] for j in job_dicts]
ids = [(j["db_id"], j["uuid"], j["index"]) for j in job_dicts]
parents = {j["uuid"]: {"1": j["parents"]} for j in job_dicts}
Expand All @@ -69,10 +67,10 @@ def get_initial_flow_doc_dict(flow: Flow, job_dicts: list[dict]):

class RemoteInfo(BaseModel):
    """Status of the remote submission/monitoring process associated with a Job.

    All optional fields default to ``None`` until the corresponding remote
    event has happened. ``typing.Optional`` is used instead of PEP 604
    ``X | None`` because these annotations are evaluated at runtime by
    pydantic, and ``X | None`` is unsupported on Python 3.9.
    """

    # number of attempts performed for the current remote step
    step_attempts: int = 0
    # state reported by the queue manager; None if not submitted/unknown
    queue_state: Optional[QState] = None
    # queue process id as a string; None before submission
    process_id: Optional[str] = None
    # time until which retries are delayed — TODO confirm exact semantics
    retry_time_limit: Optional[datetime] = None
    # error message from the remote step, if any
    error: Optional[str] = None


class JobInfo(BaseModel):
Expand All @@ -83,32 +81,32 @@ class JobInfo(BaseModel):
name: str
state: JobState
remote: RemoteInfo = RemoteInfo()
parents: list[str] | None = None
previous_state: JobState | None = None
error: str | None = None
lock_id: str | None = None
lock_time: datetime | None = None
run_dir: str | None = None
start_time: datetime | None = None
end_time: datetime | None = None
parents: Optional[list[str]] = None
previous_state: Optional[JobState] = None
error: Optional[str] = None
lock_id: Optional[str] = None
lock_time: Optional[datetime] = None
run_dir: Optional[str] = None
start_time: Optional[datetime] = None
end_time: Optional[datetime] = None
created_on: datetime = datetime.utcnow()
updated_on: datetime = datetime.utcnow()
priority: int = 0
metadata: dict | None = None
metadata: Optional[dict] = None

@property
def is_locked(self) -> bool:
return self.lock_id is not None

@property
def run_time(self) -> float | None:
def run_time(self) -> Optional[float]:
if self.start_time and self.end_time:
return (self.end_time - self.start_time).total_seconds()

return None

@property
def estimated_run_time(self) -> float | None:
def estimated_run_time(self) -> Optional[float]:
if self.start_time:
return (
datetime.now(tz=self.start_time.tzinfo) - self.start_time
Expand All @@ -117,7 +115,7 @@ def estimated_run_time(self) -> float | None:
return None

@classmethod
def from_query_output(cls, d) -> JobInfo:
def from_query_output(cls, d) -> "JobInfo":
job = d.pop("job")
for k in ["name", "metadata"]:
d[k] = job[k]
Expand Down Expand Up @@ -151,23 +149,23 @@ class JobDoc(BaseModel):
# among the parents, all the index will still be parents.
# Note that for just the uuid this condition is not true: JobDocs with
# the same uuid but different indexes may have different parents
parents: list[str] | None = None
previous_state: JobState | None = None
error: str | None = None # TODO is there a better way to serialize it?
lock_id: str | None = None
lock_time: datetime | None = None
run_dir: str | None = None
start_time: datetime | None = None
end_time: datetime | None = None
parents: Optional[list[str]] = None
previous_state: Optional[JobState] = None
error: Optional[str] = None # TODO is there a better way to serialize it?
lock_id: Optional[str] = None
lock_time: Optional[datetime] = None
run_dir: Optional[str] = None
start_time: Optional[datetime] = None
end_time: Optional[datetime] = None
created_on: datetime = datetime.utcnow()
updated_on: datetime = datetime.utcnow()
priority: int = 0
store: JobStore | None = None
exec_config: ExecutionConfig | str | None = None
resources: QResources | dict | None = None
store: Optional[JobStore] = None
exec_config: Optional[Union[ExecutionConfig, str]] = None
resources: Optional[Union[QResources, dict]] = None

stored_data: dict | None = None
history: list[str] | None = None # ?
stored_data: Optional[dict] = None
history: Optional[list[str]] = None # ?

def as_db_dict(self):
# required since the resources are not serialized otherwise
Expand All @@ -189,8 +187,8 @@ class FlowDoc(BaseModel):
jobs: list[str]
state: FlowState
name: str
lock_id: str | None = None
lock_time: datetime | None = None
lock_id: Optional[str] = None
lock_time: Optional[datetime] = None
created_on: datetime = datetime.utcnow()
updated_on: datetime = datetime.utcnow()
metadata: dict = Field(default_factory=dict)
Expand Down

0 comments on commit 7751ed3

Please sign in to comment.