fix(backend/scheduler): Unbreak Scheduler.get_execution_schedules #9919

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
52 changes: 30 additions & 22 deletions autogpt_platform/backend/backend/executor/scheduler.py
@@ -13,7 +13,7 @@
 from autogpt_libs.utils.cache import thread_cached
 from dotenv import load_dotenv
 from prisma.enums import NotificationType
-from pydantic import BaseModel
+from pydantic import BaseModel, ValidationError
 from sqlalchemy import MetaData, create_engine
 
 from backend.data.block import BlockInput
@@ -72,7 +72,7 @@ def get_notification_client():
 
 
 def execute_graph(**kwargs):
-    args = ExecutionJobArgs(**kwargs)
+    args = GraphExecutionJobArgs(**kwargs)
     try:
         log(f"Executing recurring job for graph #{args.graph_id}")
         execution_utils.add_graph_execution(
@@ -140,22 +140,24 @@ class Jobstores(Enum):
     WEEKLY_NOTIFICATIONS = "weekly_notifications"
 
 
-class ExecutionJobArgs(BaseModel):
+class GraphExecutionJobArgs(BaseModel):
     graph_id: str
     input_data: BlockInput
     user_id: str
     graph_version: int
     cron: str
 
 
-class ExecutionJobInfo(ExecutionJobArgs):
+class GraphExecutionJobInfo(GraphExecutionJobArgs):
     id: str
     name: str
     next_run_time: str
 
     @staticmethod
-    def from_db(job_args: ExecutionJobArgs, job_obj: JobObj) -> "ExecutionJobInfo":
-        return ExecutionJobInfo(
+    def from_db(
+        job_args: GraphExecutionJobArgs, job_obj: JobObj
+    ) -> "GraphExecutionJobInfo":
+        return GraphExecutionJobInfo(
             id=job_obj.id,
             name=job_obj.name,
             next_run_time=job_obj.next_run_time.isoformat(),
@@ -269,15 +271,15 @@ def cleanup(self):
         self.scheduler.shutdown(wait=False)
 
     @expose
-    def add_execution_schedule(
+    def add_graph_execution_schedule(
         self,
         graph_id: str,
         graph_version: int,
         cron: str,
         input_data: BlockInput,
         user_id: str,
-    ) -> ExecutionJobInfo:
-        job_args = ExecutionJobArgs(
+    ) -> GraphExecutionJobInfo:
+        job_args = GraphExecutionJobArgs(
             graph_id=graph_id,
             input_data=input_data,
             user_id=user_id,
@@ -292,40 +294,46 @@ def add_execution_schedule(
             jobstore=Jobstores.EXECUTION.value,
         )
         log(f"Added job {job.id} with cron schedule '{cron}' input data: {input_data}")
-        return ExecutionJobInfo.from_db(job_args, job)
+        return GraphExecutionJobInfo.from_db(job_args, job)
 
     @expose
-    def delete_schedule(self, schedule_id: str, user_id: str) -> ExecutionJobInfo:
+    def delete_graph_execution_schedule(
+        self, schedule_id: str, user_id: str
+    ) -> GraphExecutionJobInfo:
         job = self.scheduler.get_job(schedule_id, jobstore=Jobstores.EXECUTION.value)
         if not job:
             log(f"Job {schedule_id} not found.")
             raise ValueError(f"Job #{schedule_id} not found.")
 
-        job_args = ExecutionJobArgs(**job.kwargs)
+        job_args = GraphExecutionJobArgs(**job.kwargs)
         if job_args.user_id != user_id:
             raise ValueError("User ID does not match the job's user ID.")
 
         log(f"Deleting job {schedule_id}")
         job.remove()
 
-        return ExecutionJobInfo.from_db(job_args, job)
+        return GraphExecutionJobInfo.from_db(job_args, job)
 
     @expose
-    def get_execution_schedules(
+    def get_graph_execution_schedules(
         self, graph_id: str | None = None, user_id: str | None = None
-    ) -> list[ExecutionJobInfo]:
+    ) -> list[GraphExecutionJobInfo]:
+        jobs: list[JobObj] = self.scheduler.get_jobs(jobstore=Jobstores.EXECUTION.value)
         schedules = []
-        for job in self.scheduler.get_jobs(jobstore=Jobstores.EXECUTION.value):
-            logger.info(
+        for job in jobs:
+            logger.debug(
                 f"Found job {job.id} with cron schedule {job.trigger} and args {job.kwargs}"
             )
-            job_args = ExecutionJobArgs(**job.kwargs)
+            try:
+                job_args = GraphExecutionJobArgs.model_validate(job.kwargs)
+            except ValidationError:
+                continue
             if (
                 job.next_run_time is not None
                 and (graph_id is None or job_args.graph_id == graph_id)
                 and (user_id is None or job_args.user_id == user_id)
             ):
-                schedules.append(ExecutionJobInfo.from_db(job_args, job))
+                schedules.append(GraphExecutionJobInfo.from_db(job_args, job))
         return schedules
 
     @expose
@@ -346,6 +354,6 @@ class SchedulerClient(AppServiceClient):
     def get_service_type(cls):
         return Scheduler
 
-    add_execution_schedule = endpoint_to_async(Scheduler.add_execution_schedule)
-    delete_schedule = endpoint_to_async(Scheduler.delete_schedule)
-    get_execution_schedules = endpoint_to_async(Scheduler.get_execution_schedules)
+    add_execution_schedule = endpoint_to_async(Scheduler.add_graph_execution_schedule)
+    delete_schedule = endpoint_to_async(Scheduler.delete_graph_execution_schedule)
+    get_execution_schedules = endpoint_to_async(Scheduler.get_graph_execution_schedules)
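
The behavioral fix here is the `try`/`except ValidationError` around `GraphExecutionJobArgs.model_validate(job.kwargs)`: before this change, a single job in the EXECUTION jobstore whose kwargs no longer matched the schema would raise and break the entire `get_execution_schedules` call. Below is a minimal, self-contained sketch of that skip-on-validation-failure pattern; the stored kwargs and the `dict` stand-in for `BlockInput` are illustrative assumptions, not the project's actual data.

```python
from pydantic import BaseModel, ValidationError


class GraphExecutionJobArgs(BaseModel):
    graph_id: str
    input_data: dict  # stand-in for BlockInput in this sketch
    user_id: str
    graph_version: int
    cron: str


# Hypothetical persisted job kwargs: one valid graph-execution job and one
# record (e.g. from another job type or an older schema) that doesn't fit.
stored_job_kwargs = [
    {
        "graph_id": "g1",
        "input_data": {},
        "user_id": "u1",
        "graph_version": 1,
        "cron": "0 * * * *",
    },
    {"notification_type": "weekly_notifications"},
]

schedules = []
for kwargs in stored_job_kwargs:
    try:
        args = GraphExecutionJobArgs.model_validate(kwargs)
    except ValidationError:
        continue  # skip non-conforming jobs instead of failing the whole call
    schedules.append(args)

print([a.graph_id for a in schedules])  # -> ['g1']
```

With the old `ExecutionJobArgs(**job.kwargs)` construction, the first non-conforming record would have propagated a `ValidationError` to the caller, so the endpoint returned no schedules at all.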
4 changes: 2 additions & 2 deletions autogpt_platform/backend/backend/server/routers/v1.py
@@ -769,7 +769,7 @@ class ScheduleCreationRequest(pydantic.BaseModel):
 async def create_schedule(
     user_id: Annotated[str, Depends(get_user_id)],
     schedule: ScheduleCreationRequest,
-) -> scheduler.ExecutionJobInfo:
+) -> scheduler.GraphExecutionJobInfo:
     graph = await graph_db.get_graph(
         schedule.graph_id, schedule.graph_version, user_id=user_id
     )
@@ -809,7 +809,7 @@ async def delete_schedule(
 async def get_execution_schedules(
     user_id: Annotated[str, Depends(get_user_id)],
     graph_id: str | None = None,
-) -> list[scheduler.ExecutionJobInfo]:
+) -> list[scheduler.GraphExecutionJobInfo]:
     return await execution_scheduler_client().get_execution_schedules(
         user_id=user_id,
         graph_id=graph_id,
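
One design choice worth noting: the service-side methods were renamed (`add_graph_execution_schedule` and friends), but `SchedulerClient` keeps the old attribute names, so callers such as the v1 router's `execution_scheduler_client().get_execution_schedules(...)` need no changes. A rough sketch of that aliasing follows, with `endpoint_to_async` stubbed purely for illustration — the real helper lives in the project's service layer and works differently.

```python
import asyncio
from typing import Any, Callable


def endpoint_to_async(fn: Callable[..., Any]) -> Callable[..., Any]:
    """Stub of the project's helper: expose a sync service method as an async call."""

    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        return await asyncio.to_thread(fn, *args, **kwargs)

    return wrapper


class Scheduler:
    # Renamed, graph-specific service method (see the scheduler.py diff above).
    def get_graph_execution_schedules(self, graph_id=None, user_id=None) -> list:
        return []


class SchedulerClient:
    # The public attribute keeps its old name, so existing callers are unchanged.
    get_execution_schedules = staticmethod(
        endpoint_to_async(Scheduler.get_graph_execution_schedules)
    )


async def main() -> None:
    print(await SchedulerClient.get_execution_schedules(Scheduler(), graph_id="g1"))


asyncio.run(main())
```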