diff --git a/.github/workflows/sdk-python.yml b/.github/workflows/sdk-python.yml
index 6fafd17ce9..7737720620 100644
--- a/.github/workflows/sdk-python.yml
+++ b/.github/workflows/sdk-python.yml
@@ -43,9 +43,6 @@ jobs:
       - name: Run Black
         run: poetry run black . --check --verbose --diff --color
 
-      - name: Run Isort
-        run: poetry run isort . --check-only --diff
-
       - name: Run MyPy
         run: poetry run mypy --config-file=pyproject.toml
 
diff --git a/api-contracts/dispatcher/dispatcher.proto b/api-contracts/dispatcher/dispatcher.proto
index 12cc7d0864..f138c5464e 100644
--- a/api-contracts/dispatcher/dispatcher.proto
+++ b/api-contracts/dispatcher/dispatcher.proto
@@ -37,8 +37,8 @@ service Dispatcher {
 
 message WorkerLabels {
   // value of the label
-  optional string strValue = 1;
-  optional int32 intValue = 2;
+  optional string str_value = 1;
+  optional int32 int_value = 2;
 }
 
 enum SDKS {
@@ -49,16 +49,16 @@ enum SDKS {
 }
 
 message RuntimeInfo {
-  optional string sdkVersion = 1;
+  optional string sdk_version = 1;
   optional SDKS language = 2;
-  optional string languageVersion = 3;
+  optional string language_version = 3;
   optional string os = 4;
   optional string extra = 5;
 }
 
 message WorkerRegisterRequest {
   // the name of the worker
-  string workerName = 1;
+  string worker_name = 1;
 
   // a list of actions that this worker can run
   repeated string actions = 2;
@@ -66,34 +66,40 @@ message WorkerRegisterRequest {
   // (optional) the services for this worker
   repeated string services = 3;
 
-  // (optional) the max number of runs this worker can handle
-  optional int32 maxRuns = 4;
+  // (optional) the number of default slots this worker can handle
+  optional int32 slots = 4;
 
   // (optional) worker labels (i.e. state or other metadata)
   map<string, WorkerLabels> labels = 5;
 
   // (optional) webhookId is the id of the webhook that the worker is associated with (if any)
-  optional string webhookId = 6;
+  optional string webhook_id = 6;
 
   // (optional) information regarding the runtime environment of the worker
-  optional RuntimeInfo runtimeInfo = 7;
+  optional RuntimeInfo runtime_info = 7;
+
+  // (optional) the max number of durable slots this worker can handle
+  optional int32 durable_slots = 8;
+
+  // (optional) slot config for this worker (slot_type -> units)
+  map<string, int32> slot_config = 9;
 }
 
 message WorkerRegisterResponse {
   // the tenant id
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the id of the worker
-  string workerId = 2;
+  string worker_id = 2;
 
   // the name of the worker
-  string workerName = 3;
+  string worker_name = 3;
 }
 
 message UpsertWorkerLabelsRequest {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 
   // (optional) the worker labels
   map<string, WorkerLabels> labels = 2;
@@ -102,10 +108,10 @@ message UpsertWorkerLabelsRequest {
 
 message UpsertWorkerLabelsResponse {
   // the tenant id
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the id of the worker
-  string workerId = 2;
+  string worker_id = 2;
 }
 
 enum ActionType {
@@ -116,43 +122,43 @@ enum ActionType {
 }
 
 message AssignedAction {
   // the tenant id
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the workflow run id (optional)
-  string workflowRunId = 2;
+  string workflow_run_id = 2;
 
   // the get group key run id (optional)
-  string getGroupKeyRunId = 3;
+  string get_group_key_run_id = 3;
 
   // the job id
-  string jobId = 4;
+  string job_id = 4;
 
   // the job name
-  string jobName = 5;
+  string job_name = 5;
 
   // the job run id
-  string jobRunId = 6;
+  string job_run_id = 6;
 
-  // the step id
-  string stepId = 7;
+  // the task id
+  string task_id = 7;
 
-  // the step run id
-  string stepRunId = 8;
+  // the task external run id
+  string task_run_external_id = 8;
 
   // the action id
-  string actionId = 9;
+  string action_id = 9;
 
   // the action type
-  ActionType actionType = 10;
+  ActionType action_type = 10;
 
   // the action payload
-  string actionPayload = 11;
+  string action_payload = 11;
 
-  // the step name
-  string stepName = 12;
+  // the task name
+  string task_name = 12;
 
   // the count number of the retry attempt
-  int32 retryCount = 13;
+  int32 retry_count = 13;
 
   // (optional) additional metadata set on the workflow
   optional string additional_metadata = 14;
@@ -170,28 +176,28 @@ message AssignedAction {
   int32 priority = 18;
 
   // (optional) the workflow id
-  optional string workflowId = 19;
+  optional string workflow_id = 19;
 
   // (optional) the workflow version id
-  optional string workflowVersionId = 20;
+  optional string workflow_version_id = 20;
 }
 
 message WorkerListenRequest {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 }
 
 message WorkerUnsubscribeRequest {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 }
 
 message WorkerUnsubscribeResponse {
   // the tenant id to unsubscribe from
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the id of the worker
-  string workerId = 2;
+  string worker_id = 2;
 }
 
 enum GroupKeyActionEventType {
@@ -203,23 +209,23 @@ enum GroupKeyActionEventType {
 }
 
 message GroupKeyActionEvent {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 
   // the id of the workflow run
-  string workflowRunId = 2;
+  string workflow_run_id = 2;
 
-  string getGroupKeyRunId = 3;
+  string get_group_key_run_id = 3;
 
   // the action id
-  string actionId = 4;
+  string action_id = 4;
 
-  google.protobuf.Timestamp eventTimestamp = 5;
+  google.protobuf.Timestamp event_timestamp = 5;
 
-  // the step event type
-  GroupKeyActionEventType eventType = 6;
+  // the task event type
+  GroupKeyActionEventType event_type = 6;
 
   // the event payload
-  string eventPayload = 7;
+  string event_payload = 7;
 }
 
 enum StepActionEventType {
@@ -232,60 +238,60 @@ enum StepActionEventType {
 }
 
 message StepActionEvent {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 
   // the id of the job
-  string jobId = 2;
+  string job_id = 2;
 
   // the job run id
-  string jobRunId = 3;
+  string job_run_id = 3;
 
-  // the id of the step
-  string stepId = 4;
+  // the id of the task
+  string task_id = 4;
 
-  // the step run id
-  string stepRunId = 5;
+  // the task external run id
+  string task_run_external_id = 5;
 
   // the action id
-  string actionId = 6;
+  string action_id = 6;
 
-  google.protobuf.Timestamp eventTimestamp = 7;
+  google.protobuf.Timestamp event_timestamp = 7;
 
-  // the step event type
-  StepActionEventType eventType = 8;
+  // the task event type
+  StepActionEventType event_type = 8;
 
   // the event payload
-  string eventPayload = 9;
+  string event_payload = 9;
 
   // the retry count
-  optional int32 retryCount = 10;
+  optional int32 retry_count = 10;
 
   // a flag indicating if the task should _not_ be retried
-  optional bool shouldNotRetry = 11;
+  optional bool should_not_retry = 11;
 }
 
 message ActionEventResponse {
   // the tenant id
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the id of the worker
-  string workerId = 2;
+  string worker_id = 2;
 }
 
 message SubscribeToWorkflowEventsRequest {
   // the id of the workflow run
-  optional string workflowRunId = 1;
+  optional string workflow_run_id = 1;
 
   // the key of the additional meta field to subscribe to
-  optional string additionalMetaKey = 2;
+  optional string additional_meta_key = 2;
 
   // the value of the additional meta field to subscribe to
-  optional string additionalMetaValue = 3;
+  optional string additional_meta_value = 3;
 }
 
 message SubscribeToWorkflowRunsRequest {
   // the id of the workflow run
-  string workflowRunId = 1;
+  string workflow_run_id = 1;
 }
 
 enum ResourceType {
@@ -306,30 +312,30 @@ enum ResourceEventType {
 }
 
 message WorkflowEvent {
   // the id of the workflow run
-  string workflowRunId = 1;
+  string workflow_run_id = 1;
 
-  ResourceType resourceType = 2;
+  ResourceType resource_type = 2;
 
-  ResourceEventType eventType = 3;
+  ResourceEventType event_type = 3;
 
-  string resourceId = 4;
+  string resource_id = 4;
 
-  google.protobuf.Timestamp eventTimestamp = 5;
+  google.protobuf.Timestamp event_timestamp = 5;
 
   // the event payload
-  string eventPayload = 6;
+  string event_payload = 6;
 
   // whether this is the last event for the workflow run - server
   // will hang up the connection but clients might want to handle this case
   bool hangup = 7;
 
-  // (optional) the max number of retries this step can handle
-  optional int32 stepRetries = 8;
+  // (optional) the max number of retries this task can handle
+  optional int32 task_retries = 8;
 
-  // (optional) the retry count of this step
-  optional int32 retryCount = 9;
+  // (optional) the retry count of this task
+  optional int32 retry_count = 9;
 
-  optional int64 eventIndex = 10;
+  optional int64 event_index = 10;
 }
 
 enum WorkflowRunEventType {
@@ -338,21 +344,21 @@ enum WorkflowRunEventType {
 }
 
 message WorkflowRunEvent {
   // the id of the workflow run
-  string workflowRunId = 1;
+  string workflow_run_id = 1;
 
-  WorkflowRunEventType eventType = 2;
+  WorkflowRunEventType event_type = 2;
 
-  google.protobuf.Timestamp eventTimestamp = 3;
+  google.protobuf.Timestamp event_timestamp = 3;
 
   repeated StepRunResult results = 4;
 }
 
 message StepRunResult {
-  string stepRunId = 1;
+  string task_run_external_id = 1;
 
-  string stepReadableId = 2;
+  string task_name = 2;
 
-  string jobRunId = 3;
+  string job_run_id = 3;
 
   optional string error = 4;
 
@@ -360,8 +366,8 @@ message StepRunResult {
 }
 
 message OverridesData {
-  // the step run id
-  string stepRunId = 1;
+  // the task run id
+  string task_run_external_id = 1;
 
   // the path of the data to set
   string path = 2;
@@ -370,35 +376,35 @@ message OverridesData {
   string value = 3;
 
   // the filename of the caller
-  string callerFilename = 4;
+  string caller_filename = 4;
 }
 
 message OverridesDataResponse {}
 
 message HeartbeatRequest {
   // the id of the worker
-  string workerId = 1;
+  string worker_id = 1;
 
   // heartbeatAt is the time the worker sent the heartbeat
-  google.protobuf.Timestamp heartbeatAt = 2;
+  google.protobuf.Timestamp heartbeat_at = 2;
 }
 
 message HeartbeatResponse {}
 
 message RefreshTimeoutRequest {
-  // the id of the step run to release
-  string stepRunId = 1;
+  // the id of the task run to release
+  string task_run_external_id = 1;
 
-  string incrementTimeoutBy = 2;
+  string increment_timeout_by = 2;
 }
 
 message RefreshTimeoutResponse {
-  google.protobuf.Timestamp timeoutAt = 1;
+  google.protobuf.Timestamp timeout_at = 1;
 }
 
 message ReleaseSlotRequest {
-  // the id of the step run to release
-  string stepRunId = 1;
+  // the id of the task run to release
+  string task_run_external_id = 1;
 }
 
 message ReleaseSlotResponse {}
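The dispatcher contract above renames every field to snake_case and splits worker capacity into a default pool (`slots`), a durable pool (`durable_slots`), and a typed `slot_config` map. The renames are wire-compatible because protobuf encodes by field number, but they do change JSON names and generated identifiers across SDKs. A minimal Go sketch of a registration under the new contract follows; the contracts import path is an assumption, and protoc-gen-go still emits CamelCase Go names for snake_case proto fields:

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"

	// Assumed path for the generated dispatcher stubs; adjust to where
	// this repo actually emits its protobuf packages.
	contracts "github.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts"
)

func main() {
	// A worker advertising 10 default slots, 100 durable slots, and a
	// custom "gpu" slot pool of 2 units (slot_type -> units).
	req := &contracts.WorkerRegisterRequest{
		WorkerName:   "compute-worker-1",
		Actions:      []string{"compute:render"},
		Slots:        proto.Int32(10),
		DurableSlots: proto.Int32(100),
		SlotConfig:   map[string]int32{"gpu": 2},
	}

	fmt.Println(req.GetWorkerName(), req.GetSlots(), req.GetSlotConfig())
}
```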
diff --git a/api-contracts/events/events.proto b/api-contracts/events/events.proto
index 538aaed8e5..d353d85d45 100644
--- a/api-contracts/events/events.proto
+++ b/api-contracts/events/events.proto
@@ -18,10 +18,10 @@ service EventsService {
 
 message Event {
   // the tenant id
-  string tenantId = 1;
+  string tenant_id = 1;
 
   // the id of the event
-  string eventId = 2;
+  string event_id = 2;
 
   // the key for the event
   string key = 3;
@@ -30,10 +30,10 @@ message Event {
   string payload = 4;
 
   // when the event was generated
-  google.protobuf.Timestamp eventTimestamp = 5;
+  google.protobuf.Timestamp event_timestamp = 5;
 
   // the additional metadata for the event
-  optional string additionalMetadata = 6;
+  optional string additional_metadata = 6;
 
   // the scope associated with this filter. Used for subsetting candidate filters at evaluation time
   optional string scope = 7;
@@ -45,11 +45,11 @@ message Events {
 }
 
 message PutLogRequest {
-  // the step run id for the request
-  string stepRunId = 1;
+  // the task external run id for the request
+  string task_run_external_id = 1;
 
   // when the log line was created
-  google.protobuf.Timestamp createdAt = 2;
+  google.protobuf.Timestamp created_at = 2;
 
   // the log line message
   string message = 3;
@@ -61,17 +61,17 @@ message PutLogRequest {
   string metadata = 5;
 
   // the retry count of the task run
-  optional int32 taskRetryCount = 6;
+  optional int32 task_retry_count = 6;
 }
 
 message PutLogResponse {}
 
 message PutStreamEventRequest {
-  // the step run id for the request
-  string stepRunId = 1;
+  // the task external run id for the request
+  string task_run_external_id = 1;
 
   // when the stream event was created
-  google.protobuf.Timestamp createdAt = 2;
+  google.protobuf.Timestamp created_at = 2;
 
   // the stream event message
   bytes message = 3;
@@ -79,7 +79,7 @@ message PutStreamEventRequest {
   // associated stream event metadata
   string metadata = 5;
 
-  optional int64 eventIndex = 6;
+  optional int64 event_index = 6;
 }
 
 message PutStreamEventResponse {}
@@ -98,10 +98,10 @@ message PushEventRequest {
   string payload = 2;
 
   // when the event was generated
-  google.protobuf.Timestamp eventTimestamp = 3;
+  google.protobuf.Timestamp event_timestamp = 3;
 
   // metadata for the event
-  optional string additionalMetadata = 4;
+  optional string additional_metadata = 4;
 
   optional int32 priority = 5;
 
@@ -111,5 +111,5 @@ message PushEventRequest {
 
 message ReplayEventRequest {
   // the event id to replay
-  string eventId = 1;
+  string event_id = 1;
 }
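Log and stream ingestion is likewise keyed by `task_run_external_id` now, with the retry count disambiguating attempts. A sketch of building a `PutLogRequest` against the regenerated stubs; the import path and the id value are illustrative assumptions:

```go
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/timestamppb"

	// Assumed path for the generated events stubs.
	events "github.com/hatchet-dev/hatchet/internal/services/ingestor/contracts"
)

func main() {
	// A log line attached to a task run by its external id rather than
	// the old step run id; task_retry_count pins it to one attempt.
	req := &events.PutLogRequest{
		TaskRunExternalId: "00000000-0000-0000-0000-000000000000", // placeholder id
		CreatedAt:         timestamppb.New(time.Now()),
		Message:           "starting render",
		TaskRetryCount:    proto.Int32(0),
	}

	fmt.Println(req.GetTaskRunExternalId(), req.GetTaskRetryCount())
}
```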
diff --git a/api-contracts/openapi/components/schemas/worker.yaml b/api-contracts/openapi/components/schemas/worker.yaml
index a07d219213..86cf9f7b5d 100644
--- a/api-contracts/openapi/components/schemas/worker.yaml
+++ b/api-contracts/openapi/components/schemas/worker.yaml
@@ -76,6 +76,19 @@ WorkerType:
     - MANAGED
     - WEBHOOK
 
+WorkerSlotConfig:
+  type: object
+  description: Slot availability and limits for a slot type.
+  properties:
+    available:
+      type: integer
+      description: The number of available units for this slot type.
+    limit:
+      type: integer
+      description: The maximum number of units for this slot type.
+  required:
+    - limit
+
 RegisteredWorkflow:
   type: object
   properties:
@@ -136,12 +149,11 @@ Worker:
         - ACTIVE
         - INACTIVE
         - PAUSED
-    maxRuns:
-      type: integer
-      description: The maximum number of runs this worker can execute concurrently.
-    availableRuns:
-      type: integer
-      description: The number of runs this worker can execute concurrently.
+    slotConfig:
+      type: object
+      description: Slot availability and limits for this worker (slot_type -> { available, limit }).
+      additionalProperties:
+        $ref: "#/WorkerSlotConfig"
     dispatcherId:
       type: string
       description: "the id of the assigned dispatcher, in UUID format"
diff --git a/api-contracts/openapi/components/schemas/workflow.yaml b/api-contracts/openapi/components/schemas/workflow.yaml
index 97e88d13a9..0c3d8031e2 100644
--- a/api-contracts/openapi/components/schemas/workflow.yaml
+++ b/api-contracts/openapi/components/schemas/workflow.yaml
@@ -281,6 +281,14 @@ Step:
     timeout:
       type: string
       description: The timeout of the step.
+    isDurable:
+      type: boolean
+      description: Whether the step is durable.
+    slotRequests:
+      type: object
+      description: Slot requests for the step (slot_type -> units).
+      additionalProperties:
+        type: integer
     children:
       type: array
       items:
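With `maxRuns`/`availableRuns` gone from the schema, a serialized `Worker` carries one `{ available, limit }` object per slot type. A self-contained decoding sketch; the local struct merely mirrors the generated `WorkerSlotConfig` for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of the new WorkerSlotConfig schema shape, for illustration
// only; the real type is generated from worker.yaml into the gen package.
type workerSlotConfig struct {
	Available *int `json:"available,omitempty"`
	Limit     int  `json:"limit"`
}

func main() {
	// Example payload shape for the new slotConfig field.
	payload := []byte(`{"slotConfig": {"default": {"available": 7, "limit": 10},
		"gpu": {"available": 1, "limit": 2}}}`)

	var worker struct {
		SlotConfig map[string]workerSlotConfig `json:"slotConfig"`
	}
	if err := json.Unmarshal(payload, &worker); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", worker.SlotConfig["gpu"])
}
```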
diff --git a/api-contracts/v1/workflows.proto b/api-contracts/v1/workflows.proto
index 4738b0a631..8634018ce0 100644
--- a/api-contracts/v1/workflows.proto
+++ b/api-contracts/v1/workflows.proto
@@ -17,12 +17,12 @@ service AdminService {
 }
 
 message CancelTasksRequest {
-  repeated string externalIds = 1; // a list of external UUIDs
+  repeated string external_ids = 1; // a list of external UUIDs
   optional TasksFilter filter = 2;
 }
 
 message ReplayTasksRequest {
-  repeated string externalIds = 1; // a list of external UUIDs
+  repeated string external_ids = 1; // a list of external UUIDs
   optional TasksFilter filter = 2;
 }
 
@@ -90,7 +90,7 @@ message CreateWorkflowVersionRequest {
   Concurrency concurrency = 7; // (optional) the workflow concurrency options
   optional string cron_input = 8; // (optional) the input for the cron trigger
   optional CreateTaskOpts on_failure_task = 9; // (optional) the job to run on failure
-  optional StickyStrategy sticky = 10; // (optional) the sticky strategy for assigning steps to workers
+  optional StickyStrategy sticky = 10; // (optional) the sticky strategy for assigning tasks to workers
   optional int32 default_priority = 11; // (optional) the default priority for the workflow
   repeated Concurrency concurrency_arr = 12; // (optional) the workflow concurrency options
   repeated DefaultFilter default_filters = 13; // (optional) the default filters for the workflow
@@ -129,8 +129,8 @@ enum WorkerLabelComparator {
 
 message DesiredWorkerLabels {
   // value of the affinity
-  optional string strValue = 1;
-  optional int32 intValue = 2;
+  optional string str_value = 1;
+  optional int32 int_value = 2;
 
   /**
   * (optional) Specifies whether the affinity setting is required.
@@ -160,19 +160,21 @@ message CreateTaskOpts {
   string timeout = 3; // (optional) the task timeout
   string inputs = 4; // (optional) the task inputs, assuming string representation of JSON
   repeated string parents = 5; // (optional) the task parents. if none are passed in, this is a root task
-  int32 retries = 6; // (optional) the number of retries for the step, default 0
-  repeated CreateTaskRateLimit rate_limits = 7; // (optional) the rate limits for the step
-  map<string, DesiredWorkerLabels> worker_labels = 8; // (optional) the desired worker affinity state for the step
-  optional float backoff_factor = 9; // (optional) the retry backoff factor for the step
-  optional int32 backoff_max_seconds = 10; // (optional) the maximum backoff time for the step
+  int32 retries = 6; // (optional) the number of retries for the task, default 0
+  repeated CreateTaskRateLimit rate_limits = 7; // (optional) the rate limits for the task
+  map<string, DesiredWorkerLabels> worker_labels = 8; // (optional) the desired worker affinity state for the task
+  optional float backoff_factor = 9; // (optional) the retry backoff factor for the task
+  optional int32 backoff_max_seconds = 10; // (optional) the maximum backoff time for the task
   repeated Concurrency concurrency = 11; // (optional) the task concurrency options
   optional TaskConditions conditions = 12; // (optional) the task conditions for creating the task
   optional string schedule_timeout = 13; // (optional) the timeout for the schedule
+  bool is_durable = 14; // (optional) whether the task is durable
+  map<string, int32> slot_requests = 15; // (optional) slot requests (slot_type -> units)
 }
 
 message CreateTaskRateLimit {
   string key = 1; // (required) the key for the rate limit
-  optional int32 units = 2; // (optional) the number of units this step consumes
+  optional int32 units = 2; // (optional) the number of units this task consumes
   optional string key_expr = 3; // (optional) a CEL expression for determining the rate limit key
   optional string units_expr = 4; // (optional) a CEL expression for determining the number of units consumed
   optional string limit_values_expr = 5; // (optional) a CEL expression for determining the total amount of rate limit units
 
diff --git a/api-contracts/workflows/workflows.proto b/api-contracts/workflows/workflows.proto
index 3c18953782..20106468ca 100644
--- a/api-contracts/workflows/workflows.proto
+++ b/api-contracts/workflows/workflows.proto
@@ -41,7 +41,7 @@ message CreateWorkflowVersionOpts {
   optional string schedule_timeout = 9; // (optional) the timeout for the schedule
   optional string cron_input = 10; // (optional) the input for the cron trigger
   optional CreateWorkflowJobOpts on_failure_job = 11; // (optional) the job to run on failure
-  optional StickyStrategy sticky = 12; // (optional) the sticky strategy for assigning steps to workers
+  optional StickyStrategy sticky = 12; // (optional) the sticky strategy for assigning tasks to workers
   optional WorkflowKind kind = 13; // (optional) the kind of workflow
   optional int32 default_priority = 14; // (optional) the priority of the workflow
 }
@@ -67,7 +67,7 @@ message CreateWorkflowJobOpts {
   string name = 1; // (required) the job name
   string description = 2; // (optional) the job description
   reserved 3; // (deprecated) timeout
-  repeated CreateWorkflowStepOpts steps = 4; // (required) the job steps
+  repeated CreateWorkflowStepOpts steps = 4; // (required) the job tasks
 }
 
 enum WorkerLabelComparator {
@@ -81,8 +81,8 @@ enum WorkerLabelComparator {
 
 message DesiredWorkerLabels {
   // value of the affinity
-  optional string strValue = 1;
-  optional int32 intValue = 2;
+  optional string str_value = 1;
+  optional int32 int_value = 2;
 
   /**
   * (optional) Specifies whether the affinity setting is required.
@@ -105,24 +105,24 @@ message DesiredWorkerLabels {
   optional int32 weight = 5;
 }
 
-// CreateWorkflowStepOpts represents options to create a workflow step.
+// CreateWorkflowStepOpts represents options to create a workflow task.
 message CreateWorkflowStepOpts {
-  string readable_id = 1; // (required) the step name
-  string action = 2; // (required) the step action id
-  string timeout = 3; // (optional) the step timeout
-  string inputs = 4; // (optional) the step inputs, assuming string representation of JSON
-  repeated string parents = 5; // (optional) the step parents. if none are passed in, this is a root step
-  string user_data = 6; // (optional) the custom step user data, assuming string representation of JSON
-  int32 retries = 7; // (optional) the number of retries for the step, default 0
-  repeated CreateStepRateLimit rate_limits = 8; // (optional) the rate limits for the step
-  map<string, DesiredWorkerLabels> worker_labels = 9; // (optional) the desired worker affinity state for the step
-  optional float backoff_factor = 10; // (optional) the retry backoff factor for the step
-  optional int32 backoff_max_seconds = 11; // (optional) the maximum backoff time for the step
+  string readable_id = 1; // (required) the task name
+  string action = 2; // (required) the task action id
+  string timeout = 3; // (optional) the task timeout
+  string inputs = 4; // (optional) the task inputs, assuming string representation of JSON
+  repeated string parents = 5; // (optional) the task parents. if none are passed in, this is a root task
+  string user_data = 6; // (optional) the custom task user data, assuming string representation of JSON
+  int32 retries = 7; // (optional) the number of retries for the task, default 0
+  repeated CreateStepRateLimit rate_limits = 8; // (optional) the rate limits for the task
+  map<string, DesiredWorkerLabels> worker_labels = 9; // (optional) the desired worker affinity state for the task
+  optional float backoff_factor = 10; // (optional) the retry backoff factor for the task
+  optional int32 backoff_max_seconds = 11; // (optional) the maximum backoff time for the task
 }
 
 message CreateStepRateLimit {
   string key = 1; // (required) the key for the rate limit
-  optional int32 units = 2; // (optional) the number of units this step consumes
+  optional int32 units = 2; // (optional) the number of units this task consumes
   optional string key_expr = 3; // (optional) a CEL expression for determining the rate limit key
   optional string units_expr = 4; // (optional) a CEL expression for determining the number of units consumed
   optional string limit_values_expr = 5; // (optional) a CEL expression for determining the total amount of rate limit units
@@ -143,8 +143,8 @@ message ScheduleWorkflowRequest {
   // (optional) the parent workflow run id
   optional string parent_id = 4;
 
-  // (optional) the parent step run id
-  optional string parent_step_run_id = 5;
+  // (optional) the parent task external run id
+  optional string parent_task_run_external_id = 5;
 
   // (optional) the index of the child workflow. if this is set, matches on the index or the
   // child key will be a no-op, even if the schedule has changed.
@@ -208,16 +208,16 @@ message TriggerWorkflowRequest {
   // (optional) the parent workflow run id
   optional string parent_id = 3;
 
-  // (optional) the parent step run id
-  optional string parent_step_run_id = 4;
+  // (optional) the parent task external run id
+  optional string parent_task_run_external_id = 4;
 
   // (optional) the index of the child workflow. if this is set, matches on the index or the
-  // child key will return an existing workflow run if the parent id, parent step run id, and
+  // child key will return an existing workflow run if the parent id, parent task run id, and
   // child index/key match an existing workflow run.
   optional int32 child_index = 5;
 
   // (optional) the key for the child. if this is set, matches on the index or the
-  // child key will return an existing workflow run if the parent id, parent step run id, and
+  // child key will return an existing workflow run if the parent id, parent task run id, and
   // child index/key match an existing workflow run.
   optional string child_key = 6;
 
@@ -228,7 +228,7 @@ message TriggerWorkflowRequest {
   // requires the workflow definition to have a sticky strategy
   optional string desired_worker_id = 8;
 
-  // (optional) override for the priority of the workflow steps, will set all steps to this priority
+  // (optional) override for the priority of the workflow tasks, will set all tasks to this priority
   optional int32 priority = 9;
 }
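Tasks opt into the new capacity model at definition time through `is_durable` and `slot_requests`. A sketch of populating `CreateTaskOpts`, under the assumption that the v1 contracts are generated into a package aliased here as `v1` (the import path is illustrative):

```go
package main

import (
	"fmt"

	// Assumed path for the generated v1 admin/workflow stubs.
	v1 "github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1"
)

func main() {
	// is_durable marks the task as durable; slot_requests asks for units
	// per slot_type from the worker's advertised slot_config pools.
	task := &v1.CreateTaskOpts{
		ReadableId:   "render",
		Action:       "compute:render",
		IsDurable:    true,
		SlotRequests: map[string]int32{"gpu": 2},
	}

	fmt.Println(task.GetReadableId(), task.GetIsDurable(), task.GetSlotRequests())
}
```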
diff --git a/api/v1/server/handlers/workers/get.go b/api/v1/server/handlers/workers/get.go
index e032644c3a..3e4db283a6 100644
--- a/api/v1/server/handlers/workers/get.go
+++ b/api/v1/server/handlers/workers/get.go
@@ -27,21 +27,16 @@ func (t *WorkerService) workerGetV1(ctx echo.Context, tenant *sqlcv1.Tenant, req
 		return nil, err
 	}
 
-	slotState, err := t.config.V1.Workers().ListWorkerState(
+	workerIdToActions, err := t.config.V1.Workers().GetWorkerActionsByWorkerId(
 		worker.Worker.TenantId,
-		worker.Worker.ID,
-		int(worker.Worker.MaxRuns),
+		[]uuid.UUID{worker.Worker.ID},
 	)
 
 	if err != nil {
 		return nil, err
 	}
 
-	workerIdToActions, err := t.config.V1.Workers().GetWorkerActionsByWorkerId(
-		worker.Worker.TenantId,
-		[]uuid.UUID{worker.Worker.ID},
-	)
-
+	workerSlotConfig, err := buildWorkerSlotConfig(ctx.Request().Context(), t.config.V1.Workers(), worker.Worker.TenantId, []uuid.UUID{worker.Worker.ID})
 	if err != nil {
 		return nil, err
 	}
@@ -59,12 +54,11 @@ func (t *WorkerService) workerGetV1(ctx echo.Context, tenant *sqlcv1.Tenant, req
 
 	respStepRuns := make([]gen.RecentStepRuns, 0)
 
-	slots := int(worker.RemainingSlots)
+	slotConfig := workerSlotConfig[worker.Worker.ID]
 
-	workerResp := *transformersv1.ToWorkerSqlc(&worker.Worker, &slots, &worker.WebhookUrl.String, actions, &workerWorkflows)
+	workerResp := *transformersv1.ToWorkerSqlc(&worker.Worker, slotConfig, &worker.WebhookUrl.String, actions, &workerWorkflows)
 
 	workerResp.RecentStepRuns = &respStepRuns
-	workerResp.Slots = transformersv1.ToSlotState(slotState, slots)
 
 	affinity, err := t.config.V1.Workers().ListWorkerLabels(
 		worker.Worker.TenantId,
diff --git a/api/v1/server/handlers/workers/list.go b/api/v1/server/handlers/workers/list.go
index b503d38f8d..dc882fcea7 100644
--- a/api/v1/server/handlers/workers/list.go
+++ b/api/v1/server/handlers/workers/list.go
@@ -58,12 +58,21 @@ func (t *WorkerService) workerListV0(ctx echo.Context, tenant *sqlcv1.Tenant, re
 	)
 
 	rows := make([]gen.Worker, len(workers))
+	workerIds := make([]uuid.UUID, 0, len(workers))
+	for _, worker := range workers {
+		workerIds = append(workerIds, worker.Worker.ID)
+	}
+
+	workerSlotConfig, err := buildWorkerSlotConfig(reqCtx, t.config.V1.Workers(), tenantId, workerIds)
+	if err != nil {
+		listSpan.RecordError(err)
+		return nil, err
+	}
 
 	for i, worker := range workers {
 		workerCp := worker
-		slots := int(worker.RemainingSlots)
-
-		rows[i] = *transformers.ToWorkerSqlc(&workerCp.Worker, &slots, &workerCp.WebhookUrl.String, nil)
+		slotConfig := workerSlotConfig[workerCp.Worker.ID]
+		rows[i] = *transformers.ToWorkerSqlc(&workerCp.Worker, slotConfig, &workerCp.WebhookUrl.String, nil)
 	}
 
 	return gen.WorkerList200JSONResponse(
@@ -129,6 +138,12 @@ func (t *WorkerService) workerListV1(ctx echo.Context, tenant *sqlcv1.Tenant, re
 		return nil, err
 	}
 
+	workerSlotConfig, err := buildWorkerSlotConfig(listCtx, t.config.V1.Workers(), tenant.ID, workerIds)
+	if err != nil {
+		actionsSpan.RecordError(err)
+		return nil, err
+	}
+
 	telemetry.WithAttributes(actionsSpan,
 		telemetry.AttributeKV{Key: "worker_actions.mappings.count", Value: len(workerIdToActionIds)},
 	)
@@ -137,10 +152,10 @@ func (t *WorkerService) workerListV1(ctx echo.Context, tenant *sqlcv1.Tenant, re
 
 	for i, worker := range workers {
 		workerCp := worker
-		slots := int(worker.RemainingSlots)
 		actions := workerIdToActionIds[workerCp.Worker.ID.String()]
+		slotConfig := workerSlotConfig[workerCp.Worker.ID]
 
-		rows[i] = *transformersv1.ToWorkerSqlc(&workerCp.Worker, &slots, &workerCp.WebhookUrl.String, actions, nil)
+		rows[i] = *transformersv1.ToWorkerSqlc(&workerCp.Worker, slotConfig, &workerCp.WebhookUrl.String, actions, nil)
 	}
 
 	return gen.WorkerList200JSONResponse(
diff --git a/api/v1/server/handlers/workers/slot_capacities.go b/api/v1/server/handlers/workers/slot_capacities.go
new file mode 100644
index 0000000000..5f446e73c7
--- /dev/null
+++ b/api/v1/server/handlers/workers/slot_capacities.go
@@ -0,0 +1,63 @@
+package workers
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/google/uuid"
+
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
+)
+
+type slotAvailabilityRepository interface {
+	ListWorkerSlotConfigs(tenantId uuid.UUID, workerIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error)
+	ListAvailableSlotsForWorkers(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID, slotType string) (map[uuid.UUID]int32, error)
+}
+
+func buildWorkerSlotConfig(ctx context.Context, repo slotAvailabilityRepository, tenantId uuid.UUID, workerIds []uuid.UUID) (map[uuid.UUID]map[string]gen.WorkerSlotConfig, error) {
+	if len(workerIds) == 0 {
+		return map[uuid.UUID]map[string]gen.WorkerSlotConfig{}, nil
+	}
+
+	slotConfigByWorker, err := repo.ListWorkerSlotConfigs(tenantId, workerIds)
+	if err != nil {
+		return nil, fmt.Errorf("could not list worker slot config: %w", err)
+	}
+
+	slotTypes := make(map[string]struct{})
+	for _, config := range slotConfigByWorker {
+		for slotType := range config {
+			slotTypes[slotType] = struct{}{}
+		}
+	}
+
+	availableBySlotType := make(map[string]map[uuid.UUID]int32, len(slotTypes))
+	for slotType := range slotTypes {
+		available, err := repo.ListAvailableSlotsForWorkers(ctx, tenantId, workerIds, slotType)
+		if err != nil {
+			return nil, fmt.Errorf("could not list available slots for slot type %s: %w", slotType, err)
+		}
+		availableBySlotType[slotType] = available
+	}
+
+	result := make(map[uuid.UUID]map[string]gen.WorkerSlotConfig, len(slotConfigByWorker))
+	for workerId, config := range slotConfigByWorker {
+		workerSlots := make(map[string]gen.WorkerSlotConfig, len(config))
+		for slotType, limit := range config {
+			available := 0
+			if slotAvailability, ok := availableBySlotType[slotType]; ok {
+				if value, ok := slotAvailability[workerId]; ok {
+					available = int(value)
+				}
+			}
+
+			workerSlots[slotType] = gen.WorkerSlotConfig{
+				Available: &available,
+				Limit:     int(limit),
+			}
+		}
+		result[workerId] = workerSlots
+	}
+
+	return result, nil
+}
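Because `buildWorkerSlotConfig` depends only on the small `slotAvailabilityRepository` interface, it can be exercised without a database. A hypothetical test sketch in the same package; the fake repository and test data are illustrative, not part of the change:

```go
package workers

import (
	"context"
	"testing"

	"github.com/google/uuid"
)

// fakeSlotRepo satisfies slotAvailabilityRepository with canned data.
type fakeSlotRepo struct {
	configs   map[uuid.UUID]map[string]int32
	available map[string]map[uuid.UUID]int32
}

func (f *fakeSlotRepo) ListWorkerSlotConfigs(tenantId uuid.UUID, workerIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error) {
	return f.configs, nil
}

func (f *fakeSlotRepo) ListAvailableSlotsForWorkers(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID, slotType string) (map[uuid.UUID]int32, error) {
	return f.available[slotType], nil
}

func TestBuildWorkerSlotConfig(t *testing.T) {
	workerId := uuid.New()
	repo := &fakeSlotRepo{
		configs:   map[uuid.UUID]map[string]int32{workerId: {"default": 10}},
		available: map[string]map[uuid.UUID]int32{"default": {workerId: 7}},
	}

	got, err := buildWorkerSlotConfig(context.Background(), repo, uuid.New(), []uuid.UUID{workerId})
	if err != nil {
		t.Fatal(err)
	}

	cfg := got[workerId]["default"]
	if cfg.Limit != 10 || cfg.Available == nil || *cfg.Available != 7 {
		t.Fatalf("unexpected slot config: %+v", cfg)
	}
}
```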
diff --git a/api/v1/server/handlers/workers/update.go b/api/v1/server/handlers/workers/update.go
index c7cd860aeb..7402cb7028 100644
--- a/api/v1/server/handlers/workers/update.go
+++ b/api/v1/server/handlers/workers/update.go
@@ -1,6 +1,7 @@
 package workers
 
 import (
+	"github.com/google/uuid"
 	"github.com/labstack/echo/v4"
 
 	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
@@ -36,5 +37,12 @@ func (t *WorkerService) WorkerUpdate(ctx echo.Context, request gen.WorkerUpdateR
 		return nil, err
 	}
 
-	return gen.WorkerUpdate200JSONResponse(*transformers.ToWorkerSqlc(updatedWorker, nil, nil, nil)), nil
+	workerSlotConfig, err := buildWorkerSlotConfig(ctx.Request().Context(), t.config.V1.Workers(), worker.Worker.TenantId, []uuid.UUID{updatedWorker.ID})
+	if err != nil {
+		return nil, err
+	}
+
+	slotConfig := workerSlotConfig[updatedWorker.ID]
+
+	return gen.WorkerUpdate200JSONResponse(*transformers.ToWorkerSqlc(updatedWorker, slotConfig, nil, nil)), nil
 }
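Consumers that previously compared `availableRuns` against `maxRuns` now derive capacity per slot type from the generated `Worker.SlotConfig` map (shown in the generated code below). A self-contained sketch of that check, with the type mirrored locally for illustration:

```go
package main

import "fmt"

// Local mirror of gen.WorkerSlotConfig, for illustration only.
type workerSlotConfig struct {
	Available *int
	Limit     int
}

// hasCapacity reports whether the worker advertises the slot type and has
// at least one free unit; a nil Available is treated as unknown/zero.
func hasCapacity(cfg map[string]workerSlotConfig, slotType string) bool {
	c, ok := cfg[slotType]
	if !ok {
		return false
	}
	return c.Available != nil && *c.Available > 0
}

func main() {
	one := 1
	cfg := map[string]workerSlotConfig{"default": {Available: &one, Limit: 10}}
	fmt.Println(hasCapacity(cfg, "default"), hasCapacity(cfg, "gpu"))
}
```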
diff --git a/api/v1/server/oas/gen/openapi.gen.go b/api/v1/server/oas/gen/openapi.gen.go
index f06cd77d37..116443d0b2 100644
--- a/api/v1/server/oas/gen/openapi.gen.go
+++ b/api/v1/server/oas/gen/openapi.gen.go
@@ -997,15 +997,21 @@ type SlackWebhook struct {
 
 // Step defines model for Step.
 type Step struct {
-	Action   string          `json:"action"`
-	Children *[]string       `json:"children,omitempty"`
-	JobId    string          `json:"jobId"`
-	Metadata APIResourceMeta `json:"metadata"`
-	Parents  *[]string       `json:"parents,omitempty"`
+	Action   string    `json:"action"`
+	Children *[]string `json:"children,omitempty"`
+
+	// IsDurable Whether the step is durable.
+	IsDurable *bool           `json:"isDurable,omitempty"`
+	JobId     string          `json:"jobId"`
+	Metadata  APIResourceMeta `json:"metadata"`
+	Parents   *[]string       `json:"parents,omitempty"`
 
 	// ReadableId The readable id of the step.
 	ReadableId string `json:"readableId"`
-	TenantId   string `json:"tenantId"`
+
+	// SlotRequests Slot requests for the step (slot_type -> units).
+	SlotRequests *map[string]int `json:"slotRequests,omitempty"`
+	TenantId     string          `json:"tenantId"`
 
 	// Timeout The timeout of the step.
 	Timeout *string `json:"timeout,omitempty"`
@@ -2076,9 +2082,6 @@ type Worker struct {
 	// Actions The actions this worker can perform.
 	Actions *[]string `json:"actions,omitempty"`
 
-	// AvailableRuns The number of runs this worker can execute concurrently.
-	AvailableRuns *int `json:"availableRuns,omitempty"`
-
 	// DispatcherId the id of the assigned dispatcher, in UUID format
 	DispatcherId *openapi_types.UUID `json:"dispatcherId,omitempty"`
 
@@ -2089,11 +2092,8 @@ type Worker struct {
 	LastHeartbeatAt *time.Time `json:"lastHeartbeatAt,omitempty"`
 
 	// LastListenerEstablished The time this worker last sent a heartbeat.
-	LastListenerEstablished *time.Time `json:"lastListenerEstablished,omitempty"`
-
-	// MaxRuns The maximum number of runs this worker can execute concurrently.
-	MaxRuns  *int            `json:"maxRuns,omitempty"`
-	Metadata APIResourceMeta `json:"metadata"`
+	LastListenerEstablished *time.Time      `json:"lastListenerEstablished,omitempty"`
+	Metadata                APIResourceMeta `json:"metadata"`
 
 	// Name The name of the worker.
 	Name string `json:"name"`
@@ -2105,6 +2105,9 @@ type Worker struct {
 	RegisteredWorkflows *[]RegisteredWorkflow `json:"registeredWorkflows,omitempty"`
 	RuntimeInfo         *WorkerRuntimeInfo    `json:"runtimeInfo,omitempty"`
 
+	// SlotConfig Slot availability and limits for this worker (slot_type -> { available, limit }).
+	SlotConfig *map[string]WorkerSlotConfig `json:"slotConfig,omitempty"`
+
 	// Slots The semaphore slot state for the worker.
 	Slots *[]SemaphoreSlots `json:"slots,omitempty"`
 
@@ -2150,6 +2153,15 @@ type WorkerRuntimeInfo struct {
 // WorkerRuntimeSDKs defines model for WorkerRuntimeSDKs.
 type WorkerRuntimeSDKs string
 
+// WorkerSlotConfig Slot availability and limits for a slot type.
+type WorkerSlotConfig struct {
+	// Available The number of available units for this slot type.
+	Available *int `json:"available,omitempty"`
+
+	// Limit The maximum number of units for this slot type.
+	Limit int `json:"limit"`
+}
+
 // WorkerType defines model for WorkerType.
 type WorkerType string
 
@@ -16101,7 +16113,7 @@ func (sh *strictHandler) WorkflowVersionGet(ctx echo.Context, workflow openapi_t
 
 // Base64 encoded, gzipped, json marshaled Swagger object
 var swaggerSpec = []string{
-	// (previous base64-gzipped Swagger payload: several hundred machine-generated string lines, elided)
+	// (regenerated base64-gzipped Swagger payload: several hundred machine-generated string lines, elided)
"SLVNSQw4B9gJIwI9R5gm0kH0c6ybCW/r2eZNueTWTkaXZUNomD6wNqc9gysb/auRlArZLneb5nJLeTzM", + "2S61a94DNUO/F7qsoLOwx4m2M2KPgc/5VaFgJjKy490JCZ5gbvAtQnSXWVg7A6Z6fN6LT4OdJ1Z2gkXo", + "OyCGDoiiOATuHAUzXn+CIbhqfpmtkxMJC9ZZEQq+ZFnoowwPi+6pxIViU3wHkJ/E0AIU5iqtApJLHs9y", + "Ienn9AHmSzU/fWZxgCAQO8ueP4vph6sjfsA3SWTvmLVNHNDa0D7nXjZxAJHhaoKqNvv8ZZYEWoDNcmGQ", + "P4iknuiHLvBZcNgj9MOIfWYxx17iFiq9KeqdklF3e6l0n9PqJZUPwbJ2jahbtst6Lqvl6617FxQsanrV", + "lJ/NWOMtqt412Qi5tPfGU7zmKJKJhrO9UpMHGqmR087eHE6ClJudSXxPy/C/GEHZ56mkrFfX+hbDmPe4", + "SaY+cqtIgY1XkXJahXlvNl3s3yqbPhL7JKXo9ZcrZjHoX3waXnW6nU+DT28HowrZWZ2HoP5y1uQuVoWJ", + "HByKsWzVq3FxvGI8VooASfnF6jyp4WUwuhtfXk863c7gM7dZTPrjj3ej2ytmE7m+UmJPWP6U8+tPw6v3", + "d18Gbz9cX3+swH1Oi9IpkiBeVET2s+/CX10roHkOAhI6TyBmufJK6hXvrY+Ub5b0QJ/vYDMpDPjY5iXq", + "4V8vD1tKE/Xsm1KQXQKDug1rnrdgAQmMZfYCeY7ysZxf0BE8ck4dDyy7zqnzBOED/e8iDMj81xV9dFL0", + "aLMZmMWuRNRN6CNXkwuVa/xVl+C0LCBvqlEaGojdPPvVObgK4MyrExZQW4FqFEhKqQMpjz6fdLqdz6d6", + "UcJ9QncQcGiMYeXOzk2q+VQkzX9OBxxrgjLMVVLW9GOvdmHnAP2MlUvUldekFNhI0RCj5qYCIvq/PCBm", + "VjtoS3FraHpJQ9MWDUBbqVvXwJC/sh3ewIVfmM+TORsDvgEJ1iU+U9mEO045CDsRa+2AwHNcEAQhcQCr", + "S8sK3suk3aUDSwcd1t3Ha+1RwPNiiLFql8pp0dLQUTZP0Q8fAJ7rjps5wHN1yP+DC9OJA4grorxe/JiX", + "XnfO56watH7CzzBG96gOvcy6RmXQo2hOf0VxHgY9J8wBvgEYP4Wx7RzAiUQHB0Ni4K9tvGR5CEc+WOYY", + "Qe5fY0NWHrtfDQR2PgfBDEoEGZkggE9mJDLehU8Z1qRGrYd9Bb1DjszWHVUCkgJRib/1YCillxVfujk8", + "mVB+Gc5QsHrpttX4e61KbnuHcbnGqA7XMqnXQaHb7oQ0CIY93C1ZvN1201S1Gs9RhA/VyFoyOu/wNN/G", + "KcMn023b59PzweUFnCazTReS7Qp9FKNF4gMCcZZpg72WuWHie84UsgdSrn2AQJRoCmMH5DRmXUhPXfXz", + "88GlUvWc3Q8egZ9Q6te6Y/sExjdg6YfAwIEiG0jE25TXB+Qnqn04YUB/iOEjChPcE+7FYoxOVfKg8sTs", + "U3k+UgoPFbmYqk03uQrhwo5TQxmVgewGLqCfZEYyB8lavGwDWMltXpxIsxOZ+7ouBg0nfhqLVdjhbPQu", + "nZBV3MH4PvG1iqBdjEgZCzJcpORgbgyWMI5hiFOm33JLTNfFKg1yqyDzkhyPK7O0fz49Z5EQE4AfKmqk", + "ExgHwBf5AozmKtHMGV5gSYouCJwY3ovLN+IKOcAPlH9zhKl2Vu1cG85FYpcU5vMpxYdM//Ks3zAZQUKb", + "Yl32DWx6reDoYmhIl408zIXeE4xhVsxra6h45otgMocvtKpEfqUUVfhL3g6KMkzVYCrEpxSOpmGUqLC6", + "Mtza9xM+3pFzS2/xdBKcTDF31KIo95jiI1phBxBVGtnlIavMH7tu1i9DBkoeicUQkjvyDIKGbbkI4lf2", + "PAzg9X3nzV+1wk7T/y3AyO0nZN557q7Sv38z5HUGV+n84VP/vPP81bg4MTgzuvrrLBEyAAuaD110rTQR", + "Q3FIBJ5Y18nSRMUsajm8d2grGBDkCioMmSVGMoiI6VeEfv9mePdx8A+NsC8mVZbTc0g01GJGKUOGPofu", + "R7gcNNa61CVx9e4BLo+cCfOWwg4zupGQV2OB+VbOfRwuVFxIIXK0RgrmFKtlT2Nm7pEeS1a7Ps66mMpW", + "Zy26OizaM3LGiHtA7qpU2BK1v+2Ph+fbpXUmXvYAmxSO7SKTrXRjuLwAs3MldUYxVYwmqUa9RpZWBS4r", + "dh6Y2aaW1/DST1Qp2koBC+8zAFRVfgodECydP8fXVz0MYwR89B/2oMZXdrSSqlYxmZT84l4dxo4LCJyF", + "MfqPWsS0LKYhDKrSMmECFpF4/ktPE+6KDQN7x6T9qrotXBRY8mRT3VflliUnY6+N2dUjHcWZLgszWnIq", + "Y6aJAoy29CP/joKZkG9XTY5m4U2dgprBye71IIp85BYy6qxTnlwsaq0C5dp5v2biZw8soVIQGm6L5Y01", + "JF2tp3DB0KVtZOSo2cPaXGwWedIU4k8ZLZ06ToKjLd3PzHVJzGT1g9QBb6t1VySciNPqZf+WNc2y2dM9", + "UXKVCGlhSmrdwE5TZR3RcRfCF9D1QQyIyOVifmoXnI2w42VdnF9InMBf6QEexeEsBosFIMh1frkHPoa/", + "bvoZ3qjjKMqaVHWYwlbGx2EYnzahUFRsewPbVt3YmxSs1enlTeawjCxUNtqLUzdLNF6fU/bzqbGYLSAE", + "LiKD2is+KhKsWMtWk0hpJ9VxfVlqthpJxbKwL1dUt5gkSfcIReKlw/Kr2GC6eZXeAjrWqNObjbQPnFBZ", + "UTf9XFVCsD8+73Q7F4PxuWG5vIpU++DV9MGL4207712xGHvLz10UdJOpp7nspAvSy032sv2pIuMVu6pK", + "C179xgzS5itm2LLO91apTUMSI4jrl0+/XHBPFGNtGtrGymDHc1oxg02zVFryGtos6zBvwoFTp1b3LMO1", + "/lKXbtleiNSM6Ou4QhLkltNmNcyTJcfK5ccq5sTSJ9Qq5skaD64mdxN1Meka7vgJWUrqdT4a9CeFQmIf", + "hzc3/OP17SXFzuRuPLi6UEbWnzyKjLU0NdtncMEo4PGITRLow6Z0lCVCLZWUCAjyV6neVV2ColmRCY4E", + "M1PehCggPPauvAOCFrWyNUs4pg9pRgu4alwZb6TJaGa1DM1BzB2gmu6sihrLewhToZLAhE+3MoeolWOV", + "SnJ6Z6qqJIYFCJtiJFuahtxzsCkiMxUSWbK68+tPN5eDSSlHXUXqvfxr12rFOZTLf/6gzqZZ93mLaXTC", + 
"cFrC/kYVKvW90KxhylZsIGz/YFHztFhzC87ek1KcPAEsnBUaRLl7eY3JzrlXswXKiElWNVYznPhaHKrr", + "oMBZIN9HGLph4GE7HbfOv7Mwi/NLGogOCMSE/vZrfVF0K/TT4WU3e/zXeddWoFxQvfAVlz9GMAAROroK", + "g6vE98HUh3+OWTaItFUPLaIwZpMKB/Ny4wjQK05nhsg8mR654eJ4Dog7h6TnwUf59zGI0PHj6TGG8SOM", + "j0PAzuhvvUCM1XnDDK1rBjcli3EEngLonVeyo2Ij583LjFmVkbo8IP/WkIIOaE94on2mhqeGB+snLN45", + "lZ21CtQW7nsWBZ80HLqlok9FRTXLwm8o+FQ+KNe1PKy2kRuc3eJBoPLyPgwwjJsfeUh0a+pAYft+caTW", + "WN1pTf5aHy2Z10LYaOT15jwM7tFMm0WjumTqWvWMVyC+QiSNNTi5osDlmUQ8t2aidcpBqfZxVWvqcuuN", + "jHHRnFfpOdPNLhAFdlWtP3lWyNeh4oag3KOTfgu+FhX67VqFqg2wm9KKS2GyKfACEvOFbIIW4ul+ixZY", + "D0ZkbtB76aecMoG4F9gTIDC+B76vH3JniujaVb22o0k0FJzcp6Ehsugpwjvao+tnU2g01vUN3BVbpeUH", + "UlpWc4ZTdYC1yjVy4Vs4Yi9yB/Uqh+7XwhHykucopSaW3rvRcSqOvo2dpjtL7dbtRDEKZQUQjd+4+Goi", + "JX01MlWfrXH7Fa3r49hz43aVJHSfT3lKoDbYcWV/M/0zgMi0VAooPLDgsGIIvG0YlrEcshqkYxceKDs8", + "dw8hlm7rBT+2F4a3GcdJc0FlK39Fy+A/Jdrrq0pwSjBpmfQi9NEUYNS/GbK9VkglHyimw/ccAg/GdrKa", + "ty2Sopi2FlfKTF25jkrG6ytslg8L7KbBsF1TbJsyTi50sqhSWGXdoUud0lEYQnVoTDBVeExIlF9rByqg", + "LB21Jv9OPpTQn9ETe75Q8Tb+0D/tdOl/zl7/zv94fXrW6XY+Xbyuxl4anajJdKlMZB/pmPZiSRbd0LOo", + "qZUbYSA7MeeIWQBIEsMPa9MxHdpJx9PKJjQLWAEYN4aGqwhm3xgbSnlMe1lNUAzHTBGl4Em/4iJotTQy", + "UPCeBokO/j9WUm08YCEO/I/b0WU1eeyFI5Q8qS3dG8rnhoKG94OrwYjJmPfDyYfbt8zBaTS8GTDfpP75", + "x063czm8GvRNvkCK0r75uNPKh/nmz9nSMtM+abdP2j/Wk3b76ly2Fa9pe9pv2+nBmO4aPgvWvMNpjHzi", + "aW4tQx/ycla+7G6Tf5XLPZKlD3CqaUY5DS8gkfnQC36NSWD/ECuizvEc1F/A1fBb2v5dGGvgkTZylnTA", + "JtKBNczSbeQfWNf33ebg4M1l0Kh9sy6Hr3ZyOJHolpCVtzavDuS316sJGNhCYTp1yipgX8rQrGpHDSzN", + "Boxvyur8RffKLVFkXsyO4p0KvhhqYFz/vajNpFXJhbrPM8ZvtFZaI2uXyDavVX6T2JBPVvZNYr+RNUpY", + "Dei4ur3OoYTnNTJnEd/UIrHdvZnKVZGqmJ5izvDeCULiRHH4iDzodR3gxCDwwoXs9IR835lCZwYDGMtr", + "jEpdZ1vDeHM0e/tJgKvtza5JOYWzFtlUaplT1+7UPJEXP1YmilwXI2OKS/sdMOwbewUCgZeVaov5UKtd", + "+ReQzEOv0WoF6J94z1S3Pw89A9V+mExuZBJcN/RSCo4F8u1DrO8Aj7FmM+cm/mqJ8GoSEqisOeclzcvW", + "1rmWtBSwMu18Srcus3JNOt3OzfWY/ed2wrQk0wnJ405wVVAKFg8nvKCKCwIngjGlq6NG1cqpXsRuu9pE", + "Q5QSUJrCDGCMZgH0nKwTswbd3g4vHEHSu7/l+WAKfVxdB5C1YWSeewbnotmOPLiQo+Po0OgDTD5AEJMp", + "BKTqvp7bNVbWkSVkB85c9s7flM9Ozs56p2e901eT09dvTn5/89sfR3/88cer13/0Tl6/OTmxzwwBOIPR", + "I3uACZj6zAC2h5Bu/3Q2n8oxdGFaXhCbElLQNtzhnZeXCuNVSGqUn0tDVbEoy5GV5cO1yWOwk/VywkDd", + "xQaQFefVQpcEdAuHwX1oxz0jpQM9mvyQZBfkVQrP8mHH2TjMW1VFDv3mgEeAfDBFPiJLdjznKlpmRP4L", + "heiOpfzs/TM5OXkFne+ysw+7ojbq86/61Ix+aDqbMFyAaB7G0KGNhBhakWjGcqwxm08XvWyd7T6bOk34", + "cT4ZfuaVddM/b/q3Y0NspY1DP9+j1Jmfn5XGJEji9ObnSQHIegMe731bpw/fji41wzdVj1l7rWqjHBWl", + "k70yWalMb0O7btpPpKJaLq+SWzN5dW7GCjy8/HOl8SKQAjnKi7JCqVwQzBLxjGUt5MYXHzE/dnlnpdJs", + "OaOIXlUT8nXwjcRA2wB7D+ZhS4tjEKkK6fVlnwVV3/xj8oE9ikz+cTMYn4+GNxO9VacobUvEVCttAZeA", + "dGhKYgWlV0raOt/ztKGTBDk5nhtcU2Vb1v7WVndEi2ShTNJkaF354gqWKFrTxoPLdx+uxzys/VP/qs/T", + "ZVQVNf+ilGwv2H7VteljNtJfLFxCuw0KO3INRJZ21BcE/DucGs4m+kUHkBWL/xlOdWfhTlRJI+ZkGTCN", + "fg1mq681NcoC7Y2u+l1OOOFll7rKFYiHrWaiVnlDk8isNJZrjtbUTd3AQ+JthV/vdOXNZ5Ao31nxaI3b", + "RyATPPB8YzNIeJknN+vqzGjfVF1QDPZHxvL6YxIDAme1mZAVCC9z/XhhWvMNpCytUohJvvRtMZfqq7N6", + "8SWnLq6mq8Vq1RYNL3Qp31IAhxdaHMreH1GQs6C8u706nwzZSXVxO+q/vaTa6UX/faWApINIFaQRBbPZ", + "Newlv+v1mrXizXasEunvdc8V+2nMk8OY5COsCh0jIQG+jmJTHnuAS4MzkRyekqVddJq8lgMHR9BF98jN", + "JnF+iQDG0HMeERA++7/qucKIiAaeZvp7LYkTqBm/7uFWddlKLS+nJycnRhcs7TB5p6mG/k+NFvR3OJVi", + "zPYcNyS5XzuSk5+Iu7ZO8rmFkedlQMh5EW3SI0h19tC6BZnLKrxdNhh8ovQq++k0VEmMnj7r5EnOBlJ9", + "eBSwv1YLkz25JCvePvaHwigJ1sghWx7lHYJ+7txXUxRktJyTYopkrJlkLL2YWtndyu5Wdr+U7DbM8QOK", + "9go3yBVEMxttSODC7FhpuK/UdzZWFhuztE/VyUXXdDXLMkttPGHUBgY0yPRi+tFiHL5YVLeESGXUOuop", + 
"ZcW8GVxd8GSYWVpMTcbTfH7MNJXm2/75x+t372pPSTbtSvfmvEAxE+MkL06KjjZhcKNI/hKstMHYnUMv", + "8Suyghs6r30cfSnmhLAUMDWbjXkdZKP7US4VxRbZsar2Eq5dhNFIwLLLNqEjOdQ571inhRaal+bPGEKb", + "SLcqZ7FkOu1HwVzab5JHm2dCrlrsBMx06PVN1eGbmvyDDSeSEGZdDmEV/QihcB7Ti8y9Xi5oWZrz5R0y", + "cGPdhMzrXjsjkyN34tV209Ni/QqbawYFvGkkL0xjLVYZOMXPZpV7rm7p0ZdpYHfiFaI5mnk6DaM83eTL", + "VhUYijZbZNncE4bNhqivHszb5R4kPrmpzCgjGhkzy1g9Eohb5J+YH7wLQ7mfP8fXVw4Huhyvw0bQes/I", + "Z8EXeuwLY4+7YVqgAQu1Y4IWMDQUAsEEuQ9Lkw8O/eZg8axi95KoyIsGbMt0sMfTwkuZFY6VPmOe6EaH", + "8seMss0JK20W+KS8Z9u+WzRODGp9DZTLkoSRG+hrPaczstrk21AT+tyLPdkVwrlDRfYoVKigGkPI/VWM", + "5RMW4FtNi6dmyr6phgIP+Uio/GXyk0M4hSCGscz2wTDKjhX2c7Ypc0Iidu0JwwcEZXNEd5X/JN/O33RE", + "7HLWVyR+ob0TTMKF5WTPTOJzfyhN2ACfxenfDFlpH8JsYvlfU0LsnB6dHJ0wOubR2503nVdHp0cnIhCb", + "YYIFW/uiJOZMFxnzXj7P01YBxNhJ7TF004Es5NC5FN/fMzTISAY2y9nJSXngDxD4ZM5Q9Jp/d8OAiNJW", + "onYubXr8N+Z8hdMDsIaPB3EcUin8XHJMvQpJuo4ccXTe/PW128GyXgVdddZQ+pT8JWB259B96Hyl/Rn+", + "Ygi8ZT0CaTNUhcGRbLDvKGQLdkjoANeFEXFIDO7vkVuL0RQDtSh9PD0GPhUpwawHFwD5PfaQjI+/s5/V", + "3545XnxINLenC/Y7dkCaB4t2d1h3/jZd2oU+bTGgDZirBR+B8UwMFpAwfeCvCief0gyOyOncecMTIKRC", + "o7SUjirU+PtAtmPr1R/9WqKn3zSehInrQozvE99fOhylXi6JWAl5z93Ob7uivL6zAD7FAvQcll/Kk/FG", + "HIxXGwdDB8W7MJ4iz4P89pHRN6eTKjKTFC+q6X/tdr71YqFysA+iGH9XQxhf2bWXuJqM0Py6tQ6J8xF+", + "DBJn9PA25PJ4I8TAscM3rYC4NGCtTCaV2CKhk0ic57HxrBf7G1mIdgk62HNigAPaigFLMcCpZXtiQD0g", + "I9Qj4QMM6Kko/2anYRTqchmM4GP4AB0QsFSGrLXw1kpnLIiJCE1oK2nQod1tpEQ6vEEmSFj36riL2fIE", + "nTPofmyixk2oWpAO3diJ2DlJxtlvVZScbnmOgl0/TLxj9YZu1qBLKeLktYcN4qAAExC4sETE5/SzdC8x", + "K9bbxy0DxEmCLOBiXwisRmvnCFbf68XWf1Je2L715BC9MOLOLuJEU/abm8OPv7P/PlftN5VSrNVRaUOZ", + "VZxvZK0k4omBTcoJ+7pTIbS5zRY5lWoOb14v4lGINY4NtmOtbMuRuIKZjLw5iiukGqefr2YKP64Ta2xb", + "UqlWQ/MXqQD72en+gpFwS/v7RfsLuPIZbjy9d3dwi1RrTWgqPRIP5CDfxBFOxzhmdnq+S9i445cI0wuQ", + "7+RamzaYth7mG25tt+lcYseVKRtuvkx9k1vdPhFCuvVsIwqbUN7/3CaHASIhlebH3znHPx9HcTiF5sul", + "fPt0QPbwTEKH2XUZvvJJEMwMn059E2IySoIbNq+9bcp06KWSa8enXgVBwW/QTaRtheH3aKenwlVIWH7+", + "MEb/4TncRTIjHnzNozRLZk4CkA89h9vtHbY9zjshz4fZtuoPjhyZYR+4D8ff2X8srPjOmDZUimrkKYd9", + "FVmh7I32uTGNxMNA3EvrfB4n+6TanO4GjNsgI2E+8evdTMyTjbGcjcD3wyc6ve5FoEi1UvSy36tULE50", + "eY4J8PF3HGArbrkaq1K/zC8BbsAm+cHMjCJO7r1jkwIyWkbZQ0YpEWzKKlfjSkYJsIZNpOKiWJv0qgud", + "V16JSyzS+G3sxfSPrtkQwMsWrWQJUGA4e/06B8TpJnSgKA7pP6DXnmF7xJqmSyQr3OCAKJLUXj7WeJsC", + "PxIw9eGxB2b4OM35brw0YnZrZO0cMgfEmUI/DGZqVoE0vziYla+Un08vACutORHlouvNZTKzd5aghefa", + "Zizz7wTGy4xnPDC7Q171MbetCBEruVOA96UuPtbUu7F63xdgltZJ16Ydq5BDdEr5+sdm/bmthN3O610J", + "P3oLRYvIhwsYkJJuwIwXkg7Sp3OAH7QShjU8/k7/U/O8xEtcTJecb4oChE5gaWrn9ddNhz4FdMdHfr7Q", + "vEEoyFL1KiylWKht2vELxTwamd4YVn92/vyN3322P+tErTVONYX7MOFJmvZERGT8XBIR5jsDsREhx344", + "q9NV/HDm+CiAMvORgKMoUS7D2SUKeCGWQ5QqIssTCUU+3unSIFl4GkYtNCggrORiOejSkDU3JiIndujM", + "IKGoZlg2zIwRtzxqZq5I3WC4N6XlBKymTgKC/A1M3XeovOsR+I04GILYnTtsJgoGz5lVtX7WQSfSq9fK", + "KBg+Qv8X/CudCAWun3jQtL+0Je5otd1qgS9ZgA5gq9x6MrkNBYxFqZgpj32+my7v0k45KK2AK+XUsTpk", + "rbZnD45cVQg1UIhFFGv7bp7XSlPJrxw7l+Fs/VOH/n8vCx02v64qFdqMB09agO0HOHrwA4pMzH9/j+FG", + "zp2tnnTbV6mzvV7BQaa99rZqdU7G6STM+io2a6GY6F3oH3twmszMRvrBI/ATVujJOR9cOvBbFEPMgmrB", + "DKAAZ4XTRGFgDxBwpJGH59C/YFMdikvB5iNaPp+eDy4ZEmoCWBgmMRWFrFAwFRN65O80jkUFX6ZdrBF1", + "UFCPp1lDq9eoL3HTZFZiMYXnzweXZpa34nULvYY/AORFT1rOuMjPzXSbfXyj+5H0G82NVhrzH+ASKxcl", + "47S0XfPrJSMDEXFfd7E8DwOM6FVSkBh7ZApdlnnDc8A9gaL8hLi2b9PYUA3LFN6HMawFZlPmh3d8a0iY", + "gwbErAhd6CImQZ8QmatvccW60Br4srQShp3d8jOZ/bpyafydBSDuHLGnRxfGBKAgC92vWmeajQ+uZCgp", + "VHy3Xly6JWKV0yU97lDs8OdKHcQiYd+Lbst06WQZcjMfcVZFLb2XGGwq5QTC2oVoijnIaR7gssdLMUUA", + 
"xdj5xYNM8FHuWzrA+debf/1aFFuVThB2hi3shhG0koe8pe26WOv14N3uHdX+ftpaoOosUClvWIZtNFDQ", + "jtkxbKml8bPdSlP7CJeHoqxtPYxJ4qIpIzB0t8ygYwZHaI9bYIjvj6e9BoGrzLeAYL1/QZMY1j32KzTB", + "JDF1oMwp9qc9oDYSWoibhBWmlGPFmVzHsTmmRMvaM4qrpK05YV/NCaWyyxYKdO3ts3KK0hWRXcb5nEfr", + "l5podlfAyRRD4rgg8BDLMyPpeqO3h6oVO7cYeoyNOCyEXo/L8AAiba7s7d5QNGOnFw+FtRsIdiliWsme", + "17YkXjLZzvFbpWt1DW8756zUkAOcAD6JgY2imbf9uR9vGAo4OmwecNj7TUrKDivsxK36u3yzEeRRx3qi", + "7JQCcPskvasn6avsFTrH8Cl/prxpz/P2Why7YPG/bcIbQZ2kaJy4c7/UOMGtiMVle3It+stWionDvG1Z", + "igYZy9mKhZcUC7as31UIkx79FaEYqQJvNpjw2Q7ZYpLy80/OxbOQtIe70WKywhlbZLTKNMH1x+aBBzzn", + "js00ye5LMtw2rgB8k1a+ArxA8mFr+SDzDbfy4fBOeQtln/m2L7JidRVqgZCMMhDYiZPAET2r8xZzD4pL", + "hAn3opC18Q5VppUjoRQ01PgnWQC6dnBUPTSbclAq2maZ9TfwuLeOefq0phl6IU8XCjevWsdI+f9gNeWA", + "AWhR5Y62v5Ot71jrrRJblgKBv/ExV6m08m4Wf2tINsAbomB2x2v47QjyvsaB6KH3KHx6LB4JMk+iu0Wl", + "K9HLGrGpYBslgZRozeOmVSna5jjYnwBmtjeL9KCyi7GwP3GjEAXE8txdoCAhkF7H5V8xBA9e+BSkR3GD", + "Y/g9JDd08kM/hNmBJ32DldAdYbDudJUq9WcnZ6e9E/q/ycnJG/a//zXIHdG9f89vIps4IBmkqeewCmpI", + "4VsD2HsUIDyH3ls2eHNwty8bc6S2gnRkfNLKxz2Vj/nd2biUxMcuKwVujkLjpcLTfDQ6eceb/NwPlAwF", + "TFWpKZDEc3yFjiuRttMoMjapDz2eJ6z2ZVI2b5NEtdGyJRlVkAwbl0wxjHywrCruRL9XSibe5KeWTBwF", + "TSRTLJG2S8nEwbQVTLFo3cqlVi6V5FJBLmxQLonUnzbetzK9ep33rcje3rrf7rP7LScXhw5rF7/G2l/R", + "5qsEQwqaGKej2NpbJdFZAyo6VEBaPcmLe7iq7NPAxTVl5PYtPu/jmiImk5sCxWt7uZqKWKSb2Pq5Cj9X", + "gY8mr9ySKV/I01XSSBNX131Mfv5z+7qWM5tb8H4DtYm5u4p/2Pm71sqMA/d4pZPLt0fJwvW+rxlWzMDu", + "1g5ty//Sn7Xl/b1wdall765KbjUurZJ+hU+rUA8NfHvIbq0FBfhH41HprdryqMFdteaYhAE9BXsxILDH", + "bqB0c8XeW3JZnT9r7bF44B6t2+Ww7Xmn/riKu3RRbQXDHinuGnmw+smuv8HfhJjl90CBGy5QMEvpdQEx", + "BrOKE34EXYgeWxnURAYFie+XKD9YOhFY+iHwHBQ4IFg6YrXdDoHfyHHkA1SgtOKU68qQzFPwJqbbTRAd", + "hy9UzBVO/4ZulQ0uh6N74GPYKhaGmmOc6TSstip329zRhb9wL06CuveNfMbA2heOLENg+8qx/zlLscji", + "aPXOsbOMj8wHH8Q+gpjluYZW4G0xIMAHpAkoG6uWsjdO35Z5ag4kUoECkcbQ2WTXgfGW3fu/zCGZcwEg", + "qtM4F/33mJ5eYeAv1d/TmoE6gRT4yzvZoFZJmYahD0FgEc+RKyBpgbMXCu3QlLk0xnhYZPV9sVgP594H", + "M3bUPgm6CGPmfKGSQXq3BIHnhAmhfwrVEVPdkTaQeuCRcwHvQeLzXPf/ovTwLwfdO0mAITvGdcsXM93J", + "QTuVJLSzWnpNX39bh6F9q7mR0yhVRVf+PqK/r/kKpWq4xx7CkQ+WPeYqUaPvirZ0WOFaEd5XKMHVOvAF", + "H4y5XBy0PqyIVpy+YeWQImIlBfoE6syKgCJLX6TU8JbN71oSaEVXK7qaii7JJz3KJ9WSK8ejTHvQJ/vP", + "UttVSK6BGGzoHa7gau+57T33J7nn7uw4y+RCe5r9SKdZ7vTYyckmrtfmkJ8JbyA9SvMX9oqjq3UtPRWo", + "U5BS80ydIwUSCt/NXb9PK1ozJAD5uJmPqUoh7XtT0eWzwEAbYPA8PzN/T+WXmjISeZIDgcccydLzn4Tp", + "VVIUSvpnx2NE8c+OExkeozP6sXQ5y8HAbZsz1tPwAqws72DzGK7AZe0pvseneDH0zZKhuyWCXoHFj0W5", + "uCpOJzzDF0mY4SjP90e1XDyW9ehW5GV1ekVd/zFZW71+tiy9pw5e52HiezyWll4kdZrLHuUlyXFVWhzy", + "RWQNS/RkUV6XheTyAHduqbe/OqQF5q2NXj9PNZpMrGoNID+uRF2pomMrVFs9qSi7CFqgYFavLYl2jaXX", + "e0gmYoqDvftoZZAHIzLn2Up4RjPHnSPfi6HJdYN1aCj9ti9I+Oa0kuTgJUkVf25avMBIyBT55/MxiN05", + "eoR1WpBoJcCk3bUiZExgJNx1+3JgC/EhxzNaTyW8revu6hrZNmWS2Hex51ZSKZ9Qsq0JuvtcTCnXFfIx", + "lYVUjv0V5pfyiW4/lU1Voill4XqZZHMvE2X77eXRQNZXbaXRTyKN7O9arSw6HFmkMP72JZEfzuo8pfxw", + "5vgoKOlGZXP0ZTi7RAG0tQa1Yuhl45l8+Ah9K5ch3jI3cxUzSDqgvd4h6HvG7HGQHrwOm02Bo6KQCevQ", + "FJAx76UNJQEsUCCMvar1s89vl3wtDSe/Vvsa8MCn91AMXRHpXgHFhdJsFUiy/ts9pFRp0BbPXzf9XCqF", + "lbPgMpw1PwaEo1FFWnPmAYGFJ5HBcX/Cfj5XHV827ZjDB+cT1SXo5a5JL+OKwyFs5HwjkPpj0/gKXjcp", + "saWZaYU/TZHIdRSdus7Vmoy5a4x4Ya8k8KbJmNLADjGD8clnN95yL0vxMl1SS+27vW1wYvRCyC8a8Bs/", + "gUtFNGyZLZfNtDr/UsBnQ8Gsmq8OJwvTlrxOOQKaHG5Rml8kV8y0PecO6ZwTfLIC61Wcd8fAp4QRzHpw", + "AZDfm8VhElU+nFLlTt4CBXmxMRw2gCMGKLJunzYZ0BbvaYNDiXTa/kmoQ0zDclPGTWh5J/+aWEGtjc4x", + "66tPea46xvjpQyrUm1sBN3ZnXQnlja52p9tl7xVOQA0NtXytvftpuW2zp+QxhoTUuRZhtnuyiyO7VGcz", + 
"UMgFBbOx6HMgCX13dEwqiFnjjFT3pGUlzbVOg6aN8VGEeiR8gDXJ8Jz+zdDh7aq5ph+hCW3W6pP4mPkV", + "3QwZPvBIzNKQT6R/VGtDLyqPlCI5ahVmSH9cp4xLkFG7HbG3OiJDgKR1RS3cpgmjOGnLXxsOm82YqSGD", + "VR04Ft5SvLJczmXKlHY1c5pp063utXvCA1xaOSfQds3TzzAy+AiXNnlNMphS9+XhBbbNh8llRWMApUv0", + "8GJFELMYtDVS+dhAOEoCHkcpDF8v4urB9vNlHD3Y1Hvg5qHCoTp5VBBLlkEILp1H4CdQn0cIfgOLyIdU", + "ZD/A5ekb1vS006X/OuP/OqPivTrf0KfNphvKlsETl6YZh6rpnDUeHn6moZUi7VrvmsDsc6koLQy565uQ", + "2bgGHaS9AjAEMFzUmIVFYuIXce/hlNDE5gt5j5/du/rsv3cz60jwp1BP4TcXQg8aSjnyvWnA5/UXk+Np", + "4j+Y3eneJr6oYQRxJhNwpVCgfX5iwUCX31A44JeUDri5eGijL/ZMPjA2VYUE3rCUcEHgQr/C7ZZ954YM", + "JXF2TsU1SQ3uVsJH+JkVCoYAe4VCXBhiGPlguXGxESkFob6nloBREgx5cuJtFfGwrjslRBNDGsxylLRC", + "am+F1IhR6nbkEzOjWdpYuW3Ows76ES7bZ73M2LjSbZ0hu72x627sjrD9bpIPxGlgPKc5D+JmR/NIHjE/", + "69HMEbAvR/NmzGocuFar/0kPzO/sv70nROY9+YlZt2vDjwAB/PAMKg2EF4CA95B8QWQ+kWxfKz8k++jF", + "RwnkXb9d/vCnPN20VdIxMKpoT/m8L5uCGWve7WqIvJqfUfCICGwaMCF76Z1Ah+xrq/tK308FHyt5fUps", + "t76eunCIjBa3FAPBJ6ik9fY5S4l64CixC3bguH3RCAcO7iqBDYIwfvbY3rOzHWm9gNi9cxX5VicXYACm", + "PuzFgMAeG5Oyh+C1VfRiIYXkDz3+72cuYnxIYFnYXLDfcWpGshE0vM/Beu/lub4atl6KjkM/+WtlC6eQ", + "fZYtOTbjRJiRq0kXze9jbQR9M044nCj6Q+GE7Qb6r6YVvFiovyXncvgOhnNFCH5jzq06+RZwMWXM1+gG", + "KXvpWfwT+9reICU1KvhY6QYpsd3eIHU3yIwWNxMkKMY7/s7/sFACHSCAcO7jcFEXZMup4cdQBcWyTbDx", + "zzvl3d+2wrur6IA/B9fuUa7aK0Nq2pRJcxvTQF50JSFbpJEqTWIWAT+GDrwXImC7yi/fLjvlV6BjT1Je", + "WUovjR4s9q0VXi8svIxyZQXhVaX1RHG4gGQOE9xbUB3UrS9flHVxRJfUB68uM+VN2vWTmOyHuCgQ+I0c", + "Rz5ABaoojtTkDlDGcsuUL82UlAM0+7KpG8i/E5hAazZkrRtz4P/QXgfEfIcd2XxIwarbt4fkaG+1DBbO", + "I4wxCoNWJu6TTEx3pywRJeesKhOzpz4bV+84fWys8/UeAQIvacM2r8Y+V6fdRA6GWkxuM9NCSmd7kG2h", + "CMuuymrkea1BMIHCzq2fYcEKruImE7fM2+KS/7qqxBU9elHoI3dZn3JSdnB4B5uEk9IV+ob1aNNNHuvQ", + "stqjUWE32sejnWdtxT5wH6oTTY5pE+cJTudh+FB+TmWfv/Cv7XMqzzGp4qTJ7aGA6n1ihx1VPL4NQELm", + "YYz+Az0+8evdTPwJknnosYoewPfDJ321Zb5BTA/kLKCeZ+zjWox4jAmIiZEdx/QrP8eu+wmZO+yyUmTI", + "WyyfbRhA1xShrOchcuarkzMNHlTuYSgTx0oOK3MIPOE14oecYGosnmzDoZvEiCwZftwwfECQDsqKIn1V", + "6YGhND+jJAS6AyvTQV3e3/HVuEiABYEc4FYOCzl8NR6qqGogiYtYbmXx3sniMiOkkvhqvEa64cLAOgZr", + "ozEYAvL8VZlleHM0m5/UOqqiuKstQ+8RQxs5z5KjK09UUaezt4snK1E6/NBerrZvLtAhppnNIK1nnduZ", + "9lFlHx5V0r3Z9DOzrqp6JetmBdSd6ZIzVOH05oR4IHa87r5Wdt+mxBBbtKJ8aCXCzkqhqrT4BHg91DoR", + "oR7q9Ce60atW2a6WE7U5AfuEwEUkkluytor4MAmOQ0sG2EqQKpd4hJmvtBAhnAj8/bsgvPAjXh2j7Iqh", + "Y0g7VuQOY0kWbXmYNW9ZeB+zmcVJILaqxqMdBVHC/CH4465uuc97oam0ucwq5Avb8JcQKNmaKm0BvJlw", + "FqgTLu8hGfNhW9HyctpBsyy9BkuDGK69UOzzhULu0lakBgH4oYcJIDUGQ4AfWDUoYSmssRJOAH4Ys0Ht", + "RcTw4ke0DaaIaMChWly3PLoHZkATG+wiPZLwmuk9hfFDVbKIzAHb6NLUejNlwSQcFV8YUilCqqp6UmSk", + "AS+8oyO3o31u27f3c4X8V09iKAYxsdBP/06e4x+OjR0V49XM7DVKQSi3tuXc/XsoVxlvpcOSUUX1Qxo9", + "IbnwrvaSz86Gn/6wzDDR1rzeSIZqqT3kY/RW966UiOaGoOa1KNTqv5qSFErJ3rYwhVKYQsELrjHo5uor", + "v1yZCh3c1uXsFVtvjmDaS+pelq/I71E5HLjalNRE4HxX/1nnx5LjhNoTWJDpIbu1FFhfD5qKwQNWE8R2", + "rZpZoHVzMcf151+Q6mP6u3maWp2fj9ljZO1jEn+y5AytAn1Uw9dDNnrL3C/P3FkWkxulCCWHcZ13pzyO", + "2Ha3Zu0dmbW/qLgPbPKHZJvUVGXYnMTBcxDBLekRYzZ2K28ORpngG9ZqFD+QRpHGrgifocrIUFGpnbG4", + "76fv41ija1SxPguc5K4sA1nYr5UBGwfwEmDiDC9Ywvo5dHwgd9CUpghgMvSMeYpenenyFO3Ax7ZJQc9S", + "Wb7WJLJ/vjUryBJ7xxs7WYitXiZYSzuN5qdMnObBe5D4pPPmpJsTFbtIoZbO/XqVycc8k9p06bAJ9JOK", + "T+Z8DrtQu9rHns3rW5tMyZiOWRsMdC7jGqaAuPPSY0+VxnQ4wUDb8nJQ3kk4Mmzd9kU0SfmpZNOPPZFi", + "qfmeKn2jJBh6OJd6di0El/PtNjQIiQik9vWoJj0aJ5tdvNzgYzcOg3qNhLZy/g6nGVAkRrNZrfvEeRwG", + "P7WacjD5XdONRR6ddgZJqhIf1aTxNl3ctnDXpTM3Be+qTpXSTskovsl0tEPzqQ4zQ3lFztzp0rkXeXk3", + "lrpXlSLYPn3vdLm9DL6KUrDjHL45ZKyhobfHrkZLL51zW1LX6aF7/J3+pyd/tStzVz6IrR8+KOEceNG7", + 
"dPUmsHIY3X3ZO8v6dNpNbPMDF+vF6dHU7K0iTxBfn7tVj4lrMtchuyftMWdt6ehsj81DMOw3Oqw3Ih/q", + "ykuyWdMZrYXDgdea3C/5sK1qk6qAmHADh5Wtj1IBL+FoY9urUxXUYpCtqlAtBwRbbkMU2Kny7DiwfdBT", + "Xxnr3ZRag9k+G8zYI3IDaxlrv0NT2T7a8SIQU6QZXFcKYPHGX9THjB3Bp0kRo4VNOIlsF66+Nj6LJSJI", + "MLSqtyjbrmLdGrO+ws5kA9wDCjwrqFjDxiB9RIFXD83BG1MJWkAH3FNAS87TTwDLWGZ1CZ2zk7PT3gn9", + "3+Tk5A373/8ajdWse59OoCdeeqz2KBQd22rkFOIpvA9juE2Q37IZNglzBZbvUYDwfHWYZf+d4nlTQG8U", + "09t7HChb4n/ap4Gi7thaOLbiLr2dNwHmIW2Tvx84AjR60OXZX03obxkIccgVqFs1vFXDd6+Gt7plq1u+", + "SAgUXrNiOxNAbWWR+vN9C9XTs3OeguolPj0ea6yGactV7Idj2bm1Iu6zFXF796KUAA7Kc6pVplpl6mCU", + "qWwZmajeiG02BcmKwVMrrQbmrcZIliRMa3XYrFZi0AC2q5ccTxP/oZd5Iuojit4m/oNwatuQokJHPBz/", + "xC35IZR5KkOLbdjRtH5rdltHpHJN5sRzKonFabtWQkgJ8dZqn7cuKbi7So2k4I2cX2Ioe/+6QbFxOM5V", + "OxUbMk1nA7Eh9ml/xYZcU43YEOtoxYZBbNTu8zbFxvf0z14pZ2RtBIQe5IZC48DjIDQ4MFYz0qJ6b0Mj", + "9LvbOjwWYyMMeGrm8WigjZooiY0w4EFXKD4o7tvmgdze9Q89hmLbcqQ6miJ3HdiQZDnwQIu9Fy7bir0o", + "SZcG9VEzMirnfXzZK0uthFSDPX5K5ecAqr/dVl2WNiUr7S5RaQrN5yxzS1UZKwc4AXwy52+xT98i4qEO", + "p+hVfSaR6pyZlaDtSDRybK8aliYqRxs3f6eysVnwrVqrywx/Kxl3Lxn3rtCJEHRVVL6d1FmKLM459ejl", + "sdQNhES213B1ilErhXcpheUOrKCZVqh1e66YqhK4VUxb8WsSv0IhqdOJNy5yefW8nhsmAamJl2BtZC5y", + "WfYRPALkg6kPmfRVxI3evvAeEl6dD5+zGQ9e9NaljD/wkhG5zVrRTMlJhZNP+4JocJjOIWm1QhJ59k8w", + "jPGxm8QxrOZszG8HvKFDu5W49xbD+D0k52KwLdIdnakhnTGI2wLEL1+AGLpJjMiSiXE3DB8Q7CdUdv31", + "lYqqQtKhPLlJcmfbryHjGSLzZHrsAt+fAvfBSM7n4SLyIYGcpq/p/I72PKITcXvUezb0NcXluRy+QOCv", + "Ts5q3l5dMa9XnncOgccOt+8dP+Sbkd+Holh/LiAzhzu5wPwclujDBMRmUTCmX1dDHOvaHGsMnu3jjEHX", + "EGFhOPPhduiNDf2D0xtH34bpLUPcD0dvKHhEBFbXbsIsmklqw7wDU7qtjm86woT1HYq5tniKqxNZObP7", + "CMuNyS+w1Retj1VWk6eAvYzyJpobYo72joHrwoiYLW999h2nFjYxSYna1M3nfTrbsSfxwflEiiHJYACq", + "oD6+ch39tR5TKXlxbJf23p6+YsiqW1RU0qffm9EX79PZVl16OvgG6IuvvKWvSvri2F6BvvxwhgIzWV2G", + "M+ygwAHsbDyqUDAu2UBbcs6gRzAdv56QdneP9sPZDHoOCtrr8wtfn7ud387OdrXuKA4pDTCj7SAgiCyd", + "nvMIfOSxyeimiCYomDlQjmRWeBlh66/y3c63HgzoVL0YENhjNnCqQ/O3Gh0zhwmp4eYwIXbsHCYvb6wS", + "TBbuWaHu1khVo00z6rG1Ty3gYgpjPEdRgzuc0snuHsfPwE9ZN5GUYqsErp+0+YVORVF7qVvlUqdisJ4k", + "I4DxUxhXuFKkudhpB0e2rxKpN3LM7SlJ53MQzNKJ9klbchlkXoqoVpy3SlMzpama1Tnl55lxbX0qhjMq", + "ieOqazdvgStVqtRTalt8L8HYJ46XyGsfGlum38xNSVL5Zi5L2Afuw1YeqcZ05D1+o6qRpA0frR5hjAUI", + "RvcnugbRTrpAYRg/arT0YXAfvofksxh0ozWJFUizDI2nRydHJ7ockIrn0V9p168W5YYnFYsteFtWEPsX", + "6MSQJHGQQ17hpkPFbBIElH/SKb715JC9MOIpp8os8ASn8zB86AlHtOPv4geL8Hd61InWZUc1/rt9ZLsY", + "yOwIlk60Yz8wy1BxCV97sL28caIYnq6SqdH7S7T4asUcxwLPNmYK2VT41ddwjFDcsG2izL3lm834T3Lo", + "ufukQA3FTFXGFYqVtA6IwE66XS177hF7MqtMaYua8mjKm+yP5xrva95K61jNnDOteI47mVb5LGvO+MPx", + "WG7sOypW3NojS07JpYAveUEx+yAztbq+8mMlIdunHdgLWt5WFH/u3DCdFQIDiUTZ7uKgLHlNDcpvOc1Q", + "c3EdZiucJsXgHqtEYM1qsDa4F+1lhEyTJFopgG2A3gtnjhDEqlDMivEx3ToNy54TGqhcP0Og2IrBYS1v", + "vTRvqVFo6zCWjdpnz13N9MC9YLDN64J5ZNjGyoucpDku27VyaCURiuphKw+MCuJ6zFmjJlqVy6OblK+L", + "lzLeY/rSYTwpG5TH2wd+1pSo4AUmNlA/ePXqwXrAZnGYRKzuRwaC3CgjKKzTR7js1KYB2bKQWLMWl3xU", + "astx7aE2sVL9r0aCS6YmMjq3yKwaTZMFrZQjaC8l10TDLkfO8J5Zt3FCqQN6XcZVPiAQk5SnEHbuIXHn", + "0DNVh8oE/54rUoIMVkw89GLphhR4G+UZarMLtdmFtpBdqJFoFrIBW7xq5U5yK7EsfGsOyATzI8jlLUs5", + "6TC1nirYyru9UgEzUlxVBSw6/k0hiGGcOv51ta6AzJOMy4Mk9jtvOp3nr8//fwAAAP//DHrjAewzAwA=", } // GetSwagger returns the content of the embedded swagger specification file diff --git a/api/v1/server/oas/transformers/v1/worker.go b/api/v1/server/oas/transformers/v1/worker.go index 2e2cf1678c..3c4975f557 100644 --- a/api/v1/server/oas/transformers/v1/worker.go +++ b/api/v1/server/oas/transformers/v1/worker.go @@ -59,12 +59,10 @@ func ToWorkerRuntimeInfo(worker 
 	return runtime
 }
 
-func ToWorkerSqlc(worker *sqlcv1.Worker, remainingSlots *int, webhookUrl *string, actions []string, workflows *[]*sqlcv1.Workflow) *gen.Worker {
+func ToWorkerSqlc(worker *sqlcv1.Worker, slotConfig map[string]gen.WorkerSlotConfig, webhookUrl *string, actions []string, workflows *[]*sqlcv1.Workflow) *gen.Worker {
 	dispatcherId := worker.DispatcherId
 
-	maxRuns := int(worker.MaxRuns)
-
 	status := gen.ACTIVE
 
 	if worker.IsPaused {
@@ -75,10 +73,13 @@ func ToWorkerSqlc(worker *sqlcv1.Worker, remainingSlots *int, webhookUrl *string
 		status = gen.INACTIVE
 	}
 
-	var availableRuns int
-
-	if remainingSlots != nil {
-		availableRuns = *remainingSlots
+	var slotConfigInt *map[string]gen.WorkerSlotConfig
+	if len(slotConfig) > 0 {
+		tmp := make(map[string]gen.WorkerSlotConfig, len(slotConfig))
+		for k, v := range slotConfig {
+			tmp[k] = v
+		}
+		slotConfigInt = &tmp
 	}
 
 	res := &gen.Worker{
@@ -87,15 +88,14 @@ func ToWorkerSqlc(worker *sqlcv1.Worker, remainingSlots *int, webhookUrl *string
 			CreatedAt: worker.CreatedAt.Time,
 			UpdatedAt: worker.UpdatedAt.Time,
 		},
-		Name:          worker.Name,
-		Type:          gen.WorkerType(worker.Type),
-		Status:        &status,
-		DispatcherId:  dispatcherId,
-		MaxRuns:       &maxRuns,
-		AvailableRuns: &availableRuns,
-		WebhookUrl:    webhookUrl,
-		RuntimeInfo:   ToWorkerRuntimeInfo(worker),
-		WebhookId:     worker.WebhookId,
+		Name:         worker.Name,
+		Type:         gen.WorkerType(worker.Type),
+		Status:       &status,
+		DispatcherId: dispatcherId,
+		SlotConfig:   slotConfigInt,
+		WebhookUrl:   webhookUrl,
+		RuntimeInfo:  ToWorkerRuntimeInfo(worker),
+		WebhookId:    worker.WebhookId,
 	}
 
 	if !worker.LastHeartbeatAt.Time.IsZero() {
diff --git a/api/v1/server/oas/transformers/worker.go b/api/v1/server/oas/transformers/worker.go
index 4f35f335f2..ffe2c83dc7 100644
--- a/api/v1/server/oas/transformers/worker.go
+++ b/api/v1/server/oas/transformers/worker.go
@@ -55,12 +55,10 @@ func ToWorkerRuntimeInfo(worker *sqlcv1.Worker) *gen.WorkerRuntimeInfo {
 	return runtime
 }
 
-func ToWorkerSqlc(worker *sqlcv1.Worker, remainingSlots *int, webhookUrl *string, actions []string) *gen.Worker {
+func ToWorkerSqlc(worker *sqlcv1.Worker, slotConfig map[string]gen.WorkerSlotConfig, webhookUrl *string, actions []string) *gen.Worker {
 	dispatcherId := worker.DispatcherId
 
-	maxRuns := int(worker.MaxRuns)
-
 	status := gen.ACTIVE
 
 	if worker.IsPaused {
@@ -71,23 +69,25 @@ func ToWorkerSqlc(worker *sqlcv1.Worker, remainingSlots *int, webhookUrl *string
 		status = gen.INACTIVE
 	}
 
-	var availableRuns int
-
-	if remainingSlots != nil {
-		availableRuns = *remainingSlots
+	var slotConfigInt *map[string]gen.WorkerSlotConfig
+	if len(slotConfig) > 0 {
+		tmp := make(map[string]gen.WorkerSlotConfig, len(slotConfig))
+		for k, v := range slotConfig {
+			tmp[k] = v
+		}
+		slotConfigInt = &tmp
 	}
 
 	res := &gen.Worker{
-		Metadata:      *toAPIMetadata(worker.ID, worker.CreatedAt.Time, worker.UpdatedAt.Time),
-		Name:          worker.Name,
-		Type:          gen.WorkerType(worker.Type),
-		Status:        &status,
-		DispatcherId:  dispatcherId,
-		MaxRuns:       &maxRuns,
-		AvailableRuns: &availableRuns,
-		WebhookUrl:    webhookUrl,
-		RuntimeInfo:   ToWorkerRuntimeInfo(worker),
-		WebhookId:     worker.WebhookId,
+		Metadata:     *toAPIMetadata(worker.ID, worker.CreatedAt.Time, worker.UpdatedAt.Time),
+		Name:         worker.Name,
+		Type:         gen.WorkerType(worker.Type),
+		Status:       &status,
+		DispatcherId: dispatcherId,
+		SlotConfig:   slotConfigInt,
+		WebhookUrl:   webhookUrl,
+		RuntimeInfo:  ToWorkerRuntimeInfo(worker),
+		WebhookId:    worker.WebhookId,
 	}
 
 	if !worker.LastHeartbeatAt.Time.IsZero() {
diff --git a/api/v1/server/oas/transformers/workflow.go b/api/v1/server/oas/transformers/workflow.go
index 16f00c4d17..a827b4aec8 100644
--- a/api/v1/server/oas/transformers/workflow.go
+++ b/api/v1/server/oas/transformers/workflow.go
@@ -213,6 +213,7 @@ func ToJob(job *sqlcv1.Job, steps []*sqlcv1.GetStepsForJobsRow) *gen.Job {
 }
 
 func ToStep(step *sqlcv1.Step, parents []uuid.UUID) *gen.Step {
+	isDurable := step.IsDurable
 	res := &gen.Step{
 		Metadata: *toAPIMetadata(
 			step.ID,
@@ -224,6 +225,7 @@ func ToStep(step *sqlcv1.Step, parents []uuid.UUID) *gen.Step {
 		TenantId:   step.TenantId.String(),
 		ReadableId: step.ReadableId.String,
 		Timeout:    &step.Timeout.String,
+		IsDurable:  &isDurable,
 	}
 
 	parentStr := make([]string, 0)
diff --git a/cmd/hatchet-cli/cli/tui/worker_details.go b/cmd/hatchet-cli/cli/tui/worker_details.go
index 709d54bc69..b0cf8ad47d 100644
--- a/cmd/hatchet-cli/cli/tui/worker_details.go
+++ b/cmd/hatchet-cli/cli/tui/worker_details.go
@@ -245,10 +245,18 @@ func (v *WorkerDetailsView) renderWorkerInfo() string {
 	b.WriteString(sectionStyle.Render(labelStyle.Render("Last Heartbeat: ") + lastHeartbeat))
 	b.WriteString("\n\n")
 
-	// Available Run Slots
+	// Available Run Slots - aggregate across all slot types
 	slotsStr := "N/A"
-	if v.worker.AvailableRuns != nil && v.worker.MaxRuns != nil {
-		slotsStr = fmt.Sprintf("%d / %d", *v.worker.AvailableRuns, *v.worker.MaxRuns)
+	if v.worker.SlotConfig != nil && len(*v.worker.SlotConfig) > 0 {
+		totalAvailable := 0
+		totalLimit := 0
+		for _, slotConfig := range *v.worker.SlotConfig {
+			if slotConfig.Available != nil {
+				totalAvailable += *slotConfig.Available
+			}
+			totalLimit += slotConfig.Limit
+		}
+		slotsStr = fmt.Sprintf("%d / %d", totalAvailable, totalLimit)
 	}
 	b.WriteString(sectionStyle.Render(labelStyle.Render("Available Run Slots: ") + slotsStr))
 	b.WriteString("\n\n")
diff --git a/cmd/hatchet-cli/cli/tui/workers.go b/cmd/hatchet-cli/cli/tui/workers.go
index 12a2b72711..6a45594deb 100644
--- a/cmd/hatchet-cli/cli/tui/workers.go
+++ b/cmd/hatchet-cli/cli/tui/workers.go
@@ -512,10 +512,18 @@ func (v *WorkersView) updateTableRows() {
 		// Started At
 		startedAt := formatRelativeTime(worker.Metadata.CreatedAt)
 
-		// Slots
+		// Slots - aggregate across all slot types
 		slots := "N/A"
-		if worker.AvailableRuns != nil && worker.MaxRuns != nil {
-			slots = fmt.Sprintf("%d / %d", *worker.AvailableRuns, *worker.MaxRuns)
+		if worker.SlotConfig != nil && len(*worker.SlotConfig) > 0 {
+			totalAvailable := 0
+			totalLimit := 0
+			for _, slotConfig := range *worker.SlotConfig {
+				if slotConfig.Available != nil {
+					totalAvailable += *slotConfig.Available
+				}
+				totalLimit += slotConfig.Limit
+			}
+			slots = fmt.Sprintf("%d / %d", totalAvailable, totalLimit)
 		}
 
 		// Last Seen
diff --git a/cmd/hatchet-migrate/migrate/migrations/20260203130000_v1_0_75.sql b/cmd/hatchet-migrate/migrate/migrations/20260203130000_v1_0_75.sql
new file mode 100644
index 0000000000..bb62404c1f
--- /dev/null
+++ b/cmd/hatchet-migrate/migrate/migrations/20260203130000_v1_0_75.sql
@@ -0,0 +1,150 @@
+-- +goose Up
+-- +goose StatementBegin
+DO $$
+BEGIN
+    IF NOT EXISTS (
+        SELECT 1
+        FROM pg_type
+        WHERE typname = 'v1_worker_slot_group'
+    ) THEN
+        CREATE TYPE v1_worker_slot_group AS ENUM ('SLOTS', 'DURABLE_SLOTS');
+    END IF;
+END
+$$;
+
+ALTER TABLE "Worker"
+    ADD COLUMN IF NOT EXISTS "durableMaxRuns" INTEGER NOT NULL DEFAULT 0;
+
+ALTER TABLE "Step"
+    ADD COLUMN IF NOT EXISTS "isDurable" BOOLEAN NOT NULL DEFAULT false;
+
+ALTER TABLE v1_task_runtime
+    ADD COLUMN IF NOT EXISTS slot_group v1_worker_slot_group NOT NULL DEFAULT 'SLOTS';
+
+CREATE TABLE IF NOT EXISTS v1_worker_slot_config (
+    tenant_id UUID NOT NULL,
+    worker_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    max_units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    PRIMARY KEY (tenant_id, worker_id, slot_type)
+);
+
+CREATE TABLE IF NOT EXISTS v1_step_slot_request (
+    tenant_id UUID NOT NULL,
+    step_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    PRIMARY KEY (tenant_id, step_id, slot_type)
+);
+
+CREATE TABLE IF NOT EXISTS v1_task_runtime_slot (
+    tenant_id UUID NOT NULL,
+    task_id BIGINT NOT NULL,
+    task_inserted_at TIMESTAMPTZ NOT NULL,
+    retry_count INTEGER NOT NULL,
+    worker_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    PRIMARY KEY (task_id, task_inserted_at, retry_count, slot_type)
+);
+-- +goose StatementEnd
+
+-- -- +goose NO TRANSACTION
+CREATE INDEX IF NOT EXISTS v1_task_runtime_tenantId_workerId_slotGroup_idx
+    ON v1_task_runtime (tenant_id ASC, worker_id ASC, slot_group ASC)
+    WHERE worker_id IS NOT NULL;
+
+CREATE INDEX IF NOT EXISTS v1_task_runtime_slot_tenant_worker_type_idx
+    ON v1_task_runtime_slot (tenant_id ASC, worker_id ASC, slot_type ASC);
+
+CREATE INDEX IF NOT EXISTS v1_step_slot_request_step_idx
+    ON v1_step_slot_request (step_id ASC);
+
+-- +goose StatementBegin
+INSERT INTO v1_worker_slot_config (tenant_id, worker_id, slot_type, max_units)
+SELECT
+    "tenantId",
+    "id",
+    'default'::text,
+    "maxRuns"
+FROM "Worker"
+WHERE "maxRuns" IS NOT NULL
+ON CONFLICT DO NOTHING;
+
+INSERT INTO v1_worker_slot_config (tenant_id, worker_id, slot_type, max_units)
+SELECT
+    "tenantId",
+    "id",
+    'durable'::text,
+    "durableMaxRuns"
+FROM "Worker"
+WHERE "durableMaxRuns" IS NOT NULL AND "durableMaxRuns" > 0
+ON CONFLICT DO NOTHING;
+
+INSERT INTO v1_step_slot_request (tenant_id, step_id, slot_type, units)
+SELECT
+    "tenantId",
+    "id",
+    CASE WHEN "isDurable" THEN 'durable'::text ELSE 'default'::text END,
+    1
+FROM "Step"
+ON CONFLICT DO NOTHING;
+
+INSERT INTO v1_task_runtime_slot (
+    tenant_id,
+    task_id,
+    task_inserted_at,
+    retry_count,
+    worker_id,
+    slot_type,
+    units
+)
+SELECT
+    tenant_id,
+    task_id,
+    task_inserted_at,
+    retry_count,
+    worker_id,
+    CASE
+        WHEN slot_group = 'DURABLE_SLOTS'::v1_worker_slot_group THEN 'durable'::text
+        ELSE 'default'::text
+    END,
+    1
+FROM v1_task_runtime
+WHERE worker_id IS NOT NULL
+ON CONFLICT DO NOTHING;
+
+ALTER TABLE "Worker"
+    DROP COLUMN IF EXISTS "maxRuns",
+    DROP COLUMN IF EXISTS "durableMaxRuns";
+
+-- +goose StatementEnd
+
+-- +goose Down
+-- +goose StatementBegin
+DROP INDEX IF EXISTS v1_task_runtime_slot_tenant_worker_type_idx;
+DROP INDEX IF EXISTS v1_step_slot_request_step_idx;
+DROP TABLE IF EXISTS v1_task_runtime_slot;
+DROP TABLE IF EXISTS v1_step_slot_request;
+DROP TABLE IF EXISTS v1_worker_slot_config;
+
+DROP INDEX IF EXISTS v1_task_runtime_tenantId_workerId_slotGroup_idx;
+
+ALTER TABLE v1_task_runtime
+    DROP COLUMN IF EXISTS slot_group;
+
+ALTER TABLE "Step"
+    DROP COLUMN IF EXISTS "isDurable";
+
+ALTER TABLE "Worker"
+    ADD COLUMN IF NOT EXISTS "maxRuns" INTEGER NOT NULL DEFAULT 100,
+    ADD COLUMN IF NOT EXISTS "durableMaxRuns" INTEGER NOT NULL DEFAULT 0;
+
+DROP TYPE IF EXISTS v1_worker_slot_group;
+-- +goose StatementEnd
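
[A minimal sketch, not part of the patch: assuming the scheduler derives per-worker capacity by joining the two new tables, remaining units per worker and slot type could be computed roughly as below. The $1 tenant parameter and the overall query shape are illustrative assumptions, not code from this PR.]

-- Hypothetical capacity query over the new tables: configured max_units from
-- v1_worker_slot_config minus the units currently held by running tasks in
-- v1_task_runtime_slot, grouped per worker and slot type.
SELECT
    c.worker_id,
    c.slot_type,
    c.max_units - COALESCE(SUM(s.units), 0) AS available_units
FROM v1_worker_slot_config c
LEFT JOIN v1_task_runtime_slot s
    ON s.tenant_id = c.tenant_id
    AND s.worker_id = c.worker_id
    AND s.slot_type = c.slot_type
WHERE c.tenant_id = $1
GROUP BY c.worker_id, c.slot_type, c.max_units;

diff --git 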
a/examples/go/durable/main.go b/examples/go/durable/main.go deleted file mode 100644 index 1020b01686..0000000000 --- a/examples/go/durable/main.go +++ /dev/null @@ -1,70 +0,0 @@ -package main - -import ( - "context" - "log" - "time" - - "github.com/hatchet-dev/hatchet/pkg/cmdutils" - hatchet "github.com/hatchet-dev/hatchet/sdks/go" -) - -type DurableInput struct { - Message string `json:"message"` - Delay int `json:"delay"` // seconds -} - -type DurableOutput struct { - ProcessedAt string `json:"processed_at"` - Message string `json:"message"` -} - -func main() { - client, err := hatchet.NewClient() - if err != nil { - log.Fatalf("failed to create hatchet client: %v", err) - } - - task := client.NewStandaloneDurableTask("long-running-task", func(ctx hatchet.DurableContext, input DurableInput) (DurableOutput, error) { - log.Printf("Starting task, will sleep for %d seconds", input.Delay) - - if _, err := ctx.SleepFor(time.Duration(input.Delay) * time.Second); err != nil { - return DurableOutput{}, err - } - - log.Printf("Finished sleeping, processing message: %s", input.Message) - - return DurableOutput{ - ProcessedAt: time.Now().Format(time.RFC3339), - Message: "Processed: " + input.Message, - }, nil - }) - - worker, err := client.NewWorker("durable-worker", - hatchet.WithWorkflows(task), - hatchet.WithDurableSlots(10), - ) - if err != nil { - log.Fatalf("failed to create worker: %v", err) - } - - interruptCtx, cancel := cmdutils.NewInterruptContext() - defer cancel() - - go func() { - if err := worker.StartBlocking(interruptCtx); err != nil { - log.Fatalf("failed to start worker: %v", err) - } - }() - - // Run the workflow with a 30-second delay - _, err = client.Run(context.Background(), "durable-workflow", DurableInput{ - Message: "Hello from durable task!", - Delay: 30, - }) - if err != nil { - log.Fatalf("failed to run workflow: %v", err) - } - - <-interruptCtx.Done() -} diff --git a/examples/go/streaming/main.go b/examples/go/streaming/main.go deleted file mode 100644 index aa62e1d57b..0000000000 --- a/examples/go/streaming/main.go +++ /dev/null @@ -1,277 +0,0 @@ -package main - -import ( - "context" - "fmt" - "log" - "net/http" - "strings" - "time" - - "github.com/hatchet-dev/hatchet/pkg/cmdutils" - hatchet "github.com/hatchet-dev/hatchet/sdks/go" -) - -type StreamingInput struct { - Content string `json:"content"` - ChunkSize int `json:"chunk_size"` -} - -type StreamingOutput struct { - Message string `json:"message"` - TotalChunks int `json:"total_chunks"` -} - -const sampleText = ` -The Go programming language is an open source project to make programmers more productive. -Go is expressive, concise, clean, and efficient. Its concurrency mechanisms make it easy to -write programs that get the most out of multicore and networked machines, while its novel -type system enables flexible and modular program construction. Go compiles quickly to -machine code yet has the convenience of garbage collection and the power of run-time reflection. -It's a fast, statically typed, compiled language that feels like a dynamically typed, interpreted language. 
-` - -func main() { - // Create a new Hatchet client - client, err := hatchet.NewClient() - if err != nil { - log.Fatalf("failed to create hatchet client: %v", err) - } - - // Create a workflow for streaming - workflow := client.NewWorkflow("streaming-workflow") - - // Define the streaming task - workflow.NewTask("stream-content", func(ctx hatchet.Context, input StreamingInput) (StreamingOutput, error) { - content := input.Content - if content == "" { - content = sampleText - } - - chunkSize := input.ChunkSize - if chunkSize <= 0 { - chunkSize = 50 - } - - // Split content into chunks and stream them - chunks := createChunks(content, chunkSize) - - log.Printf("Starting to stream %d chunks...", len(chunks)) - - // Add a small delay at the start to ensure subscription is ready - time.Sleep(200 * time.Millisecond) - - // Send an initial message to establish the stream - ctx.PutStream("Stream initialized, starting chunks...") - time.Sleep(100 * time.Millisecond) - - for i, chunk := range chunks { - // Stream each chunk - ctx.PutStream(fmt.Sprintf("Chunk %d: %s", i+1, strings.TrimSpace(chunk))) - - // Small delay between chunks to simulate processing - time.Sleep(300 * time.Millisecond) - } - - return StreamingOutput{ - Message: "Content streaming finished", - TotalChunks: len(chunks), - }, nil - }) - - // Create a worker to run the workflow - worker, err := client.NewWorker("streaming-worker", hatchet.WithWorkflows(workflow)) - if err != nil { - log.Fatalf("failed to create worker: %v", err) - } - - interruptCtx, cancel := cmdutils.NewInterruptContext() - defer cancel() - - // Start the worker in a goroutine - go func() { - log.Println("Starting streaming worker...") - if err := worker.StartBlocking(interruptCtx); err != nil { - log.Printf("worker failed: %v", err) - } - }() - - // Wait a moment for the worker to start - time.Sleep(2 * time.Second) - - // Start HTTP server to demonstrate streaming - http.HandleFunc("/stream", func(w http.ResponseWriter, r *http.Request) { - ctx := context.Background() - - // Set headers for streaming response - w.Header().Set("Content-Type", "text/plain; charset=utf-8") - w.Header().Set("Cache-Control", "no-cache") - w.Header().Set("Connection", "keep-alive") - w.Header().Set("Access-Control-Allow-Origin", "*") - - // Run the streaming workflow - workflowRun, err := client.RunNoWait(ctx, "streaming-workflow", StreamingInput{ - Content: sampleText, - ChunkSize: 80, - }) - if err != nil { - http.Error(w, fmt.Sprintf("failed to run workflow: %v", err), http.StatusInternalServerError) - return - } - - // Wait a moment for the workflow to start before subscribing - time.Sleep(100 * time.Millisecond) - - // Subscribe to the stream - stream := client.Runs().SubscribeToStream(ctx, workflowRun.RunId) - - // Stream the content to the HTTP response - flusher, ok := w.(http.Flusher) - if !ok { - http.Error(w, "streaming not supported", http.StatusInternalServerError) - return - } - - for message := range stream { - fmt.Fprintf(w, "data: %s\n\n", message) - flusher.Flush() - } - }) - - http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - html := ` - - - - Hatchet Streaming Example - - - -
[HTML template elided: extraction stripped the markup from this deleted string literal, leaving only text fragments. Recoverable content: a demo page titled "Hatchet Streaming Example", the prompt "Click the button below to start streaming content from a Hatchet workflow:", and a button that presumably invoked the /stream endpoint.]
- - - -` - w.Header().Set("Content-Type", "text/html") - fmt.Fprint(w, html) - }) - - server := &http.Server{ //nolint:gosec // This is a demo - Addr: ":8888", - } - - // Start server in goroutine - go func() { - log.Println("Starting HTTP server on :8888...") - log.Println("Visit http://localhost:8888 to see the streaming example") - if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed { - log.Printf("HTTP server error: %v", err) - } - }() - - // Wait for interrupt signal - <-interruptCtx.Done() - log.Println("Shutting down HTTP server...") - - // Gracefully shutdown the server - shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 5*time.Second) - defer shutdownCancel() - - if err := server.Shutdown(shutdownCtx); err != nil { - log.Printf("HTTP server shutdown error: %v", err) - } else { - log.Println("HTTP server stopped gracefully") - } -} - -func createChunks(content string, chunkSize int) []string { - var chunks []string - words := strings.Fields(strings.TrimSpace(content)) - - currentChunk := "" - for _, word := range words { - if len(currentChunk)+len(word)+1 > chunkSize && currentChunk != "" { - chunks = append(chunks, currentChunk) - currentChunk = word - } else { - if currentChunk != "" { - currentChunk += " " - } - currentChunk += word - } - } - - if currentChunk != "" { - chunks = append(chunks, currentChunk) - } - - return chunks -} diff --git a/examples/go/webhooks/main.go b/examples/go/webhooks/main.go deleted file mode 100644 index a7f8d077e9..0000000000 --- a/examples/go/webhooks/main.go +++ /dev/null @@ -1,141 +0,0 @@ -package main - -import ( - "context" - "fmt" - "log" - - "github.com/google/uuid" - - "github.com/hatchet-dev/hatchet/pkg/client/rest" - hatchet "github.com/hatchet-dev/hatchet/sdks/go" - "github.com/hatchet-dev/hatchet/sdks/go/features" -) - -func main() { - client, err := hatchet.NewClient() - if err != nil { - log.Fatalf("failed to create hatchet client: %v", err) - } - - ctx := context.Background() - - // Generate a unique suffix for webhook names - suffix := uuid.New().String()[:8] - - // List existing webhooks - fmt.Println("Listing existing webhooks...") - webhooks, err := client.Webhooks().List(ctx, rest.V1WebhookListParams{}) - if err != nil { - log.Fatalf("failed to list webhooks: %v", err) - } - if webhooks.Rows != nil { - fmt.Printf("Found %d existing webhooks\n", len(*webhooks.Rows)) - } - - // Create a webhook with Basic Auth - fmt.Println("\nCreating webhook with Basic Auth...") - basicWebhook, err := client.Webhooks().Create(ctx, features.CreateWebhookOpts{ - Name: fmt.Sprintf("test-basic-webhook-%s", suffix), - SourceName: rest.GENERIC, - EventKeyExpression: "body.event_type", - Auth: features.BasicAuth{ - Username: "testuser", - Password: "testpass", - }, - }) - if err != nil { - log.Fatalf("failed to create basic auth webhook: %v", err) - } - fmt.Printf("Created webhook: %s\n", basicWebhook.Name) - - // Get the webhook - fmt.Println("\nGetting webhook...") - retrieved, err := client.Webhooks().Get(ctx, basicWebhook.Name) - if err != nil { - log.Fatalf("failed to get webhook: %v", err) - } - fmt.Printf("Retrieved webhook: %s, AuthType: %s\n", retrieved.Name, retrieved.AuthType) - - // Update the webhook - fmt.Println("\nUpdating webhook...") - updated, err := client.Webhooks().Update(ctx, basicWebhook.Name, features.UpdateWebhookOpts{ - EventKeyExpression: "body.type", - }) - if err != nil { - log.Fatalf("failed to update webhook: %v", err) - } - fmt.Printf("Updated webhook expression to: %s\n", 
updated.EventKeyExpression) - - // Create a webhook with API Key Auth - fmt.Println("\nCreating webhook with API Key Auth...") - apiKeyWebhook, err := client.Webhooks().Create(ctx, features.CreateWebhookOpts{ - Name: fmt.Sprintf("test-apikey-webhook-%s", suffix), - SourceName: rest.STRIPE, - EventKeyExpression: "body.type", - Auth: features.APIKeyAuth{ - HeaderName: "X-API-Key", - APIKey: "sk_test_123", - }, - }) - if err != nil { - log.Fatalf("failed to create api key webhook: %v", err) - } - fmt.Printf("Created webhook: %s\n", apiKeyWebhook.Name) - - // Create a webhook with HMAC Auth - fmt.Println("\nCreating webhook with HMAC Auth...") - hmacWebhook, err := client.Webhooks().Create(ctx, features.CreateWebhookOpts{ - Name: fmt.Sprintf("test-hmac-webhook-%s", suffix), - SourceName: rest.GITHUB, - EventKeyExpression: "headers['X-GitHub-Event']", - Auth: features.HMACAuth{ - SigningSecret: "whsec_test123", - SignatureHeaderName: "X-Hub-Signature-256", - Algorithm: rest.SHA256, - Encoding: rest.HEX, - }, - }) - if err != nil { - log.Fatalf("failed to create hmac webhook: %v", err) - } - fmt.Printf("Created webhook: %s\n", hmacWebhook.Name) - - // List webhooks again to see our new ones - fmt.Println("\nListing all webhooks...") - webhooks, err = client.Webhooks().List(ctx, rest.V1WebhookListParams{}) - if err != nil { - log.Fatalf("failed to list webhooks: %v", err) - } - if webhooks.Rows != nil { - for _, w := range *webhooks.Rows { - fmt.Printf(" - %s (source: %s, auth: %s)\n", w.Name, w.SourceName, w.AuthType) - } - } - - // Clean up - delete the webhooks we created - fmt.Println("\nCleaning up - deleting test webhooks...") - - err = client.Webhooks().Delete(ctx, basicWebhook.Name) - if err != nil { - log.Printf("failed to delete basic webhook: %v", err) - } else { - fmt.Printf("Deleted webhook: %s\n", basicWebhook.Name) - } - - err = client.Webhooks().Delete(ctx, apiKeyWebhook.Name) - if err != nil { - log.Printf("failed to delete apikey webhook: %v", err) - } else { - fmt.Printf("Deleted webhook: %s\n", apiKeyWebhook.Name) - } - - err = client.Webhooks().Delete(ctx, hmacWebhook.Name) - if err != nil { - log.Printf("failed to delete hmac webhook: %v", err) - } else { - fmt.Printf("Deleted webhook: %s\n", hmacWebhook.Name) - } - - fmt.Println("\nDone!") -} diff --git a/examples/python/bulk_fanout/test_bulk_fanout.py b/examples/python/bulk_fanout/test_bulk_fanout.py deleted file mode 100644 index e4a977e001..0000000000 --- a/examples/python/bulk_fanout/test_bulk_fanout.py +++ /dev/null @@ -1,10 +0,0 @@ -import pytest - -from examples.bulk_fanout.worker import ParentInput, bulk_parent_wf - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run() -> None: - result = await bulk_parent_wf.aio_run(input=ParentInput(n=12)) - - assert len(result["spawn"]["results"]) == 12 diff --git a/examples/python/bulk_operations/test_bulk_replay.py b/examples/python/bulk_operations/test_bulk_replay.py deleted file mode 100644 index 834758ae36..0000000000 --- a/examples/python/bulk_operations/test_bulk_replay.py +++ /dev/null @@ -1,106 +0,0 @@ -import asyncio -from datetime import datetime, timedelta, timezone -from uuid import uuid4 - -import pytest - -from examples.bulk_operations.worker import ( - bulk_replay_test_1, - bulk_replay_test_2, - bulk_replay_test_3, -) -from hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, TriggerWorkflowOptions -from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus - - -@pytest.mark.asyncio(loop_scope="session") -async def 
test_bulk_replay(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - n = 100 - - with pytest.raises(Exception): - await bulk_replay_test_1.aio_run_many( - [ - bulk_replay_test_1.create_bulk_run_item( - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - } - ) - ) - for _ in range(n + 1) - ] - ) - - with pytest.raises(Exception): - await bulk_replay_test_2.aio_run_many( - [ - bulk_replay_test_2.create_bulk_run_item( - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - } - ) - ) - for _ in range((n // 2) - 1) - ] - ) - - with pytest.raises(Exception): - await bulk_replay_test_3.aio_run_many( - [ - bulk_replay_test_3.create_bulk_run_item( - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - } - ) - ) - for _ in range((n // 2) - 2) - ] - ) - - workflow_ids = [ - bulk_replay_test_1.id, - bulk_replay_test_2.id, - bulk_replay_test_3.id, - ] - - ## Should result in two batches of replays - await hatchet.runs.aio_bulk_replay( - opts=BulkCancelReplayOpts( - filters=RunFilter( - workflow_ids=workflow_ids, - since=datetime.now(tz=timezone.utc) - timedelta(minutes=2), - additional_metadata={"test_run_id": test_run_id}, - ) - ) - ) - - await asyncio.sleep(10) - - runs = await hatchet.runs.aio_list( - workflow_ids=workflow_ids, - since=datetime.now(tz=timezone.utc) - timedelta(minutes=2), - additional_metadata={"test_run_id": test_run_id}, - limit=1000, - ) - - assert len(runs.rows) == n + 1 + (n // 2 - 1) + (n // 2 - 2) - - for run in runs.rows: - assert run.status == V1TaskStatus.COMPLETED - assert run.retry_count == 1 - assert run.attempt == 2 - - assert ( - len([r for r in runs.rows if r.workflow_id == bulk_replay_test_1.id]) == n + 1 - ) - assert ( - len([r for r in runs.rows if r.workflow_id == bulk_replay_test_2.id]) - == n // 2 - 1 - ) - assert ( - len([r for r in runs.rows if r.workflow_id == bulk_replay_test_3.id]) - == n // 2 - 2 - ) diff --git a/examples/python/cancellation/test_cancellation.py b/examples/python/cancellation/test_cancellation.py deleted file mode 100644 index e10ae1171e..0000000000 --- a/examples/python/cancellation/test_cancellation.py +++ /dev/null @@ -1,29 +0,0 @@ -import asyncio - -import pytest - -from examples.cancellation.worker import cancellation_workflow -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus - - -@pytest.mark.asyncio(loop_scope="session") -async def test_cancellation(hatchet: Hatchet) -> None: - ref = await cancellation_workflow.aio_run_no_wait() - - """Sleep for a long time since we only need cancellation to happen _eventually_""" - await asyncio.sleep(10) - - for i in range(30): - run = await hatchet.runs.aio_get(ref.workflow_run_id) - - if run.run.status == V1TaskStatus.RUNNING: - await asyncio.sleep(1) - continue - - assert run.run.status == V1TaskStatus.CANCELLED - assert not run.run.output - - break - else: - assert False, "Workflow run did not cancel in time" diff --git a/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py b/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py deleted file mode 100644 index ecf3dbc2eb..0000000000 --- a/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py +++ /dev/null @@ -1,47 +0,0 @@ -import asyncio -import time -from uuid import uuid4 - -import pytest - -from examples.concurrency_cancel_in_progress.worker import ( - WorkflowInput, - 
concurrency_cancel_in_progress_workflow, -) -from hatchet_sdk import Hatchet, TriggerWorkflowOptions, V1TaskStatus, WorkflowRunRef - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - refs: list[WorkflowRunRef] = [] - - for i in range(10): - ref = await concurrency_cancel_in_progress_workflow.aio_run_no_wait( - WorkflowInput(group="A"), - options=TriggerWorkflowOptions( - additional_metadata={"test_run_id": test_run_id, "i": str(i)}, - ), - ) - refs.append(ref) - await asyncio.sleep(1) - - for ref in refs: - print(f"Waiting for run {ref.workflow_run_id} to complete") - try: - await ref.aio_result() - except Exception: - continue - - ## wait for the olap repo to catch up - await asyncio.sleep(5) - - runs = sorted( - hatchet.runs.list(additional_metadata={"test_run_id": test_run_id}).rows, - key=lambda r: int((r.additional_metadata or {}).get("i", "0")), - ) - - assert len(runs) == 10 - assert (runs[-1].additional_metadata or {}).get("i") == "9" - assert runs[-1].status == V1TaskStatus.COMPLETED - assert all(r.status == V1TaskStatus.CANCELLED for r in runs[:-1]) diff --git a/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py b/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py deleted file mode 100644 index 3a78f1afbe..0000000000 --- a/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py +++ /dev/null @@ -1,61 +0,0 @@ -import asyncio -import time -from uuid import uuid4 - -import pytest - -from examples.concurrency_cancel_newest.worker import ( - WorkflowInput, - concurrency_cancel_newest_workflow, -) -from hatchet_sdk import Hatchet, TriggerWorkflowOptions, V1TaskStatus - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - to_run = await concurrency_cancel_newest_workflow.aio_run_no_wait( - WorkflowInput(group="A"), - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - }, - ), - ) - await asyncio.sleep(1) - - to_cancel = await concurrency_cancel_newest_workflow.aio_run_many_no_wait( - [ - concurrency_cancel_newest_workflow.create_bulk_run_item( - input=WorkflowInput(group="A"), - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - }, - ), - ) - for _ in range(10) - ] - ) - - await to_run.aio_result() - - for ref in to_cancel: - try: - await ref.aio_result() - except Exception: - pass - - ## wait for the olap repo to catch up - await asyncio.sleep(5) - - successful_run = hatchet.runs.get(to_run.workflow_run_id) - - assert successful_run.run.status == V1TaskStatus.COMPLETED - assert all( - r.status == V1TaskStatus.CANCELLED - for r in hatchet.runs.list( - additional_metadata={"test_run_id": test_run_id} - ).rows - if r.metadata.id != to_run.workflow_run_id - ) diff --git a/examples/python/concurrency_limit_rr/test_concurrency_limit_rr.py b/examples/python/concurrency_limit_rr/test_concurrency_limit_rr.py deleted file mode 100644 index d1de0a29c4..0000000000 --- a/examples/python/concurrency_limit_rr/test_concurrency_limit_rr.py +++ /dev/null @@ -1,55 +0,0 @@ -import time - -import pytest - -from examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow -from hatchet_sdk.workflow_run import WorkflowRunRef - - -@pytest.mark.skip(reason="The timing for this test is not reliable") -@pytest.mark.asyncio(loop_scope="session") -async def test_run() -> None: - num_groups = 2 - runs: 
list[WorkflowRunRef] = [] - - # Start all runs - for _ in range(num_groups): - run = concurrency_limit_rr_workflow.run_no_wait() - runs.append(run) - run = concurrency_limit_rr_workflow.run_no_wait() - runs.append(run) - - # Wait for all results - successful_runs = [] - cancelled_runs = [] - - start_time = time.time() - - # Process each run individually - for i, run in enumerate(runs, start=1): - try: - result = await run.aio_result() - successful_runs.append((i, result)) - except Exception as e: - if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e): - cancelled_runs.append((i, str(e))) - else: - raise # Re-raise if it's an unexpected error - - end_time = time.time() - total_time = end_time - start_time - - # Check that we have the correct number of successful and cancelled runs - assert ( - len(successful_runs) == 4 - ), f"Expected 4 successful runs, got {len(successful_runs)}" - assert ( - len(cancelled_runs) == 0 - ), f"Expected 0 cancelled runs, got {len(cancelled_runs)}" - - # Check that the total time is close to 4 seconds - assert ( - 3.8 <= total_time <= 7 - ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds" - - print(f"Total execution time: {total_time:.2f} seconds") diff --git a/examples/python/concurrency_multiple_keys/test_multiple_concurrency_keys.py b/examples/python/concurrency_multiple_keys/test_multiple_concurrency_keys.py deleted file mode 100644 index 960450283a..0000000000 --- a/examples/python/concurrency_multiple_keys/test_multiple_concurrency_keys.py +++ /dev/null @@ -1,154 +0,0 @@ -import asyncio -from collections import Counter -from datetime import datetime -from random import choice -from typing import Literal -from uuid import uuid4 - -import pytest -from pydantic import BaseModel - -from examples.concurrency_multiple_keys.worker import ( - DIGIT_MAX_RUNS, - NAME_MAX_RUNS, - WorkflowInput, - concurrency_multiple_keys_workflow, -) -from hatchet_sdk import Hatchet, TriggerWorkflowOptions -from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary - -Character = Literal["Anna", "Vronsky", "Stiva", "Dolly", "Levin", "Karenin"] -characters: list[Character] = [ - "Anna", - "Vronsky", - "Stiva", - "Dolly", - "Levin", - "Karenin", -] - - -class RunMetadata(BaseModel): - test_run_id: str - key: str - name: Character - digit: str - started_at: datetime - finished_at: datetime - - @staticmethod - def parse(task: V1TaskSummary) -> "RunMetadata": - return RunMetadata( - test_run_id=task.additional_metadata["test_run_id"], # type: ignore - key=task.additional_metadata["key"], # type: ignore - name=task.additional_metadata["name"], # type: ignore - digit=task.additional_metadata["digit"], # type: ignore - started_at=task.started_at or datetime.max, - finished_at=task.finished_at or datetime.min, - ) - - def __str__(self) -> str: - return self.key - - -@pytest.mark.asyncio(loop_scope="session") -async def test_multi_concurrency_key(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - - run_refs = await concurrency_multiple_keys_workflow.aio_run_many_no_wait( - [ - concurrency_multiple_keys_workflow.create_bulk_run_item( - WorkflowInput( - name=(name := choice(characters)), - digit=(digit := choice([str(i) for i in range(6)])), - ), - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - "key": f"{name}-{digit}", - "name": name, - "digit": digit, - }, - ), - ) - for _ in range(100) - ] - ) - - await asyncio.gather(*[r.aio_result() for r in run_refs]) - - workflows = ( - await 
hatchet.workflows.aio_list( - workflow_name=concurrency_multiple_keys_workflow.name, - limit=1_000, - ) - ).rows - - assert workflows - - workflow = next( - (w for w in workflows if w.name == concurrency_multiple_keys_workflow.name), - None, - ) - - assert workflow - - assert workflow.name == concurrency_multiple_keys_workflow.name - - runs = await hatchet.runs.aio_list( - workflow_ids=[workflow.metadata.id], - additional_metadata={ - "test_run_id": test_run_id, - }, - limit=1_000, - ) - - sorted_runs = sorted( - [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at - ) - - overlapping_groups: dict[int, list[RunMetadata]] = {} - - for run in sorted_runs: - has_group_membership = False - - if not overlapping_groups: - overlapping_groups[1] = [run] - continue - - for id, group in overlapping_groups.items(): - if all(are_overlapping(run, task) for task in group): - overlapping_groups[id].append(run) - has_group_membership = True - break - - if not has_group_membership: - overlapping_groups[len(overlapping_groups) + 1] = [run] - - assert {s.key for s in sorted_runs} == { - k.key for v in overlapping_groups.values() for k in v - } - - for id, group in overlapping_groups.items(): - assert is_valid_group(group), f"Group {id} is not valid" - - -def are_overlapping(x: RunMetadata, y: RunMetadata) -> bool: - return x.started_at < y.finished_at and x.finished_at > y.started_at - - -def is_valid_group(group: list[RunMetadata]) -> bool: - digits = Counter[str]() - names = Counter[str]() - - for task in group: - digits[task.digit] += 1 - names[task.name] += 1 - - if any(v > DIGIT_MAX_RUNS for v in digits.values()): - return False - - if any(v > NAME_MAX_RUNS for v in names.values()): - return False - - return True diff --git a/examples/python/concurrency_workflow_level/test_workflow_level_concurrency.py b/examples/python/concurrency_workflow_level/test_workflow_level_concurrency.py deleted file mode 100644 index 3af1ad3b65..0000000000 --- a/examples/python/concurrency_workflow_level/test_workflow_level_concurrency.py +++ /dev/null @@ -1,150 +0,0 @@ -import asyncio -from collections import Counter -from datetime import datetime -from random import choice -from typing import Literal -from uuid import uuid4 - -import pytest -from pydantic import BaseModel - -from examples.concurrency_workflow_level.worker import ( - DIGIT_MAX_RUNS, - NAME_MAX_RUNS, - WorkflowInput, - concurrency_workflow_level_workflow, -) -from hatchet_sdk import Hatchet, TriggerWorkflowOptions -from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary - -Character = Literal["Anna", "Vronsky", "Stiva", "Dolly", "Levin", "Karenin"] -characters: list[Character] = [ - "Anna", - "Vronsky", - "Stiva", - "Dolly", - "Levin", - "Karenin", -] - - -class RunMetadata(BaseModel): - test_run_id: str - key: str - name: Character - digit: str - started_at: datetime - finished_at: datetime - - @staticmethod - def parse(task: V1TaskSummary) -> "RunMetadata": - return RunMetadata( - test_run_id=task.additional_metadata["test_run_id"], # type: ignore - key=task.additional_metadata["key"], # type: ignore - name=task.additional_metadata["name"], # type: ignore - digit=task.additional_metadata["digit"], # type: ignore - started_at=task.started_at or datetime.max, - finished_at=task.finished_at or datetime.min, - ) - - def __str__(self) -> str: - return self.key - - -@pytest.mark.asyncio(loop_scope="session") -async 
def test_workflow_level_concurrency(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - - run_refs = await concurrency_workflow_level_workflow.aio_run_many_no_wait( - [ - concurrency_workflow_level_workflow.create_bulk_run_item( - WorkflowInput( - name=(name := choice(characters)), - digit=(digit := choice([str(i) for i in range(6)])), - ), - options=TriggerWorkflowOptions( - additional_metadata={ - "test_run_id": test_run_id, - "key": f"{name}-{digit}", - "name": name, - "digit": digit, - }, - ), - ) - for _ in range(100) - ] - ) - - await asyncio.gather(*[r.aio_result() for r in run_refs]) - - workflows = ( - await hatchet.workflows.aio_list( - workflow_name=concurrency_workflow_level_workflow.name, - limit=1_000, - ) - ).rows - - assert workflows - - workflow = next( - (w for w in workflows if w.name == concurrency_workflow_level_workflow.name), - None, - ) - - assert workflow - - assert workflow.name == concurrency_workflow_level_workflow.name - - runs = await hatchet.runs.aio_list( - workflow_ids=[workflow.metadata.id], - additional_metadata={ - "test_run_id": test_run_id, - }, - limit=1_000, - ) - - sorted_runs = sorted( - [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at - ) - - overlapping_groups: dict[int, list[RunMetadata]] = {} - - for run in sorted_runs: - has_group_membership = False - - if not overlapping_groups: - overlapping_groups[1] = [run] - continue - - for id, group in overlapping_groups.items(): - if all(are_overlapping(run, task) for task in group): - overlapping_groups[id].append(run) - has_group_membership = True - break - - if not has_group_membership: - overlapping_groups[len(overlapping_groups) + 1] = [run] - - for id, group in overlapping_groups.items(): - assert is_valid_group(group), f"Group {id} is not valid" - - -def are_overlapping(x: RunMetadata, y: RunMetadata) -> bool: - return x.started_at < y.finished_at and x.finished_at > y.started_at - - -def is_valid_group(group: list[RunMetadata]) -> bool: - digits = Counter[str]() - names = Counter[str]() - - for task in group: - digits[task.digit] += 1 - names[task.name] += 1 - - if any(v > DIGIT_MAX_RUNS for v in digits.values()): - return False - - if any(v > NAME_MAX_RUNS for v in names.values()): - return False - - return True diff --git a/examples/python/conditions/test_conditions.py b/examples/python/conditions/test_conditions.py deleted file mode 100644 index 3acf1091a2..0000000000 --- a/examples/python/conditions/test_conditions.py +++ /dev/null @@ -1,47 +0,0 @@ -import asyncio - -import pytest - -from examples.conditions.worker import task_condition_workflow - -from hatchet_sdk import Hatchet - - -@pytest.mark.asyncio(loop_scope="session") -async def test_waits(hatchet: Hatchet) -> None: - ref = task_condition_workflow.run_no_wait() - - await asyncio.sleep(15) - - hatchet.event.push("skip_on_event:skip", {}) - hatchet.event.push("wait_for_event:start", {}) - - result = await ref.aio_result() - - assert result["skip_on_event"] == {"skipped": True} - - first_random_number = result["start"]["random_number"] - wait_for_event_random_number = result["wait_for_event"]["random_number"] - wait_for_sleep_random_number = result["wait_for_sleep"]["random_number"] - - left_branch = result["left_branch"] - right_branch = result["right_branch"] - - assert left_branch.get("skipped") is True or right_branch.get("skipped") is True - - skip_with_multiple_parents = 
result["skip_with_multiple_parents"] - - assert skip_with_multiple_parents.get("skipped") is True - - branch_random_number = left_branch.get("random_number") or right_branch.get( - "random_number" - ) - - result_sum = result["sum"]["sum"] - - assert ( - result_sum - == first_random_number - + wait_for_event_random_number - + wait_for_sleep_random_number - + branch_random_number - ) diff --git a/examples/python/dag/test_dag.py b/examples/python/dag/test_dag.py deleted file mode 100644 index 36cf1d4665..0000000000 --- a/examples/python/dag/test_dag.py +++ /dev/null @@ -1,14 +0,0 @@ -import pytest - -from examples.dag.worker import dag_workflow -from hatchet_sdk import Hatchet - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run(hatchet: Hatchet) -> None: - result = await dag_workflow.aio_run() - - one = result["step1"]["random_number"] - two = result["step2"]["random_number"] - assert result["step3"]["sum"] == one + two - assert result["step4"]["step4"] == "step4" diff --git a/examples/python/delayed/test_delayed.py b/examples/python/delayed/test_delayed.py deleted file mode 100644 index 3fccdd01a4..0000000000 --- a/examples/python/delayed/test_delayed.py +++ /dev/null @@ -1,11 +0,0 @@ -# from hatchet_sdk import Hatchet -# import pytest - -# from tests.utils import fixture_bg_worker - - -# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"]) - -# # @pytest.mark.asyncio(loop_scope="session") -# async def test_run(hatchet: Hatchet): -# # TODO diff --git a/examples/python/dependency_injection/test_dependency_injection.py b/examples/python/dependency_injection/test_dependency_injection.py deleted file mode 100644 index 8c991d6f79..0000000000 --- a/examples/python/dependency_injection/test_dependency_injection.py +++ /dev/null @@ -1,49 +0,0 @@ -import pytest - -from examples.dependency_injection.worker import ( - ASYNC_DEPENDENCY_VALUE, - SYNC_DEPENDENCY_VALUE, - Output, - async_dep, - async_task_with_dependencies, - di_workflow, - durable_async_task_with_dependencies, - durable_sync_task_with_dependencies, - sync_dep, - sync_task_with_dependencies, -) -from hatchet_sdk import EmptyModel -from hatchet_sdk.runnables.workflow import Standalone - - -@pytest.mark.parametrize( - "task", - [ - async_task_with_dependencies, - sync_task_with_dependencies, - durable_async_task_with_dependencies, - durable_sync_task_with_dependencies, - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_di_standalones( - task: Standalone[EmptyModel, Output], -) -> None: - result = await task.aio_run() - - assert isinstance(result, Output) - assert result.sync_dep == SYNC_DEPENDENCY_VALUE - assert result.async_dep == ASYNC_DEPENDENCY_VALUE - - -@pytest.mark.asyncio(loop_scope="session") -async def test_di_workflows() -> None: - result = await di_workflow.aio_run() - - assert len(result) == 4 - - for output in result.values(): - parsed = Output.model_validate(output) - - assert parsed.sync_dep == SYNC_DEPENDENCY_VALUE - assert parsed.async_dep == ASYNC_DEPENDENCY_VALUE diff --git a/examples/python/durable/test_durable.py b/examples/python/durable/test_durable.py deleted file mode 100644 index 1287c3e788..0000000000 --- a/examples/python/durable/test_durable.py +++ /dev/null @@ -1,74 +0,0 @@ -import asyncio - -import pytest - -from examples.durable.worker import ( - EVENT_KEY, - SLEEP_TIME, - durable_workflow, - wait_for_sleep_twice, -) -from hatchet_sdk import Hatchet - - -@pytest.mark.asyncio(loop_scope="session") -async def test_durable(hatchet: Hatchet) -> None: - ref = 
durable_workflow.run_no_wait() - - await asyncio.sleep(SLEEP_TIME + 10) - - hatchet.event.push(EVENT_KEY, {"test": "test"}) - - result = await ref.aio_result() - - workers = await hatchet.workers.aio_list() - - assert workers.rows - - active_workers = [w for w in workers.rows if w.status == "ACTIVE"] - - assert len(active_workers) == 2 - assert any( - w.name == hatchet.config.apply_namespace("e2e-test-worker") - for w in active_workers - ) - assert any( - w.name == hatchet.config.apply_namespace("e2e-test-worker_durable") - for w in active_workers - ) - - assert result["durable_task"]["status"] == "success" - - wait_group_1 = result["wait_for_or_group_1"] - wait_group_2 = result["wait_for_or_group_2"] - - assert abs(wait_group_1["runtime"] - SLEEP_TIME) < 3 - - assert wait_group_1["key"] == wait_group_2["key"] - assert wait_group_1["key"] == "CREATE" - assert "sleep" in wait_group_1["event_id"] - assert "event" in wait_group_2["event_id"] - - wait_for_multi_sleep = result["wait_for_multi_sleep"] - - assert wait_for_multi_sleep["runtime"] > 3 * SLEEP_TIME - - -@pytest.mark.asyncio(loop_scope="session") -async def test_durable_sleep_cancel_replay(hatchet: Hatchet) -> None: - first_sleep = await wait_for_sleep_twice.aio_run_no_wait() - - await asyncio.sleep(SLEEP_TIME / 2) - - await hatchet.runs.aio_cancel(first_sleep.workflow_run_id) - - await first_sleep.aio_result() - - await hatchet.runs.aio_replay( - first_sleep.workflow_run_id, - ) - - second_sleep_result = await first_sleep.aio_result() - - """We've already slept for a little bit by the time the task is cancelled""" - assert second_sleep_result["runtime"] <= SLEEP_TIME diff --git a/examples/python/events/test_event.py b/examples/python/events/test_event.py deleted file mode 100644 index 94c3b27beb..0000000000 --- a/examples/python/events/test_event.py +++ /dev/null @@ -1,569 +0,0 @@ -import asyncio -import json -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager -from datetime import datetime, timedelta, timezone -from typing import cast -from uuid import uuid4 - -import pytest -from pydantic import BaseModel - -from examples.events.worker import ( - EVENT_KEY, - SECONDARY_KEY, - WILDCARD_KEY, - EventWorkflowInput, - event_workflow, -) -from hatchet_sdk.clients.events import ( - BulkPushEventOptions, - BulkPushEventWithMetadata, - PushEventOptions, -) -from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus -from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary -from hatchet_sdk.contracts.events_pb2 import Event -from hatchet_sdk.hatchet import Hatchet - - -class ProcessedEvent(BaseModel): - id: str - payload: dict[str, str | bool] - meta: dict[str, str | bool | int] - should_have_runs: bool - test_run_id: str - - def __hash__(self) -> int: - return hash(self.model_dump_json()) - - -@asynccontextmanager -async def event_filter( - hatchet: Hatchet, - test_run_id: str, - expression: str | None = None, - payload: dict[str, str] = {}, - scope: str | None = None, -) -> AsyncGenerator[None, None]: - expression = ( - expression - or f"input.should_skip == false && payload.test_run_id == '{test_run_id}'" - ) - - f = await hatchet.filters.aio_create( - workflow_id=event_workflow.id, - expression=expression, - scope=scope or test_run_id, - payload={"test_run_id": test_run_id, **payload}, - ) - - try: - yield - finally: - await hatchet.filters.aio_delete(f.metadata.id) - - -async def fetch_runs_for_event( - hatchet: Hatchet, event: Event -) -> tuple[ProcessedEvent, 
list[V1TaskSummary]]: - runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId) - - meta = ( - cast(dict[str, str | int | bool], json.loads(event.additionalMetadata)) - if event.additionalMetadata - else {} - ) - payload = ( - cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {} - ) - - processed_event = ProcessedEvent( - id=event.eventId, - payload=payload, - meta=meta, - should_have_runs=meta.get("should_have_runs", False) is True, - test_run_id=cast(str, meta["test_run_id"]), - ) - - if not all([r.output for r in runs.rows]): - return (processed_event, []) - - return ( - processed_event, - runs.rows or [], - ) - - -async def wait_for_result( - hatchet: Hatchet, events: list[Event] -) -> dict[ProcessedEvent, list[V1TaskSummary]]: - await asyncio.sleep(3) - - since = datetime.now(tz=timezone.utc) - timedelta(minutes=2) - - persisted = (await hatchet.event.aio_list(limit=100, since=since)).rows or [] - - assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted}) - - iters = 0 - while True: - print("Waiting for event runs to complete...") - if iters > 15: - print("Timed out waiting for event runs to complete.") - return { - ProcessedEvent( - id=event.eventId, - payload=json.loads(event.payload) if event.payload else {}, - meta=( - json.loads(event.additionalMetadata) - if event.additionalMetadata - else {} - ), - should_have_runs=False, - test_run_id=cast( - str, json.loads(event.additionalMetadata).get("test_run_id", "") - ), - ): [] - for event in events - } - - iters += 1 - - event_runs = await asyncio.gather( - *[fetch_runs_for_event(hatchet, event) for event in events] - ) - - all_empty = all(not event_run for _, event_run in event_runs) - - if all_empty: - await asyncio.sleep(1) - continue - - event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs} - - any_queued_or_running = any( - run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING] - for runs in event_id_to_runs.values() - for run in runs - ) - - if any_queued_or_running: - await asyncio.sleep(1) - continue - - break - - return event_id_to_runs - - -async def wait_for_result_and_assert(hatchet: Hatchet, events: list[Event]) -> None: - event_to_runs = await wait_for_result(hatchet, events) - - for event, runs in event_to_runs.items(): - await assert_event_runs_processed(event, runs) - - -async def assert_event_runs_processed( - event: ProcessedEvent, - runs: list[V1TaskSummary], -) -> None: - runs = [ - run - for run in runs - if (run.additional_metadata or {}).get("hatchet__event_id") == event.id - ] - - if event.should_have_runs: - assert len(runs) > 0 - - for run in runs: - assert run.status == V1TaskStatus.COMPLETED - assert run.output.get("test_run_id") == event.test_run_id - else: - assert len(runs) == 0 - - -def bpi( - index: int = 1, - test_run_id: str = "", - should_skip: bool = False, - should_have_runs: bool = True, - key: str = EVENT_KEY, - payload: dict[str, str] = {}, - scope: str | None = None, -) -> BulkPushEventWithMetadata: - return BulkPushEventWithMetadata( - key=key, - payload={ - "should_skip": should_skip, - **payload, - }, - additional_metadata={ - "should_have_runs": should_have_runs, - "test_run_id": test_run_id, - "key": index, - }, - scope=scope, - ) - - -def cp(should_skip: bool) -> dict[str, bool]: - return EventWorkflowInput(should_skip=should_skip).model_dump() - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_push(hatchet: Hatchet) -> None: - e = hatchet.event.push(EVENT_KEY, cp(False)) - - 
assert e.eventId is not None - - -@pytest.mark.asyncio(loop_scope="session") -async def test_async_event_push(hatchet: Hatchet) -> None: - e = await hatchet.event.aio_push(EVENT_KEY, cp(False)) - - assert e.eventId is not None - - -@pytest.mark.asyncio(loop_scope="session") -async def test_async_event_bulk_push(hatchet: Hatchet) -> None: - events = [ - BulkPushEventWithMetadata( - key="event1", - payload={"message": "This is event 1", "should_skip": False}, - additional_metadata={"source": "test", "user_id": "user123"}, - ), - BulkPushEventWithMetadata( - key="event2", - payload={"message": "This is event 2", "should_skip": False}, - additional_metadata={"source": "test", "user_id": "user456"}, - ), - BulkPushEventWithMetadata( - key="event3", - payload={"message": "This is event 3", "should_skip": False}, - additional_metadata={"source": "test", "user_id": "user789"}, - ), - ] - opts = BulkPushEventOptions(namespace="bulk-test") - - e = await hatchet.event.aio_bulk_push(events, opts) - - assert len(e) == 3 - - # Sort both lists of events by their key to ensure comparison order - sorted_events = sorted(events, key=lambda x: x.key) - sorted_returned_events = sorted(e, key=lambda x: x.key) - namespace = "bulk-test" - - # Check that the returned events match the original events - for original_event, returned_event in zip( - sorted_events, sorted_returned_events, strict=False - ): - assert returned_event.key == namespace + original_event.key - - -@pytest.fixture(scope="function") -def test_run_id() -> str: - return str(uuid4()) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_engine_behavior(hatchet: Hatchet) -> None: - test_run_id = str(uuid4()) - events = [ - bpi( - test_run_id=test_run_id, - ), - bpi( - test_run_id=test_run_id, - key="thisisafakeeventfoobarbaz", - should_have_runs=False, - ), - ] - - result = await hatchet.event.aio_bulk_push(events) - - await wait_for_result_and_assert(hatchet, result) - - -def gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]: - return [ - ## No scope, so it shouldn't have any runs - bpi( - index=1, - test_run_id=test_run_id, - should_skip=False, - should_have_runs=False, - ), - ## No scope, so it shouldn't have any runs - bpi( - index=2, - test_run_id=test_run_id, - should_skip=True, - should_have_runs=False, - ), - ## Scope is set and `should_skip` is False, so it should have runs - bpi( - index=3, - test_run_id=test_run_id, - should_skip=False, - should_have_runs=True, - scope=test_run_id, - ), - ## Scope is set and `should_skip` is True, so it shouldn't have runs - bpi( - index=4, - test_run_id=test_run_id, - should_skip=True, - should_have_runs=False, - scope=test_run_id, - ), - ## Scope is set, `should_skip` is False, but key is different, so it shouldn't have runs - bpi( - index=5, - test_run_id=test_run_id, - should_skip=True, - should_have_runs=False, - scope=test_run_id, - key="thisisafakeeventfoobarbaz", - ), - ## Scope is set, `should_skip` is False, but key is different, so it shouldn't have runs - bpi( - index=6, - test_run_id=test_run_id, - should_skip=False, - should_have_runs=False, - scope=test_run_id, - key="thisisafakeeventfoobarbaz", - ), - ] - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None: - async with event_filter(hatchet, test_run_id): - events = gen_bulk_events(test_run_id) - - result = await hatchet.event.aio_bulk_push(events) - - await wait_for_result_and_assert(hatchet, result) - - -async def 
bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event: - return await hatchet.event.aio_push( - event_key=event.key, - payload=event.payload, - options=PushEventOptions( - scope=event.scope, - additional_metadata=event.additional_metadata, - priority=event.priority, - ), - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_skipping_filtering_no_bulk( - hatchet: Hatchet, test_run_id: str -) -> None: - async with event_filter(hatchet, test_run_id): - raw_events = gen_bulk_events(test_run_id) - events = await asyncio.gather( - *[bulk_to_single(hatchet, event) for event in raw_events] - ) - - await wait_for_result_and_assert(hatchet, events) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None: - async with event_filter( - hatchet, - test_run_id, - "input.should_skip == false && payload.foobar == 'baz'", - {"foobar": "qux"}, - ): - event = await hatchet.event.aio_push( - event_key=EVENT_KEY, - payload={"message": "This is event 1", "should_skip": False}, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": False, - "test_run_id": test_run_id, - "key": 1, - }, - ), - ) - - await wait_for_result_and_assert(hatchet, [event]) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_event_payload_filtering_with_payload_match( - hatchet: Hatchet, test_run_id: str -) -> None: - async with event_filter( - hatchet, - test_run_id, - "input.should_skip == false && payload.foobar == 'baz'", - {"foobar": "baz"}, - ): - event = await hatchet.event.aio_push( - event_key=EVENT_KEY, - payload={"message": "This is event 1", "should_skip": False}, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": True, - "test_run_id": test_run_id, - "key": 1, - }, - ), - ) - - await wait_for_result_and_assert(hatchet, [event]) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_filtering_by_event_key(hatchet: Hatchet, test_run_id: str) -> None: - async with event_filter( - hatchet, - test_run_id, - f"event_key == '{SECONDARY_KEY}'", - ): - event_1 = await hatchet.event.aio_push( - event_key=SECONDARY_KEY, - payload={ - "message": "Should run because filter matches", - "should_skip": False, - }, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": True, - "test_run_id": test_run_id, - }, - ), - ) - event_2 = await hatchet.event.aio_push( - event_key=EVENT_KEY, - payload={ - "message": "Should skip because filter does not match", - "should_skip": False, - }, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": False, - "test_run_id": test_run_id, - }, - ), - ) - - await wait_for_result_and_assert(hatchet, [event_1, event_2]) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_key_wildcards(hatchet: Hatchet, test_run_id: str) -> None: - keys = [ - WILDCARD_KEY.replace("*", "1"), - WILDCARD_KEY.replace("*", "2"), - "foobar", - EVENT_KEY, - ] - - async with event_filter( - hatchet, - test_run_id, - ): - events = [ - await hatchet.event.aio_push( - event_key=key, - payload={ - "should_skip": False, - }, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": key != "foobar", - "test_run_id": test_run_id, - }, - ), - ) - for key in keys - ] - - await wait_for_result_and_assert(hatchet, events) - - -@pytest.mark.asyncio(loop_scope="session") -async def 
test_multiple_runs_for_multiple_scope_matches( - hatchet: Hatchet, test_run_id: str -) -> None: - async with event_filter( - hatchet, test_run_id, payload={"filter_id": "1"}, expression="1 == 1" - ): - async with event_filter( - hatchet, test_run_id, payload={"filter_id": "2"}, expression="2 == 2" - ): - event = await hatchet.event.aio_push( - event_key=EVENT_KEY, - payload={ - "should_skip": False, - }, - options=PushEventOptions( - scope=test_run_id, - additional_metadata={ - "should_have_runs": True, - "test_run_id": test_run_id, - }, - ), - ) - - event_to_runs = await wait_for_result(hatchet, [event]) - - assert len(event_to_runs.keys()) == 1 - - runs = list(event_to_runs.values())[0] - - assert len(runs) == 2 - - assert {r.output.get("filter_id") for r in runs} == {"1", "2"} - - -@pytest.mark.asyncio(loop_scope="session") -async def test_multi_scope_bug(hatchet: Hatchet, test_run_id: str) -> None: - async with event_filter(hatchet, test_run_id, expression="1 == 1", scope="a"): - async with event_filter( - hatchet, - test_run_id, - expression="2 == 2", - scope="b", - ): - events = await hatchet.event.aio_bulk_push( - [ - BulkPushEventWithMetadata( - key=EVENT_KEY, - payload={ - "should_skip": False, - }, - additional_metadata={ - "should_have_runs": True, - "test_run_id": test_run_id, - }, - scope="a" if i % 2 == 0 else "b", - ) - for i in range(100) - ], - ) - - await asyncio.sleep(15) - - for event in events: - runs = await hatchet.runs.aio_list( - triggering_event_external_id=event.eventId, - additional_metadata={"test_run_id": test_run_id}, - ) - - assert len(runs.rows) == 1 diff --git a/examples/python/fanout/test_fanout.py b/examples/python/fanout/test_fanout.py deleted file mode 100644 index 9831c0a527..0000000000 --- a/examples/python/fanout/test_fanout.py +++ /dev/null @@ -1,50 +0,0 @@ -import asyncio -from uuid import uuid4 - -import pytest - -from examples.fanout.worker import ParentInput, parent_wf -from hatchet_sdk import Hatchet, TriggerWorkflowOptions - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run(hatchet: Hatchet) -> None: - ref = await parent_wf.aio_run_no_wait( - ParentInput(n=2), - ) - - result = await ref.aio_result() - - assert len(result["spawn"]["results"]) == 2 - - -@pytest.mark.asyncio(loop_scope="session") -async def test_additional_metadata_propagation(hatchet: Hatchet) -> None: - test_run_id = uuid4().hex - - ref = await parent_wf.aio_run_no_wait( - ParentInput(n=2), - options=TriggerWorkflowOptions( - additional_metadata={"test_run_id": test_run_id} - ), - ) - - await ref.aio_result() - await asyncio.sleep(1) - - runs = await hatchet.runs.aio_list( - parent_task_external_id=ref.workflow_run_id, - additional_metadata={"test_run_id": test_run_id}, - ) - - assert runs.rows - - """Assert that the additional metadata is propagated to the child runs.""" - for run in runs.rows: - assert run.additional_metadata - assert run.additional_metadata["test_run_id"] == test_run_id - - assert run.children - for child in run.children: - assert child.additional_metadata - assert child.additional_metadata["test_run_id"] == test_run_id diff --git a/examples/python/fanout_sync/test_fanout_sync.py b/examples/python/fanout_sync/test_fanout_sync.py deleted file mode 100644 index ea93d080f0..0000000000 --- a/examples/python/fanout_sync/test_fanout_sync.py +++ /dev/null @@ -1,47 +0,0 @@ -import asyncio -from uuid import uuid4 - -import pytest - -from examples.fanout_sync.worker import ParentInput, sync_fanout_parent -from hatchet_sdk import Hatchet, 
TriggerWorkflowOptions - - -def test_run() -> None: - N = 2 - - result = sync_fanout_parent.run(ParentInput(n=N)) - - assert len(result["spawn"]["results"]) == N - - -@pytest.mark.asyncio(loop_scope="session") -async def test_additional_metadata_propagation_sync(hatchet: Hatchet) -> None: - test_run_id = uuid4().hex - - ref = await sync_fanout_parent.aio_run_no_wait( - ParentInput(n=2), - options=TriggerWorkflowOptions( - additional_metadata={"test_run_id": test_run_id} - ), - ) - - await ref.aio_result() - await asyncio.sleep(1) - - runs = await hatchet.runs.aio_list( - parent_task_external_id=ref.workflow_run_id, - additional_metadata={"test_run_id": test_run_id}, - ) - - assert runs.rows - - """Assert that the additional metadata is propagated to the child runs.""" - for run in runs.rows: - assert run.additional_metadata - assert run.additional_metadata["test_run_id"] == test_run_id - - assert run.children - for child in run.children: - assert child.additional_metadata - assert child.additional_metadata["test_run_id"] == test_run_id diff --git a/examples/python/lifespans/test_lifespans.py b/examples/python/lifespans/test_lifespans.py deleted file mode 100644 index 5496692900..0000000000 --- a/examples/python/lifespans/test_lifespans.py +++ /dev/null @@ -1,12 +0,0 @@ -import pytest - -from examples.lifespans.simple import Lifespan, lifespan_task - - -@pytest.mark.asyncio(loop_scope="session") -async def test_lifespans() -> None: - result = await lifespan_task.aio_run() - - assert isinstance(result, Lifespan) - assert result.pi == 3.14 - assert result.foo == "bar" diff --git a/examples/python/logger/test_logger.py b/examples/python/logger/test_logger.py deleted file mode 100644 index 6c1c2cf7d8..0000000000 --- a/examples/python/logger/test_logger.py +++ /dev/null @@ -1,10 +0,0 @@ -import pytest - -from examples.logger.workflow import logging_workflow - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run() -> None: - result = await logging_workflow.aio_run() - - assert result["root_logger"]["status"] == "success" diff --git a/examples/python/migration_guides/__init__.py b/examples/python/migration_guides/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/examples/python/non_retryable/test_no_retry.py b/examples/python/non_retryable/test_no_retry.py deleted file mode 100644 index f1414392e3..0000000000 --- a/examples/python/non_retryable/test_no_retry.py +++ /dev/null @@ -1,71 +0,0 @@ -import asyncio - -import pytest - -from examples.non_retryable.worker import ( - non_retryable_workflow, - should_not_retry, - should_not_retry_successful_task, - should_retry_wrong_exception_type, -) -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType -from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails -from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup - - -def find_id(runs: V1WorkflowRunDetails, match: str) -> str: - return next(t.metadata.id for t in runs.tasks if match in t.display_name) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_no_retry(hatchet: Hatchet) -> None: - ref = await non_retryable_workflow.aio_run_no_wait() - - with pytest.raises(FailedTaskRunExceptionGroup) as exc_info: - await ref.aio_result() - - exception_group = exc_info.value - - assert len(exception_group.exceptions) == 2 - - exc_text = [e.exc for e in exception_group.exceptions] - - non_retries = [ - e - for e in exc_text - if "This task should retry because it's 
not a NonRetryableException" in e - ] - - other_errors = [e for e in exc_text if "This task should not retry" in e] - - assert len(non_retries) == 1 - assert len(other_errors) == 1 - - await asyncio.sleep(3) - - runs = await hatchet.runs.aio_get(ref.workflow_run_id) - task_to_id = { - task: find_id(runs, task.name) - for task in [ - should_not_retry_successful_task, - should_retry_wrong_exception_type, - should_not_retry, - ] - } - - retrying_events = [ - e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING - ] - - """Only one task should be retried.""" - assert len(retrying_events) == 1 - - """The task id of the retrying events should match the tasks that are retried""" - assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type] - - """Three failed events should emit, one each for the two failing initial runs and one for the retry.""" - assert ( - len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED]) - == 3 - ) diff --git a/examples/python/on_failure/test_on_failure.py b/examples/python/on_failure/test_on_failure.py deleted file mode 100644 index ecbc12223f..0000000000 --- a/examples/python/on_failure/test_on_failure.py +++ /dev/null @@ -1,34 +0,0 @@ -import asyncio - -import pytest - -from examples.on_failure.worker import on_failure_wf -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run_timeout(hatchet: Hatchet) -> None: - run = on_failure_wf.run_no_wait() - try: - await run.aio_result() - - assert False, "Expected workflow to timeout" - except Exception as e: - assert "step1 failed" in str(e) - - await asyncio.sleep(5) # Wait for the on_failure job to finish - - details = await hatchet.runs.aio_get(run.workflow_run_id) - - assert len(details.tasks) == 2 - assert sum(t.status == V1TaskStatus.COMPLETED for t in details.tasks) == 1 - assert sum(t.status == V1TaskStatus.FAILED for t in details.tasks) == 1 - - completed_task = next( - t for t in details.tasks if t.status == V1TaskStatus.COMPLETED - ) - failed_task = next(t for t in details.tasks if t.status == V1TaskStatus.FAILED) - - assert "on_failure" in completed_task.display_name - assert "step1" in failed_task.display_name diff --git a/examples/python/priority/test_priority.py b/examples/python/priority/test_priority.py deleted file mode 100644 index 7a5cbe10a7..0000000000 --- a/examples/python/priority/test_priority.py +++ /dev/null @@ -1,361 +0,0 @@ -import asyncio -from collections.abc import AsyncGenerator -from datetime import datetime, timedelta, timezone -from random import choice -from subprocess import Popen -from typing import Any, Literal -from uuid import uuid4 - -import pytest -import pytest_asyncio -from pydantic import BaseModel - -from examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow -from hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions -from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus - -Priority = Literal["low", "medium", "high", "default"] - - -class RunPriorityStartedAt(BaseModel): - priority: Priority - started_at: datetime - finished_at: datetime - - -def priority_to_int(priority: Priority) -> int: - match priority: - case "high": - return 3 - case "medium": - return 2 - case "low": - return 1 - case "default": - return DEFAULT_PRIORITY - case _: - raise ValueError(f"Invalid priority: {priority}") - - 
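The three priority tests that follow all assert the same invariant against a single-slot worker: when completed runs are sorted by started_at, their priorities must be non-increasing and the runs must execute serially. A minimal sketch of that shared check as a standalone helper (hypothetical; not part of the deleted suite, and it assumes the RunPriorityStartedAt model and priority_to_int helper defined above):

def assert_priority_ordering(runs: list[RunPriorityStartedAt]) -> None:
    # Walk consecutive pairs, assuming `runs` is pre-sorted by started_at.
    for curr, nxt in zip(runs, runs[1:], strict=False):
        # Run start times should be in order of priority.
        assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)
        # Runs should proceed one at a time on a single slot.
        assert curr.finished_at <= nxt.finished_at
        assert nxt.finished_at >= nxt.started_at
        # Runs should finish after starting.
        assert curr.finished_at >= curr.started_at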
-@pytest_asyncio.fixture(loop_scope="session", scope="function") -async def dummy_runs() -> None: - priority: Priority = "high" - - await priority_workflow.aio_run_many_no_wait( - [ - priority_workflow.create_bulk_run_item( - options=TriggerWorkflowOptions( - priority=(priority_to_int(priority)), - additional_metadata={ - "priority": priority, - "key": ix, - "type": "dummy", - }, - ) - ) - for ix in range(40) - ] - ) - - await asyncio.sleep(3) - - return - - -@pytest.mark.parametrize( - "on_demand_worker", - [ - ( - ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"], - 8003, - ) - ], - indirect=True, -) -@pytest.mark.asyncio(loop_scope="session") -async def test_priority( - hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any] -) -> None: - test_run_id = str(uuid4()) - choices: list[Priority] = ["low", "medium", "high", "default"] - N = 30 - - run_refs = await priority_workflow.aio_run_many_no_wait( - [ - priority_workflow.create_bulk_run_item( - options=TriggerWorkflowOptions( - priority=(priority_to_int(priority := choice(choices))), - additional_metadata={ - "priority": priority, - "key": ix, - "test_run_id": test_run_id, - }, - ) - ) - for ix in range(N) - ] - ) - - await asyncio.gather(*[r.aio_result() for r in run_refs]) - - workflows = ( - await hatchet.workflows.aio_list(workflow_name=priority_workflow.name) - ).rows - - assert workflows - - workflow = next((w for w in workflows if w.name == priority_workflow.name), None) - - assert workflow - - assert workflow.name == priority_workflow.name - - runs = await hatchet.runs.aio_list( - workflow_ids=[workflow.metadata.id], - additional_metadata={ - "test_run_id": test_run_id, - }, - limit=1_000, - ) - - runs_ids_started_ats: list[RunPriorityStartedAt] = sorted( - [ - RunPriorityStartedAt( - priority=(r.additional_metadata or {}).get("priority") or "low", - started_at=r.started_at or datetime.min, - finished_at=r.finished_at or datetime.min, - ) - for r in runs.rows - ], - key=lambda x: x.started_at, - ) - - assert len(runs_ids_started_ats) == len(run_refs) - assert len(runs_ids_started_ats) == N - - for i in range(len(runs_ids_started_ats) - 1): - curr = runs_ids_started_ats[i] - nxt = runs_ids_started_ats[i + 1] - - """Run start times should be in order of priority""" - assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority) - - """Runs should proceed one at a time""" - assert curr.finished_at <= nxt.finished_at - assert nxt.finished_at >= nxt.started_at - - """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)""" - assert curr.finished_at >= curr.started_at - - -@pytest.mark.parametrize( - "on_demand_worker", - [ - ( - ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"], - 8003, - ) - ], - indirect=True, -) -@pytest.mark.asyncio(loop_scope="session") -async def test_priority_via_scheduling( - hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any] -) -> None: - test_run_id = str(uuid4()) - sleep_time = 3 - n = 30 - choices: list[Priority] = ["low", "medium", "high", "default"] - run_at = datetime.now(tz=timezone.utc) + timedelta(seconds=sleep_time) - - versions = await asyncio.gather( - *[ - priority_workflow.aio_schedule( - run_at=run_at, - options=ScheduleTriggerWorkflowOptions( - priority=(priority_to_int(priority := choice(choices))), - additional_metadata={ - "priority": priority, - "key": ix, - "test_run_id": test_run_id, - }, - ), - ) - for ix in range(n) - ] - ) - - await asyncio.sleep(sleep_time * 2) - - 
workflow_id = versions[0].workflow_id - - attempts = 0 - - while True: - if attempts >= SLEEP_TIME * n * 2: - raise TimeoutError("Timed out waiting for runs to finish") - - attempts += 1 - await asyncio.sleep(1) - runs = await hatchet.runs.aio_list( - workflow_ids=[workflow_id], - additional_metadata={ - "test_run_id": test_run_id, - }, - limit=1_000, - ) - - if not runs.rows: - continue - - if any( - r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows - ): - raise ValueError("One or more runs failed or were cancelled") - - if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows): - break - - runs_ids_started_ats: list[RunPriorityStartedAt] = sorted( - [ - RunPriorityStartedAt( - priority=(r.additional_metadata or {}).get("priority") or "low", - started_at=r.started_at or datetime.min, - finished_at=r.finished_at or datetime.min, - ) - for r in runs.rows - ], - key=lambda x: x.started_at, - ) - - assert len(runs_ids_started_ats) == len(versions) - - for i in range(len(runs_ids_started_ats) - 1): - curr = runs_ids_started_ats[i] - nxt = runs_ids_started_ats[i + 1] - - """Run start times should be in order of priority""" - assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority) - - """Runs should proceed one at a time""" - assert curr.finished_at <= nxt.finished_at - assert nxt.finished_at >= nxt.started_at - - """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)""" - assert curr.finished_at >= curr.started_at - - -@pytest_asyncio.fixture(loop_scope="session", scope="function") -async def crons( - hatchet: Hatchet, dummy_runs: None -) -> AsyncGenerator[tuple[str, str, int], None]: - test_run_id = str(uuid4()) - choices: list[Priority] = ["low", "medium", "high"] - n = 30 - - crons = await asyncio.gather( - *[ - hatchet.cron.aio_create( - workflow_name=priority_workflow.name, - cron_name=f"{test_run_id}-cron-{i}", - expression="* * * * *", - input={}, - additional_metadata={ - "trigger": "cron", - "test_run_id": test_run_id, - "priority": (priority := choice(choices)), - "key": str(i), - }, - priority=(priority_to_int(priority)), - ) - for i in range(n) - ] - ) - - yield crons[0].workflow_id, test_run_id, n - - await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons]) - - -def time_until_next_minute() -> float: - now = datetime.now(tz=timezone.utc) - next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0) - - return (next_minute - now).total_seconds() - - -@pytest.mark.skip( - reason="Test is flaky because the first jobs that are picked up don't necessarily go in priority order" -) -@pytest.mark.parametrize( - "on_demand_worker", - [ - ( - ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"], - 8003, - ) - ], - indirect=True, -) -@pytest.mark.asyncio(loop_scope="session") -async def test_priority_via_cron( - hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any] -) -> None: - workflow_id, test_run_id, n = crons - - await asyncio.sleep(time_until_next_minute() + 10) - - attempts = 0 - - while True: - if attempts >= SLEEP_TIME * n * 2: - raise TimeoutError("Timed out waiting for runs to finish") - - attempts += 1 - await asyncio.sleep(1) - runs = await hatchet.runs.aio_list( - workflow_ids=[workflow_id], - additional_metadata={ - "test_run_id": test_run_id, - }, - limit=1_000, - ) - - if not runs.rows: - continue - - if any( - r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows - ): - raise 
ValueError("One or more runs failed or were cancelled") - - if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows): - break - - runs_ids_started_ats: list[RunPriorityStartedAt] = sorted( - [ - RunPriorityStartedAt( - priority=(r.additional_metadata or {}).get("priority") or "low", - started_at=r.started_at or datetime.min, - finished_at=r.finished_at or datetime.min, - ) - for r in runs.rows - ], - key=lambda x: x.started_at, - ) - - assert len(runs_ids_started_ats) == n - - for i in range(len(runs_ids_started_ats) - 1): - curr = runs_ids_started_ats[i] - nxt = runs_ids_started_ats[i + 1] - - """Run start times should be in order of priority""" - assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority) - - """Runs should proceed one at a time""" - assert curr.finished_at <= nxt.finished_at - assert nxt.finished_at >= nxt.started_at - - """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)""" - assert curr.finished_at >= curr.started_at diff --git a/examples/python/return_exceptions/test_return_exceptions.py b/examples/python/return_exceptions/test_return_exceptions.py deleted file mode 100644 index 2dca8d6797..0000000000 --- a/examples/python/return_exceptions/test_return_exceptions.py +++ /dev/null @@ -1,40 +0,0 @@ -import asyncio - -import pytest - -from examples.return_exceptions.worker import Input, return_exceptions_task - - -@pytest.mark.asyncio(loop_scope="session") -async def test_return_exceptions_async() -> None: - results = await return_exceptions_task.aio_run_many( - [ - return_exceptions_task.create_bulk_run_item(input=Input(index=i)) - for i in range(10) - ], - return_exceptions=True, - ) - - for i, result in enumerate(results): - if i % 2 == 0: - assert isinstance(result, Exception) - assert f"error in task with index {i}" in str(result) - else: - assert result == {"message": "this is a successful task."} - - -def test_return_exceptions_sync() -> None: - results = return_exceptions_task.run_many( - [ - return_exceptions_task.create_bulk_run_item(input=Input(index=i)) - for i in range(10) - ], - return_exceptions=True, - ) - - for i, result in enumerate(results): - if i % 2 == 0: - assert isinstance(result, Exception) - assert f"error in task with index {i}" in str(result) - else: - assert result == {"message": "this is a successful task."} diff --git a/examples/python/simple/test_simple_workflow.py b/examples/python/simple/test_simple_workflow.py deleted file mode 100644 index 2f4bea7ff4..0000000000 --- a/examples/python/simple/test_simple_workflow.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest - -from examples.simple.worker import simple, simple_durable -from hatchet_sdk import EmptyModel -from hatchet_sdk.runnables.workflow import Standalone - - -@pytest.mark.parametrize("task", [simple, simple_durable]) -@pytest.mark.asyncio(loop_scope="session") -async def test_simple_workflow_running_options( - task: Standalone[EmptyModel, dict[str, str]], -) -> None: - x1 = task.run() - x2 = await task.aio_run() - - x3 = task.run_many([task.create_bulk_run_item()])[0] - x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0] - - x5 = task.run_no_wait().result() - x6 = (await task.aio_run_no_wait()).result() - x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0] - x8 = [ - x.result() - for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()]) - ][0] - - x9 = await task.run_no_wait().aio_result() - x10 = await (await task.aio_run_no_wait()).aio_result() - x11 = [ - await 
x.aio_result() - for x in task.run_many_no_wait([task.create_bulk_run_item()]) - ][0] - x12 = [ - await x.aio_result() - for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()]) - ][0] - - assert all( - x == {"result": "Hello, world!"} - for x in [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12] - ) diff --git a/examples/python/simple/worker.py b/examples/python/simple/worker.py index 2bd0361661..c2714ff954 100644 --- a/examples/python/simple/worker.py +++ b/examples/python/simple/worker.py @@ -1,22 +1,29 @@ # > Simple +import time -from hatchet_sdk import Context, EmptyModel, Hatchet +from hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet hatchet = Hatchet(debug=True) @hatchet.task() def simple(input: EmptyModel, ctx: Context) -> dict[str, str]: + time.sleep(50) return {"result": "Hello, world!"} @hatchet.durable_task() -def simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]: +async def simple_durable(input: EmptyModel, ctx: DurableContext) -> dict[str, str]: + res = await simple.aio_run(input) + print(res) return {"result": "Hello, world!"} def main() -> None: - worker = hatchet.worker("test-worker", workflows=[simple, simple_durable]) + worker = hatchet.worker( + "test-worker", + workflows=[simple, simple_durable], + ) worker.start() diff --git a/examples/python/streaming/test_streaming.py b/examples/python/streaming/test_streaming.py deleted file mode 100644 index 777ea79b2b..0000000000 --- a/examples/python/streaming/test_streaming.py +++ /dev/null @@ -1,41 +0,0 @@ -from subprocess import Popen -from typing import Any - -import pytest - -from examples.streaming.worker import chunks, stream_task -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType - - -@pytest.mark.parametrize( - "on_demand_worker", - [ - ( - ["poetry", "run", "python", "examples/streaming/worker.py", "--slots", "1"], - 8008, - ) - ], - indirect=True, -) -@pytest.mark.parametrize("execution_number", range(5)) # run test multiple times -@pytest.mark.asyncio(loop_scope="session") -async def test_streaming_ordering_and_completeness( - execution_number: int, - hatchet: Hatchet, - on_demand_worker: Popen[Any], -) -> None: - ref = await stream_task.aio_run_no_wait() - - ix = 0 - anna_karenina = "" - - async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id): - assert chunks[ix] == chunk - ix += 1 - anna_karenina += chunk - - assert ix == len(chunks) - assert anna_karenina == "".join(chunks) - - await ref.aio_result() diff --git a/examples/python/timeout/test_timeout.py b/examples/python/timeout/test_timeout.py deleted file mode 100644 index 1f46e52a11..0000000000 --- a/examples/python/timeout/test_timeout.py +++ /dev/null @@ -1,21 +0,0 @@ -import pytest - -from examples.timeout.worker import refresh_timeout_wf, timeout_wf - - -@pytest.mark.asyncio(loop_scope="session") -async def test_execution_timeout() -> None: - run = timeout_wf.run_no_wait() - - with pytest.raises( - Exception, - match="(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)", - ): - await run.aio_result() - - -@pytest.mark.asyncio(loop_scope="session") -async def test_run_refresh_timeout() -> None: - result = await refresh_timeout_wf.aio_run() - - assert result["refresh_task"]["status"] == "success" diff --git a/examples/python/unit_testing/test_unit.py b/examples/python/unit_testing/test_unit.py deleted file mode 100644 index cebc84f17e..0000000000 --- a/examples/python/unit_testing/test_unit.py +++ /dev/null @@ 
-1,96 +0,0 @@ -import pytest - -from examples.unit_testing.workflows import ( - Lifespan, - UnitTestInput, - UnitTestOutput, - async_complex_workflow, - async_simple_workflow, - async_standalone, - durable_async_complex_workflow, - durable_async_simple_workflow, - durable_async_standalone, - durable_sync_complex_workflow, - durable_sync_simple_workflow, - durable_sync_standalone, - start, - sync_complex_workflow, - sync_simple_workflow, - sync_standalone, -) -from hatchet_sdk import Task - - -@pytest.mark.parametrize( - "func", - [ - sync_standalone, - durable_sync_standalone, - sync_simple_workflow, - durable_sync_simple_workflow, - sync_complex_workflow, - durable_sync_complex_workflow, - ], -) -def test_simple_unit_sync(func: Task[UnitTestInput, UnitTestOutput]) -> None: - input = UnitTestInput(key="test_key", number=42) - additional_metadata = {"meta_key": "meta_value"} - lifespan = Lifespan(mock_db_url="sqlite:///:memory:") - retry_count = 1 - - expected_output = UnitTestOutput( - key=input.key, - number=input.number, - additional_metadata=additional_metadata, - retry_count=retry_count, - mock_db_url=lifespan.mock_db_url, - ) - - assert ( - func.mock_run( - input=input, - additional_metadata=additional_metadata, - lifespan=lifespan, - retry_count=retry_count, - parent_outputs={start.name: expected_output.model_dump()}, - ) - == expected_output - ) - - -@pytest.mark.parametrize( - "func", - [ - async_standalone, - durable_async_standalone, - async_simple_workflow, - durable_async_simple_workflow, - async_complex_workflow, - durable_async_complex_workflow, - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_simple_unit_async(func: Task[UnitTestInput, UnitTestOutput]) -> None: - input = UnitTestInput(key="test_key", number=42) - additional_metadata = {"meta_key": "meta_value"} - lifespan = Lifespan(mock_db_url="sqlite:///:memory:") - retry_count = 1 - - expected_output = UnitTestOutput( - key=input.key, - number=input.number, - additional_metadata=additional_metadata, - retry_count=retry_count, - mock_db_url=lifespan.mock_db_url, - ) - - assert ( - await func.aio_mock_run( - input=input, - additional_metadata=additional_metadata, - lifespan=lifespan, - retry_count=retry_count, - parent_outputs={start.name: expected_output.model_dump()}, - ) - == expected_output - ) diff --git a/examples/python/webhooks/test_webhooks.py b/examples/python/webhooks/test_webhooks.py deleted file mode 100644 index 317ce06575..0000000000 --- a/examples/python/webhooks/test_webhooks.py +++ /dev/null @@ -1,643 +0,0 @@ -import asyncio -import base64 -import hashlib -import hmac -import json -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager -from datetime import datetime, timezone -from typing import Any -from uuid import uuid4 - -import aiohttp -import pytest - -from examples.webhooks.worker import WebhookInput -from hatchet_sdk import Hatchet -from hatchet_sdk.clients.rest.api.webhook_api import WebhookApi -from hatchet_sdk.clients.rest.models.v1_create_webhook_request import ( - V1CreateWebhookRequest, -) -from hatchet_sdk.clients.rest.models.v1_create_webhook_request_api_key import ( - V1CreateWebhookRequestAPIKey, -) -from hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import ( - V1CreateWebhookRequestBasicAuth, -) -from hatchet_sdk.clients.rest.models.v1_create_webhook_request_hmac import ( - V1CreateWebhookRequestHMAC, -) -from hatchet_sdk.clients.rest.models.v1_event import V1Event -from 
hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus -from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary -from hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook -from hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth -from hatchet_sdk.clients.rest.models.v1_webhook_basic_auth import V1WebhookBasicAuth -from hatchet_sdk.clients.rest.models.v1_webhook_hmac_algorithm import ( - V1WebhookHMACAlgorithm, -) -from hatchet_sdk.clients.rest.models.v1_webhook_hmac_auth import V1WebhookHMACAuth -from hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import ( - V1WebhookHMACEncoding, -) -from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName - -TEST_BASIC_USERNAME = "test_user" -TEST_BASIC_PASSWORD = "test_password" -TEST_API_KEY_HEADER = "X-API-Key" -TEST_API_KEY_VALUE = "test_api_key_123" -TEST_HMAC_SIGNATURE_HEADER = "X-Signature" -TEST_HMAC_SECRET = "test_hmac_secret" - - -@pytest.fixture -def webhook_body() -> WebhookInput: - return WebhookInput(type="test", message="Hello, world!") - - -@pytest.fixture -def test_run_id() -> str: - return str(uuid4()) - - -@pytest.fixture -def test_start() -> datetime: - return datetime.now(timezone.utc) - - -def create_hmac_signature( - payload: bytes, - secret: str, - algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256, - encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX, -) -> str: - algorithm_map = { - V1WebhookHMACAlgorithm.SHA1: hashlib.sha1, - V1WebhookHMACAlgorithm.SHA256: hashlib.sha256, - V1WebhookHMACAlgorithm.SHA512: hashlib.sha512, - V1WebhookHMACAlgorithm.MD5: hashlib.md5, - } - - hash_func = algorithm_map[algorithm] - signature = hmac.new(secret.encode(), payload, hash_func).digest() - - if encoding == V1WebhookHMACEncoding.HEX: - return signature.hex() - if encoding == V1WebhookHMACEncoding.BASE64: - return base64.b64encode(signature).decode() - if encoding == V1WebhookHMACEncoding.BASE64URL: - return base64.urlsafe_b64encode(signature).decode() - - raise ValueError(f"Unsupported encoding: {encoding}") - - -async def send_webhook_request( - url: str, - body: WebhookInput, - auth_type: str, - auth_data: dict[str, Any] | None = None, - headers: dict[str, str] | None = None, -) -> aiohttp.ClientResponse: - request_headers = headers or {} - auth = None - - if auth_type == "BASIC" and auth_data: - auth = aiohttp.BasicAuth(auth_data["username"], auth_data["password"]) - elif auth_type == "API_KEY" and auth_data: - request_headers[auth_data["header_name"]] = auth_data["api_key"] - elif auth_type == "HMAC" and auth_data: - payload = json.dumps(body.model_dump()).encode() - signature = create_hmac_signature( - payload, - auth_data["secret"], - auth_data.get("algorithm", V1WebhookHMACAlgorithm.SHA256), - auth_data.get("encoding", V1WebhookHMACEncoding.HEX), - ) - request_headers[auth_data["header_name"]] = signature - - async with aiohttp.ClientSession() as session: - return await session.post( - url, json=body.model_dump(), auth=auth, headers=request_headers - ) - - -async def wait_for_event( - hatchet: Hatchet, - webhook_name: str, - test_start: datetime, -) -> V1Event | None: - await asyncio.sleep(5) - - events = await hatchet.event.aio_list(since=test_start) - - if events.rows is None: - return None - - return next( - ( - event - for event in events.rows - if event.triggering_webhook_name == webhook_name - ), - None, - ) - - -async def wait_for_workflow_run( - hatchet: Hatchet, event_id: str, test_start: datetime 
-) -> V1TaskSummary | None: - await asyncio.sleep(5) - - runs = await hatchet.runs.aio_list( - since=test_start, - additional_metadata={ - "hatchet__event_id": event_id, - }, - ) - - if len(runs.rows) == 0: - return None - - return runs.rows[0] - - -@asynccontextmanager -async def basic_auth_webhook( - hatchet: Hatchet, - test_run_id: str, - username: str = TEST_BASIC_USERNAME, - password: str = TEST_BASIC_PASSWORD, - source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC, -) -> AsyncGenerator[V1Webhook, None]: - ## Hack to get the API client - client = hatchet.metrics.client() - webhook_api = WebhookApi(client) - - webhook_request = V1CreateWebhookRequestBasicAuth( - sourceName=source_name, - name=f"test-webhook-basic-{test_run_id}", - eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type", - authType="BASIC", - auth=V1WebhookBasicAuth( - username=username, - password=password, - ), - ) - - incoming_webhook = webhook_api.v1_webhook_create( - tenant=hatchet.tenant_id, - v1_create_webhook_request=V1CreateWebhookRequest(webhook_request), - ) - - try: - yield incoming_webhook - finally: - webhook_api.v1_webhook_delete( - tenant=hatchet.tenant_id, - v1_webhook=incoming_webhook.name, - ) - - -@asynccontextmanager -async def api_key_webhook( - hatchet: Hatchet, - test_run_id: str, - header_name: str = TEST_API_KEY_HEADER, - api_key: str = TEST_API_KEY_VALUE, - source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC, -) -> AsyncGenerator[V1Webhook, None]: - client = hatchet.metrics.client() - webhook_api = WebhookApi(client) - - webhook_request = V1CreateWebhookRequestAPIKey( - sourceName=source_name, - name=f"test-webhook-apikey-{test_run_id}", - eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type", - authType="API_KEY", - auth=V1WebhookAPIKeyAuth( - headerName=header_name, - apiKey=api_key, - ), - ) - - incoming_webhook = webhook_api.v1_webhook_create( - tenant=hatchet.tenant_id, - v1_create_webhook_request=V1CreateWebhookRequest(webhook_request), - ) - - try: - yield incoming_webhook - finally: - webhook_api.v1_webhook_delete( - tenant=hatchet.tenant_id, - v1_webhook=incoming_webhook.name, - ) - - -@asynccontextmanager -async def hmac_webhook( - hatchet: Hatchet, - test_run_id: str, - signature_header_name: str = TEST_HMAC_SIGNATURE_HEADER, - signing_secret: str = TEST_HMAC_SECRET, - algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256, - encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX, - source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC, -) -> AsyncGenerator[V1Webhook, None]: - client = hatchet.metrics.client() - webhook_api = WebhookApi(client) - - webhook_request = V1CreateWebhookRequestHMAC( - sourceName=source_name, - name=f"test-webhook-hmac-{test_run_id}", - eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type", - authType="HMAC", - auth=V1WebhookHMACAuth( - algorithm=algorithm, - encoding=encoding, - signatureHeaderName=signature_header_name, - signingSecret=signing_secret, - ), - ) - - incoming_webhook = webhook_api.v1_webhook_create( - tenant=hatchet.tenant_id, - v1_create_webhook_request=V1CreateWebhookRequest(webhook_request), - ) - - try: - yield incoming_webhook - finally: - webhook_api.v1_webhook_delete( - tenant=hatchet.tenant_id, - v1_webhook=incoming_webhook.name, - ) - - -def url(tenant_id: str, webhook_name: str) -> str: - return f"http://localhost:8080/api/v1/stable/tenants/{tenant_id}/webhooks/{webhook_name}" - - -async def 
assert_has_runs( - hatchet: Hatchet, - test_start: datetime, - webhook_body: WebhookInput, - incoming_webhook: V1Webhook, -) -> None: - triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start) - assert triggered_event is not None - assert ( - triggered_event.key - == f"{hatchet.config.apply_namespace('webhook')}:{webhook_body.type}" - ) - assert triggered_event.payload == webhook_body.model_dump() - - workflow_run = await wait_for_workflow_run( - hatchet, triggered_event.metadata.id, test_start - ) - assert workflow_run is not None - assert workflow_run.status == V1TaskStatus.COMPLETED - assert workflow_run.additional_metadata is not None - - assert ( - workflow_run.additional_metadata["hatchet__event_id"] - == triggered_event.metadata.id - ) - assert workflow_run.additional_metadata["hatchet__event_key"] == triggered_event.key - assert workflow_run.status == V1TaskStatus.COMPLETED - - -async def assert_event_not_created( - hatchet: Hatchet, - test_start: datetime, - incoming_webhook: V1Webhook, -) -> None: - triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start) - assert triggered_event is None - - -@pytest.mark.asyncio(loop_scope="session") -async def test_basic_auth_success( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "BASIC", - {"username": TEST_BASIC_USERNAME, "password": TEST_BASIC_PASSWORD}, - ) as response: - assert response.status == 200 - data = await response.json() - assert data == {"message": "ok"} - - await assert_has_runs( - hatchet, - test_start, - webhook_body, - incoming_webhook, - ) - - -@pytest.mark.parametrize( - "username,password", - [ - ("test_user", "incorrect_password"), - ("incorrect_user", "test_password"), - ("incorrect_user", "incorrect_password"), - ("", ""), - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_basic_auth_failure( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, - username: str, - password: str, -) -> None: - """Test basic authentication failures.""" - async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "BASIC", - {"username": username, "password": password}, - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_basic_auth_missing_credentials( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE" - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_api_key_success( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with api_key_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - 
"API_KEY", - {"header_name": TEST_API_KEY_HEADER, "api_key": TEST_API_KEY_VALUE}, - ) as response: - assert response.status == 200 - data = await response.json() - assert data == {"message": "ok"} - - await assert_has_runs( - hatchet, - test_start, - webhook_body, - incoming_webhook, - ) - - -@pytest.mark.parametrize( - "api_key", - [ - "incorrect_api_key", - "", - "partial_key", - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_api_key_failure( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, - api_key: str, -) -> None: - async with api_key_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "API_KEY", - {"header_name": TEST_API_KEY_HEADER, "api_key": api_key}, - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_api_key_missing_header( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with api_key_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE" - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_hmac_success( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with hmac_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "HMAC", - { - "header_name": TEST_HMAC_SIGNATURE_HEADER, - "secret": TEST_HMAC_SECRET, - "algorithm": V1WebhookHMACAlgorithm.SHA256, - "encoding": V1WebhookHMACEncoding.HEX, - }, - ) as response: - assert response.status == 200 - data = await response.json() - assert data == {"message": "ok"} - - await assert_has_runs( - hatchet, - test_start, - webhook_body, - incoming_webhook, - ) - - -@pytest.mark.parametrize( - "algorithm,encoding", - [ - (V1WebhookHMACAlgorithm.SHA1, V1WebhookHMACEncoding.HEX), - (V1WebhookHMACAlgorithm.SHA256, V1WebhookHMACEncoding.BASE64), - (V1WebhookHMACAlgorithm.SHA512, V1WebhookHMACEncoding.BASE64URL), - (V1WebhookHMACAlgorithm.MD5, V1WebhookHMACEncoding.HEX), - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_hmac_different_algorithms_and_encodings( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, - algorithm: V1WebhookHMACAlgorithm, - encoding: V1WebhookHMACEncoding, -) -> None: - async with hmac_webhook( - hatchet, test_run_id, algorithm=algorithm, encoding=encoding - ) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "HMAC", - { - "header_name": TEST_HMAC_SIGNATURE_HEADER, - "secret": TEST_HMAC_SECRET, - "algorithm": algorithm, - "encoding": encoding, - }, - ) as response: - assert response.status == 200 - data = await response.json() - assert data == {"message": "ok"} - - await assert_has_runs( - hatchet, - test_start, - webhook_body, - incoming_webhook, - ) - - -@pytest.mark.parametrize( - "secret", - [ - "incorrect_secret", - "", - "partial_secret", - ], -) -@pytest.mark.asyncio(loop_scope="session") 
-async def test_hmac_signature_failure( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, - secret: str, -) -> None: - async with hmac_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "HMAC", - { - "header_name": TEST_HMAC_SIGNATURE_HEADER, - "secret": secret, - "algorithm": V1WebhookHMACAlgorithm.SHA256, - "encoding": V1WebhookHMACEncoding.HEX, - }, - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.asyncio(loop_scope="session") -async def test_hmac_missing_signature_header( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, -) -> None: - async with hmac_webhook(hatchet, test_run_id) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE" - ) as response: - assert response.status == 403 - - await assert_event_not_created( - hatchet, - test_start, - incoming_webhook, - ) - - -@pytest.mark.parametrize( - "source_name", - [ - V1WebhookSourceName.GENERIC, - V1WebhookSourceName.GITHUB, - ], -) -@pytest.mark.asyncio(loop_scope="session") -async def test_different_source_types( - hatchet: Hatchet, - test_run_id: str, - test_start: datetime, - webhook_body: WebhookInput, - source_name: V1WebhookSourceName, -) -> None: - async with basic_auth_webhook( - hatchet, test_run_id, source_name=source_name - ) as incoming_webhook: - async with await send_webhook_request( - url(hatchet.tenant_id, incoming_webhook.name), - webhook_body, - "BASIC", - {"username": TEST_BASIC_USERNAME, "password": TEST_BASIC_PASSWORD}, - ) as response: - assert response.status == 200 - data = await response.json() - assert data == {"message": "ok"} - - await assert_has_runs( - hatchet, - test_start, - webhook_body, - incoming_webhook, - ) diff --git a/examples/python/worker_existing_loop/worker.py b/examples/python/worker_existing_loop/worker.py deleted file mode 100644 index c5040dd20d..0000000000 --- a/examples/python/worker_existing_loop/worker.py +++ /dev/null @@ -1,42 +0,0 @@ -import asyncio -from contextlib import suppress - -from hatchet_sdk import Context, EmptyModel, Hatchet - -hatchet = Hatchet(debug=True) - -existing_loop_worker = hatchet.workflow(name="WorkerExistingLoopWorkflow") - - -@existing_loop_worker.task() -async def task(input: EmptyModel, ctx: Context) -> dict[str, str]: - print("started") - await asyncio.sleep(10) - print("finished") - return {"result": "returned result"} - - -async def async_main() -> None: - worker = None - try: - worker = hatchet.worker( - "test-worker", slots=1, workflows=[existing_loop_worker] - ) - worker.start() - - ref = existing_loop_worker.run_no_wait() - print(await ref.aio_result()) - while True: - await asyncio.sleep(1) - finally: - if worker: - await worker.exit_gracefully() - - -def main() -> None: - with suppress(KeyboardInterrupt): - asyncio.run(async_main()) - - -if __name__ == "__main__": - main() diff --git a/examples/typescript/__e2e__/harness.ts b/examples/typescript/__e2e__/harness.ts new file mode 100644 index 0000000000..d5cfbb8035 --- /dev/null +++ b/examples/typescript/__e2e__/harness.ts @@ -0,0 +1,75 @@ +import sleep from '@hatchet-dev/typescript-sdk/util/sleep'; +import { randomUUID } from 'crypto'; +import { HatchetClient } from '@hatchet-dev/typescript-sdk/v1'; +import type { 
BaseWorkflowDeclaration } from '@hatchet-dev/typescript-sdk/v1'; +import { Worker } from '../../client/worker/worker'; + +export function requireEnv(name: string): string { + const value = process.env[name]; + if (!value) { + throw new Error( + `Missing required environment variable ${name}. ` + + `E2E tests require a configured Hatchet instance and credentials.` + ); + } + return value; +} + +export function makeE2EClient(): HatchetClient { + // ConfigLoader requires a token; this makes the failure message obvious. + requireEnv('HATCHET_CLIENT_TOKEN'); + return HatchetClient.init(); +} + +export function makeTestScope(prefix = 'ts_e2e'): string { + return `${prefix}_${randomUUID()}`; +} + +export async function startWorker({ + client, + name, + workflows, + slots = 50, +}: { + client: HatchetClient; + name: string; + workflows: Array<BaseWorkflowDeclaration<any, any>>; + slots?: number; +}): Promise<Worker> { + const worker = await client.worker(name, { workflows, slots }); + void worker.start(); + return worker; +} + +export async function stopWorker(worker: Worker | undefined) { + if (!worker) return; + await worker.stop(); + // give the engine a beat to settle + await sleep(1500); +} + +export async function poll<T>( + fn: () => Promise<T>, + { + timeoutMs = 30_000, + intervalMs = 1000, + shouldStop, + label = 'poll', + }: { + timeoutMs?: number; + intervalMs?: number; + shouldStop: (value: T) => boolean; + label?: string; + } +): Promise<T> { + const start = Date.now(); + while (true) { + const value = await fn(); + if (shouldStop(value)) return value; + if (Date.now() - start > timeoutMs) { + throw new Error(`Timed out waiting for ${label} after ${timeoutMs}ms`); + } + await sleep(intervalMs); + } +} + diff --git a/examples/typescript/affinity/affinity-workers.ts b/examples/typescript/affinity/affinity-workers.ts index 486af75a20..201ed1021a 100644 --- a/examples/typescript/affinity/affinity-workers.ts +++ b/examples/typescript/affinity/affinity-workers.ts @@ -1,4 +1,4 @@ -import { WorkerLabelComparator } from '@hatchet-dev/typescript-sdk/protoc/workflows'; +import { WorkerLabelComparator } from '@hatchet-dev/typescript-sdk/v1'; import { hatchet } from '../hatchet-client'; // > AffinityWorkflow @@ -11,10 +11,10 @@ const workflow = hatchet.workflow({ workflow.task({ name: 'step1', fn: async (_, ctx) => { - const results: Promise<any>[] = []; + const results = []; for (let i = 0; i < 50; i++) { - const result = await ctx.spawnWorkflow(childWorkflow.id, {}); - results.push(result.output); + const result = await childWorkflow.run({}); + results.push(result); } console.log('Spawned 50 child workflows'); console.log('Results:', await Promise.all(results)); diff --git a/examples/typescript/bulk_fanout/workflow.ts b/examples/typescript/bulk_fanout/workflow.ts new file mode 100644 index 0000000000..4d40198838 --- /dev/null +++ b/examples/typescript/bulk_fanout/workflow.ts @@ -0,0 +1,30 @@ +import { hatchet } from '../hatchet-client'; + +export type ParentInput = { n: number }; + +export const bulkChild = hatchet.task({ + name: 'bulk-child', + fn: async (input: { i: number }) => { + return { i: input.i }; + }, +}); + +export const bulkParentWorkflow = hatchet.workflow({ + name: 'bulk-parent', +}); + +bulkParentWorkflow.task({ + name: 'spawn', + fn: async (input, ctx) => { + const typed = input as ParentInput; + const children = Array.from({ length: typed.n }, (_, i) => ({ + workflow: bulkChild, + input: { i }, + })); + + const results = await ctx.bulkRunChildren(children); + + return { results }; + }, +}); +
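A runner for the new bulk-fanout workflow is not part of this changeset; a minimal sketch, assuming the same hatchet-client import and worker/run patterns the surrounding examples use (the worker name and slot count are illustrative):

```typescript
import { hatchet } from '../hatchet-client';
import { bulkChild, bulkParentWorkflow } from './workflow';

async function main() {
  // Register both the parent and the child so one worker can execute the whole fan-out.
  const worker = await hatchet.worker('bulk-fanout-worker', {
    workflows: [bulkChild, bulkParentWorkflow],
    slots: 50,
  });
  void worker.start();

  // The parent's `spawn` task calls ctx.bulkRunChildren, so this single run
  // fans out to three child runs and returns their collected outputs.
  const result = await bulkParentWorkflow.run({ n: 3 });
  console.log(result);
}

if (require.main === module) {
  main()
    .catch(console.error)
    .finally(() => process.exit(0));
}
```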
diff --git a/examples/typescript/bulk_operations/workflow.ts b/examples/typescript/bulk_operations/workflow.ts new file mode 100644 index 0000000000..ab17e5a126 --- /dev/null +++ b/examples/typescript/bulk_operations/workflow.ts @@ -0,0 +1,35 @@ +import { hatchet } from '../hatchet-client'; + +export const bulkReplayTest1 = hatchet.task({ + name: 'bulk-replay-test-1', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); + +export const bulkReplayTest2 = hatchet.task({ + name: 'bulk-replay-test-2', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); + +export const bulkReplayTest3 = hatchet.task({ + name: 'bulk-replay-test-3', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); +
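The three bulk-replay tasks above are built to fail on the first attempt (retryCount() === 0) and succeed on the retry. A sketch of driving them from one worker, following the conventions of the neighboring examples (the worker name and slot count are illustrative):

```typescript
import { hatchet } from '../hatchet-client';
import { bulkReplayTest1, bulkReplayTest2, bulkReplayTest3 } from './workflow';

async function main() {
  const worker = await hatchet.worker('bulk-operations-worker', {
    workflows: [bulkReplayTest1, bulkReplayTest2, bulkReplayTest3],
    slots: 50,
  });
  void worker.start();

  // Each run throws once and then completes on its single allowed retry,
  // which is the behavior bulk replay operations are exercised against.
  const results = await Promise.all([
    bulkReplayTest1.run({}),
    bulkReplayTest2.run({}),
    bulkReplayTest3.run({}),
  ]);
  console.log(results);
}

if (require.main === module) {
  main()
    .catch(console.error)
    .finally(() => process.exit(0));
}
```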
diff --git a/examples/typescript/cancellation/cancellation-workflow.ts b/examples/typescript/cancellation/cancellation-workflow.ts new file mode 100644 index 0000000000..3dee249c65 --- /dev/null +++ b/examples/typescript/cancellation/cancellation-workflow.ts @@ -0,0 +1,81 @@ +import sleep from '@hatchet-dev/typescript-sdk/util/sleep'; +import axios from 'axios'; +import { hatchet } from '../hatchet-client'; + +function sleepWithAbort(signal: AbortSignal, ms: number) { + return new Promise<void>((resolve, reject) => { + const timer = setTimeout(() => { + signal.removeEventListener('abort', onAbort); + resolve(); + }, ms); + + const onAbort = () => { + clearTimeout(timer); + reject(new Error('Cancelled')); + }; + + if (signal.aborted) { + clearTimeout(timer); + reject(new Error('Cancelled')); + return; + } + + signal.addEventListener('abort', onAbort, { once: true }); + }); +} + +// > Self-cancelling workflow (mirrors Python example) +export const cancellationWorkflow = hatchet.workflow({ + name: 'CancelWorkflow', +}); + +cancellationWorkflow.task({ + name: 'self-cancel', + fn: async (_, ctx) => { + await sleepWithAbort(ctx.abortController.signal, 2000); + + // Cancel the current task run (server-side) and optimistically abort local execution. + await ctx.cancel(); + + // If cancellation didn't stop execution yet, keep waiting but cooperatively. + await sleepWithAbort(ctx.abortController.signal, 10_000); + + return { error: 'Task should have been cancelled' }; + }, +}); + +cancellationWorkflow.task({ + name: 'check-flag', + fn: async (_, ctx) => { + for (let i = 0; i < 3; i += 1) { + await sleepWithAbort(ctx.abortController.signal, 1000); + if (ctx.cancelled) { + throw new Error('Cancelled'); + } + } + return { error: 'Task should have been cancelled' }; + }, +}); + +// > Abort Signal +export const abortSignal = hatchet.task({ + name: 'abort-signal', + fn: async (_, { abortController }) => { + try { + const response = await axios.get('https://api.example.com/data', { + signal: abortController.signal, + }); + // Handle the response + } catch (error) { + if (axios.isCancel(error)) { + // Request was canceled + console.log('Request canceled'); + } else { + // Handle other errors + } + } + }, +}); + +// see ./worker.ts and ./run.ts for how to run the workflow + diff --git a/examples/typescript/cancellations/run.ts b/examples/typescript/cancellation/run.ts similarity index 68% rename from examples/typescript/cancellations/run.ts rename to examples/typescript/cancellation/run.ts index 41017b62a6..b219e40cac 100644 --- a/examples/typescript/cancellations/run.ts +++ b/examples/typescript/cancellation/run.ts @@ -1,11 +1,11 @@ // > Running a Task with Results import sleep from '@hatchet-dev/typescript-sdk/util/sleep'; -import { cancellation } from './workflow'; +import { cancellationWorkflow } from './cancellation-workflow'; import { hatchet } from '../hatchet-client'; // ... async function main() { - const run = await cancellation.runNoWait({}); - const run1 = await cancellation.runNoWait({}); + const run = await cancellationWorkflow.runNoWait({}); + const run1 = await cancellationWorkflow.runNoWait({}); await sleep(1000); @@ -25,8 +25,8 @@ async function main() { console.log(resReplay); - const run2 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'abc' } }); - const run4 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'test' } }); + const run2 = await cancellationWorkflow.runNoWait({}, { additionalMetadata: { test: 'abc' } }); + const run4 = await cancellationWorkflow.runNoWait({}, { additionalMetadata: { test: 'test' } }); await sleep(1000); @@ -46,3 +46,4 @@ if (require.main === module) { .catch(console.error) .finally(() => process.exit(0)); } + diff --git a/examples/typescript/cancellations/worker.ts b/examples/typescript/cancellation/worker.ts similarity index 78% rename from examples/typescript/cancellations/worker.ts rename to examples/typescript/cancellation/worker.ts index c808ea9d0c..8fd6ed93f2 100644 --- a/examples/typescript/cancellations/worker.ts +++ b/examples/typescript/cancellation/worker.ts @@ -1,11 +1,11 @@ // > Declaring a Worker import { hatchet } from '../hatchet-client'; -import { cancellation } from './workflow'; +import { cancellationWorkflow } from './cancellation-workflow'; async function main() { const worker = await hatchet.worker('cancellation-worker', { // 👀 Declare the workflows that the worker can execute - workflows: [cancellation], + workflows: [cancellationWorkflow], // 👀 Declare the number of concurrent task runs the worker can accept slots: 100, }); @@ -16,3 +16,4 @@ if (require.main === module) { main(); } + diff --git a/examples/typescript/cancellations/workflow.ts b/examples/typescript/cancellations/workflow.ts deleted file mode 100644 index 011bfd4911..0000000000 --- a/examples/typescript/cancellations/workflow.ts +++ /dev/null @@ -1,41
+0,0 @@ -import sleep from '@hatchet-dev/typescript-sdk/util/sleep'; -import axios from 'axios'; -import { hatchet } from '../hatchet-client'; - -// > Declaring a Task -export const cancellation = hatchet.task({ - name: 'cancellation', - fn: async (_, ctx) => { - await sleep(10 * 1000); - - if (ctx.cancelled) { - throw new Error('Task was cancelled'); - } - - return { - Completed: true, - }; - }, -}); - -// > Abort Signal -export const abortSignal = hatchet.task({ - name: 'abort-signal', - fn: async (_, { abortController }) => { - try { - const response = await axios.get('https://api.example.com/data', { - signal: abortController.signal, - }); - // Handle the response - } catch (error) { - if (axios.isCancel(error)) { - // Request was canceled - console.log('Request canceled'); - } else { - // Handle other errors - } - } - }, -}); - -// see ./worker.ts and ./run.ts for how to run the workflow diff --git a/examples/typescript/concurrency-rr/load.ts b/examples/typescript/concurrency_limit_rr/load.ts similarity index 99% rename from examples/typescript/concurrency-rr/load.ts rename to examples/typescript/concurrency_limit_rr/load.ts index 1c2691f26b..c992c79bc0 100644 --- a/examples/typescript/concurrency-rr/load.ts +++ b/examples/typescript/concurrency_limit_rr/load.ts @@ -44,3 +44,4 @@ async function main() { if (require.main === module) { main().then(() => process.exit(0)); } + diff --git a/examples/typescript/concurrency-rr/run.ts b/examples/typescript/concurrency_limit_rr/run.ts similarity index 99% rename from examples/typescript/concurrency-rr/run.ts rename to examples/typescript/concurrency_limit_rr/run.ts index e087ae33d7..04e05de015 100644 --- a/examples/typescript/concurrency-rr/run.ts +++ b/examples/typescript/concurrency_limit_rr/run.ts @@ -24,3 +24,4 @@ async function main() { if (require.main === module) { main().then(() => process.exit(0)); } + diff --git a/sdks/typescript/src/v1/examples/concurrency-rr/worker.ts b/examples/typescript/concurrency_limit_rr/worker.ts similarity index 99% rename from sdks/typescript/src/v1/examples/concurrency-rr/worker.ts rename to examples/typescript/concurrency_limit_rr/worker.ts index 038c0771a5..46d0d30480 100644 --- a/sdks/typescript/src/v1/examples/concurrency-rr/worker.ts +++ b/examples/typescript/concurrency_limit_rr/worker.ts @@ -12,3 +12,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/concurrency-rr/workflow.ts b/examples/typescript/concurrency_limit_rr/workflow.ts similarity index 98% rename from examples/typescript/concurrency-rr/workflow.ts rename to examples/typescript/concurrency_limit_rr/workflow.ts index b107d49d0c..2de7f98bd7 100644 --- a/examples/typescript/concurrency-rr/workflow.ts +++ b/examples/typescript/concurrency_limit_rr/workflow.ts @@ -1,4 +1,4 @@ -import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow'; +import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/v1'; import { hatchet } from '../hatchet-client'; type SimpleInput = { @@ -63,3 +63,4 @@ multipleConcurrencyKeys.task({ }; }, }); + diff --git a/examples/typescript/dag_match_condition/complex-workflow.ts b/examples/typescript/conditions/complex-workflow.ts similarity index 99% rename from examples/typescript/dag_match_condition/complex-workflow.ts rename to examples/typescript/conditions/complex-workflow.ts index b4be8d32c5..6209966691 100644 --- a/examples/typescript/dag_match_condition/complex-workflow.ts +++ b/examples/typescript/conditions/complex-workflow.ts @@ 
-95,3 +95,4 @@ taskConditionWorkflow.task({ }; }, }); + diff --git a/examples/typescript/durable-sleep/event.ts b/examples/typescript/conditions/event.ts similarity index 99% rename from examples/typescript/durable-sleep/event.ts rename to examples/typescript/conditions/event.ts index f20a6bb7f3..bf146990a1 100644 --- a/examples/typescript/durable-sleep/event.ts +++ b/examples/typescript/conditions/event.ts @@ -14,3 +14,4 @@ if (require.main === module) { process.exit(1); }); } + diff --git a/examples/typescript/dag_match_condition/run.ts b/examples/typescript/conditions/run.ts similarity index 99% rename from examples/typescript/dag_match_condition/run.ts rename to examples/typescript/conditions/run.ts index fd86707a0a..e01d36e9d9 100644 --- a/examples/typescript/dag_match_condition/run.ts +++ b/examples/typescript/conditions/run.ts @@ -12,3 +12,4 @@ if (require.main === module) { .catch(console.error) .finally(() => process.exit(0)); } + diff --git a/sdks/typescript/src/v1/examples/dag_match_condition/worker.ts b/examples/typescript/conditions/worker.ts similarity index 99% rename from sdks/typescript/src/v1/examples/dag_match_condition/worker.ts rename to examples/typescript/conditions/worker.ts index 5eafa72d83..350fb0e26e 100644 --- a/sdks/typescript/src/v1/examples/dag_match_condition/worker.ts +++ b/examples/typescript/conditions/worker.ts @@ -12,3 +12,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/dag_match_condition/workflow.ts b/examples/typescript/conditions/workflow.ts similarity index 99% rename from examples/typescript/dag_match_condition/workflow.ts rename to examples/typescript/conditions/workflow.ts index 7461d6a15e..15b479c402 100644 --- a/examples/typescript/dag_match_condition/workflow.ts +++ b/examples/typescript/conditions/workflow.ts @@ -39,3 +39,4 @@ dagWithConditions.task({ }; }, }); + diff --git a/examples/typescript/durable/workflow.ts b/examples/typescript/durable/workflow.ts new file mode 100644 index 0000000000..4e3eb936a6 --- /dev/null +++ b/examples/typescript/durable/workflow.ts @@ -0,0 +1,122 @@ +import { hatchet } from '../hatchet-client'; +import { Or, SleepCondition, UserEventCondition } from '@hatchet-dev/typescript-sdk/v1/conditions'; + +export const EVENT_KEY = 'durable-example:event'; +export const SLEEP_TIME_SECONDS = 5; +export const SLEEP_TIME = `${SLEEP_TIME_SECONDS}s` as const; + +// > Create a durable workflow +export const durableWorkflow = hatchet.workflow({ + name: 'DurableWorkflow', +}); + +durableWorkflow.task({ + name: 'ephemeral_task', + fn: async () => { + console.log('Running non-durable task'); + }, +}); + +durableWorkflow.durableTask({ + name: 'durable_task', + executionTimeout: '10m', + fn: async (_input, ctx) => { + console.log('Waiting for sleep'); + await ctx.sleepFor(SLEEP_TIME); + console.log('Sleep finished'); + + console.log('Waiting for event'); + await ctx.waitFor({ eventKey: EVENT_KEY }); + console.log('Event received'); + + return { status: 'success' }; + }, +}); + +function extractKeyAndEventId(waitResult: unknown): { key: string; event_id: string } { + // DurableContext.waitFor currently returns the CREATE payload directly. + // The shape is typically `{ [readableDataKey]: { [eventId]: ... } }`. 
+ const obj = waitResult as any; + if (obj && typeof obj === 'object') { + const key = Object.keys(obj)[0]; + const inner = obj[key]; + if (inner && typeof inner === 'object') { + const eventId = Object.keys(inner)[0]; + if (eventId) { + return { key: 'CREATE', event_id: eventId }; + } + } + if (key) { + return { key: 'CREATE', event_id: key }; + } + } + + return { key: 'CREATE', event_id: '' }; +} + +durableWorkflow.durableTask({ + name: 'wait_for_or_group_1', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + const waitResult = await ctx.waitFor( + Or(new SleepCondition(SLEEP_TIME, 'sleep'), new UserEventCondition(EVENT_KEY, '', 'event')) + ); + const { key, event_id } = extractKeyAndEventId(waitResult); + return { + runtime: Math.round((Date.now() - start) / 1000), + key, + event_id, + }; + }, +}); + +durableWorkflow.durableTask({ + name: 'wait_for_or_group_2', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + const waitResult = await ctx.waitFor( + Or( + new SleepCondition(`${6 * SLEEP_TIME_SECONDS}s`, 'sleep'), + new UserEventCondition(EVENT_KEY, '', 'event') + ) + ); + const { key, event_id } = extractKeyAndEventId(waitResult); + return { + runtime: Math.round((Date.now() - start) / 1000), + key, + event_id, + }; + }, +}); + +durableWorkflow.durableTask({ + name: 'wait_for_multi_sleep', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + // sleep 3 times + for (let i = 0; i < 3; i += 1) { + await ctx.sleepFor(SLEEP_TIME); + } + + return { runtime: Math.round((Date.now() - start) / 1000) }; + }, +}); + +export const waitForSleepTwice = hatchet.durableTask({ + name: 'wait-for-sleep-twice', + executionTimeout: '10m', + fn: async (_input, ctx) => { + try { + const start = Date.now(); + await ctx.sleepFor(SLEEP_TIME); + return { runtime: Math.round((Date.now() - start) / 1000) }; + } catch (e) { + // treat cancellation as a successful completion for parity with Python sample + return { runtime: -1 }; + } + }, +}); + diff --git a/examples/typescript/durable-event/event.ts b/examples/typescript/durable_event/event.ts similarity index 99% rename from examples/typescript/durable-event/event.ts rename to examples/typescript/durable_event/event.ts index 3d44590761..b313f29685 100644 --- a/examples/typescript/durable-event/event.ts +++ b/examples/typescript/durable_event/event.ts @@ -14,3 +14,4 @@ if (require.main === module) { process.exit(1); }); } + diff --git a/examples/typescript/durable-event/run.ts b/examples/typescript/durable_event/run.ts similarity index 99% rename from examples/typescript/durable-event/run.ts rename to examples/typescript/durable_event/run.ts index 7891fa8964..2440f2f42e 100644 --- a/examples/typescript/durable-event/run.ts +++ b/examples/typescript/durable_event/run.ts @@ -15,3 +15,4 @@ if (require.main === module) { process.exit(1); }); } + diff --git a/sdks/typescript/src/v1/examples/durable-event/worker.ts b/examples/typescript/durable_event/worker.ts similarity index 99% rename from sdks/typescript/src/v1/examples/durable-event/worker.ts rename to examples/typescript/durable_event/worker.ts index b99b05830a..cc8eee362e 100644 --- a/sdks/typescript/src/v1/examples/durable-event/worker.ts +++ b/examples/typescript/durable_event/worker.ts @@ -12,3 +12,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/durable-event/workflow.ts b/examples/typescript/durable_event/workflow.ts similarity index 99% rename 
from examples/typescript/durable-event/workflow.ts rename to examples/typescript/durable_event/workflow.ts index 72360d8c5a..c9a4a6c272 100644 --- a/examples/typescript/durable-event/workflow.ts +++ b/examples/typescript/durable_event/workflow.ts @@ -35,3 +35,4 @@ export const durableEventWithFilter = hatchet.durableTask({ }; }, }); + diff --git a/examples/typescript/dag_match_condition/event.ts b/examples/typescript/durable_sleep/event.ts similarity index 99% rename from examples/typescript/dag_match_condition/event.ts rename to examples/typescript/durable_sleep/event.ts index f20a6bb7f3..bf146990a1 100644 --- a/examples/typescript/dag_match_condition/event.ts +++ b/examples/typescript/durable_sleep/event.ts @@ -14,3 +14,4 @@ if (require.main === module) { process.exit(1); }); } + diff --git a/examples/typescript/durable-sleep/run.ts b/examples/typescript/durable_sleep/run.ts similarity index 99% rename from examples/typescript/durable-sleep/run.ts rename to examples/typescript/durable_sleep/run.ts index d9f9fc3c4b..a4654d669b 100644 --- a/examples/typescript/durable-sleep/run.ts +++ b/examples/typescript/durable_sleep/run.ts @@ -15,3 +15,4 @@ if (require.main === module) { process.exit(1); }); } + diff --git a/sdks/typescript/src/v1/examples/durable-sleep/worker.ts b/examples/typescript/durable_sleep/worker.ts similarity index 99% rename from sdks/typescript/src/v1/examples/durable-sleep/worker.ts rename to examples/typescript/durable_sleep/worker.ts index 7e9238c55f..12e7f870d4 100644 --- a/sdks/typescript/src/v1/examples/durable-sleep/worker.ts +++ b/examples/typescript/durable_sleep/worker.ts @@ -12,3 +12,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/durable-sleep/workflow.ts b/examples/typescript/durable_sleep/workflow.ts similarity index 99% rename from examples/typescript/durable-sleep/workflow.ts rename to examples/typescript/durable_sleep/workflow.ts index b73b231c62..2f72ae39db 100644 --- a/examples/typescript/durable-sleep/workflow.ts +++ b/examples/typescript/durable_sleep/workflow.ts @@ -19,3 +19,4 @@ durableSleep.durableTask({ }; }, }); + diff --git a/examples/typescript/on_event/event.ts b/examples/typescript/events/event.ts similarity index 99% rename from examples/typescript/on_event/event.ts rename to examples/typescript/events/event.ts index 07d582aaf5..4c72fc7e1d 100644 --- a/examples/typescript/on_event/event.ts +++ b/examples/typescript/events/event.ts @@ -45,3 +45,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/on_event/filter.ts b/examples/typescript/events/filter.ts similarity index 95% rename from examples/typescript/on_event/filter.ts rename to examples/typescript/events/filter.ts index 42b73bfbbc..41063f22ee 100644 --- a/examples/typescript/on_event/filter.ts +++ b/examples/typescript/events/filter.ts @@ -3,7 +3,7 @@ import { lower, SIMPLE_EVENT } from './workflow'; // > Create a filter hatchet.filters.create({ - workflowId: lower.id, + workflowId: lower.name, expression: 'input.ShouldSkip == false', scope: 'foobarbaz', payload: { @@ -36,3 +36,4 @@ hatchet.events.push( scope: 'foobarbaz', } ); + diff --git a/sdks/typescript/src/v1/examples/on_event/worker.ts b/examples/typescript/events/worker.ts similarity index 99% rename from sdks/typescript/src/v1/examples/on_event/worker.ts rename to examples/typescript/events/worker.ts index eedbd831d9..13bc226bce 100644 --- a/sdks/typescript/src/v1/examples/on_event/worker.ts +++ 
b/examples/typescript/events/worker.ts @@ -12,3 +12,4 @@ async function main() { if (require.main === module) { main(); } + diff --git a/examples/typescript/on_event/workflow.ts b/examples/typescript/events/workflow.ts similarity index 99% rename from examples/typescript/on_event/workflow.ts rename to examples/typescript/events/workflow.ts index d1d522d867..3c26d0bb69 100644 --- a/examples/typescript/on_event/workflow.ts +++ b/examples/typescript/events/workflow.ts @@ -76,3 +76,4 @@ lowerWithFilter.task({ console.log(ctx.filterPayload()); }, }); + diff --git a/examples/typescript/high-memory/workflow-with-child.ts b/examples/typescript/high-memory/workflow-with-child.ts index 3fa009e112..176e3e9f76 100644 --- a/examples/typescript/high-memory/workflow-with-child.ts +++ b/examples/typescript/high-memory/workflow-with-child.ts @@ -23,7 +23,7 @@ export const child = hatchet.task({ export const parent = hatchet.task({ name: 'parent', - timeout: '10m', + executionTimeout: '10m', fn: async (input: ParentInput, ctx) => { // lets generate large payload 1 mb const largePayload = new Array(1024 * 1024).fill('a').join(''); diff --git a/examples/typescript/legacy/run.ts b/examples/typescript/legacy/run.ts deleted file mode 100644 index eaef40d6b6..0000000000 --- a/examples/typescript/legacy/run.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { hatchet } from '../hatchet-client'; -import { simple } from './workflow'; - -async function main() { - const res = await hatchet.run<{ Message: string }, { step2: string }>(simple, { - Message: 'hello', - }); - - console.log(res.step2); -} - -if (require.main === module) { - main(); -} diff --git a/examples/typescript/legacy/worker.ts b/examples/typescript/legacy/worker.ts deleted file mode 100644 index 81d05536fa..0000000000 --- a/examples/typescript/legacy/worker.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { hatchet } from '../hatchet-client'; -import { simple } from './workflow'; - -async function main() { - const worker = await hatchet.worker('legacy-worker', { - workflows: [simple], - }); - - await worker.start(); -} - -if (require.main === module) { - main(); -} diff --git a/examples/typescript/legacy/workflow.ts b/examples/typescript/legacy/workflow.ts deleted file mode 100644 index 9cf0e0868d..0000000000 --- a/examples/typescript/legacy/workflow.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Workflow } from '@hatchet-dev/typescript-sdk/workflow'; - -export const simple: Workflow = { - id: 'legacy-workflow', - description: 'test', - on: { - event: 'user:create', - }, - steps: [ - { - name: 'step1', - run: async (ctx) => { - const input = ctx.workflowInput(); - - return { step1: `original input: ${input.Message}` }; - }, - }, - { - name: 'step2', - parents: ['step1'], - run: (ctx) => { - const step1Output = ctx.stepOutput('step1'); - - return { step2: `step1 output: ${step1Output.step1}` }; - }, - }, - ], -}; diff --git a/examples/typescript/logging/byo-logger.ts b/examples/typescript/logger/byo-logger.ts similarity index 99% rename from examples/typescript/logging/byo-logger.ts rename to examples/typescript/logger/byo-logger.ts index dd1480025f..2e27ef6aee 100644 --- a/examples/typescript/logging/byo-logger.ts +++ b/examples/typescript/logger/byo-logger.ts @@ -72,3 +72,4 @@ async function main() { } main(); + diff --git a/examples/typescript/logging/logger.ts b/examples/typescript/logger/logger.ts similarity index 99% rename from examples/typescript/logging/logger.ts rename to examples/typescript/logger/logger.ts index 764c58e539..7e313083ce 100644 ---
a/examples/typescript/logging/logger.ts +++ b/examples/typescript/logger/logger.ts @@ -38,3 +38,4 @@ async function main() { } main(); + diff --git a/examples/typescript/logger/workflow.ts b/examples/typescript/logger/workflow.ts new file mode 100644 index 0000000000..c7b33a5725 --- /dev/null +++ b/examples/typescript/logger/workflow.ts @@ -0,0 +1,35 @@ +import { hatchet } from '../hatchet-client'; + +// Mirrors `sdks/python/examples/logger/workflow.py` +export const loggingWorkflow = hatchet.workflow({ + name: 'LoggingWorkflow', +}); + +loggingWorkflow.task({ + name: 'root_logger', + fn: async () => { + for (let i = 0; i < 12; i += 1) { + console.info(`executed step1 - ${i}`); + console.info({ step1: 'step1' }); + // keep this fast for e2e + } + + return { status: 'success' }; + }, +}); + +loggingWorkflow.task({ + name: 'context_logger', + fn: async (_input, ctx) => { + for (let i = 0; i < 12; i += 1) { + // Python uses ctx.log; TS has both ctx.log (deprecated) and ctx.logger.* + // Use ctx.log to stay closer semantically. + await ctx.log(`executed step1 - ${i}`); + await ctx.log(JSON.stringify({ step1: 'step1' })); + } + + return { status: 'success' }; + }, +}); + +
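A standalone runner for the new LoggingWorkflow, sketched in the style of the surrounding examples (the worker name and slot count are illustrative):

```typescript
import { hatchet } from '../hatchet-client';
import { loggingWorkflow } from './workflow';

async function main() {
  const worker = await hatchet.worker('logging-worker', {
    workflows: [loggingWorkflow],
    slots: 10,
  });
  void worker.start();

  // root_logger writes through console.info; context_logger forwards each
  // line to Hatchet via ctx.log, so it shows up alongside the run in the UI.
  const result = await loggingWorkflow.run({});
  console.log(result);
}

if (require.main === module) {
  main()
    .catch(console.error)
    .finally(() => process.exit(0));
}
```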
diff --git a/examples/typescript/multiple_wf_concurrency/workflow.ts b/examples/typescript/multiple_wf_concurrency/workflow.ts index 9953c8d0a2..e396cdf586 100644 --- a/examples/typescript/multiple_wf_concurrency/workflow.ts +++ b/examples/typescript/multiple_wf_concurrency/workflow.ts @@ -1,4 +1,4 @@ -import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow'; +import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/v1'; import { hatchet } from '../hatchet-client'; type SimpleInput = { diff --git a/examples/typescript/on_event/event.e2e.ts b/examples/typescript/on_event/event.e2e.ts deleted file mode 100644 index cae8ac0c65..0000000000 --- a/examples/typescript/on_event/event.e2e.ts +++ /dev/null @@ -1,372 +0,0 @@ -import sleep from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/util/sleep'; -import { randomUUID } from 'crypto'; -import { Event } from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/protoc/events'; -import { SIMPLE_EVENT, lower, Input } from './workflow'; -import { hatchet } from '../hatchet-client'; -import { Worker } from '../../client/worker/worker'; - -xdescribe('events-e2e', () => { - let worker: Worker; - let testRunId: string; - - beforeEach(async () => { - testRunId = randomUUID(); - - worker = await hatchet.worker('event-worker'); - await worker.registerWorkflow(lower); - - void worker.start(); - }); - - afterAll(async () => { - await worker.stop(); - await sleep(2000); - }); - - async function setupEventFilter(expression?: string, payload: Record<string, any> = {}) { - const finalExpression = - expression || `input.ShouldSkip == false && payload.testRunId == '${testRunId}'`; - - const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id; - - const filter = await hatchet.filters.create({ - workflowId, - expression: finalExpression, - scope: testRunId, - payload: { testRunId, ...payload }, - }); - - return async () => { - await hatchet.filters.delete(filter.metadata.id); - }; - } - - // Helper function to wait for events to process and fetch runs - async function waitForEventsToProcess(events: Event[]): Promise<Record<string, any[]>> { - await sleep(3000); - - const persisted = (await hatchet.events.list({ limit: 100 })).rows || []; - - // Ensure all our events are persisted - const eventIds = new Set(events.map((e) => e.eventId)); - const persistedIds = new Set(persisted.map((e) => e.metadata.id)); - expect(Array.from(eventIds).every((id) => persistedIds.has(id))).toBeTruthy(); - - let attempts = 0; - const maxAttempts = 15; - const eventToRuns: Record<string, any[]> = {}; - - while (true) { - console.log('Waiting for event runs to complete...'); - if (attempts > maxAttempts) { - console.log('Timed out waiting for event runs to complete.'); - return {}; - } - - attempts += 1; - - // For each event, fetch its runs - const runsPromises = events.map(async (event) => { - const runs = await hatchet.runs.list({ - triggeringEventExternalId: event.eventId, - }); - - // Extract metadata from event - const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {}; - - const payload = event.payload ? JSON.parse(event.payload) : {}; - - return { - event: { - id: event.eventId, - payload, - meta, - shouldHaveRuns: Boolean(meta.should_have_runs), - testRunId: meta.test_run_id, - }, - runs: runs.rows || [], - }; - }); - - const eventRuns = await Promise.all(runsPromises); - - // If all events have no runs yet, wait and retry - if (eventRuns.every(({ runs }) => runs.length === 0)) { - await sleep(1000); - - continue; - } - - // Store runs by event ID - for (const { event, runs } of eventRuns) { - eventToRuns[event.id] = runs; - } - - // Check if any runs are still in progress - const anyInProgress = Object.values(eventToRuns).some((runs) => - runs.some((run) => run.status === 'QUEUED' || run.status === 'RUNNING') - ); - - if (anyInProgress) { - await sleep(1000); - - continue; - } - - break; - } - - return eventToRuns; - } - - // Helper to verify runs match expectations - function verifyEventRuns(eventData: any, runs: any[]) { - if (eventData.shouldHaveRuns) { - expect(runs.length).toBeGreaterThan(0); - } else { - expect(runs.length).toBe(0); - } - } - - // Helper to create bulk push event objects - function createBulkPushEvent({ - index = 1, - ShouldSkip = false, - shouldHaveRuns = true, - key = SIMPLE_EVENT, - payload = {}, - scope = null, - }: { - index?: number; - ShouldSkip?: boolean; - shouldHaveRuns?: boolean; - key?: string; - payload?: Record<string, any>; - scope?: string | null; - }) { - return { - key, - payload: { - ShouldSkip, - Message: `This is event ${index}`, - ...payload, - }, - additionalMetadata: { - should_have_runs: shouldHaveRuns, - test_run_id: testRunId, - key, - index, - }, - scope: scope || undefined, - }; - } - - // Helper to create payload object - function createEventPayload(ShouldSkip: boolean): Input { - return { ShouldSkip, Message: 'This is event 1' }; - } - - it('should push an event', async () => { - const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false)); - expect(event.eventId).toBeTruthy(); - }, 10000); - - it('should push an event asynchronously', async () => { - const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false)); - expect(event.eventId).toBeTruthy(); - }, 10000); - - it('should bulk push events', async () => { - const events = [ - { - key: SIMPLE_EVENT, - payload: { Message: 'This is event 1', ShouldSkip: false }, - additionalMetadata: { source: 'test', user_id: 'user123' }, - }, - { - key: SIMPLE_EVENT, - payload: { Message: 'This is event 2', ShouldSkip: false }, - additionalMetadata: { source: 'test', user_id: 'user456' }, - }, - { - key: SIMPLE_EVENT, - payload: { Message: 'This is event 3', ShouldSkip: false }, - additionalMetadata: { source: 'test', user_id: 'user789' }, - }, - ]; - - const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events); -
expect(result.events.length).toBe(3); - - // Sort and verify namespacing - const sortedEvents = [...events].sort((a, b) => a.key.localeCompare(b.key)); - const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key)); - - sortedEvents.forEach((originalEvent, index) => { - const returnedEvent = sortedResults[index]; - expect(returnedEvent.key).toBe(originalEvent.key); - }); - }, 15000); - - it('should process events according to event engine behavior', async () => { - const eventPromises = [ - createBulkPushEvent({}), - createBulkPushEvent({ - key: 'thisisafakeeventfoobarbaz', - shouldHaveRuns: false, - }), - ].map((event) => convertBulkToSingle(event)); - const events = await Promise.all(eventPromises); - - const eventToRuns = await waitForEventsToProcess(events); - - // Verify each event's runs - Object.keys(eventToRuns).forEach((eventId) => { - const runs = eventToRuns[eventId]; - const eventInfo = events.find((e) => e.eventId === eventId); - - if (eventInfo) { - const meta = JSON.parse(eventInfo.additionalMetadata || '{}'); - verifyEventRuns( - { - shouldHaveRuns: Boolean(meta.should_have_runs), - }, - runs - ); - } - }); - }, 30000); - - function generateBulkEvents() { - return [ - createBulkPushEvent({ - index: 1, - ShouldSkip: false, - shouldHaveRuns: true, - }), - createBulkPushEvent({ - index: 2, - ShouldSkip: true, - shouldHaveRuns: true, - }), - createBulkPushEvent({ - index: 3, - ShouldSkip: false, - shouldHaveRuns: true, - scope: testRunId, - }), - createBulkPushEvent({ - index: 4, - ShouldSkip: true, - shouldHaveRuns: false, - scope: testRunId, - }), - createBulkPushEvent({ - index: 5, - ShouldSkip: true, - shouldHaveRuns: false, - scope: testRunId, - key: 'thisisafakeeventfoobarbaz', - }), - createBulkPushEvent({ - index: 6, - ShouldSkip: false, - shouldHaveRuns: false, - scope: testRunId, - key: 'thisisafakeeventfoobarbaz', - }), - ]; - } - - async function convertBulkToSingle(event: any) { - return hatchet.events.push(event.key, event.payload, { - scope: event.scope, - additionalMetadata: event.additionalMetadata, - priority: event.priority, - }); - } - - it('should handle event skipping and filtering without bulk push', async () => { - const cleanup = await setupEventFilter(); - - try { - const rawEvents = generateBulkEvents(); - const eventPromises = rawEvents.map((event) => convertBulkToSingle(event)); - const events = await Promise.all(eventPromises); - - const eventToRuns = await waitForEventsToProcess(events); - - // Verify each event's runs - Object.keys(eventToRuns).forEach((eventId) => { - const runs = eventToRuns[eventId]; - const eventInfo = events.find((e) => e.eventId === eventId); - - if (eventInfo) { - const meta = JSON.parse(eventInfo.additionalMetadata || '{}'); - verifyEventRuns( - { - shouldHaveRuns: Boolean(meta.should_have_runs), - }, - runs - ); - } - }); - } finally { - await cleanup(); - } - }, 30000); - - it('should filter events by payload expression not matching', async () => { - const cleanup = await setupEventFilter("input.ShouldSkip == false && payload.foobar == 'baz'", { - foobar: 'qux', - }); - - try { - const event = await hatchet.events.push( - SIMPLE_EVENT, - { Message: 'This is event 1', ShouldSkip: false }, - { - scope: testRunId, - additionalMetadata: { - should_have_runs: 'false', - test_run_id: testRunId, - key: '1', - }, - } - ); - - const eventToRuns = await waitForEventsToProcess([event]); - expect(Object.keys(eventToRuns).length).toBe(0); - } finally { - await cleanup(); - } - }, 20000); - - it('should filter 
events by payload expression matching', async () => {
-    const cleanup = await setupEventFilter("input.ShouldSkip == false && payload.foobar == 'baz'", {
-      foobar: 'baz',
-    });
-
-    try {
-      const event = await hatchet.events.push(
-        SIMPLE_EVENT,
-        { Message: 'This is event 1', ShouldSkip: false },
-        {
-          scope: testRunId,
-          additionalMetadata: {
-            should_have_runs: 'true',
-            test_run_id: testRunId,
-            key: '1',
-          },
-        }
-      );
-
-      const eventToRuns = await waitForEventsToProcess([event]);
-      const runs = Object.values(eventToRuns)[0] || [];
-      expect(runs.length).toBeGreaterThan(0);
-    } finally {
-      await cleanup();
-    }
-  }, 20000);
-});
diff --git a/examples/typescript/on_failure/workflow.ts b/examples/typescript/on_failure/workflow.ts
index 02b6c3a0f0..3cfc0dd515 100644
--- a/examples/typescript/on_failure/workflow.ts
+++ b/examples/typescript/on_failure/workflow.ts
@@ -1,23 +1,30 @@
 import { hatchet } from '../hatchet-client';
 
+export const ERROR_TEXT = 'step1 failed';
+
 // > On Failure Task
+// This workflow will fail because `step1` throws. We define an `onFailure` handler to run cleanup.
 export const failureWorkflow = hatchet.workflow({
-  name: 'always-fail',
+  name: 'OnFailureWorkflow',
 });
 
 failureWorkflow.task({
-  name: 'always-fail',
+  name: 'step1',
+  executionTimeout: '1s',
   fn: async () => {
-    throw new Error('intentional failure');
+    throw new Error(ERROR_TEXT);
   },
 });
 
+// 👀 After the workflow fails, this special step will run
 failureWorkflow.onFailure({
-  name: 'on-failure',
-  fn: async (input, ctx) => {
+  name: 'on_failure',
+  fn: async (_input, ctx) => {
     console.log('onFailure for run:', ctx.workflowRunId());
+    console.log('upstream errors:', ctx.errors());
+
     return {
-      'on-failure': 'success',
+      status: 'success',
     };
   },
 });
diff --git a/examples/typescript/simple/e2e-workflows.ts b/examples/typescript/simple/e2e-workflows.ts
new file mode 100644
index 0000000000..7d3b4a61b7
--- /dev/null
+++ b/examples/typescript/simple/e2e-workflows.ts
@@ -0,0 +1,19 @@
+import { hatchet } from '../hatchet-client';
+import type { InputType } from '@hatchet-dev/typescript-sdk/v1';
+
+// Mirrors `sdks/python/examples/simple/worker.py` outputs for e2e.
+export const helloWorld = hatchet.task({
+  name: 'hello-world',
+  fn: async (_input: InputType) => {
+    return { result: 'Hello, world!' };
+  },
+});
+
+export const helloWorldDurable = hatchet.durableTask({
+  name: 'hello-world-durable',
+  executionTimeout: '10m',
+  fn: async (_input: InputType) => {
+    return { result: 'Hello, world!' };
+  },
+});
+
diff --git a/examples/typescript/simple/enqueue.ts b/examples/typescript/simple/enqueue.ts
index 9313af15be..d4e5a36c8a 100644
--- a/examples/typescript/simple/enqueue.ts
+++ b/examples/typescript/simple/enqueue.ts
@@ -18,12 +18,12 @@ async function main() {
   // > Subscribing to results
   // the return object of the enqueue method is a WorkflowRunRef which includes a listener for the result of the workflow
-  const result = await run.result();
+  const result = await run.output;
   console.log(result);
 
   // if you need to subscribe to the result of the workflow at a later time, you can use the runRef method and the stored runId
   const ref = hatchet.runRef(runId);
-  const result2 = await ref.result();
+  const result2 = await ref.output;
   console.log(result2);
 }
diff --git a/examples/typescript/simple/workflow.ts b/examples/typescript/simple/workflow.ts
index faf58b90b8..51005a25cc 100644
--- a/examples/typescript/simple/workflow.ts
+++ b/examples/typescript/simple/workflow.ts
@@ -1,4 +1,5 @@
 // > Declaring a Task
+import { StickyStrategy } from '@hatchet-dev/typescript-sdk/v1';
 import { hatchet } from '../hatchet-client';
 
 // (optional) Define the input type for the workflow
@@ -8,6 +9,7 @@ export type SimpleInput = {
 export const simple = hatchet.task({
   name: 'simple',
+  sticky: StickyStrategy.SOFT,
   retries: 3,
   fn: async (input: SimpleInput) => {
     return {
diff --git a/examples/typescript/sticky/workflow.ts b/examples/typescript/sticky/workflow.ts
index 49f782b4bb..36916f9c61 100644
--- a/examples/typescript/sticky/workflow.ts
+++ b/examples/typescript/sticky/workflow.ts
@@ -1,4 +1,4 @@
-import { StickyStrategy } from '@hatchet-dev/typescript-sdk/protoc/workflows';
+import { StickyStrategy } from '@hatchet-dev/typescript-sdk/v1';
 import { hatchet } from '../hatchet-client';
 import { child } from '../child_workflows/workflow';
diff --git a/examples/typescript/timeout/run.ts b/examples/typescript/timeout/run.ts
new file mode 100644
index 0000000000..d4c115a085
--- /dev/null
+++ b/examples/typescript/timeout/run.ts
@@ -0,0 +1,19 @@
+import { refreshTimeoutTask, timeoutTask } from './workflow';
+
+async function main() {
+  try {
+    await timeoutTask.run({ Message: 'hello' });
+  } catch (e) {
+    console.log('timeoutTask failed as expected', e);
+  }
+
+  const res = await refreshTimeoutTask.run({ Message: 'hello' });
+  console.log(res);
+}
+
+if (require.main === module) {
+  main()
+    .catch(console.error)
+    .finally(() => process.exit(0));
+}
+
diff --git a/examples/typescript/timeout/worker.ts b/examples/typescript/timeout/worker.ts
new file mode 100644
index 0000000000..48d22a3aa5
--- /dev/null
+++ b/examples/typescript/timeout/worker.ts
@@ -0,0 +1,16 @@
+import { hatchet } from '../hatchet-client';
+import { refreshTimeoutTask, timeoutTask } from './workflow';
+
+async function main() {
+  const worker = await hatchet.worker('timeout-worker', {
+    workflows: [timeoutTask, refreshTimeoutTask],
+    slots: 50,
+  });
+
+  await worker.start();
+}
+
+if (require.main === module) {
+  main();
+}
+
diff --git a/examples/typescript/timeout/workflow.ts b/examples/typescript/timeout/workflow.ts
new file mode 100644
index 0000000000..2f67a7a7f2
--- /dev/null
+++ b/examples/typescript/timeout/workflow.ts
@@ -0,0 +1,46 @@
+import sleep from '@hatchet-dev/typescript-sdk/util/sleep';
+import { hatchet } from '../hatchet-client';
+
+export type SimpleInput = {
+  Message: string;
+};
+
+// > Execution Timeout
+// Mirrors Python `examples/timeout/test_timeout.py::test_execution_timeout`
+export const timeoutTask = hatchet.task({
+  name: 'timeout',
+  executionTimeout: '3s',
+  fn: async (_: SimpleInput, { cancelled }) => {
+    await sleep(10 * 1000);
+
+    if (cancelled) {
+      throw new Error('Task was cancelled');
+    }
+
+    return {
+      status: 'success',
+    };
+  },
+});
+
+// > Refresh Timeout
+// Mirrors Python `examples/timeout/test_timeout.py::test_run_refresh_timeout`
+export const refreshTimeoutTask = hatchet.task({
+  name: 'refresh-timeout',
+  executionTimeout: '10s',
+  scheduleTimeout: '10s',
+  fn: async (input: SimpleInput, ctx) => {
+    ctx.refreshTimeout('15s');
+    await sleep(15000);
+
+    if (ctx.abortController.signal.aborted) {
+      throw new Error('cancelled');
+    }
+
+    return {
+      status: 'success',
+      message: input.Message.toLowerCase(),
+    };
+  },
+});
+
diff --git a/examples/typescript/timeouts/run.ts b/examples/typescript/timeouts/run.ts
deleted file mode 100644
index 047f00ab14..0000000000
--- a/examples/typescript/timeouts/run.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-// > Running a Task with Results
-import { cancellation } from './workflow';
-// ...
-async function main() {
-  // 👀 Run the workflow with results
-  const res = await cancellation.run({});
-
-  // 👀 Access the results of the workflow
-  console.log(res.Completed);
-}
-
-if (require.main === module) {
-  main()
-    .catch(console.error)
-    .finally(() => process.exit(0));
-}
diff --git a/examples/typescript/timeouts/worker.ts b/examples/typescript/timeouts/worker.ts
deleted file mode 100644
index c808ea9d0c..0000000000
--- a/examples/typescript/timeouts/worker.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-// > Declaring a Worker
-import { hatchet } from '../hatchet-client';
-import { cancellation } from './workflow';
-
-async function main() {
-  const worker = await hatchet.worker('cancellation-worker', {
-    // 👀 Declare the workflows that the worker can execute
-    workflows: [cancellation],
-    // 👀 Declare the number of concurrent task runs the worker can accept
-    slots: 100,
-  });
-
-  await worker.start();
-}
-
-if (require.main === module) {
-  main();
-}
diff --git a/examples/typescript/timeouts/workflow.ts b/examples/typescript/timeouts/workflow.ts
deleted file mode 100644
index f856b24607..0000000000
--- a/examples/typescript/timeouts/workflow.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-// > Declaring a Task
-import sleep from '@hatchet-dev/typescript-sdk/util/sleep';
-import { hatchet } from '../hatchet-client';
-
-// (optional) Define the input type for the workflow
-export const cancellation = hatchet.task({
-  name: 'cancellation',
-  executionTimeout: '3s',
-  fn: async (_, { cancelled }) => {
-    await sleep(10 * 1000);
-
-    if (cancelled) {
-      throw new Error('Task was cancelled');
-    }
-
-    return {
-      Completed: true,
-    };
-  },
-});
-
-// see ./worker.ts and ./run.ts for how to run the workflow
diff --git a/examples/typescript/with_timeouts/workflow.ts b/examples/typescript/with_timeouts/workflow.ts
deleted file mode 100644
index da8480d2be..0000000000
--- a/examples/typescript/with_timeouts/workflow.ts
+++ /dev/null
@@ -1,57 +0,0 @@
-import sleep from '@hatchet-dev/typescript-sdk/util/sleep';
-import { hatchet } from '../hatchet-client';
-
-// (optional) Define the input type for the workflow
-export type SimpleInput = {
-  Message: string;
-};
-
-// > Execution Timeout
-export const withTimeouts = hatchet.task({
-  name: 'with-timeouts',
-  // time the task can wait in the queue before it is cancelled
-  scheduleTimeout: '10s',
-  // time the task can run before it is cancelled
-  executionTimeout: '10s',
-  fn: async (input: SimpleInput, ctx) => {
-    // wait 15 seconds
-    await sleep(15000);
-
-    // get the abort controller
-    const { abortController } = ctx;
-
-    // if the abort controller is aborted, throw an error
-    if (abortController.signal.aborted) {
-      throw new Error('cancelled');
-    }
-
-    return {
-      TransformedMessage: input.Message.toLowerCase(),
-    };
-  },
-});
-
-// > Refresh Timeout
-export const refreshTimeout = hatchet.task({
-  name: 'refresh-timeout',
-  executionTimeout: '10s',
-  scheduleTimeout: '10s',
-  fn: async (input: SimpleInput, ctx) => {
-    // adds 15 seconds to the execution timeout
-    ctx.refreshTimeout('15s');
-    await sleep(15000);
-
-    // get the abort controller
-    const { abortController } = ctx;
-
-    // now this condition will not be met
-    // if the abort controller is aborted, throw an error
-    if (abortController.signal.aborted) {
-      throw new Error('cancelled');
-    }
-
-    return {
-      TransformedMessage: input.Message.toLowerCase(),
-    };
-  },
-});
diff --git a/frontend/app/src/lib/api/generated/data-contracts.ts b/frontend/app/src/lib/api/generated/data-contracts.ts
index 31c8fbc64a..3394324984 100644
--- a/frontend/app/src/lib/api/generated/data-contracts.ts
+++ b/frontend/app/src/lib/api/generated/data-contracts.ts
@@ -1556,6 +1556,10 @@ export interface Step {
   action: string;
   /** The timeout of the step. */
   timeout?: string;
+  /** Whether the step is durable. */
+  isDurable?: boolean;
+  /** Slot requests for the step (slot_type -> units). */
+  slotRequests?: Record<string, number>;
   children?: string[];
   parents?: string[];
 }
@@ -2134,6 +2138,14 @@ export interface RecentStepRuns {
   workflowRunId: string;
 }
 
+/** Slot availability and limits for a slot type. */
+export interface WorkerSlotConfig {
+  /** The number of available units for this slot type. */
+  available?: number;
+  /** The maximum number of units for this slot type. */
+  limit: number;
+}
+
 export interface WorkerLabel {
   metadata: APIResourceMeta;
   /** The key of the label. */
@@ -2177,10 +2189,8 @@ export interface Worker {
   recentStepRuns?: RecentStepRuns[];
   /** The status of the worker. */
   status?: "ACTIVE" | "INACTIVE" | "PAUSED";
-  /** The maximum number of runs this worker can execute concurrently. */
-  maxRuns?: number;
-  /** The number of runs this worker can execute concurrently. */
-  availableRuns?: number;
+  /** Slot availability and limits for this worker (slot_type -> { available, limit }). */
+  slotConfig?: Record<string, WorkerSlotConfig>;
   /**
    * the id of the assigned dispatcher, in UUID format
    * @format uuid
diff --git a/frontend/app/src/pages/main/v1/workers/$worker/index.tsx b/frontend/app/src/pages/main/v1/workers/$worker/index.tsx
index 053b7c138b..5608d28e46 100644
--- a/frontend/app/src/pages/main/v1/workers/$worker/index.tsx
+++ b/frontend/app/src/pages/main/v1/workers/$worker/index.tsx
@@ -177,11 +177,9 @@ export default function WorkerDetail() {
     return ;
   }
 
-  const availableSlots = worker.availableRuns ?? 0;
-  const maxSlots = worker.maxRuns ?? 0;
-  const usedSlots = maxSlots - availableSlots;
-  const usedPercentage =
-    maxSlots > 0 ? Math.round((usedSlots / maxSlots) * 100) : 0;
+  const slotCapacityEntries = Object.entries(worker.slotConfig || {}).sort(
+    ([a], [b]) => a.localeCompare(b),
+  );
 
   // dynamically set the max columns in the grid based on the presence of runtime info and labels
   const maxCols =
@@ -276,30 +274,54 @@
-            Available Run Slots
+            Slots
-
-
- - {maxSlots > 0 ? availableSlots : '∞'} - - {maxSlots > 0 && ( - - / {maxSlots} total - - )} -
- {maxSlots > 0 && ( -
-
-
-
-
- {usedSlots} used, {availableSlots} available -
+ + {slotCapacityEntries.length === 0 ? ( +
+ No slots +
+ ) : ( +
+ {slotCapacityEntries.map(([slotType, capacity]) => { + const available = capacity?.available; + const limit = capacity?.limit ?? 0; + const showAvailability = available !== undefined; + const used = showAvailability ? limit - available : 0; + const usedPercentage = + showAvailability && limit > 0 + ? Math.round((used / limit) * 100) + : 0; + const label = showAvailability + ? `${available} / ${limit}` + : `${limit}`; + + return ( +
+
+ + {slotType} + + + {label} + +
+ {showAvailability && limit > 0 && ( +
+
+
+
+
+ {used} used, {available} available +
+
+ )} +
+ ); + })}
)}

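Side note on the slot math introduced above: both the detail page here and the table column below reduce the new `slotConfig` map to the same used/available figures. A minimal sketch of that reduction, assuming the `WorkerSlotConfig` shape from `data-contracts.ts`; the `summarizeSlotConfig` helper is hypothetical and not part of this diff:

```ts
// Illustrative sketch only; mirrors the math the worker views now render.
interface WorkerSlotConfig {
  available?: number; // available units; omitted when availability is not reported
  limit: number; // maximum units for this slot type
}

function summarizeSlotConfig(slotConfig?: Record<string, WorkerSlotConfig>) {
  // Sort slot types alphabetically, as both views do.
  return Object.entries<WorkerSlotConfig>(slotConfig ?? {})
    .sort(([a], [b]) => a.localeCompare(b))
    .map(([slotType, { available, limit }]) => {
      const showAvailability = available !== undefined;
      // Used slots and the percentage are only meaningful when availability is reported.
      const used = showAvailability ? limit - available : 0;
      const usedPercentage =
        showAvailability && limit > 0 ? Math.round((used / limit) * 100) : 0;
      // Label matches the UI: "available / limit" when known, otherwise just the limit.
      const label = showAvailability ? `${available} / ${limit}` : `${limit}`;
      return { slotType, used, usedPercentage, label };
    });
}

// summarizeSlotConfig({ default: { available: 80, limit: 100 } })
// -> [{ slotType: 'default', used: 20, usedPercentage: 20, label: '80 / 100' }]
```

When `available` is omitted, only the `limit` is shown and no usage bar is rendered, matching the `showAvailability` guard used in both components.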
diff --git a/frontend/app/src/pages/main/v1/workers/components/worker-columns.tsx b/frontend/app/src/pages/main/v1/workers/components/worker-columns.tsx index 0efa7d50b9..397db6dd64 100644 --- a/frontend/app/src/pages/main/v1/workers/components/worker-columns.tsx +++ b/frontend/app/src/pages/main/v1/workers/components/worker-columns.tsx @@ -13,7 +13,7 @@ export const WorkerColumn = { name: 'Name', type: 'Type', startedAt: 'Started at', - slots: 'Available Slots', + slots: 'Slots', lastHeartbeatAt: 'Last seen', runtime: 'SDK Version', } as const; @@ -181,11 +181,34 @@ export const columns: (tenantId: string) => ColumnDef[] = ( header: ({ column }) => ( ), - cell: ({ row }) => ( -

- {row.original.availableRuns} / {row.original.maxRuns} -
- ), + cell: ({ row }) => { + const slotConfig = row.original.slotConfig || {}; + const entries = Object.entries(slotConfig).sort(([a], [b]) => + a.localeCompare(b), + ); + + if (entries.length === 0) { + return
No slots
; + } + + return ( +
+ {entries.map(([slotType, capacity]) => { + const available = capacity?.available; + const limit = capacity?.limit; + const label = + available !== undefined ? `${available} / ${limit}` : `${limit}`; + + return ( +
+ {slotType}:{' '} + {label} +
+ ); + })} +
+ ); + }, enableSorting: false, enableHiding: true, }, diff --git a/frontend/docs/pages/home/cancellation.mdx b/frontend/docs/pages/home/cancellation.mdx index 18568b42e7..dbefe28a7f 100644 --- a/frontend/docs/pages/home/cancellation.mdx +++ b/frontend/docs/pages/home/cancellation.mdx @@ -22,6 +22,48 @@ When a task is canceled, Hatchet sends a cancellation signal to the task. The ta /> +### CancelledError Exception + +When a sync task is cancelled while waiting for a child workflow or during a cancellation-aware operation, a `CancelledError` exception is raised. + + + **Important:** `CancelledError` inherits from `BaseException`, not + `Exception`. This means it will **not** be caught by bare `except Exception:` + handlers. This is intentional and mirrors the behavior of Python's + `asyncio.CancelledError`. + + +```python +from hatchet_sdk import CancelledError, CancellationReason + +@hatchet.task() +def my_task(input: MyInput, ctx: Context) -> dict: + try: + result = child_workflow.run(input) + except CancelledError as e: + # Handle cancellation - i.e. perform cleanup, then re-raise + print(f"Task cancelled: {e.reason}") + # Always re-raise CancelledError so Hatchet can properly handle the cancellation + raise + except Exception as e: + # This will NOT catch CancelledError + print(f"Other error: {e}") + raise + return result +``` + +### Cancellation Reasons + +The `CancelledError` includes a `reason` attribute that indicates why the cancellation occurred: + +| Reason | Description | +| --------------------------------------- | --------------------------------------------------------------------- | +| `CancellationReason.USER_REQUESTED` | The user explicitly requested cancellation via `ctx.cancel()` | +| `CancellationReason.WORKFLOW_CANCELLED` | The workflow run was cancelled (e.g., via API or concurrency control) | +| `CancellationReason.PARENT_CANCELLED` | The parent workflow was cancelled while waiting for a child | +| `CancellationReason.TIMEOUT` | The operation timed out | +| `CancellationReason.UNKNOWN` | Unknown or unspecified reason | + +### AbortError behavior + +Hatchet cancellation in TypeScript is driven by an internal `ctx.abortController.signal` (an `AbortSignal`). When a task is cancelled, Hatchet aborts that signal and cancellation-aware operations (like waiting on a child run result) will reject with an **`AbortError`**. + + + **Important:** JavaScript/TypeScript cannot make cancellation “uncatchable”. A broad `catch (e) { ... }` can swallow cancellation. Hatchet’s SDK will avoid enqueueing new child runs once the parent task is cancelled, and it will not report a cancelled task as “completed” even if user code catches the abort — but your code should still exit quickly to avoid wasted work. + + +If you must catch errors, re-throw abort/cancellation errors: + +```ts +try { + // ... work ... + await simple.run({}); +} catch (e) { + ctx.rethrowIfCancelled(e); + // ... other error handling ... +} +``` + diff --git a/frontend/docs/pages/home/durable-execution.mdx b/frontend/docs/pages/home/durable-execution.mdx index efda8b42dc..7c1985935c 100644 --- a/frontend/docs/pages/home/durable-execution.mdx +++ b/frontend/docs/pages/home/durable-execution.mdx @@ -22,13 +22,13 @@ This is especially useful in cases such as: ## How Hatchet Runs Durable Tasks -When you register a durable task, Hatchet will start a second worker in the background for running durable tasks. If you don't register any durable workflows, the durable worker will not be started. 
Similarly, if you start a worker with _only_ durable workflows, the "main" worker will not start, and _only_ the durable worker will run. The durable worker will show up as a second worker in the Hatchet Dashboard.
+Durable tasks run on the same worker process as regular tasks, but they consume a separate slot type so they do not compete with regular tasks for slots. This prevents deadlock scenarios where durable tasks would starve child tasks of the slots needed for the parent durable task to complete.
 
 Tasks that are declared as being durable (using `durable_task` instead of `task`) will receive a `DurableContext` object instead of a normal `Context`, which extends the `Context` by providing some additional tools for working with durable execution features.
 
 ## Example Task
 
-Now that we know a bit about how Hatchet handles durable execution, let's build a task. We'll start by declaring a task that will run durably, on the "durable worker".
+Now that we know a bit about how Hatchet handles durable execution, let's build a task. We'll start by declaring a task that will run durably.
diff --git a/frontend/docs/pages/sdks/python/client.mdx b/frontend/docs/pages/sdks/python/client.mdx
index f500de1330..7039cb85fc 100644
--- a/frontend/docs/pages/sdks/python/client.mdx
+++ b/frontend/docs/pages/sdks/python/client.mdx
@@ -73,14 +73,14 @@ Create a Hatchet worker on which to run workflows.
 
 Parameters:
 
-| Name            | Type                              | Description                                                                                                                                                   | Default    |
-| --------------- | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------- |
-| `name`          | `str`                             | The name of the worker.                                                                                                                                       | _required_ |
-| `slots`         | `int`                             | The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time                           | `100`      |
-| `durable_slots` | `int`                             | The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable. | `1000`     |
-| `labels`        | `dict[str, str \| int] \| None`   | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels.                                               | `None`     |
-| `workflows`     | `list[BaseWorkflow[Any]] \| None` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them.                | `None`     |
-| `lifespan`      | `LifespanFn \| None`              | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks.   | `None`     |
+| Name            | Type                              | Description                                                                                                                                                   | Default    |
+| --------------- | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------- |
+| `name`          | `str`                             | The name of the worker.                                                                                                                                       | _required_ |
+| `slots`         | `int`                             | Maximum number of concurrent runs.                                                                                                                            | `100`      |
+| `durable_slots` | `int`                             | Maximum number of concurrent durable tasks.                                                                                                                   | `1000`     |
+| `labels`        | `dict[str, str \| int] \| None`   | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels.                                               | `None`     |
+| `workflows`     | `list[BaseWorkflow[Any]] \| None` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them.
| `None` | +| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks. | `None` | Returns: diff --git a/internal/services/admin/contracts/workflows.pb.go b/internal/services/admin/contracts/workflows.pb.go index 5d9597ab9b..88c022d1a1 100644 --- a/internal/services/admin/contracts/workflows.pb.go +++ b/internal/services/admin/contracts/workflows.pb.go @@ -354,7 +354,7 @@ type CreateWorkflowVersionOpts struct { ScheduleTimeout *string `protobuf:"bytes,9,opt,name=schedule_timeout,json=scheduleTimeout,proto3,oneof" json:"schedule_timeout,omitempty"` // (optional) the timeout for the schedule CronInput *string `protobuf:"bytes,10,opt,name=cron_input,json=cronInput,proto3,oneof" json:"cron_input,omitempty"` // (optional) the input for the cron trigger OnFailureJob *CreateWorkflowJobOpts `protobuf:"bytes,11,opt,name=on_failure_job,json=onFailureJob,proto3,oneof" json:"on_failure_job,omitempty"` // (optional) the job to run on failure - Sticky *StickyStrategy `protobuf:"varint,12,opt,name=sticky,proto3,enum=StickyStrategy,oneof" json:"sticky,omitempty"` // (optional) the sticky strategy for assigning steps to workers + Sticky *StickyStrategy `protobuf:"varint,12,opt,name=sticky,proto3,enum=StickyStrategy,oneof" json:"sticky,omitempty"` // (optional) the sticky strategy for assigning tasks to workers Kind *WorkflowKind `protobuf:"varint,13,opt,name=kind,proto3,enum=WorkflowKind,oneof" json:"kind,omitempty"` // (optional) the kind of workflow DefaultPriority *int32 `protobuf:"varint,14,opt,name=default_priority,json=defaultPriority,proto3,oneof" json:"default_priority,omitempty"` // (optional) the priority of the workflow } @@ -568,7 +568,7 @@ type CreateWorkflowJobOpts struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // (required) the job name Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` // (optional) the job description - Steps []*CreateWorkflowStepOpts `protobuf:"bytes,4,rep,name=steps,proto3" json:"steps,omitempty"` // (required) the job steps + Steps []*CreateWorkflowStepOpts `protobuf:"bytes,4,rep,name=steps,proto3" json:"steps,omitempty"` // (required) the job tasks } func (x *CreateWorkflowJobOpts) Reset() { @@ -630,8 +630,8 @@ type DesiredWorkerLabels struct { unknownFields protoimpl.UnknownFields // value of the affinity - StrValue *string `protobuf:"bytes,1,opt,name=strValue,proto3,oneof" json:"strValue,omitempty"` - IntValue *int32 `protobuf:"varint,2,opt,name=intValue,proto3,oneof" json:"intValue,omitempty"` + StrValue *string `protobuf:"bytes,1,opt,name=str_value,json=strValue,proto3,oneof" json:"str_value,omitempty"` + IntValue *int32 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof" json:"int_value,omitempty"` // * // (optional) Specifies whether the affinity setting is required. // If required, the worker will not accept actions that do not have a truthy affinity setting. @@ -715,23 +715,23 @@ func (x *DesiredWorkerLabels) GetWeight() int32 { return 0 } -// CreateWorkflowStepOpts represents options to create a workflow step. +// CreateWorkflowStepOpts represents options to create a workflow task. 
type CreateWorkflowStepOpts struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ReadableId string `protobuf:"bytes,1,opt,name=readable_id,json=readableId,proto3" json:"readable_id,omitempty"` // (required) the step name - Action string `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` // (required) the step action id - Timeout string `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` // (optional) the step timeout - Inputs string `protobuf:"bytes,4,opt,name=inputs,proto3" json:"inputs,omitempty"` // (optional) the step inputs, assuming string representation of JSON - Parents []string `protobuf:"bytes,5,rep,name=parents,proto3" json:"parents,omitempty"` // (optional) the step parents. if none are passed in, this is a root step - UserData string `protobuf:"bytes,6,opt,name=user_data,json=userData,proto3" json:"user_data,omitempty"` // (optional) the custom step user data, assuming string representation of JSON - Retries int32 `protobuf:"varint,7,opt,name=retries,proto3" json:"retries,omitempty"` // (optional) the number of retries for the step, default 0 - RateLimits []*CreateStepRateLimit `protobuf:"bytes,8,rep,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` // (optional) the rate limits for the step - WorkerLabels map[string]*DesiredWorkerLabels `protobuf:"bytes,9,rep,name=worker_labels,json=workerLabels,proto3" json:"worker_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (optional) the desired worker affinity state for the step - BackoffFactor *float32 `protobuf:"fixed32,10,opt,name=backoff_factor,json=backoffFactor,proto3,oneof" json:"backoff_factor,omitempty"` // (optional) the retry backoff factor for the step - BackoffMaxSeconds *int32 `protobuf:"varint,11,opt,name=backoff_max_seconds,json=backoffMaxSeconds,proto3,oneof" json:"backoff_max_seconds,omitempty"` // (optional) the maximum backoff time for the step + ReadableId string `protobuf:"bytes,1,opt,name=readable_id,json=readableId,proto3" json:"readable_id,omitempty"` // (required) the task name + Action string `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` // (required) the task action id + Timeout string `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` // (optional) the task timeout + Inputs string `protobuf:"bytes,4,opt,name=inputs,proto3" json:"inputs,omitempty"` // (optional) the task inputs, assuming string representation of JSON + Parents []string `protobuf:"bytes,5,rep,name=parents,proto3" json:"parents,omitempty"` // (optional) the task parents. 
if none are passed in, this is a root task + UserData string `protobuf:"bytes,6,opt,name=user_data,json=userData,proto3" json:"user_data,omitempty"` // (optional) the custom task user data, assuming string representation of JSON + Retries int32 `protobuf:"varint,7,opt,name=retries,proto3" json:"retries,omitempty"` // (optional) the number of retries for the task, default 0 + RateLimits []*CreateStepRateLimit `protobuf:"bytes,8,rep,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` // (optional) the rate limits for the task + WorkerLabels map[string]*DesiredWorkerLabels `protobuf:"bytes,9,rep,name=worker_labels,json=workerLabels,proto3" json:"worker_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (optional) the desired worker affinity state for the task + BackoffFactor *float32 `protobuf:"fixed32,10,opt,name=backoff_factor,json=backoffFactor,proto3,oneof" json:"backoff_factor,omitempty"` // (optional) the retry backoff factor for the task + BackoffMaxSeconds *int32 `protobuf:"varint,11,opt,name=backoff_max_seconds,json=backoffMaxSeconds,proto3,oneof" json:"backoff_max_seconds,omitempty"` // (optional) the maximum backoff time for the task } func (x *CreateWorkflowStepOpts) Reset() { @@ -849,7 +849,7 @@ type CreateStepRateLimit struct { unknownFields protoimpl.UnknownFields Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` // (required) the key for the rate limit - Units *int32 `protobuf:"varint,2,opt,name=units,proto3,oneof" json:"units,omitempty"` // (optional) the number of units this step consumes + Units *int32 `protobuf:"varint,2,opt,name=units,proto3,oneof" json:"units,omitempty"` // (optional) the number of units this task consumes KeyExpr *string `protobuf:"bytes,3,opt,name=key_expr,json=keyExpr,proto3,oneof" json:"key_expr,omitempty"` // (optional) a CEL expression for determining the rate limit key UnitsExpr *string `protobuf:"bytes,4,opt,name=units_expr,json=unitsExpr,proto3,oneof" json:"units_expr,omitempty"` // (optional) a CEL expression for determining the number of units consumed LimitValuesExpr *string `protobuf:"bytes,5,opt,name=limit_values_expr,json=limitValuesExpr,proto3,oneof" json:"limit_values_expr,omitempty"` // (optional) a CEL expression for determining the total amount of rate limit units @@ -980,8 +980,8 @@ type ScheduleWorkflowRequest struct { Input string `protobuf:"bytes,3,opt,name=input,proto3" json:"input,omitempty"` // (optional) the parent workflow run id ParentId *string `protobuf:"bytes,4,opt,name=parent_id,json=parentId,proto3,oneof" json:"parent_id,omitempty"` - // (optional) the parent step run id - ParentStepRunId *string `protobuf:"bytes,5,opt,name=parent_step_run_id,json=parentStepRunId,proto3,oneof" json:"parent_step_run_id,omitempty"` + // (optional) the parent task external run id + ParentTaskRunExternalId *string `protobuf:"bytes,5,opt,name=parent_task_run_external_id,json=parentTaskRunExternalId,proto3,oneof" json:"parent_task_run_external_id,omitempty"` // (optional) the index of the child workflow. if this is set, matches on the index or the // child key will be a no-op, even if the schedule has changed. 
ChildIndex *int32 `protobuf:"varint,6,opt,name=child_index,json=childIndex,proto3,oneof" json:"child_index,omitempty"` @@ -1054,9 +1054,9 @@ func (x *ScheduleWorkflowRequest) GetParentId() string { return "" } -func (x *ScheduleWorkflowRequest) GetParentStepRunId() string { - if x != nil && x.ParentStepRunId != nil { - return *x.ParentStepRunId +func (x *ScheduleWorkflowRequest) GetParentTaskRunExternalId() string { + if x != nil && x.ParentTaskRunExternalId != nil { + return *x.ParentTaskRunExternalId } return "" } @@ -1457,14 +1457,14 @@ type TriggerWorkflowRequest struct { Input string `protobuf:"bytes,2,opt,name=input,proto3" json:"input,omitempty"` // (optional) the parent workflow run id ParentId *string `protobuf:"bytes,3,opt,name=parent_id,json=parentId,proto3,oneof" json:"parent_id,omitempty"` - // (optional) the parent step run id - ParentStepRunId *string `protobuf:"bytes,4,opt,name=parent_step_run_id,json=parentStepRunId,proto3,oneof" json:"parent_step_run_id,omitempty"` + // (optional) the parent task external run id + ParentTaskRunExternalId *string `protobuf:"bytes,4,opt,name=parent_task_run_external_id,json=parentTaskRunExternalId,proto3,oneof" json:"parent_task_run_external_id,omitempty"` // (optional) the index of the child workflow. if this is set, matches on the index or the - // child key will return an existing workflow run if the parent id, parent step run id, and + // child key will return an existing workflow run if the parent id, parent task run id, and // child index/key match an existing workflow run. ChildIndex *int32 `protobuf:"varint,5,opt,name=child_index,json=childIndex,proto3,oneof" json:"child_index,omitempty"` // (optional) the key for the child. if this is set, matches on the index or the - // child key will return an existing workflow run if the parent id, parent step run id, and + // child key will return an existing workflow run if the parent id, parent task run id, and // child index/key match an existing workflow run. 
ChildKey *string `protobuf:"bytes,6,opt,name=child_key,json=childKey,proto3,oneof" json:"child_key,omitempty"` // (optional) additional metadata for the workflow @@ -1472,7 +1472,7 @@ type TriggerWorkflowRequest struct { // (optional) desired worker id for the workflow run, // requires the workflow definition to have a sticky strategy DesiredWorkerId *string `protobuf:"bytes,8,opt,name=desired_worker_id,json=desiredWorkerId,proto3,oneof" json:"desired_worker_id,omitempty"` - // (optional) override for the priority of the workflow steps, will set all steps to this priority + // (optional) override for the priority of the workflow tasks, will set all tasks to this priority Priority *int32 `protobuf:"varint,9,opt,name=priority,proto3,oneof" json:"priority,omitempty"` } @@ -1529,9 +1529,9 @@ func (x *TriggerWorkflowRequest) GetParentId() string { return "" } -func (x *TriggerWorkflowRequest) GetParentStepRunId() string { - if x != nil && x.ParentStepRunId != nil { - return *x.ParentStepRunId +func (x *TriggerWorkflowRequest) GetParentTaskRunExternalId() string { + if x != nil && x.ParentTaskRunExternalId != nil { + return *x.ParentTaskRunExternalId } return "" } @@ -1803,254 +1803,257 @@ var file_workflows_proto_rawDesc = []byte{ 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x74, 0x65, 0x70, 0x4f, 0x70, 0x74, 0x73, 0x52, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, - 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x22, 0x93, 0x02, 0x0a, 0x13, 0x44, 0x65, 0x73, 0x69, 0x72, - 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1f, - 0x0a, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x48, 0x00, 0x52, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, 0x12, - 0x1f, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x05, 0x48, 0x01, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, - 0x12, 0x1f, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x08, 0x48, 0x02, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x88, 0x01, - 0x01, 0x12, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, - 0x62, 0x65, 0x6c, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x48, 0x03, 0x52, - 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x1b, - 0x0a, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x04, - 0x52, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, - 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x69, 0x6e, 0x74, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, - 0x65, 0x64, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0xbe, 0x04, 0x0a, - 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, - 0x74, 0x65, 0x70, 0x4f, 0x70, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x61, 0x64, 0x61, - 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, - 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1b, 0x0a, 0x09, - 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x08, 0x75, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x74, - 0x72, 0x69, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x72, 0x65, 0x74, 0x72, - 0x69, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x0b, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, - 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x0a, - 0x72, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x73, 0x12, 0x4e, 0x0a, 0x0d, 0x77, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x29, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x53, 0x74, 0x65, 0x70, 0x4f, 0x70, 0x74, 0x73, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x77, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x2a, 0x0a, 0x0e, 0x62, 0x61, - 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, - 0x28, 0x02, 0x48, 0x00, 0x52, 0x0d, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x46, 0x61, 0x63, - 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x13, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, - 0x66, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x0b, 0x20, - 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x11, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x61, - 0x78, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x88, 0x01, 0x01, 0x1a, 0x55, 0x0a, 0x11, 0x57, + 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x22, 0x97, 0x02, 0x0a, 0x13, 0x44, 0x65, 0x73, 0x69, 0x72, + 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x20, + 0x0a, 0x09, 0x73, 0x74, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, + 0x12, 0x20, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x88, + 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x08, 0x48, 0x02, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, + 0x88, 0x01, 0x01, 0x12, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, + 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, + 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 
0x6f, 0x72, 0x48, + 0x03, 0x52, 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, + 0x12, 0x1b, 0x0a, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, + 0x48, 0x04, 0x52, 0x06, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, + 0x0a, 0x5f, 0x73, 0x74, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, + 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, + 0x22, 0xbe, 0x04, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x53, 0x74, 0x65, 0x70, 0x4f, 0x70, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x72, + 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0a, 0x72, 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x16, + 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x73, + 0x12, 0x1b, 0x0a, 0x09, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x18, 0x0a, + 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, + 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x0b, 0x72, 0x61, 0x74, 0x65, 0x5f, + 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, + 0x69, 0x74, 0x52, 0x0a, 0x72, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x73, 0x12, 0x4e, + 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, + 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x74, 0x65, 0x70, 0x4f, 0x70, 0x74, 0x73, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x14, 0x2e, 0x44, 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x66, - 0x61, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, - 0x66, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x22, 0xb5, 0x02, - 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x65, 0x70, 0x52, 0x61, 0x74, 0x65, - 0x4c, 
0x69, 0x6d, 0x69, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x19, 0x0a, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x88, - 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x45, 0x78, 0x70, 0x72, 0x88, - 0x01, 0x01, 0x12, 0x22, 0x0a, 0x0a, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x09, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x45, - 0x78, 0x70, 0x72, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x11, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x03, 0x52, 0x0f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x45, 0x78, 0x70, 0x72, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, 0x52, 0x61, 0x74, 0x65, - 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x04, 0x52, - 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, - 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x65, - 0x78, 0x70, 0x72, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x5f, 0x65, 0x78, - 0x70, 0x72, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x64, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x16, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xd8, 0x03, - 0x0a, 0x17, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, - 0x09, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x63, - 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x20, 0x0a, - 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, - 0x30, 0x0a, 0x12, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x5f, 0x72, - 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, - 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x49, - 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x63, 0x68, 0x69, 0x6c, 0x64, - 0x5f, 0x6b, 0x65, 0x79, 0x18, 
0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x08, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 0x64, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, - 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x05, 0x48, 0x05, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, - 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x42, 0x15, - 0x0a, 0x13, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x5f, 0x72, - 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, - 0x6b, 0x65, 0x79, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, - 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0x5e, 0x0a, 0x11, 0x53, 0x63, 0x68, 0x65, - 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, - 0x0a, 0x74, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, - 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x41, 0x74, 0x22, 0xad, 0x02, 0x0a, 0x0f, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, 0x0a, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, - 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, - 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, - 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, - 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, - 0x77, 0x49, 0x64, 0x12, 0x43, 0x0a, 0x13, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, - 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x12, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 
0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x12, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x22, 0x53, 0x0a, 0x17, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x52, 0x65, 0x66, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, - 0x12, 0x1b, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x22, 0x49, 0x0a, - 0x16, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, - 0x43, 0x72, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x22, 0x53, 0x0a, 0x1a, 0x42, 0x75, 0x6c, 0x6b, - 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x35, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x54, 0x72, 0x69, 0x67, - 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x52, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x22, 0x47, 0x0a, - 0x1b, 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x28, 0x0a, 0x10, - 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x52, 0x75, 0x6e, 0x49, 0x64, 0x73, 0x22, 0xe4, 0x03, 0x0a, 0x16, 0x54, 0x72, 0x69, 0x67, 0x67, - 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x20, 0x0a, 0x09, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x30, 0x0a, - 0x12, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x5f, 0x72, 0x75, 0x6e, - 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x70, 0x61, 0x72, - 0x65, 0x6e, 0x74, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, - 0x24, 0x0a, 0x0b, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, - 0x64, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 
0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, - 0x11, 0x64, 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, - 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x69, - 0x72, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x1f, - 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, - 0x48, 0x06, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, 0x42, - 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x42, 0x15, 0x0a, - 0x13, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x5f, 0x72, 0x75, - 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, - 0x6e, 0x64, 0x65, 0x78, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, - 0x65, 0x79, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, - 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, - 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0x41, 0x0a, - 0x17, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, - 0x22, 0x6d, 0x0a, 0x13, 0x50, 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, - 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, - 0x2e, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x12, 0x2e, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, - 0x16, 0x0a, 0x14, 0x50, 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x24, 0x0a, 0x0e, 0x53, 0x74, 0x69, 0x63, 0x6b, - 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x4f, 0x46, - 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x41, 0x52, 0x44, 0x10, 0x01, 0x2a, 0x32, 0x0a, - 0x0c, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x0c, 0x0a, - 0x08, 0x46, 0x55, 0x4e, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x44, - 0x55, 0x52, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x44, 0x41, 0x47, 0x10, - 0x02, 0x2a, 0x7f, 0x0a, 0x18, 0x43, 0x6f, 0x6e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, - 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x16, 0x0a, - 0x12, 
0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x49, 0x4e, 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, - 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x52, 0x4f, 0x50, 0x5f, 0x4e, 0x45, - 0x57, 0x45, 0x53, 0x54, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x51, 0x55, 0x45, 0x55, 0x45, 0x5f, - 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x02, 0x12, 0x15, 0x0a, 0x11, 0x47, 0x52, 0x4f, 0x55, - 0x50, 0x5f, 0x52, 0x4f, 0x55, 0x4e, 0x44, 0x5f, 0x52, 0x4f, 0x42, 0x49, 0x4e, 0x10, 0x03, 0x12, - 0x11, 0x0a, 0x0d, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, - 0x10, 0x04, 0x2a, 0x85, 0x01, 0x0a, 0x15, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x09, 0x0a, 0x05, - 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x54, 0x5f, 0x45, - 0x51, 0x55, 0x41, 0x4c, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, - 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x02, 0x12, 0x19, 0x0a, 0x15, 0x47, 0x52, 0x45, 0x41, - 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x4f, 0x52, 0x5f, 0x45, 0x51, 0x55, 0x41, - 0x4c, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x4c, 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, - 0x10, 0x04, 0x12, 0x16, 0x0a, 0x12, 0x4c, 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, - 0x4f, 0x52, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x05, 0x2a, 0x5d, 0x0a, 0x11, 0x52, 0x61, - 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, - 0x0a, 0x0a, 0x06, 0x53, 0x45, 0x43, 0x4f, 0x4e, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4d, - 0x49, 0x4e, 0x55, 0x54, 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x4f, 0x55, 0x52, 0x10, - 0x02, 0x12, 0x07, 0x0a, 0x03, 0x44, 0x41, 0x59, 0x10, 0x03, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x45, - 0x45, 0x4b, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x4f, 0x4e, 0x54, 0x48, 0x10, 0x05, 0x12, - 0x08, 0x0a, 0x04, 0x59, 0x45, 0x41, 0x52, 0x10, 0x06, 0x32, 0xdc, 0x02, 0x0a, 0x0f, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x34, 0x0a, - 0x0b, 0x50, 0x75, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x13, 0x2e, 0x50, - 0x75, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x10, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x10, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x18, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, - 0x6c, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x10, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x0a, 0x0f, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x17, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x18, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, - 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x13, 0x42, 0x75, 0x6c, - 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x12, 0x1b, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 
0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, - 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x0c, 0x50, - 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x14, 0x2e, 0x50, 0x75, - 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x15, 0x2e, 0x50, 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, - 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, - 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x61, 0x64, 0x6d, - 0x69, 0x6e, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x2a, + 0x0a, 0x0e, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x02, 0x48, 0x00, 0x52, 0x0d, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, + 0x66, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x13, 0x62, 0x61, + 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, + 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x11, 0x62, 0x61, 0x63, 0x6b, 0x6f, + 0x66, 0x66, 0x4d, 0x61, 0x78, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x88, 0x01, 0x01, 0x1a, + 0x55, 0x0a, 0x11, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x44, 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x57, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, + 0x66, 0x66, 0x5f, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x62, 0x61, + 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, + 0x73, 0x22, 0xb5, 0x02, 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x65, 0x70, + 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x19, 0x0a, 0x05, 0x75, + 0x6e, 0x69, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x75, 0x6e, + 0x69, 0x74, 0x73, 0x88, 0x01, 0x01, 0x12, 0x1e, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x65, 0x78, + 0x70, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x45, + 0x78, 0x70, 0x72, 0x88, 0x01, 0x01, 0x12, 0x22, 0x0a, 0x0a, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x5f, + 0x65, 0x78, 0x70, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x09, 0x75, 0x6e, + 0x69, 0x74, 0x73, 0x45, 0x78, 0x70, 0x72, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x11, 0x6c, 0x69, + 0x6d, 0x69, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 
0x48, 0x03, 0x52, 0x0f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x45, 0x78, 0x70, 0x72, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x08, 0x64, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, + 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x48, 0x04, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, + 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6b, + 0x65, 0x79, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x75, 0x6e, 0x69, 0x74, + 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x5f, 0x65, 0x78, 0x70, 0x72, 0x42, 0x0b, 0x0a, 0x09, + 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x16, 0x0a, 0x14, 0x4c, 0x69, 0x73, + 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0xf2, 0x03, 0x0a, 0x17, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x38, 0x0a, 0x09, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x09, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x12, 0x20, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, + 0x88, 0x01, 0x01, 0x12, 0x41, 0x0a, 0x1b, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, + 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, + 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x17, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0a, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, + 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x03, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x34, + 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x12, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, + 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x48, 0x05, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, + 0x74, 0x79, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x5f, 0x69, 0x64, 0x42, 0x1e, 0x0a, 0x1c, 0x5f, 0x70, 
0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x74, + 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, 0x65, + 0x79, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, + 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0x5e, 0x0a, 0x11, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, + 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x74, + 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x72, 0x69, + 0x67, 0x67, 0x65, 0x72, 0x41, 0x74, 0x22, 0xad, 0x02, 0x0a, 0x0f, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, + 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, + 0x64, 0x12, 0x43, 0x0a, 0x13, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x5f, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, + 0x2e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x52, 0x12, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x22, 0x53, 0x0a, 0x17, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, + 0x66, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1b, + 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x22, 0x49, 
0x0a, 0x16, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x43, 0x72, + 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x22, 0x53, 0x0a, 0x1a, 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, + 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x35, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, + 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x52, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x22, 0x47, 0x0a, 0x1b, 0x42, + 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x77, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, + 0x6e, 0x49, 0x64, 0x73, 0x22, 0xfe, 0x03, 0x0a, 0x16, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, + 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x20, 0x0a, 0x09, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x41, 0x0a, 0x1b, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x01, 0x52, 0x17, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x75, + 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x24, + 0x0a, 0x0b, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x05, 0x48, 0x02, 0x52, 0x0a, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, 0x65, + 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, + 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x11, + 0x64, 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, + 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x69, 0x72, + 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, + 0x08, 
0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x48, + 0x06, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, 0x42, 0x0c, + 0x0a, 0x0a, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x42, 0x1e, 0x0a, 0x1c, + 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, + 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, + 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x0c, 0x0a, 0x0a, + 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x64, 0x65, 0x73, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x77, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, + 0x6f, 0x72, 0x69, 0x74, 0x79, 0x22, 0x41, 0x0a, 0x17, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, + 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x6d, 0x0a, 0x13, 0x50, 0x75, 0x74, 0x52, + 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x2e, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, 0x52, 0x61, 0x74, 0x65, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x16, 0x0a, 0x14, 0x50, 0x75, 0x74, 0x52, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, + 0x24, 0x0a, 0x0e, 0x53, 0x74, 0x69, 0x63, 0x6b, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, + 0x79, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x4f, 0x46, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x48, + 0x41, 0x52, 0x44, 0x10, 0x01, 0x2a, 0x32, 0x0a, 0x0c, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x0c, 0x0a, 0x08, 0x46, 0x55, 0x4e, 0x43, 0x54, 0x49, 0x4f, + 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x55, 0x52, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, + 0x12, 0x07, 0x0a, 0x03, 0x44, 0x41, 0x47, 0x10, 0x02, 0x2a, 0x7f, 0x0a, 0x18, 0x43, 0x6f, 0x6e, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x53, 0x74, 0x72, + 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x16, 0x0a, 0x12, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, + 0x49, 0x4e, 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x0f, 0x0a, + 0x0b, 0x44, 0x52, 0x4f, 0x50, 0x5f, 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x01, 0x12, 0x10, + 0x0a, 0x0c, 0x51, 0x55, 0x45, 0x55, 0x45, 0x5f, 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x02, + 0x12, 0x15, 0x0a, 0x11, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x52, 0x4f, 0x55, 0x4e, 0x44, 0x5f, + 0x52, 0x4f, 0x42, 0x49, 0x4e, 0x10, 0x03, 0x12, 0x11, 0x0a, 0x0d, 0x43, 0x41, 0x4e, 0x43, 0x45, + 0x4c, 0x5f, 0x4e, 0x45, 0x57, 
0x45, 0x53, 0x54, 0x10, 0x04, 0x2a, 0x85, 0x01, 0x0a, 0x15, 0x57, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x61, 0x74, 0x6f, 0x72, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x00, 0x12, + 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x54, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x01, 0x12, 0x10, + 0x0a, 0x0c, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x02, + 0x12, 0x19, 0x0a, 0x15, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, + 0x5f, 0x4f, 0x52, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x4c, + 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x04, 0x12, 0x16, 0x0a, 0x12, 0x4c, 0x45, + 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x4f, 0x52, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, + 0x10, 0x05, 0x2a, 0x5d, 0x0a, 0x11, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x44, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x45, 0x43, 0x4f, 0x4e, + 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x49, 0x4e, 0x55, 0x54, 0x45, 0x10, 0x01, 0x12, + 0x08, 0x0a, 0x04, 0x48, 0x4f, 0x55, 0x52, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03, 0x44, 0x41, 0x59, + 0x10, 0x03, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x45, 0x45, 0x4b, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, + 0x4d, 0x4f, 0x4e, 0x54, 0x48, 0x10, 0x05, 0x12, 0x08, 0x0a, 0x04, 0x59, 0x45, 0x41, 0x52, 0x10, + 0x06, 0x32, 0xdc, 0x02, 0x0a, 0x0f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x34, 0x0a, 0x0b, 0x50, 0x75, 0x74, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x13, 0x2e, 0x50, 0x75, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x10, 0x2e, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x10, 0x53, + 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, + 0x18, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x10, 0x2e, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x44, 0x0a, 0x0f, 0x54, + 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x17, + 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, + 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x50, 0x0a, 0x13, 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, + 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x1b, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x54, + 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x54, 0x72, 0x69, 0x67, + 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x3b, 0x0a, 0x0c, 0x50, 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, + 0x6d, 0x69, 0x74, 0x12, 0x14, 0x2e, 0x50, 0x75, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, + 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x50, 0x75, 0x74, 0x52, + 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, + 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, + 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x73, 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/internal/services/admin/server.go b/internal/services/admin/server.go index 2d06dd4b45..1451fa19be 100644 --- a/internal/services/admin/server.go +++ b/internal/services/admin/server.go @@ -100,10 +100,10 @@ func (a *AdminServiceImpl) ScheduleWorkflow(ctx context.Context, req *contracts. isParentTriggered := req.ParentId != nil if isParentTriggered { - if req.ParentStepRunId == nil { + if req.ParentTaskRunExternalId == nil { return nil, status.Error( codes.InvalidArgument, - "parent step run id is required when parent id is provided", + "parent task run id is required when parent id is provided", ) } diff --git a/internal/services/admin/server_v1.go b/internal/services/admin/server_v1.go index 4bed97527d..2f8daf67d0 100644 --- a/internal/services/admin/server_v1.go +++ b/internal/services/admin/server_v1.go @@ -177,7 +177,7 @@ func (i *AdminServiceImpl) newTriggerOpt( span.SetAttributes( attribute.String("admin_service.new_trigger_opt.workflow_name", req.Name), attribute.Int("admin_service.new_trigger_opt.payload_size", len(req.Input)), - attribute.Bool("admin_service.new_trigger_opt.is_child_workflow", req.ParentStepRunId != nil), + attribute.Bool("admin_service.new_trigger_opt.is_child_workflow", req.ParentTaskRunExternalId != nil), ) additionalMeta := "" @@ -210,8 +210,8 @@ func (i *AdminServiceImpl) newTriggerOpt( t.Priority = req.Priority } - if req.ParentStepRunId != nil { - parentStepRunId, err := uuid.Parse(*req.ParentStepRunId) + if req.ParentTaskRunExternalId != nil { + parentTaskExternalId, err := uuid.Parse(*req.ParentTaskRunExternalId) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "parentStepRunId must be a valid UUID: %s", err) @@ -221,7 +221,7 @@ func (i *AdminServiceImpl) newTriggerOpt( parentTask, err := i.repov1.Tasks().GetTaskByExternalId( ctx, tenantId, - parentStepRunId, + parentTaskExternalId, false, ) diff --git a/internal/services/admin/v1/server.go b/internal/services/admin/v1/server.go index d74b0a1486..cdaf361044 100644 --- a/internal/services/admin/v1/server.go +++ b/internal/services/admin/v1/server.go @@ -863,6 +863,12 @@ func getCreateTaskOpts(tasks []*contracts.CreateTaskOpts, kind string) ([]v1.Cre TriggerConditions: make([]v1.CreateStepMatchConditionOpt, 0), RateLimits: make([]v1.CreateWorkflowStepRateLimitOpts, 0), // Initialize to avoid nil ScheduleTimeout: stepCp.ScheduleTimeout, + IsDurable: stepCp.IsDurable, + SlotRequests: nil, + } + + if stepCp.SlotRequests != nil { + steps[j].SlotRequests = stepCp.SlotRequests } // Safely set Parents diff --git a/internal/services/controllers/metrics/collector.go b/internal/services/controllers/metrics/collector.go index 51d89ba0ac..4457c321a6 100644 --- a/internal/services/controllers/metrics/collector.go +++ b/internal/services/controllers/metrics/collector.go @@ -457,21 +457,40 @@ func (mc *MetricsCollectorImpl) collectWorkerMetrics(ctx context.Context) func() mc.l.Debug().Msg("collecting worker metrics") - // Count active slots per tenant - activeSlots, err := 
mc.repo.Workers().CountActiveSlotsPerTenant() + // Count active slots per tenant (total) + activeSlotsTotal, err := mc.repo.Workers().ListTotalActiveSlotsPerTenant() switch { case err != nil: - mc.l.Error().Err(err).Msg("failed to count active slots per tenant") - case len(activeSlots) == 0: + mc.l.Error().Err(err).Msg("failed to list total active slots per tenant") + case len(activeSlotsTotal) == 0: mc.l.Debug().Msg("no active worker slots found") default: - mc.l.Info().Int("tenant_count", len(activeSlots)).Msg("recording active slots metrics") - for tenantId, count := range activeSlots { + mc.l.Info().Int("tenant_count", len(activeSlotsTotal)).Msg("recording active slots metrics") + for tenantId, count := range activeSlotsTotal { mc.recorder.RecordActiveSlots(ctx, tenantId, count) mc.l.Debug().Str("tenant_id", tenantId.String()).Int64("count", count).Msg("recorded active slots metric") } } + // Count active slots per tenant and slot key + activeSlotsByKey, err := mc.repo.Workers().ListActiveSlotsPerTenantAndSlotType() + switch { + case err != nil: + mc.l.Error().Err(err).Msg("failed to list active slots per tenant and slot key") + case len(activeSlotsByKey) == 0: + mc.l.Debug().Msg("no active worker slots by key found") + default: + mc.l.Info().Int("slot_count", len(activeSlotsByKey)).Msg("recording active slots by key metrics") + for tuple, count := range activeSlotsByKey { + mc.recorder.RecordActiveSlotsByKey(ctx, tuple.TenantId, tuple.SlotType, count) + mc.l.Debug(). + Str("tenant_id", tuple.TenantId.String()). + Str("slot_key", tuple.SlotType). + Int64("count", count). + Msg("recorded active slots by key metric") + } + } + // Count active workers per tenant activeWorkers, err := mc.repo.Workers().CountActiveWorkersPerTenant() switch { diff --git a/internal/services/dispatcher/contracts/dispatcher.pb.go b/internal/services/dispatcher/contracts/dispatcher.pb.go index c62d510626..fbbbc1c26f 100644 --- a/internal/services/dispatcher/contracts/dispatcher.pb.go +++ b/internal/services/dispatcher/contracts/dispatcher.pb.go @@ -388,8 +388,8 @@ type WorkerLabels struct { unknownFields protoimpl.UnknownFields // value of the label - StrValue *string `protobuf:"bytes,1,opt,name=strValue,proto3,oneof" json:"strValue,omitempty"` - IntValue *int32 `protobuf:"varint,2,opt,name=intValue,proto3,oneof" json:"intValue,omitempty"` + StrValue *string `protobuf:"bytes,1,opt,name=str_value,json=strValue,proto3,oneof" json:"str_value,omitempty"` + IntValue *int32 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof" json:"int_value,omitempty"` } func (x *WorkerLabels) Reset() { @@ -443,9 +443,9 @@ type RuntimeInfo struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - SdkVersion *string `protobuf:"bytes,1,opt,name=sdkVersion,proto3,oneof" json:"sdkVersion,omitempty"` + SdkVersion *string `protobuf:"bytes,1,opt,name=sdk_version,json=sdkVersion,proto3,oneof" json:"sdk_version,omitempty"` Language *SDKS `protobuf:"varint,2,opt,name=language,proto3,enum=SDKS,oneof" json:"language,omitempty"` - LanguageVersion *string `protobuf:"bytes,3,opt,name=languageVersion,proto3,oneof" json:"languageVersion,omitempty"` + LanguageVersion *string `protobuf:"bytes,3,opt,name=language_version,json=languageVersion,proto3,oneof" json:"language_version,omitempty"` Os *string `protobuf:"bytes,4,opt,name=os,proto3,oneof" json:"os,omitempty"` Extra *string `protobuf:"bytes,5,opt,name=extra,proto3,oneof" json:"extra,omitempty"` } @@ -523,19 +523,23 @@ type WorkerRegisterRequest struct { 
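The collector.go hunk above now records slot usage at two granularities: a per-tenant total and a per-tenant, per-slot-type breakdown. The recorder behind RecordActiveSlots and RecordActiveSlotsByKey is not part of this diff; the following is a minimal sketch of one possible implementation using prometheus/client_golang, where the gauge names and the tuple struct shape are assumptions inferred from the call sites, not Hatchet's actual recorder.

// Hypothetical recorder satisfying the call sites in collector.go.
// Gauge names are assumed; only the method signatures are taken from
// the diff above.
package metrics

import (
	"context"

	"github.com/google/uuid"
	"github.com/prometheus/client_golang/prometheus"
)

var (
	activeSlotsTotal = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Name: "hatchet_active_slots", // assumed metric name
		Help: "Active worker slots per tenant.",
	}, []string{"tenant_id"})

	activeSlotsByKey = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Name: "hatchet_active_slots_by_key", // assumed metric name
		Help: "Active worker slots per tenant and slot type.",
	}, []string{"tenant_id", "slot_type"})
)

func init() {
	prometheus.MustRegister(activeSlotsTotal, activeSlotsByKey)
}

type PromRecorder struct{}

// RecordActiveSlots mirrors the existing total call site:
// mc.recorder.RecordActiveSlots(ctx, tenantId, count).
func (r *PromRecorder) RecordActiveSlots(ctx context.Context, tenantId uuid.UUID, count int64) {
	activeSlotsTotal.WithLabelValues(tenantId.String()).Set(float64(count))
}

// RecordActiveSlotsByKey mirrors the new per-slot-type call site:
// mc.recorder.RecordActiveSlotsByKey(ctx, tuple.TenantId, tuple.SlotType, count).
func (r *PromRecorder) RecordActiveSlotsByKey(ctx context.Context, tenantId uuid.UUID, slotType string, count int64) {
	activeSlotsByKey.WithLabelValues(tenantId.String(), slotType).Set(float64(count))
}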
unknownFields protoimpl.UnknownFields // the name of the worker - WorkerName string `protobuf:"bytes,1,opt,name=workerName,proto3" json:"workerName,omitempty"` + WorkerName string `protobuf:"bytes,1,opt,name=worker_name,json=workerName,proto3" json:"worker_name,omitempty"` // a list of actions that this worker can run Actions []string `protobuf:"bytes,2,rep,name=actions,proto3" json:"actions,omitempty"` // (optional) the services for this worker Services []string `protobuf:"bytes,3,rep,name=services,proto3" json:"services,omitempty"` - // (optional) the max number of runs this worker can handle - MaxRuns *int32 `protobuf:"varint,4,opt,name=maxRuns,proto3,oneof" json:"maxRuns,omitempty"` + // (optional) the number of default slots this worker can handle + Slots *int32 `protobuf:"varint,4,opt,name=slots,proto3,oneof" json:"slots,omitempty"` // (optional) worker labels (i.e. state or other metadata) Labels map[string]*WorkerLabels `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (optional) webhookId is the id of the webhook that the worker is associated with (if any) - WebhookId *string `protobuf:"bytes,6,opt,name=webhookId,proto3,oneof" json:"webhookId,omitempty"` + WebhookId *string `protobuf:"bytes,6,opt,name=webhook_id,json=webhookId,proto3,oneof" json:"webhook_id,omitempty"` // (optional) information regarding the runtime environment of the worker - RuntimeInfo *RuntimeInfo `protobuf:"bytes,7,opt,name=runtimeInfo,proto3,oneof" json:"runtimeInfo,omitempty"` + RuntimeInfo *RuntimeInfo `protobuf:"bytes,7,opt,name=runtime_info,json=runtimeInfo,proto3,oneof" json:"runtime_info,omitempty"` + // (optional) the max number of durable slots this worker can handle + DurableSlots *int32 `protobuf:"varint,8,opt,name=durable_slots,json=durableSlots,proto3,oneof" json:"durable_slots,omitempty"` + // (optional) slot config for this worker (slot_type -> units) + SlotConfig map[string]int32 `protobuf:"bytes,9,rep,name=slot_config,json=slotConfig,proto3" json:"slot_config,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` } func (x *WorkerRegisterRequest) Reset() { @@ -591,9 +595,9 @@ func (x *WorkerRegisterRequest) GetServices() []string { return nil } -func (x *WorkerRegisterRequest) GetMaxRuns() int32 { - if x != nil && x.MaxRuns != nil { - return *x.MaxRuns +func (x *WorkerRegisterRequest) GetSlots() int32 { + if x != nil && x.Slots != nil { + return *x.Slots } return 0 } @@ -619,17 +623,31 @@ func (x *WorkerRegisterRequest) GetRuntimeInfo() *RuntimeInfo { return nil } +func (x *WorkerRegisterRequest) GetDurableSlots() int32 { + if x != nil && x.DurableSlots != nil { + return *x.DurableSlots + } + return 0 +} + +func (x *WorkerRegisterRequest) GetSlotConfig() map[string]int32 { + if x != nil { + return x.SlotConfig + } + return nil +} + type WorkerRegisterResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // the tenant id - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the id of the worker - WorkerId string `protobuf:"bytes,2,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` // the name of the worker - WorkerName string 
`protobuf:"bytes,3,opt,name=workerName,proto3" json:"workerName,omitempty"` + WorkerName string `protobuf:"bytes,3,opt,name=worker_name,json=workerName,proto3" json:"worker_name,omitempty"` } func (x *WorkerRegisterResponse) Reset() { @@ -691,7 +709,7 @@ type UpsertWorkerLabelsRequest struct { unknownFields protoimpl.UnknownFields // the name of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` // (optional) the worker labels Labels map[string]*WorkerLabels `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } @@ -748,9 +766,9 @@ type UpsertWorkerLabelsResponse struct { unknownFields protoimpl.UnknownFields // the tenant id - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the id of the worker - WorkerId string `protobuf:"bytes,2,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` } func (x *UpsertWorkerLabelsResponse) Reset() { @@ -805,31 +823,31 @@ type AssignedAction struct { unknownFields protoimpl.UnknownFields // the tenant id - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the workflow run id (optional) - WorkflowRunId string `protobuf:"bytes,2,opt,name=workflowRunId,proto3" json:"workflowRunId,omitempty"` + WorkflowRunId string `protobuf:"bytes,2,opt,name=workflow_run_id,json=workflowRunId,proto3" json:"workflow_run_id,omitempty"` // the get group key run id (optional) - GetGroupKeyRunId string `protobuf:"bytes,3,opt,name=getGroupKeyRunId,proto3" json:"getGroupKeyRunId,omitempty"` + GetGroupKeyRunId string `protobuf:"bytes,3,opt,name=get_group_key_run_id,json=getGroupKeyRunId,proto3" json:"get_group_key_run_id,omitempty"` // the job id - JobId string `protobuf:"bytes,4,opt,name=jobId,proto3" json:"jobId,omitempty"` + JobId string `protobuf:"bytes,4,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` // the job name - JobName string `protobuf:"bytes,5,opt,name=jobName,proto3" json:"jobName,omitempty"` + JobName string `protobuf:"bytes,5,opt,name=job_name,json=jobName,proto3" json:"job_name,omitempty"` // the job run id - JobRunId string `protobuf:"bytes,6,opt,name=jobRunId,proto3" json:"jobRunId,omitempty"` - // the step id - StepId string `protobuf:"bytes,7,opt,name=stepId,proto3" json:"stepId,omitempty"` - // the step run id - StepRunId string `protobuf:"bytes,8,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + JobRunId string `protobuf:"bytes,6,opt,name=job_run_id,json=jobRunId,proto3" json:"job_run_id,omitempty"` + // the task id + TaskId string `protobuf:"bytes,7,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + // the task external run id + TaskRunExternalId string `protobuf:"bytes,8,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` // the action id - ActionId string `protobuf:"bytes,9,opt,name=actionId,proto3" json:"actionId,omitempty"` + ActionId string `protobuf:"bytes,9,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` 
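With the regenerated types above, worker registration gains Slots (replacing MaxRuns), DurableSlots, and SlotConfig. A minimal sketch of a registration call follows, assuming the generated Dispatcher gRPC client from this contracts package; the endpoint, the omitted auth setup, and the "gpu" slot key are illustrative only.

// Hypothetical registration call using the regenerated request type.
// Endpoint, credentials, and slot keys are assumptions for illustration.
package main

import (
	"context"
	"log"

	contracts "github.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func ptr[T any](v T) *T { return &v }

func main() {
	// Auth interceptors are omitted here; a real dispatcher connection
	// would carry a tenant token.
	conn, err := grpc.NewClient("localhost:7070",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := contracts.NewDispatcherClient(conn)

	resp, err := client.Register(context.Background(), &contracts.WorkerRegisterRequest{
		WorkerName:   "example-worker",
		Actions:      []string{"default:process"},
		Slots:        ptr(int32(100)),            // replaces the old maxRuns
		DurableSlots: ptr(int32(10)),             // new: durable slot budget
		SlotConfig:   map[string]int32{"gpu": 4}, // new: slot_type -> units
	})
	if err != nil {
		log.Fatal(err)
	}

	log.Printf("registered worker %s for tenant %s", resp.GetWorkerId(), resp.GetTenantId())
}

Note that because the renames keep json=... aliases in the generated struct tags, protojson output for these messages still uses the original camelCase names (workerName, tenantId, and so on), while encoding/json now follows the snake_case tags.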
// the action type - ActionType ActionType `protobuf:"varint,10,opt,name=actionType,proto3,enum=ActionType" json:"actionType,omitempty"` + ActionType ActionType `protobuf:"varint,10,opt,name=action_type,json=actionType,proto3,enum=ActionType" json:"action_type,omitempty"` // the action payload - ActionPayload string `protobuf:"bytes,11,opt,name=actionPayload,proto3" json:"actionPayload,omitempty"` - // the step name - StepName string `protobuf:"bytes,12,opt,name=stepName,proto3" json:"stepName,omitempty"` + ActionPayload string `protobuf:"bytes,11,opt,name=action_payload,json=actionPayload,proto3" json:"action_payload,omitempty"` + // the task name + TaskName string `protobuf:"bytes,12,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` // the count number of the retry attempt - RetryCount int32 `protobuf:"varint,13,opt,name=retryCount,proto3" json:"retryCount,omitempty"` + RetryCount int32 `protobuf:"varint,13,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` // (optional) additional metadata set on the workflow AdditionalMetadata *string `protobuf:"bytes,14,opt,name=additional_metadata,json=additionalMetadata,proto3,oneof" json:"additional_metadata,omitempty"` // (optional) the child workflow index (if this is a child workflow) @@ -841,9 +859,9 @@ type AssignedAction struct { // (optional) the priority of the run Priority int32 `protobuf:"varint,18,opt,name=priority,proto3" json:"priority,omitempty"` // (optional) the workflow id - WorkflowId *string `protobuf:"bytes,19,opt,name=workflowId,proto3,oneof" json:"workflowId,omitempty"` + WorkflowId *string `protobuf:"bytes,19,opt,name=workflow_id,json=workflowId,proto3,oneof" json:"workflow_id,omitempty"` // (optional) the workflow version id - WorkflowVersionId *string `protobuf:"bytes,20,opt,name=workflowVersionId,proto3,oneof" json:"workflowVersionId,omitempty"` + WorkflowVersionId *string `protobuf:"bytes,20,opt,name=workflow_version_id,json=workflowVersionId,proto3,oneof" json:"workflow_version_id,omitempty"` } func (x *AssignedAction) Reset() { @@ -920,16 +938,16 @@ func (x *AssignedAction) GetJobRunId() string { return "" } -func (x *AssignedAction) GetStepId() string { +func (x *AssignedAction) GetTaskId() string { if x != nil { - return x.StepId + return x.TaskId } return "" } -func (x *AssignedAction) GetStepRunId() string { +func (x *AssignedAction) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -955,9 +973,9 @@ func (x *AssignedAction) GetActionPayload() string { return "" } -func (x *AssignedAction) GetStepName() string { +func (x *AssignedAction) GetTaskName() string { if x != nil { - return x.StepName + return x.TaskName } return "" } @@ -1024,7 +1042,7 @@ type WorkerListenRequest struct { unknownFields protoimpl.UnknownFields // the id of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` } func (x *WorkerListenRequest) Reset() { @@ -1072,7 +1090,7 @@ type WorkerUnsubscribeRequest struct { unknownFields protoimpl.UnknownFields // the id of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` } func (x *WorkerUnsubscribeRequest) Reset() { @@ -1120,9 +1138,9 @@ type WorkerUnsubscribeResponse struct { unknownFields 
protoimpl.UnknownFields // the tenant id to unsubscribe from - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the id of the worker - WorkerId string `protobuf:"bytes,2,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` } func (x *WorkerUnsubscribeResponse) Reset() { @@ -1177,17 +1195,17 @@ type GroupKeyActionEvent struct { unknownFields protoimpl.UnknownFields // the id of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` // the id of the job - WorkflowRunId string `protobuf:"bytes,2,opt,name=workflowRunId,proto3" json:"workflowRunId,omitempty"` - GetGroupKeyRunId string `protobuf:"bytes,3,opt,name=getGroupKeyRunId,proto3" json:"getGroupKeyRunId,omitempty"` + WorkflowRunId string `protobuf:"bytes,2,opt,name=workflow_run_id,json=workflowRunId,proto3" json:"workflow_run_id,omitempty"` + GetGroupKeyRunId string `protobuf:"bytes,3,opt,name=get_group_key_run_id,json=getGroupKeyRunId,proto3" json:"get_group_key_run_id,omitempty"` // the action id - ActionId string `protobuf:"bytes,4,opt,name=actionId,proto3" json:"actionId,omitempty"` - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` - // the step event type - EventType GroupKeyActionEventType `protobuf:"varint,6,opt,name=eventType,proto3,enum=GroupKeyActionEventType" json:"eventType,omitempty"` + ActionId string `protobuf:"bytes,4,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` + // the task event type + EventType GroupKeyActionEventType `protobuf:"varint,6,opt,name=event_type,json=eventType,proto3,enum=GroupKeyActionEventType" json:"event_type,omitempty"` // the event payload - EventPayload string `protobuf:"bytes,7,opt,name=eventPayload,proto3" json:"eventPayload,omitempty"` + EventPayload string `protobuf:"bytes,7,opt,name=event_payload,json=eventPayload,proto3" json:"event_payload,omitempty"` } func (x *GroupKeyActionEvent) Reset() { @@ -1277,26 +1295,26 @@ type StepActionEvent struct { unknownFields protoimpl.UnknownFields // the id of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` // the id of the job - JobId string `protobuf:"bytes,2,opt,name=jobId,proto3" json:"jobId,omitempty"` + JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` // the job run id - JobRunId string `protobuf:"bytes,3,opt,name=jobRunId,proto3" json:"jobRunId,omitempty"` - // the id of the step - StepId string `protobuf:"bytes,4,opt,name=stepId,proto3" json:"stepId,omitempty"` - // the step run id - StepRunId string `protobuf:"bytes,5,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + JobRunId string `protobuf:"bytes,3,opt,name=job_run_id,json=jobRunId,proto3" json:"job_run_id,omitempty"` + // the id of the task + TaskId string `protobuf:"bytes,4,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + // the task 
external run id + TaskRunExternalId string `protobuf:"bytes,5,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` // the action id - ActionId string `protobuf:"bytes,6,opt,name=actionId,proto3" json:"actionId,omitempty"` - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` - // the step event type - EventType StepActionEventType `protobuf:"varint,8,opt,name=eventType,proto3,enum=StepActionEventType" json:"eventType,omitempty"` + ActionId string `protobuf:"bytes,6,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` + // the task event type + EventType StepActionEventType `protobuf:"varint,8,opt,name=event_type,json=eventType,proto3,enum=StepActionEventType" json:"event_type,omitempty"` // the event payload - EventPayload string `protobuf:"bytes,9,opt,name=eventPayload,proto3" json:"eventPayload,omitempty"` + EventPayload string `protobuf:"bytes,9,opt,name=event_payload,json=eventPayload,proto3" json:"event_payload,omitempty"` // the retry count - RetryCount *int32 `protobuf:"varint,10,opt,name=retryCount,proto3,oneof" json:"retryCount,omitempty"` + RetryCount *int32 `protobuf:"varint,10,opt,name=retry_count,json=retryCount,proto3,oneof" json:"retry_count,omitempty"` // a flag indicating if the task should _not_ be retried - ShouldNotRetry *bool `protobuf:"varint,11,opt,name=shouldNotRetry,proto3,oneof" json:"shouldNotRetry,omitempty"` + ShouldNotRetry *bool `protobuf:"varint,11,opt,name=should_not_retry,json=shouldNotRetry,proto3,oneof" json:"should_not_retry,omitempty"` } func (x *StepActionEvent) Reset() { @@ -1352,16 +1370,16 @@ func (x *StepActionEvent) GetJobRunId() string { return "" } -func (x *StepActionEvent) GetStepId() string { +func (x *StepActionEvent) GetTaskId() string { if x != nil { - return x.StepId + return x.TaskId } return "" } -func (x *StepActionEvent) GetStepRunId() string { +func (x *StepActionEvent) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -1414,9 +1432,9 @@ type ActionEventResponse struct { unknownFields protoimpl.UnknownFields // the tenant id - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the id of the worker - WorkerId string `protobuf:"bytes,2,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` } func (x *ActionEventResponse) Reset() { @@ -1471,11 +1489,11 @@ type SubscribeToWorkflowEventsRequest struct { unknownFields protoimpl.UnknownFields // the id of the workflow run - WorkflowRunId *string `protobuf:"bytes,1,opt,name=workflowRunId,proto3,oneof" json:"workflowRunId,omitempty"` + WorkflowRunId *string `protobuf:"bytes,1,opt,name=workflow_run_id,json=workflowRunId,proto3,oneof" json:"workflow_run_id,omitempty"` // the key of the additional meta field to subscribe to - AdditionalMetaKey *string `protobuf:"bytes,2,opt,name=additionalMetaKey,proto3,oneof" json:"additionalMetaKey,omitempty"` + AdditionalMetaKey *string `protobuf:"bytes,2,opt,name=additional_meta_key,json=additionalMetaKey,proto3,oneof" json:"additional_meta_key,omitempty"` // the value of the 
additional meta field to subscribe to - AdditionalMetaValue *string `protobuf:"bytes,3,opt,name=additionalMetaValue,proto3,oneof" json:"additionalMetaValue,omitempty"` + AdditionalMetaValue *string `protobuf:"bytes,3,opt,name=additional_meta_value,json=additionalMetaValue,proto3,oneof" json:"additional_meta_value,omitempty"` } func (x *SubscribeToWorkflowEventsRequest) Reset() { @@ -1537,7 +1555,7 @@ type SubscribeToWorkflowRunsRequest struct { unknownFields protoimpl.UnknownFields // the id of the workflow run - WorkflowRunId string `protobuf:"bytes,1,opt,name=workflowRunId,proto3" json:"workflowRunId,omitempty"` + WorkflowRunId string `protobuf:"bytes,1,opt,name=workflow_run_id,json=workflowRunId,proto3" json:"workflow_run_id,omitempty"` } func (x *SubscribeToWorkflowRunsRequest) Reset() { @@ -1585,21 +1603,21 @@ type WorkflowEvent struct { unknownFields protoimpl.UnknownFields // the id of the workflow run - WorkflowRunId string `protobuf:"bytes,1,opt,name=workflowRunId,proto3" json:"workflowRunId,omitempty"` - ResourceType ResourceType `protobuf:"varint,2,opt,name=resourceType,proto3,enum=ResourceType" json:"resourceType,omitempty"` - EventType ResourceEventType `protobuf:"varint,3,opt,name=eventType,proto3,enum=ResourceEventType" json:"eventType,omitempty"` - ResourceId string `protobuf:"bytes,4,opt,name=resourceId,proto3" json:"resourceId,omitempty"` - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` + WorkflowRunId string `protobuf:"bytes,1,opt,name=workflow_run_id,json=workflowRunId,proto3" json:"workflow_run_id,omitempty"` + ResourceType ResourceType `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=ResourceType" json:"resource_type,omitempty"` + EventType ResourceEventType `protobuf:"varint,3,opt,name=event_type,json=eventType,proto3,enum=ResourceEventType" json:"event_type,omitempty"` + ResourceId string `protobuf:"bytes,4,opt,name=resource_id,json=resourceId,proto3" json:"resource_id,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` // the event payload - EventPayload string `protobuf:"bytes,6,opt,name=eventPayload,proto3" json:"eventPayload,omitempty"` + EventPayload string `protobuf:"bytes,6,opt,name=event_payload,json=eventPayload,proto3" json:"event_payload,omitempty"` // whether this is the last event for the workflow run - server // will hang up the connection but clients might want to case Hangup bool `protobuf:"varint,7,opt,name=hangup,proto3" json:"hangup,omitempty"` - // (optional) the max number of retries this step can handle - StepRetries *int32 `protobuf:"varint,8,opt,name=stepRetries,proto3,oneof" json:"stepRetries,omitempty"` - // (optional) the retry count of this step - RetryCount *int32 `protobuf:"varint,9,opt,name=retryCount,proto3,oneof" json:"retryCount,omitempty"` - EventIndex *int64 `protobuf:"varint,10,opt,name=eventIndex,proto3,oneof" json:"eventIndex,omitempty"` + // (optional) the max number of retries this task can handle + TaskRetries *int32 `protobuf:"varint,8,opt,name=task_retries,json=taskRetries,proto3,oneof" json:"task_retries,omitempty"` + // (optional) the retry count of this task + RetryCount *int32 `protobuf:"varint,9,opt,name=retry_count,json=retryCount,proto3,oneof" json:"retry_count,omitempty"` + EventIndex *int64 `protobuf:"varint,10,opt,name=event_index,json=eventIndex,proto3,oneof" json:"event_index,omitempty"` } func (x 
*WorkflowEvent) Reset() { @@ -1683,9 +1701,9 @@ func (x *WorkflowEvent) GetHangup() bool { return false } -func (x *WorkflowEvent) GetStepRetries() int32 { - if x != nil && x.StepRetries != nil { - return *x.StepRetries +func (x *WorkflowEvent) GetTaskRetries() int32 { + if x != nil && x.TaskRetries != nil { + return *x.TaskRetries } return 0 } @@ -1710,9 +1728,9 @@ type WorkflowRunEvent struct { unknownFields protoimpl.UnknownFields // the id of the workflow run - WorkflowRunId string `protobuf:"bytes,1,opt,name=workflowRunId,proto3" json:"workflowRunId,omitempty"` - EventType WorkflowRunEventType `protobuf:"varint,2,opt,name=eventType,proto3,enum=WorkflowRunEventType" json:"eventType,omitempty"` - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` + WorkflowRunId string `protobuf:"bytes,1,opt,name=workflow_run_id,json=workflowRunId,proto3" json:"workflow_run_id,omitempty"` + EventType WorkflowRunEventType `protobuf:"varint,2,opt,name=event_type,json=eventType,proto3,enum=WorkflowRunEventType" json:"event_type,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` Results []*StepRunResult `protobuf:"bytes,4,rep,name=results,proto3" json:"results,omitempty"` } @@ -1781,11 +1799,11 @@ type StepRunResult struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` - StepReadableId string `protobuf:"bytes,2,opt,name=stepReadableId,proto3" json:"stepReadableId,omitempty"` - JobRunId string `protobuf:"bytes,3,opt,name=jobRunId,proto3" json:"jobRunId,omitempty"` - Error *string `protobuf:"bytes,4,opt,name=error,proto3,oneof" json:"error,omitempty"` - Output *string `protobuf:"bytes,5,opt,name=output,proto3,oneof" json:"output,omitempty"` + TaskRunExternalId string `protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` + TaskName string `protobuf:"bytes,2,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` + JobRunId string `protobuf:"bytes,3,opt,name=job_run_id,json=jobRunId,proto3" json:"job_run_id,omitempty"` + Error *string `protobuf:"bytes,4,opt,name=error,proto3,oneof" json:"error,omitempty"` + Output *string `protobuf:"bytes,5,opt,name=output,proto3,oneof" json:"output,omitempty"` } func (x *StepRunResult) Reset() { @@ -1820,16 +1838,16 @@ func (*StepRunResult) Descriptor() ([]byte, []int) { return file_dispatcher_proto_rawDescGZIP(), []int{17} } -func (x *StepRunResult) GetStepRunId() string { +func (x *StepRunResult) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } -func (x *StepRunResult) GetStepReadableId() string { +func (x *StepRunResult) GetTaskName() string { if x != nil { - return x.StepReadableId + return x.TaskName } return "" } @@ -1860,14 +1878,14 @@ type OverridesData struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // the step run id - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + // the task run id + TaskRunExternalId string `protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` // the path of the data to set Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` // the value to set Value string 
`protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` // the filename of the caller - CallerFilename string `protobuf:"bytes,4,opt,name=callerFilename,proto3" json:"callerFilename,omitempty"` + CallerFilename string `protobuf:"bytes,4,opt,name=caller_filename,json=callerFilename,proto3" json:"caller_filename,omitempty"` } func (x *OverridesData) Reset() { @@ -1902,9 +1920,9 @@ func (*OverridesData) Descriptor() ([]byte, []int) { return file_dispatcher_proto_rawDescGZIP(), []int{18} } -func (x *OverridesData) GetStepRunId() string { +func (x *OverridesData) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -1974,9 +1992,9 @@ type HeartbeatRequest struct { unknownFields protoimpl.UnknownFields // the id of the worker - WorkerId string `protobuf:"bytes,1,opt,name=workerId,proto3" json:"workerId,omitempty"` + WorkerId string `protobuf:"bytes,1,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` // heartbeatAt is the time the worker sent the heartbeat - HeartbeatAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=heartbeatAt,proto3" json:"heartbeatAt,omitempty"` + HeartbeatAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=heartbeat_at,json=heartbeatAt,proto3" json:"heartbeat_at,omitempty"` } func (x *HeartbeatRequest) Reset() { @@ -2068,9 +2086,9 @@ type RefreshTimeoutRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // the id of the step run to release - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` - IncrementTimeoutBy string `protobuf:"bytes,2,opt,name=incrementTimeoutBy,proto3" json:"incrementTimeoutBy,omitempty"` + // the id of the task run to release + TaskRunExternalId string `protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` + IncrementTimeoutBy string `protobuf:"bytes,2,opt,name=increment_timeout_by,json=incrementTimeoutBy,proto3" json:"increment_timeout_by,omitempty"` } func (x *RefreshTimeoutRequest) Reset() { @@ -2105,9 +2123,9 @@ func (*RefreshTimeoutRequest) Descriptor() ([]byte, []int) { return file_dispatcher_proto_rawDescGZIP(), []int{22} } -func (x *RefreshTimeoutRequest) GetStepRunId() string { +func (x *RefreshTimeoutRequest) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -2124,7 +2142,7 @@ type RefreshTimeoutResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - TimeoutAt *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=timeoutAt,proto3" json:"timeoutAt,omitempty"` + TimeoutAt *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=timeout_at,json=timeoutAt,proto3" json:"timeout_at,omitempty"` } func (x *RefreshTimeoutResponse) Reset() { @@ -2171,8 +2189,8 @@ type ReleaseSlotRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // the id of the step run to release - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + // the id of the task run to release + TaskRunExternalId string `protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` } func (x *ReleaseSlotRequest) Reset() { @@ -2207,9 +2225,9 @@ func (*ReleaseSlotRequest) Descriptor() ([]byte, []int) { return file_dispatcher_proto_rawDescGZIP(), []int{24} } -func (x *ReleaseSlotRequest) GetStepRunId() string { +func (x *ReleaseSlotRequest) 
GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -2258,428 +2276,453 @@ var file_dispatcher_proto_rawDesc = []byte{ 0x0a, 0x10, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0x6a, 0x0a, 0x0c, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x12, 0x1f, 0x0a, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x22, - 0xfa, 0x01, 0x0a, 0x0b, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, - 0x23, 0x0a, 0x0a, 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x88, 0x01, 0x01, 0x12, 0x26, 0x0a, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x05, 0x2e, 0x53, 0x44, 0x4b, 0x53, 0x48, 0x01, 0x52, - 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2d, 0x0a, 0x0f, - 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, - 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x13, 0x0a, 0x02, 0x6f, - 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, 0x02, 0x6f, 0x73, 0x88, 0x01, 0x01, - 0x12, 0x19, 0x0a, 0x05, 0x65, 0x78, 0x74, 0x72, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x04, 0x52, 0x05, 0x65, 0x78, 0x74, 0x72, 0x61, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, - 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6c, - 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x6c, 0x61, 0x6e, 0x67, - 0x75, 0x61, 0x67, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x05, 0x0a, 0x03, 0x5f, - 0x6f, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x78, 0x74, 0x72, 0x61, 0x22, 0x94, 0x03, 0x0a, - 0x15, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, - 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, - 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x07, - 0x6d, 0x61, 0x78, 0x52, 0x75, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, - 0x07, 0x6d, 0x61, 0x78, 0x52, 0x75, 0x6e, 
0x73, 0x88, 0x01, 0x01, 0x12, 0x3a, 0x0a, 0x06, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x21, 0x0a, 0x09, 0x77, 0x65, 0x62, 0x68, 0x6f, - 0x6f, 0x6b, 0x49, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x09, 0x77, 0x65, - 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x0b, 0x72, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x0c, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x02, 0x52, - 0x0b, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x1a, - 0x48, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x23, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x0d, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6d, 0x61, - 0x78, 0x52, 0x75, 0x6e, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, - 0x6b, 0x49, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, - 0x6e, 0x66, 0x6f, 0x22, 0x70, 0x0a, 0x16, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, - 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, - 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, - 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, - 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4e, - 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xc1, 0x01, 0x0a, 0x19, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, - 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, - 0x3e, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x26, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, - 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, - 0x48, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x23, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x0d, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x54, 
0x0a, 0x1a, 0x55, 0x70, 0x73, - 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, - 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, - 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, - 0x81, 0x07, 0x0a, 0x0e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x24, - 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x10, 0x67, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, - 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, - 0x67, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, 0x64, - 0x12, 0x14, 0x0a, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x6a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, - 0x12, 0x1a, 0x0a, 0x08, 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, - 0x73, 0x74, 0x65, 0x70, 0x49, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, - 0x65, 0x70, 0x49, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, - 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, - 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x2b, - 0x0a, 0x0a, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x18, 0x0a, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x0b, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, - 0x0a, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x0b, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0d, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x74, 0x65, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x0c, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x74, 0x65, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, - 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, - 0x05, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, - 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, - 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x88, 0x01, 0x01, 0x12, 0x35, 0x0a, 0x14, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, 
0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x0f, 0x20, 0x01, 0x28, - 0x05, 0x48, 0x01, 0x52, 0x12, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x12, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6b, 0x65, 0x79, - 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x10, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x38, 0x0a, - 0x16, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, - 0x13, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, - 0x69, 0x74, 0x79, 0x18, 0x12, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, - 0x69, 0x74, 0x79, 0x12, 0x23, 0x0a, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, - 0x64, 0x18, 0x13, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x11, 0x77, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x14, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, - 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x42, 0x17, 0x0a, 0x15, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x15, 0x0a, 0x13, - 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, - 0x6b, 0x65, 0x79, 0x42, 0x19, 0x0a, 0x17, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0d, - 0x0a, 0x0b, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x42, 0x14, 0x0a, - 0x12, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x49, 0x64, 0x22, 0x31, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, - 0x74, 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, 0x36, 0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, - 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, 0x53, - 0x0a, 0x19, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, - 0x69, 0x62, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x74, - 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, - 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x49, 0x64, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x49, 0x64, 0x22, 0xbf, 0x02, 0x0a, 0x13, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, - 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, - 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2a, 0x0a, - 0x10, 0x67, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, - 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x67, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, - 0x70, 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x36, 0x0a, 0x09, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x47, - 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, - 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0xc1, 0x03, 0x0a, 0x0f, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, - 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, - 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6a, - 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, - 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x65, 0x70, 0x49, - 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x65, 0x70, 0x49, 0x64, 0x12, - 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x1a, 0x0a, - 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 
0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x32, 0x0a, - 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x14, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, - 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x23, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0a, 0x72, 0x65, 0x74, - 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0e, 0x73, 0x68, - 0x6f, 0x75, 0x6c, 0x64, 0x4e, 0x6f, 0x74, 0x52, 0x65, 0x74, 0x72, 0x79, 0x18, 0x0b, 0x20, 0x01, - 0x28, 0x08, 0x48, 0x01, 0x52, 0x0e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x4e, 0x6f, 0x74, 0x52, - 0x65, 0x74, 0x72, 0x79, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x74, 0x72, - 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x73, 0x68, 0x6f, 0x75, 0x6c, - 0x64, 0x4e, 0x6f, 0x74, 0x52, 0x65, 0x74, 0x72, 0x79, 0x22, 0x4d, 0x0a, 0x13, 0x41, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, - 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, 0xf7, 0x01, 0x0a, 0x20, 0x53, 0x75, 0x62, - 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, 0x0a, - 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, + 0x6f, 0x74, 0x6f, 0x22, 0x6e, 0x0a, 0x0c, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, + 0x65, 0x6c, 0x73, 0x12, 0x20, 0x0a, 0x09, 0x73, 0x74, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x73, 0x74, 0x72, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x73, 0x74, 0x72, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x22, 0xfe, 0x01, 0x0a, 0x0b, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, + 0x6e, 0x66, 0x6f, 0x12, 0x24, 0x0a, 0x0b, 0x73, 0x64, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x64, 0x6b, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x26, 0x0a, 0x08, 0x6c, 0x61, 0x6e, + 0x67, 0x75, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x05, 0x2e, 0x53, 0x44, + 0x4b, 0x53, 0x48, 0x01, 0x52, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x88, 0x01, + 0x01, 0x12, 0x2e, 0x0a, 0x10, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 
0x48, 0x02, 0x52, 0x0f, 0x6c, + 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, + 0x01, 0x12, 0x13, 0x0a, 0x02, 0x6f, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x03, 0x52, + 0x02, 0x6f, 0x73, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x65, 0x78, 0x74, 0x72, 0x61, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, 0x05, 0x65, 0x78, 0x74, 0x72, 0x61, 0x88, 0x01, + 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x64, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x42, 0x13, + 0x0a, 0x11, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x42, 0x05, 0x0a, 0x03, 0x5f, 0x6f, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, + 0x78, 0x74, 0x72, 0x61, 0x22, 0xd7, 0x04, 0x0a, 0x15, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, + 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, + 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, + 0x18, 0x0a, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x05, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x88, 0x01, 0x01, + 0x12, 0x3a, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x22, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x22, 0x0a, 0x0a, + 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x01, 0x52, 0x09, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x49, 0x64, 0x88, 0x01, 0x01, + 0x12, 0x34, 0x0a, 0x0c, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, + 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x02, 0x52, 0x0b, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x49, + 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x12, 0x28, 0x0a, 0x0d, 0x64, 0x75, 0x72, 0x61, 0x62, 0x6c, + 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x48, 0x03, 0x52, + 0x0c, 0x64, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x73, 0x88, 0x01, 0x01, + 0x12, 0x47, 0x0a, 0x0b, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, + 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x6c, + 0x6f, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x73, + 0x6c, 0x6f, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x48, 0x0a, 0x0b, 0x4c, 0x61, 0x62, + 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x76, 
0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x57, 0x6f, 0x72, 0x6b, + 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x1a, 0x3d, 0x0a, 0x0f, 0x53, 0x6c, 0x6f, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x42, 0x0d, 0x0a, 0x0b, + 0x5f, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x5f, 0x69, 0x64, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, + 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x42, 0x10, 0x0a, 0x0e, + 0x5f, 0x64, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x73, 0x6c, 0x6f, 0x74, 0x73, 0x22, 0x73, + 0x0a, 0x16, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x65, 0x6e, 0x61, + 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, + 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, + 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4e, + 0x61, 0x6d, 0x65, 0x22, 0xc2, 0x01, 0x0a, 0x19, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x3e, + 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, + 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, + 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x48, + 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x23, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, + 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x56, 0x0a, 0x1a, 0x55, 0x70, 0x73, 0x65, + 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, + 0x74, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, + 0x22, 0xab, 0x07, 0x0a, 0x0e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 
0x12, 0x1b, 0x0a, 0x09, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, + 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x14, 0x67, 0x65, 0x74, 0x5f, + 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x67, 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, + 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, + 0x19, 0x0a, 0x08, 0x6a, 0x6f, 0x62, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6a, 0x6f, 0x62, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x0a, 0x6a, 0x6f, + 0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, + 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, + 0x64, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, + 0x2c, 0x0a, 0x0b, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0b, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x0a, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x25, 0x0a, + 0x0e, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x0d, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, + 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x35, 0x0a, 0x14, 0x63, 0x68, 0x69, 0x6c, + 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x18, 0x0f, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x12, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, + 0x31, 0x0a, 0x12, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x10, 
0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x10, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4b, 0x65, 0x79, 0x88, + 0x01, 0x01, 0x12, 0x38, 0x0a, 0x16, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x77, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x03, 0x52, 0x13, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, + 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x12, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, + 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x24, 0x0a, 0x0b, 0x77, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x13, 0x20, 0x01, 0x28, 0x09, 0x48, 0x04, 0x52, + 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x33, + 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x48, 0x05, 0x52, 0x11, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, + 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x17, 0x0a, 0x15, 0x5f, + 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x42, 0x15, 0x0a, 0x13, 0x5f, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6b, 0x65, 0x79, 0x42, 0x19, 0x0a, 0x17, 0x5f, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, + 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x22, 0x32, + 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, + 0x49, 0x64, 0x22, 0x37, 0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, + 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, + 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, 0x55, 0x0a, 0x19, 0x57, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x65, 0x6e, 0x61, + 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, + 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, + 0x49, 0x64, 0x22, 0xca, 0x02, 0x0a, 0x13, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x08, 0x77, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, + 0x2e, 0x0a, 0x14, 0x67, 0x65, 0x74, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x6b, 0x65, 0x79, + 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x67, + 0x65, 0x74, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, + 0x1b, 0x0a, 0x09, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x43, 0x0a, 0x0f, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x12, 0x37, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, + 0xe3, 0x03, 0x0a, 0x0f, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, + 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1c, 0x0a, 0x0a, 0x6a, 0x6f, 0x62, 0x5f, 0x72, + 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, 0x6f, 0x62, + 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, 0x2f, + 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x74, 0x61, + 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, + 0x1b, 0x0a, 0x09, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x43, 0x0a, 0x0f, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x12, 0x33, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 
0x18, + 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x14, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x24, 0x0a, 0x0b, 0x72, + 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x05, + 0x48, 0x00, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, + 0x01, 0x12, 0x2d, 0x0a, 0x10, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x5f, 0x6e, 0x6f, 0x74, 0x5f, + 0x72, 0x65, 0x74, 0x72, 0x79, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x0e, 0x73, + 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x4e, 0x6f, 0x74, 0x52, 0x65, 0x74, 0x72, 0x79, 0x88, 0x01, 0x01, + 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x5f, 0x6e, 0x6f, 0x74, 0x5f, + 0x72, 0x65, 0x74, 0x72, 0x79, 0x22, 0x4f, 0x0a, 0x13, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, + 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, + 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x22, 0x83, 0x02, 0x0a, 0x20, 0x53, 0x75, 0x62, 0x73, 0x63, + 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x0f, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x52, 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x11, 0x61, 0x64, 0x64, 0x69, - 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x11, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x35, 0x0a, 0x13, 0x61, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x13, 0x61, 0x64, 0x64, 0x69, - 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x88, - 0x01, 0x01, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, - 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x22, 0x46, 0x0a, 0x1e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, - 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x52, 0x75, 0x6e, 
0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xd9, 0x03, 0x0a, 0x0d, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x0d, - 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, - 0x49, 0x64, 0x12, 0x31, 0x0a, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, - 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x30, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, + 0x52, 0x75, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x5f, 0x6b, 0x65, 0x79, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x11, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x88, 0x01, 0x01, 0x12, 0x37, 0x0a, + 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x13, + 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x88, 0x01, 0x01, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x5f, 0x6b, + 0x65, 0x79, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, + 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x48, 0x0a, 0x1e, + 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, + 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xe6, 0x03, 0x0a, 0x0d, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, + 0x12, 0x32, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0d, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x31, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x49, 0x64, 0x18, 0x04, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x22, 0x0a, 0x0c, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, - 0x16, 0x0a, 0x06, 0x68, 0x61, 0x6e, 0x67, 0x75, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x06, 0x68, 0x61, 0x6e, 0x67, 0x75, 0x70, 0x12, 0x25, 0x0a, 0x0b, 0x73, 0x74, 0x65, 0x70, 0x52, - 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x0b, - 0x73, 0x74, 0x65, 0x70, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x88, 0x01, 0x01, 0x12, 0x23, - 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x09, 0x20, 0x01, - 0x28, 0x05, 0x48, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, - 0x88, 0x01, 0x01, 0x12, 0x23, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x48, 0x02, 0x52, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x65, - 0x70, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x74, - 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0xdb, 0x01, 0x0a, 0x10, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, - 0x64, 0x12, 0x33, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, - 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28, 0x0a, 0x07, 0x72, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x53, 0x74, - 0x65, 0x70, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x73, 0x22, 0xbe, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, - 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, - 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x0e, 0x73, 0x74, 0x65, 0x70, 
0x52, 0x65, 0x61, 0x64, - 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x74, - 0x65, 0x70, 0x52, 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, - 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x19, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x88, 0x01, 0x01, - 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x7f, 0x0a, 0x0d, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, - 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, - 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x26, - 0x0a, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, - 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x17, 0x0a, 0x15, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, - 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x6c, 0x0a, 0x10, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, - 0x3c, 0x0a, 0x0b, 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x0b, 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x22, 0x13, 0x0a, - 0x11, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x65, 0x0a, 0x15, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, - 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, - 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x12, 0x69, 0x6e, 0x63, - 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x22, 0x52, 0x0a, 0x16, 0x52, 0x65, 0x66, - 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74, 0x22, 0x32, 0x0a, - 0x12, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, - 0x64, 0x22, 0x15, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x37, 0x0a, 0x04, 0x53, 0x44, 0x4b, 0x53, - 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x06, 0x0a, - 0x02, 0x47, 0x4f, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x59, 0x54, 0x48, 0x4f, 0x4e, 0x10, - 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x10, - 0x03, 0x2a, 0x4e, 0x0a, 0x0a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, - 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x52, 0x54, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, - 0x4e, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x53, 0x54, - 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x52, - 0x54, 0x5f, 0x47, 0x45, 0x54, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x10, - 0x02, 0x2a, 0xa2, 0x01, 0x0a, 0x17, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, - 0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, - 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, - 0x20, 0x0a, 0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, - 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, - 0x01, 0x12, 0x22, 0x0a, 0x1e, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, - 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, - 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1f, 0x0a, 0x1b, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, - 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, - 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x2a, 0xac, 0x01, 0x0a, 0x13, 0x53, 0x74, 0x65, 0x70, 0x41, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, - 0x0a, 0x17, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x53, - 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, - 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x54, 0x45, 0x50, - 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, - 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x54, 0x45, 0x50, 0x5f, - 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, - 0x44, 0x10, 0x03, 0x12, 0x20, 0x0a, 0x1c, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, - 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x43, 0x4b, 0x4e, 0x4f, 0x57, 0x4c, 0x45, 0x44, - 0x47, 0x45, 0x44, 0x10, 
0x04, 0x2a, 0x65, 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, - 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, - 0x12, 0x1a, 0x0a, 0x16, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, 0x1e, 0x0a, 0x1a, - 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x4f, - 0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x02, 0x2a, 0xfe, 0x01, 0x0a, - 0x11, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, - 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, - 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, - 0x4e, 0x10, 0x00, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, - 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, - 0x45, 0x44, 0x10, 0x01, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, - 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, - 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, - 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, - 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, - 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, - 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, + 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x64, 0x12, 0x43, 0x0a, 0x0f, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x23, 0x0a, + 0x0d, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x61, 0x6e, 0x67, 0x75, 0x70, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x06, 0x68, 0x61, 0x6e, 0x67, 0x75, 0x70, 0x12, 0x26, 0x0a, 0x0c, 0x74, 0x61, + 0x73, 0x6b, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, + 0x48, 0x00, 0x52, 0x0b, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x88, + 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x24, 0x0a, 0x0b, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x48, 0x02, 0x52, + 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 
0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x0f, + 0x0a, 0x0d, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x42, + 0x0e, 0x0a, 0x0c, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, + 0x0e, 0x0a, 0x0c, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x22, + 0xdf, 0x01, 0x0a, 0x10, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, + 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x34, 0x0a, 0x0a, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x15, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x43, 0x0a, 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, + 0x75, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x73, 0x22, 0xc8, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x1c, 0x0a, 0x0a, 0x6a, 0x6f, 0x62, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, + 0x19, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x93, 0x01, 0x0a, + 0x0d, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2f, + 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x74, 0x61, + 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, + 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 
0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x63, 0x61, 0x6c, + 0x6c, 0x65, 0x72, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x17, 0x0a, 0x15, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, + 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6e, 0x0a, 0x10, 0x48, + 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x3d, 0x0a, 0x0c, + 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, + 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x48, + 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x7a, 0x0a, 0x15, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, + 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, + 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x69, 0x6e, + 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, + 0x62, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x22, 0x53, 0x0a, 0x16, + 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, + 0x74, 0x5f, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, + 0x74, 0x22, 0x45, 0x0a, 0x12, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, + 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x22, 0x15, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x65, + 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, + 0x37, 0x0a, 0x04, 0x53, 0x44, 0x4b, 0x53, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, + 0x57, 0x4e, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x47, 0x4f, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, + 0x50, 0x59, 0x54, 0x48, 0x4f, 0x4e, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, + 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x10, 0x03, 0x2a, 0x4e, 0x0a, 0x0a, 0x41, 0x63, 0x74, 0x69, + 
0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x52, 0x54, 0x5f, + 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x43, 0x41, + 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, + 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x52, 0x54, 0x5f, 0x47, 0x45, 0x54, 0x5f, 0x47, 0x52, 0x4f, + 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x10, 0x02, 0x2a, 0xa2, 0x01, 0x0a, 0x17, 0x47, 0x72, 0x6f, + 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, + 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, + 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x20, 0x0a, 0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, + 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, + 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x22, 0x0a, 0x1e, 0x47, 0x52, 0x4f, 0x55, + 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1f, 0x0a, 0x1b, + 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x2a, 0xac, 0x01, + 0x0a, 0x13, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, + 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, + 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, + 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, + 0x1d, 0x0a, 0x19, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1a, + 0x0a, 0x16, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, + 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12, 0x20, 0x0a, 0x1c, 0x53, 0x54, + 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x43, + 0x4b, 0x4e, 0x4f, 0x57, 0x4c, 0x45, 0x44, 0x47, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x65, 0x0a, 0x0c, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, + 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, + 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1a, 0x0a, 0x16, 0x52, 0x45, 0x53, 0x4f, 0x55, + 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, + 0x4e, 0x10, 0x01, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x5f, 0x52, 0x55, + 0x4e, 0x10, 0x02, 0x2a, 0xfe, 0x01, 0x0a, 0x11, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, + 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 
0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x54, 0x49, 0x4d, 0x45, 0x44, 0x5f, 0x4f, 0x55, 0x54, 0x10, 0x05, 0x12, 0x1e, 0x0a, - 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, - 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, 0x06, 0x2a, 0x3c, 0x0a, - 0x14, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x20, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f, - 0x57, 0x5f, 0x52, 0x55, 0x4e, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, - 0x5f, 0x46, 0x49, 0x4e, 0x49, 0x53, 0x48, 0x45, 0x44, 0x10, 0x00, 0x32, 0xf8, 0x06, 0x0a, 0x0a, - 0x44, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x08, 0x52, 0x65, - 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, - 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, - 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x33, 0x0a, 0x06, 0x4c, 0x69, 0x73, - 0x74, 0x65, 0x6e, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69, - 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x35, - 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x56, 0x32, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, + 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x21, 0x0a, 0x1d, 0x52, + 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1e, + 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, + 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12, 0x21, + 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, + 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x4c, 0x45, 0x44, 0x10, + 0x04, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, + 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x49, 0x4d, 0x45, 0x44, 0x5f, 0x4f, + 0x55, 0x54, 0x10, 0x05, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, + 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x45, + 0x41, 0x4d, 0x10, 0x06, 0x2a, 0x3c, 0x0a, 0x14, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, + 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x20, + 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x5f, 0x52, 0x55, 0x4e, 0x5f, 0x45, 0x56, 0x45, + 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x4e, 0x49, 0x53, 0x48, 0x45, 0x44, + 0x10, 0x00, 0x32, 0xf8, 0x06, 0x0a, 0x0a, 0x44, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, + 0x72, 0x12, 0x3d, 0x0a, 0x08, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x16, 0x2e, + 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, + 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x33, 0x0a, 0x06, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x34, 0x0a, 0x09, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, - 0x61, 0x74, 0x12, 0x11, 0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, - 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x52, 0x0a, 0x19, 0x53, - 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, - 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, - 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, - 0x53, 0x0a, 0x17, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x53, 0x75, 0x62, - 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, - 0x28, 0x01, 0x30, 0x01, 0x12, 0x3f, 0x0a, 0x13, 0x53, 0x65, 0x6e, 0x64, 0x53, 0x74, 0x65, 0x70, - 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x2e, 0x53, 0x74, - 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, + 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x35, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x56, + 0x32, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, + 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x34, 0x0a, 0x09, + 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x12, 0x11, 0x2e, 0x48, 0x65, 0x61, 0x72, + 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x48, + 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x52, 0x0a, 0x19, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, + 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, + 0x21, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x53, 0x0a, 0x17, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, + 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, + 0x73, 0x12, 0x1f, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 
0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x28, 0x01, 0x30, 0x01, 0x12, 0x3f, 0x0a, 0x13, 0x53, + 0x65, 0x6e, 0x64, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x12, 0x10, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x47, 0x0a, 0x17, + 0x53, 0x65, 0x6e, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x14, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, + 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x47, 0x0a, 0x17, 0x53, 0x65, 0x6e, 0x64, 0x47, 0x72, 0x6f, - 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x12, 0x14, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3c, - 0x0a, 0x10, 0x50, 0x75, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x12, 0x0e, 0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x1a, 0x16, 0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x46, 0x0a, 0x0b, - 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x19, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, - 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x16, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, - 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3a, 0x0a, 0x0b, 0x52, 0x65, 0x6c, - 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, - 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, - 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x12, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, - 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1a, 0x2e, 0x55, 0x70, - 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, - 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 
0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x47, 0x5a, 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, - 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, 0x61, - 0x74, 0x63, 0x68, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3c, 0x0a, 0x10, 0x50, 0x75, 0x74, 0x4f, 0x76, 0x65, 0x72, + 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x0e, 0x2e, 0x4f, 0x76, 0x65, 0x72, + 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x16, 0x2e, 0x4f, 0x76, 0x65, 0x72, + 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x46, 0x0a, 0x0b, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, + 0x62, 0x65, 0x12, 0x19, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, + 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, + 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x52, + 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x16, 0x2e, + 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, + 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x3a, 0x0a, 0x0b, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x12, + 0x13, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, + 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x12, + 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x12, 0x1a, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, + 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, + 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, + 0x65, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x47, 0x5a, + 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, + 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2695,7 +2738,7 @@ func file_dispatcher_proto_rawDescGZIP() []byte { } var file_dispatcher_proto_enumTypes = make([]protoimpl.EnumInfo, 7) -var file_dispatcher_proto_msgTypes = make([]protoimpl.MessageInfo, 28) +var file_dispatcher_proto_msgTypes = make([]protoimpl.MessageInfo, 29) var file_dispatcher_proto_goTypes = []interface{}{ (SDKS)(0), // 0: SDKS (ActionType)(0), // 1: 
ActionType @@ -2731,60 +2774,62 @@ var file_dispatcher_proto_goTypes = []interface{}{ (*ReleaseSlotRequest)(nil), // 31: ReleaseSlotRequest (*ReleaseSlotResponse)(nil), // 32: ReleaseSlotResponse nil, // 33: WorkerRegisterRequest.LabelsEntry - nil, // 34: UpsertWorkerLabelsRequest.LabelsEntry - (*timestamppb.Timestamp)(nil), // 35: google.protobuf.Timestamp + nil, // 34: WorkerRegisterRequest.SlotConfigEntry + nil, // 35: UpsertWorkerLabelsRequest.LabelsEntry + (*timestamppb.Timestamp)(nil), // 36: google.protobuf.Timestamp } var file_dispatcher_proto_depIdxs = []int32{ 0, // 0: RuntimeInfo.language:type_name -> SDKS 33, // 1: WorkerRegisterRequest.labels:type_name -> WorkerRegisterRequest.LabelsEntry - 8, // 2: WorkerRegisterRequest.runtimeInfo:type_name -> RuntimeInfo - 34, // 3: UpsertWorkerLabelsRequest.labels:type_name -> UpsertWorkerLabelsRequest.LabelsEntry - 1, // 4: AssignedAction.actionType:type_name -> ActionType - 35, // 5: GroupKeyActionEvent.eventTimestamp:type_name -> google.protobuf.Timestamp - 2, // 6: GroupKeyActionEvent.eventType:type_name -> GroupKeyActionEventType - 35, // 7: StepActionEvent.eventTimestamp:type_name -> google.protobuf.Timestamp - 3, // 8: StepActionEvent.eventType:type_name -> StepActionEventType - 4, // 9: WorkflowEvent.resourceType:type_name -> ResourceType - 5, // 10: WorkflowEvent.eventType:type_name -> ResourceEventType - 35, // 11: WorkflowEvent.eventTimestamp:type_name -> google.protobuf.Timestamp - 6, // 12: WorkflowRunEvent.eventType:type_name -> WorkflowRunEventType - 35, // 13: WorkflowRunEvent.eventTimestamp:type_name -> google.protobuf.Timestamp - 24, // 14: WorkflowRunEvent.results:type_name -> StepRunResult - 35, // 15: HeartbeatRequest.heartbeatAt:type_name -> google.protobuf.Timestamp - 35, // 16: RefreshTimeoutResponse.timeoutAt:type_name -> google.protobuf.Timestamp - 7, // 17: WorkerRegisterRequest.LabelsEntry.value:type_name -> WorkerLabels - 7, // 18: UpsertWorkerLabelsRequest.LabelsEntry.value:type_name -> WorkerLabels - 9, // 19: Dispatcher.Register:input_type -> WorkerRegisterRequest - 14, // 20: Dispatcher.Listen:input_type -> WorkerListenRequest - 14, // 21: Dispatcher.ListenV2:input_type -> WorkerListenRequest - 27, // 22: Dispatcher.Heartbeat:input_type -> HeartbeatRequest - 20, // 23: Dispatcher.SubscribeToWorkflowEvents:input_type -> SubscribeToWorkflowEventsRequest - 21, // 24: Dispatcher.SubscribeToWorkflowRuns:input_type -> SubscribeToWorkflowRunsRequest - 18, // 25: Dispatcher.SendStepActionEvent:input_type -> StepActionEvent - 17, // 26: Dispatcher.SendGroupKeyActionEvent:input_type -> GroupKeyActionEvent - 25, // 27: Dispatcher.PutOverridesData:input_type -> OverridesData - 15, // 28: Dispatcher.Unsubscribe:input_type -> WorkerUnsubscribeRequest - 29, // 29: Dispatcher.RefreshTimeout:input_type -> RefreshTimeoutRequest - 31, // 30: Dispatcher.ReleaseSlot:input_type -> ReleaseSlotRequest - 11, // 31: Dispatcher.UpsertWorkerLabels:input_type -> UpsertWorkerLabelsRequest - 10, // 32: Dispatcher.Register:output_type -> WorkerRegisterResponse - 13, // 33: Dispatcher.Listen:output_type -> AssignedAction - 13, // 34: Dispatcher.ListenV2:output_type -> AssignedAction - 28, // 35: Dispatcher.Heartbeat:output_type -> HeartbeatResponse - 22, // 36: Dispatcher.SubscribeToWorkflowEvents:output_type -> WorkflowEvent - 23, // 37: Dispatcher.SubscribeToWorkflowRuns:output_type -> WorkflowRunEvent - 19, // 38: Dispatcher.SendStepActionEvent:output_type -> ActionEventResponse - 19, // 39: 
Dispatcher.SendGroupKeyActionEvent:output_type -> ActionEventResponse - 26, // 40: Dispatcher.PutOverridesData:output_type -> OverridesDataResponse - 16, // 41: Dispatcher.Unsubscribe:output_type -> WorkerUnsubscribeResponse - 30, // 42: Dispatcher.RefreshTimeout:output_type -> RefreshTimeoutResponse - 32, // 43: Dispatcher.ReleaseSlot:output_type -> ReleaseSlotResponse - 12, // 44: Dispatcher.UpsertWorkerLabels:output_type -> UpsertWorkerLabelsResponse - 32, // [32:45] is the sub-list for method output_type - 19, // [19:32] is the sub-list for method input_type - 19, // [19:19] is the sub-list for extension type_name - 19, // [19:19] is the sub-list for extension extendee - 0, // [0:19] is the sub-list for field type_name + 8, // 2: WorkerRegisterRequest.runtime_info:type_name -> RuntimeInfo + 34, // 3: WorkerRegisterRequest.slot_config:type_name -> WorkerRegisterRequest.SlotConfigEntry + 35, // 4: UpsertWorkerLabelsRequest.labels:type_name -> UpsertWorkerLabelsRequest.LabelsEntry + 1, // 5: AssignedAction.action_type:type_name -> ActionType + 36, // 6: GroupKeyActionEvent.event_timestamp:type_name -> google.protobuf.Timestamp + 2, // 7: GroupKeyActionEvent.event_type:type_name -> GroupKeyActionEventType + 36, // 8: StepActionEvent.event_timestamp:type_name -> google.protobuf.Timestamp + 3, // 9: StepActionEvent.event_type:type_name -> StepActionEventType + 4, // 10: WorkflowEvent.resource_type:type_name -> ResourceType + 5, // 11: WorkflowEvent.event_type:type_name -> ResourceEventType + 36, // 12: WorkflowEvent.event_timestamp:type_name -> google.protobuf.Timestamp + 6, // 13: WorkflowRunEvent.event_type:type_name -> WorkflowRunEventType + 36, // 14: WorkflowRunEvent.event_timestamp:type_name -> google.protobuf.Timestamp + 24, // 15: WorkflowRunEvent.results:type_name -> StepRunResult + 36, // 16: HeartbeatRequest.heartbeat_at:type_name -> google.protobuf.Timestamp + 36, // 17: RefreshTimeoutResponse.timeout_at:type_name -> google.protobuf.Timestamp + 7, // 18: WorkerRegisterRequest.LabelsEntry.value:type_name -> WorkerLabels + 7, // 19: UpsertWorkerLabelsRequest.LabelsEntry.value:type_name -> WorkerLabels + 9, // 20: Dispatcher.Register:input_type -> WorkerRegisterRequest + 14, // 21: Dispatcher.Listen:input_type -> WorkerListenRequest + 14, // 22: Dispatcher.ListenV2:input_type -> WorkerListenRequest + 27, // 23: Dispatcher.Heartbeat:input_type -> HeartbeatRequest + 20, // 24: Dispatcher.SubscribeToWorkflowEvents:input_type -> SubscribeToWorkflowEventsRequest + 21, // 25: Dispatcher.SubscribeToWorkflowRuns:input_type -> SubscribeToWorkflowRunsRequest + 18, // 26: Dispatcher.SendStepActionEvent:input_type -> StepActionEvent + 17, // 27: Dispatcher.SendGroupKeyActionEvent:input_type -> GroupKeyActionEvent + 25, // 28: Dispatcher.PutOverridesData:input_type -> OverridesData + 15, // 29: Dispatcher.Unsubscribe:input_type -> WorkerUnsubscribeRequest + 29, // 30: Dispatcher.RefreshTimeout:input_type -> RefreshTimeoutRequest + 31, // 31: Dispatcher.ReleaseSlot:input_type -> ReleaseSlotRequest + 11, // 32: Dispatcher.UpsertWorkerLabels:input_type -> UpsertWorkerLabelsRequest + 10, // 33: Dispatcher.Register:output_type -> WorkerRegisterResponse + 13, // 34: Dispatcher.Listen:output_type -> AssignedAction + 13, // 35: Dispatcher.ListenV2:output_type -> AssignedAction + 28, // 36: Dispatcher.Heartbeat:output_type -> HeartbeatResponse + 22, // 37: Dispatcher.SubscribeToWorkflowEvents:output_type -> WorkflowEvent + 23, // 38: Dispatcher.SubscribeToWorkflowRuns:output_type -> WorkflowRunEvent + 
19, // 39: Dispatcher.SendStepActionEvent:output_type -> ActionEventResponse + 19, // 40: Dispatcher.SendGroupKeyActionEvent:output_type -> ActionEventResponse + 26, // 41: Dispatcher.PutOverridesData:output_type -> OverridesDataResponse + 16, // 42: Dispatcher.Unsubscribe:output_type -> WorkerUnsubscribeResponse + 30, // 43: Dispatcher.RefreshTimeout:output_type -> RefreshTimeoutResponse + 32, // 44: Dispatcher.ReleaseSlot:output_type -> ReleaseSlotResponse + 12, // 45: Dispatcher.UpsertWorkerLabels:output_type -> UpsertWorkerLabelsResponse + 33, // [33:46] is the sub-list for method output_type + 20, // [20:33] is the sub-list for method input_type + 20, // [20:20] is the sub-list for extension type_name + 20, // [20:20] is the sub-list for extension extendee + 0, // [0:20] is the sub-list for field type_name } func init() { file_dispatcher_proto_init() } @@ -3120,7 +3165,7 @@ func file_dispatcher_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_dispatcher_proto_rawDesc, NumEnums: 7, - NumMessages: 28, + NumMessages: 29, NumExtensions: 0, NumServices: 1, }, diff --git a/internal/services/dispatcher/server.go b/internal/services/dispatcher/server.go index 3472aca9b0..500ccd5f84 100644 --- a/internal/services/dispatcher/server.go +++ b/internal/services/dispatcher/server.go @@ -50,9 +50,18 @@ func (s *DispatcherImpl) Register(ctx context.Context, request *contracts.Worker } } - if request.MaxRuns != nil { - mr := int(*request.MaxRuns) - opts.MaxRuns = &mr + if request.Slots != nil { + mr := int(*request.Slots) + opts.Slots = &mr + } + + if request.DurableSlots != nil { + dr := int(*request.DurableSlots) + opts.DurableSlots = &dr + } + + if len(request.SlotConfig) > 0 { + opts.SlotConfig = request.SlotConfig } if apiErrors, err := s.v.ValidateAPI(opts); err != nil { diff --git a/internal/services/dispatcher/server_v1.go b/internal/services/dispatcher/server_v1.go index 510fd29937..d894be2174 100644 --- a/internal/services/dispatcher/server_v1.go +++ b/internal/services/dispatcher/server_v1.go @@ -518,9 +518,9 @@ func (s *DispatcherImpl) taskEventsToWorkflowRunEvent(tenantId uuid.UUID, finali for _, event := range wr.OutputEvents { res := &contracts.StepRunResult{ - StepRunId: event.TaskExternalId.String(), - StepReadableId: event.StepReadableID, - JobRunId: event.TaskExternalId.String(), + TaskRunExternalId: event.TaskExternalId.String(), + TaskName: event.StepReadableID, + JobRunId: event.TaskExternalId.String(), } switch event.EventType { @@ -554,16 +554,16 @@ func (s *DispatcherImpl) sendStepActionEventV1(ctx context.Context, request *con // if there's no retry count, we need to read it from the task, so we can't skip the cache skipCache := request.RetryCount == nil - stepRunId, err := uuid.Parse(request.StepRunId) + taskExternalId, err := uuid.Parse(request.TaskRunExternalId) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid step run id %s: %v", request.StepRunId, err) + return nil, status.Errorf(codes.InvalidArgument, "invalid task external run id %s: %v", request.TaskRunExternalId, err) } - task, err := s.getSingleTask(ctx, tenant.ID, stepRunId, skipCache) + task, err := s.getSingleTask(ctx, tenant.ID, taskExternalId, skipCache) if err != nil { - return nil, fmt.Errorf("could not get task %s: %w", request.StepRunId, err) + return nil, status.Errorf(codes.NotFound, "could not get task %s: %v", request.TaskRunExternalId, err) } retryCount := task.RetryCount @@ -597,7 +597,7 @@ func (s *DispatcherImpl)
sendStepActionEventV1(ctx context.Context, request *con return s.handleTaskFailed(ctx, task, retryCount, request) } - return nil, status.Errorf(codes.InvalidArgument, "invalid step run id %s", request.StepRunId) + return nil, status.Errorf(codes.InvalidArgument, "invalid task external run id %s", request.TaskRunExternalId) } func (s *DispatcherImpl) handleTaskStarted(inputCtx context.Context, task *sqlcv1.FlattenExternalIdsRow, retryCount int32, request *contracts.StepActionEvent) (*contracts.ActionEventResponse, error) { @@ -726,9 +726,9 @@ func (d *DispatcherImpl) getSingleTask(ctx context.Context, tenantId, taskExtern func (d *DispatcherImpl) refreshTimeoutV1(ctx context.Context, tenant *sqlcv1.Tenant, request *contracts.RefreshTimeoutRequest) (*contracts.RefreshTimeoutResponse, error) { tenantId := tenant.ID - taskExternalId, err := uuid.Parse(request.StepRunId) + taskExternalId, err := uuid.Parse(request.TaskRunExternalId) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid step run id %s: %v", request.StepRunId, err) + return nil, status.Errorf(codes.InvalidArgument, "invalid task external run id %s: %v", request.TaskRunExternalId, err) } opts := v1.RefreshTimeoutBy{ @@ -780,9 +780,9 @@ func (d *DispatcherImpl) refreshTimeoutV1(ctx context.Context, tenant *sqlcv1.Te func (d *DispatcherImpl) releaseSlotV1(ctx context.Context, tenant *sqlcv1.Tenant, request *contracts.ReleaseSlotRequest) (*contracts.ReleaseSlotResponse, error) { tenantId := tenant.ID - stepRunId, err := uuid.Parse(request.StepRunId) + stepRunId, err := uuid.Parse(request.TaskRunExternalId) if err != nil { - return nil, status.Errorf(codes.InvalidArgument, "invalid step run id %s: %v", request.StepRunId, err) + return nil, status.Errorf(codes.InvalidArgument, "invalid task external run id %s: %v", request.TaskRunExternalId, err) } releasedSlot, err := d.repov1.Tasks().ReleaseSlot(ctx, tenantId, stepRunId) @@ -1278,7 +1278,7 @@ func (s *DispatcherImpl) msgsToWorkflowEvent(msgId string, payloads [][]byte, fi workflowEvents = append(workflowEvents, &contracts.WorkflowEvent{ WorkflowRunId: payload.WorkflowRunId.String(), ResourceType: contracts.ResourceType_RESOURCE_TYPE_STEP_RUN, - ResourceId: payload.StepRunId.String(), + ResourceId: payload.TaskRunId.String(), EventType: contracts.ResourceEventType_RESOURCE_EVENT_TYPE_STREAM, EventTimestamp: timestamppb.New(payload.CreatedAt), EventPayload: string(payload.Payload), diff --git a/internal/services/dispatcher/subscribed_worker_v1.go b/internal/services/dispatcher/subscribed_worker_v1.go index 34911c13a7..f2768cc843 100644 --- a/internal/services/dispatcher/subscribed_worker_v1.go +++ b/internal/services/dispatcher/subscribed_worker_v1.go @@ -251,10 +251,10 @@ func populateAssignedAction(tenantID uuid.UUID, task *sqlcv1.V1Task, retryCount JobId: task.StepID.String(), // FIXME JobName: task.StepReadableID, JobRunId: task.ExternalID.String(), // FIXME - StepId: task.StepID.String(), - StepRunId: task.ExternalID.String(), + TaskId: task.StepID.String(), + TaskRunExternalId: task.ExternalID.String(), ActionId: task.ActionID, - StepName: task.StepReadableID, + TaskName: task.StepReadableID, WorkflowRunId: task.WorkflowRunID.String(), RetryCount: retryCount, Priority: task.Priority.Int32, diff --git a/internal/services/ingestor/contracts/events.pb.go b/internal/services/ingestor/contracts/events.pb.go index 7a7927aec9..b1e0063a8f 100644 --- a/internal/services/ingestor/contracts/events.pb.go +++ b/internal/services/ingestor/contracts/events.pb.go @@ -27,17 
+27,17 @@ type Event struct { unknownFields protoimpl.UnknownFields // the tenant id - TenantId string `protobuf:"bytes,1,opt,name=tenantId,proto3" json:"tenantId,omitempty"` + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` // the id of the event - EventId string `protobuf:"bytes,2,opt,name=eventId,proto3" json:"eventId,omitempty"` + EventId string `protobuf:"bytes,2,opt,name=event_id,json=eventId,proto3" json:"event_id,omitempty"` // the key for the event Key string `protobuf:"bytes,3,opt,name=key,proto3" json:"key,omitempty"` // the payload for the event Payload string `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` // when the event was generated - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` // the additional metadata for the event - AdditionalMetadata *string `protobuf:"bytes,6,opt,name=additionalMetadata,proto3,oneof" json:"additionalMetadata,omitempty"` + AdditionalMetadata *string `protobuf:"bytes,6,opt,name=additional_metadata,json=additionalMetadata,proto3,oneof" json:"additional_metadata,omitempty"` // the scope associated with this filter. Used for subsetting candidate filters at evaluation time Scope *string `protobuf:"bytes,7,opt,name=scope,proto3,oneof" json:"scope,omitempty"` } @@ -175,10 +175,10 @@ type PutLogRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // the step run id for the request - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + // the task external run id for the request + TaskRunExternalId string `protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` // when the log line was created - CreatedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=createdAt,proto3" json:"createdAt,omitempty"` + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` // the log line message Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` // the log line level @@ -186,7 +186,7 @@ type PutLogRequest struct { // associated log line metadata Metadata string `protobuf:"bytes,5,opt,name=metadata,proto3" json:"metadata,omitempty"` // the retry count of the task run - TaskRetryCount *int32 `protobuf:"varint,6,opt,name=taskRetryCount,proto3,oneof" json:"taskRetryCount,omitempty"` + TaskRetryCount *int32 `protobuf:"varint,6,opt,name=task_retry_count,json=taskRetryCount,proto3,oneof" json:"task_retry_count,omitempty"` } func (x *PutLogRequest) Reset() { @@ -221,9 +221,9 @@ func (*PutLogRequest) Descriptor() ([]byte, []int) { return file_events_proto_rawDescGZIP(), []int{2} } -func (x *PutLogRequest) GetStepRunId() string { +func (x *PutLogRequest) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -306,15 +306,15 @@ type PutStreamEventRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // the step run id for the request - StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + // the task external run id for the request + TaskRunExternalId string 
`protobuf:"bytes,1,opt,name=task_run_external_id,json=taskRunExternalId,proto3" json:"task_run_external_id,omitempty"` // when the stream event was created - CreatedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=createdAt,proto3" json:"createdAt,omitempty"` + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` // the stream event message Message []byte `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` // associated stream event metadata Metadata string `protobuf:"bytes,5,opt,name=metadata,proto3" json:"metadata,omitempty"` - EventIndex *int64 `protobuf:"varint,6,opt,name=eventIndex,proto3,oneof" json:"eventIndex,omitempty"` + EventIndex *int64 `protobuf:"varint,6,opt,name=event_index,json=eventIndex,proto3,oneof" json:"event_index,omitempty"` } func (x *PutStreamEventRequest) Reset() { @@ -349,9 +349,9 @@ func (*PutStreamEventRequest) Descriptor() ([]byte, []int) { return file_events_proto_rawDescGZIP(), []int{4} } -func (x *PutStreamEventRequest) GetStepRunId() string { +func (x *PutStreamEventRequest) GetTaskRunExternalId() string { if x != nil { - return x.StepRunId + return x.TaskRunExternalId } return "" } @@ -479,9 +479,9 @@ type PushEventRequest struct { // the payload for the event Payload string `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` // when the event was generated - EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` + EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` // metadata for the event - AdditionalMetadata *string `protobuf:"bytes,4,opt,name=additionalMetadata,proto3,oneof" json:"additionalMetadata,omitempty"` + AdditionalMetadata *string `protobuf:"bytes,4,opt,name=additional_metadata,json=additionalMetadata,proto3,oneof" json:"additional_metadata,omitempty"` Priority *int32 `protobuf:"varint,5,opt,name=priority,proto3,oneof" json:"priority,omitempty"` // the scope associated with this filter. 
Used for subsetting candidate filters at evaluation time Scope *string `protobuf:"bytes,6,opt,name=scope,proto3,oneof" json:"scope,omitempty"` @@ -567,7 +567,7 @@ type ReplayEventRequest struct { unknownFields protoimpl.UnknownFields // the event id to replay - EventId string `protobuf:"bytes,1,opt,name=eventId,proto3" json:"eventId,omitempty"` + EventId string `protobuf:"bytes,1,opt,name=event_id,json=eventId,proto3" json:"event_id,omitempty"` } func (x *ReplayEventRequest) Reset() { @@ -615,107 +615,110 @@ var file_events_proto_rawDesc = []byte{ 0x0a, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0x9e, 0x02, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, - 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, - 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, - 0x33, 0x0a, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x07, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, - 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, - 0x22, 0x28, 0x0a, 0x06, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x06, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x82, 0x02, 0x0a, 0x0d, 0x50, - 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, - 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0xa3, 0x02, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x65, 0x6e, + 0x61, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, + 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x19, 0x0a, 0x08, 
0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, + 0x64, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x43, 0x0a, + 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, + 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, + 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x08, 0x0a, 0x06, 0x5f, + 0x73, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x28, 0x0a, 0x06, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, + 0x1e, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, + 0x9a, 0x02, 0x0a, 0x0d, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x49, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x88, + 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2d, + 0x0a, 0x10, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0e, 0x74, 0x61, 0x73, 0x6b, + 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, + 0x06, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x74, 0x61, 0x73, 0x6b, + 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x10, 
0x0a, 0x0e, + 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xef, + 0x01, 0x0a, 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x14, 0x74, 0x61, 0x73, 0x6b, + 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x45, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x19, - 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, - 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2b, 0x0a, 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, - 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, - 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, - 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x42, 0x11, 0x0a, 0x0f, - 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, - 0x10, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0xd9, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, - 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x0a, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, - 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x0d, - 0x0a, 0x0b, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x18, 0x0a, - 0x16, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x14, 0x42, 0x75, 0x6c, 0x6b, 0x50, - 0x75, 0x73, 0x68, 
0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x29, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0b, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x48, + 0x00, 0x52, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, + 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x22, 0x18, 0x0a, 0x16, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x14, 0x42, 0x75, + 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x29, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xa4, 0x02, + 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x43, + 0x0a, 0x0f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x12, 0x34, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, + 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, + 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x70, + 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, + 0x6f, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, + 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, + 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, + 0x63, 0x6f, 0x70, 0x65, 0x22, 0x2f, 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x88, 0x02, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 
0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x10, 0x50, - 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, - 0x33, 0x0a, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, - 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, - 0x74, 0x79, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01, - 0x42, 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f, - 0x72, 0x69, 0x74, 0x79, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x2e, - 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x88, - 0x02, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, - 0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x08, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, - 0x68, 0x12, 0x15, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x07, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x73, 0x22, 0x00, 0x12, 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e, - 0x67, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, - 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, 0x74, 0x4c, 0x6f, - 0x67, 0x12, 0x0e, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x0f, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x50, 0x75, 0x74, 
0x53, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, - 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, - 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x45, 0x5a, 0x43, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, - 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x69, 0x6e, - 0x67, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x08, + 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x12, 0x15, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x50, + 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x07, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x00, 0x12, 0x32, 0x0a, 0x11, 0x52, 0x65, + 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, + 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2b, + 0x0a, 0x06, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x0e, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x50, + 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x2e, + 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x42, 0x45, 0x5a, 0x43, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, + 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, + 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x73, 0x2f, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x2f, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -744,12 +747,12 @@ var file_events_proto_goTypes = []interface{}{ (*timestamppb.Timestamp)(nil), // 9: google.protobuf.Timestamp } var file_events_proto_depIdxs = []int32{ - 9, // 0: Event.eventTimestamp:type_name -> google.protobuf.Timestamp + 9, // 0: Event.event_timestamp:type_name -> google.protobuf.Timestamp 0, // 1: Events.events:type_name -> Event - 9, // 2: PutLogRequest.createdAt:type_name -> google.protobuf.Timestamp - 9, // 3: PutStreamEventRequest.createdAt:type_name -> google.protobuf.Timestamp + 9, // 2: PutLogRequest.created_at:type_name -> google.protobuf.Timestamp + 9, // 3: PutStreamEventRequest.created_at:type_name -> google.protobuf.Timestamp 7, // 4: BulkPushEventRequest.events:type_name -> PushEventRequest - 9, // 5: PushEventRequest.eventTimestamp:type_name -> google.protobuf.Timestamp + 
 	7, // 6: EventsService.Push:input_type -> PushEventRequest
 	6, // 7: EventsService.BulkPush:input_type -> BulkPushEventRequest
 	8, // 8: EventsService.ReplaySingleEvent:input_type -> ReplayEventRequest
diff --git a/internal/services/ingestor/server_v1.go b/internal/services/ingestor/server_v1.go
index 5e160d9e7f..14e1558951 100644
--- a/internal/services/ingestor/server_v1.go
+++ b/internal/services/ingestor/server_v1.go
@@ -19,14 +19,14 @@ import (
 func (i *IngestorImpl) putStreamEventV1(ctx context.Context, tenant *sqlcv1.Tenant, req *contracts.PutStreamEventRequest) (*contracts.PutStreamEventResponse, error) {
 	tenantId := tenant.ID

-	stepRunId, err := uuid.Parse(req.StepRunId)
+	taskExternalId, err := uuid.Parse(req.TaskRunExternalId)

 	if err != nil {
-		return nil, status.Error(codes.InvalidArgument, "step run id is not a valid uuid")
+		return nil, status.Error(codes.InvalidArgument, "task external run id is not a valid uuid")
 	}

 	// get single task
-	task, err := i.getSingleTask(ctx, tenantId, stepRunId, false)
+	task, err := i.getSingleTask(ctx, tenantId, taskExternalId, false)

 	if err != nil {
 		return nil, err
@@ -39,7 +39,7 @@ func (i *IngestorImpl) putStreamEventV1(ctx context.Context, tenant *sqlcv1.Tena
 		false,
 		tasktypes.StreamEventPayload{
 			WorkflowRunId: task.WorkflowRunID,
-			StepRunId:     stepRunId,
+			TaskRunId:     taskExternalId,
 			CreatedAt:     req.CreatedAt.AsTime(),
 			Payload:       req.Message,
 			EventIndex:    req.EventIndex,
@@ -67,17 +67,17 @@ func (i *IngestorImpl) getSingleTask(ctx context.Context, tenantId, taskExternal
 func (i *IngestorImpl) putLogV1(ctx context.Context, tenant *sqlcv1.Tenant, req *contracts.PutLogRequest) (*contracts.PutLogResponse, error) {
 	tenantId := tenant.ID

-	stepRunId, err := uuid.Parse(req.StepRunId)
+	taskExternalId, err := uuid.Parse(req.TaskRunExternalId)

 	if err != nil {
-		return nil, status.Error(codes.InvalidArgument, "step run id is not a valid uuid")
+		return nil, status.Error(codes.InvalidArgument, "task external run id is not a valid uuid")
 	}

 	if !i.isLogIngestionEnabled {
 		return &contracts.PutLogResponse{}, nil
 	}

-	task, err := i.getSingleTask(ctx, tenantId, stepRunId, false)
+	task, err := i.getSingleTask(ctx, tenantId, taskExternalId, false)

 	if err != nil {
 		return nil, err
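Both handlers above follow the same validate-then-dispatch shape: parse the caller-supplied external id as a UUID, and turn a parse failure into a typed gRPC InvalidArgument status before touching storage, so clients see a well-formed status instead of codes.Unknown. A minimal standalone sketch of that pattern, using the real github.com/google/uuid and google.golang.org/grpc/status packages (the request type here is a stand-in for the generated contract, not the real one):

package main

import (
	"fmt"

	"github.com/google/uuid"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// stand-in for the generated request; only the validated field is shown
type putLogRequest struct {
	TaskRunExternalId string
}

// parseTaskRunId mirrors the guard in putLogV1/putStreamEventV1: malformed
// ids are rejected with codes.InvalidArgument rather than a bare error.
func parseTaskRunId(req *putLogRequest) (uuid.UUID, error) {
	id, err := uuid.Parse(req.TaskRunExternalId)
	if err != nil {
		return uuid.Nil, status.Error(codes.InvalidArgument, "task external run id is not a valid uuid")
	}
	return id, nil
}

func main() {
	_, err := parseTaskRunId(&putLogRequest{TaskRunExternalId: "not-a-uuid"})
	fmt.Println(status.Code(err)) // InvalidArgument
}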
json:"filter,omitempty"` } @@ -713,7 +713,7 @@ type CreateWorkflowVersionRequest struct { Concurrency *Concurrency `protobuf:"bytes,7,opt,name=concurrency,proto3" json:"concurrency,omitempty"` // (optional) the workflow concurrency options CronInput *string `protobuf:"bytes,8,opt,name=cron_input,json=cronInput,proto3,oneof" json:"cron_input,omitempty"` // (optional) the input for the cron trigger OnFailureTask *CreateTaskOpts `protobuf:"bytes,9,opt,name=on_failure_task,json=onFailureTask,proto3,oneof" json:"on_failure_task,omitempty"` // (optional) the job to run on failure - Sticky *StickyStrategy `protobuf:"varint,10,opt,name=sticky,proto3,enum=v1.StickyStrategy,oneof" json:"sticky,omitempty"` // (optional) the sticky strategy for assigning steps to workers + Sticky *StickyStrategy `protobuf:"varint,10,opt,name=sticky,proto3,enum=v1.StickyStrategy,oneof" json:"sticky,omitempty"` // (optional) the sticky strategy for assigning tasks to workers DefaultPriority *int32 `protobuf:"varint,11,opt,name=default_priority,json=defaultPriority,proto3,oneof" json:"default_priority,omitempty"` // (optional) the default priority for the workflow ConcurrencyArr []*Concurrency `protobuf:"bytes,12,rep,name=concurrency_arr,json=concurrencyArr,proto3" json:"concurrency_arr,omitempty"` // (optional) the workflow concurrency options DefaultFilters []*DefaultFilter `protobuf:"bytes,13,rep,name=default_filters,json=defaultFilters,proto3" json:"default_filters,omitempty"` // (optional) the default filters for the workflow @@ -982,8 +982,8 @@ type DesiredWorkerLabels struct { unknownFields protoimpl.UnknownFields // value of the affinity - StrValue *string `protobuf:"bytes,1,opt,name=strValue,proto3,oneof" json:"strValue,omitempty"` - IntValue *int32 `protobuf:"varint,2,opt,name=intValue,proto3,oneof" json:"intValue,omitempty"` + StrValue *string `protobuf:"bytes,1,opt,name=str_value,json=strValue,proto3,oneof" json:"str_value,omitempty"` + IntValue *int32 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof" json:"int_value,omitempty"` // * // (optional) Specifies whether the affinity setting is required. // If required, the worker will not accept actions that do not have a truthy affinity setting. @@ -1073,19 +1073,21 @@ type CreateTaskOpts struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ReadableId string `protobuf:"bytes,1,opt,name=readable_id,json=readableId,proto3" json:"readable_id,omitempty"` // (required) the task name - Action string `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` // (required) the task action id - Timeout string `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` // (optional) the task timeout - Inputs string `protobuf:"bytes,4,opt,name=inputs,proto3" json:"inputs,omitempty"` // (optional) the task inputs, assuming string representation of JSON - Parents []string `protobuf:"bytes,5,rep,name=parents,proto3" json:"parents,omitempty"` // (optional) the task parents. 
@@ -1073,19 +1073,21 @@ type CreateTaskOpts struct {
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields

-	ReadableId        string                          `protobuf:"bytes,1,opt,name=readable_id,json=readableId,proto3" json:"readable_id,omitempty"` // (required) the task name
-	Action            string                          `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` // (required) the task action id
-	Timeout           string                          `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` // (optional) the task timeout
-	Inputs            string                          `protobuf:"bytes,4,opt,name=inputs,proto3" json:"inputs,omitempty"` // (optional) the task inputs, assuming string representation of JSON
-	Parents           []string                        `protobuf:"bytes,5,rep,name=parents,proto3" json:"parents,omitempty"` // (optional) the task parents. if none are passed in, this is a root task
-	Retries           int32                           `protobuf:"varint,6,opt,name=retries,proto3" json:"retries,omitempty"` // (optional) the number of retries for the step, default 0
-	RateLimits        []*CreateTaskRateLimit          `protobuf:"bytes,7,rep,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` // (optional) the rate limits for the step
-	WorkerLabels      map[string]*DesiredWorkerLabels `protobuf:"bytes,8,rep,name=worker_labels,json=workerLabels,proto3" json:"worker_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (optional) the desired worker affinity state for the step
-	BackoffFactor     *float32                        `protobuf:"fixed32,9,opt,name=backoff_factor,json=backoffFactor,proto3,oneof" json:"backoff_factor,omitempty"` // (optional) the retry backoff factor for the step
-	BackoffMaxSeconds *int32                          `protobuf:"varint,10,opt,name=backoff_max_seconds,json=backoffMaxSeconds,proto3,oneof" json:"backoff_max_seconds,omitempty"` // (optional) the maximum backoff time for the step
-	Concurrency       []*Concurrency                  `protobuf:"bytes,11,rep,name=concurrency,proto3" json:"concurrency,omitempty"` // (optional) the task concurrency options
-	Conditions        *TaskConditions                 `protobuf:"bytes,12,opt,name=conditions,proto3,oneof" json:"conditions,omitempty"` // (optional) the task conditions for creating the task
-	ScheduleTimeout   *string                         `protobuf:"bytes,13,opt,name=schedule_timeout,json=scheduleTimeout,proto3,oneof" json:"schedule_timeout,omitempty"` // (optional) the timeout for the schedule
+	ReadableId        string                          `protobuf:"bytes,1,opt,name=readable_id,json=readableId,proto3" json:"readable_id,omitempty"` // (required) the task name
+	Action            string                          `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` // (required) the task action id
+	Timeout           string                          `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` // (optional) the task timeout
+	Inputs            string                          `protobuf:"bytes,4,opt,name=inputs,proto3" json:"inputs,omitempty"` // (optional) the task inputs, assuming string representation of JSON
+	Parents           []string                        `protobuf:"bytes,5,rep,name=parents,proto3" json:"parents,omitempty"` // (optional) the task parents. if none are passed in, this is a root task
+	Retries           int32                           `protobuf:"varint,6,opt,name=retries,proto3" json:"retries,omitempty"` // (optional) the number of retries for the task, default 0
+	RateLimits        []*CreateTaskRateLimit          `protobuf:"bytes,7,rep,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` // (optional) the rate limits for the task
+	WorkerLabels      map[string]*DesiredWorkerLabels `protobuf:"bytes,8,rep,name=worker_labels,json=workerLabels,proto3" json:"worker_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // (optional) the desired worker affinity state for the task
+	BackoffFactor     *float32                        `protobuf:"fixed32,9,opt,name=backoff_factor,json=backoffFactor,proto3,oneof" json:"backoff_factor,omitempty"` // (optional) the retry backoff factor for the task
+	BackoffMaxSeconds *int32                          `protobuf:"varint,10,opt,name=backoff_max_seconds,json=backoffMaxSeconds,proto3,oneof" json:"backoff_max_seconds,omitempty"` // (optional) the maximum backoff time for the task
+	Concurrency       []*Concurrency                  `protobuf:"bytes,11,rep,name=concurrency,proto3" json:"concurrency,omitempty"` // (optional) the task concurrency options
+	Conditions        *TaskConditions                 `protobuf:"bytes,12,opt,name=conditions,proto3,oneof" json:"conditions,omitempty"` // (optional) the task conditions for creating the task
+	ScheduleTimeout   *string                         `protobuf:"bytes,13,opt,name=schedule_timeout,json=scheduleTimeout,proto3,oneof" json:"schedule_timeout,omitempty"` // (optional) the timeout for the schedule
+	IsDurable         bool                            `protobuf:"varint,14,opt,name=is_durable,json=isDurable,proto3" json:"is_durable,omitempty"` // (optional) whether the task is durable
+	SlotRequests      map[string]int32                `protobuf:"bytes,15,rep,name=slot_requests,json=slotRequests,proto3" json:"slot_requests,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` // (optional) slot requests (slot_type -> units)
 }

 func (x *CreateTaskOpts) Reset() {
@@ -1211,13 +1213,27 @@ func (x *CreateTaskOpts) GetScheduleTimeout() string {
 	return ""
 }

+func (x *CreateTaskOpts) GetIsDurable() bool {
+	if x != nil {
+		return x.IsDurable
+	}
+	return false
+}
+
+func (x *CreateTaskOpts) GetSlotRequests() map[string]int32 {
+	if x != nil {
+		return x.SlotRequests
+	}
+	return nil
+}
+
 type CreateTaskRateLimit struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields

 	Key             string  `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` // (required) the key for the rate limit
-	Units           *int32  `protobuf:"varint,2,opt,name=units,proto3,oneof" json:"units,omitempty"` // (optional) the number of units this step consumes
+	Units           *int32  `protobuf:"varint,2,opt,name=units,proto3,oneof" json:"units,omitempty"` // (optional) the number of units this task consumes
 	KeyExpr         *string `protobuf:"bytes,3,opt,name=key_expr,json=keyExpr,proto3,oneof" json:"key_expr,omitempty"` // (optional) a CEL expression for determining the rate limit key
 	UnitsExpr       *string `protobuf:"bytes,4,opt,name=units_expr,json=unitsExpr,proto3,oneof" json:"units_expr,omitempty"` // (optional) a CEL expression for determining the number of units consumed
 	LimitValuesExpr *string `protobuf:"bytes,5,opt,name=limit_values_expr,json=limitValuesExpr,proto3,oneof" json:"limit_values_expr,omitempty"` // (optional) a CEL expression for determining the total amount of rate limit units
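The two new CreateTaskOpts fields let a task declare that it must run on a worker's durable slot pool and reserve typed slot units (slot_type -> units). A minimal sketch constructing the options and exercising the new nil-safe getters, again assuming the in-repo import path; the "gpu" slot type and the action id are invented placeholders, not names this diff defines:

package main

import (
	"fmt"

	v1 "github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1"
)

func main() {
	opts := &v1.CreateTaskOpts{
		ReadableId: "transcode",       // (required) task name
		Action:     "video:transcode", // (required) action id (placeholder)
		IsDurable:  true,              // schedule onto the worker's durable slots
		SlotRequests: map[string]int32{ // slot_type -> units; "gpu" is a made-up type
			"gpu": 2,
		},
	}

	// the generated getters follow the same nil-safe pattern as the
	// existing fields (false / nil on a nil receiver)
	fmt.Println(opts.GetIsDurable(), opts.GetSlotRequests()["gpu"]) // true 2
}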
@@ -1567,306 +1583,317 @@ var file_v1_workflows_proto_rawDesc = []byte{
 	0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74,
 	0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x76, 0x31, 0x2f, 0x73, 0x68,
 	0x61, 0x72, 0x65, 0x64, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70,
-	(… old serialized descriptor bytes elided: CancelTasksRequest/ReplayTasksRequest with camelCase externalIds, TasksFilter, the Trigger/CreateWorkflowVersion messages, DefaultFilter, Concurrency, DesiredWorkerLabels with strValue/intValue, the 13-field CreateTaskOpts and its WorkerLabelsEntry map, CreateTaskRateLimit, GetRunDetails{Request,Response}, TaskRunDetail, the StickyStrategy/RateLimitDuration/RunStatus/ConcurrencyLimitStrategy/WorkerLabelComparator enums, and the AdminService methods PutWorkflow, CancelTasks, ReplayTasks, and TriggerWorkflowRun …)
+	(… regenerated descriptor bytes elided: the same messages re-encoded with the snake_case field names (external_ids, str_value, int_value, …), with CreateTaskOpts grown to carry is_durable and the slot_requests map and its new SlotRequestsEntry nested type …)
+	0x48, 0x01, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x61,
0x64, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x72, 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, + 0xaf, 0x02, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x12, + 0x25, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x0d, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x75, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, + 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x44, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x72, + 0x75, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x76, 0x31, 0x2e, 0x47, + 0x65, 0x74, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, + 0x64, 0x6f, 0x6e, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x64, 0x6f, 0x6e, 0x65, + 0x12, 0x2f, 0x0a, 0x13, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x12, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x1a, 0x4e, 0x0a, 0x0d, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x75, 0x6e, + 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x2a, 0x24, 0x0a, 0x0e, 0x53, 0x74, 0x69, 0x63, 0x6b, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, + 0x65, 0x67, 0x79, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x4f, 0x46, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, + 0x04, 0x48, 0x41, 0x52, 0x44, 0x10, 0x01, 0x2a, 0x5d, 0x0a, 0x11, 0x52, 0x61, 0x74, 0x65, 0x4c, + 0x69, 0x6d, 0x69, 0x74, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0a, 0x0a, 0x06, + 0x53, 0x45, 0x43, 0x4f, 0x4e, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x49, 0x4e, 0x55, + 0x54, 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x4f, 0x55, 0x52, 0x10, 0x02, 0x12, 0x07, + 0x0a, 0x03, 0x44, 0x41, 0x59, 0x10, 0x03, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x45, 0x45, 0x4b, 0x10, + 0x04, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x4f, 0x4e, 0x54, 0x48, 0x10, 0x05, 0x12, 0x08, 0x0a, 0x04, + 0x59, 0x45, 0x41, 0x52, 0x10, 0x06, 0x2a, 0x4e, 0x0a, 0x09, 0x52, 0x75, 0x6e, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x00, 0x12, + 0x0b, 0x0a, 0x07, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0d, 0x0a, 0x09, + 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x46, + 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x41, 0x4e, 0x43, 0x45, + 0x4c, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x7f, 0x0a, 0x18, 0x43, 0x6f, 0x6e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x63, 
0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, + 0x67, 0x79, 0x12, 0x16, 0x0a, 0x12, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x49, 0x4e, 0x5f, + 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x52, + 0x4f, 0x50, 0x5f, 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x51, + 0x55, 0x45, 0x55, 0x45, 0x5f, 0x4e, 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x02, 0x12, 0x15, 0x0a, + 0x11, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x52, 0x4f, 0x55, 0x4e, 0x44, 0x5f, 0x52, 0x4f, 0x42, + 0x49, 0x4e, 0x10, 0x03, 0x12, 0x11, 0x0a, 0x0d, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x4e, + 0x45, 0x57, 0x45, 0x53, 0x54, 0x10, 0x04, 0x2a, 0x85, 0x01, 0x0a, 0x15, 0x57, 0x6f, 0x72, 0x6b, + 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, + 0x72, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, + 0x4e, 0x4f, 0x54, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x47, + 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x02, 0x12, 0x19, 0x0a, + 0x15, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x4f, 0x52, + 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x4c, 0x45, 0x53, 0x53, + 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x04, 0x12, 0x16, 0x0a, 0x12, 0x4c, 0x45, 0x53, 0x53, 0x5f, + 0x54, 0x48, 0x41, 0x4e, 0x5f, 0x4f, 0x52, 0x5f, 0x45, 0x51, 0x55, 0x41, 0x4c, 0x10, 0x05, 0x32, + 0xfd, 0x02, 0x0a, 0x0c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x12, 0x52, 0x0a, 0x0b, 0x50, 0x75, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, + 0x20, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x21, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x54, 0x61, + 0x73, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x54, + 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x76, 0x31, + 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x54, 0x61, + 0x73, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x54, + 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x76, 0x31, + 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x12, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x12, 0x1d, 0x2e, 0x76, 0x31, 0x2e, + 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, + 0x75, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, - 0x69, 0x67, 0x67, 0x65, 0x72, 0x57, 0x6f, 0x72, 
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x52, - 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x18, 0x2e, 0x76, 0x31, 0x2e, 0x47, - 0x65, 0x74, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x44, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x42, - 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, - 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, - 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, - 0x65, 0x73, 0x2f, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, - 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, + 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x18, 0x2e, 0x76, 0x31, 0x2e, + 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x75, 0x6e, + 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, + 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, + 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, + 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x73, 0x2f, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1882,7 +1909,7 @@ func file_v1_workflows_proto_rawDescGZIP() []byte { } var file_v1_workflows_proto_enumTypes = make([]protoimpl.EnumInfo, 5) -var file_v1_workflows_proto_msgTypes = make([]protoimpl.MessageInfo, 19) +var file_v1_workflows_proto_msgTypes = make([]protoimpl.MessageInfo, 20) var file_v1_workflows_proto_goTypes = []interface{}{ (StickyStrategy)(0), // 0: v1.StickyStrategy (RateLimitDuration)(0), // 1: v1.RateLimitDuration @@ -1907,15 +1934,16 @@ var file_v1_workflows_proto_goTypes = []interface{}{ (*TaskRunDetail)(nil), // 20: v1.TaskRunDetail (*GetRunDetailsResponse)(nil), // 21: v1.GetRunDetailsResponse nil, // 22: v1.CreateTaskOpts.WorkerLabelsEntry - nil, // 23: v1.GetRunDetailsResponse.TaskRunsEntry - (*timestamppb.Timestamp)(nil), // 24: google.protobuf.Timestamp - (*TaskConditions)(nil), // 25: v1.TaskConditions + nil, // 23: v1.CreateTaskOpts.SlotRequestsEntry + nil, // 24: v1.GetRunDetailsResponse.TaskRunsEntry + (*timestamppb.Timestamp)(nil), // 25: google.protobuf.Timestamp + (*TaskConditions)(nil), // 26: v1.TaskConditions } var file_v1_workflows_proto_depIdxs = []int32{ 7, // 0: v1.CancelTasksRequest.filter:type_name -> v1.TasksFilter 7, // 1: v1.ReplayTasksRequest.filter:type_name -> v1.TasksFilter - 24, // 2: v1.TasksFilter.since:type_name -> google.protobuf.Timestamp - 24, // 3: v1.TasksFilter.until:type_name -> google.protobuf.Timestamp + 25, // 2: v1.TasksFilter.since:type_name -> google.protobuf.Timestamp + 25, // 3: v1.TasksFilter.until:type_name -> google.protobuf.Timestamp 16, // 4: v1.CreateWorkflowVersionRequest.tasks:type_name -> 
v1.CreateTaskOpts 14, // 5: v1.CreateWorkflowVersionRequest.concurrency:type_name -> v1.Concurrency 16, // 6: v1.CreateWorkflowVersionRequest.on_failure_task:type_name -> v1.CreateTaskOpts @@ -1927,28 +1955,29 @@ var file_v1_workflows_proto_depIdxs = []int32{ 17, // 12: v1.CreateTaskOpts.rate_limits:type_name -> v1.CreateTaskRateLimit 22, // 13: v1.CreateTaskOpts.worker_labels:type_name -> v1.CreateTaskOpts.WorkerLabelsEntry 14, // 14: v1.CreateTaskOpts.concurrency:type_name -> v1.Concurrency - 25, // 15: v1.CreateTaskOpts.conditions:type_name -> v1.TaskConditions - 1, // 16: v1.CreateTaskRateLimit.duration:type_name -> v1.RateLimitDuration - 2, // 17: v1.TaskRunDetail.status:type_name -> v1.RunStatus - 2, // 18: v1.GetRunDetailsResponse.status:type_name -> v1.RunStatus - 23, // 19: v1.GetRunDetailsResponse.task_runs:type_name -> v1.GetRunDetailsResponse.TaskRunsEntry - 15, // 20: v1.CreateTaskOpts.WorkerLabelsEntry.value:type_name -> v1.DesiredWorkerLabels - 20, // 21: v1.GetRunDetailsResponse.TaskRunsEntry.value:type_name -> v1.TaskRunDetail - 12, // 22: v1.AdminService.PutWorkflow:input_type -> v1.CreateWorkflowVersionRequest - 5, // 23: v1.AdminService.CancelTasks:input_type -> v1.CancelTasksRequest - 6, // 24: v1.AdminService.ReplayTasks:input_type -> v1.ReplayTasksRequest - 10, // 25: v1.AdminService.TriggerWorkflowRun:input_type -> v1.TriggerWorkflowRunRequest - 19, // 26: v1.AdminService.GetRunDetails:input_type -> v1.GetRunDetailsRequest - 18, // 27: v1.AdminService.PutWorkflow:output_type -> v1.CreateWorkflowVersionResponse - 8, // 28: v1.AdminService.CancelTasks:output_type -> v1.CancelTasksResponse - 9, // 29: v1.AdminService.ReplayTasks:output_type -> v1.ReplayTasksResponse - 11, // 30: v1.AdminService.TriggerWorkflowRun:output_type -> v1.TriggerWorkflowRunResponse - 21, // 31: v1.AdminService.GetRunDetails:output_type -> v1.GetRunDetailsResponse - 27, // [27:32] is the sub-list for method output_type - 22, // [22:27] is the sub-list for method input_type - 22, // [22:22] is the sub-list for extension type_name - 22, // [22:22] is the sub-list for extension extendee - 0, // [0:22] is the sub-list for field type_name + 26, // 15: v1.CreateTaskOpts.conditions:type_name -> v1.TaskConditions + 23, // 16: v1.CreateTaskOpts.slot_requests:type_name -> v1.CreateTaskOpts.SlotRequestsEntry + 1, // 17: v1.CreateTaskRateLimit.duration:type_name -> v1.RateLimitDuration + 2, // 18: v1.TaskRunDetail.status:type_name -> v1.RunStatus + 2, // 19: v1.GetRunDetailsResponse.status:type_name -> v1.RunStatus + 24, // 20: v1.GetRunDetailsResponse.task_runs:type_name -> v1.GetRunDetailsResponse.TaskRunsEntry + 15, // 21: v1.CreateTaskOpts.WorkerLabelsEntry.value:type_name -> v1.DesiredWorkerLabels + 20, // 22: v1.GetRunDetailsResponse.TaskRunsEntry.value:type_name -> v1.TaskRunDetail + 12, // 23: v1.AdminService.PutWorkflow:input_type -> v1.CreateWorkflowVersionRequest + 5, // 24: v1.AdminService.CancelTasks:input_type -> v1.CancelTasksRequest + 6, // 25: v1.AdminService.ReplayTasks:input_type -> v1.ReplayTasksRequest + 10, // 26: v1.AdminService.TriggerWorkflowRun:input_type -> v1.TriggerWorkflowRunRequest + 19, // 27: v1.AdminService.GetRunDetails:input_type -> v1.GetRunDetailsRequest + 18, // 28: v1.AdminService.PutWorkflow:output_type -> v1.CreateWorkflowVersionResponse + 8, // 29: v1.AdminService.CancelTasks:output_type -> v1.CancelTasksResponse + 9, // 30: v1.AdminService.ReplayTasks:output_type -> v1.ReplayTasksResponse + 11, // 31: v1.AdminService.TriggerWorkflowRun:output_type -> 
v1.TriggerWorkflowRunResponse + 21, // 32: v1.AdminService.GetRunDetails:output_type -> v1.GetRunDetailsResponse + 28, // [28:33] is the sub-list for method output_type + 23, // [23:28] is the sub-list for method input_type + 23, // [23:23] is the sub-list for extension type_name + 23, // [23:23] is the sub-list for extension extendee + 0, // [0:23] is the sub-list for field type_name } func init() { file_v1_workflows_proto_init() } @@ -2180,7 +2209,7 @@ func file_v1_workflows_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_v1_workflows_proto_rawDesc, NumEnums: 5, - NumMessages: 19, + NumMessages: 20, NumExtensions: 0, NumServices: 1, }, diff --git a/internal/services/shared/tasktypes/v1/event.go b/internal/services/shared/tasktypes/v1/event.go index 7cd2b2f767..589f742677 100644 --- a/internal/services/shared/tasktypes/v1/event.go +++ b/internal/services/shared/tasktypes/v1/event.go @@ -9,13 +9,13 @@ import ( ) type UserEventTaskPayload struct { - EventExternalId uuid.UUID `json:"event_id" validate:"required"` - EventKey string `json:"event_key" validate:"required"` - EventData []byte `json:"event_data" validate:"required"` - EventAdditionalMetadata []byte `json:"event_additional_metadata"` - EventPriority *int32 `json:"event_priority,omitempty"` - EventScope *string `json:"event_scope,omitempty"` - TriggeringWebhookName *string `json:"triggering_webhook_name,omitempty"` + EventExternalId uuid.UUID `json:"event_id" validate:"required"` + EventKey string `json:"event_key" validate:"required"` + EventData []byte `json:"event_data" validate:"required"` + EventAdditionalMetadata []byte `json:"event_additional_metadata"` + EventPriority *int32 `json:"event_priority,omitempty"` + EventScope *string `json:"event_scope,omitempty"` + TriggeringWebhookName *string `json:"triggering_webhook_name,omitempty"` // WasProcessedLocally indicates whether the event was written and tasks were triggered on the gRPC server // instead of the controller, so we can skip the triggering logic downstream @@ -34,7 +34,7 @@ func NewInternalEventMessage(tenantId uuid.UUID, timestamp time.Time, events ... 
type StreamEventPayload struct { WorkflowRunId uuid.UUID `json:"workflow_run_id" validate:"required"` - StepRunId uuid.UUID `json:"step_run_id" validate:"required"` + TaskRunId uuid.UUID `json:"task_run_id" validate:"required"` CreatedAt time.Time `json:"created_at" validate:"required"` Payload []byte `json:"payload"` RetryCount *int32 `json:"retry_count,omitempty"` diff --git a/pkg/client/admin.go b/pkg/client/admin.go index 36ac79eee0..1a5fa58682 100644 --- a/pkg/client/admin.go +++ b/pkg/client/admin.go @@ -24,7 +24,7 @@ import ( type ChildWorkflowOpts struct { ParentId string - ParentStepRunId string + ParentTaskRunId string ChildIndex int ChildKey *string DesiredWorkerId *string @@ -342,15 +342,15 @@ func (a *adminClientImpl) RunChildWorkflow(workflowName string, input interface{ metadata := string(metadataBytes) res, err := a.client.TriggerWorkflow(a.ctx.newContext(context.Background()), &admincontracts.TriggerWorkflowRequest{ - Name: workflowName, - Input: string(inputBytes), - ParentId: &opts.ParentId, - ParentStepRunId: &opts.ParentStepRunId, - ChildIndex: &childIndex, - ChildKey: opts.ChildKey, - DesiredWorkerId: opts.DesiredWorkerId, - AdditionalMetadata: &metadata, - Priority: opts.Priority, + Name: workflowName, + Input: string(inputBytes), + ParentId: &opts.ParentId, + ParentTaskRunExternalId: &opts.ParentTaskRunId, + ChildIndex: &childIndex, + ChildKey: opts.ChildKey, + DesiredWorkerId: opts.DesiredWorkerId, + AdditionalMetadata: &metadata, + Priority: opts.Priority, }) if err != nil { @@ -404,15 +404,15 @@ func (a *adminClientImpl) RunChildWorkflows(workflows []*RunChildWorkflowsOpts) metadata := string(metadataBytes) triggerWorkflowRequests[i] = &admincontracts.TriggerWorkflowRequest{ - Name: workflowName, - Input: string(inputBytes), - ParentId: &workflow.Opts.ParentId, - ParentStepRunId: &workflow.Opts.ParentStepRunId, - ChildIndex: &childIndex, - ChildKey: workflow.Opts.ChildKey, - DesiredWorkerId: workflow.Opts.DesiredWorkerId, - AdditionalMetadata: &metadata, - Priority: workflow.Opts.Priority, + Name: workflowName, + Input: string(inputBytes), + ParentId: &workflow.Opts.ParentId, + ParentTaskRunExternalId: &workflow.Opts.ParentTaskRunId, + ChildIndex: &childIndex, + ChildKey: workflow.Opts.ChildKey, + DesiredWorkerId: workflow.Opts.DesiredWorkerId, + AdditionalMetadata: &metadata, + Priority: workflow.Opts.Priority, } } diff --git a/pkg/client/dispatcher.go b/pkg/client/dispatcher.go index 73e82cb897..ca9d6965cf 100644 --- a/pkg/client/dispatcher.go +++ b/pkg/client/dispatcher.go @@ -42,12 +42,14 @@ const ( ) type GetActionListenerRequest struct { - WorkerName string - Services []string - Actions []string - MaxRuns *int - Labels map[string]interface{} - WebhookId *string + WorkerName string + Services []string + Actions []string + Slots *int + DurableSlots *int + SlotConfig map[string]int32 + Labels map[string]interface{} + WebhookId *string } // ActionPayload unmarshals the action payload into the target. It also validates the resulting target. 
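Note on the new slot fields in GetActionListenerRequest above: when a caller sets Slots or DurableSlots without an explicit SlotConfig, the client derives a slot_type -> units map using the "default" and "durable" slot types, as the newActionListener hunk below shows. A minimal, self-contained sketch of that fallback under those assumptions — resolveSlotConfig is an illustrative helper, not part of the client API:

package main

import "fmt"

// resolveSlotConfig folds the scalar Slots/DurableSlots values into a
// slot_type -> units map when no explicit SlotConfig is provided.
// Illustrative only; it mirrors the fallback in newActionListener below.
func resolveSlotConfig(slots, durableSlots *int, slotConfig map[string]int32) map[string]int32 {
	if slotConfig != nil {
		return slotConfig // an explicit config always wins
	}
	if slots == nil && durableSlots == nil {
		return nil // nothing set; server-side defaults apply
	}
	out := map[string]int32{}
	if slots != nil {
		out["default"] = int32(*slots)
	}
	if durableSlots != nil {
		out["durable"] = int32(*durableSlots)
	}
	return out
}

func main() {
	slots, durable := 10, 2
	fmt.Println(resolveSlotConfig(&slots, &durable, nil)) // map[default:10 durable:2]
}

An explicit SlotConfig takes precedence; the derived map exists only so workers that still set the scalar fields keep their previous capacity semantics under the new per-slot-type accounting.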
@@ -268,9 +270,27 @@ func (d *dispatcherClientImpl) newActionListener(ctx context.Context, req *GetAc } } - if req.MaxRuns != nil { - mr := int32(*req.MaxRuns) // nolint: gosec - registerReq.MaxRuns = &mr + if req.Slots != nil { + mr := int32(*req.Slots) // nolint: gosec + registerReq.Slots = &mr + } + + if req.DurableSlots != nil { + dr := int32(*req.DurableSlots) // nolint: gosec + registerReq.DurableSlots = &dr + } + + if req.SlotConfig != nil { + registerReq.SlotConfig = req.SlotConfig + } else if req.Slots != nil || req.DurableSlots != nil { + slotConfig := map[string]int32{} + if req.Slots != nil { + slotConfig["default"] = int32(*req.Slots) // nolint: gosec + } + if req.DurableSlots != nil { + slotConfig["durable"] = int32(*req.DurableSlots) // nolint: gosec + } + registerReq.SlotConfig = slotConfig } // register the worker @@ -436,9 +456,9 @@ func (a *actionListenerImpl) Actions(ctx context.Context) (<-chan *Action, <-cha JobId: assignedAction.JobId, JobName: assignedAction.JobName, JobRunId: assignedAction.JobRunId, - StepId: assignedAction.StepId, - StepName: assignedAction.StepName, - StepRunId: assignedAction.StepRunId, + StepId: assignedAction.TaskId, + StepName: assignedAction.TaskName, + StepRunId: assignedAction.TaskRunExternalId, ActionId: assignedAction.ActionId, ActionType: actionType, ActionPayload: []byte(unquoted), @@ -546,17 +566,17 @@ func (d *dispatcherClientImpl) SendStepActionEvent(ctx context.Context, in *Acti } resp, err := d.client.SendStepActionEvent(d.ctx.newContext(ctx), &dispatchercontracts.StepActionEvent{ - WorkerId: in.WorkerId, - JobId: in.JobId, - JobRunId: in.JobRunId, - StepId: in.StepId, - StepRunId: in.StepRunId, - ActionId: in.ActionId, - EventTimestamp: timestamppb.New(*in.EventTimestamp), - EventType: actionEventType, - EventPayload: string(payloadBytes), - RetryCount: &in.RetryCount, - ShouldNotRetry: in.ShouldNotRetry, + WorkerId: in.WorkerId, + JobId: in.JobId, + JobRunId: in.JobRunId, + TaskId: in.StepId, + TaskRunExternalId: in.StepRunId, + ActionId: in.ActionId, + EventTimestamp: timestamppb.New(*in.EventTimestamp), + EventType: actionEventType, + EventPayload: string(payloadBytes), + RetryCount: &in.RetryCount, + ShouldNotRetry: in.ShouldNotRetry, }) if err != nil { @@ -616,7 +636,7 @@ func (d *dispatcherClientImpl) SendGroupKeyActionEvent(ctx context.Context, in * func (a *dispatcherClientImpl) ReleaseSlot(ctx context.Context, stepRunId string) error { _, err := a.client.ReleaseSlot(a.ctx.newContext(ctx), &dispatchercontracts.ReleaseSlotRequest{ - StepRunId: stepRunId, + TaskRunExternalId: stepRunId, }) if err != nil { @@ -628,7 +648,7 @@ func (a *dispatcherClientImpl) ReleaseSlot(ctx context.Context, stepRunId string func (a *dispatcherClientImpl) RefreshTimeout(ctx context.Context, stepRunId string, incrementTimeoutBy string) error { _, err := a.client.RefreshTimeout(a.ctx.newContext(ctx), &dispatchercontracts.RefreshTimeoutRequest{ - StepRunId: stepRunId, + TaskRunExternalId: stepRunId, IncrementTimeoutBy: incrementTimeoutBy, }) diff --git a/pkg/client/event.go b/pkg/client/event.go index cdecbe4777..869ca74743 100644 --- a/pkg/client/event.go +++ b/pkg/client/event.go @@ -41,7 +41,7 @@ type EventClient interface { BulkPush(ctx context.Context, payloads []EventWithAdditionalMetadata, options ...BulkPushOpFunc) error - PutLog(ctx context.Context, stepRunId, msg string, level *string, taskRetryCount *int32) error + PutLog(ctx context.Context, taskRunId, msg string, level *string, taskRetryCount *int32) error PutStreamEvent(ctx 
context.Context, stepRunId string, message []byte, options ...StreamEventOption) error } @@ -190,19 +190,19 @@ func (a *eventClientImpl) BulkPush(ctx context.Context, payload []EventWithAddit return nil } -func (a *eventClientImpl) PutLog(ctx context.Context, stepRunId, msg string, level *string, taskRetryCount *int32) error { +func (a *eventClientImpl) PutLog(ctx context.Context, taskRunId, msg string, level *string, taskRetryCount *int32) error { _, err := a.client.PutLog(a.ctx.newContext(ctx), &eventcontracts.PutLogRequest{ - CreatedAt: timestamppb.Now(), - StepRunId: stepRunId, - Message: msg, - Level: level, - TaskRetryCount: taskRetryCount, + CreatedAt: timestamppb.Now(), + TaskRunExternalId: taskRunId, + Message: msg, + Level: level, + TaskRetryCount: taskRetryCount, }) return err } -func (a *eventClientImpl) PutStreamEvent(ctx context.Context, stepRunId string, message []byte, options ...StreamEventOption) error { +func (a *eventClientImpl) PutStreamEvent(ctx context.Context, taskRunId string, message []byte, options ...StreamEventOption) error { opts := &streamEventOpts{} for _, optionFunc := range options { @@ -210,9 +210,9 @@ func (a *eventClientImpl) PutStreamEvent(ctx context.Context, stepRunId string, } request := &eventcontracts.PutStreamEventRequest{ - CreatedAt: timestamppb.Now(), - StepRunId: stepRunId, - Message: message, + CreatedAt: timestamppb.Now(), + TaskRunExternalId: taskRunId, + Message: message, } if opts.index != nil { diff --git a/pkg/client/rest/gen.go b/pkg/client/rest/gen.go index 44a844e9c0..1c653a23b6 100644 --- a/pkg/client/rest/gen.go +++ b/pkg/client/rest/gen.go @@ -994,15 +994,21 @@ type SlackWebhook struct { // Step defines model for Step. type Step struct { - Action string `json:"action"` - Children *[]string `json:"children,omitempty"` - JobId string `json:"jobId"` - Metadata APIResourceMeta `json:"metadata"` - Parents *[]string `json:"parents,omitempty"` + Action string `json:"action"` + Children *[]string `json:"children,omitempty"` + + // IsDurable Whether the step is durable. + IsDurable *bool `json:"isDurable,omitempty"` + JobId string `json:"jobId"` + Metadata APIResourceMeta `json:"metadata"` + Parents *[]string `json:"parents,omitempty"` // ReadableId The readable id of the step. ReadableId string `json:"readableId"` - TenantId string `json:"tenantId"` + + // SlotRequests Slot requests for the step (slot_type -> units). + SlotRequests *map[string]int `json:"slotRequests,omitempty"` + TenantId string `json:"tenantId"` // Timeout The timeout of the step. Timeout *string `json:"timeout,omitempty"` @@ -2073,9 +2079,6 @@ type Worker struct { // Actions The actions this worker can perform. Actions *[]string `json:"actions,omitempty"` - // AvailableRuns The number of runs this worker can execute concurrently. - AvailableRuns *int `json:"availableRuns,omitempty"` - // DispatcherId the id of the assigned dispatcher, in UUID format DispatcherId *openapi_types.UUID `json:"dispatcherId,omitempty"` @@ -2086,11 +2089,8 @@ type Worker struct { LastHeartbeatAt *time.Time `json:"lastHeartbeatAt,omitempty"` // LastListenerEstablished The time this worker last sent a heartbeat. - LastListenerEstablished *time.Time `json:"lastListenerEstablished,omitempty"` - - // MaxRuns The maximum number of runs this worker can execute concurrently. 
- MaxRuns *int `json:"maxRuns,omitempty"` - Metadata APIResourceMeta `json:"metadata"` + LastListenerEstablished *time.Time `json:"lastListenerEstablished,omitempty"` + Metadata APIResourceMeta `json:"metadata"` // Name The name of the worker. Name string `json:"name"` @@ -2102,6 +2102,9 @@ type Worker struct { RegisteredWorkflows *[]RegisteredWorkflow `json:"registeredWorkflows,omitempty"` RuntimeInfo *WorkerRuntimeInfo `json:"runtimeInfo,omitempty"` + // SlotConfig Slot availability and limits for this worker (slot_type -> { available, limit }). + SlotConfig *map[string]WorkerSlotConfig `json:"slotConfig,omitempty"` + // Slots The semaphore slot state for the worker. Slots *[]SemaphoreSlots `json:"slots,omitempty"` @@ -2147,6 +2150,15 @@ type WorkerRuntimeInfo struct { // WorkerRuntimeSDKs defines model for WorkerRuntimeSDKs. type WorkerRuntimeSDKs string +// WorkerSlotConfig Slot availability and limits for a slot type. +type WorkerSlotConfig struct { + // Available The number of available units for this slot type. + Available *int `json:"available,omitempty"` + + // Limit The maximum number of units for this slot type. + Limit int `json:"limit"` +} + // WorkerType defines model for WorkerType. type WorkerType string diff --git a/pkg/client/workflow.go b/pkg/client/workflow.go index 4fec83518e..2cf18ed829 100644 --- a/pkg/client/workflow.go +++ b/pkg/client/workflow.go @@ -46,7 +46,7 @@ type WorkflowResult struct { func (r *WorkflowResult) StepOutput(key string, v interface{}) error { var outputBytes []byte for _, stepRunResult := range r.workflowRun.Results { - if stepRunResult.StepReadableId == key { + if stepRunResult.TaskName == key { if stepRunResult.Error != nil { return fmt.Errorf("%s", *stepRunResult.Error) } @@ -81,7 +81,7 @@ func (r *WorkflowResult) Results() (interface{}, error) { } if stepRunResult.Output != nil { - results[stepRunResult.StepReadableId] = stepRunResult.Output + results[stepRunResult.TaskName] = stepRunResult.Output } } diff --git a/pkg/repository/scheduler.go b/pkg/repository/scheduler.go index 2755f41a29..7a489dd01a 100644 --- a/pkg/repository/scheduler.go +++ b/pkg/repository/scheduler.go @@ -36,12 +36,14 @@ type QueueRepository interface { GetTaskRateLimits(ctx context.Context, tx *OptimisticTx, queueItems []*sqlcv1.V1QueueItem) (map[int64]map[string]int32, error) RequeueRateLimitedItems(ctx context.Context, tenantId uuid.UUID, queueName string) ([]*sqlcv1.RequeueRateLimitedQueueItemsRow, error) GetDesiredLabels(ctx context.Context, tx *OptimisticTx, stepIds []uuid.UUID) (map[uuid.UUID][]*sqlcv1.GetDesiredLabelsRow, error) + GetStepSlotRequests(ctx context.Context, stepIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error) Cleanup() } type AssignmentRepository interface { ListActionsForWorkers(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID) ([]*sqlcv1.ListActionsForWorkersRow, error) ListAvailableSlotsForWorkers(ctx context.Context, tenantId uuid.UUID, params sqlcv1.ListAvailableSlotsForWorkersParams) ([]*sqlcv1.ListAvailableSlotsForWorkersRow, error) + ListWorkerSlotConfigs(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID) ([]*sqlcv1.ListWorkerSlotConfigsRow, error) } type OptimisticSchedulingRepository interface { diff --git a/pkg/repository/scheduler_assignment.go b/pkg/repository/scheduler_assignment.go index aa390e0272..8ac678292b 100644 --- a/pkg/repository/scheduler_assignment.go +++ b/pkg/repository/scheduler_assignment.go @@ -34,3 +34,13 @@ func (d *assignmentRepository) ListAvailableSlotsForWorkers(ctx 
context.Context, return d.queries.ListAvailableSlotsForWorkers(ctx, d.pool, params) } + +func (d *assignmentRepository) ListWorkerSlotConfigs(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID) ([]*sqlcv1.ListWorkerSlotConfigsRow, error) { + ctx, span := telemetry.NewSpan(ctx, "list-worker-slot-configs") + defer span.End() + + return d.queries.ListWorkerSlotConfigs(ctx, d.pool, sqlcv1.ListWorkerSlotConfigsParams{ + Tenantid: tenantId, + Workerids: workerIds, + }) +} diff --git a/pkg/repository/scheduler_lease.go b/pkg/repository/scheduler_lease.go index 7716ba5a8d..fbdd2da430 100644 --- a/pkg/repository/scheduler_lease.go +++ b/pkg/repository/scheduler_lease.go @@ -13,10 +13,9 @@ import ( ) type ListActiveWorkersResult struct { - ID uuid.UUID - MaxRuns int - Name string - Labels []*sqlcv1.ListManyWorkerLabelsRow + ID uuid.UUID + Name string + Labels []*sqlcv1.ListManyWorkerLabelsRow } type leaseRepository struct { @@ -152,10 +151,9 @@ func (d *leaseRepository) ListActiveWorkers(ctx context.Context, tenantId uuid.U for _, worker := range activeWorkers { wId := worker.ID.String() res = append(res, &ListActiveWorkersResult{ - ID: worker.ID, - MaxRuns: int(worker.MaxRuns), - Labels: workerIdsToLabels[wId], - Name: worker.Name, + ID: worker.ID, + Labels: workerIdsToLabels[wId], + Name: worker.Name, }) } diff --git a/pkg/repository/scheduler_queue.go b/pkg/repository/scheduler_queue.go index 84e94848a3..f1af740d54 100644 --- a/pkg/repository/scheduler_queue.go +++ b/pkg/repository/scheduler_queue.go @@ -31,7 +31,7 @@ type RateLimitResult struct { const rateLimitedRequeueAfterThreshold = 2 * time.Second type AssignedItem struct { - WorkerId uuid.UUID + WorkerId uuid.UUID QueueItem *sqlcv1.V1QueueItem @@ -639,6 +639,29 @@ func (d *queueRepository) GetDesiredLabels(ctx context.Context, tx *OptimisticTx return stepIdToLabels, nil } +func (d *queueRepository) GetStepSlotRequests(ctx context.Context, stepIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error) { + ctx, span := telemetry.NewSpan(ctx, "get-step-slot-requests") + defer span.End() + + uniqueStepIds := sqlchelpers.UniqueSet(stepIds) + + rows, err := d.queries.GetStepSlotRequests(ctx, d.pool, uniqueStepIds) + if err != nil { + return nil, err + } + + stepIdToRequests := make(map[uuid.UUID]map[string]int32, len(rows)) + for _, row := range rows { + if _, ok := stepIdToRequests[row.StepID]; !ok { + stepIdToRequests[row.StepID] = make(map[string]int32) + } + + stepIdToRequests[row.StepID][row.SlotType] = row.Units + } + + return stepIdToRequests, nil +} + func (d *queueRepository) RequeueRateLimitedItems(ctx context.Context, tenantId uuid.UUID, queueName string) ([]*sqlcv1.RequeueRateLimitedQueueItemsRow, error) { tx, commit, rollback, err := sqlchelpers.PrepareTx(ctx, d.pool, d.l) diff --git a/pkg/repository/slot_types.go b/pkg/repository/slot_types.go new file mode 100644 index 0000000000..42f774e204 --- /dev/null +++ b/pkg/repository/slot_types.go @@ -0,0 +1,7 @@ +package repository + +// SlotType constants for worker slot configurations. 
+const ( + SlotTypeDefault = "default" + SlotTypeDurable = "durable" +) diff --git a/pkg/repository/sqlcv1/lease.sql b/pkg/repository/sqlcv1/lease.sql index 3027a4d976..32ac9e8374 100644 --- a/pkg/repository/sqlcv1/lease.sql +++ b/pkg/repository/sqlcv1/lease.sql @@ -50,11 +50,12 @@ RETURNING l.*; -- name: ListActiveWorkers :many SELECT - w."id", - w."maxRuns", + DISTINCT w."id", w."name" FROM "Worker" w +JOIN + v1_worker_slot_config wsc ON w."id" = wsc."worker_id" WHERE w."tenantId" = @tenantId::uuid AND w."dispatcherId" IS NOT NULL diff --git a/pkg/repository/sqlcv1/lease.sql.go b/pkg/repository/sqlcv1/lease.sql.go index 2119947394..8fe7d0b712 100644 --- a/pkg/repository/sqlcv1/lease.sql.go +++ b/pkg/repository/sqlcv1/lease.sql.go @@ -106,11 +106,12 @@ func (q *Queries) GetLeasesToAcquire(ctx context.Context, db DBTX, arg GetLeases const listActiveWorkers = `-- name: ListActiveWorkers :many SELECT - w."id", - w."maxRuns", + DISTINCT w."id", w."name" FROM "Worker" w +JOIN + v1_worker_slot_config wsc ON w."id" = wsc."worker_id" WHERE w."tenantId" = $1::uuid AND w."dispatcherId" IS NOT NULL @@ -120,9 +121,8 @@ WHERE ` type ListActiveWorkersRow struct { - ID uuid.UUID `json:"id"` - MaxRuns int32 `json:"maxRuns"` - Name string `json:"name"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` } func (q *Queries) ListActiveWorkers(ctx context.Context, db DBTX, tenantid uuid.UUID) ([]*ListActiveWorkersRow, error) { @@ -134,7 +134,7 @@ func (q *Queries) ListActiveWorkers(ctx context.Context, db DBTX, tenantid uuid. var items []*ListActiveWorkersRow for rows.Next() { var i ListActiveWorkersRow - if err := rows.Scan(&i.ID, &i.MaxRuns, &i.Name); err != nil { + if err := rows.Scan(&i.ID, &i.Name); err != nil { return nil, err } items = append(items, &i) diff --git a/pkg/repository/sqlcv1/models.go b/pkg/repository/sqlcv1/models.go index 3d9de50fc6..ef00f1a664 100644 --- a/pkg/repository/sqlcv1/models.go +++ b/pkg/repository/sqlcv1/models.go @@ -1872,6 +1872,48 @@ func (ns NullV1TaskInitialState) Value() (driver.Value, error) { return string(ns.V1TaskInitialState), nil } +type V1WorkerSlotGroup string + +const ( + V1WorkerSlotGroupSLOTS V1WorkerSlotGroup = "SLOTS" + V1WorkerSlotGroupDURABLESLOTS V1WorkerSlotGroup = "DURABLE_SLOTS" +) + +func (e *V1WorkerSlotGroup) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = V1WorkerSlotGroup(s) + case string: + *e = V1WorkerSlotGroup(s) + default: + return fmt.Errorf("unsupported scan type for V1WorkerSlotGroup: %T", src) + } + return nil +} + +type NullV1WorkerSlotGroup struct { + V1WorkerSlotGroup V1WorkerSlotGroup `json:"v1_worker_slot_group"` + Valid bool `json:"valid"` // Valid is true if V1WorkerSlotGroup is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullV1WorkerSlotGroup) Scan(value interface{}) error { + if value == nil { + ns.V1WorkerSlotGroup, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.V1WorkerSlotGroup.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullV1WorkerSlotGroup) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.V1WorkerSlotGroup), nil +} + type VcsProvider string const ( @@ -2639,6 +2681,7 @@ type Step struct { RetryBackoffFactor pgtype.Float8 `json:"retryBackoffFactor"` RetryMaxBackoff pgtype.Int4 `json:"retryMaxBackoff"` ScheduleTimeout string `json:"scheduleTimeout"` + IsDurable bool `json:"isDurable"` } type StepDesiredWorkerLabel struct { @@ -3342,6 +3385,15 @@ type V1StepMatchCondition struct { ParentReadableID pgtype.Text `json:"parent_readable_id"` } +type V1StepSlotRequest struct { + TenantID uuid.UUID `json:"tenant_id"` + StepID uuid.UUID `json:"step_id"` + SlotType string `json:"slot_type"` + Units int32 `json:"units"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type V1Task struct { ID int64 `json:"id"` InsertedAt pgtype.Timestamptz `json:"inserted_at"` @@ -3444,6 +3496,19 @@ type V1TaskRuntime struct { WorkerID *uuid.UUID `json:"worker_id"` TenantID uuid.UUID `json:"tenant_id"` TimeoutAt pgtype.Timestamp `json:"timeout_at"` + SlotGroup V1WorkerSlotGroup `json:"slot_group"` +} + +type V1TaskRuntimeSlot struct { + TenantID uuid.UUID `json:"tenant_id"` + TaskID int64 `json:"task_id"` + TaskInsertedAt pgtype.Timestamptz `json:"task_inserted_at"` + RetryCount int32 `json:"retry_count"` + WorkerID uuid.UUID `json:"worker_id"` + SlotType string `json:"slot_type"` + Units int32 `json:"units"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` } type V1TaskStatusUpdatesTmp struct { @@ -3482,6 +3547,15 @@ type V1TasksOlap struct { ParentTaskExternalID *uuid.UUID `json:"parent_task_external_id"` } +type V1WorkerSlotConfig struct { + TenantID uuid.UUID `json:"tenant_id"` + WorkerID uuid.UUID `json:"worker_id"` + SlotType string `json:"slot_type"` + MaxUnits int32 `json:"max_units"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type V1WorkflowConcurrency struct { ID int64 `json:"id"` WorkflowID uuid.UUID `json:"workflow_id"` @@ -3544,7 +3618,6 @@ type Worker struct { LastHeartbeatAt pgtype.Timestamp `json:"lastHeartbeatAt"` Name string `json:"name"` DispatcherId *uuid.UUID `json:"dispatcherId"` - MaxRuns int32 `json:"maxRuns"` IsActive bool `json:"isActive"` LastListenerEstablished pgtype.Timestamp `json:"lastListenerEstablished"` IsPaused bool `json:"isPaused"` diff --git a/pkg/repository/sqlcv1/queue.sql b/pkg/repository/sqlcv1/queue.sql index 9f42d34ef1..eba688d5e3 100644 --- a/pkg/repository/sqlcv1/queue.sql +++ b/pkg/repository/sqlcv1/queue.sql @@ -51,35 +51,36 @@ WHERE AND w."isPaused" = false; -- name: ListAvailableSlotsForWorkers :many -WITH worker_max_runs AS ( +WITH worker_capacities AS ( SELECT - "id", - "maxRuns" + worker_id, + max_units FROM - "Worker" + v1_worker_slot_config WHERE - "tenantId" = @tenantId::uuid - AND "id" = ANY(@workerIds::uuid[]) -), worker_filled_slots AS ( + tenant_id = @tenantId::uuid + AND worker_id = ANY(@workerIds::uuid[]) + AND slot_type = @slotType::text +), worker_used_slots AS ( SELECT worker_id, - COUNT(task_id) AS "filledSlots" + SUM(units) AS used_units FROM - v1_task_runtime + v1_task_runtime_slot WHERE tenant_id = @tenantId::uuid AND worker_id = ANY(@workerIds::uuid[]) + AND slot_type = @slotType::text GROUP BY worker_id ) --- subtract the filled slots from the max runs to get the available slots SELECT - wmr."id", - wmr."maxRuns" - 
COALESCE(wfs."filledSlots", 0) AS "availableSlots" + wc.worker_id AS "id", + wc.max_units - COALESCE(wus.used_units, 0) AS "availableSlots" FROM - worker_max_runs wmr + worker_capacities wc LEFT JOIN - worker_filled_slots wfs ON wmr."id" = wfs.worker_id; + worker_used_slots wus ON wc.worker_id = wus.worker_id; -- name: ListQueues :many SELECT @@ -230,6 +231,7 @@ WITH input AS ( t.retry_count, i.worker_id, t.tenant_id, + t.step_id, CURRENT_TIMESTAMP + convert_duration_to_interval(t.step_timeout) AS timeout_at FROM v1_task t @@ -259,6 +261,42 @@ WITH input AS ( ON CONFLICT (task_id, task_inserted_at, retry_count) DO NOTHING -- only return the task ids that were successfully assigned RETURNING task_id, worker_id +), slot_requests AS ( + SELECT + t.id, + t.inserted_at, + t.retry_count, + t.worker_id, + t.tenant_id, + COALESCE(req.slot_type, 'default'::text) AS slot_type, + COALESCE(req.units, 1) AS units + FROM + updated_tasks t + LEFT JOIN + v1_step_slot_request req + ON req.step_id = t.step_id AND req.tenant_id = t.tenant_id +), assigned_slots AS ( + INSERT INTO v1_task_runtime_slot ( + tenant_id, + task_id, + task_inserted_at, + retry_count, + worker_id, + slot_type, + units + ) + SELECT + tenant_id, + id, + inserted_at, + retry_count, + worker_id, + slot_type, + units + FROM + slot_requests + ON CONFLICT (task_id, task_inserted_at, retry_count, slot_type) DO NOTHING + RETURNING task_id ) SELECT asr.task_id, @@ -280,6 +318,16 @@ FROM WHERE "stepId" = ANY(@stepIds::uuid[]); +-- name: GetStepSlotRequests :many +SELECT + step_id, + slot_type, + units +FROM + v1_step_slot_request +WHERE + step_id = ANY(@stepIds::uuid[]); + -- name: GetQueuedCounts :many SELECT queue, diff --git a/pkg/repository/sqlcv1/queue.sql.go b/pkg/repository/sqlcv1/queue.sql.go index fe8a3e48ef..c4120ff067 100644 --- a/pkg/repository/sqlcv1/queue.sql.go +++ b/pkg/repository/sqlcv1/queue.sql.go @@ -333,6 +333,43 @@ func (q *Queries) GetQueuedCounts(ctx context.Context, db DBTX, tenantid uuid.UU return items, nil } +const getStepSlotRequests = `-- name: GetStepSlotRequests :many +SELECT + step_id, + slot_type, + units +FROM + v1_step_slot_request +WHERE + step_id = ANY($1::uuid[]) +` + +type GetStepSlotRequestsRow struct { + StepID uuid.UUID `json:"step_id"` + SlotType string `json:"slot_type"` + Units int32 `json:"units"` +} + +func (q *Queries) GetStepSlotRequests(ctx context.Context, db DBTX, stepids []uuid.UUID) ([]*GetStepSlotRequestsRow, error) { + rows, err := db.Query(ctx, getStepSlotRequests, stepids) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*GetStepSlotRequestsRow + for rows.Next() { + var i GetStepSlotRequestsRow + if err := rows.Scan(&i.StepID, &i.SlotType, &i.Units); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const listActionsForWorkers = `-- name: ListActionsForWorkers :many SELECT w."id" as "workerId", @@ -383,39 +420,42 @@ func (q *Queries) ListActionsForWorkers(ctx context.Context, db DBTX, arg ListAc } const listAvailableSlotsForWorkers = `-- name: ListAvailableSlotsForWorkers :many -WITH worker_max_runs AS ( +WITH worker_capacities AS ( SELECT - "id", - "maxRuns" + worker_id, + max_units FROM - "Worker" + v1_worker_slot_config WHERE - "tenantId" = $1::uuid - AND "id" = ANY($2::uuid[]) -), worker_filled_slots AS ( + tenant_id = $1::uuid + AND worker_id = ANY($2::uuid[]) + AND slot_type = $3::text +), worker_used_slots AS ( SELECT worker_id, - COUNT(task_id) AS 
"filledSlots" + SUM(units) AS used_units FROM - v1_task_runtime + v1_task_runtime_slot WHERE tenant_id = $1::uuid AND worker_id = ANY($2::uuid[]) + AND slot_type = $3::text GROUP BY worker_id ) SELECT - wmr."id", - wmr."maxRuns" - COALESCE(wfs."filledSlots", 0) AS "availableSlots" + wc.worker_id AS "id", + wc.max_units - COALESCE(wus.used_units, 0) AS "availableSlots" FROM - worker_max_runs wmr + worker_capacities wc LEFT JOIN - worker_filled_slots wfs ON wmr."id" = wfs.worker_id + worker_used_slots wus ON wc.worker_id = wus.worker_id ` type ListAvailableSlotsForWorkersParams struct { Tenantid uuid.UUID `json:"tenantid"` Workerids []uuid.UUID `json:"workerids"` + Slottype string `json:"slottype"` } type ListAvailableSlotsForWorkersRow struct { @@ -423,9 +463,8 @@ type ListAvailableSlotsForWorkersRow struct { AvailableSlots int32 `json:"availableSlots"` } -// subtract the filled slots from the max runs to get the available slots func (q *Queries) ListAvailableSlotsForWorkers(ctx context.Context, db DBTX, arg ListAvailableSlotsForWorkersParams) ([]*ListAvailableSlotsForWorkersRow, error) { - rows, err := db.Query(ctx, listAvailableSlotsForWorkers, arg.Tenantid, arg.Workerids) + rows, err := db.Query(ctx, listAvailableSlotsForWorkers, arg.Tenantid, arg.Workerids, arg.Slottype) if err != nil { return nil, err } @@ -886,6 +925,7 @@ WITH input AS ( t.retry_count, i.worker_id, t.tenant_id, + t.step_id, CURRENT_TIMESTAMP + convert_duration_to_interval(t.step_timeout) AS timeout_at FROM v1_task t @@ -915,6 +955,42 @@ WITH input AS ( ON CONFLICT (task_id, task_inserted_at, retry_count) DO NOTHING -- only return the task ids that were successfully assigned RETURNING task_id, worker_id +), slot_requests AS ( + SELECT + t.id, + t.inserted_at, + t.retry_count, + t.worker_id, + t.tenant_id, + COALESCE(req.slot_type, 'default'::text) AS slot_type, + COALESCE(req.units, 1) AS units + FROM + updated_tasks t + LEFT JOIN + v1_step_slot_request req + ON req.step_id = t.step_id AND req.tenant_id = t.tenant_id +), assigned_slots AS ( + INSERT INTO v1_task_runtime_slot ( + tenant_id, + task_id, + task_inserted_at, + retry_count, + worker_id, + slot_type, + units + ) + SELECT + tenant_id, + id, + inserted_at, + retry_count, + worker_id, + slot_type, + units + FROM + slot_requests + ON CONFLICT (task_id, task_inserted_at, retry_count, slot_type) DO NOTHING + RETURNING task_id ) SELECT asr.task_id, diff --git a/pkg/repository/sqlcv1/tasks-overwrite.go b/pkg/repository/sqlcv1/tasks-overwrite.go index 1adca72dd5..a56a3226b1 100644 --- a/pkg/repository/sqlcv1/tasks-overwrite.go +++ b/pkg/repository/sqlcv1/tasks-overwrite.go @@ -729,6 +729,13 @@ WITH input AS ( ORDER BY task_id, task_inserted_at, retry_count FOR UPDATE +), deleted_slots AS ( + DELETE FROM + v1_task_runtime_slot + WHERE + (task_id, task_inserted_at, retry_count) IN (SELECT task_id, task_inserted_at, retry_count FROM input) + -- return a constant for ordering + RETURNING 1 AS cte_order ), deleted_runtimes AS ( DELETE FROM v1_task_runtime diff --git a/pkg/repository/sqlcv1/tasks.sql b/pkg/repository/sqlcv1/tasks.sql index 9c7a818462..0ede52e3bf 100644 --- a/pkg/repository/sqlcv1/tasks.sql +++ b/pkg/repository/sqlcv1/tasks.sql @@ -912,6 +912,11 @@ WITH task AS ( ORDER BY task_id, task_inserted_at, retry_count FOR UPDATE +), deleted_slots AS ( + DELETE FROM v1_task_runtime_slot + WHERE + (task_id, task_inserted_at, retry_count) IN (SELECT id, inserted_at, retry_count FROM task) + RETURNING task_id ) UPDATE v1_task_runtime @@ -970,6 +975,12 @@ WITH locked_trs 
AS ( LIMIT @batchSize::int FOR UPDATE SKIP LOCKED ) +DELETE FROM v1_task_runtime_slot +WHERE (task_id, task_inserted_at, retry_count) IN ( + SELECT task_id, task_inserted_at, retry_count + FROM locked_trs +); + DELETE FROM v1_task_runtime WHERE (task_id, task_inserted_at, retry_count) IN ( SELECT task_id, task_inserted_at, retry_count @@ -1160,7 +1171,14 @@ SELECT FROM running_tasks; -- name: FindOldestRunningTask :one -SELECT * +SELECT + task_id, + task_inserted_at, + retry_count, + worker_id, + tenant_id, + timeout_at, + slot_group FROM v1_task_runtime ORDER BY task_id, task_inserted_at LIMIT 1; diff --git a/pkg/repository/sqlcv1/tasks.sql.go b/pkg/repository/sqlcv1/tasks.sql.go index a03202a061..f2f21306e8 100644 --- a/pkg/repository/sqlcv1/tasks.sql.go +++ b/pkg/repository/sqlcv1/tasks.sql.go @@ -125,7 +125,7 @@ WITH locked_trs AS ( LIMIT $1::int FOR UPDATE SKIP LOCKED ) -DELETE FROM v1_task_runtime +DELETE FROM v1_task_runtime_slot WHERE (task_id, task_inserted_at, retry_count) IN ( SELECT task_id, task_inserted_at, retry_count FROM locked_trs @@ -551,7 +551,14 @@ func (q *Queries) FailTaskInternalFailure(ctx context.Context, db DBTX, arg Fail } const findOldestRunningTask = `-- name: FindOldestRunningTask :one -SELECT task_id, task_inserted_at, retry_count, worker_id, tenant_id, timeout_at +SELECT + task_id, + task_inserted_at, + retry_count, + worker_id, + tenant_id, + timeout_at, + slot_group FROM v1_task_runtime ORDER BY task_id, task_inserted_at LIMIT 1 @@ -567,6 +574,7 @@ func (q *Queries) FindOldestRunningTask(ctx context.Context, db DBTX) (*V1TaskRu &i.WorkerID, &i.TenantID, &i.TimeoutAt, + &i.SlotGroup, ) return &i, err } @@ -2139,6 +2147,11 @@ WITH task AS ( ORDER BY task_id, task_inserted_at, retry_count FOR UPDATE +), deleted_slots AS ( + DELETE FROM v1_task_runtime_slot + WHERE + (task_id, task_inserted_at, retry_count) IN (SELECT id, inserted_at, retry_count FROM task) + RETURNING task_id ) UPDATE v1_task_runtime @@ -2149,7 +2162,7 @@ FROM WHERE (v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count) IN (SELECT id, inserted_at, retry_count FROM task) RETURNING - v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count, v1_task_runtime.worker_id, v1_task_runtime.tenant_id, v1_task_runtime.timeout_at + v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count, v1_task_runtime.worker_id, v1_task_runtime.tenant_id, v1_task_runtime.timeout_at, v1_task_runtime.slot_group ` type ManualSlotReleaseParams struct { @@ -2167,6 +2180,7 @@ func (q *Queries) ManualSlotRelease(ctx context.Context, db DBTX, arg ManualSlot &i.WorkerID, &i.TenantID, &i.TimeoutAt, + &i.SlotGroup, ) return &i, err } @@ -2416,7 +2430,7 @@ FROM WHERE (v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count) IN (SELECT id, inserted_at, retry_count FROM task) RETURNING - v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count, v1_task_runtime.worker_id, v1_task_runtime.tenant_id, v1_task_runtime.timeout_at + v1_task_runtime.task_id, v1_task_runtime.task_inserted_at, v1_task_runtime.retry_count, v1_task_runtime.worker_id, v1_task_runtime.tenant_id, v1_task_runtime.timeout_at, v1_task_runtime.slot_group ` type RefreshTimeoutByParams struct { @@ -2435,6 +2449,7 @@ func (q *Queries) RefreshTimeoutBy(ctx context.Context, db DBTX, arg RefreshTime &i.WorkerID, &i.TenantID, &i.TimeoutAt, + &i.SlotGroup, ) return &i, err } diff --git 
a/pkg/repository/sqlcv1/tenant_limits.sql b/pkg/repository/sqlcv1/tenant_limits.sql index 9686adf3b3..5cf75653e4 100644 --- a/pkg/repository/sqlcv1/tenant_limits.sql +++ b/pkg/repository/sqlcv1/tenant_limits.sql @@ -95,10 +95,3 @@ FROM "Worker" WHERE "tenantId" = @tenantId::uuid AND "lastHeartbeatAt" >= NOW() - '30 seconds'::INTERVAL AND "isActive" = true; - --- name: CountTenantWorkerSlots :one -SELECT COALESCE(SUM(w."maxRuns"), 0)::int AS "count" -FROM "Worker" w -WHERE "tenantId" = @tenantId::uuid -AND "lastHeartbeatAt" >= NOW() - '30 seconds'::INTERVAL -AND "isActive" = true; diff --git a/pkg/repository/sqlcv1/tenant_limits.sql.go b/pkg/repository/sqlcv1/tenant_limits.sql.go index 39bb15108d..cb9010954a 100644 --- a/pkg/repository/sqlcv1/tenant_limits.sql.go +++ b/pkg/repository/sqlcv1/tenant_limits.sql.go @@ -12,21 +12,6 @@ import ( "github.com/jackc/pgx/v5/pgtype" ) -const countTenantWorkerSlots = `-- name: CountTenantWorkerSlots :one -SELECT COALESCE(SUM(w."maxRuns"), 0)::int AS "count" -FROM "Worker" w -WHERE "tenantId" = $1::uuid -AND "lastHeartbeatAt" >= NOW() - '30 seconds'::INTERVAL -AND "isActive" = true -` - -func (q *Queries) CountTenantWorkerSlots(ctx context.Context, db DBTX, tenantid uuid.UUID) (int32, error) { - row := db.QueryRow(ctx, countTenantWorkerSlots, tenantid) - var count int32 - err := row.Scan(&count) - return count, err -} - const countTenantWorkers = `-- name: CountTenantWorkers :one SELECT COUNT(distinct id) AS "count" FROM "Worker" diff --git a/pkg/repository/sqlcv1/tenants.sql b/pkg/repository/sqlcv1/tenants.sql index 91d13ce3ac..98c6f865ab 100644 --- a/pkg/repository/sqlcv1/tenants.sql +++ b/pkg/repository/sqlcv1/tenants.sql @@ -648,34 +648,3 @@ UPDATE "Tenant" SET "deletedAt" = NOW(), slug = slug || '_deleted_' || gen_random_uuid() WHERE "id" = @id::uuid; - --- name: GetTenantUsageData :one -WITH active_workers AS ( - SELECT - workers."id", - workers."maxRuns" - FROM - "Worker" workers - WHERE - workers."tenantId" = @tenantId::uuid - AND workers."dispatcherId" IS NOT NULL - AND workers."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' - AND workers."isActive" = true - AND workers."isPaused" = false -), worker_slots AS ( - SELECT - aw."id" AS worker_id, - aw."maxRuns" - ( - SELECT COUNT(*) - FROM v1_task_runtime runtime - WHERE - runtime.tenant_id = @tenantId::uuid AND - runtime.worker_id = aw."id" - ) AS "remainingSlots" - FROM - active_workers aw -) -SELECT - (SELECT COUNT(*) FROM active_workers) AS "workerCount", - COALESCE((SELECT SUM("maxRuns") - SUM("remainingSlots") FROM active_workers aw JOIN worker_slots ws ON aw."id" = ws.worker_id), 0)::bigint AS "usedWorkerSlotsCount", - (SELECT COUNT(*) FROM "TenantMember" WHERE "tenantId" = @tenantId::uuid) AS "tenantMembersCount"; diff --git a/pkg/repository/sqlcv1/tenants.sql.go b/pkg/repository/sqlcv1/tenants.sql.go index 8f0a9314ba..23aa9d3760 100644 --- a/pkg/repository/sqlcv1/tenants.sql.go +++ b/pkg/repository/sqlcv1/tenants.sql.go @@ -758,51 +758,6 @@ func (q *Queries) GetTenantTotalQueueMetrics(ctx context.Context, db DBTX, arg G return &i, err } -const getTenantUsageData = `-- name: GetTenantUsageData :one -WITH active_workers AS ( - SELECT - workers."id", - workers."maxRuns" - FROM - "Worker" workers - WHERE - workers."tenantId" = $1::uuid - AND workers."dispatcherId" IS NOT NULL - AND workers."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' - AND workers."isActive" = true - AND workers."isPaused" = false -), worker_slots AS ( - SELECT - aw."id" AS worker_id, - aw."maxRuns" - ( - SELECT COUNT(*) - 
FROM v1_task_runtime runtime - WHERE - runtime.tenant_id = $1::uuid AND - runtime.worker_id = aw."id" - ) AS "remainingSlots" - FROM - active_workers aw -) -SELECT - (SELECT COUNT(*) FROM active_workers) AS "workerCount", - COALESCE((SELECT SUM("maxRuns") - SUM("remainingSlots") FROM active_workers aw JOIN worker_slots ws ON aw."id" = ws.worker_id), 0)::bigint AS "usedWorkerSlotsCount", - (SELECT COUNT(*) FROM "TenantMember" WHERE "tenantId" = $1::uuid) AS "tenantMembersCount" -` - -type GetTenantUsageDataRow struct { - WorkerCount int64 `json:"workerCount"` - UsedWorkerSlotsCount int64 `json:"usedWorkerSlotsCount"` - TenantMembersCount int64 `json:"tenantMembersCount"` -} - -func (q *Queries) GetTenantUsageData(ctx context.Context, db DBTX, tenantid uuid.UUID) (*GetTenantUsageDataRow, error) { - row := db.QueryRow(ctx, getTenantUsageData, tenantid) - var i GetTenantUsageDataRow - err := row.Scan(&i.WorkerCount, &i.UsedWorkerSlotsCount, &i.TenantMembersCount) - return &i, err -} - const getTenantWorkflowQueueMetrics = `-- name: GetTenantWorkflowQueueMetrics :many WITH valid_workflow_runs AS ( SELECT diff --git a/pkg/repository/sqlcv1/workers.sql b/pkg/repository/sqlcv1/workers.sql index 7fe8d282ea..24fdcf5ca0 100644 --- a/pkg/repository/sqlcv1/workers.sql +++ b/pkg/repository/sqlcv1/workers.sql @@ -10,18 +10,77 @@ SELECT FROM "WorkerLabel" wl WHERE wl."workerId" = ANY(@workerIds::uuid[]); +-- name: ListWorkerSlotConfigs :many +SELECT + worker_id, + slot_type, + max_units +FROM + v1_worker_slot_config +WHERE + tenant_id = @tenantId::uuid + AND worker_id = ANY(@workerIds::uuid[]); + +-- name: UpsertWorkerSlotConfigs :exec +INSERT INTO v1_worker_slot_config ( + tenant_id, + worker_id, + slot_type, + max_units, + created_at, + updated_at +) +SELECT + @tenantId::uuid, + @workerId::uuid, + unnest(@slotTypes::text[]), + unnest(@maxUnits::integer[]), + CURRENT_TIMESTAMP, + CURRENT_TIMESTAMP +ON CONFLICT (tenant_id, worker_id, slot_type) DO UPDATE +SET + max_units = EXCLUDED.max_units, + updated_at = CURRENT_TIMESTAMP; + -- name: ListWorkersWithSlotCount :many SELECT sqlc.embed(workers), ww."url" AS "webhookUrl", ww."id" AS "webhookId", - workers."maxRuns" - ( - SELECT COUNT(*) - FROM v1_task_runtime runtime + -- TODO do we still need this? 
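+    -- Remaining capacity is configured units minus held units, per slot type:
+    -- the 'default' cap comes from v1_worker_slot_config.max_units, and each
+    -- running task holds v1_task_runtime_slot.units against it. For example
+    -- (values illustrative), max_units = 8 with running tasks holding 2 + 1 + 1
+    -- units leaves remainingSlots = 4. The outer COALESCE returns 0 for workers
+    -- that have no 'default' slot config row.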
+ COALESCE(( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = workers."tenantId" + AND cap.worker_id = workers."id" + AND cap.slot_type = 'default'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime WHERE runtime.tenant_id = workers."tenantId" AND - runtime.worker_id = workers."id" - ) AS "remainingSlots" + runtime.worker_id = workers."id" AND + runtime.slot_type = 'default'::text + ), 0)::int AS "remainingSlots" + , + COALESCE(( + ( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = workers."tenantId" + AND cap.worker_id = workers."id" + AND cap.slot_type = 'durable'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE + runtime.tenant_id = workers."tenantId" AND + runtime.worker_id = workers."id" AND + runtime.slot_type = 'durable'::text + ) + ), 0)::int AS "remainingDurableSlots" FROM "Worker" workers LEFT JOIN @@ -43,11 +102,14 @@ WHERE ) AND ( sqlc.narg('assignable')::boolean IS NULL OR - workers."maxRuns" IS NULL OR - (sqlc.narg('assignable')::boolean AND workers."maxRuns" > ( - SELECT COUNT(*) - FROM "StepRun" srs - WHERE srs."workerId" = workers."id" AND srs."status" = 'RUNNING' + (sqlc.narg('assignable')::boolean AND ( + SELECT COALESCE(SUM(cap.max_units), 0) + FROM v1_worker_slot_config cap + WHERE cap.tenant_id = workers."tenantId" AND cap.worker_id = workers."id" + ) > ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE runtime.tenant_id = workers."tenantId" AND runtime.worker_id = workers."id" )) ) GROUP BY @@ -57,13 +119,39 @@ GROUP BY SELECT sqlc.embed(w), ww."url" AS "webhookUrl", - w."maxRuns" - ( - SELECT COUNT(*) - FROM v1_task_runtime runtime + COALESCE(( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = w."tenantId" + AND cap.worker_id = w."id" + AND cap.slot_type = 'default'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime WHERE runtime.tenant_id = w."tenantId" AND - runtime.worker_id = w."id" - ) AS "remainingSlots" + runtime.worker_id = w."id" AND + runtime.slot_type = 'default'::text + ), 0)::int AS "remainingSlots" + , + COALESCE(( + ( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = w."tenantId" + AND cap.worker_id = w."id" + AND cap.slot_type = 'durable'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE + runtime.tenant_id = w."tenantId" AND + runtime.worker_id = w."id" AND + runtime.slot_type = 'durable'::text + ) + ), 0)::int AS "remainingDurableSlots" FROM "Worker" w LEFT JOIN @@ -85,14 +173,32 @@ LIMIT COALESCE(sqlc.narg('limit')::int, 100); -- name: ListTotalActiveSlotsPerTenant :many -SELECT "tenantId", SUM("maxRuns") AS "totalActiveSlots" -FROM "Worker" +SELECT + wc.tenant_id AS "tenantId", + SUM(wc.max_units) AS "totalActiveSlots" +FROM v1_worker_slot_config wc +JOIN "Worker" w ON w."id" = wc.worker_id AND w."tenantId" = wc.tenant_id WHERE - "dispatcherId" IS NOT NULL - AND "lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' - AND "isActive" = true - AND "isPaused" = false -GROUP BY "tenantId" + w."dispatcherId" IS NOT NULL + AND w."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' + AND w."isActive" = true + AND w."isPaused" = false +GROUP BY wc.tenant_id +; + +-- name: ListActiveSlotsPerTenantAndSlotType :many +SELECT + wc.tenant_id AS "tenantId", + wc.slot_type AS 
"slotType", + SUM(wc.max_units) AS "activeSlots" +FROM v1_worker_slot_config wc +JOIN "Worker" w ON w."id" = wc.worker_id AND w."tenantId" = wc.tenant_id +WHERE + w."dispatcherId" IS NOT NULL + AND w."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' + AND w."isActive" = true + AND w."isPaused" = false +GROUP BY wc.tenant_id, wc.slot_type ; -- name: ListActiveSDKsPerTenant :many @@ -186,7 +292,6 @@ UPDATE SET "updatedAt" = CURRENT_TIMESTAMP, "dispatcherId" = coalesce(sqlc.narg('dispatcherId')::uuid, "dispatcherId"), - "maxRuns" = coalesce(sqlc.narg('maxRuns')::int, "maxRuns"), "lastHeartbeatAt" = coalesce(sqlc.narg('lastHeartbeatAt')::timestamp, "lastHeartbeatAt"), "isActive" = coalesce(sqlc.narg('isActive')::boolean, "isActive"), "isPaused" = coalesce(sqlc.narg('isPaused')::boolean, "isPaused") @@ -326,7 +431,6 @@ INSERT INTO "Worker" ( "tenantId", "name", "dispatcherId", - "maxRuns", "webhookId", "type", "sdkVersion", @@ -341,7 +445,6 @@ INSERT INTO "Worker" ( @tenantId::uuid, @name::text, @dispatcherId::uuid, - sqlc.narg('maxRuns')::int, sqlc.narg('webhookId')::uuid, sqlc.narg('type')::"WorkerType", sqlc.narg('sdkVersion')::text, diff --git a/pkg/repository/sqlcv1/workers.sql.go b/pkg/repository/sqlcv1/workers.sql.go index 3a378f2af6..1384235089 100644 --- a/pkg/repository/sqlcv1/workers.sql.go +++ b/pkg/repository/sqlcv1/workers.sql.go @@ -20,7 +20,6 @@ INSERT INTO "Worker" ( "tenantId", "name", "dispatcherId", - "maxRuns", "webhookId", "type", "sdkVersion", @@ -35,22 +34,20 @@ INSERT INTO "Worker" ( $1::uuid, $2::text, $3::uuid, - $4::int, - $5::uuid, - $6::"WorkerType", - $7::text, - $8::"WorkerSDKS", + $4::uuid, + $5::"WorkerType", + $6::text, + $7::"WorkerSDKS", + $8::text, $9::text, - $10::text, - $11::text -) RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "maxRuns", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" + $10::text +) RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" ` type CreateWorkerParams struct { Tenantid uuid.UUID `json:"tenantid"` Name string `json:"name"` Dispatcherid uuid.UUID `json:"dispatcherid"` - MaxRuns pgtype.Int4 `json:"maxRuns"` WebhookId *uuid.UUID `json:"webhookId"` Type NullWorkerType `json:"type"` SdkVersion pgtype.Text `json:"sdkVersion"` @@ -65,7 +62,6 @@ func (q *Queries) CreateWorker(ctx context.Context, db DBTX, arg CreateWorkerPar arg.Tenantid, arg.Name, arg.Dispatcherid, - arg.MaxRuns, arg.WebhookId, arg.Type, arg.SdkVersion, @@ -84,7 +80,6 @@ func (q *Queries) CreateWorker(ctx context.Context, db DBTX, arg CreateWorkerPar &i.LastHeartbeatAt, &i.Name, &i.DispatcherId, - &i.MaxRuns, &i.IsActive, &i.LastListenerEstablished, &i.IsPaused, @@ -149,7 +144,7 @@ DELETE FROM "Worker" WHERE "id" = $1::uuid -RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "maxRuns", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" +RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" ` func (q *Queries) DeleteWorker(ctx 
context.Context, db DBTX, id uuid.UUID) (*Worker, error) { @@ -164,7 +159,6 @@ func (q *Queries) DeleteWorker(ctx context.Context, db DBTX, id uuid.UUID) (*Wor &i.LastHeartbeatAt, &i.Name, &i.DispatcherId, - &i.MaxRuns, &i.IsActive, &i.LastListenerEstablished, &i.IsPaused, @@ -227,15 +221,41 @@ func (q *Queries) GetWorkerActionsByWorkerId(ctx context.Context, db DBTX, arg G const getWorkerById = `-- name: GetWorkerById :one SELECT - w.id, w."createdAt", w."updatedAt", w."deletedAt", w."tenantId", w."lastHeartbeatAt", w.name, w."dispatcherId", w."maxRuns", w."isActive", w."lastListenerEstablished", w."isPaused", w.type, w."webhookId", w.language, w."languageVersion", w.os, w."runtimeExtra", w."sdkVersion", + w.id, w."createdAt", w."updatedAt", w."deletedAt", w."tenantId", w."lastHeartbeatAt", w.name, w."dispatcherId", w."isActive", w."lastListenerEstablished", w."isPaused", w.type, w."webhookId", w.language, w."languageVersion", w.os, w."runtimeExtra", w."sdkVersion", ww."url" AS "webhookUrl", - w."maxRuns" - ( - SELECT COUNT(*) - FROM v1_task_runtime runtime + COALESCE(( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = w."tenantId" + AND cap.worker_id = w."id" + AND cap.slot_type = 'default'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime WHERE runtime.tenant_id = w."tenantId" AND - runtime.worker_id = w."id" - ) AS "remainingSlots" + runtime.worker_id = w."id" AND + runtime.slot_type = 'default'::text + ), 0)::int AS "remainingSlots" + , + COALESCE(( + ( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = w."tenantId" + AND cap.worker_id = w."id" + AND cap.slot_type = 'durable'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE + runtime.tenant_id = w."tenantId" AND + runtime.worker_id = w."id" AND + runtime.slot_type = 'durable'::text + ) + ), 0)::int AS "remainingDurableSlots" FROM "Worker" w LEFT JOIN @@ -245,9 +265,10 @@ WHERE ` type GetWorkerByIdRow struct { - Worker Worker `json:"worker"` - WebhookUrl pgtype.Text `json:"webhookUrl"` - RemainingSlots int32 `json:"remainingSlots"` + Worker Worker `json:"worker"` + WebhookUrl pgtype.Text `json:"webhookUrl"` + RemainingSlots int32 `json:"remainingSlots"` + RemainingDurableSlots int32 `json:"remainingDurableSlots"` } func (q *Queries) GetWorkerById(ctx context.Context, db DBTX, id uuid.UUID) (*GetWorkerByIdRow, error) { @@ -262,7 +283,6 @@ func (q *Queries) GetWorkerById(ctx context.Context, db DBTX, id uuid.UUID) (*Ge &i.Worker.LastHeartbeatAt, &i.Worker.Name, &i.Worker.DispatcherId, - &i.Worker.MaxRuns, &i.Worker.IsActive, &i.Worker.LastListenerEstablished, &i.Worker.IsPaused, @@ -275,6 +295,7 @@ func (q *Queries) GetWorkerById(ctx context.Context, db DBTX, id uuid.UUID) (*Ge &i.Worker.SdkVersion, &i.WebhookUrl, &i.RemainingSlots, + &i.RemainingDurableSlots, ) return &i, err } @@ -467,6 +488,47 @@ func (q *Queries) ListActiveSDKsPerTenant(ctx context.Context, db DBTX) ([]*List return items, nil } +const listActiveSlotsPerTenantAndSlotType = `-- name: ListActiveSlotsPerTenantAndSlotType :many +SELECT + wc.tenant_id AS "tenantId", + wc.slot_type AS "slotType", + SUM(wc.max_units) AS "activeSlots" +FROM v1_worker_slot_config wc +JOIN "Worker" w ON w."id" = wc.worker_id AND w."tenantId" = wc.tenant_id +WHERE + w."dispatcherId" IS NOT NULL + AND w."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' + AND w."isActive" = true + AND w."isPaused" = false +GROUP BY 
wc.tenant_id, wc.slot_type +` + +type ListActiveSlotsPerTenantAndSlotTypeRow struct { + TenantId uuid.UUID `json:"tenantId"` + SlotType string `json:"slotType"` + ActiveSlots int64 `json:"activeSlots"` +} + +func (q *Queries) ListActiveSlotsPerTenantAndSlotType(ctx context.Context, db DBTX) ([]*ListActiveSlotsPerTenantAndSlotTypeRow, error) { + rows, err := db.Query(ctx, listActiveSlotsPerTenantAndSlotType) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*ListActiveSlotsPerTenantAndSlotTypeRow + for rows.Next() { + var i ListActiveSlotsPerTenantAndSlotTypeRow + if err := rows.Scan(&i.TenantId, &i.SlotType, &i.ActiveSlots); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const listActiveWorkersPerTenant = `-- name: ListActiveWorkersPerTenant :many SELECT "tenantId", COUNT(*) FROM "Worker" @@ -597,7 +659,7 @@ func (q *Queries) ListManyWorkerLabels(ctx context.Context, db DBTX, workerids [ const listSemaphoreSlotsWithStateForWorker = `-- name: ListSemaphoreSlotsWithStateForWorker :many SELECT - task_id, task_inserted_at, runtime.retry_count, worker_id, runtime.tenant_id, timeout_at, id, inserted_at, v1_task.tenant_id, queue, action_id, step_id, step_readable_id, workflow_id, workflow_version_id, workflow_run_id, schedule_timeout, step_timeout, priority, sticky, desired_worker_id, external_id, display_name, input, v1_task.retry_count, internal_retry_count, app_retry_count, step_index, additional_metadata, dag_id, dag_inserted_at, parent_task_external_id, parent_task_id, parent_task_inserted_at, child_index, child_key, initial_state, initial_state_reason, concurrency_parent_strategy_ids, concurrency_strategy_ids, concurrency_keys, retry_backoff_factor, retry_max_backoff + task_id, task_inserted_at, runtime.retry_count, worker_id, runtime.tenant_id, timeout_at, slot_group, id, inserted_at, v1_task.tenant_id, queue, action_id, step_id, step_readable_id, workflow_id, workflow_version_id, workflow_run_id, schedule_timeout, step_timeout, priority, sticky, desired_worker_id, external_id, display_name, input, v1_task.retry_count, internal_retry_count, app_retry_count, step_index, additional_metadata, dag_id, dag_inserted_at, parent_task_external_id, parent_task_id, parent_task_inserted_at, child_index, child_key, initial_state, initial_state_reason, concurrency_parent_strategy_ids, concurrency_strategy_ids, concurrency_keys, retry_backoff_factor, retry_max_backoff FROM v1_task_runtime runtime JOIN @@ -622,6 +684,7 @@ type ListSemaphoreSlotsWithStateForWorkerRow struct { WorkerID *uuid.UUID `json:"worker_id"` TenantID uuid.UUID `json:"tenant_id"` TimeoutAt pgtype.Timestamp `json:"timeout_at"` + SlotGroup V1WorkerSlotGroup `json:"slot_group"` ID int64 `json:"id"` InsertedAt pgtype.Timestamptz `json:"inserted_at"` TenantID_2 uuid.UUID `json:"tenant_id_2"` @@ -677,6 +740,7 @@ func (q *Queries) ListSemaphoreSlotsWithStateForWorker(ctx context.Context, db D &i.WorkerID, &i.TenantID, &i.TimeoutAt, + &i.SlotGroup, &i.ID, &i.InsertedAt, &i.TenantID_2, @@ -726,14 +790,17 @@ func (q *Queries) ListSemaphoreSlotsWithStateForWorker(ctx context.Context, db D } const listTotalActiveSlotsPerTenant = `-- name: ListTotalActiveSlotsPerTenant :many -SELECT "tenantId", SUM("maxRuns") AS "totalActiveSlots" -FROM "Worker" +SELECT + wc.tenant_id AS "tenantId", + SUM(wc.max_units) AS "totalActiveSlots" +FROM v1_worker_slot_config wc +JOIN "Worker" w ON w."id" = wc.worker_id AND w."tenantId" = 
wc.tenant_id WHERE - "dispatcherId" IS NOT NULL - AND "lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' - AND "isActive" = true - AND "isPaused" = false -GROUP BY "tenantId" + w."dispatcherId" IS NOT NULL + AND w."lastHeartbeatAt" > NOW() - INTERVAL '5 seconds' + AND w."isActive" = true + AND w."isPaused" = false +GROUP BY wc.tenant_id ` type ListTotalActiveSlotsPerTenantRow struct { @@ -809,18 +876,88 @@ func (q *Queries) ListWorkerLabels(ctx context.Context, db DBTX, workerid uuid.U return items, nil } +const listWorkerSlotConfigs = `-- name: ListWorkerSlotConfigs :many +SELECT + worker_id, + slot_type, + max_units +FROM + v1_worker_slot_config +WHERE + tenant_id = $1::uuid + AND worker_id = ANY($2::uuid[]) +` + +type ListWorkerSlotConfigsParams struct { + Tenantid uuid.UUID `json:"tenantid"` + Workerids []uuid.UUID `json:"workerids"` +} + +type ListWorkerSlotConfigsRow struct { + WorkerID uuid.UUID `json:"worker_id"` + SlotType string `json:"slot_type"` + MaxUnits int32 `json:"max_units"` +} + +func (q *Queries) ListWorkerSlotConfigs(ctx context.Context, db DBTX, arg ListWorkerSlotConfigsParams) ([]*ListWorkerSlotConfigsRow, error) { + rows, err := db.Query(ctx, listWorkerSlotConfigs, arg.Tenantid, arg.Workerids) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*ListWorkerSlotConfigsRow + for rows.Next() { + var i ListWorkerSlotConfigsRow + if err := rows.Scan(&i.WorkerID, &i.SlotType, &i.MaxUnits); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const listWorkersWithSlotCount = `-- name: ListWorkersWithSlotCount :many SELECT - workers.id, workers."createdAt", workers."updatedAt", workers."deletedAt", workers."tenantId", workers."lastHeartbeatAt", workers.name, workers."dispatcherId", workers."maxRuns", workers."isActive", workers."lastListenerEstablished", workers."isPaused", workers.type, workers."webhookId", workers.language, workers."languageVersion", workers.os, workers."runtimeExtra", workers."sdkVersion", + workers.id, workers."createdAt", workers."updatedAt", workers."deletedAt", workers."tenantId", workers."lastHeartbeatAt", workers.name, workers."dispatcherId", workers."isActive", workers."lastListenerEstablished", workers."isPaused", workers.type, workers."webhookId", workers.language, workers."languageVersion", workers.os, workers."runtimeExtra", workers."sdkVersion", ww."url" AS "webhookUrl", ww."id" AS "webhookId", - workers."maxRuns" - ( - SELECT COUNT(*) - FROM v1_task_runtime runtime + -- TODO do we still need this? 
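+    -- remaining units = configured max_units minus SUM of held
+    -- v1_task_runtime_slot.units, per slot type (mirrors workers.sql)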
+ COALESCE(( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = workers."tenantId" + AND cap.worker_id = workers."id" + AND cap.slot_type = 'default'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime WHERE runtime.tenant_id = workers."tenantId" AND - runtime.worker_id = workers."id" - ) AS "remainingSlots" + runtime.worker_id = workers."id" AND + runtime.slot_type = 'default'::text + ), 0)::int AS "remainingSlots" + , + COALESCE(( + ( + SELECT COALESCE(cap.max_units, 0) + FROM v1_worker_slot_config cap + WHERE + cap.tenant_id = workers."tenantId" + AND cap.worker_id = workers."id" + AND cap.slot_type = 'durable'::text + ) - ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE + runtime.tenant_id = workers."tenantId" AND + runtime.worker_id = workers."id" AND + runtime.slot_type = 'durable'::text + ) + ), 0)::int AS "remainingDurableSlots" FROM "Worker" workers LEFT JOIN @@ -842,11 +979,14 @@ WHERE ) AND ( $4::boolean IS NULL OR - workers."maxRuns" IS NULL OR - ($4::boolean AND workers."maxRuns" > ( - SELECT COUNT(*) - FROM "StepRun" srs - WHERE srs."workerId" = workers."id" AND srs."status" = 'RUNNING' + ($4::boolean AND ( + SELECT COALESCE(SUM(cap.max_units), 0) + FROM v1_worker_slot_config cap + WHERE cap.tenant_id = workers."tenantId" AND cap.worker_id = workers."id" + ) > ( + SELECT COALESCE(SUM(runtime.units), 0) + FROM v1_task_runtime_slot runtime + WHERE runtime.tenant_id = workers."tenantId" AND runtime.worker_id = workers."id" )) ) GROUP BY @@ -861,10 +1001,11 @@ type ListWorkersWithSlotCountParams struct { } type ListWorkersWithSlotCountRow struct { - Worker Worker `json:"worker"` - WebhookUrl pgtype.Text `json:"webhookUrl"` - WebhookId *uuid.UUID `json:"webhookId"` - RemainingSlots int32 `json:"remainingSlots"` + Worker Worker `json:"worker"` + WebhookUrl pgtype.Text `json:"webhookUrl"` + WebhookId *uuid.UUID `json:"webhookId"` + RemainingSlots int32 `json:"remainingSlots"` + RemainingDurableSlots int32 `json:"remainingDurableSlots"` } func (q *Queries) ListWorkersWithSlotCount(ctx context.Context, db DBTX, arg ListWorkersWithSlotCountParams) ([]*ListWorkersWithSlotCountRow, error) { @@ -890,7 +1031,6 @@ func (q *Queries) ListWorkersWithSlotCount(ctx context.Context, db DBTX, arg Lis &i.Worker.LastHeartbeatAt, &i.Worker.Name, &i.Worker.DispatcherId, - &i.Worker.MaxRuns, &i.Worker.IsActive, &i.Worker.LastListenerEstablished, &i.Worker.IsPaused, @@ -904,6 +1044,7 @@ func (q *Queries) ListWorkersWithSlotCount(ctx context.Context, db DBTX, arg Lis &i.WebhookUrl, &i.WebhookId, &i.RemainingSlots, + &i.RemainingDurableSlots, ); err != nil { return nil, err } @@ -921,18 +1062,16 @@ UPDATE SET "updatedAt" = CURRENT_TIMESTAMP, "dispatcherId" = coalesce($1::uuid, "dispatcherId"), - "maxRuns" = coalesce($2::int, "maxRuns"), - "lastHeartbeatAt" = coalesce($3::timestamp, "lastHeartbeatAt"), - "isActive" = coalesce($4::boolean, "isActive"), - "isPaused" = coalesce($5::boolean, "isPaused") + "lastHeartbeatAt" = coalesce($2::timestamp, "lastHeartbeatAt"), + "isActive" = coalesce($3::boolean, "isActive"), + "isPaused" = coalesce($4::boolean, "isPaused") WHERE - "id" = $6::uuid -RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "maxRuns", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" + "id" = $5::uuid +RETURNING id, "createdAt", "updatedAt", 
"deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" ` type UpdateWorkerParams struct { DispatcherId *uuid.UUID `json:"dispatcherId"` - MaxRuns pgtype.Int4 `json:"maxRuns"` LastHeartbeatAt pgtype.Timestamp `json:"lastHeartbeatAt"` IsActive pgtype.Bool `json:"isActive"` IsPaused pgtype.Bool `json:"isPaused"` @@ -942,7 +1081,6 @@ type UpdateWorkerParams struct { func (q *Queries) UpdateWorker(ctx context.Context, db DBTX, arg UpdateWorkerParams) (*Worker, error) { row := db.QueryRow(ctx, updateWorker, arg.DispatcherId, - arg.MaxRuns, arg.LastHeartbeatAt, arg.IsActive, arg.IsPaused, @@ -958,7 +1096,6 @@ func (q *Queries) UpdateWorker(ctx context.Context, db DBTX, arg UpdateWorkerPar &i.LastHeartbeatAt, &i.Name, &i.DispatcherId, - &i.MaxRuns, &i.IsActive, &i.LastListenerEstablished, &i.IsPaused, @@ -984,7 +1121,7 @@ WHERE "lastListenerEstablished" IS NULL OR "lastListenerEstablished" <= $2::timestamp ) -RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "maxRuns", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" +RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" ` type UpdateWorkerActiveStatusParams struct { @@ -1005,7 +1142,6 @@ func (q *Queries) UpdateWorkerActiveStatus(ctx context.Context, db DBTX, arg Upd &i.LastHeartbeatAt, &i.Name, &i.DispatcherId, - &i.MaxRuns, &i.IsActive, &i.LastListenerEstablished, &i.IsPaused, @@ -1028,7 +1164,7 @@ SET "lastHeartbeatAt" = $1::timestamp WHERE "id" = $2::uuid -RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "maxRuns", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" +RETURNING id, "createdAt", "updatedAt", "deletedAt", "tenantId", "lastHeartbeatAt", name, "dispatcherId", "isActive", "lastListenerEstablished", "isPaused", type, "webhookId", language, "languageVersion", os, "runtimeExtra", "sdkVersion" ` type UpdateWorkerHeartbeatParams struct { @@ -1048,7 +1184,6 @@ func (q *Queries) UpdateWorkerHeartbeat(ctx context.Context, db DBTX, arg Update &i.LastHeartbeatAt, &i.Name, &i.DispatcherId, - &i.MaxRuns, &i.IsActive, &i.LastListenerEstablished, &i.IsPaused, @@ -1155,3 +1290,42 @@ func (q *Queries) UpsertWorkerLabel(ctx context.Context, db DBTX, arg UpsertWork ) return &i, err } + +const upsertWorkerSlotConfigs = `-- name: UpsertWorkerSlotConfigs :exec +INSERT INTO v1_worker_slot_config ( + tenant_id, + worker_id, + slot_type, + max_units, + created_at, + updated_at +) +SELECT + $1::uuid, + $2::uuid, + unnest($3::text[]), + unnest($4::integer[]), + CURRENT_TIMESTAMP, + CURRENT_TIMESTAMP +ON CONFLICT (tenant_id, worker_id, slot_type) DO UPDATE +SET + max_units = EXCLUDED.max_units, + updated_at = CURRENT_TIMESTAMP +` + +type UpsertWorkerSlotConfigsParams struct { + Tenantid uuid.UUID `json:"tenantid"` + Workerid uuid.UUID `json:"workerid"` + Slottypes []string `json:"slottypes"` + Maxunits []int32 `json:"maxunits"` +} + +func (q *Queries) UpsertWorkerSlotConfigs(ctx context.Context, db DBTX, arg UpsertWorkerSlotConfigsParams) error { + _, err := 
db.Exec(ctx, upsertWorkerSlotConfigs, + arg.Tenantid, + arg.Workerid, + arg.Slottypes, + arg.Maxunits, + ) + return err +} diff --git a/pkg/repository/sqlcv1/workflows.sql b/pkg/repository/sqlcv1/workflows.sql index 8e40372a5b..edd67860e2 100644 --- a/pkg/repository/sqlcv1/workflows.sql +++ b/pkg/repository/sqlcv1/workflows.sql @@ -286,7 +286,8 @@ INSERT INTO "Step" ( "retries", "scheduleTimeout", "retryBackoffFactor", - "retryMaxBackoff" + "retryMaxBackoff", + "isDurable" ) VALUES ( @id::uuid, coalesce(sqlc.narg('createdAt')::timestamp, CURRENT_TIMESTAMP), @@ -301,9 +302,31 @@ INSERT INTO "Step" ( coalesce(sqlc.narg('retries')::integer, 0), coalesce(sqlc.narg('scheduleTimeout')::text, '5m'), sqlc.narg('retryBackoffFactor'), - sqlc.narg('retryMaxBackoff') + sqlc.narg('retryMaxBackoff'), + coalesce(sqlc.narg('isDurable')::boolean, false) ) RETURNING *; +-- name: CreateStepSlotRequests :exec +INSERT INTO v1_step_slot_request ( + tenant_id, + step_id, + slot_type, + units, + created_at, + updated_at +) +SELECT + @tenantId::uuid, + @stepId::uuid, + unnest(@slotTypes::text[]), + unnest(@units::integer[]), + CURRENT_TIMESTAMP, + CURRENT_TIMESTAMP +ON CONFLICT (tenant_id, step_id, slot_type) DO UPDATE +SET + units = EXCLUDED.units, + updated_at = CURRENT_TIMESTAMP; + -- name: AddStepParents :exec INSERT INTO "_StepOrder" ("A", "B") SELECT diff --git a/pkg/repository/sqlcv1/workflows.sql.go b/pkg/repository/sqlcv1/workflows.sql.go index f7da3109b7..32ee2d259f 100644 --- a/pkg/repository/sqlcv1/workflows.sql.go +++ b/pkg/repository/sqlcv1/workflows.sql.go @@ -167,7 +167,8 @@ INSERT INTO "Step" ( "retries", "scheduleTimeout", "retryBackoffFactor", - "retryMaxBackoff" + "retryMaxBackoff", + "isDurable" ) VALUES ( $1::uuid, coalesce($2::timestamp, CURRENT_TIMESTAMP), @@ -182,8 +183,9 @@ INSERT INTO "Step" ( coalesce($11::integer, 0), coalesce($12::text, '5m'), $13, - $14 -) RETURNING id, "createdAt", "updatedAt", "deletedAt", "readableId", "tenantId", "jobId", "actionId", timeout, "customUserData", retries, "retryBackoffFactor", "retryMaxBackoff", "scheduleTimeout" + $14, + coalesce($15::boolean, false) +) RETURNING id, "createdAt", "updatedAt", "deletedAt", "readableId", "tenantId", "jobId", "actionId", timeout, "customUserData", retries, "retryBackoffFactor", "retryMaxBackoff", "scheduleTimeout", "isDurable" ` type CreateStepParams struct { @@ -201,6 +203,7 @@ type CreateStepParams struct { ScheduleTimeout pgtype.Text `json:"scheduleTimeout"` RetryBackoffFactor pgtype.Float8 `json:"retryBackoffFactor"` RetryMaxBackoff pgtype.Int4 `json:"retryMaxBackoff"` + IsDurable pgtype.Bool `json:"isDurable"` } func (q *Queries) CreateStep(ctx context.Context, db DBTX, arg CreateStepParams) (*Step, error) { @@ -219,6 +222,7 @@ func (q *Queries) CreateStep(ctx context.Context, db DBTX, arg CreateStepParams) arg.ScheduleTimeout, arg.RetryBackoffFactor, arg.RetryMaxBackoff, + arg.IsDurable, ) var i Step err := row.Scan( @@ -236,6 +240,7 @@ func (q *Queries) CreateStep(ctx context.Context, db DBTX, arg CreateStepParams) &i.RetryBackoffFactor, &i.RetryMaxBackoff, &i.ScheduleTimeout, + &i.IsDurable, ) return &i, err } @@ -443,6 +448,45 @@ func (q *Queries) CreateStepRateLimit(ctx context.Context, db DBTX, arg CreateSt return &i, err } +const createStepSlotRequests = `-- name: CreateStepSlotRequests :exec +INSERT INTO v1_step_slot_request ( + tenant_id, + step_id, + slot_type, + units, + created_at, + updated_at +) +SELECT + $1::uuid, + $2::uuid, + unnest($3::text[]), + unnest($4::integer[]), + CURRENT_TIMESTAMP, + 
CURRENT_TIMESTAMP +ON CONFLICT (tenant_id, step_id, slot_type) DO UPDATE +SET + units = EXCLUDED.units, + updated_at = CURRENT_TIMESTAMP +` + +type CreateStepSlotRequestsParams struct { + Tenantid uuid.UUID `json:"tenantid"` + Stepid uuid.UUID `json:"stepid"` + Slottypes []string `json:"slottypes"` + Units []int32 `json:"units"` +} + +func (q *Queries) CreateStepSlotRequests(ctx context.Context, db DBTX, arg CreateStepSlotRequestsParams) error { + _, err := db.Exec(ctx, createStepSlotRequests, + arg.Tenantid, + arg.Stepid, + arg.Slottypes, + arg.Units, + ) + return err +} + const createWorkflow = `-- name: CreateWorkflow :one INSERT INTO "Workflow" ( "id", @@ -984,7 +1028,7 @@ func (q *Queries) GetLatestWorkflowVersionForWorkflows(ctx context.Context, db D const getStepsForJobs = `-- name: GetStepsForJobs :many SELECT j."id" as "jobId", - s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", + s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", s."isDurable", ( SELECT array_agg(so."A")::uuid[] -- Casting the array_agg result to uuid[] FROM "_StepOrder" so @@ -1034,6 +1078,7 @@ func (q *Queries) GetStepsForJobs(ctx context.Context, db DBTX, arg GetStepsForJ &i.Step.RetryBackoffFactor, &i.Step.RetryMaxBackoff, &i.Step.ScheduleTimeout, + &i.Step.IsDurable, &i.Parents, ); err != nil { return nil, err @@ -1550,7 +1595,7 @@ func (q *Queries) ListStepMatchConditions(ctx context.Context, db DBTX, arg List const listStepsByIds = `-- name: ListStepsByIds :many SELECT - s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", + s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", s."isDurable", wv."id" as "workflowVersionId", wv."sticky" as "workflowVersionSticky", w."name" as "workflowName", @@ -1599,6 +1644,7 @@ type ListStepsByIdsRow struct { RetryBackoffFactor pgtype.Float8 `json:"retryBackoffFactor"` RetryMaxBackoff pgtype.Int4 `json:"retryMaxBackoff"` ScheduleTimeout string `json:"scheduleTimeout"` + IsDurable bool `json:"isDurable"` WorkflowVersionId uuid.UUID `json:"workflowVersionId"` WorkflowVersionSticky NullStickyStrategy `json:"workflowVersionSticky"` WorkflowName string `json:"workflowName"` @@ -1632,6 +1678,7 @@ func (q *Queries) ListStepsByIds(ctx context.Context, db DBTX, arg ListStepsById &i.RetryBackoffFactor, &i.RetryMaxBackoff, &i.ScheduleTimeout, + &i.IsDurable, &i.WorkflowVersionId, &i.WorkflowVersionSticky, &i.WorkflowName, @@ -1653,7 +1700,7 @@ func (q *Queries) ListStepsByIds(ctx context.Context, db DBTX, arg ListStepsById const listStepsByWorkflowVersionIds = `-- name: ListStepsByWorkflowVersionIds :many WITH steps AS ( SELECT - s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", + s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", 
s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", s."isDurable", wv."id" as "workflowVersionId", w."name" as "workflowName", w."id" as "workflowId", @@ -1688,7 +1735,7 @@ WITH steps AS ( so."B" ) SELECT - s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", s."workflowVersionId", s."workflowName", s."workflowId", s."jobKind", s."matchConditionCount", + s.id, s."createdAt", s."updatedAt", s."deletedAt", s."readableId", s."tenantId", s."jobId", s."actionId", s.timeout, s."customUserData", s.retries, s."retryBackoffFactor", s."retryMaxBackoff", s."scheduleTimeout", s."isDurable", s."workflowVersionId", s."workflowName", s."workflowId", s."jobKind", s."matchConditionCount", COALESCE(so."parents", '{}'::uuid[]) as "parents" FROM steps s @@ -1716,6 +1763,7 @@ type ListStepsByWorkflowVersionIdsRow struct { RetryBackoffFactor pgtype.Float8 `json:"retryBackoffFactor"` RetryMaxBackoff pgtype.Int4 `json:"retryMaxBackoff"` ScheduleTimeout string `json:"scheduleTimeout"` + IsDurable bool `json:"isDurable"` WorkflowVersionId uuid.UUID `json:"workflowVersionId"` WorkflowName string `json:"workflowName"` WorkflowId uuid.UUID `json:"workflowId"` @@ -1748,6 +1796,7 @@ func (q *Queries) ListStepsByWorkflowVersionIds(ctx context.Context, db DBTX, ar &i.RetryBackoffFactor, &i.RetryMaxBackoff, &i.ScheduleTimeout, + &i.IsDurable, &i.WorkflowVersionId, &i.WorkflowName, &i.WorkflowId, diff --git a/pkg/repository/tenant.go b/pkg/repository/tenant.go index bd8618bbb8..3250d759c3 100644 --- a/pkg/repository/tenant.go +++ b/pkg/repository/tenant.go @@ -171,8 +171,6 @@ type TenantRepository interface { RebalanceInactiveTenantWorkerPartitions(ctx context.Context) error DeleteTenant(ctx context.Context, id uuid.UUID) error - - GetTenantUsageData(ctx context.Context, tenantId uuid.UUID) (*sqlcv1.GetTenantUsageDataRow, error) } type tenantRepository struct { @@ -792,10 +790,6 @@ func (r *tenantRepository) DeleteTenant(ctx context.Context, id uuid.UUID) error return r.queries.DeleteTenant(ctx, r.pool, id) } -func (r *tenantRepository) GetTenantUsageData(ctx context.Context, tenantId uuid.UUID) (*sqlcv1.GetTenantUsageDataRow, error) { - return r.queries.GetTenantUsageData(ctx, r.pool, tenantId) -} - func getPartitionName() pgtype.Text { hostname, ok := os.LookupEnv("HOSTNAME") diff --git a/pkg/repository/tenant_limit.go b/pkg/repository/tenant_limit.go index dbe20c203f..4f5354bd4d 100644 --- a/pkg/repository/tenant_limit.go +++ b/pkg/repository/tenant_limit.go @@ -260,10 +260,17 @@ func (t *tenantLimitRepository) GetLimits(ctx context.Context, tenantId uuid.UUI } if limit.Resource == sqlcv1.LimitResourceWORKERSLOT { - workerSlotCount, err := t.queries.CountTenantWorkerSlots(ctx, t.pool, tenantId) + totalSlotsRows, err := t.queries.ListTotalActiveSlotsPerTenant(ctx, t.pool) if err != nil { return nil, err } + var workerSlotCount int32 + for _, row := range totalSlotsRows { + if row.TenantId == tenantId { + workerSlotCount = int32(row.TotalActiveSlots) // nolint: gosec + break + } + } limit.Value = workerSlotCount } diff --git a/pkg/repository/worker.go b/pkg/repository/worker.go index 6bf5c6c8dc..3b57daf184 100644 --- a/pkg/repository/worker.go +++ b/pkg/repository/worker.go @@ -28,7 +28,13 @@ type CreateWorkerOpts struct { DispatcherId uuid.UUID `validate:"required"` // The maximum number of runs this worker 
can run at a time - MaxRuns *int `validate:"omitempty,gte=1"` + Slots *int `validate:"omitempty,gte=1"` + + // The maximum number of durable runs this worker can run at a time + DurableSlots *int `validate:"omitempty,gte=0"` + + // Slot config for this worker (slot_type -> max units) + SlotConfig map[string]int32 `validate:"omitempty"` // The name of the worker Name string `validate:"required,hatchetName"` @@ -77,8 +83,8 @@ type UpsertWorkerLabelOpts struct { type WorkerRepository interface { ListWorkers(tenantId uuid.UUID, opts *ListWorkersOpts) ([]*sqlcv1.ListWorkersWithSlotCountRow, error) GetWorkerById(workerId uuid.UUID) (*sqlcv1.GetWorkerByIdRow, error) - ListWorkerState(tenantId uuid.UUID, workerId uuid.UUID, maxRuns int) ([]*sqlcv1.ListSemaphoreSlotsWithStateForWorkerRow, error) - CountActiveSlotsPerTenant() (map[uuid.UUID]int64, error) + ListTotalActiveSlotsPerTenant() (map[uuid.UUID]int64, error) + ListActiveSlotsPerTenantAndSlotType() (map[TenantIdSlotTypeTuple]int64, error) CountActiveWorkersPerTenant() (map[uuid.UUID]int64, error) ListActiveSDKsPerTenant() (map[TenantIdSDKTuple]int64, error) @@ -91,6 +97,12 @@ type WorkerRepository interface { // ListWorkerLabels returns a list of labels config for a worker ListWorkerLabels(tenantId uuid.UUID, workerId uuid.UUID) ([]*sqlcv1.ListWorkerLabelsRow, error) + // ListWorkerSlotConfigs returns slot config for workers. + ListWorkerSlotConfigs(tenantId uuid.UUID, workerIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error) + + // ListAvailableSlotsForWorkers returns available slot units by worker for a slot type. + ListAvailableSlotsForWorkers(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID, slotType string) (map[uuid.UUID]int32, error) + // CreateNewWorker creates a new worker for a given tenant. 
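+	// When opts.SlotConfig is empty, a 'default' slot config is derived from
+	// opts.Slots (defaulting to 100), plus a 'durable' entry when
+	// opts.DurableSlots is set and positive; otherwise opts.SlotConfig is
+	// upserted as-is. Caller-side sketch (field values illustrative):
+	//
+	//	worker, err := repo.CreateNewWorker(ctx, tenantId, &CreateWorkerOpts{
+	//		Name:         "my-worker",
+	//		DispatcherId: dispatcherId,
+	//		SlotConfig:   map[string]int32{"default": 50, "durable": 5},
+	//	})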
CreateNewWorker(ctx context.Context, tenantId uuid.UUID, opts *CreateWorkerOpts) (*sqlcv1.Worker, error) @@ -166,39 +178,6 @@ func (w *workerRepository) GetWorkerById(workerId uuid.UUID) (*sqlcv1.GetWorkerB return w.queries.GetWorkerById(context.Background(), w.pool, workerId) } -func (w *workerRepository) ListWorkerState(tenantId uuid.UUID, workerId uuid.UUID, maxRuns int) ([]*sqlcv1.ListSemaphoreSlotsWithStateForWorkerRow, error) { - slots, err := w.queries.ListSemaphoreSlotsWithStateForWorker(context.Background(), w.pool, sqlcv1.ListSemaphoreSlotsWithStateForWorkerParams{ - Workerid: workerId, - Tenantid: tenantId, - Limit: pgtype.Int4{ - Int32: int32(maxRuns), // nolint: gosec - Valid: true, - }, - }) - - if err != nil { - return nil, fmt.Errorf("could not list worker slot state: %w", err) - } - - return slots, nil -} - -func (w *workerRepository) CountActiveSlotsPerTenant() (map[uuid.UUID]int64, error) { - slots, err := w.queries.ListTotalActiveSlotsPerTenant(context.Background(), w.pool) - - if err != nil { - return nil, fmt.Errorf("could not list active slots per tenant: %w", err) - } - - tenantToSlots := make(map[uuid.UUID]int64) - - for _, slot := range slots { - tenantToSlots[slot.TenantId] = slot.TotalActiveSlots - } - - return tenantToSlots, nil -} - type SDK struct { OperatingSystem string Language string @@ -211,6 +190,11 @@ type TenantIdSDKTuple struct { SDK SDK } +type TenantIdSlotTypeTuple struct { + TenantId uuid.UUID + SlotType string +} + func (w *workerRepository) ListActiveSDKsPerTenant() (map[TenantIdSDKTuple]int64, error) { sdks, err := w.queries.ListActiveSDKsPerTenant(context.Background(), w.pool) @@ -238,6 +222,37 @@ func (w *workerRepository) ListActiveSDKsPerTenant() (map[TenantIdSDKTuple]int64 return tenantIdSDKTupleToCount, nil } +func (w *workerRepository) ListTotalActiveSlotsPerTenant() (map[uuid.UUID]int64, error) { + rows, err := w.queries.ListTotalActiveSlotsPerTenant(context.Background(), w.pool) + if err != nil { + return nil, fmt.Errorf("could not list total active slots per tenant: %w", err) + } + + tenantToSlots := make(map[uuid.UUID]int64, len(rows)) + for _, row := range rows { + tenantToSlots[row.TenantId] = row.TotalActiveSlots + } + + return tenantToSlots, nil +} + +func (w *workerRepository) ListActiveSlotsPerTenantAndSlotType() (map[TenantIdSlotTypeTuple]int64, error) { + rows, err := w.queries.ListActiveSlotsPerTenantAndSlotType(context.Background(), w.pool) + if err != nil { + return nil, fmt.Errorf("could not list active slots per tenant and slot type: %w", err) + } + + res := make(map[TenantIdSlotTypeTuple]int64, len(rows)) + for _, row := range rows { + res[TenantIdSlotTypeTuple{ + TenantId: row.TenantId, + SlotType: row.SlotType, + }] = row.ActiveSlots + } + + return res, nil +} + func (w *workerRepository) CountActiveWorkersPerTenant() (map[uuid.UUID]int64, error) { workers, err := w.queries.ListActiveWorkersPerTenant(context.Background(), w.pool) @@ -291,6 +306,46 @@ func (w *workerRepository) ListWorkerLabels(tenantId uuid.UUID, workerId uuid.UU return w.queries.ListWorkerLabels(context.Background(), w.pool, workerId) } +func (w *workerRepository) ListWorkerSlotConfigs(tenantId uuid.UUID, workerIds []uuid.UUID) (map[uuid.UUID]map[string]int32, error) { + rows, err := w.queries.ListWorkerSlotConfigs(context.Background(), w.pool, sqlcv1.ListWorkerSlotConfigsParams{ + Tenantid: tenantId, + Workerids: workerIds, + }) + + if err != nil { + return nil, err + } + + res := make(map[uuid.UUID]map[string]int32) + for _, row := range rows { + 
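+		// group rows into workerId -> slotType -> maxUnits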
if _, ok := res[row.WorkerID]; !ok { + res[row.WorkerID] = make(map[string]int32) + } + res[row.WorkerID][row.SlotType] = row.MaxUnits + } + + return res, nil +} + +func (w *workerRepository) ListAvailableSlotsForWorkers(ctx context.Context, tenantId uuid.UUID, workerIds []uuid.UUID, slotType string) (map[uuid.UUID]int32, error) { + rows, err := w.queries.ListAvailableSlotsForWorkers(ctx, w.pool, sqlcv1.ListAvailableSlotsForWorkersParams{ + Tenantid: tenantId, + Workerids: workerIds, + Slottype: slotType, + }) + + if err != nil { + return nil, fmt.Errorf("could not list available slots for workers: %w", err) + } + + res := make(map[uuid.UUID]int32, len(rows)) + for _, row := range rows { + res[row.ID] = row.AvailableSlots + } + + return res, nil +} + func (w *workerRepository) GetWorkerForEngine(ctx context.Context, tenantId uuid.UUID, workerId uuid.UUID) (*sqlcv1.GetWorkerForEngineRow, error) { return w.queries.GetWorkerForEngine(ctx, w.pool, sqlcv1.GetWorkerForEngineParams{ ID: workerId, @@ -305,13 +360,27 @@ func (w *workerRepository) CreateNewWorker(ctx context.Context, tenantId uuid.UU return nil, err } - maxRuns := int32(100) + slotConfig := opts.SlotConfig + slots := int32(0) - if opts.MaxRuns != nil { - maxRuns = int32(*opts.MaxRuns) // nolint: gosec + if len(slotConfig) == 0 { + slots = 100 + if opts.Slots != nil { + slots = int32(*opts.Slots) // nolint: gosec + } + + if opts.DurableSlots != nil && *opts.DurableSlots > 0 { + slots += int32(*opts.DurableSlots) // nolint: gosec + } + } else { + for _, units := range slotConfig { + if units > 0 { + slots += units + } + } } - preWorkerSlot, postWorkerSlot := w.m.Meter(ctx, sqlcv1.LimitResourceWORKERSLOT, tenantId, maxRuns) + preWorkerSlot, postWorkerSlot := w.m.Meter(ctx, sqlcv1.LimitResourceWORKERSLOT, tenantId, slots) if err := preWorkerSlot(); err != nil { return nil, err @@ -341,18 +410,6 @@ func (w *workerRepository) CreateNewWorker(ctx context.Context, tenantId uuid.UU Valid: true, } - if opts.MaxRuns != nil { - createParams.MaxRuns = pgtype.Int4{ - Int32: int32(*opts.MaxRuns), // nolint: gosec - Valid: true, - } - } else { - createParams.MaxRuns = pgtype.Int4{ - Int32: 100, - Valid: true, - } - } - var worker *sqlcv1.Worker if opts.RuntimeInfo != nil { @@ -391,11 +448,45 @@ func (w *workerRepository) CreateNewWorker(ctx context.Context, tenantId uuid.UU } } - if worker == nil { - worker, err = w.queries.CreateWorker(ctx, tx, createParams) + worker, err = w.queries.CreateWorker(ctx, tx, createParams) + + if err != nil { + return nil, fmt.Errorf("could not create worker: %w", err) + } + + slotTypes := make([]string, 0) + maxUnits := make([]int32, 0) + + if len(slotConfig) > 0 { + for slotType, units := range slotConfig { + slotTypes = append(slotTypes, slotType) + maxUnits = append(maxUnits, units) + } + } else { + defaultUnits := int32(100) + + if opts.Slots != nil { + defaultUnits = int32(*opts.Slots) // nolint: gosec + } + + slotTypes = append(slotTypes, SlotTypeDefault) + maxUnits = append(maxUnits, defaultUnits) + + if opts.DurableSlots != nil && *opts.DurableSlots > 0 { + slotTypes = append(slotTypes, SlotTypeDurable) + maxUnits = append(maxUnits, int32(*opts.DurableSlots)) // nolint: gosec + } + } + if len(slotTypes) > 0 { + err = w.queries.UpsertWorkerSlotConfigs(ctx, tx, sqlcv1.UpsertWorkerSlotConfigsParams{ + Tenantid: tenantId, + Workerid: worker.ID, + Slottypes: slotTypes, + Maxunits: maxUnits, + }) if err != nil { - return nil, fmt.Errorf("could not create worker: %w", err) + return nil, fmt.Errorf("could not upsert 
worker slot config: %w", err) } } diff --git a/pkg/repository/workflow.go b/pkg/repository/workflow.go index dbb6da3dea..3fe05509a4 100644 --- a/pkg/repository/workflow.go +++ b/pkg/repository/workflow.go @@ -100,6 +100,12 @@ type CreateStepOpts struct { // (optional) the step retry backoff max seconds (can't be greater than 86400) RetryBackoffMaxSeconds *int `validate:"omitnil,min=1,max=86400"` + // (optional) whether this step is durable + IsDurable bool + + // (optional) slot requests for this step (slot_type -> units) + SlotRequests map[string]int32 `validate:"omitempty"` + // (optional) a list of additional trigger conditions TriggerConditions []CreateStepMatchConditionOpt `validate:"omitempty,dive"` @@ -727,6 +733,10 @@ func (r *workflowRepository) createJobTx(ctx context.Context, tx sqlcv1.DBTX, te Readableid: stepOpts.ReadableId, CustomUserData: customUserData, Retries: retries, + IsDurable: pgtype.Bool{ + Bool: stepOpts.IsDurable, + Valid: true, + }, } if stepOpts.ScheduleTimeout != nil { @@ -757,6 +767,45 @@ func (r *workflowRepository) createJobTx(ctx context.Context, tx sqlcv1.DBTX, te return nil, err } + slotRequests := stepOpts.SlotRequests + if len(slotRequests) == 0 { + if stepOpts.IsDurable { + slotRequests = map[string]int32{SlotTypeDurable: 1} + } else { + slotRequests = map[string]int32{SlotTypeDefault: 1} + } + } + + slotTypes := make([]string, 0, len(slotRequests)) + units := make([]int32, 0, len(slotRequests)) + for slotType, unit := range slotRequests { + if unit <= 0 { + continue + } + slotTypes = append(slotTypes, slotType) + units = append(units, unit) + } + + if len(slotTypes) == 0 { + slotTypes = append(slotTypes, SlotTypeDefault) + units = append(units, 1) + } + + err = r.queries.CreateStepSlotRequests( + ctx, + tx, + sqlcv1.CreateStepSlotRequestsParams{ + Tenantid: tenantId, + Stepid: stepId, + Slottypes: slotTypes, + Units: units, + }, + ) + + if err != nil { + return nil, err + } + if len(stepOpts.DesiredWorkerLabels) > 0 { for i := range stepOpts.DesiredWorkerLabels { key := (stepOpts.DesiredWorkerLabels)[i].Key diff --git a/pkg/scheduling/v1/action.go b/pkg/scheduling/v1/action.go index 1d54247cfd..7c40a21038 100644 --- a/pkg/scheduling/v1/action.go +++ b/pkg/scheduling/v1/action.go @@ -3,17 +3,20 @@ package v1 import ( "slices" "sync" + + "github.com/google/uuid" ) type action struct { - mu sync.RWMutex - actionId string + mu sync.RWMutex + actionId string lastReplenishedSlotCount int lastReplenishedWorkerCount int // note that slots can be used across multiple actions, hence the pointer - slots []*slot + slots []*slot + slotsByWorker map[uuid.UUID]map[string][]*slot } func (a *action) activeCount() int { diff --git a/pkg/scheduling/v1/queuer.go b/pkg/scheduling/v1/queuer.go index 29ef0efb93..66bc8fdbe8 100644 --- a/pkg/scheduling/v1/queuer.go +++ b/pkg/scheduling/v1/queuer.go @@ -207,7 +207,21 @@ func (q *Queuer) loopQueue(ctx context.Context) { desiredLabelsTime := time.Since(checkpoint) checkpoint = time.Now() - assignCh := q.s.tryAssign(ctx, qis, labels, rls) + stepRequests, err := q.repo.GetStepSlotRequests(ctx, stepIds) + + if err != nil { + span.RecordError(err) + span.End() + q.l.Error().Err(err).Msg("error getting step slot requests") + + q.unackedToUnassigned(qis) + continue + } + + getSlotRequestsTime := time.Since(checkpoint) + checkpoint = time.Now() + + assignCh := q.s.tryAssign(ctx, qis, labels, stepRequests, rls) count := 0 countMu := sync.Mutex{} @@ -286,6 +300,8 @@ func (q *Queuer) loopQueue(ctx context.Context) { "rate_limit_time", 
rateLimitTime, ).Dur( "desired_labels_time", desiredLabelsTime, + ).Dur( + "get_slot_requests_time", getSlotRequestsTime, ).Dur( "assign_time", assignTime, ).Msgf("queue %s took longer than 100ms (%s) to process %d items", q.queueName, elapsed, len(qis)) @@ -594,7 +610,12 @@ func (q *Queuer) runOptimisticQueue( return nil, nil, err } - assignCh := q.s.tryAssign(ctx, qis, labels, rls) + stepRequests, err := q.repo.GetStepSlotRequests(ctx, stepIds) + if err != nil { + return nil, nil, err + } + + assignCh := q.s.tryAssign(ctx, qis, labels, stepRequests, rls) var allLocalAssigned []*v1.AssignedItem var allQueueResults []*QueueResults diff --git a/pkg/scheduling/v1/scheduler.go b/pkg/scheduling/v1/scheduler.go index 7b24cf2b67..b391cd3bf2 100644 --- a/pkg/scheduling/v1/scheduler.go +++ b/pkg/scheduling/v1/scheduler.go @@ -38,7 +38,7 @@ type Scheduler struct { // unackedSlots are slots which have been assigned to a worker, but have not been flushed // to the database yet. They negatively count towards a worker's available slot count. - unackedSlots map[int]*slot + unackedSlots map[int]*assignedSlots unackedMu mutex rl *rateLimiter @@ -53,7 +53,7 @@ func newScheduler(cf *sharedConfig, tenantId uuid.UUID, rl *rateLimiter, exts *E tenantId: tenantId, l: &l, actions: make(map[string]*action), - unackedSlots: make(map[int]*slot), + unackedSlots: make(map[int]*assignedSlots), rl: rl, actionsMu: newRWMu(cf.l), replenishMu: newMu(cf.l), @@ -69,8 +69,8 @@ func (s *Scheduler) ack(ids []int) { defer s.unackedMu.Unlock() for _, id := range ids { - if slot, ok := s.unackedSlots[id]; ok { - slot.ack() + if assigned, ok := s.unackedSlots[id]; ok { + assigned.ack() delete(s.unackedSlots, id) } } @@ -81,13 +81,45 @@ func (s *Scheduler) nack(ids []int) { defer s.unackedMu.Unlock() for _, id := range ids { - if slot, ok := s.unackedSlots[id]; ok { - slot.nack() + if assigned, ok := s.unackedSlots[id]; ok { + assigned.nack() delete(s.unackedSlots, id) } } } +type assignedSlots struct { + slots []*slot + rateLimitAck func() + rateLimitNack func() +} + +func (a *assignedSlots) workerId() uuid.UUID { + if len(a.slots) == 0 { + return uuid.Nil + } + + return a.slots[0].getWorkerId() +} + +func (a *assignedSlots) ack() { + for _, slot := range a.slots { + slot.ack() + } + if a.rateLimitAck != nil { + a.rateLimitAck() + } +} + +func (a *assignedSlots) nack() { + for _, slot := range a.slots { + slot.nack() + } + if a.rateLimitNack != nil { + a.rateLimitNack() + } +} + func (s *Scheduler) setWorkers(workers []*v1.ListActiveWorkersResult) { s.workersMu.Lock() defer s.workersMu.Unlock() @@ -110,6 +142,24 @@ func (s *Scheduler) getWorkers() map[uuid.UUID]*worker { return s.workers } +func (s *Scheduler) ensureAction(actionId string) *action { + s.actionsMu.Lock() + defer s.actionsMu.Unlock() + + if existing, ok := s.actions[actionId]; ok { + return existing + } + + newAction := &action{ + actionId: actionId, + slotsByWorker: make(map[uuid.UUID]map[string][]*slot), + } + + s.actions[actionId] = newAction + + return newAction +} + // replenish loads new slots from the database. 
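+// With slot types, replenishment happens per (worker, slot_type): it loads
+// v1_worker_slot_config for the relevant workers, queries available units for
+// each slot type separately, subtracts unacked slots of the same type, and
+// rebuilds both the flat slot list and the per-worker slotsByWorker index for
+// every action.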
func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { if ok := s.replenishMu.TryLock(); !ok { @@ -185,7 +235,8 @@ func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { // if the action is not in the map, it should be replenished if _, ok := s.actions[actionId]; !ok { newAction := &action{ - actionId: actionId, + actionId: actionId, + slotsByWorker: make(map[uuid.UUID]map[string][]*slot), } actionsToReplenish[actionId] = newAction @@ -240,20 +291,26 @@ func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { checkpoint = time.Now() // FUNCTION 2: for each action which should be replenished, load the available slots - uniqueWorkerIds := make(map[uuid.UUID]bool) + workerSlotConfigs, err := s.repo.ListWorkerSlotConfigs(ctx, s.tenantId, workerIds) + if err != nil { + return err + } - for actionId := range actionsToReplenish { - workerIds := actionsToWorkerIds[actionId] + workerSlotTypes := make(map[uuid.UUID]map[string]bool, len(workerSlotConfigs)) + slotTypeToWorkerIds := make(map[string]map[uuid.UUID]bool) - for _, workerId := range workerIds { - uniqueWorkerIds[workerId] = true + for _, config := range workerSlotConfigs { + if _, ok := workerSlotTypes[config.WorkerID]; !ok { + workerSlotTypes[config.WorkerID] = make(map[string]bool) } - } - workerUUIDs := make([]uuid.UUID, 0, len(uniqueWorkerIds)) + workerSlotTypes[config.WorkerID][config.SlotType] = true + + if _, ok := slotTypeToWorkerIds[config.SlotType]; !ok { + slotTypeToWorkerIds[config.SlotType] = make(map[uuid.UUID]bool) + } - for workerId := range uniqueWorkerIds { - workerUUIDs = append(workerUUIDs, workerId) + slotTypeToWorkerIds[config.SlotType][config.WorkerID] = true } orderedLock(actionsToReplenish) @@ -263,59 +320,105 @@ func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { s.unackedMu.Lock() defer s.unackedMu.Unlock() - availableSlots, err := s.repo.ListAvailableSlotsForWorkers(ctx, s.tenantId, sqlcv1.ListAvailableSlotsForWorkersParams{ - Tenantid: s.tenantId, - Workerids: workerUUIDs, - }) + availableSlotsByType := make(map[string]map[uuid.UUID]int, len(slotTypeToWorkerIds)) - if err != nil { - return err + for slotType, workerSet := range slotTypeToWorkerIds { + workerUUIDs := make([]uuid.UUID, 0, len(workerSet)) + + for workerId := range workerSet { + workerUUIDs = append(workerUUIDs, workerId) + } + + if len(workerUUIDs) == 0 { + continue + } + + availableSlots, err := s.repo.ListAvailableSlotsForWorkers(ctx, s.tenantId, sqlcv1.ListAvailableSlotsForWorkersParams{ + Tenantid: s.tenantId, + Workerids: workerUUIDs, + Slottype: slotType, + }) + + if err != nil { + return err + } + + if _, ok := availableSlotsByType[slotType]; !ok { + availableSlotsByType[slotType] = make(map[uuid.UUID]int, len(availableSlots)) + } + + for _, row := range availableSlots { + availableSlotsByType[slotType][row.ID] = int(row.AvailableSlots) + } } s.l.Debug().Msgf("loading available slots took %s", time.Since(checkpoint)) // FUNCTION 3: list unacked slots (so they're not counted towards the worker slot count) - workersToUnackedSlots := make(map[uuid.UUID][]*slot) + workersToUnackedSlots := make(map[uuid.UUID]map[string][]*slot) for _, unackedSlot := range s.unackedSlots { - s := unackedSlot - workerId := s.getWorkerId() + for _, assignedSlot := range unackedSlot.slots { + workerId := assignedSlot.getWorkerId() + slotType := assignedSlot.getSlotType() - if _, ok := workersToUnackedSlots[workerId]; !ok { - workersToUnackedSlots[workerId] = make([]*slot, 
0) - } + if _, ok := workersToUnackedSlots[workerId]; !ok { + workersToUnackedSlots[workerId] = make(map[string][]*slot) + } - workersToUnackedSlots[workerId] = append(workersToUnackedSlots[workerId], s) + workersToUnackedSlots[workerId][slotType] = append(workersToUnackedSlots[workerId][slotType], assignedSlot) + } } // FUNCTION 4: write the new slots to the scheduler and clean up expired slots actionsToNewSlots := make(map[string][]*slot) actionsToTotalSlots := make(map[string]int) - for _, worker := range availableSlots { - workerId := worker.ID - actions := workerIdsToActions[workerId] - unackedSlots := workersToUnackedSlots[workerId] + actionsToSlotsByWorker := make(map[string]map[uuid.UUID]map[string][]*slot) - // create a slot for each available slot - slots := make([]*slot, 0) + for slotType, availableSlotsByWorker := range availableSlotsByType { + for workerId, availableSlots := range availableSlotsByWorker { + actions := workerIdsToActions[workerId] + unackedSlots := workersToUnackedSlots[workerId][slotType] - for i := 0; i < int(worker.AvailableSlots)-len(unackedSlots); i++ { - slots = append(slots, newSlot(workers[workerId], actions)) - } + // create a slot for each available slot + slots := make([]*slot, 0) + availableCount := availableSlots - len(unackedSlots) + if availableCount < 0 { + availableCount = 0 + } - // extend expiry of all unacked slots - for _, unackedSlot := range unackedSlots { - unackedSlot.extendExpiry() - } + for i := 0; i < availableCount; i++ { + slots = append(slots, newSlot(workers[workerId], actions, slotType)) + } + + // extend expiry of all unacked slots + for _, unackedSlot := range unackedSlots { + unackedSlot.extendExpiry() + } + + s.l.Debug().Msgf("worker %s has %d total slots (%s), %d unacked slots", workerId, availableSlots, slotType, len(unackedSlots)) + + slots = append(slots, unackedSlots...) + + for _, actionId := range actions { + if s.actions[actionId] == nil { + continue + } - s.l.Debug().Msgf("worker %s has %d total slots, %d unacked slots", workerId, worker.AvailableSlots, len(unackedSlots)) + actionsToNewSlots[actionId] = append(actionsToNewSlots[actionId], slots...) + actionsToTotalSlots[actionId] += len(slots) - slots = append(slots, unackedSlots...) + if _, ok := actionsToSlotsByWorker[actionId]; !ok { + actionsToSlotsByWorker[actionId] = make(map[uuid.UUID]map[string][]*slot) + } + + if _, ok := actionsToSlotsByWorker[actionId][workerId]; !ok { + actionsToSlotsByWorker[actionId][workerId] = make(map[string][]*slot) + } - for _, actionId := range actions { - actionsToNewSlots[actionId] = append(actionsToNewSlots[actionId], slots...) - actionsToTotalSlots[actionId] += len(slots) + actionsToSlotsByWorker[actionId][workerId][slotType] = slots + } } } @@ -330,6 +433,7 @@ func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { // we overwrite the slots for the action. we know that the action is in the map because we checked // for it in the first pass. 
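+	// slotsByWorker is rebuilt alongside the flat slice so assignment can look
+	// up a given worker's slots by slot type without scanning every slot.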
s.actions[actionId].slots = newSlots + s.actions[actionId].slotsByWorker = actionsToSlotsByWorker[actionId] s.actions[actionId].lastReplenishedSlotCount = actionsToTotalSlots[actionId] s.actions[actionId].lastReplenishedWorkerCount = len(actionsToWorkerIds[actionId]) @@ -341,14 +445,26 @@ func (s *Scheduler) replenish(ctx context.Context, mustReplenish bool) error { newSlots := make([]*slot, 0, len(storedAction.slots)) for i := range storedAction.slots { - slot := storedAction.slots[i] + slotItem := storedAction.slots[i] - if !slot.expired() { - newSlots = append(newSlots, slot) + if !slotItem.expired() { + newSlots = append(newSlots, slotItem) } } storedAction.slots = newSlots + storedAction.slotsByWorker = make(map[uuid.UUID]map[string][]*slot) + + for _, slotItem := range newSlots { + workerId := slotItem.getWorkerId() + slotType := slotItem.getSlotType() + + if _, ok := storedAction.slotsByWorker[workerId]; !ok { + storedAction.slotsByWorker[workerId] = make(map[string][]*slot) + } + + storedAction.slotsByWorker[workerId][slotType] = append(storedAction.slotsByWorker[workerId][slotType], slotItem) + } s.l.Debug().Msgf("after cleanup, action %s has %d slots", storedAction.actionId, len(newSlots)) } @@ -463,6 +579,7 @@ func (s *Scheduler) tryAssignBatch( // slots concurrently. ringOffset int, stepIdsToLabels map[uuid.UUID][]*sqlcv1.GetDesiredLabelsRow, + stepIdsToRequests map[uuid.UUID]map[string]int32, taskIdsToRateLimits map[int64]map[string]int32, ) ( res []*assignSingleResult, newRingOffset int, err error, @@ -489,6 +606,12 @@ func (s *Scheduler) tryAssignBatch( } } + // Get or create the action and try to assign the batch of queue items. + // NOTE: if we change the position of these locks, make sure that we are still acquiring locks in the same + // order as the replenish() function, otherwise we may deadlock. The order is: + // actionsMu -> action.mu -> unackedMu + action := s.ensureAction(actionId) + rlAcks := make([]func(), len(qis)) rlNacks := make([]func(), len(qis)) @@ -530,15 +653,9 @@ func (s *Scheduler) tryAssignBatch( rlNacks[i] = rateLimitNack } - // lock the actions map and try to assign the batch of queue items. - // NOTE: if we change the position of this lock, make sure that we are still acquiring locks in the same - // order as the replenish() function, otherwise we may deadlock. 
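Why the NOTE above insists on a single global order: if one goroutine takes `action.mu` and then `unackedMu` while another takes the same pair in reverse, each can end up holding one lock while waiting on the other. A minimal sketch of the `actionsMu -> action.mu -> unackedMu` discipline, with stand-in types rather than the scheduler's own:

```go
package main

import (
	"fmt"
	"sync"
)

type action struct{ mu sync.Mutex }

type scheduler struct {
	actionsMu sync.RWMutex // acquired 1st: guards the actions map
	unackedMu sync.Mutex   // acquired 3rd: guards unacked slots
	actions   map[string]*action
}

// withAction acquires locks in the documented order. As long as every code
// path follows the same order (and never the reverse), no cycle of lock
// waits, and hence no deadlock, can form.
func (s *scheduler) withAction(actionId string, fn func(*action)) {
	s.actionsMu.RLock()
	a := s.actions[actionId]
	s.actionsMu.RUnlock()

	a.mu.Lock() // acquired 2nd: per-action lock
	defer a.mu.Unlock()

	s.unackedMu.Lock()
	defer s.unackedMu.Unlock()

	fn(a)
}

func main() {
	s := &scheduler{actions: map[string]*action{"act": {}}}
	s.withAction("act", func(a *action) { fmt.Println("locked in order") })
}
```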
- s.actionsMu.RLock() - - action, ok := s.actions[actionId] - - if !ok || len(action.slots) == 0 { - s.actionsMu.RUnlock() + action.mu.RLock() + if len(action.slots) == 0 { + action.mu.RUnlock() s.l.Debug().Msgf("no slots for action %s", actionId) @@ -550,8 +667,7 @@ func (s *Scheduler) tryAssignBatch( return res, newRingOffset, nil } - - s.actionsMu.RUnlock() + action.mu.RUnlock() action.mu.Lock() defer action.mu.Unlock() @@ -576,12 +692,16 @@ func (s *Scheduler) tryAssignBatch( qi := qis[i] + requests := normalizeSlotRequests(stepIdsToRequests[qi.StepID]) + singleRes, err := s.tryAssignSingleton( ctx, qi, + action, candidateSlots, childRingOffset, stepIdsToLabels[qi.StepID], + requests, rlAcks[i], rlNacks[i], ) @@ -603,36 +723,127 @@ func (s *Scheduler) tryAssignBatch( return res, newRingOffset, nil } -func findSlot( +func findAssignableSlots( candidateSlots []*slot, + action *action, + requests map[string]int32, rateLimitAck func(), rateLimitNack func(), -) *slot { - var assignedSlot *slot +) *assignedSlots { + for _, candidateSlot := range candidateSlots { + if !candidateSlot.active() { + continue + } - for _, slot := range candidateSlots { - if !slot.active() { + workerId := candidateSlot.getWorkerId() + workerSlots := action.slotsByWorker[workerId] + if len(workerSlots) == 0 { continue } - if !slot.use([]func(){rateLimitAck}, []func(){rateLimitNack}) { + selected, ok := selectSlotsForWorker(workerSlots, requests) + if !ok { continue } - assignedSlot = slot - break + usedSlots := make([]*slot, 0, len(selected)) + success := true + + for _, selectedSlot := range selected { + if !selectedSlot.use(nil, nil) { + success = false + break + } + usedSlots = append(usedSlots, selectedSlot) + } + + if !success { + // Release partially allocated slots + for _, usedSlot := range usedSlots { + usedSlot.nack() + } + continue + } + + // Rate limit callbacks are stored at assignedSlots level, + // not on individual slots. They're called once when the + // entire assignment is acked/nacked. + return &assignedSlots{ + slots: usedSlots, + rateLimitAck: rateLimitAck, + rateLimitNack: rateLimitNack, + } } - return assignedSlot + return nil } +func selectSlotsForWorker(workerSlots map[string][]*slot, requests map[string]int32) ([]*slot, bool) { + selected := make([]*slot, 0) + + for slotType, units := range requests { + if units <= 0 { + continue + } + + slots, ok := workerSlots[slotType] + if !ok { + return nil, false + } + + needed := int(units) + activeSlots := make([]*slot, 0, needed) + + for _, slotItem := range slots { + if !slotItem.active() { + continue + } + activeSlots = append(activeSlots, slotItem) + if len(activeSlots) >= needed { + break + } + } + + if len(activeSlots) < needed { + return nil, false + } + + selected = append(selected, activeSlots...) + } + + return selected, true +} + +func normalizeSlotRequests(requests map[string]int32) map[string]int32 { + if len(requests) == 0 { + return map[string]int32{v1.SlotTypeDefault: 1} + } + + normalized := make(map[string]int32, len(requests)) + for slotType, units := range requests { + if units <= 0 { + continue + } + normalized[slotType] = units + } + + if len(normalized) == 0 { + return map[string]int32{v1.SlotTypeDefault: 1} + } + + return normalized +} + + // tryAssignSingleton attempts to assign a singleton step to a worker. 
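`findAssignableSlots` above either acquires every selected slot on a worker or none of them. A condensed model of that rollback step, assuming (as the loop does) that `use` can fail when a slot is taken or expires between selection and acquisition:

```go
package main

import "fmt"

// Toy slot: use() fails if the slot is no longer available.
type slot struct{ available bool }

func (s *slot) use() bool {
	if !s.available {
		return false
	}
	s.available = false
	return true
}

func (s *slot) nack() { s.available = true }

// acquireAll is the all-or-nothing pattern from findAssignableSlots: if any
// slot in the selection cannot be used, every slot already taken is nacked,
// so a partial allocation never leaks capacity.
func acquireAll(selected []*slot) ([]*slot, bool) {
	used := make([]*slot, 0, len(selected))
	for _, s := range selected {
		if !s.use() {
			for _, u := range used {
				u.nack()
			}
			return nil, false
		}
		used = append(used, s)
	}
	return used, true
}

func main() {
	slots := []*slot{{available: true}, {available: false}}
	_, ok := acquireAll(slots)
	fmt.Println(ok, slots[0].available) // false true: the first slot was rolled back
}
```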
func (s *Scheduler) tryAssignSingleton( ctx context.Context, qi *sqlcv1.V1QueueItem, + action *action, candidateSlots []*slot, ringOffset int, labels []*sqlcv1.GetDesiredLabelsRow, + requests map[string]int32, rateLimitAck func(), rateLimitNack func(), ) ( @@ -650,10 +861,10 @@ func (s *Scheduler) tryAssignSingleton( ringOffset = 0 } - assignedSlot := findSlot(candidateSlots[ringOffset:], rateLimitAck, rateLimitNack) + assignedSlot := findAssignableSlots(candidateSlots[ringOffset:], action, requests, rateLimitAck, rateLimitNack) if assignedSlot == nil { - assignedSlot = findSlot(candidateSlots[:ringOffset], rateLimitAck, rateLimitNack) + assignedSlot = findAssignableSlots(candidateSlots[:ringOffset], action, requests, rateLimitAck, rateLimitNack) } if assignedSlot == nil { @@ -670,7 +881,7 @@ func (s *Scheduler) tryAssignSingleton( s.unackedSlots[res.ackId] = assignedSlot s.unackedMu.Unlock() - res.workerId = assignedSlot.getWorkerId() + res.workerId = assignedSlot.workerId() res.succeeded = true return res, nil @@ -695,6 +906,7 @@ func (s *Scheduler) tryAssign( ctx context.Context, qis []*sqlcv1.V1QueueItem, stepIdsToLabels map[uuid.UUID][]*sqlcv1.GetDesiredLabelsRow, + stepIdsToRequests map[uuid.UUID]map[string]int32, taskIdsToRateLimits map[int64]map[string]int32, ) <-chan *assignResults { ctx, span := telemetry.NewSpan(ctx, "try-assign") @@ -741,7 +953,6 @@ func (s *Scheduler) tryAssign( batched := make([]*sqlcv1.V1QueueItem, 0) schedulingTimedOut := make([]*sqlcv1.V1QueueItem, 0, len(qis)) - for i := range qis { qi := qis[i] @@ -765,7 +976,7 @@ func (s *Scheduler) tryAssign( batchStart := time.Now() - results, newRingOffset, err := s.tryAssignBatch(ctx, actionId, batchQis, ringOffset, stepIdsToLabels, taskIdsToRateLimits) + results, newRingOffset, err := s.tryAssignBatch(ctx, actionId, batchQis, ringOffset, stepIdsToLabels, stepIdsToRequests, taskIdsToRateLimits) if err != nil { return err @@ -872,7 +1083,6 @@ func (s *Scheduler) getSnapshotInput(mustSnapshot bool) (*SnapshotInput, bool) { for workerId, worker := range workers { res.Workers[workerId] = &WorkerCp{ WorkerId: workerId, - MaxRuns: worker.MaxRuns, Labels: worker.Labels, Name: worker.Name, } diff --git a/pkg/scheduling/v1/slot.go b/pkg/scheduling/v1/slot.go index f3dab7e1d3..d303a13ed3 100644 --- a/pkg/scheduling/v1/slot.go +++ b/pkg/scheduling/v1/slot.go @@ -14,8 +14,9 @@ import ( const defaultSlotExpiry = 1500 * time.Millisecond type slot struct { - worker *worker - actions []string + worker *worker + actions []string + slotType string // expiresAt is when the slot is no longer valid, but has not been cleaned up yet expiresAt *time.Time @@ -29,12 +30,13 @@ type slot struct { mu sync.RWMutex } -func newSlot(worker *worker, actions []string) *slot { +func newSlot(worker *worker, actions []string, slotType string) *slot { expires := time.Now().Add(defaultSlotExpiry) return &slot{ worker: worker, actions: actions, + slotType: slotType, expiresAt: &expires, } } @@ -43,6 +45,10 @@ func (s *slot) getWorkerId() uuid.UUID { return s.worker.ID } +func (s *slot) getSlotType() string { + return s.slotType +} + func (s *slot) extendExpiry() { s.mu.Lock() defer s.mu.Unlock() diff --git a/pkg/scheduling/v1/slot_test.go b/pkg/scheduling/v1/slot_test.go index ce11cd358b..b056d0c67a 100644 --- a/pkg/scheduling/v1/slot_test.go +++ b/pkg/scheduling/v1/slot_test.go @@ -36,8 +36,8 @@ func TestGetRankedSlots(t *testing.T) { DesiredWorkerID: &stableWorkerId1, }, slots: []*slot{ - newSlot(&worker{ListActiveWorkersResult: 
&v1.ListActiveWorkersResult{ID: stableWorkerId1}}, []string{}), - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: stableWorkerId1}}, []string{}, "default"), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}, "default"), }, expectedWorker: []string{stableWorkerId1.String()}, }, @@ -48,8 +48,8 @@ func TestGetRankedSlots(t *testing.T) { DesiredWorkerID: ptrUUID(uuid.New().String()), }, slots: []*slot{ - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}), - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}, "default"), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: uuid.New()}}, []string{}, "default"), }, expectedWorker: []string{}, }, @@ -60,9 +60,9 @@ func TestGetRankedSlots(t *testing.T) { DesiredWorkerID: &stableWorkerId1, }, slots: []*slot{ - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId2)}}, []string{}), - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1)}}, []string{}), - newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1)}}, []string{}), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId2)}}, []string{}, "default"), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1)}}, []string{}, "default"), + newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1)}}, []string{}, "default"), }, expectedWorker: []string{stableWorkerId1.String(), stableWorkerId1.String(), stableWorkerId2.String()}, }, @@ -89,14 +89,14 @@ func TestGetRankedSlots(t *testing.T) { newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1), Labels: []*sqlcv1.ListManyWorkerLabelsRow{{ Key: "key1", IntValue: pgtype.Int4{Int32: 2, Valid: true}, - }}}}, []string{}), + }}}}, []string{}, "default"), newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId2), Labels: []*sqlcv1.ListManyWorkerLabelsRow{{ Key: "key1", IntValue: pgtype.Int4{Int32: 4, Valid: true}, }, { Key: "key2", IntValue: pgtype.Int4{Int32: 4, Valid: true}, - }}}}, []string{}), + }}}}, []string{}, "default"), }, expectedWorker: []string{stableWorkerId2.String(), stableWorkerId1.String()}, }, @@ -116,7 +116,7 @@ func TestGetRankedSlots(t *testing.T) { newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId1), Labels: []*sqlcv1.ListManyWorkerLabelsRow{{ Key: "key1", IntValue: pgtype.Int4{Int32: 1, Valid: true}, - }}}}, []string{}), + }}}}, []string{}, "default"), }, expectedWorker: []string{stableWorkerId1.String()}, }, @@ -136,7 +136,7 @@ func TestGetRankedSlots(t *testing.T) { newSlot(&worker{ListActiveWorkersResult: &v1.ListActiveWorkersResult{ID: (stableWorkerId2), Labels: []*sqlcv1.ListManyWorkerLabelsRow{{ Key: "key1", IntValue: pgtype.Int4{Int32: 2, Valid: true}, - }}}}, []string{}), + }}}}, []string{}, "default"), }, expectedWorker: []string{}, }, @@ -154,3 +154,26 @@ func TestGetRankedSlots(t *testing.T) { }) } } + +func TestSelectSlotsForWorker(t *testing.T) { + workerId := uuid.New() + worker := &worker{ListActiveWorkersResult: 
&v1.ListActiveWorkersResult{ID: workerId}} + + slotsByType := map[string][]*slot{ + "cpu": { + newSlot(worker, []string{}, "cpu"), + newSlot(worker, []string{}, "cpu"), + newSlot(worker, []string{}, "cpu"), + }, + "mem": { + newSlot(worker, []string{}, "mem"), + }, + } + + selected, ok := selectSlotsForWorker(slotsByType, map[string]int32{"cpu": 2, "mem": 1}) + assert.True(t, ok) + assert.Len(t, selected, 3) + + _, ok = selectSlotsForWorker(slotsByType, map[string]int32{"cpu": 4}) + assert.False(t, ok) +} diff --git a/pkg/telemetry/metrics.go b/pkg/telemetry/metrics.go index 81fe6754b9..106554024b 100644 --- a/pkg/telemetry/metrics.go +++ b/pkg/telemetry/metrics.go @@ -28,9 +28,10 @@ type MetricsRecorder struct { yesterdayRunCountGauge metric.Int64Gauge // Worker metrics - activeSlotsGauge metric.Int64Gauge - activeWorkersGauge metric.Int64Gauge - activeSDKsGauge metric.Int64Gauge + activeSlotsGauge metric.Int64Gauge + activeSlotsByKeyGauge metric.Int64Gauge + activeWorkersGauge metric.Int64Gauge + activeSDKsGauge metric.Int64Gauge } // NewMetricsRecorder creates a new metrics recorder with all instruments registered @@ -120,6 +121,14 @@ func NewMetricsRecorder(ctx context.Context) (*MetricsRecorder, error) { return nil, fmt.Errorf("failed to create active slots gauge: %w", err) } + activeSlotsByKeyGauge, err := meter.Int64Gauge( + "hatchet.workers.active_slots.by_key", + metric.WithDescription("Number of active worker slots per tenant and slot key"), + ) + if err != nil { + return nil, fmt.Errorf("failed to create active slots by key gauge: %w", err) + } + activeWorkersGauge, err := meter.Int64Gauge( "hatchet.workers.active_count", metric.WithDescription("Number of active workers per tenant"), @@ -148,6 +157,7 @@ func NewMetricsRecorder(ctx context.Context) (*MetricsRecorder, error) { olapTempTableSizeTaskGauge: olapTempTableSizeTaskGauge, yesterdayRunCountGauge: yesterdayRunCountGauge, activeSlotsGauge: activeSlotsGauge, + activeSlotsByKeyGauge: activeSlotsByKeyGauge, activeWorkersGauge: activeWorkersGauge, activeSDKsGauge: activeSDKsGauge, }, nil @@ -210,6 +220,15 @@ func (m *MetricsRecorder) RecordActiveSlots(ctx context.Context, tenantId uuid.U metric.WithAttributes(attribute.String("tenant_id", tenantId.String()))) } +// RecordActiveSlotsByKey records the number of active worker slots by key +func (m *MetricsRecorder) RecordActiveSlotsByKey(ctx context.Context, tenantId uuid.UUID, slotKey string, count int64) { + m.activeSlotsByKeyGauge.Record(ctx, count, + metric.WithAttributes( + attribute.String("tenant_id", tenantId.String()), + attribute.String("slot_key", slotKey), + )) +} + // RecordActiveWorkers records the number of active workers func (m *MetricsRecorder) RecordActiveWorkers(ctx context.Context, tenantId uuid.UUID, count int64) { m.activeWorkersGauge.Record(ctx, count, diff --git a/pkg/v1/task/task.go b/pkg/v1/task/task.go index 8e81dab43f..d0a4912b7f 100644 --- a/pkg/v1/task/task.go +++ b/pkg/v1/task/task.go @@ -275,6 +275,7 @@ func (t *TaskDeclaration[I]) Dump(workflowName string, taskDefaults *create.Task base := makeContractTaskOpts(&t.TaskShared, taskDefaults) base.ReadableId = t.Name base.Action = getActionID(workflowName, t.Name) + base.IsDurable = false base.Parents = make([]string, len(t.Parents)) copy(base.Parents, t.Parents) @@ -327,6 +328,7 @@ func (t *DurableTaskDeclaration[I]) Dump(workflowName string, taskDefaults *crea base := makeContractTaskOpts(&t.TaskShared, taskDefaults) base.ReadableId = t.Name base.Action = getActionID(workflowName, t.Name) + 
base.IsDurable = true base.Parents = make([]string, len(t.Parents)) copy(base.Parents, t.Parents) return base @@ -338,6 +340,7 @@ func (t *OnFailureTaskDeclaration[I]) Dump(workflowName string, taskDefaults *cr base.ReadableId = "on-failure" base.Action = getActionID(workflowName, "on-failure") + base.IsDurable = false return base } diff --git a/pkg/v1/worker/worker.go b/pkg/v1/worker/worker.go index 2ae4f2afef..2605431587 100644 --- a/pkg/v1/worker/worker.go +++ b/pkg/v1/worker/worker.go @@ -73,12 +73,9 @@ type WorkerImpl struct { // v1 workers client workers features.WorkersClient - // nonDurableWorker is the underlying non-durable worker implementation. (default) + // nonDurableWorker is the underlying main worker implementation. nonDurableWorker *worker.Worker - // durableWorker is the underlying worker implementation for durable tasks. - durableWorker *worker.Worker - // name is the friendly name of the worker. name string @@ -154,16 +151,16 @@ func (w *WorkerImpl) RegisterWorkflows(workflows ...workflow.WorkflowBase) error for _, workflow := range workflows { dump, fns, durableFns, onFailureFn := workflow.Dump() - // Check if there are non-durable tasks in this workflow - hasNonDurableTasks := len(fns) > 0 || (dump.OnFailureTask != nil && onFailureFn != nil) - hasDurableTasks := len(durableFns) > 0 + hasAnyTasks := len(fns) > 0 || len(durableFns) > 0 || (dump.OnFailureTask != nil && onFailureFn != nil) - // Create non-durable worker on demand if needed and not already created - if hasNonDurableTasks && w.nonDurableWorker == nil { + // Create worker on demand if needed and not already created + if hasAnyTasks && w.nonDurableWorker == nil { + totalRuns := w.slots + w.durableSlots opts := []worker.WorkerOpt{ worker.WithClient(w.v0), worker.WithName(w.name), - worker.WithMaxRuns(w.slots), + worker.WithSlots(totalRuns), + worker.WithDurableSlots(w.durableSlots), worker.WithLogger(w.logger), worker.WithLogLevel(w.logLevel), worker.WithLabels(w.labels), @@ -182,38 +179,7 @@ func (w *WorkerImpl) RegisterWorkflows(workflows ...workflow.WorkflowBase) error w.nonDurableWorker = nonDurableWorker } - // Create durable worker on demand if needed and not already created - if hasDurableTasks && w.durableWorker == nil { - // Reuse logger from main worker if exists - var logger *zerolog.Logger - if w.nonDurableWorker != nil { - logger = w.nonDurableWorker.Logger() - } - - labels := make(map[string]interface{}) - for k, v := range w.labels { - labels[k] = fmt.Sprintf("%v-durable", v) - } - - opts := []worker.WorkerOpt{ - worker.WithClient(w.v0), - worker.WithName(w.name + "-durable"), - worker.WithMaxRuns(w.durableSlots), - worker.WithLogger(logger), - worker.WithLogLevel(w.logLevel), - worker.WithLabels(labels), - } - - durableWorker, err := worker.NewWorker( - opts..., - ) - if err != nil { - return err - } - w.durableWorker = durableWorker - } - - // Register workflow with non-durable worker if it exists + // Register workflow with worker if it exists if w.nonDurableWorker != nil { err := w.nonDurableWorker.RegisterWorkflowV1(dump) if err != nil { @@ -228,24 +194,17 @@ func (w *WorkerImpl) RegisterWorkflows(workflows ...workflow.WorkflowBase) error } } - if dump.OnFailureTask != nil && onFailureFn != nil { - actionId := dump.OnFailureTask.Action - err := w.nonDurableWorker.RegisterAction(actionId, onFailureFn) + // Register durable actions on the same worker + for _, namedFn := range durableFns { + err := w.nonDurableWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) if err != nil { return err } } 
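With the separate durable worker removed, the single worker is sized for both kinds of work. A small illustration of the capacity math in `RegisterWorkflows` above, with made-up slot counts:

```go
package main

import "fmt"

func main() {
	// Illustrative numbers only: the worker advertises slots+durableSlots
	// as its total, and passes durableSlots separately so the server can
	// account for durable work on the same registration.
	slots, durableSlots := 100, 1000

	total := slots + durableSlots // what WithSlots receives
	fmt.Printf("one worker: %d total slots, %d of them durable\n", total, durableSlots)
	// one worker: 1100 total slots, 1000 of them durable
}
```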
- } - - // Register durable actions with durable worker - if w.durableWorker != nil { - err := w.durableWorker.RegisterWorkflowV1(dump) - if err != nil { - return err - } - for _, namedFn := range durableFns { - err := w.durableWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) + if dump.OnFailureTask != nil && onFailureFn != nil { + actionId := dump.OnFailureTask.Action + err := w.nonDurableWorker.RegisterAction(actionId, onFailureFn) if err != nil { return err } @@ -265,9 +224,6 @@ func (w *WorkerImpl) Start() (func() error, error) { if w.nonDurableWorker != nil { workers = append(workers, w.nonDurableWorker) } - if w.durableWorker != nil { - workers = append(workers, w.durableWorker) - } // Track cleanup functions with a mutex to safely access from multiple goroutines var cleanupFuncs []func() error @@ -349,11 +305,6 @@ func (w *WorkerImpl) IsPaused(ctx context.Context) (bool, error) { workerIDs = append(workerIDs, *mainID) } - if w.durableWorker != nil { - durableID := w.durableWorker.ID() - workerIDs = append(workerIDs, *durableID) - } - // If no workers exist, consider it not paused if len(workerIDs) == 0 { return false, nil @@ -386,14 +337,6 @@ func (w *WorkerImpl) Pause(ctx context.Context) error { } } - // Pause durable worker if it exists - if w.durableWorker != nil { - _, err := w.workers.Pause(ctx, *w.durableWorker.ID()) - if err != nil { - return err - } - } - return nil } @@ -407,13 +350,5 @@ func (w *WorkerImpl) Unpause(ctx context.Context) error { } } - // Unpause durable worker if it exists - if w.durableWorker != nil { - _, err := w.workers.Unpause(ctx, *w.durableWorker.ID()) - if err != nil { - return err - } - } - return nil } diff --git a/pkg/worker/context.go b/pkg/worker/context.go index 4d3299d900..6dd2361745 100644 --- a/pkg/worker/context.go +++ b/pkg/worker/context.go @@ -407,7 +407,7 @@ func (h *hatchetContext) SpawnWorkflow(workflowName string, input any, opts *Spa input, &client.ChildWorkflowOpts{ ParentId: h.WorkflowRunId(), - ParentStepRunId: h.StepRunId(), + ParentTaskRunId: h.StepRunId(), ChildIndex: childIndex, ChildKey: opts.Key, DesiredWorkerId: desiredWorker, @@ -467,7 +467,7 @@ func (h *hatchetContext) SpawnWorkflows(childWorkflows []*SpawnWorkflowsOpts) ([ Input: c.Input, Opts: &client.ChildWorkflowOpts{ ParentId: h.WorkflowRunId(), - ParentStepRunId: h.StepRunId(), + ParentTaskRunId: h.StepRunId(), ChildIndex: childIndex, ChildKey: c.Key, DesiredWorkerId: desiredWorker, diff --git a/pkg/worker/worker.go b/pkg/worker/worker.go index 9c7c45e7c7..40cef12c06 100644 --- a/pkg/worker/worker.go +++ b/pkg/worker/worker.go @@ -100,7 +100,9 @@ type Worker struct { middlewares *middlewares - maxRuns *int + slots *int + durableSlots *int + slotConfig map[string]int32 initActionNames []string @@ -118,9 +120,11 @@ type WorkerOpts struct { name string l *zerolog.Logger - integrations []integrations.Integration - alerter errors.Alerter - maxRuns *int + integrations []integrations.Integration + alerter errors.Alerter + slots *int + durableSlots *int + slotConfig map[string]int32 actions []string @@ -166,9 +170,27 @@ func WithErrorAlerter(alerter errors.Alerter) WorkerOpt { } } +// Deprecated: use WithSlots instead. func WithMaxRuns(maxRuns int) WorkerOpt { + return WithSlots(maxRuns) +} + +// WithSlots sets the number of concurrent slots this worker can handle. 
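+//
+// For example (illustrative usage, not part of this change):
+//
+//	w, err := worker.NewWorker(worker.WithSlots(10)) // at most ten concurrent runs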
+func WithSlots(slots int) WorkerOpt { + return func(opts *WorkerOpts) { + opts.slots = &slots + } +} + +func WithDurableSlots(durableSlots int) WorkerOpt { return func(opts *WorkerOpts) { - opts.maxRuns = &maxRuns + opts.durableSlots = &durableSlots + } +} + +func WithSlotConfig(slotConfig map[string]int32) WorkerOpt { + return func(opts *WorkerOpts) { + opts.slotConfig = slotConfig } } @@ -232,6 +254,10 @@ func NewWorker(fs ...WorkerOpt) (*Worker, error) { opts.l = &l } + if opts.slotConfig != nil && (opts.slots != nil || opts.durableSlots != nil) { + return nil, fmt.Errorf("cannot set both slot config and slots/durable slots") + } + w := &Worker{ client: opts.client, name: opts.name, @@ -239,7 +265,9 @@ func NewWorker(fs ...WorkerOpt) (*Worker, error) { actions: ActionRegistry{}, alerter: opts.alerter, middlewares: mws, - maxRuns: opts.maxRuns, + slots: opts.slots, + durableSlots: opts.durableSlots, + slotConfig: opts.slotConfig, initActionNames: opts.actions, labels: opts.labels, registered_workflows: map[string]bool{}, @@ -454,10 +482,12 @@ func (w *Worker) startBlocking(ctx context.Context) error { _ = NewManagedCompute(&w.actions, w.client, 1) listener, id, err := w.client.Dispatcher().GetActionListener(ctx, &client.GetActionListenerRequest{ - WorkerName: w.name, - Actions: actionNames, - MaxRuns: w.maxRuns, - Labels: w.labels, + WorkerName: w.name, + Actions: actionNames, + Slots: w.slots, + Labels: w.labels, + DurableSlots: w.durableSlots, + SlotConfig: w.slotConfig, }) w.id = id diff --git a/sdks/go/client.go b/sdks/go/client.go index 9a5a3891d3..ba19cbeb4a 100644 --- a/sdks/go/client.go +++ b/sdks/go/client.go @@ -49,11 +49,18 @@ func NewClient(opts ...v0Client.ClientOpt) (*Client, error) { // Worker represents a worker that can execute workflows. type Worker struct { - nonDurable *worker.Worker - durable *worker.Worker - name string + worker *worker.Worker + name string } +// slotType represents supported slot types (internal use). +type slotType string + +const ( + slotTypeDefault slotType = "default" + slotTypeDurable slotType = "durable" +) + // NewWorker creates a worker that can execute workflows. func (c *Client) NewWorker(name string, options ...WorkerOption) (*Worker, error) { config := &workerConfig{ @@ -65,10 +72,26 @@ func (c *Client) NewWorker(name string, options ...WorkerOption) (*Worker, error opt(config) } + dumps := gatherWorkflowDumps(config.workflows) + + slotConfig := resolveWorkerSlotConfig(map[slotType]int{}, dumps) + workerOpts := []worker.WorkerOpt{ worker.WithClient(c.legacyClient), worker.WithName(name), - worker.WithMaxRuns(config.slots), + } + + if len(slotConfig) > 0 { + slotConfigMap := make(map[string]int32, len(slotConfig)) + for key, value := range slotConfig { + slotConfigMap[string(key)] = int32(value) + } + workerOpts = append(workerOpts, worker.WithSlotConfig(slotConfigMap)) + } else { + workerOpts = append(workerOpts, + worker.WithSlots(config.slots), + worker.WithDurableSlots(config.durableSlots), + ) } if config.logger != nil { @@ -79,67 +102,40 @@ func (c *Client) NewWorker(name string, options ...WorkerOption) (*Worker, error workerOpts = append(workerOpts, worker.WithLabels(config.labels)) } - nonDurableWorker, err := worker.NewWorker(workerOpts...) + mainWorker, err := worker.NewWorker(workerOpts...) 
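`NewWorker` now rejects mixing `WithSlotConfig` with `WithSlots`/`WithDurableSlots`. A self-contained sketch of that functional-options validation, using stand-in types rather than the ones in `pkg/worker`:

```go
package main

import (
	"errors"
	"fmt"
)

type workerOpts struct {
	slots        *int
	durableSlots *int
	slotConfig   map[string]int32
}

type workerOpt func(*workerOpts)

func withSlots(n int) workerOpt        { return func(o *workerOpts) { o.slots = &n } }
func withDurableSlots(n int) workerOpt { return func(o *workerOpts) { o.durableSlots = &n } }
func withSlotConfig(c map[string]int32) workerOpt {
	return func(o *workerOpts) { o.slotConfig = c }
}

// newWorker applies the options, then enforces the same mutual exclusion
// as the check added to NewWorker in this diff.
func newWorker(fs ...workerOpt) (*workerOpts, error) {
	o := &workerOpts{}
	for _, f := range fs {
		f(o)
	}
	if o.slotConfig != nil && (o.slots != nil || o.durableSlots != nil) {
		return nil, errors.New("cannot set both slot config and slots/durable slots")
	}
	return o, nil
}

func main() {
	_, err := newWorker(withSlots(100), withSlotConfig(map[string]int32{"cpu": 8}))
	fmt.Println(err) // cannot set both slot config and slots/durable slots
}
```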
if err != nil { return nil, err } if config.panicHandler != nil { - nonDurableWorker.SetPanicHandler(config.panicHandler) + mainWorker.SetPanicHandler(config.panicHandler) } - var durableWorker *worker.Worker - - for _, workflow := range config.workflows { - req, regularActions, durableActions, onFailureFn := workflow.Dump() - hasDurableTasks := len(durableActions) > 0 - - if hasDurableTasks { - if durableWorker == nil { - durableWorkerOpts := workerOpts - durableWorkerOpts = append(durableWorkerOpts, worker.WithName(name+"-durable")) - durableWorkerOpts = append(durableWorkerOpts, worker.WithMaxRuns(config.durableSlots)) - - durableWorker, err = worker.NewWorker(durableWorkerOpts...) - if err != nil { - return nil, err - } - - if config.panicHandler != nil { - durableWorker.SetPanicHandler(config.panicHandler) - } - } - - err := durableWorker.RegisterWorkflowV1(req) - if err != nil { - return nil, err - } - } else { - err := nonDurableWorker.RegisterWorkflowV1(req) - if err != nil { - return nil, err - } + for _, dump := range dumps { + err := mainWorker.RegisterWorkflowV1(dump.req) + if err != nil { + return nil, err } - for _, namedFn := range durableActions { - err = durableWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) + for _, namedFn := range dump.durableActions { + err = mainWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) if err != nil { return nil, err } } - for _, namedFn := range regularActions { - err = nonDurableWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) + for _, namedFn := range dump.regularActions { + err = mainWorker.RegisterAction(namedFn.ActionID, namedFn.Fn) if err != nil { return nil, err } } // Register on failure function if exists - if req.OnFailureTask != nil && onFailureFn != nil { - actionId := req.OnFailureTask.Action - err = nonDurableWorker.RegisterAction(actionId, func(ctx worker.HatchetContext) (any, error) { - return onFailureFn(ctx) + if dump.req.OnFailureTask != nil && dump.onFailureFn != nil { + actionId := dump.req.OnFailureTask.Action + err = mainWorker.RegisterAction(actionId, func(ctx worker.HatchetContext) (any, error) { + return dump.onFailureFn(ctx) }) if err != nil { return nil, err @@ -148,22 +144,93 @@ func (c *Client) NewWorker(name string, options ...WorkerOption) (*Worker, error } return &Worker{ - nonDurable: nonDurableWorker, - durable: durableWorker, - name: name, + worker: mainWorker, + name: name, }, nil } +type workflowDump struct { + req *v1.CreateWorkflowVersionRequest + regularActions []internal.NamedFunction + durableActions []internal.NamedFunction + onFailureFn internal.WrappedTaskFn +} + +func gatherWorkflowDumps(workflows []WorkflowBase) []workflowDump { + dumps := make([]workflowDump, 0, len(workflows)) + for _, workflow := range workflows { + req, regularActions, durableActions, onFailureFn := workflow.Dump() + dumps = append(dumps, workflowDump{ + req: req, + regularActions: regularActions, + durableActions: durableActions, + onFailureFn: onFailureFn, + }) + } + return dumps +} + +func resolveWorkerSlotConfig( + slotConfig map[slotType]int, + dumps []workflowDump, +) map[slotType]int { + requiredSlotTypes := map[slotType]bool{} + addFromRequests := func(requests map[string]int32) { + if requests == nil { + return + } + if _, ok := requests[string(slotTypeDefault)]; ok { + requiredSlotTypes[slotTypeDefault] = true + } + if _, ok := requests[string(slotTypeDurable)]; ok { + requiredSlotTypes[slotTypeDurable] = true + } + } + + for _, dump := range dumps { + for _, task := range dump.req.Tasks { + 
addFromRequests(task.SlotRequests) + } + if dump.req.OnFailureTask != nil { + addFromRequests(dump.req.OnFailureTask.SlotRequests) + } + } + + if len(dumps) > 0 { + for _, dump := range dumps { + for _, task := range dump.req.Tasks { + if task.IsDurable { + requiredSlotTypes[slotTypeDurable] = true + break + } + } + } + } + + if requiredSlotTypes[slotTypeDefault] { + if _, ok := slotConfig[slotTypeDefault]; !ok { + slotConfig[slotTypeDefault] = 100 + } + } + if requiredSlotTypes[slotTypeDurable] { + if _, ok := slotConfig[slotTypeDurable]; !ok { + slotConfig[slotTypeDurable] = 1000 + } + } + + if len(slotConfig) == 0 { + slotConfig[slotTypeDefault] = 100 + } + + return slotConfig +} + // Starts the worker instance and returns a cleanup function. func (w *Worker) Start() (func() error, error) { var workers []*worker.Worker - if w.nonDurable != nil { - workers = append(workers, w.nonDurable) - } - - if w.durable != nil { - workers = append(workers, w.durable) + if w.worker != nil { + workers = append(workers, w.worker) } // Track cleanup functions with a mutex to safely access from multiple goroutines diff --git a/sdks/go/internal/task/task.go b/sdks/go/internal/task/task.go index 0a469dd48f..508d130ab3 100644 --- a/sdks/go/internal/task/task.go +++ b/sdks/go/internal/task/task.go @@ -19,6 +19,11 @@ type TaskBase interface { Dump(workflowName string, taskDefaults *create.TaskDefaults) *contracts.CreateTaskOpts } +const ( + slotTypeDefault = "default" + slotTypeDurable = "durable" +) + type TaskShared struct { // ExecutionTimeout specifies the maximum duration a task can run before being terminated ExecutionTimeout *time.Duration @@ -231,6 +236,10 @@ func (t *TaskDeclaration[I]) Dump(workflowName string, taskDefaults *create.Task base := makeContractTaskOpts(&t.TaskShared, taskDefaults) base.ReadableId = t.Name base.Action = getActionID(workflowName, t.Name) + base.IsDurable = false + if base.SlotRequests == nil { + base.SlotRequests = map[string]int32{slotTypeDefault: 1} + } base.Parents = make([]string, len(t.Parents)) copy(base.Parents, t.Parents) @@ -283,6 +292,10 @@ func (t *DurableTaskDeclaration[I]) Dump(workflowName string, taskDefaults *crea base := makeContractTaskOpts(&t.TaskShared, taskDefaults) base.ReadableId = t.Name base.Action = getActionID(workflowName, t.Name) + base.IsDurable = true + if base.SlotRequests == nil { + base.SlotRequests = map[string]int32{slotTypeDurable: 1} + } base.Parents = make([]string, len(t.Parents)) copy(base.Parents, t.Parents) return base @@ -294,6 +307,10 @@ func (t *OnFailureTaskDeclaration[I]) Dump(workflowName string, taskDefaults *cr base.ReadableId = "on-failure" base.Action = getActionID(workflowName, "on-failure") + base.IsDurable = false + if base.SlotRequests == nil { + base.SlotRequests = map[string]int32{slotTypeDefault: 1} + } return base } diff --git a/sdks/go/slot_capacities_test.go b/sdks/go/slot_capacities_test.go new file mode 100644 index 0000000000..dc65475428 --- /dev/null +++ b/sdks/go/slot_capacities_test.go @@ -0,0 +1,83 @@ +package hatchet + +import ( + "testing" + + v1 "github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1" +) + +func TestResolveWorkerSlotConfig_NoDurable(t *testing.T) { + dumps := []workflowDump{ + { + req: &v1.CreateWorkflowVersionRequest{ + Tasks: []*v1.CreateTaskOpts{ + { + IsDurable: false, + SlotRequests: map[string]int32{"default": 1}, + }, + }, + }, + }, + } + + resolved := resolveWorkerSlotConfig(map[slotType]int{}, dumps) + + if resolved[slotTypeDefault] != 100 { + t.Fatalf("expected 
default slots to be 100, got %d", resolved[slotTypeDefault]) + } + if _, ok := resolved[slotTypeDurable]; ok { + t.Fatalf("expected durable slots to be unset, got %d", resolved[slotTypeDurable]) + } +} + +func TestResolveWorkerSlotConfig_OnlyDurable(t *testing.T) { + dumps := []workflowDump{ + { + req: &v1.CreateWorkflowVersionRequest{ + Tasks: []*v1.CreateTaskOpts{ + { + IsDurable: true, + SlotRequests: map[string]int32{"durable": 1}, + }, + }, + }, + }, + } + + resolved := resolveWorkerSlotConfig(map[slotType]int{}, dumps) + + if resolved[slotTypeDurable] != 1000 { + t.Fatalf("expected durable slots to be 1000, got %d", resolved[slotTypeDurable]) + } + if _, ok := resolved[slotTypeDefault]; ok { + t.Fatalf("expected default slots to be unset, got %d", resolved[slotTypeDefault]) + } +} + +func TestResolveWorkerSlotConfig_Mixed(t *testing.T) { + dumps := []workflowDump{ + { + req: &v1.CreateWorkflowVersionRequest{ + Tasks: []*v1.CreateTaskOpts{ + { + IsDurable: false, + SlotRequests: map[string]int32{"default": 1}, + }, + { + IsDurable: true, + SlotRequests: map[string]int32{"durable": 1}, + }, + }, + }, + }, + } + + resolved := resolveWorkerSlotConfig(map[slotType]int{}, dumps) + + if resolved[slotTypeDefault] != 100 { + t.Fatalf("expected default slots to be 100, got %d", resolved[slotTypeDefault]) + } + if resolved[slotTypeDurable] != 1000 { + t.Fatalf("expected durable slots to be 1000, got %d", resolved[slotTypeDurable]) + } +} diff --git a/sdks/go/worker.go b/sdks/go/worker.go index 43fc455d0e..11869a4fad 100644 --- a/sdks/go/worker.go +++ b/sdks/go/worker.go @@ -11,12 +11,14 @@ import ( type WorkerOption func(*workerConfig) type workerConfig struct { - workflows []WorkflowBase - slots int - durableSlots int - labels map[string]any - logger *zerolog.Logger - panicHandler func(ctx Context, recovered any) + workflows []WorkflowBase + slots int + slotsSet bool + durableSlots int + durableSlotsSet bool + labels map[string]any + logger *zerolog.Logger + panicHandler func(ctx Context, recovered any) } type WorkflowBase interface { @@ -39,6 +41,7 @@ func WithWorkflows(workflows ...WorkflowBase) WorkerOption { func WithSlots(slots int) WorkerOption { return func(config *workerConfig) { config.slots = slots + config.slotsSet = true } } @@ -60,6 +63,7 @@ func WithLogger(logger *zerolog.Logger) WorkerOption { func WithDurableSlots(durableSlots int) WorkerOption { return func(config *workerConfig) { config.durableSlots = durableSlots + config.durableSlotsSet = true } } diff --git a/sdks/python/CHANGELOG.md b/sdks/python/CHANGELOG.md index cb7fae6a73..b81b4f6e1c 100644 --- a/sdks/python/CHANGELOG.md +++ b/sdks/python/CHANGELOG.md @@ -5,6 +5,12 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.23.0] - 2026-02-02 + +### Internal Only + +- Updated gRPC/REST contract field names to snake_case for consistency across SDKs. 
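The three tests above pin down the defaulting rules. A condensed, stand-alone model of those defaults, where 100 and 1000 are the fallback capacities taken from `resolveWorkerSlotConfig` (stand-in function, not the SDK's):

```go
package main

import "fmt"

// defaults applies the fallback capacities exercised by the tests: a slot
// type gets a default capacity only if some task requires it, and a worker
// with no requirements at all falls back to default-only capacity.
func defaults(needsDefault, needsDurable bool) map[string]int {
	cfg := map[string]int{}
	if needsDefault {
		cfg["default"] = 100
	}
	if needsDurable {
		cfg["durable"] = 1000
	}
	if len(cfg) == 0 {
		cfg["default"] = 100 // nothing declared: assume default-only work
	}
	return cfg
}

func main() {
	fmt.Println(defaults(true, true))   // map[default:100 durable:1000]
	fmt.Println(defaults(false, true))  // map[durable:1000]
	fmt.Println(defaults(false, false)) // map[default:100]
}
```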
+ ## [1.22.15] - 2026-02-02 ### Added diff --git a/sdks/python/examples/durable/test_durable.py b/sdks/python/examples/durable/test_durable.py index 562bb169ed..f85115ee40 100644 --- a/sdks/python/examples/durable/test_durable.py +++ b/sdks/python/examples/durable/test_durable.py @@ -27,15 +27,11 @@ async def test_durable(hatchet: Hatchet) -> None: active_workers = [w for w in workers.rows if w.status == "ACTIVE"] - assert len(active_workers) >= 2 + assert len(active_workers) == 1 assert any( w.name == hatchet.config.apply_namespace("e2e-test-worker") for w in active_workers ) - assert any( - w.name == hatchet.config.apply_namespace("e2e-test-worker_durable") - for w in active_workers - ) assert result["durable_task"]["status"] == "success" diff --git a/sdks/python/examples/events/test_event.py b/sdks/python/examples/events/test_event.py index f8d6b23b3e..740d40be62 100644 --- a/sdks/python/examples/events/test_event.py +++ b/sdks/python/examples/events/test_event.py @@ -19,11 +19,11 @@ from hatchet_sdk.clients.events import ( BulkPushEventOptions, BulkPushEventWithMetadata, + Event, PushEventOptions, ) from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary -from hatchet_sdk.contracts.events_pb2 import Event from hatchet_sdk.hatchet import Hatchet @@ -67,11 +67,11 @@ async def event_filter( async def fetch_runs_for_event( hatchet: Hatchet, event: Event ) -> tuple[ProcessedEvent, list[V1TaskSummary]]: - runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId) + runs = await hatchet.runs.aio_list(triggering_event_external_id=event.event_id) meta = ( - cast(dict[str, str | int | bool], json.loads(event.additionalMetadata)) - if event.additionalMetadata + cast(dict[str, str | int | bool], json.loads(event.additional_metadata)) + if event.additional_metadata else {} ) payload = ( @@ -79,7 +79,7 @@ async def fetch_runs_for_event( ) processed_event = ProcessedEvent( - id=event.eventId, + id=event.event_id, payload=payload, meta=meta, should_have_runs=meta.get("should_have_runs", False) is True, @@ -110,7 +110,7 @@ async def wait_for_result( persisted = (await hatchet.event.aio_list(limit=100, since=since)).rows or [] - assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted}) + assert {e.event_id for e in events}.issubset({e.metadata.id for e in persisted}) iters = 0 while True: @@ -163,9 +163,14 @@ async def wait_for_result_and_assert(hatchet: Hatchet, events: list[Event]) -> N event_to_runs = await wait_for_result(hatchet, events) unique_events_with_runs = { - event.eventId + event.event_id for event in events - if json.loads(event.additionalMetadata).get("should_have_runs", False) is True + if ( + json.loads(event.additional_metadata) + if isinstance(event.additional_metadata, str) + else {} + ).get("should_have_runs", False) + is True } unique_events_with_runs_in_results = { @@ -232,14 +237,14 @@ def cp(should_skip: bool) -> dict[str, bool]: async def test_event_push(hatchet: Hatchet) -> None: e = hatchet.event.push(EVENT_KEY, cp(False)) - assert e.eventId is not None + assert e.event_id is not None @pytest.mark.asyncio(loop_scope="session") async def test_async_event_push(hatchet: Hatchet) -> None: e = await hatchet.event.aio_push(EVENT_KEY, cp(False)) - assert e.eventId is not None + assert e.event_id is not None @pytest.mark.asyncio(loop_scope="session") @@ -578,7 +583,7 @@ async def test_multi_scope_bug(hatchet: Hatchet, test_run_id: str) -> None: for 
event in events: runs = await hatchet.runs.aio_list( - triggering_event_external_id=event.eventId, + triggering_event_external_id=event.event_id, additional_metadata={"test_run_id": test_run_id}, ) diff --git a/sdks/python/examples/simple/worker.py b/sdks/python/examples/simple/worker.py index 7986b21fd5..9a86211d58 100644 --- a/sdks/python/examples/simple/worker.py +++ b/sdks/python/examples/simple/worker.py @@ -1,22 +1,29 @@ # > Simple +import time -from hatchet_sdk import Context, EmptyModel, Hatchet +from hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet hatchet = Hatchet(debug=True) @hatchet.task() def simple(input: EmptyModel, ctx: Context) -> dict[str, str]: + time.sleep(50) return {"result": "Hello, world!"} @hatchet.durable_task() -def simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]: +async def simple_durable(input: EmptyModel, ctx: DurableContext) -> dict[str, str]: + res = await simple.aio_run(input) + print(res) return {"result": "Hello, world!"} def main() -> None: - worker = hatchet.worker("test-worker", workflows=[simple, simple_durable]) + worker = hatchet.worker( + "test-worker", + workflows=[simple, simple_durable], + ) worker.start() diff --git a/sdks/python/hatchet_sdk/__init__.py b/sdks/python/hatchet_sdk/__init__.py index 43b001b409..5b13deeece 100644 --- a/sdks/python/hatchet_sdk/__init__.py +++ b/sdks/python/hatchet_sdk/__init__.py @@ -1,3 +1,4 @@ +from hatchet_sdk.cancellation import CancellationToken from hatchet_sdk.clients.admin import ( RunStatus, ScheduleTriggerWorkflowOptions, @@ -148,6 +149,8 @@ WorkerLabelComparator, ) from hatchet_sdk.exceptions import ( + CancellationReason, + CancelledError, DedupeViolationError, FailedTaskRunExceptionGroup, NonRetryableException, @@ -170,6 +173,7 @@ from hatchet_sdk.serde import is_in_hatchet_serialization_context from hatchet_sdk.utils.opentelemetry import OTelAttribute from hatchet_sdk.utils.serde import remove_null_unicode_character +from hatchet_sdk.worker.slot_types import SlotType from hatchet_sdk.worker.worker import Worker, WorkerStartOptions, WorkerStatus from hatchet_sdk.workflow_run import WorkflowRunRef @@ -186,6 +190,9 @@ "CELEvaluationResult", "CELFailure", "CELSuccess", + "CancellationReason", + "CancellationToken", + "CancelledError", "ClientConfig", "ClientTLSConfig", "ConcurrencyExpression", @@ -247,6 +254,7 @@ "RunStatus", "ScheduleTriggerWorkflowOptions", "SleepCondition", + "SlotType", "StepRun", "StepRunDiff", "StepRunEventType", diff --git a/sdks/python/hatchet_sdk/cancellation.py b/sdks/python/hatchet_sdk/cancellation.py new file mode 100644 index 0000000000..a73c0af78a --- /dev/null +++ b/sdks/python/hatchet_sdk/cancellation.py @@ -0,0 +1,195 @@ +"""Cancellation token for coordinating cancellation across async and sync operations.""" + +from __future__ import annotations + +import asyncio +import threading +from collections.abc import Callable +from typing import TYPE_CHECKING + +from hatchet_sdk.exceptions import CancellationReason +from hatchet_sdk.logger import logger + +if TYPE_CHECKING: + pass + + +class CancellationToken: + """ + A token that can be used to signal cancellation across async and sync operations. + + The token provides both asyncio and threading event primitives, allowing it to work + seamlessly in both async and sync code paths. Child workflow run IDs can be registered + with the token so they can be cancelled when the parent is cancelled. 
+ + Example: + ```python + token = CancellationToken() + + # In async code + await token.aio_wait() # Blocks until cancelled + + # In sync code + token.wait(timeout=1.0) # Returns True if cancelled within timeout + + # Check if cancelled + if token.is_cancelled: + raise CancelledError("Operation was cancelled") + + # Trigger cancellation + token.cancel() + ``` + """ + + def __init__(self) -> None: + self._cancelled = False + self._reason: CancellationReason | None = None + self._async_event: asyncio.Event | None = None + self._sync_event = threading.Event() + self._child_run_ids: list[str] = [] + self._callbacks: list[Callable[[], None]] = [] + self._lock = threading.Lock() + + def _get_async_event(self) -> asyncio.Event: + """Lazily create the asyncio event to avoid requiring an event loop at init time.""" + if self._async_event is None: + self._async_event = asyncio.Event() + # If already cancelled, set the event + if self._cancelled: + self._async_event.set() + return self._async_event + + def cancel(self, reason: CancellationReason = CancellationReason.UNKNOWN) -> None: + """ + Trigger cancellation. + + This will: + - Set the cancelled flag and reason + - Signal both async and sync events + - Invoke all registered callbacks + + Args: + reason: The reason for cancellation. + """ + with self._lock: + if self._cancelled: + logger.debug( + f"CancellationToken: cancel() called but already cancelled, " + f"reason={self._reason.value if self._reason else 'none'}" + ) + return + + logger.debug( + f"CancellationToken: cancel() called, reason={reason.value}, " + f"{len(self._child_run_ids)} children registered" + ) + + self._cancelled = True + self._reason = reason + + # Signal both event types + if self._async_event is not None: + self._async_event.set() + self._sync_event.set() + + # Invoke callbacks + for callback in self._callbacks: + try: + logger.debug(f"CancellationToken: invoking callback {callback}") + callback() + except Exception as e: # noqa: PERF203 + logger.warning(f"CancellationToken: callback raised exception: {e}") + + logger.debug(f"CancellationToken: cancel() complete, reason={reason.value}") + + @property + def is_cancelled(self) -> bool: + """Check if cancellation has been triggered.""" + return self._cancelled + + @property + def reason(self) -> CancellationReason | None: + """Get the reason for cancellation, or None if not cancelled.""" + return self._reason + + async def aio_wait(self) -> None: + """ + Await until cancelled (for use in asyncio). + + This will block until cancel() is called. + """ + await self._get_async_event().wait() + logger.debug( + f"CancellationToken: async wait completed (cancelled), " + f"reason={self._reason.value if self._reason else 'none'}" + ) + + def wait(self, timeout: float | None = None) -> bool: + """ + Block until cancelled (for use in sync code). + + Args: + timeout: Maximum time to wait in seconds. None means wait forever. + + Returns: + True if the token was cancelled (event was set), False if timeout expired. + """ + result = self._sync_event.wait(timeout) + if result: + logger.debug( + f"CancellationToken: sync wait interrupted by cancellation, " + f"reason={self._reason.value if self._reason else 'none'}" + ) + return result + + def register_child(self, run_id: str) -> None: + """ + Register a child workflow run ID with this token. + + When the parent is cancelled, these child run IDs can be used to cancel + the child workflows as well. + + Args: + run_id: The workflow run ID of the child workflow. 
+ """ + with self._lock: + logger.debug(f"CancellationToken: registering child workflow {run_id}") + self._child_run_ids.append(run_id) + + def get_child_run_ids(self) -> list[str]: + """ + Get a copy of the registered child run IDs. + + Returns: + A list of child workflow run IDs. + """ + with self._lock: + return self._child_run_ids.copy() + + def add_callback(self, callback: Callable[[], None]) -> None: + """ + Register a callback to be invoked when cancellation is triggered. + + If the token is already cancelled, the callback will be invoked immediately. + + Args: + callback: A callable that takes no arguments. + """ + with self._lock: + if self._cancelled: + # Already cancelled, invoke immediately + logger.debug( + f"CancellationToken: invoking callback immediately (already cancelled): {callback}" + ) + try: + callback() + except Exception as e: + logger.warning(f"CancellationToken: callback raised exception: {e}") + else: + self._callbacks.append(callback) + + def __repr__(self) -> str: + return ( + f"CancellationToken(cancelled={self._cancelled}, " + f"children={len(self._child_run_ids)}, callbacks={len(self._callbacks)})" + ) diff --git a/sdks/python/hatchet_sdk/clients/admin.py b/sdks/python/hatchet_sdk/clients/admin.py index 01c170ca55..4cb7dc49ac 100644 --- a/sdks/python/hatchet_sdk/clients/admin.py +++ b/sdks/python/hatchet_sdk/clients/admin.py @@ -198,7 +198,7 @@ def _prepare_workflow_request( name=workflow_name, input=payload_data, parent_id=_options.parent_id, - parent_step_run_id=_options.parent_step_run_id, + parent_task_run_external_id=_options.parent_step_run_id, child_index=_options.child_index, child_key=_options.child_key, additional_metadata=_options.additional_metadata, @@ -232,7 +232,7 @@ def _prepare_schedule_workflow_request( schedules=[self._parse_schedule(schedule) for schedule in schedules], input=json.dumps(input), parent_id=options.parent_id, - parent_step_run_id=options.parent_step_run_id, + parent_task_run_external_id=options.parent_step_run_id, child_index=options.child_index, child_key=options.child_key, additional_metadata=json.dumps(options.additional_metadata), diff --git a/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py b/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py index a7a36a10d1..b7547d7d6b 100644 --- a/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py +++ b/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py @@ -48,7 +48,7 @@ class GetActionListenerRequest(BaseModel): worker_name: str services: list[str] actions: list[str] - slots: int + slot_config: dict[str, int] raw_labels: dict[str, str | int] = Field(default_factory=dict) labels: dict[str, WorkerLabels] = Field(default_factory=dict) @@ -59,9 +59,9 @@ def validate_labels(self) -> "GetActionListenerRequest": for key, value in self.raw_labels.items(): if isinstance(value, int): - self.labels[key] = WorkerLabels(intValue=value) + self.labels[key] = WorkerLabels(int_value=value) else: - self.labels[key] = WorkerLabels(strValue=str(value)) + self.labels[key] = WorkerLabels(str_value=str(value)) return self @@ -109,8 +109,8 @@ async def heartbeat(self) -> None: logger.debug("sending heartbeat") await self.aio_client.Heartbeat( HeartbeatRequest( - workerId=self.worker_id, - heartbeatAt=proto_timestamp_now(), + worker_id=self.worker_id, + heartbeat_at=proto_timestamp_now(), ), timeout=5, metadata=get_metadata(self.token), @@ -217,9 +217,9 @@ async def _generator(self) -> AsyncGenerator[Action | None, None]: try: action_payload = ( ActionPayload() 
- if not assigned_action.actionPayload + if not assigned_action.action_payload else ActionPayload.model_validate_json( - assigned_action.actionPayload + assigned_action.action_payload ) ) except (ValueError, json.JSONDecodeError): @@ -228,22 +228,22 @@ async def _generator(self) -> AsyncGenerator[Action | None, None]: action_payload = ActionPayload() action = Action( - tenant_id=assigned_action.tenantId, + tenant_id=assigned_action.tenant_id, worker_id=self.worker_id, - workflow_run_id=assigned_action.workflowRunId, - job_id=assigned_action.jobId, - job_name=assigned_action.jobName, - job_run_id=assigned_action.jobRunId, - step_id=assigned_action.stepId, - step_run_id=assigned_action.stepRunId, - action_id=assigned_action.actionId, + workflow_run_id=assigned_action.workflow_run_id, + job_id=assigned_action.job_id, + job_name=assigned_action.job_name, + job_run_id=assigned_action.job_run_id, + step_id=assigned_action.task_id, + step_run_id=assigned_action.task_run_external_id, + action_id=assigned_action.action_id, action_payload=action_payload, action_type=convert_proto_enum_to_python( - assigned_action.actionType, + assigned_action.action_type, ActionType, ActionTypeProto, ), - retry_count=assigned_action.retryCount, + retry_count=assigned_action.retry_count, additional_metadata=parse_additional_metadata( assigned_action.additional_metadata ), @@ -251,8 +251,8 @@ async def _generator(self) -> AsyncGenerator[Action | None, None]: child_workflow_key=assigned_action.child_workflow_key, parent_workflow_run_id=assigned_action.parent_workflow_run_id, priority=assigned_action.priority, - workflow_version_id=assigned_action.workflowVersionId, - workflow_id=assigned_action.workflowId, + workflow_version_id=assigned_action.workflow_version_id, + workflow_id=assigned_action.workflow_id, ) yield action @@ -319,7 +319,7 @@ async def get_listen_client( # we should await for the listener to be established before # starting the heartbeater listener = self.aio_client.ListenV2( - WorkerListenRequest(workerId=self.worker_id), + WorkerListenRequest(worker_id=self.worker_id), timeout=self.config.listener_v2_timeout, metadata=get_metadata(self.token), ) @@ -327,7 +327,7 @@ async def get_listen_client( else: # if ListenV2 is not available, fallback to Listen listener = self.aio_client.Listen( - WorkerListenRequest(workerId=self.worker_id), + WorkerListenRequest(worker_id=self.worker_id), timeout=DEFAULT_ACTION_TIMEOUT, metadata=get_metadata(self.token), ) @@ -359,7 +359,7 @@ def unregister(self) -> WorkerUnsubscribeRequest: try: req = self.aio_client.Unsubscribe( - WorkerUnsubscribeRequest(workerId=self.worker_id), + WorkerUnsubscribeRequest(worker_id=self.worker_id), timeout=5, metadata=get_metadata(self.token), ) diff --git a/sdks/python/hatchet_sdk/clients/dispatcher/dispatcher.py b/sdks/python/hatchet_sdk/clients/dispatcher/dispatcher.py index da7616cabb..a1e04a0865 100644 --- a/sdks/python/hatchet_sdk/clients/dispatcher/dispatcher.py +++ b/sdks/python/hatchet_sdk/clients/dispatcher/dispatcher.py @@ -64,21 +64,21 @@ async def get_action_listener( preset_labels = self.config.worker_preset_labels for key, value in preset_labels.items(): - req.labels[key] = WorkerLabels(strValue=str(value)) + req.labels[key] = WorkerLabels(str_value=str(value)) response = cast( WorkerRegisterResponse, await self.aio_client.Register( WorkerRegisterRequest( - workerName=req.worker_name, + worker_name=req.worker_name, actions=req.actions, services=req.services, - maxRuns=req.slots, labels=req.labels, - runtimeInfo=RuntimeInfo( - 
sdkVersion=version("hatchet_sdk"), + slot_config=req.slot_config, + runtime_info=RuntimeInfo( + sdk_version=version("hatchet_sdk"), language=SDKS.PYTHON, - languageVersion=f"{version_info.major}.{version_info.minor}.{version_info.micro}", + language_version=f"{version_info.major}.{version_info.minor}.{version_info.micro}", os=platform.system().lower(), ), ), @@ -87,7 +87,7 @@ async def get_action_listener( ), ) - return ActionListener(self.config, response.workerId) + return ActionListener(self.config, response.worker_id) async def send_step_action_event( self, @@ -127,17 +127,17 @@ async def _try_send_step_action_event( event_timestamp.GetCurrentTime() event = StepActionEvent( - workerId=action.worker_id, - jobId=action.job_id, - jobRunId=action.job_run_id, - stepId=action.step_id, - stepRunId=action.step_run_id, - actionId=action.action_id, - eventTimestamp=event_timestamp, - eventType=event_type, - eventPayload=payload, - retryCount=action.retry_count, - shouldNotRetry=should_not_retry, + worker_id=action.worker_id, + job_id=action.job_id, + job_run_id=action.job_run_id, + task_id=action.step_id, + task_run_external_id=action.step_run_id, + action_id=action.action_id, + event_timestamp=event_timestamp, + event_type=event_type, + event_payload=payload, + retry_count=action.retry_count, + should_not_retry=should_not_retry, ) send_step_action_event = tenacity_retry( @@ -167,7 +167,7 @@ def release_slot(self, step_run_id: str) -> None: client = self._get_or_create_client() client.ReleaseSlot( - ReleaseSlotRequest(stepRunId=step_run_id), + ReleaseSlotRequest(task_run_external_id=step_run_id), timeout=DEFAULT_REGISTER_TIMEOUT, metadata=get_metadata(self.token), ) @@ -177,8 +177,8 @@ def refresh_timeout(self, step_run_id: str, increment_by: str) -> None: client.RefreshTimeout( RefreshTimeoutRequest( - stepRunId=step_run_id, - incrementTimeoutBy=increment_by, + task_run_external_id=step_run_id, + increment_timeout_by=increment_by, ), timeout=DEFAULT_REGISTER_TIMEOUT, metadata=get_metadata(self.token), @@ -191,14 +191,14 @@ def upsert_worker_labels( for key, value in labels.items(): if isinstance(value, int): - worker_labels[key] = WorkerLabels(intValue=value) + worker_labels[key] = WorkerLabels(int_value=value) else: - worker_labels[key] = WorkerLabels(strValue=str(value)) + worker_labels[key] = WorkerLabels(str_value=str(value)) client = self._get_or_create_client() client.UpsertWorkerLabels( - UpsertWorkerLabelsRequest(workerId=worker_id, labels=worker_labels), + UpsertWorkerLabelsRequest(worker_id=worker_id, labels=worker_labels), timeout=DEFAULT_REGISTER_TIMEOUT, metadata=get_metadata(self.token), ) @@ -216,12 +216,12 @@ async def async_upsert_worker_labels( for key, value in labels.items(): if isinstance(value, int): - worker_labels[key] = WorkerLabels(intValue=value) + worker_labels[key] = WorkerLabels(int_value=value) else: - worker_labels[key] = WorkerLabels(strValue=str(value)) + worker_labels[key] = WorkerLabels(str_value=str(value)) await self.aio_client.UpsertWorkerLabels( - UpsertWorkerLabelsRequest(workerId=worker_id, labels=worker_labels), + UpsertWorkerLabelsRequest(worker_id=worker_id, labels=worker_labels), timeout=DEFAULT_REGISTER_TIMEOUT, metadata=get_metadata(self.token), ) diff --git a/sdks/python/hatchet_sdk/clients/events.py b/sdks/python/hatchet_sdk/clients/events.py index 7f2ab7ec2b..98fb4c7d64 100644 --- a/sdks/python/hatchet_sdk/clients/events.py +++ b/sdks/python/hatchet_sdk/clients/events.py @@ -4,7 +4,7 @@ from typing import cast from google.protobuf import 
timestamp_pb2 -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from hatchet_sdk.clients.rest.api.event_api import EventApi from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi @@ -21,12 +21,12 @@ from hatchet_sdk.connection import new_conn from hatchet_sdk.contracts.events_pb2 import ( BulkPushEventRequest, - Event, - Events, PushEventRequest, PutLogRequest, PutStreamEventRequest, ) +from hatchet_sdk.contracts.events_pb2 import Event as EventProto +from hatchet_sdk.contracts.events_pb2 import Events as EventsProto from hatchet_sdk.contracts.events_pb2_grpc import EventsServiceStub from hatchet_sdk.logger import logger from hatchet_sdk.metadata import get_metadata @@ -60,6 +60,43 @@ class BulkPushEventWithMetadata(BaseModel): scope: str | None = None +class Event(BaseModel): + tenant_id: str + event_id: str + key: str + payload: str + event_timestamp: timestamp_pb2.Timestamp + additional_metadata: str | None = None + scope: str | None = None + + model_config = ConfigDict(arbitrary_types_allowed=True) + + @property + def eventTimestamp(self) -> timestamp_pb2.Timestamp: # noqa: N802 + return self.event_timestamp + + @property + def additionalMetadata(self) -> str | None: # noqa: N802 + return self.additional_metadata + + @classmethod + def from_proto(cls, proto: EventProto) -> "Event": + additional_metadata = ( + proto.additional_metadata if proto.HasField("additional_metadata") else None + ) + scope = proto.scope if proto.HasField("scope") else None + + return cls( + tenant_id=proto.tenant_id, + event_id=proto.event_id, + key=proto.key, + payload=proto.payload, + event_timestamp=proto.event_timestamp, + additional_metadata=additional_metadata, + scope=scope, + ) + + class EventClient(BaseRestClient): def __init__(self, config: ClientConfig): super().__init__(config) @@ -119,16 +156,17 @@ def push( request = PushEventRequest( key=namespaced_event_key, payload=payload_str, - eventTimestamp=proto_timestamp_now(), - additionalMetadata=meta_bytes, + event_timestamp=proto_timestamp_now(), + additional_metadata=meta_bytes, priority=options.priority, scope=options.scope, ) - return cast( - Event, + response = cast( + EventProto, push_event(request, metadata=get_metadata(self.token)), ) + return Event.from_proto(response) def _create_push_event_request( self, @@ -153,8 +191,8 @@ def _create_push_event_request( return PushEventRequest( key=event_key, payload=serialized_payload, - eventTimestamp=proto_timestamp_now(), - additionalMetadata=meta_str, + event_timestamp=proto_timestamp_now(), + additional_metadata=meta_str, priority=event.priority, scope=event.scope, ) @@ -176,12 +214,11 @@ def bulk_push( ] ) - return list( - cast( - Events, - bulk_push(bulk_request, metadata=get_metadata(self.token)), - ).events + response = cast( + EventsProto, + bulk_push(bulk_request, metadata=get_metadata(self.token)), ) + return [Event.from_proto(event) for event in response.events] def log( self, @@ -198,11 +235,11 @@ def log( self.events_service_client.PutLog, self.client_config.tenacity ) request = PutLogRequest( - stepRunId=step_run_id, - createdAt=proto_timestamp_now(), + task_run_external_id=step_run_id, + created_at=proto_timestamp_now(), message=message, level=level.value if level else None, - taskRetryCount=task_retry_count, + task_retry_count=task_retry_count, ) put_log(request, metadata=get_metadata(self.token)) @@ -219,10 +256,10 @@ def stream(self, data: str | bytes, step_run_id: str, index: int) -> None: raise ValueError("Invalid data type. 
Expected str, bytes, or file.") request = PutStreamEventRequest( - stepRunId=step_run_id, - createdAt=proto_timestamp_now(), + task_run_external_id=step_run_id, + created_at=proto_timestamp_now(), message=data_bytes, - eventIndex=index, + event_index=index, ) try: diff --git a/sdks/python/hatchet_sdk/clients/listeners/pooled_listener.py b/sdks/python/hatchet_sdk/clients/listeners/pooled_listener.py index 8a99d8fdce..0752c80121 100644 --- a/sdks/python/hatchet_sdk/clients/listeners/pooled_listener.py +++ b/sdks/python/hatchet_sdk/clients/listeners/pooled_listener.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import asyncio +import contextlib from abc import ABC, abstractmethod from collections.abc import AsyncIterator -from typing import Generic, Literal, TypeVar +from typing import TYPE_CHECKING, Generic, Literal, TypeVar import grpc import grpc.aio @@ -15,6 +18,9 @@ from hatchet_sdk.logger import logger from hatchet_sdk.metadata import get_metadata +if TYPE_CHECKING: + from hatchet_sdk.cancellation import CancellationToken + DEFAULT_LISTENER_RETRY_INTERVAL = 3 # seconds DEFAULT_LISTENER_RETRY_COUNT = 5 DEFAULT_LISTENER_INTERRUPT_INTERVAL = 1800 # 30 minutes @@ -36,7 +42,7 @@ def __init__(self, id: int) -> None: self.id = id self.queue: asyncio.Queue[T | SentinelValue] = asyncio.Queue() - async def __aiter__(self) -> "Subscription[T]": + async def __aiter__(self) -> Subscription[T]: return self async def __anext__(self) -> T | SentinelValue: @@ -199,7 +205,17 @@ def cleanup_subscription(self, subscription_id: int) -> None: del self.from_subscriptions[subscription_id] del self.events[subscription_id] - async def subscribe(self, id: str) -> T: + async def subscribe( + self, id: str, cancellation_token: CancellationToken | None = None + ) -> T: + """ + Subscribe to events for the given ID. + + :param id: The ID to subscribe to (e.g., workflow run ID). + :param cancellation_token: Optional cancellation token to abort the subscription wait. + :return: The event received for this ID. + :raises asyncio.CancelledError: If the cancellation token is triggered or if externally cancelled. 
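The subscribe body that follows boils down to racing the queue read against the cancellation token. The same pattern, reduced to a self-contained sketch (the real CancellationToken from hatchet_sdk.cancellation is not shown in this diff, so a hypothetical stub exposing an async aio_wait() stands in for it):

import asyncio
import contextlib
from collections.abc import Awaitable
from typing import TypeVar

T = TypeVar("T")

class StubCancellationToken:
    """Hypothetical stand-in for hatchet_sdk.cancellation.CancellationToken."""

    def __init__(self) -> None:
        self._event = asyncio.Event()

    def cancel(self) -> None:
        self._event.set()

    async def aio_wait(self) -> None:
        await self._event.wait()

async def race_against_cancellation(coro: Awaitable[T], token: StubCancellationToken) -> T:
    # Run the work and the cancellation wait concurrently; the first to
    # finish wins, and the loser is cancelled and drained.
    work = asyncio.ensure_future(coro)
    cancel = asyncio.create_task(token.aio_wait())
    done, pending = await asyncio.wait({work, cancel}, return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
        with contextlib.suppress(asyncio.CancelledError):
            await task
    if cancel in done:
        raise asyncio.CancelledError("Operation cancelled by cancellation token")
    return work.result()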
+ """ subscription_id: int | None = None try: @@ -221,8 +237,42 @@ async def subscribe(self, id: str) -> T: if not self.listener_task or self.listener_task.done(): self.listener_task = asyncio.create_task(self._init_producer()) - return await self.events[subscription_id].get() + logger.debug( + f"PooledListener.subscribe: waiting for event on id={id}, " + f"subscription_id={subscription_id}, token={cancellation_token is not None}" + ) + + if cancellation_token: + # Race the event wait against the cancellation token + cancel_task = asyncio.create_task(cancellation_token.aio_wait()) + result_task = asyncio.create_task(self.events[subscription_id].get()) + + done, pending = await asyncio.wait( + [cancel_task, result_task], + return_when=asyncio.FIRST_COMPLETED, + ) + + # Cancel pending tasks + for task in pending: + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + + if cancel_task in done: + logger.debug( + f"PooledListener.subscribe: cancelled while waiting for id={id}" + ) + raise asyncio.CancelledError( + "Operation cancelled by cancellation token" + ) + + logger.debug(f"PooledListener.subscribe: received event for id={id}") + return result_task.result() + result = await self.events[subscription_id].get() + logger.debug(f"PooledListener.subscribe: received event for id={id}") + return result except asyncio.CancelledError: + logger.debug(f"PooledListener.subscribe: externally cancelled for id={id}") raise finally: if subscription_id: diff --git a/sdks/python/hatchet_sdk/clients/listeners/run_event_listener.py b/sdks/python/hatchet_sdk/clients/listeners/run_event_listener.py index 3cbd4cc101..fadbb1bf86 100644 --- a/sdks/python/hatchet_sdk/clients/listeners/run_event_listener.py +++ b/sdks/python/hatchet_sdk/clients/listeners/run_event_listener.py @@ -142,33 +142,33 @@ async def _generator(self) -> AsyncGenerator[StepRunEvent, None]: try: async for workflow_event in listener: - eventType = None - if workflow_event.resourceType == RESOURCE_TYPE_STEP_RUN: - if workflow_event.eventType in step_run_event_type_mapping: - eventType = step_run_event_type_mapping[ - workflow_event.eventType + event_type = None + if workflow_event.resource_type == RESOURCE_TYPE_STEP_RUN: + if workflow_event.event_type in step_run_event_type_mapping: + event_type = step_run_event_type_mapping[ + workflow_event.event_type ] else: raise Exception( - f"Unknown event type: {workflow_event.eventType}" + f"Unknown event type: {workflow_event.event_type}" ) yield StepRunEvent( - type=eventType, payload=workflow_event.eventPayload + type=event_type, payload=workflow_event.event_payload ) - elif workflow_event.resourceType == RESOURCE_TYPE_WORKFLOW_RUN: - if workflow_event.eventType in step_run_event_type_mapping: - workflowRunEventType = step_run_event_type_mapping[ - workflow_event.eventType + elif workflow_event.resource_type == RESOURCE_TYPE_WORKFLOW_RUN: + if workflow_event.event_type in step_run_event_type_mapping: + workflow_run_event_type = step_run_event_type_mapping[ + workflow_event.event_type ] else: raise Exception( - f"Unknown event type: {workflow_event.eventType}" + f"Unknown event type: {workflow_event.event_type}" ) yield StepRunEvent( - type=workflowRunEventType, - payload=workflow_event.eventPayload, + type=workflow_run_event_type, + payload=workflow_event.event_payload, ) if workflow_event.hangup: @@ -211,7 +211,7 @@ async def retry_subscribe(self) -> AsyncGenerator[WorkflowEvent, None]: AsyncGenerator[WorkflowEvent, None], self.client.SubscribeToWorkflowEvents( 
SubscribeToWorkflowEventsRequest( - workflowRunId=self.workflow_run_id, + workflow_run_id=self.workflow_run_id, ), metadata=get_metadata(self.config.token), ), @@ -221,8 +221,8 @@ async def retry_subscribe(self) -> AsyncGenerator[WorkflowEvent, None]: AsyncGenerator[WorkflowEvent, None], self.client.SubscribeToWorkflowEvents( SubscribeToWorkflowEventsRequest( - additionalMetaKey=self.additional_meta_kv[0], - additionalMetaValue=self.additional_meta_kv[1], + additional_meta_key=self.additional_meta_kv[0], + additional_meta_value=self.additional_meta_kv[1], ), metadata=get_metadata(self.config.token), ), diff --git a/sdks/python/hatchet_sdk/clients/listeners/workflow_listener.py b/sdks/python/hatchet_sdk/clients/listeners/workflow_listener.py index 0ea995b0e8..c049b66f01 100644 --- a/sdks/python/hatchet_sdk/clients/listeners/workflow_listener.py +++ b/sdks/python/hatchet_sdk/clients/listeners/workflow_listener.py @@ -26,16 +26,16 @@ class PooledWorkflowRunListener( ): def create_request_body(self, item: str) -> SubscribeToWorkflowRunsRequest: return SubscribeToWorkflowRunsRequest( - workflowRunId=item, + workflow_run_id=item, ) def generate_key(self, response: WorkflowRunEvent) -> str: - return response.workflowRunId + return response.workflow_run_id async def aio_result(self, id: str) -> dict[str, Any]: event = await self.subscribe(id) errors = [result.error for result in event.results if result.error] - workflow_run_id = event.workflowRunId + workflow_run_id = event.workflow_run_id if errors: if DEDUPE_MESSAGE in errors[0]: @@ -47,7 +47,7 @@ async def aio_result(self, id: str) -> dict[str, Any]: ) return { - result.stepReadableId: json.loads(result.output) + result.task_name: json.loads(result.output) for result in event.results if result.output } diff --git a/sdks/python/hatchet_sdk/clients/rest/__init__.py b/sdks/python/hatchet_sdk/clients/rest/__init__.py index f597b0500e..8dd0aebf0e 100644 --- a/sdks/python/hatchet_sdk/clients/rest/__init__.py +++ b/sdks/python/hatchet_sdk/clients/rest/__init__.py @@ -305,6 +305,9 @@ from hatchet_sdk.clients.rest.models.v1_log_line import V1LogLine from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList +from hatchet_sdk.clients.rest.models.v1_log_line_order_by_direction import ( + V1LogLineOrderByDirection, +) from hatchet_sdk.clients.rest.models.v1_replay_task_request import V1ReplayTaskRequest from hatchet_sdk.clients.rest.models.v1_replayed_tasks import V1ReplayedTasks from hatchet_sdk.clients.rest.models.v1_task_event import V1TaskEvent @@ -374,6 +377,7 @@ from hatchet_sdk.clients.rest.models.worker_list import WorkerList from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo from hatchet_sdk.clients.rest.models.worker_runtime_sdks import WorkerRuntimeSDKs +from hatchet_sdk.clients.rest.models.worker_slot_config import WorkerSlotConfig from hatchet_sdk.clients.rest.models.worker_type import WorkerType from hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/sdks/python/hatchet_sdk/clients/rest/api/log_api.py b/sdks/python/hatchet_sdk/clients/rest/api/log_api.py index ae48e75e59..2c50fe1976 100644 --- a/sdks/python/hatchet_sdk/clients/rest/api/log_api.py +++ b/sdks/python/hatchet_sdk/clients/rest/api/log_api.py @@ -28,6 +28,9 @@ from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField from 
hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList +from hatchet_sdk.clients.rest.models.v1_log_line_order_by_direction import ( + V1LogLineOrderByDirection, +) from hatchet_sdk.clients.rest.rest import RESTResponseType @@ -473,6 +476,13 @@ def v1_log_line_list( Optional[List[V1LogLineLevel]], Field(description="The log level(s) to include"), ] = None, + order_by_direction: Annotated[ + Optional[V1LogLineOrderByDirection], + Field(description="The direction to order by"), + ] = None, + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number to filter for") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -501,6 +511,10 @@ def v1_log_line_list( :type search: str :param levels: The log level(s) to include :type levels: List[V1LogLineLevel] + :param order_by_direction: The direction to order by + :type order_by_direction: V1LogLineOrderByDirection + :param attempt: The attempt number to filter for + :type attempt: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -530,6 +544,8 @@ def v1_log_line_list( until=until, search=search, levels=levels, + order_by_direction=order_by_direction, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -574,6 +590,13 @@ def v1_log_line_list_with_http_info( Optional[List[V1LogLineLevel]], Field(description="The log level(s) to include"), ] = None, + order_by_direction: Annotated[ + Optional[V1LogLineOrderByDirection], + Field(description="The direction to order by"), + ] = None, + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number to filter for") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -602,6 +625,10 @@ def v1_log_line_list_with_http_info( :type search: str :param levels: The log level(s) to include :type levels: List[V1LogLineLevel] + :param order_by_direction: The direction to order by + :type order_by_direction: V1LogLineOrderByDirection + :param attempt: The attempt number to filter for + :type attempt: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -631,6 +658,8 @@ def v1_log_line_list_with_http_info( until=until, search=search, levels=levels, + order_by_direction=order_by_direction, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -675,6 +704,13 @@ def v1_log_line_list_without_preload_content( Optional[List[V1LogLineLevel]], Field(description="The log level(s) to include"), ] = None, + order_by_direction: Annotated[ + Optional[V1LogLineOrderByDirection], + Field(description="The direction to order by"), + ] = None, + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number to filter for") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -703,6 +739,10 @@ def v1_log_line_list_without_preload_content( :type search: str :param levels: The log level(s) to include :type levels: List[V1LogLineLevel] + :param order_by_direction: The direction to order by + :type order_by_direction: V1LogLineOrderByDirection + :param attempt: The attempt number to filter for + :type attempt: int :param _request_timeout: timeout setting for this request. 
If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -732,6 +772,8 @@ def v1_log_line_list_without_preload_content( until=until, search=search, levels=levels, + order_by_direction=order_by_direction, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -756,6 +798,8 @@ def _v1_log_line_list_serialize( until, search, levels, + order_by_direction, + attempt, _request_auth, _content_type, _headers, @@ -815,6 +859,14 @@ def _v1_log_line_list_serialize( _query_params.append(("levels", levels)) + if order_by_direction is not None: + + _query_params.append(("order_by_direction", order_by_direction.value)) + + if attempt is not None: + + _query_params.append(("attempt", attempt)) + # process the header parameters # process the form parameters # process the body parameter diff --git a/sdks/python/hatchet_sdk/clients/rest/models/__init__.py b/sdks/python/hatchet_sdk/clients/rest/models/__init__.py index 2e774948d1..438894966b 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/__init__.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/__init__.py @@ -267,6 +267,9 @@ from hatchet_sdk.clients.rest.models.v1_log_line import V1LogLine from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList +from hatchet_sdk.clients.rest.models.v1_log_line_order_by_direction import ( + V1LogLineOrderByDirection, +) from hatchet_sdk.clients.rest.models.v1_replay_task_request import V1ReplayTaskRequest from hatchet_sdk.clients.rest.models.v1_replayed_tasks import V1ReplayedTasks from hatchet_sdk.clients.rest.models.v1_task_event import V1TaskEvent @@ -336,6 +339,7 @@ from hatchet_sdk.clients.rest.models.worker_list import WorkerList from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo from hatchet_sdk.clients.rest.models.worker_runtime_sdks import WorkerRuntimeSDKs +from hatchet_sdk.clients.rest.models.worker_slot_config import WorkerSlotConfig from hatchet_sdk.clients.rest.models.worker_type import WorkerType from hatchet_sdk.clients.rest.models.workflow import Workflow from hatchet_sdk.clients.rest.models.workflow_concurrency import WorkflowConcurrency diff --git a/sdks/python/hatchet_sdk/clients/rest/models/step.py b/sdks/python/hatchet_sdk/clients/rest/models/step.py index 2014b7e9b3..9c1dde53a4 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/step.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/step.py @@ -19,7 +19,7 @@ import re # noqa: F401 from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr from typing_extensions import Self from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta @@ -40,6 +40,14 @@ class Step(BaseModel): timeout: Optional[StrictStr] = Field( default=None, description="The timeout of the step." 
) + is_durable: Optional[StrictBool] = Field( + default=None, description="Whether the step is durable.", alias="isDurable" + ) + slot_requests: Optional[Dict[str, StrictInt]] = Field( + default=None, + description="Slot requests for the step (slot_type -> units).", + alias="slotRequests", + ) children: Optional[List[StrictStr]] = None parents: Optional[List[StrictStr]] = None __properties: ClassVar[List[str]] = [ @@ -49,6 +57,8 @@ class Step(BaseModel): "jobId", "action", "timeout", + "isDurable", + "slotRequests", "children", "parents", ] @@ -116,6 +126,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "jobId": obj.get("jobId"), "action": obj.get("action"), "timeout": obj.get("timeout"), + "isDurable": obj.get("isDurable"), + "slotRequests": obj.get("slotRequests"), "children": obj.get("children"), "parents": obj.get("parents"), } diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line_order_by_direction.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line_order_by_direction.py new file mode 100644 index 0000000000..9acf316e35 --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line_order_by_direction.py @@ -0,0 +1,37 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +from enum import Enum + +from typing_extensions import Self + + +class V1LogLineOrderByDirection(str, Enum): + """ + V1LogLineOrderByDirection + """ + + """ + allowed enum values + """ + ASC = "ASC" + DESC = "DESC" + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of V1LogLineOrderByDirection from a JSON string""" + return cls(json.loads(json_str)) diff --git a/sdks/python/hatchet_sdk/clients/rest/models/worker.py b/sdks/python/hatchet_sdk/clients/rest/models/worker.py index cfa2555895..6859ca32b6 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/worker.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/worker.py @@ -20,7 +20,7 @@ from datetime import datetime from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator from typing_extensions import Annotated, Self from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta @@ -29,6 +29,7 @@ from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo +from hatchet_sdk.clients.rest.models.worker_slot_config import WorkerSlotConfig from hatchet_sdk.clients.rest.models.worker_type import WorkerType @@ -69,15 +70,10 @@ class Worker(BaseModel): status: Optional[StrictStr] = Field( default=None, description="The status of the worker." 
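Because V1LogLineOrderByDirection subclasses both str and Enum, its members drop straight into the query-string serialization shown earlier; a short usage sketch, assuming the generated module is importable at the path introduced in this diff:

from hatchet_sdk.clients.rest.models.v1_log_line_order_by_direction import (
    V1LogLineOrderByDirection,
)

# Round-trip from a JSON string, mirroring the generated from_json helper:
direction = V1LogLineOrderByDirection.from_json('"DESC"')
assert direction is V1LogLineOrderByDirection.DESC

# The generated serializer appends the enum's .value (and the new
# attempt filter) to the query string:
query_params = [("order_by_direction", direction.value), ("attempt", 2)]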
) - max_runs: Optional[StrictInt] = Field( + slot_config: Optional[Dict[str, WorkerSlotConfig]] = Field( default=None, - description="The maximum number of runs this worker can execute concurrently.", - alias="maxRuns", - ) - available_runs: Optional[StrictInt] = Field( - default=None, - description="The number of runs this worker can execute concurrently.", - alias="availableRuns", + description="Slot availability and limits for this worker (slot_type -> { available, limit }).", + alias="slotConfig", ) dispatcher_id: Optional[ Annotated[str, Field(min_length=36, strict=True, max_length=36)] @@ -107,8 +103,7 @@ class Worker(BaseModel): "slots", "recentStepRuns", "status", - "maxRuns", - "availableRuns", + "slotConfig", "dispatcherId", "labels", "webhookUrl", @@ -189,6 +184,15 @@ def to_dict(self) -> Dict[str, Any]: if _item_recent_step_runs: _items.append(_item_recent_step_runs.to_dict()) _dict["recentStepRuns"] = _items + # override the default output from pydantic by calling `to_dict()` of each value in slot_config (dict) + _field_dict = {} + if self.slot_config: + for _key_slot_config in self.slot_config: + if self.slot_config[_key_slot_config]: + _field_dict[_key_slot_config] = self.slot_config[ + _key_slot_config + ].to_dict() + _dict["slotConfig"] = _field_dict # override the default output from pydantic by calling `to_dict()` of each item in labels (list) _items = [] if self.labels: @@ -241,8 +245,14 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: else None ), "status": obj.get("status"), - "maxRuns": obj.get("maxRuns"), - "availableRuns": obj.get("availableRuns"), + "slotConfig": ( + dict( + (_k, WorkerSlotConfig.from_dict(_v)) + for _k, _v in obj["slotConfig"].items() + ) + if obj.get("slotConfig") is not None + else None + ), "dispatcherId": obj.get("dispatcherId"), "labels": ( [WorkerLabel.from_dict(_item) for _item in obj["labels"]] diff --git a/sdks/python/hatchet_sdk/clients/rest/models/worker_slot_config.py b/sdks/python/hatchet_sdk/clients/rest/models/worker_slot_config.py new file mode 100644 index 0000000000..4d3a60f2c6 --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/worker_slot_config.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing_extensions import Self + + +class WorkerSlotConfig(BaseModel): + """ + Slot availability and limits for a slot type. + """ # noqa: E501 + + available: Optional[StrictInt] = Field( + default=None, description="The number of available units for this slot type." + ) + limit: StrictInt = Field( + description="The maximum number of units for this slot type." 
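The Worker model's new slotConfig map deserializes each value through WorkerSlotConfig, whose definition completes just below; a round-trip sketch under the assumption that the generated module is importable:

from hatchet_sdk.clients.rest.models.worker_slot_config import WorkerSlotConfig

# Parse the per-slot-type payload returned under the worker's `slotConfig`
# key, the same way Worker.from_dict does above:
raw = {"default": {"available": 3, "limit": 5}, "durable": {"limit": 100}}
slot_config = {k: WorkerSlotConfig.from_dict(v) for k, v in raw.items()}

assert slot_config["default"].available == 3
assert slot_config["durable"].available is None  # `available` is optional
# to_dict() omits unset/None fields:
assert slot_config["durable"].to_dict() == {"limit": 100}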
+ ) + __properties: ClassVar[List[str]] = ["available", "limit"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkerSlotConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkerSlotConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + {"available": obj.get("available"), "limit": obj.get("limit")} + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/workflow_version.py b/sdks/python/hatchet_sdk/clients/rest/models/workflow_version.py index 6a755bbb0a..e822f0e5f2 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/workflow_version.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/workflow_version.py @@ -58,6 +58,11 @@ class WorkflowVersion(BaseModel): v1_concurrency: Optional[List[ConcurrencySetting]] = Field( default=None, alias="v1Concurrency" ) + input_json_schema: Optional[Dict[str, Any]] = Field( + default=None, + description="The JSON schema for the workflow input.", + alias="inputJsonSchema", + ) __properties: ClassVar[List[str]] = [ "metadata", "version", @@ -72,6 +77,7 @@ class WorkflowVersion(BaseModel): "jobs", "workflowConfig", "v1Concurrency", + "inputJsonSchema", ] model_config = ConfigDict( @@ -190,6 +196,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: if obj.get("v1Concurrency") is not None else None ), + "inputJsonSchema": obj.get("inputJsonSchema"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/config.py b/sdks/python/hatchet_sdk/config.py index e49d2c9eac..63c7f6fdc7 100644 --- a/sdks/python/hatchet_sdk/config.py +++ b/sdks/python/hatchet_sdk/config.py @@ -135,6 +135,37 @@ class ClientConfig(BaseSettings): force_shutdown_on_shutdown_signal: bool = False tenacity: TenacityConfig = TenacityConfig() + # Cancellation configuration + cancellation_grace_period: timedelta = Field( + default=timedelta(seconds=5), + description="The maximum time to wait for a task to complete after cancellation is triggered before force-cancelling. Value is interpreted as seconds when provided as int/float.", + ) + cancellation_warning_threshold: timedelta = Field( + default=timedelta(seconds=3), + description="If a task has not completed cancellation within this duration, a warning will be logged. 
Value is interpreted as seconds when provided as int/float.", + ) + + @field_validator( + "cancellation_grace_period", "cancellation_warning_threshold", mode="before" + ) + @classmethod + def validate_cancellation_timedelta( + cls, value: timedelta | int | float | str + ) -> timedelta: + """Convert int/float/string to timedelta, interpreting as seconds.""" + if isinstance(value, timedelta): + return value + + if isinstance(value, int | float): + return timedelta(seconds=float(value)) + + v = value.strip() + # Allow a small convenience suffix, but keep "seconds" as the contract. + if v.endswith("s"): + v = v[:-1].strip() + + return timedelta(seconds=float(v)) + @model_validator(mode="after") def validate_token_and_tenant(self) -> "ClientConfig": if not self.token: diff --git a/sdks/python/hatchet_sdk/context/context.py b/sdks/python/hatchet_sdk/context/context.py index 549d7dc7f0..8a07f7ebf7 100644 --- a/sdks/python/hatchet_sdk/context/context.py +++ b/sdks/python/hatchet_sdk/context/context.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, Any, cast from warnings import warn +from hatchet_sdk.cancellation import CancellationToken from hatchet_sdk.clients.admin import AdminClient from hatchet_sdk.clients.dispatcher.dispatcher import ( # type: ignore[attr-defined] Action, @@ -21,9 +22,10 @@ flatten_conditions, ) from hatchet_sdk.context.worker_context import WorkerContext -from hatchet_sdk.exceptions import TaskRunError +from hatchet_sdk.exceptions import CancellationReason, TaskRunError from hatchet_sdk.features.runs import RunsClient from hatchet_sdk.logger import logger +from hatchet_sdk.utils.cancellation import await_with_cancellation from hatchet_sdk.utils.timedelta_to_expression import Duration, timedelta_to_expr from hatchet_sdk.utils.typing import JSONSerializableMapping, LogLevel from hatchet_sdk.worker.runner.utils.capture_logs import AsyncLogSender, LogRecord @@ -56,7 +58,7 @@ def __init__( self.action = action self.step_run_id = action.step_run_id - self.exit_flag = False + self.cancellation_token = CancellationToken() self.dispatcher_client = dispatcher_client self.admin_client = admin_client self.event_client = event_client @@ -74,6 +76,31 @@ def __init__( self._workflow_name = workflow_name self._task_name = task_name + @property + def exit_flag(self) -> bool: + """ + Check if the cancellation flag has been set. + + This property is maintained for backwards compatibility. + Use `cancellation_token.is_cancelled` for new code. + + :return: True if the task has been cancelled, False otherwise. + """ + return self.cancellation_token.is_cancelled + + @exit_flag.setter + def exit_flag(self, value: bool) -> None: + """ + Set the cancellation flag. + + This setter is maintained for backwards compatibility. + Setting to True will trigger the cancellation token. + + :param value: True to trigger cancellation, False is a no-op. + """ + if value: + self.cancellation_token.cancel(CancellationReason.USER_REQUESTED) + def _increment_stream_index(self) -> int: index = self.stream_index self.stream_index += 1 @@ -169,8 +196,25 @@ def workflow_run_id(self) -> str: """ return self.action.workflow_run_id - def _set_cancellation_flag(self) -> None: - self.exit_flag = True + def _set_cancellation_flag( + self, reason: CancellationReason = CancellationReason.WORKFLOW_CANCELLED + ) -> None: + """ + Internal method to trigger cancellation. 
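The validate_cancellation_timedelta hook above accepts timedeltas, bare numbers, and strings with an optional "s" suffix. Restated as a standalone helper (hypothetical name) to make the accepted shapes concrete:

from datetime import timedelta

def coerce_seconds(value: timedelta | int | float | str) -> timedelta:
    # Same rules as the validator above: timedeltas pass through,
    # numbers are seconds, strings allow a trailing "s".
    if isinstance(value, timedelta):
        return value
    if isinstance(value, int | float):
        return timedelta(seconds=float(value))
    v = value.strip()
    if v.endswith("s"):
        v = v[:-1].strip()
    return timedelta(seconds=float(v))

assert coerce_seconds(5) == timedelta(seconds=5)
assert coerce_seconds("2.5") == timedelta(seconds=2.5)
assert coerce_seconds("10s") == timedelta(seconds=10)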
+ + This triggers the cancellation token, which will: + - Signal all waiters (async and sync) + - Set the exit_flag property to True + - Allow child workflow cancellation + + Args: + reason: The reason for cancellation. + """ + logger.debug( + f"Context: setting cancellation flag for step_run_id={self.step_run_id}, " + f"reason={reason.value}" + ) + self.cancellation_token.cancel(reason) def cancel(self) -> None: """ @@ -178,9 +222,9 @@ def cancel(self) -> None: :return: None """ - logger.debug("cancelling step...") + logger.debug(f"Context: cancel() called for step_run_id={self.step_run_id}") self.runs_client.cancel(self.step_run_id) - self._set_cancellation_flag() + self._set_cancellation_flag(CancellationReason.USER_REQUESTED) async def aio_cancel(self) -> None: """ @@ -188,9 +232,9 @@ async def aio_cancel(self) -> None: :return: None """ - logger.debug("cancelling step...") + logger.debug(f"Context: aio_cancel() called for step_run_id={self.step_run_id}") await self.runs_client.aio_cancel(self.step_run_id) - self._set_cancellation_flag() + self._set_cancellation_flag(CancellationReason.USER_REQUESTED) def done(self) -> bool: """ @@ -198,7 +242,7 @@ def done(self) -> bool: :return: True if the task run has been cancelled, False otherwise. """ - return self.exit_flag + return self.cancellation_token.is_cancelled def log( self, line: str | JSONSerializableMapping, raise_on_error: bool = False @@ -482,8 +526,11 @@ async def aio_wait_for( """ Durably wait for either a sleep or an event. + This method respects the context's cancellation token. If the task is cancelled + while waiting, an asyncio.CancelledError will be raised. + :param signal_key: The key to use for the durable event. This is used to identify the event in the Hatchet API. - :param *conditions: The conditions to wait for. Can be a SleepCondition or UserEventCondition. + :param \\*conditions: The conditions to wait for. Can be a SleepCondition or UserEventCondition. :return: A dictionary containing the results of the wait. :raises ValueError: If the durable event listener is not available. @@ -493,6 +540,10 @@ async def aio_wait_for( task_id = self.step_run_id + logger.debug( + f"DurableContext.aio_wait_for: waiting for signal_key={signal_key}, task_id={task_id}" + ) + request = RegisterDurableEventRequest( task_id=task_id, signal_key=signal_key, @@ -502,20 +553,30 @@ async def aio_wait_for( self.durable_event_listener.register_durable_event(request) - return await self.durable_event_listener.result( - task_id, - signal_key, + # Use await_with_cancellation to respect the cancellation token + return await await_with_cancellation( + self.durable_event_listener.result(task_id, signal_key), + self.cancellation_token, ) async def aio_sleep_for(self, duration: Duration) -> dict[str, Any]: """ Lightweight wrapper for durable sleep. Allows for shorthand usage of `ctx.aio_wait_for` when specifying a sleep condition. + This method respects the context's cancellation token. If the task is cancelled + while sleeping, an asyncio.CancelledError will be raised. + For more complicated conditions, use `ctx.aio_wait_for` directly. - """ + :param duration: The duration to sleep for. + :return: A dictionary containing the results of the wait. 
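Taken together, the durable helpers read naturally from task code; a hedged sketch, since the task decorator, DurableContext construction, and the exact SleepCondition import path sit outside this diff:

from datetime import timedelta

from hatchet_sdk import SleepCondition  # assumed export path

async def my_durable_task(ctx) -> dict:
    # Shorthand durable sleep; the wait respects ctx.cancellation_token
    # and raises asyncio.CancelledError if the task is cancelled mid-sleep.
    await ctx.aio_sleep_for(timedelta(minutes=5))

    # Long form: register a durable event under an explicit signal key.
    return await ctx.aio_wait_for(
        "my-signal",
        SleepCondition(duration=timedelta(minutes=2)),
    )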
+ """ wait_index = self._increment_wait_index() + logger.debug( + f"DurableContext.aio_sleep_for: sleeping for {duration}, wait_index={wait_index}" + ) + return await self.aio_wait_for( f"sleep:{timedelta_to_expr(duration)}-{wait_index}", SleepCondition(duration=duration), diff --git a/examples/python/__init__.py b/sdks/python/hatchet_sdk/contracts/__init__.py similarity index 100% rename from examples/python/__init__.py rename to sdks/python/hatchet_sdk/contracts/__init__.py diff --git a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py index 568d0d5c8e..3548a7f6ea 100644 --- a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, @@ -22,10 +23,10 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - +from google.protobuf import \ + timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x0cWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValue\"\xc8\x01\n\x0bRuntimeInfo\x12\x17\n\nsdkVersion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x08language\x18\x02 \x01(\x0e\x32\x05.SDKSH\x01\x88\x01\x01\x12\x1c\n\x0flanguageVersion\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02os\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x12\n\x05\x65xtra\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_sdkVersionB\x0b\n\t_languageB\x12\n\x10_languageVersionB\x05\n\x03_osB\x08\n\x06_extra\"\xc0\x02\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x16\n\twebhookId\x18\x06 \x01(\tH\x01\x88\x01\x01\x12&\n\x0bruntimeInfo\x18\x07 \x01(\x0b\x32\x0c.RuntimeInfoH\x02\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\n\n\x08_maxRunsB\x0c\n\n_webhookIdB\x0e\n\x0c_runtimeInfo\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xa3\x01\n\x19UpsertWorkerLabelsRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"@\n\x1aUpsertWorkerLabelsResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xf6\x04\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n 
\x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\x12\x12\n\nretryCount\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x12\x10\n\x08priority\x18\x12 \x01(\x05\x12\x17\n\nworkflowId\x18\x13 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11workflowVersionId\x18\x14 \x01(\tH\x05\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_idB\r\n\x0b_workflowIdB\x14\n\x12_workflowVersionId\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xc4\x02\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\x12\x17\n\nretryCount\x18\n \x01(\x05H\x00\x88\x01\x01\x12\x1b\n\x0eshouldNotRetry\x18\x0b \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_retryCountB\x11\n\x0f_shouldNotRetry\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xc0\x01\n SubscribeToWorkflowEventsRequest\x12\x1a\n\rworkflowRunId\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1e\n\x11\x61\x64\x64itionalMetaKey\x18\x02 \x01(\tH\x01\x88\x01\x01\x12 \n\x13\x61\x64\x64itionalMetaValue\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x10\n\x0e_workflowRunIdB\x14\n\x12_additionalMetaKeyB\x16\n\x14_additionalMetaValue\"7\n\x1eSubscribeToWorkflowRunsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xda\x02\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x18\n\x0bstepRetries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x17\n\nretryCount\x18\t \x01(\x05H\x01\x88\x01\x01\x12\x17\n\neventIndex\x18\n \x01(\x03H\x02\x88\x01\x01\x42\x0e\n\x0c_stepRetriesB\r\n\x0b_retryCountB\r\n\x0b_eventIndex\"\xa8\x01\n\x10WorkflowRunEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12(\n\teventType\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x8a\x01\n\rStepRunResult\x12\x11\n\tstepRunId\x18\x01 
\x01(\t\x12\x16\n\x0estepReadableId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"U\n\x10HeartbeatRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12/\n\x0bheartbeatAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"F\n\x15RefreshTimeoutRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x1a\n\x12incrementTimeoutBy\x18\x02 \x01(\t\"G\n\x16RefreshTimeoutResponse\x12-\n\ttimeoutAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\'\n\x12ReleaseSlotRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*7\n\x04SDKS\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02GO\x10\x01\x12\n\n\x06PYTHON\x10\x02\x12\x0e\n\nTYPESCRIPT\x10\x03*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 \n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n 
WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"Z\n\x0cWorkerLabels\x12\x16\n\tstr_value\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tint_value\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0c\n\n_str_valueB\x0c\n\n_int_value\"\xcc\x01\n\x0bRuntimeInfo\x12\x18\n\x0bsdk_version\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x08language\x18\x02 \x01(\x0e\x32\x05.SDKSH\x01\x88\x01\x01\x12\x1d\n\x10language_version\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02os\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x12\n\x05\x65xtra\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_sdk_versionB\x0b\n\t_languageB\x13\n\x11_language_versionB\x05\n\x03_osB\x08\n\x06_extra\"\xc1\x02\n\x15WorkerRegisterRequest\x12\x13\n\x0bworker_name\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x12\n\x05slots\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x17\n\nwebhook_id\x18\x06 \x01(\tH\x01\x88\x01\x01\x12\'\n\x0cruntime_info\x18\x07 \x01(\x0b\x32\x0c.RuntimeInfoH\x02\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\x08\n\x06_slotsB\r\n\x0b_webhook_idB\x0f\n\r_runtime_info\"S\n\x16WorkerRegisterResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\x12\x13\n\x0bworker_name\x18\x03 \x01(\t\"\xa4\x01\n\x19UpsertWorkerLabelsRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"B\n\x1aUpsertWorkerLabelsResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\x98\x05\n\x0e\x41ssignedAction\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 \x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x04 \x01(\t\x12\x10\n\x08job_name\x18\x05 \x01(\t\x12\x12\n\njob_run_id\x18\x06 \x01(\t\x12\x0f\n\x07task_id\x18\x07 
\x01(\t\x12\x1c\n\x14task_run_external_id\x18\x08 \x01(\t\x12\x11\n\taction_id\x18\t \x01(\t\x12 \n\x0b\x61\x63tion_type\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x16\n\x0e\x61\x63tion_payload\x18\x0b \x01(\t\x12\x11\n\ttask_name\x18\x0c \x01(\t\x12\x13\n\x0bretry_count\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x12\x10\n\x08priority\x18\x12 \x01(\x05\x12\x18\n\x0bworkflow_id\x18\x13 \x01(\tH\x04\x88\x01\x01\x12 \n\x13workflow_version_id\x18\x14 \x01(\tH\x05\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_idB\x0e\n\x0c_workflow_idB\x16\n\x14_workflow_version_id\"(\n\x13WorkerListenRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"-\n\x18WorkerUnsubscribeRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"A\n\x19WorkerUnsubscribeResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xec\x01\n\x13GroupKeyActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 \x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x11\n\taction_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\nevent_type\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x15\n\revent_payload\x18\x07 \x01(\t\"\xde\x02\n\x0fStepActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x0f\n\x07task_id\x18\x04 \x01(\t\x12\x1c\n\x14task_run_external_id\x18\x05 \x01(\t\x12\x11\n\taction_id\x18\x06 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\nevent_type\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x15\n\revent_payload\x18\t \x01(\t\x12\x18\n\x0bretry_count\x18\n \x01(\x05H\x00\x88\x01\x01\x12\x1d\n\x10should_not_retry\x18\x0b \x01(\x08H\x01\x88\x01\x01\x42\x0e\n\x0c_retry_countB\x13\n\x11_should_not_retry\";\n\x13\x41\x63tionEventResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xcc\x01\n SubscribeToWorkflowEventsRequest\x12\x1c\n\x0fworkflow_run_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_meta_key\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\"\n\x15\x61\x64\x64itional_meta_value\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x12\n\x10_workflow_run_idB\x16\n\x14_additional_meta_keyB\x18\n\x16_additional_meta_value\"9\n\x1eSubscribeToWorkflowRunsRequest\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"\xe7\x02\n\rWorkflowEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12$\n\rresource_type\x18\x02 \x01(\x0e\x32\r.ResourceType\x12&\n\nevent_type\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x13\n\x0bresource_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x15\n\revent_payload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x19\n\x0ctask_retries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x18\n\x0bretry_count\x18\t \x01(\x05H\x01\x88\x01\x01\x12\x18\n\x0b\x65vent_index\x18\n \x01(\x03H\x02\x88\x01\x01\x42\x0f\n\r_task_retriesB\x0e\n\x0c_retry_countB\x0e\n\x0c_event_index\"\xac\x01\n\x10WorkflowRunEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12)\n\nevent_type\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x33\n\x0f\x65vent_timestamp\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x92\x01\n\rStepRunResult\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x11\n\ttask_name\x18\x02 \x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"c\n\rOverridesData\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x17\n\x0f\x63\x61ller_filename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"W\n\x10HeartbeatRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x30\n\x0cheartbeat_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"S\n\x15RefreshTimeoutRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x1c\n\x14increment_timeout_by\x18\x02 \x01(\t\"H\n\x16RefreshTimeoutResponse\x12.\n\ntimeout_at\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"2\n\x12ReleaseSlotRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*7\n\x04SDKS\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02GO\x10\x01\x12\n\n\x06PYTHON\x10\x02\x12\x0e\n\nTYPESCRIPT\x10\x03*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 \n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n 
WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -35,78 +36,80 @@ _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._loaded_options = None _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_options = b'8\001' + _globals['_WORKERREGISTERREQUEST_SLOTCONFIGENTRY']._loaded_options = None + _globals['_WORKERREGISTERREQUEST_SLOTCONFIGENTRY']._serialized_options = b'8\001' _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._loaded_options = None _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_options = b'8\001' - _globals['_SDKS']._serialized_start=3724 - _globals['_SDKS']._serialized_end=3779 - _globals['_ACTIONTYPE']._serialized_start=3781 - _globals['_ACTIONTYPE']._serialized_end=3859 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=3862 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=4024 - _globals['_STEPACTIONEVENTTYPE']._serialized_start=4027 - _globals['_STEPACTIONEVENTTYPE']._serialized_end=4199 - _globals['_RESOURCETYPE']._serialized_start=4201 - _globals['_RESOURCETYPE']._serialized_end=4302 - _globals['_RESOURCEEVENTTYPE']._serialized_start=4305 - _globals['_RESOURCEEVENTTYPE']._serialized_end=4559 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4561 - _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4621 + _globals['_SDKS']._serialized_start=3894 + _globals['_SDKS']._serialized_end=3949 + _globals['_ACTIONTYPE']._serialized_start=3951 + _globals['_ACTIONTYPE']._serialized_end=4029 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=4032 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=4194 + _globals['_STEPACTIONEVENTTYPE']._serialized_start=4197 + _globals['_STEPACTIONEVENTTYPE']._serialized_end=4369 + _globals['_RESOURCETYPE']._serialized_start=4371 + _globals['_RESOURCETYPE']._serialized_end=4472 + _globals['_RESOURCEEVENTTYPE']._serialized_start=4475 + _globals['_RESOURCEEVENTTYPE']._serialized_end=4729 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4731 + _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4791 
_globals['_WORKERLABELS']._serialized_start=53 - _globals['_WORKERLABELS']._serialized_end=139 - _globals['_RUNTIMEINFO']._serialized_start=142 - _globals['_RUNTIMEINFO']._serialized_end=342 - _globals['_WORKERREGISTERREQUEST']._serialized_start=345 - _globals['_WORKERREGISTERREQUEST']._serialized_end=665 - _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_start=563 - _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_end=623 - _globals['_WORKERREGISTERRESPONSE']._serialized_start=667 - _globals['_WORKERREGISTERRESPONSE']._serialized_end=747 - _globals['_UPSERTWORKERLABELSREQUEST']._serialized_start=750 - _globals['_UPSERTWORKERLABELSREQUEST']._serialized_end=913 - _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_start=563 - _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_end=623 - _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_start=915 - _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_end=979 - _globals['_ASSIGNEDACTION']._serialized_start=982 - _globals['_ASSIGNEDACTION']._serialized_end=1612 - _globals['_WORKERLISTENREQUEST']._serialized_start=1614 - _globals['_WORKERLISTENREQUEST']._serialized_end=1653 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=1655 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=1699 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=1701 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=1764 - _globals['_GROUPKEYACTIONEVENT']._serialized_start=1767 - _globals['_GROUPKEYACTIONEVENT']._serialized_end=1992 - _globals['_STEPACTIONEVENT']._serialized_start=1995 - _globals['_STEPACTIONEVENT']._serialized_end=2319 - _globals['_ACTIONEVENTRESPONSE']._serialized_start=2321 - _globals['_ACTIONEVENTRESPONSE']._serialized_end=2378 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=2381 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=2573 - _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=2575 - _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=2630 - _globals['_WORKFLOWEVENT']._serialized_start=2633 - _globals['_WORKFLOWEVENT']._serialized_end=2979 - _globals['_WORKFLOWRUNEVENT']._serialized_start=2982 - _globals['_WORKFLOWRUNEVENT']._serialized_end=3150 - _globals['_STEPRUNRESULT']._serialized_start=3153 - _globals['_STEPRUNRESULT']._serialized_end=3291 - _globals['_OVERRIDESDATA']._serialized_start=3293 - _globals['_OVERRIDESDATA']._serialized_end=3380 - _globals['_OVERRIDESDATARESPONSE']._serialized_start=3382 - _globals['_OVERRIDESDATARESPONSE']._serialized_end=3405 - _globals['_HEARTBEATREQUEST']._serialized_start=3407 - _globals['_HEARTBEATREQUEST']._serialized_end=3492 - _globals['_HEARTBEATRESPONSE']._serialized_start=3494 - _globals['_HEARTBEATRESPONSE']._serialized_end=3513 - _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=3515 - _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=3585 - _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=3587 - _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=3658 - _globals['_RELEASESLOTREQUEST']._serialized_start=3660 - _globals['_RELEASESLOTREQUEST']._serialized_end=3699 - _globals['_RELEASESLOTRESPONSE']._serialized_start=3701 - _globals['_RELEASESLOTRESPONSE']._serialized_end=3722 - _globals['_DISPATCHER']._serialized_start=4624 - _globals['_DISPATCHER']._serialized_end=5512 + _globals['_WORKERLABELS']._serialized_end=143 + _globals['_RUNTIMEINFO']._serialized_start=146 + _globals['_RUNTIMEINFO']._serialized_end=350 + _globals['_WORKERREGISTERREQUEST']._serialized_start=353 + 
_globals['_WORKERREGISTERREQUEST']._serialized_end=674 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_start=572 + _globals['_WORKERREGISTERREQUEST_LABELSENTRY']._serialized_end=632 + _globals['_WORKERREGISTERRESPONSE']._serialized_start=676 + _globals['_WORKERREGISTERRESPONSE']._serialized_end=759 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_start=762 + _globals['_UPSERTWORKERLABELSREQUEST']._serialized_end=926 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_start=572 + _globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_end=632 + _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_start=928 + _globals['_UPSERTWORKERLABELSRESPONSE']._serialized_end=994 + _globals['_ASSIGNEDACTION']._serialized_start=997 + _globals['_ASSIGNEDACTION']._serialized_end=1661 + _globals['_WORKERLISTENREQUEST']._serialized_start=1663 + _globals['_WORKERLISTENREQUEST']._serialized_end=1703 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=1705 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=1750 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=1752 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=1817 + _globals['_GROUPKEYACTIONEVENT']._serialized_start=1820 + _globals['_GROUPKEYACTIONEVENT']._serialized_end=2056 + _globals['_STEPACTIONEVENT']._serialized_start=2059 + _globals['_STEPACTIONEVENT']._serialized_end=2409 + _globals['_ACTIONEVENTRESPONSE']._serialized_start=2411 + _globals['_ACTIONEVENTRESPONSE']._serialized_end=2470 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=2473 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=2677 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_start=2679 + _globals['_SUBSCRIBETOWORKFLOWRUNSREQUEST']._serialized_end=2736 + _globals['_WORKFLOWEVENT']._serialized_start=2739 + _globals['_WORKFLOWEVENT']._serialized_end=3098 + _globals['_WORKFLOWRUNEVENT']._serialized_start=3101 + _globals['_WORKFLOWRUNEVENT']._serialized_end=3273 + _globals['_STEPRUNRESULT']._serialized_start=3276 + _globals['_STEPRUNRESULT']._serialized_end=3422 + _globals['_OVERRIDESDATA']._serialized_start=3424 + _globals['_OVERRIDESDATA']._serialized_end=3523 + _globals['_OVERRIDESDATARESPONSE']._serialized_start=3525 + _globals['_OVERRIDESDATARESPONSE']._serialized_end=3548 + _globals['_HEARTBEATREQUEST']._serialized_start=3550 + _globals['_HEARTBEATREQUEST']._serialized_end=3637 + _globals['_HEARTBEATRESPONSE']._serialized_start=3639 + _globals['_HEARTBEATRESPONSE']._serialized_end=3658 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_start=3660 + _globals['_REFRESHTIMEOUTREQUEST']._serialized_end=3743 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_start=3745 + _globals['_REFRESHTIMEOUTRESPONSE']._serialized_end=3817 + _globals['_RELEASESLOTREQUEST']._serialized_start=3819 + _globals['_RELEASESLOTREQUEST']._serialized_end=3869 + _globals['_RELEASESLOTRESPONSE']._serialized_start=3871 + _globals['_RELEASESLOTRESPONSE']._serialized_end=3892 + _globals['_DISPATCHER']._serialized_start=4794 + _globals['_DISPATCHER']._serialized_end=5682 # @@protoc_insertion_point(module_scope) diff --git a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi index 9a4e3de1c2..56a3fb9ecc 100644 --- a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi @@ -1,12 +1,15 @@ import datetime +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from 
typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor @@ -86,29 +89,29 @@ RESOURCE_EVENT_TYPE_STREAM: ResourceEventType WORKFLOW_RUN_EVENT_TYPE_FINISHED: WorkflowRunEventType class WorkerLabels(_message.Message): - __slots__ = ("strValue", "intValue") - STRVALUE_FIELD_NUMBER: _ClassVar[int] - INTVALUE_FIELD_NUMBER: _ClassVar[int] - strValue: str - intValue: int - def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ...) -> None: ... + __slots__ = ("str_value", "int_value") + STR_VALUE_FIELD_NUMBER: _ClassVar[int] + INT_VALUE_FIELD_NUMBER: _ClassVar[int] + str_value: str + int_value: int + def __init__(self, str_value: _Optional[str] = ..., int_value: _Optional[int] = ...) -> None: ... class RuntimeInfo(_message.Message): - __slots__ = ("sdkVersion", "language", "languageVersion", "os", "extra") - SDKVERSION_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("sdk_version", "language", "language_version", "os", "extra") + SDK_VERSION_FIELD_NUMBER: _ClassVar[int] LANGUAGE_FIELD_NUMBER: _ClassVar[int] - LANGUAGEVERSION_FIELD_NUMBER: _ClassVar[int] + LANGUAGE_VERSION_FIELD_NUMBER: _ClassVar[int] OS_FIELD_NUMBER: _ClassVar[int] EXTRA_FIELD_NUMBER: _ClassVar[int] - sdkVersion: str + sdk_version: str language: SDKS - languageVersion: str + language_version: str os: str extra: str - def __init__(self, sdkVersion: _Optional[str] = ..., language: _Optional[_Union[SDKS, str]] = ..., languageVersion: _Optional[str] = ..., os: _Optional[str] = ..., extra: _Optional[str] = ...) -> None: ... + def __init__(self, sdk_version: _Optional[str] = ..., language: _Optional[_Union[SDKS, str]] = ..., language_version: _Optional[str] = ..., os: _Optional[str] = ..., extra: _Optional[str] = ...) -> None: ... class WorkerRegisterRequest(_message.Message): - __slots__ = ("workerName", "actions", "services", "maxRuns", "labels", "webhookId", "runtimeInfo") + __slots__ = ("worker_name", "actions", "services", "slots", "labels", "webhook_id", "runtime_info", "durable_slots", "slot_config") class LabelsEntry(_message.Message): __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] @@ -116,34 +119,45 @@ class WorkerRegisterRequest(_message.Message): key: str value: WorkerLabels def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... - WORKERNAME_FIELD_NUMBER: _ClassVar[int] + class SlotConfigEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: int + def __init__(self, key: _Optional[str] = ..., value: _Optional[int] = ...) -> None: ... 
+ WORKER_NAME_FIELD_NUMBER: _ClassVar[int] ACTIONS_FIELD_NUMBER: _ClassVar[int] SERVICES_FIELD_NUMBER: _ClassVar[int] - MAXRUNS_FIELD_NUMBER: _ClassVar[int] + SLOTS_FIELD_NUMBER: _ClassVar[int] LABELS_FIELD_NUMBER: _ClassVar[int] - WEBHOOKID_FIELD_NUMBER: _ClassVar[int] - RUNTIMEINFO_FIELD_NUMBER: _ClassVar[int] - workerName: str + WEBHOOK_ID_FIELD_NUMBER: _ClassVar[int] + RUNTIME_INFO_FIELD_NUMBER: _ClassVar[int] + DURABLE_SLOTS_FIELD_NUMBER: _ClassVar[int] + SLOT_CONFIG_FIELD_NUMBER: _ClassVar[int] + worker_name: str actions: _containers.RepeatedScalarFieldContainer[str] services: _containers.RepeatedScalarFieldContainer[str] - maxRuns: int + slots: int labels: _containers.MessageMap[str, WorkerLabels] - webhookId: str - runtimeInfo: RuntimeInfo - def __init__(self, workerName: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., maxRuns: _Optional[int] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ..., webhookId: _Optional[str] = ..., runtimeInfo: _Optional[_Union[RuntimeInfo, _Mapping]] = ...) -> None: ... + webhook_id: str + runtime_info: RuntimeInfo + durable_slots: int + slot_config: _containers.ScalarMap[str, int] + def __init__(self, worker_name: _Optional[str] = ..., actions: _Optional[_Iterable[str]] = ..., services: _Optional[_Iterable[str]] = ..., slots: _Optional[int] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ..., webhook_id: _Optional[str] = ..., runtime_info: _Optional[_Union[RuntimeInfo, _Mapping]] = ..., durable_slots: _Optional[int] = ..., slot_config: _Optional[_Mapping[str, int]] = ...) -> None: ... class WorkerRegisterResponse(_message.Message): - __slots__ = ("tenantId", "workerId", "workerName") - TENANTID_FIELD_NUMBER: _ClassVar[int] - WORKERID_FIELD_NUMBER: _ClassVar[int] - WORKERNAME_FIELD_NUMBER: _ClassVar[int] - tenantId: str - workerId: str - workerName: str - def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ..., workerName: _Optional[str] = ...) -> None: ... + __slots__ = ("tenant_id", "worker_id", "worker_name") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + WORKER_NAME_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + worker_id: str + worker_name: str + def __init__(self, tenant_id: _Optional[str] = ..., worker_id: _Optional[str] = ..., worker_name: _Optional[str] = ...) -> None: ... class UpsertWorkerLabelsRequest(_message.Message): - __slots__ = ("workerId", "labels") + __slots__ = ("worker_id", "labels") class LabelsEntry(_message.Message): __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] @@ -151,249 +165,249 @@ class UpsertWorkerLabelsRequest(_message.Message): key: str value: WorkerLabels def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[WorkerLabels, _Mapping]] = ...) -> None: ... - WORKERID_FIELD_NUMBER: _ClassVar[int] + WORKER_ID_FIELD_NUMBER: _ClassVar[int] LABELS_FIELD_NUMBER: _ClassVar[int] - workerId: str + worker_id: str labels: _containers.MessageMap[str, WorkerLabels] - def __init__(self, workerId: _Optional[str] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... + def __init__(self, worker_id: _Optional[str] = ..., labels: _Optional[_Mapping[str, WorkerLabels]] = ...) -> None: ... 
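Note: with the regenerated stubs above, callers construct registration requests with snake_case keyword arguments, and maxRuns is replaced by slots alongside the new durable_slots and slot_config fields. A minimal sketch (the worker name, action id, and slot values are illustrative):

from hatchet_sdk.contracts import dispatcher_pb2

# snake_case fields replace the old camelCase ones (workerName, maxRuns, webhookId, ...)
req = dispatcher_pb2.WorkerRegisterRequest(
    worker_name="example-worker",          # was workerName; illustrative value
    actions=["default:process"],           # illustrative action id
    slots=10,                              # was maxRuns
    durable_slots=5,                       # new durable-slot budget
    slot_config={"default": 1, "gpu": 4},  # new slot_type -> units map
)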
class UpsertWorkerLabelsResponse(_message.Message): - __slots__ = ("tenantId", "workerId") - TENANTID_FIELD_NUMBER: _ClassVar[int] - WORKERID_FIELD_NUMBER: _ClassVar[int] - tenantId: str - workerId: str - def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ... + __slots__ = ("tenant_id", "worker_id") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + worker_id: str + def __init__(self, tenant_id: _Optional[str] = ..., worker_id: _Optional[str] = ...) -> None: ... class AssignedAction(_message.Message): - __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload", "stepName", "retryCount", "additional_metadata", "child_workflow_index", "child_workflow_key", "parent_workflow_run_id", "priority", "workflowId", "workflowVersionId") - TENANTID_FIELD_NUMBER: _ClassVar[int] - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - GETGROUPKEYRUNID_FIELD_NUMBER: _ClassVar[int] - JOBID_FIELD_NUMBER: _ClassVar[int] - JOBNAME_FIELD_NUMBER: _ClassVar[int] - JOBRUNID_FIELD_NUMBER: _ClassVar[int] - STEPID_FIELD_NUMBER: _ClassVar[int] - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - ACTIONID_FIELD_NUMBER: _ClassVar[int] - ACTIONTYPE_FIELD_NUMBER: _ClassVar[int] - ACTIONPAYLOAD_FIELD_NUMBER: _ClassVar[int] - STEPNAME_FIELD_NUMBER: _ClassVar[int] - RETRYCOUNT_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("tenant_id", "workflow_run_id", "get_group_key_run_id", "job_id", "job_name", "job_run_id", "task_id", "task_run_external_id", "action_id", "action_type", "action_payload", "task_name", "retry_count", "additional_metadata", "child_workflow_index", "child_workflow_key", "parent_workflow_run_id", "priority", "workflow_id", "workflow_version_id") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + GET_GROUP_KEY_RUN_ID_FIELD_NUMBER: _ClassVar[int] + JOB_ID_FIELD_NUMBER: _ClassVar[int] + JOB_NAME_FIELD_NUMBER: _ClassVar[int] + JOB_RUN_ID_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_TYPE_FIELD_NUMBER: _ClassVar[int] + ACTION_PAYLOAD_FIELD_NUMBER: _ClassVar[int] + TASK_NAME_FIELD_NUMBER: _ClassVar[int] + RETRY_COUNT_FIELD_NUMBER: _ClassVar[int] ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] CHILD_WORKFLOW_INDEX_FIELD_NUMBER: _ClassVar[int] CHILD_WORKFLOW_KEY_FIELD_NUMBER: _ClassVar[int] PARENT_WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] PRIORITY_FIELD_NUMBER: _ClassVar[int] - WORKFLOWID_FIELD_NUMBER: _ClassVar[int] - WORKFLOWVERSIONID_FIELD_NUMBER: _ClassVar[int] - tenantId: str - workflowRunId: str - getGroupKeyRunId: str - jobId: str - jobName: str - jobRunId: str - stepId: str - stepRunId: str - actionId: str - actionType: ActionType - actionPayload: str - stepName: str - retryCount: int + WORKFLOW_ID_FIELD_NUMBER: _ClassVar[int] + WORKFLOW_VERSION_ID_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + workflow_run_id: str + get_group_key_run_id: str + job_id: str + job_name: str + job_run_id: str + task_id: str + task_run_external_id: str + action_id: str + action_type: ActionType + action_payload: str + task_name: str + retry_count: int additional_metadata: str child_workflow_index: int child_workflow_key: str parent_workflow_run_id: str priority: int - workflowId: str - workflowVersionId: str - def __init__(self, tenantId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., 
getGroupKeyRunId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobName: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., actionType: _Optional[_Union[ActionType, str]] = ..., actionPayload: _Optional[str] = ..., stepName: _Optional[str] = ..., retryCount: _Optional[int] = ..., additional_metadata: _Optional[str] = ..., child_workflow_index: _Optional[int] = ..., child_workflow_key: _Optional[str] = ..., parent_workflow_run_id: _Optional[str] = ..., priority: _Optional[int] = ..., workflowId: _Optional[str] = ..., workflowVersionId: _Optional[str] = ...) -> None: ... + workflow_id: str + workflow_version_id: str + def __init__(self, tenant_id: _Optional[str] = ..., workflow_run_id: _Optional[str] = ..., get_group_key_run_id: _Optional[str] = ..., job_id: _Optional[str] = ..., job_name: _Optional[str] = ..., job_run_id: _Optional[str] = ..., task_id: _Optional[str] = ..., task_run_external_id: _Optional[str] = ..., action_id: _Optional[str] = ..., action_type: _Optional[_Union[ActionType, str]] = ..., action_payload: _Optional[str] = ..., task_name: _Optional[str] = ..., retry_count: _Optional[int] = ..., additional_metadata: _Optional[str] = ..., child_workflow_index: _Optional[int] = ..., child_workflow_key: _Optional[str] = ..., parent_workflow_run_id: _Optional[str] = ..., priority: _Optional[int] = ..., workflow_id: _Optional[str] = ..., workflow_version_id: _Optional[str] = ...) -> None: ... class WorkerListenRequest(_message.Message): - __slots__ = ("workerId",) - WORKERID_FIELD_NUMBER: _ClassVar[int] - workerId: str - def __init__(self, workerId: _Optional[str] = ...) -> None: ... + __slots__ = ("worker_id",) + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + worker_id: str + def __init__(self, worker_id: _Optional[str] = ...) -> None: ... class WorkerUnsubscribeRequest(_message.Message): - __slots__ = ("workerId",) - WORKERID_FIELD_NUMBER: _ClassVar[int] - workerId: str - def __init__(self, workerId: _Optional[str] = ...) -> None: ... + __slots__ = ("worker_id",) + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + worker_id: str + def __init__(self, worker_id: _Optional[str] = ...) -> None: ... class WorkerUnsubscribeResponse(_message.Message): - __slots__ = ("tenantId", "workerId") - TENANTID_FIELD_NUMBER: _ClassVar[int] - WORKERID_FIELD_NUMBER: _ClassVar[int] - tenantId: str - workerId: str - def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ... + __slots__ = ("tenant_id", "worker_id") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + worker_id: str + def __init__(self, tenant_id: _Optional[str] = ..., worker_id: _Optional[str] = ...) -> None: ... 
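For code consuming AssignedAction messages, the old stepId / stepRunId / stepName attributes are now read as task_id, task_run_external_id, and task_name. A small sketch that touches only fields present in the stub above:

from hatchet_sdk.contracts.dispatcher_pb2 import AssignedAction

def describe(action: AssignedAction) -> str:
    # task_id / task_run_external_id / task_name replace stepId / stepRunId / stepName
    return (
        f"run {action.workflow_run_id}: task {action.task_name} "
        f"({action.task_run_external_id}), retry {action.retry_count}"
    )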
class GroupKeyActionEvent(_message.Message): - __slots__ = ("workerId", "workflowRunId", "getGroupKeyRunId", "actionId", "eventTimestamp", "eventType", "eventPayload") - WORKERID_FIELD_NUMBER: _ClassVar[int] - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - GETGROUPKEYRUNID_FIELD_NUMBER: _ClassVar[int] - ACTIONID_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] - EVENTTYPE_FIELD_NUMBER: _ClassVar[int] - EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int] - workerId: str - workflowRunId: str - getGroupKeyRunId: str - actionId: str - eventTimestamp: _timestamp_pb2.Timestamp - eventType: GroupKeyActionEventType - eventPayload: str - def __init__(self, workerId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., getGroupKeyRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., eventType: _Optional[_Union[GroupKeyActionEventType, str]] = ..., eventPayload: _Optional[str] = ...) -> None: ... + __slots__ = ("worker_id", "workflow_run_id", "get_group_key_run_id", "action_id", "event_timestamp", "event_type", "event_payload") + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + GET_GROUP_KEY_RUN_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_ID_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + EVENT_TYPE_FIELD_NUMBER: _ClassVar[int] + EVENT_PAYLOAD_FIELD_NUMBER: _ClassVar[int] + worker_id: str + workflow_run_id: str + get_group_key_run_id: str + action_id: str + event_timestamp: _timestamp_pb2.Timestamp + event_type: GroupKeyActionEventType + event_payload: str + def __init__(self, worker_id: _Optional[str] = ..., workflow_run_id: _Optional[str] = ..., get_group_key_run_id: _Optional[str] = ..., action_id: _Optional[str] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., event_type: _Optional[_Union[GroupKeyActionEventType, str]] = ..., event_payload: _Optional[str] = ...) -> None: ... class StepActionEvent(_message.Message): - __slots__ = ("workerId", "jobId", "jobRunId", "stepId", "stepRunId", "actionId", "eventTimestamp", "eventType", "eventPayload", "retryCount", "shouldNotRetry") - WORKERID_FIELD_NUMBER: _ClassVar[int] - JOBID_FIELD_NUMBER: _ClassVar[int] - JOBRUNID_FIELD_NUMBER: _ClassVar[int] - STEPID_FIELD_NUMBER: _ClassVar[int] - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - ACTIONID_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] - EVENTTYPE_FIELD_NUMBER: _ClassVar[int] - EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int] - RETRYCOUNT_FIELD_NUMBER: _ClassVar[int] - SHOULDNOTRETRY_FIELD_NUMBER: _ClassVar[int] - workerId: str - jobId: str - jobRunId: str - stepId: str - stepRunId: str - actionId: str - eventTimestamp: _timestamp_pb2.Timestamp - eventType: StepActionEventType - eventPayload: str - retryCount: int - shouldNotRetry: bool - def __init__(self, workerId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., eventType: _Optional[_Union[StepActionEventType, str]] = ..., eventPayload: _Optional[str] = ..., retryCount: _Optional[int] = ..., shouldNotRetry: bool = ...) -> None: ... 
+ __slots__ = ("worker_id", "job_id", "job_run_id", "task_id", "task_run_external_id", "action_id", "event_timestamp", "event_type", "event_payload", "retry_count", "should_not_retry") + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + JOB_ID_FIELD_NUMBER: _ClassVar[int] + JOB_RUN_ID_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + ACTION_ID_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + EVENT_TYPE_FIELD_NUMBER: _ClassVar[int] + EVENT_PAYLOAD_FIELD_NUMBER: _ClassVar[int] + RETRY_COUNT_FIELD_NUMBER: _ClassVar[int] + SHOULD_NOT_RETRY_FIELD_NUMBER: _ClassVar[int] + worker_id: str + job_id: str + job_run_id: str + task_id: str + task_run_external_id: str + action_id: str + event_timestamp: _timestamp_pb2.Timestamp + event_type: StepActionEventType + event_payload: str + retry_count: int + should_not_retry: bool + def __init__(self, worker_id: _Optional[str] = ..., job_id: _Optional[str] = ..., job_run_id: _Optional[str] = ..., task_id: _Optional[str] = ..., task_run_external_id: _Optional[str] = ..., action_id: _Optional[str] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., event_type: _Optional[_Union[StepActionEventType, str]] = ..., event_payload: _Optional[str] = ..., retry_count: _Optional[int] = ..., should_not_retry: bool = ...) -> None: ... class ActionEventResponse(_message.Message): - __slots__ = ("tenantId", "workerId") - TENANTID_FIELD_NUMBER: _ClassVar[int] - WORKERID_FIELD_NUMBER: _ClassVar[int] - tenantId: str - workerId: str - def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ...) -> None: ... + __slots__ = ("tenant_id", "worker_id") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + worker_id: str + def __init__(self, tenant_id: _Optional[str] = ..., worker_id: _Optional[str] = ...) -> None: ... class SubscribeToWorkflowEventsRequest(_message.Message): - __slots__ = ("workflowRunId", "additionalMetaKey", "additionalMetaValue") - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - ADDITIONALMETAKEY_FIELD_NUMBER: _ClassVar[int] - ADDITIONALMETAVALUE_FIELD_NUMBER: _ClassVar[int] - workflowRunId: str - additionalMetaKey: str - additionalMetaValue: str - def __init__(self, workflowRunId: _Optional[str] = ..., additionalMetaKey: _Optional[str] = ..., additionalMetaValue: _Optional[str] = ...) -> None: ... + __slots__ = ("workflow_run_id", "additional_meta_key", "additional_meta_value") + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_META_KEY_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_META_VALUE_FIELD_NUMBER: _ClassVar[int] + workflow_run_id: str + additional_meta_key: str + additional_meta_value: str + def __init__(self, workflow_run_id: _Optional[str] = ..., additional_meta_key: _Optional[str] = ..., additional_meta_value: _Optional[str] = ...) -> None: ... class SubscribeToWorkflowRunsRequest(_message.Message): - __slots__ = ("workflowRunId",) - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - workflowRunId: str - def __init__(self, workflowRunId: _Optional[str] = ...) -> None: ... + __slots__ = ("workflow_run_id",) + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + workflow_run_id: str + def __init__(self, workflow_run_id: _Optional[str] = ...) -> None: ... 
class WorkflowEvent(_message.Message): - __slots__ = ("workflowRunId", "resourceType", "eventType", "resourceId", "eventTimestamp", "eventPayload", "hangup", "stepRetries", "retryCount", "eventIndex") - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - RESOURCETYPE_FIELD_NUMBER: _ClassVar[int] - EVENTTYPE_FIELD_NUMBER: _ClassVar[int] - RESOURCEID_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] - EVENTPAYLOAD_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("workflow_run_id", "resource_type", "event_type", "resource_id", "event_timestamp", "event_payload", "hangup", "task_retries", "retry_count", "event_index") + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + EVENT_TYPE_FIELD_NUMBER: _ClassVar[int] + RESOURCE_ID_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + EVENT_PAYLOAD_FIELD_NUMBER: _ClassVar[int] HANGUP_FIELD_NUMBER: _ClassVar[int] - STEPRETRIES_FIELD_NUMBER: _ClassVar[int] - RETRYCOUNT_FIELD_NUMBER: _ClassVar[int] - EVENTINDEX_FIELD_NUMBER: _ClassVar[int] - workflowRunId: str - resourceType: ResourceType - eventType: ResourceEventType - resourceId: str - eventTimestamp: _timestamp_pb2.Timestamp - eventPayload: str + TASK_RETRIES_FIELD_NUMBER: _ClassVar[int] + RETRY_COUNT_FIELD_NUMBER: _ClassVar[int] + EVENT_INDEX_FIELD_NUMBER: _ClassVar[int] + workflow_run_id: str + resource_type: ResourceType + event_type: ResourceEventType + resource_id: str + event_timestamp: _timestamp_pb2.Timestamp + event_payload: str hangup: bool - stepRetries: int - retryCount: int - eventIndex: int - def __init__(self, workflowRunId: _Optional[str] = ..., resourceType: _Optional[_Union[ResourceType, str]] = ..., eventType: _Optional[_Union[ResourceEventType, str]] = ..., resourceId: _Optional[str] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., eventPayload: _Optional[str] = ..., hangup: bool = ..., stepRetries: _Optional[int] = ..., retryCount: _Optional[int] = ..., eventIndex: _Optional[int] = ...) -> None: ... + task_retries: int + retry_count: int + event_index: int + def __init__(self, workflow_run_id: _Optional[str] = ..., resource_type: _Optional[_Union[ResourceType, str]] = ..., event_type: _Optional[_Union[ResourceEventType, str]] = ..., resource_id: _Optional[str] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., event_payload: _Optional[str] = ..., hangup: bool = ..., task_retries: _Optional[int] = ..., retry_count: _Optional[int] = ..., event_index: _Optional[int] = ...) -> None: ... 
class WorkflowRunEvent(_message.Message): - __slots__ = ("workflowRunId", "eventType", "eventTimestamp", "results") - WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] - EVENTTYPE_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("workflow_run_id", "event_type", "event_timestamp", "results") + WORKFLOW_RUN_ID_FIELD_NUMBER: _ClassVar[int] + EVENT_TYPE_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] RESULTS_FIELD_NUMBER: _ClassVar[int] - workflowRunId: str - eventType: WorkflowRunEventType - eventTimestamp: _timestamp_pb2.Timestamp + workflow_run_id: str + event_type: WorkflowRunEventType + event_timestamp: _timestamp_pb2.Timestamp results: _containers.RepeatedCompositeFieldContainer[StepRunResult] - def __init__(self, workflowRunId: _Optional[str] = ..., eventType: _Optional[_Union[WorkflowRunEventType, str]] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., results: _Optional[_Iterable[_Union[StepRunResult, _Mapping]]] = ...) -> None: ... + def __init__(self, workflow_run_id: _Optional[str] = ..., event_type: _Optional[_Union[WorkflowRunEventType, str]] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., results: _Optional[_Iterable[_Union[StepRunResult, _Mapping]]] = ...) -> None: ... class StepRunResult(_message.Message): - __slots__ = ("stepRunId", "stepReadableId", "jobRunId", "error", "output") - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - STEPREADABLEID_FIELD_NUMBER: _ClassVar[int] - JOBRUNID_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("task_run_external_id", "task_name", "job_run_id", "error", "output") + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + TASK_NAME_FIELD_NUMBER: _ClassVar[int] + JOB_RUN_ID_FIELD_NUMBER: _ClassVar[int] ERROR_FIELD_NUMBER: _ClassVar[int] OUTPUT_FIELD_NUMBER: _ClassVar[int] - stepRunId: str - stepReadableId: str - jobRunId: str + task_run_external_id: str + task_name: str + job_run_id: str error: str output: str - def __init__(self, stepRunId: _Optional[str] = ..., stepReadableId: _Optional[str] = ..., jobRunId: _Optional[str] = ..., error: _Optional[str] = ..., output: _Optional[str] = ...) -> None: ... + def __init__(self, task_run_external_id: _Optional[str] = ..., task_name: _Optional[str] = ..., job_run_id: _Optional[str] = ..., error: _Optional[str] = ..., output: _Optional[str] = ...) -> None: ... class OverridesData(_message.Message): - __slots__ = ("stepRunId", "path", "value", "callerFilename") - STEPRUNID_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("task_run_external_id", "path", "value", "caller_filename") + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] PATH_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] - CALLERFILENAME_FIELD_NUMBER: _ClassVar[int] - stepRunId: str + CALLER_FILENAME_FIELD_NUMBER: _ClassVar[int] + task_run_external_id: str path: str value: str - callerFilename: str - def __init__(self, stepRunId: _Optional[str] = ..., path: _Optional[str] = ..., value: _Optional[str] = ..., callerFilename: _Optional[str] = ...) -> None: ... + caller_filename: str + def __init__(self, task_run_external_id: _Optional[str] = ..., path: _Optional[str] = ..., value: _Optional[str] = ..., caller_filename: _Optional[str] = ...) -> None: ... class OverridesDataResponse(_message.Message): __slots__ = () def __init__(self) -> None: ... 
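On the subscription side, StepRunResult keeps its message name but now exposes task_run_external_id and task_name (formerly stepRunId and stepReadableId). A sketch of draining results from a WorkflowRunEvent; error and output are proto3 optional fields, so presence is checked with HasField:

from hatchet_sdk.contracts.dispatcher_pb2 import WorkflowRunEvent

def collect_outputs(event: WorkflowRunEvent) -> dict[str, str]:
    outputs: dict[str, str] = {}
    for result in event.results:
        if result.HasField("error"):  # optional field: check presence before reading
            raise RuntimeError(f"{result.task_name} failed: {result.error}")
        outputs[result.task_name] = result.output
    return outputs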
class HeartbeatRequest(_message.Message): - __slots__ = ("workerId", "heartbeatAt") - WORKERID_FIELD_NUMBER: _ClassVar[int] - HEARTBEATAT_FIELD_NUMBER: _ClassVar[int] - workerId: str - heartbeatAt: _timestamp_pb2.Timestamp - def __init__(self, workerId: _Optional[str] = ..., heartbeatAt: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + __slots__ = ("worker_id", "heartbeat_at") + WORKER_ID_FIELD_NUMBER: _ClassVar[int] + HEARTBEAT_AT_FIELD_NUMBER: _ClassVar[int] + worker_id: str + heartbeat_at: _timestamp_pb2.Timestamp + def __init__(self, worker_id: _Optional[str] = ..., heartbeat_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... class HeartbeatResponse(_message.Message): __slots__ = () def __init__(self) -> None: ... class RefreshTimeoutRequest(_message.Message): - __slots__ = ("stepRunId", "incrementTimeoutBy") - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - INCREMENTTIMEOUTBY_FIELD_NUMBER: _ClassVar[int] - stepRunId: str - incrementTimeoutBy: str - def __init__(self, stepRunId: _Optional[str] = ..., incrementTimeoutBy: _Optional[str] = ...) -> None: ... + __slots__ = ("task_run_external_id", "increment_timeout_by") + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + INCREMENT_TIMEOUT_BY_FIELD_NUMBER: _ClassVar[int] + task_run_external_id: str + increment_timeout_by: str + def __init__(self, task_run_external_id: _Optional[str] = ..., increment_timeout_by: _Optional[str] = ...) -> None: ... class RefreshTimeoutResponse(_message.Message): - __slots__ = ("timeoutAt",) - TIMEOUTAT_FIELD_NUMBER: _ClassVar[int] - timeoutAt: _timestamp_pb2.Timestamp - def __init__(self, timeoutAt: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + __slots__ = ("timeout_at",) + TIMEOUT_AT_FIELD_NUMBER: _ClassVar[int] + timeout_at: _timestamp_pb2.Timestamp + def __init__(self, timeout_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... class ReleaseSlotRequest(_message.Message): - __slots__ = ("stepRunId",) - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - stepRunId: str - def __init__(self, stepRunId: _Optional[str] = ...) -> None: ... + __slots__ = ("task_run_external_id",) + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + task_run_external_id: str + def __init__(self, task_run_external_id: _Optional[str] = ...) -> None: ... class ReleaseSlotResponse(_message.Message): __slots__ = () diff --git a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2_grpc.py index 30fb814d6b..6f24372962 100644 --- a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc + from hatchet_sdk.contracts import dispatcher_pb2 as dispatcher__pb2 GRPC_GENERATED_VERSION = '1.76.0' diff --git a/sdks/python/hatchet_sdk/contracts/events_pb2.py b/sdks/python/hatchet_sdk/contracts/events_pb2.py index 90c1c9f86b..88a1f7a376 100644 --- a/sdks/python/hatchet_sdk/contracts/events_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/events_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, @@ -22,10 +23,10 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - +from google.protobuf import \ + timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd2\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05scope\x18\x07 \x01(\tH\x01\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x08\n\x06_scope\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\xc2\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x1b\n\x0etaskRetryCount\x18\x06 \x01(\x05H\x01\x88\x01\x01\x42\x08\n\x06_levelB\x11\n\x0f_taskRetryCount\"\x10\n\x0ePutLogResponse\"\xa4\x01\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x17\n\neventIndex\x18\x06 \x01(\x03H\x00\x88\x01\x01\x42\r\n\x0b_eventIndex\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\xde\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08priority\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x12\n\x05scope\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x0b\n\t_priorityB\x08\n\x06_scope\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd7\x01\n\x05\x45vent\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x10\n\x08\x65vent_id\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12 \n\x13\x61\x64\x64itional_metadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05scope\x18\x07 \x01(\tH\x01\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x08\n\x06_scope\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\xd2\x01\n\rPutLogRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x1d\n\x10task_retry_count\x18\x06 \x01(\x05H\x01\x88\x01\x01\x42\x08\n\x06_levelB\x13\n\x11_task_retry_count\"\x10\n\x0ePutLogResponse\"\xb2\x01\n\x15PutStreamEventRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x18\n\x0b\x65vent_index\x18\x06 \x01(\x03H\x00\x88\x01\x01\x42\x0e\n\x0c_event_index\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\xe1\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12 \n\x13\x61\x64\x64itional_metadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08priority\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x12\n\x05scope\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x0b\n\t_priorityB\x08\n\x06_scope\"&\n\x12ReplayEventRequest\x12\x10\n\x08\x65vent_id\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,23 +35,23 @@ _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contracts' _globals['_EVENT']._serialized_start=50 - _globals['_EVENT']._serialized_end=260 - _globals['_EVENTS']._serialized_start=262 - _globals['_EVENTS']._serialized_end=294 - _globals['_PUTLOGREQUEST']._serialized_start=297 - _globals['_PUTLOGREQUEST']._serialized_end=491 - _globals['_PUTLOGRESPONSE']._serialized_start=493 - _globals['_PUTLOGRESPONSE']._serialized_end=509 - _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=512 - _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=676 - _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=678 - _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=702 - _globals['_BULKPUSHEVENTREQUEST']._serialized_start=704 - _globals['_BULKPUSHEVENTREQUEST']._serialized_end=761 - _globals['_PUSHEVENTREQUEST']._serialized_start=764 - 
_globals['_PUSHEVENTREQUEST']._serialized_end=986 - _globals['_REPLAYEVENTREQUEST']._serialized_start=988 - _globals['_REPLAYEVENTREQUEST']._serialized_end=1025 - _globals['_EVENTSSERVICE']._serialized_start=1028 - _globals['_EVENTSSERVICE']._serialized_end=1292 + _globals['_EVENT']._serialized_end=265 + _globals['_EVENTS']._serialized_start=267 + _globals['_EVENTS']._serialized_end=299 + _globals['_PUTLOGREQUEST']._serialized_start=302 + _globals['_PUTLOGREQUEST']._serialized_end=512 + _globals['_PUTLOGRESPONSE']._serialized_start=514 + _globals['_PUTLOGRESPONSE']._serialized_end=530 + _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=533 + _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=711 + _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=713 + _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=737 + _globals['_BULKPUSHEVENTREQUEST']._serialized_start=739 + _globals['_BULKPUSHEVENTREQUEST']._serialized_end=796 + _globals['_PUSHEVENTREQUEST']._serialized_start=799 + _globals['_PUSHEVENTREQUEST']._serialized_end=1024 + _globals['_REPLAYEVENTREQUEST']._serialized_start=1026 + _globals['_REPLAYEVENTREQUEST']._serialized_end=1064 + _globals['_EVENTSSERVICE']._serialized_start=1067 + _globals['_EVENTSSERVICE']._serialized_end=1331 # @@protoc_insertion_point(module_scope) diff --git a/sdks/python/hatchet_sdk/contracts/events_pb2.pyi b/sdks/python/hatchet_sdk/contracts/events_pb2.pyi index b3937b73d3..eee5d158f5 100644 --- a/sdks/python/hatchet_sdk/contracts/events_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/events_pb2.pyi @@ -1,31 +1,34 @@ import datetime +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union -from google.protobuf import timestamp_pb2 as _timestamp_pb2 -from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import containers as _containers DESCRIPTOR: _descriptor.FileDescriptor class Event(_message.Message): - __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata", "scope") - TENANTID_FIELD_NUMBER: _ClassVar[int] - EVENTID_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("tenant_id", "event_id", "key", "payload", "event_timestamp", "additional_metadata", "scope") + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + EVENT_ID_FIELD_NUMBER: _ClassVar[int] KEY_FIELD_NUMBER: _ClassVar[int] PAYLOAD_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] - ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] SCOPE_FIELD_NUMBER: _ClassVar[int] - tenantId: str - eventId: str + tenant_id: str + event_id: str key: str payload: str - eventTimestamp: _timestamp_pb2.Timestamp - additionalMetadata: str + event_timestamp: _timestamp_pb2.Timestamp + additional_metadata: str scope: str - def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., 
additionalMetadata: _Optional[str] = ..., scope: _Optional[str] = ...) -> None: ... + def __init__(self, tenant_id: _Optional[str] = ..., event_id: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., additional_metadata: _Optional[str] = ..., scope: _Optional[str] = ...) -> None: ... class Events(_message.Message): __slots__ = ("events",) @@ -34,38 +37,38 @@ class Events(_message.Message): def __init__(self, events: _Optional[_Iterable[_Union[Event, _Mapping]]] = ...) -> None: ... class PutLogRequest(_message.Message): - __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata", "taskRetryCount") - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - CREATEDAT_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("task_run_external_id", "created_at", "message", "level", "metadata", "task_retry_count") + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] LEVEL_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] - TASKRETRYCOUNT_FIELD_NUMBER: _ClassVar[int] - stepRunId: str - createdAt: _timestamp_pb2.Timestamp + TASK_RETRY_COUNT_FIELD_NUMBER: _ClassVar[int] + task_run_external_id: str + created_at: _timestamp_pb2.Timestamp message: str level: str metadata: str - taskRetryCount: int - def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ..., taskRetryCount: _Optional[int] = ...) -> None: ... + task_retry_count: int + def __init__(self, task_run_external_id: _Optional[str] = ..., created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ..., task_retry_count: _Optional[int] = ...) -> None: ... class PutLogResponse(_message.Message): __slots__ = () def __init__(self) -> None: ... class PutStreamEventRequest(_message.Message): - __slots__ = ("stepRunId", "createdAt", "message", "metadata", "eventIndex") - STEPRUNID_FIELD_NUMBER: _ClassVar[int] - CREATEDAT_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("task_run_external_id", "created_at", "message", "metadata", "event_index") + TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] + CREATED_AT_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] - EVENTINDEX_FIELD_NUMBER: _ClassVar[int] - stepRunId: str - createdAt: _timestamp_pb2.Timestamp + EVENT_INDEX_FIELD_NUMBER: _ClassVar[int] + task_run_external_id: str + created_at: _timestamp_pb2.Timestamp message: bytes metadata: str - eventIndex: int - def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[bytes] = ..., metadata: _Optional[str] = ..., eventIndex: _Optional[int] = ...) -> None: ... + event_index: int + def __init__(self, task_run_external_id: _Optional[str] = ..., created_at: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[bytes] = ..., metadata: _Optional[str] = ..., event_index: _Optional[int] = ...) -> None: ... 
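The events contracts follow suit: stepRunId, createdAt, and taskRetryCount become task_run_external_id, created_at, and task_retry_count. A minimal log-write sketch (the run id and message are placeholders):

from google.protobuf.timestamp_pb2 import Timestamp

from hatchet_sdk.contracts import events_pb2

created = Timestamp()
created.GetCurrentTime()

log = events_pb2.PutLogRequest(
    task_run_external_id="...",  # placeholder; was stepRunId
    created_at=created,          # was createdAt
    message="processing started",
    level="INFO",
    task_retry_count=0,          # was taskRetryCount
)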
class PutStreamEventResponse(_message.Message): __slots__ = () @@ -78,23 +81,23 @@ class BulkPushEventRequest(_message.Message): def __init__(self, events: _Optional[_Iterable[_Union[PushEventRequest, _Mapping]]] = ...) -> None: ... class PushEventRequest(_message.Message): - __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata", "priority", "scope") + __slots__ = ("key", "payload", "event_timestamp", "additional_metadata", "priority", "scope") KEY_FIELD_NUMBER: _ClassVar[int] PAYLOAD_FIELD_NUMBER: _ClassVar[int] - EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] - ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int] + EVENT_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] PRIORITY_FIELD_NUMBER: _ClassVar[int] SCOPE_FIELD_NUMBER: _ClassVar[int] key: str payload: str - eventTimestamp: _timestamp_pb2.Timestamp - additionalMetadata: str + event_timestamp: _timestamp_pb2.Timestamp + additional_metadata: str priority: int scope: str - def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., priority: _Optional[int] = ..., scope: _Optional[str] = ...) -> None: ... + def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., event_timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., additional_metadata: _Optional[str] = ..., priority: _Optional[int] = ..., scope: _Optional[str] = ...) -> None: ... class ReplayEventRequest(_message.Message): - __slots__ = ("eventId",) - EVENTID_FIELD_NUMBER: _ClassVar[int] - eventId: str - def __init__(self, eventId: _Optional[str] = ...) -> None: ... + __slots__ = ("event_id",) + EVENT_ID_FIELD_NUMBER: _ClassVar[int] + event_id: str + def __init__(self, event_id: _Optional[str] = ...) -> None: ... diff --git a/sdks/python/hatchet_sdk/contracts/events_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/events_pb2_grpc.py index c9324a61ea..bcd916bbc4 100644 --- a/sdks/python/hatchet_sdk/contracts/events_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/events_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc + from hatchet_sdk.contracts import events_pb2 as events__pb2 GRPC_GENERATED_VERSION = '1.76.0' diff --git a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.py b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.py index 988661d822..42f0873288 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, @@ -22,8 +23,8 @@ _sym_db = _symbol_database.Default() -from hatchet_sdk.contracts.v1.shared import condition_pb2 as v1_dot_shared_dot_condition__pb2 - +from hatchet_sdk.contracts.v1.shared import \ + condition_pb2 as v1_dot_shared_dot_condition__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13v1/dispatcher.proto\x12\x02v1\x1a\x19v1/shared/condition.proto\"z\n\x1bRegisterDurableEventRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x12\n\nsignal_key\x18\x02 \x01(\t\x12\x36\n\nconditions\x18\x03 \x01(\x0b\x32\".v1.DurableEventListenerConditions\"\x1e\n\x1cRegisterDurableEventResponse\"C\n\x1cListenForDurableEventRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x12\n\nsignal_key\x18\x02 \x01(\t\"A\n\x0c\x44urableEvent\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x12\n\nsignal_key\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x32\xbe\x01\n\x0cV1Dispatcher\x12[\n\x14RegisterDurableEvent\x12\x1f.v1.RegisterDurableEventRequest\x1a .v1.RegisterDurableEventResponse\"\x00\x12Q\n\x15ListenForDurableEvent\x12 .v1.ListenForDurableEventRequest\x1a\x10.v1.DurableEvent\"\x00(\x01\x30\x01\x42\x42Z@github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1b\x06proto3') diff --git a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.pyi b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.pyi index c8b3ddc79a..9acb6587c6 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2.pyi @@ -1,8 +1,12 @@ -from hatchet_sdk.contracts.v1.shared import condition_pb2 as _condition_pb2 +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from collections.abc import Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +from hatchet_sdk.contracts.v1.shared import condition_pb2 as _condition_pb2 DESCRIPTOR: _descriptor.FileDescriptor diff --git a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py index 74d39ceec3..050dacc899 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc + from hatchet_sdk.contracts.v1 import dispatcher_pb2 as v1_dot_dispatcher__pb2 GRPC_GENERATED_VERSION = '1.76.0' diff --git a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.py b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.py index f44a6f9c49..309f1e86ac 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, diff --git a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.pyi b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.pyi index d158631cbb..f5600a58f4 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2.pyi @@ -1,9 +1,13 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union + from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper DESCRIPTOR: _descriptor.FileDescriptor diff --git a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py index 151bf772f6..a5f3c72088 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py @@ -1,8 +1,8 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc GRPC_GENERATED_VERSION = '1.76.0' GRPC_VERSION = grpc.__version__ diff --git a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.py b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.py index c227540b14..5763f689fe 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, @@ -22,11 +23,13 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from hatchet_sdk.contracts.v1.shared import condition_pb2 as v1_dot_shared_dot_condition__pb2 +from google.protobuf import \ + timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from hatchet_sdk.contracts.v1.shared import \ + condition_pb2 as v1_dot_shared_dot_condition__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12v1/workflows.proto\x12\x02v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19v1/shared/condition.proto\"Z\n\x12\x43\x61ncelTasksRequest\x12\x13\n\x0b\x65xternalIds\x18\x01 \x03(\t\x12$\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x0f.v1.TasksFilterH\x00\x88\x01\x01\x42\t\n\x07_filter\"Z\n\x12ReplayTasksRequest\x12\x13\n\x0b\x65xternalIds\x18\x01 \x03(\t\x12$\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x0f.v1.TasksFilterH\x00\x88\x01\x01\x42\t\n\x07_filter\"\xb7\x01\n\x0bTasksFilter\x12\x10\n\x08statuses\x18\x01 \x03(\t\x12)\n\x05since\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\x05until\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x88\x01\x01\x12\x14\n\x0cworkflow_ids\x18\x04 \x03(\t\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x05 \x03(\tB\x08\n\x06_until\".\n\x13\x43\x61ncelTasksResponse\x12\x17\n\x0f\x63\x61ncelled_tasks\x18\x01 \x03(\t\"-\n\x13ReplayTasksResponse\x12\x16\n\x0ereplayed_tasks\x18\x01 \x03(\t\"\x82\x01\n\x19TriggerWorkflowRunRequest\x12\x15\n\rworkflow_name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\x0c\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x03 \x01(\x0c\x12\x15\n\x08priority\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\x0b\n\t_priority\"1\n\x1aTriggerWorkflowRunResponse\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\"\xac\x04\n\x1c\x43reateWorkflowVersionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12!\n\x05tasks\x18\x06 \x03(\x0b\x32\x12.v1.CreateTaskOpts\x12$\n\x0b\x63oncurrency\x18\x07 \x01(\x0b\x32\x0f.v1.Concurrency\x12\x17\n\ncron_input\x18\x08 \x01(\tH\x00\x88\x01\x01\x12\x30\n\x0fon_failure_task\x18\t \x01(\x0b\x32\x12.v1.CreateTaskOptsH\x01\x88\x01\x01\x12\'\n\x06sticky\x18\n \x01(\x0e\x32\x12.v1.StickyStrategyH\x02\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0b \x01(\x05H\x03\x88\x01\x01\x12(\n\x0f\x63oncurrency_arr\x18\x0c \x03(\x0b\x32\x0f.v1.Concurrency\x12*\n\x0f\x64\x65\x66\x61ult_filters\x18\r \x03(\x0b\x32\x11.v1.DefaultFilter\x12\x1e\n\x11input_json_schema\x18\x0e 
\x01(\x0cH\x04\x88\x01\x01\x42\r\n\x0b_cron_inputB\x12\n\x10_on_failure_taskB\t\n\x07_stickyB\x13\n\x11_default_priorityB\x14\n\x12_input_json_schema\"T\n\rDefaultFilter\x12\x12\n\nexpression\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t\x12\x14\n\x07payload\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x42\n\n\x08_payload\"\x93\x01\n\x0b\x43oncurrency\x12\x12\n\nexpression\x18\x01 \x01(\t\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x39\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x1c.v1.ConcurrencyLimitStrategyH\x01\x88\x01\x01\x42\x0b\n\t_max_runsB\x11\n\x0f_limit_strategy\"\xe4\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x32\n\ncomparator\x18\x04 \x01(\x0e\x32\x19.v1.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xb1\x04\n\x0e\x43reateTaskOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x0f\n\x07retries\x18\x06 \x01(\x05\x12,\n\x0brate_limits\x18\x07 \x03(\x0b\x32\x17.v1.CreateTaskRateLimit\x12;\n\rworker_labels\x18\x08 \x03(\x0b\x32$.v1.CreateTaskOpts.WorkerLabelsEntry\x12\x1b\n\x0e\x62\x61\x63koff_factor\x18\t \x01(\x02H\x00\x88\x01\x01\x12 \n\x13\x62\x61\x63koff_max_seconds\x18\n \x01(\x05H\x01\x88\x01\x01\x12$\n\x0b\x63oncurrency\x18\x0b \x03(\x0b\x32\x0f.v1.Concurrency\x12+\n\nconditions\x18\x0c \x01(\x0b\x32\x12.v1.TaskConditionsH\x02\x88\x01\x01\x12\x1d\n\x10schedule_timeout\x18\r \x01(\tH\x03\x88\x01\x01\x1aL\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.v1.DesiredWorkerLabels:\x02\x38\x01\x42\x11\n\x0f_backoff_factorB\x16\n\x14_backoff_max_secondsB\r\n\x0b_conditionsB\x13\n\x11_schedule_timeout\"\xfd\x01\n\x13\x43reateTaskRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12,\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x15.v1.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"@\n\x1d\x43reateWorkflowVersionResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\"+\n\x14GetRunDetailsRequest\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\"\x96\x01\n\rTaskRunDetail\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\x12\x1d\n\x06status\x18\x02 \x01(\x0e\x32\r.v1.RunStatus\x12\x12\n\x05\x65rror\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x04 \x01(\x0cH\x01\x88\x01\x01\x12\x13\n\x0breadable_id\x18\x05 \x01(\tB\x08\n\x06_errorB\t\n\x07_output\"\xf0\x01\n\x15GetRunDetailsResponse\x12\r\n\x05input\x18\x01 \x01(\x0c\x12\x1d\n\x06status\x18\x02 \x01(\x0e\x32\r.v1.RunStatus\x12:\n\ttask_runs\x18\x03 \x03(\x0b\x32\'.v1.GetRunDetailsResponse.TaskRunsEntry\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x05 \x01(\x0c\x1a\x42\n\rTaskRunsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 
\x01(\x0b\x32\x11.v1.TaskRunDetail:\x02\x38\x01*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06*N\n\tRunStatus\x12\n\n\x06QUEUED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tCOMPLETED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\r\n\tCANCELLED\x10\x04*\x7f\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03\x12\x11\n\rCANCEL_NEWEST\x10\x04*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05\x32\xfd\x02\n\x0c\x41\x64minService\x12R\n\x0bPutWorkflow\x12 .v1.CreateWorkflowVersionRequest\x1a!.v1.CreateWorkflowVersionResponse\x12>\n\x0b\x43\x61ncelTasks\x12\x16.v1.CancelTasksRequest\x1a\x17.v1.CancelTasksResponse\x12>\n\x0bReplayTasks\x12\x16.v1.ReplayTasksRequest\x1a\x17.v1.ReplayTasksResponse\x12S\n\x12TriggerWorkflowRun\x12\x1d.v1.TriggerWorkflowRunRequest\x1a\x1e.v1.TriggerWorkflowRunResponse\x12\x44\n\rGetRunDetails\x12\x18.v1.GetRunDetailsRequest\x1a\x19.v1.GetRunDetailsResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12v1/workflows.proto\x12\x02v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19v1/shared/condition.proto\"[\n\x12\x43\x61ncelTasksRequest\x12\x14\n\x0c\x65xternal_ids\x18\x01 \x03(\t\x12$\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x0f.v1.TasksFilterH\x00\x88\x01\x01\x42\t\n\x07_filter\"[\n\x12ReplayTasksRequest\x12\x14\n\x0c\x65xternal_ids\x18\x01 \x03(\t\x12$\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x0f.v1.TasksFilterH\x00\x88\x01\x01\x42\t\n\x07_filter\"\xb7\x01\n\x0bTasksFilter\x12\x10\n\x08statuses\x18\x01 \x03(\t\x12)\n\x05since\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\x05until\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x88\x01\x01\x12\x14\n\x0cworkflow_ids\x18\x04 \x03(\t\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x05 \x03(\tB\x08\n\x06_until\".\n\x13\x43\x61ncelTasksResponse\x12\x17\n\x0f\x63\x61ncelled_tasks\x18\x01 \x03(\t\"-\n\x13ReplayTasksResponse\x12\x16\n\x0ereplayed_tasks\x18\x01 \x03(\t\"\x82\x01\n\x19TriggerWorkflowRunRequest\x12\x15\n\rworkflow_name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\x0c\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x03 \x01(\x0c\x12\x15\n\x08priority\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\x0b\n\t_priority\"1\n\x1aTriggerWorkflowRunResponse\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\"\xac\x04\n\x1c\x43reateWorkflowVersionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12!\n\x05tasks\x18\x06 \x03(\x0b\x32\x12.v1.CreateTaskOpts\x12$\n\x0b\x63oncurrency\x18\x07 \x01(\x0b\x32\x0f.v1.Concurrency\x12\x17\n\ncron_input\x18\x08 \x01(\tH\x00\x88\x01\x01\x12\x30\n\x0fon_failure_task\x18\t \x01(\x0b\x32\x12.v1.CreateTaskOptsH\x01\x88\x01\x01\x12\'\n\x06sticky\x18\n \x01(\x0e\x32\x12.v1.StickyStrategyH\x02\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0b 
\x01(\x05H\x03\x88\x01\x01\x12(\n\x0f\x63oncurrency_arr\x18\x0c \x03(\x0b\x32\x0f.v1.Concurrency\x12*\n\x0f\x64\x65\x66\x61ult_filters\x18\r \x03(\x0b\x32\x11.v1.DefaultFilter\x12\x1e\n\x11input_json_schema\x18\x0e \x01(\x0cH\x04\x88\x01\x01\x42\r\n\x0b_cron_inputB\x12\n\x10_on_failure_taskB\t\n\x07_stickyB\x13\n\x11_default_priorityB\x14\n\x12_input_json_schema\"T\n\rDefaultFilter\x12\x12\n\nexpression\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t\x12\x14\n\x07payload\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x42\n\n\x08_payload\"\x93\x01\n\x0b\x43oncurrency\x12\x12\n\nexpression\x18\x01 \x01(\t\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x39\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x1c.v1.ConcurrencyLimitStrategyH\x01\x88\x01\x01\x42\x0b\n\t_max_runsB\x11\n\x0f_limit_strategy\"\xe8\x01\n\x13\x44\x65siredWorkerLabels\x12\x16\n\tstr_value\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tint_value\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x32\n\ncomparator\x18\x04 \x01(\x0e\x32\x19.v1.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0c\n\n_str_valueB\x0c\n\n_int_valueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xb7\x05\n\x0e\x43reateTaskOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x0f\n\x07retries\x18\x06 \x01(\x05\x12,\n\x0brate_limits\x18\x07 \x03(\x0b\x32\x17.v1.CreateTaskRateLimit\x12;\n\rworker_labels\x18\x08 \x03(\x0b\x32$.v1.CreateTaskOpts.WorkerLabelsEntry\x12\x1b\n\x0e\x62\x61\x63koff_factor\x18\t \x01(\x02H\x00\x88\x01\x01\x12 \n\x13\x62\x61\x63koff_max_seconds\x18\n \x01(\x05H\x01\x88\x01\x01\x12$\n\x0b\x63oncurrency\x18\x0b \x03(\x0b\x32\x0f.v1.Concurrency\x12+\n\nconditions\x18\x0c \x01(\x0b\x32\x12.v1.TaskConditionsH\x02\x88\x01\x01\x12\x1d\n\x10schedule_timeout\x18\r \x01(\tH\x03\x88\x01\x01\x12\x12\n\nis_durable\x18\x0e \x01(\x08\x12;\n\rslot_requests\x18\x0f \x03(\x0b\x32$.v1.CreateTaskOpts.SlotRequestsEntry\x1aL\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.v1.DesiredWorkerLabels:\x02\x38\x01\x1a\x33\n\x11SlotRequestsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x11\n\x0f_backoff_factorB\x16\n\x14_backoff_max_secondsB\r\n\x0b_conditionsB\x13\n\x11_schedule_timeout\"\xfd\x01\n\x13\x43reateTaskRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12,\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x15.v1.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"@\n\x1d\x43reateWorkflowVersionResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\"+\n\x14GetRunDetailsRequest\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\"\x96\x01\n\rTaskRunDetail\x12\x13\n\x0b\x65xternal_id\x18\x01 \x01(\t\x12\x1d\n\x06status\x18\x02 \x01(\x0e\x32\r.v1.RunStatus\x12\x12\n\x05\x65rror\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x04 \x01(\x0cH\x01\x88\x01\x01\x12\x13\n\x0breadable_id\x18\x05 \x01(\tB\x08\n\x06_errorB\t\n\x07_output\"\xf0\x01\n\x15GetRunDetailsResponse\x12\r\n\x05input\x18\x01 
\x01(\x0c\x12\x1d\n\x06status\x18\x02 \x01(\x0e\x32\r.v1.RunStatus\x12:\n\ttask_runs\x18\x03 \x03(\x0b\x32\'.v1.GetRunDetailsResponse.TaskRunsEntry\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\x12\x1b\n\x13\x61\x64\x64itional_metadata\x18\x05 \x01(\x0c\x1a\x42\n\rTaskRunsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.v1.TaskRunDetail:\x02\x38\x01*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06*N\n\tRunStatus\x12\n\n\x06QUEUED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tCOMPLETED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\r\n\tCANCELLED\x10\x04*\x7f\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03\x12\x11\n\rCANCEL_NEWEST\x10\x04*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05\x32\xfd\x02\n\x0c\x41\x64minService\x12R\n\x0bPutWorkflow\x12 .v1.CreateWorkflowVersionRequest\x1a!.v1.CreateWorkflowVersionResponse\x12>\n\x0b\x43\x61ncelTasks\x12\x16.v1.CancelTasksRequest\x1a\x17.v1.CancelTasksResponse\x12>\n\x0bReplayTasks\x12\x16.v1.ReplayTasksRequest\x1a\x17.v1.ReplayTasksResponse\x12S\n\x12TriggerWorkflowRun\x12\x1d.v1.TriggerWorkflowRunRequest\x1a\x1e.v1.TriggerWorkflowRunResponse\x12\x44\n\rGetRunDetails\x12\x18.v1.GetRunDetailsRequest\x1a\x19.v1.GetRunDetailsResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -36,56 +39,60 @@ _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/shared/proto/v1' _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._loaded_options = None _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' + _globals['_CREATETASKOPTS_SLOTREQUESTSENTRY']._loaded_options = None + _globals['_CREATETASKOPTS_SLOTREQUESTSENTRY']._serialized_options = b'8\001' _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._loaded_options = None _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._serialized_options = b'8\001' - _globals['_STICKYSTRATEGY']._serialized_start=3088 - _globals['_STICKYSTRATEGY']._serialized_end=3124 - _globals['_RATELIMITDURATION']._serialized_start=3126 - _globals['_RATELIMITDURATION']._serialized_end=3219 - _globals['_RUNSTATUS']._serialized_start=3221 - _globals['_RUNSTATUS']._serialized_end=3299 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3301 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3428 - _globals['_WORKERLABELCOMPARATOR']._serialized_start=3431 - _globals['_WORKERLABELCOMPARATOR']._serialized_end=3564 + _globals['_STICKYSTRATEGY']._serialized_start=3228 + _globals['_STICKYSTRATEGY']._serialized_end=3264 + _globals['_RATELIMITDURATION']._serialized_start=3266 + _globals['_RATELIMITDURATION']._serialized_end=3359 + _globals['_RUNSTATUS']._serialized_start=3361 + _globals['_RUNSTATUS']._serialized_end=3439 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3441 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3568 + 
_globals['_WORKERLABELCOMPARATOR']._serialized_start=3571 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=3704 _globals['_CANCELTASKSREQUEST']._serialized_start=86 - _globals['_CANCELTASKSREQUEST']._serialized_end=176 - _globals['_REPLAYTASKSREQUEST']._serialized_start=178 - _globals['_REPLAYTASKSREQUEST']._serialized_end=268 - _globals['_TASKSFILTER']._serialized_start=271 - _globals['_TASKSFILTER']._serialized_end=454 - _globals['_CANCELTASKSRESPONSE']._serialized_start=456 - _globals['_CANCELTASKSRESPONSE']._serialized_end=502 - _globals['_REPLAYTASKSRESPONSE']._serialized_start=504 - _globals['_REPLAYTASKSRESPONSE']._serialized_end=549 - _globals['_TRIGGERWORKFLOWRUNREQUEST']._serialized_start=552 - _globals['_TRIGGERWORKFLOWRUNREQUEST']._serialized_end=682 - _globals['_TRIGGERWORKFLOWRUNRESPONSE']._serialized_start=684 - _globals['_TRIGGERWORKFLOWRUNRESPONSE']._serialized_end=733 - _globals['_CREATEWORKFLOWVERSIONREQUEST']._serialized_start=736 - _globals['_CREATEWORKFLOWVERSIONREQUEST']._serialized_end=1292 - _globals['_DEFAULTFILTER']._serialized_start=1294 - _globals['_DEFAULTFILTER']._serialized_end=1378 - _globals['_CONCURRENCY']._serialized_start=1381 - _globals['_CONCURRENCY']._serialized_end=1528 - _globals['_DESIREDWORKERLABELS']._serialized_start=1531 - _globals['_DESIREDWORKERLABELS']._serialized_end=1759 - _globals['_CREATETASKOPTS']._serialized_start=1762 - _globals['_CREATETASKOPTS']._serialized_end=2323 - _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._serialized_start=2168 - _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._serialized_end=2244 - _globals['_CREATETASKRATELIMIT']._serialized_start=2326 - _globals['_CREATETASKRATELIMIT']._serialized_end=2579 - _globals['_CREATEWORKFLOWVERSIONRESPONSE']._serialized_start=2581 - _globals['_CREATEWORKFLOWVERSIONRESPONSE']._serialized_end=2645 - _globals['_GETRUNDETAILSREQUEST']._serialized_start=2647 - _globals['_GETRUNDETAILSREQUEST']._serialized_end=2690 - _globals['_TASKRUNDETAIL']._serialized_start=2693 - _globals['_TASKRUNDETAIL']._serialized_end=2843 - _globals['_GETRUNDETAILSRESPONSE']._serialized_start=2846 - _globals['_GETRUNDETAILSRESPONSE']._serialized_end=3086 - _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._serialized_start=3020 - _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._serialized_end=3086 - _globals['_ADMINSERVICE']._serialized_start=3567 - _globals['_ADMINSERVICE']._serialized_end=3948 + _globals['_CANCELTASKSREQUEST']._serialized_end=177 + _globals['_REPLAYTASKSREQUEST']._serialized_start=179 + _globals['_REPLAYTASKSREQUEST']._serialized_end=270 + _globals['_TASKSFILTER']._serialized_start=273 + _globals['_TASKSFILTER']._serialized_end=456 + _globals['_CANCELTASKSRESPONSE']._serialized_start=458 + _globals['_CANCELTASKSRESPONSE']._serialized_end=504 + _globals['_REPLAYTASKSRESPONSE']._serialized_start=506 + _globals['_REPLAYTASKSRESPONSE']._serialized_end=551 + _globals['_TRIGGERWORKFLOWRUNREQUEST']._serialized_start=554 + _globals['_TRIGGERWORKFLOWRUNREQUEST']._serialized_end=684 + _globals['_TRIGGERWORKFLOWRUNRESPONSE']._serialized_start=686 + _globals['_TRIGGERWORKFLOWRUNRESPONSE']._serialized_end=735 + _globals['_CREATEWORKFLOWVERSIONREQUEST']._serialized_start=738 + _globals['_CREATEWORKFLOWVERSIONREQUEST']._serialized_end=1294 + _globals['_DEFAULTFILTER']._serialized_start=1296 + _globals['_DEFAULTFILTER']._serialized_end=1380 + _globals['_CONCURRENCY']._serialized_start=1383 + _globals['_CONCURRENCY']._serialized_end=1530 + _globals['_DESIREDWORKERLABELS']._serialized_start=1533 + 
_globals['_DESIREDWORKERLABELS']._serialized_end=1765 + _globals['_CREATETASKOPTS']._serialized_start=1768 + _globals['_CREATETASKOPTS']._serialized_end=2463 + _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._serialized_start=2255 + _globals['_CREATETASKOPTS_WORKERLABELSENTRY']._serialized_end=2331 + _globals['_CREATETASKOPTS_SLOTREQUESTSENTRY']._serialized_start=2333 + _globals['_CREATETASKOPTS_SLOTREQUESTSENTRY']._serialized_end=2384 + _globals['_CREATETASKRATELIMIT']._serialized_start=2466 + _globals['_CREATETASKRATELIMIT']._serialized_end=2719 + _globals['_CREATEWORKFLOWVERSIONRESPONSE']._serialized_start=2721 + _globals['_CREATEWORKFLOWVERSIONRESPONSE']._serialized_end=2785 + _globals['_GETRUNDETAILSREQUEST']._serialized_start=2787 + _globals['_GETRUNDETAILSREQUEST']._serialized_end=2830 + _globals['_TASKRUNDETAIL']._serialized_start=2833 + _globals['_TASKRUNDETAIL']._serialized_end=2983 + _globals['_GETRUNDETAILSRESPONSE']._serialized_start=2986 + _globals['_GETRUNDETAILSRESPONSE']._serialized_end=3226 + _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._serialized_start=3160 + _globals['_GETRUNDETAILSRESPONSE_TASKRUNSENTRY']._serialized_end=3226 + _globals['_ADMINSERVICE']._serialized_start=3707 + _globals['_ADMINSERVICE']._serialized_end=4088 # @@protoc_insertion_point(module_scope) diff --git a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.pyi b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.pyi index cb16c27e46..f6fbe66283 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2.pyi @@ -1,13 +1,17 @@ import datetime +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message from google.protobuf import timestamp_pb2 as _timestamp_pb2 -from hatchet_sdk.contracts.v1.shared import condition_pb2 as _condition_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +from hatchet_sdk.contracts.v1.shared import condition_pb2 as _condition_pb2 DESCRIPTOR: _descriptor.FileDescriptor @@ -77,20 +81,20 @@ LESS_THAN: WorkerLabelComparator LESS_THAN_OR_EQUAL: WorkerLabelComparator class CancelTasksRequest(_message.Message): - __slots__ = ("externalIds", "filter") - EXTERNALIDS_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("external_ids", "filter") + EXTERNAL_IDS_FIELD_NUMBER: _ClassVar[int] FILTER_FIELD_NUMBER: _ClassVar[int] - externalIds: _containers.RepeatedScalarFieldContainer[str] + external_ids: _containers.RepeatedScalarFieldContainer[str] filter: TasksFilter - def __init__(self, externalIds: _Optional[_Iterable[str]] = ..., filter: _Optional[_Union[TasksFilter, _Mapping]] = ...) -> None: ... + def __init__(self, external_ids: _Optional[_Iterable[str]] = ..., filter: _Optional[_Union[TasksFilter, _Mapping]] = ...) -> None: ... 
class ReplayTasksRequest(_message.Message): - __slots__ = ("externalIds", "filter") - EXTERNALIDS_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("external_ids", "filter") + EXTERNAL_IDS_FIELD_NUMBER: _ClassVar[int] FILTER_FIELD_NUMBER: _ClassVar[int] - externalIds: _containers.RepeatedScalarFieldContainer[str] + external_ids: _containers.RepeatedScalarFieldContainer[str] filter: TasksFilter - def __init__(self, externalIds: _Optional[_Iterable[str]] = ..., filter: _Optional[_Union[TasksFilter, _Mapping]] = ...) -> None: ... + def __init__(self, external_ids: _Optional[_Iterable[str]] = ..., filter: _Optional[_Union[TasksFilter, _Mapping]] = ...) -> None: ... class TasksFilter(_message.Message): __slots__ = ("statuses", "since", "until", "workflow_ids", "additional_metadata") @@ -189,21 +193,21 @@ class Concurrency(_message.Message): def __init__(self, expression: _Optional[str] = ..., max_runs: _Optional[int] = ..., limit_strategy: _Optional[_Union[ConcurrencyLimitStrategy, str]] = ...) -> None: ... class DesiredWorkerLabels(_message.Message): - __slots__ = ("strValue", "intValue", "required", "comparator", "weight") - STRVALUE_FIELD_NUMBER: _ClassVar[int] - INTVALUE_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("str_value", "int_value", "required", "comparator", "weight") + STR_VALUE_FIELD_NUMBER: _ClassVar[int] + INT_VALUE_FIELD_NUMBER: _ClassVar[int] REQUIRED_FIELD_NUMBER: _ClassVar[int] COMPARATOR_FIELD_NUMBER: _ClassVar[int] WEIGHT_FIELD_NUMBER: _ClassVar[int] - strValue: str - intValue: int + str_value: str + int_value: int required: bool comparator: WorkerLabelComparator weight: int - def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ... + def __init__(self, str_value: _Optional[str] = ..., int_value: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ... class CreateTaskOpts(_message.Message): - __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "retries", "rate_limits", "worker_labels", "backoff_factor", "backoff_max_seconds", "concurrency", "conditions", "schedule_timeout") + __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "retries", "rate_limits", "worker_labels", "backoff_factor", "backoff_max_seconds", "concurrency", "conditions", "schedule_timeout", "is_durable", "slot_requests") class WorkerLabelsEntry(_message.Message): __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] @@ -211,6 +215,13 @@ class CreateTaskOpts(_message.Message): key: str value: DesiredWorkerLabels def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[DesiredWorkerLabels, _Mapping]] = ...) -> None: ... + class SlotRequestsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: int + def __init__(self, key: _Optional[str] = ..., value: _Optional[int] = ...) -> None: ... 
READABLE_ID_FIELD_NUMBER: _ClassVar[int] ACTION_FIELD_NUMBER: _ClassVar[int] TIMEOUT_FIELD_NUMBER: _ClassVar[int] @@ -224,6 +235,8 @@ class CreateTaskOpts(_message.Message): CONCURRENCY_FIELD_NUMBER: _ClassVar[int] CONDITIONS_FIELD_NUMBER: _ClassVar[int] SCHEDULE_TIMEOUT_FIELD_NUMBER: _ClassVar[int] + IS_DURABLE_FIELD_NUMBER: _ClassVar[int] + SLOT_REQUESTS_FIELD_NUMBER: _ClassVar[int] readable_id: str action: str timeout: str @@ -237,7 +250,9 @@ class CreateTaskOpts(_message.Message): concurrency: _containers.RepeatedCompositeFieldContainer[Concurrency] conditions: _condition_pb2.TaskConditions schedule_timeout: str - def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateTaskRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ..., backoff_factor: _Optional[float] = ..., backoff_max_seconds: _Optional[int] = ..., concurrency: _Optional[_Iterable[_Union[Concurrency, _Mapping]]] = ..., conditions: _Optional[_Union[_condition_pb2.TaskConditions, _Mapping]] = ..., schedule_timeout: _Optional[str] = ...) -> None: ... + is_durable: bool + slot_requests: _containers.ScalarMap[str, int] + def __init__(self, readable_id: _Optional[str] = ..., action: _Optional[str] = ..., timeout: _Optional[str] = ..., inputs: _Optional[str] = ..., parents: _Optional[_Iterable[str]] = ..., retries: _Optional[int] = ..., rate_limits: _Optional[_Iterable[_Union[CreateTaskRateLimit, _Mapping]]] = ..., worker_labels: _Optional[_Mapping[str, DesiredWorkerLabels]] = ..., backoff_factor: _Optional[float] = ..., backoff_max_seconds: _Optional[int] = ..., concurrency: _Optional[_Iterable[_Union[Concurrency, _Mapping]]] = ..., conditions: _Optional[_Union[_condition_pb2.TaskConditions, _Mapping]] = ..., schedule_timeout: _Optional[str] = ..., is_durable: bool = ..., slot_requests: _Optional[_Mapping[str, int]] = ...) -> None: ... class CreateTaskRateLimit(_message.Message): __slots__ = ("key", "units", "key_expr", "units_expr", "limit_values_expr", "duration") diff --git a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2_grpc.py index 7229fe29bd..9b1db40daa 100644 --- a/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/v1/workflows_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc + from hatchet_sdk.contracts.v1 import workflows_pb2 as v1_dot_workflows__pb2 GRPC_GENERATED_VERSION = '1.76.0' diff --git a/sdks/python/hatchet_sdk/contracts/workflows_pb2.py b/sdks/python/hatchet_sdk/contracts/workflows_pb2.py index c1b2a866bb..f17c801ca3 100644 --- a/sdks/python/hatchet_sdk/contracts/workflows_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/workflows_pb2.py @@ -9,6 +9,7 @@ from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + _runtime_version.ValidateProtobufRuntimeVersion( _runtime_version.Domain.PUBLIC, 6, @@ -22,10 +23,10 @@ _sym_db = _symbol_database.Default() -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - +from google.protobuf import \ + timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe1\x01\n\x13\x44\x65siredWorkerLabels\x12\x15\n\x08strValue\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08intValue\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0b\n\t_strValueB\x0b\n\t_intValueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xb5\x03\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 
\x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x12\x1b\n\x0e\x62\x61\x63koff_factor\x18\n \x01(\x02H\x00\x88\x01\x01\x12 \n\x13\x62\x61\x63koff_max_seconds\x18\x0b \x01(\x05H\x01\x88\x01\x01\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\x42\x11\n\x0f_backoff_factorB\x16\n\x14_backoff_max_seconds\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\xf1\x02\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x05\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x0b\n\t_priority\"O\n\x11ScheduledWorkflow\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ntrigger_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xe3\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x03\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\x12/\n\x13scheduled_workflows\x18\x08 \x03(\x0b\x32\x12.ScheduledWorkflow\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\xf7\x02\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12\x1f\n\x12parent_step_run_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x15\n\x13_parent_step_run_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*\x7f\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03\x12\x11\n\rCANCEL_NEWEST\x10\x04*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fworkflows.proto\x1a\x1fgoogle/protobuf/timestamp.proto\">\n\x12PutWorkflowRequest\x12(\n\x04opts\x18\x01 \x01(\x0b\x32\x1a.CreateWorkflowVersionOpts\"\xbf\x04\n\x19\x43reateWorkflowVersionOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x16\n\x0e\x65vent_triggers\x18\x04 \x03(\t\x12\x15\n\rcron_triggers\x18\x05 \x03(\t\x12\x36\n\x12scheduled_triggers\x18\x06 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12$\n\x04jobs\x18\x07 \x03(\x0b\x32\x16.CreateWorkflowJobOpts\x12-\n\x0b\x63oncurrency\x18\x08 \x01(\x0b\x32\x18.WorkflowConcurrencyOpts\x12\x1d\n\x10schedule_timeout\x18\t \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncron_input\x18\n \x01(\tH\x01\x88\x01\x01\x12\x33\n\x0eon_failure_job\x18\x0b \x01(\x0b\x32\x16.CreateWorkflowJobOptsH\x02\x88\x01\x01\x12$\n\x06sticky\x18\x0c \x01(\x0e\x32\x0f.StickyStrategyH\x03\x88\x01\x01\x12 \n\x04kind\x18\r \x01(\x0e\x32\r.WorkflowKindH\x04\x88\x01\x01\x12\x1d\n\x10\x64\x65\x66\x61ult_priority\x18\x0e \x01(\x05H\x05\x88\x01\x01\x42\x13\n\x11_schedule_timeoutB\r\n\x0b_cron_inputB\x11\n\x0f_on_failure_jobB\t\n\x07_stickyB\x07\n\x05_kindB\x13\n\x11_default_priority\"\xd0\x01\n\x17WorkflowConcurrencyOpts\x12\x13\n\x06\x61\x63tion\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08max_runs\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x36\n\x0elimit_strategy\x18\x03 \x01(\x0e\x32\x19.ConcurrencyLimitStrategyH\x02\x88\x01\x01\x12\x17\n\nexpression\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\t\n\x07_actionB\x0b\n\t_max_runsB\x11\n\x0f_limit_strategyB\r\n\x0b_expression\"h\n\x15\x43reateWorkflowJobOpts\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12&\n\x05steps\x18\x04 \x03(\x0b\x32\x17.CreateWorkflowStepOptsJ\x04\x08\x03\x10\x04\"\xe5\x01\n\x13\x44\x65siredWorkerLabels\x12\x16\n\tstr_value\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tint_value\x18\x02 
\x01(\x05H\x01\x88\x01\x01\x12\x15\n\x08required\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12/\n\ncomparator\x18\x04 \x01(\x0e\x32\x16.WorkerLabelComparatorH\x03\x88\x01\x01\x12\x13\n\x06weight\x18\x05 \x01(\x05H\x04\x88\x01\x01\x42\x0c\n\n_str_valueB\x0c\n\n_int_valueB\x0b\n\t_requiredB\r\n\x0b_comparatorB\t\n\x07_weight\"\xb5\x03\n\x16\x43reateWorkflowStepOpts\x12\x13\n\x0breadable_id\x18\x01 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x02 \x01(\t\x12\x0f\n\x07timeout\x18\x03 \x01(\t\x12\x0e\n\x06inputs\x18\x04 \x01(\t\x12\x0f\n\x07parents\x18\x05 \x03(\t\x12\x11\n\tuser_data\x18\x06 \x01(\t\x12\x0f\n\x07retries\x18\x07 \x01(\x05\x12)\n\x0brate_limits\x18\x08 \x03(\x0b\x32\x14.CreateStepRateLimit\x12@\n\rworker_labels\x18\t \x03(\x0b\x32).CreateWorkflowStepOpts.WorkerLabelsEntry\x12\x1b\n\x0e\x62\x61\x63koff_factor\x18\n \x01(\x02H\x00\x88\x01\x01\x12 \n\x13\x62\x61\x63koff_max_seconds\x18\x0b \x01(\x05H\x01\x88\x01\x01\x1aI\n\x11WorkerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.DesiredWorkerLabels:\x02\x38\x01\x42\x11\n\x0f_backoff_factorB\x16\n\x14_backoff_max_seconds\"\xfa\x01\n\x13\x43reateStepRateLimit\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x12\n\x05units\x18\x02 \x01(\x05H\x00\x88\x01\x01\x12\x15\n\x08key_expr\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nunits_expr\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x1e\n\x11limit_values_expr\x18\x05 \x01(\tH\x03\x88\x01\x01\x12)\n\x08\x64uration\x18\x06 \x01(\x0e\x32\x12.RateLimitDurationH\x04\x88\x01\x01\x42\x08\n\x06_unitsB\x0b\n\t_key_exprB\r\n\x0b_units_exprB\x14\n\x12_limit_values_exprB\x0b\n\t_duration\"\x16\n\x14ListWorkflowsRequest\"\x83\x03\n\x17ScheduleWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tschedules\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05input\x18\x03 \x01(\t\x12\x16\n\tparent_id\x18\x04 \x01(\tH\x00\x88\x01\x01\x12(\n\x1bparent_task_run_external_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x06 \x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x07 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x08 \x01(\tH\x04\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x05\x88\x01\x01\x42\x0c\n\n_parent_idB\x1e\n\x1c_parent_task_run_external_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x0b\n\t_priority\"O\n\x11ScheduledWorkflow\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ntrigger_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xe3\x01\n\x0fWorkflowVersion\x12\n\n\x02id\x18\x01 \x01(\t\x12.\n\ncreated_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nupdated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\r\n\x05order\x18\x06 \x01(\x03\x12\x13\n\x0bworkflow_id\x18\x07 \x01(\t\x12/\n\x13scheduled_workflows\x18\x08 \x03(\x0b\x32\x12.ScheduledWorkflow\"?\n\x17WorkflowTriggerEventRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x11\n\tevent_key\x18\x02 \x01(\t\"9\n\x16WorkflowTriggerCronRef\x12\x11\n\tparent_id\x18\x01 \x01(\t\x12\x0c\n\x04\x63ron\x18\x02 \x01(\t\"H\n\x1a\x42ulkTriggerWorkflowRequest\x12*\n\tworkflows\x18\x01 \x03(\x0b\x32\x17.TriggerWorkflowRequest\"7\n\x1b\x42ulkTriggerWorkflowResponse\x12\x18\n\x10workflow_run_ids\x18\x01 \x03(\t\"\x89\x03\n\x16TriggerWorkflowRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x02 \x01(\t\x12\x16\n\tparent_id\x18\x03 \x01(\tH\x00\x88\x01\x01\x12(\n\x1bparent_task_run_external_id\x18\x04 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63hild_index\x18\x05 
\x01(\x05H\x02\x88\x01\x01\x12\x16\n\tchild_key\x18\x06 \x01(\tH\x03\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_metadata\x18\x07 \x01(\tH\x04\x88\x01\x01\x12\x1e\n\x11\x64\x65sired_worker_id\x18\x08 \x01(\tH\x05\x88\x01\x01\x12\x15\n\x08priority\x18\t \x01(\x05H\x06\x88\x01\x01\x42\x0c\n\n_parent_idB\x1e\n\x1c_parent_task_run_external_idB\x0e\n\x0c_child_indexB\x0c\n\n_child_keyB\x16\n\x14_additional_metadataB\x14\n\x12_desired_worker_idB\x0b\n\t_priority\"2\n\x17TriggerWorkflowResponse\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"W\n\x13PutRateLimitRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x05\x12$\n\x08\x64uration\x18\x03 \x01(\x0e\x32\x12.RateLimitDuration\"\x16\n\x14PutRateLimitResponse*$\n\x0eStickyStrategy\x12\x08\n\x04SOFT\x10\x00\x12\x08\n\x04HARD\x10\x01*2\n\x0cWorkflowKind\x12\x0c\n\x08\x46UNCTION\x10\x00\x12\x0b\n\x07\x44URABLE\x10\x01\x12\x07\n\x03\x44\x41G\x10\x02*\x7f\n\x18\x43oncurrencyLimitStrategy\x12\x16\n\x12\x43\x41NCEL_IN_PROGRESS\x10\x00\x12\x0f\n\x0b\x44ROP_NEWEST\x10\x01\x12\x10\n\x0cQUEUE_NEWEST\x10\x02\x12\x15\n\x11GROUP_ROUND_ROBIN\x10\x03\x12\x11\n\rCANCEL_NEWEST\x10\x04*\x85\x01\n\x15WorkerLabelComparator\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x05*]\n\x11RateLimitDuration\x12\n\n\x06SECOND\x10\x00\x12\n\n\x06MINUTE\x10\x01\x12\x08\n\x04HOUR\x10\x02\x12\x07\n\x03\x44\x41Y\x10\x03\x12\x08\n\x04WEEK\x10\x04\x12\t\n\x05MONTH\x10\x05\x12\x08\n\x04YEAR\x10\x06\x32\xdc\x02\n\x0fWorkflowService\x12\x34\n\x0bPutWorkflow\x12\x13.PutWorkflowRequest\x1a\x10.WorkflowVersion\x12>\n\x10ScheduleWorkflow\x12\x18.ScheduleWorkflowRequest\x1a\x10.WorkflowVersion\x12\x44\n\x0fTriggerWorkflow\x12\x17.TriggerWorkflowRequest\x1a\x18.TriggerWorkflowResponse\x12P\n\x13\x42ulkTriggerWorkflow\x12\x1b.BulkTriggerWorkflowRequest\x1a\x1c.BulkTriggerWorkflowResponse\x12;\n\x0cPutRateLimit\x12\x14.PutRateLimitRequest\x1a\x15.PutRateLimitResponseBBZ@github.com/hatchet-dev/hatchet/internal/services/admin/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -35,16 +36,16 @@ _globals['DESCRIPTOR']._serialized_options = b'Z@github.com/hatchet-dev/hatchet/internal/services/admin/contracts' _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._loaded_options = None _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_options = b'8\001' - _globals['_STICKYSTRATEGY']._serialized_start=3437 - _globals['_STICKYSTRATEGY']._serialized_end=3473 - _globals['_WORKFLOWKIND']._serialized_start=3475 - _globals['_WORKFLOWKIND']._serialized_end=3525 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3527 - _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3654 - _globals['_WORKERLABELCOMPARATOR']._serialized_start=3657 - _globals['_WORKERLABELCOMPARATOR']._serialized_end=3790 - _globals['_RATELIMITDURATION']._serialized_start=3792 - _globals['_RATELIMITDURATION']._serialized_end=3885 + _globals['_STICKYSTRATEGY']._serialized_start=3477 + _globals['_STICKYSTRATEGY']._serialized_end=3513 + _globals['_WORKFLOWKIND']._serialized_start=3515 + _globals['_WORKFLOWKIND']._serialized_end=3565 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_start=3567 + _globals['_CONCURRENCYLIMITSTRATEGY']._serialized_end=3694 + _globals['_WORKERLABELCOMPARATOR']._serialized_start=3697 + _globals['_WORKERLABELCOMPARATOR']._serialized_end=3830 + 
_globals['_RATELIMITDURATION']._serialized_start=3832 + _globals['_RATELIMITDURATION']._serialized_end=3925 _globals['_PUTWORKFLOWREQUEST']._serialized_start=52 _globals['_PUTWORKFLOWREQUEST']._serialized_end=114 _globals['_CREATEWORKFLOWVERSIONOPTS']._serialized_start=117 @@ -54,37 +55,37 @@ _globals['_CREATEWORKFLOWJOBOPTS']._serialized_start=905 _globals['_CREATEWORKFLOWJOBOPTS']._serialized_end=1009 _globals['_DESIREDWORKERLABELS']._serialized_start=1012 - _globals['_DESIREDWORKERLABELS']._serialized_end=1237 - _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=1240 - _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1677 - _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1561 - _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1634 - _globals['_CREATESTEPRATELIMIT']._serialized_start=1680 - _globals['_CREATESTEPRATELIMIT']._serialized_end=1930 - _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1932 - _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1954 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1957 - _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2326 - _globals['_SCHEDULEDWORKFLOW']._serialized_start=2328 - _globals['_SCHEDULEDWORKFLOW']._serialized_end=2407 - _globals['_WORKFLOWVERSION']._serialized_start=2410 - _globals['_WORKFLOWVERSION']._serialized_end=2637 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2639 - _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2702 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2704 - _globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2761 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2763 - _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2835 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2837 - _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2892 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2895 - _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=3270 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3272 - _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3322 - _globals['_PUTRATELIMITREQUEST']._serialized_start=3324 - _globals['_PUTRATELIMITREQUEST']._serialized_end=3411 - _globals['_PUTRATELIMITRESPONSE']._serialized_start=3413 - _globals['_PUTRATELIMITRESPONSE']._serialized_end=3435 - _globals['_WORKFLOWSERVICE']._serialized_start=3888 - _globals['_WORKFLOWSERVICE']._serialized_end=4236 + _globals['_DESIREDWORKERLABELS']._serialized_end=1241 + _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_start=1244 + _globals['_CREATEWORKFLOWSTEPOPTS']._serialized_end=1681 + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_start=1565 + _globals['_CREATEWORKFLOWSTEPOPTS_WORKERLABELSENTRY']._serialized_end=1638 + _globals['_CREATESTEPRATELIMIT']._serialized_start=1684 + _globals['_CREATESTEPRATELIMIT']._serialized_end=1934 + _globals['_LISTWORKFLOWSREQUEST']._serialized_start=1936 + _globals['_LISTWORKFLOWSREQUEST']._serialized_end=1958 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_start=1961 + _globals['_SCHEDULEWORKFLOWREQUEST']._serialized_end=2348 + _globals['_SCHEDULEDWORKFLOW']._serialized_start=2350 + _globals['_SCHEDULEDWORKFLOW']._serialized_end=2429 + _globals['_WORKFLOWVERSION']._serialized_start=2432 + _globals['_WORKFLOWVERSION']._serialized_end=2659 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_start=2661 + _globals['_WORKFLOWTRIGGEREVENTREF']._serialized_end=2724 + _globals['_WORKFLOWTRIGGERCRONREF']._serialized_start=2726 + 
_globals['_WORKFLOWTRIGGERCRONREF']._serialized_end=2783 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_start=2785 + _globals['_BULKTRIGGERWORKFLOWREQUEST']._serialized_end=2857 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_start=2859 + _globals['_BULKTRIGGERWORKFLOWRESPONSE']._serialized_end=2914 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_start=2917 + _globals['_TRIGGERWORKFLOWREQUEST']._serialized_end=3310 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_start=3312 + _globals['_TRIGGERWORKFLOWRESPONSE']._serialized_end=3362 + _globals['_PUTRATELIMITREQUEST']._serialized_start=3364 + _globals['_PUTRATELIMITREQUEST']._serialized_end=3451 + _globals['_PUTRATELIMITRESPONSE']._serialized_start=3453 + _globals['_PUTRATELIMITRESPONSE']._serialized_end=3475 + _globals['_WORKFLOWSERVICE']._serialized_start=3928 + _globals['_WORKFLOWSERVICE']._serialized_end=4276 # @@protoc_insertion_point(module_scope) diff --git a/sdks/python/hatchet_sdk/contracts/workflows_pb2.pyi b/sdks/python/hatchet_sdk/contracts/workflows_pb2.pyi index 5ff0f641d9..6056044721 100644 --- a/sdks/python/hatchet_sdk/contracts/workflows_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/workflows_pb2.pyi @@ -1,12 +1,15 @@ import datetime +from collections.abc import Iterable as _Iterable +from collections.abc import Mapping as _Mapping +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional +from typing import Union as _Union +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from collections.abc import Iterable as _Iterable, Mapping as _Mapping -from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor @@ -132,18 +135,18 @@ class CreateWorkflowJobOpts(_message.Message): def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., steps: _Optional[_Iterable[_Union[CreateWorkflowStepOpts, _Mapping]]] = ...) -> None: ... class DesiredWorkerLabels(_message.Message): - __slots__ = ("strValue", "intValue", "required", "comparator", "weight") - STRVALUE_FIELD_NUMBER: _ClassVar[int] - INTVALUE_FIELD_NUMBER: _ClassVar[int] + __slots__ = ("str_value", "int_value", "required", "comparator", "weight") + STR_VALUE_FIELD_NUMBER: _ClassVar[int] + INT_VALUE_FIELD_NUMBER: _ClassVar[int] REQUIRED_FIELD_NUMBER: _ClassVar[int] COMPARATOR_FIELD_NUMBER: _ClassVar[int] WEIGHT_FIELD_NUMBER: _ClassVar[int] - strValue: str - intValue: int + str_value: str + int_value: int required: bool comparator: WorkerLabelComparator weight: int - def __init__(self, strValue: _Optional[str] = ..., intValue: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ... + def __init__(self, str_value: _Optional[str] = ..., int_value: _Optional[int] = ..., required: bool = ..., comparator: _Optional[_Union[WorkerLabelComparator, str]] = ..., weight: _Optional[int] = ...) -> None: ... 
class CreateWorkflowStepOpts(_message.Message): __slots__ = ("readable_id", "action", "timeout", "inputs", "parents", "user_data", "retries", "rate_limits", "worker_labels", "backoff_factor", "backoff_max_seconds") @@ -199,12 +202,12 @@ class ListWorkflowsRequest(_message.Message): def __init__(self) -> None: ... class ScheduleWorkflowRequest(_message.Message): - __slots__ = ("name", "schedules", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "priority") + __slots__ = ("name", "schedules", "input", "parent_id", "parent_task_run_external_id", "child_index", "child_key", "additional_metadata", "priority") NAME_FIELD_NUMBER: _ClassVar[int] SCHEDULES_FIELD_NUMBER: _ClassVar[int] INPUT_FIELD_NUMBER: _ClassVar[int] PARENT_ID_FIELD_NUMBER: _ClassVar[int] - PARENT_STEP_RUN_ID_FIELD_NUMBER: _ClassVar[int] + PARENT_TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] CHILD_INDEX_FIELD_NUMBER: _ClassVar[int] CHILD_KEY_FIELD_NUMBER: _ClassVar[int] ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] @@ -213,12 +216,12 @@ class ScheduleWorkflowRequest(_message.Message): schedules: _containers.RepeatedCompositeFieldContainer[_timestamp_pb2.Timestamp] input: str parent_id: str - parent_step_run_id: str + parent_task_run_external_id: str child_index: int child_key: str additional_metadata: str priority: int - def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., priority: _Optional[int] = ...) -> None: ... + def __init__(self, name: _Optional[str] = ..., schedules: _Optional[_Iterable[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]]] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_task_run_external_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., priority: _Optional[int] = ...) -> None: ... class ScheduledWorkflow(_message.Message): __slots__ = ("id", "trigger_at") @@ -275,11 +278,11 @@ class BulkTriggerWorkflowResponse(_message.Message): def __init__(self, workflow_run_ids: _Optional[_Iterable[str]] = ...) -> None: ... 
class TriggerWorkflowRequest(_message.Message): - __slots__ = ("name", "input", "parent_id", "parent_step_run_id", "child_index", "child_key", "additional_metadata", "desired_worker_id", "priority") + __slots__ = ("name", "input", "parent_id", "parent_task_run_external_id", "child_index", "child_key", "additional_metadata", "desired_worker_id", "priority") NAME_FIELD_NUMBER: _ClassVar[int] INPUT_FIELD_NUMBER: _ClassVar[int] PARENT_ID_FIELD_NUMBER: _ClassVar[int] - PARENT_STEP_RUN_ID_FIELD_NUMBER: _ClassVar[int] + PARENT_TASK_RUN_EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] CHILD_INDEX_FIELD_NUMBER: _ClassVar[int] CHILD_KEY_FIELD_NUMBER: _ClassVar[int] ADDITIONAL_METADATA_FIELD_NUMBER: _ClassVar[int] @@ -288,13 +291,13 @@ class TriggerWorkflowRequest(_message.Message): name: str input: str parent_id: str - parent_step_run_id: str + parent_task_run_external_id: str child_index: int child_key: str additional_metadata: str desired_worker_id: str priority: int - def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_step_run_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., desired_worker_id: _Optional[str] = ..., priority: _Optional[int] = ...) -> None: ... + def __init__(self, name: _Optional[str] = ..., input: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_task_run_external_id: _Optional[str] = ..., child_index: _Optional[int] = ..., child_key: _Optional[str] = ..., additional_metadata: _Optional[str] = ..., desired_worker_id: _Optional[str] = ..., priority: _Optional[int] = ...) -> None: ... class TriggerWorkflowResponse(_message.Message): __slots__ = ("workflow_run_id",) diff --git a/sdks/python/hatchet_sdk/contracts/workflows_pb2_grpc.py b/sdks/python/hatchet_sdk/contracts/workflows_pb2_grpc.py index aa944e5dff..006939da0b 100644 --- a/sdks/python/hatchet_sdk/contracts/workflows_pb2_grpc.py +++ b/sdks/python/hatchet_sdk/contracts/workflows_pb2_grpc.py @@ -1,8 +1,9 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc + from hatchet_sdk.contracts import workflows_pb2 as workflows__pb2 GRPC_GENERATED_VERSION = '1.76.0' diff --git a/sdks/python/hatchet_sdk/exceptions.py b/sdks/python/hatchet_sdk/exceptions.py index b7898e6bd5..ef2a074b69 100644 --- a/sdks/python/hatchet_sdk/exceptions.py +++ b/sdks/python/hatchet_sdk/exceptions.py @@ -1,5 +1,6 @@ import json import traceback +from enum import Enum from typing import cast @@ -171,3 +172,57 @@ class IllegalTaskOutputError(Exception): class LifespanSetupError(Exception): pass + + +class CancellationReason(Enum): + """Reason for cancellation of an operation.""" + + USER_REQUESTED = "user_requested" + """The user explicitly requested cancellation.""" + + TIMEOUT = "timeout" + """The operation timed out.""" + + PARENT_CANCELLED = "parent_cancelled" + """The parent workflow or task was cancelled.""" + + WORKFLOW_CANCELLED = "workflow_cancelled" + """The workflow run was cancelled.""" + + TOKEN_CANCELLED = "token_cancelled" + """The cancellation token was cancelled.""" + + UNKNOWN = "unknown" + """Unknown or unspecified cancellation reason.""" + + +class CancelledError(BaseException): + """ + Raised when an operation is cancelled via CancellationToken. 
+ + This exception inherits from BaseException (not Exception) so that it + won't be caught by bare `except Exception:` handlers. This mirrors the + behavior of asyncio.CancelledError in Python 3.8+. + + To catch this exception, use: + - `except CancelledError:` (recommended) + - `except BaseException:` (catches all exceptions) + + This exception is used for sync code paths. For async code paths, + asyncio.CancelledError is used instead. + + :param message: Optional message describing the cancellation. + :param reason: Optional enum indicating the reason for cancellation. + """ + + def __init__( + self, + message: str = "Operation cancelled", + reason: CancellationReason | None = None, + ) -> None: + self.reason = reason + super().__init__(message) + + @property + def message(self) -> str: + return str(self.args[0]) if self.args else "Operation cancelled" diff --git a/sdks/python/hatchet_sdk/hatchet.py b/sdks/python/hatchet_sdk/hatchet.py index 65c24db684..568f10ce44 100644 --- a/sdks/python/hatchet_sdk/hatchet.py +++ b/sdks/python/hatchet_sdk/hatchet.py @@ -38,6 +38,7 @@ normalize_validator, ) from hatchet_sdk.runnables.workflow import BaseWorkflow, Standalone, Workflow +from hatchet_sdk.utils.slots import normalize_slot_config, resolve_worker_slot_config from hatchet_sdk.utils.timedelta_to_expression import Duration from hatchet_sdk.utils.typing import CoroutineLike, JSONSerializableMapping from hatchet_sdk.worker.worker import LifespanFn, Worker @@ -178,8 +179,8 @@ def namespace(self) -> str: def worker( self, name: str, - slots: int = 100, - durable_slots: int = 1_000, + slots: int | None = None, + durable_slots: int | None = None, labels: dict[str, str | int] | None = None, workflows: list[BaseWorkflow[Any]] | None = None, lifespan: LifespanFn | None = None, @@ -189,9 +190,9 @@ def worker( :param name: The name of the worker. - :param slots: The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time + :param slots: The number of slots for standard tasks. If omitted, a default is resolved from the registered workflows. - :param durable_slots: The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable. + :param durable_slots: The number of slots for durable tasks. If omitted, a default is resolved from the registered workflows. :param labels: A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. 
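With both counts now optional, a worker can omit them entirely and let resolve_worker_slot_config (added later in this patch) derive defaults from the registered workflows, while explicit legacy counts still take precedence. A sketch of both styles, assuming a configured Hatchet client named hatchet and a registered workflow my_workflow:

    # Slot defaults resolved from the registered workflows.
    worker = hatchet.worker("my-worker", workflows=[my_workflow])

    # Explicit legacy counts are preserved as-is.
    tuned = hatchet.worker(
        "tuned-worker", slots=50, durable_slots=500, workflows=[my_workflow]
    )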
@@ -207,10 +208,16 @@ def worker( except RuntimeError: loop = None + resolved_config = resolve_worker_slot_config( + None, + slots, + durable_slots, + workflows, + ) + return Worker( name=name, - slots=slots, - durable_slots=durable_slots, + slot_config=normalize_slot_config(resolved_config), labels=labels, config=self._client.config, debug=self._client.debug, diff --git a/sdks/python/hatchet_sdk/runnables/contextvars.py b/sdks/python/hatchet_sdk/runnables/contextvars.py index 0d3c9d4904..dc0c9b2244 100644 --- a/sdks/python/hatchet_sdk/runnables/contextvars.py +++ b/sdks/python/hatchet_sdk/runnables/contextvars.py @@ -1,11 +1,17 @@ +from __future__ import annotations + import asyncio import threading from collections import Counter from contextvars import ContextVar +from typing import TYPE_CHECKING from hatchet_sdk.runnables.action import ActionKey from hatchet_sdk.utils.typing import JSONSerializableMapping +if TYPE_CHECKING: + from hatchet_sdk.cancellation import CancellationToken + ctx_workflow_run_id: ContextVar[str | None] = ContextVar( "ctx_workflow_run_id", default=None ) @@ -20,6 +26,9 @@ ctx_task_retry_count: ContextVar[int | None] = ContextVar( "ctx_task_retry_count", default=0 ) +ctx_cancellation_token: ContextVar[CancellationToken | None] = ContextVar( + "ctx_cancellation_token", default=None +) workflow_spawn_indices = Counter[ActionKey]() spawn_index_lock = asyncio.Lock() diff --git a/sdks/python/hatchet_sdk/runnables/task.py b/sdks/python/hatchet_sdk/runnables/task.py index d43b20df82..7671c739fb 100644 --- a/sdks/python/hatchet_sdk/runnables/task.py +++ b/sdks/python/hatchet_sdk/runnables/task.py @@ -149,8 +149,12 @@ def __init__( wait_for: list[Condition | OrGroup] | None, skip_if: list[Condition | OrGroup] | None, cancel_if: list[Condition | OrGroup] | None, + slot_requests: dict[str, int] | None = None, ) -> None: self.is_durable = is_durable + if slot_requests is None: + slot_requests = {"durable": 1} if is_durable else {"default": 1} + self.slot_requests = slot_requests self.fn = _fn self.is_async_function = is_async_fn(self.fn) # type: ignore @@ -377,21 +381,27 @@ def to_proto(self, service_name: str) -> CreateTaskOpts: else: concurrency = self.concurrency - return CreateTaskOpts( - readable_id=self.name, - action=service_name + ":" + self.name, - timeout=timedelta_to_expr(self.execution_timeout), - inputs="{}", - parents=[p.name for p in self.parents], - retries=self.retries, - rate_limits=self.rate_limits, - worker_labels=self.desired_worker_labels, - backoff_factor=self.backoff_factor, - backoff_max_seconds=self.backoff_max_seconds, - concurrency=[t.to_proto() for t in concurrency], - conditions=self._conditions_to_proto(), - schedule_timeout=timedelta_to_expr(self.schedule_timeout), - ) + opts: dict[str, Any] = { + "readable_id": self.name, + "action": service_name + ":" + self.name, + "timeout": timedelta_to_expr(self.execution_timeout), + "inputs": "{}", + "parents": [p.name for p in self.parents], + "retries": self.retries, + "rate_limits": self.rate_limits, + "worker_labels": self.desired_worker_labels, + "backoff_factor": self.backoff_factor, + "backoff_max_seconds": self.backoff_max_seconds, + "concurrency": [t.to_proto() for t in concurrency], + "conditions": self._conditions_to_proto(), + "schedule_timeout": timedelta_to_expr(self.schedule_timeout), + } + if "is_durable" in CreateTaskOpts.DESCRIPTOR.fields_by_name: + opts["is_durable"] = self.is_durable + if "slot_requests" in CreateTaskOpts.DESCRIPTOR.fields_by_name: + opts["slot_requests"] = 
self.slot_requests + + return CreateTaskOpts(**opts) def _assign_action(self, condition: Condition, action: Action) -> Condition: condition.base.action = action diff --git a/sdks/python/hatchet_sdk/runnables/workflow.py b/sdks/python/hatchet_sdk/runnables/workflow.py index 1880004192..8533ba6f68 100644 --- a/sdks/python/hatchet_sdk/runnables/workflow.py +++ b/sdks/python/hatchet_sdk/runnables/workflow.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import json from collections.abc import Callable @@ -37,8 +39,11 @@ ) from hatchet_sdk.contracts.v1.workflows_pb2 import StickyStrategy as StickyStrategyProto from hatchet_sdk.contracts.workflows_pb2 import WorkflowVersion +from hatchet_sdk.exceptions import CancellationReason, CancelledError from hatchet_sdk.labels import DesiredWorkerLabel +from hatchet_sdk.logger import logger from hatchet_sdk.rate_limit import RateLimit +from hatchet_sdk.runnables.contextvars import ctx_cancellation_token from hatchet_sdk.runnables.task import Task from hatchet_sdk.runnables.types import ( ConcurrencyExpression, @@ -52,6 +57,7 @@ normalize_validator, ) from hatchet_sdk.serde import HATCHET_PYDANTIC_SENTINEL +from hatchet_sdk.utils.cancellation import await_with_cancellation from hatchet_sdk.utils.proto_enums import convert_python_enum_to_proto from hatchet_sdk.utils.timedelta_to_expression import Duration from hatchet_sdk.utils.typing import CoroutineLike, JSONSerializableMapping @@ -59,6 +65,7 @@ if TYPE_CHECKING: from hatchet_sdk import Hatchet + from hatchet_sdk.cancellation import CancellationToken T = TypeVar("T") @@ -88,7 +95,7 @@ class ComputedTaskParameters(BaseModel): task_defaults: TaskDefaults @model_validator(mode="after") - def validate_params(self) -> "ComputedTaskParameters": + def validate_params(self) -> ComputedTaskParameters: self.execution_timeout = fall_back_to_default( value=self.execution_timeout, param_default=timedelta(seconds=60), @@ -121,8 +128,8 @@ def validate_params(self) -> "ComputedTaskParameters": def transform_desired_worker_label(d: DesiredWorkerLabel) -> DesiredWorkerLabels: value = d.value return DesiredWorkerLabels( - strValue=value if not isinstance(value, int) else None, - intValue=value if isinstance(value, int) else None, + str_value=value if not isinstance(value, int) else None, + int_value=value if isinstance(value, int) else None, required=d.required, weight=d.weight, comparator=d.comparator, # type: ignore[arg-type] @@ -136,7 +143,7 @@ class TypedTriggerWorkflowRunConfig(BaseModel, Generic[TWorkflowInput]): class BaseWorkflow(Generic[TWorkflowInput]): - def __init__(self, config: WorkflowConfig, client: "Hatchet") -> None: + def __init__(self, config: WorkflowConfig, client: Hatchet) -> None: self.config = config self._default_tasks: list[Task[TWorkflowInput, Any]] = [] self._durable_tasks: list[Task[TWorkflowInput, Any]] = [] @@ -621,41 +628,114 @@ def greet(input, ctx): and can be arranged into complex dependency patterns. """ + def _resolve_check_cancellation_token( + self, + cancellation_token: CancellationToken | None, + ) -> CancellationToken | None: + # Prefer explicit token, otherwise fall back to context. 
+ cancellation_token = cancellation_token or ctx_cancellation_token.get() + + if cancellation_token and cancellation_token.is_cancelled: + raise CancelledError( + "Operation cancelled by cancellation token", + reason=CancellationReason.TOKEN_CANCELLED, + ) + + return cancellation_token + + def _register_child_with_token( + self, + cancellation_token: CancellationToken | None, + workflow_run_id: str, + log_prefix: str, + ) -> None: + if not cancellation_token: + return + + logger.debug( + f"{log_prefix}: registered child {workflow_run_id} with token" + ) + cancellation_token.register_child(workflow_run_id) + + def _register_children_with_token( + self, + cancellation_token: CancellationToken | None, + refs: list[WorkflowRunRef], + log_prefix: str, + ) -> None: + if not cancellation_token: + return + + for ref in refs: + logger.debug( + f"{log_prefix}: registered child {ref.workflow_run_id} with token" + ) + cancellation_token.register_child(ref.workflow_run_id) + def run_no_wait( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> WorkflowRunRef: """ Synchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs. + If a cancellation token is provided (or available via context), the child workflow will be + registered with the token. + :param input: The input data for the workflow. :param options: Additional options for workflow execution. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A `WorkflowRunRef` object representing the reference to the workflow run. """ - return self.client._client.admin.run_workflow( + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.run_no_wait: triggering {self.config.name}, " + f"token={cancellation_token is not None}" + ) + + ref = self.client._client.admin.run_workflow( workflow_name=self.config.name, input=self._serialize_input(input), options=self._create_options_with_combined_additional_meta(options), ) + self._register_child_with_token( + cancellation_token, + ref.workflow_run_id, + "Workflow.run_no_wait", + ) + + return ref + def run( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> dict[str, Any]: """ Run the workflow synchronously and wait for it to complete. This method triggers a workflow run, blocks until completion, and returns the final result. + If a cancellation token is provided (or available via context), the wait can be interrupted. :param input: The input data for the workflow, must match the workflow's input type. :param options: Additional options for workflow execution like metadata and parent workflow ID. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: The result of the workflow execution as a dictionary. 
""" + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.run: triggering {self.config.name}, " + f"token={cancellation_token is not None}" + ) ref = self.client._client.admin.run_workflow( workflow_name=self.config.name, @@ -663,57 +743,108 @@ def run( options=self._create_options_with_combined_additional_meta(options), ) - return ref.result() + self._register_child_with_token( + cancellation_token, + ref.workflow_run_id, + "Workflow.run", + ) + + logger.debug(f"Workflow.run: awaiting result for {ref.workflow_run_id}") + + return ref.result(cancellation_token=cancellation_token) async def aio_run_no_wait( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> WorkflowRunRef: """ Asynchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs. + If a cancellation token is provided (or available via context), the child workflow will be + registered with the token. + :param input: The input data for the workflow. :param options: Additional options for workflow execution. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A `WorkflowRunRef` object representing the reference to the workflow run. """ + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) - return await self.client._client.admin.aio_run_workflow( + logger.debug( + f"Workflow.aio_run_no_wait: triggering {self.config.name}, " + f"token={cancellation_token is not None}" + ) + + ref = await self.client._client.admin.aio_run_workflow( workflow_name=self.config.name, input=self._serialize_input(input), options=self._create_options_with_combined_additional_meta(options), ) + self._register_child_with_token( + cancellation_token, + ref.workflow_run_id, + "Workflow.aio_run_no_wait", + ) + + return ref + async def aio_run( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> dict[str, Any]: """ Run the workflow asynchronously and wait for it to complete. This method triggers a workflow run, awaits until completion, and returns the final result. + If a cancellation token is provided (or available via context), the wait can be interrupted. :param input: The input data for the workflow, must match the workflow's input type. :param options: Additional options for workflow execution like metadata and parent workflow ID. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: The result of the workflow execution as a dictionary. 
""" + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.aio_run: triggering {self.config.name}, " + f"token={cancellation_token is not None}" + ) + ref = await self.client._client.admin.aio_run_workflow( workflow_name=self.config.name, input=self._serialize_input(input), options=self._create_options_with_combined_additional_meta(options), ) - return await ref.aio_result() + self._register_child_with_token( + cancellation_token, + ref.workflow_run_id, + "Workflow.aio_run", + ) + + logger.debug(f"Workflow.aio_run: awaiting result for {ref.workflow_run_id}") + + return await await_with_cancellation( + ref.aio_result(), + cancellation_token, + ) def _get_result( - self, ref: WorkflowRunRef, return_exceptions: bool + self, + ref: WorkflowRunRef, + return_exceptions: bool, + cancellation_token: CancellationToken | None = None, ) -> dict[str, Any] | BaseException: try: - return ref.result() + return ref.result(cancellation_token=cancellation_token) except Exception as e: if return_exceptions: return e @@ -724,6 +855,7 @@ def run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[True], + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any] | BaseException]: ... @overload @@ -731,32 +863,79 @@ def run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[False] = False, + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any]]: ... def run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: bool = False, + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any]] | list[dict[str, Any] | BaseException]: """ Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results. + If a cancellation token is provided (or available via context), all child workflows will be + registered with the token and the wait can be interrupted. + :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. :param return_exceptions: If `True`, exceptions will be returned as part of the results instead of raising them. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A list of results for each workflow run. + :raises CancelledError: If the cancellation token is triggered (and return_exceptions is False). + :raises Exception: If a workflow run fails (and return_exceptions is False). 
""" + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.run_many: triggering {len(workflows)} workflows, " + f"token={cancellation_token is not None}" + ) + refs = self.client._client.admin.run_workflows( workflows=workflows, ) - return [self._get_result(ref, return_exceptions) for ref in refs] + self._register_children_with_token( + cancellation_token, + refs, + "Workflow.run_many", + ) + + # Pass cancellation_token through to each result() call + # The cancellation check happens INSIDE result()'s polling loop + results: list[dict[str, Any] | BaseException] = [] + for ref in refs: + try: + results.append(ref.result(cancellation_token=cancellation_token)) + except CancelledError: # noqa: PERF203 + logger.debug( + f"Workflow.run_many: cancellation detected, stopping wait, " + f"reason={CancellationReason.PARENT_CANCELLED.value}" + ) + if return_exceptions: + results.append( + CancelledError( + "Operation cancelled by cancellation token", + reason=CancellationReason.PARENT_CANCELLED, + ) + ) + break + raise + except Exception as e: + if return_exceptions: + results.append(e) + else: + raise + return results @overload async def aio_run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[True], + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any] | BaseException]: ... @overload @@ -764,62 +943,124 @@ async def aio_run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[False] = False, + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any]]: ... async def aio_run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: bool = False, + cancellation_token: CancellationToken | None = None, ) -> list[dict[str, Any]] | list[dict[str, Any] | BaseException]: """ Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results. + If a cancellation token is provided (or available via context), all child workflows will be + registered with the token and the wait can be interrupted. + :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. :param return_exceptions: If `True`, exceptions will be returned as part of the results instead of raising them. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A list of results for each workflow run. """ + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.aio_run_many: triggering {len(workflows)} workflows, " + f"token={cancellation_token is not None}" + ) + refs = await self.client._client.admin.aio_run_workflows( workflows=workflows, ) - return await asyncio.gather( - *[ref.aio_result() for ref in refs], return_exceptions=return_exceptions + self._register_children_with_token( + cancellation_token, + refs, + "Workflow.aio_run_many", + ) + + return await await_with_cancellation( + asyncio.gather( + *[ref.aio_result() for ref in refs], return_exceptions=return_exceptions + ), + cancellation_token, ) def run_many_no_wait( self, workflows: list[WorkflowRunTriggerConfig], + cancellation_token: CancellationToken | None = None, ) -> list[WorkflowRunRef]: """ Run a workflow in bulk without waiting for all runs to complete. 
This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run. + If a cancellation token is provided (or available via context), all child workflows will be + registered with the token. + :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. """ - return self.client._client.admin.run_workflows( + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.run_many_no_wait: triggering {len(workflows)} workflows, " + f"token={cancellation_token is not None}" + ) + + refs = self.client._client.admin.run_workflows( workflows=workflows, ) + self._register_children_with_token( + cancellation_token, + refs, + "Workflow.run_many_no_wait", + ) + + return refs + async def aio_run_many_no_wait( self, workflows: list[WorkflowRunTriggerConfig], + cancellation_token: CancellationToken | None = None, ) -> list[WorkflowRunRef]: """ Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run. + If a cancellation token is provided (or available via context), all child workflows will be + registered with the token. + :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. + :param cancellation_token: Optional cancellation token. If not provided, uses token from context. :returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. """ - return await self.client._client.admin.aio_run_workflows( + cancellation_token = self._resolve_check_cancellation_token(cancellation_token) + + logger.debug( + f"Workflow.aio_run_many_no_wait: triggering {len(workflows)} workflows, " + f"token={cancellation_token is not None}" + ) + + refs = await self.client._client.admin.aio_run_workflows( workflows=workflows, ) + self._register_children_with_token( + cancellation_token, + refs, + "Workflow.aio_run_many_no_wait", + ) + + return refs + def _parse_task_name( self, name: str | None, @@ -1164,7 +1405,7 @@ def inner( return inner - def add_task(self, task: "Standalone[TWorkflowInput, Any]") -> None: + def add_task(self, task: Standalone[TWorkflowInput, Any]) -> None: """ Add a task to a workflow. Intended to be used with a previously existing task (a Standalone), such as one created with `@hatchet.task()`, which has been converted to a `Task` object using `to_task`. @@ -1203,7 +1444,7 @@ def my_task(input, ctx) -> None: class TaskRunRef(Generic[TWorkflowInput, R]): def __init__( self, - standalone: "Standalone[TWorkflowInput, R]", + standalone: Standalone[TWorkflowInput, R], workflow_run_ref: WorkflowRunRef, ): self._s = standalone @@ -1279,6 +1520,7 @@ def run( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> R: """ Run the workflow synchronously and wait for it to complete. @@ -1287,15 +1529,19 @@ def run( :param input: The input data for the workflow. :param options: Additional options for workflow execution. 
+ :param cancellation_token: Optional cancellation token to abort the wait and cancel the child. :returns: The extracted result of the workflow execution. """ - return self._extract_result(self._workflow.run(input, options)) + return self._extract_result( + self._workflow.run(input, options, cancellation_token=cancellation_token) + ) async def aio_run( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> R: """ Run the workflow asynchronously and wait for it to complete. @@ -1304,16 +1550,20 @@ async def aio_run( :param input: The input data for the workflow, must match the workflow's input type. :param options: Additional options for workflow execution like metadata and parent workflow ID. + :param cancellation_token: Optional cancellation token to abort the wait and cancel the child. :returns: The extracted result of the workflow execution. """ - result = await self._workflow.aio_run(input, options) + result = await self._workflow.aio_run( + input, options, cancellation_token=cancellation_token + ) return self._extract_result(result) def run_no_wait( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> TaskRunRef[TWorkflowInput, R]: """ Trigger a workflow run without waiting for it to complete. @@ -1322,10 +1572,13 @@ def run_no_wait( :param input: The input data for the workflow, must match the workflow's input type. :param options: Additional options for workflow execution like metadata and parent workflow ID. + :param cancellation_token: Optional cancellation token to register the child workflow with. :returns: A `TaskRunRef` object representing the reference to the workflow run. """ - ref = self._workflow.run_no_wait(input, options) + ref = self._workflow.run_no_wait( + input, options, cancellation_token=cancellation_token + ) return TaskRunRef[TWorkflowInput, R](self, ref) @@ -1333,6 +1586,7 @@ async def aio_run_no_wait( self, input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()), options: TriggerWorkflowOptions = TriggerWorkflowOptions(), + cancellation_token: CancellationToken | None = None, ) -> TaskRunRef[TWorkflowInput, R]: """ Asynchronously trigger a workflow run without waiting for it to complete. @@ -1340,10 +1594,13 @@ async def aio_run_no_wait( :param input: The input data for the workflow. :param options: Additional options for workflow execution. + :param cancellation_token: Optional cancellation token to register the child workflow with. :returns: A `TaskRunRef` object representing the reference to the workflow run. """ - ref = await self._workflow.aio_run_no_wait(input, options) + ref = await self._workflow.aio_run_no_wait( + input, options, cancellation_token=cancellation_token + ) return TaskRunRef[TWorkflowInput, R](self, ref) @@ -1352,6 +1609,7 @@ def run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[True], + cancellation_token: CancellationToken | None = None, ) -> list[R | BaseException]: ... @overload @@ -1359,10 +1617,14 @@ def run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[False] = False, + cancellation_token: CancellationToken | None = None, ) -> list[R]: ... 
def run_many( - self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: bool = False + self, + workflows: list[WorkflowRunTriggerConfig], + return_exceptions: bool = False, + cancellation_token: CancellationToken | None = None, ) -> list[R] | list[R | BaseException]: """ Run a workflow in bulk and wait for all runs to complete. @@ -1370,6 +1632,7 @@ def run_many( :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. :param return_exceptions: If `True`, exceptions will be returned as part of the results instead of raising them. + :param cancellation_token: Optional cancellation token to abort the wait and cancel all children. :returns: A list of results for each workflow run. """ return [ @@ -1378,6 +1641,7 @@ def run_many( workflows, ## hack: typing needs literal True if return_exceptions else False, # noqa: SIM210 + cancellation_token=cancellation_token, ) ] @@ -1386,6 +1650,7 @@ async def aio_run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[True], + cancellation_token: CancellationToken | None = None, ) -> list[R | BaseException]: ... @overload @@ -1393,10 +1658,14 @@ async def aio_run_many( self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: Literal[False] = False, + cancellation_token: CancellationToken | None = None, ) -> list[R]: ... async def aio_run_many( - self, workflows: list[WorkflowRunTriggerConfig], return_exceptions: bool = False + self, + workflows: list[WorkflowRunTriggerConfig], + return_exceptions: bool = False, + cancellation_token: CancellationToken | None = None, ) -> list[R] | list[R | BaseException]: """ Run a workflow in bulk and wait for all runs to complete. @@ -1404,6 +1673,7 @@ async def aio_run_many( :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. :param return_exceptions: If `True`, exceptions will be returned as part of the results instead of raising them. + :param cancellation_token: Optional cancellation token to abort the wait and cancel all children. :returns: A list of results for each workflow run. """ return [ @@ -1412,11 +1682,14 @@ async def aio_run_many( workflows, ## hack: typing needs literal True if return_exceptions else False, # noqa: SIM210 + cancellation_token=cancellation_token, ) ] def run_many_no_wait( - self, workflows: list[WorkflowRunTriggerConfig] + self, + workflows: list[WorkflowRunTriggerConfig], + cancellation_token: CancellationToken | None = None, ) -> list[TaskRunRef[TWorkflowInput, R]]: """ Run a workflow in bulk without waiting for all runs to complete. @@ -1424,14 +1697,19 @@ def run_many_no_wait( This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run. :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. + :param cancellation_token: Optional cancellation token to register all children with. :returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. 
""" - refs = self._workflow.run_many_no_wait(workflows) + refs = self._workflow.run_many_no_wait( + workflows, cancellation_token=cancellation_token + ) return [TaskRunRef[TWorkflowInput, R](self, ref) for ref in refs] async def aio_run_many_no_wait( - self, workflows: list[WorkflowRunTriggerConfig] + self, + workflows: list[WorkflowRunTriggerConfig], + cancellation_token: CancellationToken | None = None, ) -> list[TaskRunRef[TWorkflowInput, R]]: """ Run a workflow in bulk without waiting for all runs to complete. @@ -1439,10 +1717,13 @@ async def aio_run_many_no_wait( This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run. :param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. + :param cancellation_token: Optional cancellation token to register all children with. :returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. """ - refs = await self._workflow.aio_run_many_no_wait(workflows) + refs = await self._workflow.aio_run_many_no_wait( + workflows, cancellation_token=cancellation_token + ) return [TaskRunRef[TWorkflowInput, R](self, ref) for ref in refs] diff --git a/sdks/python/hatchet_sdk/utils/cancellation.py b/sdks/python/hatchet_sdk/utils/cancellation.py new file mode 100644 index 0000000000..63e191366f --- /dev/null +++ b/sdks/python/hatchet_sdk/utils/cancellation.py @@ -0,0 +1,106 @@ +"""Utilities for cancellation-aware operations.""" + +from __future__ import annotations + +import asyncio +import contextlib +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, TypeVar + +from hatchet_sdk.logger import logger + +if TYPE_CHECKING: + from hatchet_sdk.cancellation import CancellationToken + +T = TypeVar("T") + + +async def await_with_cancellation( + coro: Awaitable[T], + token: CancellationToken | None, + cancel_callback: Callable[[], Awaitable[None]] | None = None, +) -> T: + """ + Await an awaitable with cancellation support. + + This function races the given awaitable against a cancellation token. If the + token is cancelled before the awaitable completes, the awaitable is cancelled + and an asyncio.CancelledError is raised. + + Args: + coro: The awaitable to await (coroutine, Future, or Task). + token: The cancellation token to check. If None, the coroutine is awaited directly. + cancel_callback: An optional async callback to invoke when cancellation occurs + (e.g., to cancel child workflows). + + Returns: + The result of the coroutine. + + Raises: + asyncio.CancelledError: If the token is cancelled before the coroutine completes. 
+ + Example: + ```python + async def long_running_task(): + await asyncio.sleep(10) + return "done" + + token = CancellationToken() + + # This will raise asyncio.CancelledError if token.cancel() is called + result = await await_with_cancellation( + long_running_task(), + token, + cancel_callback=lambda: cleanup() + ) + ``` + """ + if token is None: + logger.debug("await_with_cancellation: no token provided, awaiting directly") + return await coro + + logger.debug("await_with_cancellation: starting with cancellation token") + + # Check if already cancelled + if token.is_cancelled: + logger.debug("await_with_cancellation: token already cancelled") + if cancel_callback: + logger.debug("await_with_cancellation: invoking cancel callback") + await cancel_callback() + raise asyncio.CancelledError("Operation cancelled by cancellation token") + + main_task = asyncio.ensure_future(coro) + cancel_task = asyncio.create_task(token.aio_wait()) + + try: + done, pending = await asyncio.wait( + [main_task, cancel_task], + return_when=asyncio.FIRST_COMPLETED, + ) + + # Cancel pending tasks + for task in pending: + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + + if cancel_task in done: + logger.debug("await_with_cancellation: cancelled before completion") + if cancel_callback: + logger.debug("await_with_cancellation: invoking cancel callback") + await cancel_callback() + raise asyncio.CancelledError("Operation cancelled by cancellation token") + + logger.debug("await_with_cancellation: completed successfully") + return main_task.result() + + except asyncio.CancelledError: + # If we're cancelled externally (not via token), also invoke callback + logger.debug("await_with_cancellation: externally cancelled") + main_task.cancel() + cancel_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await main_task + with contextlib.suppress(asyncio.CancelledError): + await cancel_task + raise diff --git a/sdks/python/hatchet_sdk/utils/slots.py b/sdks/python/hatchet_sdk/utils/slots.py new file mode 100644 index 0000000000..a440cb8b22 --- /dev/null +++ b/sdks/python/hatchet_sdk/utils/slots.py @@ -0,0 +1,80 @@ +from typing import Any + +from hatchet_sdk.runnables.workflow import BaseWorkflow +from hatchet_sdk.worker.slot_types import SlotType + + +def normalize_slot_config( + slot_config: dict[SlotType | str, int], +) -> dict[str, int]: + normalized: dict[str, int] = {} + for key, value in slot_config.items(): + normalized_key = key.value if isinstance(key, SlotType) else key + normalized[normalized_key] = value + return normalized + + +def has_slot_config( + slot_config: dict[SlotType | str, int], slot_type: SlotType +) -> bool: + return slot_type in slot_config or slot_type.value in slot_config + + +def ensure_slot_config( + slot_config: dict[SlotType | str, int], slot_type: SlotType, default_value: int +) -> None: + if not has_slot_config(slot_config, slot_type): + slot_config[slot_type] = default_value + + +def required_slot_types_from_workflows( + workflows: list[BaseWorkflow[Any]] | None, +) -> set[SlotType]: + required: set[SlotType] = set() + if not workflows: + return required + + for workflow in workflows: + for task in workflow.tasks: + if task.is_durable: + required.add(SlotType.DURABLE) + for key in task.slot_requests: + if key == SlotType.DEFAULT or key == SlotType.DEFAULT.value: + required.add(SlotType.DEFAULT) + if key == SlotType.DURABLE or key == SlotType.DURABLE.value: + required.add(SlotType.DURABLE) + + return required + + +def 
resolve_worker_slot_config( + slot_config: dict[SlotType | str, int] | None, + slots: int | None, + durable_slots: int | None, + workflows: list[BaseWorkflow[Any]] | None, +) -> dict[SlotType | str, int]: + resolved_config: dict[SlotType | str, int] + + if slot_config is not None: + resolved_config = slot_config + else: + legacy_config: dict[SlotType | str, int] = { + key: value + for key, value in ( + (SlotType.DEFAULT, slots), + (SlotType.DURABLE, durable_slots), + ) + if value is not None + } + resolved_config = legacy_config if legacy_config else {} + + required_slot_types = required_slot_types_from_workflows(workflows) + if SlotType.DEFAULT in required_slot_types: + ensure_slot_config(resolved_config, SlotType.DEFAULT, 100) + if SlotType.DURABLE in required_slot_types: + ensure_slot_config(resolved_config, SlotType.DURABLE, 1000) + + if not resolved_config: + resolved_config[SlotType.DEFAULT] = 100 + + return resolved_config diff --git a/sdks/python/hatchet_sdk/worker/action_listener_process.py b/sdks/python/hatchet_sdk/worker/action_listener_process.py index 5befd4bce6..6db65998b4 100644 --- a/sdks/python/hatchet_sdk/worker/action_listener_process.py +++ b/sdks/python/hatchet_sdk/worker/action_listener_process.py @@ -3,6 +3,7 @@ import logging import signal import time +import warnings from dataclasses import dataclass from datetime import timedelta from enum import Enum @@ -67,7 +68,7 @@ def __init__( self, name: str, actions: list[str], - slots: int, + slot_config: dict[str, int], config: ClientConfig, action_queue: "Queue[Action]", event_queue: "Queue[ActionEvent | STOP_LOOP_TYPE]", @@ -77,7 +78,9 @@ def __init__( ) -> None: self.name = name self.actions = actions - self.slots = slots + self.slot_config = slot_config + self._slots = slot_config.get("default", 0) + self._durable_slots = slot_config.get("durable", 0) self.config = config self.action_queue = action_queue self.event_queue = event_queue @@ -129,6 +132,24 @@ def __init__( "Event loop lag in seconds (listener process)", ) + @property + def slots(self) -> int: + warnings.warn( + "WorkerActionListenerProcess.slots is deprecated; use slot_config['default'] instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._slots + + @property + def durable_slots(self) -> int: + warnings.warn( + "WorkerActionListenerProcess.durable_slots is deprecated; use slot_config['durable'] instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._durable_slots + async def _monitor_event_loop(self) -> None: # If the loop is blocked, this coroutine itself can't run; when it resumes, # we detect the lag by comparing elapsed time vs expected sleep. 
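The helpers in utils/slots.py above pin down the fallback order: an explicit slot_config wins, then the legacy slots/durable_slots keywords, then per-type defaults (100 default, 1000 durable) for whichever slot types the registered workflows require. A small sketch of the expected outputs:

    from hatchet_sdk.utils.slots import normalize_slot_config, resolve_worker_slot_config

    # Nothing specified and no workflows: falls back to {"default": 100}.
    cfg = resolve_worker_slot_config(None, None, None, None)
    assert normalize_slot_config(cfg) == {"default": 100}

    # Legacy keyword counts are kept as-is.
    cfg = resolve_worker_slot_config(None, 50, 500, None)
    assert normalize_slot_config(cfg) == {"default": 50, "durable": 500}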
@@ -320,7 +341,7 @@ async def start(self, retry_attempt: int = 0) -> None: worker_name=self.name, services=["default"], actions=self.actions, - slots=self.slots, + slot_config=self.slot_config, raw_labels=self.labels, ) ) @@ -518,7 +539,7 @@ def exit_forcefully(self) -> None: def worker_action_listener_process( name: str, actions: list[str], - slots: int, + slot_config: dict[str, int], config: ClientConfig, action_queue: "Queue[Action]", event_queue: "Queue[ActionEvent | STOP_LOOP_TYPE]", @@ -530,7 +551,7 @@ async def run() -> None: process = WorkerActionListenerProcess( name=name, actions=actions, - slots=slots, + slot_config=slot_config, config=config, action_queue=action_queue, event_queue=event_queue, diff --git a/sdks/python/hatchet_sdk/worker/runner/runner.py b/sdks/python/hatchet_sdk/worker/runner/runner.py index 4cd6a2e02b..fde79d682a 100644 --- a/sdks/python/hatchet_sdk/worker/runner/runner.py +++ b/sdks/python/hatchet_sdk/worker/runner/runner.py @@ -2,6 +2,7 @@ import ctypes import functools import json +import time from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor from dataclasses import asdict, is_dataclass @@ -29,6 +30,7 @@ STEP_EVENT_TYPE_STARTED, ) from hatchet_sdk.exceptions import ( + CancellationReason, IllegalTaskOutputError, NonRetryableException, TaskRunError, @@ -39,6 +41,7 @@ from hatchet_sdk.runnables.contextvars import ( ctx_action_key, ctx_additional_metadata, + ctx_cancellation_token, ctx_step_run_id, ctx_task_retry_count, ctx_worker_id, @@ -60,6 +63,7 @@ ContextVarToCopyDict, ContextVarToCopyInt, ContextVarToCopyStr, + ContextVarToCopyToken, copy_context_vars, ) @@ -251,6 +255,7 @@ async def async_wrapped_action_func( ctx_action_key.set(action.key) ctx_additional_metadata.set(action.additional_metadata) ctx_task_retry_count.set(action.retry_count) + ctx_cancellation_token.set(ctx.cancellation_token) async with task._unpack_dependencies_with_cleanup(ctx) as dependencies: try: @@ -298,6 +303,12 @@ async def async_wrapped_action_func( value=action.retry_count, ) ), + ContextVarToCopy( + var=ContextVarToCopyToken( + name="ctx_cancellation_token", + value=ctx.cancellation_token, + ) + ), ], self.thread_action_func, ctx, @@ -480,27 +491,95 @@ def force_kill_thread(self, thread: Thread) -> None: ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor async def handle_cancel_action(self, action: Action) -> None: key = action.key + start_time = time.monotonic() + + logger.debug( + f"Cancellation: received cancel action for {action.action_id}, " + f"reason={CancellationReason.WORKFLOW_CANCELLED.value}" + ) + try: - # call cancel to signal the context to stop + # Trigger the cancellation token to signal the context to stop if key in self.contexts: - self.contexts[key]._set_cancellation_flag() + ctx = self.contexts[key] + child_count = len(ctx.cancellation_token.get_child_run_ids()) + logger.debug( + f"Cancellation: triggering token for {action.action_id}, " + f"reason={CancellationReason.WORKFLOW_CANCELLED.value}, " + f"{child_count} children registered" + ) + ctx._set_cancellation_flag(CancellationReason.WORKFLOW_CANCELLED) self.cancellations[key] = True + # Note: Child workflows are not cancelled here - they run independently + # and are managed by Hatchet's normal cancellation mechanisms + else: + logger.debug(f"Cancellation: no context found for {action.action_id}") + + # Wait with supervision (using timedelta configs) + grace_period = self.config.cancellation_grace_period.total_seconds() + 
warning_threshold = ( + self.config.cancellation_warning_threshold.total_seconds() + ) - await asyncio.sleep(1) + # Wait until warning threshold + await asyncio.sleep(warning_threshold) + elapsed = time.monotonic() - start_time - if key in self.tasks: - self.tasks[key].cancel() + # Check if task is still running after warning threshold + task_was_running = key in self.tasks and not self.tasks[key].done() - # check if thread is still running, if so, print a warning - if key in self.threads: - thread = self.threads[key] + if task_was_running: + logger.warning( + f"Cancellation: task {action.action_id} has not cancelled after " + f"{elapsed:.1f}s. Consider checking for blocking operations. " + f"See https://docs.hatchet.run/home/cancellation" + ) - if self.config.enable_force_kill_sync_threads: - self.force_kill_thread(thread) - await asyncio.sleep(1) + # Continue waiting until grace period only if task is still running + remaining = grace_period - elapsed + if remaining > 0: + await asyncio.sleep(remaining) - logger.warning( - f"thread {self.threads[key].ident} with key {key} is still running after cancellation. This could cause the thread pool to get blocked and prevent new tasks from running." + # Force cancel if still running after grace period + if key in self.tasks and not self.tasks[key].done(): + logger.debug( + f"Cancellation: force-cancelling task {action.action_id} after grace period" + ) + self.tasks[key].cancel() + + # Check if thread is still running + if key in self.threads: + thread = self.threads[key] + + if self.config.enable_force_kill_sync_threads: + logger.debug( + f"Cancellation: force-killing thread for {action.action_id}" + ) + self.force_kill_thread(thread) + await asyncio.sleep(1) + + if thread.is_alive(): + logger.warning( + f"Cancellation: thread {thread.ident} with key {key} is still running " + f"after cancellation. This could cause the thread pool to get blocked " + f"and prevent new tasks from running." 
+ ) + + # Log final status for slow cancellation + total_elapsed = time.monotonic() - start_time + if total_elapsed > grace_period: + logger.warning( + f"Cancellation: cancellation of {action.action_id} took {total_elapsed:.1f}s " + f"(exceeded grace period of {self.config.cancellation_grace_period})" + ) + else: + logger.debug( + f"Cancellation: task {action.action_id} eventually completed in {total_elapsed:.1f}s" + ) + else: + # Task completed quickly - log success and exit + logger.debug( + f"Cancellation: task {action.action_id} completed within {elapsed:.1f}s" ) finally: self.cleanup_run_id(key) diff --git a/sdks/python/hatchet_sdk/worker/runner/utils/capture_logs.py b/sdks/python/hatchet_sdk/worker/runner/utils/capture_logs.py index 6fd8b52ee1..07450587a8 100644 --- a/sdks/python/hatchet_sdk/worker/runner/utils/capture_logs.py +++ b/sdks/python/hatchet_sdk/worker/runner/utils/capture_logs.py @@ -1,17 +1,20 @@ +from __future__ import annotations + import asyncio import functools import logging from collections.abc import Awaitable, Callable from io import StringIO -from typing import Literal, ParamSpec, TypeVar +from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeVar -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from hatchet_sdk.clients.events import EventClient from hatchet_sdk.logger import logger from hatchet_sdk.runnables.contextvars import ( ctx_action_key, ctx_additional_metadata, + ctx_cancellation_token, ctx_step_run_id, ctx_task_retry_count, ctx_worker_id, @@ -24,6 +27,9 @@ LogLevel, ) +if TYPE_CHECKING: + pass + T = TypeVar("T") P = ParamSpec("P") @@ -48,10 +54,22 @@ class ContextVarToCopyDict(BaseModel): value: JSONSerializableMapping | None +class ContextVarToCopyToken(BaseModel): + """Special type for copying CancellationToken to threads.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + name: Literal["ctx_cancellation_token"] + value: Any # CancellationToken, but using Any to avoid Pydantic issues with custom classes + + class ContextVarToCopy(BaseModel): - var: ContextVarToCopyStr | ContextVarToCopyDict | ContextVarToCopyInt = Field( - discriminator="name" - ) + var: ( + ContextVarToCopyStr + | ContextVarToCopyDict + | ContextVarToCopyInt + | ContextVarToCopyToken + ) = Field(discriminator="name") def copy_context_vars( @@ -73,6 +91,8 @@ def copy_context_vars( ctx_worker_id.set(var.var.value) elif var.var.name == "ctx_additional_metadata": ctx_additional_metadata.set(var.var.value or {}) + elif var.var.name == "ctx_cancellation_token": + ctx_cancellation_token.set(var.var.value) else: raise ValueError(f"Unknown context variable name: {var.var.name}") diff --git a/sdks/python/hatchet_sdk/worker/slot_types.py b/sdks/python/hatchet_sdk/worker/slot_types.py new file mode 100644 index 0000000000..70702f7ee6 --- /dev/null +++ b/sdks/python/hatchet_sdk/worker/slot_types.py @@ -0,0 +1,6 @@ +from enum import Enum + + +class SlotType(str, Enum): + DEFAULT = "default" + DURABLE = "durable" diff --git a/sdks/python/hatchet_sdk/worker/worker.py b/sdks/python/hatchet_sdk/worker/worker.py index a207b29176..1334d401ef 100644 --- a/sdks/python/hatchet_sdk/worker/worker.py +++ b/sdks/python/hatchet_sdk/worker/worker.py @@ -65,8 +65,7 @@ def __init__( self, name: str, config: ClientConfig, - slots: int, - durable_slots: int, + slot_config: dict[str, int], labels: dict[str, str | int] | None = None, debug: bool = False, owned_loop: bool = True, @@ -76,15 +75,15 @@ def __init__( ) -> None: self.config = config self.name 
= self.config.apply_namespace(name) - self.slots = slots - self.durable_slots = durable_slots + self.slot_config = slot_config + self._slots = slot_config.get("default", 0) + self._durable_slots = slot_config.get("durable", 0) self.debug = debug self.labels = labels or {} self.handle_kill = handle_kill self.owned_loop = owned_loop self.action_registry: dict[str, Task[Any, Any]] = {} - self.durable_action_registry: dict[str, Task[Any, Any]] = {} self.killing: bool = False self._status: WorkerStatus = WorkerStatus.INITIALIZED @@ -95,15 +94,13 @@ def __init__( self.action_listener_health_check: asyncio.Task[None] self.action_runner: WorkerActionRunLoopManager | None = None - self.durable_action_runner: WorkerActionRunLoopManager | None = None self.ctx = multiprocessing.get_context("spawn") self.action_queue: Queue[Action | STOP_LOOP_TYPE] = self.ctx.Queue() self.event_queue: Queue[ActionEvent] = self.ctx.Queue() - - self.durable_action_queue: Queue[Action | STOP_LOOP_TYPE] = self.ctx.Queue() - self.durable_event_queue: Queue[ActionEvent] = self.ctx.Queue() + self.durable_action_queue: Queue[Action | STOP_LOOP_TYPE] | None = None + self.durable_event_queue: Queue[ActionEvent] | None = None self.loop: asyncio.AbstractEventLoop | None = None @@ -111,9 +108,6 @@ def __init__( self._setup_signal_handlers() - self.has_any_durable = False - self.has_any_non_durable = False - self.lifespan = lifespan self.lifespan_stack: AsyncExitStack | None = None self._lifespan_cleanup_complete: asyncio.Event | None = None @@ -142,12 +136,7 @@ def register_workflow(self, workflow: BaseWorkflow[Any]) -> None: for step in workflow.tasks: action_name = workflow._create_action_name(step) - if step.is_durable: - self.has_any_durable = True - self.durable_action_registry[action_name] = step - else: - self.has_any_non_durable = True - self.action_registry[action_name] = step + self.action_registry[action_name] = step def register_workflows(self, workflows: list[BaseWorkflow[Any]]) -> None: for workflow in workflows: @@ -157,6 +146,24 @@ def register_workflows(self, workflows: list[BaseWorkflow[Any]]) -> None: def status(self) -> WorkerStatus: return self._status + @property + def slots(self) -> int: + warn( + "Worker.slots is deprecated; use slot_config['default'] instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._slots + + @property + def durable_slots(self) -> int: + warn( + "Worker.durable_slots is deprecated; use slot_config['durable'] instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._durable_slots + def _setup_loop(self) -> None: try: asyncio.get_running_loop() @@ -172,7 +179,7 @@ def _setup_loop(self) -> None: asyncio.set_event_loop(self.loop) def start(self, options: WorkerStartOptions = WorkerStartOptions()) -> None: - if not (self.action_registry or self.durable_action_registry): + if not self.action_registry: raise ValueError( "no actions registered, register workflows before starting worker" ) @@ -206,10 +213,7 @@ async def _aio_start(self) -> None: self._status = WorkerStatus.STARTING - if ( - len(self.action_registry.keys()) == 0 - and len(self.durable_action_registry.keys()) == 0 - ): + if len(self.action_registry.keys()) == 0: raise ValueError( "no actions registered, register workflows or actions before starting worker" ) @@ -227,34 +231,13 @@ async def _aio_start(self) -> None: # Healthcheck server is started inside the spawned action-listener process # (non-durable preferred) to avoid being affected by the main worker loop. 
healthcheck_port = self.config.healthcheck.port - enable_health_server_non_durable = ( - self.config.healthcheck.enabled and self.has_any_non_durable - ) - enable_health_server_durable = ( - self.config.healthcheck.enabled - and (not self.has_any_non_durable) - and self.has_any_durable - ) + enable_health_server = self.config.healthcheck.enabled - if self.has_any_non_durable: - self.action_listener_process = self._start_action_listener( - is_durable=False, - enable_health_server=enable_health_server_non_durable, - healthcheck_port=healthcheck_port, - ) - self.action_runner = self._run_action_runner( - is_durable=False, lifespan_context=lifespan_context - ) - - if self.has_any_durable: - self.durable_action_listener_process = self._start_action_listener( - is_durable=True, - enable_health_server=enable_health_server_durable, - healthcheck_port=healthcheck_port, - ) - self.durable_action_runner = self._run_action_runner( - is_durable=True, lifespan_context=lifespan_context - ) + self.action_listener_process = self._start_action_listener( + enable_health_server=enable_health_server, + healthcheck_port=healthcheck_port, + ) + self.action_runner = self._run_action_runner(lifespan_context=lifespan_context) if self.loop: self._lifespan_cleanup_complete = asyncio.Event() @@ -272,17 +255,17 @@ async def _aio_start(self) -> None: self._lifespan_cleanup_complete.set() def _run_action_runner( - self, is_durable: bool, lifespan_context: Any | None + self, lifespan_context: Any | None ) -> WorkerActionRunLoopManager: # Retrieve the shared queue if self.loop: return WorkerActionRunLoopManager( - self.name + ("_durable" if is_durable else ""), - self.durable_action_registry if is_durable else self.action_registry, - self.durable_slots if is_durable else self.slots, + self.name, + self.action_registry, + sum(self.slot_config.values()), self.config, - self.durable_action_queue if is_durable else self.action_queue, - self.durable_event_queue if is_durable else self.event_queue, + self.action_queue, + self.event_queue, self.loop, self.handle_kill, self.client.debug, @@ -320,7 +303,6 @@ async def _cleanup_lifespan(self) -> None: def _start_action_listener( self, - is_durable: bool, *, enable_health_server: bool = False, healthcheck_port: int = 8001, @@ -329,16 +311,12 @@ def _start_action_listener( process = self.ctx.Process( target=worker_action_listener_process, args=( - self.name + ("_durable" if is_durable else ""), - ( - list(self.durable_action_registry.keys()) - if is_durable - else list(self.action_registry.keys()) - ), - self.durable_slots if is_durable else self.slots, + self.name, + list(self.action_registry.keys()), + self.slot_config, self.config, - self.durable_action_queue if is_durable else self.action_queue, - self.durable_event_queue if is_durable else self.event_queue, + self.action_queue, + self.event_queue, self.handle_kill, self.client.debug, self.labels, @@ -358,12 +336,7 @@ async def _check_listener_health(self) -> None: while not self.killing: if ( not self.action_listener_process - and not self.durable_action_listener_process - ) or ( - self.action_listener_process - and self.durable_action_listener_process - and not self.action_listener_process.is_alive() - and not self.durable_action_listener_process.is_alive() + or not self.action_listener_process.is_alive() ): logger.debug("child action listener process killed...") self._status = WorkerStatus.UNHEALTHY @@ -416,7 +389,7 @@ def _handle_force_quit_signal(self, signum: int, frame: FrameType | None) -> Non 
self.loop.create_task(self._exit_forcefully()) def _close_queues(self) -> None: - queues: list[Queue[Any]] = [ + queues: list[Queue[Any] | None] = [ self.action_queue, self.event_queue, self.durable_action_queue, @@ -424,10 +397,12 @@ def _close_queues(self) -> None: ] for queue in queues: + if queue is None: + continue try: queue.cancel_join_thread() queue.close() - except Exception: # noqa: PERF203 + except Exception: continue def _terminate_processes(self) -> None: @@ -455,9 +430,6 @@ async def _close(self) -> None: if self.action_runner is not None: self.action_runner.cleanup() - if self.durable_action_runner is not None: - self.durable_action_runner.cleanup() - await self.action_listener_health_check self._close_queues() @@ -474,11 +446,13 @@ async def exit_gracefully(self) -> None: await self.action_runner.wait_for_tasks() await self.action_runner.exit_gracefully() - if self.durable_action_runner: - await self.durable_action_runner.wait_for_tasks() - await self.durable_action_runner.exit_gracefully() + if self.action_listener_process and self.action_listener_process.is_alive(): + self.action_listener_process.kill() - self._terminate_processes() + try: + await self._cleanup_lifespan() + except LifespanSetupError: + logger.exception("lifespan cleanup failed") await self._close() diff --git a/sdks/python/hatchet_sdk/workflow_run.py b/sdks/python/hatchet_sdk/workflow_run.py index 5760eef8f9..e1d0c3cc25 100644 --- a/sdks/python/hatchet_sdk/workflow_run.py +++ b/sdks/python/hatchet_sdk/workflow_run.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import time from typing import TYPE_CHECKING, Any @@ -6,9 +8,17 @@ RunEventListenerClient, ) from hatchet_sdk.clients.listeners.workflow_listener import PooledWorkflowRunListener -from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup, TaskRunError +from hatchet_sdk.exceptions import ( + CancellationReason, + CancelledError, + FailedTaskRunExceptionGroup, + TaskRunError, +) +from hatchet_sdk.logger import logger +from hatchet_sdk.utils.cancellation import await_with_cancellation if TYPE_CHECKING: + from hatchet_sdk.cancellation import CancellationToken from hatchet_sdk.clients.admin import AdminClient @@ -18,7 +28,7 @@ def __init__( workflow_run_id: str, workflow_run_listener: PooledWorkflowRunListener, workflow_run_event_listener: RunEventListenerClient, - admin_client: "AdminClient", + admin_client: AdminClient, ): self.workflow_run_id = workflow_run_id self.workflow_run_listener = workflow_run_listener @@ -31,7 +41,25 @@ def __str__(self) -> str: def stream(self) -> RunEventListener: return self.workflow_run_event_listener.stream(self.workflow_run_id) - async def aio_result(self) -> dict[str, Any]: + async def aio_result( + self, cancellation_token: CancellationToken | None = None + ) -> dict[str, Any]: + """ + Asynchronously wait for the workflow run to complete and return the result. + + :param cancellation_token: Optional cancellation token to abort the wait. + :return: A dictionary mapping task names to their outputs. 
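+ :raises asyncio.CancelledError: If the cancellation token is triggered while waiting.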
+ """ + logger.debug( + f"WorkflowRunRef.aio_result: waiting for {self.workflow_run_id}, " + f"token={cancellation_token is not None}" + ) + + if cancellation_token: + return await await_with_cancellation( + self.workflow_run_listener.aio_result(self.workflow_run_id), + cancellation_token, + ) return await self.workflow_run_listener.aio_result(self.workflow_run_id) def _safely_get_action_name(self, action_id: str | None) -> str | None: @@ -43,12 +71,42 @@ def _safely_get_action_name(self, action_id: str | None) -> str | None: except IndexError: return None - def result(self) -> dict[str, Any]: + def result( + self, cancellation_token: CancellationToken | None = None + ) -> dict[str, Any]: + """ + Synchronously wait for the workflow run to complete and return the result. + + This method polls the API for the workflow run status. If a cancellation token + is provided, the polling will be interrupted when cancellation is triggered. + + :param cancellation_token: Optional cancellation token to abort the wait. + :return: A dictionary mapping task names to their outputs. + :raises CancelledError: If the cancellation token is triggered. + :raises FailedTaskRunExceptionGroup: If the workflow run fails. + :raises ValueError: If the workflow run is not found. + """ from hatchet_sdk.clients.admin import RunStatus + logger.debug( + f"WorkflowRunRef.result: waiting for {self.workflow_run_id}, " + f"token={cancellation_token is not None}" + ) + retries = 0 while True: + # Check cancellation at start of each iteration + if cancellation_token and cancellation_token.is_cancelled: + logger.debug( + f"WorkflowRunRef.result: cancellation detected for {self.workflow_run_id}, " + f"reason={CancellationReason.PARENT_CANCELLED.value}" + ) + raise CancelledError( + "Operation cancelled by cancellation token", + reason=CancellationReason.PARENT_CANCELLED, + ) + try: details = self.admin_client.get_details(self.workflow_run_id) except Exception as e: @@ -59,14 +117,42 @@ def result(self) -> dict[str, Any]: f"Workflow run {self.workflow_run_id} not found" ) from e - time.sleep(1) + # Use interruptible sleep via token.wait() + if cancellation_token: + if cancellation_token.wait(timeout=1.0): + logger.debug( + f"WorkflowRunRef.result: cancellation during retry sleep for {self.workflow_run_id}, " + f"reason={CancellationReason.PARENT_CANCELLED.value}" + ) + raise CancelledError( + "Operation cancelled by cancellation token", + reason=CancellationReason.PARENT_CANCELLED, + ) from None + else: + time.sleep(1) continue + logger.debug( + f"WorkflowRunRef.result: {self.workflow_run_id} status={details.status}" + ) + if ( details.status in [RunStatus.QUEUED, RunStatus.RUNNING] or details.done is False ): - time.sleep(1) + # Use interruptible sleep via token.wait() + if cancellation_token: + if cancellation_token.wait(timeout=1.0): + logger.debug( + f"WorkflowRunRef.result: cancellation during poll sleep for {self.workflow_run_id}, " + f"reason={CancellationReason.PARENT_CANCELLED.value}" + ) + raise CancelledError( + "Operation cancelled by cancellation token", + reason=CancellationReason.PARENT_CANCELLED, + ) + else: + time.sleep(1) continue if details.status == RunStatus.FAILED: @@ -80,6 +166,9 @@ def result(self) -> dict[str, Any]: ) if details.status == RunStatus.COMPLETED: + logger.debug( + f"WorkflowRunRef.result: {self.workflow_run_id} completed successfully" + ) return { readable_id: run.output for readable_id, run in details.task_runs.items() diff --git a/sdks/python/lint.sh b/sdks/python/lint.sh index 
dba5c69df6..28242cc424 100755 --- a/sdks/python/lint.sh +++ b/sdks/python/lint.sh @@ -8,8 +8,8 @@ poetry run ruff check . --fix echo "Formatting with black" poetry run black . --color -echo "\nFormatting with isort" -poetry run isort . +# echo "\nFormatting with isort" +# poetry run isort . echo "\nType checking with mypy" poetry run mypy --config-file=pyproject.toml diff --git a/sdks/python/pyproject.toml b/sdks/python/pyproject.toml index d748865eeb..8e7ecda5b0 100644 --- a/sdks/python/pyproject.toml +++ b/sdks/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hatchet-sdk" -version = "1.22.15" +version = "1.23.0" description = "This is the official Python SDK for Hatchet, a distributed, fault-tolerant task queue. The SDK allows you to easily integrate Hatchet's task scheduling and workflow orchestration capabilities into your Python applications." authors = [ "Alexander Belanger ", diff --git a/sdks/python/tests/test_cancellation.py b/sdks/python/tests/test_cancellation.py new file mode 100644 index 0000000000..ede216d03e --- /dev/null +++ b/sdks/python/tests/test_cancellation.py @@ -0,0 +1,428 @@ +"""Unit tests for CancellationToken and cancellation utilities.""" + +import asyncio +import threading +import time + +import pytest + +from hatchet_sdk.cancellation import CancellationToken +from hatchet_sdk.exceptions import CancellationReason, CancelledError +from hatchet_sdk.runnables.contextvars import ctx_cancellation_token +from hatchet_sdk.utils.cancellation import await_with_cancellation + + +# CancellationToken + + +def test_initial_state() -> None: + """Token should start in non-cancelled state.""" + token = CancellationToken() + assert token.is_cancelled is False + + +def test_cancel_sets_flag() -> None: + """cancel() should set is_cancelled to True.""" + token = CancellationToken() + token.cancel() + assert token.is_cancelled is True + + +def test_cancel_sets_reason() -> None: + """cancel() should set the reason.""" + token = CancellationToken() + token.cancel(CancellationReason.USER_REQUESTED) + assert token.reason == CancellationReason.USER_REQUESTED + + +def test_cancel_default_reason_is_unknown() -> None: + """cancel() without explicit reason should default to UNKNOWN.""" + token = CancellationToken() + token.cancel() + assert token.reason == CancellationReason.UNKNOWN + + +def test_reason_is_none_before_cancel() -> None: + """reason should be None before cancellation.""" + token = CancellationToken() + assert token.reason is None + + +def test_cancel_idempotent() -> None: + """Multiple calls to cancel() should be safe.""" + token = CancellationToken() + token.cancel() + token.cancel() # Should not raise + assert token.is_cancelled is True + + +def test_cancel_idempotent_preserves_reason() -> None: + """Multiple calls to cancel() should preserve the original reason.""" + token = CancellationToken() + token.cancel(CancellationReason.USER_REQUESTED) + token.cancel(CancellationReason.TIMEOUT) # Second call should be ignored + assert token.reason == CancellationReason.USER_REQUESTED + + +def test_sync_wait_returns_true_when_cancelled() -> None: + """wait() should return True immediately if already cancelled.""" + token = CancellationToken() + token.cancel() + result = token.wait(timeout=0.1) + assert result is True + + +def test_sync_wait_timeout_returns_false() -> None: + """wait() should return False when timeout expires without cancellation.""" + token = CancellationToken() + start = time.monotonic() + result = token.wait(timeout=0.1) + elapsed = time.monotonic() - 
start + assert result is False + assert elapsed >= 0.1 + + +def test_sync_wait_interrupted_by_cancel() -> None: + """wait() should return True when cancelled during wait.""" + token = CancellationToken() + + def cancel_after_delay() -> None: + time.sleep(0.1) + token.cancel() + + thread = threading.Thread(target=cancel_after_delay) + thread.start() + + start = time.monotonic() + result = token.wait(timeout=1.0) + elapsed = time.monotonic() - start + + thread.join() + + assert result is True + assert elapsed < 0.5 # Should be much faster than timeout + + +@pytest.mark.asyncio +async def test_aio_wait_returns_when_cancelled() -> None: + """aio_wait() should return when cancelled.""" + token = CancellationToken() + + async def cancel_after_delay() -> None: + await asyncio.sleep(0.1) + token.cancel() + + asyncio.create_task(cancel_after_delay()) + + start = time.monotonic() + await token.aio_wait() + elapsed = time.monotonic() - start + + assert elapsed < 0.5 # Should be fast + + +def test_register_child() -> None: + """register_child() should add run IDs to the list.""" + token = CancellationToken() + token.register_child("run-1") + token.register_child("run-2") + + children = token.get_child_run_ids() + assert children == ["run-1", "run-2"] + + +def test_get_child_run_ids_returns_copy() -> None: + """get_child_run_ids() should return a copy, not the internal list.""" + token = CancellationToken() + token.register_child("run-1") + + children = token.get_child_run_ids() + children.append("run-2") # Modify the copy + + assert token.get_child_run_ids() == ["run-1"] # Original unchanged + + +def test_callback_invoked_on_cancel() -> None: + """Callbacks should be invoked when cancel() is called.""" + token = CancellationToken() + called = [] + + def callback() -> None: + called.append(True) + + token.add_callback(callback) + token.cancel() + + assert called == [True] + + +def test_callback_invoked_immediately_if_already_cancelled() -> None: + """Callbacks added after cancellation should be invoked immediately.""" + token = CancellationToken() + token.cancel() + + called = [] + + def callback() -> None: + called.append(True) + + token.add_callback(callback) + + assert called == [True] + + +def test_multiple_callbacks() -> None: + """Multiple callbacks should all be invoked.""" + token = CancellationToken() + results: list[int] = [] + + token.add_callback(lambda: results.append(1)) + token.add_callback(lambda: results.append(2)) + token.add_callback(lambda: results.append(3)) + + token.cancel() + + assert results == [1, 2, 3] + + +def test_repr() -> None: + """__repr__ should provide useful debugging info.""" + token = CancellationToken() + token.register_child("run-1") + + repr_str = repr(token) + assert "cancelled=False" in repr_str + assert "children=1" in repr_str + + +# await_with_cancellation + + +@pytest.mark.asyncio +async def test_no_token_awaits_directly() -> None: + """Without a token, coroutine should be awaited directly.""" + + async def simple_coro() -> str: + return "result" + + result = await await_with_cancellation(simple_coro(), None) + assert result == "result" + + +@pytest.mark.asyncio +async def test_token_not_cancelled_returns_result() -> None: + """With a non-cancelled token, should return coroutine result.""" + token = CancellationToken() + + async def simple_coro() -> str: + await asyncio.sleep(0.01) + return "result" + + result = await await_with_cancellation(simple_coro(), token) + assert result == "result" + + +@pytest.mark.asyncio +async def 
test_already_cancelled_raises_immediately() -> None: + """With an already-cancelled token, should raise immediately.""" + token = CancellationToken() + token.cancel() + + async def simple_coro() -> str: + await asyncio.sleep(10) # Would block if actually awaited + return "result" + + with pytest.raises(asyncio.CancelledError): + await await_with_cancellation(simple_coro(), token) + + +@pytest.mark.asyncio +async def test_cancellation_during_await_raises() -> None: + """Should raise CancelledError when token is cancelled during await.""" + token = CancellationToken() + + async def slow_coro() -> str: + await asyncio.sleep(10) + return "result" + + async def cancel_after_delay() -> None: + await asyncio.sleep(0.1) + token.cancel() + + asyncio.create_task(cancel_after_delay()) + + start = time.monotonic() + with pytest.raises(asyncio.CancelledError): + await await_with_cancellation(slow_coro(), token) + elapsed = time.monotonic() - start + + assert elapsed < 0.5 # Should be cancelled quickly + + +@pytest.mark.asyncio +async def test_cancel_callback_invoked() -> None: + """Cancel callback should be invoked on cancellation.""" + token = CancellationToken() + callback_called = [] + + async def cancel_callback() -> None: + callback_called.append(True) + + async def slow_coro() -> str: + await asyncio.sleep(10) + return "result" + + async def cancel_after_delay() -> None: + await asyncio.sleep(0.1) + token.cancel() + + asyncio.create_task(cancel_after_delay()) + + with pytest.raises(asyncio.CancelledError): + await await_with_cancellation( + slow_coro(), token, cancel_callback=cancel_callback + ) + + assert callback_called == [True] + + +@pytest.mark.asyncio +async def test_cancel_callback_not_invoked_on_success() -> None: + """Cancel callback should NOT be invoked when coroutine completes normally.""" + token = CancellationToken() + callback_called = [] + + async def cancel_callback() -> None: + callback_called.append(True) + + async def fast_coro() -> str: + await asyncio.sleep(0.01) + return "result" + + result = await await_with_cancellation( + fast_coro(), token, cancel_callback=cancel_callback + ) + + assert result == "result" + assert callback_called == [] + + +# CancellationReason + + +def test_all_reasons_exist() -> None: + """All expected cancellation reasons should exist.""" + assert CancellationReason.USER_REQUESTED.value == "user_requested" + assert CancellationReason.TIMEOUT.value == "timeout" + assert CancellationReason.PARENT_CANCELLED.value == "parent_cancelled" + assert CancellationReason.WORKFLOW_CANCELLED.value == "workflow_cancelled" + assert CancellationReason.UNKNOWN.value == "unknown" + + +def test_reasons_are_strings() -> None: + """Cancellation reason values should be strings.""" + for reason in CancellationReason: + assert isinstance(reason.value, str) + + +# CancelledError + + +def test_cancelled_error_is_base_exception() -> None: + """CancelledError should be a BaseException (not Exception).""" + err = CancelledError("test message") + assert isinstance(err, BaseException) + assert not isinstance(err, Exception) # Should NOT be caught by except Exception + assert str(err) == "test message" + + +def test_cancelled_error_not_caught_by_except_exception() -> None: + """CancelledError should NOT be caught by except Exception.""" + caught_by_exception = False + caught_by_cancelled_error = False + + try: + raise CancelledError("test") + except Exception: + caught_by_exception = True + except CancelledError: + caught_by_cancelled_error = True + + assert not caught_by_exception 
+ assert caught_by_cancelled_error + + +def test_cancelled_error_with_reason() -> None: + """CancelledError should accept and store a reason.""" + err = CancelledError("test message", reason=CancellationReason.TIMEOUT) + assert err.reason == CancellationReason.TIMEOUT + + +def test_cancelled_error_reason_defaults_to_none() -> None: + """CancelledError reason should default to None.""" + err = CancelledError("test message") + assert err.reason is None + + +def test_cancelled_error_message_property() -> None: + """CancelledError should have a message property.""" + err = CancelledError("test message") + assert err.message == "test message" + + +def test_cancelled_error_default_message() -> None: + """CancelledError should have a default message.""" + err = CancelledError() + assert err.message == "Operation cancelled" + + +def test_can_be_raised_and_caught() -> None: + """CancelledError should be raisable and catchable.""" + with pytest.raises(CancelledError) as exc_info: + raise CancelledError("Operation cancelled") + + assert "Operation cancelled" in str(exc_info.value) + + +def test_can_be_raised_with_reason() -> None: + """CancelledError should be raisable with a reason.""" + with pytest.raises(CancelledError) as exc_info: + raise CancelledError( + "Parent was cancelled", reason=CancellationReason.PARENT_CANCELLED + ) + + assert exc_info.value.reason == CancellationReason.PARENT_CANCELLED + + +# Context var propagation + + +def test_context_var_default_is_none() -> None: + """ctx_cancellation_token should default to None.""" + assert ctx_cancellation_token.get() is None + + +def test_context_var_can_be_set_and_retrieved() -> None: + """ctx_cancellation_token should be settable and retrievable.""" + token = CancellationToken() + ctx_cancellation_token.set(token) + try: + assert ctx_cancellation_token.get() is token + finally: + ctx_cancellation_token.set(None) + + +@pytest.mark.asyncio +async def test_context_var_propagates_in_async() -> None: + """ctx_cancellation_token should propagate in async context.""" + token = CancellationToken() + ctx_cancellation_token.set(token) + + async def check_token() -> CancellationToken | None: + return ctx_cancellation_token.get() + + try: + retrieved = await check_token() + assert retrieved is token + finally: + ctx_cancellation_token.set(None) diff --git a/sdks/python/tests/test_client.py b/sdks/python/tests/test_client.py index 0021a46af2..a1d1665c2d 100644 --- a/sdks/python/tests/test_client.py +++ b/sdks/python/tests/test_client.py @@ -1,7 +1,11 @@ import os +from typing import Any, cast from unittest import mock from hatchet_sdk.config import ClientConfig +from hatchet_sdk.runnables.workflow import BaseWorkflow +from hatchet_sdk.utils.slots import resolve_worker_slot_config +from hatchet_sdk.worker.slot_types import SlotType def test_client_initialization_from_defaults() -> None: @@ -33,3 +37,54 @@ def test_client_server_url_override_when_env_var() -> None: assert config.server_url == "foobaz:8080" assert config.host_port == ClientConfig().host_port + + +def test_resolve_slot_config_no_durable() -> None: + resolved = resolve_worker_slot_config( + slot_config=None, + slots=None, + durable_slots=None, + workflows=None, + ) + + assert resolved == {SlotType.DEFAULT: 100} + + +def test_resolve_slot_config_only_durable() -> None: + class DummyTask: + is_durable = True + slot_requests: dict[str, int] = {"durable": 1} + + class DummyWorkflow: + tasks = [DummyTask()] + + resolved = resolve_worker_slot_config( + slot_config=None, + slots=None, + 
durable_slots=None, + workflows=cast(list[BaseWorkflow[Any]], [DummyWorkflow()]), + ) + + assert resolved == {SlotType.DURABLE: 1000} + + +def test_resolve_slot_config_mixed() -> None: + class DefaultTask: + is_durable = False + slot_requests: dict[str, int] = {"default": 1} + + class DurableTask: + is_durable = True + slot_requests: dict[str, int] = {"durable": 1} + + class DummyWorkflow: + tasks = [DefaultTask(), DurableTask()] + + resolved = resolve_worker_slot_config( + slot_config=None, + slots=None, + durable_slots=None, + workflows=cast(list[BaseWorkflow[Any]], [DummyWorkflow()]), + ) + + assert resolved == {SlotType.DEFAULT: 100, SlotType.DURABLE: 1000} diff --git a/sdks/typescript/.eslintrc.json b/sdks/typescript/.eslintrc.json index b3ede4100c..78a7273b2c 100644 --- a/sdks/typescript/.eslintrc.json +++ b/sdks/typescript/.eslintrc.json @@ -33,5 +33,21 @@ "class-methods-use-this": "off", "no-await-in-loop": "off", "no-restricted-syntax": "off" - } + }, + "overrides": [ + { + "files": [ + "src/**/examples/**/*.{ts,tsx,js}", + "src/examples/**/*.{ts,tsx,js}", + "tests/**/*.{ts,tsx,js}", + "src/**/*.test.{ts,tsx,js}", + "src/**/*.e2e.{ts,tsx,js}", + "src/**/__tests__/**/*.{ts,tsx,js}" + ], + "rules": { + "@typescript-eslint/no-unused-vars": "off", + "no-console": "off" + } + } + ] } diff --git a/sdks/typescript/CHANGELOG.md b/sdks/typescript/CHANGELOG.md index 8cf8f5c617..14a7abb431 100644 --- a/sdks/typescript/CHANGELOG.md +++ b/sdks/typescript/CHANGELOG.md @@ -5,6 +5,12 @@ All notable changes to Hatchet's TypeScript SDK will be documented in this chang The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.11.0] - 2026-02-02 + +### Internal Only + +- Updated gRPC/REST contract field names to lowerCamelCase for consistency across SDKs. 
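The Python half of this patch introduces two new public surfaces: per-slot-type worker capacity and cooperative cancellation. Two short sketches of the behaviour pinned down by the tests above (illustrative only; they use just the names the tests import, and the timings are arbitrary):

```python
# Slot-resolution defaults, as exercised by the test_resolve_slot_config_*
# cases above: no explicit configuration yields 100 default slots, and a
# worker registering only durable tasks gets 1000 durable slots.
from hatchet_sdk.utils.slots import resolve_worker_slot_config
from hatchet_sdk.worker.slot_types import SlotType

resolved = resolve_worker_slot_config(
    slot_config=None, slots=None, durable_slots=None, workflows=None
)
assert resolved == {SlotType.DEFAULT: 100}
```

```python
# Cancelling a blocking wait from another thread. WorkflowRunRef.result()
# uses the same primitive internally: it polls in 1-second slices via
# token.wait(), so a cancellation interrupts the sleep promptly.
import threading

from hatchet_sdk.cancellation import CancellationToken
from hatchet_sdk.exceptions import CancellationReason, CancelledError

token = CancellationToken()
threading.Timer(0.2, lambda: token.cancel(CancellationReason.USER_REQUESTED)).start()

try:
    if token.wait(timeout=5.0):  # returns True as soon as the token is cancelled
        raise CancelledError("Operation cancelled", reason=token.reason)
except CancelledError as exc:
    assert exc.reason == CancellationReason.USER_REQUESTED
```

Because CancelledError deliberately derives from BaseException rather than Exception, a blanket `except Exception` in task code will not swallow a cancellation; catch CancelledError explicitly where cleanup is needed.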
+ ## [1.10.8] - 2026-02-02 ### Changed diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 9de0714daf..c09897118f 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@hatchet-dev/typescript-sdk", - "version": "1.10.8", + "version": "1.11.0", "description": "Background task orchestration & visibility for developers", "types": "dist/index.d.ts", "files": [ @@ -22,7 +22,7 @@ "dump-version": "node -e \"console.log('export const HATCHET_VERSION = \\'' + require('./package.json').version + '\\';');\" > src/version.ts", "tsc:build": "pnpm run dump-version && tsc && resolve-tspaths", "test:unit": "jest --testMatch='**/*.test.ts'", - "test:e2e": "jest --testMatch='**/*.e2e.ts'", + "test:e2e": "jest --testMatch='**/*.e2e.ts' --silent", "test:unit:watch": "jest --testMatch='**/*.test.ts' --watch", "generate": "pnpm run '/generate-.*/'", "generate-api": "npx swagger-typescript-api@13.2.7 generate -p ../../bin/oas/openapi.yaml -o ./src/clients/rest/generated -n hatchet.ts --modular --axios", diff --git a/sdks/typescript/src/clients/admin/admin-client.test.ts b/sdks/typescript/src/clients/admin/admin-client.test.ts index 9b0257b19f..077ef50198 100644 --- a/sdks/typescript/src/clients/admin/admin-client.test.ts +++ b/sdks/typescript/src/clients/admin/admin-client.test.ts @@ -136,4 +136,33 @@ describe('AdminClient', () => { }); }); }); + + describe('runWorkflows', () => { + it('should map parentTaskRunExternalId to parentTaskRunExternalId', async () => { + const bulkSpy = jest.spyOn(client.client, 'bulkTriggerWorkflow').mockResolvedValue({ + workflowRunIds: ['run-1'], + }); + + await client.runWorkflows([ + { + workflowName: 'workflowName', + input: { hello: 'world' }, + options: { + parentId: 'parent-wf-run', + parentTaskRunExternalId: 'parent-task-run', + }, + }, + ]); + + expect(bulkSpy).toHaveBeenCalledWith( + expect.objectContaining({ + workflows: [ + expect.objectContaining({ + parentTaskRunExternalId: 'parent-task-run', + }), + ], + }) + ); + }); + }); }); diff --git a/sdks/typescript/src/clients/admin/admin-client.ts b/sdks/typescript/src/clients/admin/admin-client.ts index d325429e1a..74c34c7027 100644 --- a/sdks/typescript/src/clients/admin/admin-client.ts +++ b/sdks/typescript/src/clients/admin/admin-client.ts @@ -39,6 +39,16 @@ export type WorkflowRun = { input: T; options?: { parentId?: string | undefined; + /** + * (optional) the parent task external run id. + * + * This is the field understood by the workflows gRPC API. + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -150,6 +160,14 @@ export class AdminClient { input: T, options?: { parentId?: string | undefined; + /** + * (optional) the parent task external run id. + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -172,6 +190,16 @@ export class AdminClient { input: Q, options?: { parentId?: string | undefined; + /** + * (optional) the parent task external run id. + * + * This is the field understood by the workflows gRPC API. 
+ */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -185,14 +213,17 @@ export class AdminClient { try { const inputStr = JSON.stringify(input); + const opts = options ?? {}; + const { additionalMetadata, parentStepRunId, parentTaskRunExternalId, ...rest } = opts; + const resp = this.client.triggerWorkflow({ name: computedName, input: inputStr, - ...options, - additionalMetadata: options?.additionalMetadata - ? JSON.stringify(options?.additionalMetadata) - : undefined, - priority: options?.priority, + ...rest, + // API expects `parentTaskRunExternalId`; accept the old name as an alias. + parentTaskRunExternalId: parentTaskRunExternalId ?? parentStepRunId, + additionalMetadata: additionalMetadata ? JSON.stringify(additionalMetadata) : undefined, + priority: opts.priority, }); return new WorkflowRunRef
<T>(resp, this.listenerClient, this.workflows, options?.parentId); @@ -212,6 +243,16 @@ export class AdminClient { input: Q; options?: { parentId?: string | undefined; + /** + * (optional) the parent task external run id. + * + * This is the field understood by the workflows gRPC API. + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -226,13 +267,15 @@ const computedName = applyNamespace(workflowName, this.config.namespace); const inputStr = JSON.stringify(input); + const opts = options ?? {}; + const { additionalMetadata, parentStepRunId, parentTaskRunExternalId, ...rest } = opts; + return { name: computedName, input: inputStr, - ...options, - additionalMetadata: options?.additionalMetadata - ? JSON.stringify(options.additionalMetadata) - : undefined, + ...rest, + parentTaskRunExternalId: parentTaskRunExternalId ?? parentStepRunId, + additionalMetadata: additionalMetadata ? JSON.stringify(additionalMetadata) : undefined, }; }); diff --git a/sdks/typescript/src/clients/dispatcher/action-listener.test.ts b/sdks/typescript/src/clients/dispatcher/action-listener.test.ts index e69759fb5c..98703be927 100644 --- a/sdks/typescript/src/clients/dispatcher/action-listener.test.ts +++ b/sdks/typescript/src/clients/dispatcher/action-listener.test.ts @@ -17,14 +17,14 @@ const mockAssignedActions: AssignActionMock[] = [ jobId: 'job1', jobName: 'Job One', jobRunId: 'run1', - stepId: 'step1', - stepRunId: 'runStep1', + taskId: 'step1', + taskRunExternalId: 'runStep1', actionId: 'action1', actionType: ActionType.START_STEP_RUN, actionPayload: 'payload1', workflowRunId: 'workflowRun1', getGroupKeyRunId: 'groupKeyRun1', - stepName: 'step1', + taskName: 'step1', retryCount: 0, priority: 1, }, @@ -98,8 +98,8 @@ describe('ActionListener', () => { // jobId: 'job1', // jobName: 'Job One', // jobRunId: 'run1', - // stepId: 'step1', - // stepRunId: 'runStep1', + // taskId: 'step1', + // taskRunExternalId: 'runStep1', // actionId: 'action1', // actionType: ActionType.START_STEP_RUN, // actionPayload: 'payload1', diff --git a/sdks/typescript/src/clients/dispatcher/action-listener.ts b/sdks/typescript/src/clients/dispatcher/action-listener.ts index 9a9270ab22..1d32adfa04 100644 --- a/sdks/typescript/src/clients/dispatcher/action-listener.ts +++ b/sdks/typescript/src/clients/dispatcher/action-listener.ts @@ -23,7 +23,12 @@ enum ListenStrategy { LISTEN_STRATEGY_V2 = 2, } -export interface Action extends AssignedAction {} +export type Action = AssignedAction & { + /** @deprecated use taskRunExternalId */ + stepRunId?: string; + /** @deprecated use taskId */ + stepId?: string; +}; export type ActionKey = string; @@ -34,7 +39,7 @@ export function createActionKey(action: Action): ActionKey { case ActionType.CANCEL_STEP_RUN: case ActionType.START_STEP_RUN: case ActionType.UNRECOGNIZED: - return `${action.stepRunId}/${action.retryCount}`; + return `${action.taskRunExternalId}/${action.retryCount}`; default: // eslint-disable-next-line no-case-declarations const exhaustivenessCheck: never = action.actionType; @@ -84,6 +89,8 @@ export class ActionListener { for await (const assignedAction of listenClient) { const action: Action = { ...assignedAction, + stepRunId: assignedAction.taskRunExternalId, + stepId: assignedAction.taskId, }; yield action; diff --git
a/sdks/typescript/src/clients/dispatcher/dispatcher-client.test.ts b/sdks/typescript/src/clients/dispatcher/dispatcher-client.test.ts index a1d223b28f..3467e70fbf 100644 --- a/sdks/typescript/src/clients/dispatcher/dispatcher-client.test.ts +++ b/sdks/typescript/src/clients/dispatcher/dispatcher-client.test.ts @@ -95,8 +95,8 @@ describe('DispatcherClient', () => { eventTimestamp: new Date(), jobId: 'a', jobRunId: 'b', - stepId: 'c', - stepRunId: 'd', + taskId: 'c', + taskRunExternalId: 'd', }); expect(clientSpy).toHaveBeenCalledWith({ @@ -106,8 +106,8 @@ eventPayload: '{"foo":"bar"}', jobId: 'a', jobRunId: 'b', - stepId: 'c', - stepRunId: 'd', + taskId: 'c', + taskRunExternalId: 'd', eventTimestamp: expect.any(Object), }); }); diff --git a/sdks/typescript/src/clients/dispatcher/dispatcher-client.ts b/sdks/typescript/src/clients/dispatcher/dispatcher-client.ts index 8549b379da..2fbd0a7556 100644 --- a/sdks/typescript/src/clients/dispatcher/dispatcher-client.ts +++ b/sdks/typescript/src/clients/dispatcher/dispatcher-client.ts @@ -16,6 +16,7 @@ import { Logger } from '@hatchet/util/logger'; import { retrier } from '@hatchet/util/retrier'; import { HATCHET_VERSION } from '@hatchet/version'; +import { SlotConfig, SlotType } from '@hatchet/v1/slot-types'; import { ActionListener } from './action-listener'; export type WorkerLabels = Record<string, string | number>; @@ -24,10 +25,23 @@ interface GetActionListenerOptions { workerName: string; services: string[]; actions: string[]; + slotConfig?: SlotConfig; + /** @deprecated use slotConfig */ + slots?: number; + /** @deprecated use slotConfig */ + durableSlots?: number; + /** @deprecated use slots */ maxRuns?: number; - labels: Record<string, string | number>; + labels: WorkerLabels; } +type StepActionEventInput = StepActionEvent & { + /** @deprecated use taskId */ + stepId?: string; + /** @deprecated use taskRunExternalId */ + stepRunId?: string; +}; + export class DispatcherClient { config: ClientConfig; client: PbDispatcherClient; @@ -50,8 +64,22 @@ async getActionListener(options: GetActionListenerOptions) { // Register the worker + const slotConfig = + options.slotConfig || + (options.slots || options.durableSlots || options.maxRuns ? { ...(options.slots || options.maxRuns ? { [SlotType.Default]: options.slots || options.maxRuns || 0 } : {}), ...(options.durableSlots ? { [SlotType.Durable]: options.durableSlots } : {}), } : undefined); + const registration = await this.client.register({ - ...options, + workerName: options.workerName, + services: options.services, + actions: options.actions, + slotConfig, labels: options.labels ? mapLabels(options.labels) : undefined, runtimeInfo: this.getRuntimeInfo(), }); @@ -59,9 +87,16 @@ return new ActionListener(this, registration.workerId); } - async sendStepActionEvent(in_: StepActionEvent) { + async sendStepActionEvent(in_: StepActionEventInput) { + const { taskId, taskRunExternalId, stepId, stepRunId, ...rest } = in_; + const event: StepActionEvent = { + ...rest, + // Map the deprecated step* aliases onto the new task* fields. + taskId: taskId ?? stepId ?? '', + taskRunExternalId: taskRunExternalId ?? stepRunId ??
'', + }; + try { - return await retrier(async () => this.client.sendStepActionEvent(in_), this.logger); + return await retrier(async () => this.client.sendStepActionEvent(event), this.logger); } catch (e: any) { throw new HatchetError(e.message); } @@ -81,10 +116,10 @@ export class DispatcherClient { }); } - async refreshTimeout(incrementTimeoutBy: string, stepRunId: string) { + async refreshTimeout(incrementTimeoutBy: string, taskRunExternalId: string) { try { return this.client.refreshTimeout({ - stepRunId, + taskRunExternalId, incrementTimeoutBy, }); } catch (e: any) { diff --git a/sdks/typescript/src/clients/dispatcher/heartbeat/heartbeat-worker.ts b/sdks/typescript/src/clients/dispatcher/heartbeat/heartbeat-worker.ts index bbffcea29c..20d0f97678 100644 --- a/sdks/typescript/src/clients/dispatcher/heartbeat/heartbeat-worker.ts +++ b/sdks/typescript/src/clients/dispatcher/heartbeat/heartbeat-worker.ts @@ -66,7 +66,7 @@ class HeartbeatWorker { if (actualInterval > HEARTBEAT_INTERVAL * 1.2) { const message = `Heartbeat interval delay (${actualInterval}ms >> ${HEARTBEAT_INTERVAL}ms)`; - this.logger.warn(message); + this.logger.debug(message); postMessage({ type: 'warn', message, diff --git a/sdks/typescript/src/clients/event/event-client.ts b/sdks/typescript/src/clients/event/event-client.ts index 6ea972db7e..fec44ac8d7 100644 --- a/sdks/typescript/src/clients/event/event-client.ts +++ b/sdks/typescript/src/clients/event/event-client.ts @@ -116,7 +116,7 @@ export class EventClient { } async putLog( - stepRunId: string, + taskRunExternalId: string, log: string, level?: LogLevel, taskRetryCount?: number, @@ -132,7 +132,7 @@ export class EventClient { // fire and forget the log await this.client .putLog({ - stepRunId, + taskRunExternalId, createdAt, message: log, level: level || LogLevel.INFO, @@ -145,7 +145,7 @@ export class EventClient { }); } - async putStream(stepRunId: string, data: string | Uint8Array, index: number | undefined) { + async putStream(taskRunExternalId: string, data: string | Uint8Array, index: number | undefined) { const createdAt = new Date(); let dataBytes: Uint8Array; @@ -160,7 +160,7 @@ export class EventClient { retrier( async () => this.client.putStreamEvent({ - stepRunId, + taskRunExternalId, createdAt, message: dataBytes, eventIndex: index, diff --git a/sdks/typescript/src/clients/hatchet-client/client-config.test.ts b/sdks/typescript/src/clients/hatchet-client/client-config.test.ts new file mode 100644 index 0000000000..3c0a3851ac --- /dev/null +++ b/sdks/typescript/src/clients/hatchet-client/client-config.test.ts @@ -0,0 +1,56 @@ +import { ClientConfigSchema } from './client-config'; + +function baseConfig() { + return { + token: 'token', + tls_config: {}, + host_port: 'localhost:7070', + api_url: 'http://localhost:8080', + tenant_id: 'tenant', + }; +} + +describe('ClientConfigSchema cancellation timing', () => { + it('applies defaults (milliseconds)', () => { + const cfg = ClientConfigSchema.parse(baseConfig()); + expect(cfg.cancellation_grace_period).toBe(1000); + expect(cfg.cancellation_warning_threshold).toBe(300); + }); + + it('accepts integer milliseconds', () => { + const cfg = ClientConfigSchema.parse({ + ...baseConfig(), + cancellation_grace_period: 2500, + cancellation_warning_threshold: 400, + }); + expect(cfg.cancellation_grace_period).toBe(2500); + expect(cfg.cancellation_warning_threshold).toBe(400); + }); + + it('rejects invalid values', () => { + expect(() => + ClientConfigSchema.parse({ + ...baseConfig(), + cancellation_grace_period: -1, + }) + 
).toThrow(); expect(() => ClientConfigSchema.parse({ ...baseConfig(), cancellation_warning_threshold: 0.1, }) ).toThrow(); expect(() => ClientConfigSchema.parse({ ...baseConfig(), cancellation_warning_threshold: 'nope' as any, }) ).toThrow(); expect(() => ClientConfigSchema.parse({ ...baseConfig(), cancellation_grace_period: '7s' as any, }) ).toThrow(); }); }); diff --git a/sdks/typescript/src/clients/hatchet-client/client-config.ts b/sdks/typescript/src/clients/hatchet-client/client-config.ts index 48139772e3..7c282723b1 100644 --- a/sdks/typescript/src/clients/hatchet-client/client-config.ts +++ b/sdks/typescript/src/clients/hatchet-client/client-config.ts @@ -2,6 +2,9 @@ import { ChannelCredentials } from 'nice-grpc'; import { z } from 'zod'; import { Logger, LogLevel } from '@util/logger'; +// Cancellation timings are specified in integer milliseconds. +const DurationMsSchema = z.number().int().nonnegative().finite(); + const ClientTLSConfigSchema = z.object({ tls_strategy: z.enum(['tls', 'mtls', 'none']).optional(), cert_file: z.string().optional(), @@ -24,11 +27,25 @@ export const ClientConfigSchema = z.object({ log_level: z.enum(['OFF', 'DEBUG', 'INFO', 'WARN', 'ERROR']).optional(), tenant_id: z.string(), namespace: z.string().optional(), + cancellation_grace_period: DurationMsSchema.optional().default(1000), + cancellation_warning_threshold: DurationMsSchema.optional().default(300), }); export type LogConstructor = (context: string, logLevel?: LogLevel) => Logger; -export type ClientConfig = z.infer<typeof ClientConfigSchema> & { - credentials?: ChannelCredentials; -} & { logger: LogConstructor }; +type ClientConfigInferred = z.infer<typeof ClientConfigSchema>; + +// Backwards-compatible: allow callers to omit these (schema supplies defaults when parsed).
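+// (z.infer treats fields with .default() as required on the parsed output type, hence the optional compat shim below.)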
+type ClientConfigCancellationCompat = { + cancellation_grace_period?: ClientConfigInferred['cancellation_grace_period']; + cancellation_warning_threshold?: ClientConfigInferred['cancellation_warning_threshold']; +}; + +export type ClientConfig = Omit< + ClientConfigInferred, + 'cancellation_grace_period' | 'cancellation_warning_threshold' +> & + ClientConfigCancellationCompat & { + credentials?: ChannelCredentials; + } & { logger: LogConstructor }; export type ClientTLSConfig = z.infer<typeof ClientTLSConfigSchema>; diff --git a/sdks/typescript/src/clients/hatchet-client/hatchet-client.test.ts b/sdks/typescript/src/clients/hatchet-client/hatchet-client.test.ts index 8065e70982..36220d7dc5 100644 --- a/sdks/typescript/src/clients/hatchet-client/hatchet-client.test.ts +++ b/sdks/typescript/src/clients/hatchet-client/hatchet-client.test.ts @@ -143,20 +143,5 @@ describe('Client', () => { } ); }); - - describe('run', () => { - xit('should start a worker', () => { - const worker = hatchet.run('workflow1'); - expect(worker).toBeDefined(); - }); - }); - - describe('worker', () => { - it('should start a worker', () => { - const worker = hatchet.worker('workflow1'); - - expect(worker).toBeDefined(); - }); - }); }); }); diff --git a/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts b/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts index c2d88003e4..b3943d5767 100644 --- a/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts +++ b/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts @@ -4,8 +4,6 @@ import { EventClient } from '@clients/event/event-client'; import { DispatcherClient } from '@clients/dispatcher/dispatcher-client'; import { AdminClient } from '@clients/admin/admin-client'; import { ChannelCredentials, createClientFactory } from 'nice-grpc'; -import { Workflow as V0Workflow } from '@hatchet/workflow'; -import { V0Worker, WorkerOpts } from '@clients/worker'; import { AxiosRequestConfig } from 'axios'; import { Logger } from '@util/logger'; import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; @@ -129,52 +127,4 @@ export class LegacyHatchetClient { ): LegacyHatchetClient { return new LegacyHatchetClient(config, options, axiosConfig); } - - // @deprecated - async run(workflow: string | V0Workflow): Promise<V0Worker> { - this.logger.warn( - 'HatchetClient.run is deprecated and will be removed in a future release. Use HatchetClient.worker and Worker.start instead.' - ); - const worker = await this.worker(workflow); - worker.start(); - return worker; - } - - async worker( - workflow: string | V0Workflow, - opts?: Omit<WorkerOpts, 'name'> | number - ): Promise<V0Worker> { - const name = typeof workflow === 'string' ?
workflow : workflow.id; - - let options: WorkerOpts = { - name, - }; - - if (typeof opts === 'number') { - this.logger.warn( - '@deprecated maxRuns param is deprecated and will be removed in a future release in favor of WorkerOpts' - ); - options = { ...options, maxRuns: opts }; - } else { - options = { ...options, ...opts }; - } - - const worker = new V0Worker(this, options); - - if (typeof workflow !== 'string') { - await worker.registerWorkflow(workflow); - return worker; - } - - return worker; - } - - webhooks(workflows: Array) { - // TODO v1 workflows - const worker = new V0Worker(this, { - name: 'webhook-worker', - }); - - return worker.getHandler(workflows); - } } diff --git a/sdks/typescript/src/clients/hatchet-client/hatchet-logger.ts b/sdks/typescript/src/clients/hatchet-client/hatchet-logger.ts index 24198600a8..31481abca5 100644 --- a/sdks/typescript/src/clients/hatchet-client/hatchet-logger.ts +++ b/sdks/typescript/src/clients/hatchet-client/hatchet-logger.ts @@ -65,13 +65,14 @@ export class HatchetLogger implements Logger { } async warn(message: string, error?: Error): Promise { - await this.log('WARN', `${message} ${error}`, '93'); + await this.log('WARN', error ? `${message} ${error}` : message, '93'); } async error(message: string, error?: Error): Promise { - await this.log('ERROR', `${message} ${error}`, '91'); + await this.log('ERROR', error ? `${message} ${error}` : message, '91'); } + // eslint-disable-next-line @typescript-eslint/no-unused-vars util(key: UtilKeys, message: string, extra?: LogExtra): void | Promise { if (key === 'trace') { this.log('INFO', `trace: ${message}`, '35'); diff --git a/sdks/typescript/src/clients/listeners/durable-listener/durable-listener-client.ts b/sdks/typescript/src/clients/listeners/durable-listener/durable-listener-client.ts index 93f019443e..85a3b90d66 100644 --- a/sdks/typescript/src/clients/listeners/durable-listener/durable-listener-client.ts +++ b/sdks/typescript/src/clients/listeners/durable-listener/durable-listener-client.ts @@ -33,6 +33,16 @@ export class DurableListenerClient { return this.pooledListener.subscribe(request); } + result(request: { taskId: string; signalKey: string }, opts?: { signal?: AbortSignal }) { + if (!this.pooledListener) { + this.pooledListener = new DurableEventGrpcPooledListener(this, () => { + this.pooledListener = undefined; + }); + } + + return this.pooledListener.result(request, opts); + } + registerDurableEvent(request: { taskId: string; signalKey: string; diff --git a/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.test.ts b/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.test.ts new file mode 100644 index 0000000000..06e6792c64 --- /dev/null +++ b/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.test.ts @@ -0,0 +1,30 @@ +import { DurableEventStreamable } from './pooled-durable-listener-client'; + +const dummyListener: AsyncIterable = (async function* gen() { + // never yields +})(); + +describe('DurableEventStreamable.get cancellation', () => { + it('rejects with AbortError and runs cleanup when aborted', async () => { + const cleanup = jest.fn(); + const s = new DurableEventStreamable(dummyListener, 'task', 'key', 'sub-1', cleanup); + const ac = new AbortController(); + + const p = s.get({ signal: ac.signal }); + ac.abort(); + + await expect(p).rejects.toMatchObject({ name: 'AbortError' }); + expect(cleanup).toHaveBeenCalledTimes(1); + }); + + it('resolves on response and runs 
cleanup once', async () => { + const cleanup = jest.fn(); + const s = new DurableEventStreamable(dummyListener, 'task', 'key', 'sub-1', cleanup); + + const event: any = { taskId: 'task', signalKey: 'key', data: '{}' }; + setTimeout(() => s.responseEmitter.emit('response', event), 0); + + await expect(s.get()).resolves.toEqual(event); + expect(cleanup).toHaveBeenCalledTimes(1); + }); +}); diff --git a/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.ts b/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.ts index b29c60acfa..4ee5d34f0a 100644 --- a/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.ts +++ b/sdks/typescript/src/clients/listeners/durable-listener/pooled-durable-listener-client.ts @@ -1,5 +1,5 @@ // eslint-disable-next-line max-classes-per-file -import { EventEmitter, on } from 'events'; +import { EventEmitter, getMaxListeners, on, setMaxListeners } from 'events'; import { DurableEvent, ListenForDurableEventRequest, @@ -8,6 +8,7 @@ } from '@hatchet/protoc/v1/dispatcher'; import { isAbortError } from 'abort-controller-x'; import sleep from '@hatchet/util/sleep'; +import { createAbortError } from '@hatchet/util/abort-error'; import { DurableEventListenerConditions, SleepMatchCondition, @@ -19,18 +20,73 @@ export class DurableEventStreamable { listener: AsyncIterable<DurableEvent>; taskId: string; signalKey: string; + subscriptionId: string; + onCleanup: () => void; responseEmitter = new EventEmitter(); - constructor(listener: AsyncIterable<DurableEvent>, taskId: string, signalKey: string) { + constructor( + listener: AsyncIterable<DurableEvent>, + taskId: string, + signalKey: string, + subscriptionId: string, + onCleanup: () => void + ) { this.listener = listener; this.taskId = taskId; this.signalKey = signalKey; + this.subscriptionId = subscriptionId; + this.onCleanup = onCleanup; } - async get(): Promise<DurableEvent> { - return new Promise((resolve) => { - this.responseEmitter.once('response', resolve); + async get(opts?: { signal?: AbortSignal }): Promise<DurableEvent> { + const signal = opts?.signal; + + return new Promise<DurableEvent>((resolve, reject) => { + let cleanedUp = false; + + const cleanup = () => { + if (cleanedUp) return; + cleanedUp = true; + this.responseEmitter.removeListener('response', onResponse); + if (signal) { + signal.removeEventListener('abort', onAbort); + } + this.onCleanup(); + }; + + const onResponse = (event: DurableEvent) => { + cleanup(); + resolve(event); + }; + + const onAbort = () => { + cleanup(); + reject(createAbortError('Operation cancelled by AbortSignal')); + }; + + if (signal?.aborted) { + onAbort(); + return; + } + + this.responseEmitter.once('response', onResponse); + if (signal) { + /** + * Node defaults AbortSignal max listeners to 10, which is easy to exceed with + * legitimate high-concurrency waits (e.g. multiple concurrent `ctx.waitFor(...)` + * calls in the same task). + * + * If the signal is still at the default cap, bump it to a reasonable level + * to avoid noisy `MaxListenersExceededWarning` while still keeping protection + * against true leaks in unusual cases.
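+ * (A current max of 0 means "unlimited" in Node, so a zero cap is deliberately left untouched below.)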
+ */ + const max = getMaxListeners(signal); + if (max !== 0 && max < 50) { + setMaxListeners(50, signal); + } + signal.addEventListener('abort', onAbort, { once: true }); + } }); } } @@ -106,15 +162,7 @@ export class DurableEventGrpcPooledListener { const emitter = this.subscribers[subId]; if (emitter) { emitter.responseEmitter.emit('response', event); - delete this.subscribers[subId]; - - // Remove this subscription from the mapping - this.taskSignalKeyToSubscriptionIds[subscriptionKey] = - this.taskSignalKeyToSubscriptionIds[subscriptionKey].filter((id) => id !== subId); - - if (this.taskSignalKeyToSubscriptionIds[subscriptionKey].length === 0) { - delete this.taskSignalKeyToSubscriptionIds[subscriptionKey]; - } + this.cleanupSubscription(subId); } } } @@ -138,6 +186,28 @@ export class DurableEventGrpcPooledListener { } } + private cleanupSubscription(subscriptionId: string) { + const emitter = this.subscribers[subscriptionId]; + if (!emitter) { + return; + } + + const subscriptionKey = keyHelper(emitter.taskId, emitter.signalKey); + + delete this.subscribers[subscriptionId]; + + // Remove from the mapping + if (this.taskSignalKeyToSubscriptionIds[subscriptionKey]) { + this.taskSignalKeyToSubscriptionIds[subscriptionKey] = this.taskSignalKeyToSubscriptionIds[ + subscriptionKey + ].filter((id) => id !== subscriptionId); + + if (this.taskSignalKeyToSubscriptionIds[subscriptionKey].length === 0) { + delete this.taskSignalKeyToSubscriptionIds[subscriptionKey]; + } + } + } + subscribe(request: { taskId: string; signalKey: string }): DurableEventStreamable { const { taskId, signalKey } = request; @@ -145,7 +215,13 @@ export class DurableEventGrpcPooledListener { // eslint-disable-next-line no-plusplus const subscriptionId = (this.subscriptionCounter++).toString(); - const subscriber = new DurableEventStreamable(this.listener, taskId, signalKey); + const subscriber = new DurableEventStreamable( + this.listener, + taskId, + signalKey, + subscriptionId, + () => this.cleanupSubscription(subscriptionId) + ); this.subscribers[subscriptionId] = subscriber; @@ -159,9 +235,12 @@ export class DurableEventGrpcPooledListener { return subscriber; } - async result(request: { taskId: string; signalKey: string }): Promise { + async result( + request: { taskId: string; signalKey: string }, + opts?: { signal?: AbortSignal } + ): Promise { const subscriber = this.subscribe(request); - const event = await subscriber.get(); + const event = await subscriber.get({ signal: opts?.signal }); return event; } @@ -189,7 +268,7 @@ export class DurableEventGrpcPooledListener { const subscriptionEntries = Object.entries(this.taskSignalKeyToSubscriptionIds); this.client.logger.debug(`Replaying ${subscriptionEntries.length} requests...`); - for (const [key, _] of subscriptionEntries) { + for (const [key] of subscriptionEntries) { const [taskId, signalKey] = key.split('|'); this.requestEmitter.emit('subscribe', { taskId, signalKey }); } diff --git a/sdks/typescript/src/clients/listeners/run-listener/child-listener-client.ts b/sdks/typescript/src/clients/listeners/run-listener/child-listener-client.ts index e5d020d4c1..a104126e87 100644 --- a/sdks/typescript/src/clients/listeners/run-listener/child-listener-client.ts +++ b/sdks/typescript/src/clients/listeners/run-listener/child-listener-client.ts @@ -174,6 +174,7 @@ export class RunEventListener { this.eventEmitter.emit('event'); }); + // eslint-disable-next-line @typescript-eslint/no-unused-vars for await (const _ of on(this.eventEmitter, 'event')) { while (this.q.length > 0) 
{ const r = this.q.shift(); diff --git a/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.test.ts b/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.test.ts new file mode 100644 index 0000000000..f5106cddeb --- /dev/null +++ b/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.test.ts @@ -0,0 +1,16 @@ +import { Streamable } from './pooled-child-listener-client'; + +describe('RunGrpcPooledListener Streamable', () => { + it('rejects with AbortError and runs cleanup when aborted', async () => { + const onCleanup = jest.fn(); + // eslint-disable-next-line func-names, no-empty-function + const streamable = new Streamable((async function* () {})(), 'run-1', onCleanup); + + const ac = new AbortController(); + const p = streamable.get({ signal: ac.signal }); + ac.abort(); + + await expect(p).rejects.toMatchObject({ name: 'AbortError' }); + expect(onCleanup).toHaveBeenCalled(); + }); +}); diff --git a/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.ts b/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.ts index 8d9f348e66..a6fcae544e 100644 --- a/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.ts +++ b/sdks/typescript/src/clients/listeners/run-listener/pooled-child-listener-client.ts @@ -1,5 +1,5 @@ // eslint-disable-next-line max-classes-per-file -import { EventEmitter, on } from 'events'; +import { EventEmitter, getMaxListeners, on, setMaxListeners } from 'events'; import { WorkflowRunEvent, SubscribeToWorkflowRunsRequest, @@ -7,25 +7,85 @@ } from '@hatchet/protoc/dispatcher'; import { isAbortError } from 'abort-controller-x'; import sleep from '@hatchet/util/sleep'; +import { createAbortError } from '@hatchet/util/abort-error'; import { RunListenerClient } from './child-listener-client'; export class Streamable { listener: AsyncIterable<WorkflowRunEvent>; id: string; + onCleanup: () => void; + private cleanedUp = false; responseEmitter = new EventEmitter(); - constructor(listener: AsyncIterable<WorkflowRunEvent>, id: string) { + constructor(listener: AsyncIterable<WorkflowRunEvent>, id: string, onCleanup: () => void) { this.listener = listener; this.id = id; + this.onCleanup = onCleanup; } - async *stream(): AsyncGenerator<WorkflowRunEvent> { + private cleanupOnce() { + if (this.cleanedUp) return; + this.cleanedUp = true; + this.onCleanup(); + } + + async get(opts?: { signal?: AbortSignal }): Promise<WorkflowRunEvent> { + const signal = opts?.signal; + + return new Promise<WorkflowRunEvent>((resolve, reject) => { + const cleanupListeners = () => { + this.responseEmitter.removeListener('response', onResponse); + if (signal) { + signal.removeEventListener('abort', onAbort); + } + }; + + const onResponse = (event: WorkflowRunEvent) => { + cleanupListeners(); + resolve(event); + }; + + const onAbort = () => { + cleanupListeners(); + this.cleanupOnce(); + reject(createAbortError('Operation cancelled by AbortSignal')); + }; + + if (signal?.aborted) { + onAbort(); + return; + } + + this.responseEmitter.once('response', onResponse); + if (signal) { + /** + * Node defaults AbortSignal max listeners to 10, which is easy to exceed with + * legitimate high-concurrency waits (e.g. a cancelled parent task fanning out + * to many child `.result()` waits). + * + * If the signal is still at the default cap, bump it to a reasonable level + * to avoid noisy `MaxListenersExceededWarning` while still keeping protection + * against true leaks in unusual cases.
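+ * (A current max of 0 means "unlimited" in Node, so a zero cap is deliberately left untouched below.)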
+ */ + const max = getMaxListeners(signal); + if (max !== 0 && max < 50) { + setMaxListeners(50, signal); + } + signal.addEventListener('abort', onAbort, { once: true }); + } + }); + } + + async *stream(opts?: { signal?: AbortSignal }): AsyncGenerator { while (true) { - const req: WorkflowRunEvent = await new Promise((resolve) => { - this.responseEmitter.once('response', resolve); - }); - yield req; + const event = await this.get(opts); + yield event; + + if (event.eventType === WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_FINISHED) { + this.cleanupOnce(); + break; + } } } } @@ -99,7 +159,13 @@ export class RunGrpcPooledListener { subscribe(request: SubscribeToWorkflowRunsRequest) { if (!this.listener) throw new Error('listener not initialized'); - this.subscribers[request.workflowRunId] = new Streamable(this.listener, request.workflowRunId); + this.subscribers[request.workflowRunId] = new Streamable( + this.listener, + request.workflowRunId, + () => { + delete this.subscribers[request.workflowRunId]; + } + ); this.requestEmitter.emit('subscribe', request); return this.subscribers[request.workflowRunId]; } diff --git a/sdks/typescript/src/clients/worker/handler.ts b/sdks/typescript/src/clients/worker/handler.ts deleted file mode 100644 index 820748ac5f..0000000000 --- a/sdks/typescript/src/clients/worker/handler.ts +++ /dev/null @@ -1,244 +0,0 @@ -import HatchetError from '@util/errors/hatchet-error'; -import { createHmac } from 'crypto'; -import { IncomingMessage, ServerResponse } from 'http'; -import { Workflow } from '@hatchet/workflow'; -import { V0Worker } from './worker'; - -export interface HandlerOpts { - secret: string; -} - -const okMessage = 'The Hatchet webhooks endpoint is up and running!'; - -export class WebhookHandler { - // eslint-disable-next-line no-useless-constructor - constructor( - private worker: V0Worker, - private workflows: Workflow[] - // eslint-disable-next-line no-empty-function - ) {} - - /** - * Handles a request with a provided body, secret, and signature. - * - * @param {string | undefined} body - The body of the request. - * @param {string | undefined} secret - The secret used for signature verification. - * @param {string | string[] | undefined | null} signature - The signature of the request. - * - * @throws {HatchetError} - If no signature is provided or the signature is not a string. - * @throws {HatchetError} - If no secret is provided. - * @throws {HatchetError} - If no body is provided. 
- */ - async handle( - body: string | undefined, - signature: string | string[] | undefined | null, - secret: string | undefined - ) { - this.checkSignature(body, signature, secret); - - const action = JSON.parse(body!); - - await this.worker.handleAction(action); - } - - private checkSignature( - body: string | undefined, - signature: string | string[] | undefined | null, - secret: string | undefined - ) { - if (!signature || typeof signature !== 'string') { - throw new HatchetError('No signature provided'); - } - if (!secret) { - throw new HatchetError('No secret provided'); - } - if (!body) { - throw new HatchetError('No body provided'); - } - - // verify hmac signature - const actualSignature = createHmac('sha256', secret).update(body).digest('hex'); - if (actualSignature !== signature) { - throw new HatchetError(`Invalid signature, expected ${actualSignature}, got ${signature}`); - } - } - - private async getHealthcheckResponse( - body: string | undefined, - signature: string | string[] | undefined | null, - secret: string | undefined - ) { - this.checkSignature(body, signature, secret); - - for (const workflow of this.workflows) { - await this.worker.registerWorkflow(workflow); - } - - return { - actions: Object.keys(this.worker.action_registry), - }; - } - - /** - * Express Handler - * - * This method is an asynchronous function that returns an Express middleware handler. - * The handler function is responsible for handling incoming requests and invoking the - * corresponding logic based on the provided secret. - */ - expressHandler({ secret }: HandlerOpts) { - return (req: any, res: any) => { - if (req.method === 'GET') { - res.status(200).send(okMessage); - return; - } - - if (req.method === 'PUT') { - let { body } = req; - - if (typeof body !== 'string') { - body = JSON.stringify(body); - } - - this.getHealthcheckResponse(body, req.headers['x-hatchet-signature'], secret) - .then((resp) => { - res.status(200).json(resp); - }) - .catch((err) => { - res.status(500); - this.worker.logger.error(`Error handling request: ${err.message}`); - }); - return; - } - - if (req.method !== 'POST') { - res.status(405).json({ error: 'Method not allowed' }); - return; - } - - let action = req.body; - - if (typeof action !== 'string') { - action = JSON.stringify(action); - } - - this.handle(action, req.headers['x-hatchet-signature'], secret) - .then(() => { - res.status(200); - }) - .catch((err) => { - res.status(500); - this.worker.logger.error(`Error handling request: ${err.message}`); - }); - }; - } - - /** - * A method that returns an HTTP request handler. 
- */ - httpHandler({ secret }: HandlerOpts) { - return (req: IncomingMessage, res: ServerResponse) => { - const handle = async () => { - if (req.method === 'GET') { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.write(okMessage); - res.end(); - return; - } - - const body = await this.getBody(req); - - if (req.method === 'PUT') { - const resp = await this.getHealthcheckResponse( - body, - req.headers['x-hatchet-signature'], - secret - ); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.write(JSON.stringify(resp)); - res.end(); - return; - } - - if (req.method !== 'POST') { - res.writeHead(405, { 'Content-Type': 'application/json' }); - res.write(JSON.stringify({ error: 'Method not allowed' })); - res.end(); - return; - } - - await this.handle(body, req.headers['x-hatchet-signature'], secret); - - res.writeHead(200, 'OK'); - res.end(); - }; - - handle().catch((e) => { - this.worker.logger.error(`Error handling request: ${e.message}`); - res.writeHead(500, 'Internal server error'); - res.end(); - }); - }; - } - - /** - * A method that returns a Next.js pages router request handler. - */ - nextJSPagesHandler({ secret }: HandlerOpts) { - return async (req: any, res: any) => { - if (req.method === 'GET') { - return res.status(200).send(okMessage); - } - const sig = req.headers['x-hatchet-signature']; - const body = JSON.stringify(req.body); - if (req.method === 'PUT') { - const resp = await this.getHealthcheckResponse(body, sig, secret); - return res.status(200).send(JSON.stringify(resp)); - } - if (req.method !== 'POST') { - return res.status(405).send('Method not allowed'); - } - await this.handle(body, sig, secret); - return res.status(200).send('ok'); - }; - } - - /** - * A method that returns a Next.js request handler. 
-   */
-  nextJSHandler({ secret }: HandlerOpts) {
-    const ok = async () => {
-      return new Response(okMessage, { status: 200 });
-    };
-    const f = async (req: Request) => {
-      const sig = req.headers.get('x-hatchet-signature');
-      const body = await req.text();
-      if (req.method === 'PUT') {
-        const resp = await this.getHealthcheckResponse(body, sig, secret);
-        return new Response(JSON.stringify(resp), { status: 200 });
-      }
-      if (req.method !== 'POST') {
-        return new Response('Method not allowed', { status: 405 });
-      }
-      await this.handle(body, sig, secret);
-      return new Response('ok', { status: 200 });
-    };
-    return {
-      GET: ok,
-      POST: f,
-      PUT: f,
-    };
-  }
-
-  private getBody(req: IncomingMessage): Promise<string> {
-    return new Promise((resolve) => {
-      let body = '';
-      req.on('data', (chunk) => {
-        body += chunk;
-      });
-      req.on('end', () => {
-        resolve(body);
-      });
-    });
-  }
-}
diff --git a/sdks/typescript/src/clients/worker/index.ts b/sdks/typescript/src/clients/worker/index.ts
deleted file mode 100644
index ea3d5b0ad0..0000000000
--- a/sdks/typescript/src/clients/worker/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from './worker';
diff --git a/sdks/typescript/src/clients/worker/worker.test.ts b/sdks/typescript/src/clients/worker/worker.test.ts
deleted file mode 100644
index 5686389588..0000000000
--- a/sdks/typescript/src/clients/worker/worker.test.ts
+++ /dev/null
@@ -1,335 +0,0 @@
-import { LegacyHatchetClient } from '@clients/hatchet-client';
-import { StepActionEventType, ActionType, AssignedAction } from '@hatchet/protoc/dispatcher';
-import { ActionListener } from '@clients/dispatcher/action-listener';
-import { never } from 'zod';
-import sleep from '@util/sleep';
-import { ChannelCredentials } from 'nice-grpc';
-import { V0Worker } from './worker';
-
-type AssignActionMock = AssignedAction | Error;
-
-const mockStart: AssignActionMock = {
-  tenantId: 'TENANT_ID',
-  jobId: 'job1',
-  jobName: 'Job One',
-  jobRunId: 'run1',
-  stepId: 'step1',
-  stepRunId: 'runStep1',
-  actionId: 'action1',
-  actionType: ActionType.START_STEP_RUN,
-  actionPayload: JSON.stringify('{"input": {"data": 1}}'),
-  workflowRunId: 'workflowRun1',
-  getGroupKeyRunId: 'groupKeyRun1',
-  stepName: 'step1',
-  retryCount: 0,
-  priority: 1,
-};
-
-const mockCancel: AssignActionMock = {
-  ...mockStart,
-  actionType: ActionType.CANCEL_STEP_RUN,
-};
-
-describe('Worker', () => {
-  let hatchet: LegacyHatchetClient;
-
-  beforeEach(() => {
-    hatchet = new LegacyHatchetClient(
-      {
-        token:
-          'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef',
-        log_level: 'OFF',
-        host_port: 'HOST_PORT',
-        tls_config: {
-          cert_file: 'TLS_CERT_FILE',
-          key_file: 'TLS_KEY_FILE',
-          ca_file: 'TLS_ROOT_CA_FILE',
-          server_name: 'TLS_SERVER_NAME',
-        },
-      },
-      {
-        credentials: ChannelCredentials.createInsecure(),
-      }
-    );
-  });
-
-  describe('registerWorkflow', () => {
-    it('should update the registry', async () => {
-      const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' });
-      const putWorkflowSpy = jest.spyOn(worker.client.admin, 'putWorkflow').mockResolvedValue({
-        id: 'workflow1',
-        version: 'v0.1.0',
-        order: 1,
-        workflowId: 'workflow1',
-        scheduledWorkflows: [],
-        createdAt: undefined,
-        updatedAt: undefined,
-      });
-
-      const workflow = {
-        id: 'workflow1',
-        description: 'test',
-        on: {
-          event: 'user:create',
-        },
-        steps: [
-          {
-            name: 'step1',
-            run: (ctx: any) => {
-              return {
test: 'test' }; - }, - }, - ], - }; - - await worker.registerWorkflow(workflow); - - expect(putWorkflowSpy).toHaveBeenCalledTimes(1); - - expect(worker.action_registry).toEqual({ - [`workflow1:step1`]: workflow.steps[0].run, - }); - }); - }); - - describe('handle_start_step_run', () => { - it('should start a step run', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - const getActionEventSpy = jest.spyOn(worker, 'getStepActionEvent'); - - const sendActionEventSpy = jest - .spyOn(worker.client.dispatcher, 'sendStepActionEvent') - .mockResolvedValue({ - tenantId: 'TENANT_ID', - workerId: 'WORKER_ID', - }); - - const startSpy = jest.fn().mockReturnValue({ data: 4 }); - - worker.action_registry = { - [mockStart.actionId]: startSpy, - }; - - worker.handleStartStepRun(mockStart); - await sleep(100); - - expect(startSpy).toHaveBeenCalledTimes(1); - - expect(getActionEventSpy).toHaveBeenNthCalledWith( - 2, - expect.anything(), - StepActionEventType.STEP_EVENT_TYPE_COMPLETED, - false, - { data: 4 }, - 0 - ); - expect(worker.futures[mockStart.stepRunId]).toBeUndefined(); - expect(sendActionEventSpy).toHaveBeenCalledTimes(2); - }); - - it('should fail gracefully', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - const getActionEventSpy = jest.spyOn(worker, 'getStepActionEvent'); - - const sendActionEventSpy = jest - .spyOn(worker.client.dispatcher, 'sendStepActionEvent') - .mockResolvedValue({ - tenantId: 'TENANT_ID', - workerId: 'WORKER_ID', - }); - - const startSpy = jest.fn().mockRejectedValue(new Error('ERROR')); - - worker.action_registry = { - [mockStart.actionId]: startSpy, - }; - - worker.handleStartStepRun(mockStart); - await sleep(100); - - expect(startSpy).toHaveBeenCalledTimes(1); - expect(getActionEventSpy).toHaveBeenNthCalledWith( - 2, - expect.anything(), - StepActionEventType.STEP_EVENT_TYPE_FAILED, - false, - expect.anything(), - 0 - ); - expect(worker.futures[mockStart.stepRunId]).toBeUndefined(); - expect(sendActionEventSpy).toHaveBeenCalledTimes(2); - }); - }); - - describe('handle_cancel_step_run', () => {}); - - describe('exit_gracefully', () => { - xit('should call exit_gracefully on SIGTERM', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - // the spy is not working and the test is killing the test process - const exitSpy = jest.spyOn(worker, 'exitGracefully').mockImplementationOnce(() => { - throw new Error('Simulated error'); - }); - - process.emit('SIGTERM', 'SIGTERM'); - expect(exitSpy).toHaveBeenCalledTimes(1); - }); - - xit('should call exit_gracefully on SIGINT', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - // This is killing the process (as it should) fix the spy at some point - const exitSpy = jest.spyOn(worker, 'exitGracefully').mockResolvedValue(); - - process.emit('SIGINT', 'SIGINT'); - expect(exitSpy).toHaveBeenCalledTimes(1); - }); - - xit('should unregister the listener and exit', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - jest.spyOn(process, 'exit').mockImplementation((number) => { - throw new Error(`EXIT ${number}`); - }); // This is killing the process (as it should) fix the spy at some point - - const mockActionListener = new ActionListener(hatchet.dispatcher, 'WORKER_ID'); - - mockActionListener.unregister = jest.fn().mockResolvedValue(never()); - worker.listener = mockActionListener; - - expect(async () => { - await worker.exitGracefully(true); - }).toThrow('EXIT 0'); - 
expect(mockActionListener.unregister).toHaveBeenCalledTimes(1); - }); - - it('should exit the process if handle_kill is true', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - const exitSpy = jest.spyOn(process, 'exit').mockReturnValue(undefined as never); - await worker.exitGracefully(true); - expect(exitSpy).toHaveBeenCalledTimes(1); - }); - }); - - describe('start', () => { - xit('should get actions and start runs', async () => { - const worker = new V0Worker(hatchet, { name: 'WORKER_NAME' }); - - const startSpy = jest.spyOn(worker, 'handleStartStepRun').mockResolvedValue(); - const cancelSpy = jest.spyOn(worker, 'handleCancelStepRun').mockResolvedValue(); - - const mockActionListener = new ActionListener(hatchet.dispatcher, 'WORKER_ID'); - - const getActionListenerSpy = jest - .spyOn(worker.client.dispatcher, 'getActionListener') - .mockResolvedValue(mockActionListener); - - await worker.start(); - - expect(getActionListenerSpy).toHaveBeenCalledTimes(1); - expect(startSpy).toHaveBeenCalledTimes(2); - expect(cancelSpy).toHaveBeenCalledTimes(0); - }); - - // it('should get actions and cancel runs', async () => { - // const worker = new Worker(hatchet, { name: 'WORKER_NAME' }); - - // const startSpy = jest.spyOn(worker, 'handleStartStepRun').mockReturnValue(); - // const cancelSpy = jest.spyOn(worker, 'handleCancelStepRun').mockReturnValue(); - - // const mockActionListener = new ActionListener( - // hatchet.dispatcher, - // mockListener([mockStart, mockCancel, new ServerError(Status.CANCELLED, 'CANCELLED')]), - // 'WORKER_ID' - // ); - - // const getActionListenerSpy = jest - // .spyOn(worker.client.dispatcher, 'getActionListener') - // .mockResolvedValue(mockActionListener); - - // await worker.start(); - - // expect(getActionListenerSpy).toHaveBeenCalledTimes(1); - // expect(startSpy).toHaveBeenCalledTimes(1); - // expect(cancelSpy).toHaveBeenCalledTimes(1); - // }); - - // it('should retry 5 times to start a worker then throw an error', async () => { - // const worker = new Worker(hatchet, { name: 'WORKER_NAME' }); - - // const startSpy = jest.spyOn(worker, 'handleStartStepRun').mockReturnValue(); - // const cancelSpy = jest.spyOn(worker, 'handleCancelStepRun').mockReturnValue(); - - // const mockActionListner = new ActionListener( - // hatchet.dispatcher, - // mockListener([mockStart, mockCancel, new ServerError(Status.CANCELLED, 'CANCELLED')]), - // 'WORKER_ID' - // ); - - // const getActionListenerSpy = jest - // .spyOn(worker.client.dispatcher, 'getActionListener') - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }) - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }) - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }) - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }) - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }) - // .mockImplementationOnce(() => { - // throw new Error('Simulated error'); - // }); - - // await worker.start(); - - // expect(getActionListenerSpy).toHaveBeenCalledTimes(5); - // expect(startSpy).toHaveBeenCalledTimes(0); - // expect(cancelSpy).toHaveBeenCalledTimes(0); - // }); - - // it('should successfully run after retrying < 5 times', async () => { - // const worker = new Worker(hatchet, { name: 'WORKER_NAME' }); - - // const startSpy = jest.spyOn(worker, 'handleStartStepRun').mockReturnValue(); - // const cancelSpy = jest.spyOn(worker, 
'handleCancelStepRun').mockReturnValue();
-
-    //   const mockActionLister = new ActionListener(
-    //     hatchet.dispatcher,
-    //     mockListener([mockStart, mockCancel, new ServerError(Status.CANCELLED, 'CANCELLED')]),
-    //     'WORKER_ID'
-    //   );
-
-    //   const getActionListenerSpy = jest
-    //     .spyOn(worker.client.dispatcher, 'getActionListener')
-    //     .mockImplementationOnce(() => {
-    //       throw new Error('Simulated error');
-    //     })
-    //     .mockImplementationOnce(() => {
-    //       throw new Error('Simulated error');
-    //     })
-    //     .mockImplementationOnce(() => {
-    //       throw new Error('Simulated error');
-    //     })
-    //     .mockImplementationOnce(() => {
-    //       throw new Error('Simulated error');
-    //     })
-    //     .mockImplementationOnce(async () => mockActionLister);
-
-    //   await worker.start();
-
-    //   expect(getActionListenerSpy).toHaveBeenCalledTimes(5);
-    //   expect(startSpy).toHaveBeenCalledTimes(1);
-    //   expect(cancelSpy).toHaveBeenCalledTimes(1);
-    // });
-  });
-});
diff --git a/sdks/typescript/src/clients/worker/worker.ts b/sdks/typescript/src/clients/worker/worker.ts
deleted file mode 100644
index 688d5c0846..0000000000
--- a/sdks/typescript/src/clients/worker/worker.ts
+++ /dev/null
@@ -1,701 +0,0 @@
-/* eslint-disable no-underscore-dangle */
-/* eslint-disable no-nested-ternary */
-import { LegacyHatchetClient } from '@clients/hatchet-client';
-import HatchetError from '@util/errors/hatchet-error';
-import {
-  Action,
-  ActionKey,
-  ActionListener,
-  createActionKey,
-} from '@clients/dispatcher/action-listener';
-import {
-  StepActionEvent,
-  StepActionEventType,
-  ActionType,
-  GroupKeyActionEvent,
-  GroupKeyActionEventType,
-  actionTypeFromJSON,
-} from '@hatchet/protoc/dispatcher';
-import HatchetPromise from '@util/hatchet-promise/hatchet-promise';
-import { Workflow } from '@hatchet/workflow';
-import {
-  ConcurrencyLimitStrategy,
-  CreateWorkflowJobOpts,
-  CreateWorkflowStepOpts,
-  DesiredWorkerLabels,
-  WorkflowConcurrencyOpts,
-} from '@hatchet/protoc/workflows';
-import { Logger } from '@hatchet/util/logger';
-import { WebhookHandler } from '@clients/worker/handler';
-import { WebhookWorkerCreateRequest } from '@clients/rest/generated/data-contracts';
-import { WorkflowDefinition } from '@hatchet/v1';
-import { CreateWorkflowTaskOpts, NonRetryableError } from '@hatchet/v1/task';
-import { applyNamespace } from '@hatchet/util/apply-namespace';
-import { V0Context, CreateStep, V0DurableContext, mapRateLimit, StepRunFunction } from '../../step';
-import { WorkerLabels } from '../dispatcher/dispatcher-client';
-
-export type ActionRegistry = Record<Action['actionId'], Function>;
-
-export interface WorkerOpts {
-  name: string;
-  handleKill?: boolean;
-  maxRuns?: number;
-  labels?: WorkerLabels;
-}
-
-export class V0Worker {
-  client: LegacyHatchetClient;
-  name: string;
-  workerId: string | undefined;
-  killing: boolean;
-  handle_kill: boolean;
-
-  action_registry: ActionRegistry;
-  workflow_registry: Array<Workflow> = [];
-  listener: ActionListener | undefined;
-  futures: Record<ActionKey, HatchetPromise<any>> = {};
-  contexts: Record<ActionKey, V0Context<any, any>> = {};
-  maxRuns?: number;
-
-  logger: Logger;
-
-  registeredWorkflowPromises: Array<Promise<any>> = [];
-
-  labels: WorkerLabels = {};
-
-  constructor(
-    client: LegacyHatchetClient,
-    options: {
-      name: string;
-      handleKill?: boolean;
-      maxRuns?: number;
-      labels?: WorkerLabels;
-    }
-  ) {
-    this.client = client;
-    this.name = applyNamespace(options.name, this.client.config.namespace);
-    this.action_registry = {};
-    this.maxRuns = options.maxRuns;
-
-    this.labels = options.labels || {};
-
-    process.on('SIGTERM', () => this.exitGracefully(true));
-
process.on('SIGINT', () => this.exitGracefully(true)); - - this.killing = false; - this.handle_kill = options.handleKill === undefined ? true : options.handleKill; - - this.logger = client.config.logger(`Worker/${this.name}`, this.client.config.log_level); - } - - private registerActions(workflow: Workflow) { - const newActions = workflow.steps.reduce((acc, step) => { - acc[`${workflow.id}:${step.name.toLowerCase()}`] = step.run; - return acc; - }, {}); - - const onFailureAction = workflow.onFailure - ? { - [`${workflow.id}-on-failure:${workflow.onFailure.name}`]: workflow.onFailure.run, - } - : {}; - - this.action_registry = { - ...this.action_registry, - ...newActions, - ...onFailureAction, - }; - - this.action_registry = - workflow.concurrency?.name && workflow.concurrency.key - ? { - ...this.action_registry, - [`${workflow.id}:${workflow.concurrency.name.toLowerCase()}`]: workflow.concurrency.key, - } - : { - ...this.action_registry, - }; - } - - getHandler(workflows: Workflow[]) { - // TODO v1 - for (const workflow of workflows) { - const wf: Workflow = { - ...workflow, - id: applyNamespace(workflow.id, this.client.config.namespace), - }; - - this.registerActions(wf); - } - - return new WebhookHandler(this, workflows); - } - - async registerWebhook(webhook: WebhookWorkerCreateRequest) { - return this.client.admin.registerWebhook({ ...webhook }); - } - - /** - * @deprecated use registerWorkflow instead - */ - async register_workflow(initWorkflow: Workflow) { - return this.registerWorkflow(initWorkflow); - } - - async registerWorkflow(initWorkflow: Workflow) { - const workflow: Workflow = { - ...initWorkflow, - id: applyNamespace(initWorkflow.id, this.client.config.namespace).toLowerCase(), - }; - try { - if (workflow.concurrency?.key && workflow.concurrency.expression) { - throw new HatchetError( - 'Cannot have both key function and expression in workflow concurrency configuration' - ); - } - - const concurrency: WorkflowConcurrencyOpts | undefined = - workflow.concurrency?.name || workflow.concurrency?.expression - ? { - action: !workflow.concurrency.expression - ? `${workflow.id}:${workflow.concurrency.name}` - : undefined, - maxRuns: workflow.concurrency.maxRuns || 1, - expression: workflow.concurrency.expression, - limitStrategy: - workflow.concurrency.limitStrategy || ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, - } - : undefined; - - const onFailureJob: CreateWorkflowJobOpts | undefined = workflow.onFailure - ? { - name: `${workflow.id}-on-failure`, - description: workflow.description, - steps: [ - { - readableId: workflow.onFailure.name, - action: `${workflow.id}-on-failure:${workflow.onFailure.name}`, - timeout: workflow.onFailure.timeout || '60s', - inputs: '{}', - parents: [], - userData: '{}', - retries: workflow.onFailure.retries || 0, - rateLimits: mapRateLimit(workflow.onFailure.rate_limits), - workerLabels: {}, // no worker labels for on failure steps - }, - ], - } - : undefined; - - const registeredWorkflow = this.client.admin.putWorkflow({ - name: workflow.id, - description: workflow.description, - version: workflow.version || '', - eventTriggers: - workflow.on && workflow.on.event - ? [applyNamespace(workflow.on.event, this.client.config.namespace)] - : [], - cronTriggers: workflow.on && workflow.on.cron ? 
[workflow.on.cron] : [], - scheduledTriggers: [], - concurrency, - scheduleTimeout: workflow.scheduleTimeout, - onFailureJob, - sticky: workflow.sticky, - jobs: [ - { - name: workflow.id, - description: workflow.description, - steps: workflow.steps.map((step) => ({ - readableId: step.name, - action: `${workflow.id}:${step.name}`, - timeout: step.timeout || '60s', - inputs: '{}', - parents: step.parents ?? [], - userData: '{}', - retries: step.retries || 0, - rateLimits: mapRateLimit(step.rate_limits), - workerLabels: toPbWorkerLabel(step.worker_labels), - backoffFactor: step.backoff?.factor, - backoffMaxSeconds: step.backoff?.maxSeconds, - })), - }, - ], - }); - this.registeredWorkflowPromises.push(registeredWorkflow); - await registeredWorkflow; - this.workflow_registry.push(workflow); - } catch (e: any) { - throw new HatchetError(`Could not register workflow: ${e.message}`); - } - - this.registerActions(workflow); - } - - registerAction(actionId: string, action: StepRunFunction) { - this.action_registry[actionId.toLowerCase()] = action; - } - - async handleStartStepRun(action: Action) { - const { actionId } = action; - - try { - // Note: we always use a DurableContext since its a superset of the Context class - const context = new V0DurableContext(action, this.client, this); - this.contexts[createActionKey(action)] = context; - - const step = this.action_registry[actionId]; - - if (!step) { - this.logger.error(`Registered actions: '${Object.keys(this.action_registry).join(', ')}'`); - this.logger.error(`Could not find step '${actionId}'`); - return; - } - - const run = async () => { - return step(context); - }; - - const success = async (result: any) => { - this.logger.info(`Task run ${action.stepRunId} succeeded`); - - try { - // Send the action event to the dispatcher - const event = this.getStepActionEvent( - action, - StepActionEventType.STEP_EVENT_TYPE_COMPLETED, - false, - result || null, - action.retryCount - ); - await this.client.dispatcher.sendStepActionEvent(event); - } catch (actionEventError: any) { - this.logger.error( - `Could not send completed action event: ${actionEventError.message || actionEventError}` - ); - - // send a failure event - const failureEvent = this.getStepActionEvent( - action, - StepActionEventType.STEP_EVENT_TYPE_FAILED, - false, - actionEventError.message, - action.retryCount - ); - - try { - await this.client.dispatcher.sendStepActionEvent(failureEvent); - } catch (failureEventError: any) { - this.logger.error( - `Could not send failed action event: ${failureEventError.message || failureEventError}` - ); - } - - this.logger.error( - `Could not send action event: ${actionEventError.message || actionEventError}` - ); - } finally { - // delete the run from the futures - delete this.futures[createActionKey(action)]; - delete this.contexts[createActionKey(action)]; - } - }; - - const failure = async (error: any) => { - this.logger.error(`Task run ${action.stepRunId} failed: ${error.message}`); - - if (error.stack) { - this.logger.error(error.stack); - } - - const shouldNotRetry = error instanceof NonRetryableError; - - try { - // Send the action event to the dispatcher - const event = this.getStepActionEvent( - action, - StepActionEventType.STEP_EVENT_TYPE_FAILED, - shouldNotRetry, - { - message: error?.message, - stack: error?.stack, - }, - action.retryCount - ); - await this.client.dispatcher.sendStepActionEvent(event); - } catch (e: any) { - this.logger.error(`Could not send action event: ${e.message}`); - } finally { - // delete the run from the 
futures - delete this.futures[createActionKey(action)]; - delete this.contexts[createActionKey(action)]; - } - }; - - const future = new HatchetPromise( - (async () => { - let result: any; - try { - result = await run(); - } catch (e: any) { - await failure(e); - return; - } - await success(result); - })() - ); - this.futures[createActionKey(action)] = future; - - // Send the action event to the dispatcher - const event = this.getStepActionEvent( - action, - StepActionEventType.STEP_EVENT_TYPE_STARTED, - false, - undefined, - action.retryCount - ); - this.client.dispatcher.sendStepActionEvent(event).catch((e) => { - this.logger.error(`Could not send action event: ${e.message}`); - }); - - try { - await future.promise; - } catch (e: any) { - const message = e?.message || String(e); - if (message.includes('Cancelled')) { - this.logger.debug(`Task run ${action.stepRunId} was cancelled`); - } else { - this.logger.error( - `Could not wait for task run ${action.stepRunId} to finish. ` + - `See https://docs.hatchet.run/home/cancellation for best practices on handling cancellation: `, - e - ); - } - } - } catch (e: any) { - this.logger.error('Could not send action event (outer): ', e); - } - } - - async handleStartGroupKeyRun(action: Action) { - const { actionId } = action; - - try { - const context = new V0Context(action, this.client, this); - - const key = createActionKey(action); - - if (!key) { - this.logger.error(`No group key run id provided for action ${actionId}`); - return; - } - - this.contexts[key] = context; - - this.logger.debug(`Starting group key run ${key}`); - - const step = this.action_registry[actionId]; - - if (!step) { - this.logger.error(`Could not find step '${actionId}'`); - return; - } - - const run = async () => { - return step(context); - }; - - const success = (result: any) => { - this.logger.info(`Task run ${action.stepRunId} succeeded`); - - try { - // Send the action event to the dispatcher - const event = this.getGroupKeyActionEvent( - action, - GroupKeyActionEventType.GROUP_KEY_EVENT_TYPE_COMPLETED, - result - ); - this.client.dispatcher.sendGroupKeyActionEvent(event).catch((e) => { - this.logger.error(`Could not send action event: ${e.message}`); - }); - } catch (e: any) { - this.logger.error(`Could not send action event: ${e.message}`); - } finally { - // delete the run from the futures - delete this.futures[key]; - delete this.contexts[key]; - } - }; - - const failure = (error: any) => { - this.logger.error(`Task run ${key} failed: ${error.message}`); - - try { - // Send the action event to the dispatcher - const event = this.getGroupKeyActionEvent( - action, - GroupKeyActionEventType.GROUP_KEY_EVENT_TYPE_FAILED, - error - ); - this.client.dispatcher.sendGroupKeyActionEvent(event).catch((e) => { - this.logger.error(`Could not send action event: ${e.message}`); - }); - } catch (e: any) { - this.logger.error(`Could not send action event: ${e.message}`); - } finally { - // delete the run from the futures - delete this.futures[key]; - delete this.contexts[key]; - } - }; - - const future = new HatchetPromise(run().then(success).catch(failure)); - this.futures[key] = future; - - // Send the action event to the dispatcher - const event = this.getGroupKeyActionEvent( - action, - GroupKeyActionEventType.GROUP_KEY_EVENT_TYPE_STARTED - ); - this.client.dispatcher.sendGroupKeyActionEvent(event).catch((e) => { - this.logger.error(`Could not send action event: ${e.message}`); - }); - - await future.promise; - } catch (e: any) { - this.logger.error(`Could not send action 
event: ${e.message}`); - } - } - - getStepActionEvent( - action: Action, - eventType: StepActionEventType, - shouldNotRetry: boolean, - payload: any = '', - retryCount: number = 0 - ): StepActionEvent { - return { - workerId: this.name, - jobId: action.jobId, - jobRunId: action.jobRunId, - stepId: action.stepId, - stepRunId: action.stepRunId, - actionId: action.actionId, - eventTimestamp: new Date(), - eventType, - eventPayload: JSON.stringify(payload), - shouldNotRetry, - retryCount, - }; - } - - getGroupKeyActionEvent( - action: Action, - eventType: GroupKeyActionEventType, - payload: any = '' - ): GroupKeyActionEvent { - if (!action.getGroupKeyRunId) { - throw new HatchetError('No group key run id provided'); - } - return { - workerId: this.name, - workflowRunId: action.workflowRunId, - getGroupKeyRunId: action.getGroupKeyRunId, - actionId: action.actionId, - eventTimestamp: new Date(), - eventType, - eventPayload: JSON.stringify(payload), - }; - } - - async handleCancelStepRun(action: Action) { - const { stepRunId } = action; - try { - this.logger.info(`Cancelling task run ${action.stepRunId}`); - const future = this.futures[createActionKey(action)]; - const context = this.contexts[createActionKey(action)]; - - if (context && context.controller) { - context.controller.abort('Cancelled by worker'); - } - - if (future) { - future.promise.catch(() => { - this.logger.info(`Cancelled task run ${action.stepRunId}`); - }); - future.cancel('Cancelled by worker'); - await future.promise; - } - } catch (e: any) { - // Expected: the promise rejects when cancelled - this.logger.debug(`Task run ${stepRunId} cancellation completed`); - } finally { - delete this.futures[createActionKey(action)]; - delete this.contexts[createActionKey(action)]; - } - } - - async stop() { - await this.exitGracefully(false); - } - - async exitGracefully(handleKill: boolean) { - this.killing = true; - - this.logger.info('Starting to exit...'); - - try { - await this.listener?.unregister(); - } catch (e: any) { - this.logger.error(`Could not unregister listener: ${e.message}`); - } - - this.logger.info('Gracefully exiting hatchet worker, running tasks will attempt to finish...'); - - // attempt to wait for futures to finish - await Promise.all(Object.values(this.futures).map(({ promise }) => promise)); - - this.logger.info('Successfully finished pending tasks.'); - - if (handleKill) { - this.logger.info('Exiting hatchet worker...'); - process.exit(0); - } - } - - async start() { - // ensure all workflows are registered - await Promise.all(this.registeredWorkflowPromises); - - if (Object.keys(this.action_registry).length === 0) { - return; - } - - try { - this.listener = await this.client.dispatcher.getActionListener({ - workerName: this.name, - services: ['default'], - actions: Object.keys(this.action_registry), - maxRuns: this.maxRuns, - labels: this.labels, - }); - - this.workerId = this.listener.workerId; - - const generator = this.listener.actions(); - - this.logger.info(`Worker ${this.name} listening for actions`); - - for await (const action of generator) { - this.logger.info( - `Worker ${this.name} received action ${action.actionId}:${action.actionType}` - ); - - void this.handleAction(action); - } - } catch (e: any) { - if (this.killing) { - this.logger.info(`Exiting worker, ignoring error: ${e.message}`); - return; - } - this.logger.error(`Could not run worker: ${e.message}`); - throw new HatchetError(`Could not run worker: ${e.message}`); - } - } - - async handleAction(action: Action) { - const type = 
action.actionType
-      ? actionTypeFromJSON(action.actionType)
-      : ActionType.START_STEP_RUN;
-    if (type === ActionType.START_STEP_RUN) {
-      await this.handleStartStepRun(action);
-    } else if (type === ActionType.CANCEL_STEP_RUN) {
-      await this.handleCancelStepRun(action);
-    } else if (type === ActionType.START_GET_GROUP_KEY) {
-      await this.handleStartGroupKeyRun(action);
-    } else {
-      this.logger.error(`Worker ${this.name} received unknown action type ${type}`);
-    }
-  }
-
-  async upsertLabels(labels: WorkerLabels) {
-    this.labels = labels;
-
-    if (!this.workerId) {
-      this.logger.warn('Worker not registered.');
-      return this.labels;
-    }
-
-    this.client.dispatcher.upsertWorkerLabels(this.workerId, labels);
-
-    return this.labels;
-  }
-}
-
-function toPbWorkerLabel(
-  in_: CreateStep['worker_labels']
-): Record<string, DesiredWorkerLabels> {
-  if (!in_) {
-    return {};
-  }
-
-  return Object.entries(in_).reduce<Record<string, DesiredWorkerLabels>>(
-    (acc, [key, value]) => {
-      if (!value) {
-        return {
-          ...acc,
-          [key]: {
-            strValue: undefined,
-            intValue: undefined,
-          },
-        };
-      }
-
-      if (typeof value === 'string') {
-        return {
-          ...acc,
-          [key]: {
-            strValue: value,
-            intValue: undefined,
-          },
-        };
-      }
-
-      if (typeof value === 'number') {
-        return {
-          ...acc,
-          [key]: {
-            strValue: undefined,
-            intValue: value,
-          },
-        };
-      }
-
-      return {
-        ...acc,
-        [key]: {
-          strValue: typeof value.value === 'string' ? value.value : undefined,
-          intValue: typeof value.value === 'number' ? value.value : undefined,
-          required: value.required,
-          weight: value.weight,
-          comparator: value.comparator,
-        },
-      };
-    },
-    {} as Record<string, DesiredWorkerLabels>
-  );
-}
-
-function onFailureTaskName(workflow: WorkflowDefinition) {
-  return `${workflow.name}:on-failure-task`;
-}
-
-function getLeaves(tasks: CreateWorkflowTaskOpts<any, any>[]): CreateWorkflowTaskOpts<any, any>[] {
-  return tasks.filter((task) => isLeafTask(task, tasks));
-}
-
-function isLeafTask(
-  task: CreateWorkflowTaskOpts<any, any>,
-  allTasks: CreateWorkflowTaskOpts<any, any>[]
-): boolean {
-  return !allTasks.some((t) => t.parents?.some((p) => p.name === task.name));
-}
diff --git a/sdks/typescript/src/index.ts b/sdks/typescript/src/index.ts
index bc3b32c914..10af3f5cf8 100644
--- a/sdks/typescript/src/index.ts
+++ b/sdks/typescript/src/index.ts
@@ -1,13 +1,13 @@
 import { HatchetClient as Hatchet } from '@hatchet/v1/client/client';

-export * from './workflow';
-export * from './step';
-export * from './clients/worker';
 export * from './clients/rest';
 export * from './clients/admin';
 export * from './util/workflow-run-ref';
 export * from './v1';

+export * from './workflow';
+export * from './step';
+
 export default Hatchet;

 export { Hatchet };
diff --git a/sdks/typescript/src/examples/affinity-workers.ts b/sdks/typescript/src/legacy/examples/affinity-workers.ts
similarity index 96%
rename from sdks/typescript/src/examples/affinity-workers.ts
rename to sdks/typescript/src/legacy/examples/affinity-workers.ts
index 04f1080155..d3760a37ee 100644
--- a/sdks/typescript/src/examples/affinity-workers.ts
+++ b/sdks/typescript/src/legacy/examples/affinity-workers.ts
@@ -1,6 +1,6 @@
 import { WorkerLabelComparator } from '@hatchet/protoc/workflows';
-import Hatchet from '../sdk';
-import { Workflow } from '../workflow';
+import Hatchet from '../../sdk';
+import { Workflow } from '../../workflow';

 const hatchet = Hatchet.init();
diff --git a/sdks/typescript/src/examples/bulk-fanout-trigger.ts b/sdks/typescript/src/legacy/examples/bulk-fanout-trigger.ts
similarity index 96%
rename from sdks/typescript/src/examples/bulk-fanout-trigger.ts
rename to
sdks/typescript/src/legacy/examples/bulk-fanout-trigger.ts index 49676c20f0..af66cc7f97 100644 --- a/sdks/typescript/src/examples/bulk-fanout-trigger.ts +++ b/sdks/typescript/src/legacy/examples/bulk-fanout-trigger.ts @@ -1,5 +1,5 @@ import { WorkflowRun } from '@hatchet/clients/admin'; -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/bulk-fanout-worker.ts b/sdks/typescript/src/legacy/examples/bulk-fanout-worker.ts similarity index 97% rename from sdks/typescript/src/examples/bulk-fanout-worker.ts rename to sdks/typescript/src/legacy/examples/bulk-fanout-worker.ts index f59234dfb0..78ce1d821a 100644 --- a/sdks/typescript/src/examples/bulk-fanout-worker.ts +++ b/sdks/typescript/src/legacy/examples/bulk-fanout-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/bulk-trigger.ts b/sdks/typescript/src/legacy/examples/bulk-trigger.ts similarity index 95% rename from sdks/typescript/src/examples/bulk-trigger.ts rename to sdks/typescript/src/legacy/examples/bulk-trigger.ts index 698fdd6249..00de430d0b 100644 --- a/sdks/typescript/src/examples/bulk-trigger.ts +++ b/sdks/typescript/src/legacy/examples/bulk-trigger.ts @@ -1,5 +1,5 @@ import { WorkflowRun } from '@hatchet/clients/admin'; -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/byo-logger.ts b/sdks/typescript/src/legacy/examples/byo-logger.ts similarity index 100% rename from sdks/typescript/src/examples/byo-logger.ts rename to sdks/typescript/src/legacy/examples/byo-logger.ts diff --git a/sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-event.ts b/sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-event.ts similarity index 92% rename from sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-event.ts rename to sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-event.ts index 66ef328d59..d470cd3d16 100644 --- a/sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-event.ts +++ b/sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-event.ts @@ -1,4 +1,4 @@ -import Hatchet from '../../../sdk'; +import Hatchet from '../../../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-worker.ts b/sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-worker.ts similarity index 94% rename from sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-worker.ts rename to sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-worker.ts index e8b2c6738f..9e5d06707e 100644 --- a/sdks/typescript/src/examples/concurrency/cancel-in-progress/concurrency-worker.ts +++ b/sdks/typescript/src/legacy/examples/concurrency/cancel-in-progress/concurrency-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../../../sdk'; -import { Workflow } from '../../../workflow'; +import Hatchet from '../../../../sdk'; +import { Workflow } from '../../../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-event.ts 
b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-event.ts similarity index 91% rename from sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-event.ts rename to sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-event.ts index de9307929b..c4589c6d62 100644 --- a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-event.ts +++ b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-event.ts @@ -1,4 +1,4 @@ -import Hatchet from '../../../sdk'; +import Hatchet from '../../../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-expression.ts b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-expression.ts similarity index 91% rename from sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-expression.ts rename to sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-expression.ts index e14c54ef64..7284fecd94 100644 --- a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-expression.ts +++ b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-expression.ts @@ -1,5 +1,5 @@ -import Hatchet from '../../../sdk'; -import { ConcurrencyLimitStrategy, Workflow } from '../../../workflow'; +import Hatchet from '../../../../sdk'; +import { ConcurrencyLimitStrategy, Workflow } from '../../../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts similarity index 92% rename from sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts rename to sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts index b90b20325a..42accd98a1 100644 --- a/sdks/typescript/src/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts +++ b/sdks/typescript/src/legacy/examples/concurrency/group-round-robin/concurrency-worker-key-fn.ts @@ -1,5 +1,5 @@ -import Hatchet from '../../../sdk'; -import { ConcurrencyLimitStrategy, Workflow } from '../../../workflow'; +import Hatchet from '../../../../sdk'; +import { ConcurrencyLimitStrategy, Workflow } from '../../../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/crons/cron-worker.ts b/sdks/typescript/src/legacy/examples/crons/cron-worker.ts similarity index 90% rename from sdks/typescript/src/examples/crons/cron-worker.ts rename to sdks/typescript/src/legacy/examples/crons/cron-worker.ts index 086f71a7ff..a3c23f7a9a 100644 --- a/sdks/typescript/src/examples/crons/cron-worker.ts +++ b/sdks/typescript/src/legacy/examples/crons/cron-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../../sdk'; -import { Workflow } from '../../workflow'; +import Hatchet from '../../../sdk'; +import { Workflow } from '../../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/crons/cron.e2e.ts b/sdks/typescript/src/legacy/examples/crons/cron.e2e.ts similarity index 95% rename from sdks/typescript/src/examples/crons/cron.e2e.ts rename to sdks/typescript/src/legacy/examples/crons/cron.e2e.ts index bd11535c37..09b3e66396 100644 --- a/sdks/typescript/src/examples/crons/cron.e2e.ts +++ 
b/sdks/typescript/src/legacy/examples/crons/cron.e2e.ts @@ -1,5 +1,5 @@ import sleep from '@hatchet/util/sleep'; -import Hatchet, { Worker } from '../..'; +import Hatchet, { Worker } from '../../..'; import { simpleCronWorkflow } from './cron-worker'; xdescribe('cron-e2e', () => { diff --git a/sdks/typescript/src/examples/crons/programatic-crons.ts b/sdks/typescript/src/legacy/examples/crons/programatic-crons.ts similarity index 97% rename from sdks/typescript/src/examples/crons/programatic-crons.ts rename to sdks/typescript/src/legacy/examples/crons/programatic-crons.ts index 44cedd3e14..65f824e932 100644 --- a/sdks/typescript/src/examples/crons/programatic-crons.ts +++ b/sdks/typescript/src/legacy/examples/crons/programatic-crons.ts @@ -1,4 +1,4 @@ -import Hatchet from '../../sdk'; +import Hatchet from '../../../sdk'; import { simpleCronWorkflow } from './cron-worker'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/dag-worker.ts b/sdks/typescript/src/legacy/examples/dag-worker.ts similarity index 94% rename from sdks/typescript/src/examples/dag-worker.ts rename to sdks/typescript/src/legacy/examples/dag-worker.ts index d81ba836ce..8f4d51a71c 100644 --- a/sdks/typescript/src/examples/dag-worker.ts +++ b/sdks/typescript/src/legacy/examples/dag-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init({ log_level: 'OFF', diff --git a/sdks/typescript/src/examples/example-event-with-results.ts b/sdks/typescript/src/legacy/examples/example-event-with-results.ts similarity index 92% rename from sdks/typescript/src/examples/example-event-with-results.ts rename to sdks/typescript/src/legacy/examples/example-event-with-results.ts index 8d4c2807ff..f563ab8507 100644 --- a/sdks/typescript/src/examples/example-event-with-results.ts +++ b/sdks/typescript/src/legacy/examples/example-event-with-results.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; async function main() { const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/example-event.ts b/sdks/typescript/src/legacy/examples/example-event.ts similarity index 97% rename from sdks/typescript/src/examples/example-event.ts rename to sdks/typescript/src/legacy/examples/example-event.ts index 17a084d479..02b27dfbd5 100644 --- a/sdks/typescript/src/examples/example-event.ts +++ b/sdks/typescript/src/legacy/examples/example-event.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/fanout-worker.e2e.ts b/sdks/typescript/src/legacy/examples/fanout-worker.e2e.ts similarity index 94% rename from sdks/typescript/src/examples/fanout-worker.e2e.ts rename to sdks/typescript/src/legacy/examples/fanout-worker.e2e.ts index 4d06281a09..89fc50315e 100644 --- a/sdks/typescript/src/examples/fanout-worker.e2e.ts +++ b/sdks/typescript/src/legacy/examples/fanout-worker.e2e.ts @@ -1,6 +1,6 @@ -import { Workflow } from '..'; -import sleep from '../util/sleep'; -import Hatchet from '../sdk'; +import { Workflow } from '../..'; +import sleep from '../../util/sleep'; +import Hatchet from '../../sdk'; xdescribe('fanout-e2e', () => { it('should pass a fanout workflow', async () => { diff --git a/sdks/typescript/src/examples/fanout-worker.ts b/sdks/typescript/src/legacy/examples/fanout-worker.ts similarity index 97% rename from 
sdks/typescript/src/examples/fanout-worker.ts rename to sdks/typescript/src/legacy/examples/fanout-worker.ts index 57946ea0d6..60d68b54b7 100644 --- a/sdks/typescript/src/examples/fanout-worker.ts +++ b/sdks/typescript/src/legacy/examples/fanout-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/logger.ts b/sdks/typescript/src/legacy/examples/logger.ts similarity index 91% rename from sdks/typescript/src/examples/logger.ts rename to sdks/typescript/src/legacy/examples/logger.ts index 159cfe5998..5e2c47fec2 100644 --- a/sdks/typescript/src/examples/logger.ts +++ b/sdks/typescript/src/legacy/examples/logger.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init({ log_level: 'OFF', diff --git a/sdks/typescript/src/examples/manual-trigger.ts b/sdks/typescript/src/legacy/examples/manual-trigger.ts similarity index 89% rename from sdks/typescript/src/examples/manual-trigger.ts rename to sdks/typescript/src/legacy/examples/manual-trigger.ts index 46b4a89332..572c9e62ac 100644 --- a/sdks/typescript/src/examples/manual-trigger.ts +++ b/sdks/typescript/src/legacy/examples/manual-trigger.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/multi-workflow.ts b/sdks/typescript/src/legacy/examples/multi-workflow.ts similarity index 95% rename from sdks/typescript/src/examples/multi-workflow.ts rename to sdks/typescript/src/legacy/examples/multi-workflow.ts index 17390213f9..5b6311453d 100644 --- a/sdks/typescript/src/examples/multi-workflow.ts +++ b/sdks/typescript/src/legacy/examples/multi-workflow.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet: Hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/namespaced-worker.e2e.ts b/sdks/typescript/src/legacy/examples/namespaced-worker.e2e.ts similarity index 93% rename from sdks/typescript/src/examples/namespaced-worker.e2e.ts rename to sdks/typescript/src/legacy/examples/namespaced-worker.e2e.ts index 9d4d9ab897..1536945ba8 100644 --- a/sdks/typescript/src/examples/namespaced-worker.e2e.ts +++ b/sdks/typescript/src/legacy/examples/namespaced-worker.e2e.ts @@ -1,6 +1,6 @@ -import { Workflow, Worker } from '..'; -import sleep from '../util/sleep'; -import Hatchet from '../sdk'; +import { Workflow, Worker } from '../..'; +import sleep from '../../util/sleep'; +import Hatchet from '../../sdk'; xdescribe('e2e', () => { let hatchet: Hatchet; diff --git a/sdks/typescript/src/examples/namespaced-worker.ts b/sdks/typescript/src/legacy/examples/namespaced-worker.ts similarity index 92% rename from sdks/typescript/src/examples/namespaced-worker.ts rename to sdks/typescript/src/legacy/examples/namespaced-worker.ts index f8dc32acdf..bf611e3f37 100644 --- a/sdks/typescript/src/examples/namespaced-worker.ts +++ b/sdks/typescript/src/legacy/examples/namespaced-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init({ namespace: 'example-namespace', diff --git a/sdks/typescript/src/examples/on-failure.ts 
b/sdks/typescript/src/legacy/examples/on-failure.ts similarity index 93% rename from sdks/typescript/src/examples/on-failure.ts rename to sdks/typescript/src/legacy/examples/on-failure.ts index 34fc77a231..2b3c182fe0 100644 --- a/sdks/typescript/src/examples/on-failure.ts +++ b/sdks/typescript/src/legacy/examples/on-failure.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/rate-limit/events.ts b/sdks/typescript/src/legacy/examples/rate-limit/events.ts similarity index 85% rename from sdks/typescript/src/examples/rate-limit/events.ts rename to sdks/typescript/src/legacy/examples/rate-limit/events.ts index 77b87a91bd..d7f55a4a9a 100644 --- a/sdks/typescript/src/examples/rate-limit/events.ts +++ b/sdks/typescript/src/legacy/examples/rate-limit/events.ts @@ -1,4 +1,4 @@ -import Hatchet from '../../sdk'; +import Hatchet from '../../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/rate-limit/worker.ts b/sdks/typescript/src/legacy/examples/rate-limit/worker.ts similarity index 88% rename from sdks/typescript/src/examples/rate-limit/worker.ts rename to sdks/typescript/src/legacy/examples/rate-limit/worker.ts index 7bcb1bfbef..71d120d90f 100644 --- a/sdks/typescript/src/examples/rate-limit/worker.ts +++ b/sdks/typescript/src/legacy/examples/rate-limit/worker.ts @@ -1,6 +1,6 @@ -import { RateLimitDuration } from '../../protoc/workflows'; -import Hatchet from '../../sdk'; -import { Workflow } from '../../workflow'; +import { RateLimitDuration } from '../../../protoc/workflows'; +import Hatchet from '../../../sdk'; +import { Workflow } from '../../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/retries-with-backoff.ts b/sdks/typescript/src/legacy/examples/retries-with-backoff.ts similarity index 92% rename from sdks/typescript/src/examples/retries-with-backoff.ts rename to sdks/typescript/src/legacy/examples/retries-with-backoff.ts index 3262cf836a..3b5961d3c4 100644 --- a/sdks/typescript/src/examples/retries-with-backoff.ts +++ b/sdks/typescript/src/legacy/examples/retries-with-backoff.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/retries-worker.ts b/sdks/typescript/src/legacy/examples/retries-worker.ts similarity index 93% rename from sdks/typescript/src/examples/retries-worker.ts rename to sdks/typescript/src/legacy/examples/retries-worker.ts index a1797a3ab7..a352d0254a 100644 --- a/sdks/typescript/src/examples/retries-worker.ts +++ b/sdks/typescript/src/legacy/examples/retries-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/scheduled-runs/programatic-schedules.ts b/sdks/typescript/src/legacy/examples/scheduled-runs/programatic-schedules.ts similarity index 97% rename from sdks/typescript/src/examples/scheduled-runs/programatic-schedules.ts rename to sdks/typescript/src/legacy/examples/scheduled-runs/programatic-schedules.ts index e3669219e9..3a1d8680c7 100644 --- a/sdks/typescript/src/examples/scheduled-runs/programatic-schedules.ts +++ 
b/sdks/typescript/src/legacy/examples/scheduled-runs/programatic-schedules.ts @@ -1,4 +1,4 @@ -import Hatchet from '../../sdk'; +import Hatchet from '../../../sdk'; import { simpleWorkflow } from '../simple-worker'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/simple-worker.e2e.ts b/sdks/typescript/src/legacy/examples/simple-worker.e2e.ts similarity index 93% rename from sdks/typescript/src/examples/simple-worker.e2e.ts rename to sdks/typescript/src/legacy/examples/simple-worker.e2e.ts index 5e43651362..e26e571b74 100644 --- a/sdks/typescript/src/examples/simple-worker.e2e.ts +++ b/sdks/typescript/src/legacy/examples/simple-worker.e2e.ts @@ -1,6 +1,6 @@ -import { Workflow, Worker } from '..'; -import sleep from '../util/sleep'; -import Hatchet from '../sdk'; +import { Workflow, Worker } from '../..'; +import sleep from '../../util/sleep'; +import Hatchet from '../../sdk'; describe('e2e', () => { let hatchet: Hatchet; diff --git a/sdks/typescript/src/examples/simple-worker.ts b/sdks/typescript/src/legacy/examples/simple-worker.ts similarity index 93% rename from sdks/typescript/src/examples/simple-worker.ts rename to sdks/typescript/src/legacy/examples/simple-worker.ts index d2ad4264ae..cf7f4ba4ff 100644 --- a/sdks/typescript/src/examples/simple-worker.ts +++ b/sdks/typescript/src/legacy/examples/simple-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/sticky-trigger.ts b/sdks/typescript/src/legacy/examples/sticky-trigger.ts similarity index 91% rename from sdks/typescript/src/examples/sticky-trigger.ts rename to sdks/typescript/src/legacy/examples/sticky-trigger.ts index bd0e0c33b1..48c1ed1093 100644 --- a/sdks/typescript/src/examples/sticky-trigger.ts +++ b/sdks/typescript/src/legacy/examples/sticky-trigger.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/sticky-worker-with-check.ts b/sdks/typescript/src/legacy/examples/sticky-worker-with-check.ts similarity index 96% rename from sdks/typescript/src/examples/sticky-worker-with-check.ts rename to sdks/typescript/src/legacy/examples/sticky-worker-with-check.ts index 0af33ea0de..ea4462d89f 100644 --- a/sdks/typescript/src/examples/sticky-worker-with-check.ts +++ b/sdks/typescript/src/legacy/examples/sticky-worker-with-check.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { StickyStrategy, Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { StickyStrategy, Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/sticky-worker.ts b/sdks/typescript/src/legacy/examples/sticky-worker.ts similarity index 94% rename from sdks/typescript/src/examples/sticky-worker.ts rename to sdks/typescript/src/legacy/examples/sticky-worker.ts index abb8abe337..0cdd006b48 100644 --- a/sdks/typescript/src/examples/sticky-worker.ts +++ b/sdks/typescript/src/legacy/examples/sticky-worker.ts @@ -1,5 +1,5 @@ -import Hatchet from '../sdk'; -import { StickyStrategy, Workflow } from '../workflow'; +import Hatchet from '../../sdk'; +import { StickyStrategy, Workflow } from '../../workflow'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/examples/stream-by-additional-meta.ts b/sdks/typescript/src/legacy/examples/stream-by-additional-meta.ts 
similarity index 96% rename from sdks/typescript/src/examples/stream-by-additional-meta.ts rename to sdks/typescript/src/legacy/examples/stream-by-additional-meta.ts index 978e99ac3c..7f62412e35 100644 --- a/sdks/typescript/src/examples/stream-by-additional-meta.ts +++ b/sdks/typescript/src/legacy/examples/stream-by-additional-meta.ts @@ -1,4 +1,4 @@ -import Hatchet from '../sdk'; +import Hatchet from '../../sdk'; const hatchet = Hatchet.init(); diff --git a/sdks/typescript/src/legacy/legacy-transformer.ts b/sdks/typescript/src/legacy/legacy-transformer.ts new file mode 100644 index 0000000000..0b98e06eed --- /dev/null +++ b/sdks/typescript/src/legacy/legacy-transformer.ts @@ -0,0 +1,219 @@ +/* eslint-disable no-console */ +import type { Workflow } from '@hatchet/legacy/workflow'; +import { V0Context } from '@hatchet/legacy/step'; +import type { CreateStep } from '@hatchet/legacy/step'; +import { BaseWorkflowDeclaration, WorkflowDeclaration } from '../v1/declaration'; +import type { CreateWorkflowOpts } from '../v1/declaration'; +import type { CreateWorkflowTaskOpts, Concurrency } from '../v1/task'; +import type { Duration } from '../v1/client/duration'; + +export type { Workflow as LegacyWorkflow } from '@hatchet/legacy/workflow'; + +const LEGACY_WORKFLOW_WARNING = [ + '', + '\x1b[33m╔══════════════════════════════════════════════════════════════════════════════╗', + '║ ║', + '║ ⚠ DEPRECATION WARNING: Legacy workflow format detected. ║', + '║ ║', + '║ Please migrate to the v1 SDK: ║', + '║ https://docs.hatchet.run/home/migration-guide-typescript ║', + '║ ║', + '╚══════════════════════════════════════════════════════════════════════════════╝\x1b[0m', + '', +].join('\n'); + +/** + * Type guard: returns true if the workflow is a legacy v0 Workflow (not a BaseWorkflowDeclaration). + */ +export function isLegacyWorkflow(workflow: unknown): workflow is Workflow { + return ( + workflow != null && + typeof workflow === 'object' && + !(workflow instanceof BaseWorkflowDeclaration) && + 'id' in workflow && + 'steps' in workflow && + Array.isArray((workflow as any).steps) + ); +} + +/** + * Emits a deprecation warning for legacy workflow usage. + */ +export function warnLegacyWorkflow(): void { + console.warn(LEGACY_WORKFLOW_WARNING); +} + +/** + * Transforms a legacy v0 Workflow into a v1 WorkflowDeclaration. + * + * The transformed declaration can be registered with a worker and executed + * by the v1 runtime. Legacy step `run` functions are wrapped to receive + * a V0Context for backwards compatibility. + */ +export function transformLegacyWorkflow(workflow: Workflow): WorkflowDeclaration { + // Map concurrency + let concurrency: Concurrency | undefined; + if (workflow.concurrency) { + if (workflow.concurrency.key) { + console.warn( + '[hatchet] Legacy concurrency key functions are not supported in v1. ' + + 'Use CEL expressions instead: https://docs.hatchet.run/home/v1-sdk-improvements' + ); + } + + concurrency = { + expression: workflow.concurrency.expression || workflow.concurrency.name, + maxRuns: workflow.concurrency.maxRuns, + limitStrategy: workflow.concurrency.limitStrategy as any, + }; + } + + const opts: CreateWorkflowOpts = { + name: workflow.id, + description: workflow.description, + version: workflow.version, + sticky: workflow.sticky != null ? (workflow.sticky as any) : undefined, + on: workflow.on + ? 
{
+          cron: workflow.on.cron,
+          event: workflow.on.event,
+        }
+      : undefined,
+    concurrency,
+    taskDefaults: {
+      executionTimeout: workflow.timeout as Duration | undefined,
+      scheduleTimeout: workflow.scheduleTimeout as Duration | undefined,
+    },
+  };
+
+  const declaration = new WorkflowDeclaration(opts);
+
+  // Build task lookup for parent resolution (parents are strings in legacy, objects in v1)
+  const taskMap: Record<string, CreateWorkflowTaskOpts> = {};
+
+  for (const step of workflow.steps) {
+    const taskOpts = legacyStepToTaskOpts(step, taskMap, workflow.timeout);
+    taskMap[step.name] = taskOpts;
+    // eslint-disable-next-line no-underscore-dangle
+    declaration.definition._tasks.push(taskOpts);
+  }
+
+  // Handle onFailure
+  if (workflow.onFailure) {
+    declaration.definition.onFailure = {
+      fn: wrapLegacyStepRun(workflow.onFailure),
+      executionTimeout: (workflow.onFailure.timeout || workflow.timeout) as Duration | undefined,
+      retries: workflow.onFailure.retries,
+      rateLimits: mapLegacyRateLimits(workflow.onFailure.rate_limits),
+      desiredWorkerLabels: workflow.onFailure.worker_labels as any,
+      backoff: workflow.onFailure.backoff,
+    };
+  }
+
+  return declaration;
+}
+
+/**
+ * Converts a legacy CreateStep to a v1 CreateWorkflowTaskOpts.
+ */
+function legacyStepToTaskOpts(
+  step: CreateStep<any, any>,
+  taskMap: Record<string, CreateWorkflowTaskOpts>,
+  workflowTimeout?: string
+): CreateWorkflowTaskOpts {
+  return {
+    name: step.name,
+    fn: wrapLegacyStepRun(step),
+    executionTimeout: (step.timeout || workflowTimeout) as Duration | undefined,
+    retries: step.retries,
+    rateLimits: mapLegacyRateLimits(step.rate_limits),
+    desiredWorkerLabels: step.worker_labels as any,
+    backoff: step.backoff,
+    parents: step.parents?.map((name) => taskMap[name]).filter(Boolean),
+  };
+}
+
+/**
+ * Wraps a legacy step's `run(ctx: V0Context)` function into a v1-compatible
+ * `fn(input, ctx: Context)` function by constructing a V0Context at runtime.
+ */
+function wrapLegacyStepRun(step: CreateStep<any, any>) {
+  return (input: any, ctx: any) => {
+    // Access the V1Worker from the ContextWorker's private field.
+    // This is intentionally accessing a private field for legacy compatibility.
+    // eslint-disable-next-line no-underscore-dangle
+    const v1Worker = (ctx.worker as any).worker;
+    // eslint-disable-next-line no-underscore-dangle
+    const v0ctx = new V0Context(ctx.action, ctx.v1._v0, v1Worker);
+    // Share the abort controller so cancellation propagates
+    v0ctx.controller = ctx.controller;
+    return step.run(v0ctx);
+  };
+}
+
+/**
+ * Maps legacy rate limits to v1 format.
+ */
+function mapLegacyRateLimits(
+  limits?: CreateStep<any, any>['rate_limits']
+): CreateWorkflowTaskOpts['rateLimits'] {
+  if (!limits) return undefined;
+  return limits.map((l) => ({
+    staticKey: l.staticKey || l.key,
+    dynamicKey: l.dynamicKey,
+    units: l.units,
+    limit: l.limit,
+    duration: l.duration,
+  }));
+}
+
+/**
+ * Normalizes a workflow: if it is a legacy Workflow, emits a deprecation warning
+ * and transforms it into a BaseWorkflowDeclaration. If already a v1 declaration,
+ * returns it as-is.
+ */
+export function normalizeWorkflow(
+  workflow: BaseWorkflowDeclaration | Workflow
+): BaseWorkflowDeclaration {
+  if (isLegacyWorkflow(workflow)) {
+    warnLegacyWorkflow();
+    return transformLegacyWorkflow(workflow);
+  }
+  return workflow;
+}
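
// Illustrative sketch (not part of the patch): how string parents resolve during
// transformation. `legacyDemo` is a hypothetical two-step workflow; because taskMap
// is filled in declaration order, 'step-b' ends up holding a reference to the very
// options object that was built for 'step-a'.
const legacyDemo = {
  id: 'demo',
  description: 'demo',
  steps: [
    { name: 'step-a', run: async () => ({ a: 1 }) },
    { name: 'step-b', parents: ['step-a'], run: async (ctx: any) => ctx.stepOutput('step-a') },
  ],
} as Workflow;
const demoDecl = transformLegacyWorkflow(legacyDemo);
// demoDecl.definition._tasks[1].parents?.[0] === demoDecl.definition._tasks[0]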
+
+/**
+ * Normalizes an array of workflows, transforming any legacy workflows and
+ * emitting deprecation warnings.
+ */
+export function normalizeWorkflows(
+  workflows: Array<BaseWorkflowDeclaration | Workflow>
+): Array<BaseWorkflowDeclaration> {
+  return workflows.map(normalizeWorkflow);
+}
+
+/**
+ * Extracts the workflow name from a workflow-like value.
+ * Works for strings, BaseWorkflowDeclaration, WorkflowDefinition, and legacy Workflow.
+ * Emits a deprecation warning if a legacy workflow is detected.
+ */
+export function getWorkflowName(
+  workflow: string | BaseWorkflowDeclaration | Workflow | { name: string }
+): string {
+  if (typeof workflow === 'string') {
+    return workflow;
+  }
+  if (workflow instanceof BaseWorkflowDeclaration) {
+    return workflow.name;
+  }
+  if (isLegacyWorkflow(workflow)) {
+    warnLegacyWorkflow();
+    return workflow.id;
+  }
+  if ('name' in workflow) {
+    return workflow.name as string;
+  }
+  throw new Error(
+    'Invalid workflow: must be a string, BaseWorkflowDeclaration, or legacy Workflow object'
+  );
+}
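A minimal usage sketch of these helpers (the `legacyGreeter` object is hypothetical and the import paths are assumed, not part of the diff):

import { normalizeWorkflow } from './legacy-transformer';
import type { Workflow } from './workflow';

// A v0-style object: isLegacyWorkflow keys off the `id` field plus a `steps` array.
const legacyGreeter: Workflow = {
  id: 'greeter',
  description: 'says hello',
  steps: [{ name: 'greet', run: async () => ({ msg: 'hello' }) }],
};

// Prints the deprecation banner once, then yields a v1 declaration that can be
// registered on a worker alongside native v1 workflows.
const greeterDecl = normalizeWorkflow(legacyGreeter);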
diff --git a/sdks/typescript/src/legacy/step.ts b/sdks/typescript/src/legacy/step.ts
new file mode 100644
index 0000000000..9caf3c98a8
--- /dev/null
+++ b/sdks/typescript/src/legacy/step.ts
@@ -0,0 +1,765 @@
+/* eslint-disable no-underscore-dangle */
+/* eslint-disable max-classes-per-file */
+import HatchetError from '@util/errors/hatchet-error';
+import * as z from 'zod';
+import { JsonObject } from '@bufbuild/protobuf';
+import { Workflow } from './workflow';
+import { Action } from '../clients/dispatcher/action-listener';
+import { LogLevel } from '../clients/event/event-client';
+import { Logger } from '../util/logger';
+import { parseJSON } from '../util/parse';
+import WorkflowRunRef from '../util/workflow-run-ref';
+import { WorkerLabels } from '../clients/dispatcher/dispatcher-client';
+import { CreateStepRateLimit, RateLimitDuration, WorkerLabelComparator } from '../protoc/workflows';
+import { CreateWorkflowTaskOpts, Priority } from '../v1';
+import {
+  RunOpts,
+  TaskWorkflowDeclaration,
+  BaseWorkflowDeclaration as WorkflowV1,
+} from '../v1/declaration';
+import { Conditions, Render } from '../v1/conditions';
+import { Action as ConditionAction } from '../protoc/v1/shared/condition';
+import { conditionsToPb } from '../v1/conditions/transformer';
+import { Duration } from '../v1/client/duration';
+import { JsonValue, OutputType } from '../v1/types';
+import { V1Worker } from '../v1/client/worker/worker-internal';
+import { LegacyHatchetClient } from '../clients/hatchet-client';
+import { applyNamespace } from '../util/apply-namespace';
+
+export const CreateRateLimitSchema = z.object({
+  key: z.string().optional(),
+  staticKey: z.string().optional(),
+  dynamicKey: z.string().optional(),
+
+  units: z.union([z.number().min(0), z.string()]),
+  limit: z.union([z.number().min(1), z.string()]).optional(),
+  duration: z.nativeEnum(RateLimitDuration).optional(),
+});
+
+export const DesiredWorkerLabelSchema = z
+  .union([
+    z.string(),
+    z.number().int(),
+    z.object({
+      value: z.union([z.string(), z.number()]),
+      required: z.boolean().optional(),
+      weight: z.number().int().optional(),
+
+      // (optional) comparator for the label
+      // if not provided, the default is EQUAL
+      // desired COMPARATOR actual (i.e. desired > actual for GREATER_THAN)
+      comparator: z.nativeEnum(WorkerLabelComparator).optional(),
+    }),
+  ])
+  .optional();
+
+export const CreateStepSchema = z.object({
+  name: z.string(),
+  parents: z.array(z.string()).optional(),
+  timeout: z.string().optional(),
+  retries: z.number().optional(),
+  rate_limits: z.array(CreateRateLimitSchema).optional(),
+  worker_labels: z.record(z.lazy(() => DesiredWorkerLabelSchema)).optional(),
+  backoff: z
+    .object({
+      factor: z.number().optional(),
+      maxSeconds: z.number().optional(),
+    })
+    .optional(),
+});
+
+export type NextStep = { [key: string]: JsonValue };
+
+type TriggerData = Record<string, Record<string, any>>;
+
+type ChildRunOpts = RunOpts & { key?: string; sticky?: boolean };
+
+interface ContextData<T, K> {
+  input: T;
+  triggers: TriggerData;
+  parents: Record<string, any>;
+  triggered_by: string;
+  user_data: K;
+  step_run_errors: Record<string, string>;
+}
+
+export class ContextWorker {
+  private worker: V1Worker;
+
+  constructor(worker: V1Worker) {
+    this.worker = worker;
+  }
+
+  /**
+   * Gets the ID of the worker.
+   * @returns The ID of the worker.
+   */
+  id() {
+    return this.worker.workerId;
+  }
+
+  /**
+   * Checks if the worker has a registered workflow.
+   * @param workflowName - The name of the workflow to check.
+   * @returns True if the workflow is registered, otherwise false.
+   */
+  hasWorkflow(workflowName: string) {
+    return !!this.worker.workflow_registry.find((workflow) =>
+      'id' in workflow ? workflow.id === workflowName : workflow.name === workflowName
+    );
+  }
+
+  /**
+   * Gets the current state of the worker labels.
+   * @returns The labels of the worker.
+   */
+  labels() {
+    return this.worker.labels;
+  }
+
+  /**
+   * Upserts a set of labels on the worker.
+   * @param labels - The labels to upsert.
+   * @returns A promise that resolves when the labels have been upserted.
+   */
+  upsertLabels(labels: WorkerLabels) {
+    return this.worker.upsertLabels(labels);
+  }
+}
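
// Sketch (hypothetical step body, not part of the patch): ContextWorker gives a
// running step access to its worker, e.g. to update affinity labels after it has
// loaded a model into memory. Assumes worker labels are a string-keyed record.
async function warmModel(ctx: V0Context<{ model: string }, unknown>) {
  if (ctx.worker.labels().model !== ctx.input.model) {
    await ctx.worker.upsertLabels({ model: ctx.input.model });
  }
  return { warmed: true };
}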
+
+export class V0Context<T, K> {
+  data: ContextData<T, K>;
+
+  // @deprecated use input prop instead
+  input: T;
+
+  // @deprecated use ctx.abortController instead
+  controller = new AbortController();
+
+  action: Action;
+
+  v0: LegacyHatchetClient;
+
+  worker: ContextWorker;
+
+  overridesData: Record<string, any> = {};
+
+  logger: Logger;
+
+  spawnIndex: number = 0;
+
+  constructor(action: Action, client: LegacyHatchetClient, worker: V1Worker) {
+    try {
+      const data = parseJSON(action.actionPayload);
+      this.data = data;
+      this.action = action;
+      this.v0 = client;
+      this.worker = new ContextWorker(worker);
+      this.logger = client.config.logger(`Context Logger`, client.config.log_level);
+
+      // if this is a getGroupKeyRunId, the data is the workflow input
+      if (action.getGroupKeyRunId !== '') {
+        this.input = data;
+      } else {
+        this.input = data.input;
+      }
+
+      this.overridesData = data.overrides || {};
+    } catch (e: any) {
+      throw new HatchetError(`Could not parse payload: ${e.message}`);
+    }
+  }
+
+  get abortController() {
+    return this.controller;
+  }
+
+  get cancelled() {
+    return this.controller.signal.aborted;
+  }
+
+  /**
+   * Retrieves the output of a parent task.
+   * @param parentTask - A CreateWorkflowTaskOpts or the string name of the parent task.
+   * @returns The output of the specified parent task.
+   * @throws An error if the task output is not found.
+   */
+  async parentOutput<L = NextStep>(parentTask: CreateWorkflowTaskOpts | string) {
+    // NOTE: parentOutput is async since we plan on potentially making this a cacheable server call
+    if (typeof parentTask === 'string') {
+      return this.stepOutput<L>(parentTask);
+    }
+
+    return this.stepOutput(parentTask.name) as L;
+  }
+
+  /**
+   * Get the output of a task.
+   * @param step - The name of the task to get the output for.
+   * @returns The output of the task.
+   * @throws An error if the task output is not found.
+   * @deprecated use ctx.parentOutput instead
+   */
+  stepOutput<L = NextStep>(step: string): L {
+    if (!this.data.parents) {
+      throw new HatchetError('Parent task outputs not found');
+    }
+    if (!this.data.parents[step]) {
+      throw new HatchetError(`Output for parent task '${step}' not found`);
+    }
+    return this.data.parents[step];
+  }
+
+  /**
+   * Returns errors from any task runs in the workflow.
+   * @returns A record mapping task names to error messages.
+   * @remarks Logs an error if no errors are found (this method should be used in on-failure tasks).
+   * @deprecated use ctx.errors() instead
+   */
+  stepRunErrors(): Record<string, string> {
+    return this.errors();
+  }
+
+  /**
+   * Returns errors from any task runs in the workflow.
+   * @returns A record mapping task names to error messages.
+   * @remarks Logs an error if no errors are found (this method should be used in on-failure tasks).
+   */
+  errors(): Record<string, string> {
+    const errors = this.data.step_run_errors || {};
+
+    if (Object.keys(errors).length === 0) {
+      this.logger.error(
+        'No run errors found. `ctx.errors` is intended to be run in an on-failure task, and will only work on engine versions more recent than v0.53.10'
+      );
+    }
+
+    return errors;
+  }
+
+  /**
+   * Gets the dag conditional triggers for the current workflow run.
+   * @returns The triggers for the current workflow.
+   */
+  triggers(): TriggerData {
+    return this.data.triggers;
+  }
+
+  /**
+   * Determines if the workflow was triggered by an event.
+   * @returns True if the workflow was triggered by an event, otherwise false.
+   */
+  triggeredByEvent(): boolean {
+    return this.data?.triggered_by === 'event';
+  }
+
+  /**
+   * Gets the input data for the current workflow.
+   * @returns The input data for the workflow.
+   * @deprecated use task input parameter instead
+   */
+  workflowInput(): T {
+    return this.input;
+  }
+
+  /**
+   * Gets the name of the current workflow.
+   * @returns The name of the workflow.
+   */
+  workflowName(): string {
+    return this.action.jobName;
+  }
+
+  /**
+   * Gets the user data associated with the workflow.
+   * @returns The user data.
+   */
+  userData(): K {
+    return this.data?.user_data;
+  }
+
+  /**
+   * Gets the name of the current task.
+   * @returns The name of the task.
+   * @deprecated use ctx.taskName instead
+   */
+  stepName(): string {
+    return this.taskName();
+  }
+
+  /**
+   * Gets the name of the current running task.
+   * @returns The name of the task.
+   */
+  taskName(): string {
+    return this.action.taskName;
+  }
+
+  /**
+   * Gets the ID of the current workflow run.
+   * @returns The workflow run ID.
+   */
+  workflowRunId(): string {
+    return this.action.workflowRunId;
+  }
+
+  /**
+   * Gets the ID of the current task run.
+   * @returns The task run ID.
+   */
+  taskRunExternalId(): string {
+    return this.action.taskRunExternalId;
+  }
+
+  /**
+   * Gets the number of times the current task has been retried.
+   * @returns The retry count.
+   */
+  retryCount(): number {
+    return this.action.retryCount;
+  }
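
// Sketch (hypothetical steps, not part of the patch): reading a parent task's
// output by name, and collecting per-task errors inside an on-failure step.
const aggregate = async (ctx: V0Context<any, any>) => {
  const fetched = ctx.stepOutput<{ rows: number }>('fetch');
  return { total: fetched.rows };
};
const reportFailure = async (ctx: V0Context<any, any>) => {
  // errors() returns { [taskName]: errorMessage } for the failed tasks in the run
  ctx.log(JSON.stringify(ctx.errors()));
};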
+
+  /**
+   * Logs a message from the current task.
+   * @param message - The message to log.
+   * @param level - The log level (optional).
+   */
+  log(message: string, level?: LogLevel) {
+    const { taskRunExternalId } = this.action;
+
+    if (!taskRunExternalId) {
+      // log a warning
+      this.logger.warn('cannot log from context without stepRunId');
+      return;
+    }
+
+    this.v0.event.putLog(taskRunExternalId, message, level, this.retryCount());
+  }
+
+  /**
+   * Refreshes the timeout for the current task.
+   * @param incrementBy - The interval by which to increment the timeout.
+   * The interval should be specified in the format of '10s' for 10 seconds, '1m' for 1 minute, or '1d' for 1 day.
+   */
+  async refreshTimeout(incrementBy: Duration) {
+    const { taskRunExternalId } = this.action;
+
+    if (!taskRunExternalId) {
+      // log a warning
+      this.logger.warn('cannot refresh timeout from context without stepRunId');
+      return;
+    }
+
+    await this.v0.dispatcher.refreshTimeout(incrementBy, taskRunExternalId);
+  }
+
+  /**
+   * Releases a worker slot for a task run such that the worker can pick up another task.
+   * Note: this is an advanced feature that may lead to unexpected behavior if used incorrectly.
+   * @returns A promise that resolves when the slot has been released.
+   */
+  async releaseSlot(): Promise<void> {
+    await this.v0.dispatcher.client.releaseSlot({
+      taskRunExternalId: this.action.taskRunExternalId,
+    });
+  }
+
+  /**
+   * Streams data from the current task run.
+   * @param data - The data to stream (string or binary).
+   * @returns A promise that resolves when the data has been streamed.
+   */
+  async putStream(data: string | Uint8Array) {
+    const { taskRunExternalId } = this.action;
+
+    if (!taskRunExternalId) {
+      // log a warning
+      this.logger.warn('cannot log from context without stepRunId');
+      return;
+    }
+
+    await this.v0.event.putStream(taskRunExternalId, data, undefined);
+  }
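
// Sketch (hypothetical long-running step): keep the run alive and stream progress
// while chunks are processed; processChunk is an assumed helper.
const transcode = async (ctx: V0Context<{ chunks: string[] }, unknown>) => {
  for (const chunk of ctx.input.chunks) {
    await ctx.refreshTimeout('30s'); // push the execution timeout out by 30 seconds
    await ctx.putStream(`processing ${chunk}`);
    // await processChunk(chunk);
  }
  return { done: true };
};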
+
+  /**
+   * Runs multiple children workflows in parallel without waiting for their results.
+   * @param children - An array of objects containing the workflow name, input data, and options for each workflow.
+   * @returns A list of workflow run references to the enqueued runs.
+   */
+  async bulkRunNoWaitChildren<Q = JsonObject, P = JsonObject>(
+    children: Array<{
+      workflow: string | Workflow | WorkflowV1;
+      input: Q;
+      options?: ChildRunOpts;
+    }>
+  ): Promise<WorkflowRunRef<P>[]> {
+    return this.spawnWorkflows(children);
+  }
+
+  /**
+   * Runs multiple children workflows in parallel and waits for all results.
+   * @param children - An array of objects containing the workflow name, input data, and options for each workflow.
+   * @returns A list of results from the children workflows.
+   */
+  async bulkRunChildren<Q = JsonObject, P = JsonObject>(
+    children: Array<{
+      workflow: string | Workflow | WorkflowV1;
+      input: Q;
+      options?: ChildRunOpts;
+    }>
+  ): Promise<P[]> {
+    const runs = await this.bulkRunNoWaitChildren(children);
+    return Promise.all(runs.map((run) => run.output));
+  }
+
+  /**
+   * Spawns multiple workflows.
+   *
+   * @param workflows - An array of objects containing the workflow name, input data, and options for each workflow.
+   * @returns A list of references to the spawned workflow runs.
+   * @deprecated Use bulkRunNoWaitChildren or bulkRunChildren instead.
+   */
+  async spawnWorkflows<Q = JsonObject, P = JsonObject>(
+    workflows: Array<{
+      workflow: string | Workflow | WorkflowV1;
+      input: Q;
+      options?: ChildRunOpts;
+    }>
+  ): Promise<WorkflowRunRef<P>[]> {
+    const { workflowRunId, taskRunExternalId } = this.action;
+
+    const workflowRuns = workflows.map(({ workflow, input, options }) => {
+      let workflowName: string;
+
+      if (typeof workflow === 'string') {
+        workflowName = workflow;
+      } else {
+        workflowName = workflow.id;
+      }
+
+      const name = applyNamespace(workflowName, this.v0.config.namespace);
+
+      const opts = options || {};
+      const { sticky } = opts;
+
+      if (sticky && !this.worker.hasWorkflow(name)) {
+        throw new HatchetError(
+          `Cannot run with sticky: workflow ${name} is not registered on the worker`
+        );
+      }
+
+      const resp = {
+        workflowName: name,
+        input,
+        options: {
+          ...opts,
+          parentId: workflowRunId,
+          parentStepRunId: taskRunExternalId,
+          childIndex: this.spawnIndex,
+          desiredWorkerId: sticky ? this.worker.id() : undefined,
+        },
+      };
+      this.spawnIndex += 1;
+      return resp;
+    });
+
+    try {
+      const batchSize = 100;
+
+      let resp: WorkflowRunRef<P>[] = [];
+      for (let i = 0; i < workflowRuns.length; i += batchSize) {
+        const batch = workflowRuns.slice(i, i + batchSize);
+        const batchResp = await this.v0.admin.runWorkflows(batch);
+        resp = resp.concat(batchResp);
+      }
+
+      const res: WorkflowRunRef<P>[] = [];
+      resp.forEach((ref, index) => {
+        const wf = workflows[index].workflow;
+        if (wf instanceof TaskWorkflowDeclaration) {
+          // eslint-disable-next-line no-param-reassign
+          ref._standaloneTaskName = wf._standalone_task_name;
+        }
+        res.push(ref);
+      });
+
+      return res;
+    } catch (e: any) {
+      throw new HatchetError(e.message);
+    }
+  }
+
+  /**
+   * Runs a new workflow and waits for its result.
+   *
+   * @param workflow - The workflow to run (name, Workflow instance, or WorkflowV1 instance).
+   * @param input - The input data for the workflow.
+   * @param options - An options object containing key, sticky, priority, and additionalMetadata.
+   * @returns The result of the workflow.
+   */
+  async runChild<Q = JsonObject, P = JsonObject>(
+    workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration,
+    input: Q,
+    options?: ChildRunOpts
+  ): Promise<P> {
+    const run = await this.spawnWorkflow<Q, P>(workflow, input, options);
+    return run.output;
+  }
+
+  /**
+   * Enqueues a new workflow without waiting for its result.
+   *
+   * @param workflow - The workflow to enqueue (name, Workflow instance, or WorkflowV1 instance).
+   * @param input - The input data for the workflow.
+   * @param options - An options object containing key, sticky, priority, and additionalMetadata.
+   * @returns A reference to the spawned workflow run.
+   */
+  async runNoWaitChild<Q = JsonObject, P = JsonObject>(
+    workflow: string | Workflow | WorkflowV1,
+    input: Q,
+    options?: ChildRunOpts
+  ): Promise<WorkflowRunRef<P>> {
+    return this.spawnWorkflow(workflow, input, options);
+  }
+
+  /**
+   * Spawns a new workflow.
+   *
+   * @param workflow - The workflow to spawn (name, Workflow instance, or WorkflowV1 instance).
+   * @param input - The input data for the workflow.
+   * @param options - Additional options for spawning the workflow.
+   * @returns A reference to the spawned workflow run.
+   * @deprecated Use runChild or runNoWaitChild instead.
+   */
+  async spawnWorkflow<Q = JsonObject, P = JsonObject>(
+    workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration,
+    input: Q,
+    options?: ChildRunOpts
+  ): Promise<WorkflowRunRef<P>> {
+    const { workflowRunId, taskRunExternalId } = this.action;
+
+    let workflowName: string = '';
+
+    if (typeof workflow === 'string') {
+      workflowName = workflow;
+    } else {
+      workflowName = workflow.id;
+    }
+
+    const name = applyNamespace(workflowName, this.v0.config.namespace);
+
+    const opts = options || {};
+    const { sticky } = opts;
+
+    if (sticky && !this.worker.hasWorkflow(name)) {
+      throw new HatchetError(
+        `cannot run with sticky: workflow ${name} is not registered on the worker`
+      );
+    }
+
+    try {
+      const resp = await this.v0.admin.runWorkflow(name, input, {
+        parentId: workflowRunId,
+        parentStepRunId: taskRunExternalId,
+        childIndex: this.spawnIndex,
+        desiredWorkerId: sticky ? this.worker.id() : undefined,
+        ...opts,
+      });
+
+      this.spawnIndex += 1;
+
+      if (workflow instanceof TaskWorkflowDeclaration) {
+        resp._standaloneTaskName = workflow._standalone_task_name;
+      }
+
+      return resp;
+    } catch (e: any) {
+      throw new HatchetError(e.message);
+    }
+  }
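
// Sketch (hypothetical fan-out step; 'process-record' is an assumed child workflow):
// enqueue one child run per record and wait for all of them. spawnWorkflows batches
// the underlying admin calls 100 at a time.
const fanOut = async (ctx: V0Context<{ ids: string[] }, unknown>) => {
  const results = await ctx.bulkRunChildren(
    ctx.input.ids.map((id) => ({ workflow: 'process-record', input: { id } }))
  );
  return { children: results.length };
};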
+
+  /**
+   * Retrieves additional metadata associated with the current workflow run.
+   * @returns A record of metadata key-value pairs.
+   */
+  additionalMetadata(): Record<string, string> {
+    if (!this.action.additionalMetadata) {
+      return {};
+    }
+
+    // parse the additional metadata
+    const res: Record<string, string> = parseJSON(this.action.additionalMetadata);
+    return res;
+  }
+
+  /**
+   * Gets the index of this workflow if it was spawned as part of a bulk operation.
+   * @returns The child index number, or undefined if not set.
+   */
+  childIndex(): number | undefined {
+    return this.action.childWorkflowIndex;
+  }
+
+  /**
+   * Gets the key associated with this workflow if it was spawned as a child workflow.
+   * @returns The child key, or undefined if not set.
+   */
+  childKey(): string | undefined {
+    return this.action.childWorkflowKey;
+  }
+
+  /**
+   * Gets the ID of the parent workflow run if this workflow was spawned as a child.
+   * @returns The parent workflow run ID, or undefined if not a child workflow.
+   */
+  parentWorkflowRunId(): string | undefined {
+    return this.action.parentWorkflowRunId;
+  }
+
+  priority(): Priority | undefined {
+    switch (this.action.priority) {
+      case 1:
+        return Priority.LOW;
+      case 2:
+        return Priority.MEDIUM;
+      case 3:
+        return Priority.HIGH;
+      default:
+        return undefined;
+    }
+  }
+}
+
+export class V0DurableContext<T, K> extends V0Context<T, K> {
+  waitKey: number = 0;
+
+  /**
+   * Pauses execution for the specified duration.
+   * Duration is "global" meaning it will wait in real time regardless of transient failures like worker restarts.
+   * @param duration - The duration to sleep for.
+   * @returns A promise that resolves when the sleep duration has elapsed.
+   */
+  async sleepFor(duration: Duration, readableDataKey?: string) {
+    return this.waitFor({ sleepFor: duration, readableDataKey });
+  }
+
+  /**
+   * Pauses execution until the specified conditions are met.
+   * Conditions are "global" meaning they will wait in real time regardless of transient failures like worker restarts.
+   * @param conditions - The conditions to wait for.
+   * @returns A promise that resolves with the event that satisfied the conditions.
+   */
+  async waitFor(conditions: Conditions | Conditions[]): Promise<Record<string, any>> {
+    const pbConditions = conditionsToPb(Render(ConditionAction.CREATE, conditions));
+
+    // eslint-disable-next-line no-plusplus
+    const key = `waitFor-${this.waitKey++}`;
+    await this.v0.durableListener.registerDurableEvent({
+      taskId: this.action.taskRunExternalId,
+      signalKey: key,
+      sleepConditions: pbConditions.sleepConditions,
+      userEventConditions: pbConditions.userEventConditions,
+    });
+
+    const listener = this.v0.durableListener.subscribe({
+      taskId: this.action.taskRunExternalId,
+      signalKey: key,
+    });
+
+    const event = await listener.get();
+
+    // Convert event.data from Uint8Array to string if needed
+    const eventData =
+      event.data instanceof Uint8Array ? new TextDecoder().decode(event.data) : event.data;
+
+    const res = JSON.parse(eventData) as Record<string, Record<string, any>>;
+    return res.CREATE;
+  }
+}
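
// Sketch (hypothetical durable step): the sleep is registered server-side as a
// durable event rather than an in-process timer, so it survives worker restarts.
const remindTomorrow = async (ctx: V0DurableContext<{ userId: string }, unknown>) => {
  await ctx.sleepFor('1d');
  return { reminded: ctx.input.userId };
};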
+
+export type StepRunFunction<T, K> = (
+  ctx: V0Context<T, K>
+) => Promise<NextStep | void> | NextStep | void;
+
+/**
+ * A step is a unit of work that can be run by a worker.
+ * It is defined by a name, a function that returns the next step, and optional configuration.
+ * @deprecated use hatchet.workflows.task factory instead
+ */
+export interface CreateStep<T, K> extends z.infer<typeof CreateStepSchema> {
+  run: StepRunFunction<T, K>;
+}
+
+export function mapRateLimit(limits: CreateStep<any, any>['rate_limits']): CreateStepRateLimit[] {
+  if (!limits) return [];
+
+  return limits.map((l) => {
+    let key = l.staticKey;
+    const keyExpression = l.dynamicKey;
+
+    if (l.key !== undefined) {
+      // eslint-disable-next-line no-console
+      console.warn(
+        'key is deprecated and will be removed in a future release, please use staticKey instead'
+      );
+      key = l.key;
+    }
+
+    if (keyExpression !== undefined) {
+      if (key !== undefined) {
+        throw new Error('Cannot have both static key and dynamic key set');
+      }
+      key = keyExpression;
+      if (!validateCelExpression(keyExpression)) {
+        throw new Error(`Invalid CEL expression: ${keyExpression}`);
+      }
+    }
+
+    if (key === undefined) {
+      throw new Error(`Invalid key`);
+    }
+
+    let units: number | undefined;
+    let unitsExpression: string | undefined;
+    if (typeof l.units === 'number') {
+      units = l.units;
+    } else {
+      if (!validateCelExpression(l.units)) {
+        throw new Error(`Invalid CEL expression: ${l.units}`);
+      }
+      unitsExpression = l.units;
+    }
+
+    let limitExpression: string | undefined;
+    if (l.limit !== undefined) {
+      if (typeof l.limit === 'number') {
+        limitExpression = `${l.limit}`;
+      } else {
+        if (!validateCelExpression(l.limit)) {
+          throw new Error(`Invalid CEL expression: ${l.limit}`);
+        }
+
+        limitExpression = l.limit;
+      }
+    }
+
+    if (keyExpression !== undefined && limitExpression === undefined) {
+      throw new Error('CEL-based keys require a limit to be set');
+    }
+
+    if (limitExpression === undefined) {
+      limitExpression = `-1`;
+    }
+
+    return {
+      key,
+      keyExpr: keyExpression,
+      units,
+      unitsExpr: unitsExpression,
+      limitValuesExpr: limitExpression,
+      duration: l.duration,
+    };
+  });
+}
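
// Sketch of the mapping above: a static key may omit `limit` (it becomes the
// sentinel expression '-1'), while a dynamic CEL key must set one explicitly.
const mapped = mapRateLimit([
  { staticKey: 'external-api', units: 1 },
  { dynamicKey: 'input.userId', units: 1, limit: 100, duration: RateLimitDuration.MINUTE },
]);
// mapped[0] -> { key: 'external-api', units: 1, limitValuesExpr: '-1', ... }
// mapped[1] -> { key: 'input.userId', keyExpr: 'input.userId', units: 1, limitValuesExpr: '100', ... }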
+
+// Helper function to validate CEL expressions
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
+function validateCelExpression(expr: string): boolean {
+  // This is a placeholder. In a real implementation, you'd need to use a CEL parser or validator.
+  // For now, we'll just return true to mimic the behavior.
+  return true;
+}
diff --git a/sdks/typescript/src/legacy/workflow.ts b/sdks/typescript/src/legacy/workflow.ts
new file mode 100644
index 0000000000..03083d29c6
--- /dev/null
+++ b/sdks/typescript/src/legacy/workflow.ts
@@ -0,0 +1,65 @@
+import * as z from 'zod';
+
+import { CreateStep, CreateStepSchema } from '../step';
+import {
+  ConcurrencyLimitStrategy as PbConcurrencyLimitStrategy,
+  StickyStrategy as PbStickyStrategy,
+} from '../protoc/workflows';
+
+const CronConfigSchema = z.object({
+  cron: z.string(),
+  event: z.undefined(),
+});
+
+const EventConfigSchema = z.object({
+  cron: z.undefined(),
+  event: z.string(),
+});
+
+const OnConfigSchema = z.union([CronConfigSchema, EventConfigSchema]).optional();
+
+const StepsSchema = z.array(CreateStepSchema);
+
+export type Steps = z.infer<typeof StepsSchema>;
+
+export const ConcurrencyLimitStrategy = PbConcurrencyLimitStrategy;
+
+export const WorkflowConcurrency = z.object({
+  name: z.string(),
+  maxRuns: z.number().optional(),
+  limitStrategy: z.nativeEnum(ConcurrencyLimitStrategy).optional(),
+  expression: z.string().optional(),
+});
+
+export const HatchetTimeoutSchema = z.string();
+
+export const StickyStrategy = PbStickyStrategy;
+
+export const CreateWorkflowSchema = z.object({
+  id: z.string(),
+  description: z.string(),
+  version: z.string().optional(),
+  /**
+   * sticky will attempt to run all steps for workflow on the same worker
+   */
+  sticky: z.nativeEnum(StickyStrategy).optional(),
+  scheduleTimeout: z.string().optional(),
+  /**
+   * @deprecated Workflow timeout is deprecated. Use step timeouts instead.
+   */
+  timeout: HatchetTimeoutSchema.optional(),
+  on: OnConfigSchema,
+  steps: StepsSchema,
+  onFailure: CreateStepSchema.optional(),
+});
+
+/**
+ * @deprecated Use client.workflow instead (TODO link to migration doc)
+ */
+export interface Workflow extends z.infer<typeof CreateWorkflowSchema> {
+  concurrency?: z.infer<typeof WorkflowConcurrency> & {
+    key?: (ctx: any) => string;
+  };
+  steps: CreateStep<any, any>[];
+  onFailure?: CreateStep<any, any>;
+}
diff --git a/sdks/typescript/src/protoc/dispatcher/dispatcher.ts b/sdks/typescript/src/protoc/dispatcher/dispatcher.ts
index 4d47be0b41..76e4f33627 100644
--- a/sdks/typescript/src/protoc/dispatcher/dispatcher.ts
+++ b/sdks/typescript/src/protoc/dispatcher/dispatcher.ts
@@ -341,14 +341,18 @@ export interface WorkerRegisterRequest {
   actions: string[];
   /** (optional) the services for this worker */
   services: string[];
-  /** (optional) the max number of runs this worker can handle */
-  maxRuns?: number | undefined;
+  /** (optional) the number of default slots this worker can handle */
+  slots?: number | undefined;
   /** (optional) worker labels (i.e. 
state or other metadata) */ labels: { [key: string]: WorkerLabels }; /** (optional) webhookId is the id of the webhook that the worker is associated with (if any) */ webhookId?: string | undefined; /** (optional) information regarding the runtime environment of the worker */ runtimeInfo?: RuntimeInfo | undefined; + /** (optional) the max number of durable slots this worker can handle */ + durableSlots?: number | undefined; + /** (optional) slot config for this worker (slot_type -> units) */ + slotConfig: { [key: string]: number }; } export interface WorkerRegisterRequest_LabelsEntry { @@ -356,6 +360,11 @@ export interface WorkerRegisterRequest_LabelsEntry { value: WorkerLabels | undefined; } +export interface WorkerRegisterRequest_SlotConfigEntry { + key: string; + value: number; +} + export interface WorkerRegisterResponse { /** the tenant id */ tenantId: string; @@ -397,18 +406,18 @@ export interface AssignedAction { jobName: string; /** the job run id */ jobRunId: string; - /** the step id */ - stepId: string; - /** the step run id */ - stepRunId: string; + /** the task id */ + taskId: string; + /** the task external run id */ + taskRunExternalId: string; /** the action id */ actionId: string; /** the action type */ actionType: ActionType; /** the action payload */ actionPayload: string; - /** the step name */ - stepName: string; + /** the task name */ + taskName: string; /** the count number of the retry attempt */ retryCount: number; /** (optional) additional metadata set on the workflow */ @@ -453,7 +462,7 @@ export interface GroupKeyActionEvent { /** the action id */ actionId: string; eventTimestamp: Date | undefined; - /** the step event type */ + /** the task event type */ eventType: GroupKeyActionEventType; /** the event payload */ eventPayload: string; @@ -466,14 +475,14 @@ export interface StepActionEvent { jobId: string; /** the job run id */ jobRunId: string; - /** the id of the step */ - stepId: string; - /** the step run id */ - stepRunId: string; + /** the id of the task */ + taskId: string; + /** the task external run id */ + taskRunExternalId: string; /** the action id */ actionId: string; eventTimestamp: Date | undefined; - /** the step event type */ + /** the task event type */ eventType: StepActionEventType; /** the event payload */ eventPayload: string; @@ -518,9 +527,9 @@ export interface WorkflowEvent { * will hang up the connection but clients might want to case */ hangup: boolean; - /** (optional) the max number of retries this step can handle */ - stepRetries?: number | undefined; - /** (optional) the retry count of this step */ + /** (optional) the max number of retries this task can handle */ + taskRetries?: number | undefined; + /** (optional) the retry count of this task */ retryCount?: number | undefined; eventIndex?: number | undefined; } @@ -534,16 +543,16 @@ export interface WorkflowRunEvent { } export interface StepRunResult { - stepRunId: string; - stepReadableId: string; + taskRunExternalId: string; + taskName: string; jobRunId: string; error?: string | undefined; output?: string | undefined; } export interface OverridesData { - /** the step run id */ - stepRunId: string; + /** the task run id */ + taskRunExternalId: string; /** the path of the data to set */ path: string; /** the value to set */ @@ -564,8 +573,8 @@ export interface HeartbeatRequest { export interface HeartbeatResponse {} export interface RefreshTimeoutRequest { - /** the id of the step run to release */ - stepRunId: string; + /** the id of the task run to release */ + 
taskRunExternalId: string; incrementTimeoutBy: string; } @@ -574,8 +583,8 @@ export interface RefreshTimeoutResponse { } export interface ReleaseSlotRequest { - /** the id of the step run to release */ - stepRunId: string; + /** the id of the task run to release */ + taskRunExternalId: string; } export interface ReleaseSlotResponse {} @@ -793,10 +802,12 @@ function createBaseWorkerRegisterRequest(): WorkerRegisterRequest { workerName: '', actions: [], services: [], - maxRuns: undefined, + slots: undefined, labels: {}, webhookId: undefined, runtimeInfo: undefined, + durableSlots: undefined, + slotConfig: {}, }; } @@ -811,8 +822,8 @@ export const WorkerRegisterRequest: MessageFns = { for (const v of message.services) { writer.uint32(26).string(v!); } - if (message.maxRuns !== undefined) { - writer.uint32(32).int32(message.maxRuns); + if (message.slots !== undefined) { + writer.uint32(32).int32(message.slots); } Object.entries(message.labels).forEach(([key, value]) => { WorkerRegisterRequest_LabelsEntry.encode( @@ -826,6 +837,15 @@ export const WorkerRegisterRequest: MessageFns = { if (message.runtimeInfo !== undefined) { RuntimeInfo.encode(message.runtimeInfo, writer.uint32(58).fork()).join(); } + if (message.durableSlots !== undefined) { + writer.uint32(64).int32(message.durableSlots); + } + Object.entries(message.slotConfig).forEach(([key, value]) => { + WorkerRegisterRequest_SlotConfigEntry.encode( + { key: key as any, value }, + writer.uint32(74).fork() + ).join(); + }); return writer; }, @@ -865,7 +885,7 @@ export const WorkerRegisterRequest: MessageFns = { break; } - message.maxRuns = reader.int32(); + message.slots = reader.int32(); continue; } case 5: { @@ -895,6 +915,25 @@ export const WorkerRegisterRequest: MessageFns = { message.runtimeInfo = RuntimeInfo.decode(reader, reader.uint32()); continue; } + case 8: { + if (tag !== 64) { + break; + } + + message.durableSlots = reader.int32(); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + const entry9 = WorkerRegisterRequest_SlotConfigEntry.decode(reader, reader.uint32()); + if (entry9.value !== undefined) { + message.slotConfig[entry9.key] = entry9.value; + } + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; @@ -913,7 +952,7 @@ export const WorkerRegisterRequest: MessageFns = { services: globalThis.Array.isArray(object?.services) ? object.services.map((e: any) => globalThis.String(e)) : [], - maxRuns: isSet(object.maxRuns) ? globalThis.Number(object.maxRuns) : undefined, + slots: isSet(object.slots) ? globalThis.Number(object.slots) : undefined, labels: isObject(object.labels) ? Object.entries(object.labels).reduce<{ [key: string]: WorkerLabels }>( (acc, [key, value]) => { @@ -925,6 +964,16 @@ export const WorkerRegisterRequest: MessageFns = { : {}, webhookId: isSet(object.webhookId) ? globalThis.String(object.webhookId) : undefined, runtimeInfo: isSet(object.runtimeInfo) ? RuntimeInfo.fromJSON(object.runtimeInfo) : undefined, + durableSlots: isSet(object.durableSlots) ? globalThis.Number(object.durableSlots) : undefined, + slotConfig: isObject(object.slotConfig) + ? 
Object.entries(object.slotConfig).reduce<{ [key: string]: number }>( + (acc, [key, value]) => { + acc[key] = Number(value); + return acc; + }, + {} + ) + : {}, }; }, @@ -939,8 +988,8 @@ export const WorkerRegisterRequest: MessageFns = { if (message.services?.length) { obj.services = message.services; } - if (message.maxRuns !== undefined) { - obj.maxRuns = Math.round(message.maxRuns); + if (message.slots !== undefined) { + obj.slots = Math.round(message.slots); } if (message.labels) { const entries = Object.entries(message.labels); @@ -957,6 +1006,18 @@ export const WorkerRegisterRequest: MessageFns = { if (message.runtimeInfo !== undefined) { obj.runtimeInfo = RuntimeInfo.toJSON(message.runtimeInfo); } + if (message.durableSlots !== undefined) { + obj.durableSlots = Math.round(message.durableSlots); + } + if (message.slotConfig) { + const entries = Object.entries(message.slotConfig); + if (entries.length > 0) { + obj.slotConfig = {}; + entries.forEach(([k, v]) => { + obj.slotConfig[k] = Math.round(v); + }); + } + } return obj; }, @@ -968,7 +1029,7 @@ export const WorkerRegisterRequest: MessageFns = { message.workerName = object.workerName ?? ''; message.actions = object.actions?.map((e) => e) || []; message.services = object.services?.map((e) => e) || []; - message.maxRuns = object.maxRuns ?? undefined; + message.slots = object.slots ?? undefined; message.labels = Object.entries(object.labels ?? {}).reduce<{ [key: string]: WorkerLabels }>( (acc, [key, value]) => { if (value !== undefined) { @@ -983,6 +1044,16 @@ export const WorkerRegisterRequest: MessageFns = { object.runtimeInfo !== undefined && object.runtimeInfo !== null ? RuntimeInfo.fromPartial(object.runtimeInfo) : undefined; + message.durableSlots = object.durableSlots ?? undefined; + message.slotConfig = Object.entries(object.slotConfig ?? {}).reduce<{ [key: string]: number }>( + (acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.Number(value); + } + return acc; + }, + {} + ); return message; }, }; @@ -1071,6 +1142,93 @@ export const WorkerRegisterRequest_LabelsEntry: MessageFns = + { + encode( + message: WorkerRegisterRequest_SlotConfigEntry, + writer: BinaryWriter = new BinaryWriter() + ): BinaryWriter { + if (message.key !== '') { + writer.uint32(10).string(message.key); + } + if (message.value !== 0) { + writer.uint32(16).int32(message.value); + } + return writer; + }, + + decode( + input: BinaryReader | Uint8Array, + length?: number + ): WorkerRegisterRequest_SlotConfigEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseWorkerRegisterRequest_SlotConfigEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.value = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): WorkerRegisterRequest_SlotConfigEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : '', + value: isSet(object.value) ? 
globalThis.Number(object.value) : 0, + }; + }, + + toJSON(message: WorkerRegisterRequest_SlotConfigEntry): unknown { + const obj: any = {}; + if (message.key !== '') { + obj.key = message.key; + } + if (message.value !== 0) { + obj.value = Math.round(message.value); + } + return obj; + }, + + create( + base?: DeepPartial + ): WorkerRegisterRequest_SlotConfigEntry { + return WorkerRegisterRequest_SlotConfigEntry.fromPartial(base ?? {}); + }, + fromPartial( + object: DeepPartial + ): WorkerRegisterRequest_SlotConfigEntry { + const message = createBaseWorkerRegisterRequest_SlotConfigEntry(); + message.key = object.key ?? ''; + message.value = object.value ?? 0; + return message; + }, + }; + function createBaseWorkerRegisterResponse(): WorkerRegisterResponse { return { tenantId: '', workerId: '', workerName: '' }; } @@ -1447,12 +1605,12 @@ function createBaseAssignedAction(): AssignedAction { jobId: '', jobName: '', jobRunId: '', - stepId: '', - stepRunId: '', + taskId: '', + taskRunExternalId: '', actionId: '', actionType: 0, actionPayload: '', - stepName: '', + taskName: '', retryCount: 0, additionalMetadata: undefined, childWorkflowIndex: undefined, @@ -1484,11 +1642,11 @@ export const AssignedAction: MessageFns = { if (message.jobRunId !== '') { writer.uint32(50).string(message.jobRunId); } - if (message.stepId !== '') { - writer.uint32(58).string(message.stepId); + if (message.taskId !== '') { + writer.uint32(58).string(message.taskId); } - if (message.stepRunId !== '') { - writer.uint32(66).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(66).string(message.taskRunExternalId); } if (message.actionId !== '') { writer.uint32(74).string(message.actionId); @@ -1499,8 +1657,8 @@ export const AssignedAction: MessageFns = { if (message.actionPayload !== '') { writer.uint32(90).string(message.actionPayload); } - if (message.stepName !== '') { - writer.uint32(98).string(message.stepName); + if (message.taskName !== '') { + writer.uint32(98).string(message.taskName); } if (message.retryCount !== 0) { writer.uint32(104).int32(message.retryCount); @@ -1589,7 +1747,7 @@ export const AssignedAction: MessageFns = { break; } - message.stepId = reader.string(); + message.taskId = reader.string(); continue; } case 8: { @@ -1597,7 +1755,7 @@ export const AssignedAction: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 9: { @@ -1629,7 +1787,7 @@ export const AssignedAction: MessageFns = { break; } - message.stepName = reader.string(); + message.taskName = reader.string(); continue; } case 13: { @@ -1715,12 +1873,14 @@ export const AssignedAction: MessageFns = { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : '', jobName: isSet(object.jobName) ? globalThis.String(object.jobName) : '', jobRunId: isSet(object.jobRunId) ? globalThis.String(object.jobRunId) : '', - stepId: isSet(object.stepId) ? globalThis.String(object.stepId) : '', - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', + taskId: isSet(object.taskId) ? globalThis.String(object.taskId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : '', actionType: isSet(object.actionType) ? actionTypeFromJSON(object.actionType) : 0, actionPayload: isSet(object.actionPayload) ? globalThis.String(object.actionPayload) : '', - stepName: isSet(object.stepName) ? 
globalThis.String(object.stepName) : '', + taskName: isSet(object.taskName) ? globalThis.String(object.taskName) : '', retryCount: isSet(object.retryCount) ? globalThis.Number(object.retryCount) : 0, additionalMetadata: isSet(object.additionalMetadata) ? globalThis.String(object.additionalMetadata) @@ -1762,11 +1922,11 @@ export const AssignedAction: MessageFns = { if (message.jobRunId !== '') { obj.jobRunId = message.jobRunId; } - if (message.stepId !== '') { - obj.stepId = message.stepId; + if (message.taskId !== '') { + obj.taskId = message.taskId; } - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.actionId !== '') { obj.actionId = message.actionId; @@ -1777,8 +1937,8 @@ export const AssignedAction: MessageFns = { if (message.actionPayload !== '') { obj.actionPayload = message.actionPayload; } - if (message.stepName !== '') { - obj.stepName = message.stepName; + if (message.taskName !== '') { + obj.taskName = message.taskName; } if (message.retryCount !== 0) { obj.retryCount = Math.round(message.retryCount); @@ -1818,12 +1978,12 @@ export const AssignedAction: MessageFns = { message.jobId = object.jobId ?? ''; message.jobName = object.jobName ?? ''; message.jobRunId = object.jobRunId ?? ''; - message.stepId = object.stepId ?? ''; - message.stepRunId = object.stepRunId ?? ''; + message.taskId = object.taskId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.actionId = object.actionId ?? ''; message.actionType = object.actionType ?? 0; message.actionPayload = object.actionPayload ?? ''; - message.stepName = object.stepName ?? ''; + message.taskName = object.taskName ?? ''; message.retryCount = object.retryCount ?? 0; message.additionalMetadata = object.additionalMetadata ?? undefined; message.childWorkflowIndex = object.childWorkflowIndex ?? undefined; @@ -2207,8 +2367,8 @@ function createBaseStepActionEvent(): StepActionEvent { workerId: '', jobId: '', jobRunId: '', - stepId: '', - stepRunId: '', + taskId: '', + taskRunExternalId: '', actionId: '', eventTimestamp: undefined, eventType: 0, @@ -2229,11 +2389,11 @@ export const StepActionEvent: MessageFns = { if (message.jobRunId !== '') { writer.uint32(26).string(message.jobRunId); } - if (message.stepId !== '') { - writer.uint32(34).string(message.stepId); + if (message.taskId !== '') { + writer.uint32(34).string(message.taskId); } - if (message.stepRunId !== '') { - writer.uint32(42).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(42).string(message.taskRunExternalId); } if (message.actionId !== '') { writer.uint32(50).string(message.actionId); @@ -2292,7 +2452,7 @@ export const StepActionEvent: MessageFns = { break; } - message.stepId = reader.string(); + message.taskId = reader.string(); continue; } case 5: { @@ -2300,7 +2460,7 @@ export const StepActionEvent: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 6: { @@ -2365,8 +2525,10 @@ export const StepActionEvent: MessageFns = { workerId: isSet(object.workerId) ? globalThis.String(object.workerId) : '', jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : '', jobRunId: isSet(object.jobRunId) ? globalThis.String(object.jobRunId) : '', - stepId: isSet(object.stepId) ? globalThis.String(object.stepId) : '', - stepRunId: isSet(object.stepRunId) ? 
globalThis.String(object.stepRunId) : '', + taskId: isSet(object.taskId) ? globalThis.String(object.taskId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : '', eventTimestamp: isSet(object.eventTimestamp) ? fromJsonTimestamp(object.eventTimestamp) @@ -2391,11 +2553,11 @@ export const StepActionEvent: MessageFns = { if (message.jobRunId !== '') { obj.jobRunId = message.jobRunId; } - if (message.stepId !== '') { - obj.stepId = message.stepId; + if (message.taskId !== '') { + obj.taskId = message.taskId; } - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.actionId !== '') { obj.actionId = message.actionId; @@ -2426,8 +2588,8 @@ export const StepActionEvent: MessageFns = { message.workerId = object.workerId ?? ''; message.jobId = object.jobId ?? ''; message.jobRunId = object.jobRunId ?? ''; - message.stepId = object.stepId ?? ''; - message.stepRunId = object.stepRunId ?? ''; + message.taskId = object.taskId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.actionId = object.actionId ?? ''; message.eventTimestamp = object.eventTimestamp ?? undefined; message.eventType = object.eventType ?? 0; @@ -2689,7 +2851,7 @@ function createBaseWorkflowEvent(): WorkflowEvent { eventTimestamp: undefined, eventPayload: '', hangup: false, - stepRetries: undefined, + taskRetries: undefined, retryCount: undefined, eventIndex: undefined, }; @@ -2718,8 +2880,8 @@ export const WorkflowEvent: MessageFns = { if (message.hangup !== false) { writer.uint32(56).bool(message.hangup); } - if (message.stepRetries !== undefined) { - writer.uint32(64).int32(message.stepRetries); + if (message.taskRetries !== undefined) { + writer.uint32(64).int32(message.taskRetries); } if (message.retryCount !== undefined) { writer.uint32(72).int32(message.retryCount); @@ -2798,7 +2960,7 @@ export const WorkflowEvent: MessageFns = { break; } - message.stepRetries = reader.int32(); + message.taskRetries = reader.int32(); continue; } case 9: { @@ -2837,7 +2999,7 @@ export const WorkflowEvent: MessageFns = { : undefined, eventPayload: isSet(object.eventPayload) ? globalThis.String(object.eventPayload) : '', hangup: isSet(object.hangup) ? globalThis.Boolean(object.hangup) : false, - stepRetries: isSet(object.stepRetries) ? globalThis.Number(object.stepRetries) : undefined, + taskRetries: isSet(object.taskRetries) ? globalThis.Number(object.taskRetries) : undefined, retryCount: isSet(object.retryCount) ? globalThis.Number(object.retryCount) : undefined, eventIndex: isSet(object.eventIndex) ? globalThis.Number(object.eventIndex) : undefined, }; @@ -2866,8 +3028,8 @@ export const WorkflowEvent: MessageFns = { if (message.hangup !== false) { obj.hangup = message.hangup; } - if (message.stepRetries !== undefined) { - obj.stepRetries = Math.round(message.stepRetries); + if (message.taskRetries !== undefined) { + obj.taskRetries = Math.round(message.taskRetries); } if (message.retryCount !== undefined) { obj.retryCount = Math.round(message.retryCount); @@ -2890,7 +3052,7 @@ export const WorkflowEvent: MessageFns = { message.eventTimestamp = object.eventTimestamp ?? undefined; message.eventPayload = object.eventPayload ?? ''; message.hangup = object.hangup ?? false; - message.stepRetries = object.stepRetries ?? 
undefined; + message.taskRetries = object.taskRetries ?? undefined; message.retryCount = object.retryCount ?? undefined; message.eventIndex = object.eventIndex ?? undefined; return message; @@ -3010,16 +3172,16 @@ export const WorkflowRunEvent: MessageFns = { }; function createBaseStepRunResult(): StepRunResult { - return { stepRunId: '', stepReadableId: '', jobRunId: '', error: undefined, output: undefined }; + return { taskRunExternalId: '', taskName: '', jobRunId: '', error: undefined, output: undefined }; } export const StepRunResult: MessageFns = { encode(message: StepRunResult, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } - if (message.stepReadableId !== '') { - writer.uint32(18).string(message.stepReadableId); + if (message.taskName !== '') { + writer.uint32(18).string(message.taskName); } if (message.jobRunId !== '') { writer.uint32(26).string(message.jobRunId); @@ -3045,7 +3207,7 @@ export const StepRunResult: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 2: { @@ -3053,7 +3215,7 @@ export const StepRunResult: MessageFns = { break; } - message.stepReadableId = reader.string(); + message.taskName = reader.string(); continue; } case 3: { @@ -3091,8 +3253,10 @@ export const StepRunResult: MessageFns = { fromJSON(object: any): StepRunResult { return { - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', - stepReadableId: isSet(object.stepReadableId) ? globalThis.String(object.stepReadableId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', + taskName: isSet(object.taskName) ? globalThis.String(object.taskName) : '', jobRunId: isSet(object.jobRunId) ? globalThis.String(object.jobRunId) : '', error: isSet(object.error) ? globalThis.String(object.error) : undefined, output: isSet(object.output) ? globalThis.String(object.output) : undefined, @@ -3101,11 +3265,11 @@ export const StepRunResult: MessageFns = { toJSON(message: StepRunResult): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } - if (message.stepReadableId !== '') { - obj.stepReadableId = message.stepReadableId; + if (message.taskName !== '') { + obj.taskName = message.taskName; } if (message.jobRunId !== '') { obj.jobRunId = message.jobRunId; @@ -3124,8 +3288,8 @@ export const StepRunResult: MessageFns = { }, fromPartial(object: DeepPartial): StepRunResult { const message = createBaseStepRunResult(); - message.stepRunId = object.stepRunId ?? ''; - message.stepReadableId = object.stepReadableId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; + message.taskName = object.taskName ?? ''; message.jobRunId = object.jobRunId ?? ''; message.error = object.error ?? undefined; message.output = object.output ?? 
undefined; @@ -3134,13 +3298,13 @@ export const StepRunResult: MessageFns = { }; function createBaseOverridesData(): OverridesData { - return { stepRunId: '', path: '', value: '', callerFilename: '' }; + return { taskRunExternalId: '', path: '', value: '', callerFilename: '' }; } export const OverridesData: MessageFns = { encode(message: OverridesData, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } if (message.path !== '') { writer.uint32(18).string(message.path); @@ -3166,7 +3330,7 @@ export const OverridesData: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 2: { @@ -3204,7 +3368,9 @@ export const OverridesData: MessageFns = { fromJSON(object: any): OverridesData { return { - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', path: isSet(object.path) ? globalThis.String(object.path) : '', value: isSet(object.value) ? globalThis.String(object.value) : '', callerFilename: isSet(object.callerFilename) ? globalThis.String(object.callerFilename) : '', @@ -3213,8 +3379,8 @@ export const OverridesData: MessageFns = { toJSON(message: OverridesData): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.path !== '') { obj.path = message.path; @@ -3233,7 +3399,7 @@ export const OverridesData: MessageFns = { }, fromPartial(object: DeepPartial): OverridesData { const message = createBaseOverridesData(); - message.stepRunId = object.stepRunId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.path = object.path ?? ''; message.value = object.value ?? ''; message.callerFilename = object.callerFilename ?? ''; @@ -3404,13 +3570,13 @@ export const HeartbeatResponse: MessageFns = { }; function createBaseRefreshTimeoutRequest(): RefreshTimeoutRequest { - return { stepRunId: '', incrementTimeoutBy: '' }; + return { taskRunExternalId: '', incrementTimeoutBy: '' }; } export const RefreshTimeoutRequest: MessageFns = { encode(message: RefreshTimeoutRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } if (message.incrementTimeoutBy !== '') { writer.uint32(18).string(message.incrementTimeoutBy); @@ -3430,7 +3596,7 @@ export const RefreshTimeoutRequest: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 2: { @@ -3452,7 +3618,9 @@ export const RefreshTimeoutRequest: MessageFns = { fromJSON(object: any): RefreshTimeoutRequest { return { - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', incrementTimeoutBy: isSet(object.incrementTimeoutBy) ? 
globalThis.String(object.incrementTimeoutBy) : '', @@ -3461,8 +3629,8 @@ export const RefreshTimeoutRequest: MessageFns = { toJSON(message: RefreshTimeoutRequest): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.incrementTimeoutBy !== '') { obj.incrementTimeoutBy = message.incrementTimeoutBy; @@ -3475,7 +3643,7 @@ export const RefreshTimeoutRequest: MessageFns = { }, fromPartial(object: DeepPartial): RefreshTimeoutRequest { const message = createBaseRefreshTimeoutRequest(); - message.stepRunId = object.stepRunId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.incrementTimeoutBy = object.incrementTimeoutBy ?? ''; return message; }, @@ -3540,13 +3708,13 @@ export const RefreshTimeoutResponse: MessageFns = { }; function createBaseReleaseSlotRequest(): ReleaseSlotRequest { - return { stepRunId: '' }; + return { taskRunExternalId: '' }; } export const ReleaseSlotRequest: MessageFns = { encode(message: ReleaseSlotRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } return writer; }, @@ -3563,7 +3731,7 @@ export const ReleaseSlotRequest: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } } @@ -3576,13 +3744,17 @@ export const ReleaseSlotRequest: MessageFns = { }, fromJSON(object: any): ReleaseSlotRequest { - return { stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '' }; + return { + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', + }; }, toJSON(message: ReleaseSlotRequest): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } return obj; }, @@ -3592,7 +3764,7 @@ export const ReleaseSlotRequest: MessageFns = { }, fromPartial(object: DeepPartial): ReleaseSlotRequest { const message = createBaseReleaseSlotRequest(); - message.stepRunId = object.stepRunId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? 
''; return message; }, }; diff --git a/sdks/typescript/src/protoc/events/events.ts b/sdks/typescript/src/protoc/events/events.ts index bdb0e7bc7f..e1a741bab4 100644 --- a/sdks/typescript/src/protoc/events/events.ts +++ b/sdks/typescript/src/protoc/events/events.ts @@ -33,8 +33,8 @@ export interface Events { } export interface PutLogRequest { - /** the step run id for the request */ - stepRunId: string; + /** the task external run id for the request */ + taskRunExternalId: string; /** when the log line was created */ createdAt: Date | undefined; /** the log line message */ @@ -50,8 +50,8 @@ export interface PutLogRequest { export interface PutLogResponse {} export interface PutStreamEventRequest { - /** the step run id for the request */ - stepRunId: string; + /** the task external run id for the request */ + taskRunExternalId: string; /** when the stream event was created */ createdAt: Date | undefined; /** the stream event message */ @@ -318,7 +318,7 @@ export const Events: MessageFns = { function createBasePutLogRequest(): PutLogRequest { return { - stepRunId: '', + taskRunExternalId: '', createdAt: undefined, message: '', level: undefined, @@ -329,8 +329,8 @@ function createBasePutLogRequest(): PutLogRequest { export const PutLogRequest: MessageFns = { encode(message: PutLogRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } if (message.createdAt !== undefined) { Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(18).fork()).join(); @@ -362,7 +362,7 @@ export const PutLogRequest: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 2: { @@ -416,7 +416,9 @@ export const PutLogRequest: MessageFns = { fromJSON(object: any): PutLogRequest { return { - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', createdAt: isSet(object.createdAt) ? fromJsonTimestamp(object.createdAt) : undefined, message: isSet(object.message) ? globalThis.String(object.message) : '', level: isSet(object.level) ? globalThis.String(object.level) : undefined, @@ -429,8 +431,8 @@ export const PutLogRequest: MessageFns = { toJSON(message: PutLogRequest): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.createdAt !== undefined) { obj.createdAt = message.createdAt.toISOString(); @@ -455,7 +457,7 @@ export const PutLogRequest: MessageFns = { }, fromPartial(object: DeepPartial): PutLogRequest { const message = createBasePutLogRequest(); - message.stepRunId = object.stepRunId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.createdAt = object.createdAt ?? undefined; message.message = object.message ?? ''; message.level = object.level ?? 
undefined; @@ -510,7 +512,7 @@ export const PutLogResponse: MessageFns = { function createBasePutStreamEventRequest(): PutStreamEventRequest { return { - stepRunId: '', + taskRunExternalId: '', createdAt: undefined, message: new Uint8Array(0), metadata: '', @@ -520,8 +522,8 @@ function createBasePutStreamEventRequest(): PutStreamEventRequest { export const PutStreamEventRequest: MessageFns = { encode(message: PutStreamEventRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { - if (message.stepRunId !== '') { - writer.uint32(10).string(message.stepRunId); + if (message.taskRunExternalId !== '') { + writer.uint32(10).string(message.taskRunExternalId); } if (message.createdAt !== undefined) { Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(18).fork()).join(); @@ -550,7 +552,7 @@ export const PutStreamEventRequest: MessageFns = { break; } - message.stepRunId = reader.string(); + message.taskRunExternalId = reader.string(); continue; } case 2: { @@ -596,7 +598,9 @@ export const PutStreamEventRequest: MessageFns = { fromJSON(object: any): PutStreamEventRequest { return { - stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : '', + taskRunExternalId: isSet(object.taskRunExternalId) + ? globalThis.String(object.taskRunExternalId) + : '', createdAt: isSet(object.createdAt) ? fromJsonTimestamp(object.createdAt) : undefined, message: isSet(object.message) ? bytesFromBase64(object.message) : new Uint8Array(0), metadata: isSet(object.metadata) ? globalThis.String(object.metadata) : '', @@ -606,8 +610,8 @@ export const PutStreamEventRequest: MessageFns = { toJSON(message: PutStreamEventRequest): unknown { const obj: any = {}; - if (message.stepRunId !== '') { - obj.stepRunId = message.stepRunId; + if (message.taskRunExternalId !== '') { + obj.taskRunExternalId = message.taskRunExternalId; } if (message.createdAt !== undefined) { obj.createdAt = message.createdAt.toISOString(); @@ -629,7 +633,7 @@ export const PutStreamEventRequest: MessageFns = { }, fromPartial(object: DeepPartial): PutStreamEventRequest { const message = createBasePutStreamEventRequest(); - message.stepRunId = object.stepRunId ?? ''; + message.taskRunExternalId = object.taskRunExternalId ?? ''; message.createdAt = object.createdAt ?? undefined; message.message = object.message ?? new Uint8Array(0); message.metadata = object.metadata ?? ''; diff --git a/sdks/typescript/src/protoc/v1/workflows.ts b/sdks/typescript/src/protoc/v1/workflows.ts index 3c9b287a56..682a2bbc82 100644 --- a/sdks/typescript/src/protoc/v1/workflows.ts +++ b/sdks/typescript/src/protoc/v1/workflows.ts @@ -328,7 +328,7 @@ export interface CreateWorkflowVersionRequest { cronInput?: string | undefined; /** (optional) the job to run on failure */ onFailureTask?: CreateTaskOpts | undefined; - /** (optional) the sticky strategy for assigning steps to workers */ + /** (optional) the sticky strategy for assigning tasks to workers */ sticky?: StickyStrategy | undefined; /** (optional) the default priority for the workflow */ defaultPriority?: number | undefined; @@ -393,15 +393,15 @@ export interface CreateTaskOpts { inputs: string; /** (optional) the task parents. 
if none are passed in, this is a root task */ parents: string[]; - /** (optional) the number of retries for the step, default 0 */ + /** (optional) the number of retries for the task, default 0 */ retries: number; - /** (optional) the rate limits for the step */ + /** (optional) the rate limits for the task */ rateLimits: CreateTaskRateLimit[]; - /** (optional) the desired worker affinity state for the step */ + /** (optional) the desired worker affinity state for the task */ workerLabels: { [key: string]: DesiredWorkerLabels }; - /** (optional) the retry backoff factor for the step */ + /** (optional) the retry backoff factor for the task */ backoffFactor?: number | undefined; - /** (optional) the maximum backoff time for the step */ + /** (optional) the maximum backoff time for the task */ backoffMaxSeconds?: number | undefined; /** (optional) the task concurrency options */ concurrency: Concurrency[]; @@ -409,6 +409,10 @@ export interface CreateTaskOpts { conditions?: TaskConditions | undefined; /** (optional) the timeout for the schedule */ scheduleTimeout?: string | undefined; + /** (optional) whether the task is durable */ + isDurable: boolean; + /** (optional) slot requests (slot_type -> units) */ + slotRequests: { [key: string]: number }; } export interface CreateTaskOpts_WorkerLabelsEntry { @@ -416,10 +420,15 @@ export interface CreateTaskOpts_WorkerLabelsEntry { value: DesiredWorkerLabels | undefined; } +export interface CreateTaskOpts_SlotRequestsEntry { + key: string; + value: number; +} + export interface CreateTaskRateLimit { /** (required) the key for the rate limit */ key: string; - /** (optional) the number of units this step consumes */ + /** (optional) the number of units this task consumes */ units?: number | undefined; /** (optional) a CEL expression for determining the rate limit key */ keyExpr?: string | undefined; @@ -1715,6 +1724,8 @@ function createBaseCreateTaskOpts(): CreateTaskOpts { concurrency: [], conditions: undefined, scheduleTimeout: undefined, + isDurable: false, + slotRequests: {}, }; } @@ -1762,6 +1773,15 @@ export const CreateTaskOpts: MessageFns = { if (message.scheduleTimeout !== undefined) { writer.uint32(106).string(message.scheduleTimeout); } + if (message.isDurable !== false) { + writer.uint32(112).bool(message.isDurable); + } + Object.entries(message.slotRequests).forEach(([key, value]) => { + CreateTaskOpts_SlotRequestsEntry.encode( + { key: key as any, value }, + writer.uint32(122).fork() + ).join(); + }); return writer; }, @@ -1879,6 +1899,25 @@ export const CreateTaskOpts: MessageFns = { message.scheduleTimeout = reader.string(); continue; } + case 14: { + if (tag !== 112) { + break; + } + + message.isDurable = reader.bool(); + continue; + } + case 15: { + if (tag !== 122) { + break; + } + + const entry15 = CreateTaskOpts_SlotRequestsEntry.decode(reader, reader.uint32()); + if (entry15.value !== undefined) { + message.slotRequests[entry15.key] = entry15.value; + } + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; @@ -1923,6 +1962,16 @@ export const CreateTaskOpts: MessageFns = { scheduleTimeout: isSet(object.scheduleTimeout) ? globalThis.String(object.scheduleTimeout) : undefined, + isDurable: isSet(object.isDurable) ? globalThis.Boolean(object.isDurable) : false, + slotRequests: isObject(object.slotRequests) + ? 
Object.entries(object.slotRequests).reduce<{ [key: string]: number }>( + (acc, [key, value]) => { + acc[key] = Number(value); + return acc; + }, + {} + ) + : {}, }; }, @@ -1973,6 +2022,18 @@ export const CreateTaskOpts: MessageFns = { if (message.scheduleTimeout !== undefined) { obj.scheduleTimeout = message.scheduleTimeout; } + if (message.isDurable !== false) { + obj.isDurable = message.isDurable; + } + if (message.slotRequests) { + const entries = Object.entries(message.slotRequests); + if (entries.length > 0) { + obj.slotRequests = {}; + entries.forEach(([k, v]) => { + obj.slotRequests[k] = Math.round(v); + }); + } + } return obj; }, @@ -2004,6 +2065,15 @@ export const CreateTaskOpts: MessageFns = { ? TaskConditions.fromPartial(object.conditions) : undefined; message.scheduleTimeout = object.scheduleTimeout ?? undefined; + message.isDurable = object.isDurable ?? false; + message.slotRequests = Object.entries(object.slotRequests ?? {}).reduce<{ + [key: string]: number; + }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = globalThis.Number(value); + } + return acc; + }, {}); return message; }, }; @@ -2092,6 +2162,87 @@ export const CreateTaskOpts_WorkerLabelsEntry: MessageFns = { + encode( + message: CreateTaskOpts_SlotRequestsEntry, + writer: BinaryWriter = new BinaryWriter() + ): BinaryWriter { + if (message.key !== '') { + writer.uint32(10).string(message.key); + } + if (message.value !== 0) { + writer.uint32(16).int32(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateTaskOpts_SlotRequestsEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCreateTaskOpts_SlotRequestsEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.value = reader.int32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateTaskOpts_SlotRequestsEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : '', + value: isSet(object.value) ? globalThis.Number(object.value) : 0, + }; + }, + + toJSON(message: CreateTaskOpts_SlotRequestsEntry): unknown { + const obj: any = {}; + if (message.key !== '') { + obj.key = message.key; + } + if (message.value !== 0) { + obj.value = Math.round(message.value); + } + return obj; + }, + + create(base?: DeepPartial): CreateTaskOpts_SlotRequestsEntry { + return CreateTaskOpts_SlotRequestsEntry.fromPartial(base ?? {}); + }, + fromPartial( + object: DeepPartial + ): CreateTaskOpts_SlotRequestsEntry { + const message = createBaseCreateTaskOpts_SlotRequestsEntry(); + message.key = object.key ?? ''; + message.value = object.value ?? 
0; + return message; + }, +}; + function createBaseCreateTaskRateLimit(): CreateTaskRateLimit { return { key: '', diff --git a/sdks/typescript/src/protoc/workflows/workflows.ts b/sdks/typescript/src/protoc/workflows/workflows.ts index cad3becbb0..7c93d3cfb4 100644 --- a/sdks/typescript/src/protoc/workflows/workflows.ts +++ b/sdks/typescript/src/protoc/workflows/workflows.ts @@ -284,7 +284,7 @@ export interface CreateWorkflowVersionOpts { cronInput?: string | undefined; /** (optional) the job to run on failure */ onFailureJob?: CreateWorkflowJobOpts | undefined; - /** (optional) the sticky strategy for assigning steps to workers */ + /** (optional) the sticky strategy for assigning tasks to workers */ sticky?: StickyStrategy | undefined; /** (optional) the kind of workflow */ kind?: WorkflowKind | undefined; @@ -309,7 +309,7 @@ export interface CreateWorkflowJobOpts { name: string; /** (optional) the job description */ description: string; - /** (required) the job steps */ + /** (required) the job tasks */ steps: CreateWorkflowStepOpts[]; } @@ -336,29 +336,29 @@ export interface DesiredWorkerLabels { weight?: number | undefined; } -/** CreateWorkflowStepOpts represents options to create a workflow step. */ +/** CreateWorkflowStepOpts represents options to create a workflow task. */ export interface CreateWorkflowStepOpts { - /** (required) the step name */ + /** (required) the task name */ readableId: string; - /** (required) the step action id */ + /** (required) the task action id */ action: string; - /** (optional) the step timeout */ + /** (optional) the task timeout */ timeout: string; - /** (optional) the step inputs, assuming string representation of JSON */ + /** (optional) the task inputs, assuming string representation of JSON */ inputs: string; - /** (optional) the step parents. if none are passed in, this is a root step */ + /** (optional) the task parents. 
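For orientation on the new fields above: `isDurable` is a plain bool (field 14), and `slotRequests` is a `map<string, int32>` serialized as repeated `CreateTaskOpts_SlotRequestsEntry` key/value messages (field 15). A minimal round-trip sketch using the generated codec shown in this hunk; the import path and the `gpu` slot type are placeholders, not part of this diff:

```ts
import { CreateTaskOpts } from './workflows'; // generated module shown above

// Hypothetical options: a durable task requesting 2 "gpu" slot units per run.
const opts = CreateTaskOpts.fromPartial({
  isDurable: true,          // new bool field 14
  slotRequests: { gpu: 2 }, // new map field 15 (slot_type -> units)
});

// Each map entry is encoded as its own SlotRequestsEntry message, so a
// binary round-trip rebuilds the map key by key.
const decoded = CreateTaskOpts.decode(CreateTaskOpts.encode(opts).finish());
console.log(decoded.isDurable, decoded.slotRequests); // true { gpu: 2 }
```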
if none are passed in, this is a root task */ parents: string[]; - /** (optional) the custom step user data, assuming string representation of JSON */ + /** (optional) the custom task user data, assuming string representation of JSON */ userData: string; - /** (optional) the number of retries for the step, default 0 */ + /** (optional) the number of retries for the task, default 0 */ retries: number; - /** (optional) the rate limits for the step */ + /** (optional) the rate limits for the task */ rateLimits: CreateStepRateLimit[]; - /** (optional) the desired worker affinity state for the step */ + /** (optional) the desired worker affinity state for the task */ workerLabels: { [key: string]: DesiredWorkerLabels }; - /** (optional) the retry backoff factor for the step */ + /** (optional) the retry backoff factor for the task */ backoffFactor?: number | undefined; - /** (optional) the maximum backoff time for the step */ + /** (optional) the maximum backoff time for the task */ backoffMaxSeconds?: number | undefined; } @@ -370,7 +370,7 @@ export interface CreateWorkflowStepOpts_WorkerLabelsEntry { export interface CreateStepRateLimit { /** (required) the key for the rate limit */ key: string; - /** (optional) the number of units this step consumes */ + /** (optional) the number of units this task consumes */ units?: number | undefined; /** (optional) a CEL expression for determining the rate limit key */ keyExpr?: string | undefined; @@ -392,8 +392,8 @@ export interface ScheduleWorkflowRequest { input: string; /** (optional) the parent workflow run id */ parentId?: string | undefined; - /** (optional) the parent step run id */ - parentStepRunId?: string | undefined; + /** (optional) the parent task external run id */ + parentTaskRunExternalId?: string | undefined; /** * (optional) the index of the child workflow. if this is set, matches on the index or the * child key will be a no-op, even if the schedule has changed. @@ -453,17 +453,17 @@ export interface TriggerWorkflowRequest { input: string; /** (optional) the parent workflow run id */ parentId?: string | undefined; - /** (optional) the parent step run id */ - parentStepRunId?: string | undefined; + /** (optional) the parent task external run id */ + parentTaskRunExternalId?: string | undefined; /** * (optional) the index of the child workflow. if this is set, matches on the index or the - * child key will return an existing workflow run if the parent id, parent step run id, and + * child key will return an existing workflow run if the parent id, parent task run id, and * child index/key match an existing workflow run. */ childIndex?: number | undefined; /** * (optional) the key for the child. if this is set, matches on the index or the - * child key will return an existing workflow run if the parent id, parent step run id, and + * child key will return an existing workflow run if the parent id, parent task run id, and * child index/key match an existing workflow run. 
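One practical consequence of renaming `parentStepRunId` to `parentTaskRunExternalId` on these request messages: the generated `fromJSON` reads only the new camelCase key. A hedged sketch; all ids below are placeholders:

```ts
import { TriggerWorkflowRequest } from './workflows';

const req = TriggerWorkflowRequest.fromPartial({
  name: 'child-workflow',
  input: JSON.stringify({ n: 1 }),
  parentId: 'parent-workflow-run-id',
  parentTaskRunExternalId: 'parent-task-run-external-id', // formerly parentStepRunId
  childIndex: 0, // dedupe key together with parentId + parentTaskRunExternalId
});

// A JSON payload still using the old key decodes with the field left undefined:
TriggerWorkflowRequest.fromJSON({ parentStepRunId: 'x' }).parentTaskRunExternalId; // undefined
```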
*/ childKey?: string | undefined; @@ -474,7 +474,7 @@ export interface TriggerWorkflowRequest { * requires the workflow definition to have a sticky strategy */ desiredWorkerId?: string | undefined; - /** (optional) override for the priority of the workflow steps, will set all steps to this priority */ + /** (optional) override for the priority of the workflow tasks, will set all tasks to this priority */ priority?: number | undefined; } @@ -1758,7 +1758,7 @@ function createBaseScheduleWorkflowRequest(): ScheduleWorkflowRequest { schedules: [], input: '', parentId: undefined, - parentStepRunId: undefined, + parentTaskRunExternalId: undefined, childIndex: undefined, childKey: undefined, additionalMetadata: undefined, @@ -1783,8 +1783,8 @@ export const ScheduleWorkflowRequest: MessageFns = { if (message.parentId !== undefined) { writer.uint32(34).string(message.parentId); } - if (message.parentStepRunId !== undefined) { - writer.uint32(42).string(message.parentStepRunId); + if (message.parentTaskRunExternalId !== undefined) { + writer.uint32(42).string(message.parentTaskRunExternalId); } if (message.childIndex !== undefined) { writer.uint32(48).int32(message.childIndex); @@ -1845,7 +1845,7 @@ export const ScheduleWorkflowRequest: MessageFns = { break; } - message.parentStepRunId = reader.string(); + message.parentTaskRunExternalId = reader.string(); continue; } case 6: { @@ -1897,8 +1897,8 @@ export const ScheduleWorkflowRequest: MessageFns = { : [], input: isSet(object.input) ? globalThis.String(object.input) : '', parentId: isSet(object.parentId) ? globalThis.String(object.parentId) : undefined, - parentStepRunId: isSet(object.parentStepRunId) - ? globalThis.String(object.parentStepRunId) + parentTaskRunExternalId: isSet(object.parentTaskRunExternalId) + ? globalThis.String(object.parentTaskRunExternalId) : undefined, childIndex: isSet(object.childIndex) ? globalThis.Number(object.childIndex) : undefined, childKey: isSet(object.childKey) ? globalThis.String(object.childKey) : undefined, @@ -1923,8 +1923,8 @@ export const ScheduleWorkflowRequest: MessageFns = { if (message.parentId !== undefined) { obj.parentId = message.parentId; } - if (message.parentStepRunId !== undefined) { - obj.parentStepRunId = message.parentStepRunId; + if (message.parentTaskRunExternalId !== undefined) { + obj.parentTaskRunExternalId = message.parentTaskRunExternalId; } if (message.childIndex !== undefined) { obj.childIndex = Math.round(message.childIndex); @@ -1950,7 +1950,7 @@ export const ScheduleWorkflowRequest: MessageFns = { message.schedules = object.schedules?.map((e) => e) || []; message.input = object.input ?? ''; message.parentId = object.parentId ?? undefined; - message.parentStepRunId = object.parentStepRunId ?? undefined; + message.parentTaskRunExternalId = object.parentTaskRunExternalId ?? undefined; message.childIndex = object.childIndex ?? undefined; message.childKey = object.childKey ?? undefined; message.additionalMetadata = object.additionalMetadata ?? 
undefined; @@ -2492,7 +2492,7 @@ function createBaseTriggerWorkflowRequest(): TriggerWorkflowRequest { name: '', input: '', parentId: undefined, - parentStepRunId: undefined, + parentTaskRunExternalId: undefined, childIndex: undefined, childKey: undefined, additionalMetadata: undefined, @@ -2512,8 +2512,8 @@ export const TriggerWorkflowRequest: MessageFns = { if (message.parentId !== undefined) { writer.uint32(26).string(message.parentId); } - if (message.parentStepRunId !== undefined) { - writer.uint32(34).string(message.parentStepRunId); + if (message.parentTaskRunExternalId !== undefined) { + writer.uint32(34).string(message.parentTaskRunExternalId); } if (message.childIndex !== undefined) { writer.uint32(40).int32(message.childIndex); @@ -2569,7 +2569,7 @@ export const TriggerWorkflowRequest: MessageFns = { break; } - message.parentStepRunId = reader.string(); + message.parentTaskRunExternalId = reader.string(); continue; } case 5: { @@ -2626,8 +2626,8 @@ export const TriggerWorkflowRequest: MessageFns = { name: isSet(object.name) ? globalThis.String(object.name) : '', input: isSet(object.input) ? globalThis.String(object.input) : '', parentId: isSet(object.parentId) ? globalThis.String(object.parentId) : undefined, - parentStepRunId: isSet(object.parentStepRunId) - ? globalThis.String(object.parentStepRunId) + parentTaskRunExternalId: isSet(object.parentTaskRunExternalId) + ? globalThis.String(object.parentTaskRunExternalId) : undefined, childIndex: isSet(object.childIndex) ? globalThis.Number(object.childIndex) : undefined, childKey: isSet(object.childKey) ? globalThis.String(object.childKey) : undefined, @@ -2652,8 +2652,8 @@ export const TriggerWorkflowRequest: MessageFns = { if (message.parentId !== undefined) { obj.parentId = message.parentId; } - if (message.parentStepRunId !== undefined) { - obj.parentStepRunId = message.parentStepRunId; + if (message.parentTaskRunExternalId !== undefined) { + obj.parentTaskRunExternalId = message.parentTaskRunExternalId; } if (message.childIndex !== undefined) { obj.childIndex = Math.round(message.childIndex); @@ -2681,7 +2681,7 @@ export const TriggerWorkflowRequest: MessageFns = { message.name = object.name ?? ''; message.input = object.input ?? ''; message.parentId = object.parentId ?? undefined; - message.parentStepRunId = object.parentStepRunId ?? undefined; + message.parentTaskRunExternalId = object.parentTaskRunExternalId ?? undefined; message.childIndex = object.childIndex ?? undefined; message.childKey = object.childKey ?? undefined; message.additionalMetadata = object.additionalMetadata ?? 
undefined; diff --git a/sdks/typescript/src/step.ts b/sdks/typescript/src/step.ts index 18333ff84b..056819b9a9 100644 --- a/sdks/typescript/src/step.ts +++ b/sdks/typescript/src/step.ts @@ -1,765 +1,9 @@ -/* eslint-disable no-underscore-dangle */ -/* eslint-disable max-classes-per-file */ -import HatchetError from '@util/errors/hatchet-error'; -import * as z from 'zod'; -import { JsonObject } from '@bufbuild/protobuf'; -import { Workflow } from './workflow'; -import { Action } from './clients/dispatcher/action-listener'; -import { LogLevel } from './clients/event/event-client'; -import { Logger } from './util/logger'; -import { parseJSON } from './util/parse'; -import WorkflowRunRef from './util/workflow-run-ref'; -import { WorkerLabels } from './clients/dispatcher/dispatcher-client'; -import { CreateStepRateLimit, RateLimitDuration, WorkerLabelComparator } from './protoc/workflows'; -import { CreateWorkflowTaskOpts, Priority } from './v1'; -import { - RunOpts, - TaskWorkflowDeclaration, - BaseWorkflowDeclaration as WorkflowV1, -} from './v1/declaration'; -import { Conditions, Render } from './v1/conditions'; -import { Action as ConditionAction } from './protoc/v1/shared/condition'; -import { conditionsToPb } from './v1/conditions/transformer'; -import { Duration } from './v1/client/duration'; -import { JsonValue, OutputType } from './v1/types'; -import { V1Worker } from './v1/client/worker/worker-internal'; -import { V0Worker } from './clients/worker'; -import { LegacyHatchetClient } from './clients/hatchet-client'; -import { applyNamespace } from './util/apply-namespace'; - -export const CreateRateLimitSchema = z.object({ - key: z.string().optional(), - staticKey: z.string().optional(), - dynamicKey: z.string().optional(), - - units: z.union([z.number().min(0), z.string()]), - limit: z.union([z.number().min(1), z.string()]).optional(), - duration: z.nativeEnum(RateLimitDuration).optional(), -}); - -export const DesiredWorkerLabelSchema = z - .union([ - z.string(), - z.number().int(), - z.object({ - value: z.union([z.string(), z.number()]), - required: z.boolean().optional(), - weight: z.number().int().optional(), - - // (optional) comparator for the label - // if not provided, the default is EQUAL - // desired COMPARATOR actual (i.e. desired > actual for GREATER_THAN) - comparator: z.nativeEnum(WorkerLabelComparator).optional(), - }), - ]) - .optional(); - -export const CreateStepSchema = z.object({ - name: z.string(), - parents: z.array(z.string()).optional(), - timeout: z.string().optional(), - retries: z.number().optional(), - rate_limits: z.array(CreateRateLimitSchema).optional(), - worker_labels: z.record(z.lazy(() => DesiredWorkerLabelSchema)).optional(), - backoff: z - .object({ - factor: z.number().optional(), - maxSeconds: z.number().optional(), - }) - .optional(), -}); - -export type NextStep = { [key: string]: JsonValue }; - -type TriggerData = Record>; - -type ChildRunOpts = RunOpts & { key?: string; sticky?: boolean }; - -interface ContextData { - input: T; - triggers: TriggerData; - parents: Record; - triggered_by: string; - user_data: K; - step_run_errors: Record; -} - -export class ContextWorker { - private worker: V0Worker | V1Worker; - constructor(worker: V0Worker | V1Worker) { - this.worker = worker; - } - - /** - * Gets the ID of the worker. - * @returns The ID of the worker. - */ - id() { - return this.worker.workerId; - } - - /** - * Checks if the worker has a registered workflow. - * @param workflowName - The name of the workflow to check. 
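The hunk above deletes the whole v0 `step.ts` module body (765 lines); what remains is the deprecation shim shown at the end of this file's diff. For readers migrating, a rough before/after sketch under the v1 API that the shim's warning links to; treat the exact option names as illustrative rather than authoritative:

```ts
// Hypothetical migration target; `hatchet` is assumed to be a v1 HatchetClient.
import { hatchet } from './hatchet-client';

// v0 (removed): a CreateStep object with a run(ctx) function registered on a
// Workflow. v1: declare the task through the client's task factory instead.
export const greet = hatchet.task({
  name: 'greet',
  fn: (input: { name: string }) => {
    return { greeting: `Hello, ${input.name}!` };
  },
});
```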
- * @returns True if the workflow is registered, otherwise false. - */ - hasWorkflow(workflowName: string) { - return !!this.worker.workflow_registry.find((workflow) => - 'id' in workflow ? workflow.id === workflowName : workflow.name === workflowName - ); - } - - /** - * Gets the current state of the worker labels. - * @returns The labels of the worker. - */ - labels() { - return this.worker.labels; - } - - /** - * Upserts the a set of labels on the worker. - * @param labels - The labels to upsert. - * @returns A promise that resolves when the labels have been upserted. - */ - upsertLabels(labels: WorkerLabels) { - return this.worker.upsertLabels(labels); - } -} - -export class V0Context { - data: ContextData; - // @deprecated use input prop instead - input: T; - // @deprecated use ctx.abortController instead - controller = new AbortController(); - action: Action; - v0: LegacyHatchetClient; - - worker: ContextWorker; - - overridesData: Record = {}; - logger: Logger; - - spawnIndex: number = 0; - - constructor(action: Action, client: LegacyHatchetClient, worker: V0Worker | V1Worker) { - try { - const data = parseJSON(action.actionPayload); - this.data = data; - this.action = action; - this.v0 = client; - this.worker = new ContextWorker(worker); - this.logger = client.config.logger(`Context Logger`, client.config.log_level); - - // if this is a getGroupKeyRunId, the data is the workflow input - if (action.getGroupKeyRunId !== '') { - this.input = data; - } else { - this.input = data.input; - } - - this.overridesData = data.overrides || {}; - } catch (e: any) { - throw new HatchetError(`Could not parse payload: ${e.message}`); - } - } - - get abortController() { - return this.controller; - } - - get cancelled() { - return this.controller.signal.aborted; - } - - /** - * Retrieves the output of a parent task. - * @param parentTask - The a CreateTaskOpts or string of the parent task name. - * @returns The output of the specified parent task. - * @throws An error if the task output is not found. - */ - async parentOutput(parentTask: CreateWorkflowTaskOpts | string) { - // NOTE: parentOutput is async since we plan on potentially making this a cacheable server call - if (typeof parentTask === 'string') { - return this.stepOutput(parentTask); - } - - return this.stepOutput(parentTask.name) as L; - } - - /** - * Get the output of a task. - * @param task - The name of the task to get the output for. - * @returns The output of the task. - * @throws An error if the task output is not found. - * @deprecated use ctx.parentOutput instead - */ - stepOutput(step: string): L { - if (!this.data.parents) { - throw new HatchetError('Parent task outputs not found'); - } - if (!this.data.parents[step]) { - throw new HatchetError(`Output for parent task '${step}' not found`); - } - return this.data.parents[step]; - } - - /** - * Returns errors from any task runs in the workflow. - * @returns A record mapping task names to error messages. - * @throws A warning if no errors are found (this method should be used in on-failure tasks). - * @deprecated use ctx.errors() instead - */ - stepRunErrors(): Record { - return this.errors(); - } - - /** - * Returns errors from any task runs in the workflow. - * @returns A record mapping task names to error messages. - * @throws A warning if no errors are found (this method should be used in on-failure tasks). - */ - errors(): Record { - const errors = this.data.step_run_errors || {}; - - if (Object.keys(errors).length === 0) { - this.logger.error( - 'No run errors found. 
`ctx.errors` is intended to be run in an on-failure task, and will only work on engine versions more recent than v0.53.10' - ); - } - - return errors; - } - - /** - * Gets the dag conditional triggers for the current workflow run. - * @returns The triggers for the current workflow. - */ - triggers(): TriggerData { - return this.data.triggers; - } - - /** - * Determines if the workflow was triggered by an event. - * @returns True if the workflow was triggered by an event, otherwise false. - */ - triggeredByEvent(): boolean { - return this.data?.triggered_by === 'event'; - } - - /** - * Gets the input data for the current workflow. - * @returns The input data for the workflow. - * @deprecated use task input parameter instead - */ - workflowInput(): T { - return this.input; - } - - /** - * Gets the name of the current workflow. - * @returns The name of the workflow. - */ - workflowName(): string { - return this.action.jobName; - } - - /** - * Gets the user data associated with the workflow. - * @returns The user data. - */ - userData(): K { - return this.data?.user_data; - } - - /** - * Gets the name of the current task. - * @returns The name of the task. - * @deprecated use ctx.taskName instead - */ - stepName(): string { - return this.taskName(); - } - - /** - * Gets the name of the current running task. - * @returns The name of the task. - */ - taskName(): string { - return this.action.stepName; - } - - /** - * Gets the ID of the current workflow run. - * @returns The workflow run ID. - */ - workflowRunId(): string { - return this.action.workflowRunId; - } - - /** - * Gets the ID of the current task run. - * @returns The task run ID. - */ - taskRunId(): string { - return this.action.stepRunId; - } - - /** - * Gets the number of times the current task has been retried. - * @returns The retry count. - */ - retryCount(): number { - return this.action.retryCount; - } - - /** - * Logs a message from the current task. - * @param message - The message to log. - * @param level - The log level (optional). - */ - log(message: string, level?: LogLevel) { - const { stepRunId } = this.action; - - if (!stepRunId) { - // log a warning - this.logger.warn('cannot log from context without stepRunId'); - return; - } - - this.v0.event.putLog(stepRunId, message, level, this.retryCount()); - } - - /** - * Refreshes the timeout for the current task. - * @param incrementBy - The interval by which to increment the timeout. - * The interval should be specified in the format of '10s' for 10 seconds, '1m' for 1 minute, or '1d' for 1 day. - */ - async refreshTimeout(incrementBy: Duration) { - const { stepRunId } = this.action; - - if (!stepRunId) { - // log a warning - this.logger.warn('cannot refresh timeout from context without stepRunId'); - return; - } - - await this.v0.dispatcher.refreshTimeout(incrementBy, stepRunId); - } - - /** - * Releases a worker slot for a task run such that the worker can pick up another task. - * Note: this is an advanced feature that may lead to unexpected behavior if used incorrectly. - * @returns A promise that resolves when the slot has been released. - */ - async releaseSlot(): Promise { - await this.v0.dispatcher.client.releaseSlot({ - stepRunId: this.action.stepRunId, - }); - } - - /** - * Streams data from the current task run. - * @param data - The data to stream (string or binary). - * @returns A promise that resolves when the data has been streamed. 
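The v0 context helpers deleted in this hunk (`log`, `refreshTimeout`, `releaseSlot`, `putStream`) all key off `action.stepRunId` and warn instead of throwing when it is missing. For reference while migrating, a usage sketch against the removed API, with generics elided:

```ts
// Sketch against the removed v0 API; `ctx` is the context passed to a step's run fn.
async function longImport(ctx: V0Context<any, any>) {
  ctx.log('starting long import');      // forwarded via the event client's putLog
  await ctx.refreshTimeout('5m');       // extend this run's timeout by 5 minutes
  await ctx.putStream('progress: 50%'); // stream partial output to subscribers
  await ctx.releaseSlot();              // advanced: free the worker slot early
}
```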
- */ - async putStream(data: string | Uint8Array) { - const { stepRunId } = this.action; - - if (!stepRunId) { - // log a warning - this.logger.warn('cannot log from context without stepRunId'); - return; - } - - await this.v0.event.putStream(stepRunId, data, undefined); - } - - /** - * Runs multiple children workflows in parallel without waiting for their results. - * @param children - An array of objects containing the workflow name, input data, and options for each workflow. - * @returns A list of workflow run references to the enqueued runs. - */ - async bulkRunNoWaitChildren( - children: Array<{ - workflow: string | Workflow | WorkflowV1; - input: Q; - options?: ChildRunOpts; - }> - ): Promise[]> { - return this.spawnWorkflows(children); - } - - /** - * Runs multiple children workflows in parallel and waits for all results. - * @param children - An array of objects containing the workflow name, input data, and options for each workflow. - * @returns A list of results from the children workflows. - */ - async bulkRunChildren( - children: Array<{ - workflow: string | Workflow | WorkflowV1; - input: Q; - options?: ChildRunOpts; - }> - ): Promise { - const runs = await this.bulkRunNoWaitChildren(children); - return Promise.all(runs.map((run) => run.output)); - } - - /** - * Spawns multiple workflows. - * - * @param workflows - An array of objects containing the workflow name, input data, and options for each workflow. - * @returns A list of references to the spawned workflow runs. - * @deprecated Use bulkRunNoWaitChildren or bulkRunChildren instead. - */ - async spawnWorkflows( - workflows: Array<{ - workflow: string | Workflow | WorkflowV1; - input: Q; - options?: ChildRunOpts; - }> - ): Promise[]> { - const { workflowRunId, stepRunId } = this.action; - - const workflowRuns = workflows.map(({ workflow, input, options }) => { - let workflowName: string; - - if (typeof workflow === 'string') { - workflowName = workflow; - } else { - workflowName = workflow.id; - } - - const name = applyNamespace(workflowName, this.v0.config.namespace); - - const opts = options || {}; - const { sticky } = opts; - - if (sticky && !this.worker.hasWorkflow(name)) { - throw new HatchetError( - `Cannot run with sticky: workflow ${name} is not registered on the worker` - ); - } - - const resp = { - workflowName: name, - input, - options: { - ...opts, - parentId: workflowRunId, - parentStepRunId: stepRunId, - childIndex: this.spawnIndex, - desiredWorkerId: sticky ? this.worker.id() : undefined, - }, - }; - this.spawnIndex += 1; - return resp; - }); - - try { - const batchSize = 100; - - let resp: WorkflowRunRef
<P>
[] = []; - for (let i = 0; i < workflowRuns.length; i += batchSize) { - const batch = workflowRuns.slice(i, i + batchSize); - const batchResp = await this.v0.admin.runWorkflows(batch); - resp = resp.concat(batchResp); - } - - const res: WorkflowRunRef
<P>
[] = []; - resp.forEach((ref, index) => { - const wf = workflows[index].workflow; - if (wf instanceof TaskWorkflowDeclaration) { - // eslint-disable-next-line no-param-reassign - ref._standaloneTaskName = wf._standalone_task_name; - } - res.push(ref); - }); - - return resp; - } catch (e: any) { - throw new HatchetError(e.message); - } - } - - /** - * Runs a new workflow and waits for its result. - * - * @param workflow - The workflow to run (name, Workflow instance, or WorkflowV1 instance). - * @param input - The input data for the workflow. - * @param options - An options object containing key, sticky, priority, and additionalMetadata. - * @returns The result of the workflow. - */ - async runChild( - workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration, - input: Q, - options?: ChildRunOpts - ): Promise
<P>
{ - const run = await this.spawnWorkflow(workflow, input, options); - return run.output; - } - - /** - * Enqueues a new workflow without waiting for its result. - * - * @param workflow - The workflow to enqueue (name, Workflow instance, or WorkflowV1 instance). - * @param input - The input data for the workflow. - * @param options - An options object containing key, sticky, priority, and additionalMetadata. - * @returns A reference to the spawned workflow run. - */ - async runNoWaitChild( - workflow: string | Workflow | WorkflowV1, - input: Q, - options?: ChildRunOpts - ): Promise> { - return this.spawnWorkflow(workflow, input, options); - } - - /** - * Spawns a new workflow. - * - * @param workflow - The workflow to spawn (name, Workflow instance, or WorkflowV1 instance). - * @param input - The input data for the workflow. - * @param options - Additional options for spawning the workflow. - * @returns A reference to the spawned workflow run. - * @deprecated Use runChild or runNoWaitChild instead. - */ - async spawnWorkflow( - workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration, - input: Q, - options?: ChildRunOpts - ): Promise> { - const { workflowRunId, stepRunId } = this.action; - - let workflowName: string = ''; - - if (typeof workflow === 'string') { - workflowName = workflow; - } else { - workflowName = workflow.id; - } - - const name = applyNamespace(workflowName, this.v0.config.namespace); - - const opts = options || {}; - const { sticky } = opts; - - if (sticky && !this.worker.hasWorkflow(name)) { - throw new HatchetError( - `cannot run with sticky: workflow ${name} is not registered on the worker` - ); - } - - try { - const resp = await this.v0.admin.runWorkflow(name, input, { - parentId: workflowRunId, - parentStepRunId: stepRunId, - childIndex: this.spawnIndex, - desiredWorkerId: sticky ? this.worker.id() : undefined, - ...opts, - }); - - this.spawnIndex += 1; - - if (workflow instanceof TaskWorkflowDeclaration) { - resp._standaloneTaskName = workflow._standalone_task_name; - } - - return resp; - } catch (e: any) { - throw new HatchetError(e.message); - } - } - - /** - * Retrieves additional metadata associated with the current workflow run. - * @returns A record of metadata key-value pairs. - */ - additionalMetadata(): Record { - if (!this.action.additionalMetadata) { - return {}; - } - - // parse the additional metadata - const res: Record = parseJSON(this.action.additionalMetadata); - return res; - } - - /** - * Gets the index of this workflow if it was spawned as part of a bulk operation. - * @returns The child index number, or undefined if not set. - */ - childIndex(): number | undefined { - return this.action.childWorkflowIndex; - } - - /** - * Gets the key associated with this workflow if it was spawned as a child workflow. - * @returns The child key, or undefined if not set. - */ - childKey(): string | undefined { - return this.action.childWorkflowKey; - } - - /** - * Gets the ID of the parent workflow run if this workflow was spawned as a child. - * @returns The parent workflow run ID, or undefined if not a child workflow. 
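These accessors surface the parent/child linkage that `spawnWorkflow` records through `parentId`, `parentStepRunId`, and the spawn index. A sketch against the removed v0 API; the workflow name, URL, and key are placeholders, and the `key` option is assumed to populate the child key:

```ts
// Parent side: enqueue a keyed child without waiting for its result.
const ref = await ctx.runNoWaitChild(
  'resize-image',
  { url: 'https://example.com/a.png' },
  { key: 'image-123', sticky: true } // sticky requires the workflow on this worker
);

// Child side: inspect the linkage fields.
ctx.childKey();            // 'image-123'
ctx.childIndex();          // spawn index within the parent
ctx.parentWorkflowRunId(); // run id of the spawning workflow
```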
- */ - parentWorkflowRunId(): string | undefined { - return this.action.parentWorkflowRunId; - } - - priority(): Priority | undefined { - switch (this.action.priority) { - case 1: - return Priority.LOW; - case 2: - return Priority.MEDIUM; - case 3: - return Priority.HIGH; - default: - return undefined; - } - } -} - -export class V0DurableContext extends V0Context { - waitKey: number = 0; - - /** - * Pauses execution for the specified duration. - * Duration is "global" meaning it will wait in real time regardless of transient failures like worker restarts. - * @param duration - The duration to sleep for. - * @returns A promise that resolves when the sleep duration has elapsed. - */ - async sleepFor(duration: Duration, readableDataKey?: string) { - return this.waitFor({ sleepFor: duration, readableDataKey }); - } - - /** - * Pauses execution until the specified conditions are met. - * Conditions are "global" meaning they will wait in real time regardless of transient failures like worker restarts. - * @param conditions - The conditions to wait for. - * @returns A promise that resolves with the event that satisfied the conditions. - */ - async waitFor(conditions: Conditions | Conditions[]): Promise> { - const pbConditions = conditionsToPb(Render(ConditionAction.CREATE, conditions)); - - // eslint-disable-next-line no-plusplus - const key = `waitFor-${this.waitKey++}`; - await this.v0.durableListener.registerDurableEvent({ - taskId: this.action.stepRunId, - signalKey: key, - sleepConditions: pbConditions.sleepConditions, - userEventConditions: pbConditions.userEventConditions, - }); - - const listener = this.v0.durableListener.subscribe({ - taskId: this.action.stepRunId, - signalKey: key, - }); - - const event = await listener.get(); - - // Convert event.data from Uint8Array to string if needed - const eventData = - event.data instanceof Uint8Array ? new TextDecoder().decode(event.data) : event.data; - - const res = JSON.parse(eventData) as Record>; - return res.CREATE; - } -} - -export type StepRunFunction = ( - ctx: V0Context -) => Promise | NextStep | void; - -/** - * A step is a unit of work that can be run by a worker. - * It is defined by a name, a function that returns the next step, and optional configuration. 
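The durable context removed here registered waits server-side (`registerDurableEvent`, keyed on the step run id plus a monotonically increasing `waitFor-<n>` key), which is what let them survive worker restarts. A sketch against the removed API; the event key is a placeholder and the user-event condition shape follows the v1 `Conditions` helpers:

```ts
async function awaitApproval(ctx: V0DurableContext<any, any>) {
  // Real-time sleep: resumes on schedule even if the worker restarts.
  await ctx.sleepFor('24h');

  // Blocks until a matching user event arrives; the registration lives
  // server-side, so the wait is replay-safe.
  const data = await ctx.waitFor({ eventKey: 'approval:received' });
  return { data };
}
```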
- * @deprecated use hatchet.workflows.task factory instead - */ -export interface CreateStep extends z.infer { - run: StepRunFunction; -} - -export function mapRateLimit(limits: CreateStep['rate_limits']): CreateStepRateLimit[] { - if (!limits) return []; - - return limits.map((l) => { - let key = l.staticKey; - const keyExpression = l.dynamicKey; - - if (l.key !== undefined) { - // eslint-disable-next-line no-console - console.warn( - 'key is deprecated and will be removed in a future release, please use staticKey instead' - ); - key = l.key; - } - - if (keyExpression !== undefined) { - if (key !== undefined) { - throw new Error('Cannot have both static key and dynamic key set'); - } - key = keyExpression; - if (!validateCelExpression(keyExpression)) { - throw new Error(`Invalid CEL expression: ${keyExpression}`); - } - } - - if (key === undefined) { - throw new Error(`Invalid key`); - } - - let units: number | undefined; - let unitsExpression: string | undefined; - if (typeof l.units === 'number') { - units = l.units; - } else { - if (!validateCelExpression(l.units)) { - throw new Error(`Invalid CEL expression: ${l.units}`); - } - unitsExpression = l.units; - } - - let limitExpression: string | undefined; - if (l.limit !== undefined) { - if (typeof l.limit === 'number') { - limitExpression = `${l.limit}`; - } else { - if (!validateCelExpression(l.limit)) { - throw new Error(`Invalid CEL expression: ${l.limit}`); - } - - limitExpression = l.limit; - } - } - - if (keyExpression !== undefined && limitExpression === undefined) { - throw new Error('CEL based keys requires limit to be set'); - } - - if (limitExpression === undefined) { - limitExpression = `-1`; - } - - return { - key, - keyExpr: keyExpression, - units, - unitsExpr: unitsExpression, - limitValuesExpr: limitExpression, - duration: l.duration, - }; - }); -} - -// Helper function to validate CEL expressions -function validateCelExpression(expr: string): boolean { - // This is a placeholder. In a real implementation, you'd need to use a CEL parser or validator. - // For now, we'll just return true to mimic the behavior. - return true; -} +export * from './legacy/step'; + +console.warn( + '\x1b[31mDeprecation warning: the v0 SDK, including the step module, was deprecated and removed in release v1.12.0.\x1b[0m' +); +console.warn( + '\x1b[32mPlease migrate to the v1 SDK instead: https://docs.hatchet.run/home/v1-sdk-improvements\x1b[0m' +); +console.warn('--------------------------------'); diff --git a/sdks/typescript/src/util/abort-error.ts b/sdks/typescript/src/util/abort-error.ts new file mode 100644 index 0000000000..0c3864c737 --- /dev/null +++ b/sdks/typescript/src/util/abort-error.ts @@ -0,0 +1,89 @@ +export function createAbortError(message = 'Operation aborted'): Error { + const err: any = new Error(message); + err.name = 'AbortError'; + err.code = 'ABORT_ERR'; + return err as Error; +} + +export function isAbortError(err: unknown): err is Error { + return err instanceof Error && (err.name === 'AbortError' || (err as any).code === 'ABORT_ERR'); +} + +/** + * Helper to be used inside broad `catch` blocks so cancellation isn't accidentally swallowed. + * + * Example: + * ```ts + * try { ... } catch (e) { rethrowIfAborted(e); ... } + * ``` + */ +export function rethrowIfAborted(err: unknown): void { + if (isAbortError(err)) { + throw err; + } +} + +export type ThrowIfAbortedOpts = { + /** + * Optional: called before throwing when the signal is aborted. 
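The new `abort-error.ts` helpers are meant to compose inside broad `catch` blocks. A small usage sketch; `fetchOnce` is a stand-in for any awaited unit of work:

```ts
import { createAbortError, isAbortError, rethrowIfAborted } from './abort-error';

const fetchOnce = async () => {
  /* stand-in for real work */
};

async function attempt() {
  try {
    await fetchOnce();
  } catch (e) {
    rethrowIfAborted(e); // cancellation escapes this catch; ordinary errors fall through
    // retry / log ordinary failures here
  }
}

// Manufacturing a cancellation that both guards recognize:
const err = createAbortError('shutting down');
console.log(isAbortError(err)); // true: name === 'AbortError' and code === 'ABORT_ERR'
```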
+ * This lets callsites attach logging without coupling this util to a logger implementation. + */ + warn?: (message: string) => void; + + /** + * If true, emits a generic warning intended for "trigger/enqueue" paths. + */ + isTrigger?: boolean; + + /** + * Optional context used to make warnings consistent, e.g. "task run ". + */ + context?: string; + + /** + * Message used when the AbortSignal doesn't provide a reason. + */ + defaultMessage?: string; +}; + +/** + * Throws an AbortError if the provided signal is aborted. + * + * Notes: + * - In JS/TS, `catch` can swallow any thrown value, so this is best-effort. + * - We prefer throwing the signal's `reason` when it is already an Error. + */ +export function throwIfAborted( + signal: AbortSignal | undefined, + optsOrDefaultMessage: ThrowIfAbortedOpts | string = 'Operation cancelled by AbortSignal' +): void { + if (!signal?.aborted) { + return; + } + + const opts: ThrowIfAbortedOpts = + typeof optsOrDefaultMessage === 'string' + ? { defaultMessage: optsOrDefaultMessage } + : (optsOrDefaultMessage ?? {}); + + if (opts.isTrigger) { + const ctx = opts.context ? `${opts.context} ` : ''; + opts.warn?.( + `Cancellation: ${ctx}attempted to enqueue/trigger work after cancellation was signaled. ` + + `This usually means an AbortError was caught and not propagated. ` + + `See https://docs.hatchet.run/home/cancellation` + ); + } + + const { reason } = signal as any; + + if (reason instanceof Error) { + throw reason; + } + + if (typeof reason === 'string' && reason.length > 0) { + throw createAbortError(reason); + } + + throw createAbortError(opts.defaultMessage ?? 'Operation cancelled by AbortSignal'); +} diff --git a/sdks/typescript/src/util/config-loader/config-loader.test.ts b/sdks/typescript/src/util/config-loader/config-loader.test.ts index ed07af0a3e..c9443ce60c 100644 --- a/sdks/typescript/src/util/config-loader/config-loader.test.ts +++ b/sdks/typescript/src/util/config-loader/config-loader.test.ts @@ -1,6 +1,6 @@ import { ConfigLoader } from './config-loader'; -fdescribe('ConfigLoader', () => { +describe('ConfigLoader', () => { beforeEach(() => { process.env.HATCHET_CLIENT_TOKEN = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJncnBjX2Jyb2FkY2FzdF9hZGRyZXNzIjoiMTI3LjAuMC4xOjgwODAiLCJzZXJ2ZXJfdXJsIjoiaHR0cDovL2xvY2FsaG9zdDo4MDgwIiwic3ViIjoiNzA3ZDA4NTUtODBhYi00ZTFmLWExNTYtZjFjNDU0NmNiZjUyIn0K.abcdef'; diff --git a/sdks/typescript/src/util/hatchet-promise/hatchet-promise.ts b/sdks/typescript/src/util/hatchet-promise/hatchet-promise.ts index d1912cb603..03298cda3c 100644 --- a/sdks/typescript/src/util/hatchet-promise/hatchet-promise.ts +++ b/sdks/typescript/src/util/hatchet-promise/hatchet-promise.ts @@ -1,12 +1,20 @@ class HatchetPromise { // eslint-disable-next-line @typescript-eslint/no-unused-vars - cancel: Function = (reason: any) => {}; + cancel: (reason?: any) => void = (_reason?: any) => {}; promise: Promise; + /** + * The original (non-cancelable) promise passed to the constructor. + * + * `promise` is a cancelable wrapper which rejects immediately when `cancel` is called. + * `inner` continues executing and will settle when the underlying work completes. 
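`throwIfAborted` accepts either a fallback message or an options object; the options form is what trigger/enqueue paths can use to surface aborts that were caught and not propagated. A sketch with placeholder context:

```ts
import { throwIfAborted } from './abort-error';

declare const signal: AbortSignal; // e.g. a task's abort controller signal
declare const logger: { warn(msg: string): void };

// String form: fallback message when the signal carries no reason.
throwIfAborted(signal, 'poll loop cancelled');

// Options form: emits the "work after cancellation" warning before throwing.
throwIfAborted(signal, {
  isTrigger: true,
  context: 'task run', // placeholder; callers would typically include a run id
  warn: (msg) => logger.warn(msg),
});
```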
+ */ + inner: Promise; constructor(promise: Promise) { + this.inner = Promise.resolve(promise) as Promise; this.promise = new Promise((resolve, reject) => { this.cancel = reject; - Promise.resolve(promise).then(resolve).catch(reject); + this.inner.then(resolve).catch(reject); }); } } diff --git a/sdks/typescript/src/util/logger/index.ts b/sdks/typescript/src/util/logger/index.ts index 1ff09efd40..9a1b758a4a 100644 --- a/sdks/typescript/src/util/logger/index.ts +++ b/sdks/typescript/src/util/logger/index.ts @@ -1 +1,2 @@ export * from './logger'; +export * from './task-run-log'; diff --git a/sdks/typescript/src/util/logger/task-run-log.ts b/sdks/typescript/src/util/logger/task-run-log.ts new file mode 100644 index 0000000000..d558d845d5 --- /dev/null +++ b/sdks/typescript/src/util/logger/task-run-log.ts @@ -0,0 +1,18 @@ +import { ActionType } from '../../protoc/dispatcher'; + +export function actionMap(action: ActionType): string { + switch (action) { + case ActionType.START_STEP_RUN: + return 'starting...'; + case ActionType.CANCEL_STEP_RUN: + return 'cancelling...'; + case ActionType.START_GET_GROUP_KEY: + return 'starting to get group key...'; + default: + return 'unknown'; + } +} + +export function taskRunLog(taskName: string, taskRunExternalId: string, action: string): string { + return `Task run ${action} \t ${taskName}/${taskRunExternalId} `; +} diff --git a/sdks/typescript/src/util/workflow-run-ref.ts b/sdks/typescript/src/util/workflow-run-ref.ts index b0db20fb12..8f7bdf7d32 100644 --- a/sdks/typescript/src/util/workflow-run-ref.ts +++ b/sdks/typescript/src/util/workflow-run-ref.ts @@ -53,6 +53,12 @@ export default class WorkflowRunRef { private client: RunListenerClient; private runs: RunsClient | undefined; _standaloneTaskName?: string; + /** + * Optional default AbortSignal used for listener-backed waits (e.g. `.result()`). + * This is primarily set when a run is spawned from within a task so cancellations propagate + * without manually threading `{ signal }` everywhere. 
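The distinction this change introduces: `promise` races the underlying work against `cancel`, while `inner` always tracks the work itself. A sketch observing both sides of a cancelled wrapper:

```ts
const hp = new HatchetPromise(
  new Promise<string>((resolve) => {
    setTimeout(() => resolve('done'), 10_000);
  })
);

hp.cancel(createAbortError('shutting down'));

await hp.promise.catch((e) => console.log('rejected immediately:', e.name)); // AbortError
const result = await hp.inner; // still resolves ~10s later with 'done'
```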
+ */ + defaultSignal?: AbortSignal; constructor( workflowRunId: @@ -64,13 +70,15 @@ export default class WorkflowRunRef { client: RunListenerClient, runsClient?: RunsClient, parentWorkflowRunId?: string, - standaloneTaskName?: string + standaloneTaskName?: string, + defaultSignal?: AbortSignal ) { this.workflowRunId = workflowRunId; this.parentWorkflowRunId = parentWorkflowRunId; this.client = client; this.runs = runsClient; this._standaloneTaskName = standaloneTaskName; + this.defaultSignal = defaultSignal; } // TODO docstrings @@ -103,7 +111,8 @@ export default class WorkflowRunRef { return new Promise((resolve, reject) => { (async () => { - for await (const event of streamable.stream()) { + const signal = this.defaultSignal; + for await (const event of streamable.stream({ signal })) { if (event.eventType === WorkflowRunEventType.WORKFLOW_RUN_EVENT_TYPE_FINISHED) { if (event.results.some((r) => r.error !== undefined)) { // HACK: this might replace intentional empty errors but this is the more common case @@ -154,7 +163,7 @@ export default class WorkflowRunRef { const result = event.results.reduce( (acc, r) => ({ ...acc, - [r.stepReadableId]: JSON.parse(r.output || '{}'), + [r.taskName]: JSON.parse(r.output || '{}'), }), {} as T ); @@ -168,7 +177,7 @@ export default class WorkflowRunRef { return; } } - })(); + })().catch(reject); }); } diff --git a/sdks/typescript/src/v1/client/admin.ts b/sdks/typescript/src/v1/client/admin.ts index f9a2fdba3c..e0197edcad 100644 --- a/sdks/typescript/src/v1/client/admin.ts +++ b/sdks/typescript/src/v1/client/admin.ts @@ -21,6 +21,16 @@ export type WorkflowRun = { input: T; options?: { parentId?: string | undefined; + /** + * (optional) the parent task run external id. + * + * This is the field understood by the workflows gRPC API (`parent_task_run_external_id`). + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -58,6 +68,16 @@ export class AdminClient { input: Q, options?: { parentId?: string | undefined; + /** + * (optional) the parent task run external id. + * + * This is the field understood by the workflows gRPC API (`parent_task_run_external_id`). + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -72,14 +92,17 @@ export class AdminClient { const inputStr = JSON.stringify(input); + const opts = options ?? {}; + const { additionalMetadata, parentStepRunId, parentTaskRunExternalId, ...rest } = opts; + const request = { name: computedName, input: inputStr, - ...options, - additionalMetadata: options?.additionalMetadata - ? JSON.stringify(options?.additionalMetadata) - : undefined, - priority: options?.priority, + ...rest, + // API expects `parentTaskRunExternalId`; accept old names as aliases. + parentTaskRunExternalId: parentTaskRunExternalId ?? parentStepRunId, + additionalMetadata: additionalMetadata ? 
JSON.stringify(additionalMetadata) : undefined, + priority: opts.priority, }; const resp = await retrier(async () => this.grpc.triggerWorkflow(request), this.logger); @@ -113,6 +136,16 @@ export class AdminClient { input: Q; options?: { parentId?: string | undefined; + /** + * (optional) the parent task run external id. + * + * This is the field understood by the workflows gRPC API (`parent_task_run_external_id`). + */ + parentTaskRunExternalId?: string | undefined; + /** + * @deprecated Use `parentTaskRunExternalId` instead. + * Kept for backward compatibility; will be mapped to `parentTaskRunExternalId`. + */ parentStepRunId?: string | undefined; childIndex?: number | undefined; childKey?: string | undefined; @@ -129,13 +162,16 @@ export class AdminClient { const computedName = applyNamespace(workflowName, this.config.namespace); const inputStr = JSON.stringify(input); + const opts = options ?? {}; + const { additionalMetadata, parentStepRunId, parentTaskRunExternalId, ...rest } = opts; + return { name: computedName, input: inputStr, - ...options, - additionalMetadata: options?.additionalMetadata - ? JSON.stringify(options.additionalMetadata) - : undefined, + ...rest, + // API expects `parentTaskRunExternalId`; accept old names as aliases. + parentTaskRunExternalId: parentTaskRunExternalId ?? parentStepRunId, + additionalMetadata: additionalMetadata ? JSON.stringify(additionalMetadata) : undefined, }; }); diff --git a/sdks/typescript/src/v1/client/client.ts b/sdks/typescript/src/v1/client/client.ts index 61d877065c..73f18053ec 100644 --- a/sdks/typescript/src/v1/client/client.ts +++ b/sdks/typescript/src/v1/client/client.ts @@ -8,8 +8,6 @@ import { } from '@hatchet/clients/hatchet-client'; import { AxiosRequestConfig } from 'axios'; import WorkflowRunRef from '@hatchet/util/workflow-run-ref'; -import { Workflow as V0Workflow } from '@hatchet/workflow'; -import { V0DurableContext } from '@hatchet/step'; import api, { Api } from '@hatchet/clients/rest'; import { ConfigLoader } from '@hatchet/util/config-loader'; import { DEFAULT_LOGGER } from '@hatchet/clients/hatchet-client/hatchet-logger'; @@ -30,6 +28,8 @@ import { CreateDurableTaskWorkflow, CreateDurableTaskWorkflowOpts, } from '../declaration'; +import type { LegacyWorkflow } from '../../legacy/legacy-transformer'; +import { getWorkflowName } from '../../legacy/legacy-transformer'; import { IHatchetClient } from './client.interface'; import { CreateWorkerOpts, Worker } from './worker/worker'; import { MetricsClient } from './features/metrics'; @@ -44,6 +44,7 @@ import { ScheduleClient } from './features/schedules'; import { CronClient } from './features/crons'; import { CELClient } from './features/cel'; import { TenantClient } from './features/tenant'; +import { DurableContext } from './worker/context'; /** * HatchetV1 implements the main client interface for interacting with the Hatchet workflow engine. @@ -142,7 +143,7 @@ export class HatchetClient implements IHatchetClient { ); } }) - .catch((error) => { + .catch(() => { // Do nothing here }); } catch (e) { @@ -245,7 +246,7 @@ export class HatchetClient implements IHatchetClient { * @returns A TaskWorkflowDeclaration instance with inferred types */ durableTask< - Fn extends (input: I, ctx: V0DurableContext) => O | Promise, + Fn extends (input: I, ctx: DurableContext) => O | Promise, I extends InputType = Parameters[0], O extends OutputType = ReturnType extends Promise ? 
P extends OutputType @@ -277,19 +278,11 @@ export class HatchetClient implements IHatchetClient { * @returns A WorkflowRunRef containing the run ID and methods to interact with the run */ async runNoWait( - workflow: BaseWorkflowDeclaration | string | V0Workflow, + workflow: BaseWorkflowDeclaration | LegacyWorkflow | string, input: I, options: RunOpts ): Promise> { - let name: string; - if (typeof workflow === 'string') { - name = workflow; - } else if ('id' in workflow) { - name = workflow.id; - } else { - throw new Error('unable to identify workflow'); - } - + const name = getWorkflowName(workflow); return this.admin.runWorkflow(name, input, options); } @@ -304,7 +297,7 @@ export class HatchetClient implements IHatchetClient { * @returns A promise that resolves with the workflow result */ async runAndWait( - workflow: BaseWorkflowDeclaration | string | V0Workflow, + workflow: BaseWorkflowDeclaration | LegacyWorkflow | string, input: I, options: RunOpts = {} ): Promise { @@ -321,7 +314,7 @@ export class HatchetClient implements IHatchetClient { * @returns A promise that resolves with the workflow result */ async run( - workflow: BaseWorkflowDeclaration | string | V0Workflow, + workflow: BaseWorkflowDeclaration | LegacyWorkflow | string, input: I, options: RunOpts = {} ): Promise { @@ -546,15 +539,6 @@ export class HatchetClient implements IHatchetClient { return Worker.create(this, this._v0, name, opts); } - /** - * Register a webhook with the worker - * @param workflows - The workflows to register on the webhooks - * @returns A promise that resolves when the webhook is registered - */ - webhooks(workflows: V0Workflow[]) { - return this._v0.webhooks(workflows); - } - runRef = any>(id: string): WorkflowRunRef { return this.runs.runRef(id); } diff --git a/sdks/typescript/src/v1/client/features/crons.ts b/sdks/typescript/src/v1/client/features/crons.ts index aae6013f3a..28167cb0b9 100644 --- a/sdks/typescript/src/v1/client/features/crons.ts +++ b/sdks/typescript/src/v1/client/features/crons.ts @@ -1,9 +1,9 @@ import { CronWorkflows, CronWorkflowsList } from '@hatchet/clients/rest/generated/data-contracts'; import { z } from 'zod'; -import { Workflow } from '@hatchet/workflow'; import { AxiosError } from 'axios'; import { isValidUUID } from '@util/uuid'; import { BaseWorkflowDeclaration } from '@hatchet/v1'; +import type { LegacyWorkflow } from '@hatchet-dev/typescript-sdk/legacy/legacy-transformer'; import { applyNamespace } from '@hatchet/util/apply-namespace'; import { HatchetClient } from '../client'; import { workflowNameString, WorkflowsClient } from './workflows'; @@ -63,7 +63,7 @@ export class CronClient { * @throws Will throw an error if the input is invalid or the API call fails. 
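 * Example (a sketch; the `hatchet.crons` accessor and the `CreateCronInput` fields shown are assumptions):
 * ```ts
 * const cron = await hatchet.crons.create(myWorkflow, {
 *   name: 'nightly-report',
 *   expression: '0 0 * * *',
 *   input: {},
 * });
 * ```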
*/ async create( - workflow: string | Workflow | BaseWorkflowDeclaration, + workflow: string | BaseWorkflowDeclaration | LegacyWorkflow, cron: CreateCronInput ): Promise { const workflowId = applyNamespace(workflowNameString(workflow), this.namespace); @@ -109,7 +109,7 @@ export class CronClient { */ async list( query: Parameters[1] & { - workflow?: string | Workflow | BaseWorkflowDeclaration; + workflow?: string | BaseWorkflowDeclaration | LegacyWorkflow; } ): Promise { const { workflow, ...rest } = query; diff --git a/sdks/typescript/src/v1/client/features/metrics.ts b/sdks/typescript/src/v1/client/features/metrics.ts index e464f3633b..40c0dd6f0d 100644 --- a/sdks/typescript/src/v1/client/features/metrics.ts +++ b/sdks/typescript/src/v1/client/features/metrics.ts @@ -1,5 +1,5 @@ import { BaseWorkflowDeclaration, workflowNameString } from '@hatchet/v1'; -import { Workflow } from '@hatchet/workflow'; +import type { LegacyWorkflow } from '@hatchet-dev/typescript-sdk/legacy/legacy-transformer'; import { HatchetClient } from '../client'; /** * MetricsClient is used to get metrics for workflows @@ -14,7 +14,7 @@ export class MetricsClient { } async getWorkflowMetrics( - workflow: string | Workflow | BaseWorkflowDeclaration, + workflow: string | BaseWorkflowDeclaration | LegacyWorkflow, opts?: Parameters[1] ) { const name = workflowNameString(workflow); diff --git a/sdks/typescript/src/v1/client/features/runs.ts b/sdks/typescript/src/v1/client/features/runs.ts index 8615aae59f..5b9dd63680 100644 --- a/sdks/typescript/src/v1/client/features/runs.ts +++ b/sdks/typescript/src/v1/client/features/runs.ts @@ -47,14 +47,24 @@ export interface ListRunsOpts extends RunFilter { workerId?: string; /** Whether to include DAGs or only to include tasks */ onlyTasks: boolean; + /** - * The parent task external id to filter by + * The parent task run external id to filter by + * @deprecated use parentTaskRunExternalId instead * @format uuid * @minLength 36 * @maxLength 36 */ parentTaskExternalId?: string; + /** + * The parent task run external id to filter by + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + parentTaskRunExternalId?: string; + /** * The triggering event external id to filter by * @format uuid @@ -100,8 +110,13 @@ export class RunsClient { } async list(opts?: Partial) { + const normalizedOpts = + opts?.parentTaskExternalId && !opts?.parentTaskRunExternalId + ? 
{ ...opts, parentTaskRunExternalId: opts.parentTaskExternalId } + : opts; + const { data } = await this.api.v1WorkflowRunList(this.tenantId, { - ...(await this.prepareListFilter(opts || {})), + ...(await this.prepareListFilter(normalizedOpts || {})), }); return data; } @@ -166,7 +181,7 @@ export class RunsClient { ), additional_metadata: am, only_tasks: opts.onlyTasks || false, - parent_task_external_id: opts.parentTaskExternalId, + parent_task_external_id: opts.parentTaskRunExternalId, triggering_event_external_id: opts.triggeringEventExternalId, include_payloads: opts.includePayloads, }; diff --git a/sdks/typescript/src/v1/client/features/schedules.ts b/sdks/typescript/src/v1/client/features/schedules.ts index eed91fe4c6..5e86ce4caf 100644 --- a/sdks/typescript/src/v1/client/features/schedules.ts +++ b/sdks/typescript/src/v1/client/features/schedules.ts @@ -6,10 +6,10 @@ import { ScheduledWorkflowsList, } from '@hatchet/clients/rest/generated/data-contracts'; import { z } from 'zod'; -import { Workflow } from '@hatchet/workflow'; import { AxiosError } from 'axios'; import { isValidUUID } from '@util/uuid'; import { BaseWorkflowDeclaration, WorkflowDefinition } from '@hatchet/v1'; +import type { LegacyWorkflow } from '@hatchet-dev/typescript-sdk/legacy/legacy-transformer'; import { applyNamespace } from '@hatchet/util/apply-namespace'; import { HatchetClient } from '../client'; import { workflowNameString, WorkflowsClient } from './workflows'; @@ -76,7 +76,7 @@ export class ScheduleClient { * @throws Will throw an error if the input is invalid or the API call fails. */ async create( - workflow: string | Workflow, + workflow: string | BaseWorkflowDeclaration | LegacyWorkflow, cron: CreateScheduledRunInput ): Promise { const workflowId = applyNamespace(workflowNameString(workflow), this.namespace); @@ -153,7 +153,7 @@ export class ScheduleClient { */ async list( query: Parameters[1] & { - workflow?: string | Workflow | WorkflowDefinition | BaseWorkflowDeclaration; + workflow?: string | WorkflowDefinition | BaseWorkflowDeclaration | LegacyWorkflow; } ): Promise { const { workflow, ...rest } = query; diff --git a/sdks/typescript/src/v1/client/features/workflows.ts b/sdks/typescript/src/v1/client/features/workflows.ts index 1e4d22261b..af999f954a 100644 --- a/sdks/typescript/src/v1/client/features/workflows.ts +++ b/sdks/typescript/src/v1/client/features/workflows.ts @@ -1,18 +1,28 @@ -import { Workflow } from '@hatchet/workflow'; import { BaseWorkflowDeclaration, WorkflowDefinition } from '@hatchet/v1'; +import type { LegacyWorkflow } from '@hatchet-dev/typescript-sdk/legacy/legacy-transformer'; +import { + isLegacyWorkflow, + warnLegacyWorkflow, +} from '@hatchet-dev/typescript-sdk/legacy/legacy-transformer'; import { isValidUUID } from '@util/uuid'; import { HatchetClient } from '../client'; export const workflowNameString = ( - workflow: string | Workflow | WorkflowDefinition | BaseWorkflowDeclaration + workflow: string | WorkflowDefinition | BaseWorkflowDeclaration | LegacyWorkflow ) => { if (typeof workflow === 'string') { return workflow; } - if (typeof workflow === 'object' && 'id' in workflow) { + if (typeof workflow === 'object' && 'name' in workflow) { + return workflow.name as string; + } + if (isLegacyWorkflow(workflow)) { + warnLegacyWorkflow(); return workflow.id; } - return workflow.name; + throw new Error( + 'Invalid workflow: must be a string, Workflow object, or WorkflowDefinition object' + ); }; /** @@ -42,7 +52,7 @@ export class WorkflowsClient { * @returns The workflow ID 
as a string. */ async getWorkflowIdFromName( - workflow: string | Workflow | WorkflowDefinition | BaseWorkflowDeclaration + workflow: string | WorkflowDefinition | BaseWorkflowDeclaration | LegacyWorkflow ): Promise { const str = (() => { if (typeof workflow === 'string') { return workflow; @@ -53,13 +63,6 @@ return workflow.name; } - if (typeof workflow === 'object' && 'id' in workflow) { - if (!workflow.id) { - throw new Error('Workflow ID is required'); - } - return workflow.id; - } - throw new Error( 'Invalid workflow: must be a string, Workflow object, or WorkflowDefinition object' ); @@ -76,7 +79,7 @@ return str; } - async get(workflow: string | BaseWorkflowDeclaration | Workflow) { + async get(workflow: string | BaseWorkflowDeclaration | LegacyWorkflow) { // Get workflow name string const name = workflowNameString(workflow); @@ -121,7 +124,7 @@ return data; } - async delete(workflow: string | BaseWorkflowDeclaration | Workflow) { + async delete(workflow: string | BaseWorkflowDeclaration | LegacyWorkflow) { const name = workflowNameString(workflow); try { diff --git a/sdks/typescript/src/v1/client/worker/context.ts b/sdks/typescript/src/v1/client/worker/context.ts index adc95e9b42..fd0ba59309 100644 --- a/sdks/typescript/src/v1/client/worker/context.ts +++ b/sdks/typescript/src/v1/client/worker/context.ts @@ -16,11 +16,11 @@ import { Conditions, Render } from '@hatchet/v1/conditions'; import { conditionsToPb } from '@hatchet/v1/conditions/transformer'; import { CreateWorkflowDurableTaskOpts, CreateWorkflowTaskOpts } from '@hatchet/v1/task'; import { OutputType } from '@hatchet/v1/types'; -import { Workflow } from '@hatchet/workflow'; import { Action as ConditionAction } from '@hatchet/protoc/v1/shared/condition'; import { HatchetClient } from '@hatchet/v1'; -import { ContextWorker, NextStep } from '@hatchet/step'; import { applyNamespace } from '@hatchet/util/apply-namespace'; +import { createAbortError, rethrowIfAborted } from '@hatchet/util/abort-error'; +import { WorkerLabels } from '@hatchet/clients/dispatcher/dispatcher-client'; import { V1Worker } from './worker-internal'; import { Duration } from '../duration'; @@ -42,6 +42,52 @@ interface ContextData { step_run_errors: Record; } +/** + * ContextWorker is a wrapper around the V1Worker class that provides a more user-friendly interface for the worker from the context of a run. + */ +export class ContextWorker { + private worker: V1Worker; + constructor(worker: V1Worker) { + this.worker = worker; + } + + /** + * Gets the ID of the worker. + * @returns The ID of the worker. + */ + id() { + return this.worker.workerId; + } + + /** + * Checks if the worker has a registered workflow. + * @param workflowName - The name of the workflow to check. + * @returns True if the workflow is registered, otherwise false. + */ + hasWorkflow(workflowName: string) { + return !!this.worker.workflow_registry.find((workflow) => + 'id' in workflow ? workflow.id === workflowName : workflow.name === workflowName + ); + } + + /** + * Gets the current state of the worker labels. + * @returns The labels of the worker. + */ + labels() { + return this.worker.labels; + } + + /** + * Upserts a set of labels on the worker. + * @param labels - The labels to upsert. + * @returns A promise that resolves when the labels have been upserted.
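+ *
+ * Example (a sketch; label keys and values are illustrative):
+ * ```ts
+ * await ctx.worker.upsertLabels({ model: 'fast', memoryGB: 16 });
+ * ```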
+ */ + upsertLabels(labels: WorkerLabels) { + return this.worker.upsertLabels(labels); + } +} + export class Context { data: ContextData; // @deprecated use input prop instead @@ -90,9 +136,27 @@ return this.controller.signal.aborted; } + protected throwIfCancelled(): void { + if (this.abortController.signal.aborted) { + throw createAbortError('Operation cancelled by AbortSignal'); + } + } + + /** + * Helper for broad `catch` blocks so cancellation isn't accidentally swallowed. + * + * Example: + * ```ts + * try { ... } catch (e) { ctx.rethrowIfCancelled(e); ... } + * ``` + */ + rethrowIfCancelled(err: unknown): void { + rethrowIfAborted(err); + } + async cancel() { await this.v1.runs.cancel({ - ids: [this.action.stepRunId], + ids: [this.action.taskRunExternalId], }); // optimistically abort the run @@ -194,7 +258,7 @@ * @returns The name of the task. */ taskName(): string { - return this.action.stepName; + return this.action.taskName; } /** @@ -225,8 +289,8 @@ * Gets the ID of the current task run. * @returns The task run ID. */ - taskRunId(): string { - return this.action.stepRunId; + taskRunExternalId(): string { + return this.action.taskRunExternalId; } /** @@ -244,9 +308,9 @@ * @deprecated use ctx.logger.info, ctx.logger.debug, ctx.logger.warn, ctx.logger.error, ctx.logger.trace instead */ log(message: string, level?: LogLevel, extra?: LogExtra) { - const { stepRunId } = this.action; + const { taskRunExternalId } = this.action; - if (!stepRunId) { + if (!taskRunExternalId) { // log a warning this._logger.warn('cannot log from context without stepRunId'); return Promise.resolve(); @@ -255,7 +319,7 @@ const logger = this.v1.config.logger('ctx', this.v1.config.log_level); const contextExtra = { workflowRunId: this.action.workflowRunId, - taskRunId: this.action.stepRunId, + taskRunExternalId: this.action.taskRunExternalId, retryCount: this.action.retryCount, workflowName: this.action.jobName, ...extra?.extra, @@ -275,7 +339,13 @@ // FIXME: this is a hack to get around the fact that the log level is not typed promises.push( - this.v1.event.putLog(stepRunId, message, level as any, this.retryCount(), extra?.extra) + this.v1.event.putLog( + taskRunExternalId, + message, + level as any, + this.retryCount(), + extra?.extra + ) ); return Promise.all(promises); @@ -311,15 +381,15 @@ * The interval should be specified in the format of '10s' for 10 seconds, '1m' for 1 minute, or '1d' for 1 day. */ async refreshTimeout(incrementBy: Duration) { - const { stepRunId } = this.action; + const { taskRunExternalId } = this.action; - if (!stepRunId) { + if (!taskRunExternalId) { // log a warning this._logger.warn('cannot refresh timeout from context without stepRunId'); return; } - await this.v1._v0.dispatcher.refreshTimeout(incrementBy, stepRunId); + await this.v1._v0.dispatcher.refreshTimeout(incrementBy, taskRunExternalId); } /** @@ -329,7 +399,7 @@ */ async releaseSlot(): Promise { await this.v1._v0.dispatcher.client.releaseSlot({ - stepRunId: this.action.stepRunId, + taskRunExternalId: this.action.taskRunExternalId, }); } /** @@ -339,9 +409,9 @@ * @returns A promise that resolves when the data has been streamed.
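 *
 * Example (a sketch): stream incremental progress from inside a task:
 * ```ts
 * await ctx.putStream('progress: 50%');
 * ```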
*/ async putStream(data: string | Uint8Array) { - const { stepRunId } = this.action; + const { taskRunExternalId } = this.action; - if (!stepRunId) { + if (!taskRunExternalId) { // log a warning this._logger.warn('cannot log from context without stepRunId'); return; @@ -349,16 +419,18 @@ export class Context { const index = this._incrementStreamIndex(); - await this.v1._v0.event.putStream(stepRunId, data, index); + await this.v1._v0.event.putStream(taskRunExternalId, data, index); } - private spawnOptions(workflow: string | Workflow | WorkflowV1, options?: ChildRunOpts) { + private spawnOptions(workflow: string | WorkflowV1, options?: ChildRunOpts) { + this.throwIfCancelled(); + let workflowName: string; if (typeof workflow === 'string') { workflowName = workflow; } else { - workflowName = workflow.id; + workflowName = workflow.name; } const opts = options || {}; @@ -370,12 +442,12 @@ export class Context { ); } - const { workflowRunId, stepRunId } = this.action; + const { workflowRunId, taskRunExternalId } = this.action; const finalOpts = { - ...options, + ...opts, parentId: workflowRunId, - parentStepRunId: stepRunId, + parentTaskRunExternalId: taskRunExternalId, childIndex: this.spawnIndex, childKey: options?.key, desiredWorkerId: sticky ? this.worker.id() : undefined, @@ -389,7 +461,7 @@ export class Context { } private spawn( - workflow: string | Workflow | WorkflowV1, + workflow: string | WorkflowV1, input: Q, options?: ChildRunOpts ) { @@ -399,11 +471,12 @@ export class Context { private spawnBulk( children: Array<{ - workflow: string | Workflow | WorkflowV1; + workflow: string | WorkflowV1; input: Q; options?: ChildRunOpts; }> ) { + this.throwIfCancelled(); const workflows: Parameters>[0] = children.map( (child) => { const { workflowName, opts } = this.spawnOptions(child.workflow, child.options); @@ -421,12 +494,17 @@ export class Context { */ async bulkRunNoWaitChildren( children: Array<{ - workflow: string | Workflow | WorkflowV1; + workflow: string | WorkflowV1; input: Q; options?: ChildRunOpts; }> ): Promise[]> { - return this.spawnBulk(children); + const refs = await this.spawnBulk(children); + refs.forEach((ref) => { + // eslint-disable-next-line no-param-reassign + ref.defaultSignal = this.abortController.signal; + }); + return refs; } /** @@ -436,7 +514,7 @@ export class Context { */ async bulkRunChildren( children: Array<{ - workflow: string | Workflow | WorkflowV1; + workflow: string | WorkflowV1; input: Q; options?: ChildRunOpts; }> @@ -454,11 +532,14 @@ export class Context { * @returns The result of the workflow. */ async runChild( - workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration, + workflow: string | WorkflowV1 | TaskWorkflowDeclaration, input: Q, options?: ChildRunOpts ): Promise

{ const run = await this.spawn(workflow, input, options); + // Ensure waiting for the child result aborts when this task is cancelled. + // eslint-disable-next-line no-param-reassign + run.defaultSignal = this.abortController.signal; return run.output; } @@ -471,11 +552,12 @@ export class Context { * @returns A reference to the spawned workflow run. */ async runNoWaitChild( - workflow: string | Workflow | WorkflowV1, + workflow: string | WorkflowV1, input: Q, options?: ChildRunOpts ): Promise> { const ref = await this.spawn(workflow, input, options); + ref.defaultSignal = this.abortController.signal; return ref; } @@ -529,42 +611,6 @@ export class Context { return undefined; } } - // FIXME: drop these at some point soon - - /** - * Get the output of a task. - * @param task - The name of the task to get the output for. - * @returns The output of the task. - * @throws An error if the task output is not found. - * @deprecated use ctx.parentOutput instead - */ - stepOutput(step: string): L { - if (!this.data.parents) { - throw new HatchetError('Parent task outputs not found'); - } - if (!this.data.parents[step]) { - throw new HatchetError(`Output for parent task '${step}' not found`); - } - return this.data.parents[step]; - } - - /** - * Gets the input data for the current workflow. - * @returns The input data for the workflow. - * @deprecated use task input parameter instead - */ - workflowInput(): T { - return this.input; - } - - /** - * Gets the name of the current task. - * @returns The name of the task. - * @deprecated use ctx.taskName instead - */ - stepName(): string { - return this.taskName(); - } /** * Spawns multiple workflows. @@ -575,12 +621,13 @@ export class Context { */ async spawnWorkflows( workflows: Array<{ - workflow: string | Workflow | WorkflowV1; + workflow: string | WorkflowV1; input: Q; options?: ChildRunOpts; }> ): Promise[]> { - const { workflowRunId, stepRunId } = this.action; + this.throwIfCancelled(); + const { workflowRunId, taskRunExternalId } = this.action; const workflowRuns = workflows.map(({ workflow, input, options }) => { let workflowName: string; @@ -588,7 +635,7 @@ export class Context { if (typeof workflow === 'string') { workflowName = workflow; } else { - workflowName = workflow.id; + workflowName = workflow.name; } const name = applyNamespace(workflowName, this.v1.config.namespace); @@ -602,13 +649,18 @@ export class Context { ); } + // `signal` must never be sent over the wire. + const optsWithoutSignal: Omit & { signal?: never } = { ...opts }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete (optsWithoutSignal as any).signal; + const resp = { workflowName: name, input, options: { - ...opts, + ...optsWithoutSignal, parentId: workflowRunId, - parentStepRunId: stepRunId, + parentTaskRunExternalId: taskRunExternalId, childIndex: this.spawnIndex, desiredWorkerId: sticky ? this.worker.id() : undefined, }, @@ -653,18 +705,19 @@ export class Context { * @deprecated Use runChild or runNoWaitChild instead. 
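 *
 * Migration sketch:
 * ```ts
 * // before: const ref = await ctx.spawnWorkflow(childWf, input);
 * const out = await ctx.runChild(childWf, input);       // wait for the result
 * const ref = await ctx.runNoWaitChild(childWf, input); // or keep a run ref
 * ```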
*/ async spawnWorkflow( - workflow: string | Workflow | WorkflowV1 | TaskWorkflowDeclaration, + workflow: string | WorkflowV1 | TaskWorkflowDeclaration, input: Q, options?: ChildRunOpts ): Promise> { - const { workflowRunId, stepRunId } = this.action; + this.throwIfCancelled(); + const { workflowRunId, taskRunExternalId } = this.action; let workflowName: string = ''; if (typeof workflow === 'string') { workflowName = workflow; } else { - workflowName = workflow.id; + workflowName = workflow.name; } const name = applyNamespace(workflowName, this.v1.config.namespace); @@ -681,7 +734,7 @@ export class Context { try { const resp = await this.v1._v0.admin.runWorkflow(name, input, { parentId: workflowRunId, - parentStepRunId: stepRunId, + parentTaskRunExternalId: taskRunExternalId, childIndex: this.spawnIndex, desiredWorkerId: sticky ? this.worker.id() : undefined, ...opts, @@ -727,23 +780,24 @@ export class DurableContext extends Context { * @returns A promise that resolves with the event that satisfied the conditions. */ async waitFor(conditions: Conditions | Conditions[]): Promise> { + this.throwIfCancelled(); const pbConditions = conditionsToPb(Render(ConditionAction.CREATE, conditions)); // eslint-disable-next-line no-plusplus const key = `waitFor-${this.waitKey++}`; await this.v1._v0.durableListener.registerDurableEvent({ - taskId: this.action.stepRunId, + taskId: this.action.taskRunExternalId, signalKey: key, sleepConditions: pbConditions.sleepConditions, userEventConditions: pbConditions.userEventConditions, }); - - const listener = this.v1._v0.durableListener.subscribe({ - taskId: this.action.stepRunId, - signalKey: key, - }); - - const event = await listener.get(); + const event = await this.v1._v0.durableListener.result( + { + taskId: this.action.taskRunExternalId, + signalKey: key, + }, + { signal: this.abortController.signal } + ); // Convert event.data from Uint8Array to string if needed const eventData = diff --git a/sdks/typescript/src/v1/client/worker/slot-utils.ts b/sdks/typescript/src/v1/client/worker/slot-utils.ts new file mode 100644 index 0000000000..2e696876cb --- /dev/null +++ b/sdks/typescript/src/v1/client/worker/slot-utils.ts @@ -0,0 +1,117 @@ +import { BaseWorkflowDeclaration } from '../../declaration'; +import type { LegacyWorkflow } from '../../../legacy/legacy-transformer'; +import { SlotConfig, SlotType } from '../../slot-types'; + +const DEFAULT_DEFAULT_SLOTS = 100; +const DEFAULT_DURABLE_SLOTS = 1_000; + +export interface WorkerSlotOptions { + /** (optional) Maximum number of concurrent runs on this worker, defaults to 100 */ + slots?: number; + /** (optional) Maximum number of concurrent durable tasks, defaults to 1,000 */ + durableSlots?: number; + /** (optional) Array of workflows to register (supports both v1 and legacy workflow formats) */ + workflows?: Array | LegacyWorkflow>; + /** @deprecated Use slots instead */ + maxRuns?: number; +} + +export function resolveWorkerOptions( + options: T +): T & { + slots?: number; + durableSlots?: number; + slotConfig: SlotConfig; +} { + const requiredSlotTypes = options.workflows + ? getRequiredSlotTypes(options.workflows) + : new Set(); + + const slotConfig: SlotConfig = + options.slots || options.durableSlots || options.maxRuns + ? { + ...(options.slots || options.maxRuns + ? { [SlotType.Default]: options.slots || options.maxRuns || 0 } + : {}), + ...(options.durableSlots ? 
{ [SlotType.Durable]: options.durableSlots } : {}), + } + : {}; + + if (requiredSlotTypes.has(SlotType.Default) && slotConfig[SlotType.Default] == null) { + slotConfig[SlotType.Default] = DEFAULT_DEFAULT_SLOTS; + } + if (requiredSlotTypes.has(SlotType.Durable) && slotConfig[SlotType.Durable] == null) { + slotConfig[SlotType.Durable] = DEFAULT_DURABLE_SLOTS; + } + + if (Object.keys(slotConfig).length === 0) { + slotConfig[SlotType.Default] = DEFAULT_DEFAULT_SLOTS; + } + + return { + ...options, + slots: + options.slots || + options.maxRuns || + (slotConfig[SlotType.Default] != null ? slotConfig[SlotType.Default] : undefined), + durableSlots: + options.durableSlots || + (slotConfig[SlotType.Durable] != null ? slotConfig[SlotType.Durable] : undefined), + slotConfig, + }; +} + +// eslint-disable-next-line @typescript-eslint/naming-convention +export const testingExports = { + resolveWorkerOptions, +}; + +function getRequiredSlotTypes( + workflows: Array | LegacyWorkflow> +): Set { + const required = new Set(); + const addFromRequests = ( + requests: Record | undefined, + fallbackType: SlotType + ) => { + if (requests && Object.keys(requests).length > 0) { + if (requests[SlotType.Default] !== undefined) { + required.add(SlotType.Default); + } + if (requests[SlotType.Durable] !== undefined) { + required.add(SlotType.Durable); + } + } else { + required.add(fallbackType); + } + }; + + for (const wf of workflows) { + if (wf instanceof BaseWorkflowDeclaration) { + // eslint-disable-next-line dot-notation + const tasks = wf.definition['_tasks'] as Array<{ slotRequests?: Record }>; + for (const task of tasks) { + addFromRequests(task.slotRequests, SlotType.Default); + } + // eslint-disable-next-line dot-notation + const durableTasks = wf.definition['_durableTasks'] as Array; + if (durableTasks.length > 0) { + required.add(SlotType.Durable); + } + + if (wf.definition.onFailure) { + const opts = + typeof wf.definition.onFailure === 'object' ? wf.definition.onFailure : undefined; + addFromRequests(opts?.slotRequests, SlotType.Default); + } + + if (wf.definition.onSuccess) { + const opts = + typeof wf.definition.onSuccess === 'object' ? wf.definition.onSuccess : undefined; + addFromRequests(opts?.slotRequests, SlotType.Default); + } + } + } + + return required; +} diff --git a/sdks/typescript/src/v1/client/worker/worker-cancel-supervision.test.ts b/sdks/typescript/src/v1/client/worker/worker-cancel-supervision.test.ts new file mode 100644 index 0000000000..6ff90fbd11 --- /dev/null +++ b/sdks/typescript/src/v1/client/worker/worker-cancel-supervision.test.ts @@ -0,0 +1,94 @@ +import { V1Worker } from '@hatchet/v1/client/worker/worker-internal'; +import HatchetPromise from '@util/hatchet-promise/hatchet-promise'; + +describe('V1Worker handleCancelStepRun cancellation supervision', () => { + beforeEach(() => { + jest.useFakeTimers(); + }); + + afterEach(() => { + jest.useRealTimers(); + }); + + it('logs warnings after threshold and grace period, then returns', async () => { + const logger = { + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }; + + const taskExternalId = 'task-1'; + + // Use the real HatchetPromise behavior: cancel rejects the wrapper immediately, + // while the underlying work (`inner`) continues. 
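+ // This mirrors the supervision in handleCancelStepRun: the worker races `inner`
+ // against the warning threshold (300ms) and grace period (1000ms) configured below.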
+ const inner = new Promise(() => { + // never resolves + }); + const future = new HatchetPromise(inner); + const originalCancel = future.cancel; + const cancelSpy = jest.fn((reason: any) => originalCancel(reason)); + future.cancel = cancelSpy; + + const ctx = { + abortController: new AbortController(), + }; + + const fakeThis: any = { + logger, + client: { + config: { + cancellation_warning_threshold: 300, + cancellation_grace_period: 1000, + }, + }, + cancellingTaskRuns: new Set(), + futures: { [taskExternalId]: future }, + contexts: { [taskExternalId]: ctx }, + }; + + const action: any = { taskRunExternalId: taskExternalId }; + + const p = V1Worker.prototype.handleCancelStepRun.call(fakeThis, action); + + await jest.advanceTimersByTimeAsync(1500); + await p; + + expect(ctx.abortController.signal.aborted).toBe(true); + expect(cancelSpy).toHaveBeenCalled(); + expect(logger.warn).toHaveBeenCalled(); + + expect(fakeThis.futures[taskExternalId]).toBeUndefined(); + expect(fakeThis.contexts[taskExternalId]).toBeUndefined(); + }); + + it('suppresses "was cancelled" debug log when cancellation is supervised', async () => { + const logger = { + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }; + + const taskExternalId = 'task-2'; + + const fakeThis: any = { + logger, + cancellingTaskRuns: new Set([taskExternalId]), + }; + + // Reproduce the log suppression logic from the step execution path: + // we only log "was cancelled" if the cancellation isn't currently supervised. + const maybeLog = (e: any) => { + const message = e?.message || String(e); + if (message.includes('Cancelled')) { + if (!fakeThis.cancellingTaskRuns.has(taskExternalId)) { + fakeThis.logger.debug(`Task run ${taskExternalId} was cancelled`); + } + } + }; + + maybeLog(new Error('Cancelled by worker')); + expect(logger.debug).not.toHaveBeenCalled(); + }); +}); diff --git a/sdks/typescript/src/v1/client/worker/worker-internal.ts b/sdks/typescript/src/v1/client/worker/worker-internal.ts index b177883315..62e344eef7 100644 --- a/sdks/typescript/src/v1/client/worker/worker-internal.ts +++ b/sdks/typescript/src/v1/client/worker/worker-internal.ts @@ -11,16 +11,12 @@ import { actionTypeFromJSON, } from '@hatchet/protoc/dispatcher'; import HatchetPromise from '@util/hatchet-promise/hatchet-promise'; -import { Workflow } from '@hatchet/workflow'; import { - ConcurrencyLimitStrategy, - CreateWorkflowJobOpts, - CreateWorkflowStepOpts, + CreateStepRateLimit, DesiredWorkerLabels, - WorkflowConcurrencyOpts, + StickyStrategy, } from '@hatchet/protoc/workflows'; -import { Logger } from '@hatchet/util/logger'; -import { WebhookWorkerCreateRequest } from '@clients/rest/generated/data-contracts'; +import { actionMap, Logger, taskRunLog } from '@hatchet/util/logger'; import { BaseWorkflowDeclaration, WorkflowDefinition, HatchetClient } from '@hatchet/v1'; import { CreateTaskOpts } from '@hatchet/protoc/v1/workflows'; import { @@ -34,18 +30,21 @@ import { taskConditionsToPb } from '@hatchet/v1/conditions/transformer'; import { zodToJsonSchema } from 'zod-to-json-schema'; import { WorkerLabels } from '@hatchet/clients/dispatcher/dispatcher-client'; -import { CreateStep, mapRateLimit, StepRunFunction } from '@hatchet/step'; import { applyNamespace } from '@hatchet/util/apply-namespace'; +import sleep from '@hatchet/util/sleep'; +import { throwIfAborted } from '@hatchet/util/abort-error'; import { Context, DurableContext } from './context'; import { parentRunContextManager } from '../../parent-run-context-vars'; import { 
HealthServer, workerStatus, type WorkerStatus } from './health-server'; +import { SlotConfig, SlotType } from '../../slot-types'; export type ActionRegistry = Record; export interface WorkerOpts { name: string; handleKill?: boolean; - maxRuns?: number; + slots?: number; + durableSlots?: number; labels?: WorkerLabels; healthPort?: number; enableHealthServer?: boolean; @@ -59,11 +58,13 @@ export class V1Worker { handle_kill: boolean; action_registry: ActionRegistry; - workflow_registry: Array = []; + workflow_registry: Array = []; listener: ActionListener | undefined; - futures: Record> = {}; - contexts: Record> = {}; - maxRuns?: number; + futures: Record> = {}; + contexts: Record> = {}; + slots?: number; + durableSlots?: number; + slotConfig: SlotConfig; logger: Logger; @@ -82,14 +83,18 @@ options: { name: string; handleKill?: boolean; - maxRuns?: number; + slots?: number; + durableSlots?: number; + slotConfig?: SlotConfig; labels?: WorkerLabels; } ) { this.client = client; this.name = applyNamespace(options.name, this.client.config.namespace); this.action_registry = {}; - this.maxRuns = options.maxRuns; + this.slots = options.slots; + this.durableSlots = options.durableSlots; + this.slotConfig = options.slotConfig || {}; this.labels = options.labels || {}; @@ -126,12 +131,14 @@ ); } + // TODO: where is this used? This doesn't make much sense. private getAvailableSlots(): number { - if (!this.maxRuns) { + const baseSlots = this.slotConfig[SlotType.Default] ?? this.slots ?? 0; + if (!baseSlots) { return 0; } const currentRuns = Object.keys(this.futures).length; - return Math.max(0, this.maxRuns - currentRuns); + return Math.max(0, baseSlots - currentRuns); } private getRegisteredActions(): string[] { @@ -153,61 +160,6 @@ this.logger.debug(`Worker status changed to: ${status}`); } - private registerActions(workflow: Workflow) { - const newActions = workflow.steps.reduce((acc, step) => { - acc[`${workflow.id}:${step.name.toLowerCase()}`] = step.run; - return acc; - }, {}); - - const onFailureAction = workflow.onFailure - ? { - [`${workflow.id}-on-failure:${workflow.onFailure.name}`]: workflow.onFailure.run, - } - : {}; - - this.action_registry = { - ...this.action_registry, - ...newActions, - ...onFailureAction, - }; - - this.action_registry = - workflow.concurrency?.name && workflow.concurrency.key - ? 
{ - ...this.action_registry, - [`${workflow.id}:${workflow.concurrency.name.toLowerCase()}`]: workflow.concurrency.key, - } - : { - ...this.action_registry, - }; - } - - getHandler(workflows: Workflow[]) { - throw new Error('Not implemented'); - // TODO v1 - // for (const workflow of workflows) { - // const wf: Workflow = { - // ...workflow, - // id: this.client.config.namespace + workflow.id, - // }; - - // this.registerActions(wf); - // } - - // return new WebhookHandler(this, workflows); - } - - async registerWebhook(webhook: WebhookWorkerCreateRequest) { - return this.client._v0.admin.registerWebhook({ ...webhook }); - } - - /** - * @deprecated use registerWorkflow instead - */ - async register_workflow(initWorkflow: Workflow) { - return this.registerWorkflow(initWorkflow); - } - registerDurableActionsV1(workflow: WorkflowDefinition) { const newActions = workflow._durableTasks .filter((task) => !!task.fn) @@ -284,6 +236,8 @@ export class V1Worker { rateLimits: [], workerLabels: {}, concurrency: [], + isDurable: false, + slotRequests: { default: 1 }, }; } @@ -298,14 +252,16 @@ export class V1Worker { inputs: '{}', parents: [], retries: onFailure.retries || workflow.taskDefaults?.retries || 0, - rateLimits: mapRateLimit(onFailure.rateLimits || workflow.taskDefaults?.rateLimits), - workerLabels: toPbWorkerLabel( + rateLimits: mapRateLimitPb(onFailure.rateLimits || workflow.taskDefaults?.rateLimits), + workerLabels: mapWorkerLabelPb( onFailure.desiredWorkerLabels || workflow.taskDefaults?.workerLabels ), concurrency: [], backoffFactor: onFailure.backoff?.factor || workflow.taskDefaults?.backoff?.factor, backoffMaxSeconds: onFailure.backoff?.maxSeconds || workflow.taskDefaults?.backoff?.maxSeconds, + isDurable: false, + slotRequests: { default: 1 }, }; } @@ -317,11 +273,11 @@ export class V1Worker { onSuccessTask = { name: 'on-success-task', fn: workflow.onSuccess, - timeout: '60s', + executionTimeout: '60s', parents, retries: 0, rateLimits: [], - desiredWorkerLabels: {}, + desiredWorkerLabels: undefined, concurrency: [], }; } @@ -333,7 +289,8 @@ export class V1Worker { onSuccessTask = { name: 'on-success-task', fn: onSuccess.fn, - timeout: onSuccess.executionTimeout || workflow.taskDefaults?.executionTimeout || '60s', + executionTimeout: + onSuccess.executionTimeout || workflow.taskDefaults?.executionTimeout || '60s', scheduleTimeout: onSuccess.scheduleTimeout || workflow.taskDefaults?.scheduleTimeout, parents, retries: onSuccess.retries || workflow.taskDefaults?.retries || 0, @@ -348,26 +305,23 @@ export class V1Worker { workflow._tasks.push(onSuccessTask); } - // cron and event triggers - if (workflow.on) { - this.logger.warn( - `\`on\` for event and cron triggers is deprecated and will be removed soon, use \`onEvents\` and \`onCrons\` instead for ${ - workflow.name - }` - ); - } - const eventTriggers = [ ...(workflow.onEvents || []).map((event) => applyNamespace(event, this.client.config.namespace) ), - ...(workflow.on?.event - ? [applyNamespace(workflow.on.event, this.client.config.namespace)] + ...(workflow.on && 'event' in workflow.on && workflow.on.event + ? Array.isArray(workflow.on.event) + ? workflow.on.event.map((event) => applyNamespace(event, this.client.config.namespace)) + : [applyNamespace(workflow.on.event, this.client.config.namespace)] : []), ]; - const cronTriggers = [ + const cronTriggers: string[] = [ ...(workflow.onCrons || []), - ...(workflow.on?.cron ? [workflow.on.cron] : []), + ...(workflow.on && 'cron' in workflow.on && workflow.on.cron + ? 
Array.isArray(workflow.on.cron) + ? workflow.on.cron + : [workflow.on.cron] + : []), ]; const concurrencyArr = Array.isArray(concurrency) ? concurrency : []; @@ -381,13 +335,37 @@ export class V1Worker { inputJsonSchema = new TextEncoder().encode(JSON.stringify(jsonSchema)); } + const durableTaskSet = new Set(workflow._durableTasks); + + let stickyStrategy: StickyStrategy | undefined; + // `workflow.sticky` is optional. When omitted, we don't set any sticky strategy. + // + // When provided, `workflow.sticky` is a v1 (non-protobuf) config which may also include + // legacy protobuf enum values for backwards compatibility. + if (workflow.sticky != null) { + switch (workflow.sticky) { + case 'soft': + case 'SOFT': + case 0: + stickyStrategy = StickyStrategy.SOFT; + break; + case 'hard': + case 'HARD': + case 1: + stickyStrategy = StickyStrategy.HARD; + break; + default: + throw new HatchetError(`Invalid sticky strategy: ${workflow.sticky}`); + } + } + const registeredWorkflow = this.client._v0.admin.putWorkflowV1({ name: workflow.name, description: workflow.description || '', version: workflow.version || '', eventTriggers, cronTriggers, - sticky: workflow.sticky, + sticky: stickyStrategy, concurrencyArr, onFailureTask, defaultPriority: workflow.defaultPriority, @@ -395,23 +373,22 @@ export class V1Worker { tasks: [...workflow._tasks, ...workflow._durableTasks].map((task) => ({ readableId: task.name, action: `${workflow.name}:${task.name}`, - timeout: - task.executionTimeout || - task.timeout || - workflow.taskDefaults?.executionTimeout || - '60s', + timeout: task.executionTimeout || workflow.taskDefaults?.executionTimeout || '60s', scheduleTimeout: task.scheduleTimeout || workflow.taskDefaults?.scheduleTimeout, inputs: '{}', parents: task.parents?.map((p) => p.name) ?? [], userData: '{}', retries: task.retries || workflow.taskDefaults?.retries || 0, - rateLimits: mapRateLimit(task.rateLimits || workflow.taskDefaults?.rateLimits), - workerLabels: toPbWorkerLabel( + rateLimits: mapRateLimitPb(task.rateLimits || workflow.taskDefaults?.rateLimits), + workerLabels: mapWorkerLabelPb( task.desiredWorkerLabels || workflow.taskDefaults?.workerLabels ), backoffFactor: task.backoff?.factor || workflow.taskDefaults?.backoff?.factor, backoffMaxSeconds: task.backoff?.maxSeconds || workflow.taskDefaults?.backoff?.maxSeconds, conditions: taskConditionsToPb(task), + isDurable: durableTaskSet.has(task), + slotRequests: + task.slotRequests || (durableTaskSet.has(task) ? { durable: 1 } : { default: 1 }), concurrency: task.concurrency ? Array.isArray(task.concurrency) ? task.concurrency @@ -440,106 +417,13 @@ export class V1Worker { this.registerActionsV1(workflow); } - async registerWorkflow(initWorkflow: Workflow) { - const workflow: Workflow = { - ...initWorkflow, - id: applyNamespace(initWorkflow.id, this.client.config.namespace).toLowerCase(), - }; - try { - if (workflow.concurrency?.key && workflow.concurrency.expression) { - throw new HatchetError( - 'Cannot have both key function and expression in workflow concurrency configuration' - ); - } - - const concurrency: WorkflowConcurrencyOpts | undefined = - workflow.concurrency?.name || workflow.concurrency?.expression - ? { - action: !workflow.concurrency.expression - ? 
`${workflow.id}:${workflow.concurrency.name}` - : undefined, - maxRuns: workflow.concurrency.maxRuns || 1, - expression: workflow.concurrency.expression, - limitStrategy: - workflow.concurrency.limitStrategy || ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS, - } - : undefined; - - const onFailureJob: CreateWorkflowJobOpts | undefined = workflow.onFailure - ? { - name: `${workflow.id}-on-failure`, - description: workflow.description, - steps: [ - { - readableId: workflow.onFailure.name, - action: `${workflow.id}-on-failure:${workflow.onFailure.name}`, - timeout: workflow.onFailure.timeout || '60s', - inputs: '{}', - parents: [], - userData: '{}', - retries: workflow.onFailure.retries || 0, - rateLimits: mapRateLimit(workflow.onFailure.rate_limits), - workerLabels: {}, // no worker labels for on failure steps - }, - ], - } - : undefined; - - const registeredWorkflow = this.client._v0.admin.putWorkflow({ - name: workflow.id, - description: workflow.description, - version: workflow.version || '', - eventTriggers: - workflow.on && workflow.on.event - ? [applyNamespace(workflow.on.event, this.client.config.namespace)] - : [], - cronTriggers: workflow.on && workflow.on.cron ? [workflow.on.cron] : [], - scheduledTriggers: [], - concurrency, - scheduleTimeout: workflow.scheduleTimeout, - onFailureJob, - sticky: workflow.sticky, - jobs: [ - { - name: workflow.id, - description: workflow.description, - steps: workflow.steps.map((step) => ({ - readableId: step.name, - action: `${workflow.id}:${step.name}`, - timeout: step.timeout || '60s', - inputs: '{}', - parents: step.parents ?? [], - userData: '{}', - retries: step.retries || 0, - rateLimits: mapRateLimit(step.rate_limits), - workerLabels: toPbWorkerLabel(step.worker_labels), - backoffFactor: step.backoff?.factor, - backoffMaxSeconds: step.backoff?.maxSeconds, - })), - }, - ], - }); - this.registeredWorkflowPromises.push(registeredWorkflow); - await registeredWorkflow; - this.workflow_registry.push(workflow); - } catch (e: any) { - throw new HatchetError(`Could not register workflow: ${e.message}`); - } - - this.registerActions(workflow); - } - - registerAction(actionId: string, action: StepRunFunction) { - this.action_registry[actionId.toLowerCase()] = action; - } - async handleStartStepRun(action: Action) { - const { actionId } = action; + const { actionId, taskRunExternalId, taskName } = action; try { // Note: we always use a DurableContext since its a superset of the Context class const context = new DurableContext(action, this.client, this); - this.contexts[action.stepRunId] = context; + this.contexts[taskRunExternalId] = context; const step = this.action_registry[actionId]; @@ -550,13 +434,20 @@ export class V1Worker { } const run = async () => { - parentRunContextManager.setContext({ - parentId: action.workflowRunId, - parentRunId: action.stepRunId, - childIndex: 0, - desiredWorkerId: this.workerId || '', - }); - return step(context); + return parentRunContextManager.runWithContext( + { + parentId: action.workflowRunId, + parentTaskRunExternalId: taskRunExternalId, + childIndex: 0, + desiredWorkerId: this.workerId || '', + signal: context.abortController.signal, + }, + () => { + // Precheck: if cancellation already happened, don't execute user code. 
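+ // (A matching postcheck after user code returns catches swallowed AbortErrors.)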
+ throwIfAborted(context.abortController.signal); + return step(context); + } + ); }; const success = async (result: any) => { @@ -565,7 +456,7 @@ export class V1Worker { return; } - this.logger.info(`Task run ${action.stepRunId} succeeded`); + this.logger.info(taskRunLog(taskName, taskRunExternalId, 'completed')); // Send the action event to the dispatcher const event = this.getStepActionEvent( @@ -603,8 +494,8 @@ export class V1Worker { ); } finally { // delete the run from the futures - delete this.futures[action.stepRunId]; - delete this.contexts[action.stepRunId]; + delete this.futures[taskRunExternalId]; + delete this.contexts[taskRunExternalId]; } }; @@ -616,7 +507,7 @@ export class V1Worker { return; } - this.logger.error(`Task run ${action.stepRunId} failed: ${error.message}`); + this.logger.error(taskRunLog(taskName, taskRunExternalId, `failed: ${error.message}`)); if (error.stack) { this.logger.error(error.stack); @@ -638,8 +529,8 @@ export class V1Worker { this.logger.error(`Could not send action event: ${e.message}`); } finally { // delete the run from the futures - delete this.futures[action.stepRunId]; - delete this.contexts[action.stepRunId]; + delete this.futures[taskRunExternalId]; + delete this.contexts[taskRunExternalId]; } }; @@ -652,10 +543,23 @@ export class V1Worker { await failure(e); return; } + + // Postcheck: user code may swallow AbortError; don't report completion after cancellation. + // If we reached this point and the signal is aborted, the task likely caught/ignored cancellation. + if (context.abortController.signal.aborted) { + this.logger.warn( + `Cancellation: task run ${taskRunExternalId} returned after cancellation was signaled. ` + + `This usually means an AbortError was caught and not propagated. ` + + `See https://docs.hatchet.run/home/cancellation` + ); + return; + } + throwIfAborted(context.abortController.signal); + await success(result); })() ); - this.futures[action.stepRunId] = future; + this.futures[taskRunExternalId] = future; // Send the action event to the dispatcher const event = this.getStepActionEvent( @@ -673,11 +577,10 @@ export class V1Worker { await future.promise; } catch (e: any) { const message = e?.message || String(e); - if (message.includes('Cancelled')) { - this.logger.debug(`Task run ${action.stepRunId} was cancelled`); - } else { + // TODO is this cased correctly... + if (!message.includes('Cancelled')) { this.logger.error( - `Could not wait for task run ${action.stepRunId} to finish. ` + + `Could not wait for task run ${taskRunExternalId} to finish. ` + `See https://docs.hatchet.run/home/cancellation for best practices on handling cancellation: `, e ); @@ -689,7 +592,7 @@ export class V1Worker { } async handleStartGroupKeyRun(action: Action) { - const { actionId } = action; + const { actionId, getGroupKeyRunId, taskRunExternalId, taskName } = action; this.logger.error( 'Concurrency Key Functions have been deprecated and will be removed in a future release. Use Concurrency Expressions instead.' 
@@ -698,7 +601,7 @@ export class V1Worker { try { const context = new Context(action, this.client, this); - const key = action.getGroupKeyRunId; + const key = getGroupKeyRunId; if (!key) { this.logger.error(`No group key run id provided for action ${actionId}`); @@ -721,7 +624,7 @@ export class V1Worker { }; const success = (result: any) => { - this.logger.info(`Task run ${action.stepRunId} succeeded`); + this.logger.info(taskRunLog(taskName, taskRunExternalId, 'completed')); try { // Send the action event to the dispatcher @@ -743,7 +646,7 @@ export class V1Worker { }; const failure = (error: any) => { - this.logger.error(`Task run ${key} failed: ${error.message}`); + this.logger.error(taskRunLog(taskName, taskRunExternalId, `failed: ${error.message}`)); try { // Send the action event to the dispatcher @@ -793,8 +696,8 @@ export class V1Worker { workerId: this.name, jobId: action.jobId, jobRunId: action.jobRunId, - stepId: action.stepId, - stepRunId: action.stepRunId, + taskId: action.taskId, + taskRunExternalId: action.taskRunExternalId, actionId: action.actionId, eventTimestamp: new Date(), eventType, @@ -824,29 +727,76 @@ export class V1Worker { } async handleCancelStepRun(action: Action) { - const { stepRunId } = action; + const { taskRunExternalId, taskName } = action; + try { - this.logger.info(`Cancelling task run ${action.stepRunId}`); - const future = this.futures[stepRunId]; - const context = this.contexts[stepRunId]; + const future = this.futures[taskRunExternalId]; + const context = this.contexts[taskRunExternalId]; if (context && context.abortController) { - context.abortController.abort('Cancelled by worker'); + context.abortController.abort('Cancelled by worker'); // TODO this reason is nonsensical } if (future) { - future.promise.catch(() => { - this.logger.info(`Cancelled task run ${action.stepRunId}`); - }); - future.cancel('Cancelled by worker'); - await future.promise; + const start = Date.now(); + const warningThresholdMs = this.client.config.cancellation_warning_threshold ?? 300; + const gracePeriodMs = this.client.config.cancellation_grace_period ?? 1000; + const warningMs = Math.max(0, warningThresholdMs); + const graceMs = Math.max(0, gracePeriodMs); + + // Ensure cancelling this future doesn't create an unhandled rejection in cases + // where the main action handler isn't currently awaiting `future.promise`. + future.promise.catch(() => undefined); + + // Cancel the future (rejects the wrapper); user code must still cooperate with AbortSignal. + future.cancel('Cancelled by worker'); // TODO this reason is nonsensical + + // Track completion of the underlying work (not the cancelable wrapper). + // Ensure this promise never throws into our supervision flow. + const completion = (future.inner ?? future.promise).catch(() => undefined); + + // Wait until warning threshold, then log if still running. + if (warningMs > 0) { + const winner = await Promise.race([ + completion.then(() => 'done' as const), + sleep(warningMs).then(() => 'warn' as const), + ]); + + if (winner === 'warn') { + const milliseconds = Date.now() - start; + this.logger.warn( + `Cancellation: task run ${taskRunExternalId} has not cancelled after ${milliseconds}ms. Consider checking for blocking operations. ` + + `See https://docs.hatchet.run/home/cancellation` + ); + } + } + + // Wait until grace period (total), then log if still running. 
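+ // The grace period is total time since the cancel was issued, so subtract the
+ // time already spent in the warning phase before racing the completion again.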
+ const elapsedMs = Date.now() - start; + const remainingMs = graceMs - elapsedMs; + const winner = await Promise.race([ + completion.then(() => 'done' as const), + sleep(Math.max(0, remainingMs)).then(() => 'grace' as const), + ]); + + if (winner === 'done') { + this.logger.info(taskRunLog(taskName, taskRunExternalId, 'cancelled')); + } else { + const totalElapsedMs = Date.now() - start; + this.logger.error( + `Cancellation: task run ${taskRunExternalId} still running after cancellation grace period ` + + `${totalElapsedMs}ms.\n` + + `JavaScript cannot force-kill user code; see: https://docs.hatchet.run/home/cancellation` + ); + } } } catch (e: any) { - // Expected: the promise rejects when cancelled - this.logger.debug(`Task run ${stepRunId} cancellation completed`); + this.logger.error( + `Cancellation: error while supervising cancellation for task run ${taskRunExternalId}: ${e?.message || e}` + ); } finally { - delete this.futures[stepRunId]; - delete this.contexts[stepRunId]; + delete this.futures[taskRunExternalId]; + delete this.contexts[taskRunExternalId]; } } @@ -912,7 +862,7 @@ export class V1Worker { workerName: this.name, services: ['default'], actions: Object.keys(this.action_registry), - maxRuns: this.maxRuns, + slotConfig: this.slotConfig, labels: this.labels, }); @@ -924,9 +874,9 @@ export class V1Worker { this.logger.info(`Worker ${this.name} listening for actions`); for await (const action of generator) { - this.logger.info( - `Worker ${this.name} received action ${action.actionId}:${action.actionType}` - ); + const receivedType = actionMap(action.actionType); + + this.logger.info(taskRunLog(action.taskName, action.taskRunExternalId, `${receivedType}`)); void this.handleAction(action); } @@ -970,16 +920,16 @@ export class V1Worker { } } -function toPbWorkerLabel( - in_: CreateStep['worker_labels'] +function mapWorkerLabelPb( + in_: CreateWorkflowTaskOpts['desiredWorkerLabels'] ): Record { if (!in_) { return {}; } return Object.entries(in_).reduce>( - (acc, [key, value]) => { - if (!value) { + (acc, [key, label]) => { + if (!label) { return { ...acc, [key]: { @@ -989,22 +939,22 @@ function toPbWorkerLabel( }; } - if (typeof value === 'string') { + if (typeof label === 'string') { return { ...acc, [key]: { - strValue: value, + strValue: label, intValue: undefined, }, }; } - if (typeof value === 'number') { + if (typeof label === 'number') { return { ...acc, [key]: { strValue: undefined, - intValue: value, + intValue: label, }, }; } @@ -1012,11 +962,11 @@ function toPbWorkerLabel( return { ...acc, [key]: { - strValue: typeof value.value === 'string' ? value.value : undefined, - intValue: typeof value.value === 'number' ? value.value : undefined, - required: value.required, - weight: value.weight, - comparator: value.comparator, + strValue: typeof label.value === 'string' ? label.value : undefined, + intValue: typeof label.value === 'number' ? 
label.value : undefined, + required: label.required, + weight: label.weight, + comparator: label.comparator, }, }; }, @@ -1037,3 +987,85 @@ function getLeaves(tasks: LeafableTask[]): LeafableTask[] { function isLeafTask(task: LeafableTask, allTasks: LeafableTask[]): boolean { return !allTasks.some((t) => t.parents?.some((p) => p.name === task.name)); } + +export function mapRateLimitPb( + limits: CreateWorkflowTaskOpts['rateLimits'] +): CreateStepRateLimit[] { + if (!limits) return []; + + return limits.map((l) => { + let key = l.staticKey; + const keyExpression = l.dynamicKey; + + if (l.key !== undefined) { + // eslint-disable-next-line no-console + console.warn( + 'key is deprecated and will be removed in a future release, please use staticKey instead' + ); + key = l.key; + } + + if (keyExpression !== undefined) { + if (key !== undefined) { + throw new Error('Cannot have both static key and dynamic key set'); + } + key = keyExpression; + if (!validateCelExpression(keyExpression)) { + throw new Error(`Invalid CEL expression: ${keyExpression}`); + } + } + + if (key === undefined) { + throw new Error(`Invalid key`); + } + + let units: number | undefined; + let unitsExpression: string | undefined; + if (typeof l.units === 'number') { + units = l.units; + } else { + if (!validateCelExpression(l.units)) { + throw new Error(`Invalid CEL expression: ${l.units}`); + } + unitsExpression = l.units; + } + + let limitExpression: string | undefined; + if (l.limit !== undefined) { + if (typeof l.limit === 'number') { + limitExpression = `${l.limit}`; + } else { + if (!validateCelExpression(l.limit)) { + throw new Error(`Invalid CEL expression: ${l.limit}`); + } + + limitExpression = l.limit; + } + } + + if (keyExpression !== undefined && limitExpression === undefined) { + throw new Error('CEL based keys requires limit to be set'); + } + + if (limitExpression === undefined) { + limitExpression = `-1`; + } + + return { + key, + keyExpr: keyExpression, + units, + unitsExpr: unitsExpression, + limitValuesExpr: limitExpression, + duration: l.duration, + }; + }); +} + +// Helper function to validate CEL expressions +// eslint-disable-next-line @typescript-eslint/no-unused-vars +function validateCelExpression(_expr: string): boolean { + // FIXME: this is a placeholder. In a real implementation, you'd need to use a CEL parser or validator. + // For now, we'll just return true to mimic the behavior. 
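+ // A real implementation might delegate to a CEL library (hypothetical API):
+ // try { celEnv.parse(_expr); return true; } catch { return false; }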
+ return true; +} diff --git a/sdks/typescript/src/v1/client/worker/worker-slot-capacities.test.ts b/sdks/typescript/src/v1/client/worker/worker-slot-capacities.test.ts new file mode 100644 index 0000000000..c36f8a4e48 --- /dev/null +++ b/sdks/typescript/src/v1/client/worker/worker-slot-capacities.test.ts @@ -0,0 +1,51 @@ +import { testingExports } from '@hatchet/v1/client/worker/slot-utils'; +import { WorkflowDeclaration } from '../../declaration'; +import { SlotType } from '../../slot-types'; + +const { resolveWorkerOptions } = testingExports; + +describe('resolveWorkerOptions slot config', () => { + it('sets default slots for non-durable tasks', () => { + const workflow = new WorkflowDeclaration({ name: 'default-wf' }); + workflow.task({ + name: 'task1', + fn: async () => undefined, + }); + + const resolved = resolveWorkerOptions({ workflows: [workflow] }); + + expect(resolved.slotConfig[SlotType.Default]).toBe(100); + expect(resolved.slotConfig[SlotType.Durable]).toBeUndefined(); + }); + + it('sets durable slots for durable-only workflows without default slots', () => { + const workflow = new WorkflowDeclaration({ name: 'durable-wf' }); + workflow.durableTask({ + name: 'durable-task', + fn: async () => undefined, + }); + + const resolved = resolveWorkerOptions({ workflows: [workflow] }); + + expect(resolved.slotConfig[SlotType.Durable]).toBe(1000); + expect(resolved.slotConfig[SlotType.Default]).toBeUndefined(); + expect(resolved.slots).toBeUndefined(); + }); + + it('sets both default and durable slots for mixed workflows', () => { + const workflow = new WorkflowDeclaration({ name: 'mixed-wf' }); + workflow.task({ + name: 'task1', + fn: async () => undefined, + }); + workflow.durableTask({ + name: 'durable-task', + fn: async () => undefined, + }); + + const resolved = resolveWorkerOptions({ workflows: [workflow] }); + + expect(resolved.slotConfig[SlotType.Default]).toBe(100); + expect(resolved.slotConfig[SlotType.Durable]).toBe(1000); + }); +}); diff --git a/sdks/typescript/src/v1/client/worker/worker.ts b/sdks/typescript/src/v1/client/worker/worker.ts index 572ad79b27..5a6f7deddc 100644 --- a/sdks/typescript/src/v1/client/worker/worker.ts +++ b/sdks/typescript/src/v1/client/worker/worker.ts @@ -1,32 +1,22 @@ /* eslint-disable no-underscore-dangle */ import { WorkerLabels } from '@hatchet/clients/dispatcher/dispatcher-client'; import { LegacyHatchetClient } from '@hatchet/clients/hatchet-client'; -import { Workflow as V0Workflow } from '@hatchet/workflow'; -import { WebhookWorkerCreateRequest } from '@hatchet/clients/rest/generated/data-contracts'; import { BaseWorkflowDeclaration } from '../../declaration'; +import type { LegacyWorkflow } from '../../../legacy/legacy-transformer'; +import { normalizeWorkflows } from '../../../legacy/legacy-transformer'; import { HatchetClient } from '../..'; import { V1Worker } from './worker-internal'; - -const DEFAULT_DURABLE_SLOTS = 1_000; +import { resolveWorkerOptions, type WorkerSlotOptions } from './slot-utils'; /** * Options for creating a new hatchet worker * @interface CreateWorkerOpts */ -export interface CreateWorkerOpts { - /** (optional) Maximum number of concurrent runs on this worker, defaults to 100 */ - slots?: number; - /** (optional) Array of workflows to register */ - workflows?: BaseWorkflowDeclaration<any, any>[] | V0Workflow[]; +export interface CreateWorkerOpts extends WorkerSlotOptions { /** (optional) Worker labels for affinity-based assignment */ labels?: WorkerLabels; /** (optional) Whether to handle kill signals */ handleKill?:
boolean; - /** @deprecated Use slots instead */ - maxRuns?: number; - - /** (optional) Maximum number of concurrent runs on the durable worker, defaults to 1,000 */ - durableSlots?: number; } /** @@ -38,9 +28,7 @@ export class Worker { _v1: HatchetClient; _v0: LegacyHatchetClient; - /** Internal reference to the underlying V0 worker implementation */ - nonDurable: V1Worker; - durable?: V1Worker; + _internal: V1Worker; /** * Creates a new HatchetWorker instance @@ -55,7 +43,7 @@ ) { this._v1 = v1; this._v0 = v0; - this.nonDurable = nonDurable; + this._internal = nonDurable; this.config = config; this.name = name; } @@ -72,56 +60,51 @@ name: string, options: CreateWorkerOpts ) { + // Normalize any legacy workflows before resolving worker options + const normalizedOptions = { + ...options, + workflows: options.workflows ? normalizeWorkflows(options.workflows) : undefined, + }; + + const resolvedOptions = resolveWorkerOptions(normalizedOptions); const opts = { name, - ...options, - maxRuns: options.slots || options.maxRuns, + ...resolvedOptions, }; const internalWorker = new V1Worker(v1, opts); - const worker = new Worker(v1, v0, internalWorker, options, name); - await worker.registerWorkflows(options.workflows); + const worker = new Worker(v1, v0, internalWorker, normalizedOptions, name); + await worker.registerWorkflows(normalizedOptions.workflows); return worker; } /** - * Registers workflows with the worker + * Registers workflows with the worker. + * Accepts both v1 BaseWorkflowDeclaration and legacy Workflow objects. + * Legacy workflows are automatically transformed and a deprecation warning is emitted. * @param workflows - Array of workflows to register * @returns Array of registered workflow promises */ - async registerWorkflows(workflows?: Array<BaseWorkflowDeclaration<any, any> | V0Workflow>) { - for (const wf of workflows || []) { - if (wf instanceof BaseWorkflowDeclaration) { - // TODO check if tenant is V1 - await this.nonDurable.registerWorkflowV1(wf); - - if (wf.definition._durableTasks.length > 0) { - if (!this.durable) { - const opts = { - name: `${this.name}-durable`, - ...this.config, - maxRuns: this.config.durableSlots || DEFAULT_DURABLE_SLOTS, - }; - - this.durable = new V1Worker(this._v1, opts); - await this.durable.registerWorkflowV1(wf, true); - } - this.durable.registerDurableActionsV1(wf.definition); - } - } else { - // fallback to v0 client for backwards compatibility - await this.nonDurable.registerWorkflow(wf); + async registerWorkflows(workflows?: Array<BaseWorkflowDeclaration<any, any> | LegacyWorkflow>) { + const normalized = workflows ? normalizeWorkflows(workflows) : []; + for (const wf of normalized) { + await this._internal.registerWorkflowV1(wf); + + if (wf.definition._durableTasks.length > 0) { + this._internal.registerDurableActionsV1(wf.definition); + } } } /** - * Registers a single workflow with the worker + * Registers a single workflow with the worker. + * Accepts both v1 BaseWorkflowDeclaration and legacy Workflow objects. + * Legacy workflows are automatically transformed and a deprecation warning is emitted.
* @param workflow - The workflow to register * @returns A promise that resolves when the workflow is registered * @deprecated use registerWorkflows instead */ - registerWorkflow(workflow: BaseWorkflowDeclaration<any, any> | V0Workflow) { + registerWorkflow(workflow: BaseWorkflowDeclaration<any, any> | LegacyWorkflow) { return this.registerWorkflows([workflow]); } @@ -130,13 +113,7 @@ export class Worker { * @returns Promise that resolves when the worker is stopped or killed */ start() { - const workers = [this.nonDurable]; - - if (this.durable) { - workers.push(this.durable); - } - - return Promise.all(workers.map((w) => w.start())); + return this._internal.start(); } /** @@ -144,13 +121,7 @@ * @returns Promise that resolves when the worker stops */ stop() { - const workers = [this.nonDurable]; - - if (this.durable) { - workers.push(this.durable); - } - - return Promise.all(workers.map((w) => w.stop())); + return this._internal.stop(); } /** @@ -159,7 +130,7 @@ * @returns Promise that resolves when labels are updated */ upsertLabels(labels: WorkerLabels) { - return this.nonDurable.upsertLabels(labels); + return this._internal.upsertLabels(labels); } /** @@ -167,52 +138,33 @@ * @returns The labels for the worker */ getLabels() { - return this.nonDurable.labels; - } - - /** - * Register a webhook with the worker - * @param webhook - The webhook to register - * @returns A promise that resolves when the webhook is registered - */ - registerWebhook(webhook: WebhookWorkerCreateRequest) { - return this.nonDurable.registerWebhook(webhook); + return this._internal.labels; } async isPaused() { - const promises: Promise<boolean>[] = []; - if (this.nonDurable?.workerId) { - promises.push(this._v1.workers.isPaused(this.nonDurable.workerId)); - } - if (this.durable?.workerId) { - promises.push(this._v1.workers.isPaused(this.durable.workerId)); + if (!this._internal?.workerId) { + return false; } - const res = await Promise.all(promises); - - return !res.includes(false); + return this._v1.workers.isPaused(this._internal.workerId); } // TODO docstrings pause() { - const promises: Promise<unknown>[] = []; - if (this.nonDurable?.workerId) { - promises.push(this._v1.workers.pause(this.nonDurable.workerId)); - } - if (this.durable?.workerId) { - promises.push(this._v1.workers.pause(this.durable.workerId)); + if (!this._internal?.workerId) { + return Promise.resolve(); } - return Promise.all(promises); + + return this._v1.workers.pause(this._internal.workerId); } unpause() { - const promises: Promise<unknown>[] = []; - if (this.nonDurable?.workerId) { - promises.push(this._v1.workers.unpause(this.nonDurable.workerId)); + if (!this._internal?.workerId) { + return Promise.resolve(); } - if (this.durable?.workerId) { - promises.push(this._v1.workers.unpause(this.durable.workerId)); - } - return Promise.all(promises); + + return this._v1.workers.unpause(this._internal.workerId); } } + +export { testingExports as __testing } from './slot-utils'; diff --git a/sdks/typescript/src/v1/declaration.ts b/sdks/typescript/src/v1/declaration.ts index b226e311fe..3dc4f1db71 100644 --- a/sdks/typescript/src/v1/declaration.ts +++ b/sdks/typescript/src/v1/declaration.ts @@ -7,8 +7,8 @@ import { ScheduledWorkflows, V1CreateFilterRequest, } from '@hatchet/clients/rest/generated/data-contracts'; -import { Workflow as WorkflowV0 } from '@hatchet/workflow'; import { z } from 'zod'; +import { throwIfAborted } from '@hatchet/util/abort-error'; import { IHatchetClient } from './client/client.interface'; import {
CreateWorkflowTaskOpts, @@ -90,29 +90,48 @@ export type TaskOutputType< type DefaultFilter = Omit<V1CreateFilterRequest, 'workflowId'>; +/** + * Sticky strategy for workflow scheduling. + * + * Prefer using `StickyStrategy.SOFT` / `StickyStrategy.HARD` (v1, non-protobuf). + * For backwards compatibility, the workflow/task `sticky` field also accepts legacy + * protobuf enum values (`0`/`1`) and strings (`'SOFT'`/`'HARD'`). + */ +export const StickyStrategy = { + SOFT: 'soft', + HARD: 'hard', +} as const; + +// eslint-disable-next-line no-redeclare +export type StickyStrategy = (typeof StickyStrategy)[keyof typeof StickyStrategy]; + +export type StickyStrategyInput = StickyStrategy | 'SOFT' | 'HARD' | 0 | 1; export type CreateBaseWorkflowOpts = { /** * The name of the workflow. */ - name: WorkflowV0['id']; + name: string; /** * (optional) description of the workflow. */ - description?: WorkflowV0['description']; + description?: string; /** * (optional) version of the workflow. */ - version?: WorkflowV0['version']; + version?: string; /** * (optional) sticky strategy for the workflow. */ - sticky?: WorkflowV0['sticky']; + sticky?: StickyStrategyInput; /** * (optional) on config for the workflow. - * @deprecated use onCrons and onEvents instead + * @alias for onCrons and onEvents */ - on?: WorkflowV0['on']; + on?: { + cron?: string | string[]; + event?: string | string[]; + }; /** * (optional) cron config for the workflow. @@ -312,7 +331,6 @@ export class BaseWorkflowDeclaration< // set the parent run context const parentRunContext = parentRunContextManager.getContext(); - parentRunContextManager.incrementChildIndex(Array.isArray(input) ? input.length : 1); if (!parentRunContext && (options?.childKey || options?.sticky)) { this.client.admin.logger.warn( ); } + const inheritedSignal = parentRunContext?.signal; + + // Precheck: if we're being called from a cancelled parent task, do not enqueue more work. + // The signal is inherited from the parent task's `ctx.abortController.signal`. + throwIfAborted(inheritedSignal, { + isTrigger: true, + context: parentRunContext?.parentTaskRunExternalId + ? `task run ${parentRunContext.parentTaskRunExternalId}` + : undefined, + warn: (message) => this.client!.admin.logger.warn(message), + }); + + parentRunContextManager.incrementChildIndex(Array.isArray(input) ? input.length : 1); + const runOpts = { - ...options, + ...(options ?? {}), parentId: parentRunContext?.parentId, - parentStepRunId: parentRunContext?.parentRunId, + parentTaskRunExternalId: parentRunContext?.parentTaskRunExternalId, childIndex: parentRunContext?.childIndex, sticky: options?.sticky ? parentRunContext?.desiredWorkerId : undefined, childKey: options?.childKey, @@ -357,6 +389,9 @@ export class BaseWorkflowDeclaration< // eslint-disable-next-line no-param-reassign ref._standaloneTaskName = _standaloneTaskName; } + // Ensure result subscriptions inherit cancellation if no signal is provided explicitly. + // eslint-disable-next-line no-param-reassign + ref.defaultSignal = inheritedSignal; res.push(ref); }); return res; @@ -368,6 +403,7 @@ res._standaloneTaskName = _standaloneTaskName; } + res.defaultSignal = inheritedSignal; return res; } @@ -432,10 +468,16 @@ export class BaseWorkflowDeclaration< throw UNBOUND_ERR; } + // If called from within a cancelled parent task, do not enqueue scheduled work.
+ throwIfAborted(parentRunContextManager.getContext()?.signal, { + isTrigger: true, + warn: (message) => this.client!.admin.logger.warn(message), + }); + const scheduled = this.client.scheduled.create(this.definition.name, { triggerAt: enqueueAt, input: input as JsonObject, - ...options, + ...(options ?? {}), }); return scheduled; @@ -474,10 +516,16 @@ export class BaseWorkflowDeclaration< throw UNBOUND_ERR; } + // If called from within a cancelled parent task, do not enqueue cron work. + throwIfAborted(parentRunContextManager.getContext()?.signal, { + isTrigger: true, + warn: (message) => this.client!.admin.logger.warn(message), + }); + const cronDef = this.client.crons.create(this.definition.name, { expression, input: input as JsonObject, - ...options, + ...(options ?? {}), additionalMetadata: options?.additionalMetadata, name, }); diff --git a/sdks/typescript/src/v1/examples/__e2e__/harness.ts b/sdks/typescript/src/v1/examples/__e2e__/harness.ts new file mode 100644 index 0000000000..ea1f794ec4 --- /dev/null +++ b/sdks/typescript/src/v1/examples/__e2e__/harness.ts @@ -0,0 +1,75 @@ +import sleep from '@hatchet/util/sleep'; +import { randomUUID } from 'crypto'; +import { HatchetClient } from '@hatchet/v1'; +import type { BaseWorkflowDeclaration } from '@hatchet/v1'; +import { Worker } from '../../client/worker/worker'; + +export function requireEnv(name: string): string { + const value = process.env[name]; + if (!value) { + throw new Error( + `Missing required environment variable ${name}. ` + + `E2E tests require a configured Hatchet instance and credentials.` + ); + } + return value; +} + +export function makeE2EClient(): HatchetClient { + // ConfigLoader requires a token; this makes the failure message obvious. + requireEnv('HATCHET_CLIENT_TOKEN'); + return HatchetClient.init(); +} + +export function makeTestScope(prefix = 'ts_e2e'): string { + return `${prefix}_${randomUUID()}`; +} + +export async function startWorker({ + client, + name, + workflows, + slots = 50, +}: { + client: HatchetClient; + name: string; + workflows: Array<BaseWorkflowDeclaration<any, any>>; + slots?: number; +}): Promise<Worker> { + const worker = await client.worker(name, { workflows, slots }); + void worker.start(); + return worker; +} + +export async function stopWorker(worker: Worker | undefined) { + if (!worker) return; + await worker.stop(); + // give the engine a beat to settle + await sleep(1500); +} + +export async function poll<T>( + fn: () => Promise<T>, + { + timeoutMs = 30_000, + intervalMs = 1000, + shouldStop, + label = 'poll', + }: { + timeoutMs?: number; + intervalMs?: number; + shouldStop: (value: T) => boolean; + label?: string; + } +): Promise<T> { + const start = Date.now(); + // eslint-disable-next-line no-constant-condition + while (true) { + const value = await fn(); + if (shouldStop(value)) return value; + if (Date.now() - start > timeoutMs) { + throw new Error(`Timed out waiting for ${label} after ${timeoutMs}ms`); + } + await sleep(intervalMs); + } +} diff --git a/sdks/typescript/src/v1/examples/affinity/affinity-workers.ts b/sdks/typescript/src/v1/examples/affinity/affinity-workers.ts index 9513316bf2..1d85612597 100644 --- a/sdks/typescript/src/v1/examples/affinity/affinity-workers.ts +++ b/sdks/typescript/src/v1/examples/affinity/affinity-workers.ts @@ -1,4 +1,4 @@ -import { WorkerLabelComparator } from '@hatchet/protoc/workflows'; +import { WorkerLabelComparator } from '@hatchet/v1'; import { hatchet } from '../hatchet-client'; // > AffinityWorkflow @@ -11,11 +11,11 @@ const workflow = hatchet.workflow({ workflow.task({ name:
'step1', fn: async (_, ctx) => { - const results: Promise<any>[] = []; + const results = []; // eslint-disable-next-line no-plusplus for (let i = 0; i < 50; i++) { - const result = await ctx.spawnWorkflow(childWorkflow.id, {}); - results.push(result.output); + const result = childWorkflow.run({}); + results.push(result); } console.log('Spawned 50 child workflows'); console.log('Results:', await Promise.all(results)); diff --git a/sdks/typescript/src/v1/examples/bulk_fanout/bulk_fanout.e2e.ts b/sdks/typescript/src/v1/examples/bulk_fanout/bulk_fanout.e2e.ts new file mode 100644 index 0000000000..a6d9250f89 --- /dev/null +++ b/sdks/typescript/src/v1/examples/bulk_fanout/bulk_fanout.e2e.ts @@ -0,0 +1,25 @@ +import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { bulkChild, bulkParentWorkflow } from './workflow'; + +describe('bulk-fanout-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'bulk-fanout-e2e-worker', + workflows: [bulkChild, bulkParentWorkflow], + slots: 50, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('spawns N children and returns all results', async () => { + const result = await bulkParentWorkflow.run({ n: 12 }); + expect((result as any).spawn.results).toHaveLength(12); + }, 90_000); +}); diff --git a/sdks/typescript/src/v1/examples/bulk_fanout/workflow.ts b/sdks/typescript/src/v1/examples/bulk_fanout/workflow.ts new file mode 100644 index 0000000000..2c4be17d23 --- /dev/null +++ b/sdks/typescript/src/v1/examples/bulk_fanout/workflow.ts @@ -0,0 +1,29 @@ +import { hatchet } from '../hatchet-client'; + +export type ParentInput = { n: number }; + +export const bulkChild = hatchet.task({ + name: 'bulk-child', + fn: async (input: { i: number }) => { + return { i: input.i }; + }, +}); + +export const bulkParentWorkflow = hatchet.workflow({ + name: 'bulk-parent', +}); + +bulkParentWorkflow.task({ + name: 'spawn', + fn: async (input, ctx) => { + const typed = input as ParentInput; + const children = Array.from({ length: typed.n }, (_, i) => ({ + workflow: bulkChild, + input: { i }, + })); + + const results = await ctx.bulkRunChildren(children); + + return { results }; + }, +}); diff --git a/sdks/typescript/src/v1/examples/bulk_operations/bulk_replay.e2e.ts b/sdks/typescript/src/v1/examples/bulk_operations/bulk_replay.e2e.ts new file mode 100644 index 0000000000..9c789e1f72 --- /dev/null +++ b/sdks/typescript/src/v1/examples/bulk_operations/bulk_replay.e2e.ts @@ -0,0 +1,98 @@ +import { makeE2EClient, poll, startWorker, stopWorker, makeTestScope } from '../__e2e__/harness'; +import { V1TaskStatus } from '../../../clients/rest/generated/data-contracts'; +import { bulkReplayTest1, bulkReplayTest2, bulkReplayTest3 } from './workflow'; + +describe('bulk-replay-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'bulk-replay-test-worker', + workflows: [bulkReplayTest1, bulkReplayTest2, bulkReplayTest3], + slots: 50, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('bulk replays matching runs and increments attempt', async () => { + const testRunId = makeTestScope('bulk_replay'); + const n = 20; + + const meta = { test_run_id: testRunId }; + const since = new Date(Date.now() - 5 * 60 * 1000); + + const inputs = (count: number) => Array.from({ length: count }, ()
=> ({})); + + await bulkReplayTest1.runNoWait(inputs(n + 1), { additionalMetadata: meta }); + await bulkReplayTest2.runNoWait(inputs(n / 2 - 1), { additionalMetadata: meta }); + await bulkReplayTest3.runNoWait(inputs(n / 2 - 2), { additionalMetadata: meta }); + + const workflowNames = [bulkReplayTest1.name, bulkReplayTest2.name, bulkReplayTest3.name]; + const expectedTotal = n + 1 + (n / 2 - 1) + (n / 2 - 2); + + const initialRuns = await poll( + async () => + hatchet.runs.list({ + since, + limit: 1000, + workflowNames, + additionalMetadata: meta, + }), + { + timeoutMs: 120_000, + intervalMs: 2000, + label: 'initial bulk runs completion', + shouldStop: (runs) => + (runs.rows || []).length === expectedTotal && + (runs.rows || []).every((r: any) => r.status === V1TaskStatus.COMPLETED), + } + ); + + expect(initialRuns.rows).toHaveLength(expectedTotal); + + // Equivalent to Python's aio_bulk_replay: runs.replay with filter. + await hatchet.runs.replay({ + filters: { + since, + workflowNames, + additionalMetadata: meta, + }, + }); + + const replayedRuns = await poll( + async () => + hatchet.runs.list({ + since, + limit: 1000, + workflowNames, + additionalMetadata: meta, + }), + { + timeoutMs: 120_000, + intervalMs: 2000, + label: 'bulk replay attempts visible', + shouldStop: (runs) => + (runs.rows || []).length === expectedTotal && + (runs.rows || []).every( + (r: any) => + r.status === V1TaskStatus.COMPLETED && + (r.retryCount ?? 0) >= 1 && + (r.attempt ?? 0) >= 2 + ), + } + ); + + const rows = replayedRuns.rows || []; + expect(rows).toHaveLength(expectedTotal); + + const byName = (name: string) => rows.filter((r: any) => r.workflowName === name); + expect(byName(bulkReplayTest1.name)).toHaveLength(n + 1); + expect(byName(bulkReplayTest2.name)).toHaveLength(n / 2 - 1); + expect(byName(bulkReplayTest3.name)).toHaveLength(n / 2 - 2); + }, 240_000); +}); diff --git a/sdks/typescript/src/v1/examples/bulk_operations/workflow.ts b/sdks/typescript/src/v1/examples/bulk_operations/workflow.ts new file mode 100644 index 0000000000..a481233b5c --- /dev/null +++ b/sdks/typescript/src/v1/examples/bulk_operations/workflow.ts @@ -0,0 +1,35 @@ +/* eslint-disable no-console */ +import { hatchet } from '../hatchet-client'; + +export const bulkReplayTest1 = hatchet.task({ + name: 'bulk-replay-test-1', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); + +export const bulkReplayTest2 = hatchet.task({ + name: 'bulk-replay-test-2', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); + +export const bulkReplayTest3 = hatchet.task({ + name: 'bulk-replay-test-3', + retries: 1, + fn: async (_input, ctx) => { + console.log('retrying bulk replay test task', ctx.retryCount()); + if (ctx.retryCount() === 0) { + throw new Error('This is a test error to trigger a retry.'); + } + }, +}); diff --git a/sdks/typescript/src/v1/examples/cancellation/cancellation-workflow.ts b/sdks/typescript/src/v1/examples/cancellation/cancellation-workflow.ts new file mode 100644 index 0000000000..464efba3df --- /dev/null +++ b/sdks/typescript/src/v1/examples/cancellation/cancellation-workflow.ts @@ -0,0 +1,81 @@ +import axios from 'axios'; +import { hatchet } from '../hatchet-client'; + +function 
sleepWithAbort(signal: AbortSignal, ms: number) { + return new Promise<void>((resolve, reject) => { + const timer = setTimeout(() => { + signal.removeEventListener('abort', onAbort); + resolve(); + }, ms); + + const onAbort = () => { + clearTimeout(timer); + reject(new Error('Cancelled')); + }; + + if (signal.aborted) { + clearTimeout(timer); + reject(new Error('Cancelled')); + return; + } + + signal.addEventListener('abort', onAbort, { once: true }); + }); +} + +// > Self-cancelling workflow (mirrors Python example) +export const cancellationWorkflow = hatchet.workflow({ + name: 'CancelWorkflow', +}); + +cancellationWorkflow.task({ + name: 'self-cancel', + fn: async (_, ctx) => { + await sleepWithAbort(ctx.abortController.signal, 2000); + + // Cancel the current task run (server-side) and optimistically abort local execution. + await ctx.cancel(); + + // If cancellation didn't stop execution yet, keep waiting but cooperatively. + await sleepWithAbort(ctx.abortController.signal, 10_000); + + return { error: 'Task should have been cancelled' }; + }, +}); + +cancellationWorkflow.task({ + name: 'check-flag', + fn: async (_, ctx) => { + for (let i = 0; i < 3; i += 1) { + await sleepWithAbort(ctx.abortController.signal, 1000); + if (ctx.cancelled) { + throw new Error('Cancelled'); + } + } + return { error: 'Task should have been cancelled' }; + }, +}); +// !! + +// > Abort Signal +export const abortSignal = hatchet.task({ + name: 'abort-signal', + fn: async (_, { abortController }) => { + try { + const response = await axios.get('https://api.example.com/data', { + signal: abortController.signal, + }); + // Handle the response + } catch (error) { + if (axios.isCancel(error)) { + // Request was canceled + console.log('Request canceled'); + } else { + // Handle other errors + } + } + }, +}); +// !!
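+
+// Hedged sketch (illustrative addition, not used by the tasks above): the same
+// cooperative-cancellation pattern generalizes to any promise. `withAbort` is a
+// hypothetical helper name, not a Hatchet SDK API.
+export function withAbort<T>(signal: AbortSignal, promise: Promise<T>): Promise<T> {
+  return new Promise<T>((resolve, reject) => {
+    if (signal.aborted) {
+      reject(new Error('Cancelled'));
+      return;
+    }
+    const onAbort = () => reject(new Error('Cancelled'));
+    signal.addEventListener('abort', onAbort, { once: true });
+    // Settle with the wrapped promise and detach the abort listener either way.
+    promise.then(
+      (value) => {
+        signal.removeEventListener('abort', onAbort);
+        resolve(value);
+      },
+      (err) => {
+        signal.removeEventListener('abort', onAbort);
+        reject(err);
+      }
+    );
+  });
+}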
+ +// see ./worker.ts and ./run.ts for how to run the workflow diff --git a/sdks/typescript/src/v1/examples/cancellation/cancellation.e2e.ts b/sdks/typescript/src/v1/examples/cancellation/cancellation.e2e.ts new file mode 100644 index 0000000000..35f37caa12 --- /dev/null +++ b/sdks/typescript/src/v1/examples/cancellation/cancellation.e2e.ts @@ -0,0 +1,43 @@ +import { makeE2EClient, poll, startWorker, stopWorker } from '../__e2e__/harness'; +import { cancellationWorkflow } from './cancellation-workflow'; +import { V1TaskStatus } from '../../../clients/rest/generated/data-contracts'; + +describe('cancellation-e2e', () => { + const hatchet = makeE2EClient(); + + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'cancellation-e2e-worker', + workflows: [cancellationWorkflow], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + xit('should cancel eventually (execution timeout)', async () => { + const ref = await cancellationWorkflow.runNoWait({}); + + const run = await poll(async () => hatchet.runs.get(ref), { + timeoutMs: 60_000, + intervalMs: 1000, + label: 'cancellation run status', + shouldStop: (r) => ![V1TaskStatus.QUEUED, V1TaskStatus.RUNNING].includes(r.run.status as any), + }); + + expect(run.run.status).toBe(V1TaskStatus.CANCELLED); + + // best-effort: python asserts `not run.run.output` + const out: unknown = run.run.output; + const isEmptyObject = + out != null && + typeof out === 'object' && + Object.keys(out as Record<string, unknown>).length === 0; + expect(out == null || isEmptyObject).toBeTruthy(); + }, 90_000); +}); diff --git a/sdks/typescript/src/v1/examples/cancellations/run.ts b/sdks/typescript/src/v1/examples/cancellation/run.ts similarity index 69% rename from sdks/typescript/src/v1/examples/cancellations/run.ts rename to sdks/typescript/src/v1/examples/cancellation/run.ts index 17c8e3b1b2..daca962cd2 100644 --- a/sdks/typescript/src/v1/examples/cancellations/run.ts +++ b/sdks/typescript/src/v1/examples/cancellation/run.ts @@ -1,12 +1,12 @@ /* eslint-disable no-console */ // > Running a Task with Results import sleep from '@hatchet/util/sleep'; -import { cancellation } from './workflow'; +import { cancellationWorkflow } from './cancellation-workflow'; import { hatchet } from '../hatchet-client'; // ...
async function main() { - const run = await cancellation.runNoWait({}); - const run1 = await cancellation.runNoWait({}); + const run = await cancellationWorkflow.runNoWait({}); + const run1 = await cancellationWorkflow.runNoWait({}); await sleep(1000); @@ -26,8 +26,8 @@ async function main() { console.log(resReplay); - const run2 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'abc' } }); - const run4 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'test' } }); + const run2 = await cancellationWorkflow.runNoWait({}, { additionalMetadata: { test: 'abc' } }); + const run4 = await cancellationWorkflow.runNoWait({}, { additionalMetadata: { test: 'test' } }); await sleep(1000); diff --git a/sdks/typescript/src/v1/examples/cancellations/worker.ts b/sdks/typescript/src/v1/examples/cancellation/worker.ts similarity index 79% rename from sdks/typescript/src/v1/examples/cancellations/worker.ts rename to sdks/typescript/src/v1/examples/cancellation/worker.ts index 4a2a1a7eca..b5fa57361e 100644 --- a/sdks/typescript/src/v1/examples/cancellations/worker.ts +++ b/sdks/typescript/src/v1/examples/cancellation/worker.ts @@ -1,11 +1,11 @@ // > Declaring a Worker import { hatchet } from '../hatchet-client'; -import { cancellation } from './workflow'; +import { cancellationWorkflow } from './cancellation-workflow'; async function main() { const worker = await hatchet.worker('cancellation-worker', { // 👀 Declare the workflows that the worker can execute - workflows: [cancellation], + workflows: [cancellationWorkflow], // 👀 Declare the number of concurrent task runs the worker can accept slots: 100, }); diff --git a/sdks/typescript/src/v1/examples/cancellations/workflow.ts b/sdks/typescript/src/v1/examples/cancellations/workflow.ts deleted file mode 100644 index 8789f5820c..0000000000 --- a/sdks/typescript/src/v1/examples/cancellations/workflow.ts +++ /dev/null @@ -1,43 +0,0 @@ -import sleep from '@hatchet/util/sleep'; -import axios from 'axios'; -import { hatchet } from '../hatchet-client'; - -// > Declaring a Task -export const cancellation = hatchet.task({ - name: 'cancellation', - fn: async (_, ctx) => { - await sleep(10 * 1000); - - if (ctx.cancelled) { - throw new Error('Task was cancelled'); - } - - return { - Completed: true, - }; - }, -}); -// !! - -// > Abort Signal -export const abortSignal = hatchet.task({ - name: 'abort-signal', - fn: async (_, { abortController }) => { - try { - const response = await axios.get('https://api.example.com/data', { - signal: abortController.signal, - }); - // Handle the response - } catch (error) { - if (axios.isCancel(error)) { - // Request was canceled - console.log('Request canceled'); - } else { - // Handle other errors - } - } - }, -}); -// !! 
- -// see ./worker.ts and ./run.ts for how to run the workflow diff --git a/sdks/typescript/src/v1/examples/concurrency_limit_rr/concurrency_limit_rr.e2e.ts b/sdks/typescript/src/v1/examples/concurrency_limit_rr/concurrency_limit_rr.e2e.ts new file mode 100644 index 0000000000..fb4d20ec84 --- /dev/null +++ b/sdks/typescript/src/v1/examples/concurrency_limit_rr/concurrency_limit_rr.e2e.ts @@ -0,0 +1,32 @@ +import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { simpleConcurrency } from './workflow'; + +describe('concurrency_limit_rr-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'concurrency-rr-e2e-worker', + workflows: [simpleConcurrency], + slots: 1, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it.skip('round-robin concurrency behavior (timing-sensitive)', async () => { + // Python version is skipped due to timing unreliability; keep parity here. + // If we want to test this reliably, we should assert on engine events/ordering + // rather than wall-clock duration. + await simpleConcurrency.run([ + { Message: 'a', GroupKey: 'A' }, + { Message: 'b', GroupKey: 'A' }, + { Message: 'c', GroupKey: 'B' }, + { Message: 'd', GroupKey: 'B' }, + ]); + }, 120_000); +}); diff --git a/sdks/typescript/src/v1/examples/concurrency-rr/load.ts b/sdks/typescript/src/v1/examples/concurrency_limit_rr/load.ts similarity index 100% rename from sdks/typescript/src/v1/examples/concurrency-rr/load.ts rename to sdks/typescript/src/v1/examples/concurrency_limit_rr/load.ts diff --git a/sdks/typescript/src/v1/examples/concurrency-rr/run.ts b/sdks/typescript/src/v1/examples/concurrency_limit_rr/run.ts similarity index 100% rename from sdks/typescript/src/v1/examples/concurrency-rr/run.ts rename to sdks/typescript/src/v1/examples/concurrency_limit_rr/run.ts diff --git a/examples/typescript/concurrency-rr/worker.ts b/sdks/typescript/src/v1/examples/concurrency_limit_rr/worker.ts similarity index 100% rename from examples/typescript/concurrency-rr/worker.ts rename to sdks/typescript/src/v1/examples/concurrency_limit_rr/worker.ts diff --git a/sdks/typescript/src/v1/examples/concurrency-rr/workflow.ts b/sdks/typescript/src/v1/examples/concurrency_limit_rr/workflow.ts similarity index 95% rename from sdks/typescript/src/v1/examples/concurrency-rr/workflow.ts rename to sdks/typescript/src/v1/examples/concurrency_limit_rr/workflow.ts index d003fbfac4..e99f5b57e8 100644 --- a/sdks/typescript/src/v1/examples/concurrency-rr/workflow.ts +++ b/sdks/typescript/src/v1/examples/concurrency_limit_rr/workflow.ts @@ -1,4 +1,4 @@ -import { ConcurrencyLimitStrategy } from '@hatchet/workflow'; +import { ConcurrencyLimitStrategy } from '@hatchet/v1'; import { hatchet } from '../hatchet-client'; type SimpleInput = { diff --git a/sdks/typescript/src/v1/examples/dag_match_condition/complex-workflow.ts b/sdks/typescript/src/v1/examples/conditions/complex-workflow.ts similarity index 100% rename from sdks/typescript/src/v1/examples/dag_match_condition/complex-workflow.ts rename to sdks/typescript/src/v1/examples/conditions/complex-workflow.ts diff --git a/sdks/typescript/src/v1/examples/conditions/conditions.e2e.ts b/sdks/typescript/src/v1/examples/conditions/conditions.e2e.ts new file mode 100644 index 0000000000..819dc95d78 --- /dev/null +++ b/sdks/typescript/src/v1/examples/conditions/conditions.e2e.ts @@ -0,0 +1,57 @@ +import sleep from '@hatchet/util/sleep';
+import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { taskConditionWorkflow } from './complex-workflow'; + +describe('conditions-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'conditions-e2e-worker', + workflows: [taskConditionWorkflow], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + xit('waits, receives events, and completes branches', async () => { + const ref = await taskConditionWorkflow.runNoWait({}); + + // give the workflow time to reach waits + await sleep(15_000); + + await hatchet.events.push('skip_on_event:skip', {}); + await hatchet.events.push('wait_for_event:start', {}); + + const result: any = await ref.output; + + // python asserts skip_on_event is skipped + expect(result.skipOnEvent?.skipped ?? result.skip_on_event?.skipped).toBeTruthy(); + + const startRandom = result.start.randomNumber ?? result.start.random_number; + const waitForEventRandom = + result.waitForEvent.randomNumber ?? result.wait_for_event.random_number; + const waitForSleepRandom = + result.waitForSleep.randomNumber ?? result.wait_for_sleep.random_number; + + const left = result.leftBranch ?? result.left_branch; + const right = result.rightBranch ?? result.right_branch; + + expect(Boolean(left?.skipped) || Boolean(right?.skipped)).toBeTruthy(); + + const branchRandom = + left?.randomNumber ?? right?.randomNumber ?? left?.random_number ?? right?.random_number; + const { sum } = result.sum; + + // TS version includes optional skipped branches as 0 in its sum implementation; + // verify at least the required components add up. + expect(sum).toBeGreaterThanOrEqual( + startRandom + waitForEventRandom + waitForSleepRandom + branchRandom + ); + }, 120_000); +}); diff --git a/sdks/typescript/src/v1/examples/dag_match_condition/event.ts b/sdks/typescript/src/v1/examples/conditions/event.ts similarity index 100% rename from sdks/typescript/src/v1/examples/dag_match_condition/event.ts rename to sdks/typescript/src/v1/examples/conditions/event.ts diff --git a/sdks/typescript/src/v1/examples/dag_match_condition/run.ts b/sdks/typescript/src/v1/examples/conditions/run.ts similarity index 100% rename from sdks/typescript/src/v1/examples/dag_match_condition/run.ts rename to sdks/typescript/src/v1/examples/conditions/run.ts diff --git a/examples/typescript/dag_match_condition/worker.ts b/sdks/typescript/src/v1/examples/conditions/worker.ts similarity index 100% rename from examples/typescript/dag_match_condition/worker.ts rename to sdks/typescript/src/v1/examples/conditions/worker.ts diff --git a/sdks/typescript/src/v1/examples/dag_match_condition/workflow.ts b/sdks/typescript/src/v1/examples/conditions/workflow.ts similarity index 100% rename from sdks/typescript/src/v1/examples/dag_match_condition/workflow.ts rename to sdks/typescript/src/v1/examples/conditions/workflow.ts diff --git a/sdks/typescript/src/v1/examples/dag/dag.e2e.ts b/sdks/typescript/src/v1/examples/dag/dag.e2e.ts new file mode 100644 index 0000000000..03b48bdbcf --- /dev/null +++ b/sdks/typescript/src/v1/examples/dag/dag.e2e.ts @@ -0,0 +1,30 @@ +import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { dag } from './workflow'; + +describe('dag-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'dag-e2e-worker',
+ workflows: [dag], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('runs the DAG and produces expected output', async () => { + const res = await dag.run({ + Message: 'hello', + }); + + // Ensure parent output access and transform happened + expect(res.reverse.Original).toBe('hello'); + expect(res.reverse.Transformed).toBe('olleh'); + }, 60_000); +}); diff --git a/sdks/typescript/src/v1/examples/durable/durable.e2e.ts b/sdks/typescript/src/v1/examples/durable/durable.e2e.ts new file mode 100644 index 0000000000..4bdf0fde48 --- /dev/null +++ b/sdks/typescript/src/v1/examples/durable/durable.e2e.ts @@ -0,0 +1,83 @@ +import sleep from '@hatchet/util/sleep'; +import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { durableWorkflow, EVENT_KEY, SLEEP_TIME_SECONDS, waitForSleepTwice } from './workflow'; + +xdescribe('durable-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'e2e-test-worker', + workflows: [durableWorkflow, waitForSleepTwice], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('durable workflow waits for sleep + event', async () => { + const ref = await durableWorkflow.runNoWait({}); + + // `runNoWait` returns before work starts; reliably getting an event to `durable_task` + // means we need to push events *until* the task is actually waiting. + let finished = false; + const resultPromise = ref.output.finally(() => { + finished = true; + }); + + const eventPusher = (async () => { + // Push a handful of events over time to handle single-consumer semantics. + // Delay pushing so `wait_for_or_group_1` resolves via its sleep condition.
+ await sleep((SLEEP_TIME_SECONDS + 1) * 1000); + for (let i = 0; i < 30 && !finished; i += 1) { + await hatchet.events.push(EVENT_KEY, { test: 'test', i }); + await sleep(1000); + } + })(); + + const result = await resultPromise; + await eventPusher.catch(() => undefined); + + const workers = await hatchet.workers.list(); + expect(workers.rows?.length).toBeGreaterThan(0); + + const activeWorkers = (workers.rows || []).filter((w: any) => w.status === 'ACTIVE'); + expect(activeWorkers.length).toBeGreaterThanOrEqual(1); + expect(activeWorkers.some((w: any) => `${w.name}`.includes('e2e-test-worker'))).toBeTruthy(); + + expect((result as any).durable_task.status).toBe('success'); + + const g1 = (result as any).wait_for_or_group_1; + const g2 = (result as any).wait_for_or_group_2; + + // runtime is rounded to seconds and can drift a bit under load + expect(Math.abs(g1.runtime - SLEEP_TIME_SECONDS)).toBeLessThanOrEqual(5); + expect(g1.key).toBe(g2.key); + expect(g1.key).toBe('CREATE'); + expect(`${g1.eventId}`).toContain('sleep'); + expect(`${g2.eventId}`).toContain('event'); + + const multi = (result as any).wait_for_multi_sleep; + expect(multi.runtime).toBeGreaterThan(3 * SLEEP_TIME_SECONDS); + }, 180_000); + + it('durable sleep cancel + replay', async () => { + const ref = await waitForSleepTwice.runNoWait({}); + + await sleep((SLEEP_TIME_SECONDS * 1000) / 2); + await ref.cancel(); + + // may resolve or reject depending on engine; we only need it to settle + await ref.output.catch(() => undefined); + + await ref.replay(); + + const replayed = await ref.output; + // We've already slept a bit by the time the task is cancelled + expect(replayed.runtime).toBeLessThanOrEqual(SLEEP_TIME_SECONDS); + }, 180_000); +}); diff --git a/sdks/typescript/src/v1/examples/durable/workflow.ts b/sdks/typescript/src/v1/examples/durable/workflow.ts new file mode 100644 index 0000000000..c2ae0f857f --- /dev/null +++ b/sdks/typescript/src/v1/examples/durable/workflow.ts @@ -0,0 +1,123 @@ +/* eslint-disable no-console */ +import { Or, SleepCondition, UserEventCondition } from '@hatchet/v1/conditions'; +import { hatchet } from '../hatchet-client'; + +export const EVENT_KEY = 'durable-example:event'; +export const SLEEP_TIME_SECONDS = 5; +export const SLEEP_TIME = `${SLEEP_TIME_SECONDS}s` as const; + +// > Create a durable workflow +export const durableWorkflow = hatchet.workflow({ + name: 'DurableWorkflow', +}); +// !! + +durableWorkflow.task({ + name: 'ephemeral_task', + fn: async () => { + console.log('Running non-durable task'); + }, +}); + +durableWorkflow.durableTask({ + name: 'durable_task', + executionTimeout: '10m', + fn: async (_input, ctx) => { + console.log('Waiting for sleep'); + await ctx.sleepFor(SLEEP_TIME); + console.log('Sleep finished'); + + console.log('Waiting for event'); + await ctx.waitFor({ eventKey: EVENT_KEY }); + console.log('Event received'); + + return { status: 'success' }; + }, +}); + +function extractKeyAndEventId(waitResult: unknown): { key: string; eventId: string } { + // DurableContext.waitFor currently returns the CREATE payload directly. + // The shape is typically `{ [readableDataKey]: { [eventId]: ... } }`.
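+ // For instance (illustrative values only), a payload like { CREATE: { 'sleep-abc123': {} } }
+ // would yield key 'CREATE' and eventId 'sleep-abc123' below.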
+ const obj = waitResult as any; + if (obj && typeof obj === 'object') { + const key = Object.keys(obj)[0]; + const inner = obj[key]; + if (inner && typeof inner === 'object') { + const eventId = Object.keys(inner)[0]; + if (eventId) { + return { key: 'CREATE', eventId }; + } + } + if (key) { + return { key: 'CREATE', eventId: key }; + } + } + + return { key: 'CREATE', eventId: '' }; +} + +durableWorkflow.durableTask({ + name: 'wait_for_or_group_1', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + const waitResult = await ctx.waitFor( + Or(new SleepCondition(SLEEP_TIME, 'sleep'), new UserEventCondition(EVENT_KEY, '', 'event')) + ); + const { key, eventId } = extractKeyAndEventId(waitResult); + return { + runtime: Math.round((Date.now() - start) / 1000), + key, + eventId, + }; + }, +}); + +durableWorkflow.durableTask({ + name: 'wait_for_or_group_2', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + const waitResult = await ctx.waitFor( + Or( + new SleepCondition(`${6 * SLEEP_TIME_SECONDS}s`, 'sleep'), + new UserEventCondition(EVENT_KEY, '', 'event') + ) + ); + const { key, eventId } = extractKeyAndEventId(waitResult); + return { + runtime: Math.round((Date.now() - start) / 1000), + key, + eventId, + }; + }, +}); + +durableWorkflow.durableTask({ + name: 'wait_for_multi_sleep', + executionTimeout: '10m', + fn: async (_input, ctx) => { + const start = Date.now(); + // sleep 3 times + for (let i = 0; i < 3; i += 1) { + await ctx.sleepFor(SLEEP_TIME); + } + + return { runtime: Math.round((Date.now() - start) / 1000) }; + }, +}); + +export const waitForSleepTwice = hatchet.durableTask({ + name: 'wait-for-sleep-twice', + executionTimeout: '10m', + fn: async (_input, ctx) => { + try { + const start = Date.now(); + await ctx.sleepFor(SLEEP_TIME); + return { runtime: Math.round((Date.now() - start) / 1000) }; + } catch (e) { + // treat cancellation as a successful completion for parity with Python sample + return { runtime: -1 }; + } + }, +}); diff --git a/sdks/typescript/src/v1/examples/durable-event/event.ts b/sdks/typescript/src/v1/examples/durable_event/event.ts similarity index 100% rename from sdks/typescript/src/v1/examples/durable-event/event.ts rename to sdks/typescript/src/v1/examples/durable_event/event.ts diff --git a/sdks/typescript/src/v1/examples/durable-event/run.ts b/sdks/typescript/src/v1/examples/durable_event/run.ts similarity index 100% rename from sdks/typescript/src/v1/examples/durable-event/run.ts rename to sdks/typescript/src/v1/examples/durable_event/run.ts diff --git a/examples/typescript/durable-event/worker.ts b/sdks/typescript/src/v1/examples/durable_event/worker.ts similarity index 100% rename from examples/typescript/durable-event/worker.ts rename to sdks/typescript/src/v1/examples/durable_event/worker.ts diff --git a/sdks/typescript/src/v1/examples/durable-event/workflow.ts b/sdks/typescript/src/v1/examples/durable_event/workflow.ts similarity index 100% rename from sdks/typescript/src/v1/examples/durable-event/workflow.ts rename to sdks/typescript/src/v1/examples/durable_event/workflow.ts diff --git a/sdks/typescript/src/v1/examples/durable-sleep/event.ts b/sdks/typescript/src/v1/examples/durable_sleep/event.ts similarity index 100% rename from sdks/typescript/src/v1/examples/durable-sleep/event.ts rename to sdks/typescript/src/v1/examples/durable_sleep/event.ts diff --git a/sdks/typescript/src/v1/examples/durable-sleep/run.ts b/sdks/typescript/src/v1/examples/durable_sleep/run.ts similarity 
index 100% rename from sdks/typescript/src/v1/examples/durable-sleep/run.ts rename to sdks/typescript/src/v1/examples/durable_sleep/run.ts diff --git a/examples/typescript/durable-sleep/worker.ts b/sdks/typescript/src/v1/examples/durable_sleep/worker.ts similarity index 100% rename from examples/typescript/durable-sleep/worker.ts rename to sdks/typescript/src/v1/examples/durable_sleep/worker.ts diff --git a/sdks/typescript/src/v1/examples/durable-sleep/workflow.ts b/sdks/typescript/src/v1/examples/durable_sleep/workflow.ts similarity index 100% rename from sdks/typescript/src/v1/examples/durable-sleep/workflow.ts rename to sdks/typescript/src/v1/examples/durable_sleep/workflow.ts diff --git a/sdks/typescript/src/v1/examples/on_event/event.e2e.ts b/sdks/typescript/src/v1/examples/events/event.e2e.ts similarity index 99% rename from sdks/typescript/src/v1/examples/on_event/event.e2e.ts rename to sdks/typescript/src/v1/examples/events/event.e2e.ts index 612082037c..3d081e0a14 100644 --- a/sdks/typescript/src/v1/examples/on_event/event.e2e.ts +++ b/sdks/typescript/src/v1/examples/events/event.e2e.ts @@ -56,6 +56,7 @@ xdescribe('events-e2e', () => { const maxAttempts = 15; const eventToRuns: Record<string, string[]> = {}; + // eslint-disable-next-line no-constant-condition while (true) { console.log('Waiting for event runs to complete...'); if (attempts > maxAttempts) { diff --git a/sdks/typescript/src/v1/examples/on_event/event.ts b/sdks/typescript/src/v1/examples/events/event.ts similarity index 100% rename from sdks/typescript/src/v1/examples/on_event/event.ts rename to sdks/typescript/src/v1/examples/events/event.ts diff --git a/sdks/typescript/src/v1/examples/on_event/filter.ts b/sdks/typescript/src/v1/examples/events/filter.ts similarity index 95% rename from sdks/typescript/src/v1/examples/on_event/filter.ts rename to sdks/typescript/src/v1/examples/events/filter.ts index 4cae2762e3..d7df0dc47b 100644 --- a/sdks/typescript/src/v1/examples/on_event/filter.ts +++ b/sdks/typescript/src/v1/examples/events/filter.ts @@ -3,7 +3,7 @@ import { lower, SIMPLE_EVENT } from './workflow'; // > Create a filter hatchet.filters.create({ - workflowId: lower.id, + workflowId: lower.name, expression: 'input.ShouldSkip == false', scope: 'foobarbaz', payload: { diff --git a/examples/typescript/on_event/worker.ts b/sdks/typescript/src/v1/examples/events/worker.ts similarity index 100% rename from examples/typescript/on_event/worker.ts rename to sdks/typescript/src/v1/examples/events/worker.ts diff --git a/sdks/typescript/src/v1/examples/on_event/workflow.ts b/sdks/typescript/src/v1/examples/events/workflow.ts similarity index 100% rename from sdks/typescript/src/v1/examples/on_event/workflow.ts rename to sdks/typescript/src/v1/examples/events/workflow.ts diff --git a/sdks/typescript/src/v1/examples/high-memory/workflow-with-child.ts b/sdks/typescript/src/v1/examples/high-memory/workflow-with-child.ts index db5637d88f..5038ce935a 100644 --- a/sdks/typescript/src/v1/examples/high-memory/workflow-with-child.ts +++ b/sdks/typescript/src/v1/examples/high-memory/workflow-with-child.ts @@ -23,7 +23,7 @@ export const child = hatchet.task({ export const parent = hatchet.task({ name: 'parent', - timeout: '10m', + executionTimeout: '10m', fn: async (input: ParentInput, ctx) => { // lets generate large payload 1 mb const largePayload = new Array(1024 * 1024).fill('a').join(''); diff --git a/sdks/typescript/src/v1/examples/legacy/run.ts b/sdks/typescript/src/v1/examples/legacy/run.ts deleted file mode 100644 index 2e41f97777..0000000000
--- a/sdks/typescript/src/v1/examples/legacy/run.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { hatchet } from '../hatchet-client'; -import { simple } from './workflow'; - -async function main() { - const res = await hatchet.run<{ Message: string }, { step2: string }>(simple, { - Message: 'hello', - }); - - // eslint-disable-next-line no-console - console.log(res.step2); -} - -if (require.main === module) { - main(); -} diff --git a/sdks/typescript/src/v1/examples/legacy/worker.ts b/sdks/typescript/src/v1/examples/legacy/worker.ts deleted file mode 100644 index 81d05536fa..0000000000 --- a/sdks/typescript/src/v1/examples/legacy/worker.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { hatchet } from '../hatchet-client'; -import { simple } from './workflow'; - -async function main() { - const worker = await hatchet.worker('legacy-worker', { - workflows: [simple], - }); - - await worker.start(); -} - -if (require.main === module) { - main(); -} diff --git a/sdks/typescript/src/v1/examples/legacy/workflow.ts b/sdks/typescript/src/v1/examples/legacy/workflow.ts deleted file mode 100644 index 83c17605bf..0000000000 --- a/sdks/typescript/src/v1/examples/legacy/workflow.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Workflow } from '@hatchet/workflow'; - -export const simple: Workflow = { - id: 'legacy-workflow', - description: 'test', - on: { - event: 'user:create', - }, - steps: [ - { - name: 'step1', - run: async (ctx) => { - const input = ctx.workflowInput(); - - return { step1: `original input: ${input.Message}` }; - }, - }, - { - name: 'step2', - parents: ['step1'], - run: (ctx) => { - const step1Output = ctx.stepOutput('step1'); - - return { step2: `step1 output: ${step1Output.step1}` }; - }, - }, - ], -}; diff --git a/sdks/typescript/src/v1/examples/logging/byo-logger.ts b/sdks/typescript/src/v1/examples/logger/byo-logger.ts similarity index 100% rename from sdks/typescript/src/v1/examples/logging/byo-logger.ts rename to sdks/typescript/src/v1/examples/logger/byo-logger.ts diff --git a/sdks/typescript/src/v1/examples/logger/logger.e2e.ts b/sdks/typescript/src/v1/examples/logger/logger.e2e.ts new file mode 100644 index 0000000000..c4a1e666e6 --- /dev/null +++ b/sdks/typescript/src/v1/examples/logger/logger.e2e.ts @@ -0,0 +1,28 @@ +import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness'; +import { loggingWorkflow } from './workflow'; + +describe('logger-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'logger-e2e-worker', + workflows: [loggingWorkflow], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('runs logging workflow tasks', async () => { + const result = await loggingWorkflow.run({}); + + // python asserts only root_logger, but we validate both tasks here + expect((result as any).root_logger.status).toBe('success'); + expect((result as any).context_logger.status).toBe('success'); + }, 90_000); +}); diff --git a/sdks/typescript/src/v1/examples/logging/logger.ts b/sdks/typescript/src/v1/examples/logger/logger.ts similarity index 100% rename from sdks/typescript/src/v1/examples/logging/logger.ts rename to sdks/typescript/src/v1/examples/logger/logger.ts diff --git a/sdks/typescript/src/v1/examples/logger/workflow.ts b/sdks/typescript/src/v1/examples/logger/workflow.ts new file mode 100644 index 0000000000..58a4129734 --- /dev/null +++ b/sdks/typescript/src/v1/examples/logger/workflow.ts @@ -0,0 +1,36 @@ +/*
eslint-disable no-console */ +import { hatchet } from '../hatchet-client'; + +// Mirrors `sdks/python/examples/logger/workflow.py` +export const loggingWorkflow = hatchet.workflow({ + name: 'LoggingWorkflow', +}); + +loggingWorkflow.task({ + name: 'root_logger', + fn: async () => { + for (let i = 0; i < 12; i += 1) { + console.info(`executed step1 - ${i}`); + console.info({ step1: 'step1' }); + // keep this fast for e2e + } + + return { status: 'success' }; + }, +}); + +loggingWorkflow.task({ + name: 'context_logger', + fn: async (_input, ctx) => { + for (let i = 0; i < 12; i += 1) { + // Python uses ctx.log; TS has both ctx.log (deprecated) and ctx.logger.* + // Use ctx.log to stay closer semantically. + await ctx.log(`executed step1 - ${i}`); + await ctx.log(JSON.stringify({ step1: 'step1' })); + } + + return { status: 'success' }; + }, +}); + +// !! diff --git a/sdks/typescript/src/v1/examples/multiple_wf_concurrency/workflow.ts b/sdks/typescript/src/v1/examples/multiple_wf_concurrency/workflow.ts index 530381cbe6..1ad6e6e7de 100644 --- a/sdks/typescript/src/v1/examples/multiple_wf_concurrency/workflow.ts +++ b/sdks/typescript/src/v1/examples/multiple_wf_concurrency/workflow.ts @@ -1,4 +1,4 @@ -import { ConcurrencyLimitStrategy } from '@hatchet/workflow'; +import { ConcurrencyLimitStrategy } from '@hatchet/v1'; import { hatchet } from '../hatchet-client'; type SimpleInput = { diff --git a/sdks/typescript/src/v1/examples/non_retryable/non_retryable.e2e.ts b/sdks/typescript/src/v1/examples/non_retryable/non_retryable.e2e.ts new file mode 100644 index 0000000000..30627202a0 --- /dev/null +++ b/sdks/typescript/src/v1/examples/non_retryable/non_retryable.e2e.ts @@ -0,0 +1,45 @@ +import { makeE2EClient, poll, startWorker, stopWorker } from '../__e2e__/harness'; +import { nonRetryableWorkflow } from './workflow'; +import { V1TaskEventType, V1TaskStatus } from '../../../clients/rest/generated/data-contracts'; + +describe('non-retryable-e2e', () => { + const hatchet = makeE2EClient(); + let worker: Awaited<ReturnType<typeof startWorker>> | undefined; + + beforeAll(async () => { + worker = await startWorker({ + client: hatchet, + name: 'non-retryable-e2e-worker', + workflows: [nonRetryableWorkflow], + slots: 10, + }); + }); + + afterAll(async () => { + await stopWorker(worker); + }); + + it('retries only the retryable failure', async () => { + const ref = await nonRetryableWorkflow.runNoWait({}); + + const details = await poll(async () => hatchet.runs.get(ref), { + timeoutMs: 60_000, + intervalMs: 1000, + label: 'nonRetryableWorkflow terminal', + shouldStop: (d) => ![V1TaskStatus.QUEUED, V1TaskStatus.RUNNING].includes(d.run.status as any), + }); + + expect(details.run.status).toBe(V1TaskStatus.FAILED); + + const retrying = details.taskEvents.filter( + (e: { eventType: V1TaskEventType }) => e.eventType === V1TaskEventType.RETRYING + ); + expect(retrying.length).toBe(1); + + const failed = details.taskEvents.filter( + (e: { eventType: V1TaskEventType }) => e.eventType === V1TaskEventType.FAILED + ); + // python expects 3 FAILED events (two initial failures + one retry failure) + expect(failed.length).toBeGreaterThanOrEqual(3); + }, 90_000); +}); diff --git a/sdks/typescript/src/v1/examples/on_failure/on_failure.e2e.ts b/sdks/typescript/src/v1/examples/on_failure/on_failure.e2e.ts new file mode 100644 index 0000000000..21970a8586 --- /dev/null +++ b/sdks/typescript/src/v1/examples/on_failure/on_failure.e2e.ts @@ -0,0 +1,49 @@ +import { makeE2EClient, poll, startWorker, stopWorker } from '../__e2e__/harness'; +import { ERROR_TEXT,
+import { V1TaskStatus } from '../../../clients/rest/generated/data-contracts';
+
+describe('on-failure-e2e', () => {
+  const hatchet = makeE2EClient();
+  let worker: Awaited<ReturnType<typeof startWorker>> | undefined;
+
+  beforeAll(async () => {
+    worker = await startWorker({
+      client: hatchet,
+      name: 'on-failure-e2e-worker',
+      workflows: [failureWorkflow],
+      slots: 10,
+    });
+  });
+
+  afterAll(async () => {
+    await stopWorker(worker);
+  });
+
+  xit('runs on_failure task after failure', async () => {
+    const ref = await failureWorkflow.runNoWait({});
+
+    await expect(ref.output).rejects.toEqual(
+      expect.arrayContaining([expect.stringContaining(ERROR_TEXT)])
+    );
+
+    const details = await poll(async () => hatchet.runs.get(ref), {
+      timeoutMs: 120_000,
+      intervalMs: 1000,
+      label: 'onFailure run details',
+      shouldStop: (d) =>
+        ![V1TaskStatus.QUEUED, V1TaskStatus.RUNNING].includes(d.run.status as any) &&
+        (d.tasks || []).some((t) => `${t.displayName}`.includes('on_failure')),
+    });
+
+    expect(details.tasks.length).toBeGreaterThanOrEqual(2);
+    expect(details.run.status).toBe(V1TaskStatus.FAILED);
+
+    const completed = details.tasks.filter((t) => t.status === V1TaskStatus.COMPLETED);
+    const failed = details.tasks.filter((t) => t.status === V1TaskStatus.FAILED);
+    expect(completed.length).toBeGreaterThanOrEqual(1);
+    expect(failed.length).toBeGreaterThanOrEqual(1);
+
+    expect(completed.some((t) => t.displayName.includes('on_failure'))).toBeTruthy();
+    expect(failed.some((t) => t.displayName.includes('step1'))).toBeTruthy();
+  }, 180_000);
+});
diff --git a/sdks/typescript/src/v1/examples/on_failure/workflow.ts b/sdks/typescript/src/v1/examples/on_failure/workflow.ts
index 9ac31d3186..47d933affc 100644
--- a/sdks/typescript/src/v1/examples/on_failure/workflow.ts
+++ b/sdks/typescript/src/v1/examples/on_failure/workflow.ts
@@ -1,24 +1,31 @@
 /* eslint-disable no-console */
 import { hatchet } from '../hatchet-client';
 
+export const ERROR_TEXT = 'step1 failed';
+
 // > On Failure Task
+// This workflow will fail because `step1` throws. We define an `onFailure` handler to run cleanup.
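+// Illustrative note (an assumption inferred from the e2e assertions above, not asserted by the
+// SDK docs): by the time the `onFailure` handler runs, the run is already FAILED and
+// `ctx.errors()` surfaces the upstream task errors, e.g. an array containing 'step1 failed'.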
 export const failureWorkflow = hatchet.workflow({
-  name: 'always-fail',
+  name: 'OnFailureWorkflow',
 });
 
 failureWorkflow.task({
-  name: 'always-fail',
+  name: 'step1',
+  executionTimeout: '1s',
   fn: async () => {
-    throw new Error('intentional failure');
+    throw new Error(ERROR_TEXT);
   },
 });
 
+// 👀 After the workflow fails, this special step will run
 failureWorkflow.onFailure({
-  name: 'on-failure',
-  fn: async (input, ctx) => {
+  name: 'on_failure',
+  fn: async (_input, ctx) => {
     console.log('onFailure for run:', ctx.workflowRunId());
+    console.log('upstream errors:', ctx.errors());
+
     return {
-      'on-failure': 'success',
+      status: 'success',
     };
   },
 });
diff --git a/sdks/typescript/src/v1/examples/priority/priority.e2e.ts b/sdks/typescript/src/v1/examples/priority/priority.e2e.ts
new file mode 100644
index 0000000000..313d031c3e
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/priority/priority.e2e.ts
@@ -0,0 +1,26 @@
+import { Priority } from '@hatchet/v1';
+import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness';
+import { priority, priorityWf } from './workflow';
+
+describe('priority-e2e', () => {
+  const hatchet = makeE2EClient();
+  let worker: Awaited<ReturnType<typeof startWorker>> | undefined;
+
+  beforeAll(async () => {
+    worker = await startWorker({
+      client: hatchet,
+      name: 'priority-e2e-worker',
+      workflows: [priority, priorityWf],
+      slots: 1,
+    });
+  });
+
+  afterAll(async () => {
+    await stopWorker(worker);
+  });
+
+  it('task sees its configured default priority (unless overridden)', async () => {
+    const res = await priority.run({});
+    expect(res.priority).toBe(Priority.MEDIUM);
+  }, 60_000);
+});
diff --git a/sdks/typescript/src/v1/examples/simple/e2e-workflows.ts b/sdks/typescript/src/v1/examples/simple/e2e-workflows.ts
new file mode 100644
index 0000000000..d01dfccd40
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/simple/e2e-workflows.ts
@@ -0,0 +1,18 @@
+import type { InputType } from '@hatchet/v1';
+import { hatchet } from '../hatchet-client';
+
+// Mirrors `sdks/python/examples/simple/worker.py` outputs for e2e.
+export const helloWorld = hatchet.task({
+  name: 'hello-world',
+  fn: async (_input: InputType) => {
+    return { result: 'Hello, world!' };
+  },
+});
+
+export const helloWorldDurable = hatchet.durableTask({
+  name: 'hello-world-durable',
+  executionTimeout: '10m',
+  fn: async (_input: InputType) => {
+    return { result: 'Hello, world!' };
+  },
+});
diff --git a/sdks/typescript/src/v1/examples/simple/enqueue.ts b/sdks/typescript/src/v1/examples/simple/enqueue.ts
index 11d47928e6..858e8666d7 100644
--- a/sdks/typescript/src/v1/examples/simple/enqueue.ts
+++ b/sdks/typescript/src/v1/examples/simple/enqueue.ts
@@ -21,12 +21,12 @@ async function main() {
 
   // > Subscribing to results
   // the return object of the enqueue method is a WorkflowRunRef which includes a listener for the result of the workflow
-  const result = await run.result();
+  const result = await run.output;
   console.log(result);
 
   // if you need to subscribe to the result of the workflow at a later time, you can use the runRef method and the stored runId
   const ref = hatchet.runRef(runId);
-  const result2 = await ref.result();
+  const result2 = await ref.output;
   console.log(result2);
   // !!
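+
+  // Illustrative sketch (assumption: `hatchet.runs.get` accepts the stored run id, the same
+  // call the e2e harness makes with a run ref elsewhere in this PR): instead of awaiting
+  // `output`, the run can also be polled for status via the REST client:
+  //
+  //   const details = await hatchet.runs.get(runId);
+  //   console.log(details.run.status);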
 }
diff --git a/sdks/typescript/src/v1/examples/simple/simple.e2e.ts b/sdks/typescript/src/v1/examples/simple/simple.e2e.ts
new file mode 100644
index 0000000000..bd5ae7d3aa
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/simple/simple.e2e.ts
@@ -0,0 +1,37 @@
+import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness';
+import { helloWorld, helloWorldDurable } from './e2e-workflows';
+
+describe('simple-run-modes-e2e', () => {
+  const hatchet = makeE2EClient();
+  let worker: Awaited<ReturnType<typeof startWorker>> | undefined;
+
+  beforeAll(async () => {
+    worker = await startWorker({
+      client: hatchet,
+      name: 'simple-run-modes-e2e-worker',
+      workflows: [helloWorld, helloWorldDurable],
+      slots: 50,
+    });
+  });
+
+  afterAll(async () => {
+    await stopWorker(worker);
+  });
+
+  it('supports the run variants for tasks and durable tasks', async () => {
+    const expected = { result: 'Hello, world!' };
+
+    for (const task of [helloWorld, helloWorldDurable]) {
+      const x1 = await task.run({});
+      const x2 = await (await task.runNoWait({})).output;
+
+      const x3 = (await task.run([{}]))[0];
+      const x4 = await (await task.runNoWait([{}]))[0].output;
+
+      // `.output` on a fresh ref resolves to the same payload as awaiting `run`
+      const x5 = await (await task.runNoWait({})).output;
+
+      expect([x1, x2, x3, x4, x5]).toEqual([expected, expected, expected, expected, expected]);
+    }
+  }, 90_000);
+});
diff --git a/sdks/typescript/src/v1/examples/simple/workflow.ts b/sdks/typescript/src/v1/examples/simple/workflow.ts
index 0c139b8fcf..3f7115a444 100644
--- a/sdks/typescript/src/v1/examples/simple/workflow.ts
+++ b/sdks/typescript/src/v1/examples/simple/workflow.ts
@@ -1,4 +1,5 @@
 // > Declaring a Task
+import { StickyStrategy } from '@hatchet/v1';
 import { hatchet } from '../hatchet-client';
 
 // (optional) Define the input type for the workflow
@@ -8,6 +9,7 @@ export type SimpleInput = {
 
 export const simple = hatchet.task({
   name: 'simple',
+  sticky: StickyStrategy.SOFT,
   retries: 3,
   fn: async (input: SimpleInput) => {
     return {
diff --git a/sdks/typescript/src/v1/examples/sticky/workflow.ts b/sdks/typescript/src/v1/examples/sticky/workflow.ts
index 53d970d847..d843e3d7c9 100644
--- a/sdks/typescript/src/v1/examples/sticky/workflow.ts
+++ b/sdks/typescript/src/v1/examples/sticky/workflow.ts
@@ -1,5 +1,5 @@
 /* eslint-disable no-console */
-import { StickyStrategy } from '@hatchet/protoc/workflows';
+import { StickyStrategy } from '@hatchet/v1';
 import { hatchet } from '../hatchet-client';
 import { child } from '../child_workflows/workflow';
 
diff --git a/sdks/typescript/src/v1/examples/streaming/streaming.e2e.ts b/sdks/typescript/src/v1/examples/streaming/streaming.e2e.ts
new file mode 100644
index 0000000000..b94c7513a0
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/streaming/streaming.e2e.ts
@@ -0,0 +1,39 @@
+import { makeE2EClient, startWorker, stopWorker } from '../__e2e__/harness';
+import { streamingTask } from './workflow';
+
+describe('streaming-e2e', () => {
+  const hatchet = makeE2EClient();
+  let worker: Awaited<ReturnType<typeof startWorker>> | undefined;
+
+  beforeAll(async () => {
+    worker = await startWorker({
+      client: hatchet,
+      name: 'streaming-e2e-worker',
+      workflows: [streamingTask],
+      slots: 2,
+    });
+  });
+
+  afterAll(async () => {
+    await stopWorker(worker);
+  });
+
+  it('stream output arrives in-order and complete', async () => {
+    const ref = await streamingTask.runNoWait({});
+    const runId = await ref.getWorkflowRunId();
+
+    let combined = '';
+    for await (const chunk of hatchet.runs.subscribeToStream(runId)) {
+      combined += chunk;
+    }
+
+    // Basic correctness: we got *something* and it includes the known leading text.
+    // (Exact chunk-by-chunk equality is possible, but this keeps the test resilient to
+    // small changes in chunking while still validating ordering/completeness.)
+    expect(combined.length).toBeGreaterThan(0);
+    expect(combined.startsWith('\nHappy families are all alike')).toBeTruthy();
+
+    // Ensure the run itself completed (stream closes at completion)
+    await ref.output;
+  }, 120_000);
+});
diff --git a/sdks/typescript/src/v1/examples/timeout/run.ts b/sdks/typescript/src/v1/examples/timeout/run.ts
new file mode 100644
index 0000000000..ab8175e93a
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/timeout/run.ts
@@ -0,0 +1,19 @@
+/* eslint-disable no-console */
+import { refreshTimeoutTask, timeoutTask } from './workflow';
+
+async function main() {
+  try {
+    await timeoutTask.run({ Message: 'hello' });
+  } catch (e) {
+    console.log('timeoutTask failed as expected', e);
+  }
+
+  const res = await refreshTimeoutTask.run({ Message: 'hello' });
+  console.log(res);
+}
+
+if (require.main === module) {
+  main()
+    .catch(console.error)
+    .finally(() => process.exit(0));
+}
diff --git a/sdks/typescript/src/v1/examples/timeout/timeout.e2e.ts b/sdks/typescript/src/v1/examples/timeout/timeout.e2e.ts
new file mode 100644
index 0000000000..b50b31d9ee
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/timeout/timeout.e2e.ts
@@ -0,0 +1,40 @@
+import { makeE2EClient, poll, startWorker, stopWorker } from '../__e2e__/harness';
+import { timeoutTask, refreshTimeoutTask } from './workflow';
+import { V1TaskStatus } from '../../../clients/rest/generated/data-contracts';
+
+describe('timeout-e2e', () => {
+  const hatchet = makeE2EClient();
+
+  let worker: Awaited<ReturnType<typeof startWorker>> | undefined;
+
+  beforeAll(async () => {
+    worker = await startWorker({
+      client: hatchet,
+      name: 'timeout-e2e-worker',
+      workflows: [timeoutTask, refreshTimeoutTask],
+      slots: 10,
+    });
+  });
+
+  afterAll(async () => {
+    await stopWorker(worker);
+  });
+
+  it('execution timeout should fail the run', async () => {
+    const ref = await timeoutTask.runNoWait({ Message: 'hello' });
+
+    const run = await poll(async () => hatchet.runs.get(ref), {
+      timeoutMs: 60_000,
+      intervalMs: 1000,
+      label: 'timeoutTask terminal status',
+      shouldStop: (r) => ![V1TaskStatus.QUEUED, V1TaskStatus.RUNNING].includes(r.run.status as any),
+    });
+
+    expect([V1TaskStatus.FAILED, V1TaskStatus.CANCELLED]).toContain(run.run.status);
+  }, 90_000);
+
+  it('refresh timeout should allow a longer run to succeed', async () => {
+    const res = await refreshTimeoutTask.run({ Message: 'hello' });
+    expect(res.status).toBe('success');
+  }, 90_000);
+});
diff --git a/sdks/typescript/src/v1/examples/timeout/worker.ts b/sdks/typescript/src/v1/examples/timeout/worker.ts
new file mode 100644
index 0000000000..5a22aa0afe
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/timeout/worker.ts
@@ -0,0 +1,15 @@
+import { hatchet } from '../hatchet-client';
+import { refreshTimeoutTask, timeoutTask } from './workflow';
+
+async function main() {
+  const worker = await hatchet.worker('timeout-worker', {
+    workflows: [timeoutTask, refreshTimeoutTask],
+    slots: 50,
+  });
+
+  await worker.start();
+}
+
+if (require.main === module) {
+  main();
+}
diff --git a/sdks/typescript/src/v1/examples/timeout/workflow.ts b/sdks/typescript/src/v1/examples/timeout/workflow.ts
new file mode 100644
index 0000000000..1720472632
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/timeout/workflow.ts
@@ -0,0 +1,47 @@
+import sleep from '@hatchet/util/sleep';
+import { hatchet } from '../hatchet-client';
+
+export type SimpleInput = {
+  Message: string;
+};
+
+// > Execution Timeout
+// Mirrors Python `examples/timeout/test_timeout.py::test_execution_timeout`
+export const timeoutTask = hatchet.task({
+  name: 'timeout',
+  executionTimeout: '3s',
+  fn: async (_: SimpleInput, { cancelled }) => {
+    await sleep(10 * 1000);
+
+    if (cancelled) {
+      throw new Error('Task was cancelled');
+    }
+
+    return {
+      status: 'success',
+    };
+  },
+});
+// !!
+
+// > Refresh Timeout
+// Mirrors Python `examples/timeout/test_timeout.py::test_run_refresh_timeout`
+export const refreshTimeoutTask = hatchet.task({
+  name: 'refresh-timeout',
+  executionTimeout: '10s',
+  scheduleTimeout: '10s',
+  fn: async (input: SimpleInput, ctx) => {
+    ctx.refreshTimeout('15s');
+    await sleep(15000);
+
+    if (ctx.abortController.signal.aborted) {
+      throw new Error('cancelled');
+    }
+
+    return {
+      status: 'success',
+      message: input.Message.toLowerCase(),
+    };
+  },
+});
+// !!
diff --git a/sdks/typescript/src/v1/examples/timeouts/run.ts b/sdks/typescript/src/v1/examples/timeouts/run.ts
deleted file mode 100644
index b9b70e3d8f..0000000000
--- a/sdks/typescript/src/v1/examples/timeouts/run.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/* eslint-disable no-console */
-// > Running a Task with Results
-import { cancellation } from './workflow';
-// ...
-async function main() {
-  // 👀 Run the workflow with results
-  const res = await cancellation.run({});
-
-  // 👀 Access the results of the workflow
-  console.log(res.Completed);
-  // !!
-}
-
-if (require.main === module) {
-  main()
-    .catch(console.error)
-    .finally(() => process.exit(0));
-}
diff --git a/sdks/typescript/src/v1/examples/timeouts/worker.ts b/sdks/typescript/src/v1/examples/timeouts/worker.ts
deleted file mode 100644
index 4a2a1a7eca..0000000000
--- a/sdks/typescript/src/v1/examples/timeouts/worker.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-// > Declaring a Worker
-import { hatchet } from '../hatchet-client';
-import { cancellation } from './workflow';
-
-async function main() {
-  const worker = await hatchet.worker('cancellation-worker', {
-    // 👀 Declare the workflows that the worker can execute
-    workflows: [cancellation],
-    // 👀 Declare the number of concurrent task runs the worker can accept
-    slots: 100,
-  });
-
-  await worker.start();
-}
-
-if (require.main === module) {
-  main();
-}
-// !!
diff --git a/sdks/typescript/src/v1/examples/timeouts/workflow.ts b/sdks/typescript/src/v1/examples/timeouts/workflow.ts
deleted file mode 100644
index 08b4668de9..0000000000
--- a/sdks/typescript/src/v1/examples/timeouts/workflow.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-// > Declaring a Task
-import sleep from '@hatchet/util/sleep';
-import { hatchet } from '../hatchet-client';
-
-// (optional) Define the input type for the workflow
-export const cancellation = hatchet.task({
-  name: 'cancellation',
-  executionTimeout: '3s',
-  fn: async (_, { cancelled }) => {
-    await sleep(10 * 1000);
-
-    if (cancelled) {
-      throw new Error('Task was cancelled');
-    }
-
-    return {
-      Completed: true,
-    };
-  },
-});
-// !!
-
-// see ./worker.ts and ./run.ts for how to run the workflow
diff --git a/sdks/typescript/src/v1/examples/with_timeouts/workflow.ts b/sdks/typescript/src/v1/examples/with_timeouts/workflow.ts
deleted file mode 100644
index 0b7a9488cd..0000000000
--- a/sdks/typescript/src/v1/examples/with_timeouts/workflow.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-import sleep from '@hatchet/util/sleep';
-import { hatchet } from '../hatchet-client';
-
-// (optional) Define the input type for the workflow
-export type SimpleInput = {
-  Message: string;
-};
-
-// > Execution Timeout
-export const withTimeouts = hatchet.task({
-  name: 'with-timeouts',
-  // time the task can wait in the queue before it is cancelled
-  scheduleTimeout: '10s',
-  // time the task can run before it is cancelled
-  executionTimeout: '10s',
-  fn: async (input: SimpleInput, ctx) => {
-    // wait 15 seconds
-    await sleep(15000);
-
-    // get the abort controller
-    const { abortController } = ctx;
-
-    // if the abort controller is aborted, throw an error
-    if (abortController.signal.aborted) {
-      throw new Error('cancelled');
-    }
-
-    return {
-      TransformedMessage: input.Message.toLowerCase(),
-    };
-  },
-});
-// !!
-
-// > Refresh Timeout
-export const refreshTimeout = hatchet.task({
-  name: 'refresh-timeout',
-  executionTimeout: '10s',
-  scheduleTimeout: '10s',
-  fn: async (input: SimpleInput, ctx) => {
-    // adds 15 seconds to the execution timeout
-    ctx.refreshTimeout('15s');
-    await sleep(15000);
-
-    // get the abort controller
-    const { abortController } = ctx;
-
-    // now this condition will not be met
-    // if the abort controller is aborted, throw an error
-    if (abortController.signal.aborted) {
-      throw new Error('cancelled');
-    }
-
-    return {
-      TransformedMessage: input.Message.toLowerCase(),
-    };
-  },
-});
-// !!
diff --git a/sdks/typescript/src/v1/index.ts b/sdks/typescript/src/v1/index.ts
index 4a4dee93ed..7167c5c06c 100644
--- a/sdks/typescript/src/v1/index.ts
+++ b/sdks/typescript/src/v1/index.ts
@@ -7,3 +7,5 @@ export * from './client/duration';
 export * from './types';
 export * from './task';
 export * from './client/worker/context';
+export * from './slot-types';
+export * from '../legacy/legacy-transformer';
diff --git a/sdks/typescript/src/v1/parent-run-context-vars.ts b/sdks/typescript/src/v1/parent-run-context-vars.ts
index 419cd9ec62..8518d23f8a 100644
--- a/sdks/typescript/src/v1/parent-run-context-vars.ts
+++ b/sdks/typescript/src/v1/parent-run-context-vars.ts
@@ -2,9 +2,18 @@ import { AsyncLocalStorage } from 'async_hooks';
 
 export interface ParentRunContext {
   parentId: string;
-  parentRunId: string;
+  /**
+   * External ID of the parent task/step run.
+   */
+  parentTaskRunExternalId: string;
   desiredWorkerId: string;
   childIndex?: number;
+
+  /**
+   * (optional) AbortSignal inherited by nested `run()` calls.
+   * Used to cancel local "wait for result" subscriptions when the parent task is cancelled.
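+   *
+   * Illustrative (an assumption about the intended use): if a parent task is cancelled
+   * while awaiting a child's result, aborting this signal tears down the local result
+   * subscription without directly acting on the child run itself.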
+   */
+  signal?: AbortSignal;
 }
 
 export class ParentRunContextManager {
@@ -14,6 +23,15 @@
     this.storage = new AsyncLocalStorage();
   }
 
+  runWithContext<T>(opts: ParentRunContext, fn: () => T): T {
+    return this.storage.run(
+      {
+        ...opts,
+      },
+      fn
+    );
+  }
+
   setContext(opts: ParentRunContext): void {
     this.storage.enterWith({
       ...opts,
@@ -24,6 +42,7 @@
     const parentRunContext = this.getContext();
     if (parentRunContext) {
       parentRunContext.parentId = opts.parentId;
+      parentRunContext.parentTaskRunExternalId = opts.parentTaskRunExternalId;
       parentRunContext.childIndex = (parentRunContext.childIndex ?? 0) + 1;
       this.setContext(parentRunContext);
     }
diff --git a/sdks/typescript/src/v1/slot-types.ts b/sdks/typescript/src/v1/slot-types.ts
new file mode 100644
index 0000000000..9a8f473231
--- /dev/null
+++ b/sdks/typescript/src/v1/slot-types.ts
@@ -0,0 +1,7 @@
+// eslint-disable-next-line no-shadow
+export enum SlotType {
+  Default = 'default',
+  Durable = 'durable',
+}
+
+export type SlotConfig = Partial<Record<SlotType, number>>;
diff --git a/sdks/typescript/src/v1/task.ts b/sdks/typescript/src/v1/task.ts
index 4b4a10d168..8066bb924e 100644
--- a/sdks/typescript/src/v1/task.ts
+++ b/sdks/typescript/src/v1/task.ts
@@ -1,10 +1,15 @@
-import { ConcurrencyLimitStrategy } from '@hatchet/protoc/v1/workflows';
-import { CreateStep } from '@hatchet/step';
+import {
+  ConcurrencyLimitStrategy,
+  RateLimitDuration,
+  WorkerLabelComparator,
+} from '@hatchet/protoc/v1/workflows';
 import { Conditions } from './conditions';
 import { Duration } from './client/duration';
 import { InputType, OutputType, UnknownInputType } from './types';
 import { Context, DurableContext } from './client/worker/context';
 
+export { ConcurrencyLimitStrategy, WorkerLabelComparator };
+
 /**
  * Options for configuring the concurrency for a task.
  */
@@ -34,11 +39,6 @@ export type Concurrency = {
   limitStrategy?: ConcurrencyLimitStrategy;
 };
 
-/**
- * @deprecated use Concurrency instead
- */
-export type TaskConcurrency = Concurrency;
-
 export class NonRetryableError extends Error {
   constructor(message?: string) {
     super(message);
@@ -84,9 +84,9 @@ export type CreateBaseTaskOpts<
   fn?: C;
 
   /**
    * @deprecated use executionTimeout instead
    */
-  timeout?: CreateStep['timeout'];
+  timeout?: Duration;
 
   /**
    * (optional) execution timeout duration for the task after it starts running
@@ -109,19 +109,29 @@ export type CreateBaseTaskOpts<
    *
    * default: 0
    */
-  retries?: CreateStep['retries'];
+  retries?: number;
 
   /**
    * (optional) backoff strategy configuration for retries.
    * - factor: Base of the exponential backoff (base ^ retry count)
    * - maxSeconds: Maximum backoff duration in seconds
    */
-  backoff?: CreateStep['backoff'];
+  backoff?: {
+    factor?: number | undefined;
+    maxSeconds?: number | undefined;
+  };
 
   /**
    * (optional) rate limits for the task.
    */
-  rateLimits?: CreateStep['rate_limits'];
+  rateLimits?: {
+    units: string | number;
+    key?: string;
+    staticKey?: string;
+    dynamicKey?: string;
+    limit?: string | number;
+    duration?: RateLimitDuration;
+  }[];
 
   /**
    * (optional) worker labels for task routing and scheduling.
@@ -131,12 +141,23 @@ export type CreateBaseTaskOpts<
    * - weight: Priority weight for worker selection
    * - comparator: Custom comparison logic for label matching
    */
-  desiredWorkerLabels?: CreateStep['worker_labels'];
+  desiredWorkerLabels?: Record<
+    string,
+    {
+      value: string | number;
+      required?: boolean;
+      weight?: number;
+      comparator?: WorkerLabelComparator;
+    }
+  >;
 
   /**
    * (optional) the concurrency options for the task
    */
   concurrency?: Concurrency | Concurrency[];
+
+  /** @internal */
+  slotRequests?: Record<string, number>;
 };
 
 export type CreateWorkflowTaskOpts<
diff --git a/sdks/typescript/src/version.ts b/sdks/typescript/src/version.ts
index 8435f06e99..041fc8a8d8 100644
--- a/sdks/typescript/src/version.ts
+++ b/sdks/typescript/src/version.ts
@@ -1 +1 @@
-export const HATCHET_VERSION = '1.9.5';
+export const HATCHET_VERSION = '1.11.0';
diff --git a/sdks/typescript/src/workflow.ts b/sdks/typescript/src/workflow.ts
index 35b80e8f76..453fb7165f 100644
--- a/sdks/typescript/src/workflow.ts
+++ b/sdks/typescript/src/workflow.ts
@@ -1,65 +1,16 @@
-import * as z from 'zod';
-
-import { CreateStep, CreateStepSchema } from './step';
-import {
-  ConcurrencyLimitStrategy as PbConcurrencyLimitStrategy,
-  StickyStrategy as PbStickyStrategy,
-} from './protoc/workflows';
-
-const CronConfigSchema = z.object({
-  cron: z.string(),
-  event: z.undefined(),
-});
-
-const EventConfigSchema = z.object({
-  cron: z.undefined(),
-  event: z.string(),
-});
-
-const OnConfigSchema = z.union([CronConfigSchema, EventConfigSchema]).optional();
-
-const StepsSchema = z.array(CreateStepSchema);
-
-export type Steps = z.infer<typeof StepsSchema>;
-
-export const ConcurrencyLimitStrategy = PbConcurrencyLimitStrategy;
-
-export const WorkflowConcurrency = z.object({
-  name: z.string(),
-  maxRuns: z.number().optional(),
-  limitStrategy: z.nativeEnum(ConcurrencyLimitStrategy).optional(),
-  expression: z.string().optional(),
-});
-
-export const HatchetTimeoutSchema = z.string();
-
-export const StickyStrategy = PbStickyStrategy;
-
-export const CreateWorkflowSchema = z.object({
-  id: z.string(),
-  description: z.string(),
-  version: z.string().optional(),
-  /**
-   * sticky will attempt to run all steps for workflow on the same worker
-   */
-  sticky: z.nativeEnum(StickyStrategy).optional(),
-  scheduleTimeout: z.string().optional(),
-  /**
-   * @deprecated Workflow timeout is deprecated. Use step timeouts instead.
-   */
-  timeout: HatchetTimeoutSchema.optional(),
-  on: OnConfigSchema,
-  steps: StepsSchema,
-  onFailure: CreateStepSchema?.optional(),
-});
-
-/**
- * @deprecated Use client.workflow instead (TODO link to migration doc)
- */
-export interface Workflow extends z.infer<typeof CreateWorkflowSchema> {
-  concurrency?: z.infer<typeof WorkflowConcurrency> & {
-    key?: (ctx: any) => string;
-  };
-  steps: CreateStep[];
-  onFailure?: CreateStep;
-}
+import { ConcurrencyLimitStrategy, StickyStrategy } from '@hatchet/v1';
+
+export * from './legacy/workflow';
+
+export { ConcurrencyLimitStrategy, StickyStrategy };
+
+console.warn(
+  '\x1b[31mDeprecation warning: The v0 SDK, including the workflow module, is deprecated and will be removed in release v1.12.0.\x1b[0m'
+);
+console.warn(
+  '\x1b[31mPlease migrate to the v1 SDK instead: https://docs.hatchet.run/home/v1-sdk-improvements\x1b[0m'
+);
+console.warn(
+  'ConcurrencyLimitStrategy and StickyStrategy have been moved to @hatchet-dev/typescript-sdk/v1'
+);
+console.warn('--------------------------------');
diff --git a/sql/schema/v0.sql b/sql/schema/v0.sql
index 995efb5c6e..5f8fe24571 100644
--- a/sql/schema/v0.sql
+++ b/sql/schema/v0.sql
@@ -454,6 +454,7 @@ CREATE TABLE "Step" (
     -- the maximum amount of time in seconds to wait between retries
     "retryMaxBackoff" INTEGER,
     "scheduleTimeout" TEXT NOT NULL DEFAULT '5m',
+    "isDurable" BOOLEAN NOT NULL DEFAULT false,
 
     CONSTRAINT "Step_pkey" PRIMARY KEY ("id")
 );
@@ -850,7 +851,6 @@ CREATE TABLE "Worker" (
     "lastHeartbeatAt" TIMESTAMP(3),
     "name" TEXT NOT NULL,
     "dispatcherId" UUID,
-    "maxRuns" INTEGER NOT NULL DEFAULT 100,
     "isActive" BOOLEAN NOT NULL DEFAULT false,
     "lastListenerEstablished" TIMESTAMP(3),
     "isPaused" BOOLEAN NOT NULL DEFAULT false,
diff --git a/sql/schema/v1-core.sql b/sql/schema/v1-core.sql
index a8e0f1624e..81cecc4148 100644
--- a/sql/schema/v1-core.sql
+++ b/sql/schema/v1-core.sql
@@ -144,6 +144,8 @@ CREATE TYPE v1_task_initial_state AS ENUM ('QUEUED', 'CANCELLED', 'SKIPPED', 'FA
 -- enqueue if the strategy is removed.
 CREATE TYPE v1_concurrency_strategy AS ENUM ('NONE', 'GROUP_ROUND_ROBIN', 'CANCEL_IN_PROGRESS', 'CANCEL_NEWEST');
 
+CREATE TYPE v1_worker_slot_group AS ENUM ('SLOTS', 'DURABLE_SLOTS');
+
 CREATE TABLE v1_workflow_concurrency (
     -- We need an id used for stable ordering to prevent deadlocks. We must process all concurrency
     -- strategies on a workflow in the same order.
@@ -415,12 +417,15 @@ CREATE TABLE v1_task_runtime (
     worker_id UUID,
     tenant_id UUID NOT NULL,
     timeout_at TIMESTAMP(3) NOT NULL,
+    slot_group v1_worker_slot_group NOT NULL DEFAULT 'SLOTS',
 
     CONSTRAINT v1_task_runtime_pkey PRIMARY KEY (task_id, task_inserted_at, retry_count)
 );
 
 CREATE INDEX v1_task_runtime_tenantId_workerId_idx ON v1_task_runtime (tenant_id ASC, worker_id ASC) WHERE worker_id IS NOT NULL;
 
+CREATE INDEX v1_task_runtime_tenantId_workerId_slotGroup_idx ON v1_task_runtime (tenant_id ASC, worker_id ASC, slot_group ASC) WHERE worker_id IS NOT NULL;
+
 CREATE INDEX v1_task_runtime_tenantId_timeoutAt_idx ON v1_task_runtime (tenant_id ASC, timeout_at ASC);
 
 alter table v1_task_runtime set (
@@ -432,6 +437,48 @@
     autovacuum_vacuum_cost_limit='1000'
 );
 
+-- v1_worker_slot_config stores per-worker config for arbitrary slot types.
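+-- Illustrative shape (an assumption mirroring the SDK's SlotType enum, whose values are
+-- 'default' and 'durable'): a worker registering with a slot config of { default: 5, durable: 2 }
+-- would be stored as two rows keyed by (tenant_id, worker_id, slot_type), with max_units
+-- 5 and 2 respectively.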
+CREATE TABLE v1_worker_slot_config (
+    tenant_id UUID NOT NULL,
+    worker_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    max_units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+    PRIMARY KEY (tenant_id, worker_id, slot_type)
+);
+
+-- v1_step_slot_request stores per-step slot requests.
+CREATE TABLE v1_step_slot_request (
+    tenant_id UUID NOT NULL,
+    step_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+    PRIMARY KEY (tenant_id, step_id, slot_type)
+);
+
+-- v1_task_runtime_slot stores runtime slot consumption per task.
+CREATE TABLE v1_task_runtime_slot (
+    tenant_id UUID NOT NULL,
+    task_id bigint NOT NULL,
+    task_inserted_at TIMESTAMPTZ NOT NULL,
+    retry_count INTEGER NOT NULL,
+    worker_id UUID NOT NULL,
+    slot_type TEXT NOT NULL,
+    units INTEGER NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+    PRIMARY KEY (task_id, task_inserted_at, retry_count, slot_type)
+);
+
+CREATE INDEX v1_task_runtime_slot_tenant_worker_type_idx
+    ON v1_task_runtime_slot (tenant_id ASC, worker_id ASC, slot_type ASC);
+
+CREATE INDEX v1_step_slot_request_step_idx
+    ON v1_step_slot_request (step_id ASC);
+
 -- v1_rate_limited_queue_items represents a queue item that has been rate limited and removed from the v1_queue_item table.
 CREATE TABLE v1_rate_limited_queue_items (
     requeue_after TIMESTAMPTZ NOT NULL,