const std = @import("std");
const zap = @import("zap");
const mem = std.mem;
const json = std.json;
const db = @import("../db/sqlite.zig");
const uuid_util = @import("../utilities/uuid.zig");
const time_util = @import("../utilities/time.zig");
const json_util = @import("../utilities/json.zig");
const schedules = @import("deployment_schedules.zig");

/// Entry point for all `/deployments` routes. Dispatches on HTTP method and
/// path shape; requests that match no route get a 501 Not Implemented.
pub fn handle(r: zap.Request) !void {
    const target = r.path orelse "/";
    const method = r.method orelse "GET";

    // POST /deployments/filter
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/filter")) {
        try filter(r);
        return;
    }
    // POST /deployments/count
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/count")) {
        try count(r);
        return;
    }
    // POST /deployments/get_scheduled_flow_runs
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/get_scheduled_flow_runs")) {
        try getScheduledFlowRuns(r);
        return;
    }
    // GET /deployments/name/{flow_name}/{deployment_name}
    if (mem.eql(u8, method, "GET") and mem.indexOf(u8, target, "/name/") != null) {
        try getByName(r, target);
        return;
    }
    // POST /deployments/{id}/create_flow_run
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/create_flow_run")) {
        try createFlowRun(r, target);
        return;
    }
    // POST /deployments/{id}/pause_deployment
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/pause_deployment")) {
        try pause(r, target);
        return;
    }
    // POST /deployments/{id}/resume_deployment
    if (mem.eql(u8, method, "POST") and mem.endsWith(u8, target, "/resume_deployment")) {
        try resume_(r, target);
        return;
    }
    // Schedule sub-resource endpoints are handled by the schedules module.
    if (mem.indexOf(u8, target, "/schedules") != null) {
        try schedules.handle(r, target);
        return;
    }
    // POST /deployments/ - create deployment (only at the collection root,
    // so POSTs to unknown sub-paths fall through to 501 below).
    if (mem.eql(u8, method, "POST")) {
        const is_root = mem.endsWith(u8, target, "/deployments/") or
            mem.endsWith(u8, target, "/deployments");
        if (is_root) {
            try create(r);
            return;
        }
    }
    // GET /deployments/{id}
    if (mem.eql(u8, method, "GET")) {
        const id = extractDeploymentId(target) orelse {
            json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
            return;
        };
        try read(r, id);
        return;
    }
    // PATCH /deployments/{id}
    if (mem.eql(u8, method, "PATCH")) {
        const id = extractDeploymentId(target) orelse {
            json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
            return;
        };
        try update(r, id);
        return;
    }
    // DELETE /deployments/{id}
    if (mem.eql(u8, method, "DELETE")) {
        const id = extractDeploymentId(target) orelse {
            json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
            return;
        };
        try delete(r, id);
        return;
    }
    json_util.sendStatus(r, "{\"detail\":\"not implemented\"}", .not_implemented);
}

/// Extract the `{id}` path segment from `/deployments/{id}[/...]` or
/// `/api/deployments/{id}[/...]`. Returns null when the path has no id
/// segment. The returned slice aliases `target`.
pub fn extractDeploymentId(target: []const u8) ?[]const u8 {
    const prefix = if (mem.startsWith(u8, target, "/api/deployments/"))
        "/api/deployments/"
    else if (mem.startsWith(u8, target, "/deployments/"))
        "/deployments/"
    else
        return null;
    if (target.len <= prefix.len) return null;
    const after = target[prefix.len..];
    // The id runs up to the next path separator (or the end of the path).
    const end = mem.indexOf(u8, after, "/") orelse after.len;
    if (end == 0) return null;
    return after[0..end];
}

// -------------------------------------------------------------------------
// CRUD handlers
// -------------------------------------------------------------------------

/// POST /deployments/ — create a deployment, or update-in-place (upsert)
/// when a deployment with the same flow_id + name already exists.
/// Responds 201 with the new deployment, or 200 with the updated one.
fn create(r: zap.Request) !void {
    // Per-request arena: everything allocated while handling this request is
    // freed in one shot on return.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const body = r.body orelse {
        json_util.sendStatus(r, "{\"detail\":\"body required\"}", .bad_request);
        return;
    };
    const parsed = json.parseFromSlice(json.Value, alloc, body, .{}) catch {
        json_util.sendStatus(r, "{\"detail\":\"invalid json\"}", .bad_request);
        return;
    };
    const obj = parsed.value.object;
    const name = getString(obj, "name") orelse {
        json_util.sendStatus(r, "{\"detail\":\"name required\"}", .bad_request);
        return;
    };
    const flow_id = getString(obj, "flow_id") orelse {
        json_util.sendStatus(r, "{\"detail\":\"flow_id required\"}", .bad_request);
        return;
    };

    // Verify the referenced flow exists before creating the deployment.
    _ = db.flows.getById(alloc, flow_id) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"Flow not found\"}", .not_found);
        return;
    };

    var ts_buf: [32]u8 = undefined;
    const now = time_util.timestamp(&ts_buf);

    // Upsert path: a deployment with this flow/name pair already exists.
    if (db.deployments.getByFlowAndName(alloc, flow_id, name) catch null) |existing| {
        _ = db.deployments.updateById(existing.id, now, buildUpdateParams(alloc, obj)) catch {
            json_util.sendStatus(r, "{\"detail\":\"update failed\"}", .internal_server_error);
            return;
        };
        if (obj.get("schedules")) |sched_val| {
            try schedules.replaceSchedules(alloc, existing.id, sched_val, now);
        }
        const deployment = db.deployments.getById(alloc, existing.id) catch null orelse {
            json_util.sendStatus(r, "{\"detail\":\"not found after update\"}", .internal_server_error);
            return;
        };
        const resp = writeDeployment(alloc, deployment) catch {
            json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
            return;
        };
        json_util.send(r, resp);
        return;
    }

    // Create path.
    var id_buf: [36]u8 = undefined;
    const new_id = uuid_util.generate(&id_buf);

    // Resolve work_queue_id from work_pool_name if provided: a named queue
    // wins; otherwise fall back to the pool's default queue.
    var insert_params = buildInsertParams(alloc, obj);
    if (insert_params.work_pool_name) |pool_name| {
        if (db.work_pools.getByName(alloc, pool_name) catch null) |pool| {
            if (insert_params.work_queue_name) |queue_name| {
                // Look up the specific queue by name within the pool.
                if (db.work_queues.getByPoolAndName(alloc, pool.id, queue_name) catch null) |queue| {
                    insert_params.work_queue_id = queue.id;
                }
            } else if (pool.default_queue_id) |default_id| {
                // Use the pool's default queue.
                insert_params.work_queue_id = default_id;
                insert_params.work_queue_name = "default";
            }
        }
    }

    db.deployments.insert(new_id, name, flow_id, now, insert_params) catch {
        json_util.sendStatus(r, "{\"detail\":\"insert failed\"}", .internal_server_error);
        return;
    };
    if (obj.get("schedules")) |sched_val| {
        try schedules.replaceSchedules(alloc, new_id, sched_val, now);
    }
    const deployment = db.deployments.getById(alloc, new_id) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"not found after insert\"}", .internal_server_error);
        return;
    };
    const resp = writeDeployment(alloc, deployment) catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    json_util.sendStatus(r, resp, .created);
}

/// GET /deployments/{id} — fetch one deployment by id; 404 when absent.
fn read(r: zap.Request, id: []const u8) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const deployment = db.deployments.getById(alloc, id) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    };
    const resp = writeDeployment(alloc, deployment) catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    json_util.send(r, resp);
}

/// GET /deployments/name/{flow_name}/{deployment_name} — look up a
/// deployment by its flow's name plus its own name.
fn getByName(r: zap.Request, target: []const u8) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const name_idx = mem.indexOf(u8, target, "/name/") orelse {
        json_util.sendStatus(r, "{\"detail\":\"invalid path\"}", .bad_request);
        return;
    };
    // Skip past "/name/" (6 bytes); the remainder is "{flow}/{deployment}".
    const after_name = target[name_idx + 6 ..];
    const sep_idx = mem.indexOf(u8, after_name, "/") orelse {
        json_util.sendStatus(r, "{\"detail\":\"deployment name required\"}", .bad_request);
        return;
    };
    const flow_name = after_name[0..sep_idx];
    const deployment_name = after_name[sep_idx + 1 ..];
    if (deployment_name.len == 0) {
        json_util.sendStatus(r, "{\"detail\":\"deployment name required\"}", .bad_request);
        return;
    }
    const flow = db.flows.getByName(alloc, flow_name) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"Flow not found\"}", .not_found);
        return;
    };
    const deployment = db.deployments.getByFlowAndName(alloc, flow.id, deployment_name) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    };
    const resp = writeDeployment(alloc, deployment) catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    json_util.send(r, resp);
}

/// PATCH /deployments/{id} — partial update; 204 on success, 404 when the
/// id does not exist.
fn update(r: zap.Request, id: []const u8) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const body = r.body orelse {
        json_util.sendStatus(r, "{\"detail\":\"body required\"}", .bad_request);
        return;
    };
    const parsed = json.parseFromSlice(json.Value, alloc, body, .{}) catch {
        json_util.sendStatus(r, "{\"detail\":\"invalid json\"}", .bad_request);
        return;
    };
    var ts_buf: [32]u8 = undefined;
    const now = time_util.timestamp(&ts_buf);
    const updated = db.deployments.updateById(id, now, buildUpdateParams(alloc, parsed.value.object)) catch {
        json_util.sendStatus(r, "{\"detail\":\"update failed\"}", .internal_server_error);
        return;
    };
    if (!updated) {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    }
    r.setStatus(.no_content);
    r.sendBody("") catch {};
}

/// DELETE /deployments/{id} — 204 on success, 404 when absent.
fn delete(r: zap.Request, id: []const u8) !void {
    const deleted = db.deployments.deleteById(id) catch {
        json_util.sendStatus(r, "{\"detail\":\"delete failed\"}", .internal_server_error);
        return;
    };
    if (!deleted) {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    }
    r.setStatus(.no_content);
    r.sendBody("") catch {};
}

/// POST /deployments/filter — paginated list. Honors optional integer
/// "limit" (default 200) and "offset" (default 0) in the body; any other
/// filter criteria in the body are currently ignored.
fn filter(r: zap.Request) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    var limit: usize = 200;
    var offset: usize = 0;
    if (r.body) |body| {
        if (json.parseFromSlice(json.Value, alloc, body, .{})) |parsed| {
            const obj = parsed.value.object;
            // Guard against negative client values: @intCast on a negative
            // i64 would trip a safety panic.
            if (obj.get("limit")) |v| {
                if (v == .integer and v.integer >= 0) limit = @intCast(v.integer);
            }
            if (obj.get("offset")) |v| {
                if (v == .integer and v.integer >= 0) offset = @intCast(v.integer);
            }
        } else |_| {}
    }
    const deployments_list = db.deployments.list(alloc, limit, offset) catch {
        json_util.sendStatus(r, "{\"detail\":\"database error\"}", .internal_server_error);
        return;
    };
    var output: std.io.Writer.Allocating = .init(alloc);
    var jw: json.Stringify = .{ .writer = &output.writer };
    jw.beginArray() catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    for (deployments_list) |d| {
        // Skip (rather than fail the whole response) rows that do not serialize.
        writeDeploymentObject(&jw, d, alloc) catch continue;
    }
    jw.endArray() catch {};
    json_util.send(r, output.toOwnedSlice() catch "[]");
}

/// POST /deployments/count — responds with the total count as a bare JSON
/// number.
fn count(r: zap.Request) !void {
    const c = db.deployments.count() catch 0;
    var buf: [32]u8 = undefined;
    const resp = std.fmt.bufPrint(&buf, "{d}", .{c}) catch "0";
    json_util.send(r, resp);
}

// -------------------------------------------------------------------------
// Action handlers
// -------------------------------------------------------------------------

/// POST /deployments/{id}/create_flow_run — create a flow run for the
/// deployment. The request body may override state type/name (defaults:
/// SCHEDULED/Scheduled). Responds 201 with the new run.
fn createFlowRun(r: zap.Request, target: []const u8) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const id = extractDeploymentId(target) orelse {
        json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
        return;
    };
    const deployment = db.deployments.getById(alloc, id) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    };

    var state_type: []const u8 = "SCHEDULED";
    var state_name: []const u8 = "Scheduled";
    if (r.body) |body| {
        if (json.parseFromSlice(json.Value, alloc, body, .{})) |parsed| {
            const obj = parsed.value.object;
            if (obj.get("state")) |s| {
                if (s == .object) {
                    if (s.object.get("type")) |t| {
                        if (t == .string) state_type = t.string;
                    }
                    if (s.object.get("name")) |n| {
                        if (n == .string) state_name = n.string;
                    }
                }
            }
        } else |_| {}
    }

    var id_buf: [36]u8 = undefined;
    const run_id = uuid_util.generate(&id_buf);
    var ts_buf: [32]u8 = undefined;
    const now = time_util.timestamp(&ts_buf);
    // Run name: truncated deployment name + first 8 chars of the run uuid.
    var name_buf: [64]u8 = undefined;
    const run_name = std.fmt.bufPrint(&name_buf, "{s}-{s}", .{
        deployment.name[0..@min(deployment.name.len, 20)],
        run_id[0..8],
    }) catch "run";

    db.flow_runs.insert(run_id, deployment.flow_id, run_name, state_type, state_name, now, .{
        .deployment_id = deployment.id,
        .deployment_version = deployment.version,
        .work_queue_name = deployment.work_queue_name,
        .work_queue_id = deployment.work_queue_id,
    }) catch {
        json_util.sendStatus(r, "{\"detail\":\"insert failed\"}", .internal_server_error);
        return;
    };
    const run = db.flow_runs.get(alloc, run_id) catch null orelse {
        json_util.sendStatus(r, "{\"detail\":\"not found after insert\"}", .internal_server_error);
        return;
    };
    var state_id_buf: [36]u8 = undefined;
    const state_id = uuid_util.generate(&state_id_buf);
    const resp = writeFlowRunResponse(alloc, run, state_id) catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    json_util.sendStatus(r, resp, .created);
}

/// POST /deployments/{id}/pause_deployment — set paused = true; 204/404.
fn pause(r: zap.Request, target: []const u8) !void {
    const id = extractDeploymentId(target) orelse {
        json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
        return;
    };
    var ts_buf: [32]u8 = undefined;
    const now = time_util.timestamp(&ts_buf);
    const updated = db.deployments.updateById(id, now, .{ .paused = true }) catch {
        json_util.sendStatus(r, "{\"detail\":\"update failed\"}", .internal_server_error);
        return;
    };
    if (!updated) {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    }
    r.setStatus(.no_content);
    r.sendBody("") catch {};
}

/// POST /deployments/{id}/resume_deployment — set paused = false; 204/404.
/// Named resume_ because `resume` is a Zig keyword.
fn resume_(r: zap.Request, target: []const u8) !void {
    const id = extractDeploymentId(target) orelse {
        json_util.sendStatus(r, "{\"detail\":\"deployment id required\"}", .bad_request);
        return;
    };
    var ts_buf: [32]u8 = undefined;
    const now = time_util.timestamp(&ts_buf);
    const updated = db.deployments.updateById(id, now, .{ .paused = false }) catch {
        json_util.sendStatus(r, "{\"detail\":\"update failed\"}", .internal_server_error);
        return;
    };
    if (!updated) {
        json_util.sendStatus(r, "{\"detail\":\"Deployment not found\"}", .not_found);
        return;
    }
    r.setStatus(.no_content);
    r.sendBody("") catch {};
}

/// POST /deployments/get_scheduled_flow_runs — list scheduled runs for the
/// requested deployment_ids, optionally bounded by "scheduled_before" and
/// "limit" (default 100). Missing/empty inputs yield an empty array.
fn getScheduledFlowRuns(r: zap.Request) !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    const body = r.body orelse {
        json_util.send(r, "[]");
        return;
    };
    const parsed = json.parseFromSlice(json.Value, alloc, body, .{}) catch {
        json_util.sendStatus(r, "{\"detail\":\"invalid json\"}", .bad_request);
        return;
    };
    const obj = parsed.value.object;
    const ids_val = obj.get("deployment_ids") orelse {
        json_util.send(r, "[]");
        return;
    };
    if (ids_val != .array) {
        json_util.send(r, "[]");
        return;
    }
    // Collect only string entries; non-string ids are silently ignored.
    var deployment_ids = std.ArrayListUnmanaged([]const u8){};
    for (ids_val.array.items) |item| {
        if (item == .string) {
            try deployment_ids.append(alloc, item.string);
        }
    }
    if (deployment_ids.items.len == 0) {
        json_util.send(r, "[]");
        return;
    }
    var scheduled_before: ?[]const u8 = null;
    if (obj.get("scheduled_before")) |v| {
        if (v == .string) scheduled_before = normalizeTimestamp(alloc, v.string);
    }
    var limit: usize = 100;
    if (obj.get("limit")) |v| {
        // Negative limit would panic in @intCast; ignore it instead.
        if (v == .integer and v.integer >= 0) limit = @intCast(v.integer);
    }
    const runs = db.flow_runs.getScheduledByDeployments(alloc, deployment_ids.items, scheduled_before, limit) catch {
        json_util.sendStatus(r, "{\"detail\":\"database error\"}", .internal_server_error);
        return;
    };
    var output: std.io.Writer.Allocating = .init(alloc);
    var jw: json.Stringify = .{ .writer = &output.writer };
    jw.beginArray() catch {
        json_util.sendStatus(r, "{\"detail\":\"serialize error\"}", .internal_server_error);
        return;
    };
    for (runs) |run| {
        var state_id_buf: [36]u8 = undefined;
        const state_id = uuid_util.generate(&state_id_buf);
        writeFlowRunObject(&jw, run, state_id) catch continue;
    }
    jw.endArray() catch {};
    json_util.send(r, output.toOwnedSlice() catch "[]");
}

/// Timestamp normalization — convert client formats to the ISO8601 form the
/// db stores. Clients may send "2026-01-22 16:40:23.915842+00:00" while the
/// db stores "2026-01-22T16:40:23.915842Z". On any failure the input is
/// returned unchanged; despite the optional return type this never yields
/// null (kept for caller compatibility). Allocates from `alloc` when a
/// rewrite is needed.
fn normalizeTimestamp(alloc: std.mem.Allocator, raw: []const u8) ?[]const u8 {
    // No space between date and time: assume already "T"-separated.
    const space_idx = mem.indexOf(u8, raw, " ") orelse return raw;
    var normalized = alloc.alloc(u8, raw.len) catch return raw;
    @memcpy(normalized, raw);
    normalized[space_idx] = 'T';
    // Rewrite a trailing "+00:00" UTC offset as "Z" (drops 5 bytes).
    if (mem.endsWith(u8, normalized, "+00:00")) {
        normalized[normalized.len - 6] = 'Z';
        return normalized[0 .. normalized.len - 5];
    }
    return normalized;
}

// -------------------------------------------------------------------------
// JSON helpers
// -------------------------------------------------------------------------

/// Fetch a string field from a JSON object; null when absent or not a string.
fn getString(obj: json.ObjectMap, key: []const u8) ?[]const u8 {
    const v = obj.get(key) orelse return null;
    return if (v == .string) v.string else null;
}

/// Fetch a bool field; null when absent or not a bool.
fn getBool(obj: json.ObjectMap, key: []const u8) ?bool {
    const v = obj.get(key) orelse return null;
    return if (v == .bool) v.bool else null;
}

/// Fetch an integer field; null when absent or not an integer.
fn getInt(obj: json.ObjectMap, key: []const u8) ?i64 {
    const v = obj.get(key) orelse return null;
    return if (v == .integer) v.integer else null;
}

/// Re-serialize a JSON sub-value as text (for storage as a raw JSON column).
/// Returns null when the key is absent, the value is JSON null, or
/// serialization fails. Caller's allocator owns the result.
fn getJsonString(alloc: std.mem.Allocator, obj: json.ObjectMap, key: []const u8) ?[]const u8 {
    const v = obj.get(key) orelse return null;
    if (v == .null) return null;
    return std.fmt.allocPrint(alloc, "{f}", .{json.fmt(v, .{})}) catch null;
}

/// Build insert params from a parsed request body. All serialized JSON
/// sub-documents are allocated from `alloc`; the returned slices live as
/// long as that allocator (callers pass their per-request arena).
///
/// NOTE: previously this created its own stack-local ArenaAllocator that was
/// never deinit'ed — every call leaked its allocations permanently.
fn buildInsertParams(alloc: std.mem.Allocator, obj: json.ObjectMap) db.deployments.InsertParams {
    return .{
        .version = getString(obj, "version"),
        .description = getString(obj, "description"),
        .paused = getBool(obj, "paused") orelse false,
        .parameters = getJsonString(alloc, obj, "parameters") orelse "{}",
        .parameter_openapi_schema = getJsonString(alloc, obj, "parameter_openapi_schema"),
        .enforce_parameter_schema = getBool(obj, "enforce_parameter_schema") orelse true,
        .tags = getJsonString(alloc, obj, "tags") orelse "[]",
        .labels = getJsonString(alloc, obj, "labels") orelse "{}",
        .path = getString(obj, "path"),
        .entrypoint = getString(obj, "entrypoint"),
        .job_variables = getJsonString(alloc, obj, "job_variables") orelse "{}",
        .pull_steps = getJsonString(alloc, obj, "pull_steps"),
        .work_pool_name = getString(obj, "work_pool_name"),
        .work_queue_name = getString(obj, "work_queue_name"),
        .concurrency_limit = getInt(obj, "concurrency_limit"),
    };
}

/// Build update params from a parsed request body. Fields absent from the
/// body stay null (i.e. unchanged on update). Serialized JSON sub-documents
/// are allocated from `alloc`, which callers pass from their request arena
/// (see the leak note on buildInsertParams).
fn buildUpdateParams(alloc: std.mem.Allocator, obj: json.ObjectMap) db.deployments.UpdateParams {
    return .{
        .version = getString(obj, "version"),
        .description = getString(obj, "description"),
        .paused = getBool(obj, "paused"),
        .parameters = getJsonString(alloc, obj, "parameters"),
        .parameter_openapi_schema = getJsonString(alloc, obj, "parameter_openapi_schema"),
        .enforce_parameter_schema = getBool(obj, "enforce_parameter_schema"),
        .tags = getJsonString(alloc, obj, "tags"),
        .labels = getJsonString(alloc, obj, "labels"),
        .path = getString(obj, "path"),
        .entrypoint = getString(obj, "entrypoint"),
        .job_variables = getJsonString(alloc, obj, "job_variables"),
        .pull_steps = getJsonString(alloc, obj, "pull_steps"),
        .work_pool_name = getString(obj, "work_pool_name"),
        .work_queue_name = getString(obj, "work_queue_name"),
        .concurrency_limit = getInt(obj, "concurrency_limit"),
    };
}

// -------------------------------------------------------------------------
// Response serializers
// -------------------------------------------------------------------------

/// Emit a pre-serialized JSON fragment verbatim as the current value.
fn writeRaw(jw: *json.Stringify, raw: []const u8) !void {
    try jw.beginWriteRaw();
    try jw.writer.writeAll(raw);
    jw.endWriteRaw();
}

/// Emit a pre-serialized JSON fragment, or JSON null when absent.
fn writeRawOrNull(jw: *json.Stringify, raw: ?[]const u8) !void {
    if (raw) |s| {
        try writeRaw(jw, s);
    } else {
        try jw.write(null);
    }
}

/// Serialize one deployment row to an owned JSON string.
fn writeDeployment(alloc: std.mem.Allocator, d: db.deployments.DeploymentRow) ![]const u8 {
    var output: std.io.Writer.Allocating = .init(alloc);
    var jw: json.Stringify = .{ .writer = &output.writer };
    try writeDeploymentObject(&jw, d, alloc);
    return output.toOwnedSlice();
}

/// Write one deployment row as a JSON object onto an open stringify stream.
/// `alloc` is used to load the deployment's schedules for the "schedules"
/// array; schedule load failures degrade to an empty array.
fn writeDeploymentObject(jw: *json.Stringify, d: db.deployments.DeploymentRow, alloc: std.mem.Allocator) !void {
    try jw.beginObject();
    try jw.objectField("id");
    try jw.write(d.id);
    try jw.objectField("created");
    try jw.write(d.created);
    try jw.objectField("updated");
    try jw.write(d.updated);
    try jw.objectField("name");
    try jw.write(d.name);
    try jw.objectField("flow_id");
    try jw.write(d.flow_id);
    try jw.objectField("version");
    try jw.write(d.version);
    try jw.objectField("description");
    try jw.write(d.description);
    try jw.objectField("paused");
    try jw.write(d.paused);
    try jw.objectField("status");
    try jw.write(d.status.toString());
    try jw.objectField("last_polled");
    try jw.write(d.last_polled);
    // parameters/tags/labels/job_variables are stored as raw JSON text.
    try jw.objectField("parameters");
    try writeRaw(jw, d.parameters);
    try jw.objectField("parameter_openapi_schema");
    try writeRawOrNull(jw, d.parameter_openapi_schema);
    try jw.objectField("enforce_parameter_schema");
    try jw.write(d.enforce_parameter_schema);
    try jw.objectField("tags");
    try writeRaw(jw, d.tags);
    try jw.objectField("labels");
    try writeRaw(jw, d.labels);
    try jw.objectField("path");
    try jw.write(d.path);
    try jw.objectField("entrypoint");
    try jw.write(d.entrypoint);
    try jw.objectField("job_variables");
    try writeRaw(jw, d.job_variables);
    try jw.objectField("pull_steps");
    try writeRawOrNull(jw, d.pull_steps);
    try jw.objectField("work_pool_name");
    try jw.write(d.work_pool_name);
    try jw.objectField("work_queue_name");
    try jw.write(d.work_queue_name);
    try jw.objectField("work_queue_id");
    try jw.write(d.work_queue_id);
    try jw.objectField("storage_document_id");
    try jw.write(d.storage_document_id);
    try jw.objectField("infrastructure_document_id");
    try jw.write(d.infrastructure_document_id);
    try jw.objectField("concurrency_limit");
    try jw.write(d.concurrency_limit);
    try jw.objectField("schedules");
    const sched_list = db.deployment_schedules.listByDeployment(alloc, d.id) catch &[_]db.deployment_schedules.DeploymentScheduleRow{};
    try jw.beginArray();
    for (sched_list) |s| {
        try schedules.writeScheduleObject(jw, s);
    }
    try jw.endArray();
    try jw.endObject();
}

/// Write one flow run as a JSON object (list form: includes scheduling and
/// timing fields) onto an open stringify stream.
fn writeFlowRunObject(jw: *json.Stringify, run: db.flow_runs.FlowRunRow, state_id: []const u8) !void {
    try jw.beginObject();
    try jw.objectField("id");
    try jw.write(run.id);
    try jw.objectField("created");
    try jw.write(run.created);
    try jw.objectField("updated");
    try jw.write(run.updated);
    try jw.objectField("name");
    try jw.write(run.name);
    try jw.objectField("flow_id");
    try jw.write(run.flow_id);
    try jw.objectField("deployment_id");
    try jw.write(run.deployment_id);
    try jw.objectField("deployment_version");
    try jw.write(run.deployment_version);
    try jw.objectField("work_queue_name");
    try jw.write(run.work_queue_name);
    try jw.objectField("work_queue_id");
    try jw.write(run.work_queue_id);
    try jw.objectField("state_type");
    try jw.write(run.state_type);
    try jw.objectField("state_name");
    try jw.write(run.state_name);
    try jw.objectField("expected_start_time");
    try jw.write(run.expected_start_time);
    try jw.objectField("next_scheduled_start_time");
    try jw.write(run.next_scheduled_start_time);
    try jw.objectField("start_time");
    try jw.write(run.start_time);
    try jw.objectField("end_time");
    try jw.write(run.end_time);
    // Nested state object; `state_id` is generated per-response by callers.
    try jw.objectField("state");
    try jw.beginObject();
    try jw.objectField("type");
    try jw.write(run.state_type);
    try jw.objectField("name");
    try jw.write(run.state_name);
    try jw.objectField("timestamp");
    try jw.write(run.state_timestamp);
    try jw.objectField("id");
    try jw.write(state_id);
    try jw.endObject();
    try jw.objectField("parameters");
    try writeRaw(jw, run.parameters);
    try jw.objectField("tags");
    try writeRaw(jw, run.tags);
    try jw.objectField("auto_scheduled");
    try jw.write(run.auto_scheduled);
    try jw.endObject();
}

/// Serialize a single flow run to an owned JSON string (create-flow-run
/// response form: omits the timing fields the list form includes).
fn writeFlowRunResponse(alloc: std.mem.Allocator, run: db.flow_runs.FlowRunRow, state_id: []const u8) ![]const u8 {
    var output: std.io.Writer.Allocating = .init(alloc);
    var jw: json.Stringify = .{ .writer = &output.writer };
    try jw.beginObject();
    try jw.objectField("id");
    try jw.write(run.id);
    try jw.objectField("created");
    try jw.write(run.created);
    try jw.objectField("updated");
    try jw.write(run.updated);
    try jw.objectField("name");
    try jw.write(run.name);
    try jw.objectField("flow_id");
    try jw.write(run.flow_id);
    try jw.objectField("deployment_id");
    try jw.write(run.deployment_id);
    try jw.objectField("deployment_version");
    try jw.write(run.deployment_version);
    try jw.objectField("work_queue_name");
    try jw.write(run.work_queue_name);
    try jw.objectField("work_queue_id");
    try jw.write(run.work_queue_id);
    try jw.objectField("state_type");
    try jw.write(run.state_type);
    try jw.objectField("state_name");
    try jw.write(run.state_name);
    try jw.objectField("state");
    try jw.beginObject();
    try jw.objectField("type");
    try jw.write(run.state_type);
    try jw.objectField("name");
    try jw.write(run.state_name);
    try jw.objectField("timestamp");
    try jw.write(run.state_timestamp);
    try jw.objectField("id");
    try jw.write(state_id);
    try jw.endObject();
    try jw.objectField("parameters");
    try writeRaw(jw, run.parameters);
    try jw.objectField("tags");
    try writeRaw(jw, run.tags);
    try jw.objectField("auto_scheduled");
    try jw.write(run.auto_scheduled);
    try jw.endObject();
    return output.toOwnedSlice();
}