this repo has no description

Separate crates for separate concerns

lewis 5be4dc2f 32fee7a7

Changed files
+2314 -812
.sqlx
crates
tranquil-auth
tranquil-cache
tranquil-comms
tranquil-crypto
tranquil-infra
tranquil-oauth
tranquil-pds
src
api
appview
auth
cache
comms
delegation
handle
image
moderation
oauth
plc
repo
storage
sync
validation
tests
tranquil-repo
tranquil-scopes
tranquil-storage
tranquil-types
src
comms
oauth
repo
+22
.sqlx/query-05fd99170e31e68fa5028c862417cdf535cd70e09fde0a8a28249df0070eb2fc.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT t.token FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "token", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "05fd99170e31e68fa5028c862417cdf535cd70e09fde0a8a28249df0070eb2fc" 22 + }
+15
.sqlx/query-0710b57fb9aa933525f617b15e6e2e5feaa9c59c38ec9175568abdacda167107.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "UPDATE users SET deactivated_at = $1 WHERE did = $2", 4 + "describe": { 5 + "columns": [], 6 + "parameters": { 7 + "Left": [ 8 + "Timestamptz", 9 + "Text" 10 + ] 11 + }, 12 + "nullable": [] 13 + }, 14 + "hash": "0710b57fb9aa933525f617b15e6e2e5feaa9c59c38ec9175568abdacda167107" 15 + }
+22
.sqlx/query-0ec60bb854a4991d0d7249a68f7445b65c8cc8c723baca221d85f5e4f2478b99.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_update' ORDER BY created_at DESC LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "body", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "0ec60bb854a4991d0d7249a68f7445b65c8cc8c723baca221d85f5e4f2478b99" 22 + }
+3 -3
.sqlx/query-20dd204aa552572ec9dc5b9950efdfa8a2e37aae3f171a2be73bee3057f86e08.json .sqlx/query-d4c68f8502bc81c27383f15dca1990c41b5e5534a3db9c137e3ef8e66fdf0a87.json
··· 1 1 { 2 2 "db_name": "PostgreSQL", 3 - "query": "\n UPDATE comms_queue\n SET status = 'processing', updated_at = NOW()\n WHERE id IN (\n SELECT id FROM comms_queue\n WHERE status = 'pending'\n AND scheduled_for <= $1\n AND attempts < max_attempts\n ORDER BY scheduled_for ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n RETURNING\n id, user_id,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: super::types::CommsType\",\n status as \"status: CommsStatus\",\n recipient, subject, body, metadata,\n attempts, max_attempts, last_error,\n created_at, updated_at, scheduled_for, processed_at\n ", 3 + "query": "\n UPDATE comms_queue\n SET status = 'processing', updated_at = NOW()\n WHERE id IN (\n SELECT id FROM comms_queue\n WHERE status = 'pending'\n AND scheduled_for <= $1\n AND attempts < max_attempts\n ORDER BY scheduled_for ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n RETURNING\n id, user_id,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: CommsType\",\n status as \"status: CommsStatus\",\n recipient, subject, body, metadata,\n attempts, max_attempts, last_error,\n created_at, updated_at, scheduled_for, processed_at\n ", 4 4 "describe": { 5 5 "columns": [ 6 6 { ··· 32 32 }, 33 33 { 34 34 "ordinal": 3, 35 - "name": "comms_type: super::types::CommsType", 35 + "name": "comms_type: CommsType", 36 36 "type_info": { 37 37 "Custom": { 38 38 "name": "comms_type", ··· 153 153 true 154 154 ] 155 155 }, 156 - "hash": "20dd204aa552572ec9dc5b9950efdfa8a2e37aae3f171a2be73bee3057f86e08" 156 + "hash": "d4c68f8502bc81c27383f15dca1990c41b5e5534a3db9c137e3ef8e66fdf0a87" 157 157 }
+22
.sqlx/query-24a7686c535e4f0332f45daa20cfce2209635090252ac3692823450431d03dc6.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT COUNT(*) FROM comms_queue WHERE status = 'pending' AND user_id = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "count", 9 + "type_info": "Int8" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Uuid" 15 + ] 16 + }, 17 + "nullable": [ 18 + null 19 + ] 20 + }, 21 + "hash": "24a7686c535e4f0332f45daa20cfce2209635090252ac3692823450431d03dc6" 22 + }
+14
.sqlx/query-29ef76852bb89af1ab9e679ceaa4abcf8bc8268a348d3be0da9840d1708d20b5.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "UPDATE users SET password_reset_code_expires_at = NOW() - INTERVAL '1 hour' WHERE email = $1", 4 + "describe": { 5 + "columns": [], 6 + "parameters": { 7 + "Left": [ 8 + "Text" 9 + ] 10 + }, 11 + "nullable": [] 12 + }, 13 + "hash": "29ef76852bb89af1ab9e679ceaa4abcf8bc8268a348d3be0da9840d1708d20b5" 14 + }
+54
.sqlx/query-4445cc86cdf04894b340e67661b79a3c411917144a011f50849b737130b24dbe.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT subject, body, comms_type as \"comms_type: String\" FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' ORDER BY created_at DESC LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "subject", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "body", 14 + "type_info": "Text" 15 + }, 16 + { 17 + "ordinal": 2, 18 + "name": "comms_type: String", 19 + "type_info": { 20 + "Custom": { 21 + "name": "comms_type", 22 + "kind": { 23 + "Enum": [ 24 + "welcome", 25 + "email_verification", 26 + "password_reset", 27 + "email_update", 28 + "account_deletion", 29 + "admin_email", 30 + "plc_operation", 31 + "two_factor_code", 32 + "channel_verification", 33 + "passkey_recovery", 34 + "legacy_login_alert", 35 + "migration_verification" 36 + ] 37 + } 38 + } 39 + } 40 + } 41 + ], 42 + "parameters": { 43 + "Left": [ 44 + "Uuid" 45 + ] 46 + }, 47 + "nullable": [ 48 + true, 49 + false, 50 + false 51 + ] 52 + }, 53 + "hash": "4445cc86cdf04894b340e67661b79a3c411917144a011f50849b737130b24dbe" 54 + }
+22
.sqlx/query-4560c237741ce9d4166aecd669770b3360a3ac71e649b293efb88d92c3254068.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT id FROM users WHERE email = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "id", 9 + "type_info": "Uuid" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "4560c237741ce9d4166aecd669770b3360a3ac71e649b293efb88d92c3254068" 22 + }
+28
.sqlx/query-4649e8daefaf4cfefc5cb2de8b3813f13f5892f653128469be727b686e6a0f0a.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT body, metadata FROM comms_queue WHERE user_id = $1 AND comms_type = 'channel_verification' ORDER BY created_at DESC LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "body", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "metadata", 14 + "type_info": "Jsonb" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Uuid" 20 + ] 21 + }, 22 + "nullable": [ 23 + false, 24 + true 25 + ] 26 + }, 27 + "hash": "4649e8daefaf4cfefc5cb2de8b3813f13f5892f653128469be727b686e6a0f0a" 28 + }
+28
.sqlx/query-47fe4a54857344d8f789f37092a294cd58f64b4fb431b54b5deda13d64525e88.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT token, expires_at FROM account_deletion_requests WHERE did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "token", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "expires_at", 14 + "type_info": "Timestamptz" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Text" 20 + ] 21 + }, 22 + "nullable": [ 23 + false, 24 + false 25 + ] 26 + }, 27 + "hash": "47fe4a54857344d8f789f37092a294cd58f64b4fb431b54b5deda13d64525e88" 28 + }
+22
.sqlx/query-49cbc923cc4a0dcf7dea4ead5ab9580ff03b717586c4ca2d5343709e2dac86b6.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT email_verified FROM users WHERE did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "email_verified", 9 + "type_info": "Bool" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "49cbc923cc4a0dcf7dea4ead5ab9580ff03b717586c4ca2d5343709e2dac86b6" 22 + }
+28
.sqlx/query-5a016f289caf75177731711e56e92881ba343c73a9a6e513e205c801c5943ec0.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "\n SELECT k.key_bytes, k.encryption_version\n FROM user_keys k\n JOIN users u ON k.user_id = u.id\n WHERE u.did = $1\n ", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "key_bytes", 9 + "type_info": "Bytea" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "encryption_version", 14 + "type_info": "Int4" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Text" 20 + ] 21 + }, 22 + "nullable": [ 23 + false, 24 + true 25 + ] 26 + }, 27 + "hash": "5a016f289caf75177731711e56e92881ba343c73a9a6e513e205c801c5943ec0" 28 + }
+22
.sqlx/query-5a036d95feedcbe6fb6396b10a7b4bd6a2eedeefda46a23e6a904cdbc3a65d45.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT body FROM comms_queue WHERE user_id = $1 AND comms_type = 'email_update' ORDER BY created_at DESC LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "body", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Uuid" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "5a036d95feedcbe6fb6396b10a7b4bd6a2eedeefda46a23e6a904cdbc3a65d45" 22 + }
+22
.sqlx/query-785a864944c5939331704c71b0cd3ed26ffdd64f3fd0f26ecc28b6a0557bbe8f.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT subject FROM comms_queue WHERE user_id = $1 AND comms_type = 'admin_email' AND body = 'Email without subject' LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "subject", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Uuid" 15 + ] 16 + }, 17 + "nullable": [ 18 + true 19 + ] 20 + }, 21 + "hash": "785a864944c5939331704c71b0cd3ed26ffdd64f3fd0f26ecc28b6a0557bbe8f" 22 + }
+22
.sqlx/query-7caa8f9083b15ec1209dda35c4c6f6fba9fe338e4a6a10636b5389d426df1631.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "\n SELECT t.token\n FROM plc_operation_tokens t\n JOIN users u ON t.user_id = u.id\n WHERE u.did = $1\n ", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "token", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "7caa8f9083b15ec1209dda35c4c6f6fba9fe338e4a6a10636b5389d426df1631" 22 + }
+28
.sqlx/query-82717b6f61cd79347e1ca7e92c4413743ba168d1e0d8b85566711e54d4048f81.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT t.token, t.expires_at FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "token", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "expires_at", 14 + "type_info": "Timestamptz" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Text" 20 + ] 21 + }, 22 + "nullable": [ 23 + false, 24 + false 25 + ] 26 + }, 27 + "hash": "82717b6f61cd79347e1ca7e92c4413743ba168d1e0d8b85566711e54d4048f81" 28 + }
+22
.sqlx/query-9ad422bf3c43e3cfd86fc88c73594246ead214ca794760d3fe77bb5cf4f27be5.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT body FROM comms_queue WHERE user_id = (SELECT id FROM users WHERE did = $1) AND comms_type = 'email_verification' ORDER BY created_at DESC LIMIT 1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "body", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "9ad422bf3c43e3cfd86fc88c73594246ead214ca794760d3fe77bb5cf4f27be5" 22 + }
+28
.sqlx/query-9b035b051769e6b9d45910a8bb42ac0f84c73de8c244ba4560f004ee3f4b7002.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT did, public_key_did_key FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "did", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "public_key_did_key", 14 + "type_info": "Text" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Text" 20 + ] 21 + }, 22 + "nullable": [ 23 + true, 24 + false 25 + ] 26 + }, 27 + "hash": "9b035b051769e6b9d45910a8bb42ac0f84c73de8c244ba4560f004ee3f4b7002" 28 + }
+108
.sqlx/query-9e772a967607553a0ab800970eaeadcaab7e06bdb79e0c89eb919b1bc1d6fabe.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "\n SELECT\n id, user_id, recipient, subject, body,\n channel as \"channel: CommsChannel\",\n comms_type as \"comms_type: CommsType\",\n status as \"status: CommsStatus\"\n FROM comms_queue\n WHERE id = $1\n ", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "id", 9 + "type_info": "Uuid" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "user_id", 14 + "type_info": "Uuid" 15 + }, 16 + { 17 + "ordinal": 2, 18 + "name": "recipient", 19 + "type_info": "Text" 20 + }, 21 + { 22 + "ordinal": 3, 23 + "name": "subject", 24 + "type_info": "Text" 25 + }, 26 + { 27 + "ordinal": 4, 28 + "name": "body", 29 + "type_info": "Text" 30 + }, 31 + { 32 + "ordinal": 5, 33 + "name": "channel: CommsChannel", 34 + "type_info": { 35 + "Custom": { 36 + "name": "comms_channel", 37 + "kind": { 38 + "Enum": [ 39 + "email", 40 + "discord", 41 + "telegram", 42 + "signal" 43 + ] 44 + } 45 + } 46 + } 47 + }, 48 + { 49 + "ordinal": 6, 50 + "name": "comms_type: CommsType", 51 + "type_info": { 52 + "Custom": { 53 + "name": "comms_type", 54 + "kind": { 55 + "Enum": [ 56 + "welcome", 57 + "email_verification", 58 + "password_reset", 59 + "email_update", 60 + "account_deletion", 61 + "admin_email", 62 + "plc_operation", 63 + "two_factor_code", 64 + "channel_verification", 65 + "passkey_recovery", 66 + "legacy_login_alert", 67 + "migration_verification" 68 + ] 69 + } 70 + } 71 + } 72 + }, 73 + { 74 + "ordinal": 7, 75 + "name": "status: CommsStatus", 76 + "type_info": { 77 + "Custom": { 78 + "name": "comms_status", 79 + "kind": { 80 + "Enum": [ 81 + "pending", 82 + "processing", 83 + "sent", 84 + "failed" 85 + ] 86 + } 87 + } 88 + } 89 + } 90 + ], 91 + "parameters": { 92 + "Left": [ 93 + "Uuid" 94 + ] 95 + }, 96 + "nullable": [ 97 + false, 98 + false, 99 + false, 100 + true, 101 + false, 102 + false, 103 + false, 104 + false 105 + ] 106 + }, 107 + "hash": "9e772a967607553a0ab800970eaeadcaab7e06bdb79e0c89eb919b1bc1d6fabe" 108 + }
+34
.sqlx/query-a23a390659616779d7dbceaa3b5d5171e70fa25e3b8393e142cebcbff752f0f5.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT private_key_bytes, expires_at, used_at FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "private_key_bytes", 9 + "type_info": "Bytea" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "expires_at", 14 + "type_info": "Timestamptz" 15 + }, 16 + { 17 + "ordinal": 2, 18 + "name": "used_at", 19 + "type_info": "Timestamptz" 20 + } 21 + ], 22 + "parameters": { 23 + "Left": [ 24 + "Text" 25 + ] 26 + }, 27 + "nullable": [ 28 + false, 29 + false, 30 + true 31 + ] 32 + }, 33 + "hash": "a23a390659616779d7dbceaa3b5d5171e70fa25e3b8393e142cebcbff752f0f5" 34 + }
+22
.sqlx/query-a802d7d860f263eace39ce82bb27b633cec7287c1cc177f0e1d47ec6571564d5.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT token FROM account_deletion_requests WHERE did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "token", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + false 19 + ] 20 + }, 21 + "hash": "a802d7d860f263eace39ce82bb27b633cec7287c1cc177f0e1d47ec6571564d5" 22 + }
+60
.sqlx/query-b0fca342e85dea89a06b4fee144cae4825dec587b1387f0fee401458aea2a2e5.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "\n SELECT\n recipient, subject, body,\n comms_type as \"comms_type: CommsType\"\n FROM comms_queue\n WHERE id = $1\n ", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "recipient", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "subject", 14 + "type_info": "Text" 15 + }, 16 + { 17 + "ordinal": 2, 18 + "name": "body", 19 + "type_info": "Text" 20 + }, 21 + { 22 + "ordinal": 3, 23 + "name": "comms_type: CommsType", 24 + "type_info": { 25 + "Custom": { 26 + "name": "comms_type", 27 + "kind": { 28 + "Enum": [ 29 + "welcome", 30 + "email_verification", 31 + "password_reset", 32 + "email_update", 33 + "account_deletion", 34 + "admin_email", 35 + "plc_operation", 36 + "two_factor_code", 37 + "channel_verification", 38 + "passkey_recovery", 39 + "legacy_login_alert", 40 + "migration_verification" 41 + ] 42 + } 43 + } 44 + } 45 + } 46 + ], 47 + "parameters": { 48 + "Left": [ 49 + "Uuid" 50 + ] 51 + }, 52 + "nullable": [ 53 + false, 54 + true, 55 + false, 56 + false 57 + ] 58 + }, 59 + "hash": "b0fca342e85dea89a06b4fee144cae4825dec587b1387f0fee401458aea2a2e5" 60 + }
+22
.sqlx/query-cd3b8098ad4c1056c1d23acd8a6b29f7abfe18ee6f559bd94ab16274b1cfdfee.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT password_reset_code FROM users WHERE email = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "password_reset_code", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + true 19 + ] 20 + }, 21 + "hash": "cd3b8098ad4c1056c1d23acd8a6b29f7abfe18ee6f559bd94ab16274b1cfdfee" 22 + }
+22
.sqlx/query-cda68f9b6c60295a196fc853b70ec5fd51a8ffaa2bac5942c115c99d1cbcafa3.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT COUNT(*) as \"count!\" FROM plc_operation_tokens t JOIN users u ON t.user_id = u.id WHERE u.did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "count!", 9 + "type_info": "Int8" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + null 19 + ] 20 + }, 21 + "hash": "cda68f9b6c60295a196fc853b70ec5fd51a8ffaa2bac5942c115c99d1cbcafa3" 22 + }
+14
.sqlx/query-d529d6dc9858c1da360f0417e94a3b40041b043bae57e95002d4bf5df46a4ab4.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "UPDATE account_deletion_requests SET expires_at = NOW() - INTERVAL '1 hour' WHERE token = $1", 4 + "describe": { 5 + "columns": [], 6 + "parameters": { 7 + "Left": [ 8 + "Text" 9 + ] 10 + }, 11 + "nullable": [] 12 + }, 13 + "hash": "d529d6dc9858c1da360f0417e94a3b40041b043bae57e95002d4bf5df46a4ab4" 14 + }
+22
.sqlx/query-e20cbe2a939d790aaea718b084a80d8ede655ba1cc0fd4346d7e91d6de7d6cf3.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT COUNT(*) FROM comms_queue WHERE user_id = $1 AND comms_type = 'password_reset'", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "count", 9 + "type_info": "Int8" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Uuid" 15 + ] 16 + }, 17 + "nullable": [ 18 + null 19 + ] 20 + }, 21 + "hash": "e20cbe2a939d790aaea718b084a80d8ede655ba1cc0fd4346d7e91d6de7d6cf3" 22 + }
+22
.sqlx/query-e64cd36284d10ab7f3d9f6959975a1a627809f444b0faff7e611d985f31b90e9.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT used_at FROM reserved_signing_keys WHERE public_key_did_key = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "used_at", 9 + "type_info": "Timestamptz" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + true 19 + ] 20 + }, 21 + "hash": "e64cd36284d10ab7f3d9f6959975a1a627809f444b0faff7e611d985f31b90e9" 22 + }
+22
.sqlx/query-f26c13023b47b908ec96da2e6b8bf8b34ca6a2246c20fc96f76f0e95530762a7.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT email FROM users WHERE did = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "email", 9 + "type_info": "Text" 10 + } 11 + ], 12 + "parameters": { 13 + "Left": [ 14 + "Text" 15 + ] 16 + }, 17 + "nullable": [ 18 + true 19 + ] 20 + }, 21 + "hash": "f26c13023b47b908ec96da2e6b8bf8b34ca6a2246c20fc96f76f0e95530762a7" 22 + }
+14
.sqlx/query-f29da3bdfbbc547b339b4cdb059fac26435b0feec65cf1c56f851d1c4d6b1814.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "UPDATE users SET is_admin = TRUE WHERE did = $1", 4 + "describe": { 5 + "columns": [], 6 + "parameters": { 7 + "Left": [ 8 + "Text" 9 + ] 10 + }, 11 + "nullable": [] 12 + }, 13 + "hash": "f29da3bdfbbc547b339b4cdb059fac26435b0feec65cf1c56f851d1c4d6b1814" 14 + }
+28
.sqlx/query-f7af28963099aec12cf1d4f8a9a03699bb3a90f39bc9c4c0f738a37827e8f382.json
··· 1 + { 2 + "db_name": "PostgreSQL", 3 + "query": "SELECT password_reset_code, password_reset_code_expires_at FROM users WHERE email = $1", 4 + "describe": { 5 + "columns": [ 6 + { 7 + "ordinal": 0, 8 + "name": "password_reset_code", 9 + "type_info": "Text" 10 + }, 11 + { 12 + "ordinal": 1, 13 + "name": "password_reset_code_expires_at", 14 + "type_info": "Timestamptz" 15 + } 16 + ], 17 + "parameters": { 18 + "Left": [ 19 + "Text" 20 + ] 21 + }, 22 + "nullable": [ 23 + true, 24 + true 25 + ] 26 + }, 27 + "hash": "f7af28963099aec12cf1d4f8a9a03699bb3a90f39bc9c4c0f738a37827e8f382" 28 + }
+168
Cargo.lock
··· 6314 6314 ] 6315 6315 6316 6316 [[package]] 6317 + name = "tranquil-auth" 6318 + version = "0.1.0" 6319 + dependencies = [ 6320 + "anyhow", 6321 + "base32", 6322 + "base64 0.22.1", 6323 + "bcrypt", 6324 + "chrono", 6325 + "hmac", 6326 + "k256", 6327 + "rand 0.8.5", 6328 + "serde", 6329 + "serde_json", 6330 + "sha2", 6331 + "subtle", 6332 + "thiserror 2.0.17", 6333 + "totp-rs", 6334 + "tranquil-crypto", 6335 + "urlencoding", 6336 + "uuid", 6337 + ] 6338 + 6339 + [[package]] 6340 + name = "tranquil-cache" 6341 + version = "0.1.0" 6342 + dependencies = [ 6343 + "async-trait", 6344 + "base64 0.22.1", 6345 + "redis", 6346 + "thiserror 2.0.17", 6347 + "tracing", 6348 + "tranquil-infra", 6349 + ] 6350 + 6351 + [[package]] 6352 + name = "tranquil-comms" 6353 + version = "0.1.0" 6354 + dependencies = [ 6355 + "async-trait", 6356 + "base64 0.22.1", 6357 + "chrono", 6358 + "reqwest", 6359 + "serde", 6360 + "serde_json", 6361 + "sqlx", 6362 + "thiserror 2.0.17", 6363 + "tokio", 6364 + "urlencoding", 6365 + "uuid", 6366 + ] 6367 + 6368 + [[package]] 6369 + name = "tranquil-crypto" 6370 + version = "0.1.0" 6371 + dependencies = [ 6372 + "aes-gcm", 6373 + "base64 0.22.1", 6374 + "hkdf", 6375 + "hmac", 6376 + "p256 0.13.2", 6377 + "rand 0.8.5", 6378 + "serde", 6379 + "serde_json", 6380 + "sha2", 6381 + "subtle", 6382 + "thiserror 2.0.17", 6383 + ] 6384 + 6385 + [[package]] 6386 + name = "tranquil-infra" 6387 + version = "0.1.0" 6388 + dependencies = [ 6389 + "async-trait", 6390 + "bytes", 6391 + "futures", 6392 + "thiserror 2.0.17", 6393 + "tokio", 6394 + "tracing", 6395 + ] 6396 + 6397 + [[package]] 6398 + name = "tranquil-oauth" 6399 + version = "0.1.0" 6400 + dependencies = [ 6401 + "anyhow", 6402 + "axum", 6403 + "base64 0.22.1", 6404 + "chrono", 6405 + "ed25519-dalek", 6406 + "p256 0.13.2", 6407 + "p384", 6408 + "rand 0.8.5", 6409 + "reqwest", 6410 + "serde", 6411 + "serde_json", 6412 + "sha2", 6413 + "sqlx", 6414 + "tokio", 6415 + "tracing", 6416 + "tranquil-types", 6417 
+ "uuid", 6418 + ] 6419 + 6420 + [[package]] 6317 6421 name = "tranquil-pds" 6318 6422 version = "0.1.0" 6319 6423 dependencies = [ ··· 6380 6484 "tower-layer", 6381 6485 "tracing", 6382 6486 "tracing-subscriber", 6487 + "tranquil-auth", 6488 + "tranquil-cache", 6489 + "tranquil-comms", 6490 + "tranquil-crypto", 6491 + "tranquil-infra", 6492 + "tranquil-oauth", 6493 + "tranquil-repo", 6494 + "tranquil-scopes", 6495 + "tranquil-storage", 6496 + "tranquil-types", 6383 6497 "urlencoding", 6384 6498 "uuid", 6385 6499 "webauthn-rs", 6386 6500 "webauthn-rs-proto", 6387 6501 "wiremock", 6388 6502 "zip", 6503 + ] 6504 + 6505 + [[package]] 6506 + name = "tranquil-repo" 6507 + version = "0.1.0" 6508 + dependencies = [ 6509 + "bytes", 6510 + "cid", 6511 + "jacquard-repo", 6512 + "multihash", 6513 + "sha2", 6514 + "sqlx", 6515 + "tranquil-types", 6516 + ] 6517 + 6518 + [[package]] 6519 + name = "tranquil-scopes" 6520 + version = "0.1.0" 6521 + dependencies = [ 6522 + "axum", 6523 + "futures", 6524 + "reqwest", 6525 + "serde", 6526 + "serde_json", 6527 + "tokio", 6528 + "tracing", 6529 + ] 6530 + 6531 + [[package]] 6532 + name = "tranquil-storage" 6533 + version = "0.1.0" 6534 + dependencies = [ 6535 + "async-trait", 6536 + "aws-config", 6537 + "aws-sdk-s3", 6538 + "bytes", 6539 + "futures", 6540 + "sha2", 6541 + "thiserror 2.0.17", 6542 + "tracing", 6543 + "tranquil-infra", 6544 + ] 6545 + 6546 + [[package]] 6547 + name = "tranquil-types" 6548 + version = "0.1.0" 6549 + dependencies = [ 6550 + "chrono", 6551 + "cid", 6552 + "jacquard", 6553 + "serde", 6554 + "serde_json", 6555 + "sqlx", 6556 + "thiserror 2.0.17", 6389 6557 ] 6390 6558 6391 6559 [[package]]
+89 -65
Cargo.toml
··· 1 - [package] 2 - name = "tranquil-pds" 1 + [workspace] 2 + resolver = "2" 3 + members = [ 4 + "crates/tranquil-types", 5 + "crates/tranquil-infra", 6 + "crates/tranquil-crypto", 7 + "crates/tranquil-storage", 8 + "crates/tranquil-cache", 9 + "crates/tranquil-repo", 10 + "crates/tranquil-scopes", 11 + "crates/tranquil-auth", 12 + "crates/tranquil-oauth", 13 + "crates/tranquil-comms", 14 + "crates/tranquil-pds", 15 + ] 16 + 17 + [workspace.package] 3 18 version = "0.1.0" 4 19 edition = "2024" 5 20 license = "AGPL-3.0-or-later" 6 - [dependencies] 7 - anyhow = "1.0.100" 8 - async-trait = "0.1.89" 9 - aws-config = "1.8.12" 10 - aws-sdk-s3 = "1.118.0" 11 - axum = { version = "0.8.8", features = ["ws", "macros"] } 21 + 22 + [workspace.dependencies] 23 + tranquil-types = { path = "crates/tranquil-types" } 24 + tranquil-infra = { path = "crates/tranquil-infra" } 25 + tranquil-crypto = { path = "crates/tranquil-crypto" } 26 + tranquil-storage = { path = "crates/tranquil-storage" } 27 + tranquil-cache = { path = "crates/tranquil-cache" } 28 + tranquil-repo = { path = "crates/tranquil-repo" } 29 + tranquil-scopes = { path = "crates/tranquil-scopes" } 30 + tranquil-auth = { path = "crates/tranquil-auth" } 31 + tranquil-oauth = { path = "crates/tranquil-oauth" } 32 + tranquil-comms = { path = "crates/tranquil-comms" } 33 + 34 + aes-gcm = "0.10" 35 + anyhow = "1.0" 36 + async-trait = "0.1" 37 + aws-config = "1.8" 38 + aws-sdk-s3 = "1.118" 39 + axum = { version = "0.8", features = ["ws", "macros"] } 12 40 base32 = "0.5" 13 - base64 = "0.22.1" 14 - bcrypt = "0.17.1" 15 - bytes = "1.11.0" 16 - chrono = { version = "0.4.42", features = ["serde"] } 17 - cid = "0.11.1" 18 - dotenvy = "0.15.7" 19 - futures = "0.3.30" 41 + base64 = "0.22" 42 + bcrypt = "0.17" 43 + bs58 = "0.5" 44 + bytes = "1.11" 45 + chrono = { version = "0.4", features = ["serde"] } 46 + cid = "0.11" 47 + dotenvy = "0.15" 48 + ed25519-dalek = { version = "2.1", features = ["pkcs8"] } 49 + futures = "0.3" 50 + 
futures-util = "0.3" 20 51 governor = "0.10" 21 52 hex = "0.4" 53 + hickory-resolver = { version = "0.24", features = ["tokio-runtime"] } 22 54 hkdf = "0.12" 23 55 hmac = "0.12" 56 + http = "1.4" 57 + image = { version = "0.25", default-features = false, features = ["jpeg", "png", "gif", "webp"] } 24 58 infer = "0.19" 25 - aes-gcm = "0.10" 26 - jacquard = { version = "0.9.5", default-features = false, features = ["api", "api_bluesky", "api_full", "derive", "dns"] } 27 - jacquard-axum = "0.9.6" 28 - jacquard-repo = "0.9.6" 29 - jsonwebtoken = { version = "10.2.0", features = ["rust_crypto"] } 30 - k256 = { version = "0.13.3", features = ["ecdsa", "pem", "pkcs8"] } 31 - multibase = "0.9.1" 32 - multihash = "0.19.3" 33 - rand = "0.8.5" 59 + ipld-core = "0.4" 60 + iroh-car = "0.5" 61 + jacquard = { version = "0.9", default-features = false, features = ["api", "api_bluesky", "api_full", "derive", "dns"] } 62 + jacquard-axum = "0.9" 63 + jacquard-repo = "0.9" 64 + jsonwebtoken = { version = "10.2", features = ["rust_crypto"] } 65 + k256 = { version = "0.13", features = ["ecdsa", "pem", "pkcs8"] } 66 + metrics = "0.24" 67 + metrics-exporter-prometheus = { version = "0.16", default-features = false, features = ["http-listener"] } 68 + multibase = "0.9" 69 + multihash = "0.19" 70 + p256 = { version = "0.13", features = ["ecdsa"] } 71 + p384 = { version = "0.13", features = ["ecdsa"] } 72 + rand = "0.8" 73 + redis = { version = "1.0", features = ["tokio-comp", "connection-manager"] } 34 74 regex = "1" 35 - reqwest = { version = "0.12.28", features = ["json"] } 36 - serde = { version = "1.0.228", features = ["derive"] } 37 - serde_bytes = "0.11.14" 38 - serde_ipld_dagcbor = "0.6.4" 39 - ipld-core = "0.4.2" 40 - serde_json = "1.0.146" 75 + reqwest = { version = "0.12", features = ["json"] } 76 + serde = { version = "1.0", features = ["derive"] } 77 + serde_bytes = "0.11" 78 + serde_ipld_dagcbor = "0.6" 79 + serde_json = "1.0" 41 80 serde_urlencoded = "0.7" 42 - sha2 = "0.10.9" 
81 + sha2 = "0.10" 82 + sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json"] } 43 83 subtle = "2.5" 44 - p256 = { version = "0.13", features = ["ecdsa"] } 45 - p384 = { version = "0.13", features = ["ecdsa"] } 46 - ed25519-dalek = { version = "2.1", features = ["pkcs8"] } 47 - sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json"] } 48 - thiserror = "2.0.17" 49 - tokio = { version = "1.48.0", features = ["macros", "rt-multi-thread", "time", "signal", "process"] } 50 - tracing = "0.1.43" 51 - tracing-subscriber = "0.3.22" 52 - tokio-tungstenite = { version = "0.28.0", features = ["native-tls"] } 84 + thiserror = "2.0" 85 + tokio = { version = "1.48", features = ["macros", "rt-multi-thread", "time", "signal", "process"] } 86 + tokio-tungstenite = { version = "0.28", features = ["native-tls"] } 87 + totp-rs = { version = "5", features = ["qr"] } 88 + tower = "0.5" 89 + tower-http = { version = "0.6", features = ["fs", "cors"] } 90 + tower-layer = "0.3" 91 + tracing = "0.1" 92 + tracing-subscriber = "0.3" 53 93 urlencoding = "2.1" 54 - uuid = { version = "1.19.0", features = ["v4", "v5", "fast-rng"] } 55 - iroh-car = "0.5.1" 56 - image = { version = "0.25.9", default-features = false, features = ["jpeg", "png", "gif", "webp"] } 57 - redis = { version = "1.0.1", features = ["tokio-comp", "connection-manager"] } 58 - tower-http = { version = "0.6.8", features = ["fs", "cors"] } 59 - hickory-resolver = { version = "0.24", features = ["tokio-runtime"] } 60 - metrics = "0.24.3" 61 - metrics-exporter-prometheus = { version = "0.16", default-features = false, features = ["http-listener"] } 62 - bs58 = "0.5.1" 63 - totp-rs = { version = "5", features = ["qr"] } 64 - webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-user-presence-only-security-keys"] } 65 - webauthn-rs-proto = "0.5.4" 66 - zip = { version = "7.0.0", default-features = false, 
features = ["deflate"] } 67 - tower = "0.5.2" 68 - tower-layer = "0.3.3" 69 - futures-util = "0.3.31" 70 - http = "1.4.0" 71 - [features] 72 - external-infra = [] 73 - [dev-dependencies] 94 + uuid = { version = "1.19", features = ["v4", "v5", "fast-rng"] } 95 + webauthn-rs = { version = "0.5", features = ["danger-allow-state-serialisation", "danger-user-presence-only-security-keys"] } 96 + webauthn-rs-proto = "0.5" 97 + zip = { version = "7.0", default-features = false, features = ["deflate"] } 98 + 74 99 ciborium = "0.2" 75 - ctor = "0.6.3" 76 - testcontainers = "0.26.2" 77 - testcontainers-modules = { version = "0.14.0", features = ["postgres"] } 78 - wiremock = "0.6.5" 79 - # urlencoding is also in dependencies, but tests use it directly 100 + ctor = "0.6" 101 + testcontainers = "0.26" 102 + testcontainers-modules = { version = "0.14", features = ["postgres"] } 103 + wiremock = "0.6"
+25
crates/tranquil-auth/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-auth" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-crypto = { workspace = true } 9 + 10 + anyhow = { workspace = true } 11 + base32 = { workspace = true } 12 + base64 = { workspace = true } 13 + bcrypt = { workspace = true } 14 + chrono = { workspace = true } 15 + hmac = { workspace = true } 16 + k256 = { workspace = true } 17 + rand = { workspace = true } 18 + serde = { workspace = true } 19 + serde_json = { workspace = true } 20 + sha2 = { workspace = true } 21 + subtle = { workspace = true } 22 + thiserror = { workspace = true } 23 + totp-rs = { workspace = true } 24 + urlencoding = { workspace = true } 25 + uuid = { workspace = true }
+29
crates/tranquil-auth/src/lib.rs
··· 1 + mod token; 2 + mod totp; 3 + mod types; 4 + mod verify; 5 + 6 + pub use token::{ 7 + SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, SCOPE_REFRESH, TOKEN_TYPE_ACCESS, 8 + TOKEN_TYPE_REFRESH, TOKEN_TYPE_SERVICE, create_access_token, create_access_token_hs256, 9 + create_access_token_hs256_with_metadata, create_access_token_with_delegation, 10 + create_access_token_with_metadata, create_access_token_with_scope_metadata, 11 + create_refresh_token, create_refresh_token_hs256, create_refresh_token_hs256_with_metadata, 12 + create_refresh_token_with_metadata, create_service_token, create_service_token_hs256, 13 + }; 14 + 15 + pub use totp::{ 16 + decrypt_totp_secret, encrypt_totp_secret, generate_backup_codes, generate_qr_png_base64, 17 + generate_totp_secret, generate_totp_uri, hash_backup_code, is_backup_code_format, 18 + verify_backup_code, verify_totp_code, 19 + }; 20 + 21 + pub use types::{ 22 + ActClaim, Claims, Header, TokenData, TokenVerifyError, TokenWithMetadata, UnsafeClaims, 23 + }; 24 + 25 + pub use verify::{ 26 + get_algorithm_from_token, get_did_from_token, get_jti_from_token, verify_access_token, 27 + verify_access_token_hs256, verify_access_token_typed, verify_refresh_token, 28 + verify_refresh_token_hs256, verify_token, 29 + };
+63
crates/tranquil-auth/src/types.rs
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fmt;

/// JWT `act` (actor) claim: identifies the party acting on behalf of the
/// token subject in delegated-access tokens.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActClaim {
    /// Subject identifier of the acting party.
    // NOTE(review): presumably a DID, matching `Claims::sub` — confirm in token.rs.
    pub sub: String,
}

/// Full claim set for tokens minted by this crate.
///
/// Field names follow the registered JWT claim names (RFC 7519); optional
/// fields are omitted from the serialized payload entirely rather than
/// emitted as `null`.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    /// Issuer.
    pub iss: String,
    /// Subject (the account the token is for).
    pub sub: String,
    /// Audience.
    pub aud: String,
    /// Expiry, seconds since the Unix epoch.
    pub exp: usize,
    /// Issued-at, seconds since the Unix epoch.
    pub iat: usize,
    /// Optional scope string (e.g. access vs. app-password scopes).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scope: Option<String>,
    /// Optional lexicon method restriction for service tokens.
    // NOTE(review): `lxm` semantics inferred from the atproto service-auth
    // convention — confirm against verify.rs usage.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub lxm: Option<String>,
    /// Unique token id, used for revocation lookups.
    pub jti: String,
    /// Optional actor claim for delegated tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub act: Option<ActClaim>,
}

/// Decoded JOSE header of a token (algorithm + type only).
#[derive(Debug, Serialize, Deserialize)]
pub struct Header {
    /// Signing algorithm identifier (e.g. "HS256").
    pub alg: String,
    /// Token type, normally "JWT".
    pub typ: String,
}

/// Minimal claim set decoded WITHOUT signature verification.
///
/// NOTE(review): the "Unsafe" prefix suggests these values must never be
/// trusted for authorization decisions — only for routing/key lookup before
/// real verification. Confirm in verify.rs.
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsafeClaims {
    pub iss: String,
    /// Absent on some token shapes, hence optional.
    pub sub: Option<String>,
}

/// Generic wrapper around a decoded claim set, mirroring the shape returned
/// by common JWT libraries.
pub struct TokenData<T> {
    pub claims: T,
}

/// A freshly minted token together with the metadata the caller needs to
/// persist it (its id and expiry) without re-parsing the JWT.
pub struct TokenWithMetadata {
    pub token: String,
    pub jti: String,
    pub expires_at: DateTime<Utc>,
}

/// Why token verification failed. Deliberately coarse: callers only need to
/// distinguish "expired, try refresh" from "invalid, reject".
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenVerifyError {
    Expired,
    Invalid,
}

impl fmt::Display for TokenVerifyError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Expired => write!(f, "Token expired"),
            Self::Invalid => write!(f, "Token invalid"),
        }
    }
}

impl std::error::Error for TokenVerifyError {}
+14
crates/tranquil-cache/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-cache" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-infra = { workspace = true } 9 + 10 + async-trait = { workspace = true } 11 + base64 = { workspace = true } 12 + redis = { workspace = true } 13 + thiserror = { workspace = true } 14 + tracing = { workspace = true }
+18
crates/tranquil-comms/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-comms" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + async-trait = { workspace = true } 9 + base64 = { workspace = true } 10 + chrono = { workspace = true } 11 + reqwest = { workspace = true } 12 + serde = { workspace = true } 13 + serde_json = { workspace = true } 14 + sqlx = { workspace = true } 15 + thiserror = { workspace = true } 16 + tokio = { workspace = true } 17 + urlencoding = { workspace = true } 18 + uuid = { workspace = true }
+13
crates/tranquil-comms/src/lib.rs
··· 1 + mod locale; 2 + mod sender; 3 + mod types; 4 + 5 + pub use locale::{ 6 + DEFAULT_LOCALE, NotificationStrings, VALID_LOCALES, format_message, get_strings, 7 + validate_locale, 8 + }; 9 + pub use sender::{ 10 + CommsSender, DiscordSender, EmailSender, SendError, SignalSender, TelegramSender, 11 + is_valid_phone_number, mime_encode_header, sanitize_header_value, 12 + }; 13 + pub use types::{CommsChannel, CommsStatus, CommsType, NewComms, QueuedComms};
+18
crates/tranquil-crypto/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-crypto" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + aes-gcm = { workspace = true } 9 + base64 = { workspace = true } 10 + hkdf = { workspace = true } 11 + hmac = { workspace = true } 12 + p256 = { workspace = true } 13 + rand = { workspace = true } 14 + serde = { workspace = true } 15 + serde_json = { workspace = true } 16 + sha2 = { workspace = true } 17 + subtle = { workspace = true } 18 + thiserror = { workspace = true }
+78
crates/tranquil-crypto/src/encryption.rs
··· 1 + #[allow(deprecated)] 2 + use aes_gcm::{Aes256Gcm, KeyInit, Nonce, aead::Aead}; 3 + use hkdf::Hkdf; 4 + use sha2::Sha256; 5 + 6 + use crate::CryptoError; 7 + 8 + pub fn derive_key(master_key: &[u8], context: &[u8]) -> Result<[u8; 32], CryptoError> { 9 + let hk = Hkdf::<Sha256>::new(None, master_key); 10 + let mut output = [0u8; 32]; 11 + hk.expand(context, &mut output) 12 + .map_err(|e| CryptoError::KeyDerivationFailed(format!("{}", e)))?; 13 + Ok(output) 14 + } 15 + 16 + pub fn encrypt_with_key(key: &[u8; 32], plaintext: &[u8]) -> Result<Vec<u8>, CryptoError> { 17 + use rand::RngCore; 18 + 19 + let cipher = Aes256Gcm::new_from_slice(key) 20 + .map_err(|e| CryptoError::EncryptionFailed(format!("Failed to create cipher: {}", e)))?; 21 + 22 + let mut nonce_bytes = [0u8; 12]; 23 + rand::thread_rng().fill_bytes(&mut nonce_bytes); 24 + 25 + #[allow(deprecated)] 26 + let nonce = Nonce::from_slice(&nonce_bytes); 27 + 28 + let ciphertext = cipher 29 + .encrypt(nonce, plaintext) 30 + .map_err(|e| CryptoError::EncryptionFailed(format!("{}", e)))?; 31 + 32 + let mut result = Vec::with_capacity(12 + ciphertext.len()); 33 + result.extend_from_slice(&nonce_bytes); 34 + result.extend_from_slice(&ciphertext); 35 + 36 + Ok(result) 37 + } 38 + 39 + pub fn decrypt_with_key(key: &[u8; 32], encrypted: &[u8]) -> Result<Vec<u8>, CryptoError> { 40 + if encrypted.len() < 12 { 41 + return Err(CryptoError::DecryptionFailed( 42 + "Encrypted data too short".to_string(), 43 + )); 44 + } 45 + 46 + let cipher = Aes256Gcm::new_from_slice(key) 47 + .map_err(|e| CryptoError::DecryptionFailed(format!("Failed to create cipher: {}", e)))?; 48 + 49 + #[allow(deprecated)] 50 + let nonce = Nonce::from_slice(&encrypted[..12]); 51 + let ciphertext = &encrypted[12..]; 52 + 53 + cipher 54 + .decrypt(nonce, ciphertext) 55 + .map_err(|e| CryptoError::DecryptionFailed(format!("{}", e))) 56 + } 57 + 58 + #[cfg(test)] 59 + mod tests { 60 + use super::*; 61 + 62 + #[test] 63 + fn test_encrypt_decrypt() { 64 
+ let key = [0u8; 32]; 65 + let plaintext = b"hello world"; 66 + let encrypted = encrypt_with_key(&key, plaintext).unwrap(); 67 + let decrypted = decrypt_with_key(&key, &encrypted).unwrap(); 68 + assert_eq!(plaintext.as_slice(), decrypted.as_slice()); 69 + } 70 + 71 + #[test] 72 + fn test_derive_key() { 73 + let master = b"master-key-for-testing"; 74 + let key1 = derive_key(master, b"context-1").unwrap(); 75 + let key2 = derive_key(master, b"context-2").unwrap(); 76 + assert_ne!(key1, key2); 77 + } 78 + }
+19
crates/tranquil-crypto/src/lib.rs
mod encryption;
mod jwk;
mod signing;

pub use encryption::{decrypt_with_key, derive_key, encrypt_with_key};
pub use jwk::{Jwk, JwkSet, create_jwk_set};
pub use signing::{DeviceCookieSigner, SigningKeyPair};

/// Error type shared by all primitives in this crate.
///
/// Derives `Clone` so errors can be duplicated when propagated through
/// shared call paths; each variant carries a human-readable detail string
/// that is embedded into the `Display` output via thiserror.
#[derive(Debug, Clone, thiserror::Error)]
pub enum CryptoError {
    /// Symmetric encryption failed (cipher construction or sealing).
    #[error("Encryption failed: {0}")]
    EncryptionFailed(String),
    /// Symmetric decryption failed (truncated input, cipher construction,
    /// or authentication-tag mismatch).
    #[error("Decryption failed: {0}")]
    DecryptionFailed(String),
    /// A key could not be constructed from the provided material.
    #[error("Invalid key: {0}")]
    InvalidKey(String),
    /// HKDF expansion failed.
    #[error("Key derivation failed: {0}")]
    KeyDerivationFailed(String),
}
+150
crates/tranquil-crypto/src/signing.rs
··· 1 + use base64::{Engine, engine::general_purpose::URL_SAFE_NO_PAD}; 2 + use hmac::Mac; 3 + use p256::ecdsa::SigningKey; 4 + use sha2::{Digest, Sha256}; 5 + use subtle::ConstantTimeEq; 6 + 7 + use crate::CryptoError; 8 + 9 + type HmacSha256 = hmac::Hmac<Sha256>; 10 + 11 + pub struct SigningKeyPair { 12 + #[allow(dead_code)] 13 + signing_key: SigningKey, 14 + pub key_id: String, 15 + pub x: String, 16 + pub y: String, 17 + } 18 + 19 + impl SigningKeyPair { 20 + pub fn from_seed(seed: &[u8]) -> Result<Self, CryptoError> { 21 + let mut hasher = Sha256::new(); 22 + hasher.update(b"oauth-signing-key-derivation:"); 23 + hasher.update(seed); 24 + let hash = hasher.finalize(); 25 + 26 + let signing_key = SigningKey::from_slice(&hash) 27 + .map_err(|e| CryptoError::InvalidKey(format!("Failed to create signing key: {}", e)))?; 28 + 29 + let verifying_key = signing_key.verifying_key(); 30 + let point = verifying_key.to_encoded_point(false); 31 + 32 + let x = URL_SAFE_NO_PAD.encode( 33 + point 34 + .x() 35 + .ok_or_else(|| CryptoError::InvalidKey("Missing X coordinate".to_string()))?, 36 + ); 37 + let y = URL_SAFE_NO_PAD.encode( 38 + point 39 + .y() 40 + .ok_or_else(|| CryptoError::InvalidKey("Missing Y coordinate".to_string()))?, 41 + ); 42 + 43 + let mut kid_hasher = Sha256::new(); 44 + kid_hasher.update(x.as_bytes()); 45 + kid_hasher.update(y.as_bytes()); 46 + let kid_hash = kid_hasher.finalize(); 47 + let key_id = URL_SAFE_NO_PAD.encode(&kid_hash[..8]); 48 + 49 + Ok(Self { 50 + signing_key, 51 + key_id, 52 + x, 53 + y, 54 + }) 55 + } 56 + } 57 + 58 + pub struct DeviceCookieSigner { 59 + key: [u8; 32], 60 + } 61 + 62 + impl DeviceCookieSigner { 63 + pub fn new(key: [u8; 32]) -> Self { 64 + Self { key } 65 + } 66 + 67 + pub fn sign(&self, device_id: &str) -> String { 68 + let timestamp = std::time::SystemTime::now() 69 + .duration_since(std::time::UNIX_EPOCH) 70 + .unwrap_or_default() 71 + .as_secs(); 72 + 73 + let message = format!("{}:{}", device_id, timestamp); 74 + 
let mut mac = 75 + <HmacSha256 as Mac>::new_from_slice(&self.key).expect("HMAC key size is valid"); 76 + mac.update(message.as_bytes()); 77 + let signature = URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes()); 78 + 79 + format!("{}.{}.{}", device_id, timestamp, signature) 80 + } 81 + 82 + pub fn verify(&self, cookie_value: &str, max_age_days: u64) -> Option<String> { 83 + let parts: Vec<&str> = cookie_value.splitn(3, '.').collect(); 84 + if parts.len() != 3 { 85 + return None; 86 + } 87 + 88 + let device_id = parts[0]; 89 + let timestamp_str = parts[1]; 90 + let provided_signature = parts[2]; 91 + 92 + let timestamp: u64 = timestamp_str.parse().ok()?; 93 + 94 + let now = std::time::SystemTime::now() 95 + .duration_since(std::time::UNIX_EPOCH) 96 + .unwrap_or_default() 97 + .as_secs(); 98 + 99 + if now.saturating_sub(timestamp) > max_age_days * 24 * 60 * 60 { 100 + return None; 101 + } 102 + 103 + let message = format!("{}:{}", device_id, timestamp); 104 + let mut mac = 105 + <HmacSha256 as Mac>::new_from_slice(&self.key).expect("HMAC key size is valid"); 106 + mac.update(message.as_bytes()); 107 + let expected_signature = URL_SAFE_NO_PAD.encode(mac.finalize().into_bytes()); 108 + 109 + if provided_signature 110 + .as_bytes() 111 + .ct_eq(expected_signature.as_bytes()) 112 + .into() 113 + { 114 + Some(device_id.to_string()) 115 + } else { 116 + None 117 + } 118 + } 119 + } 120 + 121 + #[cfg(test)] 122 + mod tests { 123 + use super::*; 124 + 125 + #[test] 126 + fn test_signing_key_pair() { 127 + let seed = b"test-seed-for-signing-key"; 128 + let kp = SigningKeyPair::from_seed(seed).unwrap(); 129 + assert!(!kp.key_id.is_empty()); 130 + assert!(!kp.x.is_empty()); 131 + assert!(!kp.y.is_empty()); 132 + } 133 + 134 + #[test] 135 + fn test_device_cookie_signer() { 136 + let key = [0u8; 32]; 137 + let signer = DeviceCookieSigner::new(key); 138 + let signed = signer.sign("device-123"); 139 + let verified = signer.verify(&signed, 400); 140 + assert_eq!(verified, 
Some("device-123".to_string())); 141 + } 142 + 143 + #[test] 144 + fn test_device_cookie_invalid() { 145 + let key = [0u8; 32]; 146 + let signer = DeviceCookieSigner::new(key); 147 + assert!(signer.verify("invalid", 400).is_none()); 148 + assert!(signer.verify("a.b.c", 400).is_none()); 149 + } 150 + }
+13
crates/tranquil-infra/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-infra" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + async-trait = { workspace = true } 9 + bytes = { workspace = true } 10 + futures = { workspace = true } 11 + thiserror = { workspace = true } 12 + tokio = { workspace = true } 13 + tracing = { workspace = true }
+58
crates/tranquil-infra/src/lib.rs
use async_trait::async_trait;
use bytes::Bytes;
use futures::Stream;
use std::pin::Pin;
use std::time::Duration;

/// Errors surfaced by [`BlobStorage`] implementations.
#[derive(Debug, thiserror::Error)]
pub enum StorageError {
    /// Local filesystem / stream I/O failure.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// S3-backed implementation failure, stringified by the implementor.
    #[error("S3 error: {0}")]
    S3(String),
    /// Anything else an implementation needs to report.
    #[error("Other: {0}")]
    Other(String),
}

/// Result of a streaming upload: digest and byte count computed while the
/// stream was consumed.
pub struct StreamUploadResult {
    /// SHA-256 of the uploaded bytes.
    pub sha256_hash: [u8; 32],
    /// Total number of bytes uploaded.
    pub size: u64,
}

/// Abstract key/value blob store (implemented elsewhere over S3, disk, etc.).
#[async_trait]
pub trait BlobStorage: Send + Sync {
    /// Stores `data` under `key`, overwriting any existing value.
    async fn put(&self, key: &str, data: &[u8]) -> Result<(), StorageError>;
    /// Same as [`put`](Self::put) but takes ownership of a `Bytes` buffer,
    /// letting implementations avoid a copy.
    async fn put_bytes(&self, key: &str, data: Bytes) -> Result<(), StorageError>;
    /// Fetches the full value stored under `key`.
    async fn get(&self, key: &str) -> Result<Vec<u8>, StorageError>;
    /// Fetches the full value as `Bytes`.
    async fn get_bytes(&self, key: &str) -> Result<Bytes, StorageError>;
    /// Fetches a prefix of the value under `key`.
    // NOTE(review): presumably the first `size` bytes (e.g. for sniffing a
    // mime type) — confirm against implementations.
    async fn get_head(&self, key: &str, size: usize) -> Result<Bytes, StorageError>;
    /// Removes the value stored under `key`.
    async fn delete(&self, key: &str) -> Result<(), StorageError>;
    /// Consumes `stream` into storage under `key`, returning the hash and
    /// size computed during upload.
    async fn put_stream(
        &self,
        key: &str,
        stream: Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>>,
    ) -> Result<StreamUploadResult, StorageError>;
    /// Copies the value at `src_key` to `dst_key` within the same store.
    async fn copy(&self, src_key: &str, dst_key: &str) -> Result<(), StorageError>;
}

/// Errors surfaced by [`Cache`] implementations.
#[derive(Debug, thiserror::Error)]
pub enum CacheError {
    #[error("Cache connection error: {0}")]
    Connection(String),
    #[error("Serialization error: {0}")]
    Serialization(String),
}

/// Abstract TTL'd string/byte cache (implemented elsewhere over Redis/Valkey
/// or as a no-op).
#[async_trait]
pub trait Cache: Send + Sync {
    /// Looks up `key`. `None` on a miss.
    // NOTE(review): the infallible signature suggests backend errors are
    // also reported as `None` — confirm against implementations.
    async fn get(&self, key: &str) -> Option<String>;
    /// Stores `value` under `key` with the given time-to-live.
    async fn set(&self, key: &str, value: &str, ttl: Duration) -> Result<(), CacheError>;
    /// Removes `key` from the cache.
    async fn delete(&self, key: &str) -> Result<(), CacheError>;
    /// Binary variant of [`get`](Self::get).
    async fn get_bytes(&self, key: &str) -> Option<Vec<u8>>;
    /// Binary variant of [`set`](Self::set).
    async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError>;
}

/// Rate limiter shared across instances (e.g. Redis-backed).
#[async_trait]
pub trait DistributedRateLimiter: Send + Sync {
    /// Returns whether the action identified by `key` is allowed given a
    /// budget of `limit` events per `window_ms` milliseconds.
    async fn check_rate_limit(&self, key: &str, limit: u32, window_ms: u64) -> bool;
}
+25
crates/tranquil-oauth/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-oauth" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-types = { workspace = true } 9 + 10 + anyhow = { workspace = true } 11 + sqlx = { workspace = true } 12 + axum = { workspace = true } 13 + base64 = { workspace = true } 14 + chrono = { workspace = true } 15 + ed25519-dalek = { workspace = true } 16 + p256 = { workspace = true } 17 + p384 = { workspace = true } 18 + rand = { workspace = true } 19 + reqwest = { workspace = true } 20 + serde = { workspace = true } 21 + serde_json = { workspace = true } 22 + sha2 = { workspace = true } 23 + tokio = { workspace = true } 24 + tracing = { workspace = true } 25 + uuid = { workspace = true }
+17
crates/tranquil-oauth/src/lib.rs
··· 1 + mod client; 2 + mod dpop; 3 + mod error; 4 + mod types; 5 + 6 + pub use client::{ClientMetadata, ClientMetadataCache, verify_client_auth}; 7 + pub use dpop::{ 8 + DPoPJwk, DPoPProofHeader, DPoPProofPayload, DPoPVerifier, DPoPVerifyResult, 9 + compute_access_token_hash, compute_jwk_thumbprint, 10 + }; 11 + pub use error::OAuthError; 12 + pub use types::{ 13 + AuthFlowState, AuthorizationRequestParameters, AuthorizationServerMetadata, 14 + AuthorizedClientData, ClientAuth, Code, DPoPClaims, DeviceData, DeviceId, JwkPublicKey, Jwks, 15 + OAuthClientMetadata, ParResponse, ProtectedResourceMetadata, RefreshToken, RefreshTokenState, 16 + RequestData, RequestId, SessionId, TokenData, TokenId, TokenRequest, TokenResponse, 17 + };
+1
crates/tranquil-pds/.sqlx
··· 1 + ../../.sqlx
+92
crates/tranquil-pds/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-pds" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-types = { workspace = true } 9 + tranquil-infra = { workspace = true } 10 + tranquil-crypto = { workspace = true } 11 + tranquil-storage = { workspace = true } 12 + tranquil-cache = { workspace = true } 13 + tranquil-repo = { workspace = true } 14 + tranquil-scopes = { workspace = true } 15 + tranquil-auth = { workspace = true } 16 + tranquil-oauth = { workspace = true } 17 + tranquil-comms = { workspace = true } 18 + 19 + aes-gcm = { workspace = true } 20 + anyhow = { workspace = true } 21 + async-trait = { workspace = true } 22 + aws-config = { workspace = true } 23 + aws-sdk-s3 = { workspace = true } 24 + axum = { workspace = true } 25 + base32 = { workspace = true } 26 + base64 = { workspace = true } 27 + bcrypt = { workspace = true } 28 + bs58 = { workspace = true } 29 + bytes = { workspace = true } 30 + chrono = { workspace = true } 31 + cid = { workspace = true } 32 + dotenvy = { workspace = true } 33 + ed25519-dalek = { workspace = true } 34 + futures = { workspace = true } 35 + futures-util = { workspace = true } 36 + governor = { workspace = true } 37 + hex = { workspace = true } 38 + hickory-resolver = { workspace = true } 39 + hkdf = { workspace = true } 40 + hmac = { workspace = true } 41 + http = { workspace = true } 42 + image = { workspace = true } 43 + infer = { workspace = true } 44 + ipld-core = { workspace = true } 45 + iroh-car = { workspace = true } 46 + jacquard = { workspace = true } 47 + jacquard-axum = { workspace = true } 48 + jacquard-repo = { workspace = true } 49 + jsonwebtoken = { workspace = true } 50 + k256 = { workspace = true } 51 + metrics = { workspace = true } 52 + metrics-exporter-prometheus = { workspace = true } 53 + multibase = { workspace = true } 54 + multihash = { workspace = true } 55 + p256 = { workspace = true } 56 + p384 = { workspace = true } 57 + rand 
= { workspace = true } 58 + redis = { workspace = true } 59 + regex = { workspace = true } 60 + reqwest = { workspace = true } 61 + serde = { workspace = true } 62 + serde_bytes = { workspace = true } 63 + serde_ipld_dagcbor = { workspace = true } 64 + serde_json = { workspace = true } 65 + serde_urlencoded = { workspace = true } 66 + sha2 = { workspace = true } 67 + sqlx = { workspace = true } 68 + subtle = { workspace = true } 69 + thiserror = { workspace = true } 70 + tokio = { workspace = true } 71 + tokio-tungstenite = { workspace = true } 72 + totp-rs = { workspace = true } 73 + tower = { workspace = true } 74 + tower-http = { workspace = true } 75 + tower-layer = { workspace = true } 76 + tracing = { workspace = true } 77 + tracing-subscriber = { workspace = true } 78 + urlencoding = { workspace = true } 79 + uuid = { workspace = true } 80 + webauthn-rs = { workspace = true } 81 + webauthn-rs-proto = { workspace = true } 82 + zip = { workspace = true } 83 + 84 + [features] 85 + external-infra = [] 86 + 87 + [dev-dependencies] 88 + ciborium = { workspace = true } 89 + ctor = { workspace = true } 90 + testcontainers = { workspace = true } 91 + testcontainers-modules = { workspace = true } 92 + wiremock = { workspace = true }
+1
crates/tranquil-pds/migrations
··· 1 + ../../migrations
+4
crates/tranquil-pds/src/cache/mod.rs
··· 1 + pub use tranquil_cache::{ 2 + Cache, CacheError, DistributedRateLimiter, NoOpCache, NoOpRateLimiter, RedisRateLimiter, 3 + ValkeyCache, create_cache, 4 + };
+15
crates/tranquil-pds/src/comms/mod.rs
··· 1 + mod service; 2 + 3 + pub use tranquil_comms::{ 4 + CommsChannel, CommsSender, CommsStatus, CommsType, DEFAULT_LOCALE, DiscordSender, EmailSender, 5 + NewComms, NotificationStrings, QueuedComms, SendError, SignalSender, TelegramSender, 6 + VALID_LOCALES, format_message, get_strings, is_valid_phone_number, mime_encode_header, 7 + sanitize_header_value, validate_locale, 8 + }; 9 + 10 + pub use service::{ 11 + CommsService, channel_display_name, enqueue_2fa_code, enqueue_account_deletion, enqueue_comms, 12 + enqueue_email_update, enqueue_email_update_token, enqueue_migration_verification, 13 + enqueue_passkey_recovery, enqueue_password_reset, enqueue_plc_operation, 14 + enqueue_signup_verification, enqueue_welcome, queue_legacy_login_notification, 15 + };
+1
crates/tranquil-pds/src/oauth/jwks.rs
··· 1 + pub use tranquil_crypto::{Jwk, JwkSet, create_jwk_set};
+20
crates/tranquil-pds/src/oauth/mod.rs
··· 1 + pub mod db; 2 + pub mod endpoints; 3 + pub mod jwks; 4 + pub mod scopes; 5 + pub mod verify; 6 + 7 + pub use tranquil_oauth::{ 8 + AuthFlowState, AuthorizationRequestParameters, AuthorizationServerMetadata, 9 + AuthorizedClientData, ClientAuth, ClientMetadata, ClientMetadataCache, Code, DPoPClaims, 10 + DPoPJwk, DPoPProofHeader, DPoPProofPayload, DPoPVerifier, DPoPVerifyResult, DeviceData, 11 + DeviceId, JwkPublicKey, Jwks, OAuthClientMetadata, OAuthError, ParResponse, 12 + ProtectedResourceMetadata, RefreshToken, RefreshTokenState, RequestData, RequestId, SessionId, 13 + TokenData, TokenId, TokenRequest, TokenResponse, compute_access_token_hash, 14 + compute_jwk_thumbprint, verify_client_auth, 15 + }; 16 + 17 + pub use scopes::{AccountAction, AccountAttr, RepoAction, ScopeError, ScopePermissions}; 18 + pub use verify::{ 19 + OAuthAuthError, OAuthUser, VerifyResult, generate_dpop_nonce, verify_oauth_access_token, 20 + };
+6
crates/tranquil-pds/src/oauth/scopes/mod.rs
··· 1 + pub use tranquil_scopes::{ 2 + AccountAction, AccountAttr, AccountScope, BlobScope, IdentityAttr, IdentityScope, IncludeScope, 3 + ParsedScope, RepoAction, RepoScope, RpcScope, SCOPE_DEFINITIONS, ScopeCategory, 4 + ScopeDefinition, ScopeError, ScopePermissions, expand_include_scopes, format_scope_for_display, 5 + get_required_scopes, get_scope_definition, is_valid_scope, parse_scope, parse_scope_string, 6 + };
+5
crates/tranquil-pds/src/repo/mod.rs
//! Block-store re-exports for the PDS crate, backed by `tranquil-repo`.
pub use tranquil_repo::{PostgresBlockStore, TrackingBlockStore};

// Path shim: re-exposes TrackingBlockStore at `repo::tracking::…`,
// presumably so existing import paths from before the crate split keep
// compiling — confirm before removing.
pub mod tracking {
    pub use tranquil_repo::TrackingBlockStore;
}
+3
crates/tranquil-pds/src/storage/mod.rs
··· 1 + pub use tranquil_storage::{ 2 + BackupStorage, BlobStorage, S3BlobStorage, StorageError, StreamUploadResult, 3 + };
+1
crates/tranquil-pds/src/types.rs
··· 1 + pub use tranquil_types::*;
+1
crates/tranquil-repo/.sqlx
··· 1 + ../../.sqlx
+15
crates/tranquil-repo/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-repo" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-types = { workspace = true } 9 + 10 + bytes = { workspace = true } 11 + cid = { workspace = true } 12 + jacquard-repo = { workspace = true } 13 + multihash = { workspace = true } 14 + sha2 = { workspace = true } 15 + sqlx = { workspace = true }
+228
crates/tranquil-repo/src/lib.rs
use bytes::Bytes;
use cid::Cid;
use jacquard_repo::error::RepoError;
use jacquard_repo::repo::CommitData;
use jacquard_repo::storage::BlockStore;
use multihash::Multihash;
use sha2::{Digest, Sha256};
use sqlx::PgPool;
use std::collections::HashSet;
use std::sync::{Arc, Mutex};

/// Content-addressed block store persisted in the Postgres `blocks` table
/// (columns: `cid` bytea primary key, `data` bytea).
#[derive(Clone)]
pub struct PostgresBlockStore {
    pool: PgPool,
}

impl PostgresBlockStore {
    pub fn new(pool: PgPool) -> Self {
        Self { pool }
    }

    /// Access to the underlying connection pool (for callers that need to
    /// run their own queries alongside block operations).
    pub fn pool(&self) -> &PgPool {
        &self.pool
    }
}

impl BlockStore for PostgresBlockStore {
    /// Fetches a block by CID; `Ok(None)` when the block is absent.
    async fn get(&self, cid: &Cid) -> Result<Option<Bytes>, RepoError> {
        let cid_bytes = cid.to_bytes();
        let row = sqlx::query!("SELECT data FROM blocks WHERE cid = $1", &cid_bytes)
            .fetch_optional(&self.pool)
            .await
            .map_err(RepoError::storage)?;
        match row {
            Some(row) => Ok(Some(Bytes::from(row.data))),
            None => Ok(None),
        }
    }

    /// Stores `data` and returns its CID: CIDv1 with the dag-cbor codec
    /// (0x71) over a sha2-256 multihash (0x12). Idempotent — an existing
    /// block with the same CID is left untouched.
    async fn put(&self, data: &[u8]) -> Result<Cid, RepoError> {
        let mut hasher = Sha256::new();
        hasher.update(data);
        let hash = hasher.finalize();
        let multihash = Multihash::wrap(0x12, &hash).map_err(|e| {
            RepoError::storage(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                format!("Failed to wrap multihash: {:?}", e),
            ))
        })?;
        let cid = Cid::new_v1(0x71, multihash);
        let cid_bytes = cid.to_bytes();
        sqlx::query!(
            "INSERT INTO blocks (cid, data) VALUES ($1, $2) ON CONFLICT (cid) DO NOTHING",
            &cid_bytes,
            data
        )
        .execute(&self.pool)
        .await
        .map_err(RepoError::storage)?;
        Ok(cid)
    }

    /// Existence check without fetching the block body.
    async fn has(&self, cid: &Cid) -> Result<bool, RepoError> {
        let cid_bytes = cid.to_bytes();
        let row = sqlx::query!("SELECT 1 as one FROM blocks WHERE cid = $1", &cid_bytes)
            .fetch_optional(&self.pool)
            .await
            .map_err(RepoError::storage)?;
        Ok(row.is_some())
    }

    /// Bulk insert via a single UNNEST statement; duplicate CIDs are
    /// silently skipped (ON CONFLICT DO NOTHING). No-op for empty input.
    async fn put_many(
        &self,
        blocks: impl IntoIterator<Item = (Cid, Bytes)> + Send,
    ) -> Result<(), RepoError> {
        let blocks: Vec<_> = blocks.into_iter().collect();
        if blocks.is_empty() {
            return Ok(());
        }
        let cids: Vec<Vec<u8>> = blocks.iter().map(|(cid, _)| cid.to_bytes()).collect();
        let data: Vec<&[u8]> = blocks.iter().map(|(_, d)| d.as_ref()).collect();
        sqlx::query!(
            r#"
            INSERT INTO blocks (cid, data)
            SELECT * FROM UNNEST($1::bytea[], $2::bytea[])
            ON CONFLICT (cid) DO NOTHING
            "#,
            &cids,
            &data as &[&[u8]]
        )
        .execute(&self.pool)
        .await
        .map_err(RepoError::storage)?;
        Ok(())
    }

    /// Bulk fetch. The result vector is positionally aligned with `cids`:
    /// entry i is `None` when `cids[i]` was not found.
    async fn get_many(&self, cids: &[Cid]) -> Result<Vec<Option<Bytes>>, RepoError> {
        if cids.is_empty() {
            return Ok(Vec::new());
        }
        let cid_bytes: Vec<Vec<u8>> = cids.iter().map(|c| c.to_bytes()).collect();
        let rows = sqlx::query!(
            "SELECT cid, data FROM blocks WHERE cid = ANY($1)",
            &cid_bytes
        )
        .fetch_all(&self.pool)
        .await
        .map_err(RepoError::storage)?;
        // Map found rows by raw CID bytes, then re-project into input order.
        let found: std::collections::HashMap<Vec<u8>, Bytes> = rows
            .into_iter()
            .map(|row| (row.cid, Bytes::from(row.data)))
            .collect();
        let results = cid_bytes
            .iter()
            .map(|cid| found.get(cid).cloned())
            .collect();
        Ok(results)
    }

    /// Persists all blocks of a commit. Only the blocks are written here;
    /// any commit-pointer bookkeeping happens elsewhere.
    async fn apply_commit(&self, commit: CommitData) -> Result<(), RepoError> {
        self.put_many(commit.blocks).await?;
        Ok(())
    }
}

/// Decorator around [`PostgresBlockStore`] that records which CIDs were
/// written and which were read (and found) during an operation — e.g. to
/// know which blocks belong in a firehose/CAR emission.
///
/// Tracking state is behind `Arc<Mutex<…>>` so clones share one record.
/// Poisoned mutexes are deliberately recovered (`into_inner`) rather than
/// panicking: tracking data is advisory.
#[derive(Clone)]
pub struct TrackingBlockStore {
    inner: PostgresBlockStore,
    // Writes keep insertion order (Vec); reads are deduplicated (HashSet).
    written_cids: Arc<Mutex<Vec<Cid>>>,
    read_cids: Arc<Mutex<HashSet<Cid>>>,
}

impl TrackingBlockStore {
    pub fn new(store: PostgresBlockStore) -> Self {
        Self {
            inner: store,
            written_cids: Arc::new(Mutex::new(Vec::new())),
            read_cids: Arc::new(Mutex::new(HashSet::new())),
        }
    }

    /// All CIDs written so far, in write order (may contain duplicates).
    pub fn get_written_cids(&self) -> Vec<Cid> {
        match self.written_cids.lock() {
            Ok(guard) => guard.clone(),
            Err(poisoned) => poisoned.into_inner().clone(),
        }
    }

    /// All distinct CIDs successfully read so far (unordered).
    pub fn get_read_cids(&self) -> Vec<Cid> {
        match self.read_cids.lock() {
            Ok(guard) => guard.iter().cloned().collect(),
            Err(poisoned) => poisoned.into_inner().iter().cloned().collect(),
        }
    }

    /// Union of written and read CIDs, deduplicated (unordered).
    pub fn get_all_relevant_cids(&self) -> Vec<Cid> {
        let written = self.get_written_cids();
        let read = self.get_read_cids();
        let mut all: HashSet<Cid> = written.into_iter().collect();
        all.extend(read);
        all.into_iter().collect()
    }
}

impl BlockStore for TrackingBlockStore {
    /// Delegates to the inner store; records the CID only when the block
    /// was actually found (misses are not tracked).
    async fn get(&self, cid: &Cid) -> Result<Option<Bytes>, RepoError> {
        let result = self.inner.get(cid).await?;
        if result.is_some() {
            match self.read_cids.lock() {
                Ok(mut guard) => {
                    guard.insert(*cid);
                }
                Err(poisoned) => {
                    poisoned.into_inner().insert(*cid);
                }
            }
        }
        Ok(result)
    }

    /// Delegates to the inner store and records the resulting CID as written.
    async fn put(&self, data: &[u8]) -> Result<Cid, RepoError> {
        let cid = self.inner.put(data).await?;
        match self.written_cids.lock() {
            Ok(mut guard) => guard.push(cid),
            Err(poisoned) => poisoned.into_inner().push(cid),
        }
        Ok(cid)
    }

    /// Pure delegation — existence checks are not tracked.
    async fn has(&self, cid: &Cid) -> Result<bool, RepoError> {
        self.inner.has(cid).await
    }

    /// Delegates the bulk insert, then records every CID as written.
    /// CIDs are captured up front because `put_many` consumes the blocks;
    /// they are only recorded after the insert succeeds.
    async fn put_many(
        &self,
        blocks: impl IntoIterator<Item = (Cid, Bytes)> + Send,
    ) -> Result<(), RepoError> {
        let blocks: Vec<_> = blocks.into_iter().collect();
        let cids: Vec<Cid> = blocks.iter().map(|(cid, _)| *cid).collect();
        self.inner.put_many(blocks).await?;
        match self.written_cids.lock() {
            Ok(mut guard) => guard.extend(cids),
            Err(poisoned) => poisoned.into_inner().extend(cids),
        }
        Ok(())
    }

    /// Delegates the bulk fetch; records only the CIDs that were found.
    async fn get_many(&self, cids: &[Cid]) -> Result<Vec<Option<Bytes>>, RepoError> {
        let results = self.inner.get_many(cids).await?;
        cids.iter()
            .zip(results.iter())
            .filter(|(_, result)| result.is_some())
            .for_each(|(cid, _)| match self.read_cids.lock() {
                Ok(mut guard) => {
                    guard.insert(*cid);
                }
                Err(poisoned) => {
                    poisoned.into_inner().insert(*cid);
                }
            });
        Ok(results)
    }

    /// Persists the commit's blocks through the tracked `put_many`, so the
    /// commit's blocks show up in the written-CID record.
    async fn apply_commit(&self, commit: CommitData) -> Result<(), RepoError> {
        self.put_many(commit.blocks).await?;
        Ok(())
    }
}
+14
crates/tranquil-scopes/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-scopes" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + axum = { workspace = true } 9 + futures = { workspace = true } 10 + reqwest = { workspace = true } 11 + serde = { workspace = true } 12 + serde_json = { workspace = true } 13 + tokio = { workspace = true } 14 + tracing = { workspace = true }
+17
crates/tranquil-storage/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-storage" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + tranquil-infra = { workspace = true } 9 + 10 + async-trait = { workspace = true } 11 + aws-config = { workspace = true } 12 + aws-sdk-s3 = { workspace = true } 13 + bytes = { workspace = true } 14 + futures = { workspace = true } 15 + sha2 = { workspace = true } 16 + thiserror = { workspace = true } 17 + tracing = { workspace = true }
+14
crates/tranquil-types/Cargo.toml
··· 1 + [package] 2 + name = "tranquil-types" 3 + version.workspace = true 4 + edition.workspace = true 5 + license.workspace = true 6 + 7 + [dependencies] 8 + chrono = { workspace = true } 9 + cid = { workspace = true } 10 + jacquard = { workspace = true } 11 + serde = { workspace = true } 12 + serde_json = { workspace = true } 13 + sqlx = { workspace = true } 14 + thiserror = { workspace = true }
src/api/actor/mod.rs crates/tranquil-pds/src/api/actor/mod.rs
src/api/actor/preferences.rs crates/tranquil-pds/src/api/actor/preferences.rs
+2 -7
src/api/admin/account/delete.rs crates/tranquil-pds/src/api/admin/account/delete.rs
··· 130 130 error!("Failed to commit account deletion transaction: {:?}", e); 131 131 return ApiError::InternalError(Some("Failed to commit deletion".into())).into_response(); 132 132 } 133 - if let Err(e) = crate::api::repo::record::sequence_account_event( 134 - &state, 135 - did, 136 - false, 137 - Some("deleted"), 138 - ) 139 - .await 133 + if let Err(e) = 134 + crate::api::repo::record::sequence_account_event(&state, did, false, Some("deleted")).await 140 135 { 141 136 warn!( 142 137 "Failed to sequence account deletion event for {}: {}",
src/api/admin/account/email.rs crates/tranquil-pds/src/api/admin/account/email.rs
src/api/admin/account/info.rs crates/tranquil-pds/src/api/admin/account/info.rs
src/api/admin/account/mod.rs crates/tranquil-pds/src/api/admin/account/mod.rs
src/api/admin/account/search.rs crates/tranquil-pds/src/api/admin/account/search.rs
+3 -6
src/api/admin/account/update.rs crates/tranquil-pds/src/api/admin/account/update.rs
··· 104 104 } 105 105 let _ = state.cache.delete(&format!("handle:{}", handle)).await; 106 106 let handle_typed = Handle::new_unchecked(&handle); 107 - if let Err(e) = crate::api::repo::record::sequence_identity_event( 108 - &state, 109 - did, 110 - Some(&handle_typed), 111 - ) 112 - .await 107 + if let Err(e) = 108 + crate::api::repo::record::sequence_identity_event(&state, did, Some(&handle_typed)) 109 + .await 113 110 { 114 111 warn!( 115 112 "Failed to sequence identity event for admin handle update: {}",
src/api/admin/config.rs crates/tranquil-pds/src/api/admin/config.rs
src/api/admin/invite.rs crates/tranquil-pds/src/api/admin/invite.rs
src/api/admin/mod.rs crates/tranquil-pds/src/api/admin/mod.rs
src/api/admin/server_stats.rs crates/tranquil-pds/src/api/admin/server_stats.rs
+6 -3
src/api/admin/status.rs crates/tranquil-pds/src/api/admin/status.rs
··· 224 224 .execute(&mut *tx) 225 225 .await 226 226 } else { 227 - sqlx::query!("UPDATE users SET deactivated_at = NULL WHERE did = $1", did.as_str()) 228 - .execute(&mut *tx) 229 - .await 227 + sqlx::query!( 228 + "UPDATE users SET deactivated_at = NULL WHERE did = $1", 229 + did.as_str() 230 + ) 231 + .execute(&mut *tx) 232 + .await 230 233 }; 231 234 if let Err(e) = result { 232 235 error!(
src/api/age_assurance.rs crates/tranquil-pds/src/api/age_assurance.rs
src/api/backup.rs crates/tranquil-pds/src/api/backup.rs
+1 -5
src/api/delegation.rs crates/tranquil-pds/src/api/delegation.rs
··· 768 768 769 769 info!(did = %did, handle = %handle, controller = %&auth.0.did, "Delegated account created"); 770 770 771 - Json(CreateDelegatedAccountResponse { 772 - did: did.into(), 773 - handle: handle.into(), 774 - }) 775 - .into_response() 771 + Json(CreateDelegatedAccountResponse { did, handle }).into_response() 776 772 }
src/api/error.rs crates/tranquil-pds/src/api/error.rs
+6 -2
src/api/identity/account.rs crates/tranquil-pds/src/api/identity/account.rs
··· 796 796 if !is_migration && !is_did_web_byod { 797 797 let did_typed = Did::new_unchecked(&did); 798 798 let handle_typed = Handle::new_unchecked(&handle); 799 - if let Err(e) = 800 - crate::api::repo::record::sequence_identity_event(&state, &did_typed, Some(&handle_typed)).await 799 + if let Err(e) = crate::api::repo::record::sequence_identity_event( 800 + &state, 801 + &did_typed, 802 + Some(&handle_typed), 803 + ) 804 + .await 801 805 { 802 806 warn!("Failed to sequence identity event for {}: {}", did, e); 803 807 }
+2 -1
src/api/identity/did.rs crates/tranquil-pds/src/api/identity/did.rs
··· 754 754 let _ = state.cache.delete(&format!("handle:{}", handle)).await; 755 755 let handle_typed = Handle::new_unchecked(&handle); 756 756 if let Err(e) = 757 - crate::api::repo::record::sequence_identity_event(&state, &did, Some(&handle_typed)).await 757 + crate::api::repo::record::sequence_identity_event(&state, &did, Some(&handle_typed)) 758 + .await 758 759 { 759 760 warn!("Failed to sequence identity event for handle update: {}", e); 760 761 }
src/api/identity/mod.rs crates/tranquil-pds/src/api/identity/mod.rs
src/api/identity/plc/mod.rs crates/tranquil-pds/src/api/identity/plc/mod.rs
src/api/identity/plc/request.rs crates/tranquil-pds/src/api/identity/plc/request.rs
src/api/identity/plc/sign.rs crates/tranquil-pds/src/api/identity/plc/sign.rs
src/api/identity/plc/submit.rs crates/tranquil-pds/src/api/identity/plc/submit.rs
src/api/mod.rs crates/tranquil-pds/src/api/mod.rs
src/api/moderation/mod.rs crates/tranquil-pds/src/api/moderation/mod.rs
src/api/notification_prefs.rs crates/tranquil-pds/src/api/notification_prefs.rs
+1 -2
src/api/proxy.rs crates/tranquil-pds/src/api/proxy.rs
··· 268 268 } 269 269 Err(e) => { 270 270 warn!("Token validation failed: {:?}", e); 271 - if matches!(e, crate::auth::TokenValidationError::TokenExpired) 272 - && extracted.is_dpop 271 + if matches!(e, crate::auth::TokenValidationError::TokenExpired) && extracted.is_dpop 273 272 { 274 273 let www_auth = 275 274 "DPoP error=\"invalid_token\", error_description=\"Token has expired\"";
src/api/proxy_client.rs crates/tranquil-pds/src/api/proxy_client.rs
src/api/repo/blob.rs crates/tranquil-pds/src/api/repo/blob.rs
src/api/repo/import.rs crates/tranquil-pds/src/api/repo/import.rs
src/api/repo/meta.rs crates/tranquil-pds/src/api/repo/meta.rs
src/api/repo/mod.rs crates/tranquil-pds/src/api/repo/mod.rs
+8 -1
src/api/repo/record/batch.rs crates/tranquil-pds/src/api/repo/record/batch.rs
··· 421 421 ops, 422 422 modified_keys, 423 423 all_blob_cids, 424 - } = match process_writes(&input.writes, initial_mst, &did, input.validate, &tracking_store).await 424 + } = match process_writes( 425 + &input.writes, 426 + initial_mst, 427 + &did, 428 + input.validate, 429 + &tracking_store, 430 + ) 431 + .await 425 432 { 426 433 Ok(acc) => acc, 427 434 Err(response) => return response,
src/api/repo/record/delete.rs crates/tranquil-pds/src/api/repo/record/delete.rs
src/api/repo/record/mod.rs crates/tranquil-pds/src/api/repo/record/mod.rs
+8 -6
src/api/repo/record/read.rs crates/tranquil-pds/src/api/repo/record/read.rs
··· 257 257 .zip(blocks.into_iter()) 258 258 .filter_map(|((_, rkey, cid_str), block_opt)| { 259 259 block_opt.and_then(|block| { 260 - serde_ipld_dagcbor::from_slice::<Ipld>(&block).ok().map(|ipld| { 261 - json!({ 262 - "uri": format!("at://{}/{}/{}", input.repo, input.collection, rkey), 263 - "cid": cid_str, 264 - "value": ipld_to_json(ipld) 260 + serde_ipld_dagcbor::from_slice::<Ipld>(&block) 261 + .ok() 262 + .map(|ipld| { 263 + json!({ 264 + "uri": format!("at://{}/{}/{}", input.repo, input.collection, rkey), 265 + "cid": cid_str, 266 + "value": ipld_to_json(ipld) 267 + }) 265 268 }) 266 - }) 267 269 }) 268 270 }) 269 271 .collect();
+3 -3
src/api/repo/record/utils.rs crates/tranquil-pds/src/api/repo/record/utils.rs
··· 219 219 .await 220 220 .map_err(|e| format!("DB Error (user_blocks delete obsolete): {}", e))?; 221 221 } 222 - let (upserts, deletes): (Vec<_>, Vec<_>) = ops.iter().partition(|op| { 223 - matches!(op, RecordOp::Create { .. } | RecordOp::Update { .. }) 224 - }); 222 + let (upserts, deletes): (Vec<_>, Vec<_>) = ops 223 + .iter() 224 + .partition(|op| matches!(op, RecordOp::Create { .. } | RecordOp::Update { .. })); 225 225 let (upsert_collections, upsert_rkeys, upsert_cids): (Vec<String>, Vec<String>, Vec<String>) = 226 226 upserts 227 227 .into_iter()
src/api/repo/record/validation.rs crates/tranquil-pds/src/api/repo/record/validation.rs
src/api/repo/record/write.rs crates/tranquil-pds/src/api/repo/record/write.rs
src/api/responses.rs crates/tranquil-pds/src/api/responses.rs
+2 -3
src/api/server/account_status.rs crates/tranquil-pds/src/api/server/account_status.rs
··· 449 449 did 450 450 ); 451 451 if let Err(e) = 452 - crate::api::repo::record::sequence_account_event(&state, &did, true, None) 453 - .await 452 + crate::api::repo::record::sequence_account_event(&state, &did, true, None).await 454 453 { 455 454 warn!( 456 455 "[MIGRATION] activateAccount: Failed to sequence account activation event: {}", ··· 463 462 "[MIGRATION] activateAccount: Sequencing identity event for did={} handle={:?}", 464 463 did, handle 465 464 ); 466 - let handle_typed = handle.as_ref().map(|h| Handle::new_unchecked(h)); 465 + let handle_typed = handle.as_ref().map(Handle::new_unchecked); 467 466 if let Err(e) = crate::api::repo::record::sequence_identity_event( 468 467 &state, 469 468 &did,
src/api/server/app_password.rs crates/tranquil-pds/src/api/server/app_password.rs
src/api/server/email.rs crates/tranquil-pds/src/api/server/email.rs
src/api/server/invite.rs crates/tranquil-pds/src/api/server/invite.rs
src/api/server/logo.rs crates/tranquil-pds/src/api/server/logo.rs
src/api/server/meta.rs crates/tranquil-pds/src/api/server/meta.rs
src/api/server/migration.rs crates/tranquil-pds/src/api/server/migration.rs
src/api/server/mod.rs crates/tranquil-pds/src/api/server/mod.rs
+7 -3
src/api/server/passkey_account.rs crates/tranquil-pds/src/api/server/passkey_account.rs
··· 602 602 603 603 if !is_byod_did_web { 604 604 let handle_typed = Handle::new_unchecked(&handle); 605 - if let Err(e) = 606 - crate::api::repo::record::sequence_identity_event(&state, &did_typed, Some(&handle_typed)).await 605 + if let Err(e) = crate::api::repo::record::sequence_identity_event( 606 + &state, 607 + &did_typed, 608 + Some(&handle_typed), 609 + ) 610 + .await 607 611 { 608 612 warn!("Failed to sequence identity event for {}: {}", did, e); 609 613 } ··· 654 658 info!(did = %did, handle = %handle, "Passkey-only account created, awaiting setup completion"); 655 659 656 660 let access_jwt = if byod_auth.is_some() { 657 - match crate::auth::token::create_access_token_with_metadata(&did, &secret_key_bytes) { 661 + match crate::auth::create_access_token_with_metadata(&did, &secret_key_bytes) { 658 662 Ok(token_meta) => { 659 663 let refresh_jti = uuid::Uuid::new_v4().to_string(); 660 664 let refresh_expires = chrono::Utc::now() + chrono::Duration::hours(24);
src/api/server/passkeys.rs crates/tranquil-pds/src/api/server/passkeys.rs
src/api/server/password.rs crates/tranquil-pds/src/api/server/password.rs
src/api/server/reauth.rs crates/tranquil-pds/src/api/server/reauth.rs
src/api/server/service_auth.rs crates/tranquil-pds/src/api/server/service_auth.rs
+3 -1
src/api/server/session.rs crates/tranquil-pds/src/api/server/session.rs
··· 212 212 &key_bytes, 213 213 app_password_scopes.as_deref(), 214 214 app_password_controller.as_deref(), 215 + None, 215 216 ) { 216 217 Ok(m) => m, 217 218 Err(e) => { ··· 489 490 &key_bytes, 490 491 session_row.scope.as_deref(), 491 492 session_row.controller_did.as_deref(), 493 + None, 492 494 ) { 493 495 Ok(m) => m, 494 496 Err(e) => { ··· 1186 1188 } 1187 1189 } 1188 1190 1189 - use crate::comms::locale::VALID_LOCALES; 1191 + use crate::comms::VALID_LOCALES; 1190 1192 1191 1193 #[derive(Deserialize)] 1192 1194 #[serde(rename_all = "camelCase")]
src/api/server/signing_key.rs crates/tranquil-pds/src/api/server/signing_key.rs
+1 -1
src/api/server/totp.rs crates/tranquil-pds/src/api/server/totp.rs
··· 1 1 use crate::api::EmptyResponse; 2 2 use crate::api::error::ApiError; 3 3 use crate::auth::BearerAuth; 4 - use crate::auth::totp::{ 4 + use crate::auth::{ 5 5 decrypt_totp_secret, encrypt_totp_secret, generate_backup_codes, generate_qr_png_base64, 6 6 generate_totp_secret, generate_totp_uri, hash_backup_code, is_backup_code_format, 7 7 verify_backup_code, verify_totp_code,
src/api/server/trusted_devices.rs crates/tranquil-pds/src/api/server/trusted_devices.rs
src/api/server/verify_email.rs crates/tranquil-pds/src/api/server/verify_email.rs
src/api/server/verify_token.rs crates/tranquil-pds/src/api/server/verify_token.rs
src/api/temp.rs crates/tranquil-pds/src/api/temp.rs
src/api/validation.rs crates/tranquil-pds/src/api/validation.rs
src/api/verification.rs crates/tranquil-pds/src/api/verification.rs
src/appview/mod.rs crates/tranquil-pds/src/appview/mod.rs
src/auth/extractor.rs crates/tranquil-pds/src/auth/extractor.rs
+23 -51
src/auth/mod.rs crates/tranquil-pds/src/auth/mod.rs
··· 11 11 pub mod extractor; 12 12 pub mod scope_check; 13 13 pub mod service; 14 - pub mod token; 15 - pub mod totp; 16 14 pub mod verification_token; 17 - pub mod verify; 18 15 pub mod webauthn; 19 16 20 17 pub use extractor::{ ··· 22 19 extract_auth_token_from_header, extract_bearer_token_from_header, 23 20 }; 24 21 pub use service::{ServiceTokenClaims, ServiceTokenVerifier, is_service_token}; 25 - pub use token::{ 26 - SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, SCOPE_REFRESH, TOKEN_TYPE_ACCESS, 27 - TOKEN_TYPE_REFRESH, TOKEN_TYPE_SERVICE, TokenWithMetadata, create_access_token, 22 + 23 + pub use tranquil_auth::{ 24 + ActClaim, Claims, Header, SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, 25 + SCOPE_REFRESH, TOKEN_TYPE_ACCESS, TOKEN_TYPE_REFRESH, TOKEN_TYPE_SERVICE, TokenData, 26 + TokenVerifyError, TokenWithMetadata, UnsafeClaims, create_access_token, 27 + create_access_token_hs256, create_access_token_hs256_with_metadata, 28 28 create_access_token_with_delegation, create_access_token_with_metadata, 29 - create_access_token_with_scope_metadata, create_refresh_token, 30 - create_refresh_token_with_metadata, create_service_token, 31 - }; 32 - pub use verify::{ 33 - TokenVerifyError, get_did_from_token, get_jti_from_token, verify_access_token, 34 - verify_access_token_typed, verify_refresh_token, verify_token, 29 + create_access_token_with_scope_metadata, create_refresh_token, create_refresh_token_hs256, 30 + create_refresh_token_hs256_with_metadata, create_refresh_token_with_metadata, 31 + create_service_token, create_service_token_hs256, generate_backup_codes, 32 + generate_qr_png_base64, generate_totp_secret, generate_totp_uri, get_algorithm_from_token, 33 + get_did_from_token, get_jti_from_token, hash_backup_code, is_backup_code_format, 34 + verify_access_token, verify_access_token_hs256, verify_access_token_typed, verify_backup_code, 35 + verify_refresh_token, verify_refresh_token_hs256, verify_token, verify_totp_code, 35 36 }; 37 + 
38 + pub fn encrypt_totp_secret(secret: &[u8]) -> Result<Vec<u8>, String> { 39 + crate::config::encrypt_key(secret) 40 + } 41 + 42 + pub fn decrypt_totp_secret(encrypted: &[u8], version: i32) -> Result<Vec<u8>, String> { 43 + crate::config::decrypt_key(encrypted, Some(version)) 44 + } 36 45 37 46 const KEY_CACHE_TTL_SECS: u64 = 300; 38 47 const SESSION_CACHE_TTL_SECS: u64 = 60; ··· 347 356 }); 348 357 } 349 358 } 350 - Err(verify::TokenVerifyError::Expired) => { 359 + Err(TokenVerifyError::Expired) => { 351 360 return Err(TokenValidationError::TokenExpired); 352 361 } 353 - Err(verify::TokenVerifyError::Invalid) => {} 362 + Err(TokenVerifyError::Invalid) => {} 354 363 } 355 364 } 356 365 } ··· 492 501 Err(_) => Err(TokenValidationError::AuthenticationFailed), 493 502 } 494 503 } 495 - 496 - #[derive(Debug, Clone, Serialize, Deserialize)] 497 - pub struct ActClaim { 498 - pub sub: String, 499 - } 500 - 501 - #[derive(Debug, Serialize, Deserialize)] 502 - pub struct Claims { 503 - pub iss: String, 504 - pub sub: String, 505 - pub aud: String, 506 - pub exp: usize, 507 - pub iat: usize, 508 - #[serde(skip_serializing_if = "Option::is_none")] 509 - pub scope: Option<String>, 510 - #[serde(skip_serializing_if = "Option::is_none")] 511 - pub lxm: Option<String>, 512 - pub jti: String, 513 - #[serde(skip_serializing_if = "Option::is_none")] 514 - pub act: Option<ActClaim>, 515 - } 516 - 517 - #[derive(Debug, Serialize, Deserialize)] 518 - pub struct Header { 519 - pub alg: String, 520 - pub typ: String, 521 - } 522 - 523 - #[derive(Debug, Serialize, Deserialize)] 524 - pub struct UnsafeClaims { 525 - pub iss: String, 526 - pub sub: Option<String>, 527 - } 528 - 529 - pub struct TokenData<T> { 530 - pub claims: T, 531 - }
+1 -1
src/auth/scope_check.rs crates/tranquil-pds/src/auth/scope_check.rs
··· 7 7 AccountAction, AccountAttr, IdentityAttr, RepoAction, ScopePermissions, 8 8 }; 9 9 10 - use super::token::SCOPE_ACCESS; 10 + use super::SCOPE_ACCESS; 11 11 12 12 fn has_custom_scope(scope: Option<&str>) -> bool { 13 13 match scope {
src/auth/service.rs crates/tranquil-pds/src/auth/service.rs
+16 -15
src/auth/token.rs crates/tranquil-auth/src/token.rs
··· 1 - use super::{ActClaim, Claims, Header}; 1 + use super::types::{ActClaim, Claims, Header, TokenWithMetadata}; 2 2 use anyhow::Result; 3 3 use base64::Engine as _; 4 4 use base64::engine::general_purpose::URL_SAFE_NO_PAD; 5 - use chrono::{DateTime, Duration, Utc}; 5 + use chrono::{Duration, Utc}; 6 6 use hmac::{Hmac, Mac}; 7 7 use k256::ecdsa::{Signature, SigningKey, signature::Signer}; 8 8 use sha2::Sha256; 9 - use uuid; 10 9 11 10 type HmacSha256 = Hmac<Sha256>; 12 11 ··· 18 17 pub const SCOPE_APP_PASS: &str = "com.atproto.appPass"; 19 18 pub const SCOPE_APP_PASS_PRIVILEGED: &str = "com.atproto.appPassPrivileged"; 20 19 21 - pub struct TokenWithMetadata { 22 - pub token: String, 23 - pub jti: String, 24 - pub expires_at: DateTime<Utc>, 25 - } 26 - 27 20 pub fn create_access_token(did: &str, key_bytes: &[u8]) -> Result<String> { 28 21 Ok(create_access_token_with_metadata(did, key_bytes)?.token) 29 22 } ··· 33 26 } 34 27 35 28 pub fn create_access_token_with_metadata(did: &str, key_bytes: &[u8]) -> Result<TokenWithMetadata> { 36 - create_access_token_with_scope_metadata(did, key_bytes, None) 29 + create_access_token_with_scope_metadata(did, key_bytes, None, None) 37 30 } 38 31 39 32 pub fn create_access_token_with_scope_metadata( 40 33 did: &str, 41 34 key_bytes: &[u8], 42 35 scopes: Option<&str>, 36 + hostname: Option<&str>, 43 37 ) -> Result<TokenWithMetadata> { 44 38 let scope = scopes.unwrap_or(SCOPE_ACCESS); 45 39 create_signed_token_with_metadata( ··· 48 42 TOKEN_TYPE_ACCESS, 49 43 key_bytes, 50 44 Duration::minutes(15), 45 + hostname, 51 46 ) 52 47 } 53 48 ··· 56 51 key_bytes: &[u8], 57 52 scopes: Option<&str>, 58 53 controller_did: Option<&str>, 54 + hostname: Option<&str>, 59 55 ) -> Result<TokenWithMetadata> { 60 56 let scope = scopes.unwrap_or(SCOPE_ACCESS); 61 57 let act = controller_did.map(|c| ActClaim { sub: c.to_string() }); ··· 66 62 key_bytes, 67 63 Duration::minutes(15), 68 64 act, 65 + hostname, 69 66 ) 70 67 } 71 68 ··· 79 76 
TOKEN_TYPE_REFRESH, 80 77 key_bytes, 81 78 Duration::days(14), 79 + None, 82 80 ) 83 81 } 84 82 ··· 111 109 typ: &str, 112 110 key_bytes: &[u8], 113 111 duration: Duration, 112 + hostname: Option<&str>, 114 113 ) -> Result<TokenWithMetadata> { 115 - create_signed_token_with_act(did, scope, typ, key_bytes, duration, None) 114 + create_signed_token_with_act(did, scope, typ, key_bytes, duration, None, hostname) 116 115 } 117 116 118 117 fn create_signed_token_with_act( ··· 122 121 key_bytes: &[u8], 123 122 duration: Duration, 124 123 act: Option<ActClaim>, 124 + hostname: Option<&str>, 125 125 ) -> Result<TokenWithMetadata> { 126 126 let signing_key = SigningKey::from_slice(key_bytes)?; 127 127 ··· 132 132 let expiration = expires_at.timestamp(); 133 133 let jti = uuid::Uuid::new_v4().to_string(); 134 134 135 + let aud_hostname = hostname.map(|h| h.to_string()).unwrap_or_else(|| { 136 + std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string()) 137 + }); 138 + 135 139 let claims = Claims { 136 140 iss: did.to_owned(), 137 141 sub: did.to_owned(), 138 - aud: format!( 139 - "did:web:{}", 140 - std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string()) 141 - ), 142 + aud: format!("did:web:{}", aud_hostname), 142 143 exp: expiration as usize, 143 144 iat: Utc::now().timestamp() as usize, 144 145 scope: Some(scope.to_string()),
+23 -20
src/auth/totp.rs crates/tranquil-auth/src/totp.rs
··· 13 13 secret 14 14 } 15 15 16 - pub fn encrypt_totp_secret(secret: &[u8]) -> Result<Vec<u8>, String> { 17 - crate::config::encrypt_key(secret) 16 + pub fn encrypt_totp_secret( 17 + secret: &[u8], 18 + master_key: &[u8; 32], 19 + ) -> Result<Vec<u8>, tranquil_crypto::CryptoError> { 20 + tranquil_crypto::encrypt_with_key(master_key, secret) 18 21 } 19 22 20 - pub fn decrypt_totp_secret(encrypted: &[u8], version: i32) -> Result<Vec<u8>, String> { 21 - crate::config::decrypt_key(encrypted, Some(version)) 23 + pub fn decrypt_totp_secret( 24 + encrypted: &[u8], 25 + master_key: &[u8; 32], 26 + ) -> Result<Vec<u8>, tranquil_crypto::CryptoError> { 27 + tranquil_crypto::decrypt_with_key(master_key, encrypted) 22 28 } 23 29 24 30 fn create_totp( ··· 53 59 .map(|d| d.as_secs()) 54 60 .unwrap_or(0); 55 61 56 - for offset in [-1i64, 0, 1] { 62 + [-1i64, 0, 1].iter().any(|&offset| { 57 63 let time = (now as i64 + offset * TOTP_STEP as i64) as u64; 58 64 let expected = totp.generate(time); 59 65 let is_valid: bool = code.as_bytes().ct_eq(expected.as_bytes()).into(); 60 - if is_valid { 61 - return true; 62 - } 63 - } 64 - 65 - false 66 + is_valid 67 + }) 66 68 } 67 69 68 70 pub fn generate_totp_uri(secret: &[u8], account_name: &str, issuer: &str) -> String { ··· 107 109 let mut codes = Vec::with_capacity(BACKUP_CODE_COUNT); 108 110 let mut rng = rand::thread_rng(); 109 111 110 - for _ in 0..BACKUP_CODE_COUNT { 111 - let mut code = String::with_capacity(BACKUP_CODE_LENGTH); 112 - for _ in 0..BACKUP_CODE_LENGTH { 113 - let idx = (rng.next_u32() as usize) % BACKUP_CODE_ALPHABET.len(); 114 - code.push(BACKUP_CODE_ALPHABET[idx] as char); 115 - } 112 + (0..BACKUP_CODE_COUNT).for_each(|_| { 113 + let code: String = (0..BACKUP_CODE_LENGTH) 114 + .map(|_| { 115 + let idx = (rng.next_u32() as usize) % BACKUP_CODE_ALPHABET.len(); 116 + BACKUP_CODE_ALPHABET[idx] as char 117 + }) 118 + .collect(); 116 119 codes.push(code); 117 - } 120 + }); 118 121 119 122 codes 120 123 } ··· 167 170 fn 
test_backup_codes() { 168 171 let codes = generate_backup_codes(); 169 172 assert_eq!(codes.len(), BACKUP_CODE_COUNT); 170 - for code in &codes { 173 + codes.iter().for_each(|code| { 171 174 assert_eq!(code.len(), BACKUP_CODE_LENGTH); 172 175 assert!(is_backup_code_format(code)); 173 - } 176 + }); 174 177 } 175 178 176 179 #[test]
src/auth/verification_token.rs crates/tranquil-pds/src/auth/verification_token.rs
+17 -35
src/auth/verify.rs crates/tranquil-auth/src/verify.rs
··· 2 2 SCOPE_ACCESS, SCOPE_APP_PASS, SCOPE_APP_PASS_PRIVILEGED, SCOPE_REFRESH, TOKEN_TYPE_ACCESS, 3 3 TOKEN_TYPE_REFRESH, 4 4 }; 5 - use super::{Claims, Header, TokenData, UnsafeClaims}; 5 + use super::types::{Claims, Header, TokenData, TokenVerifyError, UnsafeClaims}; 6 6 use anyhow::{Context, Result, anyhow}; 7 7 use base64::Engine as _; 8 8 use base64::engine::general_purpose::URL_SAFE_NO_PAD; ··· 10 10 use hmac::{Hmac, Mac}; 11 11 use k256::ecdsa::{Signature, SigningKey, VerifyingKey, signature::Verifier}; 12 12 use sha2::Sha256; 13 - use std::fmt; 14 13 use subtle::ConstantTimeEq; 15 14 16 15 type HmacSha256 = Hmac<Sha256>; 17 - 18 - #[derive(Debug, Clone, Copy, PartialEq, Eq)] 19 - pub enum TokenVerifyError { 20 - Expired, 21 - Invalid, 22 - } 23 - 24 - impl fmt::Display for TokenVerifyError { 25 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 26 - match self { 27 - Self::Expired => write!(f, "Token expired"), 28 - Self::Invalid => write!(f, "Token invalid"), 29 - } 30 - } 31 - } 32 - 33 - impl std::error::Error for TokenVerifyError {} 34 16 35 17 pub fn get_did_from_token(token: &str) -> Result<String, String> { 36 18 let parts: Vec<&str> = token.split('.').collect(); ··· 66 48 .and_then(|j| j.as_str()) 67 49 .map(|s| s.to_string()) 68 50 .ok_or_else(|| "No jti claim in token".to_string()) 51 + } 52 + 53 + pub fn get_algorithm_from_token(token: &str) -> Result<String, String> { 54 + let parts: Vec<&str> = token.split('.').collect(); 55 + if parts.len() != 3 { 56 + return Err("Invalid token format".to_string()); 57 + } 58 + 59 + let header_bytes = URL_SAFE_NO_PAD 60 + .decode(parts[0]) 61 + .map_err(|e| format!("Base64 decode failed: {}", e))?; 62 + 63 + let header: Header = 64 + serde_json::from_slice(&header_bytes).map_err(|e| format!("JSON decode failed: {}", e))?; 65 + 66 + Ok(header.alg) 69 67 } 70 68 71 69 pub fn verify_token(token: &str, key_bytes: &[u8]) -> Result<TokenData<Claims>> { ··· 331 329 332 330 Ok(TokenData { claims }) 333 331 
} 334 - 335 - pub fn get_algorithm_from_token(token: &str) -> Result<String, String> { 336 - let parts: Vec<&str> = token.split('.').collect(); 337 - if parts.len() != 3 { 338 - return Err("Invalid token format".to_string()); 339 - } 340 - 341 - let header_bytes = URL_SAFE_NO_PAD 342 - .decode(parts[0]) 343 - .map_err(|e| format!("Base64 decode failed: {}", e))?; 344 - 345 - let header: Header = 346 - serde_json::from_slice(&header_bytes).map_err(|e| format!("JSON decode failed: {}", e))?; 347 - 348 - Ok(header.alg) 349 - }
src/auth/webauthn.rs crates/tranquil-pds/src/auth/webauthn.rs
+18 -26
src/cache/mod.rs crates/tranquil-cache/src/lib.rs
··· 1 + pub use tranquil_infra::{Cache, CacheError, DistributedRateLimiter}; 2 + 1 3 use async_trait::async_trait; 2 4 use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64}; 3 5 use std::sync::Arc; 4 6 use std::time::Duration; 5 - 6 - #[derive(Debug, thiserror::Error)] 7 - pub enum CacheError { 8 - #[error("Cache connection error: {0}")] 9 - Connection(String), 10 - #[error("Serialization error: {0}")] 11 - Serialization(String), 12 - } 13 - 14 - #[async_trait] 15 - pub trait Cache: Send + Sync { 16 - async fn get(&self, key: &str) -> Option<String>; 17 - async fn set(&self, key: &str, value: &str, ttl: Duration) -> Result<(), CacheError>; 18 - async fn delete(&self, key: &str) -> Result<(), CacheError>; 19 - async fn get_bytes(&self, key: &str) -> Option<Vec<u8>> { 20 - self.get(key).await.and_then(|s| BASE64.decode(&s).ok()) 21 - } 22 - async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError> { 23 - let encoded = BASE64.encode(value); 24 - self.set(key, &encoded, ttl).await 25 - } 26 - } 27 7 28 8 #[derive(Clone)] 29 9 pub struct ValkeyCache { ··· 77 57 .await 78 58 .map_err(|e| CacheError::Connection(e.to_string())) 79 59 } 60 + 61 + async fn get_bytes(&self, key: &str) -> Option<Vec<u8>> { 62 + self.get(key).await.and_then(|s| BASE64.decode(&s).ok()) 63 + } 64 + 65 + async fn set_bytes(&self, key: &str, value: &[u8], ttl: Duration) -> Result<(), CacheError> { 66 + let encoded = BASE64.encode(value); 67 + self.set(key, &encoded, ttl).await 68 + } 80 69 } 81 70 82 71 pub struct NoOpCache; ··· 94 83 async fn delete(&self, _key: &str) -> Result<(), CacheError> { 95 84 Ok(()) 96 85 } 97 - } 98 86 99 - #[async_trait] 100 - pub trait DistributedRateLimiter: Send + Sync { 101 - async fn check_rate_limit(&self, key: &str, limit: u32, window_ms: u64) -> bool; 87 + async fn get_bytes(&self, _key: &str) -> Option<Vec<u8>> { 88 + None 89 + } 90 + 91 + async fn set_bytes(&self, _key: &str, _value: &[u8], _ttl: Duration) -> 
Result<(), CacheError> { 92 + Ok(()) 93 + } 102 94 } 103 95 104 96 #[derive(Clone)]
src/circuit_breaker.rs crates/tranquil-pds/src/circuit_breaker.rs
src/comms/locale.rs crates/tranquil-comms/src/locale.rs
-18
src/comms/mod.rs
··· 1 - pub mod locale; 2 - mod sender; 3 - mod service; 4 - mod types; 5 - 6 - pub use sender::{ 7 - CommsSender, DiscordSender, EmailSender, SendError, SignalSender, TelegramSender, 8 - is_valid_phone_number, sanitize_header_value, 9 - }; 10 - 11 - pub use service::{ 12 - CommsService, channel_display_name, enqueue_2fa_code, enqueue_account_deletion, enqueue_comms, 13 - enqueue_email_update, enqueue_email_update_token, enqueue_migration_verification, 14 - enqueue_passkey_recovery, enqueue_password_reset, enqueue_plc_operation, 15 - enqueue_signup_verification, enqueue_welcome, queue_legacy_login_notification, 16 - }; 17 - 18 - pub use types::{CommsChannel, CommsStatus, CommsType, NewComms, QueuedComms};
src/comms/sender.rs crates/tranquil-comms/src/sender.rs
+18 -18
src/comms/service.rs crates/tranquil-pds/src/comms/service.rs
··· 7 7 use tokio::sync::watch; 8 8 use tokio::time::interval; 9 9 use tracing::{debug, error, info, warn}; 10 + use tranquil_comms::{ 11 + CommsChannel, CommsSender, CommsStatus, CommsType, NewComms, QueuedComms, SendError, 12 + format_message, get_strings, 13 + }; 10 14 use uuid::Uuid; 11 - 12 - use super::locale::{format_message, get_strings}; 13 - use super::sender::{CommsSender, SendError}; 14 - use super::types::{CommsChannel, CommsStatus, NewComms, QueuedComms}; 15 15 16 16 pub struct CommsService { 17 17 db: PgPool, ··· 63 63 "#, 64 64 item.user_id, 65 65 item.channel as CommsChannel, 66 - item.comms_type as super::types::CommsType, 66 + item.comms_type as CommsType, 67 67 item.recipient, 68 68 item.subject, 69 69 item.body, ··· 140 140 RETURNING 141 141 id, user_id, 142 142 channel as "channel: CommsChannel", 143 - comms_type as "comms_type: super::types::CommsType", 143 + comms_type as "comms_type: CommsType", 144 144 status as "status: CommsStatus", 145 145 recipient, subject, body, metadata, 146 146 attempts, max_attempts, last_error, ··· 244 244 "#, 245 245 item.user_id, 246 246 item.channel as CommsChannel, 247 - item.comms_type as super::types::CommsType, 247 + item.comms_type as CommsType, 248 248 item.recipient, 249 249 item.subject, 250 250 item.body, ··· 304 304 NewComms::new( 305 305 user_id, 306 306 prefs.channel, 307 - super::types::CommsType::Welcome, 307 + CommsType::Welcome, 308 308 prefs.email.unwrap_or_default(), 309 309 Some(subject), 310 310 body, ··· 331 331 NewComms::new( 332 332 user_id, 333 333 prefs.channel, 334 - super::types::CommsType::PasswordReset, 334 + CommsType::PasswordReset, 335 335 prefs.email.unwrap_or_default(), 336 336 Some(subject), 337 337 body, ··· 371 371 db, 372 372 NewComms::email( 373 373 user_id, 374 - super::types::CommsType::EmailUpdate, 374 + CommsType::EmailUpdate, 375 375 new_email.to_string(), 376 376 subject, 377 377 body, ··· 409 409 db, 410 410 NewComms::email( 411 411 user_id, 412 - 
super::types::CommsType::EmailUpdate, 412 + CommsType::EmailUpdate, 413 413 current_email, 414 414 subject, 415 415 body, ··· 436 436 NewComms::new( 437 437 user_id, 438 438 prefs.channel, 439 - super::types::CommsType::AccountDeletion, 439 + CommsType::AccountDeletion, 440 440 prefs.email.unwrap_or_default(), 441 441 Some(subject), 442 442 body, ··· 463 463 NewComms::new( 464 464 user_id, 465 465 prefs.channel, 466 - super::types::CommsType::PlcOperation, 466 + CommsType::PlcOperation, 467 467 prefs.email.unwrap_or_default(), 468 468 Some(subject), 469 469 body, ··· 490 490 NewComms::new( 491 491 user_id, 492 492 prefs.channel, 493 - super::types::CommsType::TwoFactorCode, 493 + CommsType::TwoFactorCode, 494 494 prefs.email.unwrap_or_default(), 495 495 Some(subject), 496 496 body, ··· 517 517 NewComms::new( 518 518 user_id, 519 519 prefs.channel, 520 - super::types::CommsType::PasskeyRecovery, 520 + CommsType::PasskeyRecovery, 521 521 prefs.email.unwrap_or_default(), 522 522 Some(subject), 523 523 body, ··· 586 586 NewComms::new( 587 587 user_id, 588 588 comms_channel, 589 - super::types::CommsType::EmailVerification, 589 + CommsType::EmailVerification, 590 590 recipient.to_string(), 591 591 subject, 592 592 body, ··· 628 628 db, 629 629 NewComms::email( 630 630 user_id, 631 - super::types::CommsType::MigrationVerification, 631 + CommsType::MigrationVerification, 632 632 email.to_string(), 633 633 subject, 634 634 body, ··· 664 664 NewComms::new( 665 665 user_id, 666 666 channel, 667 - super::types::CommsType::LegacyLoginAlert, 667 + CommsType::LegacyLoginAlert, 668 668 prefs.email.unwrap_or_default(), 669 669 Some(subject), 670 670 body,
+7 -5
src/comms/types.rs crates/tranquil-comms/src/types.rs
··· 1 1 use chrono::{DateTime, Utc}; 2 2 use serde::{Deserialize, Serialize}; 3 - use sqlx::FromRow; 4 3 use uuid::Uuid; 5 4 6 - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, Serialize, Deserialize)] 5 + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 6 + #[serde(rename_all = "lowercase")] 7 7 #[sqlx(type_name = "comms_channel", rename_all = "lowercase")] 8 8 pub enum CommsChannel { 9 9 Email, ··· 12 12 Signal, 13 13 } 14 14 15 - #[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)] 15 + #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] 16 + #[serde(rename_all = "lowercase")] 16 17 #[sqlx(type_name = "comms_status", rename_all = "lowercase")] 17 18 pub enum CommsStatus { 18 19 Pending, ··· 21 22 Failed, 22 23 } 23 24 24 - #[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)] 25 + #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] 26 + #[serde(rename_all = "snake_case")] 25 27 #[sqlx(type_name = "comms_type", rename_all = "snake_case")] 26 28 pub enum CommsType { 27 29 Welcome, ··· 37 39 MigrationVerification, 38 40 } 39 41 40 - #[derive(Debug, Clone, FromRow)] 42 + #[derive(Debug, Clone)] 41 43 pub struct QueuedComms { 42 44 pub id: Uuid, 43 45 pub user_id: Uuid,
src/config.rs crates/tranquil-pds/src/config.rs
src/crawlers.rs crates/tranquil-pds/src/crawlers.rs
src/delegation/audit.rs crates/tranquil-pds/src/delegation/audit.rs
src/delegation/db.rs crates/tranquil-pds/src/delegation/db.rs
src/delegation/mod.rs crates/tranquil-pds/src/delegation/mod.rs
src/delegation/scopes.rs crates/tranquil-pds/src/delegation/scopes.rs
src/handle/mod.rs crates/tranquil-pds/src/handle/mod.rs
src/handle/reserved.rs crates/tranquil-pds/src/handle/reserved.rs
src/image/mod.rs crates/tranquil-pds/src/image/mod.rs
src/lib.rs crates/tranquil-pds/src/lib.rs
src/main.rs crates/tranquil-pds/src/main.rs
src/metrics.rs crates/tranquil-pds/src/metrics.rs
src/moderation/mod.rs crates/tranquil-pds/src/moderation/mod.rs
+25 -31
src/oauth/client.rs crates/tranquil-oauth/src/client.rs
··· 4 4 use std::sync::Arc; 5 5 use tokio::sync::RwLock; 6 6 7 - use super::OAuthError; 7 + use crate::OAuthError; 8 + use crate::types::ClientAuth; 8 9 9 10 #[derive(Debug, Clone, Serialize, Deserialize)] 10 11 pub struct ClientMetadata { ··· 96 97 url.scheme() == "http" 97 98 && url.host_str() == Some("localhost") 98 99 && url.port().is_none() 99 - // empty path 100 100 && url.path() == "/" 101 101 } else { 102 102 false ··· 108 108 .map_err(|_| OAuthError::InvalidClient("Invalid loopback client_id URL".into()))?; 109 109 let mut redirect_uris = Vec::<String>::new(); 110 110 let mut scope: Option<String> = None; 111 - for (key, value) in url.query_pairs() { 112 - if key == "redirect_uri" { 111 + url.query_pairs().for_each(|(key, value)| { 112 + if key == "redirect_uri" && redirect_uris.is_empty() { 113 113 redirect_uris.push(value.to_string()); 114 - break; 115 114 } 116 - if key == "scope" { 115 + if key == "scope" && scope.is_none() { 117 116 scope = Some(value.into()); 118 - break; 119 117 } 120 - } 118 + }); 121 119 if redirect_uris.is_empty() { 122 120 redirect_uris.push("http://127.0.0.1/".into()); 123 121 redirect_uris.push("http://[::1]/".into()); ··· 289 287 "redirect_uris is required".to_string(), 290 288 )); 291 289 } 292 - for uri in &metadata.redirect_uris { 293 - self.validate_redirect_uri_format(uri)?; 294 - } 290 + metadata 291 + .redirect_uris 292 + .iter() 293 + .try_for_each(|uri| self.validate_redirect_uri_format(uri))?; 295 294 if !metadata.grant_types.is_empty() 296 295 && !metadata 297 296 .grant_types ··· 357 356 if !scheme 358 357 .chars() 359 358 .next() 360 - .map(|c| c.is_ascii_lowercase()) 361 - .unwrap_or(false) 359 + .is_some_and(|c| c.is_ascii_lowercase()) 362 360 { 363 361 return Err(OAuthError::InvalidClient(format!( 364 362 "Invalid redirect_uri scheme: {}", ··· 388 386 pub async fn verify_client_auth( 389 387 cache: &ClientMetadataCache, 390 388 metadata: &ClientMetadata, 391 - client_auth: &super::ClientAuth, 389 + 
client_auth: &ClientAuth, 392 390 ) -> Result<(), OAuthError> { 393 391 let expected_method = metadata.auth_method(); 394 392 match (expected_method, client_auth) { 395 - ("none", super::ClientAuth::None) => Ok(()), 393 + ("none", ClientAuth::None) => Ok(()), 396 394 ("none", _) => Err(OAuthError::InvalidClient( 397 395 "Client is configured for no authentication, but credentials were provided".to_string(), 398 396 )), 399 - ("private_key_jwt", super::ClientAuth::PrivateKeyJwt { client_assertion }) => { 397 + ("private_key_jwt", ClientAuth::PrivateKeyJwt { client_assertion }) => { 400 398 verify_private_key_jwt_async(cache, metadata, client_assertion).await 401 399 } 402 400 ("private_key_jwt", _) => Err(OAuthError::InvalidClient( 403 401 "Client requires private_key_jwt authentication".to_string(), 404 402 )), 405 - ("client_secret_post", super::ClientAuth::SecretPost { .. }) => { 406 - Err(OAuthError::InvalidClient( 407 - "client_secret_post is not supported for ATProto OAuth".to_string(), 408 - )) 409 - } 410 - ("client_secret_basic", super::ClientAuth::SecretBasic { .. }) => { 411 - Err(OAuthError::InvalidClient( 412 - "client_secret_basic is not supported for ATProto OAuth".to_string(), 413 - )) 414 - } 403 + ("client_secret_post", ClientAuth::SecretPost { .. }) => Err(OAuthError::InvalidClient( 404 + "client_secret_post is not supported for ATProto OAuth".to_string(), 405 + )), 406 + ("client_secret_basic", ClientAuth::SecretBasic { .. 
}) => Err(OAuthError::InvalidClient( 407 + "client_secret_basic is not supported for ATProto OAuth".to_string(), 408 + )), 415 409 (method, _) => Err(OAuthError::InvalidClient(format!( 416 410 "Unsupported or mismatched authentication method: {}", 417 411 method ··· 519 513 .get("keys") 520 514 .and_then(|k| k.as_array()) 521 515 .ok_or_else(|| OAuthError::InvalidClient("Invalid JWKS: missing keys array".to_string()))?; 522 - let matching_keys: Vec<&serde_json::Value> = if let Some(kid) = kid { 523 - keys.iter() 516 + let matching_keys: Vec<&serde_json::Value> = match kid { 517 + Some(kid) => keys 518 + .iter() 524 519 .filter(|k| k.get("kid").and_then(|v| v.as_str()) == Some(kid)) 525 - .collect() 526 - } else { 527 - keys.iter().collect() 520 + .collect(), 521 + None => keys.iter().collect(), 528 522 }; 529 523 if matching_keys.is_empty() { 530 524 return Err(OAuthError::InvalidClient(
src/oauth/db/client.rs crates/tranquil-pds/src/oauth/db/client.rs
src/oauth/db/device.rs crates/tranquil-pds/src/oauth/db/device.rs
src/oauth/db/dpop.rs crates/tranquil-pds/src/oauth/db/dpop.rs
src/oauth/db/helpers.rs crates/tranquil-pds/src/oauth/db/helpers.rs
src/oauth/db/mod.rs crates/tranquil-pds/src/oauth/db/mod.rs
src/oauth/db/request.rs crates/tranquil-pds/src/oauth/db/request.rs
src/oauth/db/scope_preference.rs crates/tranquil-pds/src/oauth/db/scope_preference.rs
src/oauth/db/token.rs crates/tranquil-pds/src/oauth/db/token.rs
src/oauth/db/two_factor.rs crates/tranquil-pds/src/oauth/db/two_factor.rs
+2 -2
src/oauth/dpop.rs crates/tranquil-oauth/src/dpop.rs
··· 4 4 use serde::{Deserialize, Serialize}; 5 5 use sha2::{Digest, Sha256}; 6 6 7 - use super::OAuthError; 8 - use crate::types::{DPoPProofId, JwkThumbprint}; 7 + use crate::OAuthError; 8 + use tranquil_types::{DPoPProofId, JwkThumbprint}; 9 9 10 10 const DPOP_NONCE_VALIDITY_SECS: i64 = 300; 11 11 const DPOP_MAX_AGE_SECS: i64 = 300;
+1 -2
src/oauth/endpoints/authorize.rs crates/tranquil-pds/src/oauth/endpoints/authorize.rs
··· 1 1 use crate::comms::{CommsChannel, channel_display_name, enqueue_2fa_code}; 2 2 use crate::oauth::{ 3 - AuthFlowState, Code, DeviceData, DeviceId, OAuthError, SessionId, client::ClientMetadataCache, 4 - db, 3 + AuthFlowState, ClientMetadataCache, Code, DeviceData, DeviceId, OAuthError, SessionId, db, 5 4 }; 6 5 use crate::state::{AppState, RateLimitKind}; 7 6 use crate::types::{Handle, PlainPassword};
src/oauth/endpoints/delegation.rs crates/tranquil-pds/src/oauth/endpoints/delegation.rs
src/oauth/endpoints/metadata.rs crates/tranquil-pds/src/oauth/endpoints/metadata.rs
src/oauth/endpoints/mod.rs crates/tranquil-pds/src/oauth/endpoints/mod.rs
+3 -4
src/oauth/endpoints/par.rs crates/tranquil-pds/src/oauth/endpoints/par.rs
··· 1 1 use crate::oauth::{ 2 - AuthorizationRequestParameters, ClientAuth, OAuthError, RequestData, RequestId, 3 - client::ClientMetadataCache, 4 - db, 2 + AuthorizationRequestParameters, ClientAuth, ClientMetadataCache, OAuthError, RequestData, 3 + RequestId, db, 5 4 scopes::{ParsedScope, parse_scope}, 6 5 }; 7 6 use crate::state::{AppState, RateLimitKind}; ··· 173 172 174 173 fn validate_scope( 175 174 requested_scope: &Option<String>, 176 - client_metadata: &crate::oauth::client::ClientMetadata, 175 + client_metadata: &crate::oauth::ClientMetadata, 177 176 ) -> Result<Option<String>, OAuthError> { 178 177 let scope_str = match requested_scope { 179 178 Some(s) if !s.is_empty() => s,
+4 -5
src/oauth/endpoints/token/grants.rs crates/tranquil-pds/src/oauth/endpoints/token/grants.rs
··· 3 3 use crate::config::AuthConfig; 4 4 use crate::delegation; 5 5 use crate::oauth::{ 6 - AuthFlowState, ClientAuth, OAuthError, RefreshToken, TokenData, TokenId, 7 - client::{ClientMetadataCache, verify_client_auth}, 6 + AuthFlowState, ClientAuth, ClientMetadataCache, DPoPVerifier, OAuthError, RefreshToken, 7 + TokenData, TokenId, 8 8 db::{self, RefreshTokenLookup}, 9 - dpop::DPoPVerifier, 10 9 scopes::expand_include_scopes, 10 + verify_client_auth, 11 11 }; 12 12 use crate::state::AppState; 13 13 use axum::Json; ··· 110 110 Some(result.jkt.as_str().to_string()) 111 111 } else if auth_request.parameters.dpop_jkt.is_some() || client_metadata.requires_dpop() { 112 112 return Err(OAuthError::UseDpopNonce( 113 - crate::oauth::dpop::DPoPVerifier::new(AuthConfig::get().dpop_secret().as_bytes()) 114 - .generate_nonce(), 113 + DPoPVerifier::new(AuthConfig::get().dpop_secret().as_bytes()).generate_nonce(), 115 114 )); 116 115 } else { 117 116 None
src/oauth/endpoints/token/helpers.rs crates/tranquil-pds/src/oauth/endpoints/token/helpers.rs
src/oauth/endpoints/token/introspect.rs crates/tranquil-pds/src/oauth/endpoints/token/introspect.rs
src/oauth/endpoints/token/mod.rs crates/tranquil-pds/src/oauth/endpoints/token/mod.rs
src/oauth/endpoints/token/types.rs crates/tranquil-pds/src/oauth/endpoints/token/types.rs
+6 -6
src/oauth/error.rs crates/tranquil-oauth/src/error.rs
··· 82 82 } 83 83 } 84 84 85 - impl From<sqlx::Error> for OAuthError { 86 - fn from(err: sqlx::Error) -> Self { 87 - tracing::error!("Database error in OAuth flow: {}", err); 85 + impl From<anyhow::Error> for OAuthError { 86 + fn from(err: anyhow::Error) -> Self { 87 + tracing::error!("Internal error in OAuth flow: {}", err); 88 88 OAuthError::ServerError("An internal error occurred".to_string()) 89 89 } 90 90 } 91 91 92 - impl From<anyhow::Error> for OAuthError { 93 - fn from(err: anyhow::Error) -> Self { 94 - tracing::error!("Internal error in OAuth flow: {}", err); 92 + impl From<sqlx::Error> for OAuthError { 93 + fn from(err: sqlx::Error) -> Self { 94 + tracing::error!("Database error in OAuth flow: {}", err); 95 95 OAuthError::ServerError("An internal error occurred".to_string()) 96 96 } 97 97 }
src/oauth/jwks.rs crates/tranquil-crypto/src/jwk.rs
-16
src/oauth/mod.rs
··· 1 - pub mod client; 2 - pub mod db; 3 - pub mod dpop; 4 - pub mod endpoints; 5 - pub mod error; 6 - pub mod jwks; 7 - pub mod scopes; 8 - pub mod types; 9 - pub mod verify; 10 - 11 - pub use error::OAuthError; 12 - pub use scopes::{AccountAction, AccountAttr, RepoAction, ScopeError, ScopePermissions}; 13 - pub use types::*; 14 - pub use verify::{ 15 - OAuthAuthError, OAuthUser, VerifyResult, generate_dpop_nonce, verify_oauth_access_token, 16 - };
src/oauth/scopes/definitions.rs crates/tranquil-scopes/src/definitions.rs
src/oauth/scopes/error.rs crates/tranquil-scopes/src/error.rs
+4 -1
src/oauth/scopes/mod.rs crates/tranquil-scopes/src/lib.rs
··· 4 4 mod permission_set; 5 5 mod permissions; 6 6 7 - pub use definitions::{SCOPE_DEFINITIONS, ScopeCategory, ScopeDefinition}; 7 + pub use definitions::{ 8 + SCOPE_DEFINITIONS, ScopeCategory, ScopeDefinition, format_scope_for_display, 9 + get_required_scopes, get_scope_definition, is_valid_scope, 10 + }; 8 11 pub use error::ScopeError; 9 12 pub use parser::{ 10 13 AccountAction, AccountAttr, AccountScope, BlobScope, IdentityAttr, IdentityScope, IncludeScope,
src/oauth/scopes/parser.rs crates/tranquil-scopes/src/parser.rs
src/oauth/scopes/permission_set.rs crates/tranquil-scopes/src/permission_set.rs
src/oauth/scopes/permissions.rs crates/tranquil-scopes/src/permissions.rs
src/oauth/types.rs crates/tranquil-oauth/src/types.rs
+1 -2
src/oauth/verify.rs crates/tranquil-pds/src/oauth/verify.rs
··· 11 11 use sqlx::PgPool; 12 12 use subtle::ConstantTimeEq; 13 13 14 - use super::OAuthError; 15 14 use super::db; 16 - use super::dpop::DPoPVerifier; 17 15 use super::scopes::ScopePermissions; 16 + use super::{DPoPVerifier, OAuthError}; 18 17 use crate::config::AuthConfig; 19 18 use crate::state::AppState; 20 19
src/plc/mod.rs crates/tranquil-pds/src/plc/mod.rs
src/rate_limit.rs crates/tranquil-pds/src/rate_limit.rs
-125
src/repo/mod.rs
··· 1 - use bytes::Bytes; 2 - use cid::Cid; 3 - use jacquard_repo::error::RepoError; 4 - use jacquard_repo::repo::CommitData; 5 - use jacquard_repo::storage::BlockStore; 6 - use multihash::Multihash; 7 - use sha2::{Digest, Sha256}; 8 - use sqlx::PgPool; 9 - 10 - pub mod tracking; 11 - 12 - #[derive(Clone)] 13 - pub struct PostgresBlockStore { 14 - pool: PgPool, 15 - } 16 - 17 - impl PostgresBlockStore { 18 - pub fn new(pool: PgPool) -> Self { 19 - Self { pool } 20 - } 21 - } 22 - 23 - impl BlockStore for PostgresBlockStore { 24 - async fn get(&self, cid: &Cid) -> Result<Option<Bytes>, RepoError> { 25 - crate::metrics::record_block_operation("get"); 26 - let cid_bytes = cid.to_bytes(); 27 - let row = sqlx::query!("SELECT data FROM blocks WHERE cid = $1", &cid_bytes) 28 - .fetch_optional(&self.pool) 29 - .await 30 - .map_err(RepoError::storage)?; 31 - match row { 32 - Some(row) => Ok(Some(Bytes::from(row.data))), 33 - None => Ok(None), 34 - } 35 - } 36 - 37 - async fn put(&self, data: &[u8]) -> Result<Cid, RepoError> { 38 - crate::metrics::record_block_operation("put"); 39 - let mut hasher = Sha256::new(); 40 - hasher.update(data); 41 - let hash = hasher.finalize(); 42 - let multihash = Multihash::wrap(0x12, &hash).map_err(|e| { 43 - RepoError::storage(std::io::Error::new( 44 - std::io::ErrorKind::InvalidData, 45 - format!("Failed to wrap multihash: {:?}", e), 46 - )) 47 - })?; 48 - let cid = Cid::new_v1(0x71, multihash); 49 - let cid_bytes = cid.to_bytes(); 50 - sqlx::query!( 51 - "INSERT INTO blocks (cid, data) VALUES ($1, $2) ON CONFLICT (cid) DO NOTHING", 52 - &cid_bytes, 53 - data 54 - ) 55 - .execute(&self.pool) 56 - .await 57 - .map_err(RepoError::storage)?; 58 - Ok(cid) 59 - } 60 - 61 - async fn has(&self, cid: &Cid) -> Result<bool, RepoError> { 62 - crate::metrics::record_block_operation("has"); 63 - let cid_bytes = cid.to_bytes(); 64 - let row = sqlx::query!("SELECT 1 as one FROM blocks WHERE cid = $1", &cid_bytes) 65 - .fetch_optional(&self.pool) 66 - 
.await 67 - .map_err(RepoError::storage)?; 68 - Ok(row.is_some()) 69 - } 70 - 71 - async fn put_many( 72 - &self, 73 - blocks: impl IntoIterator<Item = (Cid, Bytes)> + Send, 74 - ) -> Result<(), RepoError> { 75 - let blocks: Vec<_> = blocks.into_iter().collect(); 76 - if blocks.is_empty() { 77 - return Ok(()); 78 - } 79 - crate::metrics::record_block_operation("put_many"); 80 - let cids: Vec<Vec<u8>> = blocks.iter().map(|(cid, _)| cid.to_bytes()).collect(); 81 - let data: Vec<&[u8]> = blocks.iter().map(|(_, d)| d.as_ref()).collect(); 82 - sqlx::query!( 83 - r#" 84 - INSERT INTO blocks (cid, data) 85 - SELECT * FROM UNNEST($1::bytea[], $2::bytea[]) 86 - ON CONFLICT (cid) DO NOTHING 87 - "#, 88 - &cids, 89 - &data as &[&[u8]] 90 - ) 91 - .execute(&self.pool) 92 - .await 93 - .map_err(RepoError::storage)?; 94 - Ok(()) 95 - } 96 - 97 - async fn get_many(&self, cids: &[Cid]) -> Result<Vec<Option<Bytes>>, RepoError> { 98 - if cids.is_empty() { 99 - return Ok(Vec::new()); 100 - } 101 - crate::metrics::record_block_operation("get_many"); 102 - let cid_bytes: Vec<Vec<u8>> = cids.iter().map(|c| c.to_bytes()).collect(); 103 - let rows = sqlx::query!( 104 - "SELECT cid, data FROM blocks WHERE cid = ANY($1)", 105 - &cid_bytes 106 - ) 107 - .fetch_all(&self.pool) 108 - .await 109 - .map_err(RepoError::storage)?; 110 - let found: std::collections::HashMap<Vec<u8>, Bytes> = rows 111 - .into_iter() 112 - .map(|row| (row.cid, Bytes::from(row.data))) 113 - .collect(); 114 - let results = cid_bytes 115 - .iter() 116 - .map(|cid| found.get(cid).cloned()) 117 - .collect(); 118 - Ok(results) 119 - } 120 - 121 - async fn apply_commit(&self, commit: CommitData) -> Result<(), RepoError> { 122 - self.put_many(commit.blocks).await?; 123 - Ok(()) 124 - } 125 - }
-113
src/repo/tracking.rs
··· 1 - use crate::repo::PostgresBlockStore; 2 - use bytes::Bytes; 3 - use cid::Cid; 4 - use jacquard_repo::error::RepoError; 5 - use jacquard_repo::repo::CommitData; 6 - use jacquard_repo::storage::BlockStore; 7 - use std::collections::HashSet; 8 - use std::sync::{Arc, Mutex}; 9 - 10 - #[derive(Clone)] 11 - pub struct TrackingBlockStore { 12 - inner: PostgresBlockStore, 13 - written_cids: Arc<Mutex<Vec<Cid>>>, 14 - read_cids: Arc<Mutex<HashSet<Cid>>>, 15 - } 16 - 17 - impl TrackingBlockStore { 18 - pub fn new(store: PostgresBlockStore) -> Self { 19 - Self { 20 - inner: store, 21 - written_cids: Arc::new(Mutex::new(Vec::new())), 22 - read_cids: Arc::new(Mutex::new(HashSet::new())), 23 - } 24 - } 25 - 26 - pub fn get_written_cids(&self) -> Vec<Cid> { 27 - match self.written_cids.lock() { 28 - Ok(guard) => guard.clone(), 29 - Err(poisoned) => poisoned.into_inner().clone(), 30 - } 31 - } 32 - 33 - pub fn get_read_cids(&self) -> Vec<Cid> { 34 - match self.read_cids.lock() { 35 - Ok(guard) => guard.iter().cloned().collect(), 36 - Err(poisoned) => poisoned.into_inner().iter().cloned().collect(), 37 - } 38 - } 39 - 40 - pub fn get_all_relevant_cids(&self) -> Vec<Cid> { 41 - let written = self.get_written_cids(); 42 - let read = self.get_read_cids(); 43 - let mut all: HashSet<Cid> = written.into_iter().collect(); 44 - all.extend(read); 45 - all.into_iter().collect() 46 - } 47 - } 48 - 49 - impl BlockStore for TrackingBlockStore { 50 - async fn get(&self, cid: &Cid) -> Result<Option<Bytes>, RepoError> { 51 - let result = self.inner.get(cid).await?; 52 - if result.is_some() { 53 - match self.read_cids.lock() { 54 - Ok(mut guard) => { 55 - guard.insert(*cid); 56 - } 57 - Err(poisoned) => { 58 - poisoned.into_inner().insert(*cid); 59 - } 60 - } 61 - } 62 - Ok(result) 63 - } 64 - 65 - async fn put(&self, data: &[u8]) -> Result<Cid, RepoError> { 66 - let cid = self.inner.put(data).await?; 67 - match self.written_cids.lock() { 68 - Ok(mut guard) => guard.push(cid), 69 - 
Err(poisoned) => poisoned.into_inner().push(cid), 70 - } 71 - Ok(cid) 72 - } 73 - 74 - async fn has(&self, cid: &Cid) -> Result<bool, RepoError> { 75 - self.inner.has(cid).await 76 - } 77 - 78 - async fn put_many( 79 - &self, 80 - blocks: impl IntoIterator<Item = (Cid, Bytes)> + Send, 81 - ) -> Result<(), RepoError> { 82 - let blocks: Vec<_> = blocks.into_iter().collect(); 83 - let cids: Vec<Cid> = blocks.iter().map(|(cid, _)| *cid).collect(); 84 - self.inner.put_many(blocks).await?; 85 - match self.written_cids.lock() { 86 - Ok(mut guard) => guard.extend(cids), 87 - Err(poisoned) => poisoned.into_inner().extend(cids), 88 - } 89 - Ok(()) 90 - } 91 - 92 - async fn get_many(&self, cids: &[Cid]) -> Result<Vec<Option<Bytes>>, RepoError> { 93 - let results = self.inner.get_many(cids).await?; 94 - for (cid, result) in cids.iter().zip(results.iter()) { 95 - if result.is_some() { 96 - match self.read_cids.lock() { 97 - Ok(mut guard) => { 98 - guard.insert(*cid); 99 - } 100 - Err(poisoned) => { 101 - poisoned.into_inner().insert(*cid); 102 - } 103 - } 104 - } 105 - } 106 - Ok(results) 107 - } 108 - 109 - async fn apply_commit(&self, commit: CommitData) -> Result<(), RepoError> { 110 - self.put_many(commit.blocks).await?; 111 - Ok(()) 112 - } 113 - }
+2 -2
src/scheduled.rs crates/tranquil-pds/src/scheduled.rs
··· 816 816 .map_err(|e| format!("Failed to fetch repo: {}", e))? 817 817 .ok_or_else(|| "Repository not found".to_string())?; 818 818 819 - let actual_head_cid = Cid::from_str(&repo_root_cid_str) 820 - .map_err(|e| format!("Invalid repo_root_cid: {}", e))?; 819 + let actual_head_cid = 820 + Cid::from_str(&repo_root_cid_str).map_err(|e| format!("Invalid repo_root_cid: {}", e))?; 821 821 822 822 generate_repo_car(block_store, &actual_head_cid).await 823 823 }
src/state.rs crates/tranquil-pds/src/state.rs
+19 -90
src/storage/mod.rs crates/tranquil-storage/src/lib.rs
··· 1 + pub use tranquil_infra::{BlobStorage, StorageError, StreamUploadResult}; 2 + 1 3 use async_trait::async_trait; 2 4 use aws_config::BehaviorVersion; 3 5 use aws_config::meta::region::RegionProviderChain; ··· 9 11 use futures::Stream; 10 12 use sha2::{Digest, Sha256}; 11 13 use std::pin::Pin; 12 - use thiserror::Error; 13 14 14 15 const MIN_PART_SIZE: usize = 5 * 1024 * 1024; 15 16 16 - #[derive(Error, Debug)] 17 - pub enum StorageError { 18 - #[error("IO error: {0}")] 19 - Io(#[from] std::io::Error), 20 - #[error("S3 error: {0}")] 21 - S3(String), 22 - #[error("Other: {0}")] 23 - Other(String), 24 - } 25 - 26 - pub struct StreamUploadResult { 27 - pub sha256_hash: [u8; 32], 28 - pub size: u64, 29 - } 30 - 31 - #[async_trait] 32 - pub trait BlobStorage: Send + Sync { 33 - async fn put(&self, key: &str, data: &[u8]) -> Result<(), StorageError>; 34 - async fn put_bytes(&self, key: &str, data: Bytes) -> Result<(), StorageError>; 35 - async fn get(&self, key: &str) -> Result<Vec<u8>, StorageError>; 36 - async fn get_bytes(&self, key: &str) -> Result<Bytes, StorageError>; 37 - async fn get_head(&self, key: &str, size: usize) -> Result<Bytes, StorageError>; 38 - async fn delete(&self, key: &str) -> Result<(), StorageError>; 39 - async fn put_stream( 40 - &self, 41 - key: &str, 42 - stream: Pin<Box<dyn Stream<Item = Result<Bytes, std::io::Error>> + Send>>, 43 - ) -> Result<StreamUploadResult, StorageError>; 44 - async fn copy(&self, src_key: &str, dst_key: &str) -> Result<(), StorageError>; 45 - } 46 - 47 17 pub struct S3BlobStorage { 48 18 client: Client, 49 19 bucket: String, ··· 52 22 impl S3BlobStorage { 53 23 pub async fn new() -> Self { 54 24 let bucket = std::env::var("S3_BUCKET").expect("S3_BUCKET must be set"); 25 + let client = create_s3_client().await; 26 + Self { client, bucket } 27 + } 28 + 29 + pub async fn with_bucket(bucket: String) -> Self { 55 30 let client = create_s3_client().await; 56 31 Self { client, bucket } 57 32 } ··· 124 99 
.body(ByteStream::from(Bytes::copy_from_slice(data))) 125 100 .send() 126 101 .await 127 - .map_err(|e| { 128 - crate::metrics::record_s3_operation("backup_put", "error"); 129 - StorageError::S3(e.to_string()) 130 - })?; 102 + .map_err(|e| StorageError::S3(e.to_string()))?; 131 103 132 - crate::metrics::record_s3_operation("backup_put", "success"); 133 104 Ok(key) 134 105 } 135 106 ··· 141 112 .key(storage_key) 142 113 .send() 143 114 .await 144 - .map_err(|e| { 145 - crate::metrics::record_s3_operation("backup_get", "error"); 146 - StorageError::S3(e.to_string()) 147 - })?; 115 + .map_err(|e| StorageError::S3(e.to_string()))?; 148 116 149 117 let data = resp 150 118 .body 151 119 .collect() 152 120 .await 153 - .map_err(|e| { 154 - crate::metrics::record_s3_operation("backup_get", "error"); 155 - StorageError::S3(e.to_string()) 156 - })? 121 + .map_err(|e| StorageError::S3(e.to_string()))? 157 122 .into_bytes(); 158 123 159 - crate::metrics::record_s3_operation("backup_get", "success"); 160 124 Ok(data) 161 125 } 162 126 ··· 167 131 .key(storage_key) 168 132 .send() 169 133 .await 170 - .map_err(|e| { 171 - crate::metrics::record_s3_operation("backup_delete", "error"); 172 - StorageError::S3(e.to_string()) 173 - })?; 134 + .map_err(|e| StorageError::S3(e.to_string()))?; 174 135 175 - crate::metrics::record_s3_operation("backup_delete", "success"); 176 136 Ok(()) 177 137 } 178 138 } ··· 184 144 } 185 145 186 146 async fn put_bytes(&self, key: &str, data: Bytes) -> Result<(), StorageError> { 187 - let result = self 188 - .client 147 + self.client 189 148 .put_object() 190 149 .bucket(&self.bucket) 191 150 .key(key) 192 151 .body(ByteStream::from(data)) 193 152 .send() 194 153 .await 195 - .map_err(|e| StorageError::S3(e.to_string())); 154 + .map_err(|e| StorageError::S3(e.to_string()))?; 196 155 197 - match &result { 198 - Ok(_) => crate::metrics::record_s3_operation("put", "success"), 199 - Err(_) => crate::metrics::record_s3_operation("put", "error"), 200 - } 201 
- 202 - result?; 203 156 Ok(()) 204 157 } 205 158 ··· 215 168 .key(key) 216 169 .send() 217 170 .await 218 - .map_err(|e| { 219 - crate::metrics::record_s3_operation("get", "error"); 220 - StorageError::S3(e.to_string()) 221 - })?; 171 + .map_err(|e| StorageError::S3(e.to_string()))?; 222 172 223 173 let data = resp 224 174 .body 225 175 .collect() 226 176 .await 227 - .map_err(|e| { 228 - crate::metrics::record_s3_operation("get", "error"); 229 - StorageError::S3(e.to_string()) 230 - })? 177 + .map_err(|e| StorageError::S3(e.to_string()))? 231 178 .into_bytes(); 232 179 233 - crate::metrics::record_s3_operation("get", "success"); 234 180 Ok(data) 235 181 } 236 182 ··· 244 190 .range(range) 245 191 .send() 246 192 .await 247 - .map_err(|e| { 248 - crate::metrics::record_s3_operation("get_head", "error"); 249 - StorageError::S3(e.to_string()) 250 - })?; 193 + .map_err(|e| StorageError::S3(e.to_string()))?; 251 194 252 195 let data = resp 253 196 .body 254 197 .collect() 255 198 .await 256 - .map_err(|e| { 257 - crate::metrics::record_s3_operation("get_head", "error"); 258 - StorageError::S3(e.to_string()) 259 - })? 199 + .map_err(|e| StorageError::S3(e.to_string()))? 
260 200 .into_bytes(); 261 201 262 - crate::metrics::record_s3_operation("get_head", "success"); 263 202 Ok(data) 264 203 } 265 204 266 205 async fn delete(&self, key: &str) -> Result<(), StorageError> { 267 - let result = self 268 - .client 206 + self.client 269 207 .delete_object() 270 208 .bucket(&self.bucket) 271 209 .key(key) 272 210 .send() 273 211 .await 274 - .map_err(|e| StorageError::S3(e.to_string())); 275 - 276 - match &result { 277 - Ok(_) => crate::metrics::record_s3_operation("delete", "success"), 278 - Err(_) => crate::metrics::record_s3_operation("delete", "error"), 279 - } 212 + .map_err(|e| StorageError::S3(e.to_string()))?; 280 213 281 - result?; 282 214 Ok(()) 283 215 } 284 216 ··· 423 355 .await 424 356 .map_err(|e| StorageError::S3(format!("Failed to complete multipart upload: {}", e)))?; 425 357 426 - crate::metrics::record_s3_operation("put_stream", "success"); 427 - 428 358 let hash: [u8; 32] = hasher.finalize().into(); 429 359 Ok(StreamUploadResult { 430 360 sha256_hash: hash, ··· 444 374 .await 445 375 .map_err(|e| StorageError::S3(format!("Failed to copy object: {}", e)))?; 446 376 447 - crate::metrics::record_s3_operation("copy", "success"); 448 377 Ok(()) 449 378 } 450 379 }
src/sync/blob.rs crates/tranquil-pds/src/sync/blob.rs
src/sync/car.rs crates/tranquil-pds/src/sync/car.rs
src/sync/commit.rs crates/tranquil-pds/src/sync/commit.rs
src/sync/crawl.rs crates/tranquil-pds/src/sync/crawl.rs
src/sync/deprecated.rs crates/tranquil-pds/src/sync/deprecated.rs
src/sync/firehose.rs crates/tranquil-pds/src/sync/firehose.rs
src/sync/frame.rs crates/tranquil-pds/src/sync/frame.rs
src/sync/import.rs crates/tranquil-pds/src/sync/import.rs
src/sync/listener.rs crates/tranquil-pds/src/sync/listener.rs
src/sync/mod.rs crates/tranquil-pds/src/sync/mod.rs
src/sync/repo.rs crates/tranquil-pds/src/sync/repo.rs
src/sync/subscribe_repos.rs crates/tranquil-pds/src/sync/subscribe_repos.rs
src/sync/util.rs crates/tranquil-pds/src/sync/util.rs
src/sync/verify.rs crates/tranquil-pds/src/sync/verify.rs
src/sync/verify_tests.rs crates/tranquil-pds/src/sync/verify_tests.rs
+20 -110
src/types.rs crates/tranquil-types/src/lib.rs
··· 123 123 } 124 124 } 125 125 126 - #[derive(Debug, Clone)] 126 + #[derive(Debug, Clone, thiserror::Error)] 127 127 pub enum DidError { 128 + #[error("invalid DID: {0}")] 128 129 Invalid(String), 129 130 } 130 - 131 - impl fmt::Display for DidError { 132 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 133 - match self { 134 - Self::Invalid(s) => write!(f, "invalid DID: {}", s), 135 - } 136 - } 137 - } 138 - 139 - impl std::error::Error for DidError {} 140 131 141 132 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 142 133 #[serde(transparent)] ··· 239 230 } 240 231 } 241 232 242 - #[derive(Debug, Clone)] 233 + #[derive(Debug, Clone, thiserror::Error)] 243 234 pub enum HandleError { 235 + #[error("invalid handle: {0}")] 244 236 Invalid(String), 245 237 } 246 238 247 - impl fmt::Display for HandleError { 248 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 249 - match self { 250 - Self::Invalid(s) => write!(f, "invalid handle: {}", s), 251 - } 252 - } 253 - } 254 - 255 - impl std::error::Error for HandleError {} 256 - 257 239 #[derive(Debug, Clone, PartialEq, Eq, Hash)] 258 240 pub enum AtIdentifier { 259 241 Did(Did), ··· 370 352 } 371 353 } 372 354 373 - #[derive(Debug, Clone)] 355 + #[derive(Debug, Clone, thiserror::Error)] 374 356 pub enum AtIdentifierError { 357 + #[error("invalid AT identifier: {0}")] 375 358 Invalid(String), 376 359 } 377 360 378 - impl fmt::Display for AtIdentifierError { 379 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 380 - match self { 381 - Self::Invalid(s) => write!(f, "invalid AT identifier: {}", s), 382 - } 383 - } 384 - } 385 - 386 - impl std::error::Error for AtIdentifierError {} 387 - 388 361 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 389 362 #[serde(transparent)] 390 363 #[sqlx(type_name = "rkey")] ··· 495 468 } 496 469 } 497 470 498 - #[derive(Debug, Clone)] 471 + #[derive(Debug, Clone, thiserror::Error)] 499 472 pub 
enum RkeyError { 473 + #[error("invalid rkey: {0}")] 500 474 Invalid(String), 501 475 } 502 476 503 - impl fmt::Display for RkeyError { 504 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 505 - match self { 506 - Self::Invalid(s) => write!(f, "invalid rkey: {}", s), 507 - } 508 - } 509 - } 510 - 511 - impl std::error::Error for RkeyError {} 512 - 513 477 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 514 478 #[serde(transparent)] 515 479 #[sqlx(type_name = "nsid")] ··· 624 588 } 625 589 } 626 590 627 - #[derive(Debug, Clone)] 591 + #[derive(Debug, Clone, thiserror::Error)] 628 592 pub enum NsidError { 593 + #[error("invalid NSID: {0}")] 629 594 Invalid(String), 630 595 } 631 - 632 - impl fmt::Display for NsidError { 633 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 634 - match self { 635 - Self::Invalid(s) => write!(f, "invalid NSID: {}", s), 636 - } 637 - } 638 - } 639 - 640 - impl std::error::Error for NsidError {} 641 596 642 597 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 643 598 #[serde(transparent)] ··· 744 699 } 745 700 } 746 701 747 - #[derive(Debug, Clone)] 702 + #[derive(Debug, Clone, thiserror::Error)] 748 703 pub enum AtUriError { 704 + #[error("invalid AT URI: {0}")] 749 705 Invalid(String), 750 706 } 751 - 752 - impl fmt::Display for AtUriError { 753 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 754 - match self { 755 - Self::Invalid(s) => write!(f, "invalid AT URI: {}", s), 756 - } 757 - } 758 - } 759 - 760 - impl std::error::Error for AtUriError {} 761 707 762 708 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 763 709 #[serde(transparent)] ··· 865 811 } 866 812 } 867 813 868 - #[derive(Debug, Clone)] 814 + #[derive(Debug, Clone, thiserror::Error)] 869 815 pub enum TidError { 816 + #[error("invalid TID: {0}")] 870 817 Invalid(String), 871 818 } 872 819 873 - impl fmt::Display for TidError { 874 - fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 875 - match self { 876 - Self::Invalid(s) => write!(f, "invalid TID: {}", s), 877 - } 878 - } 879 - } 880 - 881 - impl std::error::Error for TidError {} 882 - 883 820 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 884 821 #[serde(transparent)] 885 822 #[sqlx(transparent)] ··· 990 927 } 991 928 } 992 929 993 - #[derive(Debug, Clone)] 930 + #[derive(Debug, Clone, thiserror::Error)] 994 931 pub enum DatetimeError { 932 + #[error("invalid datetime: {0}")] 995 933 Invalid(String), 996 934 } 997 935 998 - impl fmt::Display for DatetimeError { 999 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 1000 - match self { 1001 - Self::Invalid(s) => write!(f, "invalid datetime: {}", s), 1002 - } 1003 - } 1004 - } 1005 - 1006 - impl std::error::Error for DatetimeError {} 1007 - 1008 936 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 1009 937 #[serde(transparent)] 1010 938 #[sqlx(transparent)] ··· 1107 1035 } 1108 1036 } 1109 1037 1110 - #[derive(Debug, Clone)] 1038 + #[derive(Debug, Clone, thiserror::Error)] 1111 1039 pub enum LanguageError { 1040 + #[error("invalid language tag: {0}")] 1112 1041 Invalid(String), 1113 1042 } 1114 1043 1115 - impl fmt::Display for LanguageError { 1116 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 1117 - match self { 1118 - Self::Invalid(s) => write!(f, "invalid language tag: {}", s), 1119 - } 1120 - } 1121 - } 1122 - 1123 - impl std::error::Error for LanguageError {} 1124 - 1125 1044 #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type)] 1126 1045 #[serde(transparent)] 1127 1046 #[sqlx(transparent)] ··· 1227 1146 } 1228 1147 } 1229 1148 1230 - #[derive(Debug, Clone)] 1149 + #[derive(Debug, Clone, thiserror::Error)] 1231 1150 pub enum CidLinkError { 1151 + #[error("invalid CID: {0}")] 1232 1152 Invalid(String), 1233 1153 } 1234 - 1235 - impl fmt::Display for CidLinkError { 
1236 - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 1237 - match self { 1238 - Self::Invalid(s) => write!(f, "invalid CID: {}", s), 1239 - } 1240 - } 1241 - } 1242 - 1243 - impl std::error::Error for CidLinkError {} 1244 1154 1245 1155 #[derive(Debug, Clone, PartialEq, Eq)] 1246 1156 pub enum AccountState {
src/util.rs crates/tranquil-pds/src/util.rs
src/validation/mod.rs crates/tranquil-pds/src/validation/mod.rs
tests/account_lifecycle.rs crates/tranquil-pds/tests/account_lifecycle.rs
tests/account_notifications.rs crates/tranquil-pds/tests/account_notifications.rs
tests/actor.rs crates/tranquil-pds/tests/actor.rs
tests/admin_email.rs crates/tranquil-pds/tests/admin_email.rs
tests/admin_invite.rs crates/tranquil-pds/tests/admin_invite.rs
tests/admin_moderation.rs crates/tranquil-pds/tests/admin_moderation.rs
tests/admin_search.rs crates/tranquil-pds/tests/admin_search.rs
tests/admin_stats.rs crates/tranquil-pds/tests/admin_stats.rs
tests/backup.rs crates/tranquil-pds/tests/backup.rs
tests/banned_words.rs crates/tranquil-pds/tests/banned_words.rs
tests/change_password.rs crates/tranquil-pds/tests/change_password.rs
tests/commit_signing.rs crates/tranquil-pds/tests/commit_signing.rs
tests/common/mod.rs crates/tranquil-pds/tests/common/mod.rs
tests/delete_account.rs crates/tranquil-pds/tests/delete_account.rs
tests/did_web.rs crates/tranquil-pds/tests/did_web.rs
+1 -1
tests/dpop_unit.rs crates/tranquil-pds/tests/dpop_unit.rs
··· 4 4 use p256::ecdsa::{SigningKey, signature::Signer}; 5 5 use serde_json::json; 6 6 7 - use tranquil_pds::oauth::dpop::{ 7 + use tranquil_pds::oauth::{ 8 8 DPoPJwk, DPoPVerifier, compute_access_token_hash, compute_jwk_thumbprint, 9 9 }; 10 10
tests/email_update.rs crates/tranquil-pds/tests/email_update.rs
tests/firehose_validation.rs crates/tranquil-pds/tests/firehose_validation.rs
tests/helpers/mod.rs crates/tranquil-pds/tests/helpers/mod.rs
tests/identity.rs crates/tranquil-pds/tests/identity.rs
tests/image_processing.rs crates/tranquil-pds/tests/image_processing.rs
tests/import_verification.rs crates/tranquil-pds/tests/import_verification.rs
tests/import_with_verification.rs crates/tranquil-pds/tests/import_with_verification.rs
tests/invite.rs crates/tranquil-pds/tests/invite.rs
tests/jwt_security.rs crates/tranquil-pds/tests/jwt_security.rs
tests/lifecycle_record.rs crates/tranquil-pds/tests/lifecycle_record.rs
tests/lifecycle_session.rs crates/tranquil-pds/tests/lifecycle_session.rs
tests/lifecycle_social.rs crates/tranquil-pds/tests/lifecycle_social.rs
tests/moderation.rs crates/tranquil-pds/tests/moderation.rs
tests/notifications.rs crates/tranquil-pds/tests/notifications.rs
tests/oauth.rs crates/tranquil-pds/tests/oauth.rs
tests/oauth_client_metadata.rs crates/tranquil-pds/tests/oauth_client_metadata.rs
tests/oauth_lifecycle.rs crates/tranquil-pds/tests/oauth_lifecycle.rs
tests/oauth_scopes.rs crates/tranquil-pds/tests/oauth_scopes.rs
+1 -1
tests/oauth_security.rs crates/tranquil-pds/tests/oauth_security.rs
··· 8 8 use reqwest::StatusCode; 9 9 use serde_json::{Value, json}; 10 10 use sha2::{Digest, Sha256}; 11 - use tranquil_pds::oauth::dpop::{DPoPJwk, DPoPVerifier, compute_jwk_thumbprint}; 11 + use tranquil_pds::oauth::{DPoPJwk, DPoPVerifier, compute_jwk_thumbprint}; 12 12 use wiremock::matchers::{method, path}; 13 13 use wiremock::{Mock, MockServer, ResponseTemplate}; 14 14
tests/password_reset.rs crates/tranquil-pds/tests/password_reset.rs
tests/plc_migration.rs crates/tranquil-pds/tests/plc_migration.rs
tests/plc_operations.rs crates/tranquil-pds/tests/plc_operations.rs
tests/plc_validation.rs crates/tranquil-pds/tests/plc_validation.rs
tests/rate_limit.rs crates/tranquil-pds/tests/rate_limit.rs
tests/record_validation.rs crates/tranquil-pds/tests/record_validation.rs
tests/repo_batch.rs crates/tranquil-pds/tests/repo_batch.rs
tests/repo_blob.rs crates/tranquil-pds/tests/repo_blob.rs
tests/repo_conformance.rs crates/tranquil-pds/tests/repo_conformance.rs
tests/scope_edge_cases.rs crates/tranquil-pds/tests/scope_edge_cases.rs
tests/security_fixes.rs crates/tranquil-pds/tests/security_fixes.rs
tests/server.rs crates/tranquil-pds/tests/server.rs
tests/session_management.rs crates/tranquil-pds/tests/session_management.rs
tests/signing_key.rs crates/tranquil-pds/tests/signing_key.rs
tests/sync_blob.rs crates/tranquil-pds/tests/sync_blob.rs
tests/sync_conformance.rs crates/tranquil-pds/tests/sync_conformance.rs
tests/sync_deprecated.rs crates/tranquil-pds/tests/sync_deprecated.rs
tests/sync_repo.rs crates/tranquil-pds/tests/sync_repo.rs
tests/validation_edge_cases.rs crates/tranquil-pds/tests/validation_edge_cases.rs
tests/verify_live_commit.rs crates/tranquil-pds/tests/verify_live_commit.rs