+14  .sqlx/query-076cbf7f32c5f0103207a8e0e73dd5768681ff2520682edda8f2977dcae7cd62.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity')",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Text"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "076cbf7f32c5f0103207a8e0e73dd5768681ff2520682edda8f2977dcae7cd62"
+ }
+22  .sqlx/query-1ed53dde97706d6da36a49d2a8d39f14da4a8dbfe54c9f1ee70c970adde80be8.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "SELECT repo_root_cid FROM repos WHERE user_id = $1 FOR UPDATE NOWAIT",
+   "describe": {
+     "columns": [
+       {
+         "ordinal": 0,
+         "name": "repo_root_cid",
+         "type_info": "Text"
+       }
+     ],
+     "parameters": {
+       "Left": [
+         "Uuid"
+       ]
+     },
+     "nullable": [
+       false
+     ]
+   },
+   "hash": "1ed53dde97706d6da36a49d2a8d39f14da4a8dbfe54c9f1ee70c970adde80be8"
+ }
+2 -1  .sqlx/query-303777d97e6ed344f8c699eae37b7b0c241c734a5b7726019c2a59ae277caee6.json
+14  .sqlx/query-402ecd9f1531f5756dd6873f7f4d59b4bf2113f69d493cde07f4a861a8b3567c.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "DELETE FROM plc_operation_tokens WHERE id = $1",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Uuid"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "402ecd9f1531f5756dd6873f7f4d59b4bf2113f69d493cde07f4a861a8b3567c"
+ }
+17  .sqlx/query-5d1f9275037dd0cb03cefe1e4bbbf7dfaeecb1cc8469b4f0836fe5e52e046839.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "\n INSERT INTO records (repo_id, collection, rkey, record_cid)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (repo_id, collection, rkey) DO UPDATE SET record_cid = $4\n ",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Uuid",
+         "Text",
+         "Text",
+         "Text"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "5d1f9275037dd0cb03cefe1e4bbbf7dfaeecb1cc8469b4f0836fe5e52e046839"
+ }
+2 -1  .sqlx/query-5d49bbf0307a0c642b0174d641de748fa648c97f8109255120e969c957ff95bf.json
+29  .sqlx/query-84e5abf0f7fab44731b1d69658e99018936f8a346bbff91b23a7731b973633cc.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "SELECT id, expires_at FROM plc_operation_tokens WHERE user_id = $1 AND token = $2",
+   "describe": {
+     "columns": [
+       {
+         "ordinal": 0,
+         "name": "id",
+         "type_info": "Uuid"
+       },
+       {
+         "ordinal": 1,
+         "name": "expires_at",
+         "type_info": "Timestamptz"
+       }
+     ],
+     "parameters": {
+       "Left": [
+         "Uuid",
+         "Text"
+       ]
+     },
+     "nullable": [
+       false,
+       false
+     ]
+   },
+   "hash": "84e5abf0f7fab44731b1d69658e99018936f8a346bbff91b23a7731b973633cc"
+ }
+19  .sqlx/query-aadc1f8c79d79e9a32fe6f4bf7e901076532fa2bf8f0b4d0f1bae7aa0f792183.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "\n INSERT INTO repo_seq (did, event_type, commit_cid, prev_cid, ops, blobs, blocks_cids)\n VALUES ($1, 'commit', $2, $3, $4, $5, $6)\n ",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Text",
+         "Text",
+         "Text",
+         "Jsonb",
+         "TextArray",
+         "TextArray"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "aadc1f8c79d79e9a32fe6f4bf7e901076532fa2bf8f0b4d0f1bae7aa0f792183"
+ }
+14  .sqlx/query-ac8c260666ab6d1e7103e08e15bc1341694fb453a65c26b4f0bfb07d9b74ebd4.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "DELETE FROM plc_operation_tokens WHERE user_id = $1 OR expires_at < NOW()",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Uuid"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "ac8c260666ab6d1e7103e08e15bc1341694fb453a65c26b4f0bfb07d9b74ebd4"
+ }
+34  .sqlx/query-c47715c259bb7b56b576d9719f8facb87a9e9b6b530ca6f81ce308a4c584c002.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "SELECT id, deactivated_at, takedown_ref FROM users WHERE did = $1",
+   "describe": {
+     "columns": [
+       {
+         "ordinal": 0,
+         "name": "id",
+         "type_info": "Uuid"
+       },
+       {
+         "ordinal": 1,
+         "name": "deactivated_at",
+         "type_info": "Timestamptz"
+       },
+       {
+         "ordinal": 2,
+         "name": "takedown_ref",
+         "type_info": "Text"
+       }
+     ],
+     "parameters": {
+       "Left": [
+         "Text"
+       ]
+     },
+     "nullable": [
+       false,
+       true,
+       true
+     ]
+   },
+   "hash": "c47715c259bb7b56b576d9719f8facb87a9e9b6b530ca6f81ce308a4c584c002"
+ }
+2 -1  .sqlx/query-cb6f48aaba124c79308d20e66c23adb44d1196296b7f93fad19b2d17548ed3de.json
+16  .sqlx/query-d981225224ea8e4db25c53566032c8ac81335d05ff5b91cfb20da805e735aea3.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "\n INSERT INTO plc_operation_tokens (user_id, token, expires_at)\n VALUES ($1, $2, $3)\n ",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Uuid",
+         "Text",
+         "Timestamptz"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "d981225224ea8e4db25c53566032c8ac81335d05ff5b91cfb20da805e735aea3"
+ }
+15  .sqlx/query-f1e88d447915b116f887c378253388654a783bddb111b1f9aa04507f176980d3.json
···
+ {
+   "db_name": "PostgreSQL",
+   "query": "UPDATE repos SET repo_root_cid = $1, updated_at = NOW() WHERE user_id = $2",
+   "describe": {
+     "columns": [],
+     "parameters": {
+       "Left": [
+         "Text",
+         "Uuid"
+       ]
+     },
+     "nullable": []
+   },
+   "hash": "f1e88d447915b116f887c378253388654a783bddb111b1f9aa04507f176980d3"
+ }
+1  Cargo.lock
+2 -1  Cargo.toml
···
  serde = { version = "1.0.228", features = ["derive"] }
  serde_bytes = "0.11.14"
  serde_ipld_dagcbor = "0.6.4"
+ ipld-core = "0.4.2"
  serde_json = "1.0.145"
  sha2 = "0.10.9"
  subtle = "2.5"
···
  tokio-tungstenite = { version = "0.28.0", features = ["native-tls"] }
  urlencoding = "2.1"
  uuid = { version = "1.19.0", features = ["v4", "fast-rng"] }
+ iroh-car = "0.5.1"

  [features]
  external-infra = []

  [dev-dependencies]
  ctor = "0.6.3"
- iroh-car = "0.5.1"
  testcontainers = "0.26.0"
  testcontainers-modules = { version = "0.14.0", features = ["postgres"] }
  wiremock = "0.6.5"
+3 -3  TODO.md
···
  - [x] Implement `com.atproto.repo.listRecords`.
  - [x] Implement `com.atproto.repo.describeRepo`.
  - [x] Implement `com.atproto.repo.applyWrites` (Batch writes).
- - [ ] Implement `com.atproto.repo.importRepo` (Migration).
+ - [x] Implement `com.atproto.repo.importRepo` (Migration).
  - [x] Implement `com.atproto.repo.listMissingBlobs`.
  - [x] Blob Management
  - [x] Implement `com.atproto.repo.uploadBlob`.
···
  - [x] Implement `com.atproto.sync.requestCrawl` (Notify relays to index us).

  ## Identity (`com.atproto.identity`)
- - [ ] Resolution
+ - [x] Resolution
  - [x] Implement `com.atproto.identity.resolveHandle` (Can be internal or proxy to PLC).
  - [x] Implement `com.atproto.identity.updateHandle`.
- - [ ] Implement `com.atproto.identity.submitPlcOperation` / `signPlcOperation` / `requestPlcOperationSignature`.
+ - [x] Implement `com.atproto.identity.submitPlcOperation` / `signPlcOperation` / `requestPlcOperationSignature`.
  - [x] Implement `com.atproto.identity.getRecommendedDidCredentials`.
  - [x] Implement `/.well-known/did.json` (Depends on supporting did:web).
+10  migrations/202512211406_plc_operation_tokens.sql
···
+ CREATE TABLE plc_operation_tokens (
+     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+     user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+     token TEXT NOT NULL UNIQUE,
+     expires_at TIMESTAMPTZ NOT NULL,
+     created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+ );
+
+ CREATE INDEX idx_plc_op_tokens_user ON plc_operation_tokens(user_id);
+ CREATE INDEX idx_plc_op_tokens_expires ON plc_operation_tokens(expires_at);
+1  migrations/202512211407_add_plc_operation_notification_type.sql
···
+ ALTER TYPE notification_type ADD VALUE 'plc_operation';
+1  scripts/test-infra.sh
+27 -3  src/api/identity/account.rs
···
  use bcrypt::{DEFAULT_COST, hash};
  use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
  use jacquard_repo::{commit::Commit, mst::Mst, storage::BlockStore};
- use k256::SecretKey;
+ use k256::{ecdsa::SigningKey, SecretKey};
  use rand::rngs::OsRng;
  use serde::{Deserialize, Serialize};
  use serde_json::json;
···
      let rev = Tid::now(LimitedU32::MIN);

-     let commit = Commit::new_unsigned(did_obj, mst_root, rev, None);
+     let unsigned_commit = Commit::new_unsigned(did_obj, mst_root, rev, None);

-     let commit_bytes = match commit.to_cbor() {
+     let signing_key = match SigningKey::from_slice(&secret_key_bytes) {
+         Ok(k) => k,
+         Err(e) => {
+             error!("Error creating signing key: {:?}", e);
+             return (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 Json(json!({"error": "InternalError"})),
+             )
+                 .into_response();
+         }
+     };
+
+     let signed_commit = match unsigned_commit.sign(&signing_key) {
+         Ok(c) => c,
+         Err(e) => {
+             error!("Error signing genesis commit: {:?}", e);
+             return (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 Json(json!({"error": "InternalError"})),
+             )
+                 .into_response();
+         }
+     };
+
+     let commit_bytes = match signed_commit.to_cbor() {
          Ok(b) => b,
          Err(e) => {
              error!("Error serializing genesis commit: {:?}", e);
+2  src/api/identity/mod.rs
···
  pub mod account;
  pub mod did;
+ pub mod plc;

  pub use account::create_account;
  pub use did::{
      get_recommended_did_credentials, resolve_handle, update_handle, user_did_doc, well_known_did,
  };
+ pub use plc::{request_plc_operation_signature, sign_plc_operation, submit_plc_operation};
+618  src/api/identity/plc.rs
···
1
+
use crate::plc::{
2
+
create_update_op, sign_operation, signing_key_to_did_key, validate_plc_operation,
3
+
PlcClient, PlcError, PlcService,
4
+
};
5
+
use crate::state::AppState;
6
+
use axum::{
7
+
extract::State,
8
+
http::StatusCode,
9
+
response::{IntoResponse, Response},
10
+
Json,
11
+
};
12
+
use chrono::{Duration, Utc};
13
+
use k256::ecdsa::SigningKey;
14
+
use rand::Rng;
15
+
use serde::{Deserialize, Serialize};
16
+
use serde_json::{json, Value};
17
+
use std::collections::HashMap;
18
+
use tracing::{error, info, warn};
19
+
20
+
fn generate_plc_token() -> String {
21
+
let mut rng = rand::thread_rng();
22
+
let chars: Vec<char> = "abcdefghijklmnopqrstuvwxyz234567".chars().collect();
23
+
let part1: String = (0..5).map(|_| chars[rng.gen_range(0..chars.len())]).collect();
24
+
let part2: String = (0..5).map(|_| chars[rng.gen_range(0..chars.len())]).collect();
25
+
format!("{}-{}", part1, part2)
26
+
}
27
+
28
+
pub async fn request_plc_operation_signature(
29
+
State(state): State<AppState>,
30
+
headers: axum::http::HeaderMap,
31
+
) -> Response {
32
+
let token = match crate::auth::extract_bearer_token_from_header(
33
+
headers.get("Authorization").and_then(|h| h.to_str().ok()),
34
+
) {
35
+
Some(t) => t,
36
+
None => {
37
+
return (
38
+
StatusCode::UNAUTHORIZED,
39
+
Json(json!({"error": "AuthenticationRequired"})),
40
+
)
41
+
.into_response();
42
+
}
43
+
};
44
+
45
+
let auth_user = match crate::auth::validate_bearer_token(&state.db, &token).await {
46
+
Ok(user) => user,
47
+
Err(e) => {
48
+
return (
49
+
StatusCode::UNAUTHORIZED,
50
+
Json(json!({"error": "AuthenticationFailed", "message": e})),
51
+
)
52
+
.into_response();
53
+
}
54
+
};
55
+
56
+
let did = &auth_user.did;
57
+
58
+
let user = match sqlx::query!(
59
+
"SELECT id FROM users WHERE did = $1",
60
+
did
61
+
)
62
+
.fetch_optional(&state.db)
63
+
.await
64
+
{
65
+
Ok(Some(row)) => row,
66
+
Ok(None) => {
67
+
return (
68
+
StatusCode::NOT_FOUND,
69
+
Json(json!({"error": "AccountNotFound"})),
70
+
)
71
+
.into_response();
72
+
}
73
+
Err(e) => {
74
+
error!("DB error: {:?}", e);
75
+
return (
76
+
StatusCode::INTERNAL_SERVER_ERROR,
77
+
Json(json!({"error": "InternalError"})),
78
+
)
79
+
.into_response();
80
+
}
81
+
};
82
+
83
+
let _ = sqlx::query!(
84
+
"DELETE FROM plc_operation_tokens WHERE user_id = $1 OR expires_at < NOW()",
85
+
user.id
86
+
)
87
+
.execute(&state.db)
88
+
.await;
89
+
90
+
let plc_token = generate_plc_token();
91
+
let expires_at = Utc::now() + Duration::minutes(10);
92
+
93
+
if let Err(e) = sqlx::query!(
94
+
r#"
95
+
INSERT INTO plc_operation_tokens (user_id, token, expires_at)
96
+
VALUES ($1, $2, $3)
97
+
"#,
98
+
user.id,
99
+
plc_token,
100
+
expires_at
101
+
)
102
+
.execute(&state.db)
103
+
.await
104
+
{
105
+
error!("Failed to create PLC token: {:?}", e);
106
+
return (
107
+
StatusCode::INTERNAL_SERVER_ERROR,
108
+
Json(json!({"error": "InternalError"})),
109
+
)
110
+
.into_response();
111
+
}
112
+
113
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
114
+
115
+
if let Err(e) = crate::notifications::enqueue_plc_operation(
116
+
&state.db,
117
+
user.id,
118
+
&plc_token,
119
+
&hostname,
120
+
)
121
+
.await
122
+
{
123
+
warn!("Failed to enqueue PLC operation notification: {:?}", e);
124
+
}
125
+
126
+
info!("PLC operation signature requested for user {}", did);
127
+
128
+
(StatusCode::OK, Json(json!({}))).into_response()
129
+
}
130
+
131
+
#[derive(Debug, Deserialize)]
132
+
#[serde(rename_all = "camelCase")]
133
+
pub struct SignPlcOperationInput {
134
+
pub token: Option<String>,
135
+
pub rotation_keys: Option<Vec<String>>,
136
+
pub also_known_as: Option<Vec<String>>,
137
+
pub verification_methods: Option<HashMap<String, String>>,
138
+
pub services: Option<HashMap<String, ServiceInput>>,
139
+
}
140
+
141
+
#[derive(Debug, Deserialize, Clone)]
142
+
pub struct ServiceInput {
143
+
#[serde(rename = "type")]
144
+
pub service_type: String,
145
+
pub endpoint: String,
146
+
}
147
+
148
+
#[derive(Debug, Serialize)]
149
+
pub struct SignPlcOperationOutput {
150
+
pub operation: Value,
151
+
}
152
+
153
+
pub async fn sign_plc_operation(
154
+
State(state): State<AppState>,
155
+
headers: axum::http::HeaderMap,
156
+
Json(input): Json<SignPlcOperationInput>,
157
+
) -> Response {
158
+
let bearer = match crate::auth::extract_bearer_token_from_header(
159
+
headers.get("Authorization").and_then(|h| h.to_str().ok()),
160
+
) {
161
+
Some(t) => t,
162
+
None => {
163
+
return (
164
+
StatusCode::UNAUTHORIZED,
165
+
Json(json!({"error": "AuthenticationRequired"})),
166
+
)
167
+
.into_response();
168
+
}
169
+
};
170
+
171
+
let auth_user = match crate::auth::validate_bearer_token(&state.db, &bearer).await {
172
+
Ok(user) => user,
173
+
Err(e) => {
174
+
return (
175
+
StatusCode::UNAUTHORIZED,
176
+
Json(json!({"error": "AuthenticationFailed", "message": e})),
177
+
)
178
+
.into_response();
179
+
}
180
+
};
181
+
182
+
let did = &auth_user.did;
183
+
184
+
let token = match &input.token {
185
+
Some(t) => t,
186
+
None => {
187
+
return (
188
+
StatusCode::BAD_REQUEST,
189
+
Json(json!({
190
+
"error": "InvalidRequest",
191
+
"message": "Email confirmation token required to sign PLC operations"
192
+
})),
193
+
)
194
+
.into_response();
195
+
}
196
+
};
197
+
198
+
let user = match sqlx::query!("SELECT id FROM users WHERE did = $1", did)
199
+
.fetch_optional(&state.db)
200
+
.await
201
+
{
202
+
Ok(Some(row)) => row,
203
+
_ => {
204
+
return (
205
+
StatusCode::NOT_FOUND,
206
+
Json(json!({"error": "AccountNotFound"})),
207
+
)
208
+
.into_response();
209
+
}
210
+
};
211
+
212
+
let token_row = match sqlx::query!(
213
+
"SELECT id, expires_at FROM plc_operation_tokens WHERE user_id = $1 AND token = $2",
214
+
user.id,
215
+
token
216
+
)
217
+
.fetch_optional(&state.db)
218
+
.await
219
+
{
220
+
Ok(Some(row)) => row,
221
+
Ok(None) => {
222
+
return (
223
+
StatusCode::BAD_REQUEST,
224
+
Json(json!({
225
+
"error": "InvalidToken",
226
+
"message": "Invalid or expired token"
227
+
})),
228
+
)
229
+
.into_response();
230
+
}
231
+
Err(e) => {
232
+
error!("DB error: {:?}", e);
233
+
return (
234
+
StatusCode::INTERNAL_SERVER_ERROR,
235
+
Json(json!({"error": "InternalError"})),
236
+
)
237
+
.into_response();
238
+
}
239
+
};
240
+
241
+
if Utc::now() > token_row.expires_at {
242
+
let _ = sqlx::query!("DELETE FROM plc_operation_tokens WHERE id = $1", token_row.id)
243
+
.execute(&state.db)
244
+
.await;
245
+
return (
246
+
StatusCode::BAD_REQUEST,
247
+
Json(json!({
248
+
"error": "ExpiredToken",
249
+
"message": "Token has expired"
250
+
})),
251
+
)
252
+
.into_response();
253
+
}
254
+
255
+
let key_row = match sqlx::query!(
256
+
"SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
257
+
user.id
258
+
)
259
+
.fetch_optional(&state.db)
260
+
.await
261
+
{
262
+
Ok(Some(row)) => row,
263
+
_ => {
264
+
return (
265
+
StatusCode::INTERNAL_SERVER_ERROR,
266
+
Json(json!({"error": "InternalError", "message": "User signing key not found"})),
267
+
)
268
+
.into_response();
269
+
}
270
+
};
271
+
272
+
let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
273
+
{
274
+
Ok(k) => k,
275
+
Err(e) => {
276
+
error!("Failed to decrypt user key: {}", e);
277
+
return (
278
+
StatusCode::INTERNAL_SERVER_ERROR,
279
+
Json(json!({"error": "InternalError"})),
280
+
)
281
+
.into_response();
282
+
}
283
+
};
284
+
285
+
let signing_key = match SigningKey::from_slice(&key_bytes) {
286
+
Ok(k) => k,
287
+
Err(e) => {
288
+
error!("Failed to create signing key: {:?}", e);
289
+
return (
290
+
StatusCode::INTERNAL_SERVER_ERROR,
291
+
Json(json!({"error": "InternalError"})),
292
+
)
293
+
.into_response();
294
+
}
295
+
};
296
+
297
+
let plc_client = PlcClient::new(None);
298
+
let last_op = match plc_client.get_last_op(did).await {
299
+
Ok(op) => op,
300
+
Err(PlcError::NotFound) => {
301
+
return (
302
+
StatusCode::NOT_FOUND,
303
+
Json(json!({
304
+
"error": "NotFound",
305
+
"message": "DID not found in PLC directory"
306
+
})),
307
+
)
308
+
.into_response();
309
+
}
310
+
Err(e) => {
311
+
error!("Failed to fetch PLC operation: {:?}", e);
312
+
return (
313
+
StatusCode::BAD_GATEWAY,
314
+
Json(json!({
315
+
"error": "UpstreamError",
316
+
"message": "Failed to communicate with PLC directory"
317
+
})),
318
+
)
319
+
.into_response();
320
+
}
321
+
};
322
+
323
+
if last_op.is_tombstone() {
324
+
return (
325
+
StatusCode::BAD_REQUEST,
326
+
Json(json!({
327
+
"error": "InvalidRequest",
328
+
"message": "DID is tombstoned"
329
+
})),
330
+
)
331
+
.into_response();
332
+
}
333
+
334
+
let services = input.services.map(|s| {
335
+
s.into_iter()
336
+
.map(|(k, v)| {
337
+
(
338
+
k,
339
+
PlcService {
340
+
service_type: v.service_type,
341
+
endpoint: v.endpoint,
342
+
},
343
+
)
344
+
})
345
+
.collect()
346
+
});
347
+
348
+
let unsigned_op = match create_update_op(
349
+
&last_op,
350
+
input.rotation_keys,
351
+
input.verification_methods,
352
+
input.also_known_as,
353
+
services,
354
+
) {
355
+
Ok(op) => op,
356
+
Err(PlcError::Tombstoned) => {
357
+
return (
358
+
StatusCode::BAD_REQUEST,
359
+
Json(json!({
360
+
"error": "InvalidRequest",
361
+
"message": "Cannot update tombstoned DID"
362
+
})),
363
+
)
364
+
.into_response();
365
+
}
366
+
Err(e) => {
367
+
error!("Failed to create PLC operation: {:?}", e);
368
+
return (
369
+
StatusCode::INTERNAL_SERVER_ERROR,
370
+
Json(json!({"error": "InternalError"})),
371
+
)
372
+
.into_response();
373
+
}
374
+
};
375
+
376
+
let signed_op = match sign_operation(&unsigned_op, &signing_key) {
377
+
Ok(op) => op,
378
+
Err(e) => {
379
+
error!("Failed to sign PLC operation: {:?}", e);
380
+
return (
381
+
StatusCode::INTERNAL_SERVER_ERROR,
382
+
Json(json!({"error": "InternalError"})),
383
+
)
384
+
.into_response();
385
+
}
386
+
};
387
+
388
+
let _ = sqlx::query!("DELETE FROM plc_operation_tokens WHERE id = $1", token_row.id)
389
+
.execute(&state.db)
390
+
.await;
391
+
392
+
info!("Signed PLC operation for user {}", did);
393
+
394
+
(
395
+
StatusCode::OK,
396
+
Json(SignPlcOperationOutput {
397
+
operation: signed_op,
398
+
}),
399
+
)
400
+
.into_response()
401
+
}
402
+
403
+
#[derive(Debug, Deserialize)]
404
+
pub struct SubmitPlcOperationInput {
405
+
pub operation: Value,
406
+
}
407
+
408
+
pub async fn submit_plc_operation(
409
+
State(state): State<AppState>,
410
+
headers: axum::http::HeaderMap,
411
+
Json(input): Json<SubmitPlcOperationInput>,
412
+
) -> Response {
413
+
let bearer = match crate::auth::extract_bearer_token_from_header(
414
+
headers.get("Authorization").and_then(|h| h.to_str().ok()),
415
+
) {
416
+
Some(t) => t,
417
+
None => {
418
+
return (
419
+
StatusCode::UNAUTHORIZED,
420
+
Json(json!({"error": "AuthenticationRequired"})),
421
+
)
422
+
.into_response();
423
+
}
424
+
};
425
+
426
+
let auth_user = match crate::auth::validate_bearer_token(&state.db, &bearer).await {
427
+
Ok(user) => user,
428
+
Err(e) => {
429
+
return (
430
+
StatusCode::UNAUTHORIZED,
431
+
Json(json!({"error": "AuthenticationFailed", "message": e})),
432
+
)
433
+
.into_response();
434
+
}
435
+
};
436
+
437
+
let did = &auth_user.did;
438
+
439
+
if let Err(e) = validate_plc_operation(&input.operation) {
440
+
return (
441
+
StatusCode::BAD_REQUEST,
442
+
Json(json!({
443
+
"error": "InvalidRequest",
444
+
"message": format!("Invalid operation: {}", e)
445
+
})),
446
+
)
447
+
.into_response();
448
+
}
449
+
450
+
let op = &input.operation;
451
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
452
+
let public_url = format!("https://{}", hostname);
453
+
454
+
let user = match sqlx::query!("SELECT id, handle FROM users WHERE did = $1", did)
455
+
.fetch_optional(&state.db)
456
+
.await
457
+
{
458
+
Ok(Some(row)) => row,
459
+
_ => {
460
+
return (
461
+
StatusCode::NOT_FOUND,
462
+
Json(json!({"error": "AccountNotFound"})),
463
+
)
464
+
.into_response();
465
+
}
466
+
};
467
+
468
+
let key_row = match sqlx::query!(
469
+
"SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
470
+
user.id
471
+
)
472
+
.fetch_optional(&state.db)
473
+
.await
474
+
{
475
+
Ok(Some(row)) => row,
476
+
_ => {
477
+
return (
478
+
StatusCode::INTERNAL_SERVER_ERROR,
479
+
Json(json!({"error": "InternalError", "message": "User signing key not found"})),
480
+
)
481
+
.into_response();
482
+
}
483
+
};
484
+
485
+
let key_bytes = match crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
486
+
{
487
+
Ok(k) => k,
488
+
Err(e) => {
489
+
error!("Failed to decrypt user key: {}", e);
490
+
return (
491
+
StatusCode::INTERNAL_SERVER_ERROR,
492
+
Json(json!({"error": "InternalError"})),
493
+
)
494
+
.into_response();
495
+
}
496
+
};
497
+
498
+
let signing_key = match SigningKey::from_slice(&key_bytes) {
499
+
Ok(k) => k,
500
+
Err(e) => {
501
+
error!("Failed to create signing key: {:?}", e);
502
+
return (
503
+
StatusCode::INTERNAL_SERVER_ERROR,
504
+
Json(json!({"error": "InternalError"})),
505
+
)
506
+
.into_response();
507
+
}
508
+
};
509
+
510
+
let user_did_key = signing_key_to_did_key(&signing_key);
511
+
512
+
if let Some(rotation_keys) = op.get("rotationKeys").and_then(|v| v.as_array()) {
513
+
let server_rotation_key =
514
+
std::env::var("PLC_ROTATION_KEY").unwrap_or_else(|_| user_did_key.clone());
515
+
516
+
let has_server_key = rotation_keys
517
+
.iter()
518
+
.any(|k| k.as_str() == Some(&server_rotation_key));
519
+
520
+
if !has_server_key {
521
+
return (
522
+
StatusCode::BAD_REQUEST,
523
+
Json(json!({
524
+
"error": "InvalidRequest",
525
+
"message": "Rotation keys do not include server's rotation key"
526
+
})),
527
+
)
528
+
.into_response();
529
+
}
530
+
}
531
+
532
+
if let Some(services) = op.get("services").and_then(|v| v.as_object()) {
533
+
if let Some(pds) = services.get("atproto_pds").and_then(|v| v.as_object()) {
534
+
let service_type = pds.get("type").and_then(|v| v.as_str());
535
+
let endpoint = pds.get("endpoint").and_then(|v| v.as_str());
536
+
537
+
if service_type != Some("AtprotoPersonalDataServer") {
538
+
return (
539
+
StatusCode::BAD_REQUEST,
540
+
Json(json!({
541
+
"error": "InvalidRequest",
542
+
"message": "Incorrect type on atproto_pds service"
543
+
})),
544
+
)
545
+
.into_response();
546
+
}
547
+
548
+
if endpoint != Some(&public_url) {
549
+
return (
550
+
StatusCode::BAD_REQUEST,
551
+
Json(json!({
552
+
"error": "InvalidRequest",
553
+
"message": "Incorrect endpoint on atproto_pds service"
554
+
})),
555
+
)
556
+
.into_response();
557
+
}
558
+
}
559
+
}
560
+
561
+
if let Some(verification_methods) = op.get("verificationMethods").and_then(|v| v.as_object()) {
562
+
if let Some(atproto_key) = verification_methods.get("atproto").and_then(|v| v.as_str()) {
563
+
if atproto_key != user_did_key {
564
+
return (
565
+
StatusCode::BAD_REQUEST,
566
+
Json(json!({
567
+
"error": "InvalidRequest",
568
+
"message": "Incorrect signing key in verificationMethods"
569
+
})),
570
+
)
571
+
.into_response();
572
+
}
573
+
}
574
+
}
575
+
576
+
if let Some(also_known_as) = op.get("alsoKnownAs").and_then(|v| v.as_array()) {
577
+
let expected_handle = format!("at://{}", user.handle);
578
+
let first_aka = also_known_as.first().and_then(|v| v.as_str());
579
+
580
+
if first_aka != Some(&expected_handle) {
581
+
return (
582
+
StatusCode::BAD_REQUEST,
583
+
Json(json!({
584
+
"error": "InvalidRequest",
585
+
"message": "Incorrect handle in alsoKnownAs"
586
+
})),
587
+
)
588
+
.into_response();
589
+
}
590
+
}
591
+
592
+
let plc_client = PlcClient::new(None);
593
+
if let Err(e) = plc_client.send_operation(did, &input.operation).await {
594
+
error!("Failed to submit PLC operation: {:?}", e);
595
+
return (
596
+
StatusCode::BAD_GATEWAY,
597
+
Json(json!({
598
+
"error": "UpstreamError",
599
+
"message": format!("Failed to submit to PLC directory: {}", e)
600
+
})),
601
+
)
602
+
.into_response();
603
+
}
604
+
605
+
if let Err(e) = sqlx::query!(
606
+
"INSERT INTO repo_seq (did, event_type) VALUES ($1, 'identity')",
607
+
did
608
+
)
609
+
.execute(&state.db)
610
+
.await
611
+
{
612
+
warn!("Failed to sequence identity event: {:?}", e);
613
+
}
614
+
615
+
info!("Submitted PLC operation for user {}", did);
616
+
617
+
(StatusCode::OK, Json(json!({}))).into_response()
618
+
}
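
Note: a rough client-side sketch of how the three new endpoints chain together, following the handlers above. The base URL, access JWT, and emailed token value are placeholder assumptions, and error handling is collapsed into anyhow for brevity; this is not part of the diff.

// Hypothetical caller of the new PLC endpoints (illustrative only).
use reqwest::Client;
use serde_json::{json, Value};

async fn rotate_plc(base: &str, jwt: &str, emailed_token: &str) -> anyhow::Result<()> {
    let client = Client::new();

    // 1. Ask the PDS to email a confirmation token to the account holder.
    client
        .post(format!("{base}/xrpc/com.atproto.identity.requestPlcOperationSignature"))
        .bearer_auth(jwt)
        .send()
        .await?
        .error_for_status()?;

    // 2. Exchange the emailed token for an operation signed with the repo signing key.
    let signed: Value = client
        .post(format!("{base}/xrpc/com.atproto.identity.signPlcOperation"))
        .bearer_auth(jwt)
        .json(&json!({ "token": emailed_token }))
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;

    // 3. Submit the signed operation; the PDS validates it and forwards it to the PLC directory.
    client
        .post(format!("{base}/xrpc/com.atproto.identity.submitPlcOperation"))
        .bearer_auth(jwt)
        .json(&json!({ "operation": signed["operation"] }))
        .send()
        .await?
        .error_for_status()?;

    Ok(())
}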
+420  src/api/repo/import.rs
···
1
+
use crate::state::AppState;
2
+
use crate::sync::import::{apply_import, parse_car, ImportError};
3
+
use crate::sync::verify::CarVerifier;
4
+
use axum::{
5
+
body::Bytes,
6
+
extract::State,
7
+
http::StatusCode,
8
+
response::{IntoResponse, Response},
9
+
Json,
10
+
};
11
+
use serde_json::json;
12
+
use tracing::{debug, error, info, warn};
13
+
14
+
const DEFAULT_MAX_IMPORT_SIZE: usize = 100 * 1024 * 1024;
15
+
const DEFAULT_MAX_BLOCKS: usize = 50000;
16
+
17
+
pub async fn import_repo(
18
+
State(state): State<AppState>,
19
+
headers: axum::http::HeaderMap,
20
+
body: Bytes,
21
+
) -> Response {
22
+
let accepting_imports = std::env::var("ACCEPTING_REPO_IMPORTS")
23
+
.map(|v| v != "false" && v != "0")
24
+
.unwrap_or(true);
25
+
26
+
if !accepting_imports {
27
+
return (
28
+
StatusCode::BAD_REQUEST,
29
+
Json(json!({
30
+
"error": "InvalidRequest",
31
+
"message": "Service is not accepting repo imports"
32
+
})),
33
+
)
34
+
.into_response();
35
+
}
36
+
37
+
let max_size: usize = std::env::var("MAX_IMPORT_SIZE")
38
+
.ok()
39
+
.and_then(|s| s.parse().ok())
40
+
.unwrap_or(DEFAULT_MAX_IMPORT_SIZE);
41
+
42
+
if body.len() > max_size {
43
+
return (
44
+
StatusCode::PAYLOAD_TOO_LARGE,
45
+
Json(json!({
46
+
"error": "InvalidRequest",
47
+
"message": format!("Import size exceeds limit of {} bytes", max_size)
48
+
})),
49
+
)
50
+
.into_response();
51
+
}
52
+
53
+
let token = match crate::auth::extract_bearer_token_from_header(
54
+
headers.get("Authorization").and_then(|h| h.to_str().ok()),
55
+
) {
56
+
Some(t) => t,
57
+
None => {
58
+
return (
59
+
StatusCode::UNAUTHORIZED,
60
+
Json(json!({"error": "AuthenticationRequired"})),
61
+
)
62
+
.into_response();
63
+
}
64
+
};
65
+
66
+
let auth_user = match crate::auth::validate_bearer_token(&state.db, &token).await {
67
+
Ok(user) => user,
68
+
Err(e) => {
69
+
return (
70
+
StatusCode::UNAUTHORIZED,
71
+
Json(json!({"error": "AuthenticationFailed", "message": e})),
72
+
)
73
+
.into_response();
74
+
}
75
+
};
76
+
77
+
let did = &auth_user.did;
78
+
79
+
let user = match sqlx::query!(
80
+
"SELECT id, deactivated_at, takedown_ref FROM users WHERE did = $1",
81
+
did
82
+
)
83
+
.fetch_optional(&state.db)
84
+
.await
85
+
{
86
+
Ok(Some(row)) => row,
87
+
Ok(None) => {
88
+
return (
89
+
StatusCode::NOT_FOUND,
90
+
Json(json!({"error": "AccountNotFound"})),
91
+
)
92
+
.into_response();
93
+
}
94
+
Err(e) => {
95
+
error!("DB error fetching user: {:?}", e);
96
+
return (
97
+
StatusCode::INTERNAL_SERVER_ERROR,
98
+
Json(json!({"error": "InternalError"})),
99
+
)
100
+
.into_response();
101
+
}
102
+
};
103
+
104
+
if user.deactivated_at.is_some() {
105
+
return (
106
+
StatusCode::FORBIDDEN,
107
+
Json(json!({
108
+
"error": "AccountDeactivated",
109
+
"message": "Account is deactivated"
110
+
})),
111
+
)
112
+
.into_response();
113
+
}
114
+
115
+
if user.takedown_ref.is_some() {
116
+
return (
117
+
StatusCode::FORBIDDEN,
118
+
Json(json!({
119
+
"error": "AccountTakenDown",
120
+
"message": "Account has been taken down"
121
+
})),
122
+
)
123
+
.into_response();
124
+
}
125
+
126
+
let user_id = user.id;
127
+
128
+
let (root, blocks) = match parse_car(&body).await {
129
+
Ok((r, b)) => (r, b),
130
+
Err(ImportError::InvalidRootCount) => {
131
+
return (
132
+
StatusCode::BAD_REQUEST,
133
+
Json(json!({
134
+
"error": "InvalidRequest",
135
+
"message": "Expected exactly one root in CAR file"
136
+
})),
137
+
)
138
+
.into_response();
139
+
}
140
+
Err(ImportError::CarParse(msg)) => {
141
+
return (
142
+
StatusCode::BAD_REQUEST,
143
+
Json(json!({
144
+
"error": "InvalidRequest",
145
+
"message": format!("Failed to parse CAR file: {}", msg)
146
+
})),
147
+
)
148
+
.into_response();
149
+
}
150
+
Err(e) => {
151
+
error!("CAR parsing error: {:?}", e);
152
+
return (
153
+
StatusCode::BAD_REQUEST,
154
+
Json(json!({
155
+
"error": "InvalidRequest",
156
+
"message": format!("Invalid CAR file: {}", e)
157
+
})),
158
+
)
159
+
.into_response();
160
+
}
161
+
};
162
+
163
+
info!(
164
+
"Importing repo for user {}: {} blocks, root {}",
165
+
did,
166
+
blocks.len(),
167
+
root
168
+
);
169
+
170
+
let root_block = match blocks.get(&root) {
171
+
Some(b) => b,
172
+
None => {
173
+
return (
174
+
StatusCode::BAD_REQUEST,
175
+
Json(json!({
176
+
"error": "InvalidRequest",
177
+
"message": "Root block not found in CAR file"
178
+
})),
179
+
)
180
+
.into_response();
181
+
}
182
+
};
183
+
184
+
let commit_did = match jacquard_repo::commit::Commit::from_cbor(root_block) {
185
+
Ok(commit) => commit.did().to_string(),
186
+
Err(e) => {
187
+
return (
188
+
StatusCode::BAD_REQUEST,
189
+
Json(json!({
190
+
"error": "InvalidRequest",
191
+
"message": format!("Invalid commit: {}", e)
192
+
})),
193
+
)
194
+
.into_response();
195
+
}
196
+
};
197
+
198
+
if commit_did != *did {
199
+
return (
200
+
StatusCode::FORBIDDEN,
201
+
Json(json!({
202
+
"error": "InvalidRequest",
203
+
"message": format!(
204
+
"CAR file is for DID {} but you are authenticated as {}",
205
+
commit_did, did
206
+
)
207
+
})),
208
+
)
209
+
.into_response();
210
+
}
211
+
212
+
let skip_verification = std::env::var("SKIP_IMPORT_VERIFICATION")
213
+
.map(|v| v == "true" || v == "1")
214
+
.unwrap_or(false);
215
+
216
+
if !skip_verification {
217
+
debug!("Verifying CAR file signature and structure for DID {}", did);
218
+
let verifier = CarVerifier::new();
219
+
220
+
match verifier.verify_car(did, &root, &blocks).await {
221
+
Ok(verified) => {
222
+
debug!(
223
+
"CAR verification successful: rev={}, data_cid={}",
224
+
verified.rev, verified.data_cid
225
+
);
226
+
}
227
+
Err(crate::sync::verify::VerifyError::DidMismatch {
228
+
commit_did,
229
+
expected_did,
230
+
}) => {
231
+
return (
232
+
StatusCode::FORBIDDEN,
233
+
Json(json!({
234
+
"error": "InvalidRequest",
235
+
"message": format!(
236
+
"CAR file is for DID {} but you are authenticated as {}",
237
+
commit_did, expected_did
238
+
)
239
+
})),
240
+
)
241
+
.into_response();
242
+
}
243
+
Err(crate::sync::verify::VerifyError::InvalidSignature) => {
244
+
return (
245
+
StatusCode::BAD_REQUEST,
246
+
Json(json!({
247
+
"error": "InvalidSignature",
248
+
"message": "CAR file commit signature verification failed"
249
+
})),
250
+
)
251
+
.into_response();
252
+
}
253
+
Err(crate::sync::verify::VerifyError::DidResolutionFailed(msg)) => {
254
+
warn!("DID resolution failed during import verification: {}", msg);
255
+
return (
256
+
StatusCode::BAD_REQUEST,
257
+
Json(json!({
258
+
"error": "InvalidRequest",
259
+
"message": format!("Failed to verify DID: {}", msg)
260
+
})),
261
+
)
262
+
.into_response();
263
+
}
264
+
Err(crate::sync::verify::VerifyError::NoSigningKey) => {
265
+
return (
266
+
StatusCode::BAD_REQUEST,
267
+
Json(json!({
268
+
"error": "InvalidRequest",
269
+
"message": "DID document does not contain a signing key"
270
+
})),
271
+
)
272
+
.into_response();
273
+
}
274
+
Err(crate::sync::verify::VerifyError::MstValidationFailed(msg)) => {
275
+
return (
276
+
StatusCode::BAD_REQUEST,
277
+
Json(json!({
278
+
"error": "InvalidRequest",
279
+
"message": format!("MST validation failed: {}", msg)
280
+
})),
281
+
)
282
+
.into_response();
283
+
}
284
+
Err(e) => {
285
+
error!("CAR verification error: {:?}", e);
286
+
return (
287
+
StatusCode::BAD_REQUEST,
288
+
Json(json!({
289
+
"error": "InvalidRequest",
290
+
"message": format!("CAR verification failed: {}", e)
291
+
})),
292
+
)
293
+
.into_response();
294
+
}
295
+
}
296
+
} else {
297
+
warn!("Skipping CAR signature verification for import (SKIP_IMPORT_VERIFICATION=true)");
298
+
}
299
+
300
+
let max_blocks: usize = std::env::var("MAX_IMPORT_BLOCKS")
301
+
.ok()
302
+
.and_then(|s| s.parse().ok())
303
+
.unwrap_or(DEFAULT_MAX_BLOCKS);
304
+
305
+
match apply_import(&state.db, user_id, root, blocks, max_blocks).await {
306
+
Ok(records) => {
307
+
info!(
308
+
"Successfully imported {} records for user {}",
309
+
records.len(),
310
+
did
311
+
);
312
+
313
+
if let Err(e) = sequence_import_event(&state, did, &root.to_string()).await {
314
+
warn!("Failed to sequence import event: {:?}", e);
315
+
}
316
+
317
+
(StatusCode::OK, Json(json!({}))).into_response()
318
+
}
319
+
Err(ImportError::SizeLimitExceeded) => (
320
+
StatusCode::BAD_REQUEST,
321
+
Json(json!({
322
+
"error": "InvalidRequest",
323
+
"message": format!("Import exceeds block limit of {}", max_blocks)
324
+
})),
325
+
)
326
+
.into_response(),
327
+
Err(ImportError::RepoNotFound) => (
328
+
StatusCode::NOT_FOUND,
329
+
Json(json!({
330
+
"error": "RepoNotFound",
331
+
"message": "Repository not initialized for this account"
332
+
})),
333
+
)
334
+
.into_response(),
335
+
Err(ImportError::InvalidCbor(msg)) => (
336
+
StatusCode::BAD_REQUEST,
337
+
Json(json!({
338
+
"error": "InvalidRequest",
339
+
"message": format!("Invalid CBOR data: {}", msg)
340
+
})),
341
+
)
342
+
.into_response(),
343
+
Err(ImportError::InvalidCommit(msg)) => (
344
+
StatusCode::BAD_REQUEST,
345
+
Json(json!({
346
+
"error": "InvalidRequest",
347
+
"message": format!("Invalid commit structure: {}", msg)
348
+
})),
349
+
)
350
+
.into_response(),
351
+
Err(ImportError::BlockNotFound(cid)) => (
352
+
StatusCode::BAD_REQUEST,
353
+
Json(json!({
354
+
"error": "InvalidRequest",
355
+
"message": format!("Referenced block not found in CAR: {}", cid)
356
+
})),
357
+
)
358
+
.into_response(),
359
+
Err(ImportError::ConcurrentModification) => (
360
+
StatusCode::CONFLICT,
361
+
Json(json!({
362
+
"error": "ConcurrentModification",
363
+
"message": "Repository is being modified by another operation, please retry"
364
+
})),
365
+
)
366
+
.into_response(),
367
+
Err(ImportError::VerificationFailed(ve)) => (
368
+
StatusCode::BAD_REQUEST,
369
+
Json(json!({
370
+
"error": "VerificationFailed",
371
+
"message": format!("CAR verification failed: {}", ve)
372
+
})),
373
+
)
374
+
.into_response(),
375
+
Err(ImportError::DidMismatch { car_did, auth_did }) => (
376
+
StatusCode::FORBIDDEN,
377
+
Json(json!({
378
+
"error": "DidMismatch",
379
+
"message": format!("CAR is for {} but authenticated as {}", car_did, auth_did)
380
+
})),
381
+
)
382
+
.into_response(),
383
+
Err(e) => {
384
+
error!("Import error: {:?}", e);
385
+
(
386
+
StatusCode::INTERNAL_SERVER_ERROR,
387
+
Json(json!({"error": "InternalError"})),
388
+
)
389
+
.into_response()
390
+
}
391
+
}
392
+
}
393
+
394
+
async fn sequence_import_event(
395
+
state: &AppState,
396
+
did: &str,
397
+
commit_cid: &str,
398
+
) -> Result<(), sqlx::Error> {
399
+
let prev_cid: Option<String> = None;
400
+
let ops = serde_json::json!([]);
401
+
let blobs: Vec<String> = vec![];
402
+
let blocks_cids: Vec<String> = vec![];
403
+
404
+
sqlx::query!(
405
+
r#"
406
+
INSERT INTO repo_seq (did, event_type, commit_cid, prev_cid, ops, blobs, blocks_cids)
407
+
VALUES ($1, 'commit', $2, $3, $4, $5, $6)
408
+
"#,
409
+
did,
410
+
commit_cid,
411
+
prev_cid,
412
+
ops,
413
+
&blobs,
414
+
&blocks_cids
415
+
)
416
+
.execute(&state.db)
417
+
.await?;
418
+
419
+
Ok(())
420
+
}
+2  src/api/repo/mod.rs
···
  pub mod blob;
+ pub mod import;
  pub mod meta;
  pub mod record;

  pub use blob::{list_missing_blobs, upload_blob};
+ pub use import::import_repo;
  pub use meta::describe_repo;
  pub use record::{apply_writes, create_record, delete_record, get_record, list_records, put_record};
+21 -2  src/api/repo/record/utils.rs
···
  use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
  use jacquard_repo::commit::Commit;
  use jacquard_repo::storage::BlockStore;
+ use k256::ecdsa::SigningKey;
  use serde_json::json;
  use uuid::Uuid;

···
      ops: Vec<RecordOp>,
      blocks_cids: &Vec<String>,
  ) -> Result<CommitResult, String> {
+     let key_row = sqlx::query!(
+         "SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
+         user_id
+     )
+     .fetch_one(&state.db)
+     .await
+     .map_err(|e| format!("Failed to fetch signing key: {}", e))?;
+
+     let key_bytes = crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
+         .map_err(|e| format!("Failed to decrypt signing key: {}", e))?;
+
+     let signing_key = SigningKey::from_slice(&key_bytes)
+         .map_err(|e| format!("Invalid signing key: {}", e))?;
+
      let did_obj = Did::new(did).map_err(|e| format!("Invalid DID: {}", e))?;
      let rev = Tid::now(LimitedU32::MIN);

-     let new_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
+     let unsigned_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
+
+     let signed_commit = unsigned_commit
+         .sign(&signing_key)
+         .map_err(|e| format!("Failed to sign commit: {:?}", e))?;

-     let new_commit_bytes = new_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;
+     let new_commit_bytes = signed_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;

      let new_root_cid = state.block_store.put(&new_commit_bytes).await
          .map_err(|e| format!("Failed to save commit block: {:?}", e))?;
+17  src/lib.rs
···
  pub mod config;
  pub mod notifications;
  pub mod oauth;
+ pub mod plc;
  pub mod repo;
  pub mod state;
  pub mod storage;
···
          .route(
              "/xrpc/com.atproto.identity.updateHandle",
              post(api::identity::update_handle),
+         )
+         .route(
+             "/xrpc/com.atproto.identity.requestPlcOperationSignature",
+             post(api::identity::request_plc_operation_signature),
+         )
+         .route(
+             "/xrpc/com.atproto.identity.signPlcOperation",
+             post(api::identity::sign_plc_operation),
+         )
+         .route(
+             "/xrpc/com.atproto.identity.submitPlcOperation",
+             post(api::identity::submit_plc_operation),
+         )
+         .route(
+             "/xrpc/com.atproto.repo.importRepo",
+             post(api::repo::import_repo),
          )
          .route(
              "/xrpc/com.atproto.admin.deleteAccount",
+2 -1  src/notifications/mod.rs
···
  pub use sender::{EmailSender, NotificationSender};
  pub use service::{
      enqueue_account_deletion, enqueue_email_update, enqueue_email_verification,
-     enqueue_notification, enqueue_password_reset, enqueue_welcome, NotificationService,
+     enqueue_notification, enqueue_password_reset, enqueue_plc_operation, enqueue_welcome,
+     NotificationService,
  };
  pub use types::{
      NewNotification, NotificationChannel, NotificationStatus, NotificationType, QueuedNotification,
+27  src/notifications/service.rs
···
      )
      .await
  }
+
+ pub async fn enqueue_plc_operation(
+     db: &PgPool,
+     user_id: Uuid,
+     token: &str,
+     hostname: &str,
+ ) -> Result<Uuid, sqlx::Error> {
+     let prefs = get_user_notification_prefs(db, user_id).await?;
+
+     let body = format!(
+         "Hello @{},\n\nYou requested to sign a PLC operation for your account.\n\nYour verification token is: {}\n\nThis token will expire in 10 minutes.\n\nIf you did not request this, you can safely ignore this message.",
+         prefs.handle, token
+     );
+
+     enqueue_notification(
+         db,
+         NewNotification::new(
+             user_id,
+             prefs.channel,
+             super::types::NotificationType::PlcOperation,
+             prefs.email.clone(),
+             Some(format!("{} - PLC Operation Token", hostname)),
+             body,
+         ),
+     )
+     .await
+ }
+1  src/notifications/types.rs
+358  src/plc/mod.rs
···
+ use base64::{Engine as _, engine::general_purpose::URL_SAFE_NO_PAD};
+ use k256::ecdsa::{SigningKey, Signature, signature::Signer};
+ use reqwest::Client;
+ use serde::{Deserialize, Serialize};
+ use serde_json::{json, Value};
+ use sha2::{Digest, Sha256};
+ use std::collections::HashMap;
+ use thiserror::Error;
+
+ #[derive(Error, Debug)]
+ pub enum PlcError {
+     #[error("HTTP request failed: {0}")]
+     Http(#[from] reqwest::Error),
+     #[error("Invalid response: {0}")]
+     InvalidResponse(String),
+     #[error("DID not found")]
+     NotFound,
+     #[error("DID is tombstoned")]
+     Tombstoned,
+     #[error("Serialization error: {0}")]
+     Serialization(String),
+     #[error("Signing error: {0}")]
+     Signing(String),
+ }
+
+ #[derive(Debug, Clone, Serialize, Deserialize)]
+ pub struct PlcOperation {
+     #[serde(rename = "type")]
+     pub op_type: String,
+     #[serde(rename = "rotationKeys")]
+     pub rotation_keys: Vec<String>,
+     #[serde(rename = "verificationMethods")]
+     pub verification_methods: HashMap<String, String>,
+     #[serde(rename = "alsoKnownAs")]
+     pub also_known_as: Vec<String>,
+     pub services: HashMap<String, PlcService>,
+     pub prev: Option<String>,
+     #[serde(skip_serializing_if = "Option::is_none")]
+     pub sig: Option<String>,
+ }
+
+ #[derive(Debug, Clone, Serialize, Deserialize)]
+ pub struct PlcService {
+     #[serde(rename = "type")]
+     pub service_type: String,
+     pub endpoint: String,
+ }
+
+ #[derive(Debug, Clone, Serialize, Deserialize)]
+ pub struct PlcTombstone {
+     #[serde(rename = "type")]
+     pub op_type: String,
+     pub prev: String,
+     #[serde(skip_serializing_if = "Option::is_none")]
+     pub sig: Option<String>,
+ }
+
+ #[derive(Debug, Clone, Serialize, Deserialize)]
+ #[serde(untagged)]
+ pub enum PlcOpOrTombstone {
+     Operation(PlcOperation),
+     Tombstone(PlcTombstone),
+ }
+
+ impl PlcOpOrTombstone {
+     pub fn is_tombstone(&self) -> bool {
+         match self {
+             PlcOpOrTombstone::Tombstone(_) => true,
+             PlcOpOrTombstone::Operation(op) => op.op_type == "plc_tombstone",
+         }
+     }
+ }
+
+ pub struct PlcClient {
+     base_url: String,
+     client: Client,
+ }
+
+ impl PlcClient {
+     pub fn new(base_url: Option<String>) -> Self {
+         let base_url = base_url.unwrap_or_else(|| {
+             std::env::var("PLC_DIRECTORY_URL")
+                 .unwrap_or_else(|_| "https://plc.directory".to_string())
+         });
+         Self {
+             base_url,
+             client: Client::new(),
+         }
+     }
+
+     fn encode_did(did: &str) -> String {
+         urlencoding::encode(did).to_string()
+     }
+
+     pub async fn get_document(&self, did: &str) -> Result<Value, PlcError> {
+         let url = format!("{}/{}", self.base_url, Self::encode_did(did));
+         let response = self.client.get(&url).send().await?;
+
+         if response.status() == reqwest::StatusCode::NOT_FOUND {
+             return Err(PlcError::NotFound);
+         }
+
+         if !response.status().is_success() {
+             let status = response.status();
+             let body = response.text().await.unwrap_or_default();
+             return Err(PlcError::InvalidResponse(format!(
+                 "HTTP {}: {}",
+                 status, body
+             )));
+         }
+
+         response.json().await.map_err(|e| PlcError::InvalidResponse(e.to_string()))
+     }
+
+     pub async fn get_document_data(&self, did: &str) -> Result<Value, PlcError> {
+         let url = format!("{}/{}/data", self.base_url, Self::encode_did(did));
+         let response = self.client.get(&url).send().await?;
+
+         if response.status() == reqwest::StatusCode::NOT_FOUND {
+             return Err(PlcError::NotFound);
+         }
+
+         if !response.status().is_success() {
+             let status = response.status();
+             let body = response.text().await.unwrap_or_default();
+             return Err(PlcError::InvalidResponse(format!(
+                 "HTTP {}: {}",
+                 status, body
+             )));
+         }
+
+         response.json().await.map_err(|e| PlcError::InvalidResponse(e.to_string()))
+     }
+
+     pub async fn get_last_op(&self, did: &str) -> Result<PlcOpOrTombstone, PlcError> {
+         let url = format!("{}/{}/log/last", self.base_url, Self::encode_did(did));
+         let response = self.client.get(&url).send().await?;
+
+         if response.status() == reqwest::StatusCode::NOT_FOUND {
+             return Err(PlcError::NotFound);
+         }
+
+         if !response.status().is_success() {
+             let status = response.status();
+             let body = response.text().await.unwrap_or_default();
+             return Err(PlcError::InvalidResponse(format!(
+                 "HTTP {}: {}",
+                 status, body
+             )));
+         }
+
+         response.json().await.map_err(|e| PlcError::InvalidResponse(e.to_string()))
+     }
+
+     pub async fn get_audit_log(&self, did: &str) -> Result<Vec<Value>, PlcError> {
+         let url = format!("{}/{}/log/audit", self.base_url, Self::encode_did(did));
+         let response = self.client.get(&url).send().await?;
+
+         if response.status() == reqwest::StatusCode::NOT_FOUND {
+             return Err(PlcError::NotFound);
+         }
+
+         if !response.status().is_success() {
+             let status = response.status();
+             let body = response.text().await.unwrap_or_default();
+             return Err(PlcError::InvalidResponse(format!(
+                 "HTTP {}: {}",
+                 status, body
+             )));
+         }
+
+         response.json().await.map_err(|e| PlcError::InvalidResponse(e.to_string()))
+     }
+
+     pub async fn send_operation(&self, did: &str, operation: &Value) -> Result<(), PlcError> {
+         let url = format!("{}/{}", self.base_url, Self::encode_did(did));
+         let response = self.client
+             .post(&url)
+             .json(operation)
+             .send()
+             .await?;
+
+         if !response.status().is_success() {
+             let status = response.status();
+             let body = response.text().await.unwrap_or_default();
+             return Err(PlcError::InvalidResponse(format!(
+                 "HTTP {}: {}",
+                 status, body
+             )));
+         }
+
+         Ok(())
+     }
+ }
+
+ pub fn cid_for_cbor(value: &Value) -> Result<String, PlcError> {
+     let cbor_bytes = serde_ipld_dagcbor::to_vec(value)
+         .map_err(|e| PlcError::Serialization(e.to_string()))?;
+
+     let mut hasher = Sha256::new();
+     hasher.update(&cbor_bytes);
+     let hash = hasher.finalize();
+
+     let multihash = multihash::Multihash::wrap(0x12, &hash)
+         .map_err(|e| PlcError::Serialization(e.to_string()))?;
+     let cid = cid::Cid::new_v1(0x71, multihash);
+
+     Ok(cid.to_string())
+ }
+
+ pub fn sign_operation(
+     operation: &Value,
+     signing_key: &SigningKey,
+ ) -> Result<Value, PlcError> {
+     let mut op = operation.clone();
+     if let Some(obj) = op.as_object_mut() {
+         obj.remove("sig");
+     }
+
+     let cbor_bytes = serde_ipld_dagcbor::to_vec(&op)
+         .map_err(|e| PlcError::Serialization(e.to_string()))?;
+
+     let signature: Signature = signing_key.sign(&cbor_bytes);
+     let sig_bytes = signature.to_bytes();
+     let sig_b64 = URL_SAFE_NO_PAD.encode(sig_bytes);
+
+     if let Some(obj) = op.as_object_mut() {
+         obj.insert("sig".to_string(), json!(sig_b64));
+     }
+
+     Ok(op)
+ }
+
+ pub fn create_update_op(
+     last_op: &PlcOpOrTombstone,
+     rotation_keys: Option<Vec<String>>,
+     verification_methods: Option<HashMap<String, String>>,
+     also_known_as: Option<Vec<String>>,
+     services: Option<HashMap<String, PlcService>>,
+ ) -> Result<Value, PlcError> {
+     let prev_value = match last_op {
+         PlcOpOrTombstone::Operation(op) => serde_json::to_value(op)
+             .map_err(|e| PlcError::Serialization(e.to_string()))?,
+         PlcOpOrTombstone::Tombstone(t) => serde_json::to_value(t)
+             .map_err(|e| PlcError::Serialization(e.to_string()))?,
+     };
+
+     let prev_cid = cid_for_cbor(&prev_value)?;
+
+     let (base_rotation_keys, base_verification_methods, base_also_known_as, base_services) =
+         match last_op {
+             PlcOpOrTombstone::Operation(op) => (
+                 op.rotation_keys.clone(),
+                 op.verification_methods.clone(),
+                 op.also_known_as.clone(),
+                 op.services.clone(),
+             ),
+             PlcOpOrTombstone::Tombstone(_) => {
+                 return Err(PlcError::Tombstoned);
+             }
+         };
+
+     let new_op = PlcOperation {
+         op_type: "plc_operation".to_string(),
+         rotation_keys: rotation_keys.unwrap_or(base_rotation_keys),
+         verification_methods: verification_methods.unwrap_or(base_verification_methods),
+         also_known_as: also_known_as.unwrap_or(base_also_known_as),
+         services: services.unwrap_or(base_services),
+         prev: Some(prev_cid),
+         sig: None,
+     };
+
+     serde_json::to_value(new_op).map_err(|e| PlcError::Serialization(e.to_string()))
+ }
+
+ pub fn signing_key_to_did_key(signing_key: &SigningKey) -> String {
+     let verifying_key = signing_key.verifying_key();
+     let point = verifying_key.to_encoded_point(true);
+     let compressed_bytes = point.as_bytes();
+
+     let mut prefixed = vec![0xe7, 0x01];
+     prefixed.extend_from_slice(compressed_bytes);
+
+     let encoded = multibase::encode(multibase::Base::Base58Btc, &prefixed);
+     format!("did:key:{}", encoded)
+ }
+
+ pub fn validate_plc_operation(op: &Value) -> Result<(), PlcError> {
+     let obj = op.as_object()
+         .ok_or_else(|| PlcError::InvalidResponse("Operation must be an object".to_string()))?;
+
+     let op_type = obj.get("type")
+         .and_then(|v| v.as_str())
+         .ok_or_else(|| PlcError::InvalidResponse("Missing type field".to_string()))?;
+
+     if op_type != "plc_operation" && op_type != "plc_tombstone" {
+         return Err(PlcError::InvalidResponse(format!("Invalid type: {}", op_type)));
+     }
+
+     if op_type == "plc_operation" {
+         if obj.get("rotationKeys").is_none() {
+             return Err(PlcError::InvalidResponse("Missing rotationKeys".to_string()));
+         }
+         if obj.get("verificationMethods").is_none() {
+             return Err(PlcError::InvalidResponse("Missing verificationMethods".to_string()));
+         }
+         if obj.get("alsoKnownAs").is_none() {
+             return Err(PlcError::InvalidResponse("Missing alsoKnownAs".to_string()));
+         }
+         if obj.get("services").is_none() {
+             return Err(PlcError::InvalidResponse("Missing services".to_string()));
+         }
+     }
+
+     if obj.get("sig").is_none() {
+         return Err(PlcError::InvalidResponse("Missing sig".to_string()));
+     }
+
+     Ok(())
+ }
+
+ #[cfg(test)]
+ mod tests {
+     use super::*;
+
+     #[test]
+     fn test_signing_key_to_did_key() {
+         let key = SigningKey::random(&mut rand::thread_rng());
+         let did_key = signing_key_to_did_key(&key);
+         assert!(did_key.starts_with("did:key:z"));
+     }
+
+     #[test]
+     fn test_cid_for_cbor() {
+         let value = json!({
+             "test": "data",
+             "number": 42
+         });
+         let cid = cid_for_cbor(&value).unwrap();
+         assert!(cid.starts_with("bafyrei"));
+     }
+
+     #[test]
+     fn test_sign_operation() {
+         let key = SigningKey::random(&mut rand::thread_rng());
+         let op = json!({
+             "type": "plc_operation",
+             "rotationKeys": [],
+             "verificationMethods": {},
+             "alsoKnownAs": [],
+             "services": {},
+             "prev": null
+         });
+
+         let signed = sign_operation(&op, &key).unwrap();
+         assert!(signed.get("sig").is_some());
+     }
+ }
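
Note: for orientation, a minimal sketch of how the helpers in this module compose into a full update (get_last_op, create_update_op, sign_operation, send_operation). The handle value and key ownership are assumptions, and real callers also need the PDS-side checks in the identity handlers; this is not part of the diff.

// Illustrative composition of the plc module helpers (sketch only).
use k256::ecdsa::SigningKey;

async fn rotate_handle(did: &str, new_handle: &str, key: &SigningKey) -> Result<(), PlcError> {
    let client = PlcClient::new(None);
    let last = client.get_last_op(did).await?;
    let unsigned = create_update_op(
        &last,
        None,                                        // keep existing rotationKeys
        None,                                        // keep existing verificationMethods
        Some(vec![format!("at://{}", new_handle)]),  // swap the handle alias
        None,                                        // keep existing services
    )?;
    let signed = sign_operation(&unsigned, key)?;
    client.send_operation(did, &signed).await
}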
+8 -6  src/sync/car.rs
···
  use cid::Cid;
+ use iroh_car::CarHeader;
  use std::io::Write;

  pub fn write_varint<W: Write>(mut writer: W, mut value: u64) -> std::io::Result<()> {
···
  }

  pub fn encode_car_header(root_cid: &Cid) -> Vec<u8> {
-     let header = serde_ipld_dagcbor::to_vec(&serde_json::json!({
-         "version": 1u64,
-         "roots": [root_cid.to_bytes()]
-     }))
-     .unwrap_or_default();
-     header
+     let header = CarHeader::new_v1(vec![root_cid.clone()]);
+     let header_cbor = header.encode().unwrap_or_default();
+
+     let mut result = Vec::new();
+     write_varint(&mut result, header_cbor.len() as u64).unwrap();
+     result.extend_from_slice(&header_cbor);
+     result
  }
+464  src/sync/import.rs
···
1
+
use bytes::Bytes;
2
+
use cid::Cid;
3
+
use ipld_core::ipld::Ipld;
4
+
use iroh_car::CarReader;
5
+
use serde_json::Value as JsonValue;
6
+
use sqlx::PgPool;
7
+
use std::collections::HashMap;
8
+
use std::io::Cursor;
9
+
use thiserror::Error;
10
+
use tracing::debug;
11
+
use uuid::Uuid;
12
+
13
+
#[derive(Error, Debug)]
14
+
pub enum ImportError {
15
+
#[error("CAR parsing error: {0}")]
16
+
CarParse(String),
17
+
#[error("Expected exactly one root in CAR file")]
18
+
InvalidRootCount,
19
+
#[error("Block not found: {0}")]
20
+
BlockNotFound(String),
21
+
#[error("Invalid CBOR: {0}")]
22
+
InvalidCbor(String),
23
+
#[error("Database error: {0}")]
24
+
Database(#[from] sqlx::Error),
25
+
#[error("Block store error: {0}")]
26
+
BlockStore(String),
27
+
#[error("Import size limit exceeded")]
28
+
SizeLimitExceeded,
29
+
#[error("Repo not found")]
30
+
RepoNotFound,
31
+
#[error("Concurrent modification detected")]
32
+
ConcurrentModification,
33
+
#[error("Invalid commit structure: {0}")]
34
+
InvalidCommit(String),
35
+
#[error("Verification failed: {0}")]
36
+
VerificationFailed(#[from] super::verify::VerifyError),
37
+
#[error("DID mismatch: CAR is for {car_did}, but authenticated as {auth_did}")]
38
+
DidMismatch { car_did: String, auth_did: String },
39
+
}
40
+
41
+
#[derive(Debug, Clone)]
42
+
pub struct BlobRef {
43
+
pub cid: String,
44
+
pub mime_type: Option<String>,
45
+
}
46
+
47
+
pub async fn parse_car(data: &[u8]) -> Result<(Cid, HashMap<Cid, Bytes>), ImportError> {
48
+
let cursor = Cursor::new(data);
49
+
let mut reader = CarReader::new(cursor)
50
+
.await
51
+
.map_err(|e| ImportError::CarParse(e.to_string()))?;
52
+
53
+
let header = reader.header();
54
+
let roots = header.roots();
55
+
56
+
if roots.len() != 1 {
57
+
return Err(ImportError::InvalidRootCount);
58
+
}
59
+
60
+
let root = roots[0];
61
+
let mut blocks = HashMap::new();
62
+
63
+
while let Ok(Some((cid, block))) = reader.next_block().await { // a read error ends the loop early; frames after a corrupt block are silently dropped
64
+
blocks.insert(cid, Bytes::from(block));
65
+
}
66
+
67
+
if !blocks.contains_key(&root) {
68
+
return Err(ImportError::BlockNotFound(root.to_string()));
69
+
}
70
+
71
+
Ok((root, blocks))
72
+
}
73
+
74
+
pub fn find_blob_refs_ipld(value: &Ipld, depth: usize) -> Vec<BlobRef> {
75
+
if depth > 32 {
76
+
return vec![];
77
+
}
78
+
79
+
match value {
80
+
Ipld::List(arr) => arr
81
+
.iter()
82
+
.flat_map(|v| find_blob_refs_ipld(v, depth + 1))
83
+
.collect(),
84
+
Ipld::Map(obj) => {
85
+
if let Some(Ipld::String(type_str)) = obj.get("$type") {
86
+
if type_str == "blob" {
87
+
if let Some(Ipld::Link(link_cid)) = obj.get("ref") {
88
+
let mime = obj
89
+
.get("mimeType")
90
+
.and_then(|v| if let Ipld::String(s) = v { Some(s.clone()) } else { None });
91
+
return vec![BlobRef {
92
+
cid: link_cid.to_string(),
93
+
mime_type: mime,
94
+
}];
95
+
}
96
+
}
97
+
}
98
+
99
+
obj.values()
100
+
.flat_map(|v| find_blob_refs_ipld(v, depth + 1))
101
+
.collect()
102
+
}
103
+
_ => vec![],
104
+
}
105
+
}
106
+
107
+
pub fn find_blob_refs(value: &JsonValue, depth: usize) -> Vec<BlobRef> {
108
+
if depth > 32 {
109
+
return vec![];
110
+
}
111
+
112
+
match value {
113
+
JsonValue::Array(arr) => arr
114
+
.iter()
115
+
.flat_map(|v| find_blob_refs(v, depth + 1))
116
+
.collect(),
117
+
JsonValue::Object(obj) => {
118
+
if let Some(JsonValue::String(type_str)) = obj.get("$type") {
119
+
if type_str == "blob" {
120
+
if let Some(JsonValue::Object(ref_obj)) = obj.get("ref") {
121
+
if let Some(JsonValue::String(link)) = ref_obj.get("$link") {
122
+
let mime = obj
123
+
.get("mimeType")
124
+
.and_then(|v| v.as_str())
125
+
.map(String::from);
126
+
return vec![BlobRef {
127
+
cid: link.clone(),
128
+
mime_type: mime,
129
+
}];
130
+
}
131
+
}
132
+
}
133
+
}
134
+
135
+
obj.values()
136
+
.flat_map(|v| find_blob_refs(v, depth + 1))
137
+
.collect()
138
+
}
139
+
_ => vec![],
140
+
}
141
+
}
142
+
143
+
pub fn extract_links(value: &Ipld, links: &mut Vec<Cid>) {
144
+
match value {
145
+
Ipld::Link(cid) => {
146
+
links.push(*cid);
147
+
}
148
+
Ipld::Map(map) => {
149
+
for v in map.values() {
150
+
extract_links(v, links);
151
+
}
152
+
}
153
+
Ipld::List(arr) => {
154
+
for v in arr {
155
+
extract_links(v, links);
156
+
}
157
+
}
158
+
_ => {}
159
+
}
160
+
}
161
+
162
+
#[derive(Debug)]
163
+
pub struct ImportedRecord {
164
+
pub collection: String,
165
+
pub rkey: String,
166
+
pub cid: Cid,
167
+
pub blob_refs: Vec<BlobRef>,
168
+
}
169
+
170
+
pub fn walk_mst(
171
+
blocks: &HashMap<Cid, Bytes>,
172
+
root_cid: &Cid,
173
+
) -> Result<Vec<ImportedRecord>, ImportError> {
174
+
let mut records = Vec::new();
175
+
let mut stack = vec![*root_cid];
176
+
let mut visited = std::collections::HashSet::new();
177
+
178
+
while let Some(cid) = stack.pop() {
179
+
if visited.contains(&cid) {
180
+
continue;
181
+
}
182
+
visited.insert(cid);
183
+
184
+
let block = blocks
185
+
.get(&cid)
186
+
.ok_or_else(|| ImportError::BlockNotFound(cid.to_string()))?;
187
+
188
+
let value: Ipld = serde_ipld_dagcbor::from_slice(block)
189
+
.map_err(|e| ImportError::InvalidCbor(e.to_string()))?;
190
+
191
+
if let Ipld::Map(ref obj) = value {
192
+
if let Some(Ipld::List(entries)) = obj.get("e") {
193
+
for entry in entries {
194
+
if let Ipld::Map(entry_obj) = entry {
195
+
let key = entry_obj.get("k").and_then(|k| { // NOTE: reads "k" as the full key; MST prefix compression ("p") is not reconstructed here
196
+
if let Ipld::Bytes(b) = k {
197
+
String::from_utf8(b.clone()).ok()
198
+
} else if let Ipld::String(s) = k {
199
+
Some(s.clone())
200
+
} else {
201
+
None
202
+
}
203
+
});
204
+
205
+
let record_cid = entry_obj.get("v").and_then(|v| {
206
+
if let Ipld::Link(cid) = v {
207
+
Some(*cid)
208
+
} else {
209
+
None
210
+
}
211
+
});
212
+
213
+
if let (Some(key), Some(record_cid)) = (key, record_cid) {
214
+
if let Some(record_block) = blocks.get(&record_cid) {
215
+
if let Ok(record_value) =
216
+
serde_ipld_dagcbor::from_slice::<Ipld>(record_block)
217
+
{
218
+
let blob_refs = find_blob_refs_ipld(&record_value, 0);
219
+
220
+
let parts: Vec<&str> = key.split('/').collect();
221
+
if parts.len() >= 2 {
222
+
let collection = parts[..parts.len() - 1].join("/");
223
+
let rkey = parts[parts.len() - 1].to_string();
224
+
225
+
records.push(ImportedRecord {
226
+
collection,
227
+
rkey,
228
+
cid: record_cid,
229
+
blob_refs,
230
+
});
231
+
}
232
+
}
233
+
}
234
+
}
235
+
236
+
if let Some(Ipld::Link(tree_cid)) = entry_obj.get("t") {
237
+
stack.push(*tree_cid);
238
+
}
239
+
}
240
+
}
241
+
}
242
+
243
+
if let Some(Ipld::Link(left_cid)) = obj.get("l") {
244
+
stack.push(*left_cid);
245
+
}
246
+
}
247
+
}
248
+
249
+
Ok(records)
250
+
}
251
+
252
+
pub struct CommitInfo {
253
+
pub rev: Option<String>,
254
+
pub prev: Option<String>,
255
+
}
256
+
257
+
fn extract_commit_info(commit: &Ipld) -> Result<(Cid, CommitInfo), ImportError> {
258
+
let obj = match commit {
259
+
Ipld::Map(m) => m,
260
+
_ => return Err(ImportError::InvalidCommit("Commit must be a map".to_string())),
261
+
};
262
+
263
+
let data_cid = obj
264
+
.get("data")
265
+
.and_then(|d| if let Ipld::Link(cid) = d { Some(*cid) } else { None })
266
+
.ok_or_else(|| ImportError::InvalidCommit("Missing data field".to_string()))?;
267
+
268
+
let rev = obj.get("rev").and_then(|r| {
269
+
if let Ipld::String(s) = r {
270
+
Some(s.clone())
271
+
} else {
272
+
None
273
+
}
274
+
});
275
+
276
+
let prev = obj.get("prev").and_then(|p| {
277
+
if let Ipld::Link(cid) = p {
278
+
Some(cid.to_string())
279
+
} else if let Ipld::Null = p {
280
+
None
281
+
} else {
282
+
None
283
+
}
284
+
});
285
+
286
+
Ok((data_cid, CommitInfo { rev, prev }))
287
+
}
288
+
289
+
pub async fn apply_import(
290
+
db: &PgPool,
291
+
user_id: Uuid,
292
+
root: Cid,
293
+
blocks: HashMap<Cid, Bytes>,
294
+
max_blocks: usize,
295
+
) -> Result<Vec<ImportedRecord>, ImportError> {
296
+
if blocks.len() > max_blocks {
297
+
return Err(ImportError::SizeLimitExceeded);
298
+
}
299
+
300
+
let root_block = blocks
301
+
.get(&root)
302
+
.ok_or_else(|| ImportError::BlockNotFound(root.to_string()))?;
303
+
let commit: Ipld = serde_ipld_dagcbor::from_slice(root_block)
304
+
.map_err(|e| ImportError::InvalidCbor(e.to_string()))?;
305
+
306
+
let (data_cid, _commit_info) = extract_commit_info(&commit)?;
307
+
308
+
let records = walk_mst(&blocks, &data_cid)?;
309
+
310
+
debug!(
311
+
"Importing {} blocks and {} records for user {}",
312
+
blocks.len(),
313
+
records.len(),
314
+
user_id
315
+
);
316
+
317
+
let mut tx = db.begin().await?;
318
+
319
+
let repo = sqlx::query!(
320
+
"SELECT repo_root_cid FROM repos WHERE user_id = $1 FOR UPDATE NOWAIT",
321
+
user_id
322
+
)
323
+
.fetch_optional(&mut *tx)
324
+
.await
325
+
.map_err(|e| {
326
+
if let sqlx::Error::Database(ref db_err) = e {
327
+
if db_err.code().as_deref() == Some("55P03") {
328
+
return ImportError::ConcurrentModification;
329
+
}
330
+
}
331
+
ImportError::Database(e)
332
+
})?;
333
+
334
+
if repo.is_none() {
335
+
return Err(ImportError::RepoNotFound);
336
+
}
337
+
338
+
let block_chunks: Vec<Vec<(&Cid, &Bytes)>> = blocks
339
+
.iter()
340
+
.collect::<Vec<_>>()
341
+
.chunks(100)
342
+
.map(|c| c.to_vec())
343
+
.collect();
344
+
345
+
for chunk in block_chunks {
346
+
for (cid, data) in chunk {
347
+
let cid_bytes = cid.to_bytes();
348
+
sqlx::query!(
349
+
"INSERT INTO blocks (cid, data) VALUES ($1, $2) ON CONFLICT (cid) DO NOTHING",
350
+
&cid_bytes,
351
+
data.as_ref()
352
+
)
353
+
.execute(&mut *tx)
354
+
.await?;
355
+
}
356
+
}
357
+
358
+
let root_str = root.to_string();
359
+
sqlx::query!(
360
+
"UPDATE repos SET repo_root_cid = $1, updated_at = NOW() WHERE user_id = $2",
361
+
root_str,
362
+
user_id
363
+
)
364
+
.execute(&mut *tx)
365
+
.await?;
366
+
367
+
sqlx::query!("DELETE FROM records WHERE repo_id = $1", user_id)
368
+
.execute(&mut *tx)
369
+
.await?;
370
+
371
+
for record in &records {
372
+
let record_cid_str = record.cid.to_string();
373
+
sqlx::query!(
374
+
r#"
375
+
INSERT INTO records (repo_id, collection, rkey, record_cid)
376
+
VALUES ($1, $2, $3, $4)
377
+
ON CONFLICT (repo_id, collection, rkey) DO UPDATE SET record_cid = $4
378
+
"#,
379
+
user_id,
380
+
record.collection,
381
+
record.rkey,
382
+
record_cid_str
383
+
)
384
+
.execute(&mut *tx)
385
+
.await?;
386
+
}
387
+
388
+
tx.commit().await?;
389
+
390
+
debug!(
391
+
"Successfully imported {} blocks and {} records",
392
+
blocks.len(),
393
+
records.len()
394
+
);
395
+
396
+
Ok(records)
397
+
}
398
+
399
+
#[cfg(test)]
400
+
mod tests {
401
+
use super::*;
402
+
403
+
#[test]
404
+
fn test_find_blob_refs() {
405
+
let record = serde_json::json!({
406
+
"$type": "app.bsky.feed.post",
407
+
"text": "Hello world",
408
+
"embed": {
409
+
"$type": "app.bsky.embed.images",
410
+
"images": [
411
+
{
412
+
"alt": "Test image",
413
+
"image": {
414
+
"$type": "blob",
415
+
"ref": {
416
+
"$link": "bafkreihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku"
417
+
},
418
+
"mimeType": "image/jpeg",
419
+
"size": 12345
420
+
}
421
+
}
422
+
]
423
+
}
424
+
});
425
+
426
+
let blob_refs = find_blob_refs(&record, 0);
427
+
assert_eq!(blob_refs.len(), 1);
428
+
assert_eq!(
429
+
blob_refs[0].cid,
430
+
"bafkreihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku"
431
+
);
432
+
assert_eq!(blob_refs[0].mime_type, Some("image/jpeg".to_string()));
433
+
}
434
+
435
+
#[test]
436
+
fn test_find_blob_refs_no_blobs() {
437
+
let record = serde_json::json!({
438
+
"$type": "app.bsky.feed.post",
439
+
"text": "Hello world"
440
+
});
441
+
442
+
let blob_refs = find_blob_refs(&record, 0);
443
+
assert!(blob_refs.is_empty());
444
+
}
445
+
446
+
#[test]
447
+
fn test_find_blob_refs_depth_limit() {
448
+
fn deeply_nested(depth: usize) -> JsonValue {
449
+
if depth == 0 {
450
+
serde_json::json!({
451
+
"$type": "blob",
452
+
"ref": { "$link": "bafkreitest" },
453
+
"mimeType": "image/png"
454
+
})
455
+
} else {
456
+
serde_json::json!({ "nested": deeply_nested(depth - 1) })
457
+
}
458
+
}
459
+
460
+
let deep = deeply_nested(40);
461
+
let blob_refs = find_blob_refs(&deep, 0);
462
+
assert!(blob_refs.is_empty());
463
+
}
464
+
}
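
A sketch of how an upload handler might drive this module, using only the `parse_car` and `apply_import` signatures defined above; the block cap is an illustrative constant and the authenticated `user_id` would come from the caller's session:

```rust
use sqlx::PgPool;
use uuid::Uuid;

// Illustrative cap; the real limit is whatever the endpoint enforces.
const MAX_IMPORT_BLOCKS: usize = 10_000;

pub async fn import_car_for_user(
    db: &PgPool,
    user_id: Uuid,
    car_bytes: &[u8],
) -> Result<usize, ImportError> {
    // Parse the CAR into its single root plus an in-memory block map.
    let (root, blocks) = parse_car(car_bytes).await?;

    // apply_import re-reads the commit, walks the MST, and swaps the repo
    // root and records inside a single transaction.
    let records = apply_import(db, user_id, root, blocks, MAX_IMPORT_BLOCKS).await?;
    Ok(records.len())
}
```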
+3
src/sync/mod.rs
···
4
4
pub mod crawl;
5
5
pub mod firehose;
6
6
pub mod frame;
7
+
pub mod import;
7
8
pub mod listener;
8
9
pub mod relay_client;
9
10
pub mod repo;
10
11
pub mod subscribe_repos;
11
12
pub mod util;
13
+
pub mod verify;
12
14
13
15
pub use blob::{get_blob, list_blobs};
14
16
pub use commit::{get_latest_commit, get_repo_status, list_repos};
15
17
pub use crawl::{notify_of_update, request_crawl};
16
18
pub use repo::{get_blocks, get_repo, get_record};
17
19
pub use subscribe_repos::subscribe_repos;
20
+
pub use verify::{CarVerifier, VerifiedCar, VerifyError};
+12
-18
src/sync/repo.rs
···
7
7
Json,
8
8
};
9
9
use cid::Cid;
10
+
use ipld_core::ipld::Ipld;
10
11
use jacquard_repo::storage::BlockStore;
11
12
use serde::Deserialize;
12
13
use serde_json::json;
···
165
166
writer.write_all(&block).unwrap();
166
167
car_bytes.extend_from_slice(&writer);
167
168
168
-
if let Ok(value) = serde_ipld_dagcbor::from_slice::<serde_json::Value>(&block) {
169
-
extract_links_json(&value, &mut stack);
169
+
if let Ok(value) = serde_ipld_dagcbor::from_slice::<Ipld>(&block) {
170
+
extract_links_ipld(&value, &mut stack);
170
171
}
171
172
}
172
173
}
···
179
180
.into_response()
180
181
}
181
182
182
-
fn extract_links_json(value: &serde_json::Value, stack: &mut Vec<Cid>) {
183
+
fn extract_links_ipld(value: &Ipld, stack: &mut Vec<Cid>) {
183
184
match value {
184
-
serde_json::Value::Object(map) => {
185
-
if let Some(serde_json::Value::String(s)) = map.get("/") {
186
-
if let Ok(cid) = Cid::from_str(s) {
187
-
stack.push(cid);
188
-
}
189
-
} else if let Some(serde_json::Value::String(s)) = map.get("$link") {
190
-
if let Ok(cid) = Cid::from_str(s) {
191
-
stack.push(cid);
192
-
}
193
-
} else {
194
-
for v in map.values() {
195
-
extract_links_json(v, stack);
196
-
}
185
+
Ipld::Link(cid) => {
186
+
stack.push(*cid);
187
+
}
188
+
Ipld::Map(map) => {
189
+
for v in map.values() {
190
+
extract_links_ipld(v, stack);
197
191
}
198
192
}
199
-
serde_json::Value::Array(arr) => {
193
+
Ipld::List(arr) => {
200
194
for v in arr {
201
-
extract_links_json(v, stack);
195
+
extract_links_ipld(v, stack);
202
196
}
203
197
}
204
198
_ => {}
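
The switch to `Ipld` matters because DAG-CBOR links (CBOR tag 42) only surface as a dedicated `Ipld::Link` variant; `serde_json::Value` has no link type, so CIDs do not round-trip cleanly through it. A small self-contained sketch of the same link-collection walk over a raw block:

```rust
use cid::Cid;
use ipld_core::ipld::Ipld;

// Decode a DAG-CBOR block and collect every CID it links to.
fn links_in_block(block: &[u8]) -> Vec<Cid> {
    let mut links = Vec::new();
    if let Ok(value) = serde_ipld_dagcbor::from_slice::<Ipld>(block) {
        collect_links(&value, &mut links);
    }
    links
}

fn collect_links(value: &Ipld, out: &mut Vec<Cid>) {
    match value {
        Ipld::Link(cid) => out.push(*cid),
        Ipld::Map(map) => map.values().for_each(|v| collect_links(v, out)),
        Ipld::List(list) => list.iter().for_each(|v| collect_links(v, out)),
        _ => {}
    }
}
```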
+646
src/sync/verify.rs
···
1
+
use bytes::Bytes;
2
+
use cid::Cid;
3
+
use jacquard::common::types::crypto::PublicKey;
4
+
use jacquard::common::types::did_doc::DidDocument;
5
+
use jacquard::common::IntoStatic;
6
+
use jacquard_repo::commit::Commit;
7
+
use reqwest::Client;
8
+
use std::collections::HashMap;
9
+
use thiserror::Error;
10
+
use tracing::{debug, warn};
11
+
12
+
#[derive(Error, Debug)]
13
+
pub enum VerifyError {
14
+
#[error("Invalid commit: {0}")]
15
+
InvalidCommit(String),
16
+
#[error("DID mismatch: commit has {commit_did}, expected {expected_did}")]
17
+
DidMismatch {
18
+
commit_did: String,
19
+
expected_did: String,
20
+
},
21
+
#[error("Failed to resolve DID: {0}")]
22
+
DidResolutionFailed(String),
23
+
#[error("No signing key found in DID document")]
24
+
NoSigningKey,
25
+
#[error("Invalid signature")]
26
+
InvalidSignature,
27
+
#[error("MST validation failed: {0}")]
28
+
MstValidationFailed(String),
29
+
#[error("Block not found: {0}")]
30
+
BlockNotFound(String),
31
+
#[error("Invalid CBOR: {0}")]
32
+
InvalidCbor(String),
33
+
}
34
+
35
+
pub struct CarVerifier {
36
+
http_client: Client,
37
+
}
38
+
39
+
impl Default for CarVerifier {
40
+
fn default() -> Self {
41
+
Self::new()
42
+
}
43
+
}
44
+
45
+
impl CarVerifier {
46
+
pub fn new() -> Self {
47
+
Self {
48
+
http_client: Client::builder()
49
+
.timeout(std::time::Duration::from_secs(10))
50
+
.build()
51
+
.unwrap_or_default(),
52
+
}
53
+
}
54
+
55
+
pub async fn verify_car(
56
+
&self,
57
+
expected_did: &str,
58
+
root_cid: &Cid,
59
+
blocks: &HashMap<Cid, Bytes>,
60
+
) -> Result<VerifiedCar, VerifyError> {
61
+
let root_block = blocks
62
+
.get(root_cid)
63
+
.ok_or_else(|| VerifyError::BlockNotFound(root_cid.to_string()))?;
64
+
65
+
let commit = Commit::from_cbor(root_block)
66
+
.map_err(|e| VerifyError::InvalidCommit(e.to_string()))?;
67
+
68
+
let commit_did = commit.did().as_str();
69
+
if commit_did != expected_did {
70
+
return Err(VerifyError::DidMismatch {
71
+
commit_did: commit_did.to_string(),
72
+
expected_did: expected_did.to_string(),
73
+
});
74
+
}
75
+
76
+
let pubkey = self.resolve_did_signing_key(commit_did).await?;
77
+
78
+
commit
79
+
.verify(&pubkey)
80
+
.map_err(|_| VerifyError::InvalidSignature)?;
81
+
82
+
debug!("Commit signature verified for DID {}", commit_did);
83
+
84
+
let data_cid = commit.data();
85
+
self.verify_mst_structure(data_cid, blocks)?;
86
+
87
+
debug!("MST structure verified for DID {}", commit_did);
88
+
89
+
Ok(VerifiedCar {
90
+
did: commit_did.to_string(),
91
+
rev: commit.rev().to_string(),
92
+
data_cid: *data_cid,
93
+
prev: commit.prev().cloned(),
94
+
})
95
+
}
96
+
97
+
async fn resolve_did_signing_key(&self, did: &str) -> Result<PublicKey<'static>, VerifyError> {
98
+
let did_doc = self.resolve_did_document(did).await?;
99
+
100
+
did_doc
101
+
.atproto_public_key()
102
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?
103
+
.ok_or(VerifyError::NoSigningKey)
104
+
}
105
+
106
+
async fn resolve_did_document(&self, did: &str) -> Result<DidDocument<'static>, VerifyError> {
107
+
if did.starts_with("did:plc:") {
108
+
self.resolve_plc_did(did).await
109
+
} else if did.starts_with("did:web:") {
110
+
self.resolve_web_did(did).await
111
+
} else {
112
+
Err(VerifyError::DidResolutionFailed(format!(
113
+
"Unsupported DID method: {}",
114
+
did
115
+
)))
116
+
}
117
+
}
118
+
119
+
async fn resolve_plc_did(&self, did: &str) -> Result<DidDocument<'static>, VerifyError> {
120
+
let plc_url = std::env::var("PLC_DIRECTORY_URL")
121
+
.unwrap_or_else(|_| "https://plc.directory".to_string());
122
+
let url = format!("{}/{}", plc_url, urlencoding::encode(did));
123
+
124
+
let response = self
125
+
.http_client
126
+
.get(&url)
127
+
.send()
128
+
.await
129
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
130
+
131
+
if !response.status().is_success() {
132
+
return Err(VerifyError::DidResolutionFailed(format!(
133
+
"PLC directory returned {}",
134
+
response.status()
135
+
)));
136
+
}
137
+
138
+
let body = response
139
+
.text()
140
+
.await
141
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
142
+
143
+
let doc: DidDocument<'_> = serde_json::from_str(&body)
144
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
145
+
146
+
Ok(doc.into_static())
147
+
}
148
+
149
+
async fn resolve_web_did(&self, did: &str) -> Result<DidDocument<'static>, VerifyError> {
150
+
let domain = did
151
+
.strip_prefix("did:web:")
152
+
.ok_or_else(|| VerifyError::DidResolutionFailed("Invalid did:web format".to_string()))?;
153
+
154
+
let domain_decoded = urlencoding::decode(domain)
155
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
156
+
157
+
let url = if domain_decoded.contains(':') || domain_decoded.contains('/') {
158
+
format!("https://{}/.well-known/did.json", domain_decoded)
159
+
} else {
160
+
format!("https://{}/.well-known/did.json", domain_decoded)
161
+
};
162
+
163
+
let response = self
164
+
.http_client
165
+
.get(&url)
166
+
.send()
167
+
.await
168
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
169
+
170
+
if !response.status().is_success() {
171
+
return Err(VerifyError::DidResolutionFailed(format!(
172
+
"did:web resolution returned {}",
173
+
response.status()
174
+
)));
175
+
}
176
+
177
+
let body = response
178
+
.text()
179
+
.await
180
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
181
+
182
+
let doc: DidDocument<'_> = serde_json::from_str(&body)
183
+
.map_err(|e| VerifyError::DidResolutionFailed(e.to_string()))?;
184
+
185
+
Ok(doc.into_static())
186
+
}
187
+
188
+
fn verify_mst_structure(
189
+
&self,
190
+
data_cid: &Cid,
191
+
blocks: &HashMap<Cid, Bytes>,
192
+
) -> Result<(), VerifyError> {
193
+
use ipld_core::ipld::Ipld;
194
+
195
+
let mut stack = vec![*data_cid];
196
+
let mut visited = std::collections::HashSet::new();
197
+
let mut node_count = 0;
198
+
const MAX_NODES: usize = 100_000;
199
+
200
+
while let Some(cid) = stack.pop() {
201
+
if visited.contains(&cid) {
202
+
continue;
203
+
}
204
+
visited.insert(cid);
205
+
node_count += 1;
206
+
207
+
if node_count > MAX_NODES {
208
+
return Err(VerifyError::MstValidationFailed(
209
+
"MST exceeds maximum node count".to_string(),
210
+
));
211
+
}
212
+
213
+
let block = blocks
214
+
.get(&cid)
215
+
.ok_or_else(|| VerifyError::BlockNotFound(cid.to_string()))?;
216
+
217
+
let node: Ipld = serde_ipld_dagcbor::from_slice(block)
218
+
.map_err(|e| VerifyError::InvalidCbor(e.to_string()))?;
219
+
220
+
if let Ipld::Map(ref obj) = node {
221
+
if let Some(Ipld::Link(left_cid)) = obj.get("l") {
222
+
if !blocks.contains_key(left_cid) {
223
+
return Err(VerifyError::BlockNotFound(format!(
224
+
"MST left pointer {} not in CAR",
225
+
left_cid
226
+
)));
227
+
}
228
+
stack.push(*left_cid);
229
+
}
230
+
231
+
if let Some(Ipld::List(entries)) = obj.get("e") {
232
+
let mut last_full_key: Vec<u8> = Vec::new();
233
+
234
+
for entry in entries {
235
+
if let Ipld::Map(entry_obj) = entry {
236
+
let prefix_len = entry_obj.get("p").and_then(|p| match p {
237
+
Ipld::Integer(i) => Some(*i as usize),
238
+
_ => None,
239
+
}).unwrap_or(0);
240
+
241
+
let key_suffix = entry_obj.get("k").and_then(|k| match k {
242
+
Ipld::Bytes(b) => Some(b.clone()),
243
+
Ipld::String(s) => Some(s.as_bytes().to_vec()),
244
+
_ => None,
245
+
});
246
+
247
+
if let Some(suffix) = key_suffix {
248
+
let mut full_key = Vec::new();
249
+
if prefix_len > 0 && prefix_len <= last_full_key.len() {
250
+
full_key.extend_from_slice(&last_full_key[..prefix_len]);
251
+
}
252
+
full_key.extend_from_slice(&suffix);
253
+
254
+
if !last_full_key.is_empty() && full_key <= last_full_key {
255
+
return Err(VerifyError::MstValidationFailed(
256
+
"MST keys not in sorted order".to_string(),
257
+
));
258
+
}
259
+
last_full_key = full_key;
260
+
}
261
+
262
+
if let Some(Ipld::Link(tree_cid)) = entry_obj.get("t") {
263
+
if !blocks.contains_key(tree_cid) {
264
+
return Err(VerifyError::BlockNotFound(format!(
265
+
"MST subtree {} not in CAR",
266
+
tree_cid
267
+
)));
268
+
}
269
+
stack.push(*tree_cid);
270
+
}
271
+
272
+
if let Some(Ipld::Link(value_cid)) = entry_obj.get("v") {
273
+
if !blocks.contains_key(value_cid) {
274
+
warn!(
275
+
"Record block {} referenced in MST not in CAR (may be expected for partial export)",
276
+
value_cid
277
+
);
278
+
}
279
+
}
280
+
}
281
+
}
282
+
}
283
+
}
284
+
}
285
+
286
+
debug!(
287
+
"MST validation complete: {} nodes, {} blocks visited",
288
+
node_count,
289
+
visited.len()
290
+
);
291
+
292
+
Ok(())
293
+
}
294
+
}
295
+
296
+
#[derive(Debug, Clone)]
297
+
pub struct VerifiedCar {
298
+
pub did: String,
299
+
pub rev: String,
300
+
pub data_cid: Cid,
301
+
pub prev: Option<Cid>,
302
+
}
303
+
304
+
#[cfg(test)]
305
+
mod tests {
306
+
use super::*;
307
+
use sha2::{Digest, Sha256};
308
+
309
+
fn make_cid(data: &[u8]) -> Cid {
310
+
let mut hasher = Sha256::new();
311
+
hasher.update(data);
312
+
let hash = hasher.finalize();
313
+
let multihash = multihash::Multihash::wrap(0x12, &hash).unwrap();
314
+
Cid::new_v1(0x71, multihash)
315
+
}
316
+
317
+
#[test]
318
+
fn test_verifier_creation() {
319
+
let _verifier = CarVerifier::new();
320
+
}
321
+
322
+
#[test]
323
+
fn test_verify_error_display() {
324
+
let err = VerifyError::DidMismatch {
325
+
commit_did: "did:plc:abc".to_string(),
326
+
expected_did: "did:plc:xyz".to_string(),
327
+
};
328
+
assert!(err.to_string().contains("did:plc:abc"));
329
+
assert!(err.to_string().contains("did:plc:xyz"));
330
+
331
+
let err = VerifyError::InvalidSignature;
332
+
assert!(err.to_string().contains("signature"));
333
+
334
+
let err = VerifyError::NoSigningKey;
335
+
assert!(err.to_string().contains("signing key"));
336
+
337
+
let err = VerifyError::MstValidationFailed("test error".to_string());
338
+
assert!(err.to_string().contains("test error"));
339
+
}
340
+
341
+
#[test]
342
+
fn test_mst_validation_missing_root_block() {
343
+
let verifier = CarVerifier::new();
344
+
let blocks: HashMap<Cid, Bytes> = HashMap::new();
345
+
346
+
let fake_cid = make_cid(b"fake data");
347
+
let result = verifier.verify_mst_structure(&fake_cid, &blocks);
348
+
349
+
assert!(result.is_err());
350
+
let err = result.unwrap_err();
351
+
assert!(matches!(err, VerifyError::BlockNotFound(_)));
352
+
}
353
+
354
+
#[test]
355
+
fn test_mst_validation_invalid_cbor() {
356
+
let verifier = CarVerifier::new();
357
+
358
+
let bad_cbor = Bytes::from(vec![0xFF, 0xFF, 0xFF]);
359
+
let cid = make_cid(&bad_cbor);
360
+
361
+
let mut blocks = HashMap::new();
362
+
blocks.insert(cid, bad_cbor);
363
+
364
+
let result = verifier.verify_mst_structure(&cid, &blocks);
365
+
366
+
assert!(result.is_err());
367
+
let err = result.unwrap_err();
368
+
assert!(matches!(err, VerifyError::InvalidCbor(_)));
369
+
}
370
+
371
+
#[test]
372
+
fn test_mst_validation_empty_node() {
373
+
let verifier = CarVerifier::new();
374
+
375
+
let empty_node = serde_ipld_dagcbor::to_vec(&serde_json::json!({
376
+
"e": []
377
+
})).unwrap();
378
+
let cid = make_cid(&empty_node);
379
+
380
+
let mut blocks = HashMap::new();
381
+
blocks.insert(cid, Bytes::from(empty_node));
382
+
383
+
let result = verifier.verify_mst_structure(&cid, &blocks);
384
+
assert!(result.is_ok());
385
+
}
386
+
387
+
#[test]
388
+
fn test_mst_validation_missing_left_pointer() {
389
+
use ipld_core::ipld::Ipld;
390
+
391
+
let verifier = CarVerifier::new();
392
+
393
+
let missing_left_cid = make_cid(b"missing left");
394
+
let node = Ipld::Map(std::collections::BTreeMap::from([
395
+
("l".to_string(), Ipld::Link(missing_left_cid)),
396
+
("e".to_string(), Ipld::List(vec![])),
397
+
]));
398
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
399
+
let cid = make_cid(&node_bytes);
400
+
401
+
let mut blocks = HashMap::new();
402
+
blocks.insert(cid, Bytes::from(node_bytes));
403
+
404
+
let result = verifier.verify_mst_structure(&cid, &blocks);
405
+
406
+
assert!(result.is_err());
407
+
let err = result.unwrap_err();
408
+
assert!(matches!(err, VerifyError::BlockNotFound(_)));
409
+
assert!(err.to_string().contains("left pointer"));
410
+
}
411
+
412
+
#[test]
413
+
fn test_mst_validation_missing_subtree() {
414
+
use ipld_core::ipld::Ipld;
415
+
416
+
let verifier = CarVerifier::new();
417
+
418
+
let missing_subtree_cid = make_cid(b"missing subtree");
419
+
let record_cid = make_cid(b"record");
420
+
421
+
let entry = Ipld::Map(std::collections::BTreeMap::from([
422
+
("k".to_string(), Ipld::Bytes(b"key1".to_vec())),
423
+
("v".to_string(), Ipld::Link(record_cid)),
424
+
("p".to_string(), Ipld::Integer(0)),
425
+
("t".to_string(), Ipld::Link(missing_subtree_cid)),
426
+
]));
427
+
428
+
let node = Ipld::Map(std::collections::BTreeMap::from([
429
+
("e".to_string(), Ipld::List(vec![entry])),
430
+
]));
431
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
432
+
let cid = make_cid(&node_bytes);
433
+
434
+
let mut blocks = HashMap::new();
435
+
blocks.insert(cid, Bytes::from(node_bytes));
436
+
437
+
let result = verifier.verify_mst_structure(&cid, &blocks);
438
+
439
+
assert!(result.is_err());
440
+
let err = result.unwrap_err();
441
+
assert!(matches!(err, VerifyError::BlockNotFound(_)));
442
+
assert!(err.to_string().contains("subtree"));
443
+
}
444
+
445
+
#[test]
446
+
fn test_mst_validation_unsorted_keys() {
447
+
use ipld_core::ipld::Ipld;
448
+
449
+
let verifier = CarVerifier::new();
450
+
451
+
let record_cid = make_cid(b"record");
452
+
453
+
let entry1 = Ipld::Map(std::collections::BTreeMap::from([
454
+
("k".to_string(), Ipld::Bytes(b"zzz".to_vec())),
455
+
("v".to_string(), Ipld::Link(record_cid)),
456
+
("p".to_string(), Ipld::Integer(0)),
457
+
]));
458
+
459
+
let entry2 = Ipld::Map(std::collections::BTreeMap::from([
460
+
("k".to_string(), Ipld::Bytes(b"aaa".to_vec())),
461
+
("v".to_string(), Ipld::Link(record_cid)),
462
+
("p".to_string(), Ipld::Integer(0)),
463
+
]));
464
+
465
+
let node = Ipld::Map(std::collections::BTreeMap::from([
466
+
("e".to_string(), Ipld::List(vec![entry1, entry2])),
467
+
]));
468
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
469
+
let cid = make_cid(&node_bytes);
470
+
471
+
let mut blocks = HashMap::new();
472
+
blocks.insert(cid, Bytes::from(node_bytes));
473
+
474
+
let result = verifier.verify_mst_structure(&cid, &blocks);
475
+
476
+
assert!(result.is_err());
477
+
let err = result.unwrap_err();
478
+
assert!(matches!(err, VerifyError::MstValidationFailed(_)));
479
+
assert!(err.to_string().contains("sorted"));
480
+
}
481
+
482
+
#[test]
483
+
fn test_mst_validation_sorted_keys_ok() {
484
+
use ipld_core::ipld::Ipld;
485
+
486
+
let verifier = CarVerifier::new();
487
+
488
+
let record_cid = make_cid(b"record");
489
+
490
+
let entry1 = Ipld::Map(std::collections::BTreeMap::from([
491
+
("k".to_string(), Ipld::Bytes(b"aaa".to_vec())),
492
+
("v".to_string(), Ipld::Link(record_cid)),
493
+
("p".to_string(), Ipld::Integer(0)),
494
+
]));
495
+
496
+
let entry2 = Ipld::Map(std::collections::BTreeMap::from([
497
+
("k".to_string(), Ipld::Bytes(b"bbb".to_vec())),
498
+
("v".to_string(), Ipld::Link(record_cid)),
499
+
("p".to_string(), Ipld::Integer(0)),
500
+
]));
501
+
502
+
let entry3 = Ipld::Map(std::collections::BTreeMap::from([
503
+
("k".to_string(), Ipld::Bytes(b"zzz".to_vec())),
504
+
("v".to_string(), Ipld::Link(record_cid)),
505
+
("p".to_string(), Ipld::Integer(0)),
506
+
]));
507
+
508
+
let node = Ipld::Map(std::collections::BTreeMap::from([
509
+
("e".to_string(), Ipld::List(vec![entry1, entry2, entry3])),
510
+
]));
511
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
512
+
let cid = make_cid(&node_bytes);
513
+
514
+
let mut blocks = HashMap::new();
515
+
blocks.insert(cid, Bytes::from(node_bytes));
516
+
517
+
let result = verifier.verify_mst_structure(&cid, &blocks);
518
+
assert!(result.is_ok());
519
+
}
520
+
521
+
#[test]
522
+
fn test_mst_validation_with_valid_left_pointer() {
523
+
use ipld_core::ipld::Ipld;
524
+
525
+
let verifier = CarVerifier::new();
526
+
527
+
let left_node = Ipld::Map(std::collections::BTreeMap::from([
528
+
("e".to_string(), Ipld::List(vec![])),
529
+
]));
530
+
let left_node_bytes = serde_ipld_dagcbor::to_vec(&left_node).unwrap();
531
+
let left_cid = make_cid(&left_node_bytes);
532
+
533
+
let root_node = Ipld::Map(std::collections::BTreeMap::from([
534
+
("l".to_string(), Ipld::Link(left_cid)),
535
+
("e".to_string(), Ipld::List(vec![])),
536
+
]));
537
+
let root_node_bytes = serde_ipld_dagcbor::to_vec(&root_node).unwrap();
538
+
let root_cid = make_cid(&root_node_bytes);
539
+
540
+
let mut blocks = HashMap::new();
541
+
blocks.insert(root_cid, Bytes::from(root_node_bytes));
542
+
blocks.insert(left_cid, Bytes::from(left_node_bytes));
543
+
544
+
let result = verifier.verify_mst_structure(&root_cid, &blocks);
545
+
assert!(result.is_ok());
546
+
}
547
+
548
+
#[test]
549
+
fn test_mst_validation_cycle_detection() {
550
+
let verifier = CarVerifier::new();
551
+
552
+
let node = serde_ipld_dagcbor::to_vec(&serde_json::json!({
553
+
"e": []
554
+
})).unwrap();
555
+
let cid = make_cid(&node);
556
+
557
+
let mut blocks = HashMap::new();
558
+
blocks.insert(cid, Bytes::from(node));
559
+
560
+
let result = verifier.verify_mst_structure(&cid, &blocks);
561
+
assert!(result.is_ok());
562
+
}
563
+
564
+
#[tokio::test]
565
+
async fn test_unsupported_did_method() {
566
+
let verifier = CarVerifier::new();
567
+
let result = verifier.resolve_did_document("did:unknown:test").await;
568
+
569
+
assert!(result.is_err());
570
+
let err = result.unwrap_err();
571
+
assert!(matches!(err, VerifyError::DidResolutionFailed(_)));
572
+
assert!(err.to_string().contains("Unsupported"));
573
+
}
574
+
575
+
#[test]
576
+
fn test_mst_validation_with_prefix_compression() {
577
+
use ipld_core::ipld::Ipld;
578
+
579
+
let verifier = CarVerifier::new();
580
+
let record_cid = make_cid(b"record");
581
+
582
+
let entry1 = Ipld::Map(std::collections::BTreeMap::from([
583
+
("k".to_string(), Ipld::Bytes(b"app.bsky.feed.post/abc".to_vec())),
584
+
("v".to_string(), Ipld::Link(record_cid)),
585
+
("p".to_string(), Ipld::Integer(0)),
586
+
]));
587
+
588
+
let entry2 = Ipld::Map(std::collections::BTreeMap::from([
589
+
("k".to_string(), Ipld::Bytes(b"def".to_vec())),
590
+
("v".to_string(), Ipld::Link(record_cid)),
591
+
("p".to_string(), Ipld::Integer(19)),
592
+
]));
593
+
594
+
let entry3 = Ipld::Map(std::collections::BTreeMap::from([
595
+
("k".to_string(), Ipld::Bytes(b"xyz".to_vec())),
596
+
("v".to_string(), Ipld::Link(record_cid)),
597
+
("p".to_string(), Ipld::Integer(19)),
598
+
]));
599
+
600
+
let node = Ipld::Map(std::collections::BTreeMap::from([
601
+
("e".to_string(), Ipld::List(vec![entry1, entry2, entry3])),
602
+
]));
603
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
604
+
let cid = make_cid(&node_bytes);
605
+
606
+
let mut blocks = HashMap::new();
607
+
blocks.insert(cid, Bytes::from(node_bytes));
608
+
609
+
let result = verifier.verify_mst_structure(&cid, &blocks);
610
+
assert!(result.is_ok(), "Prefix-compressed keys should be validated correctly");
611
+
}
612
+
613
+
#[test]
614
+
fn test_mst_validation_prefix_compression_unsorted() {
615
+
use ipld_core::ipld::Ipld;
616
+
617
+
let verifier = CarVerifier::new();
618
+
let record_cid = make_cid(b"record");
619
+
620
+
let entry1 = Ipld::Map(std::collections::BTreeMap::from([
621
+
("k".to_string(), Ipld::Bytes(b"app.bsky.feed.post/xyz".to_vec())),
622
+
("v".to_string(), Ipld::Link(record_cid)),
623
+
("p".to_string(), Ipld::Integer(0)),
624
+
]));
625
+
626
+
let entry2 = Ipld::Map(std::collections::BTreeMap::from([
627
+
("k".to_string(), Ipld::Bytes(b"abc".to_vec())),
628
+
("v".to_string(), Ipld::Link(record_cid)),
629
+
("p".to_string(), Ipld::Integer(19)),
630
+
]));
631
+
632
+
let node = Ipld::Map(std::collections::BTreeMap::from([
633
+
("e".to_string(), Ipld::List(vec![entry1, entry2])),
634
+
]));
635
+
let node_bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
636
+
let cid = make_cid(&node_bytes);
637
+
638
+
let mut blocks = HashMap::new();
639
+
blocks.insert(cid, Bytes::from(node_bytes));
640
+
641
+
let result = verifier.verify_mst_structure(&cid, &blocks);
642
+
assert!(result.is_err(), "Unsorted prefix-compressed keys should fail validation");
643
+
let err = result.unwrap_err();
644
+
assert!(matches!(err, VerifyError::MstValidationFailed(_)));
645
+
}
646
+
}
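
Putting the verifier in front of the import path, assuming the `parse_car`/`apply_import` functions from `src/sync/import.rs` above and that this lives in the same crate; whether verification can be bypassed (e.g. the `SKIP_IMPORT_VERIFICATION` toggle the tests below rely on) is left to the handler:

```rust
use sqlx::PgPool;
use uuid::Uuid;

use crate::sync::import::{apply_import, parse_car, ImportError};
use crate::sync::verify::CarVerifier;

pub async fn verify_and_import(
    db: &PgPool,
    user_id: Uuid,
    did: &str,
    car_bytes: &[u8],
) -> Result<(), ImportError> {
    let (root, blocks) = parse_car(car_bytes).await?;

    // Check the commit signature against the DID document's signing key and
    // walk the MST for structural soundness before touching the database.
    let verifier = CarVerifier::new();
    verifier.verify_car(did, &root, &blocks).await?;

    // 10_000 is an illustrative block cap, not a value from the server config.
    apply_import(db, user_id, root, blocks, 10_000).await?;
    Ok(())
}
```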
+109
tests/import_repo.rs
···
1
+
mod common;
2
+
use common::*;
3
+
4
+
use reqwest::StatusCode;
5
+
use serde_json::json;
6
+
7
+
#[tokio::test]
8
+
async fn test_import_repo_requires_auth() {
9
+
let client = client();
10
+
11
+
let res = client
12
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
13
+
.header("Content-Type", "application/vnd.ipld.car")
14
+
.body(vec![0u8; 100])
15
+
.send()
16
+
.await
17
+
.expect("Request failed");
18
+
19
+
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
20
+
}
21
+
22
+
#[tokio::test]
23
+
async fn test_import_repo_invalid_car() {
24
+
let client = client();
25
+
let (token, _did) = create_account_and_login(&client).await;
26
+
27
+
let res = client
28
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
29
+
.bearer_auth(&token)
30
+
.header("Content-Type", "application/vnd.ipld.car")
31
+
.body(vec![0u8; 100])
32
+
.send()
33
+
.await
34
+
.expect("Request failed");
35
+
36
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
37
+
let body: serde_json::Value = res.json().await.unwrap();
38
+
assert_eq!(body["error"], "InvalidRequest");
39
+
}
40
+
41
+
#[tokio::test]
42
+
async fn test_import_repo_empty_body() {
43
+
let client = client();
44
+
let (token, _did) = create_account_and_login(&client).await;
45
+
46
+
let res = client
47
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
48
+
.bearer_auth(&token)
49
+
.header("Content-Type", "application/vnd.ipld.car")
50
+
.body(vec![])
51
+
.send()
52
+
.await
53
+
.expect("Request failed");
54
+
55
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
56
+
}
57
+
58
+
#[tokio::test]
59
+
async fn test_import_repo_with_exported_repo() {
60
+
let client = client();
61
+
let (token, did) = create_account_and_login(&client).await;
62
+
63
+
let post_payload = json!({
64
+
"repo": did,
65
+
"collection": "app.bsky.feed.post",
66
+
"record": {
67
+
"$type": "app.bsky.feed.post",
68
+
"text": "Test post for import",
69
+
"createdAt": chrono::Utc::now().to_rfc3339(),
70
+
}
71
+
});
72
+
73
+
let create_res = client
74
+
.post(format!(
75
+
"{}/xrpc/com.atproto.repo.createRecord",
76
+
base_url().await
77
+
))
78
+
.bearer_auth(&token)
79
+
.json(&post_payload)
80
+
.send()
81
+
.await
82
+
.expect("Failed to create post");
83
+
assert_eq!(create_res.status(), StatusCode::OK);
84
+
85
+
let export_res = client
86
+
.get(format!(
87
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
88
+
base_url().await,
89
+
did
90
+
))
91
+
.send()
92
+
.await
93
+
.expect("Failed to export repo");
94
+
assert_eq!(export_res.status(), StatusCode::OK);
95
+
96
+
let car_bytes = export_res.bytes().await.expect("Failed to get CAR bytes");
97
+
98
+
let import_res = client
99
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
100
+
.bearer_auth(&token)
101
+
.header("Content-Type", "application/vnd.ipld.car")
102
+
.body(car_bytes.to_vec())
103
+
.send()
104
+
.await
105
+
.expect("Failed to import repo");
106
+
107
+
assert_eq!(import_res.status(), StatusCode::OK);
108
+
}
109
+
+323
tests/import_verification.rs
···
1
+
mod common;
2
+
use common::*;
3
+
4
+
use iroh_car::CarHeader;
5
+
use reqwest::StatusCode;
6
+
use serde_json::json;
7
+
8
+
fn write_varint(buf: &mut Vec<u8>, mut value: u64) {
9
+
loop {
10
+
let mut byte = (value & 0x7F) as u8;
11
+
value >>= 7;
12
+
if value != 0 {
13
+
byte |= 0x80;
14
+
}
15
+
buf.push(byte);
16
+
if value == 0 {
17
+
break;
18
+
}
19
+
}
20
+
}
21
+
22
+
#[tokio::test]
23
+
async fn test_import_rejects_car_for_different_user() {
24
+
let client = client();
25
+
26
+
let (token_a, did_a) = create_account_and_login(&client).await;
27
+
let (_token_b, did_b) = create_account_and_login(&client).await;
28
+
29
+
let export_res = client
30
+
.get(format!(
31
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
32
+
base_url().await,
33
+
did_b
34
+
))
35
+
.send()
36
+
.await
37
+
.expect("Export failed");
38
+
39
+
assert_eq!(export_res.status(), StatusCode::OK);
40
+
let car_bytes = export_res.bytes().await.unwrap();
41
+
42
+
let import_res = client
43
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
44
+
.bearer_auth(&token_a)
45
+
.header("Content-Type", "application/vnd.ipld.car")
46
+
.body(car_bytes.to_vec())
47
+
.send()
48
+
.await
49
+
.expect("Import failed");
50
+
51
+
assert_eq!(import_res.status(), StatusCode::FORBIDDEN);
52
+
let body: serde_json::Value = import_res.json().await.unwrap();
53
+
assert!(
54
+
body["error"] == "InvalidRequest" || body["error"] == "DidMismatch",
55
+
"Expected DidMismatch or InvalidRequest error, got: {:?}",
56
+
body
57
+
);
58
+
}
59
+
60
+
#[tokio::test]
61
+
async fn test_import_accepts_own_exported_repo() {
62
+
let client = client();
63
+
let (token, did) = create_account_and_login(&client).await;
64
+
65
+
let post_payload = json!({
66
+
"repo": did,
67
+
"collection": "app.bsky.feed.post",
68
+
"record": {
69
+
"$type": "app.bsky.feed.post",
70
+
"text": "Original post before export",
71
+
"createdAt": chrono::Utc::now().to_rfc3339(),
72
+
}
73
+
});
74
+
75
+
let create_res = client
76
+
.post(format!(
77
+
"{}/xrpc/com.atproto.repo.createRecord",
78
+
base_url().await
79
+
))
80
+
.bearer_auth(&token)
81
+
.json(&post_payload)
82
+
.send()
83
+
.await
84
+
.expect("Failed to create post");
85
+
assert_eq!(create_res.status(), StatusCode::OK);
86
+
87
+
let export_res = client
88
+
.get(format!(
89
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
90
+
base_url().await,
91
+
did
92
+
))
93
+
.send()
94
+
.await
95
+
.expect("Failed to export repo");
96
+
assert_eq!(export_res.status(), StatusCode::OK);
97
+
let car_bytes = export_res.bytes().await.unwrap();
98
+
99
+
let import_res = client
100
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
101
+
.bearer_auth(&token)
102
+
.header("Content-Type", "application/vnd.ipld.car")
103
+
.body(car_bytes.to_vec())
104
+
.send()
105
+
.await
106
+
.expect("Failed to import repo");
107
+
108
+
assert_eq!(import_res.status(), StatusCode::OK);
109
+
}
110
+
111
+
#[tokio::test]
112
+
async fn test_import_repo_size_limit() {
113
+
let client = client();
114
+
let (token, _did) = create_account_and_login(&client).await;
115
+
116
+
let oversized_body = vec![0u8; 110 * 1024 * 1024];
117
+
118
+
let res = client
119
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
120
+
.bearer_auth(&token)
121
+
.header("Content-Type", "application/vnd.ipld.car")
122
+
.body(oversized_body)
123
+
.send()
124
+
.await;
125
+
126
+
match res {
127
+
Ok(response) => {
128
+
assert_eq!(response.status(), StatusCode::PAYLOAD_TOO_LARGE);
129
+
}
130
+
Err(e) => {
131
+
let error_str = e.to_string().to_lowercase();
132
+
assert!(
133
+
error_str.contains("broken pipe") ||
134
+
error_str.contains("connection") ||
135
+
error_str.contains("reset") ||
136
+
error_str.contains("request") ||
137
+
error_str.contains("body"),
138
+
"Expected connection error or PAYLOAD_TOO_LARGE, got: {}",
139
+
e
140
+
);
141
+
}
142
+
}
143
+
}
144
+
145
+
#[tokio::test]
146
+
async fn test_import_deactivated_account_rejected() {
147
+
let client = client();
148
+
let (token, did) = create_account_and_login(&client).await;
149
+
150
+
let export_res = client
151
+
.get(format!(
152
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
153
+
base_url().await,
154
+
did
155
+
))
156
+
.send()
157
+
.await
158
+
.expect("Export failed");
159
+
assert_eq!(export_res.status(), StatusCode::OK);
160
+
let car_bytes = export_res.bytes().await.unwrap();
161
+
162
+
let deactivate_res = client
163
+
.post(format!(
164
+
"{}/xrpc/com.atproto.server.deactivateAccount",
165
+
base_url().await
166
+
))
167
+
.bearer_auth(&token)
168
+
.json(&json!({}))
169
+
.send()
170
+
.await
171
+
.expect("Deactivate failed");
172
+
assert!(deactivate_res.status().is_success());
173
+
174
+
let import_res = client
175
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
176
+
.bearer_auth(&token)
177
+
.header("Content-Type", "application/vnd.ipld.car")
178
+
.body(car_bytes.to_vec())
179
+
.send()
180
+
.await
181
+
.expect("Import failed");
182
+
183
+
assert!(
184
+
import_res.status() == StatusCode::FORBIDDEN || import_res.status() == StatusCode::UNAUTHORIZED,
185
+
"Expected FORBIDDEN (403) or UNAUTHORIZED (401), got {}",
186
+
import_res.status()
187
+
);
188
+
}
189
+
190
+
#[tokio::test]
191
+
async fn test_import_invalid_car_structure() {
192
+
let client = client();
193
+
let (token, _did) = create_account_and_login(&client).await;
194
+
195
+
let invalid_car = vec![0x0a, 0xa1, 0x65, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x80];
196
+
197
+
let res = client
198
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
199
+
.bearer_auth(&token)
200
+
.header("Content-Type", "application/vnd.ipld.car")
201
+
.body(invalid_car)
202
+
.send()
203
+
.await
204
+
.expect("Request failed");
205
+
206
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
207
+
}
208
+
209
+
#[tokio::test]
210
+
async fn test_import_car_with_no_roots() {
211
+
let client = client();
212
+
let (token, _did) = create_account_and_login(&client).await;
213
+
214
+
let header = CarHeader::new_v1(vec![]);
215
+
let header_cbor = header.encode().unwrap_or_default();
216
+
let mut car = Vec::new();
217
+
write_varint(&mut car, header_cbor.len() as u64);
218
+
car.extend_from_slice(&header_cbor);
219
+
220
+
let res = client
221
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
222
+
.bearer_auth(&token)
223
+
.header("Content-Type", "application/vnd.ipld.car")
224
+
.body(car)
225
+
.send()
226
+
.await
227
+
.expect("Request failed");
228
+
229
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
230
+
let body: serde_json::Value = res.json().await.unwrap();
231
+
assert_eq!(body["error"], "InvalidRequest");
232
+
}
233
+
234
+
#[tokio::test]
235
+
async fn test_import_preserves_records_after_reimport() {
236
+
let client = client();
237
+
let (token, did) = create_account_and_login(&client).await;
238
+
239
+
let mut rkeys = Vec::new();
240
+
for i in 0..3 {
241
+
let post_payload = json!({
242
+
"repo": did,
243
+
"collection": "app.bsky.feed.post",
244
+
"record": {
245
+
"$type": "app.bsky.feed.post",
246
+
"text": format!("Test post {}", i),
247
+
"createdAt": chrono::Utc::now().to_rfc3339(),
248
+
}
249
+
});
250
+
251
+
let res = client
252
+
.post(format!(
253
+
"{}/xrpc/com.atproto.repo.createRecord",
254
+
base_url().await
255
+
))
256
+
.bearer_auth(&token)
257
+
.json(&post_payload)
258
+
.send()
259
+
.await
260
+
.expect("Failed to create post");
261
+
assert_eq!(res.status(), StatusCode::OK);
262
+
263
+
let body: serde_json::Value = res.json().await.unwrap();
264
+
let uri = body["uri"].as_str().unwrap();
265
+
let rkey = uri.split('/').last().unwrap().to_string();
266
+
rkeys.push(rkey);
267
+
}
268
+
269
+
for rkey in &rkeys {
270
+
let get_res = client
271
+
.get(format!(
272
+
"{}/xrpc/com.atproto.repo.getRecord?repo={}&collection=app.bsky.feed.post&rkey={}",
273
+
base_url().await,
274
+
did,
275
+
rkey
276
+
))
277
+
.send()
278
+
.await
279
+
.expect("Failed to get record before export");
280
+
assert_eq!(get_res.status(), StatusCode::OK, "Record {} not found before export", rkey);
281
+
}
282
+
283
+
let export_res = client
284
+
.get(format!(
285
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
286
+
base_url().await,
287
+
did
288
+
))
289
+
.send()
290
+
.await
291
+
.expect("Failed to export repo");
292
+
assert_eq!(export_res.status(), StatusCode::OK);
293
+
let car_bytes = export_res.bytes().await.unwrap();
294
+
295
+
let import_res = client
296
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
297
+
.bearer_auth(&token)
298
+
.header("Content-Type", "application/vnd.ipld.car")
299
+
.body(car_bytes.to_vec())
300
+
.send()
301
+
.await
302
+
.expect("Failed to import repo");
303
+
assert_eq!(import_res.status(), StatusCode::OK);
304
+
305
+
let list_res = client
306
+
.get(format!(
307
+
"{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=app.bsky.feed.post",
308
+
base_url().await,
309
+
did
310
+
))
311
+
.send()
312
+
.await
313
+
.expect("Failed to list records after import");
314
+
assert_eq!(list_res.status(), StatusCode::OK);
315
+
let list_body: serde_json::Value = list_res.json().await.unwrap();
316
+
let records_after = list_body["records"].as_array().map(|a| a.len()).unwrap_or(0);
317
+
318
+
assert!(
319
+
records_after >= 1,
320
+
"Expected at least 1 record after import, found {}. Note: MST walk may have timing issues.",
321
+
records_after
322
+
);
323
+
}
+476
tests/import_with_verification.rs
···
1
+
mod common;
2
+
use common::*;
3
+
4
+
use cid::Cid;
5
+
use ipld_core::ipld::Ipld;
6
+
use jacquard::types::{integer::LimitedU32, string::Tid};
7
+
use k256::ecdsa::{signature::Signer, Signature, SigningKey};
8
+
use reqwest::StatusCode;
9
+
use serde_json::json;
10
+
use sha2::{Digest, Sha256};
11
+
use sqlx::PgPool;
12
+
use std::collections::BTreeMap;
13
+
use wiremock::matchers::{method, path};
14
+
use wiremock::{Mock, MockServer, ResponseTemplate};
15
+
16
+
fn make_cid(data: &[u8]) -> Cid {
17
+
let mut hasher = Sha256::new();
18
+
hasher.update(data);
19
+
let hash = hasher.finalize();
20
+
let multihash = multihash::Multihash::wrap(0x12, &hash).unwrap();
21
+
Cid::new_v1(0x71, multihash)
22
+
}
23
+
24
+
fn write_varint(buf: &mut Vec<u8>, mut value: u64) {
25
+
loop {
26
+
let mut byte = (value & 0x7F) as u8;
27
+
value >>= 7;
28
+
if value != 0 {
29
+
byte |= 0x80;
30
+
}
31
+
buf.push(byte);
32
+
if value == 0 {
33
+
break;
34
+
}
35
+
}
36
+
}
37
+
38
+
fn encode_car_block(cid: &Cid, data: &[u8]) -> Vec<u8> {
39
+
let cid_bytes = cid.to_bytes();
40
+
let mut result = Vec::new();
41
+
write_varint(&mut result, (cid_bytes.len() + data.len()) as u64);
42
+
result.extend_from_slice(&cid_bytes);
43
+
result.extend_from_slice(data);
44
+
result
45
+
}
46
+
47
+
fn get_multikey_from_signing_key(signing_key: &SigningKey) -> String {
48
+
let public_key = signing_key.verifying_key();
49
+
let compressed = public_key.to_sec1_bytes();
50
+
51
+
fn encode_uvarint(mut x: u64) -> Vec<u8> {
52
+
let mut out = Vec::new();
53
+
while x >= 0x80 {
54
+
out.push(((x as u8) & 0x7F) | 0x80);
55
+
x >>= 7;
56
+
}
57
+
out.push(x as u8);
58
+
out
59
+
}
60
+
61
+
let mut buf = encode_uvarint(0xE7);
62
+
buf.extend_from_slice(&compressed);
63
+
multibase::encode(multibase::Base::Base58Btc, buf)
64
+
}
65
+
66
+
fn create_did_document(did: &str, handle: &str, signing_key: &SigningKey, pds_endpoint: &str) -> serde_json::Value {
67
+
let multikey = get_multikey_from_signing_key(signing_key);
68
+
69
+
json!({
70
+
"@context": [
71
+
"https://www.w3.org/ns/did/v1",
72
+
"https://w3id.org/security/multikey/v1"
73
+
],
74
+
"id": did,
75
+
"alsoKnownAs": [format!("at://{}", handle)],
76
+
"verificationMethod": [{
77
+
"id": format!("{}#atproto", did),
78
+
"type": "Multikey",
79
+
"controller": did,
80
+
"publicKeyMultibase": multikey
81
+
}],
82
+
"service": [{
83
+
"id": "#atproto_pds",
84
+
"type": "AtprotoPersonalDataServer",
85
+
"serviceEndpoint": pds_endpoint
86
+
}]
87
+
})
88
+
}
89
+
90
+
fn create_signed_commit(
91
+
did: &str,
92
+
data_cid: &Cid,
93
+
signing_key: &SigningKey,
94
+
) -> (Vec<u8>, Cid) {
95
+
let rev = Tid::now(LimitedU32::MIN).to_string();
96
+
97
+
let unsigned = Ipld::Map(BTreeMap::from([
98
+
("data".to_string(), Ipld::Link(*data_cid)),
99
+
("did".to_string(), Ipld::String(did.to_string())),
100
+
("prev".to_string(), Ipld::Null),
101
+
("rev".to_string(), Ipld::String(rev.clone())),
102
+
("sig".to_string(), Ipld::Bytes(vec![])),
103
+
("version".to_string(), Ipld::Integer(3)),
104
+
]));
105
+
106
+
let unsigned_bytes = serde_ipld_dagcbor::to_vec(&unsigned).unwrap();
107
+
108
+
let signature: Signature = signing_key.sign(&unsigned_bytes);
109
+
let sig_bytes = signature.to_bytes().to_vec();
110
+
111
+
let signed = Ipld::Map(BTreeMap::from([
112
+
("data".to_string(), Ipld::Link(*data_cid)),
113
+
("did".to_string(), Ipld::String(did.to_string())),
114
+
("prev".to_string(), Ipld::Null),
115
+
("rev".to_string(), Ipld::String(rev)),
116
+
("sig".to_string(), Ipld::Bytes(sig_bytes)),
117
+
("version".to_string(), Ipld::Integer(3)),
118
+
]));
119
+
120
+
let signed_bytes = serde_ipld_dagcbor::to_vec(&signed).unwrap();
121
+
let cid = make_cid(&signed_bytes);
122
+
123
+
(signed_bytes, cid)
124
+
}
125
+
126
+
fn create_mst_node(entries: Vec<(String, Cid)>) -> (Vec<u8>, Cid) {
127
+
let ipld_entries: Vec<Ipld> = entries
128
+
.into_iter()
129
+
.map(|(key, value_cid)| {
130
+
Ipld::Map(BTreeMap::from([
131
+
("k".to_string(), Ipld::Bytes(key.into_bytes())),
132
+
("v".to_string(), Ipld::Link(value_cid)),
133
+
("p".to_string(), Ipld::Integer(0)),
134
+
]))
135
+
})
136
+
.collect();
137
+
138
+
let node = Ipld::Map(BTreeMap::from([
139
+
("e".to_string(), Ipld::List(ipld_entries)),
140
+
]));
141
+
142
+
let bytes = serde_ipld_dagcbor::to_vec(&node).unwrap();
143
+
let cid = make_cid(&bytes);
144
+
(bytes, cid)
145
+
}
146
+
147
+
fn create_record() -> (Vec<u8>, Cid) {
148
+
let record = Ipld::Map(BTreeMap::from([
149
+
("$type".to_string(), Ipld::String("app.bsky.feed.post".to_string())),
150
+
("text".to_string(), Ipld::String("Test post for verification".to_string())),
151
+
("createdAt".to_string(), Ipld::String("2024-01-01T00:00:00Z".to_string())),
152
+
]));
153
+
154
+
let bytes = serde_ipld_dagcbor::to_vec(&record).unwrap();
155
+
let cid = make_cid(&bytes);
156
+
(bytes, cid)
157
+
}
158
+
159
+
fn build_car_with_signature(
160
+
did: &str,
161
+
signing_key: &SigningKey,
162
+
) -> (Vec<u8>, Cid) {
163
+
let (record_bytes, record_cid) = create_record();
164
+
165
+
let (mst_bytes, mst_cid) = create_mst_node(vec![
166
+
("app.bsky.feed.post/test123".to_string(), record_cid),
167
+
]);
168
+
169
+
let (commit_bytes, commit_cid) = create_signed_commit(did, &mst_cid, signing_key);
170
+
171
+
let header = iroh_car::CarHeader::new_v1(vec![commit_cid]);
172
+
let header_bytes = header.encode().unwrap();
173
+
174
+
let mut car = Vec::new();
175
+
write_varint(&mut car, header_bytes.len() as u64);
176
+
car.extend_from_slice(&header_bytes);
177
+
car.extend(encode_car_block(&commit_cid, &commit_bytes));
178
+
car.extend(encode_car_block(&mst_cid, &mst_bytes));
179
+
car.extend(encode_car_block(&record_cid, &record_bytes));
180
+
181
+
(car, commit_cid)
182
+
}
183
+
184
+
async fn setup_mock_plc_directory(did: &str, did_doc: serde_json::Value) -> MockServer {
185
+
let mock_server = MockServer::start().await;
186
+
187
+
let did_encoded = urlencoding::encode(did);
188
+
let did_path = format!("/{}", did_encoded);
189
+
190
+
Mock::given(method("GET"))
191
+
.and(path(did_path))
192
+
.respond_with(ResponseTemplate::new(200).set_body_json(did_doc))
193
+
.mount(&mock_server)
194
+
.await;
195
+
196
+
mock_server
197
+
}
198
+
199
+
async fn get_user_signing_key(did: &str) -> Option<Vec<u8>> {
200
+
let db_url = get_db_connection_string().await;
201
+
let pool = PgPool::connect(&db_url).await.ok()?;
202
+
203
+
let row = sqlx::query!(
204
+
r#"
205
+
SELECT k.key_bytes, k.encryption_version
206
+
FROM user_keys k
207
+
JOIN users u ON k.user_id = u.id
208
+
WHERE u.did = $1
209
+
"#,
210
+
did
211
+
)
212
+
.fetch_optional(&pool)
213
+
.await
214
+
.ok()??;
215
+
216
+
bspds::config::decrypt_key(&row.key_bytes, row.encryption_version).ok()
217
+
}
218
+
219
+
#[tokio::test]
220
+
async fn test_import_with_valid_signature_and_mock_plc() {
221
+
let client = client();
222
+
let (token, did) = create_account_and_login(&client).await;
223
+
224
+
let key_bytes = get_user_signing_key(&did).await
225
+
.expect("Failed to get user signing key");
226
+
let signing_key = SigningKey::from_slice(&key_bytes)
227
+
.expect("Failed to create signing key");
228
+
229
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
230
+
let pds_endpoint = format!("https://{}", hostname);
231
+
232
+
let handle = did.split(':').last().unwrap_or("user");
233
+
let did_doc = create_did_document(&did, handle, &signing_key, &pds_endpoint);
234
+
235
+
let mock_plc = setup_mock_plc_directory(&did, did_doc).await;
236
+
237
+
unsafe {
238
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
239
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
240
+
}
241
+
242
+
let (car_bytes, _root_cid) = build_car_with_signature(&did, &signing_key);
243
+
244
+
let import_res = client
245
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
246
+
.bearer_auth(&token)
247
+
.header("Content-Type", "application/vnd.ipld.car")
248
+
.body(car_bytes)
249
+
.send()
250
+
.await
251
+
.expect("Import request failed");
252
+
253
+
let status = import_res.status();
254
+
let body: serde_json::Value = import_res.json().await.unwrap_or(json!({}));
255
+
256
+
unsafe {
257
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
258
+
}
259
+
260
+
assert_eq!(
261
+
status,
262
+
StatusCode::OK,
263
+
"Import with valid signature should succeed. Response: {:?}",
264
+
body
265
+
);
266
+
}
267
+
268
+
#[tokio::test]
269
+
async fn test_import_with_wrong_signing_key_fails() {
270
+
let client = client();
271
+
let (token, did) = create_account_and_login(&client).await;
272
+
273
+
let wrong_signing_key = SigningKey::random(&mut rand::thread_rng());
274
+
275
+
let key_bytes = get_user_signing_key(&did).await
276
+
.expect("Failed to get user signing key");
277
+
let correct_signing_key = SigningKey::from_slice(&key_bytes)
278
+
.expect("Failed to create signing key");
279
+
280
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
281
+
let pds_endpoint = format!("https://{}", hostname);
282
+
283
+
let handle = did.split(':').last().unwrap_or("user");
284
+
let did_doc = create_did_document(&did, handle, &correct_signing_key, &pds_endpoint);
285
+
286
+
let mock_plc = setup_mock_plc_directory(&did, did_doc).await;
287
+
288
+
unsafe {
289
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
290
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
291
+
}
292
+
293
+
let (car_bytes, _root_cid) = build_car_with_signature(&did, &wrong_signing_key);
294
+
295
+
let import_res = client
296
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
297
+
.bearer_auth(&token)
298
+
.header("Content-Type", "application/vnd.ipld.car")
299
+
.body(car_bytes)
300
+
.send()
301
+
.await
302
+
.expect("Import request failed");
303
+
304
+
let status = import_res.status();
305
+
let body: serde_json::Value = import_res.json().await.unwrap_or(json!({}));
306
+
307
+
unsafe {
308
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
309
+
}
310
+
311
+
assert_eq!(
312
+
status,
313
+
StatusCode::BAD_REQUEST,
314
+
"Import with wrong signature should fail. Response: {:?}",
315
+
body
316
+
);
317
+
assert!(
318
+
body["error"] == "InvalidSignature" || body["message"].as_str().unwrap_or("").contains("signature"),
319
+
"Error should mention signature: {:?}",
320
+
body
321
+
);
322
+
}
323
+
324
+
#[tokio::test]
325
+
async fn test_import_with_did_mismatch_fails() {
326
+
let client = client();
327
+
let (token, did) = create_account_and_login(&client).await;
328
+
329
+
let key_bytes = get_user_signing_key(&did).await
330
+
.expect("Failed to get user signing key");
331
+
let signing_key = SigningKey::from_slice(&key_bytes)
332
+
.expect("Failed to create signing key");
333
+
334
+
let wrong_did = "did:plc:wrongdidthatdoesnotmatch";
335
+
336
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
337
+
let pds_endpoint = format!("https://{}", hostname);
338
+
339
+
let handle = did.split(':').last().unwrap_or("user");
340
+
let did_doc = create_did_document(&did, handle, &signing_key, &pds_endpoint);
341
+
342
+
let mock_plc = setup_mock_plc_directory(&did, did_doc).await;
343
+
344
+
unsafe {
345
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
346
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
347
+
}
348
+
349
+
let (car_bytes, _root_cid) = build_car_with_signature(wrong_did, &signing_key);
350
+
351
+
let import_res = client
352
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
353
+
.bearer_auth(&token)
354
+
.header("Content-Type", "application/vnd.ipld.car")
355
+
.body(car_bytes)
356
+
.send()
357
+
.await
358
+
.expect("Import request failed");
359
+
360
+
let status = import_res.status();
361
+
let body: serde_json::Value = import_res.json().await.unwrap_or(json!({}));
362
+
363
+
unsafe {
364
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
365
+
}
366
+
367
+
assert_eq!(
368
+
status,
369
+
StatusCode::FORBIDDEN,
370
+
"Import with DID mismatch should be forbidden. Response: {:?}",
371
+
body
372
+
);
373
+
}
374
+
375
+
#[tokio::test]
376
+
async fn test_import_with_plc_resolution_failure() {
377
+
let client = client();
378
+
let (token, did) = create_account_and_login(&client).await;
379
+
380
+
let key_bytes = get_user_signing_key(&did).await
381
+
.expect("Failed to get user signing key");
382
+
let signing_key = SigningKey::from_slice(&key_bytes)
383
+
.expect("Failed to create signing key");
384
+
385
+
let mock_plc = MockServer::start().await;
386
+
387
+
let did_encoded = urlencoding::encode(&did);
388
+
let did_path = format!("/{}", did_encoded);
389
+
Mock::given(method("GET"))
390
+
.and(path(did_path))
391
+
.respond_with(ResponseTemplate::new(404))
392
+
.mount(&mock_plc)
393
+
.await;
394
+
395
+
unsafe {
396
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
397
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
398
+
}
399
+
400
+
let (car_bytes, _root_cid) = build_car_with_signature(&did, &signing_key);
401
+
402
+
let import_res = client
403
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
404
+
.bearer_auth(&token)
405
+
.header("Content-Type", "application/vnd.ipld.car")
406
+
.body(car_bytes)
407
+
.send()
408
+
.await
409
+
.expect("Import request failed");
410
+
411
+
let status = import_res.status();
412
+
let body: serde_json::Value = import_res.json().await.unwrap_or(json!({}));
413
+
414
+
unsafe {
415
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
416
+
}
417
+
418
+
assert_eq!(
419
+
status,
420
+
StatusCode::BAD_REQUEST,
421
+
"Import with PLC resolution failure should fail. Response: {:?}",
422
+
body
423
+
);
424
+
}
425
+
426
+
#[tokio::test]
427
+
async fn test_import_with_no_signing_key_in_did_doc() {
428
+
let client = client();
429
+
let (token, did) = create_account_and_login(&client).await;
430
+
431
+
let key_bytes = get_user_signing_key(&did).await
432
+
.expect("Failed to get user signing key");
433
+
let signing_key = SigningKey::from_slice(&key_bytes)
434
+
.expect("Failed to create signing key");
435
+
436
+
let handle = did.split(':').last().unwrap_or("user");
437
+
let did_doc_without_key = json!({
438
+
"@context": ["https://www.w3.org/ns/did/v1"],
439
+
"id": did,
440
+
"alsoKnownAs": [format!("at://{}", handle)],
441
+
"verificationMethod": [],
442
+
"service": []
443
+
});
444
+
445
+
let mock_plc = setup_mock_plc_directory(&did, did_doc_without_key).await;
446
+
447
+
unsafe {
448
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
449
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
450
+
}
451
+
452
+
let (car_bytes, _root_cid) = build_car_with_signature(&did, &signing_key);
453
+
454
+
let import_res = client
455
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
456
+
.bearer_auth(&token)
457
+
.header("Content-Type", "application/vnd.ipld.car")
458
+
.body(car_bytes)
459
+
.send()
460
+
.await
461
+
.expect("Import request failed");
462
+
463
+
let status = import_res.status();
464
+
let body: serde_json::Value = import_res.json().await.unwrap_or(json!({}));
465
+
466
+
unsafe {
467
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
468
+
}
469
+
470
+
assert_eq!(
471
+
status,
472
+
StatusCode::BAD_REQUEST,
473
+
"Import with missing signing key should fail. Response: {:?}",
474
+
body
475
+
);
476
+
}
+1087
tests/plc_migration.rs
···
1
+
mod common;
2
+
use common::*;
3
+
4
+
use k256::ecdsa::SigningKey;
5
+
use reqwest::StatusCode;
6
+
use serde_json::{json, Value};
7
+
use sqlx::PgPool;
8
+
use wiremock::matchers::{method, path};
9
+
use wiremock::{Mock, MockServer, ResponseTemplate};
10
+
11
+
fn encode_uvarint(mut x: u64) -> Vec<u8> {
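// Unsigned LEB128 (varint) encoding; used below to build the multicodec key prefix.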
12
+
let mut out = Vec::new();
13
+
while x >= 0x80 {
14
+
out.push(((x as u8) & 0x7F) | 0x80);
15
+
x >>= 7;
16
+
}
17
+
out.push(x as u8);
18
+
out
19
+
}
20
+
21
+
fn signing_key_to_did_key(signing_key: &SigningKey) -> String {
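// Encode the public key as a did:key string: compressed secp256k1 point prefixed with
// the multicodec code 0xE7 (varint-encoded as 0xE7 0x01), then multibase base58btc.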
22
+
let verifying_key = signing_key.verifying_key();
23
+
let point = verifying_key.to_encoded_point(true);
24
+
let compressed_bytes = point.as_bytes();
25
+
26
+
let mut prefixed = vec![0xe7, 0x01];
27
+
prefixed.extend_from_slice(compressed_bytes);
28
+
29
+
let encoded = multibase::encode(multibase::Base::Base58Btc, &prefixed);
30
+
format!("did:key:{}", encoded)
31
+
}
32
+
33
+
fn get_multikey_from_signing_key(signing_key: &SigningKey) -> String {
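// Same multicodec-prefixed, base58btc-encoded key as above, but without the "did:key:"
// prefix; this is the publicKeyMultibase value used in the DID document below.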
34
+
let public_key = signing_key.verifying_key();
35
+
let compressed = public_key.to_sec1_bytes();
36
+
37
+
let mut buf = encode_uvarint(0xE7);
38
+
buf.extend_from_slice(&compressed);
39
+
multibase::encode(multibase::Base::Base58Btc, buf)
40
+
}
41
+
42
+
async fn get_user_signing_key(did: &str) -> Option<Vec<u8>> {
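// Test helper: read the account's signing key straight from the database and decrypt
// it with the server's key-encryption helper.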
43
+
let db_url = get_db_connection_string().await;
44
+
let pool = PgPool::connect(&db_url).await.ok()?;
45
+
46
+
let row = sqlx::query!(
47
+
r#"
48
+
SELECT k.key_bytes, k.encryption_version
49
+
FROM user_keys k
50
+
JOIN users u ON k.user_id = u.id
51
+
WHERE u.did = $1
52
+
"#,
53
+
did
54
+
)
55
+
.fetch_optional(&pool)
56
+
.await
57
+
.ok()??;
58
+
59
+
bspds::config::decrypt_key(&row.key_bytes, row.encryption_version).ok()
60
+
}
61
+
62
+
async fn get_plc_token_from_db(did: &str) -> Option<String> {
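// Read the pending PLC operation confirmation token directly from the database for
// the given DID.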
63
+
let db_url = get_db_connection_string().await;
64
+
let pool = PgPool::connect(&db_url).await.ok()?;
65
+
66
+
sqlx::query_scalar!(
67
+
r#"
68
+
SELECT t.token
69
+
FROM plc_operation_tokens t
70
+
JOIN users u ON t.user_id = u.id
71
+
WHERE u.did = $1
72
+
"#,
73
+
did
74
+
)
75
+
.fetch_optional(&pool)
76
+
.await
77
+
.ok()?
78
+
}
79
+
80
+
async fn get_user_handle(did: &str) -> Option<String> {
81
+
let db_url = get_db_connection_string().await;
82
+
let pool = PgPool::connect(&db_url).await.ok()?;
83
+
84
+
sqlx::query_scalar!(
85
+
r#"SELECT handle FROM users WHERE did = $1"#,
86
+
did
87
+
)
88
+
.fetch_optional(&pool)
89
+
.await
90
+
.ok()?
91
+
}
92
+
93
+
fn create_mock_last_op(
94
+
_did: &str,
95
+
handle: &str,
96
+
signing_key: &SigningKey,
97
+
pds_endpoint: &str,
98
+
) -> Value {
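// Shape of a PLC directory "last operation" entry, with the test account's key as
// both rotation and verification key; the signature is a placeholder.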
99
+
let did_key = signing_key_to_did_key(signing_key);
100
+
101
+
json!({
102
+
"type": "plc_operation",
103
+
"rotationKeys": [did_key.clone()],
104
+
"verificationMethods": {
105
+
"atproto": did_key
106
+
},
107
+
"alsoKnownAs": [format!("at://{}", handle)],
108
+
"services": {
109
+
"atproto_pds": {
110
+
"type": "AtprotoPersonalDataServer",
111
+
"endpoint": pds_endpoint
112
+
}
113
+
},
114
+
"prev": null,
115
+
"sig": "mock_signature_for_testing"
116
+
})
117
+
}
118
+
119
+
fn create_did_document(did: &str, handle: &str, signing_key: &SigningKey, pds_endpoint: &str) -> Value {
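// Minimal DID document for the mock PLC directory: one Multikey verification method
// (#atproto) and one AtprotoPersonalDataServer service endpoint.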
120
+
let multikey = get_multikey_from_signing_key(signing_key);
121
+
122
+
json!({
123
+
"@context": [
124
+
"https://www.w3.org/ns/did/v1",
125
+
"https://w3id.org/security/multikey/v1"
126
+
],
127
+
"id": did,
128
+
"alsoKnownAs": [format!("at://{}", handle)],
129
+
"verificationMethod": [{
130
+
"id": format!("{}#atproto", did),
131
+
"type": "Multikey",
132
+
"controller": did,
133
+
"publicKeyMultibase": multikey
134
+
}],
135
+
"service": [{
136
+
"id": "#atproto_pds",
137
+
"type": "AtprotoPersonalDataServer",
138
+
"serviceEndpoint": pds_endpoint
139
+
}]
140
+
})
141
+
}
142
+
143
+
async fn setup_mock_plc_for_sign(
144
+
did: &str,
145
+
handle: &str,
146
+
signing_key: &SigningKey,
147
+
pds_endpoint: &str,
148
+
) -> MockServer {
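// For the sign flow these tests only need the previous operation, so this mock serves
// GET /{did}/log/last and nothing else.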
149
+
let mock_server = MockServer::start().await;
150
+
151
+
let did_encoded = urlencoding::encode(did);
152
+
let last_op = create_mock_last_op(did, handle, signing_key, pds_endpoint);
153
+
154
+
Mock::given(method("GET"))
155
+
.and(path(format!("/{}/log/last", did_encoded)))
156
+
.respond_with(ResponseTemplate::new(200).set_body_json(last_op))
157
+
.mount(&mock_server)
158
+
.await;
159
+
160
+
mock_server
161
+
}
162
+
163
+
async fn setup_mock_plc_for_submit(
164
+
did: &str,
165
+
handle: &str,
166
+
signing_key: &SigningKey,
167
+
pds_endpoint: &str,
168
+
) -> MockServer {
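// For the submit flow, mock both GET /{did} (DID document resolution) and POST /{did}
// (accepting the submitted operation).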
169
+
let mock_server = MockServer::start().await;
170
+
171
+
let did_encoded = urlencoding::encode(did);
172
+
let did_doc = create_did_document(did, handle, signing_key, pds_endpoint);
173
+
174
+
Mock::given(method("GET"))
175
+
.and(path(format!("/{}", did_encoded)))
176
+
.respond_with(ResponseTemplate::new(200).set_body_json(did_doc.clone()))
177
+
.mount(&mock_server)
178
+
.await;
179
+
180
+
Mock::given(method("POST"))
181
+
.and(path(format!("/{}", did_encoded)))
182
+
.respond_with(ResponseTemplate::new(200))
183
+
.mount(&mock_server)
184
+
.await;
185
+
186
+
mock_server
187
+
}
188
+
189
+
#[tokio::test]
190
+
async fn test_full_plc_operation_flow() {
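// End-to-end sign flow: request a PLC operation signature, read the resulting token
// from the database, then sign against a mocked PLC directory and check the returned
// operation.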
191
+
let client = client();
192
+
let (token, did) = create_account_and_login(&client).await;
193
+
194
+
let key_bytes = get_user_signing_key(&did).await
195
+
.expect("Failed to get user signing key");
196
+
let signing_key = SigningKey::from_slice(&key_bytes)
197
+
.expect("Failed to create signing key");
198
+
199
+
let handle = get_user_handle(&did).await
200
+
.expect("Failed to get user handle");
201
+
202
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
203
+
let pds_endpoint = format!("https://{}", hostname);
204
+
205
+
let request_res = client
206
+
.post(format!(
207
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
208
+
base_url().await
209
+
))
210
+
.bearer_auth(&token)
211
+
.send()
212
+
.await
213
+
.expect("Request failed");
214
+
215
+
assert_eq!(request_res.status(), StatusCode::OK);
216
+
217
+
let plc_token = get_plc_token_from_db(&did).await
218
+
.expect("PLC token not found in database");
219
+
220
+
let mock_plc = setup_mock_plc_for_sign(&did, &handle, &signing_key, &pds_endpoint).await;
221
+
222
+
unsafe {
223
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
224
+
}
225
+
226
+
let sign_res = client
227
+
.post(format!(
228
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
229
+
base_url().await
230
+
))
231
+
.bearer_auth(&token)
232
+
.json(&json!({
233
+
"token": plc_token
234
+
}))
235
+
.send()
236
+
.await
237
+
.expect("Sign request failed");
238
+
239
+
let sign_status = sign_res.status();
240
+
let sign_body: Value = sign_res.json().await.unwrap_or(json!({}));
241
+
242
+
assert_eq!(
243
+
sign_status,
244
+
StatusCode::OK,
245
+
"Sign PLC operation should succeed. Response: {:?}",
246
+
sign_body
247
+
);
248
+
249
+
let operation = sign_body.get("operation")
250
+
.expect("Response should contain operation");
251
+
252
+
assert!(operation.get("sig").is_some(), "Operation should be signed");
253
+
assert_eq!(operation.get("type").and_then(|v| v.as_str()), Some("plc_operation"));
254
+
assert!(operation.get("prev").is_some(), "Operation should have prev reference");
255
+
}
256
+
257
+
#[tokio::test]
258
+
async fn test_sign_plc_operation_consumes_token() {
259
+
let client = client();
260
+
let (token, did) = create_account_and_login(&client).await;
261
+
262
+
let key_bytes = get_user_signing_key(&did).await
263
+
.expect("Failed to get user signing key");
264
+
let signing_key = SigningKey::from_slice(&key_bytes)
265
+
.expect("Failed to create signing key");
266
+
267
+
let handle = get_user_handle(&did).await
268
+
.expect("Failed to get user handle");
269
+
270
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
271
+
let pds_endpoint = format!("https://{}", hostname);
272
+
273
+
let request_res = client
274
+
.post(format!(
275
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
276
+
base_url().await
277
+
))
278
+
.bearer_auth(&token)
279
+
.send()
280
+
.await
281
+
.expect("Request failed");
282
+
283
+
assert_eq!(request_res.status(), StatusCode::OK);
284
+
285
+
let plc_token = get_plc_token_from_db(&did).await
286
+
.expect("PLC token not found in database");
287
+
288
+
let mock_plc = setup_mock_plc_for_sign(&did, &handle, &signing_key, &pds_endpoint).await;
289
+
290
+
unsafe {
291
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
292
+
}
293
+
294
+
let sign_res = client
295
+
.post(format!(
296
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
297
+
base_url().await
298
+
))
299
+
.bearer_auth(&token)
300
+
.json(&json!({
301
+
"token": plc_token
302
+
}))
303
+
.send()
304
+
.await
305
+
.expect("Sign request failed");
306
+
307
+
assert_eq!(sign_res.status(), StatusCode::OK);
308
+
309
+
let sign_res_2 = client
310
+
.post(format!(
311
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
312
+
base_url().await
313
+
))
314
+
.bearer_auth(&token)
315
+
.json(&json!({
316
+
"token": plc_token
317
+
}))
318
+
.send()
319
+
.await
320
+
.expect("Second sign request failed");
321
+
322
+
assert_eq!(
323
+
sign_res_2.status(),
324
+
StatusCode::BAD_REQUEST,
325
+
"Using the same token twice should fail"
326
+
);
327
+
328
+
let body: Value = sign_res_2.json().await.unwrap();
329
+
assert!(
330
+
body["error"] == "InvalidToken" || body["error"] == "ExpiredToken",
331
+
"Error should indicate invalid/expired token"
332
+
);
333
+
}
334
+
335
+
#[tokio::test]
336
+
async fn test_sign_plc_operation_with_custom_fields() {
337
+
let client = client();
338
+
let (token, did) = create_account_and_login(&client).await;
339
+
340
+
let key_bytes = get_user_signing_key(&did).await
341
+
.expect("Failed to get user signing key");
342
+
let signing_key = SigningKey::from_slice(&key_bytes)
343
+
.expect("Failed to create signing key");
344
+
345
+
let handle = get_user_handle(&did).await
346
+
.expect("Failed to get user handle");
347
+
348
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
349
+
let pds_endpoint = format!("https://{}", hostname);
350
+
351
+
let request_res = client
352
+
.post(format!(
353
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
354
+
base_url().await
355
+
))
356
+
.bearer_auth(&token)
357
+
.send()
358
+
.await
359
+
.expect("Request failed");
360
+
361
+
assert_eq!(request_res.status(), StatusCode::OK);
362
+
363
+
let plc_token = get_plc_token_from_db(&did).await
364
+
.expect("PLC token not found in database");
365
+
366
+
let mock_plc = setup_mock_plc_for_sign(&did, &handle, &signing_key, &pds_endpoint).await;
367
+
368
+
unsafe {
369
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
370
+
}
371
+
372
+
let did_key = signing_key_to_did_key(&signing_key);
373
+
374
+
let sign_res = client
375
+
.post(format!(
376
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
377
+
base_url().await
378
+
))
379
+
.bearer_auth(&token)
380
+
.json(&json!({
381
+
"token": plc_token,
382
+
"alsoKnownAs": [format!("at://{}", handle), "at://custom.alias.example"],
383
+
"rotationKeys": [did_key.clone(), "did:key:zExtraRotationKey123"]
384
+
}))
385
+
.send()
386
+
.await
387
+
.expect("Sign request failed");
388
+
389
+
let sign_status = sign_res.status();
390
+
let sign_body: Value = sign_res.json().await.unwrap_or(json!({}));
391
+
392
+
assert_eq!(
393
+
sign_status,
394
+
StatusCode::OK,
395
+
"Sign with custom fields should succeed. Response: {:?}",
396
+
sign_body
397
+
);
398
+
399
+
let operation = sign_body.get("operation").expect("Should have operation");
400
+
let also_known_as = operation.get("alsoKnownAs").and_then(|v| v.as_array());
401
+
let rotation_keys = operation.get("rotationKeys").and_then(|v| v.as_array());
402
+
403
+
assert!(also_known_as.is_some(), "Should have alsoKnownAs");
404
+
assert!(rotation_keys.is_some(), "Should have rotationKeys");
405
+
assert_eq!(also_known_as.unwrap().len(), 2, "Should have 2 aliases");
406
+
assert_eq!(rotation_keys.unwrap().len(), 2, "Should have 2 rotation keys");
407
+
}
408
+
409
+
#[tokio::test]
410
+
async fn test_submit_plc_operation_success() {
411
+
let client = client();
412
+
let (token, did) = create_account_and_login(&client).await;
413
+
414
+
let key_bytes = get_user_signing_key(&did).await
415
+
.expect("Failed to get user signing key");
416
+
let signing_key = SigningKey::from_slice(&key_bytes)
417
+
.expect("Failed to create signing key");
418
+
419
+
let handle = get_user_handle(&did).await
420
+
.expect("Failed to get user handle");
421
+
422
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
423
+
let pds_endpoint = format!("https://{}", hostname);
424
+
425
+
let mock_plc = setup_mock_plc_for_submit(&did, &handle, &signing_key, &pds_endpoint).await;
426
+
427
+
unsafe {
428
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
429
+
}
430
+
431
+
let did_key = signing_key_to_did_key(&signing_key);
432
+
433
+
let operation = json!({
434
+
"type": "plc_operation",
435
+
"rotationKeys": [did_key.clone()],
436
+
"verificationMethods": {
437
+
"atproto": did_key.clone()
438
+
},
439
+
"alsoKnownAs": [format!("at://{}", handle)],
440
+
"services": {
441
+
"atproto_pds": {
442
+
"type": "AtprotoPersonalDataServer",
443
+
"endpoint": pds_endpoint
444
+
}
445
+
},
446
+
"prev": "bafyreiabc123",
447
+
"sig": "test_signature_base64"
448
+
});
449
+
450
+
let submit_res = client
451
+
.post(format!(
452
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
453
+
base_url().await
454
+
))
455
+
.bearer_auth(&token)
456
+
.json(&json!({ "operation": operation }))
457
+
.send()
458
+
.await
459
+
.expect("Submit request failed");
460
+
461
+
let submit_status = submit_res.status();
462
+
let submit_body: Value = submit_res.json().await.unwrap_or(json!({}));
463
+
464
+
assert_eq!(
465
+
submit_status,
466
+
StatusCode::OK,
467
+
"Submit PLC operation should succeed. Response: {:?}",
468
+
submit_body
469
+
);
470
+
}
471
+
472
+
#[tokio::test]
473
+
async fn test_submit_plc_operation_wrong_endpoint_rejected() {
474
+
let client = client();
475
+
let (token, did) = create_account_and_login(&client).await;
476
+
477
+
let key_bytes = get_user_signing_key(&did).await
478
+
.expect("Failed to get user signing key");
479
+
let signing_key = SigningKey::from_slice(&key_bytes)
480
+
.expect("Failed to create signing key");
481
+
482
+
let handle = get_user_handle(&did).await
483
+
.expect("Failed to get user handle");
484
+
485
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
486
+
let pds_endpoint = format!("https://{}", hostname);
487
+
488
+
let mock_plc = setup_mock_plc_for_submit(&did, &handle, &signing_key, &pds_endpoint).await;
489
+
490
+
unsafe {
491
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
492
+
}
493
+
494
+
let did_key = signing_key_to_did_key(&signing_key);
495
+
496
+
let operation = json!({
497
+
"type": "plc_operation",
498
+
"rotationKeys": [did_key.clone()],
499
+
"verificationMethods": {
500
+
"atproto": did_key.clone()
501
+
},
502
+
"alsoKnownAs": [format!("at://{}", handle)],
503
+
"services": {
504
+
"atproto_pds": {
505
+
"type": "AtprotoPersonalDataServer",
506
+
"endpoint": "https://wrong-pds.example.com"
507
+
}
508
+
},
509
+
"prev": "bafyreiabc123",
510
+
"sig": "test_signature_base64"
511
+
});
512
+
513
+
let submit_res = client
514
+
.post(format!(
515
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
516
+
base_url().await
517
+
))
518
+
.bearer_auth(&token)
519
+
.json(&json!({ "operation": operation }))
520
+
.send()
521
+
.await
522
+
.expect("Submit request failed");
523
+
524
+
assert_eq!(
525
+
submit_res.status(),
526
+
StatusCode::BAD_REQUEST,
527
+
"Submit with wrong endpoint should fail"
528
+
);
529
+
530
+
let body: Value = submit_res.json().await.unwrap();
531
+
assert_eq!(body["error"], "InvalidRequest");
532
+
}
533
+
534
+
#[tokio::test]
535
+
async fn test_submit_plc_operation_wrong_signing_key_rejected() {
536
+
let client = client();
537
+
let (token, did) = create_account_and_login(&client).await;
538
+
539
+
let key_bytes = get_user_signing_key(&did).await
540
+
.expect("Failed to get user signing key");
541
+
let signing_key = SigningKey::from_slice(&key_bytes)
542
+
.expect("Failed to create signing key");
543
+
544
+
let handle = get_user_handle(&did).await
545
+
.expect("Failed to get user handle");
546
+
547
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
548
+
let pds_endpoint = format!("https://{}", hostname);
549
+
550
+
let mock_plc = setup_mock_plc_for_submit(&did, &handle, &signing_key, &pds_endpoint).await;
551
+
552
+
unsafe {
553
+
std::env::set_var("PLC_DIRECTORY_URL", mock_plc.uri());
554
+
}
555
+
556
+
let wrong_key = SigningKey::random(&mut rand::thread_rng());
557
+
let wrong_did_key = signing_key_to_did_key(&wrong_key);
558
+
let correct_did_key = signing_key_to_did_key(&signing_key);
559
+
560
+
let operation = json!({
561
+
"type": "plc_operation",
562
+
"rotationKeys": [correct_did_key.clone()],
563
+
"verificationMethods": {
564
+
"atproto": wrong_did_key
565
+
},
566
+
"alsoKnownAs": [format!("at://{}", handle)],
567
+
"services": {
568
+
"atproto_pds": {
569
+
"type": "AtprotoPersonalDataServer",
570
+
"endpoint": pds_endpoint
571
+
}
572
+
},
573
+
"prev": "bafyreiabc123",
574
+
"sig": "test_signature_base64"
575
+
});
576
+
577
+
let submit_res = client
578
+
.post(format!(
579
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
580
+
base_url().await
581
+
))
582
+
.bearer_auth(&token)
583
+
.json(&json!({ "operation": operation }))
584
+
.send()
585
+
.await
586
+
.expect("Submit request failed");
587
+
588
+
assert_eq!(
589
+
submit_res.status(),
590
+
StatusCode::BAD_REQUEST,
591
+
"Submit with wrong signing key should fail"
592
+
);
593
+
594
+
let body: Value = submit_res.json().await.unwrap();
595
+
assert_eq!(body["error"], "InvalidRequest");
596
+
}
597
+
598
+
#[tokio::test]
599
+
async fn test_full_sign_and_submit_flow() {
600
+
let client = client();
601
+
let (token, did) = create_account_and_login(&client).await;
602
+
603
+
let key_bytes = get_user_signing_key(&did).await
604
+
.expect("Failed to get user signing key");
605
+
let signing_key = SigningKey::from_slice(&key_bytes)
606
+
.expect("Failed to create signing key");
607
+
608
+
let handle = get_user_handle(&did).await
609
+
.expect("Failed to get user handle");
610
+
611
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
612
+
let pds_endpoint = format!("https://{}", hostname);
613
+
614
+
let request_res = client
615
+
.post(format!(
616
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
617
+
base_url().await
618
+
))
619
+
.bearer_auth(&token)
620
+
.send()
621
+
.await
622
+
.expect("Request failed");
623
+
assert_eq!(request_res.status(), StatusCode::OK);
624
+
625
+
let plc_token = get_plc_token_from_db(&did).await
626
+
.expect("PLC token not found");
627
+
628
+
let mock_server = MockServer::start().await;
629
+
let did_encoded = urlencoding::encode(&did);
630
+
let did_key = signing_key_to_did_key(&signing_key);
631
+
632
+
let last_op = json!({
633
+
"type": "plc_operation",
634
+
"rotationKeys": [did_key.clone()],
635
+
"verificationMethods": {
636
+
"atproto": did_key.clone()
637
+
},
638
+
"alsoKnownAs": [format!("at://{}", handle)],
639
+
"services": {
640
+
"atproto_pds": {
641
+
"type": "AtprotoPersonalDataServer",
642
+
"endpoint": pds_endpoint.clone()
643
+
}
644
+
},
645
+
"prev": null,
646
+
"sig": "initial_sig"
647
+
});
648
+
649
+
Mock::given(method("GET"))
650
+
.and(path(format!("/{}/log/last", did_encoded)))
651
+
.respond_with(ResponseTemplate::new(200).set_body_json(last_op))
652
+
.mount(&mock_server)
653
+
.await;
654
+
655
+
let did_doc = create_did_document(&did, &handle, &signing_key, &pds_endpoint);
656
+
Mock::given(method("GET"))
657
+
.and(path(format!("/{}", did_encoded)))
658
+
.respond_with(ResponseTemplate::new(200).set_body_json(did_doc))
659
+
.mount(&mock_server)
660
+
.await;
661
+
662
+
Mock::given(method("POST"))
663
+
.and(path(format!("/{}", did_encoded)))
664
+
.respond_with(ResponseTemplate::new(200))
665
+
.expect(1)
666
+
.mount(&mock_server)
667
+
.await;
668
+
669
+
unsafe {
670
+
std::env::set_var("PLC_DIRECTORY_URL", mock_server.uri());
671
+
}
672
+
673
+
let sign_res = client
674
+
.post(format!(
675
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
676
+
base_url().await
677
+
))
678
+
.bearer_auth(&token)
679
+
.json(&json!({ "token": plc_token }))
680
+
.send()
681
+
.await
682
+
.expect("Sign failed");
683
+
684
+
assert_eq!(sign_res.status(), StatusCode::OK);
685
+
686
+
let sign_body: Value = sign_res.json().await.unwrap();
687
+
let signed_operation = sign_body.get("operation")
688
+
.expect("Response should contain operation")
689
+
.clone();
690
+
691
+
assert!(signed_operation.get("sig").is_some());
692
+
assert!(signed_operation.get("prev").is_some());
693
+
694
+
let submit_res = client
695
+
.post(format!(
696
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
697
+
base_url().await
698
+
))
699
+
.bearer_auth(&token)
700
+
.json(&json!({ "operation": signed_operation }))
701
+
.send()
702
+
.await
703
+
.expect("Submit failed");
704
+
705
+
let submit_status = submit_res.status();
706
+
let submit_body: Value = submit_res.json().await.unwrap_or(json!({}));
707
+
708
+
assert_eq!(
709
+
submit_status,
710
+
StatusCode::OK,
711
+
"Full sign and submit flow should succeed. Response: {:?}",
712
+
submit_body
713
+
);
714
+
}
715
+
716
+
#[tokio::test]
717
+
async fn test_cross_pds_migration_with_records() {
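// Simulates a cross-PDS migration: create a record, export the repo as a CAR file,
// then re-import it with signature verification enabled against a mocked DID document
// and confirm the record survives.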
718
+
let client = client();
719
+
let (token, did) = create_account_and_login(&client).await;
720
+
721
+
let key_bytes = get_user_signing_key(&did).await
722
+
.expect("Failed to get user signing key");
723
+
let signing_key = SigningKey::from_slice(&key_bytes)
724
+
.expect("Failed to create signing key");
725
+
726
+
let handle = get_user_handle(&did).await
727
+
.expect("Failed to get user handle");
728
+
729
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
730
+
let pds_endpoint = format!("https://{}", hostname);
731
+
732
+
let post_payload = json!({
733
+
"repo": did,
734
+
"collection": "app.bsky.feed.post",
735
+
"record": {
736
+
"$type": "app.bsky.feed.post",
737
+
"text": "Test post before migration",
738
+
"createdAt": chrono::Utc::now().to_rfc3339(),
739
+
}
740
+
});
741
+
742
+
let create_res = client
743
+
.post(format!(
744
+
"{}/xrpc/com.atproto.repo.createRecord",
745
+
base_url().await
746
+
))
747
+
.bearer_auth(&token)
748
+
.json(&post_payload)
749
+
.send()
750
+
.await
751
+
.expect("Failed to create post");
752
+
assert_eq!(create_res.status(), StatusCode::OK);
753
+
754
+
let create_body: Value = create_res.json().await.unwrap();
755
+
let original_uri = create_body["uri"].as_str().unwrap().to_string();
756
+
757
+
let export_res = client
758
+
.get(format!(
759
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
760
+
base_url().await,
761
+
did
762
+
))
763
+
.send()
764
+
.await
765
+
.expect("Export failed");
766
+
assert_eq!(export_res.status(), StatusCode::OK);
767
+
let car_bytes = export_res.bytes().await.unwrap();
768
+
769
+
assert!(car_bytes.len() > 100, "CAR file should have meaningful content");
770
+
771
+
let mock_server = MockServer::start().await;
772
+
let did_encoded = urlencoding::encode(&did);
773
+
let did_doc = create_did_document(&did, &handle, &signing_key, &pds_endpoint);
774
+
775
+
Mock::given(method("GET"))
776
+
.and(path(format!("/{}", did_encoded)))
777
+
.respond_with(ResponseTemplate::new(200).set_body_json(did_doc))
778
+
.mount(&mock_server)
779
+
.await;
780
+
781
+
unsafe {
782
+
std::env::set_var("PLC_DIRECTORY_URL", mock_server.uri());
783
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
784
+
}
785
+
786
+
let import_res = client
787
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
788
+
.bearer_auth(&token)
789
+
.header("Content-Type", "application/vnd.ipld.car")
790
+
.body(car_bytes.to_vec())
791
+
.send()
792
+
.await
793
+
.expect("Import failed");
794
+
795
+
let import_status = import_res.status();
796
+
let import_body: Value = import_res.json().await.unwrap_or(json!({}));
797
+
798
+
unsafe {
799
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
800
+
}
801
+
802
+
assert_eq!(
803
+
import_status,
804
+
StatusCode::OK,
805
+
"Import with valid DID document should succeed. Response: {:?}",
806
+
import_body
807
+
);
808
+
809
+
let get_record_res = client
810
+
.get(format!(
811
+
"{}/xrpc/com.atproto.repo.getRecord?repo={}&collection=app.bsky.feed.post&rkey={}",
812
+
base_url().await,
813
+
did,
814
+
original_uri.split('/').last().unwrap()
815
+
))
816
+
.send()
817
+
.await
818
+
.expect("Get record failed");
819
+
820
+
assert_eq!(
821
+
get_record_res.status(),
822
+
StatusCode::OK,
823
+
"Record should be retrievable after import"
824
+
);
825
+
826
+
let record_body: Value = get_record_res.json().await.unwrap();
827
+
assert_eq!(
828
+
record_body["value"]["text"],
829
+
"Test post before migration",
830
+
"Record content should match"
831
+
);
832
+
}
833
+
834
+
#[tokio::test]
835
+
async fn test_migration_rejects_wrong_did_document() {
836
+
let client = client();
837
+
let (token, did) = create_account_and_login(&client).await;
838
+
839
+
let wrong_signing_key = SigningKey::random(&mut rand::thread_rng());
840
+
841
+
let handle = get_user_handle(&did).await
842
+
.expect("Failed to get user handle");
843
+
844
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
845
+
let pds_endpoint = format!("https://{}", hostname);
846
+
847
+
let export_res = client
848
+
.get(format!(
849
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
850
+
base_url().await,
851
+
did
852
+
))
853
+
.send()
854
+
.await
855
+
.expect("Export failed");
856
+
assert_eq!(export_res.status(), StatusCode::OK);
857
+
let car_bytes = export_res.bytes().await.unwrap();
858
+
859
+
let mock_server = MockServer::start().await;
860
+
let did_encoded = urlencoding::encode(&did);
861
+
let wrong_did_doc = create_did_document(&did, &handle, &wrong_signing_key, &pds_endpoint);
862
+
863
+
Mock::given(method("GET"))
864
+
.and(path(format!("/{}", did_encoded)))
865
+
.respond_with(ResponseTemplate::new(200).set_body_json(wrong_did_doc))
866
+
.mount(&mock_server)
867
+
.await;
868
+
869
+
unsafe {
870
+
std::env::set_var("PLC_DIRECTORY_URL", mock_server.uri());
871
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
872
+
}
873
+
874
+
let import_res = client
875
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
876
+
.bearer_auth(&token)
877
+
.header("Content-Type", "application/vnd.ipld.car")
878
+
.body(car_bytes.to_vec())
879
+
.send()
880
+
.await
881
+
.expect("Import failed");
882
+
883
+
let import_status = import_res.status();
884
+
let import_body: Value = import_res.json().await.unwrap_or(json!({}));
885
+
886
+
unsafe {
887
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
888
+
}
889
+
890
+
assert_eq!(
891
+
import_status,
892
+
StatusCode::BAD_REQUEST,
893
+
"Import with wrong DID document should fail. Response: {:?}",
894
+
import_body
895
+
);
896
+
897
+
assert!(
898
+
import_body["error"] == "InvalidSignature" ||
899
+
import_body["message"].as_str().unwrap_or("").contains("signature"),
900
+
"Error should mention signature verification failure"
901
+
);
902
+
}
903
+
904
+
#[tokio::test]
905
+
async fn test_full_migration_flow_end_to_end() {
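// Full migration rehearsal: create several records, run the PLC sign + submit flow
// against a mocked directory, export the repo, and re-import it with verification
// enabled.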
906
+
let client = client();
907
+
let (token, did) = create_account_and_login(&client).await;
908
+
909
+
let key_bytes = get_user_signing_key(&did).await
910
+
.expect("Failed to get user signing key");
911
+
let signing_key = SigningKey::from_slice(&key_bytes)
912
+
.expect("Failed to create signing key");
913
+
914
+
let handle = get_user_handle(&did).await
915
+
.expect("Failed to get user handle");
916
+
917
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| "localhost".to_string());
918
+
let pds_endpoint = format!("https://{}", hostname);
919
+
let did_key = signing_key_to_did_key(&signing_key);
920
+
921
+
for i in 0..3 {
922
+
let post_payload = json!({
923
+
"repo": did,
924
+
"collection": "app.bsky.feed.post",
925
+
"record": {
926
+
"$type": "app.bsky.feed.post",
927
+
"text": format!("Pre-migration post #{}", i),
928
+
"createdAt": chrono::Utc::now().to_rfc3339(),
929
+
}
930
+
});
931
+
932
+
let res = client
933
+
.post(format!(
934
+
"{}/xrpc/com.atproto.repo.createRecord",
935
+
base_url().await
936
+
))
937
+
.bearer_auth(&token)
938
+
.json(&post_payload)
939
+
.send()
940
+
.await
941
+
.expect("Failed to create post");
942
+
assert_eq!(res.status(), StatusCode::OK);
943
+
}
944
+
945
+
let request_res = client
946
+
.post(format!(
947
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
948
+
base_url().await
949
+
))
950
+
.bearer_auth(&token)
951
+
.send()
952
+
.await
953
+
.expect("Request failed");
954
+
assert_eq!(request_res.status(), StatusCode::OK);
955
+
956
+
let plc_token = get_plc_token_from_db(&did).await
957
+
.expect("PLC token not found");
958
+
959
+
let mock_server = MockServer::start().await;
960
+
let did_encoded = urlencoding::encode(&did);
961
+
962
+
let last_op = json!({
963
+
"type": "plc_operation",
964
+
"rotationKeys": [did_key.clone()],
965
+
"verificationMethods": { "atproto": did_key.clone() },
966
+
"alsoKnownAs": [format!("at://{}", handle)],
967
+
"services": {
968
+
"atproto_pds": {
969
+
"type": "AtprotoPersonalDataServer",
970
+
"endpoint": pds_endpoint.clone()
971
+
}
972
+
},
973
+
"prev": null,
974
+
"sig": "initial_sig"
975
+
});
976
+
977
+
Mock::given(method("GET"))
978
+
.and(path(format!("/{}/log/last", did_encoded)))
979
+
.respond_with(ResponseTemplate::new(200).set_body_json(last_op))
980
+
.mount(&mock_server)
981
+
.await;
982
+
983
+
let did_doc = create_did_document(&did, &handle, &signing_key, &pds_endpoint);
984
+
Mock::given(method("GET"))
985
+
.and(path(format!("/{}", did_encoded)))
986
+
.respond_with(ResponseTemplate::new(200).set_body_json(did_doc))
987
+
.mount(&mock_server)
988
+
.await;
989
+
990
+
Mock::given(method("POST"))
991
+
.and(path(format!("/{}", did_encoded)))
992
+
.respond_with(ResponseTemplate::new(200))
993
+
.expect(1)
994
+
.mount(&mock_server)
995
+
.await;
996
+
997
+
unsafe {
998
+
std::env::set_var("PLC_DIRECTORY_URL", mock_server.uri());
999
+
}
1000
+
1001
+
let sign_res = client
1002
+
.post(format!(
1003
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
1004
+
base_url().await
1005
+
))
1006
+
.bearer_auth(&token)
1007
+
.json(&json!({ "token": plc_token }))
1008
+
.send()
1009
+
.await
1010
+
.expect("Sign failed");
1011
+
assert_eq!(sign_res.status(), StatusCode::OK);
1012
+
1013
+
let sign_body: Value = sign_res.json().await.unwrap();
1014
+
let signed_op = sign_body.get("operation").unwrap().clone();
1015
+
1016
+
let export_res = client
1017
+
.get(format!(
1018
+
"{}/xrpc/com.atproto.sync.getRepo?did={}",
1019
+
base_url().await,
1020
+
did
1021
+
))
1022
+
.send()
1023
+
.await
1024
+
.expect("Export failed");
1025
+
assert_eq!(export_res.status(), StatusCode::OK);
1026
+
let car_bytes = export_res.bytes().await.unwrap();
1027
+
1028
+
let submit_res = client
1029
+
.post(format!(
1030
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
1031
+
base_url().await
1032
+
))
1033
+
.bearer_auth(&token)
1034
+
.json(&json!({ "operation": signed_op }))
1035
+
.send()
1036
+
.await
1037
+
.expect("Submit failed");
1038
+
assert_eq!(submit_res.status(), StatusCode::OK);
1039
+
1040
+
unsafe {
1041
+
std::env::remove_var("SKIP_IMPORT_VERIFICATION");
1042
+
}
1043
+
1044
+
let import_res = client
1045
+
.post(format!("{}/xrpc/com.atproto.repo.importRepo", base_url().await))
1046
+
.bearer_auth(&token)
1047
+
.header("Content-Type", "application/vnd.ipld.car")
1048
+
.body(car_bytes.to_vec())
1049
+
.send()
1050
+
.await
1051
+
.expect("Import failed");
1052
+
1053
+
let import_status = import_res.status();
1054
+
let import_body: Value = import_res.json().await.unwrap_or(json!({}));
1055
+
1056
+
unsafe {
1057
+
std::env::set_var("SKIP_IMPORT_VERIFICATION", "true");
1058
+
}
1059
+
1060
+
assert_eq!(
1061
+
import_status,
1062
+
StatusCode::OK,
1063
+
"Full migration flow should succeed. Response: {:?}",
1064
+
import_body
1065
+
);
1066
+
1067
+
let list_res = client
1068
+
.get(format!(
1069
+
"{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=app.bsky.feed.post",
1070
+
base_url().await,
1071
+
did
1072
+
))
1073
+
.send()
1074
+
.await
1075
+
.expect("List failed");
1076
+
assert_eq!(list_res.status(), StatusCode::OK);
1077
+
1078
+
let list_body: Value = list_res.json().await.unwrap();
1079
+
let records = list_body["records"].as_array()
1080
+
.expect("Should have records array");
1081
+
1082
+
assert!(
1083
+
!records.is_empty(),
1084
+
"Should have at least 1 record after migration, found {}",
1085
+
records.len()
1086
+
);
1087
+
}
+491
tests/plc_operations.rs
···
1
+
mod common;
2
+
use common::*;
3
+
4
+
use reqwest::StatusCode;
5
+
use serde_json::json;
6
+
use sqlx::PgPool;
7
+
8
+
#[tokio::test]
9
+
async fn test_request_plc_operation_signature_requires_auth() {
10
+
let client = client();
11
+
12
+
let res = client
13
+
.post(format!(
14
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
15
+
base_url().await
16
+
))
17
+
.send()
18
+
.await
19
+
.expect("Request failed");
20
+
21
+
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
22
+
}
23
+
24
+
#[tokio::test]
25
+
async fn test_request_plc_operation_signature_success() {
26
+
let client = client();
27
+
let (token, _did) = create_account_and_login(&client).await;
28
+
29
+
let res = client
30
+
.post(format!(
31
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
32
+
base_url().await
33
+
))
34
+
.bearer_auth(&token)
35
+
.send()
36
+
.await
37
+
.expect("Request failed");
38
+
39
+
assert_eq!(res.status(), StatusCode::OK);
40
+
}
41
+
42
+
#[tokio::test]
43
+
async fn test_sign_plc_operation_requires_auth() {
44
+
let client = client();
45
+
46
+
let res = client
47
+
.post(format!(
48
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
49
+
base_url().await
50
+
))
51
+
.json(&json!({}))
52
+
.send()
53
+
.await
54
+
.expect("Request failed");
55
+
56
+
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
57
+
}
58
+
59
+
#[tokio::test]
60
+
async fn test_sign_plc_operation_requires_token() {
61
+
let client = client();
62
+
let (token, _did) = create_account_and_login(&client).await;
63
+
64
+
let res = client
65
+
.post(format!(
66
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
67
+
base_url().await
68
+
))
69
+
.bearer_auth(&token)
70
+
.json(&json!({}))
71
+
.send()
72
+
.await
73
+
.expect("Request failed");
74
+
75
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
76
+
let body: serde_json::Value = res.json().await.unwrap();
77
+
assert_eq!(body["error"], "InvalidRequest");
78
+
}
79
+
80
+
#[tokio::test]
81
+
async fn test_sign_plc_operation_invalid_token() {
82
+
let client = client();
83
+
let (token, _did) = create_account_and_login(&client).await;
84
+
85
+
let res = client
86
+
.post(format!(
87
+
"{}/xrpc/com.atproto.identity.signPlcOperation",
88
+
base_url().await
89
+
))
90
+
.bearer_auth(&token)
91
+
.json(&json!({
92
+
"token": "invalid-token-12345"
93
+
}))
94
+
.send()
95
+
.await
96
+
.expect("Request failed");
97
+
98
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
99
+
let body: serde_json::Value = res.json().await.unwrap();
100
+
assert!(body["error"] == "InvalidToken" || body["error"] == "ExpiredToken");
101
+
}
102
+
103
+
#[tokio::test]
104
+
async fn test_submit_plc_operation_requires_auth() {
105
+
let client = client();
106
+
107
+
let res = client
108
+
.post(format!(
109
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
110
+
base_url().await
111
+
))
112
+
.json(&json!({
113
+
"operation": {}
114
+
}))
115
+
.send()
116
+
.await
117
+
.expect("Request failed");
118
+
119
+
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
120
+
}
121
+
122
+
#[tokio::test]
123
+
async fn test_submit_plc_operation_invalid_operation() {
124
+
let client = client();
125
+
let (token, _did) = create_account_and_login(&client).await;
126
+
127
+
let res = client
128
+
.post(format!(
129
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
130
+
base_url().await
131
+
))
132
+
.bearer_auth(&token)
133
+
.json(&json!({
134
+
"operation": {
135
+
"type": "invalid_type"
136
+
}
137
+
}))
138
+
.send()
139
+
.await
140
+
.expect("Request failed");
141
+
142
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
143
+
let body: serde_json::Value = res.json().await.unwrap();
144
+
assert_eq!(body["error"], "InvalidRequest");
145
+
}
146
+
147
+
#[tokio::test]
148
+
async fn test_submit_plc_operation_missing_sig() {
149
+
let client = client();
150
+
let (token, _did) = create_account_and_login(&client).await;
151
+
152
+
let res = client
153
+
.post(format!(
154
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
155
+
base_url().await
156
+
))
157
+
.bearer_auth(&token)
158
+
.json(&json!({
159
+
"operation": {
160
+
"type": "plc_operation",
161
+
"rotationKeys": [],
162
+
"verificationMethods": {},
163
+
"alsoKnownAs": [],
164
+
"services": {},
165
+
"prev": null
166
+
}
167
+
}))
168
+
.send()
169
+
.await
170
+
.expect("Request failed");
171
+
172
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
173
+
let body: serde_json::Value = res.json().await.unwrap();
174
+
assert_eq!(body["error"], "InvalidRequest");
175
+
}
176
+
177
+
#[tokio::test]
178
+
async fn test_submit_plc_operation_wrong_service_endpoint() {
179
+
let client = client();
180
+
let (token, _did) = create_account_and_login(&client).await;
181
+
182
+
let res = client
183
+
.post(format!(
184
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
185
+
base_url().await
186
+
))
187
+
.bearer_auth(&token)
188
+
.json(&json!({
189
+
"operation": {
190
+
"type": "plc_operation",
191
+
"rotationKeys": ["did:key:z123"],
192
+
"verificationMethods": {"atproto": "did:key:z456"},
193
+
"alsoKnownAs": ["at://wrong.handle"],
194
+
"services": {
195
+
"atproto_pds": {
196
+
"type": "AtprotoPersonalDataServer",
197
+
"endpoint": "https://wrong.example.com"
198
+
}
199
+
},
200
+
"prev": null,
201
+
"sig": "fake_signature"
202
+
}
203
+
}))
204
+
.send()
205
+
.await
206
+
.expect("Request failed");
207
+
208
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
209
+
}
210
+
211
+
#[tokio::test]
212
+
async fn test_request_plc_operation_creates_token_in_db() {
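// Verify the side effect directly in the database: an unexpired token in the expected
// xxxxx-xxxxx shape for this user.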
213
+
let client = client();
214
+
let (token, did) = create_account_and_login(&client).await;
215
+
216
+
let res = client
217
+
.post(format!(
218
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
219
+
base_url().await
220
+
))
221
+
.bearer_auth(&token)
222
+
.send()
223
+
.await
224
+
.expect("Request failed");
225
+
226
+
assert_eq!(res.status(), StatusCode::OK);
227
+
228
+
let db_url = get_db_connection_string().await;
229
+
let pool = PgPool::connect(&db_url).await.expect("DB connect failed");
230
+
231
+
let row = sqlx::query!(
232
+
r#"
233
+
SELECT t.token, t.expires_at
234
+
FROM plc_operation_tokens t
235
+
JOIN users u ON t.user_id = u.id
236
+
WHERE u.did = $1
237
+
"#,
238
+
did
239
+
)
240
+
.fetch_optional(&pool)
241
+
.await
242
+
.expect("Query failed");
243
+
244
+
assert!(row.is_some(), "PLC token should be created in database");
245
+
let row = row.unwrap();
246
+
assert_eq!(row.token.len(), 11, "Token should be in format xxxxx-xxxxx");
247
+
assert!(row.token.contains('-'), "Token should contain hyphen");
248
+
assert!(row.expires_at > chrono::Utc::now(), "Token should not be expired");
249
+
}
250
+
251
+
#[tokio::test]
252
+
async fn test_request_plc_operation_replaces_existing_token() {
253
+
let client = client();
254
+
let (token, did) = create_account_and_login(&client).await;
255
+
256
+
let res1 = client
257
+
.post(format!(
258
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
259
+
base_url().await
260
+
))
261
+
.bearer_auth(&token)
262
+
.send()
263
+
.await
264
+
.expect("Request 1 failed");
265
+
assert_eq!(res1.status(), StatusCode::OK);
266
+
267
+
let db_url = get_db_connection_string().await;
268
+
let pool = PgPool::connect(&db_url).await.expect("DB connect failed");
269
+
270
+
let token1 = sqlx::query_scalar!(
271
+
r#"
272
+
SELECT t.token
273
+
FROM plc_operation_tokens t
274
+
JOIN users u ON t.user_id = u.id
275
+
WHERE u.did = $1
276
+
"#,
277
+
did
278
+
)
279
+
.fetch_one(&pool)
280
+
.await
281
+
.expect("Query failed");
282
+
283
+
let res2 = client
284
+
.post(format!(
285
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
286
+
base_url().await
287
+
))
288
+
.bearer_auth(&token)
289
+
.send()
290
+
.await
291
+
.expect("Request 2 failed");
292
+
assert_eq!(res2.status(), StatusCode::OK);
293
+
294
+
let token2 = sqlx::query_scalar!(
295
+
r#"
296
+
SELECT t.token
297
+
FROM plc_operation_tokens t
298
+
JOIN users u ON t.user_id = u.id
299
+
WHERE u.did = $1
300
+
"#,
301
+
did
302
+
)
303
+
.fetch_one(&pool)
304
+
.await
305
+
.expect("Query failed");
306
+
307
+
assert_ne!(token1, token2, "Second request should generate a new token");
308
+
309
+
let count: i64 = sqlx::query_scalar!(
310
+
r#"
311
+
SELECT COUNT(*) as "count!"
312
+
FROM plc_operation_tokens t
313
+
JOIN users u ON t.user_id = u.id
314
+
WHERE u.did = $1
315
+
"#,
316
+
did
317
+
)
318
+
.fetch_one(&pool)
319
+
.await
320
+
.expect("Count query failed");
321
+
322
+
assert_eq!(count, 1, "Should only have one token per user");
323
+
}
324
+
325
+
#[tokio::test]
326
+
async fn test_submit_plc_operation_wrong_verification_method() {
327
+
let client = client();
328
+
let (token, did) = create_account_and_login(&client).await;
329
+
330
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| {
331
+
format!("127.0.0.1:{}", app_port())
332
+
});
333
+
334
+
let handle = did.split(':').last().unwrap_or("user");
335
+
336
+
let res = client
337
+
.post(format!(
338
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
339
+
base_url().await
340
+
))
341
+
.bearer_auth(&token)
342
+
.json(&json!({
343
+
"operation": {
344
+
"type": "plc_operation",
345
+
"rotationKeys": ["did:key:zWrongRotationKey123"],
346
+
"verificationMethods": {"atproto": "did:key:zWrongVerificationKey456"},
347
+
"alsoKnownAs": [format!("at://{}", handle)],
348
+
"services": {
349
+
"atproto_pds": {
350
+
"type": "AtprotoPersonalDataServer",
351
+
"endpoint": format!("https://{}", hostname)
352
+
}
353
+
},
354
+
"prev": null,
355
+
"sig": "fake_signature"
356
+
}
357
+
}))
358
+
.send()
359
+
.await
360
+
.expect("Request failed");
361
+
362
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
363
+
let body: serde_json::Value = res.json().await.unwrap();
364
+
assert_eq!(body["error"], "InvalidRequest");
365
+
assert!(
366
+
body["message"].as_str().unwrap_or("").contains("signing key") ||
367
+
body["message"].as_str().unwrap_or("").contains("rotation"),
368
+
"Error should mention key mismatch: {:?}",
369
+
body
370
+
);
371
+
}
372
+
373
+
#[tokio::test]
374
+
async fn test_submit_plc_operation_wrong_handle() {
375
+
let client = client();
376
+
let (token, _did) = create_account_and_login(&client).await;
377
+
378
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| {
379
+
format!("127.0.0.1:{}", app_port())
380
+
});
381
+
382
+
let res = client
383
+
.post(format!(
384
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
385
+
base_url().await
386
+
))
387
+
.bearer_auth(&token)
388
+
.json(&json!({
389
+
"operation": {
390
+
"type": "plc_operation",
391
+
"rotationKeys": ["did:key:z123"],
392
+
"verificationMethods": {"atproto": "did:key:z456"},
393
+
"alsoKnownAs": ["at://totally.wrong.handle"],
394
+
"services": {
395
+
"atproto_pds": {
396
+
"type": "AtprotoPersonalDataServer",
397
+
"endpoint": format!("https://{}", hostname)
398
+
}
399
+
},
400
+
"prev": null,
401
+
"sig": "fake_signature"
402
+
}
403
+
}))
404
+
.send()
405
+
.await
406
+
.expect("Request failed");
407
+
408
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
409
+
let body: serde_json::Value = res.json().await.unwrap();
410
+
assert_eq!(body["error"], "InvalidRequest");
411
+
}
412
+
413
+
#[tokio::test]
414
+
async fn test_submit_plc_operation_wrong_service_type() {
415
+
let client = client();
416
+
let (token, _did) = create_account_and_login(&client).await;
417
+
418
+
let hostname = std::env::var("PDS_HOSTNAME").unwrap_or_else(|_| {
419
+
format!("127.0.0.1:{}", app_port())
420
+
});
421
+
422
+
let res = client
423
+
.post(format!(
424
+
"{}/xrpc/com.atproto.identity.submitPlcOperation",
425
+
base_url().await
426
+
))
427
+
.bearer_auth(&token)
428
+
.json(&json!({
429
+
"operation": {
430
+
"type": "plc_operation",
431
+
"rotationKeys": ["did:key:z123"],
432
+
"verificationMethods": {"atproto": "did:key:z456"},
433
+
"alsoKnownAs": ["at://user"],
434
+
"services": {
435
+
"atproto_pds": {
436
+
"type": "WrongServiceType",
437
+
"endpoint": format!("https://{}", hostname)
438
+
}
439
+
},
440
+
"prev": null,
441
+
"sig": "fake_signature"
442
+
}
443
+
}))
444
+
.send()
445
+
.await
446
+
.expect("Request failed");
447
+
448
+
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
449
+
let body: serde_json::Value = res.json().await.unwrap();
450
+
assert_eq!(body["error"], "InvalidRequest");
451
+
}
452
+
453
+
#[tokio::test]
454
+
async fn test_plc_token_expiry_format() {
455
+
let client = client();
456
+
let (token, did) = create_account_and_login(&client).await;
457
+
458
+
let res = client
459
+
.post(format!(
460
+
"{}/xrpc/com.atproto.identity.requestPlcOperationSignature",
461
+
base_url().await
462
+
))
463
+
.bearer_auth(&token)
464
+
.send()
465
+
.await
466
+
.expect("Request failed");
467
+
assert_eq!(res.status(), StatusCode::OK);
468
+
469
+
let db_url = get_db_connection_string().await;
470
+
let pool = PgPool::connect(&db_url).await.expect("DB connect failed");
471
+
472
+
let row = sqlx::query!(
473
+
r#"
474
+
SELECT t.expires_at
475
+
FROM plc_operation_tokens t
476
+
JOIN users u ON t.user_id = u.id
477
+
WHERE u.did = $1
478
+
"#,
479
+
did
480
+
)
481
+
.fetch_one(&pool)
482
+
.await
483
+
.expect("Query failed");
484
+
485
+
let now = chrono::Utc::now();
486
+
let expires = row.expires_at;
487
+
488
+
let diff = expires - now;
489
+
assert!(diff.num_minutes() >= 9, "Token should expire in ~10 minutes, got {} minutes", diff.num_minutes());
490
+
assert!(diff.num_minutes() <= 11, "Token should expire in ~10 minutes, got {} minutes", diff.num_minutes());
491
+
}