-- One-time tokens authorizing PLC (did:plc) identity operations for a user.
-- Each token is a single-use secret with an explicit expiry; expired rows are
-- never valid and should be purged periodically (no TTL is enforced here).
-- IF NOT EXISTS keeps the migration idempotent if re-applied.
CREATE TABLE IF NOT EXISTS plc_operation_tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Owning user; cascade so tokens disappear with the account.
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    -- Opaque token value presented by the client; must be unique.
    token TEXT NOT NULL UNIQUE,
    expires_at TIMESTAMPTZ NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Look up a user's outstanding tokens.
CREATE INDEX IF NOT EXISTS idx_plc_op_tokens_user ON plc_operation_tokens(user_id);
-- Support periodic cleanup of expired tokens.
CREATE INDEX IF NOT EXISTS idx_plc_op_tokens_expires ON plc_operation_tokens(expires_at);
···1pub mod account;
2pub mod did;
034pub use account::create_account;
5pub use did::{
6 get_recommended_did_credentials, resolve_handle, update_handle, user_did_doc, well_known_did,
7};
0
···1pub mod account;
2pub mod did;
3+pub mod plc;
45pub use account::create_account;
6pub use did::{
7 get_recommended_did_credentials, resolve_handle, update_handle, user_did_doc, well_known_did,
8};
9+pub use plc::{request_plc_operation_signature, sign_plc_operation, submit_plc_operation};
···1pub mod blob;
02pub mod meta;
3pub mod record;
45pub use blob::{list_missing_blobs, upload_blob};
06pub use meta::describe_repo;
7pub use record::{apply_writes, create_record, delete_record, get_record, list_records, put_record};
···1pub mod blob;
2+pub mod import;
3pub mod meta;
4pub mod record;
56pub use blob::{list_missing_blobs, upload_blob};
7+pub use import::import_repo;
8pub use meta::describe_repo;
9pub use record::{apply_writes, create_record, delete_record, get_record, list_records, put_record};
+21-2
src/api/repo/record/utils.rs
···3use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
4use jacquard_repo::commit::Commit;
5use jacquard_repo::storage::BlockStore;
06use serde_json::json;
7use uuid::Uuid;
8···26 ops: Vec<RecordOp>,
27 blocks_cids: &Vec<String>,
28) -> Result<CommitResult, String> {
0000000000000029 let did_obj = Did::new(did).map_err(|e| format!("Invalid DID: {}", e))?;
30 let rev = Tid::now(LimitedU32::MIN);
3132- let new_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
00003334- let new_commit_bytes = new_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;
3536 let new_root_cid = state.block_store.put(&new_commit_bytes).await
37 .map_err(|e| format!("Failed to save commit block: {:?}", e))?;
···3use jacquard::types::{did::Did, integer::LimitedU32, string::Tid};
4use jacquard_repo::commit::Commit;
5use jacquard_repo::storage::BlockStore;
6+use k256::ecdsa::SigningKey;
7use serde_json::json;
8use uuid::Uuid;
9···27 ops: Vec<RecordOp>,
28 blocks_cids: &Vec<String>,
29) -> Result<CommitResult, String> {
30+ let key_row = sqlx::query!(
31+ "SELECT key_bytes, encryption_version FROM user_keys WHERE user_id = $1",
32+ user_id
33+ )
34+ .fetch_one(&state.db)
35+ .await
36+ .map_err(|e| format!("Failed to fetch signing key: {}", e))?;
37+38+ let key_bytes = crate::config::decrypt_key(&key_row.key_bytes, key_row.encryption_version)
39+ .map_err(|e| format!("Failed to decrypt signing key: {}", e))?;
40+41+ let signing_key = SigningKey::from_slice(&key_bytes)
42+ .map_err(|e| format!("Invalid signing key: {}", e))?;
43+44 let did_obj = Did::new(did).map_err(|e| format!("Invalid DID: {}", e))?;
45 let rev = Tid::now(LimitedU32::MIN);
4647+ let unsigned_commit = Commit::new_unsigned(did_obj, new_mst_root, rev.clone(), current_root_cid);
48+49+ let signed_commit = unsigned_commit
50+ .sign(&signing_key)
51+ .map_err(|e| format!("Failed to sign commit: {:?}", e))?;
5253+ let new_commit_bytes = signed_commit.to_cbor().map_err(|e| format!("Failed to serialize commit: {:?}", e))?;
5455 let new_root_cid = state.block_store.put(&new_commit_bytes).await
56 .map_err(|e| format!("Failed to save commit block: {:?}", e))?;
+17
src/lib.rs
···3pub mod config;
4pub mod notifications;
5pub mod oauth;
06pub mod repo;
7pub mod state;
8pub mod storage;
···193 .route(
194 "/xrpc/com.atproto.identity.updateHandle",
195 post(api::identity::update_handle),
0000000000000000196 )
197 .route(
198 "/xrpc/com.atproto.admin.deleteAccount",
···3pub mod config;
4pub mod notifications;
5pub mod oauth;
6+pub mod plc;
7pub mod repo;
8pub mod state;
9pub mod storage;
···194 .route(
195 "/xrpc/com.atproto.identity.updateHandle",
196 post(api::identity::update_handle),
197+ )
198+ .route(
199+ "/xrpc/com.atproto.identity.requestPlcOperationSignature",
200+ post(api::identity::request_plc_operation_signature),
201+ )
202+ .route(
203+ "/xrpc/com.atproto.identity.signPlcOperation",
204+ post(api::identity::sign_plc_operation),
205+ )
206+ .route(
207+ "/xrpc/com.atproto.identity.submitPlcOperation",
208+ post(api::identity::submit_plc_operation),
209+ )
210+ .route(
211+ "/xrpc/com.atproto.repo.importRepo",
212+ post(api::repo::import_repo),
213 )
214 .route(
215 "/xrpc/com.atproto.admin.deleteAccount",
+2-1
src/notifications/mod.rs
···5pub use sender::{EmailSender, NotificationSender};
6pub use service::{
7 enqueue_account_deletion, enqueue_email_update, enqueue_email_verification,
8- enqueue_notification, enqueue_password_reset, enqueue_welcome, NotificationService,
09};
10pub use types::{
11 NewNotification, NotificationChannel, NotificationStatus, NotificationType, QueuedNotification,
···5pub use sender::{EmailSender, NotificationSender};
6pub use service::{
7 enqueue_account_deletion, enqueue_email_update, enqueue_email_verification,
8+ enqueue_notification, enqueue_password_reset, enqueue_plc_operation, enqueue_welcome,
9+ NotificationService,
10};
11pub use types::{
12 NewNotification, NotificationChannel, NotificationStatus, NotificationType, QueuedNotification,
···1+use bytes::Bytes;
2+use cid::Cid;
3+use ipld_core::ipld::Ipld;
4+use iroh_car::CarReader;
5+use serde_json::Value as JsonValue;
6+use sqlx::PgPool;
7+use std::collections::HashMap;
8+use std::io::Cursor;
9+use thiserror::Error;
10+use tracing::debug;
11+use uuid::Uuid;
12+13+#[derive(Error, Debug)]
14+pub enum ImportError {
15+ #[error("CAR parsing error: {0}")]
16+ CarParse(String),
17+ #[error("Expected exactly one root in CAR file")]
18+ InvalidRootCount,
19+ #[error("Block not found: {0}")]
20+ BlockNotFound(String),
21+ #[error("Invalid CBOR: {0}")]
22+ InvalidCbor(String),
23+ #[error("Database error: {0}")]
24+ Database(#[from] sqlx::Error),
25+ #[error("Block store error: {0}")]
26+ BlockStore(String),
27+ #[error("Import size limit exceeded")]
28+ SizeLimitExceeded,
29+ #[error("Repo not found")]
30+ RepoNotFound,
31+ #[error("Concurrent modification detected")]
32+ ConcurrentModification,
33+ #[error("Invalid commit structure: {0}")]
34+ InvalidCommit(String),
35+ #[error("Verification failed: {0}")]
36+ VerificationFailed(#[from] super::verify::VerifyError),
37+ #[error("DID mismatch: CAR is for {car_did}, but authenticated as {auth_did}")]
38+ DidMismatch { car_did: String, auth_did: String },
39+}
/// Reference to a blob found inside a record (`$type: "blob"` nodes).
#[derive(Debug, Clone)]
pub struct BlobRef {
    /// CID of the blob, rendered as a string.
    pub cid: String,
    /// Declared MIME type, when present on the blob node.
    pub mime_type: Option<String>,
}
46+47+pub async fn parse_car(data: &[u8]) -> Result<(Cid, HashMap<Cid, Bytes>), ImportError> {
48+ let cursor = Cursor::new(data);
49+ let mut reader = CarReader::new(cursor)
50+ .await
51+ .map_err(|e| ImportError::CarParse(e.to_string()))?;
52+53+ let header = reader.header();
54+ let roots = header.roots();
55+56+ if roots.len() != 1 {
57+ return Err(ImportError::InvalidRootCount);
58+ }
59+60+ let root = roots[0];
61+ let mut blocks = HashMap::new();
62+63+ while let Ok(Some((cid, block))) = reader.next_block().await {
64+ blocks.insert(cid, Bytes::from(block));
65+ }
66+67+ if !blocks.contains_key(&root) {
68+ return Err(ImportError::BlockNotFound(root.to_string()));
69+ }
70+71+ Ok((root, blocks))
72+}
73+74+pub fn find_blob_refs_ipld(value: &Ipld, depth: usize) -> Vec<BlobRef> {
75+ if depth > 32 {
76+ return vec![];
77+ }
78+79+ match value {
80+ Ipld::List(arr) => arr
81+ .iter()
82+ .flat_map(|v| find_blob_refs_ipld(v, depth + 1))
83+ .collect(),
84+ Ipld::Map(obj) => {
85+ if let Some(Ipld::String(type_str)) = obj.get("$type") {
86+ if type_str == "blob" {
87+ if let Some(Ipld::Link(link_cid)) = obj.get("ref") {
88+ let mime = obj
89+ .get("mimeType")
90+ .and_then(|v| if let Ipld::String(s) = v { Some(s.clone()) } else { None });
91+ return vec![BlobRef {
92+ cid: link_cid.to_string(),
93+ mime_type: mime,
94+ }];
95+ }
96+ }
97+ }
98+99+ obj.values()
100+ .flat_map(|v| find_blob_refs_ipld(v, depth + 1))
101+ .collect()
102+ }
103+ _ => vec![],
104+ }
105+}
106+107+pub fn find_blob_refs(value: &JsonValue, depth: usize) -> Vec<BlobRef> {
108+ if depth > 32 {
109+ return vec![];
110+ }
111+112+ match value {
113+ JsonValue::Array(arr) => arr
114+ .iter()
115+ .flat_map(|v| find_blob_refs(v, depth + 1))
116+ .collect(),
117+ JsonValue::Object(obj) => {
118+ if let Some(JsonValue::String(type_str)) = obj.get("$type") {
119+ if type_str == "blob" {
120+ if let Some(JsonValue::Object(ref_obj)) = obj.get("ref") {
121+ if let Some(JsonValue::String(link)) = ref_obj.get("$link") {
122+ let mime = obj
123+ .get("mimeType")
124+ .and_then(|v| v.as_str())
125+ .map(String::from);
126+ return vec![BlobRef {
127+ cid: link.clone(),
128+ mime_type: mime,
129+ }];
130+ }
131+ }
132+ }
133+ }
134+135+ obj.values()
136+ .flat_map(|v| find_blob_refs(v, depth + 1))
137+ .collect()
138+ }
139+ _ => vec![],
140+ }
141+}
142+143+pub fn extract_links(value: &Ipld, links: &mut Vec<Cid>) {
144+ match value {
145+ Ipld::Link(cid) => {
146+ links.push(*cid);
147+ }
148+ Ipld::Map(map) => {
149+ for v in map.values() {
150+ extract_links(v, links);
151+ }
152+ }
153+ Ipld::List(arr) => {
154+ for v in arr {
155+ extract_links(v, links);
156+ }
157+ }
158+ _ => {}
159+ }
160+}
161+162+#[derive(Debug)]
163+pub struct ImportedRecord {
164+ pub collection: String,
165+ pub rkey: String,
166+ pub cid: Cid,
167+ pub blob_refs: Vec<BlobRef>,
168+}
169+170+pub fn walk_mst(
171+ blocks: &HashMap<Cid, Bytes>,
172+ root_cid: &Cid,
173+) -> Result<Vec<ImportedRecord>, ImportError> {
174+ let mut records = Vec::new();
175+ let mut stack = vec![*root_cid];
176+ let mut visited = std::collections::HashSet::new();
177+178+ while let Some(cid) = stack.pop() {
179+ if visited.contains(&cid) {
180+ continue;
181+ }
182+ visited.insert(cid);
183+184+ let block = blocks
185+ .get(&cid)
186+ .ok_or_else(|| ImportError::BlockNotFound(cid.to_string()))?;
187+188+ let value: Ipld = serde_ipld_dagcbor::from_slice(block)
189+ .map_err(|e| ImportError::InvalidCbor(e.to_string()))?;
190+191+ if let Ipld::Map(ref obj) = value {
192+ if let Some(Ipld::List(entries)) = obj.get("e") {
193+ for entry in entries {
194+ if let Ipld::Map(entry_obj) = entry {
195+ let key = entry_obj.get("k").and_then(|k| {
196+ if let Ipld::Bytes(b) = k {
197+ String::from_utf8(b.clone()).ok()
198+ } else if let Ipld::String(s) = k {
199+ Some(s.clone())
200+ } else {
201+ None
202+ }
203+ });
204+205+ let record_cid = entry_obj.get("v").and_then(|v| {
206+ if let Ipld::Link(cid) = v {
207+ Some(*cid)
208+ } else {
209+ None
210+ }
211+ });
212+213+ if let (Some(key), Some(record_cid)) = (key, record_cid) {
214+ if let Some(record_block) = blocks.get(&record_cid) {
215+ if let Ok(record_value) =
216+ serde_ipld_dagcbor::from_slice::<Ipld>(record_block)
217+ {
218+ let blob_refs = find_blob_refs_ipld(&record_value, 0);
219+220+ let parts: Vec<&str> = key.split('/').collect();
221+ if parts.len() >= 2 {
222+ let collection = parts[..parts.len() - 1].join("/");
223+ let rkey = parts[parts.len() - 1].to_string();
224+225+ records.push(ImportedRecord {
226+ collection,
227+ rkey,
228+ cid: record_cid,
229+ blob_refs,
230+ });
231+ }
232+ }
233+ }
234+ }
235+236+ if let Some(Ipld::Link(tree_cid)) = entry_obj.get("t") {
237+ stack.push(*tree_cid);
238+ }
239+ }
240+ }
241+ }
242+243+ if let Some(Ipld::Link(left_cid)) = obj.get("l") {
244+ stack.push(*left_cid);
245+ }
246+ }
247+ }
248+249+ Ok(records)
250+}
/// Optional metadata pulled from a commit block ("rev" and "prev" fields).
pub struct CommitInfo {
    pub rev: Option<String>,
    pub prev: Option<String>,
}
256+257+fn extract_commit_info(commit: &Ipld) -> Result<(Cid, CommitInfo), ImportError> {
258+ let obj = match commit {
259+ Ipld::Map(m) => m,
260+ _ => return Err(ImportError::InvalidCommit("Commit must be a map".to_string())),
261+ };
262+263+ let data_cid = obj
264+ .get("data")
265+ .and_then(|d| if let Ipld::Link(cid) = d { Some(*cid) } else { None })
266+ .ok_or_else(|| ImportError::InvalidCommit("Missing data field".to_string()))?;
267+268+ let rev = obj.get("rev").and_then(|r| {
269+ if let Ipld::String(s) = r {
270+ Some(s.clone())
271+ } else {
272+ None
273+ }
274+ });
275+276+ let prev = obj.get("prev").and_then(|p| {
277+ if let Ipld::Link(cid) = p {
278+ Some(cid.to_string())
279+ } else if let Ipld::Null = p {
280+ None
281+ } else {
282+ None
283+ }
284+ });
285+286+ Ok((data_cid, CommitInfo { rev, prev }))
287+}
288+289+pub async fn apply_import(
290+ db: &PgPool,
291+ user_id: Uuid,
292+ root: Cid,
293+ blocks: HashMap<Cid, Bytes>,
294+ max_blocks: usize,
295+) -> Result<Vec<ImportedRecord>, ImportError> {
296+ if blocks.len() > max_blocks {
297+ return Err(ImportError::SizeLimitExceeded);
298+ }
299+300+ let root_block = blocks
301+ .get(&root)
302+ .ok_or_else(|| ImportError::BlockNotFound(root.to_string()))?;
303+ let commit: Ipld = serde_ipld_dagcbor::from_slice(root_block)
304+ .map_err(|e| ImportError::InvalidCbor(e.to_string()))?;
305+306+ let (data_cid, _commit_info) = extract_commit_info(&commit)?;
307+308+ let records = walk_mst(&blocks, &data_cid)?;
309+310+ debug!(
311+ "Importing {} blocks and {} records for user {}",
312+ blocks.len(),
313+ records.len(),
314+ user_id
315+ );
316+317+ let mut tx = db.begin().await?;
318+319+ let repo = sqlx::query!(
320+ "SELECT repo_root_cid FROM repos WHERE user_id = $1 FOR UPDATE NOWAIT",
321+ user_id
322+ )
323+ .fetch_optional(&mut *tx)
324+ .await
325+ .map_err(|e| {
326+ if let sqlx::Error::Database(ref db_err) = e {
327+ if db_err.code().as_deref() == Some("55P03") {
328+ return ImportError::ConcurrentModification;
329+ }
330+ }
331+ ImportError::Database(e)
332+ })?;
333+334+ if repo.is_none() {
335+ return Err(ImportError::RepoNotFound);
336+ }
337+338+ let block_chunks: Vec<Vec<(&Cid, &Bytes)>> = blocks
339+ .iter()
340+ .collect::<Vec<_>>()
341+ .chunks(100)
342+ .map(|c| c.to_vec())
343+ .collect();
344+345+ for chunk in block_chunks {
346+ for (cid, data) in chunk {
347+ let cid_bytes = cid.to_bytes();
348+ sqlx::query!(
349+ "INSERT INTO blocks (cid, data) VALUES ($1, $2) ON CONFLICT (cid) DO NOTHING",
350+ &cid_bytes,
351+ data.as_ref()
352+ )
353+ .execute(&mut *tx)
354+ .await?;
355+ }
356+ }
357+358+ let root_str = root.to_string();
359+ sqlx::query!(
360+ "UPDATE repos SET repo_root_cid = $1, updated_at = NOW() WHERE user_id = $2",
361+ root_str,
362+ user_id
363+ )
364+ .execute(&mut *tx)
365+ .await?;
366+367+ sqlx::query!("DELETE FROM records WHERE repo_id = $1", user_id)
368+ .execute(&mut *tx)
369+ .await?;
370+371+ for record in &records {
372+ let record_cid_str = record.cid.to_string();
373+ sqlx::query!(
374+ r#"
375+ INSERT INTO records (repo_id, collection, rkey, record_cid)
376+ VALUES ($1, $2, $3, $4)
377+ ON CONFLICT (repo_id, collection, rkey) DO UPDATE SET record_cid = $4
378+ "#,
379+ user_id,
380+ record.collection,
381+ record.rkey,
382+ record_cid_str
383+ )
384+ .execute(&mut *tx)
385+ .await?;
386+ }
387+388+ tx.commit().await?;
389+390+ debug!(
391+ "Successfully imported {} blocks and {} records",
392+ blocks.len(),
393+ records.len()
394+ );
395+396+ Ok(records)
397+}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_find_blob_refs() {
        // A post embedding one image blob; the blob node is nested several
        // levels deep to exercise recursion.
        let record = serde_json::json!({
            "$type": "app.bsky.feed.post",
            "text": "Hello world",
            "embed": {
                "$type": "app.bsky.embed.images",
                "images": [
                    {
                        "alt": "Test image",
                        "image": {
                            "$type": "blob",
                            "ref": {
                                "$link": "bafkreihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku"
                            },
                            "mimeType": "image/jpeg",
                            "size": 12345
                        }
                    }
                ]
            }
        });

        let refs = find_blob_refs(&record, 0);
        assert_eq!(refs.len(), 1);
        assert_eq!(
            refs[0].cid,
            "bafkreihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku"
        );
        assert_eq!(refs[0].mime_type, Some("image/jpeg".to_string()));
    }

    #[test]
    fn test_find_blob_refs_no_blobs() {
        let record = serde_json::json!({
            "$type": "app.bsky.feed.post",
            "text": "Hello world"
        });

        assert!(find_blob_refs(&record, 0).is_empty());
    }

    #[test]
    fn test_find_blob_refs_depth_limit() {
        // Wrap a blob 40 objects deep; the scanner stops at depth 32, so
        // nothing should be found.
        fn deeply_nested(depth: usize) -> JsonValue {
            if depth == 0 {
                serde_json::json!({
                    "$type": "blob",
                    "ref": { "$link": "bafkreitest" },
                    "mimeType": "image/png"
                })
            } else {
                serde_json::json!({ "nested": deeply_nested(depth - 1) })
            }
        }

        assert!(find_blob_refs(&deeply_nested(40), 0).is_empty());
    }
}
+3
src/sync/mod.rs
···4pub mod crawl;
5pub mod firehose;
6pub mod frame;
07pub mod listener;
8pub mod relay_client;
9pub mod repo;
10pub mod subscribe_repos;
11pub mod util;
01213pub use blob::{get_blob, list_blobs};
14pub use commit::{get_latest_commit, get_repo_status, list_repos};
15pub use crawl::{notify_of_update, request_crawl};
16pub use repo::{get_blocks, get_repo, get_record};
17pub use subscribe_repos::subscribe_repos;
0
···4pub mod crawl;
5pub mod firehose;
6pub mod frame;
7+pub mod import;
8pub mod listener;
9pub mod relay_client;
10pub mod repo;
11pub mod subscribe_repos;
12pub mod util;
13+pub mod verify;
1415pub use blob::{get_blob, list_blobs};
16pub use commit::{get_latest_commit, get_repo_status, list_repos};
17pub use crawl::{notify_of_update, request_crawl};
18pub use repo::{get_blocks, get_repo, get_record};
19pub use subscribe_repos::subscribe_repos;
20+pub use verify::{CarVerifier, VerifiedCar, VerifyError};
+12-18
src/sync/repo.rs
···7 Json,
8};
9use cid::Cid;
010use jacquard_repo::storage::BlockStore;
11use serde::Deserialize;
12use serde_json::json;
···165 writer.write_all(&block).unwrap();
166 car_bytes.extend_from_slice(&writer);
167168- if let Ok(value) = serde_ipld_dagcbor::from_slice::<serde_json::Value>(&block) {
169- extract_links_json(&value, &mut stack);
170 }
171 }
172 }
···179 .into_response()
180}
181182-fn extract_links_json(value: &serde_json::Value, stack: &mut Vec<Cid>) {
183 match value {
184- serde_json::Value::Object(map) => {
185- if let Some(serde_json::Value::String(s)) = map.get("/") {
186- if let Ok(cid) = Cid::from_str(s) {
187- stack.push(cid);
188- }
189- } else if let Some(serde_json::Value::String(s)) = map.get("$link") {
190- if let Ok(cid) = Cid::from_str(s) {
191- stack.push(cid);
192- }
193- } else {
194- for v in map.values() {
195- extract_links_json(v, stack);
196- }
197 }
198 }
199- serde_json::Value::Array(arr) => {
200 for v in arr {
201- extract_links_json(v, stack);
202 }
203 }
204 _ => {}
···7 Json,
8};
9use cid::Cid;
10+use ipld_core::ipld::Ipld;
11use jacquard_repo::storage::BlockStore;
12use serde::Deserialize;
13use serde_json::json;
···166 writer.write_all(&block).unwrap();
167 car_bytes.extend_from_slice(&writer);
168169+ if let Ok(value) = serde_ipld_dagcbor::from_slice::<Ipld>(&block) {
170+ extract_links_ipld(&value, &mut stack);
171 }
172 }
173 }
···180 .into_response()
181}
182183+fn extract_links_ipld(value: &Ipld, stack: &mut Vec<Cid>) {
184 match value {
185+ Ipld::Link(cid) => {
186+ stack.push(*cid);
187+ }
188+ Ipld::Map(map) => {
189+ for v in map.values() {
190+ extract_links_ipld(v, stack);
0000000191 }
192 }
193+ Ipld::List(arr) => {
194 for v in arr {
195+ extract_links_ipld(v, stack);
196 }
197 }
198 _ => {}