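//! Read-after-write support for proxied XRPC reads: collects records written
//! locally since a given repo revision, proxies the read to the upstream
//! AppView, and reports how stale the merged local data is via the
//! `atproto-upstream-lag` response header.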
use crate::api::proxy_client::{
    is_ssrf_safe, proxy_client, MAX_RESPONSE_SIZE, RESPONSE_HEADERS_TO_FORWARD,
};
use crate::api::ApiError;
use crate::state::AppState;
use axum::{
    http::{HeaderMap, HeaderValue, StatusCode},
    response::{IntoResponse, Response},
    Json,
};
use bytes::Bytes;
use chrono::{DateTime, Utc};
use cid::Cid;
use jacquard_repo::storage::BlockStore;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use tracing::{error, info, warn};
use uuid::Uuid;

pub const REPO_REV_HEADER: &str = "atproto-repo-rev";
pub const UPSTREAM_LAG_HEADER: &str = "atproto-upstream-lag";

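/// Locally stored `app.bsky.feed.post` record. Fields not modeled here are
/// preserved in `extra` via `#[serde(flatten)]`.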
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PostRecord {
    #[serde(rename = "$type")]
    pub record_type: Option<String>,
    pub text: String,
    pub created_at: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reply: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub embed: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub langs: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub labels: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<Vec<String>>,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProfileRecord {
    #[serde(rename = "$type")]
    pub record_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avatar: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub banner: Option<Value>,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

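/// A decoded record together with the AT URI, CID, and local index time it
/// was stored under.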
#[derive(Debug, Clone)]
pub struct RecordDescript<T> {
    pub uri: String,
    pub cid: String,
    pub indexed_at: DateTime<Utc>,
    pub record: T,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LikeRecord {
    #[serde(rename = "$type")]
    pub record_type: Option<String>,
    pub subject: LikeSubject,
    pub created_at: String,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LikeSubject {
    pub uri: String,
    pub cid: String,
}

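/// Records written locally after a given repo revision. `count` tracks every
/// block found in the block store, including ones that failed to decode into
/// a typed record.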
#[derive(Debug, Default)]
pub struct LocalRecords {
    pub count: usize,
    pub profile: Option<RecordDescript<ProfileRecord>>,
    pub posts: Vec<RecordDescript<PostRecord>>,
    pub likes: Vec<RecordDescript<LikeRecord>>,
}

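/// Fetch up to ten records the user wrote after `rev`, decoding profile,
/// post, and like records from the block store. Returns an empty
/// `LocalRecords` if nothing newer exists, or if the sanity check (some
/// record at or before `rev`) fails.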
pub async fn get_records_since_rev(
    state: &AppState,
    did: &str,
    rev: &str,
) -> Result<LocalRecords, String> {
    let mut result = LocalRecords::default();
    let user_id: Uuid = sqlx::query_scalar!("SELECT id FROM users WHERE did = $1", did)
        .fetch_optional(&state.db)
        .await
        .map_err(|e| format!("DB error: {}", e))?
        .ok_or_else(|| "User not found".to_string())?;
    // Cap the number of local records considered for a single read.
    let rows = sqlx::query!(
        r#"
        SELECT record_cid, collection, rkey, created_at, repo_rev
        FROM records
        WHERE repo_id = $1 AND repo_rev > $2
        ORDER BY repo_rev ASC
        LIMIT 10
        "#,
        user_id,
        rev
    )
    .fetch_all(&state.db)
    .await
    .map_err(|e| format!("DB error fetching records: {}", e))?;
    if rows.is_empty() {
        return Ok(result);
    }
    // Require at least one record at or before `rev`; otherwise the supplied
    // revision predates local history and a "since rev" diff would be meaningless.
    let sanity_check = sqlx::query_scalar!(
        "SELECT 1 as val FROM records WHERE repo_id = $1 AND repo_rev <= $2 LIMIT 1",
        user_id,
        rev
    )
    .fetch_optional(&state.db)
    .await
    .map_err(|e| format!("DB error sanity check: {}", e))?;
    if sanity_check.is_none() {
        warn!("Sanity check failed: no records found before rev {}", rev);
        return Ok(result);
    }
    struct RowData {
        cid_str: String,
        collection: String,
        rkey: String,
        created_at: DateTime<Utc>,
    }
    let mut row_data: Vec<RowData> = Vec::with_capacity(rows.len());
    let mut cids: Vec<Cid> = Vec::with_capacity(rows.len());
    for row in &rows {
        if let Ok(cid) = row.record_cid.parse::<Cid>() {
            cids.push(cid);
            row_data.push(RowData {
                cid_str: row.record_cid.clone(),
                collection: row.collection.clone(),
                rkey: row.rkey.clone(),
                created_at: row.created_at,
            });
        }
    }
    let blocks: Vec<Option<Bytes>> = state
        .block_store
        .get_many(&cids)
        .await
        .map_err(|e| format!("Error fetching blocks: {}", e))?;
    for (data, block_opt) in row_data.into_iter().zip(blocks.into_iter()) {
        let block_bytes = match block_opt {
            Some(b) => b,
            None => continue,
        };
        result.count += 1;
        let uri = format!("at://{}/{}/{}", did, data.collection, data.rkey);
        if data.collection == "app.bsky.actor.profile" && data.rkey == "self" {
            if let Ok(record) = serde_ipld_dagcbor::from_slice::<ProfileRecord>(&block_bytes) {
                result.profile = Some(RecordDescript {
                    uri,
                    cid: data.cid_str,
                    indexed_at: data.created_at,
                    record,
                });
            }
        } else if data.collection == "app.bsky.feed.post" {
            if let Ok(record) = serde_ipld_dagcbor::from_slice::<PostRecord>(&block_bytes) {
                result.posts.push(RecordDescript {
                    uri,
                    cid: data.cid_str,
                    indexed_at: data.created_at,
                    record,
                });
            }
        } else if data.collection == "app.bsky.feed.like" {
            if let Ok(record) = serde_ipld_dagcbor::from_slice::<LikeRecord>(&block_bytes) {
                result.likes.push(RecordDescript {
                    uri,
                    cid: data.cid_str,
                    indexed_at: data.created_at,
                    record,
                });
            }
        }
    }
    Ok(result)
}

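/// Milliseconds between now and the oldest locally indexed record, or `None`
/// if there are no local records.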
pub fn get_local_lag(local: &LocalRecords) -> Option<i64> {
    let mut oldest: Option<DateTime<Utc>> = local.profile.as_ref().map(|p| p.indexed_at);
    for post in &local.posts {
        match oldest {
            None => oldest = Some(post.indexed_at),
            Some(o) if post.indexed_at < o => oldest = Some(post.indexed_at),
            _ => {}
        }
    }
    for like in &local.likes {
        match oldest {
            None => oldest = Some(like.indexed_at),
            Some(o) if like.indexed_at < o => oldest = Some(like.indexed_at),
            _ => {}
        }
    }
    oldest.map(|o| (Utc::now() - o).num_milliseconds())
}

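/// Read the `atproto-repo-rev` header from a response, if present and valid
/// UTF-8.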
pub fn extract_repo_rev(headers: &HeaderMap) -> Option<String> {
    headers
        .get(REPO_REV_HEADER)
        .and_then(|h| h.to_str().ok())
        .map(|s| s.to_string())
}

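/// Status, allow-listed headers, and buffered body of an upstream response.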
#[derive(Debug)]
pub struct ProxyResponse {
    pub status: StatusCode,
    pub headers: HeaderMap,
    pub body: Bytes,
}

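/// Forward an XRPC GET to the AppView named by the `APPVIEW_URL` environment
/// variable, passing along query parameters and an optional `Authorization`
/// header. Oversized responses and transport failures are mapped to the
/// corresponding `ApiError` responses.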
pub async fn proxy_to_appview(
    method: &str,
    params: &HashMap<String, String>,
    auth_header: Option<&str>,
) -> Result<ProxyResponse, Response> {
    let appview_url = std::env::var("APPVIEW_URL").map_err(|_| {
        ApiError::UpstreamUnavailable("No upstream AppView configured".to_string()).into_response()
    })?;
    if let Err(e) = is_ssrf_safe(&appview_url) {
        error!("SSRF check failed for appview URL: {}", e);
        return Err(ApiError::UpstreamUnavailable(format!("Invalid upstream URL: {}", e))
            .into_response());
    }
    let target_url = format!("{}/xrpc/{}", appview_url, method);
    info!(target = %target_url, "Proxying request to appview");
    let client = proxy_client();
    let mut request_builder = client.get(&target_url).query(params);
    if let Some(auth) = auth_header {
        request_builder = request_builder.header("Authorization", auth);
    }
    match request_builder.send().await {
        Ok(resp) => {
            let status =
                StatusCode::from_u16(resp.status().as_u16()).unwrap_or(StatusCode::BAD_GATEWAY);
            // Forward only the allow-listed upstream headers.
            let headers: HeaderMap = resp
                .headers()
                .iter()
                .filter(|(k, _)| {
                    RESPONSE_HEADERS_TO_FORWARD
                        .iter()
                        .any(|h| k.as_str().eq_ignore_ascii_case(h))
                })
                .filter_map(|(k, v)| {
                    let name = axum::http::HeaderName::try_from(k.as_str()).ok()?;
                    let value = HeaderValue::from_bytes(v.as_bytes()).ok()?;
                    Some((name, value))
                })
                .collect();
            // Reject oversized responses before buffering the body, then check
            // again afterwards in case the Content-Length header was missing.
            let content_length = resp.content_length().unwrap_or(0);
            if content_length > MAX_RESPONSE_SIZE {
                error!(
                    content_length,
                    max = MAX_RESPONSE_SIZE,
                    "Upstream response too large"
                );
                return Err(ApiError::UpstreamFailure.into_response());
            }
            let body = resp.bytes().await.map_err(|e| {
                error!(error = ?e, "Error reading proxy response body");
                ApiError::UpstreamFailure.into_response()
            })?;
            if body.len() as u64 > MAX_RESPONSE_SIZE {
                error!(
                    len = body.len(),
                    max = MAX_RESPONSE_SIZE,
                    "Upstream response body exceeded size limit"
                );
                return Err(ApiError::UpstreamFailure.into_response());
            }
            Ok(ProxyResponse {
                status,
                headers,
                body,
            })
        }
        Err(e) => {
            error!(error = ?e, "Error sending proxy request");
            if e.is_timeout() {
                Err(ApiError::UpstreamTimeout.into_response())
            } else if e.is_connect() {
                Err(ApiError::UpstreamUnavailable("Failed to connect to upstream".to_string())
                    .into_response())
            } else {
                Err(ApiError::UpstreamFailure.into_response())
            }
        }
    }
}

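/// Serialize merged output as a 200 JSON response, attaching the
/// `atproto-upstream-lag` header when a lag estimate is available.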
pub fn format_munged_response<T: Serialize>(data: T, lag: Option<i64>) -> Response {
    let mut response = (StatusCode::OK, Json(data)).into_response();
    if let Some(lag_ms) = lag {
        if let Ok(header_val) = HeaderValue::from_str(&lag_ms.to_string()) {
            response
                .headers_mut()
                .insert(UPSTREAM_LAG_HEADER, header_val);
        }
    }
    response
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AuthorView {
    pub did: String,
    pub handle: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub avatar: Option<String>,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PostView {
    pub uri: String,
    pub cid: String,
    pub author: AuthorView,
    pub record: Value,
    pub indexed_at: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub embed: Option<Value>,
    #[serde(default)]
    pub reply_count: i64,
    #[serde(default)]
    pub repost_count: i64,
    #[serde(default)]
    pub like_count: i64,
    #[serde(default)]
    pub quote_count: i64,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FeedViewPost {
    pub post: PostView,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reply: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reason: Option<Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub feed_context: Option<String>,
    #[serde(flatten)]
    pub extra: HashMap<String, Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeedOutput {
    pub feed: Vec<FeedViewPost>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
}

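/// Build a `PostView` for a locally written post, taking the display name
/// from the local profile record when available. Engagement counts start at
/// zero since no aggregated data is available locally.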
pub fn format_local_post(
    descript: &RecordDescript<PostRecord>,
    author_did: &str,
    author_handle: &str,
    profile: Option<&RecordDescript<ProfileRecord>>,
) -> PostView {
    let display_name = profile.and_then(|p| p.record.display_name.clone());
    PostView {
        uri: descript.uri.clone(),
        cid: descript.cid.clone(),
        author: AuthorView {
            did: author_did.to_string(),
            handle: author_handle.to_string(),
            display_name,
            avatar: None,
            extra: HashMap::new(),
        },
        record: serde_json::to_value(&descript.record).unwrap_or(Value::Null),
        indexed_at: descript.indexed_at.to_rfc3339(),
        embed: descript.record.embed.clone(),
        reply_count: 0,
        repost_count: 0,
        like_count: 0,
        quote_count: 0,
        extra: HashMap::new(),
    }
}

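/// Append locally written posts to a feed page and re-sort the page
/// newest-first by `indexed_at`.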
pub fn insert_posts_into_feed(feed: &mut Vec<FeedViewPost>, posts: Vec<PostView>) {
    if posts.is_empty() {
        return;
    }
    let new_items: Vec<FeedViewPost> = posts
        .into_iter()
        .map(|post| FeedViewPost {
            post,
            reply: None,
            reason: None,
            feed_context: None,
            extra: HashMap::new(),
        })
        .collect();
    feed.extend(new_items);
    feed.sort_by(|a, b| b.post.indexed_at.cmp(&a.post.indexed_at));
}