···33//! This module provides a GraphQL interface to query slice records with support
44//! for joining linked records through AT Protocol strongRef references.
5566-mod schema_builder;
77-mod dataloaders;
86mod dataloader;
99-mod types;
77+mod dataloaders;
108pub mod handler;
119pub mod pubsub;
1010+mod schema_builder;
1111+mod types;
12121313-pub use schema_builder::build_graphql_schema;
1414-pub use handler::{graphql_handler, graphql_playground, graphql_subscription_handler};
1515-pub use pubsub::{RecordUpdateEvent, RecordOperation, PUBSUB};
1613pub use dataloader::GraphQLContext;
1414+pub use handler::{graphql_handler, graphql_playground, graphql_subscription_handler};
1515+pub use pubsub::{PUBSUB, RecordOperation, RecordUpdateEvent};
1616+pub use schema_builder::build_graphql_schema;
+1-2
api/src/graphql/pubsub.rs
···66use serde::{Deserialize, Serialize};
77use std::collections::HashMap;
88use std::sync::Arc;
99-use tokio::sync::{broadcast, RwLock};
99+use tokio::sync::{RwLock, broadcast};
1010use tracing::{debug, info};
11111212/// Event broadcast when a record is created or updated
···2929 Update,
3030 Delete,
3131}
3232-33323433/// PubSub manager for broadcasting events to subscribers
3534///
+725-479
api/src/graphql/schema_builder.rs
···33//! This module generates GraphQL schemas at runtime based on lexicon definitions
44//! stored in the database, enabling flexible querying of slice records.
5566-use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, Schema, Scalar, TypeRef, InputObject, InputValue, Enum, EnumItem, Subscription, SubscriptionField, SubscriptionFieldFuture};
66+use async_graphql::dynamic::{
77+ Enum, EnumItem, Field, FieldFuture, FieldValue, InputObject, InputValue, Object, Scalar,
88+ Schema, Subscription, SubscriptionField, SubscriptionFieldFuture, TypeRef,
99+};
710use async_graphql::{Error, Value as GraphQLValue};
88-use base64::engine::general_purpose;
911use base64::Engine;
1212+use base64::engine::general_purpose;
1013use serde_json;
1114use std::collections::HashMap;
1215use std::sync::Arc;
1316use tokio::sync::Mutex;
14171518use crate::database::Database;
1616-use crate::graphql::types::{extract_collection_fields, extract_record_key, GraphQLField, GraphQLType};
1719use crate::graphql::PUBSUB;
1820use crate::graphql::dataloader::GraphQLContext;
2121+use crate::graphql::types::{
2222+ GraphQLField, GraphQLType, extract_collection_fields, extract_record_key,
2323+};
19242025/// Metadata about a collection for cross-referencing
2126#[derive(Clone)]
2227struct CollectionMeta {
2328 nsid: String,
2424- key_type: String, // "tid", "literal:self", or "any"
2525- type_name: String, // GraphQL type name for this collection
2929+ key_type: String, // "tid", "literal:self", or "any"
3030+ type_name: String, // GraphQL type name for this collection
2631 at_uri_fields: Vec<String>, // Fields with format "at-uri" for reverse joins
2732}
28332934/// Builds a dynamic GraphQL schema from lexicons for a given slice
3030-pub async fn build_graphql_schema(
3131- database: Database,
3232- slice_uri: String,
3333-) -> Result<Schema, String> {
3535+pub async fn build_graphql_schema(database: Database, slice_uri: String) -> Result<Schema, String> {
3436 // Fetch all lexicons for this slice
3537 let all_lexicons = database
3638 .get_lexicons_by_slice(&slice_uri)
···7375 if !fields.is_empty() {
7476 if let Some(key_type) = extract_record_key(defs) {
7577 // Extract at-uri field names for reverse joins
7676- let at_uri_fields: Vec<String> = fields.iter()
7878+ let at_uri_fields: Vec<String> = fields
7979+ .iter()
7780 .filter(|f| f.format.as_deref() == Some("at-uri"))
7881 .map(|f| f.name.clone())
7982 .collect();
80838184 if !at_uri_fields.is_empty() {
8282- tracing::debug!(
8383- "Collection {} has at-uri fields: {:?}",
8484- nsid,
8585- at_uri_fields
8686- );
8585+ tracing::debug!("Collection {} has at-uri fields: {:?}", nsid, at_uri_fields);
8786 }
88878988 all_collections.push(CollectionMeta {
···115114 if !fields.is_empty() {
116115 // Create a GraphQL type for this collection
117116 let type_name = nsid_to_type_name(nsid);
118118- let record_type = create_record_type(&type_name, &fields, database.clone(), slice_uri.clone(), &all_collections);
117117+ let record_type = create_record_type(
118118+ &type_name,
119119+ &fields,
120120+ database.clone(),
121121+ slice_uri.clone(),
122122+ &all_collections,
123123+ );
119124120125 // Create edge and connection types for this collection (Relay standard)
121126 let edge_type = create_edge_type(&type_name);
···140145141146 for (field_name, filter_type) in system_fields {
142147 if !lexicon_field_names.contains(field_name) {
143143- where_input = where_input.field(InputValue::new(field_name, TypeRef::named(filter_type)));
148148+ where_input =
149149+ where_input.field(InputValue::new(field_name, TypeRef::named(filter_type)));
144150 }
145151 }
146152···150156 GraphQLType::Int => "IntFilter",
151157 _ => "StringFilter", // Default to StringFilter for strings and other types
152158 };
153153- where_input = where_input.field(InputValue::new(&field.name, TypeRef::named(filter_type)));
159159+ where_input =
160160+ where_input.field(InputValue::new(&field.name, TypeRef::named(filter_type)));
154161 }
155162156163 // Add nested and/or support
157164 where_input = where_input
158158- .field(InputValue::new("and", TypeRef::named_list(format!("{}WhereInput", type_name))))
159159- .field(InputValue::new("or", TypeRef::named_list(format!("{}WhereInput", type_name))));
165165+ .field(InputValue::new(
166166+ "and",
167167+ TypeRef::named_list(format!("{}WhereInput", type_name)),
168168+ ))
169169+ .field(InputValue::new(
170170+ "or",
171171+ TypeRef::named_list(format!("{}WhereInput", type_name)),
172172+ ));
160173161174 // Create GroupByField enum for this collection
162175 let mut group_by_enum = Enum::new(format!("{}GroupByField", type_name));
···168181169182 // Create collection-specific GroupByFieldInput
170183 let group_by_input = InputObject::new(format!("{}GroupByFieldInput", type_name))
171171- .field(InputValue::new("field", TypeRef::named_nn(format!("{}GroupByField", type_name))))
184184+ .field(InputValue::new(
185185+ "field",
186186+ TypeRef::named_nn(format!("{}GroupByField", type_name)),
187187+ ))
172188 .field(InputValue::new("interval", TypeRef::named("DateInterval")));
173189174190 // Create collection-specific SortFieldInput
175191 let sort_field_input = InputObject::new(format!("{}SortFieldInput", type_name))
176176- .field(InputValue::new("field", TypeRef::named_nn(format!("{}GroupByField", type_name))))
177177- .field(InputValue::new("direction", TypeRef::named("SortDirection")));
192192+ .field(InputValue::new(
193193+ "field",
194194+ TypeRef::named_nn(format!("{}GroupByField", type_name)),
195195+ ))
196196+ .field(InputValue::new(
197197+ "direction",
198198+ TypeRef::named("SortDirection"),
199199+ ));
178200179201 // Collect the types to register with schema later
180202 objects_to_register.push(record_type);
···214236 };
215237216238 // Parse sortBy argument
217217- let sort_by: Option<Vec<crate::models::SortField>> = match ctx.args.get("sortBy") {
239239+ let sort_by: Option<Vec<crate::models::SortField>> = match ctx
240240+ .args
241241+ .get("sortBy")
242242+ {
218243 Some(val) => {
219244 if let Ok(list) = val.list() {
220245 let mut sort_fields = Vec::new();
221246 for item in list.iter() {
222247 if let Ok(obj) = item.object() {
223223- let field = obj.get("field")
224224- .and_then(|v| v.enum_name().ok().map(|s| s.to_string()))
248248+ let field = obj
249249+ .get("field")
250250+ .and_then(|v| {
251251+ v.enum_name().ok().map(|s| s.to_string())
252252+ })
225253 .unwrap_or_else(|| "indexedAt".to_string());
226226- let direction = obj.get("direction")
227227- .and_then(|v| v.enum_name().ok().map(|s| s.to_string()))
254254+ let direction = obj
255255+ .get("direction")
256256+ .and_then(|v| {
257257+ v.enum_name().ok().map(|s| s.to_string())
258258+ })
228259 .unwrap_or_else(|| "desc".to_string());
229229- sort_fields.push(crate::models::SortField { field, direction });
260260+ sort_fields.push(crate::models::SortField {
261261+ field,
262262+ direction,
263263+ });
230264 }
231265 }
232266 Some(sort_fields)
233267 } else {
234268 None
235269 }
236236- },
270270+ }
237271 None => None,
238272 };
239273···249283 where_clause.conditions.insert(
250284 "collection".to_string(),
251285 crate::models::WhereCondition {
252252- gt: None,
253253- gte: None,
254254- lt: None,
255255- lte: None,
286286+ gt: None,
287287+ gte: None,
288288+ lt: None,
289289+ lte: None,
256290 eq: Some(serde_json::Value::String(collection.clone())),
257291 in_values: None,
258292 contains: None,
293293+ fuzzy: None,
259294 },
260295 );
261296···272307 }
273308274309 // Resolve actorHandle to did if present
275275- if let Some(actor_handle_condition) = where_clause.conditions.remove("actorHandle") {
310310+ if let Some(actor_handle_condition) =
311311+ where_clause.conditions.remove("actorHandle")
312312+ {
276313 // Collect handles to resolve
277314 let mut handles = Vec::new();
278315 if let Some(eq_value) = &actor_handle_condition.eq {
···296333 // Replace actorHandle condition with did condition
297334 let did_condition = if dids.len() == 1 {
298335 crate::models::WhereCondition {
299299- gt: None,
300300- gte: None,
301301- lt: None,
302302- lte: None,
303303- eq: Some(serde_json::Value::String(dids[0].clone())),
336336+ gt: None,
337337+ gte: None,
338338+ lt: None,
339339+ lte: None,
340340+ eq: Some(serde_json::Value::String(
341341+ dids[0].clone(),
342342+ )),
304343 in_values: None,
305344 contains: None,
345345+ fuzzy: None,
306346 }
307347 } else {
308348 crate::models::WhereCondition {
309309- gt: None,
310310- gte: None,
311311- lt: None,
312312- lte: None,
349349+ gt: None,
350350+ gte: None,
351351+ lt: None,
352352+ lte: None,
313353 eq: None,
314314- in_values: Some(dids.into_iter().map(|d| serde_json::Value::String(d)).collect()),
354354+ in_values: Some(
355355+ dids.into_iter()
356356+ .map(|d| {
357357+ serde_json::Value::String(d)
358358+ })
359359+ .collect(),
360360+ ),
315361 contains: None,
362362+ fuzzy: None,
316363 }
317364 };
318318- where_clause.conditions.insert("did".to_string(), did_condition);
365365+ where_clause
366366+ .conditions
367367+ .insert("did".to_string(), did_condition);
319368 }
320369 // If no DIDs found, the query will return 0 results naturally
321370 }
···336385 Some(&where_clause),
337386 )
338387 .await
339339- .map_err(|e| {
340340- Error::new(format!("Database query failed: {}", e))
341341- })?;
388388+ .map_err(|e| Error::new(format!("Database query failed: {}", e)))?;
342389343390 // Query database for total count
344391 let total_count = db
345392 .count_slice_collections_records(&slice, Some(&where_clause))
346393 .await
347347- .map_err(|e| {
348348- Error::new(format!("Count query failed: {}", e))
349349- })? as i32;
394394+ .map_err(|e| Error::new(format!("Count query failed: {}", e)))?
395395+ as i32;
350396351397 // Convert records to RecordContainers
352398 let record_containers: Vec<RecordContainer> = records
···521567 eq: Some(serde_json::Value::String(collection.clone())),
522568 in_values: None,
523569 contains: None,
570570+ fuzzy: None,
524571 },
525572 );
526573···565612 eq: Some(serde_json::Value::String(dids[0].clone())),
566613 in_values: None,
567614 contains: None,
615615+ fuzzy: None,
568616 }
569617 } else {
570618 crate::models::WhereCondition {
···575623 eq: None,
576624 in_values: Some(dids.into_iter().map(|d| serde_json::Value::String(d)).collect()),
577625 contains: None,
626626+ fuzzy: None,
578627 }
579628 };
580629 where_clause.conditions.insert("did".to_string(), did_condition);
···639688 let subscription = create_subscription_type(slice_uri.clone(), &lexicons);
640689641690 // Build and return the schema with complexity limits
642642- let mut schema_builder = Schema::build(query.type_name(), Some(mutation.type_name()), Some(subscription.type_name()))
643643- .register(query)
644644- .register(mutation)
645645- .register(subscription)
646646- .limit_depth(50) // Higher limit to support GraphiQL introspection with reverse joins
647647- .limit_complexity(5000); // Prevent expensive deeply nested queries
691691+ let mut schema_builder = Schema::build(
692692+ query.type_name(),
693693+ Some(mutation.type_name()),
694694+ Some(subscription.type_name()),
695695+ )
696696+ .register(query)
697697+ .register(mutation)
698698+ .register(subscription)
699699+ .limit_depth(50) // Higher limit to support GraphiQL introspection with reverse joins
700700+ .limit_complexity(5000); // Prevent expensive deeply nested queries
648701649702 // Register JSON scalar type for complex fields
650703 let json_scalar = Scalar::new("JSON");
···655708 .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING)))
656709 .field(InputValue::new("in", TypeRef::named_list(TypeRef::STRING)))
657710 .field(InputValue::new("contains", TypeRef::named(TypeRef::STRING)))
711711+ .field(InputValue::new("fuzzy", TypeRef::named(TypeRef::STRING)))
658712 .field(InputValue::new("gt", TypeRef::named(TypeRef::STRING)))
659713 .field(InputValue::new("gte", TypeRef::named(TypeRef::STRING)))
660714 .field(InputValue::new("lt", TypeRef::named(TypeRef::STRING)))
···746800/// Container to hold blob data and DID for URL generation
747801#[derive(Clone)]
748802struct BlobContainer {
749749- blob_ref: String, // CID reference
750750- mime_type: String, // MIME type
751751- size: i64, // Size in bytes
752752- did: String, // DID for CDN URL generation
803803+ blob_ref: String, // CID reference
804804+ mime_type: String, // MIME type
805805+ size: i64, // Size in bytes
806806+ did: String, // DID for CDN URL generation
753807}
754808755809/// Creates a GraphQL Object type for a record collection
···768822769823 // Add standard AT Protocol fields only if they don't conflict with lexicon fields
770824 if !lexicon_field_names.contains("uri") {
771771- object = object.field(Field::new("uri", TypeRef::named_nn(TypeRef::STRING), |ctx| {
772772- FieldFuture::new(async move {
773773- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
774774- Ok(Some(GraphQLValue::from(container.record.uri.clone())))
775775- })
776776- }));
825825+ object = object.field(Field::new(
826826+ "uri",
827827+ TypeRef::named_nn(TypeRef::STRING),
828828+ |ctx| {
829829+ FieldFuture::new(async move {
830830+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
831831+ Ok(Some(GraphQLValue::from(container.record.uri.clone())))
832832+ })
833833+ },
834834+ ));
777835 }
778836779837 if !lexicon_field_names.contains("cid") {
780780- object = object.field(Field::new("cid", TypeRef::named_nn(TypeRef::STRING), |ctx| {
781781- FieldFuture::new(async move {
782782- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
783783- Ok(Some(GraphQLValue::from(container.record.cid.clone())))
784784- })
785785- }));
838838+ object = object.field(Field::new(
839839+ "cid",
840840+ TypeRef::named_nn(TypeRef::STRING),
841841+ |ctx| {
842842+ FieldFuture::new(async move {
843843+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
844844+ Ok(Some(GraphQLValue::from(container.record.cid.clone())))
845845+ })
846846+ },
847847+ ));
786848 }
787849788850 if !lexicon_field_names.contains("did") {
789789- object = object.field(Field::new("did", TypeRef::named_nn(TypeRef::STRING), |ctx| {
790790- FieldFuture::new(async move {
791791- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
792792- Ok(Some(GraphQLValue::from(container.record.did.clone())))
793793- })
794794- }));
851851+ object = object.field(Field::new(
852852+ "did",
853853+ TypeRef::named_nn(TypeRef::STRING),
854854+ |ctx| {
855855+ FieldFuture::new(async move {
856856+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
857857+ Ok(Some(GraphQLValue::from(container.record.did.clone())))
858858+ })
859859+ },
860860+ ));
795861 }
796862797863 if !lexicon_field_names.contains("indexedAt") {
···817883 "actorHandle",
818884 TypeRef::named(TypeRef::STRING),
819885 move |ctx| {
820820- let db = db_for_actor.clone();
821821- let slice = slice_for_actor.clone();
822822- FieldFuture::new(async move {
823823- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
824824- let did = &container.record.did;
886886+ let db = db_for_actor.clone();
887887+ let slice = slice_for_actor.clone();
888888+ FieldFuture::new(async move {
889889+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
890890+ let did = &container.record.did;
825891826826- // Build where clause to find actor by DID
827827- let mut where_clause = crate::models::WhereClause {
828828- conditions: std::collections::HashMap::new(),
829829- or_conditions: None,
830830- and: None,
831831- or: None,
832832- };
833833- where_clause.conditions.insert(
834834- "did".to_string(),
835835- crate::models::WhereCondition {
836836- gt: None,
837837- gte: None,
838838- lt: None,
839839- lte: None,
840840- eq: Some(serde_json::Value::String(did.clone())),
841841- in_values: None,
842842- contains: None,
843843- },
844844- );
892892+ // Build where clause to find actor by DID
893893+ let mut where_clause = crate::models::WhereClause {
894894+ conditions: std::collections::HashMap::new(),
895895+ or_conditions: None,
896896+ and: None,
897897+ or: None,
898898+ };
899899+ where_clause.conditions.insert(
900900+ "did".to_string(),
901901+ crate::models::WhereCondition {
902902+ gt: None,
903903+ gte: None,
904904+ lt: None,
905905+ lte: None,
906906+ eq: Some(serde_json::Value::String(did.clone())),
907907+ in_values: None,
908908+ contains: None,
909909+ fuzzy: None,
910910+ },
911911+ );
845912846846- match db.get_slice_actors(&slice, Some(1), None, Some(&where_clause)).await {
847847- Ok((actors, _cursor)) => {
848848- if let Some(actor) = actors.first() {
849849- if let Some(handle) = &actor.handle {
850850- Ok(Some(GraphQLValue::from(handle.clone())))
851851- } else {
852852- Ok(None)
853853- }
913913+ match db
914914+ .get_slice_actors(&slice, Some(1), None, Some(&where_clause))
915915+ .await
916916+ {
917917+ Ok((actors, _cursor)) => {
918918+ if let Some(actor) = actors.first() {
919919+ if let Some(handle) = &actor.handle {
920920+ Ok(Some(GraphQLValue::from(handle.clone())))
854921 } else {
855922 Ok(None)
856923 }
857857- }
858858- Err(e) => {
859859- tracing::debug!("Actor not found for {}: {}", did, e);
924924+ } else {
860925 Ok(None)
861926 }
862927 }
863863- })
864864- },
865865- ));
928928+ Err(e) => {
929929+ tracing::debug!("Actor not found for {}: {}", did, e);
930930+ Ok(None)
931931+ }
932932+ }
933933+ })
934934+ },
935935+ ));
866936867937 // Add fields from lexicon
868938 for field in fields {
···910980 .unwrap_or("image/jpeg")
911981 .to_string();
912982913913- let size = obj
914914- .get("size")
915915- .and_then(|s| s.as_i64())
916916- .unwrap_or(0);
983983+ let size =
984984+ obj.get("size").and_then(|s| s.as_i64()).unwrap_or(0);
917985918986 let blob_container = BlobContainer {
919987 blob_ref,
···9521020 .unwrap_or("image/jpeg")
9531021 .to_string();
9541022955955- let size = obj
956956- .get("size")
957957- .and_then(|s| s.as_i64())
958958- .unwrap_or(0);
10231023+ let size = obj.get("size").and_then(|s| s.as_i64()).unwrap_or(0);
95910249601025 let blob_container = BlobContainer {
9611026 blob_ref,
···9801045 match db.get_record(&uri).await {
9811046 Ok(Some(linked_record)) => {
9821047 // Convert the linked record to a JSON value
983983- let record_json = serde_json::to_value(linked_record)
984984- .map_err(|e| {
10481048+ let record_json =
10491049+ serde_json::to_value(linked_record).map_err(|e| {
9851050 Error::new(format!("Serialization error: {}", e))
9861051 })?;
9871052···1021108610221087 // Collect all string fields with format "at-uri" that might reference this collection
10231088 // We'll check each one at runtime to see if it contains a URI to this collection
10241024- let uri_ref_fields: Vec<_> = fields.iter()
10891089+ let uri_ref_fields: Vec<_> = fields
10901090+ .iter()
10251091 .filter(|f| matches!(f.format.as_deref(), Some("at-uri")))
10261092 .collect();
10271093···1031109710321098 // If we found at-uri fields, create a resolver that checks each one at runtime
10331099 if !uri_ref_fields.is_empty() {
10341034- let ref_field_names: Vec<String> = uri_ref_fields.iter().map(|f| f.name.clone()).collect();
11001100+ let ref_field_names: Vec<String> =
11011101+ uri_ref_fields.iter().map(|f| f.name.clone()).collect();
10351102 let db_for_uri_join = database.clone();
10361103 let target_collection = collection_nsid.clone();
10371104···10551122 match db.get_record(uri).await {
10561123 Ok(Some(record)) => {
10571124 let new_container = RecordContainer { record };
10581058- return Ok(Some(FieldValue::owned_any(new_container)));
11251125+ return Ok(Some(FieldValue::owned_any(
11261126+ new_container,
11271127+ )));
10591128 }
10601129 Ok(None) => continue, // Try next field
10611061- Err(_) => continue, // Try next field
11301130+ Err(_) => continue, // Try next field
10621131 }
10631132 }
10641133 }
···10831152 let db = db_for_join.clone();
10841153 let nsid = collection_nsid.clone();
10851154 FieldFuture::new(async move {
10861086- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
11551155+ let container =
11561156+ ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
10871157 let uri = format!("at://{}/{}/self", container.record.did, nsid);
1088115810891159 match db.get_record(&uri).await {
10901160 Ok(Some(record)) => {
10911091- let new_container = RecordContainer {
10921092- record,
10931093- };
11611161+ let new_container = RecordContainer { record };
10941162 Ok(Some(FieldValue::owned_any(new_container)))
10951163 }
10961164 Ok(None) => Ok(None),
···11791247 eq: Some(serde_json::Value::String(nsid.clone())),
11801248 in_values: None,
11811249 contains: None,
12501250+ fuzzy: None,
11821251 },
11831252 );
11841253 where_clause.conditions.insert(
···11911260 eq: Some(serde_json::Value::String(did.clone())),
11921261 in_values: None,
11931262 contains: None,
12631263+ fuzzy: None,
11941264 },
11951265 );
11961266···13541424 let collection_for_count = collection.nsid.clone();
13551425 let at_uri_fields_for_count = collection.at_uri_fields.clone();
1356142613571357- object = object.field(
13581358- Field::new(
13591359- &count_field_name,
13601360- TypeRef::named_nn(TypeRef::INT),
13611361- move |ctx| {
13621362- let slice = slice_for_count.clone();
13631363- let nsid = collection_for_count.clone();
13641364- let db = db_for_count.clone();
13651365- let ref_fields = at_uri_fields_for_count.clone();
13661366- FieldFuture::new(async move {
13671367- let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
13681368- let parent_uri = &container.record.uri;
14271427+ object = object.field(Field::new(
14281428+ &count_field_name,
14291429+ TypeRef::named_nn(TypeRef::INT),
14301430+ move |ctx| {
14311431+ let slice = slice_for_count.clone();
14321432+ let nsid = collection_for_count.clone();
14331433+ let db = db_for_count.clone();
14341434+ let ref_fields = at_uri_fields_for_count.clone();
14351435+ FieldFuture::new(async move {
14361436+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
14371437+ let parent_uri = &container.record.uri;
1369143813701370- // Build where clause to count records referencing this URI
13711371- for ref_field in &ref_fields {
13721372- let mut where_clause = crate::models::WhereClause {
13731373- conditions: HashMap::new(),
13741374- or_conditions: None,
13751375- and: None,
13761376- or: None,
13771377- };
14391439+ // Build where clause to count records referencing this URI
14401440+ for ref_field in &ref_fields {
14411441+ let mut where_clause = crate::models::WhereClause {
14421442+ conditions: HashMap::new(),
14431443+ or_conditions: None,
14441444+ and: None,
14451445+ or: None,
14461446+ };
1378144713791379- where_clause.conditions.insert(
13801380- "collection".to_string(),
13811381- crate::models::WhereCondition {
13821382- gt: None,
13831383- gte: None,
13841384- lt: None,
13851385- lte: None,
13861386- eq: Some(serde_json::Value::String(nsid.clone())),
13871387- in_values: None,
13881388- contains: None,
13891389- },
13901390- );
14481448+ where_clause.conditions.insert(
14491449+ "collection".to_string(),
14501450+ crate::models::WhereCondition {
14511451+ gt: None,
14521452+ gte: None,
14531453+ lt: None,
14541454+ lte: None,
14551455+ eq: Some(serde_json::Value::String(nsid.clone())),
14561456+ in_values: None,
14571457+ contains: None,
14581458+ fuzzy: None,
14591459+ },
14601460+ );
1391146113921392- where_clause.conditions.insert(
13931393- ref_field.clone(),
13941394- crate::models::WhereCondition {
13951395- gt: None,
13961396- gte: None,
13971397- lt: None,
13981398- lte: None,
13991399- eq: Some(serde_json::Value::String(parent_uri.clone())),
14001400- in_values: None,
14011401- contains: None,
14021402- },
14031403- );
14621462+ where_clause.conditions.insert(
14631463+ ref_field.clone(),
14641464+ crate::models::WhereCondition {
14651465+ gt: None,
14661466+ gte: None,
14671467+ lt: None,
14681468+ lte: None,
14691469+ eq: Some(serde_json::Value::String(parent_uri.clone())),
14701470+ in_values: None,
14711471+ contains: None,
14721472+ fuzzy: None,
14731473+ },
14741474+ );
1404147514051405- match db.count_slice_collections_records(&slice, Some(&where_clause)).await {
14061406- Ok(count) if count > 0 => {
14071407- return Ok(Some(FieldValue::value(count as i32)));
14081408- }
14091409- Ok(_) => continue,
14101410- Err(e) => {
14111411- tracing::debug!("Count error for {}: {}", nsid, e);
14121412- continue;
14131413- }
14761476+ match db
14771477+ .count_slice_collections_records(&slice, Some(&where_clause))
14781478+ .await
14791479+ {
14801480+ Ok(count) if count > 0 => {
14811481+ return Ok(Some(FieldValue::value(count as i32)));
14821482+ }
14831483+ Ok(_) => continue,
14841484+ Err(e) => {
14851485+ tracing::debug!("Count error for {}: {}", nsid, e);
14861486+ continue;
14141487 }
14151488 }
14891489+ }
1416149014171417- // No matching field found, return 0
14181418- Ok(Some(FieldValue::value(0)))
14191419- })
14201420- },
14211421- )
14221422- );
14911491+ // No matching field found, return 0
14921492+ Ok(Some(FieldValue::value(0)))
14931493+ })
14941494+ },
14951495+ ));
14231496 }
1424149714251498 object
···15041577 // For arrays of primitives, use typed arrays
15051578 // For arrays of complex types, use JSON scalar
15061579 match inner.as_ref() {
15071507- GraphQLType::String | GraphQLType::Int | GraphQLType::Boolean | GraphQLType::Float => {
15801580+ GraphQLType::String
15811581+ | GraphQLType::Int
15821582+ | GraphQLType::Boolean
15831583+ | GraphQLType::Float => {
15081584 let inner_ref = match inner.as_ref() {
15091585 GraphQLType::String => TypeRef::STRING,
15101586 GraphQLType::Int => TypeRef::INT,
···15451621 let mut blob = Object::new("Blob");
1546162215471623 // ref field - CID reference
15481548- blob = blob.field(Field::new("ref", TypeRef::named_nn(TypeRef::STRING), |ctx| {
15491549- FieldFuture::new(async move {
15501550- let container = ctx.parent_value.try_downcast_ref::<BlobContainer>()?;
15511551- Ok(Some(GraphQLValue::from(container.blob_ref.clone())))
15521552- })
15531553- }));
16241624+ blob = blob.field(Field::new(
16251625+ "ref",
16261626+ TypeRef::named_nn(TypeRef::STRING),
16271627+ |ctx| {
16281628+ FieldFuture::new(async move {
16291629+ let container = ctx.parent_value.try_downcast_ref::<BlobContainer>()?;
16301630+ Ok(Some(GraphQLValue::from(container.blob_ref.clone())))
16311631+ })
16321632+ },
16331633+ ));
1554163415551635 // mimeType field
15561556- blob = blob.field(Field::new("mimeType", TypeRef::named_nn(TypeRef::STRING), |ctx| {
15571557- FieldFuture::new(async move {
15581558- let container = ctx.parent_value.try_downcast_ref::<BlobContainer>()?;
15591559- Ok(Some(GraphQLValue::from(container.mime_type.clone())))
15601560- })
15611561- }));
16361636+ blob = blob.field(Field::new(
16371637+ "mimeType",
16381638+ TypeRef::named_nn(TypeRef::STRING),
16391639+ |ctx| {
16401640+ FieldFuture::new(async move {
16411641+ let container = ctx.parent_value.try_downcast_ref::<BlobContainer>()?;
16421642+ Ok(Some(GraphQLValue::from(container.mime_type.clone())))
16431643+ })
16441644+ },
16451645+ ));
1562164615631647 // size field
15641648 blob = blob.field(Field::new("size", TypeRef::named_nn(TypeRef::INT), |ctx| {
···16071691fn create_sync_result_type() -> Object {
16081692 let mut sync_result = Object::new("SyncResult");
1609169316101610- sync_result = sync_result.field(Field::new("success", TypeRef::named_nn(TypeRef::BOOLEAN), |ctx| {
16111611- FieldFuture::new(async move {
16121612- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
16131613- .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
16141614- if let GraphQLValue::Object(obj) = value {
16151615- if let Some(success) = obj.get("success") {
16161616- return Ok(Some(success.clone()));
16941694+ sync_result = sync_result.field(Field::new(
16951695+ "success",
16961696+ TypeRef::named_nn(TypeRef::BOOLEAN),
16971697+ |ctx| {
16981698+ FieldFuture::new(async move {
16991699+ let value = ctx
17001700+ .parent_value
17011701+ .downcast_ref::<GraphQLValue>()
17021702+ .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
17031703+ if let GraphQLValue::Object(obj) = value {
17041704+ if let Some(success) = obj.get("success") {
17051705+ return Ok(Some(success.clone()));
17061706+ }
16171707 }
16181618- }
16191619- Ok(None)
16201620- })
16211621- }));
17081708+ Ok(None)
17091709+ })
17101710+ },
17111711+ ));
1622171216231623- sync_result = sync_result.field(Field::new("reposProcessed", TypeRef::named_nn(TypeRef::INT), |ctx| {
16241624- FieldFuture::new(async move {
16251625- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
16261626- .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
16271627- if let GraphQLValue::Object(obj) = value {
16281628- if let Some(repos) = obj.get("reposProcessed") {
16291629- return Ok(Some(repos.clone()));
17131713+ sync_result = sync_result.field(Field::new(
17141714+ "reposProcessed",
17151715+ TypeRef::named_nn(TypeRef::INT),
17161716+ |ctx| {
17171717+ FieldFuture::new(async move {
17181718+ let value = ctx
17191719+ .parent_value
17201720+ .downcast_ref::<GraphQLValue>()
17211721+ .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
17221722+ if let GraphQLValue::Object(obj) = value {
17231723+ if let Some(repos) = obj.get("reposProcessed") {
17241724+ return Ok(Some(repos.clone()));
17251725+ }
16301726 }
16311631- }
16321632- Ok(None)
16331633- })
16341634- }));
17271727+ Ok(None)
17281728+ })
17291729+ },
17301730+ ));
1635173116361636- sync_result = sync_result.field(Field::new("recordsSynced", TypeRef::named_nn(TypeRef::INT), |ctx| {
16371637- FieldFuture::new(async move {
16381638- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
16391639- .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
16401640- if let GraphQLValue::Object(obj) = value {
16411641- if let Some(records) = obj.get("recordsSynced") {
16421642- return Ok(Some(records.clone()));
17321732+ sync_result = sync_result.field(Field::new(
17331733+ "recordsSynced",
17341734+ TypeRef::named_nn(TypeRef::INT),
17351735+ |ctx| {
17361736+ FieldFuture::new(async move {
17371737+ let value = ctx
17381738+ .parent_value
17391739+ .downcast_ref::<GraphQLValue>()
17401740+ .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
17411741+ if let GraphQLValue::Object(obj) = value {
17421742+ if let Some(records) = obj.get("recordsSynced") {
17431743+ return Ok(Some(records.clone()));
17441744+ }
16431745 }
16441644- }
16451645- Ok(None)
16461646- })
16471647- }));
17461746+ Ok(None)
17471747+ })
17481748+ },
17491749+ ));
1648175016491649- sync_result = sync_result.field(Field::new("timedOut", TypeRef::named_nn(TypeRef::BOOLEAN), |ctx| {
16501650- FieldFuture::new(async move {
16511651- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
16521652- .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
16531653- if let GraphQLValue::Object(obj) = value {
16541654- if let Some(timed_out) = obj.get("timedOut") {
16551655- return Ok(Some(timed_out.clone()));
17511751+ sync_result = sync_result.field(Field::new(
17521752+ "timedOut",
17531753+ TypeRef::named_nn(TypeRef::BOOLEAN),
17541754+ |ctx| {
17551755+ FieldFuture::new(async move {
17561756+ let value = ctx
17571757+ .parent_value
17581758+ .downcast_ref::<GraphQLValue>()
17591759+ .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
17601760+ if let GraphQLValue::Object(obj) = value {
17611761+ if let Some(timed_out) = obj.get("timedOut") {
17621762+ return Ok(Some(timed_out.clone()));
17631763+ }
16561764 }
16571657- }
16581658- Ok(None)
16591659- })
16601660- }));
17651765+ Ok(None)
17661766+ })
17671767+ },
17681768+ ));
1661176916621662- sync_result = sync_result.field(Field::new("message", TypeRef::named_nn(TypeRef::STRING), |ctx| {
16631663- FieldFuture::new(async move {
16641664- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
16651665- .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
16661666- if let GraphQLValue::Object(obj) = value {
16671667- if let Some(message) = obj.get("message") {
16681668- return Ok(Some(message.clone()));
17701770+ sync_result = sync_result.field(Field::new(
17711771+ "message",
17721772+ TypeRef::named_nn(TypeRef::STRING),
17731773+ |ctx| {
17741774+ FieldFuture::new(async move {
17751775+ let value = ctx
17761776+ .parent_value
17771777+ .downcast_ref::<GraphQLValue>()
17781778+ .ok_or_else(|| Error::new("Failed to downcast sync result"))?;
17791779+ if let GraphQLValue::Object(obj) = value {
17801780+ if let Some(message) = obj.get("message") {
17811781+ return Ok(Some(message.clone()));
17821782+ }
16691783 }
16701670- }
16711671- Ok(None)
16721672- })
16731673- }));
17841784+ Ok(None)
17851785+ })
17861786+ },
17871787+ ));
1674178816751789 sync_result
16761790}
···16981812 .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING)))
16991813 .field(InputValue::new("in", TypeRef::named_list(TypeRef::STRING)))
17001814 .field(InputValue::new("contains", TypeRef::named(TypeRef::STRING)))
18151815+ .field(InputValue::new("fuzzy", TypeRef::named(TypeRef::STRING)))
17011816}
1702181717031818/// Creates the IntCondition input type for int field filtering
···17111826fn create_page_info_type() -> Object {
17121827 let mut page_info = Object::new("PageInfo");
1713182817141714- page_info = page_info.field(Field::new("hasNextPage", TypeRef::named_nn(TypeRef::BOOLEAN), |ctx| {
17151715- FieldFuture::new(async move {
17161716- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
17171717- .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
17181718- if let GraphQLValue::Object(obj) = value {
17191719- if let Some(has_next) = obj.get("hasNextPage") {
17201720- return Ok(Some(has_next.clone()));
18291829+ page_info = page_info.field(Field::new(
18301830+ "hasNextPage",
18311831+ TypeRef::named_nn(TypeRef::BOOLEAN),
18321832+ |ctx| {
18331833+ FieldFuture::new(async move {
18341834+ let value = ctx
18351835+ .parent_value
18361836+ .downcast_ref::<GraphQLValue>()
18371837+ .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
18381838+ if let GraphQLValue::Object(obj) = value {
18391839+ if let Some(has_next) = obj.get("hasNextPage") {
18401840+ return Ok(Some(has_next.clone()));
18411841+ }
17211842 }
17221722- }
17231723- Ok(Some(GraphQLValue::from(false)))
17241724- })
17251725- }));
18431843+ Ok(Some(GraphQLValue::from(false)))
18441844+ })
18451845+ },
18461846+ ));
1726184717271727- page_info = page_info.field(Field::new("hasPreviousPage", TypeRef::named_nn(TypeRef::BOOLEAN), |ctx| {
17281728- FieldFuture::new(async move {
17291729- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
17301730- .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
17311731- if let GraphQLValue::Object(obj) = value {
17321732- if let Some(has_prev) = obj.get("hasPreviousPage") {
17331733- return Ok(Some(has_prev.clone()));
18481848+ page_info = page_info.field(Field::new(
18491849+ "hasPreviousPage",
18501850+ TypeRef::named_nn(TypeRef::BOOLEAN),
18511851+ |ctx| {
18521852+ FieldFuture::new(async move {
18531853+ let value = ctx
18541854+ .parent_value
18551855+ .downcast_ref::<GraphQLValue>()
18561856+ .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
18571857+ if let GraphQLValue::Object(obj) = value {
18581858+ if let Some(has_prev) = obj.get("hasPreviousPage") {
18591859+ return Ok(Some(has_prev.clone()));
18601860+ }
17341861 }
17351735- }
17361736- Ok(Some(GraphQLValue::from(false)))
17371737- })
17381738- }));
18621862+ Ok(Some(GraphQLValue::from(false)))
18631863+ })
18641864+ },
18651865+ ));
1739186617401740- page_info = page_info.field(Field::new("startCursor", TypeRef::named(TypeRef::STRING), |ctx| {
17411741- FieldFuture::new(async move {
17421742- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
17431743- .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
17441744- if let GraphQLValue::Object(obj) = value {
17451745- if let Some(cursor) = obj.get("startCursor") {
17461746- return Ok(Some(cursor.clone()));
18671867+ page_info = page_info.field(Field::new(
18681868+ "startCursor",
18691869+ TypeRef::named(TypeRef::STRING),
18701870+ |ctx| {
18711871+ FieldFuture::new(async move {
18721872+ let value = ctx
18731873+ .parent_value
18741874+ .downcast_ref::<GraphQLValue>()
18751875+ .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
18761876+ if let GraphQLValue::Object(obj) = value {
18771877+ if let Some(cursor) = obj.get("startCursor") {
18781878+ return Ok(Some(cursor.clone()));
18791879+ }
17471880 }
17481748- }
17491749- Ok(None)
17501750- })
17511751- }));
18811881+ Ok(None)
18821882+ })
18831883+ },
18841884+ ));
1752188517531753- page_info = page_info.field(Field::new("endCursor", TypeRef::named(TypeRef::STRING), |ctx| {
17541754- FieldFuture::new(async move {
17551755- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
17561756- .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
17571757- if let GraphQLValue::Object(obj) = value {
17581758- if let Some(cursor) = obj.get("endCursor") {
17591759- return Ok(Some(cursor.clone()));
18861886+ page_info = page_info.field(Field::new(
18871887+ "endCursor",
18881888+ TypeRef::named(TypeRef::STRING),
18891889+ |ctx| {
18901890+ FieldFuture::new(async move {
18911891+ let value = ctx
18921892+ .parent_value
18931893+ .downcast_ref::<GraphQLValue>()
18941894+ .ok_or_else(|| Error::new("Failed to downcast PageInfo"))?;
18951895+ if let GraphQLValue::Object(obj) = value {
18961896+ if let Some(cursor) = obj.get("endCursor") {
18971897+ return Ok(Some(cursor.clone()));
18981898+ }
17601899 }
17611761- }
17621762- Ok(None)
17631763- })
17641764- }));
19001900+ Ok(None)
19011901+ })
19021902+ },
19031903+ ));
1765190417661905 page_info
17671906}
···17981937 }));
1799193818001939 // Add cursor field
18011801- edge = edge.field(Field::new("cursor", TypeRef::named_nn(TypeRef::STRING), |ctx| {
18021802- FieldFuture::new(async move {
18031803- let edge_data = ctx.parent_value.try_downcast_ref::<EdgeData>()?;
18041804- Ok(Some(GraphQLValue::from(edge_data.cursor.clone())))
18051805- })
18061806- }));
19401940+ edge = edge.field(Field::new(
19411941+ "cursor",
19421942+ TypeRef::named_nn(TypeRef::STRING),
19431943+ |ctx| {
19441944+ FieldFuture::new(async move {
19451945+ let edge_data = ctx.parent_value.try_downcast_ref::<EdgeData>()?;
19461946+ Ok(Some(GraphQLValue::from(edge_data.cursor.clone())))
19471947+ })
19481948+ },
19491949+ ));
1807195018081951 edge
18091952}
···18151958 let mut connection = Object::new(&connection_name);
1816195918171960 // Add totalCount field
18181818- connection = connection.field(Field::new("totalCount", TypeRef::named_nn(TypeRef::INT), |ctx| {
18191819- FieldFuture::new(async move {
18201820- let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
18211821- Ok(Some(GraphQLValue::from(data.total_count)))
18221822- })
18231823- }));
19611961+ connection = connection.field(Field::new(
19621962+ "totalCount",
19631963+ TypeRef::named_nn(TypeRef::INT),
19641964+ |ctx| {
19651965+ FieldFuture::new(async move {
19661966+ let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
19671967+ Ok(Some(GraphQLValue::from(data.total_count)))
19681968+ })
19691969+ },
19701970+ ));
1824197118251972 // Add pageInfo field
18261826- connection = connection.field(Field::new("pageInfo", TypeRef::named_nn("PageInfo"), |ctx| {
18271827- FieldFuture::new(async move {
18281828- let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
19731973+ connection = connection.field(Field::new(
19741974+ "pageInfo",
19751975+ TypeRef::named_nn("PageInfo"),
19761976+ |ctx| {
19771977+ FieldFuture::new(async move {
19781978+ let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
1829197918301830- let mut page_info = async_graphql::indexmap::IndexMap::new();
18311831- page_info.insert(
18321832- async_graphql::Name::new("hasNextPage"),
18331833- GraphQLValue::from(data.has_next_page)
18341834- );
18351835- // For forward pagination only, hasPreviousPage is always false
18361836- page_info.insert(
18371837- async_graphql::Name::new("hasPreviousPage"),
18381838- GraphQLValue::from(false)
18391839- );
19801980+ let mut page_info = async_graphql::indexmap::IndexMap::new();
19811981+ page_info.insert(
19821982+ async_graphql::Name::new("hasNextPage"),
19831983+ GraphQLValue::from(data.has_next_page),
19841984+ );
19851985+ // For forward pagination only, hasPreviousPage is always false
19861986+ page_info.insert(
19871987+ async_graphql::Name::new("hasPreviousPage"),
19881988+ GraphQLValue::from(false),
19891989+ );
1840199018411841- // Add startCursor (first node's cid if available)
18421842- if !data.nodes.is_empty() {
18431843- if let Some(first_record) = data.nodes.first() {
18441844- let start_cursor = general_purpose::URL_SAFE_NO_PAD.encode(first_record.record.cid.clone());
19911991+ // Add startCursor (first node's cid if available)
19921992+ if !data.nodes.is_empty() {
19931993+ if let Some(first_record) = data.nodes.first() {
19941994+ let start_cursor = general_purpose::URL_SAFE_NO_PAD
19951995+ .encode(first_record.record.cid.clone());
19961996+ page_info.insert(
19971997+ async_graphql::Name::new("startCursor"),
19981998+ GraphQLValue::from(start_cursor),
19991999+ );
20002000+ }
20012001+ }
20022002+20032003+ // Add endCursor
20042004+ if let Some(ref cursor) = data.end_cursor {
18452005 page_info.insert(
18461846- async_graphql::Name::new("startCursor"),
18471847- GraphQLValue::from(start_cursor)
20062006+ async_graphql::Name::new("endCursor"),
20072007+ GraphQLValue::from(cursor.clone()),
18482008 );
18492009 }
18501850- }
1851201018521852- // Add endCursor
18531853- if let Some(ref cursor) = data.end_cursor {
18541854- page_info.insert(
18551855- async_graphql::Name::new("endCursor"),
18561856- GraphQLValue::from(cursor.clone())
18571857- );
18581858- }
18591859-18601860- Ok(Some(FieldValue::owned_any(GraphQLValue::Object(page_info))))
18611861- })
18621862- }));
20112011+ Ok(Some(FieldValue::owned_any(GraphQLValue::Object(page_info))))
20122012+ })
20132013+ },
20142014+ ));
1863201518642016 // Add edges field (Relay standard)
18652017 let edge_type = format!("{}Edge", record_type_name);
18661866- connection = connection.field(Field::new("edges", TypeRef::named_nn_list_nn(&edge_type), |ctx| {
18671867- FieldFuture::new(async move {
18681868- let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
20182018+ connection = connection.field(Field::new(
20192019+ "edges",
20202020+ TypeRef::named_nn_list_nn(&edge_type),
20212021+ |ctx| {
20222022+ FieldFuture::new(async move {
20232023+ let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
1869202418701870- let field_values: Vec<FieldValue<'_>> = data.nodes.iter()
18711871- .map(|node| {
18721872- // Use base64-encoded CID as cursor
18731873- let cursor = general_purpose::URL_SAFE_NO_PAD.encode(node.record.cid.clone());
18741874- let edge = EdgeData {
18751875- node: node.clone(),
18761876- cursor,
18771877- };
18781878- FieldValue::owned_any(edge)
18791879- })
18801880- .collect();
20252025+ let field_values: Vec<FieldValue<'_>> = data
20262026+ .nodes
20272027+ .iter()
20282028+ .map(|node| {
20292029+ // Use base64-encoded CID as cursor
20302030+ let cursor =
20312031+ general_purpose::URL_SAFE_NO_PAD.encode(node.record.cid.clone());
20322032+ let edge = EdgeData {
20332033+ node: node.clone(),
20342034+ cursor,
20352035+ };
20362036+ FieldValue::owned_any(edge)
20372037+ })
20382038+ .collect();
1881203918821882- Ok(Some(FieldValue::list(field_values)))
18831883- })
18841884- }));
20402040+ Ok(Some(FieldValue::list(field_values)))
20412041+ })
20422042+ },
20432043+ ));
1885204418862045 // Add nodes field (convenience, direct access to records without edges wrapper)
18871887- connection = connection.field(Field::new("nodes", TypeRef::named_nn_list_nn(record_type_name), |ctx| {
18881888- FieldFuture::new(async move {
18891889- let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
20462046+ connection = connection.field(Field::new(
20472047+ "nodes",
20482048+ TypeRef::named_nn_list_nn(record_type_name),
20492049+ |ctx| {
20502050+ FieldFuture::new(async move {
20512051+ let data = ctx.parent_value.try_downcast_ref::<ConnectionData>()?;
1890205218911891- let field_values: Vec<FieldValue<'_>> = data.nodes.iter()
18921892- .map(|node| FieldValue::owned_any(node.clone()))
18931893- .collect();
20532053+ let field_values: Vec<FieldValue<'_>> = data
20542054+ .nodes
20552055+ .iter()
20562056+ .map(|node| FieldValue::owned_any(node.clone()))
20572057+ .collect();
1894205818951895- Ok(Some(FieldValue::list(field_values)))
18961896- })
18971897- }));
20592059+ Ok(Some(FieldValue::list(field_values)))
20602060+ })
20612061+ },
20622062+ ));
1898206318992064 connection
19002065}
···19162081 let slice = slice_clone.clone();
1917208219182083 FieldFuture::new(async move {
19191919- let did = ctx.args.get("did")
20842084+ let did = ctx
20852085+ .args
20862086+ .get("did")
19202087 .and_then(|v| v.string().ok())
19212088 .ok_or_else(|| Error::new("did argument is required"))?;
1922208919232090 // Create sync service and call sync_user_collections
19242091 let cache_backend = crate::cache::CacheFactory::create_cache(
19251925- crate::cache::CacheBackend::InMemory { ttl_seconds: None }
19261926- ).await.map_err(|e| Error::new(format!("Failed to create cache: {}", e)))?;
20922092+ crate::cache::CacheBackend::InMemory { ttl_seconds: None },
20932093+ )
20942094+ .await
20952095+ .map_err(|e| Error::new(format!("Failed to create cache: {}", e)))?;
19272096 let cache = Arc::new(Mutex::new(crate::cache::SliceCache::new(cache_backend)));
19282097 let sync_service = crate::sync::SyncService::with_cache(
19292098 db.clone(),
···1939210819402109 // Convert result to GraphQL object
19412110 let mut obj = async_graphql::indexmap::IndexMap::new();
19421942- obj.insert(async_graphql::Name::new("success"), GraphQLValue::from(result.success));
19431943- obj.insert(async_graphql::Name::new("reposProcessed"), GraphQLValue::from(result.repos_processed));
19441944- obj.insert(async_graphql::Name::new("recordsSynced"), GraphQLValue::from(result.records_synced));
19451945- obj.insert(async_graphql::Name::new("timedOut"), GraphQLValue::from(result.timed_out));
19461946- obj.insert(async_graphql::Name::new("message"), GraphQLValue::from(result.message));
21112111+ obj.insert(
21122112+ async_graphql::Name::new("success"),
21132113+ GraphQLValue::from(result.success),
21142114+ );
21152115+ obj.insert(
21162116+ async_graphql::Name::new("reposProcessed"),
21172117+ GraphQLValue::from(result.repos_processed),
21182118+ );
21192119+ obj.insert(
21202120+ async_graphql::Name::new("recordsSynced"),
21212121+ GraphQLValue::from(result.records_synced),
21222122+ );
21232123+ obj.insert(
21242124+ async_graphql::Name::new("timedOut"),
21252125+ GraphQLValue::from(result.timed_out),
21262126+ );
21272127+ obj.insert(
21282128+ async_graphql::Name::new("message"),
21292129+ GraphQLValue::from(result.message),
21302130+ );
1947213119482132 Ok(Some(FieldValue::owned_any(GraphQLValue::Object(obj))))
19492133 })
···19532137 "did",
19542138 TypeRef::named_nn(TypeRef::STRING),
19552139 ))
19561956- .description("Sync user collections for a given DID")
21402140+ .description("Sync user collections for a given DID"),
19572141 );
1958214219592143 mutation
···19822166 let camel_case = nsid_to_join_field_name(nsid);
1983216719842168 // Then pluralize the end
19851985- if camel_case.ends_with("s") || camel_case.ends_with("x") || camel_case.ends_with("ch") || camel_case.ends_with("sh") {
21692169+ if camel_case.ends_with("s")
21702170+ || camel_case.ends_with("x")
21712171+ || camel_case.ends_with("ch")
21722172+ || camel_case.ends_with("sh")
21732173+ {
19862174 format!("{}es", camel_case) // status -> statuses, box -> boxes
19872175 } else if camel_case.ends_with("y") && camel_case.len() > 1 {
19882176 let chars: Vec<char> = camel_case.chars().collect();
···20272215 for field in fields {
20282216 let field_name = field.name.clone();
20292217 let field_name_clone = field_name.clone();
20302030- aggregated = aggregated.field(Field::new(&field_name, TypeRef::named("JSON"), move |ctx| {
20312031- let field_name = field_name_clone.clone();
22182218+ aggregated = aggregated.field(Field::new(
22192219+ &field_name,
22202220+ TypeRef::named("JSON"),
22212221+ move |ctx| {
22222222+ let field_name = field_name_clone.clone();
22232223+ FieldFuture::new(async move {
22242224+ let json_value = ctx.parent_value.try_downcast_ref::<serde_json::Value>()?;
22252225+ if let Some(obj) = json_value.as_object() {
22262226+ if let Some(value) = obj.get(&field_name) {
22272227+ // Convert serde_json::Value to async_graphql::Value
22282228+ let graphql_value = serde_json_to_graphql_value(value);
22292229+ return Ok(Some(graphql_value));
22302230+ }
22312231+ }
22322232+ Ok(None)
22332233+ })
22342234+ },
22352235+ ));
22362236+ }
22372237+22382238+ // Add count field
22392239+ aggregated = aggregated.field(Field::new(
22402240+ "count",
22412241+ TypeRef::named_nn(TypeRef::INT),
22422242+ |ctx| {
20322243 FieldFuture::new(async move {
20332244 let json_value = ctx.parent_value.try_downcast_ref::<serde_json::Value>()?;
20342245 if let Some(obj) = json_value.as_object() {
20352035- if let Some(value) = obj.get(&field_name) {
20362036- // Convert serde_json::Value to async_graphql::Value
20372037- let graphql_value = serde_json_to_graphql_value(value);
20382038- return Ok(Some(graphql_value));
22462246+ if let Some(count) = obj.get("count") {
22472247+ if let Some(count_i64) = count.as_i64() {
22482248+ return Ok(Some(GraphQLValue::from(count_i64 as i32)));
22492249+ }
20392250 }
20402251 }
20412041- Ok(None)
22522252+ Ok(Some(GraphQLValue::from(0)))
20422253 })
20432043- }));
20442044- }
20452045-20462046- // Add count field
20472047- aggregated = aggregated.field(Field::new("count", TypeRef::named_nn(TypeRef::INT), |ctx| {
20482048- FieldFuture::new(async move {
20492049- let json_value = ctx.parent_value.try_downcast_ref::<serde_json::Value>()?;
20502050- if let Some(obj) = json_value.as_object() {
20512051- if let Some(count) = obj.get("count") {
20522052- if let Some(count_i64) = count.as_i64() {
20532053- return Ok(Some(GraphQLValue::from(count_i64 as i32)));
20542054- }
20552055- }
20562056- }
20572057- Ok(Some(GraphQLValue::from(0)))
20582058- })
20592059- }));
22542254+ },
22552255+ ));
2060225620612257 aggregated
20622258}
···21132309fn create_record_update_type() -> Object {
21142310 let mut record_update = Object::new("RecordUpdate");
2115231121162116- record_update = record_update.field(Field::new("uri", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21172117- FieldFuture::new(async move {
21182118- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21192119- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21202120- if let GraphQLValue::Object(obj) = value {
21212121- if let Some(uri) = obj.get("uri") {
21222122- return Ok(Some(uri.clone()));
23122312+ record_update = record_update.field(Field::new(
23132313+ "uri",
23142314+ TypeRef::named_nn(TypeRef::STRING),
23152315+ |ctx| {
23162316+ FieldFuture::new(async move {
23172317+ let value = ctx
23182318+ .parent_value
23192319+ .downcast_ref::<GraphQLValue>()
23202320+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
23212321+ if let GraphQLValue::Object(obj) = value {
23222322+ if let Some(uri) = obj.get("uri") {
23232323+ return Ok(Some(uri.clone()));
23242324+ }
21232325 }
21242124- }
21252125- Ok(None)
21262126- })
21272127- }));
23262326+ Ok(None)
23272327+ })
23282328+ },
23292329+ ));
2128233021292129- record_update = record_update.field(Field::new("cid", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21302130- FieldFuture::new(async move {
21312131- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21322132- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21332133- if let GraphQLValue::Object(obj) = value {
21342134- if let Some(cid) = obj.get("cid") {
21352135- return Ok(Some(cid.clone()));
23312331+ record_update = record_update.field(Field::new(
23322332+ "cid",
23332333+ TypeRef::named_nn(TypeRef::STRING),
23342334+ |ctx| {
23352335+ FieldFuture::new(async move {
23362336+ let value = ctx
23372337+ .parent_value
23382338+ .downcast_ref::<GraphQLValue>()
23392339+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
23402340+ if let GraphQLValue::Object(obj) = value {
23412341+ if let Some(cid) = obj.get("cid") {
23422342+ return Ok(Some(cid.clone()));
23432343+ }
21362344 }
21372137- }
21382138- Ok(None)
21392139- })
21402140- }));
23452345+ Ok(None)
23462346+ })
23472347+ },
23482348+ ));
2141234921422142- record_update = record_update.field(Field::new("did", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21432143- FieldFuture::new(async move {
21442144- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21452145- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21462146- if let GraphQLValue::Object(obj) = value {
21472147- if let Some(did) = obj.get("did") {
21482148- return Ok(Some(did.clone()));
23502350+ record_update = record_update.field(Field::new(
23512351+ "did",
23522352+ TypeRef::named_nn(TypeRef::STRING),
23532353+ |ctx| {
23542354+ FieldFuture::new(async move {
23552355+ let value = ctx
23562356+ .parent_value
23572357+ .downcast_ref::<GraphQLValue>()
23582358+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
23592359+ if let GraphQLValue::Object(obj) = value {
23602360+ if let Some(did) = obj.get("did") {
23612361+ return Ok(Some(did.clone()));
23622362+ }
21492363 }
21502150- }
21512151- Ok(None)
21522152- })
21532153- }));
23642364+ Ok(None)
23652365+ })
23662366+ },
23672367+ ));
2154236821552155- record_update = record_update.field(Field::new("collection", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21562156- FieldFuture::new(async move {
21572157- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21582158- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21592159- if let GraphQLValue::Object(obj) = value {
21602160- if let Some(collection) = obj.get("collection") {
21612161- return Ok(Some(collection.clone()));
23692369+ record_update = record_update.field(Field::new(
23702370+ "collection",
23712371+ TypeRef::named_nn(TypeRef::STRING),
23722372+ |ctx| {
23732373+ FieldFuture::new(async move {
23742374+ let value = ctx
23752375+ .parent_value
23762376+ .downcast_ref::<GraphQLValue>()
23772377+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
23782378+ if let GraphQLValue::Object(obj) = value {
23792379+ if let Some(collection) = obj.get("collection") {
23802380+ return Ok(Some(collection.clone()));
23812381+ }
21622382 }
21632163- }
21642164- Ok(None)
21652165- })
21662166- }));
23832383+ Ok(None)
23842384+ })
23852385+ },
23862386+ ));
2167238721682168- record_update = record_update.field(Field::new("indexedAt", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21692169- FieldFuture::new(async move {
21702170- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21712171- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21722172- if let GraphQLValue::Object(obj) = value {
21732173- if let Some(indexed_at) = obj.get("indexedAt") {
21742174- return Ok(Some(indexed_at.clone()));
23882388+ record_update = record_update.field(Field::new(
23892389+ "indexedAt",
23902390+ TypeRef::named_nn(TypeRef::STRING),
23912391+ |ctx| {
23922392+ FieldFuture::new(async move {
23932393+ let value = ctx
23942394+ .parent_value
23952395+ .downcast_ref::<GraphQLValue>()
23962396+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
23972397+ if let GraphQLValue::Object(obj) = value {
23982398+ if let Some(indexed_at) = obj.get("indexedAt") {
23992399+ return Ok(Some(indexed_at.clone()));
24002400+ }
21752401 }
21762176- }
21772177- Ok(None)
21782178- })
21792179- }));
24022402+ Ok(None)
24032403+ })
24042404+ },
24052405+ ));
2180240621812181- record_update = record_update.field(Field::new("operation", TypeRef::named_nn(TypeRef::STRING), |ctx| {
21822182- FieldFuture::new(async move {
21832183- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
21842184- .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21852185- if let GraphQLValue::Object(obj) = value {
21862186- if let Some(operation) = obj.get("operation") {
21872187- return Ok(Some(operation.clone()));
24072407+ record_update = record_update.field(Field::new(
24082408+ "operation",
24092409+ TypeRef::named_nn(TypeRef::STRING),
24102410+ |ctx| {
24112411+ FieldFuture::new(async move {
24122412+ let value = ctx
24132413+ .parent_value
24142414+ .downcast_ref::<GraphQLValue>()
24152415+ .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
24162416+ if let GraphQLValue::Object(obj) = value {
24172417+ if let Some(operation) = obj.get("operation") {
24182418+ return Ok(Some(operation.clone()));
24192419+ }
21882420 }
21892189- }
21902190- Ok(None)
21912191- })
21922192- }));
24212421+ Ok(None)
24222422+ })
24232423+ },
24242424+ ));
2193242521942426 record_update = record_update.field(Field::new("value", TypeRef::named_nn("JSON"), |ctx| {
21952427 FieldFuture::new(async move {
21962196- let value = ctx.parent_value.downcast_ref::<GraphQLValue>()
24282428+ let value = ctx
24292429+ .parent_value
24302430+ .downcast_ref::<GraphQLValue>()
21972431 .ok_or_else(|| Error::new("Failed to downcast RecordUpdate"))?;
21982432 if let GraphQLValue::Object(obj) = value {
21992433 if let Some(val) = obj.get("value") {
···23702604}
2371260523722606/// Helper function to parse GraphQL where clause recursively
23732373-fn parse_where_clause(where_obj: async_graphql::dynamic::ObjectAccessor) -> crate::models::WhereClause {
26072607+fn parse_where_clause(
26082608+ where_obj: async_graphql::dynamic::ObjectAccessor,
26092609+) -> crate::models::WhereClause {
23742610 let mut where_clause = crate::models::WhereClause {
23752611 conditions: HashMap::new(),
23762612 or_conditions: None,
···24192655 eq: None,
24202656 in_values: None,
24212657 contains: None,
26582658+ fuzzy: None,
24222659 gt: None,
24232660 gte: None,
24242661 lt: None,
···24562693 }
24572694 }
2458269526962696+ // Parse fuzzy condition
26972697+ if let Some(fuzzy_val) = condition_obj.get("fuzzy") {
26982698+ if let Ok(fuzzy_str) = fuzzy_val.string() {
26992699+ where_condition.fuzzy = Some(fuzzy_str.to_string());
27002700+ }
27012701+ }
27022702+24592703 // Parse gt condition
24602704 if let Some(gt_val) = condition_obj.get("gt") {
24612705 if let Ok(gt_str) = gt_val.string() {
···24992743 field_str.to_string()
25002744 };
2501274525022502- where_clause.conditions.insert(db_field_name, where_condition);
27462746+ where_clause
27472747+ .conditions
27482748+ .insert(db_field_name, where_condition);
25032749 }
25042750 }
25052751
···1414use crate::cache::{CacheBackend, CacheFactory, SliceCache};
1515use crate::database::Database;
1616use crate::errors::JetstreamError;
1717-use crate::graphql::{RecordOperation, RecordUpdateEvent, PUBSUB};
1717+use crate::graphql::{PUBSUB, RecordOperation, RecordUpdateEvent};
1818use crate::jetstream_cursor::PostgresCursorHandler;
1919use crate::logging::{LogLevel, Logger};
2020use crate::models::{Actor, Record};
···324324325325 // Check if this is a primary collection (starts with slice domain)
326326 // Lexicon records for this slice are always treated as primary
327327- let is_primary_collection = commit.collection.starts_with(&domain) || is_lexicon_for_this_slice;
327327+ let is_primary_collection =
328328+ commit.collection.starts_with(&domain) || is_lexicon_for_this_slice;
328329329330 // For external collections, check actor status BEFORE expensive validation
330331 if !is_primary_collection {
···428429 };
429430430431 // Insert into database
431431- if let Err(e) = self.database.batch_insert_actors(&[actor]).await {
432432+ if let Err(e) =
433433+ self.database.batch_insert_actors(&[actor]).await
434434+ {
432435 error!("Failed to create actor {}: {}", did, e);
433436 } else {
434437 // Add to cache after successful database insert
···253253 // First, get all repos from primary collections
254254 let mut primary_repos = std::collections::HashSet::new();
255255 for collection in &primary_collections {
256256- match self.get_repos_for_collection(collection, slice_uri, max_repos).await {
256256+ match self
257257+ .get_repos_for_collection(collection, slice_uri, max_repos)
258258+ .await
259259+ {
257260 Ok(repos) => {
258261 info!(
259262 "Found {} repositories for primary collection \"{}\"",
···465468 match database.batch_insert_records(&batch).await {
466469 Ok(_) => {
467470 write_count += batch_size;
468468- info!("Database writer: Inserted batch of {} records (total: {})", batch_size, write_count);
471471+ info!(
472472+ "Database writer: Inserted batch of {} records (total: {})",
473473+ batch_size, write_count
474474+ );
469475 }
470476 Err(e) => {
471477 error!("Database writer: Failed to insert batch: {}", e);
···611617612618 // Send batch to writer when buffer is full
613619 if batch_buffer.len() >= BATCH_SIZE {
614614- let batch_to_send = std::mem::replace(&mut batch_buffer, Vec::with_capacity(BATCH_SIZE));
620620+ let batch_to_send =
621621+ std::mem::replace(&mut batch_buffer, Vec::with_capacity(BATCH_SIZE));
615622 let batch_count = batch_to_send.len() as i64;
616616- info!("Sending batch of {} records to database writer", batch_count);
623623+ info!(
624624+ "Sending batch of {} records to database writer",
625625+ batch_count
626626+ );
617627618628 // Send to writer channel (non-blocking)
619629 if let Err(e) = tx.send(batch_to_send).await {
620630 error!("Failed to send batch to writer: {}", e);
621621- return Err(SyncError::Generic(format!("Failed to send batch to writer: {}", e)));
631631+ return Err(SyncError::Generic(format!(
632632+ "Failed to send batch to writer: {}",
633633+ e
634634+ )));
622635 }
623636624637 let mut total = total_indexed_records.lock().await;
···629642 // Flush any remaining records in the buffer
630643 if !batch_buffer.is_empty() {
631644 let batch_count = batch_buffer.len() as i64;
632632- info!("Sending final batch of {} records to database writer", batch_count);
645645+ info!(
646646+ "Sending final batch of {} records to database writer",
647647+ batch_count
648648+ );
633649634650 if let Err(e) = tx.send(batch_buffer).await {
635651 error!("Failed to send final batch to writer: {}", e);
636636- return Err(SyncError::Generic(format!("Failed to send final batch to writer: {}", e)));
652652+ return Err(SyncError::Generic(format!(
653653+ "Failed to send final batch to writer: {}",
654654+ e
655655+ )));
637656 }
638657639658 let mut total = total_indexed_records.lock().await;
···642661643662 // Close the channel and wait for writer to finish
644663 drop(tx);
645645- let write_result = writer_task.await
664664+ let write_result = writer_task
665665+ .await
646666 .map_err(|e| SyncError::Generic(format!("Writer task panicked: {}", e)))?;
647667648668 let final_count = match write_result {
···655675 successful_tasks, failed_tasks
656676 );
657677658658- info!(
659659- "Indexed {} new/changed records in batches",
660660- final_count
661661- );
678678+ info!("Indexed {} new/changed records in batches", final_count);
662679663680 info!("Backfill complete!");
664681···699716 if page_count > max_pages {
700717 warn!(
701718 "Reached maximum page limit ({}) for collection {} (based on repo limit {:?}, estimated max {} repos at {} per page)",
702702- max_pages, collection, max_repos, max_pages * REPOS_PER_PAGE, REPOS_PER_PAGE
719719+ max_pages,
720720+ collection,
721721+ max_repos,
722722+ max_pages * REPOS_PER_PAGE,
723723+ REPOS_PER_PAGE
703724 );
704725 break;
705726 }
···9791000 const CHUNK_SIZE: usize = 50; // Process DIDs in chunks
9801001 const MAX_CONCURRENT: usize = 10; // Limit concurrent resolutions
9811002982982- info!("Resolving ATP data for {} repositories in chunks", repos.len());
10031003+ info!(
10041004+ "Resolving ATP data for {} repositories in chunks",
10051005+ repos.len()
10061006+ );
98310079841008 for (chunk_idx, chunk) in repos.chunks(CHUNK_SIZE).enumerate() {
9851009 let chunk_start = chunk_idx * CHUNK_SIZE;
···10311055 }
10321056 }
1033105710341034- info!("Successfully resolved ATP data for {}/{} repositories", atp_map.len(), repos.len());
10581058+ info!(
10591059+ "Successfully resolved ATP data for {}/{} repositories",
10601060+ atp_map.len(),
10611061+ repos.len()
10621062+ );
10351063 Ok(atp_map)
10361064 }
10371065···11931221 Some(&external_collections),
11941222 Some(&[user_did.to_string()]), // Only sync this user's repos
11951223 false, // Always validate user collections
11961196- None, // No limit for user-specific sync
12241224+ None, // No limit for user-specific sync
11971225 )
11981226 .await
11991227 };
···11use crate::{AppState, auth, errors::AppError, sync::SyncService};
22-use axum::{extract::{Query, State}, http::HeaderMap, response::Json};
22+use axum::{
33+ extract::{Query, State},
44+ http::HeaderMap,
55+ response::Json,
66+};
37use serde::{Deserialize, Serialize};
48use std::collections::HashMap;
59···144148145149 // First, get repos ONLY from primary collections
146150 for collection in &primary_collections {
147147- match sync_service.get_repos_for_collection(collection, slice_uri, Some(applied_limit)).await {
151151+ match sync_service
152152+ .get_repos_for_collection(collection, slice_uri, Some(applied_limit))
153153+ .await
154154+ {
148155 Ok(repos) => {
149156 counts.insert(collection.clone(), repos.len() as i64);
150157 discovered_repos.extend(repos);
+37
docs/graphql-api.md
···7979- `eq`: Exact match
8080- `in`: Match any value in array
8181- `contains`: Substring match (case-insensitive)
8282+- `fuzzy`: Fuzzy/similarity match (typo-tolerant)
8283- `gt`: Greater than (lexicographic)
8384- `gte`: Greater than or equal to
8485- `lt`: Less than
···100101- `gte`: At or after datetime
101102- `lt`: Before datetime
102103- `lte`: At or before datetime
104104+105105+#### Fuzzy Matching Example
106106+107107+The `fuzzy` filter uses PostgreSQL's trigram similarity for typo-tolerant search:
108108+109109+```graphql
110110+query FuzzySearch {
111111+ fmTealAlphaFeedPlays(
112112+ where: {
113113+ trackName: { fuzzy: "love" }
114114+ }
115115+ ) {
116116+ edges {
117117+ node {
118118+ trackName
119119+ artists
120120+ }
121121+ }
122122+ }
123123+}
124124+```
125125+126126+This will match track names like:
127127+- "Love" (exact)
128128+- "Love Song"
129129+- "Lovely"
130130+- "I Love You"
131131+- "Lover"
132132+- "Loveless"
133133+134134+The fuzzy filter is great for:
135135+- Handling typos and misspellings
136136+- Finding similar variations of text
137137+- Flexible search without exact matching
139139+**Note**: Fuzzy matching scores string similarity using trigrams, so it is more flexible than `contains` — but any string whose similarity to the search term crosses the threshold will match, which can include results that look unrelated to the query.
103140104141#### Date Range Example
105142