···217217 sort_by: Option<&Vec<SortField>>,
218218 where_clause: Option<&WhereClause>,
219219 ) -> Result<(Vec<Record>, Option<String>), DatabaseError> {
220220- let limit = limit.unwrap_or(50).min(100);
220220+ // Default to 50 for API requests, but support unlimited queries for DataLoader
221221+ let limit = limit.unwrap_or(50);
221222222223 let mut where_clauses = Vec::new();
223224 let mut param_count = 1;
···433434 if let Some(contains_value) = &condition.contains {
434435 query_builder = query_builder.bind(contains_value);
435436 }
437437+ if let Some(gt_value) = &condition.gt {
438438+ if let Some(str_val) = gt_value.as_str() {
439439+ query_builder = query_builder.bind(str_val);
440440+ } else {
441441+ query_builder = query_builder.bind(gt_value);
442442+ }
443443+ }
444444+ if let Some(gte_value) = &condition.gte {
445445+ if let Some(str_val) = gte_value.as_str() {
446446+ query_builder = query_builder.bind(str_val);
447447+ } else {
448448+ query_builder = query_builder.bind(gte_value);
449449+ }
450450+ }
451451+ if let Some(lt_value) = &condition.lt {
452452+ if let Some(str_val) = lt_value.as_str() {
453453+ query_builder = query_builder.bind(str_val);
454454+ } else {
455455+ query_builder = query_builder.bind(lt_value);
456456+ }
457457+ }
458458+ if let Some(lte_value) = &condition.lte {
459459+ if let Some(str_val) = lte_value.as_str() {
460460+ query_builder = query_builder.bind(str_val);
461461+ } else {
462462+ query_builder = query_builder.bind(lte_value);
463463+ }
464464+ }
436465 }
437466438467 if let Some(or_conditions) = &clause.or_conditions {
···454483 if let Some(contains_value) = &condition.contains {
455484 query_builder = query_builder.bind(contains_value);
456485 }
486486+ if let Some(gt_value) = &condition.gt {
487487+ if let Some(str_val) = gt_value.as_str() {
488488+ query_builder = query_builder.bind(str_val);
489489+ } else {
490490+ query_builder = query_builder.bind(gt_value);
491491+ }
492492+ }
493493+ if let Some(gte_value) = &condition.gte {
494494+ if let Some(str_val) = gte_value.as_str() {
495495+ query_builder = query_builder.bind(str_val);
496496+ } else {
497497+ query_builder = query_builder.bind(gte_value);
498498+ }
499499+ }
500500+ if let Some(lt_value) = &condition.lt {
501501+ if let Some(str_val) = lt_value.as_str() {
502502+ query_builder = query_builder.bind(str_val);
503503+ } else {
504504+ query_builder = query_builder.bind(lt_value);
505505+ }
506506+ }
507507+ if let Some(lte_value) = &condition.lte {
508508+ if let Some(str_val) = lte_value.as_str() {
509509+ query_builder = query_builder.bind(str_val);
510510+ } else {
511511+ query_builder = query_builder.bind(lte_value);
512512+ }
513513+ }
457514 }
458515 }
459516 }
···476533 pub async fn get_aggregated_records(
477534 &self,
478535 slice_uri: &str,
479479- group_by_fields: &[String],
536536+ group_by_fields: &[crate::models::GroupByField],
480537 where_clause: Option<&WhereClause>,
481538 order_by_count: Option<&str>,
482539 limit: Option<i32>,
···488545 let limit = limit.unwrap_or(50).min(1000);
489546 let mut param_count = 1;
490547491491- // Build SELECT clause with JSON field extraction
548548+ // Build SELECT clause with JSON field extraction and optional date truncation
492549 let select_fields: Vec<String> = group_by_fields
493550 .iter()
494551 .enumerate()
495495- .map(|(i, field)| {
496496- // Check if it's a table column
497497- if matches!(field.as_str(), "did" | "collection" | "uri" | "cid" | "indexed_at") {
498498- format!("\"{}\" as field_{}", field, i)
499499- } else {
500500- // JSON field
501501- format!("json->>'{}' as field_{}", field, i)
552552+ .map(|(i, group_by_field)| {
553553+ match group_by_field {
554554+ crate::models::GroupByField::Simple(field) => {
555555+ // Check if it's a table column
556556+ if matches!(field.as_str(), "did" | "collection" | "uri" | "cid" | "indexed_at") {
557557+ format!("\"{}\" as field_{}", field, i)
558558+ } else {
559559+ // JSON field
560560+ format!("json->>'{}' as field_{}", field, i)
561561+ }
562562+ }
563563+ crate::models::GroupByField::Truncated { field, interval } => {
564564+ // Date truncation using PostgreSQL's date_trunc function
565565+ let interval_str = interval.to_pg_interval();
566566+567567+ // Check if it's a table column
568568+ if field == "indexed_at" {
569569+ format!("date_trunc('{}', \"{}\")::text as field_{}", interval_str, field, i)
570570+ } else {
571571+ // JSON field - cast to timestamp for date_trunc, then to text
572572+ format!("date_trunc('{}', (json->>'{}')::timestamp)::text as field_{}", interval_str, field, i)
573573+ }
574574+ }
502575 }
503576 })
504577 .collect();
···536609 select_clause, where_sql, group_by_clause.join(", "), order_by_sql, limit
537610 );
538611612612+ tracing::debug!("Generated SQL: {}", query);
613613+539614 let mut query_builder = sqlx::query(&query);
540615 query_builder = query_builder.bind(slice_uri);
541616···559634 if let Some(contains_value) = &condition.contains {
560635 query_builder = query_builder.bind(contains_value);
561636 }
637637+ if let Some(gt_value) = &condition.gt {
638638+ if let Some(str_val) = gt_value.as_str() {
639639+ query_builder = query_builder.bind(str_val);
640640+ } else {
641641+ query_builder = query_builder.bind(gt_value.to_string());
642642+ }
643643+ }
644644+ if let Some(gte_value) = &condition.gte {
645645+ if let Some(str_val) = gte_value.as_str() {
646646+ query_builder = query_builder.bind(str_val);
647647+ } else {
648648+ query_builder = query_builder.bind(gte_value.to_string());
649649+ }
650650+ }
651651+ if let Some(lt_value) = &condition.lt {
652652+ if let Some(str_val) = lt_value.as_str() {
653653+ query_builder = query_builder.bind(str_val);
654654+ } else {
655655+ query_builder = query_builder.bind(lt_value.to_string());
656656+ }
657657+ }
658658+ if let Some(lte_value) = &condition.lte {
659659+ if let Some(str_val) = lte_value.as_str() {
660660+ query_builder = query_builder.bind(str_val);
661661+ } else {
662662+ query_builder = query_builder.bind(lte_value.to_string());
663663+ }
664664+ }
562665 }
563666564667 if let Some(or_conditions) = &clause.or_conditions {
···580683 if let Some(contains_value) = &condition.contains {
581684 query_builder = query_builder.bind(contains_value);
582685 }
686686+ if let Some(gt_value) = &condition.gt {
687687+ if let Some(str_val) = gt_value.as_str() {
688688+ query_builder = query_builder.bind(str_val);
689689+ } else {
690690+ query_builder = query_builder.bind(gt_value.to_string());
691691+ }
692692+ }
693693+ if let Some(gte_value) = &condition.gte {
694694+ if let Some(str_val) = gte_value.as_str() {
695695+ query_builder = query_builder.bind(str_val);
696696+ } else {
697697+ query_builder = query_builder.bind(gte_value.to_string());
698698+ }
699699+ }
700700+ if let Some(lt_value) = &condition.lt {
701701+ if let Some(str_val) = lt_value.as_str() {
702702+ query_builder = query_builder.bind(str_val);
703703+ } else {
704704+ query_builder = query_builder.bind(lt_value.to_string());
705705+ }
706706+ }
707707+ if let Some(lte_value) = &condition.lte {
708708+ if let Some(str_val) = lte_value.as_str() {
709709+ query_builder = query_builder.bind(str_val);
710710+ } else {
711711+ query_builder = query_builder.bind(lte_value.to_string());
712712+ }
713713+ }
583714 }
584715 }
585716 }
···592723 let mut obj = serde_json::Map::new();
593724594725 // Extract grouped field values
595595- for (i, field_name) in group_by_fields.iter().enumerate() {
726726+ for (i, group_by_field) in group_by_fields.iter().enumerate() {
596727 let col_name = format!("field_{}", i);
597728 let value: Option<String> = row.try_get(col_name.as_str()).ok();
598729···609740 serde_json::Value::Null
610741 };
611742612612- obj.insert(field_name.clone(), json_value);
743743+ obj.insert(group_by_field.field_name().to_string(), json_value);
613744 }
614745615746 // Extract count
+9-1
api/src/database/types.rs
···991010/// Represents a single condition in a WHERE clause.
1111///
1212-/// Supports three types of operations:
1212+/// Supports multiple types of operations:
1313/// - `eq`: Exact match (field = value)
1414/// - `in_values`: Array membership (field IN (...))
1515/// - `contains`: Pattern matching (field ILIKE '%value%')
1616+/// - `gt`: Greater than (field > value)
1717+/// - `gte`: Greater than or equal (field >= value)
1818+/// - `lt`: Less than (field < value)
1919+/// - `lte`: Less than or equal (field <= value)
1620#[derive(Debug, Clone, Serialize, Deserialize)]
1721#[serde(rename_all = "camelCase")]
1822pub struct WhereCondition {
···2024 #[serde(rename = "in")]
2125 pub in_values: Option<Vec<Value>>,
2226 pub contains: Option<String>,
2727+ pub gt: Option<Value>,
2828+ pub gte: Option<Value>,
2929+ pub lt: Option<Value>,
3030+ pub lte: Option<Value>,
2331}
24322533/// Represents a complete WHERE clause with AND/OR conditions.
+298
api/src/graphql/dataloader.rs
···11+//! DataLoader implementation for batching database queries
22+//!
33+//! This module provides a DataLoader that batches multiple requests for records
44+//! into single database queries, eliminating the N+1 query problem.
55+66+use async_graphql::dataloader::{DataLoader as AsyncGraphQLDataLoader, Loader};
77+use std::collections::HashMap;
88+use std::sync::Arc;
99+1010+use crate::database::Database;
1111+use crate::models::{IndexedRecord, WhereClause, WhereCondition};
/// Batching key: identifies the set of records authored by one DID within
/// one collection of a slice.
///
/// `Hash`/`Eq` are required so the DataLoader can dedupe and cache keys.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct CollectionDidKey {
    /// AT-URI of the slice being queried.
    pub slice_uri: String,
    /// NSID of the record collection.
    pub collection: String,
    /// DID of the record author.
    pub did: String,
}
2020+2121+/// Loader for batching record queries by collection and DID
2222+pub struct CollectionDidLoader {
2323+ db: Database,
2424+}
2525+2626+impl CollectionDidLoader {
2727+ pub fn new(db: Database) -> Self {
2828+ Self { db }
2929+ }
3030+}
3131+3232+impl Loader<CollectionDidKey> for CollectionDidLoader {
3333+ type Value = Vec<IndexedRecord>;
3434+ type Error = Arc<String>;
3535+3636+ async fn load(&self, keys: &[CollectionDidKey]) -> Result<HashMap<CollectionDidKey, Self::Value>, Self::Error> {
3737+ // Group keys by slice_uri and collection for optimal batching
3838+ let mut grouped: HashMap<(String, String), Vec<String>> = HashMap::new();
3939+4040+ for key in keys {
4141+ grouped
4242+ .entry((key.slice_uri.clone(), key.collection.clone()))
4343+ .or_insert_with(Vec::new)
4444+ .push(key.did.clone());
4545+ }
4646+4747+ let mut results: HashMap<CollectionDidKey, Vec<IndexedRecord>> = HashMap::new();
4848+4949+ // Execute one query per (slice, collection) combination
5050+ for ((slice_uri, collection), dids) in grouped {
5151+ let mut where_clause = WhereClause {
5252+ conditions: HashMap::new(),
5353+ or_conditions: None,
5454+ };
5555+5656+ // Filter by collection
5757+ where_clause.conditions.insert(
5858+ "collection".to_string(),
5959+ WhereCondition {
6060+ gt: None,
6161+ gte: None,
6262+ lt: None,
6363+ lte: None,
6464+ eq: Some(serde_json::Value::String(collection.clone())),
6565+ in_values: None,
6666+ contains: None,
6767+ },
6868+ );
6969+7070+ // Filter by DIDs using IN clause for batching
7171+ where_clause.conditions.insert(
7272+ "did".to_string(),
7373+ WhereCondition {
7474+ gt: None,
7575+ gte: None,
7676+ lt: None,
7777+ lte: None,
7878+ eq: None,
7979+ in_values: Some(
8080+ dids.iter()
8181+ .map(|did| serde_json::Value::String(did.clone()))
8282+ .collect()
8383+ ),
8484+ contains: None,
8585+ },
8686+ );
8787+8888+ // Query database with no limit - load all records for batched filtering
8989+ match self.db.get_slice_collections_records(
9090+ &slice_uri,
9191+ None, // No limit - load all records for this DID
9292+ None, // cursor
9393+ None, // sort
9494+ Some(&where_clause),
9595+ ).await {
9696+ Ok((records, _cursor)) => {
9797+ // Group results by DID
9898+ for record in records {
9999+ let key = CollectionDidKey {
100100+ slice_uri: slice_uri.clone(),
101101+ collection: collection.clone(),
102102+ did: record.did.clone(),
103103+ };
104104+105105+ // Convert Record to IndexedRecord
106106+ let indexed_record = IndexedRecord {
107107+ uri: record.uri,
108108+ cid: record.cid,
109109+ did: record.did,
110110+ collection: record.collection,
111111+ value: record.json,
112112+ indexed_at: record.indexed_at.to_rfc3339(),
113113+ };
114114+115115+ results
116116+ .entry(key)
117117+ .or_insert_with(Vec::new)
118118+ .push(indexed_record);
119119+ }
120120+ }
121121+ Err(e) => {
122122+ tracing::error!("DataLoader batch query failed for {}/{}: {}", slice_uri, collection, e);
123123+ // Return empty results for failed queries rather than failing the entire batch
124124+ }
125125+ }
126126+ }
127127+128128+ // Ensure all requested keys have an entry (even if empty)
129129+ for key in keys {
130130+ results.entry(key.clone()).or_insert_with(Vec::new);
131131+ }
132132+133133+ Ok(results)
134134+ }
135135+}
/// Batching key for reverse joins: identifies the set of records in one
/// collection of a slice that reference a given parent URI through a named
/// field (e.g. records whose `"subject"` points at the parent).
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct CollectionUriKey {
    /// AT-URI of the slice being queried.
    pub slice_uri: String,
    /// NSID of the record collection.
    pub collection: String,
    /// URI of the parent record being referenced.
    pub parent_uri: String,
    /// Name of the field holding the reference (e.g. "subject").
    pub reference_field: String,
}
145145+146146+/// Loader for batching record queries by collection and parent URI
147147+/// Used for reverse joins where we need to find records that reference a parent URI
148148+pub struct CollectionUriLoader {
149149+ db: Database,
150150+}
151151+152152+impl CollectionUriLoader {
153153+ pub fn new(db: Database) -> Self {
154154+ Self { db }
155155+ }
156156+}
157157+158158+impl Loader<CollectionUriKey> for CollectionUriLoader {
159159+ type Value = Vec<IndexedRecord>;
160160+ type Error = Arc<String>;
161161+162162+ async fn load(&self, keys: &[CollectionUriKey]) -> Result<HashMap<CollectionUriKey, Self::Value>, Self::Error> {
163163+ // Group keys by (slice_uri, collection, reference_field) for optimal batching
164164+ let mut grouped: HashMap<(String, String, String), Vec<String>> = HashMap::new();
165165+166166+ for key in keys {
167167+ grouped
168168+ .entry((key.slice_uri.clone(), key.collection.clone(), key.reference_field.clone()))
169169+ .or_insert_with(Vec::new)
170170+ .push(key.parent_uri.clone());
171171+ }
172172+173173+ let mut results: HashMap<CollectionUriKey, Vec<IndexedRecord>> = HashMap::new();
174174+175175+ // Execute one query per (slice, collection, reference_field) combination
176176+ for ((slice_uri, collection, reference_field), parent_uris) in grouped {
177177+ let mut where_clause = WhereClause {
178178+ conditions: HashMap::new(),
179179+ or_conditions: None,
180180+ };
181181+182182+ // Filter by collection
183183+ where_clause.conditions.insert(
184184+ "collection".to_string(),
185185+ WhereCondition {
186186+ gt: None,
187187+ gte: None,
188188+ lt: None,
189189+ lte: None,
190190+ eq: Some(serde_json::Value::String(collection.clone())),
191191+ in_values: None,
192192+ contains: None,
193193+ },
194194+ );
195195+196196+ // Filter by parent URIs using IN clause on the reference field
197197+ // This queries: WHERE json->>'reference_field' IN (parent_uri1, parent_uri2, ...)
198198+ where_clause.conditions.insert(
199199+ reference_field.clone(),
200200+ WhereCondition {
201201+ gt: None,
202202+ gte: None,
203203+ lt: None,
204204+ lte: None,
205205+ eq: None,
206206+ in_values: Some(
207207+ parent_uris.iter()
208208+ .map(|uri| serde_json::Value::String(uri.clone()))
209209+ .collect()
210210+ ),
211211+ contains: None,
212212+ },
213213+ );
214214+215215+ // Query database with no limit - load all records for batched filtering
216216+ match self.db.get_slice_collections_records(
217217+ &slice_uri,
218218+ None, // No limit - load all records matching parent URIs
219219+ None, // cursor
220220+ None, // sort
221221+ Some(&where_clause),
222222+ ).await {
223223+ Ok((records, _cursor)) => {
224224+ // Group results by parent URI (extract from the reference field)
225225+ for record in records {
226226+ // Try to extract URI - could be plain string or strongRef object
227227+ let parent_uri = record.json.get(&reference_field).and_then(|v| {
228228+ // First try as plain string
229229+ if let Some(uri_str) = v.as_str() {
230230+ return Some(uri_str.to_string());
231231+ }
232232+ // Then try as strongRef
233233+ crate::graphql::dataloaders::extract_uri_from_strong_ref(v)
234234+ });
235235+236236+ if let Some(parent_uri) = parent_uri {
237237+ let key = CollectionUriKey {
238238+ slice_uri: slice_uri.clone(),
239239+ collection: collection.clone(),
240240+ parent_uri: parent_uri.clone(),
241241+ reference_field: reference_field.clone(),
242242+ };
243243+244244+ // Convert Record to IndexedRecord
245245+ let indexed_record = IndexedRecord {
246246+ uri: record.uri,
247247+ cid: record.cid,
248248+ did: record.did,
249249+ collection: record.collection,
250250+ value: record.json,
251251+ indexed_at: record.indexed_at.to_rfc3339(),
252252+ };
253253+254254+ results
255255+ .entry(key)
256256+ .or_insert_with(Vec::new)
257257+ .push(indexed_record);
258258+ }
259259+ }
260260+ }
261261+ Err(e) => {
262262+ tracing::error!("CollectionUriLoader batch query failed for {}/{}: {}", slice_uri, collection, e);
263263+ // Return empty results for failed queries rather than failing the entire batch
264264+ }
265265+ }
266266+ }
267267+268268+ // Ensure all requested keys have an entry (even if empty)
269269+ for key in keys {
270270+ results.entry(key.clone()).or_insert_with(Vec::new);
271271+ }
272272+273273+ Ok(results)
274274+ }
275275+}
276276+277277+/// Context data that includes the DataLoader
278278+#[derive(Clone)]
279279+pub struct GraphQLContext {
280280+ #[allow(dead_code)]
281281+ pub collection_did_loader: Arc<AsyncGraphQLDataLoader<CollectionDidLoader>>,
282282+ pub collection_uri_loader: Arc<AsyncGraphQLDataLoader<CollectionUriLoader>>,
283283+}
284284+285285+impl GraphQLContext {
286286+ pub fn new(db: Database) -> Self {
287287+ Self {
288288+ collection_did_loader: Arc::new(AsyncGraphQLDataLoader::new(
289289+ CollectionDidLoader::new(db.clone()),
290290+ tokio::spawn
291291+ )),
292292+ collection_uri_loader: Arc::new(AsyncGraphQLDataLoader::new(
293293+ CollectionUriLoader::new(db),
294294+ tokio::spawn
295295+ )),
296296+ }
297297+ }
298298+}
+24-5
api/src/graphql/handler.rs
···15151616use crate::errors::AppError;
1717use crate::AppState;
1818+use crate::graphql::GraphQLContext;
18191920/// Global schema cache (one schema per slice)
2021/// This prevents rebuilding the schema on every request
···6566 }
6667 };
67686868- Ok(schema.execute(req.into_inner()).await.into())
6969+ // Create GraphQL context with DataLoader
7070+ let gql_context = GraphQLContext::new(state.database.clone());
7171+7272+ // Execute query with context
7373+ Ok(schema
7474+ .execute(req.into_inner().data(gql_context))
7575+ .await
7676+ .into())
6977}
70787179/// GraphiQL UI handler
···198206 }
199207 };
200208209209+ // Create GraphQL context with DataLoader
210210+ let gql_context = GraphQLContext::new(state.database.clone());
211211+201212 // Upgrade to WebSocket and handle GraphQL subscriptions manually
202213 Ok(ws
203214 .protocols(["graphql-transport-ws", "graphql-ws"])
204204- .on_upgrade(move |socket| handle_graphql_ws(socket, schema)))
215215+ .on_upgrade(move |socket| handle_graphql_ws(socket, schema, gql_context)))
205216}
206217207218/// Handle GraphQL WebSocket connection
208208-async fn handle_graphql_ws(socket: WebSocket, schema: Schema) {
219219+async fn handle_graphql_ws(socket: WebSocket, schema: Schema, gql_context: GraphQLContext) {
209220 let (ws_sender, ws_receiver) = socket.split();
210221211222 // Convert axum WebSocket messages to strings for async-graphql
···216227 })
217228 });
218229219219- // Create GraphQL WebSocket handler
220220- let mut stream = GraphQLWebSocket::new(schema, input, WebSocketProtocols::GraphQLWS);
230230+ // Create GraphQL WebSocket handler with context
231231+ let mut stream = GraphQLWebSocket::new(schema.clone(), input, WebSocketProtocols::GraphQLWS)
232232+ .on_connection_init(move |_| {
233233+ let gql_ctx = gql_context.clone();
234234+ async move {
235235+ let mut data = async_graphql::Data::default();
236236+ data.insert(gql_ctx);
237237+ Ok(data)
238238+ }
239239+ });
221240222241 // Send GraphQL messages back through WebSocket
223242 let mut ws_sender = ws_sender;
+2
api/src/graphql/mod.rs
···5566mod schema_builder;
77mod dataloaders;
88+mod dataloader;
89mod types;
910pub mod handler;
1011pub mod pubsub;
···1213pub use schema_builder::build_graphql_schema;
1314pub use handler::{graphql_handler, graphql_playground, graphql_subscription_handler};
1415pub use pubsub::{RecordUpdateEvent, RecordOperation, PUBSUB};
1616+pub use dataloader::GraphQLContext;
+658-143
api/src/graphql/schema_builder.rs
···1515use crate::database::Database;
1616use crate::graphql::types::{extract_collection_fields, extract_record_key, GraphQLField, GraphQLType};
1717use crate::graphql::PUBSUB;
1818+use crate::graphql::dataloader::GraphQLContext;
18191920/// Metadata about a collection for cross-referencing
2021#[derive(Clone)]
···2223 nsid: String,
2324 key_type: String, // "tid", "literal:self", or "any"
2425 type_name: String, // GraphQL type name for this collection
2626+ at_uri_fields: Vec<String>, // Fields with format "at-uri" for reverse joins
2527}
26282729/// Builds a dynamic GraphQL schema from lexicons for a given slice
···5254 // Build Query root type and collect all object types
5355 let mut query = Object::new("Query");
5456 let mut objects_to_register = Vec::new();
5757+ let mut where_inputs_to_register = Vec::new();
5858+ let mut group_by_enums_to_register = Vec::new();
55595660 // First pass: collect metadata about all collections for cross-referencing
5761 let mut all_collections: Vec<CollectionMeta> = Vec::new();
···6872 let fields = extract_collection_fields(defs);
6973 if !fields.is_empty() {
7074 if let Some(key_type) = extract_record_key(defs) {
7575+ // Extract at-uri field names for reverse joins
7676+ let at_uri_fields: Vec<String> = fields.iter()
7777+ .filter(|f| f.format.as_deref() == Some("at-uri"))
7878+ .map(|f| f.name.clone())
7979+ .collect();
8080+8181+ if !at_uri_fields.is_empty() {
8282+ tracing::debug!(
8383+ "Collection {} has at-uri fields: {:?}",
8484+ nsid,
8585+ at_uri_fields
8686+ );
8787+ }
8888+7189 all_collections.push(CollectionMeta {
7290 nsid: nsid.to_string(),
7391 key_type,
7492 type_name: nsid_to_type_name(nsid),
9393+ at_uri_fields,
7594 });
7695 }
7796 }
···102121 let edge_type = create_edge_type(&type_name);
103122 let connection_type = create_connection_type(&type_name);
104123124124+ // Create WhereInput type for this collection
125125+ let mut where_input = InputObject::new(format!("{}WhereInput", type_name));
126126+127127+ // Collect lexicon field names to avoid duplicates
128128+ let lexicon_field_names: std::collections::HashSet<&str> =
129129+ fields.iter().map(|f| f.name.as_str()).collect();
130130+131131+ // Add system fields available on all records (skip if already in lexicon)
132132+ let system_fields = [
133133+ ("indexedAt", "DateTimeFilter"),
134134+ ("uri", "StringFilter"),
135135+ ("cid", "StringFilter"),
136136+ ("did", "StringFilter"),
137137+ ("collection", "StringFilter"),
138138+ ("actorHandle", "StringFilter"),
139139+ ];
140140+141141+ for (field_name, filter_type) in system_fields {
142142+ if !lexicon_field_names.contains(field_name) {
143143+ where_input = where_input.field(InputValue::new(field_name, TypeRef::named(filter_type)));
144144+ }
145145+ }
146146+147147+ // Add fields from the lexicon
148148+ for field in &fields {
149149+ let filter_type = match field.field_type {
150150+ GraphQLType::Int => "IntFilter",
151151+ _ => "StringFilter", // Default to StringFilter for strings and other types
152152+ };
153153+ where_input = where_input.field(InputValue::new(&field.name, TypeRef::named(filter_type)));
154154+ }
155155+156156+ // Create GroupByField enum for this collection
157157+ let mut group_by_enum = Enum::new(format!("{}GroupByField", type_name));
158158+ group_by_enum = group_by_enum.item(EnumItem::new("indexedAt"));
159159+160160+ for field in &fields {
161161+ group_by_enum = group_by_enum.item(EnumItem::new(&field.name));
162162+ }
163163+164164+ // Create collection-specific GroupByFieldInput
165165+ let group_by_input = InputObject::new(format!("{}GroupByFieldInput", type_name))
166166+ .field(InputValue::new("field", TypeRef::named_nn(format!("{}GroupByField", type_name))))
167167+ .field(InputValue::new("interval", TypeRef::named("DateInterval")));
168168+169169+ // Create collection-specific SortFieldInput
170170+ let sort_field_input = InputObject::new(format!("{}SortFieldInput", type_name))
171171+ .field(InputValue::new("field", TypeRef::named_nn(format!("{}GroupByField", type_name))))
172172+ .field(InputValue::new("direction", TypeRef::named("SortDirection")));
173173+105174 // Collect the types to register with schema later
106175 objects_to_register.push(record_type);
107176 objects_to_register.push(edge_type);
108177 objects_to_register.push(connection_type);
178178+ where_inputs_to_register.push(where_input);
179179+ where_inputs_to_register.push(group_by_input);
180180+ where_inputs_to_register.push(sort_field_input);
181181+ group_by_enums_to_register.push(group_by_enum);
109182110183 // Add query field for this collection
111184 let collection_query_name = nsid_to_query_name(nsid);
···143216 for item in list.iter() {
144217 if let Ok(obj) = item.object() {
145218 let field = obj.get("field")
146146- .and_then(|v| v.string().ok())
147147- .unwrap_or("indexedAt")
148148- .to_string();
219219+ .and_then(|v| v.enum_name().ok().map(|s| s.to_string()))
220220+ .unwrap_or_else(|| "indexedAt".to_string());
149221 let direction = obj.get("direction")
150150- .and_then(|v| v.string().ok())
151151- .unwrap_or("desc")
152152- .to_string();
222222+ .and_then(|v| v.enum_name().ok().map(|s| s.to_string()))
223223+ .unwrap_or_else(|| "desc".to_string());
153224 sort_fields.push(crate::models::SortField { field, direction });
154225 }
155226 }
···171242 where_clause.conditions.insert(
172243 "collection".to_string(),
173244 crate::models::WhereCondition {
245245+ gt: None,
246246+ gte: None,
247247+ lt: None,
248248+ lte: None,
174249 eq: Some(serde_json::Value::String(collection.clone())),
175250 in_values: None,
176251 contains: None,
···184259 for (field_name, condition_val) in where_obj.iter() {
185260 if let Ok(condition_obj) = condition_val.object() {
186261 let mut where_condition = crate::models::WhereCondition {
262262+ gt: None,
263263+ gte: None,
264264+ lt: None,
265265+ lte: None,
187266 eq: None,
188267 in_values: None,
189268 contains: None,
···220299 }
221300 }
222301223223- where_clause.conditions.insert(field_name.to_string(), where_condition);
302302+ // Parse gt condition
303303+ if let Some(gt_val) = condition_obj.get("gt") {
304304+ if let Ok(gt_str) = gt_val.string() {
305305+ where_condition.gt = Some(serde_json::Value::String(gt_str.to_string()));
306306+ } else if let Ok(gt_i64) = gt_val.i64() {
307307+ where_condition.gt = Some(serde_json::Value::Number(gt_i64.into()));
308308+ }
309309+ }
310310+311311+ // Parse gte condition
312312+ if let Some(gte_val) = condition_obj.get("gte") {
313313+ if let Ok(gte_str) = gte_val.string() {
314314+ where_condition.gte = Some(serde_json::Value::String(gte_str.to_string()));
315315+ } else if let Ok(gte_i64) = gte_val.i64() {
316316+ where_condition.gte = Some(serde_json::Value::Number(gte_i64.into()));
317317+ }
318318+ }
319319+320320+ // Parse lt condition
321321+ if let Some(lt_val) = condition_obj.get("lt") {
322322+ if let Ok(lt_str) = lt_val.string() {
323323+ where_condition.lt = Some(serde_json::Value::String(lt_str.to_string()));
324324+ } else if let Ok(lt_i64) = lt_val.i64() {
325325+ where_condition.lt = Some(serde_json::Value::Number(lt_i64.into()));
326326+ }
327327+ }
328328+329329+ // Parse lte condition
330330+ if let Some(lte_val) = condition_obj.get("lte") {
331331+ if let Ok(lte_str) = lte_val.string() {
332332+ where_condition.lte = Some(serde_json::Value::String(lte_str.to_string()));
333333+ } else if let Ok(lte_i64) = lte_val.i64() {
334334+ where_condition.lte = Some(serde_json::Value::Number(lte_i64.into()));
335335+ }
336336+ }
337337+338338+ // Convert indexedAt to indexed_at for database column
339339+ let db_field_name = if field_name == "indexedAt" {
340340+ "indexed_at".to_string()
341341+ } else {
342342+ field_name.to_string()
343343+ };
344344+345345+ where_clause.conditions.insert(db_field_name, where_condition);
224346 }
225347 }
226348 }
···251373 // Replace actorHandle condition with did condition
252374 let did_condition = if dids.len() == 1 {
253375 crate::models::WhereCondition {
376376+ gt: None,
377377+ gte: None,
378378+ lt: None,
379379+ lte: None,
254380 eq: Some(serde_json::Value::String(dids[0].clone())),
255381 in_values: None,
256382 contains: None,
257383 }
258384 } else {
259385 crate::models::WhereCondition {
386386+ gt: None,
387387+ gte: None,
388388+ lt: None,
389389+ lte: None,
260390 eq: None,
261391 in_values: Some(dids.into_iter().map(|d| serde_json::Value::String(d)).collect()),
262392 contains: None,
···344474 ))
345475 .argument(async_graphql::dynamic::InputValue::new(
346476 "sortBy",
347347- TypeRef::named_list("SortField"),
477477+ TypeRef::named_list(format!("{}SortFieldInput", type_name)),
348478 ))
349479 .argument(async_graphql::dynamic::InputValue::new(
350480 "where",
351351- TypeRef::named("JSON"),
481481+ TypeRef::named(format!("{}WhereInput", type_name)),
352482 ))
353483 .description(format!("Query {} records", nsid)),
354484 );
···376506377507 FieldFuture::new(async move {
378508 // Parse groupBy argument
379379- let group_by_fields: Vec<String> = match ctx.args.get("groupBy") {
509509+ let group_by_fields: Vec<crate::models::GroupByField> = match ctx.args.get("groupBy") {
380510 Some(val) => {
381511 if let Ok(list) = val.list() {
382382- list.iter()
383383- .filter_map(|v| v.string().ok().map(|s| s.to_string()))
384384- .collect()
512512+ let mut fields = Vec::new();
513513+ for item in list.iter() {
514514+ if let Ok(obj) = item.object() {
515515+ // Get field name from enum
516516+ let field_name = obj.get("field")
517517+ .and_then(|v| v.enum_name().ok().map(|s| s.to_string()))
518518+ .ok_or_else(|| Error::new("Missing field name in groupBy"))?;
519519+520520+ // Get optional interval
521521+ if let Some(interval_val) = obj.get("interval") {
522522+ if let Ok(interval_str) = interval_val.enum_name() {
523523+ // Parse interval string to DateInterval
524524+ let interval = match interval_str {
525525+ "second" => crate::models::DateInterval::Second,
526526+ "minute" => crate::models::DateInterval::Minute,
527527+ "hour" => crate::models::DateInterval::Hour,
528528+ "day" => crate::models::DateInterval::Day,
529529+ "week" => crate::models::DateInterval::Week,
530530+ "month" => crate::models::DateInterval::Month,
531531+ "quarter" => crate::models::DateInterval::Quarter,
532532+ "year" => crate::models::DateInterval::Year,
533533+ _ => return Err(Error::new(format!("Invalid interval: {}", interval_str))),
534534+ };
535535+ fields.push(crate::models::GroupByField::Truncated {
536536+ field: field_name,
537537+ interval,
538538+ });
539539+ } else {
540540+ return Err(Error::new("Invalid interval value"));
541541+ }
542542+ } else {
543543+ // No interval, simple field
544544+ fields.push(crate::models::GroupByField::Simple(field_name));
545545+ }
546546+ } else {
547547+ return Err(Error::new("Invalid groupBy item"));
548548+ }
549549+ }
550550+ fields
385551 } else {
386552 Vec::new()
387553 }
···423589 where_clause.conditions.insert(
424590 "collection".to_string(),
425591 crate::models::WhereCondition {
592592+ gt: None,
593593+ gte: None,
594594+ lt: None,
595595+ lte: None,
426596 eq: Some(serde_json::Value::String(collection.clone())),
427597 in_values: None,
428598 contains: None,
···435605 for (field_name, condition_val) in where_obj.iter() {
436606 if let Ok(condition_obj) = condition_val.object() {
437607 let mut where_condition = crate::models::WhereCondition {
608608+ gt: None,
609609+ gte: None,
610610+ lt: None,
611611+ lte: None,
438612 eq: None,
439613 in_values: None,
440614 contains: None,
···471645 }
472646 }
473647474474- where_clause.conditions.insert(field_name.to_string(), where_condition);
648648+ // Parse gt condition
649649+ if let Some(gt_val) = condition_obj.get("gt") {
650650+ if let Ok(gt_str) = gt_val.string() {
651651+ where_condition.gt = Some(serde_json::Value::String(gt_str.to_string()));
652652+ } else if let Ok(gt_i64) = gt_val.i64() {
653653+ where_condition.gt = Some(serde_json::Value::Number(gt_i64.into()));
654654+ }
655655+ }
656656+657657+ // Parse gte condition
658658+ if let Some(gte_val) = condition_obj.get("gte") {
659659+ if let Ok(gte_str) = gte_val.string() {
660660+ where_condition.gte = Some(serde_json::Value::String(gte_str.to_string()));
661661+ } else if let Ok(gte_i64) = gte_val.i64() {
662662+ where_condition.gte = Some(serde_json::Value::Number(gte_i64.into()));
663663+ }
664664+ }
665665+666666+ // Parse lt condition
667667+ if let Some(lt_val) = condition_obj.get("lt") {
668668+ if let Ok(lt_str) = lt_val.string() {
669669+ where_condition.lt = Some(serde_json::Value::String(lt_str.to_string()));
670670+ } else if let Ok(lt_i64) = lt_val.i64() {
671671+ where_condition.lt = Some(serde_json::Value::Number(lt_i64.into()));
672672+ }
673673+ }
674674+675675+ // Parse lte condition
676676+ if let Some(lte_val) = condition_obj.get("lte") {
677677+ if let Ok(lte_str) = lte_val.string() {
678678+ where_condition.lte = Some(serde_json::Value::String(lte_str.to_string()));
679679+ } else if let Ok(lte_i64) = lte_val.i64() {
680680+ where_condition.lte = Some(serde_json::Value::Number(lte_i64.into()));
681681+ }
682682+ }
683683+684684+ // Convert indexedAt to indexed_at for database column
685685+ let db_field_name = if field_name == "indexedAt" {
686686+ "indexed_at".to_string()
687687+ } else {
688688+ field_name.to_string()
689689+ };
690690+691691+ where_clause.conditions.insert(db_field_name, where_condition);
475692 }
476693 }
477694 }
···499716 if !dids.is_empty() {
500717 let did_condition = if dids.len() == 1 {
501718 crate::models::WhereCondition {
719719+ gt: None,
720720+ gte: None,
721721+ lt: None,
722722+ lte: None,
502723 eq: Some(serde_json::Value::String(dids[0].clone())),
503724 in_values: None,
504725 contains: None,
505726 }
506727 } else {
507728 crate::models::WhereCondition {
729729+ gt: None,
730730+ gte: None,
731731+ lt: None,
732732+ lte: None,
508733 eq: None,
509734 in_values: Some(dids.into_iter().map(|d| serde_json::Value::String(d)).collect()),
510735 contains: None,
···546771 )
547772 .argument(async_graphql::dynamic::InputValue::new(
548773 "groupBy",
549549- TypeRef::named_nn_list_nn(TypeRef::STRING),
774774+ TypeRef::named_nn_list(format!("{}GroupByFieldInput", type_name)),
550775 ))
551776 .argument(async_graphql::dynamic::InputValue::new(
552777 "where",
553553- TypeRef::named("JSON"),
778778+ TypeRef::named(format!("{}WhereInput", type_name)),
554779 ))
555780 .argument(async_graphql::dynamic::InputValue::new(
556781 "orderBy",
···571796 // Build Subscription type with collection-specific subscriptions
572797 let subscription = create_subscription_type(slice_uri.clone(), &lexicons);
573798574574- // Build and return the schema
799799+ // Build and return the schema with complexity limits
575800 let mut schema_builder = Schema::build(query.type_name(), Some(mutation.type_name()), Some(subscription.type_name()))
576801 .register(query)
577802 .register(mutation)
578578- .register(subscription);
803803+ .register(subscription)
804804+ .limit_depth(50) // Higher limit to support GraphiQL introspection with reverse joins
805805+ .limit_complexity(5000); // Prevent expensive deeply nested queries
579806580807 // Register JSON scalar type for complex fields
581808 let json_scalar = Scalar::new("JSON");
582809 schema_builder = schema_builder.register(json_scalar);
583810811811+ // Register filter input types for WHERE clauses
812812+ let string_filter = InputObject::new("StringFilter")
813813+ .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING)))
814814+ .field(InputValue::new("in", TypeRef::named_list(TypeRef::STRING)))
815815+ .field(InputValue::new("contains", TypeRef::named(TypeRef::STRING)))
816816+ .field(InputValue::new("gt", TypeRef::named(TypeRef::STRING)))
817817+ .field(InputValue::new("gte", TypeRef::named(TypeRef::STRING)))
818818+ .field(InputValue::new("lt", TypeRef::named(TypeRef::STRING)))
819819+ .field(InputValue::new("lte", TypeRef::named(TypeRef::STRING)));
820820+ schema_builder = schema_builder.register(string_filter);
821821+822822+ let int_filter = InputObject::new("IntFilter")
823823+ .field(InputValue::new("eq", TypeRef::named(TypeRef::INT)))
824824+ .field(InputValue::new("in", TypeRef::named_list(TypeRef::INT)))
825825+ .field(InputValue::new("gt", TypeRef::named(TypeRef::INT)))
826826+ .field(InputValue::new("gte", TypeRef::named(TypeRef::INT)))
827827+ .field(InputValue::new("lt", TypeRef::named(TypeRef::INT)))
828828+ .field(InputValue::new("lte", TypeRef::named(TypeRef::INT)));
829829+ schema_builder = schema_builder.register(int_filter);
830830+831831+ let datetime_filter = InputObject::new("DateTimeFilter")
832832+ .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING)))
833833+ .field(InputValue::new("gt", TypeRef::named(TypeRef::STRING)))
834834+ .field(InputValue::new("gte", TypeRef::named(TypeRef::STRING)))
835835+ .field(InputValue::new("lt", TypeRef::named(TypeRef::STRING)))
836836+ .field(InputValue::new("lte", TypeRef::named(TypeRef::STRING)));
837837+ schema_builder = schema_builder.register(datetime_filter);
838838+584839 // Register Blob type
585840 let blob_type = create_blob_type();
586841 schema_builder = schema_builder.register(blob_type);
···608863 let aggregation_order_by_input = create_aggregation_order_by_input();
609864 schema_builder = schema_builder.register(aggregation_order_by_input);
610865866866+ // Register DateInterval enum for date truncation
867867+ let date_interval_enum = create_date_interval_enum();
868868+ schema_builder = schema_builder.register(date_interval_enum);
869869+611870 // Register PageInfo type
612871 let page_info_type = create_page_info_type();
613872 schema_builder = schema_builder.register(page_info_type);
···621880 schema_builder = schema_builder.register(obj);
622881 }
623882883883+ // Register all WhereInput types
884884+ for where_input in where_inputs_to_register {
885885+ schema_builder = schema_builder.register(where_input);
886886+ }
887887+888888+ // Register all GroupByField enums
889889+ for group_by_enum in group_by_enums_to_register {
890890+ schema_builder = schema_builder.register(group_by_enum);
891891+ }
892892+624893 schema_builder
625894 .finish()
626895 .map_err(|e| format!("Schema build error: {:?}", e))
···720989 where_clause.conditions.insert(
721990 "did".to_string(),
722991 crate::models::WhereCondition {
992992+ gt: None,
993993+ gte: None,
994994+ lt: None,
995995+ lte: None,
723996 eq: Some(serde_json::Value::String(did.clone())),
724997 in_values: None,
725998 contains: None,
···7681041 if let Some(val) = value {
7691042 // Check for explicit null value
7701043 if val.is_null() {
771771- return Ok(Some(FieldValue::NULL));
10441044+ return Ok(None);
10451045+ }
10461046+10471047+ // Check if this is an array of blobs
10481048+ if let GraphQLType::Array(inner) = &field_type {
10491049+ if matches!(inner.as_ref(), GraphQLType::Blob) {
10501050+ if let Some(arr) = val.as_array() {
10511051+ let blob_containers: Vec<FieldValue> = arr
10521052+ .iter()
10531053+ .filter_map(|blob_val| {
10541054+ let obj = blob_val.as_object()?;
10551055+ let blob_ref = obj
10561056+ .get("ref")
10571057+ .and_then(|r| r.as_object())
10581058+ .and_then(|r| r.get("$link"))
10591059+ .and_then(|l| l.as_str())
10601060+ .unwrap_or("")
10611061+ .to_string();
10621062+10631063+ let mime_type = obj
10641064+ .get("mimeType")
10651065+ .and_then(|m| m.as_str())
10661066+ .unwrap_or("image/jpeg")
10671067+ .to_string();
10681068+10691069+ let size = obj
10701070+ .get("size")
10711071+ .and_then(|s| s.as_i64())
10721072+ .unwrap_or(0);
10731073+10741074+ let blob_container = BlobContainer {
10751075+ blob_ref,
10761076+ mime_type,
10771077+ size,
10781078+ did: container.record.did.clone(),
10791079+ };
10801080+10811081+ Some(FieldValue::owned_any(blob_container))
10821082+ })
10831083+ .collect();
10841084+10851085+ return Ok(Some(FieldValue::list(blob_containers)));
10861086+ }
10871087+10881088+ // If not a proper array, return empty list
10891089+ return Ok(Some(FieldValue::list(Vec::<FieldValue>::new())));
10901090+ }
7721091 }
77310927741093 // Check if this is a blob field
···8041123 return Ok(Some(FieldValue::owned_any(blob_container)));
8051124 }
8061125807807- // If not a proper blob object, return NULL
808808- return Ok(Some(FieldValue::NULL));
11261126+ // If not a proper blob object, return None (field is null)
11271127+ return Ok(None);
8091128 }
81011298111130 // Check if this is a reference field that needs joining
···8271146 return Ok(Some(FieldValue::value(graphql_val)));
8281147 }
8291148 Ok(None) => {
830830- return Ok(Some(FieldValue::NULL));
11491149+ return Ok(None);
8311150 }
8321151 Err(e) => {
8331152 tracing::error!("Error fetching linked record: {}", e);
834834- return Ok(Some(FieldValue::NULL));
11531153+ return Ok(None);
8351154 }
8361155 }
8371156 }
···8411160 let graphql_val = json_to_graphql_value(val);
8421161 Ok(Some(FieldValue::value(graphql_val)))
8431162 } else {
844844- Ok(Some(FieldValue::NULL))
11631163+ Ok(None)
8451164 }
8461165 })
8471166 }));
8481167 }
8491168850850- // Add join fields for cross-referencing other collections by DID
11691169+ // Add join fields for cross-referencing other collections
8511170 for collection in all_collections {
8521171 let field_name = nsid_to_join_field_name(&collection.nsid);
8531172···8561175 continue;
8571176 }
858117711781178+ // Collect all string fields with format "at-uri" that might reference this collection
11791179+ // We'll check each one at runtime to see if it contains a URI to this collection
11801180+ let uri_ref_fields: Vec<_> = fields.iter()
11811181+ .filter(|f| matches!(f.format.as_deref(), Some("at-uri")))
11821182+ .collect();
11831183+8591184 let collection_nsid = collection.nsid.clone();
8601185 let key_type = collection.key_type.clone();
8611186 let db_for_join = database.clone();
862862- let slice_for_join = slice_uri.clone();
11871187+11881188+ // If we found at-uri fields, create a resolver that checks each one at runtime
11891189+ if !uri_ref_fields.is_empty() {
11901190+ let ref_field_names: Vec<String> = uri_ref_fields.iter().map(|f| f.name.clone()).collect();
11911191+ let db_for_uri_join = database.clone();
11921192+ let target_collection = collection_nsid.clone();
11931193+11941194+ object = object.field(Field::new(
11951195+ &field_name,
11961196+ TypeRef::named(&collection.type_name),
11971197+ move |ctx| {
11981198+ let db = db_for_uri_join.clone();
11991199+ let field_names = ref_field_names.clone();
12001200+ let expected_collection = target_collection.clone();
12011201+ FieldFuture::new(async move {
12021202+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
12031203+12041204+ // Try each at-uri field to find one that references this collection
12051205+ for field_name in &field_names {
12061206+ if let Some(uri_value) = container.record.value.get(field_name) {
12071207+ if let Some(uri) = uri_value.as_str() {
12081208+ // Check if the URI is for the expected collection
12091209+ if uri.contains(&format!("/{}/", expected_collection)) {
12101210+ // Fetch the record at this URI
12111211+ match db.get_record(uri).await {
12121212+ Ok(Some(record)) => {
12131213+ let new_container = RecordContainer { record };
12141214+ return Ok(Some(FieldValue::owned_any(new_container)));
12151215+ }
12161216+ Ok(None) => continue, // Try next field
12171217+ Err(_) => continue, // Try next field
12181218+ }
12191219+ }
12201220+ }
12211221+ }
12221222+ }
12231223+ // No matching URI found in any field
12241224+ Ok(None)
12251225+ })
12261226+ },
12271227+ ));
12281228+ continue; // Skip the normal DID-based join logic
12291229+ }
86312308641231 // Determine type and resolver based on key_type
8651232 match key_type.as_str() {
···8931260 ));
8941261 }
8951262 "tid" | "any" => {
896896- // Multiple records per DID - return array of the collection's type
897897- object = object.field(
12631263+ // Skip - these are handled as plural reverse joins below with URI filtering
12641264+ continue;
12651265+12661266+ // Multiple records per DID - return array of the collection's type (DISABLED)
12671267+ /*object = object.field(
8981268 Field::new(
8991269 &field_name,
9001270 TypeRef::named_nn_list_nn(&collection.type_name),
9011271 move |ctx| {
902902- let db = db_for_join.clone();
9031272 let nsid = collection_nsid.clone();
9041273 let slice = slice_for_join.clone();
12741274+ let db_fallback = db_for_join.clone();
9051275 FieldFuture::new(async move {
9061276 let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
9071277 let did = &container.record.did;
···9091279 // Get limit from argument, default to 50
9101280 let limit = ctx.args.get("limit")
9111281 .and_then(|v| v.i64().ok())
912912- .map(|i| i as i32)
12821282+ .map(|i| i as usize)
9131283 .unwrap_or(50)
9141284 .min(100); // Cap at 100 to prevent abuse
9151285916916- // Build where clause to find all records of this collection for this DID
917917- let mut where_clause = crate::models::WhereClause {
918918- conditions: HashMap::new(),
919919- or_conditions: None,
920920- };
921921- where_clause.conditions.insert(
922922- "collection".to_string(),
923923- crate::models::WhereCondition {
924924- eq: Some(serde_json::Value::String(nsid.clone())),
925925- in_values: None,
926926- contains: None,
927927- },
928928- );
929929- where_clause.conditions.insert(
930930- "did".to_string(),
931931- crate::models::WhereCondition {
932932- eq: Some(serde_json::Value::String(did.clone())),
933933- in_values: None,
934934- contains: None,
935935- },
936936- );
12861286+ // Try to get DataLoader from context
12871287+ if let Some(gql_ctx) = ctx.data_opt::<GraphQLContext>() {
12881288+ // Use DataLoader for batched loading
12891289+ let key = CollectionDidKey {
12901290+ slice_uri: slice.clone(),
12911291+ collection: nsid.clone(),
12921292+ did: did.clone(),
12931293+ };
12941294+12951295+ match gql_ctx.collection_did_loader.load_one(key).await {
12961296+ Ok(Some(mut records)) => {
12971297+ // Apply limit after loading
12981298+ records.truncate(limit);
9371299938938- match db.get_slice_collections_records(
939939- &slice,
940940- Some(limit),
941941- None, // cursor
942942- None, // sort
943943- Some(&where_clause),
944944- ).await {
945945- Ok((records, _cursor)) => {
946946- let values: Vec<FieldValue> = records
947947- .into_iter()
948948- .map(|record| {
949949- // Convert Record to IndexedRecord
950950- let indexed_record = crate::models::IndexedRecord {
951951- uri: record.uri,
952952- cid: record.cid,
953953- did: record.did,
954954- collection: record.collection,
955955- value: record.json,
956956- indexed_at: record.indexed_at.to_rfc3339(),
957957- };
958958- let container = RecordContainer {
959959- record: indexed_record,
960960- };
961961- FieldValue::owned_any(container)
962962- })
963963- .collect();
964964- Ok(Some(FieldValue::list(values)))
13001300+ let values: Vec<FieldValue> = records
13011301+ .into_iter()
13021302+ .map(|indexed_record| {
13031303+ let container = RecordContainer {
13041304+ record: indexed_record,
13051305+ };
13061306+ FieldValue::owned_any(container)
13071307+ })
13081308+ .collect();
13091309+ Ok(Some(FieldValue::list(values)))
13101310+ }
13111311+ Ok(None) => {
13121312+ Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
13131313+ }
13141314+ Err(e) => {
13151315+ tracing::debug!("DataLoader error for {}: {:?}", nsid, e);
13161316+ Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
13171317+ }
9651318 }
966966- Err(e) => {
967967- tracing::debug!("Error querying {}: {}", nsid, e);
968968- Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
13191319+ } else {
13201320+ // Fallback to direct database query if DataLoader not available
13211321+ let db = db_fallback.clone();
13221322+ let mut where_clause = crate::models::WhereClause {
13231323+ conditions: HashMap::new(),
13241324+ or_conditions: None,
13251325+ };
13261326+ where_clause.conditions.insert(
13271327+ "collection".to_string(),
13281328+ crate::models::WhereCondition {
13291329+ gt: None,
13301330+ gte: None,
13311331+ lt: None,
13321332+ lte: None,
13331333+ eq: Some(serde_json::Value::String(nsid.clone())),
13341334+ in_values: None,
13351335+ contains: None,
13361336+ },
13371337+ );
13381338+ where_clause.conditions.insert(
13391339+ "did".to_string(),
13401340+ crate::models::WhereCondition {
13411341+ gt: None,
13421342+ gte: None,
13431343+ lt: None,
13441344+ lte: None,
13451345+ eq: Some(serde_json::Value::String(did.clone())),
13461346+ in_values: None,
13471347+ contains: None,
13481348+ },
13491349+ );
13501350+13511351+ match db.get_slice_collections_records(
13521352+ &slice,
13531353+ Some(limit as i32),
13541354+ None, // cursor
13551355+ None, // sort
13561356+ Some(&where_clause),
13571357+ ).await {
13581358+ Ok((records, _cursor)) => {
13591359+ let values: Vec<FieldValue> = records
13601360+ .into_iter()
13611361+ .map(|record| {
13621362+ let indexed_record = crate::models::IndexedRecord {
13631363+ uri: record.uri,
13641364+ cid: record.cid,
13651365+ did: record.did,
13661366+ collection: record.collection,
13671367+ value: record.json,
13681368+ indexed_at: record.indexed_at.to_rfc3339(),
13691369+ };
13701370+ let container = RecordContainer {
13711371+ record: indexed_record,
13721372+ };
13731373+ FieldValue::owned_any(container)
13741374+ })
13751375+ .collect();
13761376+ Ok(Some(FieldValue::list(values)))
13771377+ }
13781378+ Err(e) => {
13791379+ tracing::debug!("Error querying {}: {}", nsid, e);
13801380+ Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
13811381+ }
9691382 }
9701383 }
9711384 })
···9751388 "limit",
9761389 TypeRef::named(TypeRef::INT),
9771390 ))
978978- );
13911391+ );*/
9791392 }
9801393 _ => {
9811394 // Unknown key type, skip
···9881401 // This enables bidirectional traversal (e.g., profile.plays and play.profile)
9891402 for collection in all_collections {
9901403 let reverse_field_name = format!("{}s", nsid_to_join_field_name(&collection.nsid));
991991- let db_for_reverse = database.clone();
9921404 let slice_for_reverse = slice_uri.clone();
9931405 let collection_nsid = collection.nsid.clone();
9941406 let collection_type = collection.type_name.clone();
14071407+ let at_uri_fields = collection.at_uri_fields.clone();
99514089961409 object = object.field(
9971410 Field::new(
9981411 &reverse_field_name,
9991412 TypeRef::named_nn_list_nn(&collection_type),
10001413 move |ctx| {
10011001- let db = db_for_reverse.clone();
10021414 let slice = slice_for_reverse.clone();
10031415 let nsid = collection_nsid.clone();
14161416+ let ref_fields = at_uri_fields.clone();
10041417 FieldFuture::new(async move {
10051418 let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
10061006- let did = &container.record.did;
1007141910081420 // Get limit from argument, default to 50
10091421 let limit = ctx.args.get("limit")
10101422 .and_then(|v| v.i64().ok())
10111011- .map(|i| i as i32)
14231423+ .map(|i| i as usize)
10121424 .unwrap_or(50)
10131425 .min(100); // Cap at 100 to prevent abuse
1014142610151015- // Build where clause to find all records of this collection for this DID
10161016- let mut where_clause = crate::models::WhereClause {
10171017- conditions: HashMap::new(),
10181018- or_conditions: None,
10191019- };
10201020- where_clause.conditions.insert(
10211021- "collection".to_string(),
10221022- crate::models::WhereCondition {
10231023- eq: Some(serde_json::Value::String(nsid.clone())),
10241024- in_values: None,
10251025- contains: None,
10261026- },
10271027- );
10281028- where_clause.conditions.insert(
10291029- "did".to_string(),
10301030- crate::models::WhereCondition {
10311031- eq: Some(serde_json::Value::String(did.clone())),
10321032- in_values: None,
10331033- contains: None,
10341034- },
10351035- );
14271427+ // Try to get DataLoader from context
14281428+ if let Some(gql_ctx) = ctx.data_opt::<GraphQLContext>() {
14291429+ let parent_uri = &container.record.uri;
14301430+14311431+ // Try each at-uri field from the lexicon
14321432+ tracing::debug!(
14331433+ "Trying reverse join for {} with at-uri fields: {:?}",
14341434+ nsid,
14351435+ ref_fields
14361436+ );
14371437+14381438+ for ref_field in &ref_fields {
14391439+ let key = crate::graphql::dataloader::CollectionUriKey {
14401440+ slice_uri: slice.clone(),
14411441+ collection: nsid.clone(),
14421442+ parent_uri: parent_uri.clone(),
14431443+ reference_field: ref_field.clone(),
14441444+ };
14451445+14461446+ tracing::debug!(
14471447+ "Querying {} via field '{}' for URI: {}",
14481448+ nsid,
14491449+ ref_field,
14501450+ parent_uri
14511451+ );
14521452+14531453+ match gql_ctx.collection_uri_loader.load_one(key).await {
14541454+ Ok(Some(mut records)) => {
14551455+ if !records.is_empty() {
14561456+ tracing::debug!(
14571457+ "Found {} {} records via '{}' field for parent URI: {}",
14581458+ records.len(),
14591459+ nsid,
14601460+ ref_field,
14611461+ parent_uri
14621462+ );
14631463+14641464+ // Apply limit
14651465+ records.truncate(limit);
1036146610371037- match db.get_slice_collections_records(
10381038- &slice,
10391039- Some(limit),
10401040- None, // cursor
10411041- None, // sort
10421042- Some(&where_clause),
10431043- ).await {
10441044- Ok((records, _cursor)) => {
10451045- let values: Vec<FieldValue> = records
10461046- .into_iter()
10471047- .map(|record| {
10481048- // Convert Record to IndexedRecord
10491049- let indexed_record = crate::models::IndexedRecord {
10501050- uri: record.uri,
10511051- cid: record.cid,
10521052- did: record.did,
10531053- collection: record.collection,
10541054- value: record.json,
10551055- indexed_at: record.indexed_at.to_rfc3339(),
10561056- };
10571057- let container = RecordContainer {
10581058- record: indexed_record,
10591059- };
10601060- FieldValue::owned_any(container)
10611061- })
10621062- .collect();
10631063- Ok(Some(FieldValue::list(values)))
14671467+ let values: Vec<FieldValue> = records
14681468+ .into_iter()
14691469+ .map(|indexed_record| {
14701470+ let container = RecordContainer {
14711471+ record: indexed_record,
14721472+ };
14731473+ FieldValue::owned_any(container)
14741474+ })
14751475+ .collect();
14761476+ return Ok(Some(FieldValue::list(values)));
14771477+ }
14781478+ }
14791479+ Ok(None) => continue,
14801480+ Err(e) => {
14811481+ tracing::debug!("DataLoader error for {} field '{}': {:?}", nsid, ref_field, e);
14821482+ continue;
14831483+ }
14841484+ }
10641485 }
10651065- Err(e) => {
10661066- tracing::debug!("Error querying {}: {}", nsid, e);
10671067- Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
10681068- }
14861486+14871487+ // No records found via any at-uri field
14881488+ tracing::debug!("No {} records found for parent URI: {}", nsid, parent_uri);
14891489+ return Ok(Some(FieldValue::list(Vec::<FieldValue>::new())));
10691490 }
14911491+14921492+ // Fallback: DataLoader not available
14931493+ tracing::debug!("DataLoader not available for reverse join");
14941494+ Ok(Some(FieldValue::list(Vec::<FieldValue>::new())))
10701495 })
10711496 },
10721497 )
···10751500 TypeRef::named(TypeRef::INT),
10761501 ))
10771502 );
15031503+15041504+ // Add count field for the reverse join
15051505+ let count_field_name = format!("{}Count", reverse_field_name);
15061506+ let db_for_count = database.clone();
15071507+ let slice_for_count = slice_uri.clone();
15081508+ let collection_for_count = collection.nsid.clone();
15091509+ let at_uri_fields_for_count = collection.at_uri_fields.clone();
15101510+15111511+ object = object.field(
15121512+ Field::new(
15131513+ &count_field_name,
15141514+ TypeRef::named_nn(TypeRef::INT),
15151515+ move |ctx| {
15161516+ let slice = slice_for_count.clone();
15171517+ let nsid = collection_for_count.clone();
15181518+ let db = db_for_count.clone();
15191519+ let ref_fields = at_uri_fields_for_count.clone();
15201520+ FieldFuture::new(async move {
15211521+ let container = ctx.parent_value.try_downcast_ref::<RecordContainer>()?;
15221522+ let parent_uri = &container.record.uri;
15231523+15241524+ // Build where clause to count records referencing this URI
15251525+ for ref_field in &ref_fields {
15261526+ let mut where_clause = crate::models::WhereClause {
15271527+ conditions: HashMap::new(),
15281528+ or_conditions: None,
15291529+ };
15301530+15311531+ where_clause.conditions.insert(
15321532+ "collection".to_string(),
15331533+ crate::models::WhereCondition {
15341534+ gt: None,
15351535+ gte: None,
15361536+ lt: None,
15371537+ lte: None,
15381538+ eq: Some(serde_json::Value::String(nsid.clone())),
15391539+ in_values: None,
15401540+ contains: None,
15411541+ },
15421542+ );
15431543+15441544+ where_clause.conditions.insert(
15451545+ ref_field.clone(),
15461546+ crate::models::WhereCondition {
15471547+ gt: None,
15481548+ gte: None,
15491549+ lt: None,
15501550+ lte: None,
15511551+ eq: Some(serde_json::Value::String(parent_uri.clone())),
15521552+ in_values: None,
15531553+ contains: None,
15541554+ },
15551555+ );
15561556+15571557+ match db.count_slice_collections_records(&slice, Some(&where_clause)).await {
15581558+ Ok(count) if count > 0 => {
15591559+ return Ok(Some(FieldValue::value(count as i32)));
15601560+ }
15611561+ Ok(_) => continue,
15621562+ Err(e) => {
15631563+ tracing::debug!("Count error for {}: {}", nsid, e);
15641564+ continue;
15651565+ }
15661566+ }
15671567+ }
15681568+15691569+ // No matching field found, return 0
15701570+ Ok(Some(FieldValue::value(0)))
15711571+ })
15721572+ },
15731573+ )
15741574+ );
10781575 }
1079157610801577 object
···11441641 }
11451642 GraphQLType::Blob => {
11461643 // Blob object type with url resolver
11471147- if is_required {
11481148- TypeRef::named_nn("Blob")
11491149- } else {
11501150- TypeRef::named("Blob")
11511151- }
16441644+ // Always nullable since blob data might be missing or malformed
16451645+ TypeRef::named("Blob")
11521646 }
11531647 GraphQLType::Json | GraphQLType::Ref | GraphQLType::Object(_) | GraphQLType::Union => {
11541648 // JSON scalar type - linked records and complex objects return as JSON
···11751669 TypeRef::named_nn_list_nn(inner_ref)
11761670 } else {
11771671 TypeRef::named_list(inner_ref)
16721672+ }
16731673+ }
16741674+ GraphQLType::Blob => {
16751675+ // Arrays of blobs - return list of Blob objects
16761676+ if is_required {
16771677+ TypeRef::named_nn_list("Blob")
16781678+ } else {
16791679+ TypeRef::named_list("Blob")
11781680 }
11791681 }
11801682 _ => {
···17152217fn create_aggregation_order_by_input() -> InputObject {
17162218 InputObject::new("AggregationOrderBy")
17172219 .field(InputValue::new("count", TypeRef::named("SortDirection")))
22202220+}
22212221+22222222+/// Creates the DateInterval enum for date truncation
22232223+fn create_date_interval_enum() -> Enum {
22242224+ Enum::new("DateInterval")
22252225+ .item(EnumItem::new("second"))
22262226+ .item(EnumItem::new("minute"))
22272227+ .item(EnumItem::new("hour"))
22282228+ .item(EnumItem::new("day"))
22292229+ .item(EnumItem::new("week"))
22302230+ .item(EnumItem::new("month"))
22312231+ .item(EnumItem::new("quarter"))
22322232+ .item(EnumItem::new("year"))
17182233}
1719223417202235/// Converts a serde_json::Value to an async_graphql::Value