tangled
alpha
login
or
join now
nonbinary.computer
/
jacquard
80
fork
atom
A better Rust ATProto crate
80
fork
atom
overview
issues
9
pulls
pipelines
code->lexicon stage 1
Orual
4 months ago
9f1fd63c
7c7de622
+1308
6 changed files
expand all
collapse all
unified
split
crates
jacquard-lexicon
src
lib.rs
schema
builder.rs
type_mapping.rs
schema.rs
tests
builder_tests.rs
schema_tests.rs
+2
crates/jacquard-lexicon/src/lib.rs
···
9
9
//! - [`codegen`] - Rust code generation from parsed schemas
10
10
//! - [`corpus`] - Lexicon corpus management and namespace organization
11
11
//! - [`lexicon`] - Schema parsing and validation
12
12
+
//! - [`schema`] - Schema generation from Rust types (reverse codegen)
12
13
//! - [`union_registry`] - Tracks union types for collision detection
13
14
//! - [`fs`] - Filesystem utilities for lexicon storage
14
15
···
17
18
pub mod error;
18
19
pub mod fs;
19
20
pub mod lexicon;
21
21
+
pub mod schema;
20
22
pub mod union_registry;
+378
crates/jacquard-lexicon/src/schema.rs
···
1
1
+
//! # Lexicon Schema Generation
2
2
+
//!
3
3
+
//! This module provides traits and utilities for generating AT Protocol lexicon schemas
4
4
+
//! from Rust types. This is the reverse direction from the usual lexicon→Rust codegen.
5
5
+
//!
6
6
+
//! ## Use Cases
7
7
+
//!
8
8
+
//! - **Rapid prototyping**: Define types in Rust, generate schemas automatically
9
9
+
//! - **Custom lexicons**: Easy creation of third-party lexicons
10
10
+
//! - **Documentation**: Keep types and schemas in sync
11
11
+
//! - **Runtime introspection**: Access schema metadata at runtime
12
12
+
//!
13
13
+
//! ## Usage
14
14
+
//!
15
15
+
//! ### Manual Implementation
16
16
+
//!
17
17
+
//! Implement the `LexiconSchema` trait for your types:
18
18
+
//!
19
19
+
//! ```rust
20
20
+
//! # use jacquard_lexicon::schema::{LexiconSchema, LexiconGenerator};
21
21
+
//! # use jacquard_lexicon::lexicon::LexiconDoc;
22
22
+
//! struct MyType;
23
23
+
//!
24
24
+
//! impl LexiconSchema for MyType {
25
25
+
//! fn nsid() -> &'static str {
26
26
+
//! "com.example.myType"
27
27
+
//! }
28
28
+
//!
29
29
+
//! fn lexicon_doc(generator: &mut LexiconGenerator) -> LexiconDoc<'static> {
30
30
+
//! // Build schema using generator or builder API
31
31
+
//! todo!()
32
32
+
//! }
33
33
+
//! }
34
34
+
//! ```
35
35
+
//!
36
36
+
//! ### Derive Macro (Future)
37
37
+
//!
38
38
+
//! In Phase 2, a derive macro will automate trait implementation:
39
39
+
//!
40
40
+
//! ```rust,ignore
41
41
+
//! #[derive(LexiconSchema)]
42
42
+
//! #[lexicon(nsid = "app.bsky.feed.post", record, key = "tid")]
43
43
+
//! struct Post<'a> {
44
44
+
//! #[lexicon(max_graphemes = 300, max_length = 3000)]
45
45
+
//! text: CowStr<'a>,
46
46
+
//! created_at: Datetime,
47
47
+
//! }
48
48
+
//! ```
49
49
+
//!
50
50
+
//! ## Design Pattern
51
51
+
//!
52
52
+
//! The design follows `schemars` crate patterns:
53
53
+
//!
54
54
+
//! - **Trait-based**: Types implement `LexiconSchema` trait
55
55
+
//! - **Generator pattern**: `LexiconGenerator` tracks refs/defs
56
56
+
//! - **Inline vs refs**: Types control whether they inline or use refs
57
57
+
//! - **Validation**: Runtime constraint checking via `validate()` method
58
58
+
59
59
+
pub mod builder;
60
60
+
pub mod type_mapping;
61
61
+
62
62
+
use crate::lexicon::{Lexicon, LexiconDoc, LexObjectProperty, LexRef, LexUserType};
63
63
+
use jacquard_common::smol_str::SmolStr;
64
64
+
use std::borrow::Cow;
65
65
+
use std::collections::{BTreeMap, HashSet};
66
66
+
67
67
+
/// Trait for types that can generate lexicon schemas
///
/// Implemented (manually for now, via derive macro in Phase 2) by Rust types
/// that want to emit an AT Protocol lexicon document describing themselves.
/// Only `nsid` and `lexicon_doc` are required; the other methods have
/// sensible defaults.
pub trait LexiconSchema {
    /// The NSID for this type's primary definition
    ///
    /// For fragments, this is the base NSID (without `#fragment`).
    fn nsid() -> &'static str;

    /// The schema ID for this type
    ///
    /// Defaults to NSID. Override for fragments to include `#fragment` suffix.
    /// This is the key used by `LexiconGenerator` for dedup/cycle tracking.
    fn schema_id() -> Cow<'static, str> {
        Cow::Borrowed(Self::nsid())
    }

    /// Whether this type should be inlined vs referenced
    ///
    /// - `false` (default): Type becomes a def, references use `{"type": "ref", "ref": "nsid"}`
    /// - `true`: Type's schema is inlined directly into parent
    ///
    /// Recursive types MUST return `false` to avoid infinite expansion.
    fn inline_schema() -> bool {
        false
    }

    /// Generate the lexicon document for this type
    ///
    /// Called by generator to build complete schema. Use `generator` to handle
    /// nested types and refs.
    fn lexicon_doc(generator: &mut LexiconGenerator) -> LexiconDoc<'static>;

    /// Validate an instance against lexicon constraints
    ///
    /// Checks runtime constraints like `max_length`, `max_graphemes`, `minimum`, etc.
    /// Returns `Ok(())` if valid, `Err` with details if invalid.
    fn validate(&self) -> Result<(), ValidationError> {
        // Default impl: no constraints to check, so every instance is valid.
        Ok(())
    }
}
106
106
+
107
107
+
/// Error type for validation failures
///
/// Produced by [`LexiconSchema::validate`]. Each variant carries the field
/// name, the constraint bound, and the observed value so callers can report
/// precisely which constraint was violated.
#[derive(Debug, Clone, thiserror::Error, miette::Diagnostic)]
pub enum ValidationError {
    /// A string/array length exceeded its `maxLength` constraint.
    #[error("field `{field}` exceeds maximum length: {actual} > {max}")]
    MaxLength {
        field: &'static str,
        max: usize,
        actual: usize,
    },

    /// A string's grapheme count exceeded its `maxGraphemes` constraint.
    #[error("field `{field}` exceeds maximum grapheme count: {actual} > {max}")]
    MaxGraphemes {
        field: &'static str,
        max: usize,
        actual: usize,
    },

    /// A string/array length fell below its `minLength` constraint.
    #[error("field `{field}` below minimum length: {actual} < {min}")]
    MinLength {
        field: &'static str,
        min: usize,
        actual: usize,
    },

    /// A string's grapheme count fell below its `minGraphemes` constraint.
    #[error("field `{field}` below minimum grapheme count: {actual} < {min}")]
    MinGraphemes {
        field: &'static str,
        min: usize,
        actual: usize,
    },

    /// An integer exceeded its `maximum` constraint.
    #[error("field `{field}` value {actual} exceeds maximum: {max}")]
    Maximum {
        field: &'static str,
        max: i64,
        actual: i64,
    },

    /// An integer fell below its `minimum` constraint.
    #[error("field `{field}` value {actual} below minimum: {min}")]
    Minimum {
        field: &'static str,
        min: i64,
        actual: i64,
    },

    /// Catch-all for constraint violations not covered above
    /// (e.g. enum/const/format mismatches).
    #[error("field `{field}` has invalid value: {message}")]
    InvalidValue {
        field: &'static str,
        message: String,
    },

    /// Aggregates several failures so `validate` can report all of them at once.
    #[error("multiple validation errors: {0:?}")]
    Multiple(Vec<ValidationError>),
}
161
161
+
162
162
+
/// Generator for lexicon schemas
///
/// Tracks seen types and manages refs vs inline decisions.
/// Similar pattern to `schemars::SchemaGenerator`.
pub struct LexiconGenerator {
    /// Root NSID for the primary type being generated
    root_nsid: SmolStr,

    /// Collected definitions (def_name -> LexUserType)
    ///
    /// Keyed by bare def name (e.g. "main"), not by full schema id.
    defs: BTreeMap<SmolStr, LexUserType<'static>>,

    /// Types we've seen (prevents duplicate defs)
    ///
    /// Keyed by `LexiconSchema::schema_id`.
    seen_types: HashSet<SmolStr>,

    /// Stack of types currently being generated (cycle detection)
    ///
    /// Pushed on entry to `generate_for`, popped after `lexicon_doc` returns.
    generation_stack: Vec<SmolStr>,
}
179
179
+
180
180
+
impl LexiconGenerator {
181
181
+
/// Create a new generator for a primary type
182
182
+
pub fn new(root_nsid: impl Into<SmolStr>) -> Self {
183
183
+
Self {
184
184
+
root_nsid: root_nsid.into(),
185
185
+
defs: BTreeMap::new(),
186
186
+
seen_types: HashSet::new(),
187
187
+
generation_stack: Vec::new(),
188
188
+
}
189
189
+
}
190
190
+
191
191
+
/// Generate schema for a type that implements LexiconSchema
192
192
+
///
193
193
+
/// Returns either a ref property or an inline object property depending on `inline_schema()`.
194
194
+
/// Also adds any defs to the generator's collection.
195
195
+
pub fn generate_for<T: LexiconSchema>(
196
196
+
&mut self,
197
197
+
) -> Result<LexObjectProperty<'static>, GeneratorError> {
198
198
+
let schema_id = T::schema_id();
199
199
+
200
200
+
// Check for cycles
201
201
+
if self.generation_stack.contains(&schema_id.as_ref().into()) {
202
202
+
return Err(GeneratorError::RecursiveType {
203
203
+
type_name: schema_id.to_string(),
204
204
+
stack: self.generation_stack.clone(),
205
205
+
});
206
206
+
}
207
207
+
208
208
+
// If we've seen this type and it's not inline, return a ref
209
209
+
if !T::inline_schema() && self.seen_types.contains(schema_id.as_ref()) {
210
210
+
return Ok(self.make_ref_property(schema_id.as_ref()));
211
211
+
}
212
212
+
213
213
+
// Mark as seen and add to stack
214
214
+
self.seen_types.insert(schema_id.to_string().into());
215
215
+
self.generation_stack.push(schema_id.to_string().into());
216
216
+
217
217
+
// Generate the schema
218
218
+
let doc = T::lexicon_doc(self);
219
219
+
220
220
+
// Pop from stack
221
221
+
self.generation_stack.pop();
222
222
+
223
223
+
// If inline, extract the main def and convert to property
224
224
+
if T::inline_schema() {
225
225
+
// Find the main def and convert to property type
226
226
+
if let Some(def) = doc.defs.get("main") {
227
227
+
// Convert LexUserType to LexObjectProperty
228
228
+
match def {
229
229
+
LexUserType::Object(obj) => Ok(LexObjectProperty::Object(obj.clone())),
230
230
+
LexUserType::String(s) => Ok(LexObjectProperty::String(s.clone())),
231
231
+
LexUserType::Array(a) => Ok(LexObjectProperty::Array(a.clone())),
232
232
+
LexUserType::Boolean(b) => Ok(LexObjectProperty::Boolean(b.clone())),
233
233
+
LexUserType::Integer(i) => Ok(LexObjectProperty::Integer(i.clone())),
234
234
+
LexUserType::Blob(b) => Ok(LexObjectProperty::Blob(b.clone())),
235
235
+
LexUserType::Bytes(b) => Ok(LexObjectProperty::Bytes(b.clone())),
236
236
+
LexUserType::CidLink(c) => Ok(LexObjectProperty::CidLink(c.clone())),
237
237
+
LexUserType::Unknown(u) => Ok(LexObjectProperty::Unknown(u.clone())),
238
238
+
// Types that cannot be inlined as properties
239
239
+
_ => Err(GeneratorError::InvalidInlineType {
240
240
+
type_name: schema_id.to_string(),
241
241
+
user_type: format!("{:?}", def),
242
242
+
}),
243
243
+
}
244
244
+
} else {
245
245
+
Err(GeneratorError::EmptyDoc {
246
246
+
type_name: schema_id.to_string(),
247
247
+
})
248
248
+
}
249
249
+
} else {
250
250
+
// Not inline - merge defs into our collection and return ref
251
251
+
for (def_name, def) in doc.defs {
252
252
+
self.defs.insert(def_name, def);
253
253
+
}
254
254
+
Ok(self.make_ref_property(schema_id.as_ref()))
255
255
+
}
256
256
+
}
257
257
+
258
258
+
/// Create a ref property to another type
259
259
+
///
260
260
+
/// Returns a LexObjectProperty::Ref for use in object properties or array items.
261
261
+
pub fn make_ref_property(&self, ref_nsid: &str) -> LexObjectProperty<'static> {
262
262
+
LexObjectProperty::Ref(LexRef {
263
263
+
description: None,
264
264
+
r#ref: ref_nsid.to_string().into(),
265
265
+
})
266
266
+
}
267
267
+
268
268
+
/// Build the final lexicon document
269
269
+
pub fn into_doc(self) -> LexiconDoc<'static> {
270
270
+
LexiconDoc {
271
271
+
lexicon: Lexicon::Lexicon1,
272
272
+
id: self.root_nsid.into(),
273
273
+
revision: None,
274
274
+
description: None,
275
275
+
defs: self.defs,
276
276
+
}
277
277
+
}
278
278
+
279
279
+
/// Add a def directly (for manual construction)
280
280
+
pub fn add_def(&mut self, name: impl Into<SmolStr>, def: LexUserType<'static>) {
281
281
+
self.defs.insert(name.into(), def);
282
282
+
}
283
283
+
284
284
+
/// Get the root NSID
285
285
+
pub fn root_nsid(&self) -> &str {
286
286
+
&self.root_nsid
287
287
+
}
288
288
+
}
289
289
+
290
290
+
/// Errors from lexicon generation
///
/// Returned by [`LexiconGenerator::generate_for`] and related machinery.
#[derive(Debug, Clone, thiserror::Error, miette::Diagnostic)]
pub enum GeneratorError {
    /// A type was encountered while it was already being generated
    /// (i.e. it appears in its own definition without going through a ref).
    #[error("recursive type detected: {type_name}")]
    RecursiveType {
        type_name: String,
        /// Snapshot of the generation stack at detection time, outermost first.
        stack: Vec<SmolStr>,
    },

    /// An inline type's `lexicon_doc` produced a document with no `main` def.
    #[error("type {type_name} generated empty document")]
    EmptyDoc { type_name: String },

    /// An inline type's `main` def is a kind (e.g. record, query) that has no
    /// corresponding `LexObjectProperty` variant and so cannot be inlined.
    #[error("type {type_name} marked as inline but main def type cannot be inlined: {user_type}")]
    InvalidInlineType {
        type_name: String,
        /// Debug rendering of the offending `LexUserType`.
        user_type: String,
    },

    /// An NSID failed syntactic validation.
    #[error("invalid NSID: {nsid}")]
    InvalidNsid { nsid: String },
}
311
311
+
312
312
+
#[cfg(test)]
mod tests {
    use super::*;
    // Only the types the tests actually construct are imported here;
    // `LexUserType`/`LexObjectProperty` come in through `super::*`.
    // (Dropped unused `LexBoolean` and `LexInteger` imports.)
    use crate::lexicon::{LexObject, LexRecord, LexRecordRecord, LexString};

    /// `add_def` + `into_doc` round-trip: one "main" record def under the
    /// root NSID comes back out of the built document.
    #[test]
    fn test_generator_simple() {
        let mut generator = LexiconGenerator::new("app.example.test");

        // Add a simple record
        generator.add_def(
            "main",
            LexUserType::Record(LexRecord {
                description: Some("Test record".into()),
                key: Some("tid".into()),
                record: LexRecordRecord::Object(LexObject {
                    description: None,
                    required: Some(vec!["field1".into()]),
                    nullable: None,
                    properties: [(
                        "field1".into(),
                        LexObjectProperty::String(LexString {
                            description: None,
                            format: None,
                            default: None,
                            min_length: None,
                            max_length: None,
                            min_graphemes: None,
                            max_graphemes: None,
                            r#enum: None,
                            r#const: None,
                            known_values: None,
                        }),
                    )]
                    .into(),
                }),
            }),
        );

        let doc = generator.into_doc();
        assert_eq!(doc.id.as_ref(), "app.example.test");
        assert_eq!(doc.defs.len(), 1);
        assert!(doc.defs.contains_key("main"));
    }

    /// `MaxLength` Display output mentions the violated constraint.
    #[test]
    fn test_validation_max_length() {
        let err = ValidationError::MaxLength {
            field: "text",
            max: 100,
            actual: 150,
        };
        assert!(err.to_string().contains("exceeds maximum length"));
    }

    /// `MaxGraphemes` Display output mentions the violated constraint.
    #[test]
    fn test_validation_max_graphemes() {
        let err = ValidationError::MaxGraphemes {
            field: "text",
            max: 50,
            actual: 75,
        };
        assert!(err.to_string().contains("exceeds maximum grapheme count"));
    }
}
+530
crates/jacquard-lexicon/src/schema/builder.rs
···
1
1
+
//! Builder API for manual lexicon schema construction
2
2
+
//!
3
3
+
//! Provides ergonomic API for building lexicon documents without implementing the trait.
4
4
+
//! Useful for prototyping, testing, and dynamic schema generation.
5
5
+
6
6
+
use crate::lexicon::{
7
7
+
LexArray, LexArrayItem, LexBoolean, LexInteger, LexObject, LexObjectProperty, LexRecord,
8
8
+
LexRecordRecord, LexRef, LexString, LexStringFormat, LexUserType, LexXrpcBody,
9
9
+
LexXrpcBodySchema, LexXrpcError, LexXrpcParameters, LexXrpcParametersProperty, LexXrpcQuery,
10
10
+
LexXrpcQueryParameter, Lexicon, LexiconDoc,
11
11
+
};
12
12
+
use jacquard_common::CowStr;
13
13
+
use jacquard_common::smol_str::SmolStr;
14
14
+
use std::collections::BTreeMap;
15
15
+
16
16
+
/// Builder for lexicon documents
17
17
+
pub struct LexiconDocBuilder {
18
18
+
nsid: SmolStr,
19
19
+
description: Option<CowStr<'static>>,
20
20
+
defs: BTreeMap<SmolStr, LexUserType<'static>>,
21
21
+
}
22
22
+
23
23
+
impl LexiconDocBuilder {
24
24
+
/// Start building a lexicon document
25
25
+
pub fn new(nsid: impl Into<SmolStr>) -> Self {
26
26
+
Self {
27
27
+
nsid: nsid.into(),
28
28
+
description: None,
29
29
+
defs: BTreeMap::new(),
30
30
+
}
31
31
+
}
32
32
+
33
33
+
/// Set document description
34
34
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
35
35
+
self.description = Some(desc.into());
36
36
+
self
37
37
+
}
38
38
+
39
39
+
/// Add a record def (becomes "main")
40
40
+
pub fn record(self) -> RecordBuilder {
41
41
+
RecordBuilder {
42
42
+
doc_builder: self,
43
43
+
key: None,
44
44
+
description: None,
45
45
+
properties: BTreeMap::new(),
46
46
+
required: Vec::new(),
47
47
+
}
48
48
+
}
49
49
+
50
50
+
/// Add an object def
51
51
+
pub fn object(self, name: impl Into<SmolStr>) -> ObjectBuilder {
52
52
+
ObjectBuilder {
53
53
+
doc_builder: self,
54
54
+
def_name: name.into(),
55
55
+
description: None,
56
56
+
properties: BTreeMap::new(),
57
57
+
required: Vec::new(),
58
58
+
}
59
59
+
}
60
60
+
61
61
+
/// Add a query def
62
62
+
pub fn query(self) -> QueryBuilder {
63
63
+
QueryBuilder {
64
64
+
doc_builder: self,
65
65
+
description: None,
66
66
+
parameters: BTreeMap::new(),
67
67
+
required_params: Vec::new(),
68
68
+
output: None,
69
69
+
errors: Vec::new(),
70
70
+
}
71
71
+
}
72
72
+
73
73
+
/// Build the final document
74
74
+
pub fn build(self) -> LexiconDoc<'static> {
75
75
+
LexiconDoc {
76
76
+
lexicon: Lexicon::Lexicon1,
77
77
+
id: self.nsid.into(),
78
78
+
revision: None,
79
79
+
description: self.description,
80
80
+
defs: self.defs,
81
81
+
}
82
82
+
}
83
83
+
}
84
84
+
85
85
+
pub struct RecordBuilder {
86
86
+
doc_builder: LexiconDocBuilder,
87
87
+
key: Option<CowStr<'static>>,
88
88
+
description: Option<CowStr<'static>>,
89
89
+
properties: BTreeMap<SmolStr, LexObjectProperty<'static>>,
90
90
+
required: Vec<SmolStr>,
91
91
+
}
92
92
+
93
93
+
impl RecordBuilder {
94
94
+
/// Set record key type (e.g., "tid")
95
95
+
pub fn key(mut self, key: impl Into<CowStr<'static>>) -> Self {
96
96
+
self.key = Some(key.into());
97
97
+
self
98
98
+
}
99
99
+
100
100
+
/// Set description
101
101
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
102
102
+
self.description = Some(desc.into());
103
103
+
self
104
104
+
}
105
105
+
106
106
+
/// Add a field
107
107
+
pub fn field<F>(mut self, name: impl Into<SmolStr>, builder: F) -> Self
108
108
+
where
109
109
+
F: FnOnce(FieldBuilder) -> FieldBuilder,
110
110
+
{
111
111
+
let field_builder = FieldBuilder::new();
112
112
+
let field_builder = builder(field_builder);
113
113
+
114
114
+
let name = name.into();
115
115
+
if field_builder.required {
116
116
+
self.required.push(name.clone());
117
117
+
}
118
118
+
119
119
+
self.properties.insert(name, field_builder.build());
120
120
+
self
121
121
+
}
122
122
+
123
123
+
/// Build and add to document
124
124
+
pub fn build(mut self) -> LexiconDocBuilder {
125
125
+
let record_obj = LexObject {
126
126
+
description: self.description,
127
127
+
required: if self.required.is_empty() {
128
128
+
None
129
129
+
} else {
130
130
+
Some(self.required)
131
131
+
},
132
132
+
nullable: None,
133
133
+
properties: self.properties,
134
134
+
};
135
135
+
136
136
+
let record = LexRecord {
137
137
+
description: None,
138
138
+
key: self.key,
139
139
+
record: LexRecordRecord::Object(record_obj),
140
140
+
};
141
141
+
142
142
+
self.doc_builder
143
143
+
.defs
144
144
+
.insert("main".into(), LexUserType::Record(record));
145
145
+
self.doc_builder
146
146
+
}
147
147
+
}
148
148
+
149
149
+
pub struct ObjectBuilder {
150
150
+
doc_builder: LexiconDocBuilder,
151
151
+
def_name: SmolStr,
152
152
+
description: Option<CowStr<'static>>,
153
153
+
properties: BTreeMap<SmolStr, LexObjectProperty<'static>>,
154
154
+
required: Vec<SmolStr>,
155
155
+
}
156
156
+
157
157
+
impl ObjectBuilder {
158
158
+
/// Set description
159
159
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
160
160
+
self.description = Some(desc.into());
161
161
+
self
162
162
+
}
163
163
+
164
164
+
/// Add a field
165
165
+
pub fn field<F>(mut self, name: impl Into<SmolStr>, builder: F) -> Self
166
166
+
where
167
167
+
F: FnOnce(FieldBuilder) -> FieldBuilder,
168
168
+
{
169
169
+
let field_builder = FieldBuilder::new();
170
170
+
let field_builder = builder(field_builder);
171
171
+
172
172
+
let name = name.into();
173
173
+
if field_builder.required {
174
174
+
self.required.push(name.clone());
175
175
+
}
176
176
+
177
177
+
self.properties.insert(name, field_builder.build());
178
178
+
self
179
179
+
}
180
180
+
181
181
+
/// Build and add to document
182
182
+
pub fn build(mut self) -> LexiconDocBuilder {
183
183
+
let object = LexObject {
184
184
+
description: self.description,
185
185
+
required: if self.required.is_empty() {
186
186
+
None
187
187
+
} else {
188
188
+
Some(self.required)
189
189
+
},
190
190
+
nullable: None,
191
191
+
properties: self.properties,
192
192
+
};
193
193
+
194
194
+
self.doc_builder
195
195
+
.defs
196
196
+
.insert(self.def_name, LexUserType::Object(object));
197
197
+
self.doc_builder
198
198
+
}
199
199
+
}
200
200
+
201
201
+
pub struct QueryBuilder {
202
202
+
doc_builder: LexiconDocBuilder,
203
203
+
description: Option<CowStr<'static>>,
204
204
+
parameters: BTreeMap<SmolStr, LexXrpcParametersProperty<'static>>,
205
205
+
required_params: Vec<SmolStr>,
206
206
+
output: Option<LexXrpcBody<'static>>,
207
207
+
errors: Vec<LexXrpcError<'static>>,
208
208
+
}
209
209
+
210
210
+
impl QueryBuilder {
211
211
+
/// Set description
212
212
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
213
213
+
self.description = Some(desc.into());
214
214
+
self
215
215
+
}
216
216
+
217
217
+
/// Add a string parameter
218
218
+
pub fn param_string(mut self, name: impl Into<SmolStr>, required: bool) -> Self {
219
219
+
let param = LexXrpcParametersProperty::String(LexString {
220
220
+
description: None,
221
221
+
format: None,
222
222
+
default: None,
223
223
+
min_length: None,
224
224
+
max_length: None,
225
225
+
min_graphemes: None,
226
226
+
max_graphemes: None,
227
227
+
r#enum: None,
228
228
+
r#const: None,
229
229
+
known_values: None,
230
230
+
});
231
231
+
232
232
+
let name = name.into();
233
233
+
if required {
234
234
+
self.required_params.push(name.clone());
235
235
+
}
236
236
+
self.parameters.insert(name, param);
237
237
+
self
238
238
+
}
239
239
+
240
240
+
/// Set output schema
241
241
+
pub fn output(
242
242
+
mut self,
243
243
+
encoding: impl Into<CowStr<'static>>,
244
244
+
schema: LexXrpcBodySchema<'static>,
245
245
+
) -> Self {
246
246
+
self.output = Some(LexXrpcBody {
247
247
+
description: None,
248
248
+
encoding: encoding.into(),
249
249
+
schema: Some(schema),
250
250
+
});
251
251
+
self
252
252
+
}
253
253
+
254
254
+
/// Build and add to document
255
255
+
pub fn build(mut self) -> LexiconDocBuilder {
256
256
+
let params = if self.parameters.is_empty() {
257
257
+
None
258
258
+
} else {
259
259
+
Some(LexXrpcQueryParameter::Params(LexXrpcParameters {
260
260
+
description: None,
261
261
+
required: if self.required_params.is_empty() {
262
262
+
None
263
263
+
} else {
264
264
+
Some(self.required_params)
265
265
+
},
266
266
+
properties: self.parameters,
267
267
+
}))
268
268
+
};
269
269
+
270
270
+
let query = LexXrpcQuery {
271
271
+
description: self.description,
272
272
+
parameters: params,
273
273
+
output: self.output,
274
274
+
errors: if self.errors.is_empty() {
275
275
+
None
276
276
+
} else {
277
277
+
Some(self.errors)
278
278
+
},
279
279
+
};
280
280
+
281
281
+
self.doc_builder
282
282
+
.defs
283
283
+
.insert("main".into(), LexUserType::XrpcQuery(query));
284
284
+
self.doc_builder
285
285
+
}
286
286
+
}
287
287
+
288
288
+
pub struct FieldBuilder {
289
289
+
property: Option<LexObjectProperty<'static>>,
290
290
+
required: bool,
291
291
+
}
292
292
+
293
293
+
impl FieldBuilder {
294
294
+
fn new() -> Self {
295
295
+
Self {
296
296
+
property: None,
297
297
+
required: false,
298
298
+
}
299
299
+
}
300
300
+
301
301
+
/// Mark field as required
302
302
+
pub fn required(mut self) -> Self {
303
303
+
self.required = true;
304
304
+
self
305
305
+
}
306
306
+
307
307
+
/// String field
308
308
+
pub fn string(self) -> StringFieldBuilder {
309
309
+
StringFieldBuilder {
310
310
+
field_builder: self,
311
311
+
format: None,
312
312
+
max_length: None,
313
313
+
max_graphemes: None,
314
314
+
min_length: None,
315
315
+
min_graphemes: None,
316
316
+
description: None,
317
317
+
}
318
318
+
}
319
319
+
320
320
+
/// Integer field
321
321
+
pub fn integer(self) -> IntegerFieldBuilder {
322
322
+
IntegerFieldBuilder {
323
323
+
field_builder: self,
324
324
+
minimum: None,
325
325
+
maximum: None,
326
326
+
description: None,
327
327
+
}
328
328
+
}
329
329
+
330
330
+
/// Boolean field
331
331
+
pub fn boolean(mut self) -> Self {
332
332
+
self.property = Some(LexObjectProperty::Boolean(LexBoolean {
333
333
+
description: None,
334
334
+
default: None,
335
335
+
r#const: None,
336
336
+
}));
337
337
+
self
338
338
+
}
339
339
+
340
340
+
/// Ref field (to another type)
341
341
+
pub fn ref_to(mut self, ref_nsid: impl Into<CowStr<'static>>) -> Self {
342
342
+
self.property = Some(LexObjectProperty::Ref(LexRef {
343
343
+
description: None,
344
344
+
r#ref: ref_nsid.into(),
345
345
+
}));
346
346
+
self
347
347
+
}
348
348
+
349
349
+
/// Array field
350
350
+
pub fn array<F>(mut self, item_builder: F) -> Self
351
351
+
where
352
352
+
F: FnOnce(ArrayItemBuilder) -> ArrayItemBuilder,
353
353
+
{
354
354
+
let builder = ArrayItemBuilder::new();
355
355
+
let builder = item_builder(builder);
356
356
+
self.property = Some(LexObjectProperty::Array(builder.build()));
357
357
+
self
358
358
+
}
359
359
+
360
360
+
pub fn build(self) -> LexObjectProperty<'static> {
361
361
+
self.property.expect("field type not set")
362
362
+
}
363
363
+
}
364
364
+
365
365
+
pub struct StringFieldBuilder {
366
366
+
field_builder: FieldBuilder,
367
367
+
format: Option<LexStringFormat>,
368
368
+
max_length: Option<usize>,
369
369
+
max_graphemes: Option<usize>,
370
370
+
min_length: Option<usize>,
371
371
+
min_graphemes: Option<usize>,
372
372
+
description: Option<CowStr<'static>>,
373
373
+
}
374
374
+
375
375
+
impl StringFieldBuilder {
376
376
+
pub fn format(mut self, format: LexStringFormat) -> Self {
377
377
+
self.format = Some(format);
378
378
+
self
379
379
+
}
380
380
+
381
381
+
pub fn max_length(mut self, max: usize) -> Self {
382
382
+
self.max_length = Some(max);
383
383
+
self
384
384
+
}
385
385
+
386
386
+
pub fn max_graphemes(mut self, max: usize) -> Self {
387
387
+
self.max_graphemes = Some(max);
388
388
+
self
389
389
+
}
390
390
+
391
391
+
pub fn min_length(mut self, min: usize) -> Self {
392
392
+
self.min_length = Some(min);
393
393
+
self
394
394
+
}
395
395
+
396
396
+
pub fn min_graphemes(mut self, min: usize) -> Self {
397
397
+
self.min_graphemes = Some(min);
398
398
+
self
399
399
+
}
400
400
+
401
401
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
402
402
+
self.description = Some(desc.into());
403
403
+
self
404
404
+
}
405
405
+
406
406
+
pub fn required(mut self) -> Self {
407
407
+
self.field_builder.required = true;
408
408
+
self
409
409
+
}
410
410
+
411
411
+
pub fn build(mut self) -> FieldBuilder {
412
412
+
self.field_builder.property = Some(LexObjectProperty::String(LexString {
413
413
+
description: self.description,
414
414
+
format: self.format,
415
415
+
default: None,
416
416
+
min_length: self.min_length,
417
417
+
max_length: self.max_length,
418
418
+
min_graphemes: self.min_graphemes,
419
419
+
max_graphemes: self.max_graphemes,
420
420
+
r#enum: None,
421
421
+
r#const: None,
422
422
+
known_values: None,
423
423
+
}));
424
424
+
self.field_builder
425
425
+
}
426
426
+
}
427
427
+
428
428
+
pub struct IntegerFieldBuilder {
429
429
+
field_builder: FieldBuilder,
430
430
+
minimum: Option<i64>,
431
431
+
maximum: Option<i64>,
432
432
+
description: Option<CowStr<'static>>,
433
433
+
}
434
434
+
435
435
+
impl IntegerFieldBuilder {
436
436
+
pub fn minimum(mut self, min: i64) -> Self {
437
437
+
self.minimum = Some(min);
438
438
+
self
439
439
+
}
440
440
+
441
441
+
pub fn maximum(mut self, max: i64) -> Self {
442
442
+
self.maximum = Some(max);
443
443
+
self
444
444
+
}
445
445
+
446
446
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
447
447
+
self.description = Some(desc.into());
448
448
+
self
449
449
+
}
450
450
+
451
451
+
pub fn build(mut self) -> FieldBuilder {
452
452
+
self.field_builder.property = Some(LexObjectProperty::Integer(LexInteger {
453
453
+
description: self.description,
454
454
+
default: None,
455
455
+
minimum: self.minimum,
456
456
+
maximum: self.maximum,
457
457
+
r#enum: None,
458
458
+
r#const: None,
459
459
+
}));
460
460
+
self.field_builder
461
461
+
}
462
462
+
}
463
463
+
464
464
+
pub struct ArrayItemBuilder {
465
465
+
item: Option<LexArrayItem<'static>>,
466
466
+
description: Option<CowStr<'static>>,
467
467
+
min_length: Option<usize>,
468
468
+
max_length: Option<usize>,
469
469
+
}
470
470
+
471
471
+
impl ArrayItemBuilder {
472
472
+
fn new() -> Self {
473
473
+
Self {
474
474
+
item: None,
475
475
+
description: None,
476
476
+
min_length: None,
477
477
+
max_length: None,
478
478
+
}
479
479
+
}
480
480
+
481
481
+
pub fn description(mut self, desc: impl Into<CowStr<'static>>) -> Self {
482
482
+
self.description = Some(desc.into());
483
483
+
self
484
484
+
}
485
485
+
486
486
+
pub fn min_length(mut self, min: usize) -> Self {
487
487
+
self.min_length = Some(min);
488
488
+
self
489
489
+
}
490
490
+
491
491
+
pub fn max_length(mut self, max: usize) -> Self {
492
492
+
self.max_length = Some(max);
493
493
+
self
494
494
+
}
495
495
+
496
496
+
/// String items
497
497
+
pub fn string_items(mut self) -> Self {
498
498
+
self.item = Some(LexArrayItem::String(LexString {
499
499
+
description: None,
500
500
+
format: None,
501
501
+
default: None,
502
502
+
min_length: None,
503
503
+
max_length: None,
504
504
+
min_graphemes: None,
505
505
+
max_graphemes: None,
506
506
+
r#enum: None,
507
507
+
r#const: None,
508
508
+
known_values: None,
509
509
+
}));
510
510
+
self
511
511
+
}
512
512
+
513
513
+
/// Ref items
514
514
+
pub fn ref_items(mut self, ref_nsid: impl Into<CowStr<'static>>) -> Self {
515
515
+
self.item = Some(LexArrayItem::Ref(LexRef {
516
516
+
description: None,
517
517
+
r#ref: ref_nsid.into(),
518
518
+
}));
519
519
+
self
520
520
+
}
521
521
+
522
522
+
fn build(self) -> LexArray<'static> {
523
523
+
LexArray {
524
524
+
description: self.description,
525
525
+
items: self.item.expect("array item type not set"),
526
526
+
min_length: self.min_length,
527
527
+
max_length: self.max_length,
528
528
+
}
529
529
+
}
530
530
+
}
+176
crates/jacquard-lexicon/src/schema/type_mapping.rs
···
1
1
+
//! Type mapping utilities for converting Rust types to lexicon primitives
2
2
+
//!
3
3
+
//! These utilities parse Rust types using `syn` to determine their lexicon equivalents.
4
4
+
//! Used by the derive macro in Phase 2.
5
5
+
6
6
+
use syn;
7
7
+
8
8
+
/// Detect the lexicon type for a Rust type path
9
9
+
///
10
10
+
/// Used by derive macro to map field types to lexicon primitives.
11
11
+
pub fn rust_type_to_lexicon_type(ty: &syn::Type) -> Option<LexiconPrimitiveType> {
12
12
+
match ty {
13
13
+
syn::Type::Path(type_path) => {
14
14
+
let path = &type_path.path;
15
15
+
let last_segment = path.segments.last()?;
16
16
+
17
17
+
match last_segment.ident.to_string().as_str() {
18
18
+
// Boolean types
19
19
+
"bool" => Some(LexiconPrimitiveType::Boolean),
20
20
+
21
21
+
// Integer types (lexicon integers are i64)
22
22
+
"i8" | "i16" | "i32" | "i64" | "isize" => Some(LexiconPrimitiveType::Integer),
23
23
+
// Note: unsigned types not directly supported by lexicon spec
24
24
+
// Users should use i64 or cast to i64
25
25
+
"u8" | "u16" | "u32" | "u64" | "usize" => Some(LexiconPrimitiveType::Integer),
26
26
+
27
27
+
// String types (Rust primitives)
28
28
+
"String" | "str" => Some(LexiconPrimitiveType::String(StringFormat::Plain)),
29
29
+
30
30
+
// jacquard string types
31
31
+
"CowStr" | "SmolStr" => Some(LexiconPrimitiveType::String(StringFormat::Plain)),
32
32
+
"Did" => Some(LexiconPrimitiveType::String(StringFormat::Did)),
33
33
+
"Handle" => Some(LexiconPrimitiveType::String(StringFormat::Handle)),
34
34
+
"AtUri" => Some(LexiconPrimitiveType::String(StringFormat::AtUri)),
35
35
+
"Nsid" => Some(LexiconPrimitiveType::String(StringFormat::Nsid)),
36
36
+
"Cid" => Some(LexiconPrimitiveType::String(StringFormat::Cid)),
37
37
+
"Datetime" => Some(LexiconPrimitiveType::String(StringFormat::Datetime)),
38
38
+
"Language" => Some(LexiconPrimitiveType::String(StringFormat::Language)),
39
39
+
"Tid" => Some(LexiconPrimitiveType::String(StringFormat::Tid)),
40
40
+
"RecordKey" => Some(LexiconPrimitiveType::String(StringFormat::RecordKey)),
41
41
+
42
42
+
// IPLD types
43
43
+
"Bytes" if is_bytes_type(path) => Some(LexiconPrimitiveType::Bytes),
44
44
+
"CidLink" => Some(LexiconPrimitiveType::CidLink),
45
45
+
46
46
+
// Blob type
47
47
+
"Blob" => Some(LexiconPrimitiveType::Blob),
48
48
+
49
49
+
// Unknown/unvalidated data
50
50
+
"Data" | "RawData" => Some(LexiconPrimitiveType::Unknown),
51
51
+
"Vec" => {
52
52
+
// Extract Vec<T> item type
53
53
+
if let syn::PathArguments::AngleBracketed(args) = &last_segment.arguments {
54
54
+
if let Some(syn::GenericArgument::Type(inner_ty)) = args.args.first() {
55
55
+
return Some(LexiconPrimitiveType::Array(Box::new(
56
56
+
rust_type_to_lexicon_type(inner_ty)?,
57
57
+
)));
58
58
+
}
59
59
+
}
60
60
+
None
61
61
+
}
62
62
+
"Option" => {
63
63
+
// Extract Option<T> inner type - mark as optional
64
64
+
if let syn::PathArguments::AngleBracketed(args) = &last_segment.arguments {
65
65
+
if let Some(syn::GenericArgument::Type(inner_ty)) = args.args.first() {
66
66
+
return rust_type_to_lexicon_type(inner_ty);
67
67
+
}
68
68
+
}
69
69
+
None
70
70
+
}
71
71
+
_ => None,
72
72
+
}
73
73
+
}
74
74
+
_ => None,
75
75
+
}
76
76
+
}
77
77
+
78
78
+
/// Check if a path represents bytes::Bytes
79
79
+
fn is_bytes_type(path: &syn::Path) -> bool {
80
80
+
if path.segments.len() == 2 {
81
81
+
let first = &path.segments[0].ident;
82
82
+
let second = &path.segments[1].ident;
83
83
+
first == "bytes" && second == "Bytes"
84
84
+
} else {
85
85
+
false
86
86
+
}
87
87
+
}
88
88
+
89
89
+
/// Classification of lexicon primitive types
///
/// Produced by [`rust_type_to_lexicon_type`] when mapping Rust field types
/// to their lexicon equivalents.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LexiconPrimitiveType {
    /// Lexicon `boolean`.
    Boolean,
    /// Lexicon `integer` (both signed and unsigned Rust ints map here).
    Integer,
    /// Lexicon `string`, refined by an optional format.
    String(StringFormat),
    /// Lexicon `bytes`.
    Bytes,
    /// Lexicon `cid-link`.
    CidLink,
    /// Lexicon `blob`.
    Blob,
    /// Lexicon `unknown` — unvalidated data.
    Unknown,
    /// Lexicon `array` with the given item type.
    Array(Box<LexiconPrimitiveType>),
    Object, // For structs
    Ref(String), // For types with LexiconSchema impl
    Union(Vec<String>), // For enums with #[open_union]
}
104
104
+
105
105
+
/// String format refinements for lexicon `string` types
/// (cf. the `format` field on `LexString`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StringFormat {
    /// No format constraint.
    Plain,
    Did,
    Handle,
    AtUri,
    Nsid,
    Cid,
    Datetime,
    Language,
    Tid,
    RecordKey,
    AtIdentifier,
    Uri,
}
120
120
+
121
121
+
/// Extract constraints from field attributes
122
122
+
pub fn extract_field_constraints(attrs: &[syn::Attribute]) -> FieldConstraints {
123
123
+
let mut constraints = FieldConstraints::default();
124
124
+
125
125
+
for attr in attrs {
126
126
+
if !attr.path().is_ident("lexicon") {
127
127
+
continue;
128
128
+
}
129
129
+
130
130
+
let _ = attr.parse_nested_meta(|meta| {
131
131
+
if meta.path.is_ident("max_length") {
132
132
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
133
133
+
constraints.max_length = Some(lit.base10_parse()?);
134
134
+
}
135
135
+
} else if meta.path.is_ident("max_graphemes") {
136
136
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
137
137
+
constraints.max_graphemes = Some(lit.base10_parse()?);
138
138
+
}
139
139
+
} else if meta.path.is_ident("min_length") {
140
140
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
141
141
+
constraints.min_length = Some(lit.base10_parse()?);
142
142
+
}
143
143
+
} else if meta.path.is_ident("min_graphemes") {
144
144
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
145
145
+
constraints.min_graphemes = Some(lit.base10_parse()?);
146
146
+
}
147
147
+
} else if meta.path.is_ident("minimum") {
148
148
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
149
149
+
constraints.minimum = Some(lit.base10_parse()?);
150
150
+
}
151
151
+
} else if meta.path.is_ident("maximum") {
152
152
+
if let Ok(lit) = meta.value()?.parse::<syn::LitInt>() {
153
153
+
constraints.maximum = Some(lit.base10_parse()?);
154
154
+
}
155
155
+
} else if meta.path.is_ident("ref") {
156
156
+
if let Ok(lit) = meta.value()?.parse::<syn::LitStr>() {
157
157
+
constraints.explicit_ref = Some(lit.value());
158
158
+
}
159
159
+
}
160
160
+
Ok(())
161
161
+
});
162
162
+
}
163
163
+
164
164
+
constraints
165
165
+
}
166
166
+
167
167
+
/// Constraints collected from a field's `#[lexicon(...)]` attributes
/// by [`extract_field_constraints`].
#[derive(Debug, Default, Clone)]
pub struct FieldConstraints {
    /// `max_length` attribute value, if present.
    pub max_length: Option<usize>,
    /// `max_graphemes` attribute value, if present.
    pub max_graphemes: Option<usize>,
    /// `min_length` attribute value, if present.
    pub min_length: Option<usize>,
    /// `min_graphemes` attribute value, if present.
    pub min_graphemes: Option<usize>,
    /// `minimum` attribute value, if present.
    pub minimum: Option<i64>,
    /// `maximum` attribute value, if present.
    pub maximum: Option<i64>,
    /// Explicit `ref = "..."` override, if present.
    pub explicit_ref: Option<String>,
}
+85
crates/jacquard-lexicon/tests/builder_tests.rs
···
1
1
+
use jacquard_lexicon::lexicon::LexStringFormat;
2
2
+
use jacquard_lexicon::schema::builder::LexiconDocBuilder;
3
3
+
4
4
+
#[test]
fn test_builder_simple_record() {
    // A record with one length-constrained string and one datetime field.
    let document = LexiconDocBuilder::new("app.example.test")
        .description("Test record")
        .record()
        .key("tid")
        .field("text", |field| {
            field.string().max_length(1000).required().build()
        })
        .field("createdAt", |field| {
            field
                .string()
                .format(LexStringFormat::Datetime)
                .required()
                .build()
        })
        .build()
        .build();

    assert_eq!(document.defs.len(), 1);
    assert_eq!(document.id.as_ref(), "app.example.test");

    // Round-trip through serde and spot-check the rendered schema.
    let rendered = serde_json::to_string_pretty(&document).unwrap();
    println!("{}", rendered);

    assert!(rendered.contains("\"type\": \"record\""));
    assert!(rendered.contains("\"maxLength\": 1000"));
}
30
30
+
31
31
+
#[test]
fn test_builder_query() {
    // Minimal query with a single required string parameter.
    let document = LexiconDocBuilder::new("app.example.getPost")
        .description("Get a post")
        .query()
        .description("Retrieve a post by URI")
        .param_string("uri", true)
        .build()
        .build();

    assert_eq!(document.defs.len(), 1);
    assert_eq!(document.id.as_ref(), "app.example.getPost");

    let rendered = serde_json::to_string_pretty(&document).unwrap();
    println!("{}", rendered);

    assert!(rendered.contains("\"type\": \"query\""));
}
49
49
+
50
50
+
#[test]
fn test_builder_object_with_ref() {
    // Object def mixing a formatted string with a cross-lexicon ref.
    let document = LexiconDocBuilder::new("app.example.types")
        .object("post")
        .field("uri", |field| {
            field
                .string()
                .format(LexStringFormat::AtUri)
                .required()
                .build()
        })
        .field("author", |field| {
            field.ref_to("app.bsky.actor.defs#profileView")
        })
        .build()
        .build();

    assert_eq!(document.defs.len(), 1);
    assert_eq!(document.id.as_ref(), "app.example.types");

    let rendered = serde_json::to_string_pretty(&document).unwrap();
    println!("{}", rendered);

    assert!(rendered.contains("\"type\": \"ref\""));
    assert!(rendered.contains("app.bsky.actor.defs#profileView"));
}
70
70
+
71
71
+
#[test]
fn test_builder_array_field() {
    // Record containing an array of strings with a length bound of 100.
    let document = LexiconDocBuilder::new("app.example.list")
        .record()
        .field("items", |field| {
            field.array(|array| array.string_items().max_length(100))
        })
        .build()
        .build();

    assert_eq!(document.id.as_ref(), "app.example.list");

    let rendered = serde_json::to_string_pretty(&document).unwrap();
    println!("{}", rendered);

    assert!(rendered.contains("\"type\": \"array\""));
}
+137
crates/jacquard-lexicon/tests/schema_tests.rs
···
1
1
+
use jacquard_common::types::string::Datetime;
2
2
+
use jacquard_common::CowStr;
3
3
+
use jacquard_lexicon::lexicon::{
4
4
+
Lexicon, LexObject, LexObjectProperty, LexRecord, LexRecordRecord, LexString,
5
5
+
LexStringFormat, LexUserType, LexiconDoc,
6
6
+
};
7
7
+
use jacquard_lexicon::schema::{LexiconGenerator, LexiconSchema, ValidationError};
8
8
+
use std::collections::BTreeMap;
9
9
+
10
10
+
// Simple test type used to exercise a hand-written `LexiconSchema` impl.
#[derive(Debug, Clone)]
struct SimpleRecord<'a> {
    // Free-form text; `validate()` rejects values longer than 1000.
    text: CowStr<'a>,
    // Creation time; the schema declares it with the `datetime` string format.
    timestamp: Datetime,
}
16
16
+
17
17
+
impl LexiconSchema for SimpleRecord<'_> {
18
18
+
fn nsid() -> &'static str {
19
19
+
"com.example.simple"
20
20
+
}
21
21
+
22
22
+
fn lexicon_doc(_generator: &mut LexiconGenerator) -> LexiconDoc<'static> {
23
23
+
let mut properties = BTreeMap::new();
24
24
+
25
25
+
properties.insert(
26
26
+
"text".into(),
27
27
+
LexObjectProperty::String(LexString {
28
28
+
description: None,
29
29
+
format: None,
30
30
+
default: None,
31
31
+
min_length: None,
32
32
+
max_length: Some(1000),
33
33
+
min_graphemes: None,
34
34
+
max_graphemes: None,
35
35
+
r#enum: None,
36
36
+
r#const: None,
37
37
+
known_values: None,
38
38
+
}),
39
39
+
);
40
40
+
41
41
+
properties.insert(
42
42
+
"timestamp".into(),
43
43
+
LexObjectProperty::String(LexString {
44
44
+
description: None,
45
45
+
format: Some(LexStringFormat::Datetime),
46
46
+
default: None,
47
47
+
min_length: None,
48
48
+
max_length: None,
49
49
+
min_graphemes: None,
50
50
+
max_graphemes: None,
51
51
+
r#enum: None,
52
52
+
r#const: None,
53
53
+
known_values: None,
54
54
+
}),
55
55
+
);
56
56
+
57
57
+
let record_obj = LexObject {
58
58
+
description: None,
59
59
+
required: Some(vec!["text".into(), "timestamp".into()]),
60
60
+
nullable: None,
61
61
+
properties,
62
62
+
};
63
63
+
64
64
+
let record = LexRecord {
65
65
+
description: Some("Simple record type".into()),
66
66
+
key: Some("tid".into()),
67
67
+
record: LexRecordRecord::Object(record_obj),
68
68
+
};
69
69
+
70
70
+
let mut defs = BTreeMap::new();
71
71
+
defs.insert("main".into(), LexUserType::Record(record));
72
72
+
73
73
+
LexiconDoc {
74
74
+
lexicon: Lexicon::Lexicon1,
75
75
+
id: Self::nsid().into(),
76
76
+
revision: None,
77
77
+
description: Some("Test schema".into()),
78
78
+
defs,
79
79
+
}
80
80
+
}
81
81
+
82
82
+
fn validate(&self) -> Result<(), ValidationError> {
83
83
+
// Check text length
84
84
+
if self.text.len() > 1000 {
85
85
+
return Err(ValidationError::MaxLength {
86
86
+
field: "text",
87
87
+
max: 1000,
88
88
+
actual: self.text.len(),
89
89
+
});
90
90
+
}
91
91
+
92
92
+
Ok(())
93
93
+
}
94
94
+
}
95
95
+
96
96
+
#[test]
fn test_manual_impl_generates_valid_schema() {
    let mut generator = LexiconGenerator::new(SimpleRecord::nsid());
    let doc = SimpleRecord::lexicon_doc(&mut generator);

    // Structural checks first.
    assert!(doc.defs.contains_key("main"));
    assert_eq!(doc.id.as_ref(), "com.example.simple");

    // Then make sure the document renders as valid lexicon JSON.
    let rendered = serde_json::to_string_pretty(&doc).expect("serialize");
    println!("{}", rendered);

    assert!(rendered.contains("\"lexicon\": 1"));
    assert!(rendered.contains("\"id\": \"com.example.simple\""));
}
113
113
+
114
114
+
#[test]
fn test_validation_works() {
    // 5000 chars of text blows past the 1000 cap enforced by validate().
    let record = SimpleRecord {
        text: "a".repeat(5000).into(),
        timestamp: Datetime::now(),
    };

    let err = record.validate().unwrap_err();
    assert!(matches!(err, ValidationError::MaxLength { .. }));
}
127
127
+
128
128
+
#[test]
fn test_validation_passes() {
    // Well within limits, so validation succeeds.
    let record = SimpleRecord {
        text: "Hello, world!".into(),
        timestamp: Datetime::now(),
    };

    assert!(record.validate().is_ok());
}