//! A better Rust ATProto crate
1use crate::corpus::LexiconCorpus;
2use crate::error::{CodegenError, Result};
3use crate::lexicon::{LexArrayItem, LexUserType};
4use proc_macro2::TokenStream;
5use quote::quote;
6
7pub(crate) mod builder_gen;
8pub(crate) mod builder_heuristics;
9pub(crate) mod lifetime;
10pub(crate) mod names;
11pub(crate) mod nsid_utils;
12pub(crate) mod output;
13pub(crate) mod schema_impl;
14pub(crate) mod structs;
15pub(crate) mod types;
16pub(crate) mod union_codegen;
17pub(crate) mod utils;
18pub(crate) mod xrpc;
19
/// Code generator for lexicon types
pub struct CodeGenerator<'c> {
    /// Corpus of parsed lexicon documents the generator resolves NSIDs against
    corpus: &'c LexiconCorpus,
    /// Root module name supplied at construction (e.g. "jacquard_api");
    /// presumably the crate root that generated fully-qualified paths refer to
    /// — confirm against `types`/`output` submodules
    root_module: String,
    /// Track namespace dependencies (namespace -> set of namespaces it depends on)
    namespace_deps:
        std::cell::RefCell<std::collections::HashMap<String, std::collections::HashSet<String>>>,
    /// Track which file paths contain subscription endpoints
    subscription_files: std::cell::RefCell<std::collections::HashSet<std::path::PathBuf>>,
    /// Track which NSIDs have already generated their shared lexicon_doc function
    generated_shared_docs: std::cell::RefCell<std::collections::HashSet<String>>,
}
32
33impl<'c> CodeGenerator<'c> {
34 /// Create a new code generator
35 pub fn new(corpus: &'c LexiconCorpus, root_module: impl Into<String>) -> Self {
36 Self {
37 corpus,
38 root_module: root_module.into(),
39 namespace_deps: std::cell::RefCell::new(std::collections::HashMap::new()),
40 subscription_files: std::cell::RefCell::new(std::collections::HashSet::new()),
41 generated_shared_docs: std::cell::RefCell::new(std::collections::HashSet::new()),
42 }
43 }
44
45 /// Generate doc comment from optional description (wrapper for utils function)
46 fn generate_doc_comment(&self, desc: Option<&jacquard_common::CowStr>) -> TokenStream {
47 utils::generate_doc_comment(desc)
48 }
49
50 /// Track namespace dependency when a ref crosses namespace boundaries
51 pub(crate) fn track_ref_namespace_dep(&self, current_nsid: &str, ref_str: &str) {
52 use nsid_utils::NsidPath;
53
54 let current_path = NsidPath::parse(current_nsid);
55 let ref_path = nsid_utils::RefPath::parse(ref_str, None);
56 let ref_nsid_path = NsidPath::parse(ref_path.nsid());
57
58 let current_ns = current_path.namespace();
59 let ref_ns = ref_nsid_path.namespace();
60
61 // Only track if crossing namespace boundaries
62 if current_ns != ref_ns {
63 self.namespace_deps
64 .borrow_mut()
65 .entry(current_ns)
66 .or_default()
67 .insert(ref_ns);
68 }
69 }
70
71 /// Generate or reference the shared lexicon_doc function for this NSID
72 /// Returns (optional shared function, trait impl tokens)
73 pub(crate) fn generate_schema_impl_with_shared(
74 &self,
75 type_name: &str,
76 nsid: &str,
77 def_name: &str,
78 has_lifetime: bool,
79 ) -> (Option<TokenStream>, TokenStream) {
80 let lex_doc = self.corpus.get(nsid).expect("nsid exists in corpus");
81
82 // Generate shared function name from NSID (use sanitize_name for proper handling)
83 let shared_fn_name = format!("lexicon_doc_{}", utils::sanitize_name(nsid));
84 let shared_fn_ident = syn::Ident::new(&shared_fn_name, proc_macro2::Span::call_site());
85
86 // Check if we need to generate the shared function
87 let mut generated = self.generated_shared_docs.borrow_mut();
88 let shared_fn = if !generated.contains(nsid) {
89 generated.insert(nsid.to_string());
90 // Codegen from JSON doesn't have union_fields (those are for Rust -> lexicon derive)
91 let doc_literal = crate::derive_impl::doc_to_tokens::doc_to_tokens(
92 lex_doc,
93 &std::collections::BTreeMap::new(),
94 );
95 Some(quote! {
96 fn #shared_fn_ident() -> ::jacquard_lexicon::lexicon::LexiconDoc<'static> {
97 #doc_literal
98 }
99 })
100 } else {
101 None
102 };
103
104 // Generate lightweight trait impl that calls shared function
105 let type_ident = syn::Ident::new(type_name, proc_macro2::Span::call_site());
106 let (impl_generics, type_generics) = if has_lifetime {
107 (quote! { <'a> }, quote! { <'a> })
108 } else {
109 (quote! {}, quote! {})
110 };
111
112 // Extract validation checks for this specific def
113 let validation_checks = schema_impl::extract_validation_checks(lex_doc, def_name);
114 let validation_code =
115 crate::derive_impl::doc_to_tokens::validations_to_tokens(&validation_checks);
116
117 let trait_impl = quote! {
118 impl #impl_generics ::jacquard_lexicon::schema::LexiconSchema for #type_ident #type_generics {
119 fn nsid() -> &'static str {
120 #nsid
121 }
122
123 fn def_name() -> &'static str {
124 #def_name
125 }
126
127 fn lexicon_doc() -> ::jacquard_lexicon::lexicon::LexiconDoc<'static> {
128 #shared_fn_ident()
129 }
130
131 fn validate(&self) -> ::core::result::Result<(), ::jacquard_lexicon::validation::ConstraintError> {
132 #validation_code
133 }
134 }
135 };
136
137 (shared_fn, trait_impl)
138 }
139
140 /// Generate code for a lexicon def
141 pub fn generate_def(
142 &self,
143 nsid: &str,
144 def_name: &str,
145 def: &LexUserType<'static>,
146 ) -> Result<TokenStream> {
147 match def {
148 LexUserType::Record(record) => self.generate_record(nsid, def_name, record),
149 LexUserType::Object(obj) => self.generate_object(nsid, def_name, obj),
150 LexUserType::XrpcQuery(query) => self.generate_query(nsid, def_name, query),
151 LexUserType::XrpcProcedure(proc) => self.generate_procedure(nsid, def_name, proc),
152 LexUserType::Token(token) => {
153 // Token types are marker structs that can be used as union refs
154 let type_name = self.def_to_type_name(nsid, def_name);
155 let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
156 let doc = self.generate_doc_comment(token.description.as_ref());
157
158 // Token name for Display impl (just the def name, not the full ref)
159 let token_name = def_name;
160
161 Ok(quote! {
162 #doc
163 #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq, Hash, jacquard_derive::IntoStatic)]
164 pub struct #ident;
165
166 impl std::fmt::Display for #ident {
167 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
168 write!(f, #token_name)
169 }
170 }
171 })
172 }
173 LexUserType::String(s) if s.known_values.is_some() => {
174 self.generate_known_values_enum(nsid, def_name, s)
175 }
176 LexUserType::String(s) => {
177 // Plain string type alias
178 let type_name = self.def_to_type_name(nsid, def_name);
179 let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
180 let rust_type = self.string_to_rust_type(s);
181 let doc = self.generate_doc_comment(s.description.as_ref());
182 Ok(quote! {
183 #doc
184 pub type #ident<'a> = #rust_type;
185 })
186 }
187 LexUserType::Integer(i) if i.r#enum.is_some() => {
188 self.generate_integer_enum(nsid, def_name, i)
189 }
190 LexUserType::Array(array) => {
191 // Top-level array becomes type alias to Vec<ItemType>
192 let type_name = self.def_to_type_name(nsid, def_name);
193 let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
194 let doc = self.generate_doc_comment(array.description.as_ref());
195 let needs_lifetime = self.array_item_needs_lifetime(&array.items);
196
197 // Check if items are a union - if so, generate the union enum first
198 if let LexArrayItem::Union(union) = &array.items {
199 let union_name = format!("{}Item", type_name);
200 let refs: Vec<_> = union.refs.iter().cloned().collect();
201 let union_def =
202 self.generate_union(nsid, &union_name, &refs, None, union.closed)?;
203
204 let union_ident = syn::Ident::new(&union_name, proc_macro2::Span::call_site());
205 if needs_lifetime {
206 Ok(quote! {
207 #union_def
208
209 #doc
210 pub type #ident<'a> = Vec<#union_ident<'a>>;
211 })
212 } else {
213 Ok(quote! {
214 #union_def
215
216 #doc
217 pub type #ident = Vec<#union_ident>;
218 })
219 }
220 } else {
221 // Regular array item type
222 let item_type = self.array_item_to_rust_type(nsid, &array.items)?;
223 if needs_lifetime {
224 Ok(quote! {
225 #doc
226 pub type #ident<'a> = Vec<#item_type>;
227 })
228 } else {
229 Ok(quote! {
230 #doc
231 pub type #ident = Vec<#item_type>;
232 })
233 }
234 }
235 }
236 LexUserType::Boolean(_)
237 | LexUserType::Integer(_)
238 | LexUserType::Bytes(_)
239 | LexUserType::CidLink(_)
240 | LexUserType::Unknown(_) => {
241 // These are rarely top-level defs, but if they are, make type aliases
242 let type_name = self.def_to_type_name(nsid, def_name);
243 let ident = syn::Ident::new(&type_name, proc_macro2::Span::call_site());
244 let (rust_type, needs_lifetime) = match def {
245 LexUserType::Boolean(_) => (quote! { bool }, false),
246 LexUserType::Integer(_) => (quote! { i64 }, false),
247 LexUserType::Bytes(_) => (quote! { bytes::Bytes }, false),
248 LexUserType::CidLink(_) => {
249 (quote! { jacquard_common::types::cid::CidLink<'a> }, true)
250 }
251 LexUserType::Unknown(_) => {
252 (quote! { jacquard_common::types::value::Data<'a> }, true)
253 }
254 _ => unreachable!(),
255 };
256 if needs_lifetime {
257 Ok(quote! {
258 pub type #ident<'a> = #rust_type;
259 })
260 } else {
261 Ok(quote! {
262 pub type #ident = #rust_type;
263 })
264 }
265 }
266 LexUserType::Blob(_) => Err(CodegenError::unsupported(
267 format!("top-level def type {:?}", def),
268 nsid,
269 None::<String>,
270 )),
271 LexUserType::XrpcSubscription(sub) => {
272 // Track this file as containing a subscription
273 let file_path = self.nsid_to_file_path(nsid);
274 self.subscription_files.borrow_mut().insert(file_path);
275 self.generate_subscription(nsid, def_name, sub)
276 }
277 LexUserType::Union(union) => {
278 // Top-level union generates an enum
279 let type_name = self.def_to_type_name(nsid, def_name);
280 let refs: Vec<_> = union.refs.iter().cloned().collect();
281 self.generate_union(
282 nsid,
283 &type_name,
284 &refs,
285 union.description.as_ref().map(|d| d.as_ref()),
286 union.closed,
287 )
288 }
289 }
290 }
291}
292
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_generate_record() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("app.bsky.feed.post").expect("get post");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("app.bsky.feed.post", "main", def)
            .expect("generate");

        // Format and print for inspection
        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check basic structure
        assert!(formatted.contains("struct Post"));
        assert!(formatted.contains("pub text"));
        assert!(formatted.contains("CowStr<'a>"));
    }

    #[test]
    fn test_generate_union() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Create a union with embed types
        let refs = vec![
            "app.bsky.embed.images".into(),
            "app.bsky.embed.video".into(),
            "app.bsky.embed.external".into(),
        ];

        let tokens = codegen
            .generate_union(
                "app.bsky.feed.post",
                "RecordEmbed",
                &refs,
                Some("Post embed union"),
                None,
            )
            .expect("generate union");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("enum RecordEmbed"));
        assert!(formatted.contains("Images"));
        assert!(formatted.contains("Video"));
        assert!(formatted.contains("External"));
        assert!(formatted.contains("#[serde(tag = \"$type\")]"));
        assert!(formatted.contains("#[jacquard_derive::open_union]"));
    }

    #[test]
    fn test_generate_query() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("app.bsky.feed.getAuthorFeed")
            .expect("get getAuthorFeed");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("app.bsky.feed.getAuthorFeed", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("struct GetAuthorFeed"));
        assert!(formatted.contains("struct GetAuthorFeedOutput"));
        assert!(formatted.contains("enum GetAuthorFeedError"));
        assert!(formatted.contains("pub actor"));
        assert!(formatted.contains("pub limit"));
        assert!(formatted.contains("pub cursor"));
        assert!(formatted.contains("pub feed"));
        assert!(formatted.contains("BlockedActor"));
        assert!(formatted.contains("BlockedByActor"));
    }

    #[test]
    fn test_generate_known_values_enum() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.label.defs")
            .expect("get label defs");
        let def = doc.defs.get("labelValue").expect("get labelValue def");

        let tokens = codegen
            .generate_def("com.atproto.label.defs", "labelValue", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure
        assert!(formatted.contains("enum LabelValue"));
        assert!(formatted.contains("Hide"));
        assert!(formatted.contains("NoPromote"));
        assert!(formatted.contains("Warn"));
        assert!(formatted.contains("DmcaViolation"));
        assert!(formatted.contains("Other(jacquard_common::CowStr"));
        assert!(formatted.contains("impl<'a> From<&'a str>"));
        assert!(formatted.contains("fn as_str(&self)"));
    }

    #[test]
    fn test_nsid_to_file_path() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        // Regular paths
        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.post"),
            std::path::PathBuf::from("app_bsky/feed/post.rs")
        );

        assert_eq!(
            codegen.nsid_to_file_path("app.bsky.feed.getAuthorFeed"),
            std::path::PathBuf::from("app_bsky/feed/get_author_feed.rs")
        );

        // Defs paths - should go in parent
        assert_eq!(
            codegen.nsid_to_file_path("com.atproto.label.defs"),
            std::path::PathBuf::from("com_atproto/label.rs")
        );
    }

    #[test]
    fn test_write_to_disk() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "test_generated");

        // tempdir() hands us a fresh, empty directory and removes it on drop,
        // so no manual pre-clean is needed (a remove_dir_all here would delete
        // the directory tempfile just created for us).
        let tmp_dir =
            tempfile::tempdir().expect("should be able to create temp directory for output");
        let output_dir = tmp_dir.path().to_path_buf();

        // Generate and write
        codegen.write_to_disk(&output_dir).expect("write to disk");

        // Verify some files were created
        assert!(output_dir.join("app_bsky/feed/post.rs").exists());
        assert!(output_dir.join("app_bsky/feed/get_author_feed.rs").exists());
        assert!(output_dir.join("com_atproto/label.rs").exists());

        // Verify module files were created
        assert!(output_dir.join("lib.rs").exists());
        assert!(output_dir.join("app_bsky.rs").exists());

        // Read and verify post.rs contains expected content
        let post_content = std::fs::read_to_string(output_dir.join("app_bsky/feed/post.rs"))
            .expect("read post.rs");
        assert!(post_content.contains("pub struct Post"));
        assert!(post_content.contains("jacquard_common"));
    }

    #[test]
    fn test_generate_procedure() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.repo.createRecord")
            .expect("get createRecord");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("com.atproto.repo.createRecord", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check structure - procedures have input, output, and error types
        assert!(formatted.contains("struct CreateRecord"));
        assert!(formatted.contains("struct CreateRecordOutput"));
        assert!(formatted.contains("enum CreateRecordError"));
        // Check input fields
        assert!(formatted.contains("pub repo"));
        assert!(formatted.contains("pub collection"));
        assert!(formatted.contains("pub record"));
        // Check output fields
        assert!(formatted.contains("pub uri"));
        assert!(formatted.contains("pub cid"));
        // Check error variants
        assert!(formatted.contains("InvalidSwap"));
        assert!(formatted.contains("InvalidRecord"));
    }

    #[test]
    fn test_generate_subscription() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("com.atproto.sync.subscribeRepos")
            .expect("get subscribeRepos");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("com.atproto.sync.subscribeRepos", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check subscription structure
        assert!(formatted.contains("struct SubscribeRepos"));
        assert!(formatted.contains("enum SubscribeReposMessage"));
        // Check message union variants
        assert!(formatted.contains("Commit"));
        assert!(formatted.contains("Identity"));
        assert!(formatted.contains("Account"));
    }

    // TODO(review): a test for token-type generation was removed here while
    // disabled; re-add one once the fixtures contain a token def (tokens
    // generate unit structs with a Display impl in `generate_def`).

    #[test]
    fn test_generate_array_types() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.array.types").expect("get array types");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("test.array.types", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check different array item types
        assert!(formatted.contains("simple_strings"));
        assert!(formatted.contains("Vec<"));
        // Union array items should generate enum
        assert!(formatted.contains("union_items"));
        // Ref array items
        assert!(formatted.contains("ref_items"));
        // CID link arrays
        assert!(formatted.contains("cid_links"));
    }

    #[test]
    fn test_generate_binary_types() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.binary.types").expect("get binary types");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("test.binary.types", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check binary field types
        assert!(formatted.contains("pub cid"));
        assert!(formatted.contains("CidLink") || formatted.contains("types::cid"));
        assert!(formatted.contains("pub data"));
        assert!(formatted.contains("Bytes"));
        assert!(formatted.contains("pub avatar"));
        assert!(formatted.contains("BlobRef") || formatted.contains("types::blob"));
    }

    #[test]
    fn test_generate_empty_object() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.empty.object").expect("get empty object");
        let def = doc.defs.get("emptyDef").expect("get emptyDef");

        let tokens = codegen
            .generate_def("test.empty.object", "emptyDef", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Empty objects should generate type alias to Data<'a>
        assert!(formatted.contains("type EmptyDef") || formatted.contains("Data<'a>"));
    }

    #[test]
    fn test_generate_multi_def_lexicon() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("pub.leaflet.poll.definition")
            .expect("get poll definition");

        // Test main def
        let main_def = doc.defs.get("main").expect("get main def");
        let main_tokens = codegen
            .generate_def("pub.leaflet.poll.definition", "main", main_def)
            .expect("generate main");
        let main_file: syn::File = syn::parse2(main_tokens).expect("parse main tokens");
        let main_formatted = prettyplease::unparse(&main_file);
        println!("\nMain:\n{}\n", main_formatted);
        assert!(main_formatted.contains("struct Definition"));
        assert!(main_formatted.contains("pub question"));
        assert!(main_formatted.contains("pub options"));

        // Test option fragment
        let option_def = doc.defs.get("option").expect("get option def");
        let option_tokens = codegen
            .generate_def("pub.leaflet.poll.definition", "option", option_def)
            .expect("generate option");
        let option_file: syn::File = syn::parse2(option_tokens).expect("parse option tokens");
        let option_formatted = prettyplease::unparse(&option_file);
        println!("\nOption:\n{}\n", option_formatted);
        assert!(option_formatted.contains("struct DefinitionOption"));
        assert!(option_formatted.contains("pub text"));

        // Test vote fragment
        let vote_def = doc.defs.get("vote").expect("get vote def");
        let vote_tokens = codegen
            .generate_def("pub.leaflet.poll.definition", "vote", vote_def)
            .expect("generate vote");
        let vote_file: syn::File = syn::parse2(vote_tokens).expect("parse vote tokens");
        let vote_formatted = prettyplease::unparse(&vote_file);
        println!("\nVote:\n{}\n", vote_formatted);
        assert!(
            vote_formatted.contains("struct DefinitionVote")
                || vote_formatted.contains("struct Vote")
        );
        assert!(vote_formatted.contains("pub poll_ref"));
        assert!(vote_formatted.contains("pub option_index"));
    }

    #[test]
    fn test_generate_with_constraints_and_defaults() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("test.constraints.validation")
            .expect("get constraints");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("test.constraints.validation", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Check fields with constraints are generated
        assert!(formatted.contains("pub username"));
        assert!(formatted.contains("pub bio"));
        assert!(formatted.contains("pub age"));
        assert!(formatted.contains("pub enabled"));
        assert!(formatted.contains("pub tags"));
        assert!(formatted.contains("pub role"));

        // Constraints should be in docs or validation metadata
        // (exact format depends on codegen implementation)
    }

    #[test]
    fn test_local_refs_in_definitions() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus
            .get("pub.leaflet.poll.definition")
            .expect("get poll definition");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("pub.leaflet.poll.definition", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Local ref #option should resolve to DefinitionOption type (fully qualified or local)
        assert!(
            formatted.contains("Vec<DefinitionOption")
                || formatted
                    .contains("Vec<jacquard_api::pub_leaflet::poll::definition::DefinitionOption")
        );
    }

    #[test]
    fn test_nullable_optional_properties() {
        let corpus =
            LexiconCorpus::load_from_dir("tests/fixtures/test_lexicons").expect("load corpus");
        let codegen = CodeGenerator::new(&corpus, "jacquard_api");

        let doc = corpus.get("test.binary.types").expect("get binary types");
        let def = doc.defs.get("main").expect("get main def");

        let tokens = codegen
            .generate_def("test.binary.types", "main", def)
            .expect("generate");

        let file: syn::File = syn::parse2(tokens).expect("parse tokens");
        let formatted = prettyplease::unparse(&file);
        println!("\n{}\n", formatted);

        // Optional fields should use Option<T>
        assert!(formatted.contains("optional_cid"));
        assert!(formatted.contains("Option<"));
    }
}