Tholp's bespoke website generator

Named sections

+99 -113
+1 -1
src/args.rs
··· 1 - use clap::{Args, Parser, Subcommand}; 1 + use clap::{Parser, Subcommand}; 2 2 3 3 #[derive(Parser, Debug)] 4 4 #[command(version, about, long_about = None)]
-78
src/closures.rs
··· 1 - // Closures are essentally blocked macros that change behavior on symbol instead of name 2 - // Instances of most types of closures can be named as sections ... 3 - // ... to work with !insert() to pick certain parts out of a file 4 - 5 - use boa_engine::Context; 6 - 7 - use crate::{ 8 - project::Project, 9 - types::{SkidContext, Token}, 10 - }; 11 - 12 - type ClosureFunction = fn(&[Token], &mut Project, &mut SkidContext) -> Vec<Token>; 13 - 14 - pub struct Closure { 15 - pub opener: &'static str, 16 - pub opener2: &'static str, 17 - pub closer: &'static str, 18 - pub function: ClosureFunction, 19 - } 20 - 21 - // (opener) name (opener2) ... (closer) 22 - 23 - // << js ! .. >> 24 - // <!-- comment --> 25 - // [ name {{ .. }}] 26 - 27 - // <!-- ... --> comment 28 - // ?name<< >> named js 29 - // ?name[[ ]] named section 30 - // ?<< >> js 31 - // ?[[ ]] section 32 - // ?name^[[]] named emphemeral section 33 - // ?name-[[]] named inverted section 34 - 35 - pub static CLOSURE_LIST: &'static [Closure] = &[ 36 - Closure { 37 - opener: "?", 38 - opener2: "<<", 39 - closer: ">>", 40 - function: closure_js, 41 - }, 42 - Closure { 43 - opener: "<!--", 44 - opener2: "", // blank means it doesnt accept a name 45 - closer: "-->", 46 - function: closure_comment, 47 - }, 48 - Closure { 49 - opener: "?", 50 - opener2: "{{", 51 - closer: "}}", 52 - function: closure_section, 53 - }, 54 - ]; 55 - 56 - fn closure_comment( 57 - _tokens: &[Token], 58 - _project_context: &mut Project, 59 - _skid_context: &mut SkidContext, 60 - ) -> Vec<Token> { 61 - Vec::new() 62 - } 63 - 64 - fn closure_section( 65 - tokens: &[Token], 66 - _project_context: &mut Project, 67 - _skid_context: &mut SkidContext, 68 - ) -> Vec<Token> { 69 - tokens.to_vec() 70 - } 71 - 72 - fn closure_js( 73 - tokens: &[Token], 74 - project_context: &mut Project, 75 - skid_context: &mut SkidContext, 76 - ) -> Vec<Token> { 77 - Vec::new() 78 - }
+4
src/console.rs
··· 67 67 msg 68 68 ); 69 69 } 70 + 71 + pub fn info_generic(msg: &String) { 72 + println!("{} {}", "[INFO]".purple(), msg); 73 + }
+35 -7
src/macros/insert.rs
··· 2 2 3 3 use crate::{ 4 4 console::error_skid, 5 - macros::template::SkidTemplate, 5 + process_skid, 6 6 project::{Indexing, Project}, 7 7 stringtools::split_to_tokens, 8 8 types::{SkidContext, Token}, ··· 11 11 pub fn macro_insert( 12 12 origin_index: usize, 13 13 origin_line: usize, 14 - context: &mut Project, 15 - _skid_context: &mut SkidContext, 14 + proj_context: &mut Project, 15 + skid_context: &mut SkidContext, 16 16 args: &Vec<String>, 17 17 _scope: &[Token], 18 18 ) -> Vec<Token> { 19 - let origin_file = context 19 + let origin_file = proj_context 20 20 .file_for_index_canonical(origin_index) 21 21 .expect("Macro 'Insert' was given a bad origin index") 22 22 .clone(); 23 23 24 + let mut sections_ids_to_keep = Vec::new(); 25 + 26 + if args.len() > 1 { 27 + for a in &args[1..] { 28 + let id = proj_context.index_of_section_name(a); 29 + sections_ids_to_keep.push(id); 30 + } 31 + } 32 + 24 33 let mut arg = args[0].clone(); 25 34 let mut search_from_root = arg.starts_with("//"); 26 35 let mut ok = false; ··· 44 53 } 45 54 46 55 if search_from_root { 47 - let mut include_path = context.input_folder.clone(); 56 + let mut include_path = proj_context.input_folder.clone(); 48 57 include_path.push(&arg); 49 58 50 59 if include_path.exists() && include_path.is_file() { ··· 54 63 } 55 64 56 65 if !ok { 57 - error_skid(context, origin_index, origin_line, &format!("Insert was unable to find the file \"{}\" relative to its origin or in project root.", arg)); 66 + error_skid(proj_context, origin_index, origin_line, &format!("Insert was unable to find the file \"{}\" relative to its origin or in project root.", arg)); 58 67 } 59 68 60 69 let mut output = fs::read_to_string(&include_file).expect("File unreadable or missing"); ··· 62 71 output.pop(); 63 72 } //remove trailing newlines 64 73 65 - return split_to_tokens(output, context.index_of_file(&PathBuf::from(&include_file))); 74 + if sections_ids_to_keep.len() > 0 { 75 + let mut processed = process_skid( 76 + 
&split_to_tokens( 77 + output, 78 + proj_context.index_of_file(&PathBuf::from(&include_file)), 79 + ), 80 + proj_context, 81 + skid_context, 82 + ); 83 + processed.retain(|t| sections_ids_to_keep.contains(&t.section_index)); 84 + for t in &mut processed { 85 + t.pre_proccessed = true; 86 + } 87 + return processed; 88 + } else { 89 + return split_to_tokens( 90 + output, 91 + proj_context.index_of_file(&PathBuf::from(&include_file)), 92 + ); 93 + } 66 94 }
+1 -1
src/macros/mod.rs
··· 18 18 expansion: macro_insert, 19 19 takes_block: false, 20 20 min_args: 1, 21 - max_args: 1, 21 + max_args: usize::max_value(), 22 22 }, 23 23 Macro { 24 24 symbol: "time",
+15 -7
src/macros/simple_blocks.rs
··· 1 1 // This file for implementations of short blocks, im qualifying that as less than 30ish lines 2 2 use crate::{ 3 - console::*, 4 - project::Project, 5 - stringtools::TokenTools, 3 + project::{Indexing, Project}, 6 4 types::{SkidContext, Token}, 7 5 }; 8 6 ··· 20 18 pub fn macro_section( 21 19 _origin_index: usize, 22 20 _origin_line: usize, 23 - _context: &mut Project, 21 + proj_context: &mut Project, 24 22 _skid_context: &mut SkidContext, 25 - _args: &Vec<String>, 23 + args: &Vec<String>, 26 24 scope: &[Token], 27 25 ) -> Vec<Token> { 28 26 let mut tokens = Vec::new(); 29 - for tok in scope { 30 - tokens.push(tok.clone()); 27 + if args.len() == 1 { 28 + let section_index = proj_context.index_of_section_name(&args[0]); 29 + for tok in scope { 30 + let mut new = tok.clone(); 31 + new.section_index = section_index; 32 + tokens.push(new); 33 + } 34 + } else { 35 + for tok in scope { 36 + tokens.push(tok.clone()); 37 + } 31 38 } 39 + 32 40 return tokens; 33 41 } 34 42
-1
src/macros/simple_macros.rs
··· 4 4 use chrono::Local; 5 5 6 6 use crate::{ 7 - args, 8 7 console::{error_skid, reminder_skid, warn_skid}, 9 8 project::{Indexing, Project}, 10 9 stringtools::split_to_tokens,
+9 -5
src/main.rs
··· 1 1 mod args; 2 - mod closures; 3 2 mod console; 4 3 mod macros; 5 4 mod project; ··· 24 23 path::PathBuf, 25 24 }; 26 25 use stringtools::{collect_arguments, collect_block, split_to_tokens}; 27 - use types::{InputFile, Token}; 26 + use types::Token; 28 27 29 28 // really need to change this whole thing to work with characters rather than 30 29 // strings split on kind of abitrary chars.. ··· 58 57 project_path = project_folder.clone(); 59 58 project_path.push("skidmark.toml"); 60 59 } 61 - println!("Operatting on {:?}", &project_path.as_os_str()); 60 + info_generic(&format!("Operatting on {:?}", &project_path.as_os_str())); 62 61 assert!(env::set_current_dir(&project_folder).is_ok()); 63 62 64 63 let mut project = parse_project(&project_path); ··· 69 68 num = num + group.files.len(); 70 69 } 71 70 72 - println!("Proccesing {} files.", num); 71 + info_generic(&format!("Proccesing {} files.", num)); 73 72 // for group in &mut project.filegroups { 74 73 // for infile in &mut group.files { 75 74 // process_skid(infile, group.convert_html, &mut project.context); ··· 288 287 None 289 288 } 290 289 291 - fn process_skid( 290 + pub fn process_skid( 292 291 tokens_in: &[Token], 293 292 proj_context: &mut Project, 294 293 skid_context: &mut SkidContext, ··· 306 305 let mut working_index = 0; 307 306 308 307 while working_index < tokens.len() { 308 + if tokens[working_index].pre_proccessed { 309 + working_index += 1; 310 + continue; 311 + } 312 + 309 313 if tokens[working_index] == '\\' && !escaped { 310 314 tokens[working_index].contents = '\0'; // skip over this later when outputting to avoid shifting memory rn 311 315 escaped = true;
+24 -4
src/project.rs
··· 15 15 pub global_post_insert: PathBuf, 16 16 17 17 pub filemap: Vec<PathBuf>, // mapped to index 18 + pub section_name_map: Vec<String>, 18 19 } 19 20 20 21 pub struct FileGroup { ··· 65 66 66 67 let mut project: Project = Project { 67 68 filegroups: Vec::new(), 68 - //context: ProjectContext { 69 69 input_folder: PathBuf::new(), 70 70 output_folder: PathBuf::new(), 71 71 global_pre_insert: PathBuf::new(), 72 72 global_post_insert: PathBuf::new(), 73 73 filemap: Vec::new(), 74 - //}, 74 + section_name_map: Vec::new(), 75 75 }; 76 76 let config = tomlfile 77 77 .parse::<Table>() ··· 173 173 fn file_for_index(&self, i: usize) -> Option<PathBuf>; 174 174 fn file_for_index_canonical(&self, i: usize) -> Option<&PathBuf>; 175 175 176 - // fn index_of_section_name(&mut self, name: String) -> usize; 177 - // fn section_name_for_index(&self, index: usize) -> String; 176 + fn index_of_section_name(&mut self, name: &String) -> usize; 177 + fn section_name_for_index(&self, index: usize) -> Option<&String>; 178 178 } 179 179 180 180 impl Indexing for Project { ··· 204 204 return None; 205 205 } 206 206 return Some(&self.filemap[i]); 207 + } 208 + 209 + // Some weirdly placed + and - 1 because 0 is the default index 210 + fn index_of_section_name(&mut self, name: &String) -> usize { 211 + let mut index = 0; 212 + while index < self.section_name_map.len() { 213 + if *name == self.section_name_map[index] { 214 + return index + 1; 215 + } 216 + index += 1; 217 + } 218 + self.section_name_map.push(name.clone()); 219 + return self.section_name_map.len(); 220 + } 221 + 222 + fn section_name_for_index(&self, index: usize) -> Option<&String> { 223 + if (index - 1) >= self.section_name_map.len() { 224 + return None; 225 + } 226 + return Some(&self.section_name_map[index - 1]); 207 227 } 208 228 }
+3
src/stringtools.rs
··· 1 1 use super::DELIMITERS; 2 2 use crate::types::Token; 3 3 4 + //TODO: There's a couple of functions that are still written like tokens are strings not chars, they work fine 5 + // for now but they may need to be changed later 6 + 4 7 pub fn collect_arguments(tokens: &[Token]) -> Option<(Vec<String>, usize)> { 5 8 // Returns arguments vec and number of tokens to be consumed 6 9 //let mut output = Vec::new();
+7 -9
src/types.rs
··· 12 12 pub origin_index: usize, 13 13 pub template_origin: usize, 14 14 pub origin_line: usize, 15 - pub section_name_index: usize, 15 + pub section_index: usize, 16 + pub pre_proccessed: bool, 16 17 } 17 18 18 19 impl PartialEq<char> for Token { ··· 134 135 impl Token { 135 136 pub fn new(contents: char, origin_file: usize, line_number: usize) -> Token { 136 137 Token { 137 - contents: contents, 138 + contents, 138 139 origin_index: origin_file, 139 140 template_origin: origin_file, 140 141 origin_line: line_number, 141 - section_name_index: 0, 142 + section_index: 0, 143 + pre_proccessed: false, 142 144 } 143 145 } 144 146 } 145 147 146 - // impl ToString for Token { 147 - // fn to_string(&self) -> String { 148 - // return self.contents.clone(); 149 - // } 150 - // } 151 - 152 148 impl Clone for Token { 153 149 fn clone(&self) -> Self { 154 150 let mut t = Token::new( ··· 156 152 self.origin_index.clone(), 157 153 self.origin_line, 158 154 ); 155 + t.section_index = self.section_index; 159 156 t.template_origin = self.template_origin; 157 + t.pre_proccessed = self.pre_proccessed; 160 158 return t; 161 159 } 162 160 }