Forked from atscan.net/plcbundle-rs — a high-performance implementation of plcbundle written in Rust.
1use anyhow::Result;
2use clap::Args;
3use plcbundle::format::{format_bytes_compact, format_duration_compact};
4use std::path::PathBuf;
5
6#[derive(Args)]
7#[command(
8 about = "List bundles (machine-readable)",
9 long_about = "List bundle metadata in a structured, machine-readable format designed for
10scripting and data processing. Output is tab-separated by default, making it
11easy to parse with standard Unix tools like awk, cut, or process in scripts.
12
13You can customize the output format using --format to select which fields
14to display, and --separator to change the delimiter (useful for CSV export).
15The --human-readable flag automatically converts size fields to human-friendly
16units like KB, MB, or GB.
17
18By default shows the most recent bundles first (like a log), but use --reverse
19to show oldest first. Use --last to limit output to the most recent N bundles.
20
21This command is optimized for programmatic use: output is consistent, fields
22are well-defined, and the format is designed to be easily parsed by both
23humans and machines.",
24 help_template = crate::clap_help!(
25 examples: " # List all bundles\n \
26 {bin} ls\n\n \
27 # Human-readable sizes\n \
28 {bin} ls -h\n\n \
29 # Last 10 bundles\n \
30 {bin} ls -n 10\n\n \
31 # Oldest first\n \
32 {bin} ls --reverse\n\n \
33 # Custom format\n \
34 {bin} ls --format \"bundle,hash,date,size\"\n\n \
35 # CSV format\n \
36 {bin} ls --separator \",\"\n\n \
37 # Scripting examples\n \
38 {bin} ls | awk '{print $1}' # Just bundle numbers\n \
39 {bin} ls | grep 000150 # Find specific bundle\n \
40 {bin} ls -n 5 | cut -f1,4 # First and 4th columns\n \
41 {bin} ls --format bundle,hash # Custom columns\n \
42 {bin} ls --separator \",\" > bundles.csv # Export to CSV"
43 )
44)]
45pub struct LsCommand {
46 /// Show only last N bundles (0 = all)
47 #[arg(short = 'n', long, default_value = "0")]
48 pub last: usize,
49
50 /// Show oldest first (default: newest first)
51 #[arg(long)]
52 pub reverse: bool,
53
54 /// Output format: bundle,hash,date,ops,dids,size,size_h,uncompressed,uncompressed_h,ratio,timespan
55 ///
56 /// Size fields:
57 /// - size: raw bytes
58 /// - size_mb: megabytes (decimal)
59 /// - size_h/size_human: human-readable (e.g., "1.5K", "2.3M", "1.2G")
60 /// - uncompressed: raw bytes
61 /// - uncompressed_mb: megabytes (decimal)
62 /// - uncompressed_h/uncompressed_human: human-readable (e.g., "1.5K", "2.3M")
63 #[arg(long, default_value = "bundle,hash,date,ops,dids,size")]
64 pub format: String,
65
66 /// Omit header row
67 #[arg(long)]
68 pub no_header: bool,
69
70 /// Field separator (default: tab)
71 #[arg(long, default_value = "\t")]
72 pub separator: String,
73
74 /// Print human-readable sizes (e.g., 1.5K, 2.3M, 1.2G)
75 /// Automatically converts 'size' and 'uncompressed' fields to human-readable format
76 #[arg(short = 'h', long = "human-readable")]
77 pub human_readable: bool,
78}
79
80pub fn run(cmd: LsCommand, dir: PathBuf, verbose: bool, quiet: bool) -> Result<()> {
81 let manager = super::utils::create_manager(dir, verbose, quiet, false)?;
82
83 // Get all bundle metadata from the index
84 let bundles = super::utils::get_all_bundle_metadata(&manager);
85
86 if bundles.is_empty() {
87 return Ok(());
88 }
89
90 // Apply limit
91 let display_bundles = if cmd.last > 0 && cmd.last < bundles.len() {
92 bundles[bundles.len() - cmd.last..].to_vec()
93 } else {
94 bundles
95 };
96
97 // Reverse if not --reverse (default is newest first, like log)
98 let display_bundles = if !cmd.reverse {
99 display_bundles.into_iter().rev().collect::<Vec<_>>()
100 } else {
101 display_bundles
102 };
103
104 // Parse format string
105 let fields = parse_format_string(&cmd.format);
106
107 // Print header (unless disabled)
108 if !cmd.no_header {
109 print_header(&fields, &cmd.separator);
110 }
111
112 // Print each bundle
113 for meta in display_bundles {
114 print_bundle_fields(&meta, &fields, &cmd.separator, cmd.human_readable);
115 }
116
117 Ok(())
118}
119
/// Split a comma-separated format string into trimmed, non-empty field names.
///
/// Whitespace around each name is stripped and empty segments (e.g. from
/// `"a,,b"` or trailing commas) are dropped.
fn parse_format_string(format: &str) -> Vec<String> {
    let mut fields = Vec::new();
    for part in format.split(',') {
        let name = part.trim();
        if !name.is_empty() {
            fields.push(name.to_string());
        }
    }
    fields
}
127
128fn print_header(fields: &[String], sep: &str) {
129 let headers: Vec<String> = fields.iter().map(|f| get_field_header(f)).collect();
130 println!("{}", headers.join(sep));
131}
132
/// Map a format field name to the column header printed in the header row.
///
/// Alias fields share a header (e.g. `hash_short` prints as `hash`,
/// `compressed` as `size`); any field not listed here — including unknown
/// names — is echoed back unchanged as its own header.
fn get_field_header(field: &str) -> String {
    let header = match field {
        "hash_short" => "hash",
        "content" | "content_short" => "content_hash",
        "parent" | "parent_short" => "parent_hash",
        "date" | "time" | "date_short" => "date",
        "timestamp" | "unix" => "timestamp",
        "ops" | "operations" => "ops",
        "size" | "compressed" | "size_h" | "size_human" => "size",
        "uncompressed" | "uncompressed_h" | "uncompressed_human" => "uncompressed",
        "timespan" | "duration" => "timespan",
        "start" => "start_time",
        "end" => "end_time",
        "created" => "created_at",
        // Identity mappings (bundle, hash, age, dids, ratio, *_mb, *_seconds, …)
        // and unknown fields fall through unchanged.
        other => other,
    };
    header.to_string()
}
165
166fn print_bundle_fields(
167 meta: &plcbundle::index::BundleMetadata,
168 fields: &[String],
169 sep: &str,
170 human_readable: bool,
171) {
172 let values: Vec<String> = fields
173 .iter()
174 .map(|f| get_field_value(meta, f, human_readable))
175 .collect();
176 println!("{}", values.join(sep));
177}
178
179fn get_field_value(
180 meta: &plcbundle::index::BundleMetadata,
181 field: &str,
182 human_readable: bool,
183) -> String {
184 match field {
185 "bundle" => format!("{}", meta.bundle_number),
186
187 "hash" => meta.hash.clone(),
188 "hash_short" => {
189 if meta.hash.len() >= 12 {
190 meta.hash[..12].to_string()
191 } else {
192 meta.hash.clone()
193 }
194 }
195
196 "content" => meta.content_hash.clone(),
197 "content_short" => {
198 if meta.content_hash.len() >= 12 {
199 meta.content_hash[..12].to_string()
200 } else {
201 meta.content_hash.clone()
202 }
203 }
204
205 "parent" => meta.parent.clone(),
206 "parent_short" => {
207 if meta.parent.len() >= 12 {
208 meta.parent[..12].to_string()
209 } else {
210 meta.parent.clone()
211 }
212 }
213
214 "date" | "time" => {
215 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
216 dt.format("%Y-%m-%dT%H:%M:%SZ").to_string()
217 } else {
218 meta.end_time.clone()
219 }
220 }
221
222 "date_short" => {
223 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
224 dt.format("%Y-%m-%d").to_string()
225 } else {
226 meta.end_time.clone()
227 }
228 }
229
230 "timestamp" | "unix" => {
231 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
232 format!("{}", dt.timestamp())
233 } else {
234 "0".to_string()
235 }
236 }
237
238 "age" => {
239 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
240 let age = chrono::Utc::now().signed_duration_since(dt);
241 format_duration_compact(age)
242 } else {
243 "unknown".to_string()
244 }
245 }
246
247 "age_seconds" => {
248 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
249 let age = chrono::Utc::now().signed_duration_since(dt);
250 format!("{:.0}", age.num_seconds())
251 } else {
252 "0".to_string()
253 }
254 }
255
256 "ops" | "operations" => format!("{}", meta.operation_count),
257 "dids" => format!("{}", meta.did_count),
258
259 "size" | "compressed" => {
260 if human_readable {
261 format_bytes_compact(meta.compressed_size)
262 } else {
263 format!("{}", meta.compressed_size)
264 }
265 }
266 "size_mb" => format!("{:.2}", meta.compressed_size as f64 / (1024.0 * 1024.0)),
267 "size_h" | "size_human" => format_bytes_compact(meta.compressed_size),
268
269 "uncompressed" => {
270 if human_readable {
271 format_bytes_compact(meta.uncompressed_size)
272 } else {
273 format!("{}", meta.uncompressed_size)
274 }
275 }
276 "uncompressed_mb" => format!("{:.2}", meta.uncompressed_size as f64 / (1024.0 * 1024.0)),
277 "uncompressed_h" | "uncompressed_human" => format_bytes_compact(meta.uncompressed_size),
278
279 "ratio" => {
280 if meta.compressed_size > 0 {
281 let ratio = meta.uncompressed_size as f64 / meta.compressed_size as f64;
282 format!("{:.2}", ratio)
283 } else {
284 "0".to_string()
285 }
286 }
287
288 "timespan" | "duration" => {
289 if let (Ok(start), Ok(end)) = (
290 chrono::DateTime::parse_from_rfc3339(&meta.start_time),
291 chrono::DateTime::parse_from_rfc3339(&meta.end_time),
292 ) {
293 let duration = end.signed_duration_since(start);
294 format_duration_compact(duration)
295 } else {
296 "unknown".to_string()
297 }
298 }
299
300 "timespan_seconds" => {
301 if let (Ok(start), Ok(end)) = (
302 chrono::DateTime::parse_from_rfc3339(&meta.start_time),
303 chrono::DateTime::parse_from_rfc3339(&meta.end_time),
304 ) {
305 let duration = end.signed_duration_since(start);
306 format!("{:.0}", duration.num_seconds())
307 } else {
308 "0".to_string()
309 }
310 }
311
312 "start" => {
313 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.start_time) {
314 dt.format("%Y-%m-%dT%H:%M:%SZ").to_string()
315 } else {
316 meta.start_time.clone()
317 }
318 }
319
320 "end" => {
321 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.end_time) {
322 dt.format("%Y-%m-%dT%H:%M:%SZ").to_string()
323 } else {
324 meta.end_time.clone()
325 }
326 }
327
328 "created" => {
329 if let Ok(dt) = chrono::DateTime::parse_from_rfc3339(&meta.created_at) {
330 dt.format("%Y-%m-%dT%H:%M:%SZ").to_string()
331 } else {
332 meta.created_at.clone()
333 }
334 }
335
336 _ => String::new(),
337 }
338}