// Nothing to see here, move along
/// Returns `true` when `a` and `b` contain exactly the same bytes.
pub fn bytes_eq(a: &[u8], b: &[u8]) -> bool {
    // Slice equality already compares length first, then contents,
    // without the per-index bounds checks of a manual loop.
    a == b
}
4
/// Returns the prefix of `line` up to (but not including) the first
/// ASCII space, or the whole slice when no space is present.
pub fn first_word(line: &[u8]) -> &[u8] {
    let end = line.iter().position(|&b| b == b' ').unwrap_or(line.len());
    &line[..end]
}
11
/// Returns the portion of `path` after the last `'/'`, or the whole
/// slice when no `'/'` is present. A trailing `'/'` yields an empty slice.
pub fn extract_filename(path: &[u8]) -> &[u8] {
    path.iter()
        .rposition(|&b| b == b'/')
        .map_or(path, |pos| &path[pos + 1..])
}
18
/// Returns `s` with any run of leading ASCII spaces removed.
pub fn skip_leading_spaces(s: &[u8]) -> &[u8] {
    // Iterative count instead of the previous one-call-per-byte
    // recursion, which could exhaust the stack on a long space run
    // (Rust does not guarantee tail-call elimination).
    let n = s.iter().take_while(|&&b| b == b' ').count();
    &s[n..]
}
25
/// Splits the path in `work[pos..]` into canonical components.
///
/// Each kept component is recorded into `stack` as a `(start, len)`
/// byte range within `work`, starting at slot `depth`. Empty segments
/// ("//") and "." are dropped, ".." pops one component (saturating at
/// zero), and any component past the 32-entry capacity of `stack` is
/// silently dropped. Returns the number of components held in `stack`.
pub fn canonicalize(
    work: &[u8],
    pos: usize,
    stack: &mut [(usize, usize); 32],
    depth: usize,
) -> usize {
    let mut cursor = pos;
    let mut count = depth;
    while cursor < work.len() {
        // A leading '/' belongs to the separator, not the component.
        let start = if work[cursor] == b'/' { cursor + 1 } else { cursor };
        let end = (start..work.len())
            .find(|&i| work[i] == b'/')
            .unwrap_or(work.len());
        let len = end - start;
        cursor = (end + 1).min(work.len());

        if len == 0 || (len == 1 && work[start] == b'.') {
            continue; // empty segment or "." — no-op
        }
        if len == 2 && work[start] == b'.' && work[start + 1] == b'.' {
            count = count.saturating_sub(1); // ".." pops; saturates at root
        } else if count < 32 {
            stack[count] = (start, len);
            count += 1;
        }
        // else: stack is full — the component is silently dropped
    }
    count
}
60
61pub fn resolve_path(cwd: &[u8], cwd_len: usize, path: &[u8], out: &mut [u8; 256]) -> usize {
62 let mut work = [0u8; 512];
63 let work_len = match path.first() {
64 Some(&b'/') => {
65 let n = path.len().min(512);
66 work[..n].copy_from_slice(&path[..n]);
67 n
68 }
69 _ => {
70 let c = cwd_len.min(255);
71 work[..c].copy_from_slice(&cwd[..c]);
72 work[c] = b'/';
73 let p = path.len().min(512 - c - 1);
74 work[c + 1..c + 1 + p].copy_from_slice(&path[..p]);
75 c + 1 + p
76 }
77 };
78
79 let mut stack: [(usize, usize); 32] = [(0, 0); 32];
80 let depth = canonicalize(&work[..work_len], 0, &mut stack, 0);
81
82 match depth {
83 0 => {
84 out[0] = b'/';
85 1
86 }
87 _ => (0..depth).fold(0usize, |pos, i| {
88 let (s, l) = stack[i];
89 match pos + 1 + l <= 256 {
90 true => {
91 out[pos] = b'/';
92 out[pos + 1..pos + 1 + l].copy_from_slice(&work[s..s + l]);
93 pos + 1 + l
94 }
95 false => pos,
96 }
97 }),
98 }
99}
100
101fn write_resolved_token(
102 cwd: &[u8],
103 cwd_len: usize,
104 token: &[u8],
105 out: &mut [u8; 256],
106 olen: usize,
107) -> usize {
108 match token.first() {
109 Some(&b'/') | None => {
110 let copy = token.len().min(256 - olen);
111 out[olen..olen + copy].copy_from_slice(&token[..copy]);
112 olen + copy
113 }
114 _ => {
115 let mut resolved = [0u8; 256];
116 let rlen = resolve_path(cwd, cwd_len, token, &mut resolved);
117 let copy = rlen.min(256 - olen);
118 out[olen..olen + copy].copy_from_slice(&resolved[..copy]);
119 olen + copy
120 }
121 }
122}
123
124fn resolve_remaining_tokens(
125 cwd: &[u8],
126 cwd_len: usize,
127 remaining: &[u8],
128 out: &mut [u8; 256],
129 olen: usize,
130) -> usize {
131 match remaining.is_empty() || olen >= 256 {
132 true => olen,
133 false => match remaining[0] {
134 b' ' => {
135 let new_olen = match olen < 256 {
136 true => {
137 out[olen] = b' ';
138 olen + 1
139 }
140 false => olen,
141 };
142 resolve_remaining_tokens(cwd, cwd_len, &remaining[1..], out, new_olen)
143 }
144 _ => {
145 let token = first_word(remaining);
146 let rest = &remaining[token.len()..];
147 let new_olen = write_resolved_token(cwd, cwd_len, token, out, olen);
148 resolve_remaining_tokens(cwd, cwd_len, rest, out, new_olen)
149 }
150 },
151 }
152}
153
154pub fn build_resolved_line(cwd: &[u8], cwd_len: usize, line: &[u8], out: &mut [u8; 256]) -> usize {
155 let cmd = first_word(line);
156 let cmd_n = cmd.len().min(256);
157 out[..cmd_n].copy_from_slice(&cmd[..cmd_n]);
158
159 match cmd.len() >= line.len() {
160 true => {
161 out[cmd_n] = b' ';
162 let c = cwd_len.min(255 - cmd_n);
163 out[cmd_n + 1..cmd_n + 1 + c].copy_from_slice(&cwd[..c]);
164 cmd_n + 1 + c
165 }
166 false => {
167 let is_write = bytes_eq(cmd, b"write");
168 let after_cmd = &line[cmd.len()..];
169 match is_write {
170 true => {
171 let trimmed = skip_leading_spaces(after_cmd);
172 match trimmed.is_empty() {
173 true => {
174 let copy = after_cmd.len().min(256 - cmd_n);
175 out[cmd_n..cmd_n + copy].copy_from_slice(&after_cmd[..copy]);
176 cmd_n + copy
177 }
178 false => {
179 let space_count = after_cmd.len() - trimmed.len();
180 out[cmd_n..cmd_n + space_count]
181 .copy_from_slice(&after_cmd[..space_count]);
182 let olen = cmd_n + space_count;
183 let first_arg = first_word(trimmed);
184 let resolved_olen =
185 write_resolved_token(cwd, cwd_len, first_arg, out, olen);
186 let rest = &trimmed[first_arg.len()..];
187 let copy = rest.len().min(256 - resolved_olen);
188 out[resolved_olen..resolved_olen + copy].copy_from_slice(&rest[..copy]);
189 resolved_olen + copy
190 }
191 }
192 }
193 false => resolve_remaining_tokens(cwd, cwd_len, after_cmd, out, cmd_n),
194 }
195 }
196 }
197}