QuickDID is a high-performance AT Protocol identity resolution service written in Rust. It provides handle-to-DID resolution with Redis-backed caching and queue processing.
···11+//! Lexicon resolution with caching support.
22+//!
33+//! This module provides implementations for resolving AT Protocol lexicons (NSIDs)
44+//! to their schemas with various caching strategies.
55+66+mod redis;
77+88+pub use redis::{create_redis_lexicon_resolver, create_redis_lexicon_resolver_with_ttl};
+458
src/lexicon_resolver/redis.rs
···11+//! Redis-backed caching lexicon resolver.
22+//!
33+//! This module provides a lexicon resolver that caches resolution results in Redis
44+//! with configurable expiration times. Redis caching provides persistence across
55+//! service restarts and allows sharing of cached results across multiple instances.
66+77+use crate::metrics::SharedMetricsPublisher;
88+use async_trait::async_trait;
99+use atproto_lexicon::resolve::LexiconResolver;
1010+use deadpool_redis::{Pool as RedisPool, redis::AsyncCommands};
1111+use metrohash::MetroHash64;
1212+use std::hash::Hasher as _;
1313+use std::sync::Arc;
1414+1515+/// Redis-backed caching lexicon resolver.
1616+///
1717+/// This resolver caches lexicon resolution results in Redis with a configurable TTL.
1818+/// Results are stored as JSON bytes to minimize storage overhead while maintaining
1919+/// the schema structure.
2020+///
2121+/// # Features
2222+///
2323+/// - Persistent caching across service restarts
2424+/// - Shared cache across multiple service instances
2525+/// - Configurable TTL (default: 90 days)
2626+/// - JSON storage format for lexicon schemas
2727+/// - Graceful fallback if Redis is unavailable
2828+///
2929+/// # Example
3030+///
3131+/// ```no_run
3232+/// use std::sync::Arc;
3333+/// use deadpool_redis::Pool;
3434+/// use atproto_lexicon::resolve::LexiconResolver;
3535+/// use quickdid::lexicon_resolver::create_redis_lexicon_resolver;
3636+/// use quickdid::metrics::NoOpMetricsPublisher;
3737+///
3838+/// # async fn example() {
3939+/// # let inner_resolver: Arc<dyn LexiconResolver> = todo!();
4040+/// # let redis_pool: Pool = todo!();
4141+/// # let metrics = Arc::new(NoOpMetricsPublisher);
4242+/// // Create with default 90-day TTL
4343+/// let resolver = create_redis_lexicon_resolver(
4444+/// inner_resolver,
4545+/// redis_pool,
4646+/// metrics
4747+/// );
4848+/// # }
4949+/// ```
5050+pub(super) struct RedisLexiconResolver {
5151+ /// Base lexicon resolver to perform actual resolution
5252+ inner: Arc<dyn LexiconResolver>,
5353+ /// Redis connection pool
5454+ pool: RedisPool,
5555+ /// Redis key prefix for lexicon resolution cache
5656+ key_prefix: String,
5757+ /// TTL for cache entries in seconds
5858+ ttl_seconds: u64,
5959+ /// Metrics publisher for telemetry
6060+ metrics: SharedMetricsPublisher,
6161+}
6262+6363+impl RedisLexiconResolver {
6464+ /// Create a new Redis-backed lexicon resolver with default 90-day TTL.
6565+ fn new(
6666+ inner: Arc<dyn LexiconResolver>,
6767+ pool: RedisPool,
6868+ metrics: SharedMetricsPublisher,
6969+ ) -> Self {
7070+ Self::with_ttl(inner, pool, 90 * 24 * 60 * 60, metrics) // 90 days default
7171+ }
7272+7373+ /// Create a new Redis-backed lexicon resolver with custom TTL.
7474+ fn with_ttl(
7575+ inner: Arc<dyn LexiconResolver>,
7676+ pool: RedisPool,
7777+ ttl_seconds: u64,
7878+ metrics: SharedMetricsPublisher,
7979+ ) -> Self {
8080+ Self::with_full_config(inner, pool, "lexicon:".to_string(), ttl_seconds, metrics)
8181+ }
8282+8383+ /// Create a new Redis-backed lexicon resolver with full configuration.
8484+ fn with_full_config(
8585+ inner: Arc<dyn LexiconResolver>,
8686+ pool: RedisPool,
8787+ key_prefix: String,
8888+ ttl_seconds: u64,
8989+ metrics: SharedMetricsPublisher,
9090+ ) -> Self {
9191+ Self {
9292+ inner,
9393+ pool,
9494+ key_prefix,
9595+ ttl_seconds,
9696+ metrics,
9797+ }
9898+ }
9999+100100+ /// Generate the Redis key for an NSID.
101101+ ///
102102+ /// Uses MetroHash64 to generate a consistent hash of the NSID
103103+ /// for use as the Redis key. This provides better key distribution
104104+ /// and avoids issues with special characters in NSIDs.
105105+ fn make_key(&self, nsid: &str) -> String {
106106+ let mut h = MetroHash64::default();
107107+ h.write(nsid.as_bytes());
108108+ format!("{}{}", self.key_prefix, h.finish())
109109+ }
110110+111111+ /// Get the TTL in seconds.
112112+ fn ttl_seconds(&self) -> u64 {
113113+ self.ttl_seconds
114114+ }
115115+}
116116+117117+#[async_trait]
118118+impl LexiconResolver for RedisLexiconResolver {
119119+ async fn resolve(&self, nsid: &str) -> Result<serde_json::Value, anyhow::Error> {
120120+ let key = self.make_key(nsid);
121121+122122+ // Try to get from Redis cache first
123123+ match self.pool.get().await {
124124+ Ok(mut conn) => {
125125+ // Check if the key exists in Redis (stored as JSON bytes)
126126+ let cached: Option<Vec<u8>> = match conn.get(&key).await {
127127+ Ok(value) => value,
128128+ Err(e) => {
129129+ self.metrics.incr("lexicon_resolver.redis.get_error").await;
130130+ tracing::warn!("Failed to get NSID from Redis cache: {}", e);
131131+ None
132132+ }
133133+ };
134134+135135+ if let Some(cached_bytes) = cached {
136136+ // Deserialize the cached JSON
137137+ match serde_json::from_slice::<serde_json::Value>(&cached_bytes) {
138138+ Ok(cached_value) => {
139139+ tracing::debug!("Cache hit for NSID {}", nsid);
140140+ self.metrics.incr("lexicon_resolver.redis.cache_hit").await;
141141+ return Ok(cached_value);
142142+ }
143143+ Err(e) => {
144144+ tracing::warn!(
145145+ "Failed to deserialize cached lexicon for NSID {}: {}",
146146+ nsid,
147147+ e
148148+ );
149149+ self.metrics
150150+ .incr("lexicon_resolver.redis.deserialize_error")
151151+ .await;
152152+ // Fall through to re-resolve if deserialization fails
153153+ }
154154+ }
155155+ }
156156+157157+ // Not in cache, resolve through inner resolver
158158+ tracing::debug!("Cache miss for NSID {}, resolving...", nsid);
159159+ self.metrics.incr("lexicon_resolver.redis.cache_miss").await;
160160+ let result = self.inner.resolve(nsid).await;
161161+162162+ // Cache successful result
163163+ if let Ok(ref schema) = result {
164164+ // Serialize to JSON bytes
165165+ match serde_json::to_vec(schema) {
166166+ Ok(bytes) => {
167167+ // Set with expiration (ignore errors to not fail the resolution)
168168+ if let Err(e) = conn
169169+ .set_ex::<_, _, ()>(&key, bytes, self.ttl_seconds())
170170+ .await
171171+ {
172172+ tracing::warn!(
173173+ "Failed to cache lexicon resolution in Redis: {}",
174174+ e
175175+ );
176176+ self.metrics
177177+ .incr("lexicon_resolver.redis.cache_set_error")
178178+ .await;
179179+ } else {
180180+ tracing::debug!("Cached lexicon for NSID {}", nsid);
181181+ self.metrics.incr("lexicon_resolver.redis.cache_set").await;
182182+ }
183183+ }
184184+ Err(e) => {
185185+ tracing::warn!(
186186+ "Failed to serialize lexicon result for NSID {}: {}",
187187+ nsid,
188188+ e
189189+ );
190190+ self.metrics
191191+ .incr("lexicon_resolver.redis.serialize_error")
192192+ .await;
193193+ }
194194+ }
195195+ }
196196+197197+ result
198198+ }
199199+ Err(e) => {
200200+ // Redis connection failed, fall back to inner resolver
201201+ tracing::warn!(
202202+ "Failed to get Redis connection, falling back to uncached resolution: {}",
203203+ e
204204+ );
205205+ self.metrics
206206+ .incr("lexicon_resolver.redis.connection_error")
207207+ .await;
208208+ self.inner.resolve(nsid).await
209209+ }
210210+ }
211211+ }
212212+}
213213+214214+/// Create a new Redis-backed lexicon resolver with default 90-day TTL.
215215+///
216216+/// # Arguments
217217+///
218218+/// * `inner` - The underlying resolver to use for actual resolution
219219+/// * `pool` - Redis connection pool
220220+/// * `metrics` - Metrics publisher for telemetry
221221+///
222222+/// # Example
223223+///
224224+/// ```no_run
225225+/// use std::sync::Arc;
226226+/// use atproto_lexicon::resolve::{DefaultLexiconResolver, LexiconResolver};
227227+/// use quickdid::lexicon_resolver::create_redis_lexicon_resolver;
228228+/// use quickdid::cache::create_redis_pool;
229229+/// use quickdid::metrics::NoOpMetricsPublisher;
230230+///
231231+/// # async fn example() -> anyhow::Result<()> {
232232+/// # use atproto_identity::resolve::HickoryDnsResolver;
233233+/// # use reqwest::Client;
234234+/// # let dns_resolver = HickoryDnsResolver::create_resolver(&[]);
235235+/// # let http_client = Client::new();
236236+/// # let metrics = Arc::new(NoOpMetricsPublisher);
237237+/// let base: Arc<dyn LexiconResolver> = Arc::new(
238238+/// DefaultLexiconResolver::new(http_client, dns_resolver)
239239+/// );
240240+///
241241+/// let pool = create_redis_pool("redis://localhost:6379")?;
242242+/// let resolver = create_redis_lexicon_resolver(base, pool, metrics);
243243+/// let schema = resolver.resolve("app.bsky.feed.post").await.unwrap();
244244+/// # Ok(())
245245+/// # }
246246+/// ```
247247+pub fn create_redis_lexicon_resolver(
248248+ inner: Arc<dyn LexiconResolver>,
249249+ pool: RedisPool,
250250+ metrics: SharedMetricsPublisher,
251251+) -> Arc<dyn LexiconResolver> {
252252+ Arc::new(RedisLexiconResolver::new(inner, pool, metrics))
253253+}
254254+255255+/// Create a new Redis-backed lexicon resolver with custom TTL.
256256+///
257257+/// # Arguments
258258+///
259259+/// * `inner` - The underlying resolver to use for actual resolution
260260+/// * `pool` - Redis connection pool
261261+/// * `ttl_seconds` - TTL for cache entries in seconds
262262+/// * `metrics` - Metrics publisher for telemetry
263263+pub fn create_redis_lexicon_resolver_with_ttl(
264264+ inner: Arc<dyn LexiconResolver>,
265265+ pool: RedisPool,
266266+ ttl_seconds: u64,
267267+ metrics: SharedMetricsPublisher,
268268+) -> Arc<dyn LexiconResolver> {
269269+ Arc::new(RedisLexiconResolver::with_ttl(
270270+ inner,
271271+ pool,
272272+ ttl_seconds,
273273+ metrics,
274274+ ))
275275+}
#[cfg(test)]
mod tests {
    use super::*;

    /// Mock lexicon resolver that either returns a fixed schema or fails.
    #[derive(Clone)]
    struct MockLexiconResolver {
        // When true, `resolve` always returns an error.
        should_fail: bool,
        // Schema returned on success.
        expected_schema: serde_json::Value,
    }

    #[async_trait]
    impl LexiconResolver for MockLexiconResolver {
        async fn resolve(&self, _nsid: &str) -> Result<serde_json::Value, anyhow::Error> {
            if self.should_fail {
                Err(anyhow::anyhow!("Mock resolution failure"))
            } else {
                Ok(self.expected_schema.clone())
            }
        }
    }

    /// Build a unique per-test key prefix (nanosecond timestamp) so parallel
    /// test runs and leftover keys from earlier failures cannot collide.
    fn unique_test_prefix() -> String {
        format!(
            "test:lexicon:{}:",
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap()
                .as_nanos()
        )
    }

    /// Recompute the Redis key the resolver derives for `nsid`.
    ///
    /// Mirrors `RedisLexiconResolver::make_key`; centralizing it here removes
    /// the three hand-rolled copies the tests previously carried, which could
    /// silently drift from the production key scheme.
    fn cache_key(prefix: &str, nsid: &str) -> String {
        let mut h = MetroHash64::default();
        h.write(nsid.as_bytes());
        format!("{}{}", prefix, h.finish())
    }

    #[tokio::test]
    async fn test_redis_lexicon_resolver_cache_hit() {
        // Skip silently when no test Redis instance is available.
        let pool = match crate::test_helpers::get_test_redis_pool() {
            Some(p) => p,
            None => return,
        };

        // Sample schema served by the mock resolver.
        let schema = serde_json::json!({
            "lexicon": 1,
            "id": "app.bsky.feed.post",
            "defs": {
                "main": {
                    "type": "record",
                    "description": "A post record"
                }
            }
        });

        let mock_resolver = Arc::new(MockLexiconResolver {
            should_fail: false,
            expected_schema: schema.clone(),
        });

        let metrics = Arc::new(crate::metrics::NoOpMetricsPublisher);
        let test_prefix = unique_test_prefix();
        let redis_resolver = RedisLexiconResolver::with_full_config(
            mock_resolver,
            pool.clone(),
            test_prefix.clone(),
            3600,
            metrics,
        );

        let test_nsid = "app.bsky.feed.post";

        // First resolution populates the cache via the inner resolver.
        let result1 = redis_resolver.resolve(test_nsid).await.unwrap();
        assert_eq!(result1, schema);

        // Second resolution should be served from Redis.
        let result2 = redis_resolver.resolve(test_nsid).await.unwrap();
        assert_eq!(result2, schema);

        // Clean up test data.
        if let Ok(mut conn) = pool.get().await {
            let _: Result<(), _> = conn.del(cache_key(&test_prefix, test_nsid)).await;
        }
    }

    #[tokio::test]
    async fn test_redis_lexicon_resolver_cache_miss() {
        let pool = match crate::test_helpers::get_test_redis_pool() {
            Some(p) => p,
            None => return,
        };

        let schema = serde_json::json!({
            "lexicon": 1,
            "id": "com.example.test",
        });

        let mock_resolver = Arc::new(MockLexiconResolver {
            should_fail: false,
            expected_schema: schema.clone(),
        });

        let metrics = Arc::new(crate::metrics::NoOpMetricsPublisher);
        let test_prefix = unique_test_prefix();
        let redis_resolver = RedisLexiconResolver::with_full_config(
            mock_resolver,
            pool.clone(),
            test_prefix.clone(),
            3600,
            metrics,
        );

        let test_nsid = "com.example.test";
        let key = cache_key(&test_prefix, test_nsid);

        // Ensure the key is absent so this exercises the miss path.
        if let Ok(mut conn) = pool.get().await {
            let _: Result<(), _> = conn.del(&key).await;
        }

        // Resolution should succeed and write the result back to Redis.
        let result = redis_resolver.resolve(test_nsid).await.unwrap();
        assert_eq!(result, schema);

        // Verify the result was cached, then clean up.
        if let Ok(mut conn) = pool.get().await {
            let exists: bool = conn.exists(&key).await.unwrap();
            assert!(exists, "Result should be cached");
            let _: Result<(), _> = conn.del(&key).await;
        }
    }

    #[tokio::test]
    async fn test_redis_lexicon_resolver_error_handling() {
        let pool = match crate::test_helpers::get_test_redis_pool() {
            Some(p) => p,
            None => return,
        };

        // Inner resolver always fails; the error must propagate to the caller.
        let mock_resolver = Arc::new(MockLexiconResolver {
            should_fail: true,
            expected_schema: serde_json::Value::Null,
        });

        let metrics = Arc::new(crate::metrics::NoOpMetricsPublisher);
        let redis_resolver = RedisLexiconResolver::with_full_config(
            mock_resolver,
            pool.clone(),
            unique_test_prefix(),
            3600,
            metrics,
        );

        // Resolution should fail (nothing cached, inner resolver errors).
        let result = redis_resolver.resolve("com.example.nonexistent").await;
        assert!(result.is_err());
    }
}
+1
src/lib.rs
···33pub mod handle_resolver; // Only traits and factory functions exposed
44pub mod http; // Only create_router exposed
55pub mod jetstream_handler; // Jetstream event handler for AT Protocol events
66+pub mod lexicon_resolver; // Lexicon resolution with caching support
6778// Semi-public modules - needed by binary but with limited exposure
89pub mod cache; // Only create_redis_pool exposed
+87-31
www/index.html
···3535 <!-- Stylesheet -->
3636 <link rel="stylesheet" href="/css/pico.classless.green.min.css">
3737 <style>
3838- #resolveForm {
3838+ .resolver-form {
3939 margin: 2rem 0;
4040 padding: 1.5rem;
4141 background: var(--card-background-color);
···4343 border: 1px solid var(--muted-border-color);
4444 }
45454646- #result {
4747- margin-top: 2rem;
4646+ .resolver-result {
4747+ margin-top: 1rem;
4848 padding: 1.5rem;
4949 background: var(--code-background-color);
5050 border-radius: var(--border-radius);
5151 border: 1px solid var(--muted-border-color);
5252 }
53535454- #resultContent {
5454+ .result-content {
5555 background: transparent;
5656 padding: 1rem;
5757 overflow-x: auto;
···7575 font-weight: bold;
7676 margin-right: 0.5rem;
7777 }
7878+7979+ .endpoint-section {
8080+ margin-bottom: 3rem;
8181+ }
7882 </style>
7983</head>
8084···8690 </hgroup>
8791 </header>
8892 <main>
8989- <p>QuickDID is a high-performance handle-to-DID resolution service for the AT Protocol ecosystem.</p>
9393+ <p>QuickDID provides high-performance resolution services for the AT Protocol ecosystem.</p>
90949195 <h2>Available Endpoints</h2>
92969393- <h3>GET /xrpc/com.atproto.identity.resolveHandle</h3>
9494- <p>Resolve an AT Protocol handle to its DID</p>
9595- <p>Parameters: <code>?handle={handle}</code></p>
9797+ <section class="endpoint-section">
9898+ <h3>GET /xrpc/com.atproto.identity.resolveHandle</h3>
9999+ <p>Resolve an AT Protocol handle to its DID</p>
100100+ <p>Parameters: <code>?handle={handle}</code></p>
101101+102102+ <h4>Try It Out</h4>
103103+ <form id="handleResolveForm" class="resolver-form">
104104+ <label for="handleInput">
105105+ Enter an AT Protocol handle to resolve:
106106+ <input type="text" id="handleInput" name="handle" placeholder="e.g., alice.bsky.social" required>
107107+ </label>
108108+ <button type="submit">Resolve Handle</button>
109109+ </form>
961109797- <h2>Try It Out</h2>
9898- <form id="resolveForm">
9999- <label for="handle">
100100- Enter an AT Protocol handle to resolve:
101101- <input type="text" id="handle" name="handle" placeholder="e.g., alice.bsky.social" required>
102102- </label>
103103- <button type="submit">Resolve Handle</button>
104104- </form>
111111+ <div id="handleResult" class="resolver-result" style="display: none;">
112112+ <h4>Result</h4>
113113+ <pre id="handleResultContent" class="result-content"></pre>
114114+ </div>
105115106106- <div id="result" style="display: none;">
107107- <h3>Result</h3>
108108- <pre id="resultContent"></pre>
109109- </div>
116116+ <h4>Example Usage</h4>
117117+ <code>curl "https://quickdid.smokesignal.tools/xrpc/com.atproto.identity.resolveHandle?handle=ngerakines.me"</code>
118118+ </section>
110119111111- <h2>Example Usage</h2>
112112- <code>curl "https://quickdid.smokesignal.tools/xrpc/com.atproto.identity.resolveHandle?handle=ngerakines.me"</code>
120120+ <section class="endpoint-section">
121121+ <h3>GET /xrpc/com.atproto.lexicon.resolveLexicon</h3>
122122+ <p>Resolve an AT Protocol lexicon (NSID) to its schema</p>
123123+ <p>Parameters: <code>?nsid={nsid}</code></p>
124124+125125+ <h4>Try It Out</h4>
126126+ <form id="lexiconResolveForm" class="resolver-form">
127127+ <label for="nsidInput">
128128+ Enter an AT Protocol NSID to resolve:
129129+ <input type="text" id="nsidInput" name="nsid" placeholder="e.g., app.bsky.feed.post" required>
130130+ </label>
131131+ <button type="submit">Resolve Lexicon</button>
132132+ </form>
133133+134134+ <div id="lexiconResult" class="resolver-result" style="display: none;">
135135+ <h4>Result</h4>
136136+ <pre id="lexiconResultContent" class="result-content"></pre>
137137+ </div>
138138+139139+ <h4>Example Usage</h4>
140140+ <code>curl "https://quickdid.smokesignal.tools/xrpc/com.atproto.lexicon.resolveLexicon?nsid=app.bsky.feed.post"</code>
141141+ </section>
113142114143 <h2>Documentation</h2>
115144 <p>
···122151 </main>
123152124153 <script>
125125- document.getElementById('resolveForm').addEventListener('submit', async (e) => {
154154+ // Handle form submission for handle resolution
155155+ document.getElementById('handleResolveForm').addEventListener('submit', async (e) => {
126156 e.preventDefault();
127157128128- const handle = document.getElementById('handle').value.trim();
129129- const resultDiv = document.getElementById('result');
130130- const resultContent = document.getElementById('resultContent');
158158+ const handle = document.getElementById('handleInput').value.trim();
159159+ const resultDiv = document.getElementById('handleResult');
160160+ const resultContent = document.getElementById('handleResultContent');
131161132162 // Show loading state
133163 resultDiv.style.display = 'block';
···155185 }
156186 });
157187158158- // Allow pressing Enter in the input field to submit
159159- document.getElementById('handle').addEventListener('keypress', (e) => {
160160- if (e.key === 'Enter') {
161161- e.preventDefault();
162162- document.getElementById('resolveForm').dispatchEvent(new Event('submit'));
188188+ // Handle form submission for lexicon resolution
189189+ document.getElementById('lexiconResolveForm').addEventListener('submit', async (e) => {
190190+ e.preventDefault();
191191+192192+ const nsid = document.getElementById('nsidInput').value.trim();
193193+ const resultDiv = document.getElementById('lexiconResult');
194194+ const resultContent = document.getElementById('lexiconResultContent');
195195+196196+ // Show loading state
197197+ resultDiv.style.display = 'block';
198198+ resultContent.textContent = 'Loading...';
199199+200200+ try {
201201+ // Build the request URL
202202+ const url = `/xrpc/com.atproto.lexicon.resolveLexicon?nsid=${encodeURIComponent(nsid)}`;
203203+204204+ // Make the GET request
205205+ const response = await fetch(url);
206206+ const data = await response.json();
207207+208208+ // Display the result
209209+ if (response.ok) {
210210+ resultContent.textContent = JSON.stringify(data, null, 2);
211211+ resultContent.style.color = '';
212212+ } else {
213213+ resultContent.textContent = `Error: ${JSON.stringify(data, null, 2)}`;
214214+ resultContent.style.color = '#d32f2f';
215215+ }
216216+ } catch (error) {
217217+ resultContent.textContent = `Network Error: ${error.message}`;
218218+ resultContent.style.color = '#d32f2f';
163219 }
164220 });
165221 </script>