//! Crawler notification: asks configured relay/crawler hosts to re-crawl this PDS
//! by calling `com.atproto.sync.requestCrawl`, debounced to at most once every
//! `NOTIFY_THRESHOLD_SECS` seconds.

use crate::circuit_breaker::CircuitBreaker;
use crate::sync::firehose::SequencedEvent;
use reqwest::Client;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{broadcast, watch};
use tracing::{debug, error, info, warn};

/// Minimum interval between crawler notifications (20 minutes).
const NOTIFY_THRESHOLD_SECS: u64 = 20 * 60;

pub struct Crawlers {
    hostname: String,
    crawler_urls: Vec<String>,
    http_client: Client,
    /// Unix timestamp (seconds) of the last notification, used for debouncing.
    last_notified: AtomicU64,
    circuit_breaker: Option<Arc<CircuitBreaker>>,
}

impl Crawlers {
    pub fn new(hostname: String, crawler_urls: Vec<String>) -> Self {
        Self {
            hostname,
            crawler_urls,
            http_client: Client::builder()
                .timeout(Duration::from_secs(30))
                .build()
                .unwrap_or_default(),
            last_notified: AtomicU64::new(0),
            circuit_breaker: None,
        }
    }

    pub fn with_circuit_breaker(mut self, circuit_breaker: Arc<CircuitBreaker>) -> Self {
        self.circuit_breaker = Some(circuit_breaker);
        self
    }

    /// Builds a `Crawlers` from `PDS_HOSTNAME` and the comma-separated `CRAWLERS`
    /// environment variable. Returns `None` if either is missing or empty.
    pub fn from_env() -> Option<Self> {
        let hostname = std::env::var("PDS_HOSTNAME").ok()?;

        let crawler_urls: Vec<String> = std::env::var("CRAWLERS")
            .unwrap_or_default()
            .split(',')
            .map(|s| s.trim().to_string())
            .filter(|s| !s.is_empty())
            .collect();

        if crawler_urls.is_empty() {
            return None;
        }

        Some(Self::new(hostname, crawler_urls))
    }

    fn should_notify(&self) -> bool {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();

        // Best-effort debounce: the separate load/store means two concurrent callers
        // can both pass, which is acceptable for a 20-minute window.
        let last = self.last_notified.load(Ordering::Relaxed);
        now.saturating_sub(last) >= NOTIFY_THRESHOLD_SECS
    }

    fn mark_notified(&self) {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();

        self.last_notified.store(now, Ordering::Relaxed);
    }

    pub async fn notify_of_update(&self) {
        if !self.should_notify() {
            debug!("Skipping crawler notification due to debounce");
            return;
        }

        if let Some(cb) = &self.circuit_breaker {
            if !cb.can_execute().await {
                debug!("Skipping crawler notification: circuit breaker is open");
                return;
            }
        }

        // Mark before spawning so a burst of events does not fan out duplicate requests.
        self.mark_notified();
        let circuit_breaker = self.circuit_breaker.clone();

        for crawler_url in &self.crawler_urls {
            let url = format!(
                "{}/xrpc/com.atproto.sync.requestCrawl",
                crawler_url.trim_end_matches('/')
            );
            let hostname = self.hostname.clone();
            let client = self.http_client.clone();
            let cb = circuit_breaker.clone();

            // Fire-and-forget: each crawler is notified on its own task so a slow
            // relay cannot block the others.
            tokio::spawn(async move {
                match client
                    .post(&url)
                    .json(&serde_json::json!({ "hostname": hostname }))
                    .send()
                    .await
                {
                    Ok(response) => {
                        if response.status().is_success() {
                            debug!(crawler = %url, "Successfully notified crawler");
                            if let Some(cb) = cb {
                                cb.record_success().await;
                            }
                        } else {
                            let status = response.status();
                            let body = response.text().await.unwrap_or_default();
                            warn!(
                                crawler = %url,
                                status = %status,
                                body = %body,
                                hostname = %hostname,
                                "Crawler notification returned non-success status"
                            );
                            if let Some(cb) = cb {
                                cb.record_failure().await;
                            }
                        }
                    }
                    Err(e) => {
                        warn!(crawler = %url, error = %e, "Failed to notify crawler");
                        if let Some(cb) = cb {
                            cb.record_failure().await;
                        }
                    }
                }
            });
        }
    }
}

/// Listens to the firehose broadcast channel and pings crawlers on commit events
/// until the channel closes or a shutdown signal is received.
pub async fn start_crawlers_service(
    crawlers: Arc<Crawlers>,
    mut firehose_rx: broadcast::Receiver<SequencedEvent>,
    mut shutdown: watch::Receiver<bool>,
) {
    info!(
        hostname = %crawlers.hostname,
        crawler_count = crawlers.crawler_urls.len(),
        crawlers = ?crawlers.crawler_urls,
        "Starting crawlers notification service"
    );

    loop {
        tokio::select! {
            result = firehose_rx.recv() => {
                match result {
                    Ok(event) => {
                        if event.event_type == "commit" {
                            crawlers.notify_of_update().await;
                        }
                    }
                    Err(broadcast::error::RecvError::Lagged(n)) => {
                        // Missed events still imply new data, so notify anyway.
                        warn!(skipped = n, "Crawlers service lagged behind firehose");
                        crawlers.notify_of_update().await;
                    }
                    Err(broadcast::error::RecvError::Closed) => {
                        error!("Firehose channel closed, stopping crawlers service");
                        break;
                    }
                }
            }
            changed = shutdown.changed() => {
                // A dropped shutdown sender also stops the service; otherwise
                // `changed()` would return Err on every poll and busy-loop.
                if changed.is_err() || *shutdown.borrow() {
                    info!("Crawlers service shutting down");
                    break;
                }
            }
        }
    }
}
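For context, the service would be wired into the server's startup path roughly as follows. This is a minimal sketch, not taken from this repository's main.rs: the channel capacity is arbitrary, `Crawlers`, `start_crawlers_service`, and `SequencedEvent` are assumed to be imported from the crate, and the firehose/sequencer side is assumed to hold the broadcast sender.

use std::sync::Arc;
use tokio::sync::{broadcast, watch};

#[tokio::main]
async fn main() {
    // Requires PDS_HOSTNAME plus a comma-separated CRAWLERS list, e.g.
    // CRAWLERS=https://bsky.network
    let Some(crawlers) = Crawlers::from_env() else {
        eprintln!("crawler notifications disabled: PDS_HOSTNAME or CRAWLERS not set");
        return;
    };
    // A circuit breaker could be attached with `.with_circuit_breaker(...)` here,
    // before wrapping in Arc; its constructor lives elsewhere in the crate.
    let crawlers = Arc::new(crawlers);

    // In a real server the firehose/sequencer keeps this sender and publishes
    // SequencedEvent values into it; here it is unused.
    let (_firehose_tx, firehose_rx) = broadcast::channel::<SequencedEvent>(1024);
    let (shutdown_tx, shutdown_rx) = watch::channel(false);

    let handle = tokio::spawn(start_crawlers_service(crawlers, firehose_rx, shutdown_rx));

    // ... run the server; on shutdown, flip the flag and wait for the task to exit.
    let _ = shutdown_tx.send(true);
    let _ = handle.await;
}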