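//! Crawler notification service: asks the configured crawler/relay hosts to
//! re-crawl this PDS (via `com.atproto.sync.requestCrawl`) whenever the
//! firehose emits commit events, debounced to at most once per 20 minutes.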
use crate::circuit_breaker::CircuitBreaker;
use crate::sync::firehose::SequencedEvent;
use reqwest::Client;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{broadcast, watch};
use tracing::{debug, error, info, warn};

/// Minimum interval between crawler notifications (20 minutes).
const NOTIFY_THRESHOLD_SECS: u64 = 20 * 60;

/// Notifies configured crawler endpoints that this PDS has new data to crawl.
pub struct Crawlers {
    hostname: String,
    crawler_urls: Vec<String>,
    http_client: Client,
    last_notified: AtomicU64,
    circuit_breaker: Option<Arc<CircuitBreaker>>,
}

impl Crawlers {
    pub fn new(hostname: String, crawler_urls: Vec<String>) -> Self {
        Self {
            hostname,
            crawler_urls,
            http_client: Client::builder()
                .timeout(Duration::from_secs(30))
                .build()
                .unwrap_or_default(),
            last_notified: AtomicU64::new(0),
            circuit_breaker: None,
        }
    }

    /// Attaches a circuit breaker that gates outbound notifications.
    pub fn with_circuit_breaker(mut self, circuit_breaker: Arc<CircuitBreaker>) -> Self {
        self.circuit_breaker = Some(circuit_breaker);
        self
    }

    /// Builds a `Crawlers` from the `PDS_HOSTNAME` and comma-separated `CRAWLERS`
    /// environment variables. Returns `None` if either is missing or empty.
    pub fn from_env() -> Option<Self> {
        let hostname = std::env::var("PDS_HOSTNAME").ok()?;
        let crawler_urls: Vec<String> = std::env::var("CRAWLERS")
            .unwrap_or_default()
            .split(',')
            .filter(|s| !s.is_empty())
            .map(|s| s.trim().to_string())
            .collect();

        if crawler_urls.is_empty() {
            return None;
        }

        Some(Self::new(hostname, crawler_urls))
    }

    /// Debounce check: only notify if at least `NOTIFY_THRESHOLD_SECS` have
    /// elapsed since the last notification.
    fn should_notify(&self) -> bool {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        let last = self.last_notified.load(Ordering::Relaxed);
        // saturating_sub avoids an overflow panic if the system clock moves backwards.
        now.saturating_sub(last) >= NOTIFY_THRESHOLD_SECS
    }

    fn mark_notified(&self) {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        self.last_notified.store(now, Ordering::Relaxed);
    }

    /// Sends `com.atproto.sync.requestCrawl` to every configured crawler,
    /// subject to the debounce window and the circuit breaker (if any).
    /// Each request runs in its own task so a slow crawler cannot block the caller.
    pub async fn notify_of_update(&self) {
        if !self.should_notify() {
            debug!("Skipping crawler notification due to debounce");
            return;
        }

        if let Some(cb) = &self.circuit_breaker {
            if !cb.can_execute().await {
                debug!("Skipping crawler notification due to circuit breaker open");
                return;
            }
        }

        self.mark_notified();

        let circuit_breaker = self.circuit_breaker.clone();

        for crawler_url in &self.crawler_urls {
            let url = format!(
                "{}/xrpc/com.atproto.sync.requestCrawl",
                crawler_url.trim_end_matches('/')
            );
            let hostname = self.hostname.clone();
            let client = self.http_client.clone();
            let cb = circuit_breaker.clone();

            tokio::spawn(async move {
                match client
                    .post(&url)
                    .json(&serde_json::json!({ "hostname": hostname }))
                    .send()
                    .await
                {
                    Ok(response) => {
                        if response.status().is_success() {
                            debug!(crawler = %url, "Successfully notified crawler");
                            if let Some(cb) = cb {
                                cb.record_success().await;
                            }
                        } else {
                            let status = response.status();
                            let body = response.text().await.unwrap_or_default();
                            warn!(
                                crawler = %url,
                                status = %status,
                                body = %body,
                                hostname = %hostname,
                                "Crawler notification returned non-success status"
                            );
                            if let Some(cb) = cb {
                                cb.record_failure().await;
                            }
                        }
                    }
                    Err(e) => {
                        warn!(crawler = %url, error = %e, "Failed to notify crawler");
                        if let Some(cb) = cb {
                            cb.record_failure().await;
                        }
                    }
                }
            });
        }
    }
}

/// Listens for firehose commit events and pings crawlers until shutdown.
pub async fn start_crawlers_service(
    crawlers: Arc<Crawlers>,
    mut firehose_rx: broadcast::Receiver<SequencedEvent>,
    mut shutdown: watch::Receiver<bool>,
) {
    info!(
        hostname = %crawlers.hostname,
        crawler_count = crawlers.crawler_urls.len(),
        crawlers = ?crawlers.crawler_urls,
        "Starting crawlers notification service"
    );

    loop {
        tokio::select! {
            result = firehose_rx.recv() => {
                match result {
                    Ok(event) => {
                        if event.event_type == "commit" {
                            crawlers.notify_of_update().await;
                        }
                    }
                    Err(broadcast::error::RecvError::Lagged(n)) => {
                        warn!(skipped = n, "Crawlers service lagged behind firehose");
                        crawlers.notify_of_update().await;
                    }
                    Err(broadcast::error::RecvError::Closed) => {
                        error!("Firehose channel closed, stopping crawlers service");
                        break;
                    }
                }
            }
            _ = shutdown.changed() => {
                if *shutdown.borrow() {
                    info!("Crawlers service shutting down");
                    break;
                }
            }
        }
    }
}
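
// A minimal wiring sketch (not part of the original module): it assumes the
// surrounding application already owns a firehose `broadcast::Sender` and a
// `watch` shutdown channel; the function name `spawn_crawlers_from_env` and
// the parameter names are illustrative only.
pub fn spawn_crawlers_from_env(
    firehose_tx: &broadcast::Sender<SequencedEvent>,
    shutdown_rx: watch::Receiver<bool>,
) {
    // Only start the background service when PDS_HOSTNAME and at least one
    // CRAWLERS URL are configured.
    if let Some(crawlers) = Crawlers::from_env() {
        tokio::spawn(start_crawlers_service(
            Arc::new(crawlers),
            firehose_tx.subscribe(),
            shutdown_rx,
        ));
    }
}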