A decentralized music tracking and discovery platform built on AT Protocol 🎵 rocksky.app
spotify atproto lastfm musicbrainz scrobbling listenbrainz

Refactor logging to use tracing #7

Merged — opened by tsiry-sandratraina.com, targeting `main` from `setup-tracing`
Labels

None yet.

Assignees

None yet.

Participants 1
AT URI
at://did:plc:7vdlgi2bflelz7mmuxoqjfcr/sh.tangled.repo.pull/3lzolvk366x22
+556 -972
Diff #0
+77 -4
Cargo.lock
··· 3064 3065 [[package]] 3066 name = "log" 3067 - version = "0.4.27" 3068 source = "registry+https://github.com/rust-lang/crates.io-index" 3069 - checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" 3070 3071 [[package]] 3072 name = "lru-slab" ··· 3227 "winapi", 3228 ] 3229 3230 [[package]] 3231 name = "nuid" 3232 version = "0.5.0" ··· 3402 3403 [[package]] 3404 name = "owo-colors" 3405 - version = "4.2.1" 3406 source = "registry+https://github.com/rust-lang/crates.io-index" 3407 - checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" 3408 dependencies = [ 3409 "supports-color 2.1.0", 3410 "supports-color 3.0.2", ··· 4837 "sqlx", 4838 "tokio", 4839 "tokio-stream", 4840 ] 4841 4842 [[package]] ··· 4888 "tempfile", 4889 "tokio", 4890 "tokio-stream", 4891 ] 4892 4893 [[package]] ··· 4919 "tempfile", 4920 "tokio", 4921 "tokio-stream", 4922 ] 4923 4924 [[package]] ··· 4941 "tokio", 4942 "tokio-stream", 4943 "tokio-tungstenite", 4944 "tungstenite", 4945 "url", 4946 ] ··· 4968 "sqlx", 4969 "tokio", 4970 "tokio-stream", 4971 ] 4972 4973 [[package]] ··· 4995 "sqlx", 4996 "tokio", 4997 "tokio-stream", 4998 "uuid", 4999 ] 5000 ··· 5018 "sqlx", 5019 "tokio", 5020 "tokio-stream", 5021 ] 5022 5023 [[package]] ··· 5034 "sqlx", 5035 "tokio", 5036 "tokio-stream", 5037 ] 5038 5039 [[package]] ··· 5053 "serde_json", 5054 "tokio", 5055 "tokio-stream", 5056 "uuid", 5057 ] 5058 ··· 5080 "sqlx", 5081 "tokio", 5082 "tokio-stream", 5083 ] 5084 5085 [[package]] ··· 5089 "anyhow", 5090 "clap", 5091 "dotenv", 5092 "rocksky-analytics", 5093 "rocksky-dropbox", 5094 "rocksky-googledrive", ··· 5099 "rocksky-tracklist", 5100 "rocksky-webscrobbler", 5101 "tokio", 5102 ] 5103 5104 [[package]] ··· 5630 "tokio", 5631 ] 5632 5633 [[package]] 5634 name = "shlex" 5635 version = "1.3.0" ··· 6440 "syn 2.0.101", 6441 ] 6442 6443 [[package]] 6444 name = "time" 6445 version = "0.3.44" ··· 6780 checksum = 
"e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" 6781 dependencies = [ 6782 "once_cell", 6783 ] 6784 6785 [[package]] ··· 6958 "wasm-bindgen", 6959 ] 6960 6961 [[package]] 6962 name = "vcpkg" 6963 version = "0.2.15"
··· 3064 3065 [[package]] 3066 name = "log" 3067 + version = "0.4.28" 3068 source = "registry+https://github.com/rust-lang/crates.io-index" 3069 + checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" 3070 3071 [[package]] 3072 name = "lru-slab" ··· 3227 "winapi", 3228 ] 3229 3230 + [[package]] 3231 + name = "nu-ansi-term" 3232 + version = "0.50.1" 3233 + source = "registry+https://github.com/rust-lang/crates.io-index" 3234 + checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" 3235 + dependencies = [ 3236 + "windows-sys 0.52.0", 3237 + ] 3238 + 3239 [[package]] 3240 name = "nuid" 3241 version = "0.5.0" ··· 3411 3412 [[package]] 3413 name = "owo-colors" 3414 + version = "4.2.2" 3415 source = "registry+https://github.com/rust-lang/crates.io-index" 3416 + checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" 3417 dependencies = [ 3418 "supports-color 2.1.0", 3419 "supports-color 3.0.2", ··· 4846 "sqlx", 4847 "tokio", 4848 "tokio-stream", 4849 + "tracing", 4850 ] 4851 4852 [[package]] ··· 4898 "tempfile", 4899 "tokio", 4900 "tokio-stream", 4901 + "tracing", 4902 ] 4903 4904 [[package]] ··· 4930 "tempfile", 4931 "tokio", 4932 "tokio-stream", 4933 + "tracing", 4934 ] 4935 4936 [[package]] ··· 4953 "tokio", 4954 "tokio-stream", 4955 "tokio-tungstenite", 4956 + "tracing", 4957 "tungstenite", 4958 "url", 4959 ] ··· 4981 "sqlx", 4982 "tokio", 4983 "tokio-stream", 4984 + "tracing", 4985 ] 4986 4987 [[package]] ··· 5009 "sqlx", 5010 "tokio", 5011 "tokio-stream", 5012 + "tracing", 5013 "uuid", 5014 ] 5015 ··· 5033 "sqlx", 5034 "tokio", 5035 "tokio-stream", 5036 + "tracing", 5037 ] 5038 5039 [[package]] ··· 5050 "sqlx", 5051 "tokio", 5052 "tokio-stream", 5053 + "tracing", 5054 ] 5055 5056 [[package]] ··· 5070 "serde_json", 5071 "tokio", 5072 "tokio-stream", 5073 + "tracing", 5074 "uuid", 5075 ] 5076 ··· 5098 "sqlx", 5099 "tokio", 5100 "tokio-stream", 5101 + "tracing", 5102 ] 5103 5104 [[package]] ··· 5108 
"anyhow", 5109 "clap", 5110 "dotenv", 5111 + "owo-colors", 5112 "rocksky-analytics", 5113 "rocksky-dropbox", 5114 "rocksky-googledrive", ··· 5119 "rocksky-tracklist", 5120 "rocksky-webscrobbler", 5121 "tokio", 5122 + "tracing", 5123 + "tracing-log", 5124 + "tracing-subscriber", 5125 ] 5126 5127 [[package]] ··· 5653 "tokio", 5654 ] 5655 5656 + [[package]] 5657 + name = "sharded-slab" 5658 + version = "0.1.7" 5659 + source = "registry+https://github.com/rust-lang/crates.io-index" 5660 + checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" 5661 + dependencies = [ 5662 + "lazy_static", 5663 + ] 5664 + 5665 [[package]] 5666 name = "shlex" 5667 version = "1.3.0" ··· 6472 "syn 2.0.101", 6473 ] 6474 6475 + [[package]] 6476 + name = "thread_local" 6477 + version = "1.1.9" 6478 + source = "registry+https://github.com/rust-lang/crates.io-index" 6479 + checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" 6480 + dependencies = [ 6481 + "cfg-if", 6482 + ] 6483 + 6484 [[package]] 6485 name = "time" 6486 version = "0.3.44" ··· 6821 checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" 6822 dependencies = [ 6823 "once_cell", 6824 + "valuable", 6825 + ] 6826 + 6827 + [[package]] 6828 + name = "tracing-log" 6829 + version = "0.2.0" 6830 + source = "registry+https://github.com/rust-lang/crates.io-index" 6831 + checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" 6832 + dependencies = [ 6833 + "log", 6834 + "once_cell", 6835 + "tracing-core", 6836 + ] 6837 + 6838 + [[package]] 6839 + name = "tracing-subscriber" 6840 + version = "0.3.20" 6841 + source = "registry+https://github.com/rust-lang/crates.io-index" 6842 + checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" 6843 + dependencies = [ 6844 + "nu-ansi-term", 6845 + "sharded-slab", 6846 + "smallvec", 6847 + "thread_local", 6848 + "tracing-core", 6849 + "tracing-log", 6850 ] 6851 6852 [[package]] ··· 7025 
"wasm-bindgen", 7026 ] 7027 7028 + [[package]] 7029 + name = "valuable" 7030 + version = "0.1.1" 7031 + source = "registry+https://github.com/rust-lang/crates.io-index" 7032 + checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" 7033 + 7034 [[package]] 7035 name = "vcpkg" 7036 version = "0.2.15"
+1
crates/analytics/Cargo.toml
··· 28 clap = "4.5.31" 29 actix-web = "4.9.0" 30 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 28 clap = "4.5.31" 29 actix-web = "4.9.0" 30 tokio-stream = { version = "0.1.17", features = ["full"] } 31 + tracing = "0.1.41"
+2 -2
crates/analytics/src/cmd/serve.rs
··· 29 req: HttpRequest, 30 ) -> Result<impl Responder, actix_web::Error> { 31 let method = req.match_info().get("method").unwrap_or("unknown"); 32 - println!("Method: {}", method.bright_green()); 33 34 let conn = data.get_ref().clone(); 35 handle(method, &mut payload, &req, conn) ··· 45 let addr = format!("{}:{}", host, port); 46 47 let url = format!("http://{}", addr); 48 - println!("Listening on {}", url.bright_green()); 49 50 let conn = conn.clone(); 51 HttpServer::new(move || {
··· 29 req: HttpRequest, 30 ) -> Result<impl Responder, actix_web::Error> { 31 let method = req.match_info().get("method").unwrap_or("unknown"); 32 + tracing::info!(method = %method.bright_green(), "API call"); 33 34 let conn = data.get_ref().clone(); 35 handle(method, &mut payload, &req, conn) ··· 45 let addr = format!("{}:{}", host, port); 46 47 let url = format!("http://{}", addr); 48 + tracing::info!(url = %url.bright_green(), "Listening on"); 49 50 let conn = conn.clone(); 51 HttpServer::new(move || {
+37 -84
crates/analytics/src/core.rs
··· 194 .await?; 195 196 for (i, track) in tracks.clone().into_iter().enumerate() { 197 - println!( 198 - "track {} - {} - {}", 199 - i, 200 - track.title.bright_green(), 201 - track.artist 202 - ); 203 match conn.execute( 204 "INSERT INTO tracks ( 205 id, ··· 255 ], 256 ) { 257 Ok(_) => (), 258 - Err(e) => println!("error: {}", e), 259 } 260 } 261 262 - println!("tracks: {:?}", tracks.len()); 263 Ok(()) 264 } 265 ··· 277 .await?; 278 279 for (i, artist) in artists.clone().into_iter().enumerate() { 280 - println!("artist {} - {}", i, artist.name.bright_green()); 281 match conn.execute( 282 "INSERT INTO artists ( 283 id, ··· 323 ], 324 ) { 325 Ok(_) => (), 326 - Err(e) => println!("error: {}", e), 327 } 328 } 329 330 - println!("artists: {:?}", artists.len()); 331 Ok(()) 332 } 333 ··· 342 .await?; 343 344 for (i, album) in albums.clone().into_iter().enumerate() { 345 - println!("album {} - {}", i, album.title.bright_green()); 346 match conn.execute( 347 "INSERT INTO albums ( 348 id, ··· 388 ], 389 ) { 390 Ok(_) => (), 391 - Err(e) => println!("error: {}", e), 392 } 393 } 394 395 - println!("albums: {:?}", albums.len()); 396 Ok(()) 397 } 398 ··· 407 .await?; 408 409 for (i, user) in users.clone().into_iter().enumerate() { 410 - println!("user {} - {}", i, user.display_name.bright_green()); 411 match conn.execute( 412 "INSERT INTO users ( 413 id, ··· 429 ], 430 ) { 431 Ok(_) => (), 432 - Err(e) => println!("error: {}", e), 433 } 434 } 435 436 - println!("users: {:?}", users.len()); 437 Ok(()) 438 } 439 ··· 451 .await?; 452 453 for (i, scrobble) in scrobbles.clone().into_iter().enumerate() { 454 - println!( 455 - "scrobble {} - {}", 456 - i, 457 - match scrobble.uri.clone() { 458 - Some(uri) => uri.to_string(), 459 - None => "None".to_string(), 460 - } 461 - .bright_green() 462 - ); 463 match conn.execute( 464 "INSERT INTO scrobbles ( 465 id, ··· 489 ], 490 ) { 491 Ok(_) => (), 492 - Err(e) => println!("error: {}", e), 493 } 494 } 495 496 - println!("scrobbles: {:?}", 
scrobbles.len()); 497 Ok(()) 498 } 499 ··· 511 .await?; 512 513 for (i, album_track) in album_tracks.clone().into_iter().enumerate() { 514 - println!( 515 - "album_track {} - {} - {}", 516 - i, 517 - album_track.album_id.bright_green(), 518 - album_track.track_id 519 - ); 520 match conn.execute( 521 "INSERT INTO album_tracks ( 522 id, ··· 532 ], 533 ) { 534 Ok(_) => (), 535 - Err(e) => println!("error: {}", e), 536 } 537 } 538 - println!("album_tracks: {:?}", album_tracks.len()); 539 Ok(()) 540 } 541 ··· 553 .await?; 554 555 for (i, loved_track) in loved_tracks.clone().into_iter().enumerate() { 556 - println!( 557 - "loved_track {} - {} - {}", 558 - i, 559 - loved_track.user_id.bright_green(), 560 - loved_track.track_id 561 - ); 562 match conn.execute( 563 "INSERT INTO loved_tracks ( 564 id, ··· 577 ], 578 ) { 579 Ok(_) => (), 580 - Err(e) => println!("error: {}", e), 581 } 582 } 583 584 - println!("loved_tracks: {:?}", loved_tracks.len()); 585 Ok(()) 586 } 587 ··· 599 .await?; 600 601 for (i, artist_track) in artist_tracks.clone().into_iter().enumerate() { 602 - println!( 603 - "artist_track {} - {} - {}", 604 - i, 605 - artist_track.artist_id.bright_green(), 606 - artist_track.track_id 607 - ); 608 match conn.execute( 609 "INSERT INTO artist_tracks (id, artist_id, track_id, created_at) VALUES (?, ?, ?, ?)", 610 params![ ··· 615 ], 616 ) { 617 Ok(_) => (), 618 - Err(e) => println!("error: {}", e), 619 } 620 } 621 622 - println!("artist_tracks: {:?}", artist_tracks.len()); 623 Ok(()) 624 } 625 ··· 637 .await?; 638 639 for (i, artist_album) in artist_albums.clone().into_iter().enumerate() { 640 - println!( 641 - "artist_albums {} - {} - {}", 642 - i, 643 - artist_album.artist_id.bright_green(), 644 - artist_album.album_id 645 - ); 646 match conn.execute( 647 "INSERT INTO artist_albums (id, artist_id, album_id, created_at) VALUES (?, ?, ?, ?)", 648 params![ ··· 653 ], 654 ) { 655 Ok(_) => (), 656 - Err(e) => println!("error: {}", e), 657 } 658 } 659 660 - 
println!("artist_albums: {:?}", artist_albums.len()); 661 Ok(()) 662 } 663 ··· 675 .await?; 676 677 for (i, user_album) in user_albums.clone().into_iter().enumerate() { 678 - println!( 679 - "user_album {} - {} - {}", 680 - i, 681 - user_album.user_id.bright_green(), 682 - user_album.album_id 683 - ); 684 match conn.execute( 685 "INSERT INTO user_albums (id, user_id, album_id, created_at) VALUES (?, ?, ?, ?)", 686 params![ ··· 691 ], 692 ) { 693 Ok(_) => (), 694 - Err(e) => println!("error: {}", e), 695 } 696 } 697 698 - println!("user_albums: {:?}", user_albums.len()); 699 Ok(()) 700 } 701 ··· 713 .await?; 714 715 for (i, user_artist) in user_artists.clone().into_iter().enumerate() { 716 - println!( 717 - "user_artist {} - {} - {}", 718 - i, 719 - user_artist.user_id.bright_green(), 720 - user_artist.artist_id 721 - ); 722 match conn.execute( 723 "INSERT INTO user_artists (id, user_id, artist_id, created_at) VALUES (?, ?, ?, ?)", 724 params![ ··· 729 ], 730 ) { 731 Ok(_) => (), 732 - Err(e) => println!("error: {}", e), 733 } 734 } 735 736 - println!("user_artists: {:?}", user_artists.len()); 737 Ok(()) 738 } 739 ··· 751 .await?; 752 753 for (i, user_track) in user_tracks.clone().into_iter().enumerate() { 754 - println!( 755 - "user_track {} - {} - {}", 756 - i, 757 - user_track.user_id.bright_green(), 758 - user_track.track_id 759 - ); 760 match conn.execute( 761 "INSERT INTO user_tracks (id, user_id, track_id, created_at) VALUES (?, ?, ?, ?)", 762 params![ ··· 767 ], 768 ) { 769 Ok(_) => (), 770 - Err(e) => println!("error: {}", e), 771 } 772 } 773 774 - println!("user_tracks: {:?}", user_tracks.len()); 775 Ok(()) 776 }
··· 194 .await?; 195 196 for (i, track) in tracks.clone().into_iter().enumerate() { 197 + tracing::info!(track = i, title = %track.title.bright_green(), artist = %track.artist); 198 match conn.execute( 199 "INSERT INTO tracks ( 200 id, ··· 250 ], 251 ) { 252 Ok(_) => (), 253 + Err(e) => tracing::error!(error = %e, "Error inserting track"), 254 } 255 } 256 257 + tracing::info!(tracks = tracks.len(), "Loaded tracks"); 258 Ok(()) 259 } 260 ··· 272 .await?; 273 274 for (i, artist) in artists.clone().into_iter().enumerate() { 275 + tracing::info!(artist = i, name = %artist.name.bright_green()); 276 match conn.execute( 277 "INSERT INTO artists ( 278 id, ··· 318 ], 319 ) { 320 Ok(_) => (), 321 + Err(e) => tracing::error!(error = %e, "Error inserting artist"), 322 } 323 } 324 325 + tracing::info!(artists = artists.len(), "Loaded artists"); 326 Ok(()) 327 } 328 ··· 337 .await?; 338 339 for (i, album) in albums.clone().into_iter().enumerate() { 340 + tracing::info!(album = i, title = %album.title.bright_green(), artist = %album.artist); 341 match conn.execute( 342 "INSERT INTO albums ( 343 id, ··· 383 ], 384 ) { 385 Ok(_) => (), 386 + Err(e) => tracing::error!(error = %e, "Error inserting album"), 387 } 388 } 389 390 + tracing::info!(albums = albums.len(), "Loaded albums"); 391 Ok(()) 392 } 393 ··· 402 .await?; 403 404 for (i, user) in users.clone().into_iter().enumerate() { 405 + tracing::info!(user = i, name = %user.display_name.bright_green()); 406 match conn.execute( 407 "INSERT INTO users ( 408 id, ··· 424 ], 425 ) { 426 Ok(_) => (), 427 + Err(e) => tracing::error!(error = %e, "Error inserting user"), 428 } 429 } 430 431 + tracing::info!(users = users.len(), "Loaded users"); 432 Ok(()) 433 } 434 ··· 446 .await?; 447 448 for (i, scrobble) in scrobbles.clone().into_iter().enumerate() { 449 + tracing::info!(scrobble = i, uri = %scrobble.uri.clone().unwrap_or_else(|| "None".to_string()).bright_green()); 450 match conn.execute( 451 "INSERT INTO scrobbles ( 452 id, ··· 476 ], 
477 ) { 478 Ok(_) => (), 479 + Err(e) => tracing::error!(error = %e, "Error inserting scrobble"), 480 } 481 } 482 483 + tracing::info!(scrobbles = scrobbles.len(), "Loaded scrobbles"); 484 Ok(()) 485 } 486 ··· 498 .await?; 499 500 for (i, album_track) in album_tracks.clone().into_iter().enumerate() { 501 + tracing::info!(album_track = i, album_id = %album_track.album_id.bright_green(), track_id = %album_track.track_id); 502 match conn.execute( 503 "INSERT INTO album_tracks ( 504 id, ··· 514 ], 515 ) { 516 Ok(_) => (), 517 + Err(e) => tracing::error!(error = %e, "Error inserting album_track"), 518 } 519 } 520 + 521 + tracing::info!(album_tracks = album_tracks.len(), "Loaded album_tracks"); 522 Ok(()) 523 } 524 ··· 536 .await?; 537 538 for (i, loved_track) in loved_tracks.clone().into_iter().enumerate() { 539 + tracing::info!(loved_track = i, user_id = %loved_track.user_id.bright_green(), track_id = %loved_track.track_id); 540 match conn.execute( 541 "INSERT INTO loved_tracks ( 542 id, ··· 555 ], 556 ) { 557 Ok(_) => (), 558 + Err(e) => tracing::error!(error = %e, "Error inserting loved_track"), 559 } 560 } 561 562 + tracing::info!(loved_tracks = loved_tracks.len(), "Loaded loved_tracks"); 563 Ok(()) 564 } 565 ··· 577 .await?; 578 579 for (i, artist_track) in artist_tracks.clone().into_iter().enumerate() { 580 + tracing::info!(artist_track = i, artist_id = %artist_track.artist_id.bright_green(), track_id = %artist_track.track_id); 581 match conn.execute( 582 "INSERT INTO artist_tracks (id, artist_id, track_id, created_at) VALUES (?, ?, ?, ?)", 583 params![ ··· 588 ], 589 ) { 590 Ok(_) => (), 591 + Err(e) => tracing::error!(error = %e, "Error inserting artist_track"), 592 } 593 } 594 595 + tracing::info!(artist_tracks = artist_tracks.len(), "Loaded artist_tracks"); 596 Ok(()) 597 } 598 ··· 610 .await?; 611 612 for (i, artist_album) in artist_albums.clone().into_iter().enumerate() { 613 + tracing::info!(artist_album = i, artist_id = 
%artist_album.artist_id.bright_green(), album_id = %artist_album.album_id); 614 match conn.execute( 615 "INSERT INTO artist_albums (id, artist_id, album_id, created_at) VALUES (?, ?, ?, ?)", 616 params![ ··· 621 ], 622 ) { 623 Ok(_) => (), 624 + Err(e) => tracing::error!(error = %e, "Error inserting artist_album"), 625 } 626 } 627 628 + tracing::info!(artist_albums = artist_albums.len(), "Loaded artist_albums"); 629 Ok(()) 630 } 631 ··· 643 .await?; 644 645 for (i, user_album) in user_albums.clone().into_iter().enumerate() { 646 + tracing::info!(user_album = i, user_id = %user_album.user_id.bright_green(), album_id = %user_album.album_id); 647 match conn.execute( 648 "INSERT INTO user_albums (id, user_id, album_id, created_at) VALUES (?, ?, ?, ?)", 649 params![ ··· 654 ], 655 ) { 656 Ok(_) => (), 657 + Err(e) => tracing::error!(error = %e, "Error inserting user_album"), 658 } 659 } 660 661 + tracing::info!(user_albums = user_albums.len(), "Loaded user_albums"); 662 Ok(()) 663 } 664 ··· 676 .await?; 677 678 for (i, user_artist) in user_artists.clone().into_iter().enumerate() { 679 + tracing::info!(user_artist = i, user_id = %user_artist.user_id.bright_green(), artist_id = %user_artist.artist_id); 680 match conn.execute( 681 "INSERT INTO user_artists (id, user_id, artist_id, created_at) VALUES (?, ?, ?, ?)", 682 params![ ··· 687 ], 688 ) { 689 Ok(_) => (), 690 + Err(e) => tracing::error!(error = %e, "Error inserting user_artist"), 691 } 692 } 693 694 + tracing::info!(user_artists = user_artists.len(), "Loaded user_artists"); 695 Ok(()) 696 } 697 ··· 709 .await?; 710 711 for (i, user_track) in user_tracks.clone().into_iter().enumerate() { 712 + tracing::info!(user_track = i, user_id = %user_track.user_id.bright_green(), track_id = %user_track.track_id); 713 match conn.execute( 714 "INSERT INTO user_tracks (id, user_id, track_id, created_at) VALUES (?, ?, ?, ?)", 715 params![ ··· 720 ], 721 ) { 722 Ok(_) => (), 723 + Err(e) => tracing::error!(error = %e, "Error 
inserting user_track"), 724 } 725 } 726 727 + tracing::info!(user_tracks = user_tracks.len(), "Loaded user_tracks"); 728 Ok(()) 729 }
+4
crates/analytics/src/handlers/albums.rs
··· 22 let offset = pagination.skip.unwrap_or(0); 23 let limit = pagination.take.unwrap_or(20); 24 let did = params.user_did; 25 26 let conn = conn.lock().unwrap(); 27 let mut stmt = match did { ··· 118 let offset = pagination.skip.unwrap_or(0); 119 let limit = pagination.take.unwrap_or(20); 120 let did = params.user_did; 121 122 let conn = conn.lock().unwrap(); 123 let mut stmt = match did { ··· 237 let body = read_payload!(payload); 238 let params = serde_json::from_slice::<GetAlbumTracksParams>(&body)?; 239 let conn = conn.lock().unwrap(); 240 let mut stmt = conn.prepare(r#" 241 SELECT 242 t.id,
··· 22 let offset = pagination.skip.unwrap_or(0); 23 let limit = pagination.take.unwrap_or(20); 24 let did = params.user_did; 25 + tracing::info!(limit, offset, user_did = ?did, "Get albums"); 26 27 let conn = conn.lock().unwrap(); 28 let mut stmt = match did { ··· 119 let offset = pagination.skip.unwrap_or(0); 120 let limit = pagination.take.unwrap_or(20); 121 let did = params.user_did; 122 + tracing::info!(limit, offset, user_did = ?did, "Get top albums"); 123 124 let conn = conn.lock().unwrap(); 125 let mut stmt = match did { ··· 239 let body = read_payload!(payload); 240 let params = serde_json::from_slice::<GetAlbumTracksParams>(&body)?; 241 let conn = conn.lock().unwrap(); 242 + tracing::info!(album_id = %params.album_id, "Get album tracks"); 243 + 244 let mut stmt = conn.prepare(r#" 245 SELECT 246 t.id,
+2
crates/analytics/src/handlers/artists.rs
··· 318 let body = read_payload!(payload); 319 let params = serde_json::from_slice::<GetArtistAlbumsParams>(&body)?; 320 let conn = conn.lock().unwrap(); 321 322 let mut stmt = conn.prepare(r#" 323 SELECT ··· 376 let pagination = params.pagination.unwrap_or_default(); 377 let offset = pagination.skip.unwrap_or(0); 378 let limit = pagination.take.unwrap_or(10); 379 380 let conn = conn.lock().unwrap(); 381 let mut stmt =
··· 318 let body = read_payload!(payload); 319 let params = serde_json::from_slice::<GetArtistAlbumsParams>(&body)?; 320 let conn = conn.lock().unwrap(); 321 + tracing::info!(artist_id = %params.artist_id, "Get artist albums"); 322 323 let mut stmt = conn.prepare(r#" 324 SELECT ··· 377 let pagination = params.pagination.unwrap_or_default(); 378 let offset = pagination.skip.unwrap_or(0); 379 let limit = pagination.take.unwrap_or(10); 380 + tracing::info!(artist_id = %params.artist_id, limit, offset, "Get artist listeners"); 381 382 let conn = conn.lock().unwrap(); 383 let mut stmt =
+2
crates/analytics/src/handlers/scrobbles.rs
··· 19 let offset = pagination.skip.unwrap_or(0); 20 let limit = pagination.take.unwrap_or(20); 21 let did = params.user_did; 22 23 let conn = conn.lock().unwrap(); 24 let mut stmt = match did { ··· 139 let pagination = params.pagination.unwrap_or_default(); 140 let offset = pagination.skip.unwrap_or(0); 141 let limit = pagination.take.unwrap_or(10); 142 143 let conn = conn.lock().unwrap(); 144 let mut stmt = conn.prepare(
··· 19 let offset = pagination.skip.unwrap_or(0); 20 let limit = pagination.take.unwrap_or(20); 21 let did = params.user_did; 22 + tracing::info!(limit, offset, user_did = ?did, "Get scrobbles"); 23 24 let conn = conn.lock().unwrap(); 25 let mut stmt = match did { ··· 140 let pagination = params.pagination.unwrap_or_default(); 141 let offset = pagination.skip.unwrap_or(0); 142 let limit = pagination.take.unwrap_or(10); 143 + tracing::info!(limit, offset, user_did = ?params.user_did, "Get distinct scrobbles"); 144 145 let conn = conn.lock().unwrap(); 146 let mut stmt = conn.prepare(
+8
crates/analytics/src/handlers/stats.rs
··· 23 let body = read_payload!(payload); 24 25 let params = serde_json::from_slice::<GetStatsParams>(&body)?; 26 27 let conn = conn.lock().unwrap(); 28 let mut stmt = conn.prepare("SELECT COUNT(*) FROM scrobbles s LEFT JOIN users u ON s.user_id = u.id WHERE u.did = ? OR u.handle = ?")?; ··· 118 .end 119 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 120 let did = params.user_did; 121 122 let conn = conn.lock().unwrap(); 123 match did { ··· 190 .end 191 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 192 let did = params.user_did; 193 194 let conn = conn.lock().unwrap(); 195 match did { ··· 266 .end 267 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 268 let did = params.user_did; 269 270 let conn = conn.lock().unwrap(); 271 match did { ··· 338 .end 339 .unwrap_or(GetAlbumScrobblesParams::default().end.unwrap()); 340 let conn = conn.lock().unwrap(); 341 let mut stmt = conn.prepare( 342 r#" 343 SELECT ··· 379 .end 380 .unwrap_or(GetArtistScrobblesParams::default().end.unwrap()); 381 let conn = conn.lock().unwrap(); 382 383 let mut stmt = conn.prepare( 384 r#" ··· 426 .end 427 .unwrap_or(GetTrackScrobblesParams::default().end.unwrap()); 428 let conn = conn.lock().unwrap(); 429 430 let mut stmt = conn.prepare( 431 r#"
··· 23 let body = read_payload!(payload); 24 25 let params = serde_json::from_slice::<GetStatsParams>(&body)?; 26 + tracing::info!(user_did = ?params.user_did, "Get stats"); 27 28 let conn = conn.lock().unwrap(); 29 let mut stmt = conn.prepare("SELECT COUNT(*) FROM scrobbles s LEFT JOIN users u ON s.user_id = u.id WHERE u.did = ? OR u.handle = ?")?; ··· 119 .end 120 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 121 let did = params.user_did; 122 + tracing::info!(start = %start, end = %end, user_did = ?did, "Get scrobbles per day"); 123 124 let conn = conn.lock().unwrap(); 125 match did { ··· 192 .end 193 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 194 let did = params.user_did; 195 + tracing::info!(start = %start, end = %end, user_did = ?did, "Get scrobbles per month"); 196 197 let conn = conn.lock().unwrap(); 198 match did { ··· 269 .end 270 .unwrap_or(GetScrobblesPerDayParams::default().end.unwrap()); 271 let did = params.user_did; 272 + tracing::info!(start = %start, end = %end, user_did = ?did, "Get scrobbles per year"); 273 274 let conn = conn.lock().unwrap(); 275 match did { ··· 342 .end 343 .unwrap_or(GetAlbumScrobblesParams::default().end.unwrap()); 344 let conn = conn.lock().unwrap(); 345 + tracing::info!(album_id = %params.album_id, start = %start, end = %end, "Get album scrobbles"); 346 + 347 let mut stmt = conn.prepare( 348 r#" 349 SELECT ··· 385 .end 386 .unwrap_or(GetArtistScrobblesParams::default().end.unwrap()); 387 let conn = conn.lock().unwrap(); 388 + tracing::info!(artist_id = %params.artist_id, start = %start, end = %end, "Get artist scrobbles"); 389 390 let mut stmt = conn.prepare( 391 r#" ··· 433 .end 434 .unwrap_or(GetTrackScrobblesParams::default().end.unwrap()); 435 let conn = conn.lock().unwrap(); 436 + tracing::info!(track_id = %params.track_id, start = %start, end = %end, "Get track scrobbles"); 437 438 let mut stmt = conn.prepare( 439 r#"
+3
crates/analytics/src/handlers/tracks.rs
··· 19 let offset = pagination.skip.unwrap_or(0); 20 let limit = pagination.take.unwrap_or(20); 21 let did = params.user_did; 22 23 let conn = conn.lock().unwrap(); 24 match did { ··· 178 let offset = pagination.skip.unwrap_or(0); 179 let limit = pagination.take.unwrap_or(20); 180 let did = params.user_did; 181 182 let conn = conn.lock().unwrap(); 183 let mut stmt = conn.prepare( ··· 261 let offset = pagination.skip.unwrap_or(0); 262 let limit = pagination.take.unwrap_or(20); 263 let did = params.user_did; 264 265 let conn = conn.lock().unwrap(); 266 match did {
··· 19 let offset = pagination.skip.unwrap_or(0); 20 let limit = pagination.take.unwrap_or(20); 21 let did = params.user_did; 22 + tracing::info!(limit, offset, user_did = ?did, "Get tracks"); 23 24 let conn = conn.lock().unwrap(); 25 match did { ··· 179 let offset = pagination.skip.unwrap_or(0); 180 let limit = pagination.take.unwrap_or(20); 181 let did = params.user_did; 182 + tracing::info!(limit, offset, user_did = ?did, "Get loved tracks"); 183 184 let conn = conn.lock().unwrap(); 185 let mut stmt = conn.prepare( ··· 263 let offset = pagination.skip.unwrap_or(0); 264 let limit = pagination.take.unwrap_or(20); 265 let did = params.user_did; 266 + tracing::info!(limit, offset, user_did = ?did, "Get top tracks"); 267 268 let conn = conn.lock().unwrap(); 269 match did {
+174 -172
crates/analytics/src/subscriber/mod.rs
··· 16 let addr = env::var("NATS_URL").unwrap_or_else(|_| "nats://localhost:4222".to_string()); 17 let conn = conn.clone(); 18 let nc = connect(&addr).await?; 19 - println!("Connected to NATS server at {}", addr.bright_green()); 20 21 let nc = Arc::new(Mutex::new(nc)); 22 on_scrobble(nc.clone(), conn.clone()); ··· 39 40 41 42 43 44 ··· 59 60 61 62 63 64 ··· 78 79 80 81 82 83 ··· 97 98 99 100 101 102 ··· 116 117 118 119 120 121 ··· 176 177 178 179 180 181 ··· 224 225 226 227 228 229 ··· 280 281 282 283 284 285 ··· 296 297 298 299 300 301 ··· 307 308 309 310 311 312 ··· 318 319 320 321 322 323 ··· 329 330 331 332 333 334 ··· 340 341 342 343 344 345 ··· 351 352 353 354 355 356 ··· 381 382 383 384 385 386 ··· 446 447 448 449 450 451 ··· 462 463 464 465 466 467 ··· 473 474 475 476 477 478 ··· 484 485 486 487 - 488 - 489 - 490 - 491 - 492 - 493 - 494 - 495 - 496 - 497 - 498 - 499 - 500 - 501 - 502 - 503 - 504 - 505 - 506 - 507 - 508 - 509 - 510 - 511 - 512 - 513 - 514 - 515 - 516 - 517 - 518 - 519 - 520 - 521 - 522 - 523 - 524 - 525 - 526 - 527 - 528 - 529 - 530 - 531 - 532 - 533 - 534 - 535 - 536 - 537 - 538 - 539 - 540 - 541 - 542 - 543 - 544 - 545 - 546 - 547 - 548 - 549 - 550 - 551 - 552 - 553 - 554 - 555 - 556 - 557 - 558 - 559 - 560 - 561 - 562 - 563 - 564 - 565 - 566 - 567 - 568 - 569 - 570 - 571 - 572 - 573 - 574 - 575 - 576 - 577 - 578 - 579 - 580 - 581 - 582 - 583 - 584 - 585 - 586 - 587 - 588 - 589 - 590 - 591 - 592 - 593 - 594 - 595 - 596 - 597 - 598 - 599 - 600 - 601 - 602 - 603 - 604 - 605 - 606 - 607 - 608 - 609 - 610 - 611 - 612 - 613 - 614 - 615 - 616 - 617 - 618 - 619 - 620 - 621 - 622 - 623 - 624 - 625 - 626 - 627 - 628 - 629 - 630 - 631 - 632 - 633 - 634 - 635 - 636 - 637 - 638 - 639 - 640 - 641 - 642 - 643 - 644 - 645 - 646 - 647 - 648 - 649 - 650 - 651 - 652 Ok(_) => (), 653 Err(e) => { 654 if !e.to_string().contains("violates primary key constraint") { 655 - println!("[artist_albums] error: {}", e); 656 return Err(e.into()); 657 } 658 } ··· 684 
Ok(_) => (), 685 Err(e) => { 686 if !e.to_string().contains("violates primary key constraint") { 687 - println!("[likes] error: {}", e); 688 return Err(e.into()); 689 } 690 } ··· 700 ) { 701 Ok(_) => (), 702 Err(e) => { 703 - println!("[unlikes] error: {}", e); 704 return Err(e.into()); 705 } 706 } ··· 740 Ok(_) => (), 741 Err(e) => { 742 if !e.to_string().contains("violates primary key constraint") { 743 - println!("[users] error: {}", e); 744 return Err(e.into()); 745 } 746 } ··· 921 922 match serde_json::from_str::<types::ScrobblePayload>(data) { 923 Err(e) => { 924 - eprintln!("Error parsing payload: {}", e); 925 - println!("{}", data); 926 } 927 Ok(_) => {} 928 }
··· 16 let addr = env::var("NATS_URL").unwrap_or_else(|_| "nats://localhost:4222".to_string()); 17 let conn = conn.clone(); 18 let nc = connect(&addr).await?; 19 + tracing::info!(server = %addr.bright_green(), "Connected to NATS"); 20 21 let nc = Arc::new(Mutex::new(nc)); 22 on_scrobble(nc.clone(), conn.clone()); ··· 39 40 41 42 + let data = String::from_utf8(msg.payload.to_vec()).unwrap(); 43 + match serde_json::from_str::<ScrobblePayload>(&data) { 44 + Ok(payload) => match save_scrobble(conn.clone(), payload.clone()).await { 45 + Ok(_) => tracing::info!( 46 + uri = %payload.scrobble.uri.cyan(), 47 + "Scrobble saved successfully", 48 + ), 49 + Err(e) => tracing::error!("Error saving scrobble: {}", e), 50 + }, 51 + Err(e) => { 52 + tracing::error!("Error parsing payload: {}", e); 53 + tracing::debug!("{}", data); 54 + } 55 + } 56 + } 57 58 59 ··· 74 75 76 77 + match serde_json::from_str::<NewTrackPayload>(&data) { 78 + Ok(payload) => match save_track(conn.clone(), payload.clone()).await { 79 + Ok(_) => { 80 + tracing::info!( 81 + title = %payload.track.title.cyan(), 82 + "Track saved successfully", 83 + ) 84 + } 85 + Err(e) => tracing::error!("Error saving track: {}", e), 86 + }, 87 + Err(e) => { 88 + tracing::error!("Error parsing payload: {}", e); 89 + tracing::debug!("{}", data); 90 + } 91 + } 92 + } 93 94 95 ··· 109 110 111 112 + let data = String::from_utf8(msg.payload.to_vec()).unwrap(); 113 + match serde_json::from_str::<LikePayload>(&data) { 114 + Ok(payload) => match like(conn.clone(), payload.clone()).await { 115 + Ok(_) => tracing::info!( 116 + track_id = %payload.track_id.xata_id.cyan(), 117 + "Like saved successfully", 118 + ), 119 + Err(e) => tracing::error!("Error saving like: {}", e), 120 + }, 121 + Err(e) => { 122 + tracing::error!("Error parsing payload: {}", e); 123 + tracing::debug!("{}", data); 124 + } 125 + } 126 + } 127 128 129 ··· 143 144 145 146 + let data = String::from_utf8(msg.payload.to_vec()).unwrap(); 147 + match 
serde_json::from_str::<UnlikePayload>(&data) { 148 + Ok(payload) => match unlike(conn.clone(), payload.clone()).await { 149 + Ok(_) => tracing::info!( 150 + track_id = %payload.track_id.xata_id.cyan(), 151 + "Unlike saved successfully", 152 + ), 153 + Err(e) => tracing::error!("Error saving unlike: {}", e), 154 + }, 155 + Err(e) => { 156 + tracing::error!("Error parsing payload: {}", e); 157 + tracing::debug!("{}", data); 158 + } 159 + } 160 + } 161 162 163 ··· 177 178 179 180 + let data = String::from_utf8(msg.payload.to_vec()).unwrap(); 181 + match serde_json::from_str::<UserPayload>(&data) { 182 + Ok(payload) => match save_user(conn.clone(), payload.clone()).await { 183 + Ok(_) => tracing::info!( 184 + handle = %payload.handle.cyan(), 185 + "User saved successfully", 186 + ), 187 + Err(e) => tracing::error!("Error saving user: {}", e), 188 + }, 189 + Err(e) => { 190 + tracing::error!("Error parsing payload: {}", e); 191 + tracing::debug!("{}", data); 192 + } 193 + } 194 + } 195 196 197 ··· 252 253 254 255 + Ok(_) => (), 256 + Err(e) => { 257 + if !e.to_string().contains("violates primary key constraint") { 258 + tracing::error!("[artists] error: {}", e); 259 + return Err(e.into()); 260 + } 261 + } 262 263 264 ··· 307 308 309 310 + Ok(_) => (), 311 + Err(e) => { 312 + if !e.to_string().contains("violates primary key constraint") { 313 + tracing::error!("[albums] error: {}", e); 314 + return Err(e.into()); 315 + } 316 + } 317 318 319 ··· 370 371 372 373 + Ok(_) => (), 374 + Err(e) => { 375 + if !e.to_string().contains("violates primary key constraint") { 376 + tracing::error!("[tracks] error: {}", e); 377 + return Err(e.into()); 378 + } 379 + } 380 381 382 ··· 393 394 395 396 + Ok(_) => (), 397 + Err(e) => { 398 + if !e.to_string().contains("violates primary key constraint") { 399 + tracing::error!("[album_tracks] error: {}", e); 400 + return Err(e.into()); 401 + } 402 + } 403 404 405 ··· 411 412 413 414 + Ok(_) => (), 415 + Err(e) => { 416 + if 
!e.to_string().contains("violates primary key constraint") { 417 + tracing::error!("[artist_tracks] error: {}", e); 418 + return Err(e.into()); 419 + } 420 + } 421 422 423 ··· 429 430 431 432 + Ok(_) => (), 433 + Err(e) => { 434 + if !e.to_string().contains("violates primary key constraint") { 435 + tracing::error!("[artist_albums] error: {}", e); 436 + return Err(e.into()); 437 + } 438 + } 439 440 441 ··· 447 448 449 450 + Ok(_) => (), 451 + Err(e) => { 452 + if !e.to_string().contains("violates primary key constraint") { 453 + tracing::error!("[user_albums] error: {}", e); 454 + return Err(e.into()); 455 + } 456 + } 457 458 459 ··· 465 466 467 468 + Ok(_) => (), 469 + Err(e) => { 470 + if !e.to_string().contains("violates primary key constraint") { 471 + tracing::error!("[user_artists] error: {}", e); 472 + return Err(e.into()); 473 + } 474 + } 475 476 477 ··· 483 484 485 486 + Ok(_) => (), 487 + Err(e) => { 488 + if !e.to_string().contains("violates primary key constraint") { 489 + tracing::error!("[user_tracks] error: {}", e); 490 + return Err(e.into()); 491 + } 492 + } 493 494 495 ··· 520 521 522 523 + Ok(_) => (), 524 + Err(e) => { 525 + if !e.to_string().contains("violates primary key constraint") { 526 + tracing::error!("[scrobbles] error: {}", e); 527 + return Err(e.into()); 528 + } 529 + } 530 531 532 ··· 592 593 594 595 + Ok(_) => (), 596 + Err(e) => { 597 + if !e.to_string().contains("violates primary key constraint") { 598 + tracing::error!("[tracks] error: {}", e); 599 + return Err(e.into()); 600 + } 601 + } 602 603 604 ··· 615 616 617 618 + Ok(_) => (), 619 + Err(e) => { 620 + if !e.to_string().contains("violates primary key constraint") { 621 + tracing::error!("[album_tracks] error: {}", e); 622 + return Err(e.into()); 623 + } 624 + } 625 626 627 ··· 633 634 635 636 + Ok(_) => (), 637 + Err(e) => { 638 + if !e.to_string().contains("violates primary key constraint") { 639 + tracing::error!("[artist_tracks] error: {}", e); 640 + return Err(e.into()); 
641 + } 642 + } 643 644 645 ··· 651 652 653 654 Ok(_) => (), 655 Err(e) => { 656 if !e.to_string().contains("violates primary key constraint") { 657 + tracing::error!("[artist_albums] error: {}", e); 658 return Err(e.into()); 659 } 660 } ··· 686 Ok(_) => (), 687 Err(e) => { 688 if !e.to_string().contains("violates primary key constraint") { 689 + tracing::error!("[likes] error: {}", e); 690 return Err(e.into()); 691 } 692 } ··· 702 ) { 703 Ok(_) => (), 704 Err(e) => { 705 + tracing::error!("[unlikes] error: {}", e); 706 return Err(e.into()); 707 } 708 } ··· 742 Ok(_) => (), 743 Err(e) => { 744 if !e.to_string().contains("violates primary key constraint") { 745 + tracing::error!("[users] error: {}", e); 746 return Err(e.into()); 747 } 748 } ··· 923 924 match serde_json::from_str::<types::ScrobblePayload>(data) { 925 Err(e) => { 926 + tracing::error!("Error parsing payload: {}", e); 927 + tracing::error!("{}", data); 928 } 929 Ok(_) => {} 930 }
+1
crates/dropbox/Cargo.toml
··· 43 tempfile = "3.19.1" 44 tokio = { version = "1.43.0", features = ["full"] } 45 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 43 tempfile = "3.19.1" 44 tokio = { version = "1.43.0", features = ["full"] } 45 tokio-stream = { version = "0.1.17", features = ["full"] } 46 + tracing = "0.1.41"
+1
crates/googledrive/Cargo.toml
··· 44 tempfile = "3.19.1" 45 tokio = { version = "1.43.0", features = ["full"] } 46 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 44 tempfile = "3.19.1" 45 tokio = { version = "1.43.0", features = ["full"] } 46 tokio-stream = { version = "0.1.17", features = ["full"] } 47 + tracing = "0.1.41"
+1
crates/jetstream/Cargo.toml
··· 38 sha256 = "1.6.0" 39 time = { version = "0.3.44", features = ["formatting", "macros"] } 40 redis = { version = "0.29.0", features = ["aio", "tokio-comp"] }
··· 38 sha256 = "1.6.0" 39 time = { version = "0.3.44", features = ["formatting", "macros"] } 40 redis = { version = "0.29.0", features = ["aio", "tokio-comp"] } 41 + tracing = "0.1.41"
+1
crates/playlists/Cargo.toml
··· 37 hex = "0.4.3" 38 jsonwebtoken = "9.3.1" 39 sha2 = "0.10.8"
··· 37 hex = "0.4.3" 38 jsonwebtoken = "9.3.1" 39 sha2 = "0.10.8" 40 + tracing = "0.1.41"
+4
crates/rockskyd/Cargo.toml
··· 20 rocksky-spotify = { path = "../spotify" } 21 rocksky-tracklist = { path = "../tracklist" } 22 rocksky-webscrobbler = { path = "../webscrobbler" }
··· 20 rocksky-spotify = { path = "../spotify" } 21 rocksky-tracklist = { path = "../tracklist" } 22 rocksky-webscrobbler = { path = "../webscrobbler" } 23 + tracing = "0.1.41" 24 + tracing-subscriber = "0.3.20" 25 + tracing-log = "0.2.0" 26 + owo-colors = "4.2.2"
+12
crates/rockskyd/src/main.rs
··· 1 use clap::Command; 2 use dotenv::dotenv; 3 4 pub mod cmd; 5 ··· 35 36 #[tokio::main] 37 async fn main() -> Result<(), Box<dyn std::error::Error>> { 38 dotenv().ok(); 39 40 let args = cli().get_matches();
··· 1 use clap::Command; 2 use dotenv::dotenv; 3 + use tracing_subscriber::fmt::format::Format; 4 5 pub mod cmd; 6 ··· 36 37 #[tokio::main] 38 async fn main() -> Result<(), Box<dyn std::error::Error>> { 39 + let format = Format::default() 40 + .with_level(true) 41 + .with_target(true) 42 + .with_ansi(true) 43 + .compact(); 44 + 45 + tracing_subscriber::fmt() 46 + .event_format(format) 47 + .with_max_level(tracing::Level::INFO) 48 + .init(); 49 + 50 dotenv().ok(); 51 52 let args = cli().get_matches();
+6 -9
crates/scrobbler/Cargo.toml
··· 3 4 5 6 7 8 9 ··· 32 33 34 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 actix-limitation = "0.5.1" 45 actix-session = "0.10.1" 46 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 3 4 5 6 + license.workspace = true 7 + repository.workspace = true 8 9 + [dependencies] 10 + serde = { version = "1.0.217", features = ["derive"] } 11 + serde_json = "1.0.139" 12 13 14 ··· 37 38 39 40 actix-limitation = "0.5.1" 41 actix-session = "0.10.1" 42 tokio-stream = { version = "0.1.17", features = ["full"] } 43 + tracing = "0.1.41"
+1
crates/spotify/Cargo.toml
··· 38 ] } 39 tokio = { version = "1.43.0", features = ["full"] } 40 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 38 ] } 39 tokio = { version = "1.43.0", features = ["full"] } 40 tokio-stream = { version = "0.1.17", features = ["full"] } 41 + tracing = "0.1.41"
+1
crates/storage/Cargo.toml
··· 26 ] } 27 actix-web = "4.9.0" 28 tokio-stream = { version = "0.1.17", features = ["full"] }
··· 26 ] } 27 actix-web = "4.9.0" 28 tokio-stream = { version = "0.1.17", features = ["full"] } 29 + tracing = "0.1.41"
+1
crates/tracklist/Cargo.toml
··· 22 clap = "4.5.31" 23 rand = "0.9.2" 24 uuid = { version = "1.18.0", features = ["v4"] }
··· 22 clap = "4.5.31" 23 rand = "0.9.2" 24 uuid = { version = "1.18.0", features = ["v4"] } 25 + tracing = "0.1.41"
+6 -9
crates/webscrobbler/Cargo.toml
··· 3 4 5 6 7 8 9 ··· 30 31 32 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 tokio-stream = { version = "0.1.17", features = ["full"] } 43 actix-session = "0.10.1" 44 actix-limitation = "0.5.1"
··· 3 4 5 6 + license.workspace = true 7 + repository.workspace = true 8 9 + [dependencies] 10 + serde = { version = "1.0.217", features = ["derive"] } 11 + serde_json = "1.0.139" 12 13 14 ··· 35 36 37 38 tokio-stream = { version = "0.1.17", features = ["full"] } 39 actix-session = "0.10.1" 40 actix-limitation = "0.5.1" 41 + tracing = "0.1.41"
+2 -2
crates/dropbox/src/cmd/serve.rs
··· 27 req: HttpRequest, 28 ) -> Result<impl Responder, actix_web::Error> { 29 let method = req.match_info().get("method").unwrap_or("unknown"); 30 - println!("Method: {}", method.bright_green()); 31 32 let conn = data.get_ref().clone(); 33 handle(method, &mut payload, &req, conn) ··· 41 let addr = format!("{}:{}", host, port); 42 43 let url = format!("http://{}", addr); 44 - println!("Listening on {}", url.bright_green()); 45 46 let pool = PgPoolOptions::new() 47 .max_connections(5)
··· 27 req: HttpRequest, 28 ) -> Result<impl Responder, actix_web::Error> { 29 let method = req.match_info().get("method").unwrap_or("unknown"); 30 + tracing::info!(method = %method.bright_green(), "API call"); 31 32 let conn = data.get_ref().clone(); 33 handle(method, &mut payload, &req, conn) ··· 41 let addr = format!("{}:{}", host, port); 42 43 let url = format!("http://{}", addr); 44 + tracing::info!(url = %url.bright_green(), "Listening on"); 45 46 let pool = PgPoolOptions::new() 47 .max_connections(5)
+8
crates/dropbox/src/handlers/files.rs
··· 2 3 use actix_web::{web, HttpRequest, HttpResponse}; 4 use anyhow::Error; 5 use sqlx::{Pool, Postgres}; 6 use tokio_stream::StreamExt; 7 ··· 24 let body = read_payload!(payload); 25 let params = serde_json::from_slice::<GetFilesParams>(&body)?; 26 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 27 28 if refresh_token.is_none() { 29 return Ok(HttpResponse::Unauthorized().finish()); ··· 48 let body = read_payload!(payload); 49 let params = serde_json::from_slice::<GetFilesParams>(&body)?; 50 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 51 52 if refresh_token.is_none() { 53 return Ok(HttpResponse::Unauthorized().finish()); ··· 72 let body = read_payload!(payload); 73 let params = serde_json::from_slice::<GetFilesAtParams>(&body)?; 74 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 75 76 if refresh_token.is_none() { 77 return Ok(HttpResponse::Unauthorized().finish()); ··· 96 let body = read_payload!(payload); 97 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 98 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 99 100 if refresh_token.is_none() { 101 return Ok(HttpResponse::Unauthorized().finish()); ··· 118 let body = read_payload!(payload); 119 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 120 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 121 122 if refresh_token.is_none() { 123 return Ok(HttpResponse::Unauthorized().finish()); ··· 142 let body = read_payload!(payload); 143 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 144 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 145 146 if refresh_token.is_none() { 147 return Ok(HttpResponse::Unauthorized().finish()); ··· 165 ) -> Result<HttpResponse, Error> { 166 let body = read_payload!(payload); 167 let params = 
serde_json::from_slice::<ScanFolderParams>(&body)?; 168 169 let pool = pool.clone(); 170 thread::spawn(move || {
··· 2 3 use actix_web::{web, HttpRequest, HttpResponse}; 4 use anyhow::Error; 5 + use owo_colors::OwoColorize; 6 use sqlx::{Pool, Postgres}; 7 use tokio_stream::StreamExt; 8 ··· 25 let body = read_payload!(payload); 26 let params = serde_json::from_slice::<GetFilesParams>(&body)?; 27 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 28 + tracing::info!(did = %params.did.bright_green(), "dropbox.getFiles"); 29 30 if refresh_token.is_none() { 31 return Ok(HttpResponse::Unauthorized().finish()); ··· 50 let body = read_payload!(payload); 51 let params = serde_json::from_slice::<GetFilesParams>(&body)?; 52 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 53 + tracing::info!(did = %params.did.bright_green(), "dropbox.createMusicFolder"); 54 55 if refresh_token.is_none() { 56 return Ok(HttpResponse::Unauthorized().finish()); ··· 75 let body = read_payload!(payload); 76 let params = serde_json::from_slice::<GetFilesAtParams>(&body)?; 77 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 78 + tracing::info!(did = %params.did.bright_green(), path = %params.path.bright_green(), "dropbox.getFilesAt"); 79 80 if refresh_token.is_none() { 81 return Ok(HttpResponse::Unauthorized().finish()); ··· 100 let body = read_payload!(payload); 101 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 102 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 103 + tracing::info!(did = %params.did.bright_green(), path = %params.path.bright_green(), "dropbox.downloadFile"); 104 105 if refresh_token.is_none() { 106 return Ok(HttpResponse::Unauthorized().finish()); ··· 123 let body = read_payload!(payload); 124 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 125 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 126 + tracing::info!(did = %params.did.bright_green(), path = %params.path.bright_green(), 
"dropbox.getTemporaryLink"); 127 128 if refresh_token.is_none() { 129 return Ok(HttpResponse::Unauthorized().finish()); ··· 148 let body = read_payload!(payload); 149 let params = serde_json::from_slice::<DownloadFileParams>(&body)?; 150 let refresh_token = find_dropbox_refresh_token(&pool.clone(), &params.did).await?; 151 + tracing::info!(did = %params.did.bright_green(), path = %params.path.bright_green(), "dropbox.getMetadata"); 152 153 if refresh_token.is_none() { 154 return Ok(HttpResponse::Unauthorized().finish()); ··· 172 ) -> Result<HttpResponse, Error> { 173 let body = read_payload!(payload); 174 let params = serde_json::from_slice::<ScanFolderParams>(&body)?; 175 + tracing::info!(did = %params.did.bright_green(), path = %params.path.bright_green(), "dropbox.scanFolder"); 176 177 let pool = pool.clone(); 178 thread::spawn(move || {
+60 -74
crates/dropbox/src/scan.rs
··· 91 .await?; 92 93 if res.status().as_u16() == 400 || res.status().as_u16() == 409 { 94 - println!("Path not found: {}", path.bright_red()); 95 return Ok(()); 96 } 97 98 let entry = res.json::<Entry>().await?; 99 100 if entry.tag.clone().unwrap().as_str() == "folder" { 101 - println!("Scanning folder: {}", path.bright_green()); 102 103 let parent_path = Path::new(&path) 104 .parent() ··· 160 161 let client = Client::new(); 162 163 - println!("Downloading file: {}", path.bright_green()); 164 165 let res = client 166 .post(&format!("{}/files/download", CONTENT_URL)) ··· 176 let mut tmpfile = File::create(&tmppath)?; 177 tmpfile.write_all(&bytes)?; 178 179 - println!( 180 - "Reading file: {}", 181 - &tmppath.clone().display().to_string().bright_green() 182 - ); 183 184 let tagged_file = match Probe::open(&tmppath)?.read() { 185 Ok(tagged_file) => tagged_file, 186 Err(e) => { 187 - println!("Error opening file: {}", e); 188 return Ok(()); 189 } 190 }; ··· 193 let tag = match primary_tag { 194 Some(tag) => tag, 195 None => { 196 - println!("No tag found in file"); 197 return Ok(()); 198 } 199 }; 200 201 let pictures = tag.pictures(); 202 203 - println!( 204 - "Title: {}", 205 - tag.get_string(&lofty::tag::ItemKey::TrackTitle) 206 - .unwrap_or_default() 207 - .bright_green() 208 - ); 209 - println!( 210 - "Artist: {}", 211 - tag.get_string(&lofty::tag::ItemKey::TrackArtist) 212 - .unwrap_or_default() 213 - .bright_green() 214 ); 215 - println!( 216 - "Album Artist: {}", 217 - tag.get_string(&lofty::tag::ItemKey::AlbumArtist) 218 - .unwrap_or_default() 219 - .bright_green() 220 ); 221 - println!( 222 - "Album: {}", 223 - tag.get_string(&lofty::tag::ItemKey::AlbumTitle) 224 - .unwrap_or_default() 225 - .bright_green() 226 ); 227 - println!( 228 - "Lyrics: {}", 229 - tag.get_string(&lofty::tag::ItemKey::Lyrics) 230 - .unwrap_or_default() 231 - .bright_green() 232 ); 233 - println!("Year: {}", tag.year().unwrap_or_default().bright_green()); 234 - println!( 235 - "Track 
Number: {}", 236 - tag.track().unwrap_or_default().bright_green() 237 ); 238 - println!( 239 - "Track Total: {}", 240 - tag.track_total().unwrap_or_default().bright_green() 241 ); 242 - println!( 243 - "Release Date: {:?}", 244 - tag.get_string(&lofty::tag::ItemKey::OriginalReleaseDate) 245 - .unwrap_or_default() 246 - .bright_green() 247 ); 248 - println!( 249 - "Recording Date: {:?}", 250 - tag.get_string(&lofty::tag::ItemKey::RecordingDate) 251 - .unwrap_or_default() 252 - .bright_green() 253 ); 254 - println!( 255 - "Copyright Message: {}", 256 - tag.get_string(&lofty::tag::ItemKey::CopyrightMessage) 257 - .unwrap_or_default() 258 - .bright_green() 259 - ); 260 - println!("Pictures: {:?}", pictures); 261 262 let title = tag 263 .get_string(&lofty::tag::ItemKey::TrackTitle) ··· 290 291 match track { 292 Some(track) => { 293 - println!("Track exists: {}", title.bright_green()); 294 let parent_path = Path::new(&path) 295 .parent() 296 .map(|p| p.to_string_lossy().to_string()); 297 let status = 298 create_dropbox_path(&pool, &entry, &track, &dropbox_id, parent_path).await; 299 - println!("status: {:?}", status); 300 301 // TODO: publish file metadata to nats 302 } 303 None => { 304 - println!("Creating track: {}", title.bright_green()); 305 let album_art = 306 upload_album_cover(albumart_id.into(), pictures, &access_token).await?; 307 let client = Client::new(); ··· 338 })) 339 .send() 340 .await?; 341 - println!("Track Saved: {} {}", title, response.status()); 342 tokio::time::sleep(std::time::Duration::from_secs(3)).await; 343 344 let track = get_track_by_hash(&pool, &hash).await?; ··· 353 return Ok(()); 354 } 355 356 - println!("Failed to create track: {}", title.bright_green()); 357 } 358 } 359 ··· 413 .send() 414 .await?; 415 416 - println!("Cover uploaded: {}", response.status()); 417 418 Ok(Some(name)) 419 } ··· 433 let meta_opts = MetadataOptions::default(); 434 let format_opts = FormatOptions::default(); 435 436 - let probed = 437 - match 
symphonia::default::get_probe().format(&hint, media_source, &format_opts, &meta_opts) 438 - { 439 - Ok(probed) => probed, 440 - Err(_) => { 441 - println!("Error probing file"); 442 - return Ok(duration); 443 - } 444 - }; 445 446 if let Some(track) = probed.format.tracks().first() { 447 if let Some(duration) = track.codec_params.n_frames {
··· 91 .await?; 92 93 if res.status().as_u16() == 400 || res.status().as_u16() == 409 { 94 + tracing::error!(path = %path.bright_red(), "Path not found"); 95 return Ok(()); 96 } 97 98 let entry = res.json::<Entry>().await?; 99 100 if entry.tag.clone().unwrap().as_str() == "folder" { 101 + tracing::info!(path = %path.bright_green(), "Scanning folder"); 102 103 let parent_path = Path::new(&path) 104 .parent() ··· 160 161 let client = Client::new(); 162 163 + tracing::info!(path = %path.bright_green(), "Downloading file"); 164 165 let res = client 166 .post(&format!("{}/files/download", CONTENT_URL)) ··· 176 let mut tmpfile = File::create(&tmppath)?; 177 tmpfile.write_all(&bytes)?; 178 179 + tracing::info!(path = %tmppath.clone().display().to_string().bright_green(), "Reading file"); 180 181 let tagged_file = match Probe::open(&tmppath)?.read() { 182 Ok(tagged_file) => tagged_file, 183 Err(e) => { 184 + tracing::error!(path = %tmppath.clone().display().to_string().bright_red(), "Error reading file: {}", e); 185 return Ok(()); 186 } 187 }; ··· 190 let tag = match primary_tag { 191 Some(tag) => tag, 192 None => { 193 + tracing::error!(path = %tmppath.clone().display().to_string().bright_red(), "No tag found in file"); 194 return Ok(()); 195 } 196 }; 197 198 let pictures = tag.pictures(); 199 200 + tracing::info!( 201 + title = %tag 202 + .get_string(&lofty::tag::ItemKey::TrackTitle) 203 + .unwrap_or_default(), 204 ); 205 + tracing::info!( 206 + artist = %tag 207 + .get_string(&lofty::tag::ItemKey::TrackArtist) 208 + .unwrap_or_default(), 209 ); 210 + tracing::info!( 211 + album = %tag 212 + .get_string(&lofty::tag::ItemKey::AlbumTitle) 213 + .unwrap_or_default(), 214 ); 215 + tracing::info!( 216 + album_artist = %tag 217 + .get_string(&lofty::tag::ItemKey::AlbumArtist) 218 + .unwrap_or_default(), 219 ); 220 + tracing::info!( 221 + lyrics = %tag 222 + .get_string(&lofty::tag::ItemKey::Lyrics) 223 + .unwrap_or_default(), 224 ); 225 + tracing::info!(year = 
%tag.year().unwrap_or_default()); 226 + tracing::info!(track_number = %tag.track().unwrap_or_default()); 227 + tracing::info!(track_total = %tag.track_total().unwrap_or_default()); 228 + tracing::info!( 229 + release_date = %tag 230 + .get_string(&lofty::tag::ItemKey::OriginalReleaseDate) 231 + .unwrap_or_default(), 232 ); 233 + tracing::info!( 234 + recording_date = %tag 235 + .get_string(&lofty::tag::ItemKey::RecordingDate) 236 + .unwrap_or_default(), 237 ); 238 + tracing::info!( 239 + copyright_message = %tag 240 + .get_string(&lofty::tag::ItemKey::CopyrightMessage) 241 + .unwrap_or_default(), 242 ); 243 + tracing::info!(pictures = ?pictures); 244 245 let title = tag 246 .get_string(&lofty::tag::ItemKey::TrackTitle) ··· 273 274 match track { 275 Some(track) => { 276 + tracing::info!(title = %title.bright_green(), "Track exists"); 277 let parent_path = Path::new(&path) 278 .parent() 279 .map(|p| p.to_string_lossy().to_string()); 280 let status = 281 create_dropbox_path(&pool, &entry, &track, &dropbox_id, parent_path).await; 282 + tracing::info!(status = ?status); 283 284 // TODO: publish file metadata to nats 285 } 286 None => { 287 + tracing::info!(title = %title.bright_green(), "Creating track"); 288 let album_art = 289 upload_album_cover(albumart_id.into(), pictures, &access_token).await?; 290 let client = Client::new(); ··· 321 })) 322 .send() 323 .await?; 324 + tracing::info!(title = title, status = %response.status(), "Track saved"); 325 tokio::time::sleep(std::time::Duration::from_secs(3)).await; 326 327 let track = get_track_by_hash(&pool, &hash).await?; ··· 336 return Ok(()); 337 } 338 339 + tracing::error!(title = %title.bright_red(), "Failed to create track"); 340 } 341 } 342 ··· 396 .send() 397 .await?; 398 399 + tracing::info!(status = %response.status(), "Cover uploaded"); 400 401 Ok(Some(name)) 402 } ··· 416 let meta_opts = MetadataOptions::default(); 417 let format_opts = FormatOptions::default(); 418 419 + let probed = match 
symphonia::default::get_probe().format( 420 + &hint, 421 + media_source, 422 + &format_opts, 423 + &meta_opts, 424 + ) { 425 + Ok(probed) => probed, 426 + Err(e) => { 427 + tracing::error!(path = %path.display().to_string().bright_red(), "Error probing file: {}", e); 428 + return Ok(duration); 429 + } 430 + }; 431 432 if let Some(track) = probed.format.tracks().first() { 433 if let Some(duration) = track.codec_params.n_frames {
+2 -2
crates/googledrive/src/cmd/serve.rs
··· 27 req: HttpRequest, 28 ) -> Result<impl Responder, actix_web::Error> { 29 let method = req.match_info().get("method").unwrap_or("unknown"); 30 - println!("Method: {}", method.bright_green()); 31 32 let conn = data.get_ref().clone(); 33 handle(method, &mut payload, &req, conn) ··· 41 let addr = format!("{}:{}", host, port); 42 43 let url = format!("http://{}", addr); 44 - println!("Listening on {}", url.bright_green()); 45 46 let pool = PgPoolOptions::new() 47 .max_connections(5)
··· 27 req: HttpRequest, 28 ) -> Result<impl Responder, actix_web::Error> { 29 let method = req.match_info().get("method").unwrap_or("unknown"); 30 + tracing::info!(method = %method.bright_green(), "API call"); 31 32 let conn = data.get_ref().clone(); 33 handle(method, &mut payload, &req, conn) ··· 41 let addr = format!("{}:{}", host, port); 42 43 let url = format!("http://{}", addr); 44 + tracing::info!(url = %url.bright_green(), "Listening on"); 45 46 let pool = PgPoolOptions::new() 47 .max_connections(5)
+6 -1
crates/googledrive/src/repo/google_drive_path.rs
··· 1 use sqlx::{Pool, Postgres}; 2 3 use crate::{ ··· 47 .execute(pool) 48 .await?; 49 50 - println!("{:?}", result); 51 52 sqlx::query( 53 r#"
··· 1 + use owo_colors::OwoColorize; 2 use sqlx::{Pool, Postgres}; 3 4 use crate::{ ··· 48 .execute(pool) 49 .await?; 50 51 + tracing::info!( 52 + file_id = %file.id.bright_green(), 53 + rows_affected = %result.rows_affected(), 54 + "Google Drive path created" 55 + ); 56 57 sqlx::query( 58 r#"
+27 -79
crates/googledrive/src/scan.rs
··· 104 let file = res.json::<File>().await?; 105 106 if file.mime_type == "application/vnd.google-apps.folder" { 107 - println!("Scanning folder: {}", file.name.bright_green()); 108 109 create_google_drive_directory( 110 &pool, ··· 172 return Ok(()); 173 } 174 175 - println!("Downloading file: {}", file.name.bright_green()); 176 177 let client = Client::new(); 178 ··· 191 let mut tmpfile = std::fs::File::create(&tmppath)?; 192 tmpfile.write_all(&bytes)?; 193 194 - println!( 195 - "Reading file: {}", 196 - &tmppath.clone().display().to_string().bright_green() 197 - ); 198 199 let tagged_file = match Probe::open(&tmppath)?.read() { 200 Ok(tagged_file) => tagged_file, 201 Err(e) => { 202 - println!("Error opening file: {}", e); 203 return Ok(()); 204 } 205 }; ··· 208 let tag = match primary_tag { 209 Some(tag) => tag, 210 None => { 211 - println!("No tag found in file"); 212 return Ok(()); 213 } 214 }; 215 216 let pictures = tag.pictures(); 217 218 - println!( 219 - "Title: {}", 220 - tag.get_string(&lofty::tag::ItemKey::TrackTitle) 221 - .unwrap_or_default() 222 - .bright_green() 223 - ); 224 - println!( 225 - "Artist: {}", 226 - tag.get_string(&lofty::tag::ItemKey::TrackArtist) 227 - .unwrap_or_default() 228 - .bright_green() 229 - ); 230 - println!( 231 - "Album Artist: {}", 232 - tag.get_string(&lofty::tag::ItemKey::AlbumArtist) 233 - .unwrap_or_default() 234 - .bright_green() 235 - ); 236 - println!( 237 - "Album: {}", 238 - tag.get_string(&lofty::tag::ItemKey::AlbumTitle) 239 - .unwrap_or_default() 240 - .bright_green() 241 - ); 242 - println!( 243 - "Lyrics: {}", 244 - tag.get_string(&lofty::tag::ItemKey::Lyrics) 245 - .unwrap_or_default() 246 - .bright_green() 247 - ); 248 - println!("Year: {}", tag.year().unwrap_or_default().bright_green()); 249 - println!( 250 - "Track Number: {}", 251 - tag.track().unwrap_or_default().bright_green() 252 - ); 253 - println!( 254 - "Track Total: {}", 255 - tag.track_total().unwrap_or_default().bright_green() 256 - ); 257 - 
println!( 258 - "Release Date: {:?}", 259 - tag.get_string(&lofty::tag::ItemKey::OriginalReleaseDate) 260 - .unwrap_or_default() 261 - .bright_green() 262 - ); 263 - println!( 264 - "Recording Date: {:?}", 265 - tag.get_string(&lofty::tag::ItemKey::RecordingDate) 266 - .unwrap_or_default() 267 - .bright_green() 268 - ); 269 - println!( 270 - "Copyright Message: {}", 271 - tag.get_string(&lofty::tag::ItemKey::CopyrightMessage) 272 - .unwrap_or_default() 273 - .bright_green() 274 - ); 275 - println!("Pictures: {:?}", pictures); 276 277 let title = tag 278 .get_string(&lofty::tag::ItemKey::TrackTitle) ··· 304 305 match track { 306 Some(track) => { 307 - println!("Track exists: {}", title.bright_green()); 308 let parent_drive_id = parent_drive_file_id.as_deref(); 309 - let status = create_google_drive_path( 310 &pool, 311 &file, 312 &track, ··· 315 ) 316 .await?; 317 318 - println!("status: {:?}", status); 319 // TODO: publish file metadata to nats 320 } 321 None => { 322 - println!("Creating track: {}", title.bright_green()); 323 324 let albumart = 325 upload_album_cover(albumart_id.into(), pictures, &access_token).await?; ··· 358 })) 359 .send() 360 .await?; 361 - println!("Track Saved: {} {}", title, response.status()); 362 tokio::time::sleep(std::time::Duration::from_secs(3)).await; 363 364 let track = get_track_by_hash(&pool, &hash).await?; 365 if let Some(track) = track { 366 let parent_drive_id = parent_drive_file_id.as_deref(); 367 - let status = create_google_drive_path( 368 &pool, 369 &file, 370 &track, 371 &google_drive_id, 372 parent_drive_id.unwrap_or(""), 373 ) 374 - .await; 375 - 376 - println!("status: {:?}", status); 377 378 // TODO: publish file metadata to nats 379 ··· 382 return Ok(()); 383 } 384 385 - println!("Failed to create track: {}", title.bright_green()); 386 } 387 } 388 ··· 442 .send() 443 .await?; 444 445 - println!("Cover uploaded: {}", response.status()); 446 447 Ok(Some(name)) 448 } ··· 466 match 
symphonia::default::get_probe().format(&hint, media_source, &format_opts, &meta_opts) 467 { 468 Ok(probed) => probed, 469 - Err(_) => { 470 - println!("Error probing file"); 471 return Ok(duration); 472 } 473 };
··· 104 let file = res.json::<File>().await?; 105 106 if file.mime_type == "application/vnd.google-apps.folder" { 107 + tracing::info!(folder = %file.name.bright_green(), "Scanning folder"); 108 109 create_google_drive_directory( 110 &pool, ··· 172 return Ok(()); 173 } 174 175 + tracing::info!(file = %file.name.bright_green(), "Downloading file"); 176 177 let client = Client::new(); 178 ··· 191 let mut tmpfile = std::fs::File::create(&tmppath)?; 192 tmpfile.write_all(&bytes)?; 193 194 + tracing::info!(path = %tmppath.display(), "Reading file"); 195 196 let tagged_file = match Probe::open(&tmppath)?.read() { 197 Ok(tagged_file) => tagged_file, 198 Err(e) => { 199 + tracing::warn!(file = %file.name.bright_green(), error = %e, "Failed to open file with lofty"); 200 return Ok(()); 201 } 202 }; ··· 205 let tag = match primary_tag { 206 Some(tag) => tag, 207 None => { 208 + tracing::warn!(file = %file.name.bright_green(), "No tag found in file"); 209 return Ok(()); 210 } 211 }; 212 213 let pictures = tag.pictures(); 214 215 + tracing::info!(title = %tag.get_string(&lofty::tag::ItemKey::TrackTitle).unwrap_or_default(), "Title"); 216 + tracing::info!(artist = %tag.get_string(&lofty::tag::ItemKey::TrackArtist).unwrap_or_default(), "Artist"); 217 + tracing::info!(album_artist = %tag.get_string(&lofty::tag::ItemKey::AlbumArtist).unwrap_or_default(), "Album artist"); 218 + tracing::info!(album = %tag.get_string(&lofty::tag::ItemKey::AlbumTitle).unwrap_or_default(), "Album"); 219 + tracing::info!(lyrics = %tag.get_string(&lofty::tag::ItemKey::Lyrics).unwrap_or_default(), "Lyrics"); 220 + tracing::info!(year = %tag.year().unwrap_or_default(), "Year"); 221 + tracing::info!(track_number = %tag.track().unwrap_or_default(), "Track number"); 222 + tracing::info!(track_total = %tag.track_total().unwrap_or_default(), "Track total"); 223 + tracing::info!(release_date = %tag.get_string(&lofty::tag::ItemKey::OriginalReleaseDate).unwrap_or_default(), "Release date"); 224 + 
tracing::info!(recording_date = %tag.get_string(&lofty::tag::ItemKey::RecordingDate).unwrap_or_default(), "Recording date"); 225 + tracing::info!(copyright = %tag.get_string(&lofty::tag::ItemKey::CopyrightMessage).unwrap_or_default(), "Copyright message"); 226 + tracing::info!(pictures = %pictures.len(), "Pictures found"); 227 228 let title = tag 229 .get_string(&lofty::tag::ItemKey::TrackTitle) ··· 255 256 match track { 257 Some(track) => { 258 + tracing::info!(title = %title.bright_green(), "Track exists"); 259 let parent_drive_id = parent_drive_file_id.as_deref(); 260 + create_google_drive_path( 261 &pool, 262 &file, 263 &track, ··· 266 ) 267 .await?; 268 269 // TODO: publish file metadata to nats 270 } 271 None => { 272 + tracing::info!(title = %title.bright_green(), "Creating track"); 273 274 let albumart = 275 upload_album_cover(albumart_id.into(), pictures, &access_token).await?; ··· 308 })) 309 .send() 310 .await?; 311 + tracing::info!(status = %response.status(), "Track saved"); 312 tokio::time::sleep(std::time::Duration::from_secs(3)).await; 313 314 let track = get_track_by_hash(&pool, &hash).await?; 315 if let Some(track) = track { 316 let parent_drive_id = parent_drive_file_id.as_deref(); 317 + create_google_drive_path( 318 &pool, 319 &file, 320 &track, 321 &google_drive_id, 322 parent_drive_id.unwrap_or(""), 323 ) 324 + .await?; 325 326 // TODO: publish file metadata to nats 327 ··· 330 return Ok(()); 331 } 332 333 + tracing::warn!(title = %title.bright_green(), "Failed to create track"); 334 } 335 } 336 ··· 390 .send() 391 .await?; 392 393 + tracing::info!(status = %response.status(), "Cover uploaded"); 394 395 Ok(Some(name)) 396 } ··· 414 match symphonia::default::get_probe().format(&hint, media_source, &format_opts, &meta_opts) 415 { 416 Ok(probed) => probed, 417 + Err(e) => { 418 + tracing::warn!(path = %path.display(), error = %e, "Failed to probe media"); 419 return Ok(duration); 420 } 421 };
+25 -68
crates/jetstream/src/repo.rs
··· 56 57 let user_id = save_user(&mut tx, did).await?; 58 59 - println!( 60 - "Saving scrobble: {} ", 61 - format!( 62 - "{} - {} - {}", 63 - scrobble_record.title, scrobble_record.artist, scrobble_record.album 64 - ) 65 - .magenta() 66 - ); 67 68 sqlx::query( 69 r#" ··· 144 { 145 Ok(_) => {} 146 Err(e) => { 147 - eprintln!("Failed to push to webhook queue: {}", e); 148 } 149 } 150 } ··· 188 } 189 } 190 _ => { 191 - println!("Unsupported operation: {}", commit.operation); 192 } 193 } 194 Ok(()) ··· 341 .await?; 342 343 if !albums.is_empty() { 344 - println!("Album already exists: {}", albums[0].title.magenta()); 345 return Ok(albums[0].xata_id.clone()); 346 } 347 348 - println!("Saving album: {}", scrobble_record.album.magenta()); 349 350 let uri: Option<String> = None; 351 let artist_uri: Option<String> = None; ··· 402 .await?; 403 404 if !artists.is_empty() { 405 - println!("Artist already exists: {}", artists[0].name.magenta()); 406 return Ok(artists[0].xata_id.clone()); 407 } 408 409 - println!("Saving artist: {}", scrobble_record.album_artist.magenta()); 410 411 let uri: Option<String> = None; 412 let picture = ""; ··· 450 .await?; 451 452 if !album_tracks.is_empty() { 453 - println!( 454 - "Album track already exists: {}", 455 - format!("{} - {}", album_id, track_id).magenta() 456 - ); 457 return Ok(()); 458 } 459 460 - println!( 461 - "Saving album track: {}", 462 - format!("{} - {}", album_id, track_id).magenta() 463 - ); 464 465 sqlx::query( 466 r#" ··· 492 .await?; 493 494 if !artist_tracks.is_empty() { 495 - println!( 496 - "Artist track already exists: {}", 497 - format!("{} - {}", artist_id, track_id).magenta() 498 - ); 499 return Ok(()); 500 } 501 502 - println!( 503 - "Saving artist track: {}", 504 - format!("{} - {}", artist_id, track_id).magenta() 505 - ); 506 507 sqlx::query( 508 r#" ··· 534 .await?; 535 536 if !artist_albums.is_empty() { 537 - println!( 538 - "Artist album already exists: {}", 539 - format!("{} - {}", artist_id, 
album_id).magenta() 540 - ); 541 return Ok(()); 542 } 543 544 - println!( 545 - "Saving artist album: {}", 546 - format!("{} - {}", artist_id, album_id).magenta() 547 - ); 548 549 sqlx::query( 550 r#" ··· 585 586 match artists.is_empty() { 587 true => { 588 - println!("Saving artist: {}", record.name.magenta()); 589 let did = users[0].did.clone(); 590 sqlx::query( 591 r#" ··· 632 .await?; 633 634 if !user_artists.is_empty() { 635 - println!( 636 - "User artist already exists: {}", 637 - format!("{} - {}", user_id, artist_id).magenta() 638 - ); 639 sqlx::query( 640 r#" 641 UPDATE user_artists ··· 652 return Ok(()); 653 } 654 655 - println!( 656 - "Saving user artist: {}", 657 - format!("{} - {}", user_id, artist_id).magenta() 658 - ); 659 660 sqlx::query( 661 r#" ··· 699 700 match albums.is_empty() { 701 true => { 702 - println!("Saving album: {}", record.title.magenta()); 703 let did = users[0].did.clone(); 704 sqlx::query( 705 r#" ··· 752 .await?; 753 754 if !user_albums.is_empty() { 755 - println!( 756 - "User album already exists: {}", 757 - format!("{} - {}", user_id, album_id).magenta() 758 - ); 759 sqlx::query( 760 r#" 761 UPDATE user_albums ··· 772 return Ok(()); 773 } 774 775 - println!( 776 - "Saving user album: {}", 777 - format!("{} - {}", user_id, album_id).magenta() 778 - ); 779 780 sqlx::query( 781 r#" ··· 822 823 match tracks.is_empty() { 824 true => { 825 - println!("Saving track: {}", record.title.magenta()); 826 let did = users[0].did.clone(); 827 sqlx::query( 828 r#" ··· 894 .await?; 895 896 if !user_tracks.is_empty() { 897 - println!( 898 - "User track already exists: {}", 899 - format!("{} - {}", user_id, track_id).magenta() 900 - ); 901 sqlx::query( 902 r#" 903 UPDATE user_tracks ··· 914 return Ok(()); 915 } 916 917 - println!( 918 - "Saving user track: {}", 919 - format!("{} - {}", user_id, track_id).magenta() 920 - ); 921 922 sqlx::query( 923 r#" ··· 954 .await?; 955 956 if artists.is_empty() { 957 - println!("Artist not found: {}", 
record.name.magenta()); 958 return Ok(()); 959 } 960 ··· 1023 .fetch_all(&mut **tx) 1024 .await?; 1025 if albums.is_empty() { 1026 - println!("Album not found: {}", record.title.magenta()); 1027 return Ok(()); 1028 } 1029 let album_id = &albums[0].xata_id; ··· 1082 .await?; 1083 1084 if tracks.is_empty() { 1085 - println!("Track not found: {}", record.title.magenta()); 1086 return Ok(()); 1087 } 1088
··· 56 57 let user_id = save_user(&mut tx, did).await?; 58 59 + tracing::info!(title = %scrobble_record.title.magenta(), artist = %scrobble_record.artist.magenta(), album = %scrobble_record.album.magenta(), "Saving scrobble"); 60 61 sqlx::query( 62 r#" ··· 137 { 138 Ok(_) => {} 139 Err(e) => { 140 + tracing::error!(error = %e, "Failed to push to webhook queue"); 141 } 142 } 143 } ··· 181 } 182 } 183 _ => { 184 + tracing::warn!(operation = %commit.operation, "Unsupported operation"); 185 } 186 } 187 Ok(()) ··· 334 .await?; 335 336 if !albums.is_empty() { 337 + tracing::info!(name = %albums[0].title.magenta(), "Album already exists"); 338 return Ok(albums[0].xata_id.clone()); 339 } 340 341 + tracing::info!(name = %scrobble_record.album, "Saving new album"); 342 343 let uri: Option<String> = None; 344 let artist_uri: Option<String> = None; ··· 395 .await?; 396 397 if !artists.is_empty() { 398 + tracing::info!(name = %scrobble_record.album_artist, "Artist already exists"); 399 return Ok(artists[0].xata_id.clone()); 400 } 401 402 + tracing::info!(name = %scrobble_record.album_artist, "Saving new artist"); 403 404 let uri: Option<String> = None; 405 let picture = ""; ··· 443 .await?; 444 445 if !album_tracks.is_empty() { 446 + tracing::info!(album_id = %album_id, track_id = %track_id, "Album track already exists"); 447 return Ok(()); 448 } 449 450 + tracing::info!(album_id = %album_id, track_id = %track_id, "Saving album track"); 451 452 sqlx::query( 453 r#" ··· 479 .await?; 480 481 if !artist_tracks.is_empty() { 482 + tracing::info!(artist_id = %artist_id, track_id = %track_id, "Artist track already exists"); 483 return Ok(()); 484 } 485 486 + tracing::info!(artist_id = %artist_id, track_id = %track_id, "Saving artist track"); 487 488 sqlx::query( 489 r#" ··· 515 .await?; 516 517 if !artist_albums.is_empty() { 518 + tracing::info!(artist_id = %artist_id, album_id = %album_id, "Artist album already exists"); 519 return Ok(()); 520 } 521 522 + tracing::info!(artist_id = 
%artist_id, album_id = %album_id, "Saving artist album"); 523 524 sqlx::query( 525 r#" ··· 560 561 match artists.is_empty() { 562 true => { 563 + tracing::info!(name = %record.name, "Artist not found in database, inserting new artist"); 564 let did = users[0].did.clone(); 565 sqlx::query( 566 r#" ··· 607 .await?; 608 609 if !user_artists.is_empty() { 610 + tracing::info!(user_id = %user_id, artist_id = %artist_id, "Updating user artist"); 611 sqlx::query( 612 r#" 613 UPDATE user_artists ··· 624 return Ok(()); 625 } 626 627 + tracing::info!(user_id = %user_id, artist_id = %artist_id, "Inserting user artist"); 628 629 sqlx::query( 630 r#" ··· 668 669 match albums.is_empty() { 670 true => { 671 + tracing::info!(title = %record.title, artist = %record.artist, "Album not found in database, inserting new album"); 672 let did = users[0].did.clone(); 673 sqlx::query( 674 r#" ··· 721 .await?; 722 723 if !user_albums.is_empty() { 724 + tracing::info!(user_id = %user_id, album_id = %album_id, "Updating user album"); 725 sqlx::query( 726 r#" 727 UPDATE user_albums ··· 738 return Ok(()); 739 } 740 741 + tracing::info!(user_id = %user_id, album_id = %album_id, "Inserting user album"); 742 743 sqlx::query( 744 r#" ··· 785 786 match tracks.is_empty() { 787 true => { 788 + tracing::info!(title = %record.title, artist = %record.artist, album = %record.album, "Track not found in database, inserting new track"); 789 let did = users[0].did.clone(); 790 sqlx::query( 791 r#" ··· 857 .await?; 858 859 if !user_tracks.is_empty() { 860 + tracing::info!(user_id = %user_id, track_id = %track_id, "Updating user track"); 861 sqlx::query( 862 r#" 863 UPDATE user_tracks ··· 874 return Ok(()); 875 } 876 877 + tracing::info!(user_id = %user_id, track_id = %track_id, "Inserting user track"); 878 879 sqlx::query( 880 r#" ··· 911 .await?; 912 913 if artists.is_empty() { 914 + tracing::warn!(name = %record.name, "Artist not found in database"); 915 return Ok(()); 916 } 917 ··· 980 .fetch_all(&mut **tx) 
981 .await?; 982 if albums.is_empty() { 983 + tracing::warn!(title = %record.title, "Album not found in database"); 984 return Ok(()); 985 } 986 let album_id = &albums[0].xata_id; ··· 1039 .await?; 1040 1041 if tracks.is_empty() { 1042 + tracing::warn!(title = %record.title, "Track not found in database"); 1043 return Ok(()); 1044 } 1045
+6 -9
crates/jetstream/src/subscriber.rs
··· 40 let pool = Arc::new(Mutex::new(pool)); 41 42 let (mut ws_stream, _) = connect_async(&self.service_url).await?; 43 - println!( 44 - "Connected to jetstream at {}", 45 - self.service_url.bright_green() 46 - ); 47 48 while let Some(msg) = ws_stream.next().await { 49 match msg { 50 Ok(msg) => { 51 if let Err(e) = handle_message(state.clone(), pool.clone(), msg).await { 52 - eprintln!("Error handling message: {}", e); 53 } 54 } 55 Err(e) => { 56 - eprintln!("WebSocket error: {}", e); 57 break; 58 } 59 } ··· 76 return Ok::<(), Error>(()); 77 } 78 79 - println!("Received message: {:#?}", message); 80 if let Some(commit) = message.commit { 81 match save_scrobble(state, pool, &message.did, commit).await { 82 Ok(_) => { 83 - println!("Scrobble saved successfully"); 84 } 85 Err(e) => { 86 - eprintln!("Error saving scrobble: {}", e); 87 } 88 } 89 }
··· 40 let pool = Arc::new(Mutex::new(pool)); 41 42 let (mut ws_stream, _) = connect_async(&self.service_url).await?; 43 + tracing::info!(url = %self.service_url.bright_green(), "Connected to jetstream at"); 44 45 while let Some(msg) = ws_stream.next().await { 46 match msg { 47 Ok(msg) => { 48 if let Err(e) = handle_message(state.clone(), pool.clone(), msg).await { 49 + tracing::error!(error = %e, "Error handling message"); 50 } 51 } 52 Err(e) => { 53 + tracing::error!(error = %e, "WebSocket error"); 54 break; 55 } 56 } ··· 73 return Ok::<(), Error>(()); 74 } 75 76 + tracing::info!(message = %text.bright_green(), "Received message"); 77 if let Some(commit) = message.commit { 78 match save_scrobble(state, pool, &message.did, commit).await { 79 Ok(_) => { 80 + tracing::info!(user_id = %message.did.bright_green(), "Scrobble saved successfully"); 81 } 82 Err(e) => { 83 + tracing::error!(error = %e, "Error saving scrobble"); 84 } 85 } 86 }
+2 -2
crates/jetstream/src/webhook/discord/mod.rs
··· 37 embeds: Vec<DiscordEmbed>, 38 ) -> reqwest::Result<()> { 39 if discord_webhook_url.is_empty() { 40 - println!("DISCORD_WEBHOOK_URL is not set, skipping webhook post"); 41 return Ok(()); 42 } 43 ··· 48 let res = http.post(discord_webhook_url).json(&body).send().await?; 49 if !res.status().is_success() { 50 let text = res.text().await.unwrap_or_default(); 51 - eprintln!("Failed to post to Discord webhook: {}", text); 52 } 53 Ok(()) 54 }
··· 37 embeds: Vec<DiscordEmbed>, 38 ) -> reqwest::Result<()> { 39 if discord_webhook_url.is_empty() { 40 + tracing::warn!("DISCORD_WEBHOOK_URL is not set, skipping webhook post"); 41 return Ok(()); 42 } 43 ··· 48 let res = http.post(discord_webhook_url).json(&body).send().await?; 49 if !res.status().is_success() { 50 let text = res.text().await.unwrap_or_default(); 51 + tracing::error!(error = %text, "Failed to post to Discord webhook"); 52 } 53 Ok(()) 54 }
+2 -2
crates/jetstream/src/webhook_worker.rs
··· 79 } 80 Ok(None) => break, 81 Err(e) => { 82 - eprintln!("Failed to pop from Redis: {}", e); 83 break; 84 } 85 } ··· 93 tokens -= 1; 94 95 if let Err(e) = discord::post_embeds(&http, &discord_webhook_url, embeds).await { 96 - eprintln!("Failed to post to Discord webhook: {}", e); 97 } 98 } 99 }
··· 79 } 80 Ok(None) => break, 81 Err(e) => { 82 + tracing::error!(error = %e, "Failed to pop from Redis"); 83 break; 84 } 85 } ··· 93 tokens -= 1; 94 95 if let Err(e) = discord::post_embeds(&http, &discord_webhook_url, embeds).await { 96 + tracing::error!(error = %e, "Failed to post to Discord webhook"); 97 } 98 } 99 }
+1 -1
crates/scrobbler/src/auth.rs
··· 37 let expected_password = md5::compute(expected_password); 38 let expected_password = format!("{:x}", expected_password); 39 if expected_password != password_md5 { 40 - println!("{} != {}", expected_password, password_md5); 41 return Err(Error::msg("Invalid password")); 42 } 43 Ok(())
··· 37 let expected_password = md5::compute(expected_password); 38 let expected_password = format!("{:x}", expected_password); 39 if expected_password != password_md5 { 40 + tracing::error!(expected = %expected_password, provided = %password_md5, "Invalid password"); 41 return Err(Error::msg("Invalid password")); 42 } 43 Ok(())
+1 -1
crates/scrobbler/src/handlers/v1/nowplaying.rs
··· 18 let a = form.get("a").unwrap().to_string(); 19 let t = form.get("t").unwrap().to_string(); 20 21 - println!("Now playing: {} - {} {}", a, t, s.cyan()); 22 23 let user_id = verify_session_id(cache, &s); 24 if let Err(e) = user_id {
··· 18 let a = form.get("a").unwrap().to_string(); 19 let t = form.get("t").unwrap().to_string(); 20 21 + tracing::info!(artist = %a, track = %t, user = %s.cyan(), "Now playing"); 22 23 let user_id = verify_session_id(cache, &s); 24 if let Err(e) = user_id {
+5 -6
crates/scrobbler/src/handlers/v1/submission.rs
··· 1 2 3 ··· 21 22 23 24 - 25 - 26 - 27 - 28 - 29 } 30 31 let user_id = user_id.unwrap(); 32 - println!("Submission: {} - {} {} {} {}", a, t, i, user_id, s.cyan()); 33 34 match scrobble_v1(pool, cache, &form).await { 35 Ok(_) => Ok(HttpResponse::Ok().body("OK\n")),
··· 1 + use actix_web::HttpResponse; 2 + use anyhow::Error; 3 + use serde_json::json; 4 + use std::{collections::BTreeMap, sync::Arc}; 5 6 7 ··· 25 26 27 28 } 29 30 let user_id = user_id.unwrap(); 31 + tracing::info!(artist = %a, track = %t, timestamp = %i, user_id = %user_id, "Submission"); 32 33 match scrobble_v1(pool, cache, &form).await { 34 Ok(_) => Ok(HttpResponse::Ok().body("OK\n")),
+1 -4
crates/scrobbler/src/lib.rs
··· 56 .parse::<u16>() 57 .unwrap_or(7882); 58 59 - println!( 60 - "Starting Scrobble server @ {}", 61 - format!("{}:{}", host, port).green() 62 - ); 63 64 let limiter = web::Data::new( 65 Limiter::builder("redis://127.0.0.1")
··· 56 .parse::<u16>() 57 .unwrap_or(7882); 58 59 + tracing::info!(url = %format!("http://{}:{}", host, port).bright_green(), "Starting Scrobble server @"); 60 61 let limiter = web::Data::new( 62 Limiter::builder("redis://127.0.0.1")
+2 -8
crates/scrobbler/src/listenbrainz/core/submit.rs
··· 17 token: &str, 18 ) -> Result<HttpResponse, Error> { 19 if payload.listen_type != "playing_now" { 20 - println!("skipping listen type: {}", payload.listen_type.cyan()); 21 return Ok(HttpResponse::Ok().json(json!({ 22 "status": "ok", 23 "payload": { ··· 62 63 cache.del(&format!("listenbrainz:cache:{}:{}:{}", artist, track, did))?; 64 65 - println!( 66 - "Retryable error on attempt {}/{}: {}", 67 - attempt, 68 - RETRIES, 69 - e.to_string().yellow() 70 - ); 71 - println!("{:#?}", payload); 72 73 if attempt == RETRIES { 74 return Ok(HttpResponse::BadRequest().json(serde_json::json!({
··· 17 token: &str, 18 ) -> Result<HttpResponse, Error> { 19 if payload.listen_type != "playing_now" { 20 + tracing::info!(listen_type = %payload.listen_type.cyan(), "Skipping listen type"); 21 return Ok(HttpResponse::Ok().json(json!({ 22 "status": "ok", 23 "payload": { ··· 62 63 cache.del(&format!("listenbrainz:cache:{}:{}:{}", artist, track, did))?; 64 65 + tracing::error!(error = %e, attempt = attempt, "Retryable error submitting listens for {} - {} (attempt {}/{})", artist, track, attempt, RETRIES); 66 67 if attempt == RETRIES { 68 return Ok(HttpResponse::BadRequest().json(serde_json::json!({
+1 -1
crates/scrobbler/src/listenbrainz/core/validate_token.rs
··· 12 }, 13 }))), 14 Err(e) => { 15 - println!("Error validating token: {}", e); 16 Ok(HttpResponse::BadRequest().json(serde_json::json!({ 17 "error": 4, 18 "message": format!("Failed to validate token: {}", e)
··· 12 }, 13 }))), 14 Err(e) => { 15 + tracing::error!(error = %e, "Failed to validate token"); 16 Ok(HttpResponse::BadRequest().json(serde_json::json!({ 17 "error": 4, 18 "message": format!("Failed to validate token: {}", e)
+12 -13
crates/scrobbler/src/listenbrainz/handlers.rs
··· 58 let body = String::from_utf8_lossy(&payload); 59 let req = serde_json::from_str::<SubmitListensRequest>(&body) 60 .map_err(|e| { 61 - println!("{}", body); 62 - println!("Error parsing request body: {}", e); 63 e 64 }) 65 .map_err(actix_web::error::ErrorBadRequest)?; ··· 116 })); 117 } 118 Err(e) => { 119 - println!("Error validating token: {}", e); 120 return HttpResponse::InternalServerError().finish(); 121 } 122 } ··· 127 query: web::Query<String>, 128 data: web::Data<Arc<Pool<Postgres>>>, 129 ) -> impl Responder { 130 - let pool = data.get_ref(); 131 let query = query.into_inner(); 132 133 match search_users(&query).await { 134 Ok(users) => HttpResponse::Ok().json(users), 135 Err(e) => { 136 - println!("Error searching users: {}", e); 137 HttpResponse::InternalServerError().finish() 138 } 139 } ··· 145 match get_listens(&user_name).await { 146 Ok(listens) => HttpResponse::Ok().json(listens), 147 Err(e) => { 148 - println!("Error getting listens for user {}: {}", user_name, e); 149 HttpResponse::InternalServerError().finish() 150 } 151 } ··· 157 match get_listen_count(&user_name).await { 158 Ok(count) => HttpResponse::Ok().json(count), 159 Err(e) => { 160 - println!("Error getting listen count for user {}: {}", user_name, e); 161 HttpResponse::InternalServerError().finish() 162 } 163 } ··· 169 match get_playing_now(&user_name).await { 170 Ok(playing_now) => HttpResponse::Ok().json(playing_now), 171 Err(e) => { 172 - println!("Error getting playing now for user {}: {}", user_name, e); 173 HttpResponse::InternalServerError().finish() 174 } 175 } ··· 181 match get_top_artists(&user_name).await { 182 Ok(artists) => HttpResponse::Ok().json(artists), 183 Err(e) => { 184 - println!("Error getting top artists: {}", e); 185 HttpResponse::InternalServerError().finish() 186 } 187 } ··· 193 match get_top_releases(&user_name).await { 194 Ok(releases) => HttpResponse::Ok().json(releases), 195 Err(e) => { 196 - println!("Error getting top releases: {}", e); 197 
HttpResponse::InternalServerError().finish() 198 } 199 } ··· 205 match get_top_recordings(&user_name).await { 206 Ok(recordings) => HttpResponse::Ok().json(recordings), 207 Err(e) => { 208 - println!("Error getting sitewide recordings: {}", e); 209 HttpResponse::InternalServerError().finish() 210 } 211 } ··· 217 match get_top_release_groups(&user_name).await { 218 Ok(release_groups) => HttpResponse::Ok().json(release_groups), 219 Err(e) => { 220 - println!("Error getting top release groups: {}", e); 221 HttpResponse::InternalServerError().finish() 222 } 223 } ··· 229 match get_top_recordings(&user_name).await { 230 Ok(recordings) => HttpResponse::Ok().json(recordings), 231 Err(e) => { 232 - println!("Error getting top recordings: {}", e); 233 HttpResponse::InternalServerError().finish() 234 } 235 }
··· 58 let body = String::from_utf8_lossy(&payload); 59 let req = serde_json::from_str::<SubmitListensRequest>(&body) 60 .map_err(|e| { 61 + tracing::error!(body = %body, error = %e, "Error parsing request body"); 62 e 63 }) 64 .map_err(actix_web::error::ErrorBadRequest)?; ··· 115 })); 116 } 117 Err(e) => { 118 + tracing::error!(error = %e, "Error validating token"); 119 return HttpResponse::InternalServerError().finish(); 120 } 121 } ··· 126 query: web::Query<String>, 127 data: web::Data<Arc<Pool<Postgres>>>, 128 ) -> impl Responder { 129 + let _pool = data.get_ref(); 130 let query = query.into_inner(); 131 132 match search_users(&query).await { 133 Ok(users) => HttpResponse::Ok().json(users), 134 Err(e) => { 135 + tracing::error!(error = %e, "Error searching users"); 136 HttpResponse::InternalServerError().finish() 137 } 138 } ··· 144 match get_listens(&user_name).await { 145 Ok(listens) => HttpResponse::Ok().json(listens), 146 Err(e) => { 147 + tracing::error!(error = %e, "Error getting listens for user {}", user_name); 148 HttpResponse::InternalServerError().finish() 149 } 150 } ··· 156 match get_listen_count(&user_name).await { 157 Ok(count) => HttpResponse::Ok().json(count), 158 Err(e) => { 159 + tracing::error!(error = %e, "Error getting listen count for user {}", user_name); 160 HttpResponse::InternalServerError().finish() 161 } 162 } ··· 168 match get_playing_now(&user_name).await { 169 Ok(playing_now) => HttpResponse::Ok().json(playing_now), 170 Err(e) => { 171 + tracing::error!(error = %e, "Error getting playing now for user {}", user_name); 172 HttpResponse::InternalServerError().finish() 173 } 174 } ··· 180 match get_top_artists(&user_name).await { 181 Ok(artists) => HttpResponse::Ok().json(artists), 182 Err(e) => { 183 + tracing::error!(error = %e, "Error getting top artists"); 184 HttpResponse::InternalServerError().finish() 185 } 186 } ··· 192 match get_top_releases(&user_name).await { 193 Ok(releases) => HttpResponse::Ok().json(releases), 194 
Err(e) => { 195 + tracing::error!(error = %e, "Error getting top releases"); 196 HttpResponse::InternalServerError().finish() 197 } 198 } ··· 204 match get_top_recordings(&user_name).await { 205 Ok(recordings) => HttpResponse::Ok().json(recordings), 206 Err(e) => { 207 + tracing::error!(error = %e, "Error getting top recordings"); 208 HttpResponse::InternalServerError().finish() 209 } 210 } ··· 216 match get_top_release_groups(&user_name).await { 217 Ok(release_groups) => HttpResponse::Ok().json(release_groups), 218 Err(e) => { 219 + tracing::error!(error = %e, "Error getting top release groups"); 220 HttpResponse::InternalServerError().finish() 221 } 222 } ··· 228 match get_top_recordings(&user_name).await { 229 Ok(recordings) => HttpResponse::Ok().json(recordings), 230 Err(e) => { 231 + tracing::error!(error = %e, "Error getting top recordings"); 232 HttpResponse::InternalServerError().finish() 233 } 234 }
-66
crates/scrobbler/src/main.rs
··· 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - 13 - 14 - 15 - 16 - 17 - 18 - 19 - 20 - 21 - 22 - 23 - 24 - 25 - 26 - 27 - 28 - 29 - 30 - 31 - 32 - 33 - 34 - 35 - 36 - 37 - 38 - 39 - 40 - 41 - 42 - 43 - 44 - 45 - 46 - 47 - 48 - 49 - 50 - 51 - 52 - 53 - 54 - 55 - 56 - 57 - 58 - .parse::<u16>() 59 - .unwrap_or(7882); 60 - 61 - println!( 62 - "Starting Scrobble server @ {}", 63 - format!("{}:{}", host, port).green() 64 - ); 65 - 66 - let limiter = web::Data::new(
···
+3 -4
crates/scrobbler/src/rocksky.rs
··· 25 let token = generate_token(did)?; 26 let client = Client::new(); 27 28 - println!("Scrobbling track: \n {:#?}", track); 29 30 let response = client 31 .post(&format!("{}/now-playing", ROCKSKY_API)) ··· 35 .await?; 36 37 let status = response.status(); 38 - println!("Response status: {}", status); 39 if !status.is_success() { 40 let response_text = response.text().await?; 41 - println!("did: {}", did); 42 - println!("Failed to scrobble track: {}", response_text); 43 return Err(Error::msg(format!( 44 "Failed to scrobble track: {}", 45 response_text
··· 25 let token = generate_token(did)?; 26 let client = Client::new(); 27 28 + tracing::info!(did = %did, track = ?track, "Scrobbling track"); 29 30 let response = client 31 .post(&format!("{}/now-playing", ROCKSKY_API)) ··· 35 .await?; 36 37 let status = response.status(); 38 + tracing::info!(did = %did, artist = %track.artist, track = %track.title, status = %status, "Scrobble response"); 39 if !status.is_success() { 40 let response_text = response.text().await?; 41 + tracing::error!(did = %did, response = %response_text, "Failed to scrobble track"); 42 return Err(Error::msg(format!( 43 "Failed to scrobble track: {}", 44 response_text
+25 -56
crates/scrobbler/src/scrobbler.rs
··· 1 2 3 4 ··· 125 126 127 128 - 129 - 130 - 131 - 132 - 133 ); 134 let cached = cache.get(&key)?; 135 if cached.is_some() { 136 - println!("{}", format!("Cached: {}", key).yellow()); 137 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 138 scrobble.album = Some(track.album.clone()); 139 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 144 if let Some(mbid) = &scrobble.mbid { 145 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 146 let result = mb_client.get_recording(mbid).await?; 147 - println!("{}", "Musicbrainz (mbid)".yellow()); 148 scrobble.album = Some(Track::from(result.clone()).album); 149 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 150 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 154 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 155 156 if let Some(track) = result { 157 - println!("{}", "Xata (track)".yellow()); 158 scrobble.album = Some(track.album.clone()); 159 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 160 let artist = repo::artist::get_artist_by_track_id(pool, &track.xata_id).await?; ··· 204 .await?; 205 206 if let Some(track) = result.tracks.items.first() { 207 - println!("{}", "Spotify (track)".yellow()); 208 scrobble.album = Some(track.album.name.clone()); 209 let mut track = track.clone(); 210 ··· 232 233 if let Some(recording) = result.recordings.first() { 234 let result = mb_client.get_recording(&recording.id).await?; 235 - println!("{}", "Musicbrainz (recording)".yellow()); 236 scrobble.album = Some(Track::from(result.clone()).album); 237 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 238 tokio::time::sleep(std::time::Duration::from_secs(1)).await; 239 continue; 240 } 241 242 - println!( 243 - "{} {} - {}, skipping", 244 - "Track not found: ".yellow(), 245 - scrobble.artist, 246 - scrobble.track 247 - ); 248 scrobble.ignored = Some(true); 249 } 
250 ··· 313 ); 314 let cached = cache.get(&key)?; 315 if cached.is_some() { 316 - println!("{}", format!("Cached: {}", key).yellow()); 317 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 318 scrobble.album = Some(track.album.clone()); 319 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 324 if let Some(mbid) = &scrobble.mbid { 325 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 326 let result = mb_client.get_recording(mbid).await?; 327 - println!("{}", "Musicbrainz (mbid)".yellow()); 328 scrobble.album = Some(Track::from(result.clone()).album); 329 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 330 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 334 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 335 336 if let Some(track) = result { 337 - println!("{}", "Xata (track)".yellow()); 338 scrobble.album = Some(track.album.clone()); 339 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 340 let artist = repo::artist::get_artist_by_track_id(pool, &track.xata_id).await?; ··· 384 .await?; 385 386 if let Some(track) = result.tracks.items.first() { 387 - println!("{}", "Spotify (track)".yellow()); 388 scrobble.album = Some(track.album.name.clone()); 389 let mut track = track.clone(); 390 ··· 412 413 if let Some(recording) = result.recordings.first() { 414 let result = mb_client.get_recording(&recording.id).await?; 415 - println!("{}", "Musicbrainz (recording)".yellow()); 416 scrobble.album = Some(Track::from(result.clone()).album); 417 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 418 tokio::time::sleep(std::time::Duration::from_secs(1)).await; 419 return Ok(()); 420 } 421 422 - println!( 423 - "{} {} - {}, skipping", 424 - "Track not found: ".yellow(), 425 - artist, 426 - track 427 - ); 428 429 Ok(()) 430 } ··· 435 req: &SubmitListensRequest, 436 token: &str, 437 ) -> Result<(), Error> { 
438 - println!("Listenbrainz\n{:#?}", req); 439 440 if req.payload.is_empty() { 441 return Err(Error::msg("No payload found")); ··· 481 .get(&format!("listenbrainz:cache:{}:{}:{}", artist, track, did))? 482 .is_some() 483 { 484 - println!( 485 - "{} {} - {}, recently scrobbled", 486 - "Already scrobbled: ".yellow(), 487 - artist, 488 - track 489 - ); 490 return Ok(()); 491 } 492 ··· 496 .get(&format!("{}:current", spotify_user.email))? 497 .is_some() 498 { 499 - println!( 500 - "{} {} - {}, currently scrobbling, skipping", 501 - "Currently scrobbling: ".yellow(), 502 - artist, 503 - track 504 - ); 505 return Ok(()); 506 } 507 } 508 509 if cache.get(&format!("nowplaying:{}", did))?.is_some() { 510 - println!( 511 - "{} {} - {}, currently scrobbling, skipping", 512 - "Currently scrobbling: ".yellow(), 513 - artist, 514 - track 515 - ); 516 return Ok(()); 517 } 518 ··· 565 ); 566 let cached = cache.get(&key)?; 567 if cached.is_some() { 568 - println!("{}", format!("Cached: {}", key).yellow()); 569 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 570 scrobble.album = Some(track.album.clone()); 571 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 576 if let Some(mbid) = &scrobble.mbid { 577 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 578 let result = mb_client.get_recording(mbid).await?; 579 - println!("{}", "Musicbrainz (mbid)".yellow()); 580 scrobble.album = Some(Track::from(result.clone()).album); 581 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 582 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 586 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 587 588 if let Some(track) = result { 589 - println!("{}", "Xata (track)".yellow()); 590 scrobble.album = Some(track.album.clone()); 591 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 592 let artist = repo::artist::get_artist_by_track_id(pool, 
&track.xata_id).await?; ··· 636 .await?; 637 638 if let Some(track) = result.tracks.items.first() { 639 - println!("{}", "Spotify (track)".yellow()); 640 scrobble.album = Some(track.album.name.clone()); 641 let mut track = track.clone(); 642 ··· 676 } 677 */ 678 679 - println!( 680 - "{} {} - {}, skipping", 681 - "Track not found: ".yellow(), 682 - artist, 683 - track 684 - ); 685 686 Ok(()) 687 }
··· 1 + use std::{collections::BTreeMap, env}; 2 3 + use anyhow::Error; 4 + use rand::Rng; 5 + use sqlx::{Pool, Postgres}; 6 7 8 ··· 129 130 131 132 ); 133 let cached = cache.get(&key)?; 134 if cached.is_some() { 135 + tracing::info!(key = %key, "Cached:"); 136 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 137 scrobble.album = Some(track.album.clone()); 138 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 143 if let Some(mbid) = &scrobble.mbid { 144 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 145 let result = mb_client.get_recording(mbid).await?; 146 + tracing::info!(%scrobble.artist, %scrobble.track, "Musicbrainz (mbid)"); 147 scrobble.album = Some(Track::from(result.clone()).album); 148 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 149 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 153 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 154 155 if let Some(track) = result { 156 + tracing::info!(artist = %scrobble.artist, track = %scrobble.track, "Xata (track)"); 157 scrobble.album = Some(track.album.clone()); 158 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 159 let artist = repo::artist::get_artist_by_track_id(pool, &track.xata_id).await?; ··· 203 .await?; 204 205 if let Some(track) = result.tracks.items.first() { 206 + tracing::info!(artist = %scrobble.artist, track = %scrobble.track, "Spotify (track)"); 207 scrobble.album = Some(track.album.name.clone()); 208 let mut track = track.clone(); 209 ··· 231 232 if let Some(recording) = result.recordings.first() { 233 let result = mb_client.get_recording(&recording.id).await?; 234 + tracing::info!(%scrobble.artist, %scrobble.track, "Musicbrainz (recording)"); 235 scrobble.album = Some(Track::from(result.clone()).album); 236 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 237 
tokio::time::sleep(std::time::Duration::from_secs(1)).await; 238 continue; 239 } 240 241 + tracing::info!(artist = %scrobble.artist, track = %scrobble.track, "Track not found, skipping"); 242 scrobble.ignored = Some(true); 243 } 244 ··· 307 ); 308 let cached = cache.get(&key)?; 309 if cached.is_some() { 310 + tracing::info!(key = %key, "Cached:"); 311 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 312 scrobble.album = Some(track.album.clone()); 313 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 318 if let Some(mbid) = &scrobble.mbid { 319 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 320 let result = mb_client.get_recording(mbid).await?; 321 + tracing::info!(%scrobble.artist, %scrobble.track, "Musicbrainz (mbid)"); 322 scrobble.album = Some(Track::from(result.clone()).album); 323 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 324 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 328 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 329 330 if let Some(track) = result { 331 + tracing::info!(artist = %scrobble.artist, track = %scrobble.track, "Xata (track)"); 332 scrobble.album = Some(track.album.clone()); 333 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 334 let artist = repo::artist::get_artist_by_track_id(pool, &track.xata_id).await?; ··· 378 .await?; 379 380 if let Some(track) = result.tracks.items.first() { 381 + tracing::info!(artist = %scrobble.artist, track = %scrobble.track, "Spotify (track)"); 382 scrobble.album = Some(track.album.name.clone()); 383 let mut track = track.clone(); 384 ··· 406 407 if let Some(recording) = result.recordings.first() { 408 let result = mb_client.get_recording(&recording.id).await?; 409 + tracing::info!(%scrobble.artist, %scrobble.track, "Musicbrainz (recording)"); 410 scrobble.album = Some(Track::from(result.clone()).album); 411 rocksky::scrobble(cache, &did, 
result.into(), scrobble.timestamp).await?; 412 tokio::time::sleep(std::time::Duration::from_secs(1)).await; 413 return Ok(()); 414 } 415 416 + tracing::info!(artist = %artist, track = %track, "Track not found, skipping"); 417 418 Ok(()) 419 } ··· 424 req: &SubmitListensRequest, 425 token: &str, 426 ) -> Result<(), Error> { 427 + tracing::info!(req = ?req, "Listenbrainz submission"); 428 429 if req.payload.is_empty() { 430 return Err(Error::msg("No payload found")); ··· 470 .get(&format!("listenbrainz:cache:{}:{}:{}", artist, track, did))? 471 .is_some() 472 { 473 + tracing::info!(artist= %artist, track = %track, "Recently scrobbled, skipping"); 474 return Ok(()); 475 } 476 ··· 480 .get(&format!("{}:current", spotify_user.email))? 481 .is_some() 482 { 483 + tracing::info!(artist= %artist, track = %track, "Currently scrobbling, skipping"); 484 return Ok(()); 485 } 486 } 487 488 if cache.get(&format!("nowplaying:{}", did))?.is_some() { 489 + tracing::info!(artist= %artist, track = %track, "Currently scrobbling, skipping"); 490 return Ok(()); 491 } 492 ··· 539 ); 540 let cached = cache.get(&key)?; 541 if cached.is_some() { 542 + tracing::info!(key = %key, "Cached"); 543 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 544 scrobble.album = Some(track.album.clone()); 545 rocksky::scrobble(cache, &did, track, scrobble.timestamp).await?; ··· 550 if let Some(mbid) = &scrobble.mbid { 551 // let result = repo::track::get_track_by_mbid(pool, mbid).await?; 552 let result = mb_client.get_recording(mbid).await?; 553 + tracing::info!("Musicbrainz (mbid)"); 554 scrobble.album = Some(Track::from(result.clone()).album); 555 rocksky::scrobble(cache, &did, result.into(), scrobble.timestamp).await?; 556 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 560 let result = repo::track::get_track(pool, &scrobble.track, &scrobble.artist).await?; 561 562 if let Some(track) = result { 563 + tracing::info!("Xata (track)"); 564 scrobble.album = 
Some(track.album.clone()); 565 let album = repo::album::get_album_by_track_id(pool, &track.xata_id).await?; 566 let artist = repo::artist::get_artist_by_track_id(pool, &track.xata_id).await?; ··· 610 .await?; 611 612 if let Some(track) = result.tracks.items.first() { 613 + tracing::info!("Spotify (track)"); 614 scrobble.album = Some(track.album.name.clone()); 615 let mut track = track.clone(); 616 ··· 650 } 651 */ 652 653 + tracing::warn!(artist = %artist, track = %track, "Track not found, skipping"); 654 655 Ok(()) 656 }
+2 -10
crates/scrobbler/src/spotify/client.rs
··· 36 let data = response.text().await?; 37 38 if data == "Too many requests" { 39 - println!( 40 - "> retry-after {}", 41 - headers.get("retry-after").unwrap().to_str().unwrap() 42 - ); 43 - println!("> {} [get_album]", data); 44 return Ok(None); 45 } 46 ··· 56 let data = response.text().await?; 57 58 if data == "Too many requests" { 59 - println!( 60 - "> retry-after {}", 61 - headers.get("retry-after").unwrap().to_str().unwrap() 62 - ); 63 - println!("> {} [get_artist]", data); 64 return Ok(None); 65 } 66
··· 36 let data = response.text().await?; 37 38 if data == "Too many requests" { 39 + tracing::info!(retry_after = %headers.get("retry-after").unwrap().to_str().unwrap(), data = %data, "Rate limited on get_album"); 40 return Ok(None); 41 } 42 ··· 52 let data = response.text().await?; 53 54 if data == "Too many requests" { 55 + tracing::info!(retry_after = %headers.get("retry-after").unwrap().to_str().unwrap(), data = %data, "Rate limited on get_artist"); 56 return Ok(None); 57 } 58
+7 -14
crates/webscrobbler/src/handlers.rs
··· 32 req: HttpRequest, 33 ) -> Result<impl Responder, actix_web::Error> { 34 let id = req.match_info().get("id").unwrap(); 35 - println!("Received scrobble for ID: {}", id.cyan()); 36 37 let pool = data.get_ref().clone(); 38 ··· 50 let body = read_payload!(payload); 51 let params = serde_json::from_slice::<ScrobbleRequest>(&body).map_err(|err| { 52 let body = String::from_utf8_lossy(&body); 53 - println!("Failed to parse JSON: {}", body); 54 - println!("Failed to parse JSON: {}", err); 55 actix_web::error::ErrorBadRequest(format!("Failed to parse JSON: {}", err)) 56 })?; 57 58 - println!("Parsed scrobble request: {:#?}", params); 59 60 if params.event_name != "scrobble" { 61 - println!("Skipping non-scrobble event: {}", params.event_name.green()); 62 return Ok(HttpResponse::Ok().body("Skipping non-scrobble event")); 63 } 64 ··· 75 })?; 76 77 if spotify_token.is_some() { 78 - println!("User has a Spotify token, skipping scrobble"); 79 return Ok(HttpResponse::Ok().body("User has a Spotify token, skipping scrobble")); 80 } 81 } ··· 91 )); 92 93 if cached.is_err() { 94 - println!( 95 - "Failed to check cache for Emby scrobble: {}", 96 - cached.unwrap_err() 97 - ); 98 return Ok(HttpResponse::Ok().body("Failed to check cache for Emby scrobble")); 99 } 100 101 if cached.unwrap().is_some() { 102 - println!( 103 - "Skipping duplicate scrobble for Emby: {} - {}", 104 - artist, track 105 - ); 106 return Ok(HttpResponse::Ok().body("Skipping duplicate scrobble for Emby")); 107 } 108 }
··· 32 req: HttpRequest, 33 ) -> Result<impl Responder, actix_web::Error> { 34 let id = req.match_info().get("id").unwrap(); 35 + tracing::info!(id = %id.bright_green(), "Received scrobble"); 36 37 let pool = data.get_ref().clone(); 38 ··· 50 let body = read_payload!(payload); 51 let params = serde_json::from_slice::<ScrobbleRequest>(&body).map_err(|err| { 52 let body = String::from_utf8_lossy(&body); 53 + tracing::error!(body = %body, error = %err, "Failed to parse JSON"); 54 actix_web::error::ErrorBadRequest(format!("Failed to parse JSON: {}", err)) 55 })?; 56 57 + tracing::info!(params = ?params, "Parsed scrobble request"); 58 59 if params.event_name != "scrobble" { 60 + tracing::info!(event_name = %params.event_name.cyan(), "Skipping non-scrobble event"); 61 return Ok(HttpResponse::Ok().body("Skipping non-scrobble event")); 62 } 63 ··· 74 })?; 75 76 if spotify_token.is_some() { 77 + tracing::info!("User has a Spotify token, skipping scrobble"); 78 return Ok(HttpResponse::Ok().body("User has a Spotify token, skipping scrobble")); 79 } 80 } ··· 90 )); 91 92 if cached.is_err() { 93 + tracing::error!(artist = %artist, track = %track, error = %cached.unwrap_err(), "Failed to check cache for Emby scrobble"); 94 return Ok(HttpResponse::Ok().body("Failed to check cache for Emby scrobble")); 95 } 96 97 if cached.unwrap().is_some() { 98 + tracing::warn!(artist = %artist, track = %track, "Skipping duplicate scrobble for Emby"); 99 return Ok(HttpResponse::Ok().body("Skipping duplicate scrobble for Emby")); 100 } 101 }
+1 -4
crates/webscrobbler/src/lib.rs
··· 44 .parse::<u16>() 45 .unwrap_or(7883); 46 47 - println!( 48 - "Starting WebScrobbler Webhook @ {}", 49 - format!("{}:{}", host, port).green() 50 - ); 51 52 let limiter = web::Data::new( 53 Limiter::builder("redis://127.0.0.1")
··· 44 .parse::<u16>() 45 .unwrap_or(7883); 46 47 + tracing::info!(url = %format!("http://{}:{}", host, port).bright_green(), "Starting WebScrobbler server @"); 48 49 let limiter = web::Data::new( 50 Limiter::builder("redis://127.0.0.1")
+4 -8
crates/webscrobbler/src/rocksky.rs
··· 1 use anyhow::Error; 2 - use owo_colors::OwoColorize; 3 use reqwest::Client; 4 5 use crate::{auth::generate_token, cache::Cache, types::Track}; ··· 26 let token = generate_token(did)?; 27 let client = Client::new(); 28 29 - println!("Scrobbling track: \n {:#?}", track); 30 31 let response = client 32 .post(&format!("{}/now-playing", ROCKSKY_API)) ··· 36 .await?; 37 38 if !response.status().is_success() { 39 - println!( 40 - "Failed to scrobble track: {}", 41 - response.status().to_string() 42 - ); 43 let text = response.text().await?; 44 - println!("Response: {}", text); 45 return Err(Error::msg(format!("Failed to scrobble track: {}", text))); 46 } 47 48 - println!("Scrobbled track: {}", track.title.green()); 49 50 Ok(()) 51 }
··· 1 use anyhow::Error; 2 use reqwest::Client; 3 4 use crate::{auth::generate_token, cache::Cache, types::Track}; ··· 25 let token = generate_token(did)?; 26 let client = Client::new(); 27 28 + tracing::info!(did = %did, track = ?track, "Scrobbling track"); 29 30 let response = client 31 .post(&format!("{}/now-playing", ROCKSKY_API)) ··· 35 .await?; 36 37 if !response.status().is_success() { 38 + tracing::error!(did = %did, artist = %track.artist, track = %track.title, status = %response.status(), "Failed to scrobble track"); 39 let text = response.text().await?; 40 + tracing::error!(did = %did, response = %text, "Response"); 41 return Err(Error::msg(format!("Failed to scrobble track: {}", text))); 42 } 43 44 + tracing::info!(did = %did, artist = %track.artist, track = %track.title, "Scrobbled track"); 45 46 Ok(()) 47 }
+4 -9
crates/webscrobbler/src/scrobbler.rs
··· 34 35 let cached = cache.get(&key)?; 36 if cached.is_some() { 37 - println!("{}", format!("Cached: {}", key).yellow()); 38 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 39 rocksky::scrobble(cache, &did, track, scrobble.time).await?; 40 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 127 let result = spotify_client.search(&query).await?; 128 129 if let Some(track) = result.tracks.items.first() { 130 - println!("{}", "Spotify (track)".yellow()); 131 let mut track = track.clone(); 132 133 if let Some(album) = spotify_client.get_album(&track.album.id).await? { ··· 154 155 if let Some(recording) = result.recordings.first() { 156 let result = mb_client.get_recording(&recording.id).await?; 157 - println!("{}", "Musicbrainz (recording)".yellow()); 158 rocksky::scrobble(cache, &did, result.into(), scrobble.time).await?; 159 tokio::time::sleep(std::time::Duration::from_secs(1)).await; 160 return Ok(()); 161 } 162 163 - println!( 164 - "{} {} - {}, skipping", 165 - "Track not found: ".yellow(), 166 - scrobble.data.song.parsed.artist, 167 - scrobble.data.song.parsed.track 168 - ); 169 170 Ok(()) 171 }
··· 34 35 let cached = cache.get(&key)?; 36 if cached.is_some() { 37 + tracing::info!(artist = %scrobble.data.song.parsed.artist, track = %scrobble.data.song.parsed.track, "Using cached track"); 38 let track = serde_json::from_str::<Track>(&cached.unwrap())?; 39 rocksky::scrobble(cache, &did, track, scrobble.time).await?; 40 tokio::time::sleep(std::time::Duration::from_secs(1)).await; ··· 127 let result = spotify_client.search(&query).await?; 128 129 if let Some(track) = result.tracks.items.first() { 130 + tracing::info!("Spotify (track)"); 131 let mut track = track.clone(); 132 133 if let Some(album) = spotify_client.get_album(&track.album.id).await? { ··· 154 155 if let Some(recording) = result.recordings.first() { 156 let result = mb_client.get_recording(&recording.id).await?; 157 + tracing::info!("Musicbrainz (recording)"); 158 rocksky::scrobble(cache, &did, result.into(), scrobble.time).await?; 159 tokio::time::sleep(std::time::Duration::from_secs(1)).await; 160 return Ok(()); 161 } 162 163 + tracing::warn!(artist = %scrobble.data.song.parsed.artist, track = %scrobble.data.song.parsed.track, "Track not found, skipping"); 164 165 Ok(()) 166 }
+2 -10
crates/webscrobbler/src/spotify/client.rs
··· 36 let data = response.text().await?; 37 38 if data == "Too many requests" { 39 - println!( 40 - "> retry-after {}", 41 - headers.get("retry-after").unwrap().to_str().unwrap() 42 - ); 43 - println!("> {} [get_album]", data); 44 return Ok(None); 45 } 46 ··· 56 let data = response.text().await?; 57 58 if data == "Too many requests" { 59 - println!( 60 - "> retry-after {}", 61 - headers.get("retry-after").unwrap().to_str().unwrap() 62 - ); 63 - println!("> {} [get_artist]", data); 64 return Ok(None); 65 } 66
··· 36 let data = response.text().await?; 37 38 if data == "Too many requests" { 39 + tracing::info!(retry_after = %headers.get("retry-after").unwrap().to_str().unwrap(), data = %data, "Rate limited on get_album"); 40 return Ok(None); 41 } 42 ··· 52 let data = response.text().await?; 53 54 if data == "Too many requests" { 55 + tracing::info!(retry_after = %headers.get("retry-after").unwrap().to_str().unwrap(), data = %data, "Rate limited on get_artist"); 56 return Ok(None); 57 } 58
-50
crates/analytics/src/main.rs
··· 1 - use core::create_tables; 2 - use std::{ 3 - env, 4 - sync::{Arc, Mutex}, 5 - }; 6 - 7 - use clap::Command; 8 - use cmd::{serve::serve, sync::sync}; 9 - use dotenv::dotenv; 10 - use duckdb::Connection; 11 - use sqlx::postgres::PgPoolOptions; 12 - 13 - pub mod cmd; 14 - pub mod core; 15 - pub mod handlers; 16 - pub mod subscriber; 17 - pub mod types; 18 - pub mod xata; 19 - 20 - fn cli() -> Command { 21 - Command::new("analytics") 22 - .version(env!("CARGO_PKG_VERSION")) 23 - .about("Rocksky Analytics CLI built with Rust and DuckDB") 24 - .subcommand(Command::new("sync").about("Sync data from Xata to DuckDB")) 25 - .subcommand(Command::new("serve").about("Serve the Rocksky Analytics API")) 26 - } 27 - 28 - #[tokio::main] 29 - async fn main() -> Result<(), Box<dyn std::error::Error>> { 30 - dotenv().ok(); 31 - 32 - let pool = PgPoolOptions::new() 33 - .max_connections(5) 34 - .connect(&env::var("XATA_POSTGRES_URL")?) 35 - .await?; 36 - let conn = Connection::open("./rocksky-analytics.ddb")?; 37 - 38 - create_tables(&conn).await?; 39 - 40 - let args = cli().get_matches(); 41 - let conn = Arc::new(Mutex::new(conn)); 42 - 43 - match args.subcommand() { 44 - Some(("sync", _)) => sync(conn, &pool).await?, 45 - Some(("serve", _)) => serve(conn).await?, 46 - _ => serve(conn).await?, 47 - } 48 - 49 - Ok(()) 50 - }
···
-37
crates/dropbox/src/main.rs
··· 1 - use clap::Command; 2 - use cmd::{scan::scan, serve::serve}; 3 - use dotenv::dotenv; 4 - 5 - pub mod client; 6 - pub mod cmd; 7 - pub mod consts; 8 - pub mod crypto; 9 - pub mod handlers; 10 - pub mod repo; 11 - pub mod scan; 12 - pub mod token; 13 - pub mod types; 14 - pub mod xata; 15 - 16 - fn cli() -> Command { 17 - Command::new("dropbox") 18 - .version(env!("CARGO_PKG_VERSION")) 19 - .about("Rocksky Dropbox Service") 20 - .subcommand(Command::new("scan").about("Scan Dropbox Music Folder")) 21 - .subcommand(Command::new("serve").about("Serve Rocksky Dropbox API")) 22 - } 23 - 24 - #[tokio::main] 25 - async fn main() -> Result<(), Box<dyn std::error::Error>> { 26 - dotenv().ok(); 27 - 28 - let args = cli().get_matches(); 29 - 30 - match args.subcommand() { 31 - Some(("scan", _)) => scan().await?, 32 - Some(("serve", _)) => serve().await?, 33 - _ => serve().await?, 34 - } 35 - 36 - Ok(()) 37 - }
···
-37
crates/googledrive/src/main.rs
··· 1 - use clap::Command; 2 - use cmd::{scan::scan, serve::serve}; 3 - use dotenv::dotenv; 4 - 5 - pub mod client; 6 - pub mod cmd; 7 - pub mod consts; 8 - pub mod crypto; 9 - pub mod handlers; 10 - pub mod repo; 11 - pub mod scan; 12 - pub mod token; 13 - pub mod types; 14 - pub mod xata; 15 - 16 - fn cli() -> Command { 17 - Command::new("googledrive") 18 - .version(env!("CARGO_PKG_VERSION")) 19 - .about("Rocksky Google Drive Service") 20 - .subcommand(Command::new("scan").about("Scan Google Drive Music Folder")) 21 - .subcommand(Command::new("serve").about("Serve Rocksky Google Drive API")) 22 - } 23 - 24 - #[tokio::main] 25 - async fn main() -> Result<(), Box<dyn std::error::Error>> { 26 - dotenv().ok(); 27 - 28 - let args = cli().get_matches(); 29 - 30 - match args.subcommand() { 31 - Some(("scan", _)) => scan().await?, 32 - Some(("serve", _)) => serve().await?, 33 - _ => serve().await?, 34 - } 35 - 36 - Ok(()) 37 - }
···
-37
crates/jetstream/src/main.rs
··· 1 - use std::{env, sync::Arc}; 2 - 3 - use dotenv::dotenv; 4 - use subscriber::ScrobbleSubscriber; 5 - use tokio::sync::Mutex; 6 - 7 - use crate::webhook_worker::AppState; 8 - 9 - pub mod profile; 10 - pub mod repo; 11 - pub mod subscriber; 12 - pub mod types; 13 - pub mod webhook; 14 - pub mod webhook_worker; 15 - pub mod xata; 16 - 17 - #[tokio::main] 18 - async fn main() -> Result<(), anyhow::Error> { 19 - dotenv()?; 20 - let jetstream_server = env::var("JETSTREAM_SERVER") 21 - .unwrap_or_else(|_| "wss://jetstream2.us-west.bsky.network".to_string()); 22 - let url = format!( 23 - "{}/subscribe?wantedCollections=app.rocksky.*", 24 - jetstream_server 25 - ); 26 - let subscriber = ScrobbleSubscriber::new(&url); 27 - 28 - let redis_url = env::var("REDIS_URL").unwrap_or_else(|_| "redis://127.0.0.1:6379".to_string()); 29 - let redis = redis::Client::open(redis_url)?; 30 - let queue_key = 31 - env::var("WEBHOOK_QUEUE_KEY").unwrap_or_else(|_| "rocksky:webhook_queue".to_string()); 32 - 33 - let state = Arc::new(Mutex::new(AppState { redis, queue_key })); 34 - 35 - subscriber.run(state).await?; 36 - Ok(()) 37 - }
···
-65
crates/playlists/src/main.rs
··· 1 - use core::{create_tables, find_spotify_users, load_users, save_playlists}; 2 - use std::{ 3 - env, 4 - sync::{Arc, Mutex}, 5 - }; 6 - 7 - use anyhow::Error; 8 - use async_nats::connect; 9 - use dotenv::dotenv; 10 - use duckdb::Connection; 11 - use owo_colors::OwoColorize; 12 - use rocksky_playlists::subscriber::subscribe; 13 - use spotify::get_user_playlists; 14 - use sqlx::postgres::PgPoolOptions; 15 - 16 - pub mod core; 17 - pub mod crypto; 18 - pub mod spotify; 19 - pub mod types; 20 - pub mod xata; 21 - 22 - #[tokio::main] 23 - async fn main() -> Result<(), Error> { 24 - dotenv().ok(); 25 - 26 - let conn = Connection::open("./rocksky-playlists.ddb")?; 27 - let conn = Arc::new(Mutex::new(conn)); 28 - create_tables(conn.clone())?; 29 - 30 - subscribe(conn.clone()).await?; 31 - 32 - let pool = PgPoolOptions::new() 33 - .max_connections(5) 34 - .connect(&env::var("XATA_POSTGRES_URL")?) 35 - .await?; 36 - let users = find_spotify_users(&pool, 0, 100).await?; 37 - 38 - load_users(conn.clone(), &pool).await?; 39 - 40 - sqlx::query(r#" 41 - CREATE UNIQUE INDEX IF NOT EXISTS user_playlists_unique_index ON user_playlists (user_id, playlist_id) 42 - "#) 43 - .execute(&pool) 44 - .await?; 45 - let conn = conn.clone(); 46 - 47 - let addr = env::var("NATS_URL").unwrap_or_else(|_| "nats://localhost:4222".to_string()); 48 - let nc = connect(&addr).await?; 49 - let nc = Arc::new(Mutex::new(nc)); 50 - println!("Connected to NATS server at {}", addr.bright_green()); 51 - 52 - for user in users { 53 - let token = user.1.clone(); 54 - let did = user.2.clone(); 55 - let user_id = user.3.clone(); 56 - let playlists = get_user_playlists(token).await?; 57 - save_playlists(&pool, conn.clone(), nc.clone(), playlists, &user_id, &did).await?; 58 - } 59 - 60 - println!("Done!"); 61 - 62 - loop { 63 - tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; 64 - } 65 - }
···
-12
crates/webscrobbler/src/main.rs
··· 1 - use anyhow::Error; 2 - use dotenv::dotenv; 3 - use rocksky_webscrobbler::start_server; 4 - 5 - #[tokio::main] 6 - async fn main() -> Result<(), Error> { 7 - dotenv().ok(); 8 - 9 - start_server().await?; 10 - 11 - Ok(()) 12 - }
···

History

1 round 0 comments
sign up or login to add to the discussion
5 commits
expand
feat: update dependencies to include tracing and improve logging throughout the application
Refactor logging to use tracing instead of println and eprintln
feat: remove unused crate main files for analytics, dropbox, googledrive, jetstream, playlists, scrobbler, and webscrobbler
feat: remove unused binary entries for scrobbler and webscrobbler
feat: remove unused owo_colors dependency from submission and scrobbler modules
expand 0 comments
pull request successfully merged