From 579d4fef103da187e1b20680cbf23ecc3ebce375 Mon Sep 17 00:00:00 2001
From: Power2All
Date: Tue, 15 Oct 2024 15:29:07 +0200
Subject: [PATCH 1/2] Possible fix for routing

---
 src/api/api.rs | 82 ++++++++++++++++++++-----------
 src/config/impls/configuration.rs | 10 ++++
 src/main.rs | 6 ++-
 3 files changed, 68 insertions(+), 30 deletions(-)

diff --git a/src/api/api.rs b/src/api/api.rs
index c2ec10e..56a25e7 100644
--- a/src/api/api.rs
+++ b/src/api/api.rs
@@ -49,44 +49,68 @@ pub fn api_service_routes(data: Arc) -> Box
diff --git a/src/config/impls/configuration.rs b/src/config/impls/configuration.rs
--- a/src/config/impls/configuration.rs
+++ b/src/config/impls/configuration.rs
+    pub fn save_from_config(config: Arc<Configuration>, path: &str)
+    {
+        let config_toml = toml::to_string(&config).unwrap();
+        match Self::save_file(path, config_toml) {
+            Ok(_) => { eprintln!("[CONFIG SAVE] Config file is saved"); }
+            Err(_) => { eprintln!("[CONFIG SAVE] Unable to save to {}", path); }
+        }
+    }
+
 pub fn load_from_file(create: bool) -> Result {
     let mut config = Configuration::init();
     match Configuration::load_file("config.toml") {
diff --git a/src/main.rs b/src/main.rs
index c5acbfd..5232392 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -293,6 +293,9 @@ async fn main() -> std::io::Result<()>
 task::sleep(Duration::from_secs(1)).await;
 if tracker.config.database.clone().unwrap().persistent {
+    tracker.set_stats(StatsEvent::Completed, config.tracker_config.clone().unwrap().total_downloads as i64);
+    Configuration::save_from_config(tracker.config.clone(), "config.toml");
+    info!("Saving completed data to the config file...");
 info!("Saving data to the database...");
 let _ = tracker.save_torrent_updates(tracker.clone()).await;
 if tracker.config.tracker_config.clone().unwrap().whitelist_enabled.unwrap() {
@@ -308,8 +311,9 @@ async fn main() -> std::io::Result<()>
 let _ = tracker.save_user_updates(tracker.clone()).await;
 }
 } else {
-    info!("Saving completed data to an INI...");
 tracker.set_stats(StatsEvent::Completed, config.tracker_config.clone().unwrap().total_downloads as i64);
+    Configuration::save_from_config(tracker.config.clone(), "config.toml");
+    info!("Saving completed data to the config file...");
 }
 task::sleep(Duration::from_secs(1)).await;

From 8f59500c43a14e8fc29cff5904b495f3a491206a Mon Sep 17 00:00:00 2001
From: Power2All
Date: Mon, 21 Oct 2024 16:02:51 +0200
Subject: [PATCH 2/2] Added enhancements and bug fixes; needs one more check before releasing as v4.0.2

---
 Cargo.lock | 57 +-
 Cargo.toml | 2 +-
 README.md | 6 +
 docker/Dockerfile | 2 +-
 src/api/api_blacklists.rs | 17 +
 src/api/api_keys.rs | 25 +-
 src/api/api_torrents.rs | 13 +-
 src/api/api_users.rs | 33 +-
 src/api/api_whitelists.rs | 18 +
 src/config/impls/configuration.rs | 21 +-
 src/config/structs/database_config.rs | 1 +
 .../database_structure_config_blacklist.rs | 2 +-
 .../structs/database_structure_config_keys.rs | 2 +-
 .../database_structure_config_torrents.rs | 2 +-
 .../database_structure_config_users.rs | 2 +-
 .../database_structure_config_whitelist.rs | 2 +-
 src/database/impls/database_connector.rs | 11 +-
 .../impls/database_connector_mysql.rs | 1132 +++++++++-------
 .../impls/database_connector_pgsql.rs | 1189 ++++++++++-------
 .../impls/database_connector_sqlite.rs | 1150 +++++++++-------
 src/main.rs | 28 +-
 src/stats/enums/stats_event.rs | 12 +-
 src/stats/impls/torrent_tracker.rs | 80 +-
 src/stats/structs/stats.rs | 8 +-
 src/stats/structs/stats_atomics.rs | 8 +-
 src/tracker/enums.rs | 3 +-
 src/tracker/enums/updates_action.rs | 8 +
 src/tracker/impls.rs | 5 +-
 src/tracker/impls/torrent_tracker.rs | 11 +-
 src/tracker/impls/torrent_tracker_handlers.rs | 16 +-
 src/tracker/impls/torrent_tracker_import.rs | 94 +-
 src/tracker/impls/torrent_tracker_keys.rs | 7 +-
.../impls/torrent_tracker_keys_updates.rs | 85 ++ src/tracker/impls/torrent_tracker_torrents.rs | 24 +- .../torrent_tracker_torrents_blacklist.rs | 7 +- ...rent_tracker_torrents_blacklist_updates.rs | 94 ++ .../impls/torrent_tracker_torrents_updates.rs | 54 +- .../torrent_tracker_torrents_whitelist.rs | 7 +- ...rent_tracker_torrents_whitelist_updates.rs | 94 ++ src/tracker/impls/torrent_tracker_users.rs | 35 +- .../impls/torrent_tracker_users_updates.rs | 53 +- src/tracker/mod.rs | 1 + src/tracker/structs/torrent_tracker.rs | 12 +- src/tracker/types.rs | 3 + src/tracker/types/keys_updates.rs | 7 + src/tracker/types/torrents_updates.rs | 8 + src/tracker/types/users_updates.rs | 8 + 47 files changed, 2718 insertions(+), 1741 deletions(-) create mode 100644 src/tracker/enums/updates_action.rs create mode 100644 src/tracker/impls/torrent_tracker_keys_updates.rs create mode 100644 src/tracker/impls/torrent_tracker_torrents_blacklist_updates.rs create mode 100644 src/tracker/impls/torrent_tracker_torrents_whitelist_updates.rs create mode 100644 src/tracker/types.rs create mode 100644 src/tracker/types/keys_updates.rs create mode 100644 src/tracker/types/torrents_updates.rs create mode 100644 src/tracker/types/users_updates.rs diff --git a/Cargo.lock b/Cargo.lock index e15c6ce..925d0d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -609,9 +609,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.30" +version = "1.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" +checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" dependencies = [ "jobserver", "libc", @@ -979,9 +979,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", @@ -1292,9 +1292,9 @@ dependencies = [ [[package]] name = "impl-more" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" +checksum = "aae21c3177a27788957044151cc2800043d127acaa460a47ebb9b84dfa2c6aa0" [[package]] name = "indexmap" @@ -1374,9 +1374,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.159" +version = "0.2.161" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" [[package]] name = "libm" @@ -1747,9 +1747,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.87" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" +checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9" dependencies = [ "unicode-ident", ] @@ -1949,9 +1949,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.14" +version = "0.23.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" +checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" dependencies = [ "once_cell", "ring", @@ -1972,9 
+1972,9 @@ dependencies = [
 [[package]]
 name = "rustls-pki-types"
-version = "1.9.0"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55"
+checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b"
 [[package]]
 name = "rustls-webpki"
@@ -2036,9 +2036,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.128"
+version = "1.0.132"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
 dependencies = [
  "indexmap",
  "itoa",
@@ -2415,9 +2415,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
 [[package]]
 name = "syn"
-version = "2.0.79"
+version = "2.0.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590"
+checksum = "83540f837a8afc019423a8edb95b52a8effe46957ee402287f4292fae35be021"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2623,7 +2623,7 @@ dependencies = [
 [[package]]
 name = "torrust-actix"
-version = "4.0.1"
+version = "4.0.2"
 dependencies = [
  "actix-cors",
  "actix-web",
@@ -2698,12 +2698,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
 [[package]]
 name = "unicase"
-version = "2.7.0"
+version = "2.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
-dependencies = [
- "version_check",
-]
+checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df"
 [[package]]
 name = "unicode-bidi"
@@ -2763,9 +2760,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
 [[package]]
 name = "utoipa"
-version = "5.0.0"
+version = "5.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e2b34fc58a72021914a5745832024b2baa638fe771df5a35f3d1b69266bd92c"
+checksum = "8861811f7213bb866cd02319acb69a15b0ef8ca46874e805bd92d488c779036a"
 dependencies = [
  "indexmap",
  "serde",
@@ -2775,9 +2772,9 @@ dependencies = [
 [[package]]
 name = "utoipa-gen"
-version = "5.0.0"
+version = "5.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "866f11b33be38a747542f435578a164674b8922d958cc065d7f19319c19d4784"
+checksum = "5fadf94f07d67df4b15e6490dd9a9d59d7374849413e7f137eafe52fdcbd0db5"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2787,9 +2784,9 @@ dependencies = [
 [[package]]
 name = "utoipa-swagger-ui"
-version = "8.0.0"
+version = "8.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0800ca865caaff4484adca065a77927a801bd9c48879f0451ec08846eb6a12b"
+checksum = "85a551be0331bd01a1d39f2654409ca61cb955f02dfef0fc0f7aced8b2abbc88"
 dependencies = [
  "actix-web",
  "mime_guess",
diff --git a/Cargo.toml b/Cargo.toml
index fad221d..d0c7ec8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@ [package]
 name = "torrust-actix"
-version = "4.0.1"
+version = "4.0.2"
 edition = "2021"
 license = "AGPL-3.0"
 authors = [
diff --git a/README.md b/README.md
index a4fbb3b..021d8cb 100644
--- a/README.md
+++ b/README.md
@@ -56,6 +56,12 @@ Swagger UI is introduced, and when enabled in the configuration, is accessible t
 ### ChangeLog
+#### v4.0.2
+* Added an option for the system to remove data from the database.
+* Added update-tracking variables for the whitelist, blacklist, and keys tables.
+* Renamed the "database_name" configuration fields to "table_name", since they refer to tables, not databases.
+* Fixed a lot of bugs I stumbled upon along the way.
+
 #### v4.0.0
 * Completely rebuilt of the tracker code, for readability.
 * Moved to Actix v4, thus versioning this software to v4.0.0 as well.
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0800c3a..0a8fb6f 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -2,7 +2,7 @@ FROM rust:alpine
 RUN apk add git musl-dev curl
 RUN git clone https://github.com/Power2All/torrust-actix.git /tmp/torrust-actix
-RUN cd /tmp/torrust-actix && git checkout tags/v4.0.1
+RUN cd /tmp/torrust-actix && git checkout tags/v4.0.2
 WORKDIR /tmp/torrust-actix
 RUN cd /tmp/torrust-actix
 RUN cargo build --release && rm -Rf target/release/.fingerprint target/release/build target/release/deps target/release/examples target/release/incremental
diff --git a/src/api/api_blacklists.rs b/src/api/api_blacklists.rs
index 76dfc86..5f2997c 100644
--- a/src/api/api_blacklists.rs
+++ b/src/api/api_blacklists.rs
@@ -8,6 +8,7 @@ use crate::api::api::{api_parse_body, api_service_token, api_validation};
 use crate::api::structs::api_service_data::ApiServiceData;
 use crate::api::structs::query_token::QueryToken;
 use crate::common::common::hex2bin;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
 pub async fn api_service_blacklist_get(request: HttpRequest, path: web::Path, data: Data>) -> HttpResponse
@@ -88,6 +89,10 @@ pub async fn api_service_blacklist_post(request: HttpRequest, path: web::Path { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})); } };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_blacklist_update(info_hash, UpdatesAction::Add);
+    }
+
 return match data.torrent_tracker.add_blacklist(info_hash) {
     true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) }
     false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("info_hash updated {}", info)})) }
@@ -124,6 +129,10 @@ pub async fn api_service_blacklists_post(request: HttpRequest, payload: web::Pay
 Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})) }
 };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_blacklist_update(info_hash, UpdatesAction::Add);
+    }
+
 match data.torrent_tracker.add_blacklist(info_hash) {
     true => { blacklists_output.insert(info_hash, json!({"status": "ok"})); }
     false => { blacklists_output.insert(info_hash, json!({"status": "info_hash updated"})); }
@@ -153,6 +162,10 @@ pub async fn api_service_blacklist_delete(request: HttpRequest, path: web::Path<
 Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})); }
 };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_blacklist_update(info_hash, UpdatesAction::Remove);
+    }
+
 return match data.torrent_tracker.remove_blacklist(info_hash) {
     true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) }
     false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("unknown info_hash {}", info)})) }
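The pattern repeated across these API handlers is worth spelling out: every mutating endpoint now queues an update entry when `database.persistent` is enabled, tagged with the new `UpdatesAction` enum so the periodic flush knows whether the row should be inserted or deleted. The body of the new file `src/tracker/enums/updates_action.rs` is not shown in this excerpt; below is a minimal sketch of a plausible shape, inferred from the three variants used throughout the diff (the derive list is an assumption, and the real enum presumably also derives serde's `Serialize`/`Deserialize`):

```rust
// Sketch of src/tracker/enums/updates_action.rs (not shown in this excerpt).
// Add/Update map to INSERT/UPDATE statements on flush; Remove maps to DELETE,
// which is only executed when the new `remove_action` config flag is enabled.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum UpdatesAction {
    Add,
    Remove,
    Update,
}
```

And a dependency-free sketch of how the flush side consumes such a queue, mirroring the `save_whitelist` logic in the MySQL connector further below (hypothetical `String` info-hashes and SQL-string output stand in for the real `InfoHash` type and sqlx transaction):

```rust
fn flush_whitelist(queue: &[(String, UpdatesAction)], remove_action: bool) -> Vec<String> {
    let mut statements = Vec::new();
    for (info_hash, action) in queue {
        match action {
            // Deletions are honored only when the config opts in via `remove_action`.
            UpdatesAction::Remove => {
                if remove_action {
                    statements.push(format!("DELETE FROM whitelist WHERE infohash='{info_hash}'"));
                }
            }
            // Adds and updates fall through to an idempotent insert.
            UpdatesAction::Add | UpdatesAction::Update => {
                statements.push(format!("INSERT IGNORE INTO whitelist (infohash) VALUES ('{info_hash}')"));
            }
        }
    }
    statements
}

fn main() {
    // Dummy 40-character hex-style hashes for illustration only.
    let queue = vec![
        ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_string(), UpdatesAction::Add),
        ("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb".to_string(), UpdatesAction::Remove),
    ];
    // With remove_action = false, the Remove entry is silently dropped,
    // matching the default `remove_action: false` set in configuration.rs.
    for sql in flush_whitelist(&queue, false) {
        println!("{sql}");
    }
}
```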
@@ -189,6 +202,10 @@ pub async fn api_service_blacklists_delete(request: HttpRequest, payload: web::P Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})) } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_blacklist_update(info_hash, UpdatesAction::Remove); + } + match data.torrent_tracker.remove_blacklist(info_hash) { true => { blacklists_output.insert(info_hash, json!({"status": "ok"})); } false => { blacklists_output.insert(info_hash, json!({"status": "unknown info_hash"})); } diff --git a/src/api/api_keys.rs b/src/api/api_keys.rs index 3c80b00..b0b5735 100644 --- a/src/api/api_keys.rs +++ b/src/api/api_keys.rs @@ -8,6 +8,7 @@ use crate::api::api::{api_parse_body, api_service_token, api_validation}; use crate::api::structs::api_service_data::ApiServiceData; use crate::api::structs::query_token::QueryToken; use crate::common::common::hex2bin; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; pub async fn api_service_key_get(request: HttpRequest, path: web::Path, data: Data>) -> HttpResponse @@ -98,6 +99,10 @@ pub async fn api_service_key_post(request: HttpRequest, path: web::Path<(String, Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid key_hash {}", key)})); } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_key_update(key_hash, timeout as i64, UpdatesAction::Add); + } + return match data.torrent_tracker.add_key(key_hash, timeout as i64) { true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) } false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("key_hash updated {}", key)})) } @@ -134,6 +139,10 @@ pub async fn api_service_keys_post(request: HttpRequest, payload: web::Payload, Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid key_hash {}", key)})) } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_key_update(key_hash, timeout as i64, UpdatesAction::Add); + } + match data.torrent_tracker.add_key(key_hash, timeout as i64) { true => { keys_output.insert(key, json!({"status": "ok"})); } false => { keys_output.insert(key, json!({"status": "key_hash updated"})); } @@ -163,6 +172,10 @@ pub async fn api_service_key_delete(request: HttpRequest, path: web::Path { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid key {}", key)})); } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_key_update(key_hash, 0i64, UpdatesAction::Remove); + } + return match data.torrent_tracker.remove_key(key_hash) { true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) } false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("unknown key_hash {}", key)})) } @@ -194,14 +207,18 @@ pub async fn api_service_keys_delete(request: HttpRequest, payload: web::Payload let mut keys_output = HashMap::new(); for key_item in keys { if key_item.len() == 40 { - let key = match hex2bin(key_item.clone()) { + let key_hash = match hex2bin(key_item.clone()) { Ok(key) => { InfoHash(key) } 
Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid key {}", key_item)})) } }; - match data.torrent_tracker.remove_key(key) { - true => { keys_output.insert(key, json!({"status": "ok"})); } - false => { keys_output.insert(key, json!({"status": "unknown key_hash"})); } + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_key_update(key_hash, 0i64, UpdatesAction::Remove); + } + + match data.torrent_tracker.remove_key(key_hash) { + true => { keys_output.insert(key_hash, json!({"status": "ok"})); } + false => { keys_output.insert(key_hash, json!({"status": "unknown key_hash"})); } } } } diff --git a/src/api/api_torrents.rs b/src/api/api_torrents.rs index a9ee22d..64c6a8c 100644 --- a/src/api/api_torrents.rs +++ b/src/api/api_torrents.rs @@ -9,6 +9,7 @@ use crate::api::api::{api_parse_body, api_service_token, api_validation}; use crate::api::structs::api_service_data::ApiServiceData; use crate::api::structs::query_token::QueryToken; use crate::common::common::hex2bin; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_entry::TorrentEntry; @@ -101,7 +102,7 @@ pub async fn api_service_torrent_post(request: HttpRequest, path: web::Path<(Str }; if data.torrent_tracker.config.database.clone().unwrap().persistent { - let _ = data.torrent_tracker.add_torrent_update(info_hash, torrent_entry.clone()); + let _ = data.torrent_tracker.add_torrent_update(info_hash, torrent_entry.clone(), UpdatesAction::Add); } return match data.torrent_tracker.add_torrent(info_hash, torrent_entry.clone()) { @@ -148,7 +149,7 @@ pub async fn api_service_torrents_post(request: HttpRequest, payload: web::Paylo }; if data.torrent_tracker.config.database.clone().unwrap().persistent { - let _ = data.torrent_tracker.add_torrent_update(info_hash, torrent_entry.clone()); + let _ = data.torrent_tracker.add_torrent_update(info_hash, torrent_entry.clone(), UpdatesAction::Add); } match data.torrent_tracker.add_torrent(info_hash, torrent_entry.clone()) { @@ -180,6 +181,10 @@ pub async fn api_service_torrent_delete(request: HttpRequest, path: web::Path { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})); } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_torrent_update(info_hash, TorrentEntry::default(), UpdatesAction::Remove); + } + return match data.torrent_tracker.remove_torrent(info_hash) { None => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("unknown info_hash {}", info)})) } Some(_) => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) } @@ -216,6 +221,10 @@ pub async fn api_service_torrents_delete(request: HttpRequest, payload: web::Pay Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})) } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_torrent_update(info_hash, TorrentEntry::default(), UpdatesAction::Remove); + } + match data.torrent_tracker.remove_torrent(info_hash) { None => { torrents_output.insert(info_hash, json!({"status": "unknown info_hash"})); } Some(_) => { torrents_output.insert(info_hash, json!({"status": "ok"})); } diff --git 
a/src/api/api_users.rs b/src/api/api_users.rs index d55ac2c..fb5d123 100644 --- a/src/api/api_users.rs +++ b/src/api/api_users.rs @@ -11,6 +11,7 @@ use crate::api::api::{api_parse_body, api_service_token, api_validation}; use crate::api::structs::api_service_data::ApiServiceData; use crate::api::structs::query_token::QueryToken; use crate::common::common::hex2bin; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::user_entry_item::UserEntryItem; use crate::tracker::structs::user_id::UserId; @@ -114,7 +115,7 @@ pub async fn api_service_user_post(request: HttpRequest, path: web::Path<(String let id_hash = <[u8; 20]>::try_from(hasher.finalize().as_slice()).unwrap(); if data.torrent_tracker.config.database.clone().unwrap().persistent { - let _ = data.torrent_tracker.add_user_update(UserId(id_hash), user_entry.clone()); + let _ = data.torrent_tracker.add_user_update(UserId(id_hash), user_entry.clone(), UpdatesAction::Add); } return match data.torrent_tracker.add_user(UserId(id_hash), user_entry.clone()) { @@ -187,7 +188,7 @@ pub async fn api_service_users_post(request: HttpRequest, payload: web::Payload, let id_hash = <[u8; 20]>::try_from(hasher.finalize().as_slice()).unwrap(); if data.torrent_tracker.config.database.clone().unwrap().persistent { - let _ = data.torrent_tracker.add_user_update(UserId(id_hash), user_entry.clone()); + let _ = data.torrent_tracker.add_user_update(UserId(id_hash), user_entry.clone(), UpdatesAction::Add); } match data.torrent_tracker.add_user(UserId(id_hash), user_entry.clone()) { @@ -219,6 +220,20 @@ pub async fn api_service_user_delete(request: HttpRequest, path: web::Path { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid user_hash {}", id)})); } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_user_update(id_hash, UserEntryItem { + key: UserId([0u8; 20]), + user_id: None, + user_uuid: None, + uploaded: 0, + downloaded: 0, + completed: 0, + updated: 0, + active: 0, + torrents_active: Default::default(), + }, UpdatesAction::Remove); + } + return match data.torrent_tracker.remove_user(id_hash) { None => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("unknown user_hash {}", id)})) } Some(_) => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) } @@ -255,6 +270,20 @@ pub async fn api_service_users_delete(request: HttpRequest, payload: web::Payloa Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid user_hash {}", id)})) } }; + if data.torrent_tracker.config.database.clone().unwrap().persistent { + let _ = data.torrent_tracker.add_user_update(id_hash, UserEntryItem { + key: UserId([0u8; 20]), + user_id: None, + user_uuid: None, + uploaded: 0, + downloaded: 0, + completed: 0, + updated: 0, + active: 0, + torrents_active: Default::default(), + }, UpdatesAction::Remove); + } + match data.torrent_tracker.remove_user(id_hash) { None => { users_output.insert(id_hash, json!({"status": "unknown user_hash"})); } Some(_) => { users_output.insert(id_hash, json!({"status": "ok"})); } diff --git a/src/api/api_whitelists.rs b/src/api/api_whitelists.rs index 4f1e173..1394f59 100644 --- a/src/api/api_whitelists.rs +++ b/src/api/api_whitelists.rs @@ -8,7 +8,9 @@ use crate::api::api::{api_parse_body, api_service_token, api_validation}; use 
crate::api::structs::api_service_data::ApiServiceData;
use crate::api::structs::query_token::QueryToken;
use crate::common::common::hex2bin;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
+use crate::tracker::structs::torrent_entry::TorrentEntry;
 pub async fn api_service_whitelist_get(request: HttpRequest, path: web::Path, data: Data>) -> HttpResponse {
@@ -88,6 +90,10 @@ pub async fn api_service_whitelist_post(request: HttpRequest, path: web::Path { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})); } };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_whitelist_update(info_hash, UpdatesAction::Add);
+    }
+
 return match data.torrent_tracker.add_whitelist(info_hash) {
     true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) }
     false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("info_hash updated {}", info)})) }
@@ -124,6 +130,10 @@ pub async fn api_service_whitelists_post(request: HttpRequest, payload: web::Pay
 Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})) }
 };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_whitelist_update(info_hash, UpdatesAction::Add);
+    }
+
 match data.torrent_tracker.add_whitelist(info_hash) {
     true => { whitelists_output.insert(info_hash, json!({"status": "ok"})); }
     false => { whitelists_output.insert(info_hash, json!({"status": "info_hash updated"})); }
@@ -153,6 +163,10 @@ pub async fn api_service_whitelist_delete(request: HttpRequest, path: web::Path<
 Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})); }
 };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_whitelist_update(info_hash, UpdatesAction::Remove);
+    }
+
 return match data.torrent_tracker.remove_whitelist(info_hash) {
     true => { HttpResponse::Ok().content_type(ContentType::json()).json(json!({"status": "ok"})) }
     false => { HttpResponse::NotModified().content_type(ContentType::json()).json(json!({"status": format!("unknown info_hash {}", info)})) }
@@ -189,6 +203,10 @@ pub async fn api_service_whitelists_delete(request: HttpRequest, payload: web::P
 Err(_) => { return HttpResponse::BadRequest().content_type(ContentType::json()).json(json!({"status": format!("invalid info_hash {}", info)})) }
 };
+    if data.torrent_tracker.config.database.clone().unwrap().persistent {
+        let _ = data.torrent_tracker.add_whitelist_update(info_hash, UpdatesAction::Remove);
+    }
+
 match data.torrent_tracker.remove_whitelist(info_hash) {
     true => { whitelists_output.insert(info_hash, json!({"status": "ok"})); }
     false => { whitelists_output.insert(info_hash, json!({"status": "unknown info_hash"})); }
diff --git a/src/config/impls/configuration.rs b/src/config/impls/configuration.rs
index 2983805..fe9553e 100644
--- a/src/config/impls/configuration.rs
+++ b/src/config/impls/configuration.rs
@@ -44,12 +44,13 @@ impl Configuration {
             persistent: false,
             persistent_interval: Some(60),
             insert_vacant: false,
+            remove_action: false,
             update_completed: true,
             update_peers: false,
         }),
         database_structure: Some(DatabaseStructureConfig {
             torrents:
Some(DatabaseStructureConfigTorrents { - database_name: String::from("torrents"), + table_name: String::from("torrents"), column_infohash: String::from("infohash"), bin_type_infohash: true, column_seeds: String::from("seeds"), @@ -57,23 +58,23 @@ impl Configuration { column_completed: String::from("completed") }), whitelist: Some(DatabaseStructureConfigWhitelist { - database_name: String::from("whitelist"), + table_name: String::from("whitelist"), column_infohash: String::from("infohash"), bin_type_infohash: true, }), blacklist: Some(DatabaseStructureConfigBlacklist { - database_name: String::from("blacklist"), + table_name: String::from("blacklist"), column_infohash: String::from("infohash"), bin_type_infohash: true, }), keys: Some(DatabaseStructureConfigKeys { - database_name: String::from("keys"), + table_name: String::from("keys"), column_hash: String::from("hash"), bin_type_hash: true, column_timeout: String::from("timeout") }), users: Some(DatabaseStructureConfigUsers { - database_name: String::from("users"), + table_name: String::from("users"), id_uuid: true, column_uuid: String::from("uuid"), column_id: "id".to_string(), @@ -205,19 +206,19 @@ impl Configuration { pub fn validate(config: Configuration) { // Check Map let check_map = vec![ - ("[DB: torrents]", config.database_structure.clone().unwrap().torrents.unwrap().database_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), + ("[DB: torrents]", config.database_structure.clone().unwrap().torrents.unwrap().table_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: torrents] Column: infohash", config.database_structure.clone().unwrap().torrents.unwrap().column_infohash, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: torrents] Column: seeds", config.database_structure.clone().unwrap().torrents.unwrap().column_seeds, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: torrents] Column: peers", config.database_structure.clone().unwrap().torrents.unwrap().column_peers, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: torrents] Column: completed", config.database_structure.clone().unwrap().torrents.unwrap().column_completed, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), - ("[DB: whitelist]", config.database_structure.clone().unwrap().whitelist.unwrap().database_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), + ("[DB: whitelist]", config.database_structure.clone().unwrap().whitelist.unwrap().table_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: whitelist] Column: infohash", config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), - ("[DB: blacklist]", config.database_structure.clone().unwrap().blacklist.unwrap().database_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), + ("[DB: blacklist]", config.database_structure.clone().unwrap().blacklist.unwrap().table_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: blacklist] Column: infohash", config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), - ("[DB: keys]", config.database_structure.clone().unwrap().keys.unwrap().database_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), + ("[DB: keys]", config.database_structure.clone().unwrap().keys.unwrap().table_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: keys] Column: hash", config.database_structure.clone().unwrap().keys.unwrap().column_hash, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: keys] Column: timeout", config.database_structure.clone().unwrap().keys.unwrap().column_timeout, 
r"^[a-z_][a-z0-9_]{0,30}$".to_string()), - ("[DB: users]", config.database_structure.clone().unwrap().users.unwrap().database_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), + ("[DB: users]", config.database_structure.clone().unwrap().users.unwrap().table_name, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: users] Column: id", config.database_structure.clone().unwrap().users.unwrap().column_id, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: users] Column: uuid", config.database_structure.clone().unwrap().users.unwrap().column_uuid, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), ("[DB: users] Column: key", config.database_structure.clone().unwrap().users.unwrap().column_key, r"^[a-z_][a-z0-9_]{0,30}$".to_string()), diff --git a/src/config/structs/database_config.rs b/src/config/structs/database_config.rs index 5464946..4d4b7d4 100644 --- a/src/config/structs/database_config.rs +++ b/src/config/structs/database_config.rs @@ -8,6 +8,7 @@ pub struct DatabaseConfig { pub persistent: bool, pub persistent_interval: Option, pub insert_vacant: bool, + pub remove_action: bool, pub update_completed: bool, pub update_peers: bool } \ No newline at end of file diff --git a/src/config/structs/database_structure_config_blacklist.rs b/src/config/structs/database_structure_config_blacklist.rs index d911503..6e9722d 100644 --- a/src/config/structs/database_structure_config_blacklist.rs +++ b/src/config/structs/database_structure_config_blacklist.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct DatabaseStructureConfigBlacklist { - pub database_name: String, + pub table_name: String, pub column_infohash: String, pub bin_type_infohash: bool } \ No newline at end of file diff --git a/src/config/structs/database_structure_config_keys.rs b/src/config/structs/database_structure_config_keys.rs index 1e5e453..46d391a 100644 --- a/src/config/structs/database_structure_config_keys.rs +++ b/src/config/structs/database_structure_config_keys.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct DatabaseStructureConfigKeys { - pub database_name: String, + pub table_name: String, pub column_hash: String, pub bin_type_hash: bool, pub column_timeout: String diff --git a/src/config/structs/database_structure_config_torrents.rs b/src/config/structs/database_structure_config_torrents.rs index a91f692..64f7c15 100644 --- a/src/config/structs/database_structure_config_torrents.rs +++ b/src/config/structs/database_structure_config_torrents.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct DatabaseStructureConfigTorrents { - pub database_name: String, + pub table_name: String, pub column_infohash: String, pub bin_type_infohash: bool, pub column_seeds: String, diff --git a/src/config/structs/database_structure_config_users.rs b/src/config/structs/database_structure_config_users.rs index 96e864e..c7e4d78 100644 --- a/src/config/structs/database_structure_config_users.rs +++ b/src/config/structs/database_structure_config_users.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct DatabaseStructureConfigUsers { - pub database_name: String, + pub table_name: String, pub id_uuid: bool, pub column_uuid: String, pub column_id: String, diff --git a/src/config/structs/database_structure_config_whitelist.rs b/src/config/structs/database_structure_config_whitelist.rs index 38e3b2f..167da6f 100644 --- 
a/src/config/structs/database_structure_config_whitelist.rs
+++ b/src/config/structs/database_structure_config_whitelist.rs
@@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct DatabaseStructureConfigWhitelist {
-    pub database_name: String,
+    pub table_name: String,
     pub column_infohash: String,
     pub bin_type_infohash: bool
 }
\ No newline at end of file
diff --git a/src/database/impls/database_connector.rs b/src/database/impls/database_connector.rs
index b4a8cf7..697b667 100644
--- a/src/database/impls/database_connector.rs
+++ b/src/database/impls/database_connector.rs
@@ -7,6 +7,7 @@ use crate::database::structs::database_connector::DatabaseConnector;
 use crate::database::structs::database_connector_mysql::DatabaseConnectorMySQL;
 use crate::database::structs::database_connector_pgsql::DatabaseConnectorPgSQL;
 use crate::database::structs::database_connector_sqlite::DatabaseConnectorSQLite;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
 use crate::tracker::structs::torrent_entry::TorrentEntry;
 use crate::tracker::structs::torrent_tracker::TorrentTracker;
@@ -100,7 +101,7 @@ impl DatabaseConnector {
     Err(Error::RowNotFound)
 }
-    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<InfoHash>) -> Result<u64, Error>
+    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<(InfoHash, UpdatesAction)>) -> Result<u64, Error>
 {
     if self.engine.is_some() {
         return match self.engine.clone().unwrap() {
@@ -113,7 +114,7 @@ impl DatabaseConnector {
     Err(Error::RowNotFound)
 }
-    pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, blacklists: Vec<InfoHash>) -> Result<u64, Error>
+    pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, blacklists: Vec<(InfoHash, UpdatesAction)>) -> Result<u64, Error>
 {
     if self.engine.is_some() {
         return match self.engine.clone().unwrap() {
@@ -126,7 +127,7 @@ impl DatabaseConnector {
     Err(Error::RowNotFound)
 }
-    pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, i64>) -> Result<u64, Error>
+    pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, (i64, UpdatesAction)>) -> Result<u64, Error>
 {
     if self.engine.is_some() {
         return match self.engine.clone().unwrap() {
@@ -139,7 +140,7 @@ impl DatabaseConnector {
     Err(Error::RowNotFound)
 }
-    pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, TorrentEntry>) -> Result<(), Error>
+    pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, (TorrentEntry, UpdatesAction)>) -> Result<(), Error>
 {
     if self.engine.is_some() {
         return match self.engine.clone().unwrap() {
@@ -152,7 +153,7 @@ impl DatabaseConnector {
     Err(Error::RowNotFound)
 }
-    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, UserEntryItem>) -> Result<(), Error>
+    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, (UserEntryItem, UpdatesAction)>) -> Result<(), Error>
 {
     if self.engine.is_some() {
         return match self.engine.clone().unwrap() {
diff --git a/src/database/impls/database_connector_mysql.rs b/src/database/impls/database_connector_mysql.rs
index c17752d..67bd754 100644
--- a/src/database/impls/database_connector_mysql.rs
+++ b/src/database/impls/database_connector_mysql.rs
@@ -15,6 +15,7 @@ use crate::database::enums::database_drivers::DatabaseDrivers;
 use crate::database::structs::database_connector::DatabaseConnector;
 use crate::database::structs::database_connector_mysql::DatabaseConnectorMySQL;
 use crate::stats::enums::stats_event::StatsEvent;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
 use crate::tracker::structs::torrent_entry::TorrentEntry;
 use crate::tracker::structs::torrent_tracker::TorrentTracker;
@@ -46,14 +47,16 @@ impl
DatabaseConnectorMySQL { if create_database { let pool = &structure.mysql.clone().unwrap().pool; + info!("[BOOT] Database creation triggered for MySQL."); // Create Torrent DB + info!("[BOOT MySQL] Creating table {}", config.database_structure.clone().unwrap().torrents.unwrap().table_name); match config.database_structure.clone().unwrap().torrents.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BINARY(20) NOT NULL, `{}` INT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().torrents.unwrap().database_name, + config.database_structure.clone().unwrap().torrents.unwrap().table_name, config.database_structure.clone().unwrap().torrents.unwrap().column_infohash, config.database_structure.clone().unwrap().torrents.unwrap().column_seeds, config.database_structure.clone().unwrap().torrents.unwrap().column_peers, @@ -69,7 +72,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(40) NOT NULL, `{}` INT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().torrents.unwrap().database_name, + config.database_structure.clone().unwrap().torrents.unwrap().table_name, config.database_structure.clone().unwrap().torrents.unwrap().column_infohash, config.database_structure.clone().unwrap().torrents.unwrap().column_seeds, config.database_structure.clone().unwrap().torrents.unwrap().column_peers, @@ -84,12 +87,13 @@ impl DatabaseConnectorMySQL { } // Create Whitelist DB + info!("[BOOT MySQL] Creating table {}", config.database_structure.clone().unwrap().whitelist.unwrap().table_name); match config.database_structure.clone().unwrap().whitelist.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BINARY(20) NOT NULL, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().whitelist.unwrap().database_name, + config.database_structure.clone().unwrap().whitelist.unwrap().table_name, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash ).as_str() @@ -102,7 +106,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(40) NOT NULL, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().whitelist.unwrap().database_name, + config.database_structure.clone().unwrap().whitelist.unwrap().table_name, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash ).as_str() @@ -114,12 +118,13 @@ impl DatabaseConnectorMySQL { } // Create Blacklist DB + info!("[BOOT MySQL] Creating table {}", config.database_structure.clone().unwrap().blacklist.unwrap().table_name); match config.database_structure.clone().unwrap().blacklist.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BINARY(20) NOT NULL, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().blacklist.unwrap().database_name, + config.database_structure.clone().unwrap().blacklist.unwrap().table_name, config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash, 
config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash ).as_str() @@ -132,7 +137,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(40) NOT NULL, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().blacklist.unwrap().database_name, + config.database_structure.clone().unwrap().blacklist.unwrap().table_name, config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash, config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash ).as_str() @@ -144,12 +149,13 @@ impl DatabaseConnectorMySQL { } // Create Keys DB + info!("[BOOT MySQL] Creating table {}", config.database_structure.clone().unwrap().keys.unwrap().table_name); match config.database_structure.clone().unwrap().keys.unwrap().bin_type_hash { true => { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BINARY(20) NOT NULL, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().keys.unwrap().database_name, + config.database_structure.clone().unwrap().keys.unwrap().table_name, config.database_structure.clone().unwrap().keys.unwrap().column_hash, config.database_structure.clone().unwrap().keys.unwrap().column_timeout, config.database_structure.clone().unwrap().keys.unwrap().column_hash @@ -163,7 +169,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(40) NOT NULL, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().keys.unwrap().database_name, + config.database_structure.clone().unwrap().keys.unwrap().table_name, config.database_structure.clone().unwrap().keys.unwrap().column_hash, config.database_structure.clone().unwrap().keys.unwrap().column_timeout, config.database_structure.clone().unwrap().keys.unwrap().column_hash @@ -176,6 +182,7 @@ impl DatabaseConnectorMySQL { } // Create Users DB + info!("[BOOT MySQL] Creating table {}", config.database_structure.clone().unwrap().users.unwrap().table_name); match config.database_structure.clone().unwrap().users.unwrap().id_uuid { true => { match config.database_structure.clone().unwrap().users.unwrap().bin_type_key { @@ -183,7 +190,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(36) NOT NULL, `{}` BINARY(20) NOT NULL, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` TINYINT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_uuid, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -202,7 +209,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` VARCHAR(36) NOT NULL, `{}` VARCHAR(40) NOT NULL, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` TINYINT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().users.unwrap().database_name, + 
config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_uuid, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -225,7 +232,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, `{}` BINARY(20) NOT NULL, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` TINYINT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_id, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -244,7 +251,7 @@ impl DatabaseConnectorMySQL { match sqlx::query( format!( "CREATE TABLE `{}` (`{}` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, `{}` VARCHAR(40) NOT NULL, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` BIGINT UNSIGNED NOT NULL DEFAULT 0, `{}` TINYINT NOT NULL DEFAULT 0, `{}` INT NOT NULL DEFAULT 0, PRIMARY KEY (`{}`)) COLLATE='utf8mb4_general_ci'", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_id, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -281,11 +288,6 @@ impl DatabaseConnectorMySQL { Some(db_structure) => { db_structure } }; loop { - info!( - "[MySQL] Trying to querying {} torrents - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { true => { format!( @@ -293,7 +295,7 @@ impl DatabaseConnectorMySQL { structure.column_infohash, structure.column_infohash, structure.column_completed, - structure.database_name, + structure.table_name, start, length ) @@ -303,7 +305,7 @@ impl DatabaseConnectorMySQL { "SELECT `{}`, `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, structure.column_completed, - structure.database_name, + structure.table_name, start, length ) @@ -330,13 +332,14 @@ impl DatabaseConnectorMySQL { if torrents < start { break; } + info!("[MySQL] Handled {} torrents", torrents); } tracker.set_stats(StatsEvent::Completed, completed as i64); info!("[MySQL] Loaded {} torrents with {} completed", torrents, completed); Ok((torrents, completed)) } - pub async fn save_torrents(&self, tracker: Arc, torrents: BTreeMap) -> Result<(), Error> + pub async fn save_torrents(&self, tracker: Arc, torrents: BTreeMap) -> Result<(), Error> { let mut torrents_transaction = self.pool.begin().await?; let mut torrents_handled_entries = 0u64; @@ -344,77 +347,26 @@ impl DatabaseConnectorMySQL { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (info_hash, torrent_entry) in torrents.iter() { + for (info_hash, (torrent_entry, updates_action)) in torrents.iter() { torrents_handled_entries += 1; - match tracker.config.deref().clone().database.unwrap().insert_vacant { - true => { - if 
tracker.config.deref().clone().database.unwrap().update_peers { + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { true => { format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES (UNHEX('{}'), {}, {}) ON DUPLICATE KEY UPDATE `{}` = VALUES(`{}`), `{}`=VALUES(`{}`)", - structure.database_name, - structure.column_infohash, - structure.column_seeds, - structure.column_peers, - info_hash, - torrent_entry.seeds.len(), - torrent_entry.peers.len(), - structure.column_seeds, - structure.column_seeds, - structure.column_peers, - structure.column_peers - ) - } - false => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES ('{}', {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)", - structure.database_name, + "DELETE FROM `{}` WHERE `{}`=UNHEX('{}')", + structure.table_name, structure.column_infohash, - structure.column_seeds, - structure.column_peers, - info_hash, - torrent_entry.seeds.len(), - torrent_entry.peers.len(), - structure.column_seeds, - structure.column_seeds, - structure.column_peers, - structure.column_peers - ) - } - }; - match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[MySQL] Error: {}", e.to_string()); - return Err(e); - } - } - } - if tracker.config.deref().clone().database.unwrap().update_completed { - let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { - true => { - format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES (UNHEX('{}'), {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`)", - structure.database_name, - structure.column_infohash, - structure.column_completed, - info_hash, - torrent_entry.completed, - structure.column_completed, - structure.column_completed + info_hash ) } false => { format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`)", - structure.database_name, + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, structure.column_infohash, - structure.column_completed, - info_hash, - torrent_entry.completed, - structure.column_completed, - structure.column_completed + info_hash ) } }; @@ -427,70 +379,154 @@ impl DatabaseConnectorMySQL { } } } - false => { - if tracker.config.deref().clone().database.unwrap().update_peers { - let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { - true => { - format!( - "UPDATE IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')", - structure.database_name, - structure.column_seeds, - torrent_entry.seeds.len(), - structure.column_peers, - torrent_entry.peers.len(), - structure.column_infohash, - info_hash - ) - } - false => { - format!( - "UPDATE IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`='{}'", - structure.database_name, - structure.column_seeds, - torrent_entry.seeds.len(), - structure.column_peers, - torrent_entry.peers.len(), - structure.column_infohash, - info_hash - ) + UpdatesAction::Add | UpdatesAction::Update => { + match tracker.config.deref().clone().database.unwrap().insert_vacant { + true => { + if tracker.config.deref().clone().database.unwrap().update_peers { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, 
`{}`) VALUES (UNHEX('{}'), {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_infohash,
+                                structure.column_seeds,
+                                structure.column_peers,
+                                info_hash,
+                                torrent_entry.seeds.len(),
+                                torrent_entry.peers.len(),
+                                structure.column_seeds,
+                                structure.column_seeds,
+                                structure.column_peers,
+                                structure.column_peers
+                            )
+                        }
+                        false => {
+                            format!(
+                                "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES ('{}', {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_infohash,
+                                structure.column_seeds,
+                                structure.column_peers,
+                                info_hash,
+                                torrent_entry.seeds.len(),
+                                torrent_entry.peers.len(),
+                                structure.column_seeds,
+                                structure.column_seeds,
+                                structure.column_peers,
+                                structure.column_peers
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
+                }
 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
+                if tracker.config.deref().clone().database.unwrap().update_completed {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                        true => {
+                            format!(
+                                "INSERT INTO `{}` (`{}`, `{}`) VALUES (UNHEX('{}'), {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_infohash,
+                                structure.column_completed,
+                                info_hash,
+                                torrent_entry.completed,
+                                structure.column_completed,
+                                structure.column_completed
+                            )
+                        }
+                        false => {
+                            format!(
+                                "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_infohash,
+                                structure.column_completed,
+                                info_hash,
+                                torrent_entry.completed,
+                                structure.column_completed,
+                                structure.column_completed
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
+                }
+            }
 }
-        }
-        if tracker.config.deref().clone().database.unwrap().update_completed {
-            let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
-                true => {
-                    format!(
-                        "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`=UNHEX('{}')",
-                        structure.database_name,
-                        structure.column_completed,
-                        torrent_entry.completed,
-                        structure.column_infohash,
-                        info_hash
-                    )
-                }
-                false => {
-                    format!(
-                        "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`='{}'",
-                        structure.database_name,
-                        structure.column_completed,
-                        torrent_entry.completed,
-                        structure.column_infohash,
-                        info_hash
-                    )
+            false => {
+                if tracker.config.deref().clone().database.unwrap().update_peers {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                        true => {
+                            format!(
+                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')",
+                                structure.table_name,
+                                structure.column_seeds,
+                                torrent_entry.seeds.len(),
+                                structure.column_peers,
+                                torrent_entry.peers.len(),
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                        false => {
+                            format!(
+                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`='{}'",
+                                structure.table_name,
+                                structure.column_seeds,
+                                torrent_entry.seeds.len(),
+                                structure.column_peers,
+                                torrent_entry.peers.len(),
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
+                }
 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
+                if tracker.config.deref().clone().database.unwrap().update_completed {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                        true => {
+                            format!(
+                                "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`=UNHEX('{}')",
+                                structure.table_name,
+                                structure.column_completed,
+                                torrent_entry.completed,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                        false => {
+                            format!(
+                                "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`='{}'",
+                                structure.table_name,
+                                structure.column_completed,
+                                torrent_entry.completed,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
+                }
+            }
 }
 }
@@ -500,6 +536,7 @@ impl DatabaseConnectorMySQL {
                 info!("[MySQL] Handled {} torrents", torrents_handled_entries);
             }
         }
+        info!("[MySQL] Handled {} torrents", torrents_handled_entries);
         self.commit(torrents_transaction).await
     }
@@ -513,18 +550,13 @@ impl DatabaseConnectorMySQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[MySQL] Trying to querying {} whitelisted hashes - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
                 true => {
                     format!(
                         "SELECT HEX(`{}`) AS `{}` FROM `{}` LIMIT {}, {}",
                         structure.column_infohash,
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                         start,
                         length
                     )
@@ -533,7 +565,7 @@ impl DatabaseConnectorMySQL {
                     format!(
                         "SELECT `{}` FROM `{}` LIMIT {}, {}",
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                        start,
                        length
                    )
@@ -550,12 +582,13 @@ impl DatabaseConnectorMySQL {
             if hashes < start {
                 break;
             }
+            info!("[MySQL] Handled {} whitelisted torrents", hashes);
         }
-        info!("[MySQL] Loaded {} whitelisted torrents", hashes);
+        info!("[MySQL] Handled {} whitelisted torrents", hashes);
         Ok(hashes)
     }
-    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<InfoHash>) -> Result<u64, Error>
+    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<(InfoHash, UpdatesAction)>) -> Result<u64, Error>
     {
         let mut whitelist_transaction = self.pool.begin().await?;
         let mut whitelist_handled_entries = 0u64;
@@ -563,38 +596,71 @@ impl DatabaseConnectorMySQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for info_hash in whitelists.iter() {
+        for (info_hash, updates_action) in whitelists.iter() {
             whitelist_handled_entries += 1;
-            let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
-                true => {
-                    format!(
-                        "INSERT IGNORE INTO `{}` (`{}`) VALUES (UNHEX('{}'))",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
-                }
-                false => {
-                    format!(
-                        "INSERT IGNORE INTO `{}` (`{}`) VALUES ('{}')",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
+            match updates_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
+                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
+                            true => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`=UNHEX('{}')",
+                                    structure.table_name,
+                                    structure.column_infohash,
+                                    info_hash
+                                )
+                            }
+                            false => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`='{}'",
+                                    structure.table_name,
+                                    structure.column_infohash,
+                                    info_hash
+                                )
+                            }
+                        };
+                        match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await {
+                            Ok(_) => {}
+                            Err(e) => {
+                                error!("[MySQL] Error: {}", e.to_string());
+                                return Err(e);
+                            }
+                        }
+                    }
 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
+                UpdatesAction::Add | UpdatesAction::Update => {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
+                        true => {
+                            format!(
+                                "INSERT IGNORE INTO `{}` (`{}`) VALUES (UNHEX('{}'))",
+                                structure.table_name,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                        false => {
+                            format!(
+                                "INSERT IGNORE INTO `{}` (`{}`) VALUES ('{}')",
+                                structure.table_name,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
                 }
             }
             if (whitelist_handled_entries as f64 / 1000f64).fract() == 0.0 {
-                info!("[MySQL] Handled {} torrents", whitelist_handled_entries);
+                info!("[MySQL] Handled {} whitelisted torrents", whitelist_handled_entries);
             }
         }
-        info!("[MySQL] Saved {} whitelisted torrents", whitelist_handled_entries);
+        info!("[MySQL] Handled {} whitelisted torrents", whitelist_handled_entries);
         let _ = self.commit(whitelist_transaction).await;
         Ok(whitelist_handled_entries)
     }
@@ -609,18 +675,13 @@ impl DatabaseConnectorMySQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[MySQL] Trying to querying {} blacklisted hashes - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash {
                 true => {
                     format!(
                         "SELECT HEX(`{}`) AS `{}` FROM `{}` LIMIT {}, {}",
                         structure.column_infohash,
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                         start,
                         length
                     )
@@ -629,7 +690,7 @@ impl DatabaseConnectorMySQL {
                     format!(
                         "SELECT `{}` FROM `{}` LIMIT {}, {}",
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                        start,
                        length
                    )
@@ -646,12 +707,13 @@ impl DatabaseConnectorMySQL {
             if hashes < start {
                 break;
             }
+            info!("[MySQL] Handled {} blacklisted torrents", hashes);
         }
-        info!("[MySQL] Loaded {} blacklisted torrents", hashes);
+        info!("[MySQL] Handled {} blacklisted torrents", hashes);
         Ok(hashes)
     }
-    pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, blacklists: Vec<InfoHash>) -> Result<u64, Error>
+    pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, blacklists: Vec<(InfoHash, UpdatesAction)>) -> Result<u64, Error>
     {
         let mut blacklist_transaction = self.pool.begin().await?;
         let mut blacklist_handled_entries = 0u64;
@@ -659,38 +721,71 @@ impl DatabaseConnectorMySQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for info_hash in blacklists.iter() {
+        for (info_hash, updates_action) in blacklists.iter() {
            blacklist_handled_entries += 1;
-            let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash {
-                true => {
-                    format!(
-                        "INSERT IGNORE INTO `{}` (`{}`) VALUES (UNHEX('{}'))",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
-                }
-                false => {
-                    format!(
-                        "INSERT IGNORE INTO `{}` (`{}`) VALUES ('{}')",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
+            match updates_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
+                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash {
+                            true => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`=UNHEX('{}')",
+                                    structure.table_name,
+                                    structure.column_infohash,
+                                    info_hash
+                                )
+                            }
+                            false => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`='{}'",
+                                    structure.table_name,
+                                    structure.column_infohash,
+                                    info_hash
+                                )
+                            }
+                        };
+                        match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await {
+                            Ok(_) => {}
+                            Err(e) => {
+                                error!("[MySQL] Error: {}", e.to_string());
+                                return Err(e);
+                            }
+                        }
+                    }
 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
+                UpdatesAction::Add | UpdatesAction::Update => {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash {
+                        true => {
+                            format!(
+                                "INSERT IGNORE INTO `{}` (`{}`) VALUES (UNHEX('{}'))",
+                                structure.table_name,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                        false => {
+                            format!(
+                                "INSERT IGNORE INTO `{}` (`{}`) VALUES ('{}')",
+                                structure.table_name,
+                                structure.column_infohash,
+                                info_hash
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
                 }
            }
            if (blacklist_handled_entries as f64 / 1000f64).fract() == 0.0 {
-                info!("[MySQL] Handled {} torrents", blacklist_handled_entries);
+                info!("[MySQL] Handled {} blacklisted torrents", blacklist_handled_entries);
            }
        }
-        info!("[MySQL] Saved {} blacklisted torrents", blacklist_handled_entries);
+        info!("[MySQL] Handled {} blacklisted torrents", blacklist_handled_entries);
        let _ = self.commit(blacklist_transaction).await;
        Ok(blacklist_handled_entries)
    }
@@ -705,11 +800,6 @@ impl DatabaseConnectorMySQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[MySQL] Trying to querying {} keys hashes - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash {
                 true => {
                     format!(
@@ -717,7 +807,7 @@ impl DatabaseConnectorMySQL {
                         structure.column_hash,
                         structure.column_hash,
                         structure.column_timeout,
-                        structure.database_name,
+                        structure.table_name,
                         start,
                         length
                     )
@@ -727,7 +817,7 @@ impl DatabaseConnectorMySQL {
                         "SELECT `{}`, `{}` FROM `{}` LIMIT {}, {}",
                         structure.column_hash,
                         structure.column_timeout,
-                        structure.database_name,
+                        structure.table_name,
                        start,
                        length
                    )
@@ -745,12 +835,13 @@ impl DatabaseConnectorMySQL {
             if hashes < start {
                 break;
             }
+            info!("[MySQL] Handled {} keys", hashes);
         }
-        info!("[MySQL] Loaded {} blacklisted torrents", hashes);
+        info!("[MySQL] Handled {} keys", hashes);
         Ok(hashes)
     }
-    pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, i64>) -> Result<u64, Error>
+    pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, (i64, UpdatesAction)>) -> Result<u64, Error>
     {
        let mut keys_transaction = self.pool.begin().await?;
        let mut keys_handled_entries = 0u64;
@@ -758,50 +849,83 @@ impl DatabaseConnectorMySQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for (hash, timeout) in keys.iter() {
+        for (hash, (timeout, update_action)) in keys.iter() {
             keys_handled_entries += 1;
-            let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash {
-                true => {
-                    format!(
-                        "INSERT INTO `{}` (`{}`, `{}`) VALUES (UNHEX('{}'), {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                        structure.database_name,
-                        structure.column_hash,
-                        structure.column_timeout,
-                        hash,
-                        timeout,
-                        structure.column_hash,
-                        structure.column_hash,
-                        structure.column_timeout,
-                        structure.column_timeout
-                    )
-                }
-                false => {
-                    format!(
-                        "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                        structure.database_name,
-                        structure.column_hash,
-                        structure.column_timeout,
-                        hash,
-                        timeout,
-                        structure.column_hash,
-                        structure.column_hash,
-                        structure.column_timeout,
-                        structure.column_timeout
-                    )
+            match update_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
+                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash {
+                            true => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`=UNHEX('{}')",
+                                    structure.table_name,
+                                    structure.column_hash,
+                                    hash
+                                )
+                            }
+                            false => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`='{}'",
+                                    structure.table_name,
+                                    structure.column_hash,
+                                    hash
+                                )
+                            }
+                        };
+                        match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await {
+                            Ok(_) => {}
+                            Err(e) => {
+                                error!("[MySQL] Error: {}", e.to_string());
+                                return Err(e);
+                            }
+                        }
+                    }
 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
+                UpdatesAction::Add | UpdatesAction::Update => {
+                    let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash {
+                        true => {
+                            format!(
+                                "INSERT INTO `{}` (`{}`, `{}`) VALUES (UNHEX('{}'), {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_hash,
+                                structure.column_timeout,
+                                hash,
+                                timeout,
+                                structure.column_hash,
+                                structure.column_hash,
+                                structure.column_timeout,
+                                structure.column_timeout
+                            )
+                        }
+                        false => {
+                            format!(
+                                "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                structure.table_name,
+                                structure.column_hash,
+                                structure.column_timeout,
+                                hash,
+                                timeout,
+                                structure.column_hash,
+                                structure.column_hash,
+                                structure.column_timeout,
+                                structure.column_timeout
+                            )
+                        }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
+                    }
                 }
             }
             if (keys_handled_entries as f64 / 1000f64).fract() == 0.0 {
                 info!("[MySQL] Handled {} keys", keys_handled_entries);
             }
         }
-        info!("[MySQL] Saved {} keys", keys_handled_entries);
+        info!("[MySQL] Handled {} keys", keys_handled_entries);
         let _ = self.commit(keys_transaction).await;
         Ok(keys_handled_entries)
     }
@@ -816,11 +940,6 @@ impl DatabaseConnectorMySQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[MySQL] Trying to querying {} users - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
                 true => {
                     match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
@@ -835,7 +954,7 @@ impl DatabaseConnectorMySQL {
                                 structure.column_completed,
                                 structure.column_updated,
                                 structure.column_active,
-                                structure.database_name,
+                                structure.table_name,
                                 start,
                                 length
                             )
@@ -850,7 +969,7 @@ impl DatabaseConnectorMySQL {
                                 structure.column_completed,
                                 structure.column_updated,
                                 structure.column_active,
-                                structure.database_name,
+                                structure.table_name,
                                 start,
                                 length
                             )
@@ -870,7 +989,7 @@ impl DatabaseConnectorMySQL {
                                 structure.column_completed,
                                 structure.column_updated,
                                 structure.column_active,
-                                structure.database_name,
+                                structure.table_name,
                                 start,
                                 length
                             )
@@ -885,7 +1004,7 @@ impl DatabaseConnectorMySQL {
                                 structure.column_completed,
                                 structure.column_updated,
                                 structure.column_active,
-                                structure.database_name,
+                                structure.table_name,
                                start,
                                length
                            )
@@ -921,9 +1040,9 @@ impl DatabaseConnectorMySQL {
                         true => { Some(result.get(structure.column_uuid.as_str())) }
                         false => { None }
                     },
-                    uploaded: result.get(structure.column_uploaded.as_str()),
-                    downloaded: result.get(structure.column_downloaded.as_str()),
-                    completed: result.get(structure.column_completed.as_str()),
+                    uploaded: result.get::<i64, &str>(structure.column_uploaded.as_str()) as u64,
+                    downloaded: result.get::<i64, &str>(structure.column_downloaded.as_str()) as u64,
+                    completed: result.get::<i64, &str>(structure.column_completed.as_str()) as u64,
                     updated: result.get::<i64, &str>(structure.column_updated.as_str()) as u64,
                     active: result.get::<i8, &str>(structure.column_active.as_str()) as u8,
                     torrents_active: Default::default(),
@@ -934,12 +1053,13 @@ impl DatabaseConnectorMySQL {
             if hashes < start {
                 break;
             }
+            info!("[MySQL] Loaded {} users", hashes);
         }
-        info!("[MySQL] Loaded {} blacklisted torrents", hashes);
+        info!("[MySQL] Loaded {} users", hashes);
         Ok(hashes)
     }
-    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, UserEntryItem>) -> Result<(), Error>
+    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, (UserEntryItem, UpdatesAction)>) -> Result<(), Error>
     {
         let mut users_transaction = self.pool.begin().await?;
         let mut users_handled_entries = 0u64;
@@ -947,253 +1067,287 @@ impl DatabaseConnectorMySQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for (_, user_entry_item) in users.iter() {
+        for (_, (user_entry_item, updates_action)) in users.iter() {
             users_handled_entries += 1;
-            let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant {
-                true => {
-                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
-                        true => {
-                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
-                                true => {
-                                    format!(
-                                        "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', UNHEX('{}'), {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                                        structure.database_name,
-                                        structure.column_uuid,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        user_entry_item.user_uuid.clone().unwrap(),
-                                        user_entry_item.key,
-                                        user_entry_item.uploaded,
-                                        user_entry_item.downloaded,
-                                        user_entry_item.completed,
-                                        user_entry_item.active,
-                                        user_entry_item.updated,
-                                        structure.column_key,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        structure.column_updated
-                                    )
-                                }
-                                false => {
-                                    format!(
-                                        "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', '{}', {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                                        structure.database_name,
-                                        structure.column_uuid,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        user_entry_item.user_uuid.clone().unwrap(),
-                                        user_entry_item.key,
-                                        user_entry_item.uploaded,
-                                        user_entry_item.downloaded,
-                                        user_entry_item.completed,
-                                        user_entry_item.active,
-                                        user_entry_item.updated,
-                                        structure.column_key,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        structure.column_updated
-                                    )
-                                }
+            match updates_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
+                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
+                            true => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`='{}'",
+                                    structure.table_name,
+                                    structure.column_uuid,
+                                    user_entry_item.user_uuid.clone().unwrap()
+                                )
                            }
-                        }
-                        false => {
-                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
-                                true => {
-                                    format!(
-                                        "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', UNHEX('{}'), {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                                        structure.database_name,
-                                        structure.column_id,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        user_entry_item.user_id.unwrap(),
-                                        user_entry_item.key,
-                                        user_entry_item.uploaded,
-                                        user_entry_item.downloaded,
-                                        user_entry_item.completed,
-                                        user_entry_item.active,
-                                        user_entry_item.updated,
-                                        structure.column_key,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_uploaded,
-                                        structure.column_downloaded,
-                                        structure.column_downloaded,
-                                        structure.column_completed,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_active,
-                                        structure.column_updated,
-                                        structure.column_updated
-                                    )
-                                }
-                                false => {
-                                    format!(
-                                        "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', '{}', {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
-                                        structure.database_name,
-                                        structure.column_id,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_downloaded,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_updated,
-                                        user_entry_item.user_id.unwrap(),
-                                        user_entry_item.completed,
-                                        user_entry_item.active,
-                                        user_entry_item.downloaded,
-                                        user_entry_item.key,
-                                        user_entry_item.uploaded,
-                                        user_entry_item.updated,
-                                        structure.column_completed,
-                                        structure.column_completed,
-                                        structure.column_active,
-                                        structure.column_active,
-                                        structure.column_downloaded,
-                                        structure.column_downloaded,
-                                        structure.column_key,
-                                        structure.column_key,
-                                        structure.column_uploaded,
-                                        structure.column_uploaded,
-                                        structure.column_updated,
-                                        structure.column_updated
-                                    )
-                                }
+                            false => {
+                                format!(
+                                    "DELETE FROM `{}` WHERE `{}`='{}'",
+                                    structure.table_name,
+                                    structure.column_id,
+                                    user_entry_item.user_id.unwrap()
+                                )
+                            }
+                        };
+                        match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await {
+                            Ok(_) => {}
+                            Err(e) => {
+                                error!("[MySQL] Error: {}", e.to_string());
+                                return Err(e);
                             }
                         }
                     }
                 }
-                false => {
-                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
+                UpdatesAction::Add | UpdatesAction::Update => {
+                    let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant {
                         true => {
-                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
                                 true => {
-                                    format!(
-                                        "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=UNHEX('{}'), `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')",
-                                        structure.database_name,
-                                        structure.column_completed,
-                                        user_entry_item.completed,
-                                        structure.column_active,
-                                        user_entry_item.active,
-                                        structure.column_downloaded,
-                                        user_entry_item.downloaded,
-                                        structure.column_key,
-                                        user_entry_item.key,
-                                        structure.column_uploaded,
-                                        user_entry_item.uploaded,
-                                        structure.column_updated,
-                                        user_entry_item.updated,
-                                        structure.column_uuid,
-                                        user_entry_item.user_uuid.clone().unwrap(),
-                                    )
+                                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                                        true => {
+                                            format!(
+                                                "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', UNHEX('{}'), {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                                structure.table_name,
+                                                structure.column_uuid,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                user_entry_item.user_uuid.clone().unwrap(),
+                                                user_entry_item.key,
+                                                user_entry_item.uploaded,
+                                                user_entry_item.downloaded,
+                                                user_entry_item.completed,
+                                                user_entry_item.active,
+                                                user_entry_item.updated,
+                                                structure.column_key,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                structure.column_updated
+                                            )
+                                        }
+                                        false => {
+                                            format!(
+                                                "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', '{}', {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                                structure.table_name,
+                                                structure.column_uuid,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                user_entry_item.user_uuid.clone().unwrap(),
+                                                user_entry_item.key,
+                                                user_entry_item.uploaded,
+                                                user_entry_item.downloaded,
+                                                user_entry_item.completed,
+                                                user_entry_item.active,
+                                                user_entry_item.updated,
+                                                structure.column_key,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                structure.column_updated
+                                            )
+                                        }
+                                    }
                                 }
                                 false => {
-                                    format!(
-                                        "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'",
-                                        structure.database_name,
-                                        structure.column_completed,
-                                        user_entry_item.completed,
-                                        structure.column_active,
-                                        user_entry_item.active,
-                                        structure.column_downloaded,
-                                        user_entry_item.downloaded,
-                                        structure.column_key,
-                                        user_entry_item.key,
-                                        structure.column_uploaded,
-                                        user_entry_item.uploaded,
-                                        structure.column_updated,
-                                        user_entry_item.updated,
-                                        structure.column_uuid,
-                                        user_entry_item.user_uuid.clone().unwrap(),
-                                    )
+                                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                                        true => {
+                                            format!(
+                                                "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', UNHEX('{}'), {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                                structure.table_name,
+                                                structure.column_id,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                user_entry_item.user_id.unwrap(),
+                                                user_entry_item.key,
+                                                user_entry_item.uploaded,
+                                                user_entry_item.downloaded,
+                                                user_entry_item.completed,
+                                                user_entry_item.active,
+                                                user_entry_item.updated,
+                                                structure.column_key,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_uploaded,
+                                                structure.column_downloaded,
+                                                structure.column_downloaded,
+                                                structure.column_completed,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_active,
+                                                structure.column_updated,
+                                                structure.column_updated
+                                            )
+                                        }
+                                        false => {
+                                            format!(
+                                                "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', '{}', {}, {}, {}, {}, {}) ON DUPLICATE KEY UPDATE `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`), `{}`=VALUES(`{}`)",
+                                                structure.table_name,
+                                                structure.column_id,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_downloaded,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_updated,
+                                                user_entry_item.user_id.unwrap(),
+                                                user_entry_item.completed,
+                                                user_entry_item.active,
+                                                user_entry_item.downloaded,
+                                                user_entry_item.key,
+                                                user_entry_item.uploaded,
+                                                user_entry_item.updated,
+                                                structure.column_completed,
+                                                structure.column_completed,
+                                                structure.column_active,
+                                                structure.column_active,
+                                                structure.column_downloaded,
+                                                structure.column_downloaded,
+                                                structure.column_key,
+                                                structure.column_key,
+                                                structure.column_uploaded,
+                                                structure.column_uploaded,
+                                                structure.column_updated,
+                                                structure.column_updated
+                                            )
+                                        }
+                                    }
                                 }
                             }
                         }
                         false => {
-                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                            match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid {
                                 true => {
-                                    format!(
-                                        "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=UNHEX('{}'), `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')",
-                                        structure.database_name,
-                                        structure.column_completed,
-                                        user_entry_item.completed,
-                                        structure.column_active,
-                                        user_entry_item.active,
-                                        structure.column_downloaded,
-                                        user_entry_item.downloaded,
-                                        structure.column_key,
-                                        user_entry_item.key,
-                                        structure.column_uploaded,
-                                        user_entry_item.uploaded,
-                                        structure.column_updated,
-                                        user_entry_item.updated,
-                                        structure.column_id,
-                                        user_entry_item.user_id.unwrap(),
-                                    )
+                                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                                        true => {
+                                            format!(
+                                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=UNHEX('{}'), `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')",
+                                                structure.table_name,
+                                                structure.column_completed,
+                                                user_entry_item.completed,
+                                                structure.column_active,
+                                                user_entry_item.active,
+                                                structure.column_downloaded,
+                                                user_entry_item.downloaded,
+                                                structure.column_key,
+                                                user_entry_item.key,
+                                                structure.column_uploaded,
+                                                user_entry_item.uploaded,
+                                                structure.column_updated,
+                                                user_entry_item.updated,
+                                                structure.column_uuid,
+                                                user_entry_item.user_uuid.clone().unwrap(),
+                                            )
+                                        }
+                                        false => {
+                                            format!(
+                                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'",
+                                                structure.table_name,
+                                                structure.column_completed,
+                                                user_entry_item.completed,
+                                                structure.column_active,
+                                                user_entry_item.active,
+                                                structure.column_downloaded,
+                                                user_entry_item.downloaded,
+                                                structure.column_key,
+                                                user_entry_item.key,
+                                                structure.column_uploaded,
+                                                user_entry_item.uploaded,
+                                                structure.column_updated,
+                                                user_entry_item.updated,
+                                                structure.column_uuid,
+                                                user_entry_item.user_uuid.clone().unwrap(),
+                                            )
+                                        }
+                                    }
                                 }
                                 false => {
-                                    format!(
-                                        "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'",
-                                        structure.database_name,
-                                        structure.column_completed,
-                                        user_entry_item.completed,
-                                        structure.column_active,
-                                        user_entry_item.active,
-                                        structure.column_downloaded,
-                                        user_entry_item.downloaded,
-                                        structure.column_key,
-                                        user_entry_item.key,
-                                        structure.column_uploaded,
-                                        user_entry_item.uploaded,
-                                        structure.column_updated,
-                                        user_entry_item.updated,
-                                        structure.column_id,
-                                        user_entry_item.user_id.unwrap(),
-                                    )
+                                    match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key {
+                                        true => {
+                                            format!(
+                                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=UNHEX('{}'), `{}`={}, `{}`={} WHERE `{}`=UNHEX('{}')",
+                                                structure.table_name,
+                                                structure.column_completed,
+                                                user_entry_item.completed,
+                                                structure.column_active,
+                                                user_entry_item.active,
+                                                structure.column_downloaded,
+                                                user_entry_item.downloaded,
+                                                structure.column_key,
+                                                user_entry_item.key,
+                                                structure.column_uploaded,
+                                                user_entry_item.uploaded,
+                                                structure.column_updated,
+                                                user_entry_item.updated,
+                                                structure.column_id,
+                                                user_entry_item.user_id.unwrap(),
+                                            )
+                                        }
+                                        false => {
+                                            format!(
+                                                "UPDATE IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'",
+                                                structure.table_name,
+                                                structure.column_completed,
+                                                user_entry_item.completed,
+                                                structure.column_active,
+                                                user_entry_item.active,
+                                                structure.column_downloaded,
+                                                user_entry_item.downloaded,
+                                                structure.column_key,
+                                                user_entry_item.key,
+                                                structure.column_uploaded,
+                                                user_entry_item.uploaded,
+                                                structure.column_updated,
+                                                user_entry_item.updated,
+                                                structure.column_id,
+                                                user_entry_item.user_id.unwrap(),
+                                            )
+                                        }
+                                    }
                                 }
                             }
                         }
+                    };
+                    match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await {
+                        Ok(_) => {}
+                        Err(e) => {
+                            error!("[MySQL] Error: {}", e.to_string());
+                            return Err(e);
+                        }
                     }
                 }
-            };
-            match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await {
-                Ok(_) => {}
-                Err(e) => {
-                    error!("[MySQL] Error: {}", e.to_string());
-                    return Err(e);
-                }
             }
             if (users_handled_entries as f64 / 1000f64).fract() == 0.0 || users.len() as u64 == users_handled_entries {
                 info!("[MySQL] Handled {} users", users_handled_entries);
             }
         }
+        info!("[MySQL] Handled {} users", users_handled_entries);
         self.commit(users_transaction).await
     }
@@ -1206,7 +1360,7 @@ impl DatabaseConnectorMySQL {
         };
         let string_format = format!(
             "UPDATE `{}` SET `{}`=0, `{}`=0",
-            structure.database_name,
+            structure.table_name,
             structure.column_seeds,
             structure.column_peers
         );
diff --git a/src/database/impls/database_connector_pgsql.rs b/src/database/impls/database_connector_pgsql.rs
index 15b1cbf..75df936 100644
--- a/src/database/impls/database_connector_pgsql.rs
+++ b/src/database/impls/database_connector_pgsql.rs
@@ -15,6 +15,7 @@ use crate::database::enums::database_drivers::DatabaseDrivers;
 use crate::database::structs::database_connector::DatabaseConnector;
 use crate::database::structs::database_connector_pgsql::DatabaseConnectorPgSQL;
 use crate::stats::enums::stats_event::StatsEvent;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
 use crate::tracker::structs::torrent_entry::TorrentEntry;
 use crate::tracker::structs::torrent_tracker::TorrentTracker;
@@ -48,13 +49,13 @@ impl DatabaseConnectorPgSQL {
         info!("[BOOT] Database creation triggered for PgSQL.");
         // Create Torrent DB
-        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().torrents.unwrap().database_name);
+        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().torrents.unwrap().table_name);
         match config.database_structure.clone().unwrap().torrents.unwrap().bin_type_infohash {
             true => {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} bytea NOT NULL, {} integer NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, CONSTRAINT torrents_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().torrents.unwrap().database_name,
+                        config.database_structure.clone().unwrap().torrents.unwrap().table_name,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_seeds,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_peers,
@@ -70,7 +71,7 @@ impl DatabaseConnectorPgSQL {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} character(40) NOT NULL, {} integer NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, CONSTRAINT torrents_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().torrents.unwrap().database_name,
+                        config.database_structure.clone().unwrap().torrents.unwrap().table_name,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_seeds,
                         config.database_structure.clone().unwrap().torrents.unwrap().column_peers,
@@ -85,13 +86,13 @@ impl DatabaseConnectorPgSQL {
         }
         // Create Whitelist DB
-        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().whitelist.unwrap().database_name);
+        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().whitelist.unwrap().table_name);
        match
 config.database_structure.clone().unwrap().whitelist.unwrap().bin_type_infohash {
             true => {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} bytea NOT NULL, CONSTRAINT whitelist_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().whitelist.unwrap().database_name,
+                        config.database_structure.clone().unwrap().whitelist.unwrap().table_name,
                         config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash
                     ).as_str()
@@ -104,7 +105,7 @@ impl DatabaseConnectorPgSQL {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} character(40) NOT NULL, CONSTRAINT whitelist_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().whitelist.unwrap().database_name,
+                        config.database_structure.clone().unwrap().whitelist.unwrap().table_name,
                         config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash
                     ).as_str()
@@ -116,13 +117,13 @@ impl DatabaseConnectorPgSQL {
         }
         // Create Blacklist DB
-        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().blacklist.unwrap().database_name);
+        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().blacklist.unwrap().table_name);
         match config.database_structure.clone().unwrap().blacklist.unwrap().bin_type_infohash {
             true => {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} bytea NOT NULL, CONSTRAINT blacklist_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().blacklist.unwrap().database_name,
+                        config.database_structure.clone().unwrap().blacklist.unwrap().table_name,
                         config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash
                     ).as_str()
@@ -135,7 +136,7 @@ impl DatabaseConnectorPgSQL {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} character(40) NOT NULL, CONSTRAINT blacklist_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().blacklist.unwrap().database_name,
+                        config.database_structure.clone().unwrap().blacklist.unwrap().table_name,
                         config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash,
                         config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash
                     ).as_str()
@@ -147,13 +148,13 @@ impl DatabaseConnectorPgSQL {
         }
         // Create Keys DB
-        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().keys.unwrap().database_name);
+        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().keys.unwrap().table_name);
         match config.database_structure.clone().unwrap().keys.unwrap().bin_type_hash {
             true => {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} bytea NOT NULL, {} integer NOT NULL DEFAULT 0, CONSTRAINT keys_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().keys.unwrap().database_name,
+                        config.database_structure.clone().unwrap().keys.unwrap().table_name,
                         config.database_structure.clone().unwrap().keys.unwrap().column_hash,
                         config.database_structure.clone().unwrap().keys.unwrap().column_timeout,
                         config.database_structure.clone().unwrap().keys.unwrap().column_hash
@@ -167,7 +168,7 @@ impl DatabaseConnectorPgSQL {
                 match sqlx::query(
                     format!(
                         "CREATE TABLE IF NOT EXISTS public.{} ({} character(40) NOT NULL, {} integer NOT NULL DEFAULT 0, CONSTRAINT keys_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                        config.database_structure.clone().unwrap().keys.unwrap().database_name,
+                        config.database_structure.clone().unwrap().keys.unwrap().table_name,
                         config.database_structure.clone().unwrap().keys.unwrap().column_hash,
                         config.database_structure.clone().unwrap().keys.unwrap().column_timeout,
                         config.database_structure.clone().unwrap().keys.unwrap().column_hash
@@ -180,7 +181,7 @@ impl DatabaseConnectorPgSQL {
         }
         // Create Users DB
-        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().users.unwrap().database_name);
+        info!("[BOOT PgSQL] Creating table {}", config.database_structure.clone().unwrap().users.unwrap().table_name);
         match config.database_structure.clone().unwrap().users.unwrap().id_uuid {
             true => {
                 match config.database_structure.clone().unwrap().users.unwrap().bin_type_key {
                     true => {
                         match sqlx::query(
                             format!(
                                 "CREATE TABLE IF NOT EXISTS public.{} ({} character(36) NOT NULL, {} bytea NOT NULL, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} smallint NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, CONSTRAINT uuid_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                                config.database_structure.clone().unwrap().users.unwrap().database_name,
+                                config.database_structure.clone().unwrap().users.unwrap().table_name,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uuid,
                                 config.database_structure.clone().unwrap().users.unwrap().column_key,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uploaded,
@@ -207,7 +208,7 @@ impl DatabaseConnectorPgSQL {
                         match sqlx::query(
                             format!(
                                 "CREATE TABLE IF NOT EXISTS public.{} ({} character(36) NOT NULL, {} character(40) NOT NULL, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} smallint NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, CONSTRAINT uuid_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                                config.database_structure.clone().unwrap().users.unwrap().database_name,
+                                config.database_structure.clone().unwrap().users.unwrap().table_name,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uuid,
                                 config.database_structure.clone().unwrap().users.unwrap().column_key,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uploaded,
@@ -230,7 +231,7 @@ impl DatabaseConnectorPgSQL {
                         match sqlx::query(
                             format!(
                                 "CREATE TABLE IF NOT EXISTS public.{} ({} bigserial NOT NULL, {} bytea NOT NULL, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} smallint NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, CONSTRAINT id_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                                config.database_structure.clone().unwrap().users.unwrap().database_name,
+                                config.database_structure.clone().unwrap().users.unwrap().table_name,
                                 config.database_structure.clone().unwrap().users.unwrap().column_id,
                                 config.database_structure.clone().unwrap().users.unwrap().column_key,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uploaded,
@@ -249,7 +250,7 @@ impl DatabaseConnectorPgSQL {
                         match sqlx::query(
                             format!(
                                 "CREATE TABLE IF NOT EXISTS public.{} ({} bigserial NOT NULL, {} character(40) NOT NULL, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} bigint NOT NULL DEFAULT 0, {} smallint NOT NULL DEFAULT 0, {} integer NOT NULL DEFAULT 0, CONSTRAINT id_pkey PRIMARY KEY ({})) TABLESPACE pg_default",
-                                config.database_structure.clone().unwrap().users.unwrap().database_name,
+                                config.database_structure.clone().unwrap().users.unwrap().table_name,
                                 config.database_structure.clone().unwrap().users.unwrap().column_id,
                                 config.database_structure.clone().unwrap().users.unwrap().column_key,
                                 config.database_structure.clone().unwrap().users.unwrap().column_uploaded,
@@ -286,18 +287,13 @@ impl DatabaseConnectorPgSQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[PgSQL] Trying to querying {} torrents - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
                 true => {
                     format!(
                         "SELECT encode({}::bytea, 'hex'), {} FROM {} LIMIT {}, {}",
                         structure.column_infohash,
                         structure.column_completed,
-                        structure.database_name,
+                        structure.table_name,
                         start,
                         length
                     )
@@ -307,7 +303,7 @@ impl DatabaseConnectorPgSQL {
                         "SELECT {}, {} FROM {} LIMIT {}, {}",
                         structure.column_infohash,
                         structure.column_completed,
-                        structure.database_name,
+                        structure.table_name,
                        start,
                        length
                    )
@@ -334,12 +330,14 @@ impl DatabaseConnectorPgSQL {
             if torrents < start {
                 break;
             }
+            info!("[PgSQL] Loaded {} torrents", torrents);
         }
         tracker.set_stats(StatsEvent::Completed, completed as i64);
+        info!("[PgSQL] Loaded {} torrents with {} completed", torrents, completed);
         Ok((torrents, completed))
     }
-    pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, TorrentEntry>) -> Result<(), Error>
+    pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, (TorrentEntry, UpdatesAction)>) -> Result<(), Error>
     {
         let mut torrents_transaction = self.pool.begin().await?;
         let mut torrents_handled_entries = 0u64;
@@ -347,81 +345,26 @@ impl DatabaseConnectorPgSQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for (info_hash, torrent_entry) in torrents.iter() {
+        for (info_hash, (torrent_entry, updates_action)) in torrents.iter() {
             torrents_handled_entries += 1;
-            match tracker.config.deref().clone().database.unwrap().insert_vacant {
-                true => {
-                    if tracker.config.deref().clone().database.unwrap().update_peers {
-                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
-                            true => {
-                                format!(
-                                    "INSERT INTO {} ({}, {}, {}) VALUES (decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}",
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    info_hash,
-                                    torrent_entry.seeds.len(),
-                                    torrent_entry.peers.len(),
-                                    structure.column_infohash,
-                                    structure.column_seeds,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    structure.column_peers
-                                )
-                            }
-                            false => {
-                                format!(
-                                    "INSERT INTO {} ({}, {}, {}) VALUES ('{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}",
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    info_hash,
-                                    torrent_entry.seeds.len(),
-                                    torrent_entry.peers.len(),
-                                    structure.column_infohash,
-                                    structure.column_seeds,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    structure.column_peers
-                                )
-                            }
-                        };
-                        match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
-                            Ok(_) => {}
-                            Err(e) => {
-                                error!("[PgSQL] Error: {}", e.to_string());
-                                return Err(e);
-                            }
-                        }
-                    }
-                    if tracker.config.deref().clone().database.unwrap().update_completed {
+            match updates_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
                         let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
                             true => {
                                 format!(
-                                    "INSERT INTO {} ({}, {}) VALUES (decode('{}', 'hex'), {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}",
-                                    structure.database_name,
+                                    "DELETE FROM {} WHERE {}=decode('{}', 'hex')",
+                                    structure.table_name,
                                     structure.column_infohash,
-                                    structure.column_completed,
-                                    info_hash,
-                                    torrent_entry.completed,
-                                    structure.column_infohash,
-                                    structure.column_completed,
-                                    structure.column_completed
+                                    info_hash
                                 )
                             }
                             false => {
                                 format!(
-                                    "INSERT INTO {} ({}, {}) VALUES ('{}', {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}",
-                                    structure.database_name,
+                                    "DELETE FROM {} WHERE {}='{}'",
+                                    structure.table_name,
                                     structure.column_infohash,
-                                    structure.column_completed,
-                                    info_hash,
-                                    torrent_entry.completed,
-                                    structure.column_infohash,
-                                    structure.column_completed,
-                                    structure.column_completed
+                                    info_hash
                                 )
                             }
                         };
@@ -434,82 +377,170 @@ impl DatabaseConnectorPgSQL {
                         }
                     }
                 }
-                false => {
-                    if tracker.config.deref().clone().database.unwrap().update_peers {
-                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
-                            true => {
-                                format!(
-                                    "UPDATE {} SET ({}, {}) = ({}, {}) WHERE {}=decode('{}', 'hex') AND NOT EXISTS (SELECT 1 FROM {} WHERE {}=decode('{}', 'hex'))",
-                                    structure.database_name,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    torrent_entry.seeds.len(),
-                                    torrent_entry.peers.len(),
-                                    structure.column_infohash,
-                                    info_hash,
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    info_hash
-                                )
-                            }
-                            false => {
-                                format!(
-                                    "UPDATE {} SET ({}, {}) = ({}, {}) WHERE {}='{}' AND NOT EXISTS (SELECT 1 FROM {} WHERE {}='{}')",
-                                    structure.database_name,
-                                    structure.column_seeds,
-                                    structure.column_peers,
-                                    torrent_entry.seeds.len(),
-                                    torrent_entry.peers.len(),
-                                    structure.column_infohash,
-                                    info_hash,
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    info_hash
-                                )
+                UpdatesAction::Add | UpdatesAction::Update => {
+                    match tracker.config.deref().clone().database.unwrap().insert_vacant {
+                        true => {
+                            if tracker.config.deref().clone().database.unwrap().update_peers {
+                                let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                                    true => {
+                                        format!(
+                                            "INSERT INTO {} ({}, {}, {}) VALUES (decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}",
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            info_hash,
+                                            torrent_entry.seeds.len(),
+                                            torrent_entry.peers.len(),
+                                            structure.column_infohash,
+                                            structure.column_seeds,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            structure.column_peers
+                                        )
+                                    }
+                                    false => {
+                                        format!(
+                                            "INSERT INTO {} ({}, {}, {}) VALUES ('{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}",
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            info_hash,
+                                            torrent_entry.seeds.len(),
+                                            torrent_entry.peers.len(),
+                                            structure.column_infohash,
+                                            structure.column_seeds,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            structure.column_peers
+                                        )
+                                    }
+                                };
+                                match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                                    Ok(_) => {}
+                                    Err(e) => {
+                                        error!("[PgSQL] Error: {}", e.to_string());
+                                        return Err(e);
+                                    }
+                                }
                             }
-                        };
-                        match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
-                            Ok(_) => {}
-                            Err(e) => {
-                                error!("[PgSQL] Error: {}", e.to_string());
-                                return Err(e);
+                            if tracker.config.deref().clone().database.unwrap().update_completed {
+                                let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                                    true => {
+                                        format!(
+                                            "INSERT INTO {} ({}, {}) VALUES (decode('{}', 'hex'), {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}",
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            structure.column_completed,
+                                            info_hash,
+                                            torrent_entry.completed,
+                                            structure.column_infohash,
+                                            structure.column_completed,
+                                            structure.column_completed
+                                        )
+                                    }
+                                    false => {
+                                        format!(
+                                            "INSERT INTO {} ({}, {}) VALUES ('{}', {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}",
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            structure.column_completed,
+                                            info_hash,
+                                            torrent_entry.completed,
+                                            structure.column_infohash,
+                                            structure.column_completed,
+                                            structure.column_completed
+                                        )
+                                    }
+                                };
+                                match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                                    Ok(_) => {}
+                                    Err(e) => {
+                                        error!("[PgSQL] Error: {}", e.to_string());
+                                        return Err(e);
+                                    }
+                                }
                             }
                         }
-                    }
-                    if tracker.config.deref().clone().database.unwrap().update_completed {
-                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
-                            true => {
-                                format!(
-                                    "UPDATE {} SET {}={} WHERE {}=decode('{}', 'hex') AND EXISTS (SELECT 1 FROM {} WHERE {}=decode('{}', 'hex'))",
-                                    structure.database_name,
-                                    structure.column_completed,
-                                    torrent_entry.completed,
-                                    structure.column_infohash,
-                                    info_hash,
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    info_hash
-                                )
-                            }
-                            false => {
-                                format!(
-                                    "UPDATE {} SET {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')",
-                                    structure.database_name,
-                                    structure.column_completed,
-                                    torrent_entry.completed,
-                                    structure.column_infohash,
-                                    info_hash,
-                                    structure.database_name,
-                                    structure.column_infohash,
-                                    info_hash
-                                )
+                        false => {
+                            if tracker.config.deref().clone().database.unwrap().update_peers {
+                                let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                                    true => {
+                                        format!(
+                                            "UPDATE {} SET ({}, {}) = ({}, {}) WHERE {}=decode('{}', 'hex') AND NOT EXISTS (SELECT 1 FROM {} WHERE {}=decode('{}', 'hex'))",
+                                            structure.table_name,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            torrent_entry.seeds.len(),
+                                            torrent_entry.peers.len(),
+                                            structure.column_infohash,
+                                            info_hash,
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            info_hash
+                                        )
+                                    }
+                                    false => {
+                                        format!(
+                                            "UPDATE {} SET ({}, {}) = ({}, {}) WHERE {}='{}' AND NOT EXISTS (SELECT 1 FROM {} WHERE {}='{}')",
+                                            structure.table_name,
+                                            structure.column_seeds,
+                                            structure.column_peers,
+                                            torrent_entry.seeds.len(),
+                                            torrent_entry.peers.len(),
+                                            structure.column_infohash,
+                                            info_hash,
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            info_hash
+                                        )
+                                    }
+                                };
+                                match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                                    Ok(_) => {}
+                                    Err(e) => {
+                                        error!("[PgSQL] Error: {}", e.to_string());
+                                        return Err(e);
+                                    }
+                                }
                             }
-                        };
-                        match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
-                            Ok(_) => {}
-                            Err(e) => {
-                                error!("[PgSQL] Error: {}", e.to_string());
-                                return Err(e);
+                            if tracker.config.deref().clone().database.unwrap().update_completed {
+                                let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash {
+                                    true => {
+                                        format!(
+                                            "UPDATE {} SET {}={} WHERE {}=decode('{}', 'hex') AND EXISTS (SELECT 1 FROM {} WHERE {}=decode('{}', 'hex'))",
+                                            structure.table_name,
+                                            structure.column_completed,
+                                            torrent_entry.completed,
+                                            structure.column_infohash,
+                                            info_hash,
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            info_hash
+                                        )
+                                    }
+                                    false => {
+                                        format!(
+                                            "UPDATE {} SET {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')",
+                                            structure.table_name,
+                                            structure.column_completed,
+                                            torrent_entry.completed,
+                                            structure.column_infohash,
+                                            info_hash,
+                                            structure.table_name,
+                                            structure.column_infohash,
+                                            info_hash
+                                        )
+                                    }
+                                };
+                                match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await {
+                                    Ok(_) => {}
+                                    Err(e) => {
+                                        error!("[PgSQL] Error: {}", e.to_string());
+                                        return Err(e);
+                                    }
+                                }
                             }
                         }
                     }
@@ -519,6 +550,7 @@ impl DatabaseConnectorPgSQL {
                 info!("[PgSQL] Handled {} torrents", torrents_handled_entries);
             }
         }
+        info!("[PgSQL] Handled {} torrents", torrents_handled_entries);
         self.commit(torrents_transaction).await
     }
@@ -532,17 +564,12 @@ impl DatabaseConnectorPgSQL {
             Some(db_structure) => { db_structure }
         };
         loop {
-            info!(
-                "[PgSQL] Trying to querying {} whitelisted hashes - Skip: {}",
-                length,
-                start
-            );
             let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
                 true => {
                     format!(
                         "SELECT encode({}::bytea, 'hex') FROM {} LIMIT {}, {}",
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                         start,
                         length
                     )
@@ -551,7 +578,7 @@ impl DatabaseConnectorPgSQL {
                     format!(
                         "SELECT {} FROM {} LIMIT {}, {}",
                         structure.column_infohash,
-                        structure.database_name,
+                        structure.table_name,
                        start,
                        length
                    )
@@ -568,11 +595,13 @@ impl DatabaseConnectorPgSQL {
             if hashes < start {
                 break;
             }
+            info!("[PgSQL] Handled {} whitelisted torrents", hashes);
         }
+        info!("[PgSQL] Loaded {} whitelisted torrents", hashes);
         Ok(hashes)
     }
-    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<InfoHash>) -> Result<u64, Error>
+    pub async fn save_whitelist(&self, tracker: Arc<TorrentTracker>, whitelists: Vec<(InfoHash, UpdatesAction)>) -> Result<u64, Error>
     {
         let mut whitelist_transaction = self.pool.begin().await?;
         let mut whitelist_handled_entries = 0u64;
@@ -580,38 +609,71 @@ impl DatabaseConnectorPgSQL {
             None => { return Err(Error::RowNotFound); }
             Some(db_structure) => { db_structure }
         };
-        for info_hash in whitelists.iter() {
+        for (info_hash, updates_action) in whitelists.iter() {
             whitelist_handled_entries += 1;
-            let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
-                true => {
-                    format!(
-                        "INSERT INTO {} ({}) VALUES (decode('{}', 'hex')) ON CONFLICT DO NOTHING",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
-                }
-                false => {
-                    format!(
-                        "INSERT INTO {} ({}) VALUES ('{}') ON CONFLICT DO NOTHING",
-                        structure.database_name,
-                        structure.column_infohash,
-                        info_hash
-                    )
+            match updates_action {
+                UpdatesAction::Remove => {
+                    if tracker.config.deref().clone().database.unwrap().remove_action {
+                        let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash {
+                            true => {
+                                format!(
+                                    "DELETE FROM {} WHERE {}=decode('{}', 'hex')",
+                                    structure.table_name,
+                                    structure.column_infohash,
+                                    info_hash
+                                )
+                            }
+                            false => {
+                                format!(
+                                    "DELETE FROM {} WHERE {}='{}'",
+                                    structure.table_name,
+                                   structure.column_infohash,
+                                   info_hash
+                               )
= match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { + true => { + format!( + "DELETE FROM {} WHERE {}=decode('{}', 'hex')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "DELETE FROM {} WHERE {}='{}'", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); + } + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[PgSQL] Error: {}", e.to_string()); - return Err(e); + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { + true => { + format!( + "INSERT INTO {} ({}) VALUES (decode('{}', 'hex')) ON CONFLICT DO NOTHING", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "INSERT INTO {} ({}) VALUES ('{}') ON CONFLICT DO NOTHING", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); + } + } } } if (blacklist_handled_entries as f64 / 1000f64).fract() == 0.0 { - info!("[PgSQL] Handled {} torrents", blacklist_handled_entries); + info!("[PgSQL] Handled {} blacklisted torrents", blacklist_handled_entries); } } - info!("[PgSQL] Saved {} blacklisted torrents", blacklist_handled_entries); + info!("[PgSQL] Handled {} blacklisted torrents", blacklist_handled_entries); let _ = self.commit(blacklist_transaction).await; Ok(blacklist_handled_entries) } @@ -720,18 +812,13 @@ impl DatabaseConnectorPgSQL { Some(db_structure) => { db_structure } }; loop { - info!( - "[PgSQL] Trying to querying {} keys hashes - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { true => { format!( "SELECT encode({}::bytea, 'hex'), {} FROM {} LIMIT {}, {}", structure.column_hash, structure.column_timeout, - structure.database_name, + structure.table_name, start, length ) @@ -741,7 +828,7 @@ impl DatabaseConnectorPgSQL { "SELECT {}, {} FROM {} LIMIT {}, {}", structure.column_hash, structure.column_timeout, - structure.database_name, + structure.table_name, start, length ) @@ -759,11 +846,13 @@ impl DatabaseConnectorPgSQL { if hashes < start { break; } + info!("[PgSQL] Handled {} keys", hashes); } + info!("[PgSQL] Handled {} keys", hashes); Ok(hashes) } - pub async fn save_keys(&self, tracker: Arc, keys: BTreeMap) -> Result + pub async fn save_keys(&self, tracker: Arc, keys: BTreeMap) -> Result { let mut keys_transaction = self.pool.begin().await?; let mut keys_handled_entries = 0u64; @@ -771,48 +860,81 @@ impl DatabaseConnectorPgSQL { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (hash, timeout) in keys.iter() { + for (hash, (timeout, update_action)) in keys.iter() { keys_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { - true => { - format!( - "INSERT INTO {} ({}, {}) VALUES (decode('{}', 'hex'), {}) ON CONFLICT ({}) DO UPDATE SET 
{}=excluded.{}", - structure.database_name, - structure.column_hash, - structure.column_timeout, - hash, - timeout, - structure.column_hash, - structure.column_timeout, - structure.column_timeout - ) - } - false => { - format!( - "INSERT INTO {} ({}, {}) VALUES ('{}', {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}", - structure.database_name, - structure.column_hash, - structure.column_timeout, - hash, - timeout, - structure.column_hash, - structure.column_timeout, - structure.column_timeout - ) + match update_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { + true => { + format!( + "DELETE FROM {} WHERE {}=decode('{}', 'hex')", + structure.table_name, + structure.column_hash, + hash + ) + } + false => { + format!( + "DELETE FROM {} WHERE {}='{}'", + structure.table_name, + structure.column_hash, + hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); + } + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[PgSQL] Error: {}", e.to_string()); - return Err(e); + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { + true => { + format!( + "INSERT INTO {} ({}, {}) VALUES (decode('{}', 'hex'), {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}", + structure.table_name, + structure.column_hash, + structure.column_timeout, + hash, + timeout, + structure.column_hash, + structure.column_timeout, + structure.column_timeout + ) + } + false => { + format!( + "INSERT INTO {} ({}, {}) VALUES ('{}', {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}", + structure.table_name, + structure.column_hash, + structure.column_timeout, + hash, + timeout, + structure.column_hash, + structure.column_timeout, + structure.column_timeout + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); + } + } } } if (keys_handled_entries as f64 / 1000f64).fract() == 0.0 { info!("[PgSQL] Handled {} keys", keys_handled_entries); } } - info!("[PgSQL] Saved {} keys", keys_handled_entries); + info!("[PgSQL] Handled {} keys", keys_handled_entries); let _ = self.commit(keys_transaction).await; Ok(keys_handled_entries) } @@ -827,11 +949,6 @@ impl DatabaseConnectorPgSQL { Some(db_structure) => { db_structure } }; loop { - info!( - "[PgSQL] Trying to querying {} users - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { @@ -845,7 +962,7 @@ impl DatabaseConnectorPgSQL { structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -860,7 +977,7 @@ impl DatabaseConnectorPgSQL { structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -879,7 +996,7 @@ impl DatabaseConnectorPgSQL { structure.column_completed, structure.column_updated, 
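Note: save_keys now receives a map of hash to (timeout, UpdatesAction), and its add/update arm emits a PostgreSQL upsert that rewrites the timeout through excluded. A sketch of the generated statement for both hash encodings, with stand-in names for the configured identifiers:

    // SQL emitted by the save_keys add/update arm; `bin_hash` mirrors the
    // bin_type_hash toggle that decides between a bytea and a text column.
    fn keys_upsert_sql(table: &str, hash_col: &str, timeout_col: &str,
                       hash: &str, timeout: i64, bin_hash: bool) -> String {
        let value = if bin_hash {
            format!("decode('{}', 'hex')", hash) // hex digest into a bytea column
        } else {
            format!("'{}'", hash)                // plain text column
        };
        format!(
            "INSERT INTO {t} ({h}, {c}) VALUES ({v}, {ts}) \
             ON CONFLICT ({h}) DO UPDATE SET {c}=excluded.{c}",
            t = table, h = hash_col, c = timeout_col, v = value, ts = timeout
        )
    }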
structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -894,7 +1011,7 @@ impl DatabaseConnectorPgSQL { structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -934,7 +1051,7 @@ impl DatabaseConnectorPgSQL { downloaded: result.get::(structure.column_downloaded.as_str()) as u64, completed: result.get::(structure.column_completed.as_str()) as u64, updated: result.get::(structure.column_updated.as_str()) as u64, - active: result.get::(structure.column_active.as_str()) as u8, + active: result.get::(structure.column_active.as_str()) as u8, torrents_active: Default::default(), }); hashes += 1; @@ -943,11 +1060,13 @@ impl DatabaseConnectorPgSQL { if hashes < start { break; } + info!("[PgSQL] Loaded {} users", hashes); } + info!("[PgSQL] Loaded {} users", hashes); Ok(hashes) } - pub async fn save_users(&self, tracker: Arc, users: BTreeMap) -> Result<(), Error> + pub async fn save_users(&self, tracker: Arc, users: BTreeMap) -> Result<(), Error> { let mut users_transaction = self.pool.begin().await?; let mut users_handled_entries = 0u64; @@ -955,269 +1074,303 @@ impl DatabaseConnectorPgSQL { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (_, user_entry_item) in users.iter() { + for (_, (user_entry_item, updates_action)) in users.iter() { users_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant { - true => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { - true => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { - true => { - format!( - "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", - structure.database_name, - structure.column_uuid, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_uuid, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } - false => { - format!( - "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", - structure.database_name, - structure.column_uuid, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_uuid, - structure.column_completed, - 
structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { + true => { + format!( + "DELETE FROM {} WHERE {}='{}'", + structure.table_name, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap() + ) } - } - false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { - true => { - format!( - "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", - structure.database_name, - structure.column_id, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_id.unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_id, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } - false => { - format!( - "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", - structure.database_name, - structure.column_id, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_id.unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_id, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } + false => { + format!( + "DELETE FROM {} WHERE {}='{}'", + structure.table_name, + structure.column_id, + user_entry_item.user_id.unwrap() + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); } } } } - false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant { true => { - match 
tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { - format!( - "UPDATE {} SET {}={}, {}={}, {}={}, {}=decode('{}', 'hex'), {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap(), - structure.database_name, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap() - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", + structure.table_name, + structure.column_uuid, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_uuid, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + false => { + format!( + "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", + structure.table_name, + structure.column_uuid, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_uuid, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + } } false => { - format!( - "UPDATE {} SET {}={}, {}={}, {}={}, {}='{}', {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - 
user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap(), - structure.database_name, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap() - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, decode('{}', 'hex'), {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", + structure.table_name, + structure.column_id, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_id.unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_id, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + false => { + format!( + "INSERT INTO {} ({}, {}, {}, {}, {}, {}, {}) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT ({}) DO UPDATE SET {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}, {}=excluded.{}", + structure.table_name, + structure.column_id, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_id.unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_id, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + } } } } false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { - format!( - "UPDATE {} SET {}={}, {}={}, {}={}, {}=decode('{}', 'hex'), {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_id, - user_entry_item.user_id.unwrap(), - structure.database_name, - structure.column_id, - user_entry_item.user_id.unwrap() - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "UPDATE {} SET {}={}, {}={}, {}={}, {}=decode('{}', 'hex'), {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", + structure.table_name, + structure.column_completed, + 
user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap(), + structure.table_name, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap() + ) + } + false => { + format!( + "UPDATE {} SET {}={}, {}={}, {}={}, {}='{}', {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap(), + structure.table_name, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap() + ) + } + } } false => { - format!( - "UPDATE {} SET {}={}, {}={}, {}={}, {}='{}', {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_id, - user_entry_item.user_id.unwrap(), - structure.database_name, - structure.column_id, - user_entry_item.user_id.unwrap() - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "UPDATE {} SET {}={}, {}={}, {}={}, {}=decode('{}', 'hex'), {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_id, + user_entry_item.user_id.unwrap(), + structure.table_name, + structure.column_id, + user_entry_item.user_id.unwrap() + ) + } + false => { + format!( + "UPDATE {} SET {}={}, {}={}, {}={}, {}='{}', {}={}, {}={} WHERE {}='{}' AND EXISTS (SELECT 1 FROM {} WHERE {}='{}')", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_id, + user_entry_item.user_id.unwrap(), + structure.table_name, + structure.column_id, + user_entry_item.user_id.unwrap() + ) + } + } } } } + }; + match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[PgSQL] Error: {}", e.to_string()); + return Err(e); + } } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[PgSQL] Error: 
{}", e.to_string()); - return Err(e); - } } if (users_handled_entries as f64 / 1000f64).fract() == 0.0 || users.len() as u64 == users_handled_entries { info!("[PgSQL] Handled {} users", users_handled_entries); } } + info!("[PgSQL] Handled {} users", users_handled_entries); self.commit(users_transaction).await } @@ -1230,7 +1383,7 @@ impl DatabaseConnectorPgSQL { }; let string_format = format!( "UPDATE {} SET ({}, {}) = (0, 0)", - structure.database_name, + structure.table_name, structure.column_seeds, structure.column_peers ); @@ -1252,7 +1405,7 @@ impl DatabaseConnectorPgSQL { Ok(()) } Err(e) => { - error!("[PgSQL3] Error: {}", e.to_string()); + error!("[PgSQL] Error: {}", e.to_string()); Err(e) } } diff --git a/src/database/impls/database_connector_sqlite.rs b/src/database/impls/database_connector_sqlite.rs index 19af30e..485e249 100644 --- a/src/database/impls/database_connector_sqlite.rs +++ b/src/database/impls/database_connector_sqlite.rs @@ -15,6 +15,7 @@ use crate::database::enums::database_drivers::DatabaseDrivers; use crate::database::structs::database_connector::DatabaseConnector; use crate::database::structs::database_connector_sqlite::DatabaseConnectorSQLite; use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_entry::TorrentEntry; use crate::tracker::structs::torrent_tracker::TorrentTracker; @@ -46,6 +47,7 @@ impl DatabaseConnectorSQLite { if create_database { let pool = &structure.sqlite.clone().unwrap().pool; info!("[BOOT] Database creation triggered for SQLite."); + info!("[BOOT SQLite] Setting the PRAGMA config..."); let _ = sqlx::query("PRAGMA temp_store = memory;").execute(pool).await; let _ = sqlx::query("PRAGMA mmap_size = 30000000000;").execute(pool).await; @@ -53,13 +55,13 @@ impl DatabaseConnectorSQLite { let _ = sqlx::query("PRAGMA synchronous = full;").execute(pool).await; // Create Torrent DB - info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().torrents.unwrap().database_name); + info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().torrents.unwrap().table_name); match config.database_structure.clone().unwrap().torrents.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` BLOB PRIMARY KEY NOT NULL, `{}` INTEGER DEFAULT 0, `{}` INTEGER DEFAULT 0, `{}` INTEGER DEFAULT 0)", - config.database_structure.clone().unwrap().torrents.unwrap().database_name, + config.database_structure.clone().unwrap().torrents.unwrap().table_name, config.database_structure.clone().unwrap().torrents.unwrap().column_infohash, config.database_structure.clone().unwrap().torrents.unwrap().column_seeds, config.database_structure.clone().unwrap().torrents.unwrap().column_peers, @@ -74,7 +76,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL, `{}` INTEGER DEFAULT 0, `{}` INTEGER DEFAULT 0, `{}` INTEGER DEFAULT 0)", - config.database_structure.clone().unwrap().torrents.unwrap().database_name, + config.database_structure.clone().unwrap().torrents.unwrap().table_name, config.database_structure.clone().unwrap().torrents.unwrap().column_infohash, config.database_structure.clone().unwrap().torrents.unwrap().column_seeds, config.database_structure.clone().unwrap().torrents.unwrap().column_peers, @@ -88,13 +90,13 @@ impl DatabaseConnectorSQLite { } // Create Whitelist DB - 
info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().whitelist.unwrap().database_name); + info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().whitelist.unwrap().table_name); match config.database_structure.clone().unwrap().whitelist.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` BLOB PRIMARY KEY NOT NULL)", - config.database_structure.clone().unwrap().whitelist.unwrap().database_name, + config.database_structure.clone().unwrap().whitelist.unwrap().table_name, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash ).as_str() ).execute(pool).await { @@ -106,7 +108,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL)", - config.database_structure.clone().unwrap().whitelist.unwrap().database_name, + config.database_structure.clone().unwrap().whitelist.unwrap().table_name, config.database_structure.clone().unwrap().whitelist.unwrap().column_infohash ).as_str() ).execute(pool).await { @@ -117,13 +119,13 @@ impl DatabaseConnectorSQLite { } // Create Blacklist DB - info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().blacklist.unwrap().database_name); + info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().blacklist.unwrap().table_name); match config.database_structure.clone().unwrap().blacklist.unwrap().bin_type_infohash { true => { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` BLOB PRIMARY KEY NOT NULL)", - config.database_structure.clone().unwrap().blacklist.unwrap().database_name, + config.database_structure.clone().unwrap().blacklist.unwrap().table_name, config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash ).as_str() ).execute(pool).await { @@ -135,7 +137,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL)", - config.database_structure.clone().unwrap().blacklist.unwrap().database_name, + config.database_structure.clone().unwrap().blacklist.unwrap().table_name, config.database_structure.clone().unwrap().blacklist.unwrap().column_infohash ).as_str() ).execute(pool).await { @@ -146,13 +148,13 @@ impl DatabaseConnectorSQLite { } // Create Keys DB - info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().keys.unwrap().database_name); + info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().keys.unwrap().table_name); match config.database_structure.clone().unwrap().keys.unwrap().bin_type_hash { true => { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` BLOB PRIMARY KEY NOT NULL, `{}` INTEGER DEFAULT 0)", - config.database_structure.clone().unwrap().keys.unwrap().database_name, + config.database_structure.clone().unwrap().keys.unwrap().table_name, config.database_structure.clone().unwrap().keys.unwrap().column_hash, config.database_structure.clone().unwrap().keys.unwrap().column_timeout ).as_str() @@ -165,7 +167,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL, `{}` INTEGER DEFAULT 0)", - config.database_structure.clone().unwrap().keys.unwrap().database_name, + config.database_structure.clone().unwrap().keys.unwrap().table_name, config.database_structure.clone().unwrap().keys.unwrap().column_hash, 
config.database_structure.clone().unwrap().keys.unwrap().column_timeout ).as_str() @@ -177,7 +179,7 @@ impl DatabaseConnectorSQLite { } // Create Users DB - info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().users.unwrap().database_name); + info!("[BOOT SQLite] Creating table {}", config.database_structure.clone().unwrap().users.unwrap().table_name); match config.database_structure.clone().unwrap().users.unwrap().id_uuid { true => { match config.database_structure.clone().unwrap().users.unwrap().bin_type_key { @@ -185,7 +187,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL, `{}` BLOB NOT NULL, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0)", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_uuid, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -203,7 +205,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` TEXT PRIMARY KEY NOT NULL, `{}` TEXT NOT NULL, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0)", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_uuid, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -225,7 +227,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` INTEGER PRIMARY KEY AUTOINCREMENT, `{}` BLOB NOT NULL, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0)", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_id, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -243,7 +245,7 @@ impl DatabaseConnectorSQLite { match sqlx::query( format!( "CREATE TABLE IF NOT EXISTS `{}` (`{}` INTEGER PRIMARY KEY AUTOINCREMENT, `{}` TEXT NOT NULL, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0, `{}` INTEGER NOT NULL DEFAULT 0)", - config.database_structure.clone().unwrap().users.unwrap().database_name, + config.database_structure.clone().unwrap().users.unwrap().table_name, config.database_structure.clone().unwrap().users.unwrap().column_id, config.database_structure.clone().unwrap().users.unwrap().column_key, config.database_structure.clone().unwrap().users.unwrap().column_uploaded, @@ -279,11 +281,6 @@ impl DatabaseConnectorSQLite { Some(db_structure) => { db_structure } }; loop { - info!( - "[SQLite] Trying to querying {} torrents - Skip: {}", - length, - start - ); let string_format = 
match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { true => { format!( @@ -291,7 +288,7 @@ impl DatabaseConnectorSQLite { structure.column_infohash, structure.column_infohash, structure.column_completed, - structure.database_name, + structure.table_name, start, length ) @@ -301,7 +298,7 @@ impl DatabaseConnectorSQLite { "SELECT `{}`, `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, structure.column_completed, - structure.database_name, + structure.table_name, start, length ) @@ -328,12 +325,14 @@ impl DatabaseConnectorSQLite { if torrents < start { break; } + info!("[SQLite] Handled {} torrents", torrents); } tracker.set_stats(StatsEvent::Completed, completed as i64); + info!("[SQLite] Loaded {} torrents with {} completed", torrents, completed); Ok((torrents, completed)) } - pub async fn save_torrents(&self, tracker: Arc, torrents: BTreeMap) -> Result<(), Error> + pub async fn save_torrents(&self, tracker: Arc, torrents: BTreeMap) -> Result<(), Error> { let mut torrents_transaction = self.pool.begin().await?; let mut torrents_handled_entries = 0u64; @@ -341,81 +340,26 @@ impl DatabaseConnectorSQLite { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (info_hash, torrent_entry) in torrents.iter() { + for (info_hash, (torrent_entry, updates_action)) in torrents.iter() { torrents_handled_entries += 1; - match tracker.config.deref().clone().database.unwrap().insert_vacant { - true => { - if tracker.config.deref().clone().database.unwrap().update_peers { + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { true => { format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES (X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, - structure.column_infohash, - structure.column_seeds, - structure.column_peers, - info_hash, - torrent_entry.seeds.len(), - torrent_entry.peers.len(), - structure.column_infohash, - structure.column_seeds, - structure.column_seeds, - structure.column_peers, - structure.column_peers - ) - } - false => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES ('{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, + "DELETE FROM `{}` WHERE `{}`=X'{}'", + structure.table_name, structure.column_infohash, - structure.column_seeds, - structure.column_peers, - info_hash, - torrent_entry.seeds.len(), - torrent_entry.peers.len(), - structure.column_infohash, - structure.column_seeds, - structure.column_seeds, - structure.column_peers, - structure.column_peers - ) - } - }; - match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); - } - } - } - if tracker.config.deref().clone().database.unwrap().update_completed { - let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { - true => { - format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES (X'{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", - structure.database_name, - structure.column_infohash, - structure.column_completed, - info_hash, - torrent_entry.completed, - structure.column_infohash, - structure.column_completed, - 
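Note: all load_* functions share one paging loop: fetch LIMIT start, length slices, advance the cursor, and stop once the running total falls behind the cursor (a short page). With this patch the noisy per-query "Trying to querying" line is gone and progress is logged per batch instead. A condensed sketch of the control flow; fetch_page is a hypothetical stand-in for the sqlx query:

    // Shape of the load_* pagination loop. The real code also logs every
    // 1000th entry via (n as f64 / 1000f64).fract() == 0.0, which for these
    // counter ranges is plain divisibility by 1000.
    async fn load_all(length: u64) -> u64 {
        let mut start = 0u64;
        let mut handled = 0u64;
        loop {
            handled += fetch_page(start, length).await; // SELECT ... LIMIT start, length
            start += length;
            if handled < start { break; } // last page came back short: done
        }
        handled
    }

    // Hypothetical stub standing in for the real paged SELECT.
    async fn fetch_page(_start: u64, _length: u64) -> u64 { 0 }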
structure.column_completed + info_hash ) } false => { format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", - structure.database_name, + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, structure.column_infohash, - structure.column_completed, - info_hash, - torrent_entry.completed, - structure.column_infohash, - structure.column_completed, - structure.column_completed + info_hash ) } }; @@ -428,70 +372,158 @@ impl DatabaseConnectorSQLite { } } } - false => { - if tracker.config.deref().clone().database.unwrap().update_peers { - let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { - true => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`=X'{}'", - structure.database_name, - structure.column_seeds, - torrent_entry.seeds.len(), - structure.column_peers, - torrent_entry.peers.len(), - structure.column_infohash, - info_hash - ) - } - false => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`='{}'", - structure.database_name, - structure.column_seeds, - torrent_entry.seeds.len(), - structure.column_peers, - torrent_entry.peers.len(), - structure.column_infohash, - info_hash - ) + UpdatesAction::Add | UpdatesAction::Update => { + match tracker.config.deref().clone().database.unwrap().insert_vacant { + true => { + if tracker.config.deref().clone().database.unwrap().update_peers { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES (X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_infohash, + structure.column_seeds, + structure.column_peers, + info_hash, + torrent_entry.seeds.len(), + torrent_entry.peers.len(), + structure.column_infohash, + structure.column_seeds, + structure.column_seeds, + structure.column_peers, + structure.column_peers + ) + } + false => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`) VALUES ('{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_infohash, + structure.column_seeds, + structure.column_peers, + info_hash, + torrent_entry.seeds.len(), + torrent_entry.peers.len(), + structure.column_infohash, + structure.column_seeds, + structure.column_seeds, + structure.column_peers, + structure.column_peers + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); + if tracker.config.deref().clone().database.unwrap().update_completed { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`) VALUES (X'{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", + structure.table_name, + structure.column_infohash, + structure.column_completed, + info_hash, + torrent_entry.completed, + structure.column_infohash, + structure.column_completed, + structure.column_completed + ) + } + false => { + format!( + "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON 
CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", + structure.table_name, + structure.column_infohash, + structure.column_completed, + info_hash, + torrent_entry.completed, + structure.column_infohash, + structure.column_completed, + structure.column_completed + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } } - } - if tracker.config.deref().clone().database.unwrap().update_completed { - let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { - true => { - format!( - "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`=X'{}'", - structure.database_name, - structure.column_completed, - torrent_entry.completed, - structure.column_infohash, - info_hash - ) - } - false => { - format!( - "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`='{}'", - structure.database_name, - structure.column_completed, - torrent_entry.completed, - structure.column_infohash, - info_hash - ) + false => { + if tracker.config.deref().clone().database.unwrap().update_peers { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { + true => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`=X'{}'", + structure.table_name, + structure.column_seeds, + torrent_entry.seeds.len(), + structure.column_peers, + torrent_entry.peers.len(), + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={} WHERE `{}`='{}'", + structure.table_name, + structure.column_seeds, + torrent_entry.seeds.len(), + structure.column_peers, + torrent_entry.peers.len(), + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); + if tracker.config.deref().clone().database.unwrap().update_completed { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().torrents.unwrap().bin_type_infohash { + true => { + format!( + "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`=X'{}'", + structure.table_name, + structure.column_completed, + torrent_entry.completed, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "UPDATE IGNORE `{}` SET `{}`={} WHERE `{}`='{}'", + structure.table_name, + structure.column_completed, + torrent_entry.completed, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *torrents_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } } } @@ -501,6 +533,7 @@ impl DatabaseConnectorSQLite { info!("[SQLite] Handled {} torrents", torrents_handled_entries); } } + info!("[SQLite] Handled {} torrents", torrents_handled_entries); self.commit(torrents_transaction).await } @@ -514,18 +547,13 @@ impl DatabaseConnectorSQLite { Some(db_structure) => { db_structure } }; loop { - info!( - "[SQLite] Trying to querying {} whitelisted hashes - Skip: {}", - length, - start - ); let string_format = match 
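Note: for torrents the add/update arm still branches on insert_vacant: when set, missing rows are created with an upsert; when unset, UPDATE OR IGNORE touches only rows that already exist. update_peers and update_completed independently gate which columns get written. A sketch of the seeds/peers write with illustrative identifiers:

    // insert_vacant switch for the seeds/peers write, SQLite flavor; the
    // `torrents`/`info_hash`/`seeds`/`peers` names stand in for the configured ones.
    fn torrent_peers_sql(insert_vacant: bool, info_hash: &str, seeds: usize, peers: usize) -> String {
        if insert_vacant {
            format!(
                "INSERT INTO `torrents` (`info_hash`, `seeds`, `peers`) VALUES (X'{info_hash}', {seeds}, {peers}) \
                 ON CONFLICT (`info_hash`) DO UPDATE SET `seeds`=excluded.`seeds`, `peers`=excluded.`peers`"
            )
        } else {
            format!(
                "UPDATE OR IGNORE `torrents` SET `seeds`={seeds}, `peers`={peers} WHERE `info_hash`=X'{info_hash}'"
            )
        }
    }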
tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash { true => { format!( "SELECT HEX(`{}`) AS `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, structure.column_infohash, - structure.database_name, + structure.table_name, start, length ) @@ -534,7 +562,7 @@ impl DatabaseConnectorSQLite { format!( "SELECT `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, - structure.database_name, + structure.table_name, start, length ) @@ -551,11 +579,13 @@ impl DatabaseConnectorSQLite { if hashes < start { break; } + info!("[SQLite] Handled {} whitelisted torrents", hashes); } + info!("[SQLite] Handled {} whitelisted torrents", hashes); Ok(hashes) } - pub async fn save_whitelist(&self, tracker: Arc, whitelists: Vec) -> Result + pub async fn save_whitelist(&self, tracker: Arc, whitelists: Vec<(InfoHash, UpdatesAction)>) -> Result { let mut whitelist_transaction = self.pool.begin().await?; let mut whitelist_handled_entries = 0u64; @@ -563,38 +593,71 @@ impl DatabaseConnectorSQLite { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for info_hash in whitelists.iter() { + for (info_hash, updates_action) in whitelists.iter() { whitelist_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash { - true => { - format!( - "INSERT OR IGNORE INTO `{}` (`{}`) VALUES (X'{}')", - structure.database_name, - structure.column_infohash, - info_hash - ) - } - false => { - format!( - "INSERT OR IGNORE INTO `{}` (`{}`) VALUES ('{}')", - structure.database_name, - structure.column_infohash, - info_hash - ) + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash { + true => { + format!( + "DELETE FROM `{}` WHERE `{}`=UNHEX('{}')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().whitelist.unwrap().bin_type_infohash { + true => { + format!( + "INSERT OR IGNORE INTO `{}` (`{}`) VALUES (X'{}')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "INSERT OR IGNORE INTO `{}` (`{}`) VALUES ('{}')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *whitelist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } } if (whitelist_handled_entries as f64 / 1000f64).fract() == 0.0 { - info!("[SQLite] Handled {} torrents", whitelist_handled_entries); + info!("[SQLite] Handled {} whitelisted torrents", whitelist_handled_entries); } } - info!("[SQLite] Saved {} whitelisted torrents", 
whitelist_handled_entries); + info!("[SQLite] Handled {} whitelisted torrents", whitelist_handled_entries); let _ = self.commit(whitelist_transaction).await; Ok(whitelist_handled_entries) } @@ -609,18 +672,13 @@ impl DatabaseConnectorSQLite { Some(db_structure) => { db_structure } }; loop { - info!( - "[SQLite] Trying to querying {} blacklisted hashes - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { true => { format!( "SELECT HEX(`{}`) AS `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, structure.column_infohash, - structure.database_name, + structure.table_name, start, length ) @@ -629,7 +687,7 @@ impl DatabaseConnectorSQLite { format!( "SELECT `{}` FROM `{}` LIMIT {}, {}", structure.column_infohash, - structure.database_name, + structure.table_name, start, length ) @@ -646,11 +704,13 @@ impl DatabaseConnectorSQLite { if hashes < start { break; } + info!("[SQLite] Handled {} blacklisted torrents", hashes); } + info!("[SQLite] Handled {} blacklisted torrents", hashes); Ok(hashes) } - pub async fn save_blacklist(&self, tracker: Arc, blacklists: Vec) -> Result + pub async fn save_blacklist(&self, tracker: Arc, blacklists: Vec<(InfoHash, UpdatesAction)>) -> Result { let mut blacklist_transaction = self.pool.begin().await?; let mut blacklist_handled_entries = 0u64; @@ -658,38 +718,71 @@ impl DatabaseConnectorSQLite { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for info_hash in blacklists.iter() { + for (info_hash, updates_action) in blacklists.iter() { blacklist_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { - true => { - format!( - "INSERT OR IGNORE INTO `{}` (`{}`) VALUES (X'{}')", - structure.database_name, - structure.column_infohash, - info_hash - ) - } - false => { - format!( - "INSERT OR IGNORE INTO `{}` (`{}`) VALUES ('{}')", - structure.database_name, - structure.column_infohash, - info_hash - ) + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { + true => { + format!( + "DELETE FROM `{}` WHERE `{}`=X'{}'", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *blacklist_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().blacklist.unwrap().bin_type_infohash { + true => { + format!( + "INSERT OR IGNORE INTO `{}` (`{}`) VALUES (X'{}')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + false => { + format!( + "INSERT OR IGNORE INTO `{}` (`{}`) VALUES ('{}')", + structure.table_name, + structure.column_infohash, + info_hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut 
*blacklist_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } } if (blacklist_handled_entries as f64 / 1000f64).fract() == 0.0 { - info!("[SQLite] Handled {} torrents", blacklist_handled_entries); + info!("[SQLite] Handled {} blacklisted torrents", blacklist_handled_entries); } } - info!("[SQLite] Saved {} blacklisted torrents", blacklist_handled_entries); + info!("[SQLite] Handled {} blacklisted torrents", blacklist_handled_entries); let _ = self.commit(blacklist_transaction).await; Ok(blacklist_handled_entries) } @@ -704,11 +797,6 @@ impl DatabaseConnectorSQLite { Some(db_structure) => { db_structure } }; loop { - info!( - "[SQLite] Trying to querying {} keys hashes - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { true => { format!( @@ -716,7 +804,7 @@ impl DatabaseConnectorSQLite { structure.column_hash, structure.column_hash, structure.column_timeout, - structure.database_name, + structure.table_name, start, length ) @@ -726,7 +814,7 @@ impl DatabaseConnectorSQLite { "SELECT `{}`, `{}` FROM `{}` LIMIT {}, {}", structure.column_hash, structure.column_timeout, - structure.database_name, + structure.table_name, start, length ) @@ -744,11 +832,13 @@ impl DatabaseConnectorSQLite { if hashes < start { break; } + info!("[SQLite] Handled {} keys", hashes); } + info!("[SQLite] Handled {} keys", hashes); Ok(hashes) } - pub async fn save_keys(&self, tracker: Arc, keys: BTreeMap) -> Result + pub async fn save_keys(&self, tracker: Arc, keys: BTreeMap) -> Result { let mut keys_transaction = self.pool.begin().await?; let mut keys_handled_entries = 0u64; @@ -756,48 +846,81 @@ impl DatabaseConnectorSQLite { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (hash, timeout) in keys.iter() { + for (hash, (timeout, update_action)) in keys.iter() { keys_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { - true => { - format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES (X'{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", - structure.database_name, - structure.column_hash, - structure.column_timeout, - hash, - timeout, - structure.column_hash, - structure.column_timeout, - structure.column_timeout - ) - } - false => { - format!( - "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", - structure.database_name, - structure.column_hash, - structure.column_timeout, - hash, - timeout, - structure.column_hash, - structure.column_timeout, - structure.column_timeout - ) + match update_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { + true => { + format!( + "DELETE FROM `{}` WHERE `{}`=X'{}'", + structure.table_name, + structure.column_hash, + hash + ) + } + false => { + format!( + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, + structure.column_hash, + hash + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } + } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { - Ok(_) => {} - Err(e) => { - 
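Note: under bin_type_infohash the SQLite statements embed the hex digest as a blob value; inserts and the blacklist delete use the X'…' literal, while the whitelist delete goes through UNHEX(), a built-in that only exists from SQLite 3.41 onward. A sketch of the literal selection:

    // How the hex digest becomes a value literal under bin_type_infohash.
    // X'…' is the SQLite blob literal; UNHEX() requires SQLite >= 3.41.
    fn info_hash_literal(bin_type_infohash: bool, hex_digest: &str) -> String {
        if bin_type_infohash {
            format!("X'{}'", hex_digest) // blob column
        } else {
            format!("'{}'", hex_digest)  // text column
        }
    }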
error!("[SQLite] Error: {}", e.to_string()); - return Err(e); + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().keys.unwrap().bin_type_hash { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`) VALUES (X'{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", + structure.table_name, + structure.column_hash, + structure.column_timeout, + hash, + timeout, + structure.column_hash, + structure.column_timeout, + structure.column_timeout + ) + } + false => { + format!( + "INSERT INTO `{}` (`{}`, `{}`) VALUES ('{}', {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`", + structure.table_name, + structure.column_hash, + structure.column_timeout, + hash, + timeout, + structure.column_hash, + structure.column_timeout, + structure.column_timeout + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *keys_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } + } } } if (keys_handled_entries as f64 / 1000f64).fract() == 0.0 { info!("[SQLite] Handled {} keys", keys_handled_entries); } } - info!("[SQLite] Saved {} keys", keys_handled_entries); + info!("[SQLite] Handled {} keys", keys_handled_entries); let _ = self.commit(keys_transaction).await; Ok(keys_handled_entries) } @@ -812,25 +935,21 @@ impl DatabaseConnectorSQLite { Some(db_structure) => { db_structure } }; loop { - info!( - "[SQLite] Trying to querying {} users - Skip: {}", - length, - start - ); let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { true => { format!( - "SELECT `{}`, HEX(`{}`), `{}`, `{}`, `{}`, `{}`, `{}` FROM `{}` LIMIT {}, {}", + "SELECT `{}`, HEX(`{}`) AS `{}`, `{}`, `{}`, `{}`, `{}`, `{}` FROM `{}` LIMIT {}, {}", structure.column_uuid, structure.column_key, + structure.column_key, structure.column_uploaded, structure.column_downloaded, structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -845,7 +964,7 @@ impl DatabaseConnectorSQLite { structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -856,15 +975,16 @@ impl DatabaseConnectorSQLite { match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { true => { format!( - "SELECT `{}`, HEX(`{}`), `{}`, `{}`, `{}`, `{}`, `{}` FROM `{}` LIMIT {}, {}", + "SELECT `{}`, HEX(`{}`) AS `{}`, `{}`, `{}`, `{}`, `{}`, `{}` FROM `{}` LIMIT {}, {}", structure.column_id, structure.column_key, + structure.column_key, structure.column_uploaded, structure.column_downloaded, structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -879,7 +999,7 @@ impl DatabaseConnectorSQLite { structure.column_completed, structure.column_updated, structure.column_active, - structure.database_name, + structure.table_name, start, length ) @@ -919,7 +1039,7 @@ impl DatabaseConnectorSQLite { downloaded: result.get::(structure.column_downloaded.as_str()) as u64, completed: result.get::(structure.column_completed.as_str()) as u64, updated: result.get::(structure.column_updated.as_str()) as u64, - active: result.get(structure.column_active.as_str()), + active: 
result.get::(structure.column_active.as_str()) as u8, torrents_active: Default::default(), }); hashes += 1; @@ -928,11 +1048,13 @@ impl DatabaseConnectorSQLite { if hashes < start { break; } + info!("[SQLite] Handled {} users", hashes); } + info!("[SQLite] Handled {} users", hashes); Ok(hashes) } - pub async fn save_users(&self, tracker: Arc, users: BTreeMap) -> Result<(), Error> + pub async fn save_users(&self, tracker: Arc, users: BTreeMap) -> Result<(), Error> { let mut users_transaction = self.pool.begin().await?; let mut users_handled_entries = 0u64; @@ -940,257 +1062,291 @@ impl DatabaseConnectorSQLite { None => { return Err(Error::RowNotFound); } Some(db_structure) => { db_structure } }; - for (_, user_entry_item) in users.iter() { + for (_, (user_entry_item, updates_action)) in users.iter() { users_handled_entries += 1; - let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant { - true => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { - true => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { - true => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, - structure.column_uuid, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap() , - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_uuid, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } - false => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, - structure.column_uuid, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_uuid, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } + match updates_action { + UpdatesAction::Remove => { + if tracker.config.deref().clone().database.unwrap().remove_action { + let string_format = match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { + true => 
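Note: load_users now reads the active column as a 64-bit integer and narrows it to u8, matching the INTEGER column the schema declares, instead of decoding a bool. A sketch of that read, assuming an sqlx SqliteRow:

    use sqlx::Row;

    // Decode the active flag the way the patched load_users does: fetch the
    // SQLite INTEGER as i64 and narrow, rather than asking sqlx for a bool.
    fn decode_active(result: &sqlx::sqlite::SqliteRow, column_active: &str) -> u8 {
        result.get::<i64, &str>(column_active) as u8
    }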
{ + format!( + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap() + ) } - } - false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { - true => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES (X'{}', {}, {}, {}, X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, - structure.column_id, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_id, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } - false => { - format!( - "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", - structure.database_name, - structure.column_id, - structure.column_completed, - structure.column_active, - structure.column_downloaded, - structure.column_key, - structure.column_uploaded, - structure.column_updated, - user_entry_item.user_uuid.clone().unwrap(), - user_entry_item.completed, - user_entry_item.active, - user_entry_item.downloaded, - user_entry_item.key, - user_entry_item.uploaded, - user_entry_item.updated, - structure.column_id, - structure.column_completed, - structure.column_completed, - structure.column_active, - structure.column_active, - structure.column_downloaded, - structure.column_downloaded, - structure.column_key, - structure.column_key, - structure.column_uploaded, - structure.column_uploaded, - structure.column_updated, - structure.column_updated - ) - } + false => { + format!( + "DELETE FROM `{}` WHERE `{}`='{}'", + structure.table_name, + structure.column_id, + user_entry_item.user_id.unwrap() + ) + } + }; + match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); } } } } - false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { + UpdatesAction::Add | UpdatesAction::Update => { + let string_format = match tracker.config.deref().clone().database.unwrap().insert_vacant { true => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=X'{}', `{}`={}, `{}`={} WHERE `{}`=X'{}'", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - 
structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap(), - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_uuid, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap() , + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_uuid, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + false => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_uuid, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_uuid, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + } } false => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_uuid.clone().unwrap(), - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES (X'{}', {}, {}, {}, X'{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_id, + structure.column_completed, + structure.column_active, + 
structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_id, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + false => { + format!( + "INSERT INTO `{}` (`{}`, `{}`, `{}`, `{}`, `{}`, `{}`, `{}`) VALUES ('{}', {}, {}, {}, '{}', {}, {}) ON CONFLICT (`{}`) DO UPDATE SET `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`, `{}`=excluded.`{}`", + structure.table_name, + structure.column_id, + structure.column_completed, + structure.column_active, + structure.column_downloaded, + structure.column_key, + structure.column_uploaded, + structure.column_updated, + user_entry_item.user_uuid.clone().unwrap(), + user_entry_item.completed, + user_entry_item.active, + user_entry_item.downloaded, + user_entry_item.key, + user_entry_item.uploaded, + user_entry_item.updated, + structure.column_id, + structure.column_completed, + structure.column_completed, + structure.column_active, + structure.column_active, + structure.column_downloaded, + structure.column_downloaded, + structure.column_key, + structure.column_key, + structure.column_uploaded, + structure.column_uploaded, + structure.column_updated, + structure.column_updated + ) + } + } } } } false => { - match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().id_uuid { true => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=X'{}', `{}`={}, `{}`={} WHERE `{}`=X'{}'", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_id.unwrap(), - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=X'{}', `{}`={}, `{}`={} WHERE `{}`=X'{}'", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap(), + ) + } + false => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + 
structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_uuid.clone().unwrap(), + ) + } + } } false => { - format!( - "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'", - structure.database_name, - structure.column_completed, - user_entry_item.completed, - structure.column_active, - user_entry_item.active, - structure.column_downloaded, - user_entry_item.downloaded, - structure.column_key, - user_entry_item.key, - structure.column_uploaded, - user_entry_item.uploaded, - structure.column_updated, - user_entry_item.updated, - structure.column_uuid, - user_entry_item.user_id.unwrap(), - ) + match tracker.config.deref().clone().database_structure.unwrap().users.unwrap().bin_type_key { + true => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`=X'{}', `{}`={}, `{}`={} WHERE `{}`=X'{}'", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_id.unwrap(), + ) + } + false => { + format!( + "UPDATE OR IGNORE `{}` SET `{}`={}, `{}`={}, `{}`={}, `{}`='{}', `{}`={}, `{}`={} WHERE `{}`='{}'", + structure.table_name, + structure.column_completed, + user_entry_item.completed, + structure.column_active, + user_entry_item.active, + structure.column_downloaded, + user_entry_item.downloaded, + structure.column_key, + user_entry_item.key, + structure.column_uploaded, + user_entry_item.uploaded, + structure.column_updated, + user_entry_item.updated, + structure.column_uuid, + user_entry_item.user_id.unwrap(), + ) + } + } } } } + }; + match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { + Ok(_) => {} + Err(e) => { + error!("[SQLite] Error: {}", e.to_string()); + return Err(e); + } } } - }; - match sqlx::query(string_format.as_str()).execute(&mut *users_transaction).await { - Ok(_) => {} - Err(e) => { - error!("[SQLite] Error: {}", e.to_string()); - return Err(e); - } } if (users_handled_entries as f64 / 1000f64).fract() == 0.0 || users.len() as u64 == users_handled_entries { info!("[SQLite] Handled {} users", users_handled_entries); } } + info!("[SQLite] Handled {} users", users_handled_entries); self.commit(users_transaction).await } @@ -1203,7 +1359,7 @@ impl DatabaseConnectorSQLite { }; let string_format = format!( "UPDATE `{}` SET `{}`=0, `{}`=0", - structure.database_name, + structure.table_name, structure.column_seeds, structure.column_peers ); @@ -1225,7 +1381,7 @@ impl DatabaseConnectorSQLite { Ok(()) } Err(e) => { - error!("[SQLite3] Error: {}", e.to_string()); + error!("[SQLite] Error: {}", e.to_string()); Err(e) } } diff --git a/src/main.rs b/src/main.rs index 5232392..bbd33df 100644 --- a/src/main.rs +++ b/src/main.rs @@ -189,8 +189,8 @@ async fn main() -> std::io::Result<()> } let stats = tracker_spawn_stats.get_stats(); - info!("[STATS] Torrents: {} - Updates: {} - Shadow {}: - Seeds: {} - Peers: {} - Completed: {}", stats.torrents, stats.torrents_updates, stats.torrents_shadow, stats.seeds, stats.peers, stats.completed); - info!("[STATS] Whitelists: {} - Blacklists: {} - Keys: {}", stats.whitelist, stats.blacklist, stats.keys); + info!("[STATS] Torrents: 
{} - Updates: {} - Seeds: {} - Peers: {} - Completed: {}", stats.torrents, stats.torrents_updates, stats.seeds, stats.peers, stats.completed); + info!("[STATS] WList: {} - WList Updates: {} - BLists: {} - BLists Updates: {} - Keys: {} - Keys Updates: {}", stats.whitelist, stats.whitelist_updates, stats.blacklist, stats.blacklist_updates, stats.keys, stats.keys_updates); info!("[STATS TCP IPv4] Connect: {} - API: {} - A: {} - S: {} - F: {} - 404: {}", stats.tcp4_connections_handled, stats.tcp4_api_handled, stats.tcp4_announces_handled, stats.tcp4_scrapes_handled, stats.tcp4_failure, stats.tcp4_not_found); info!("[STATS TCP IPv6] Connect: {} - API: {} - A: {} - S: {} - F: {} - 404: {}", stats.tcp6_connections_handled, stats.tcp6_api_handled, stats.tcp6_announces_handled, stats.tcp6_scrapes_handled, stats.tcp6_failure, stats.tcp6_not_found); info!("[STATS UDP IPv4] Connect: {} - A: {} - S: {} - IR: {} - BR: {}", stats.udp4_connections_handled, stats.udp4_announces_handled, stats.udp4_scrapes_handled, stats.udp4_invalid_request, stats.udp4_bad_request); @@ -256,6 +256,24 @@ async fn main() -> std::io::Result<()> let _ = tracker_spawn_updates.save_torrent_updates(tracker_spawn_updates.clone()).await; info!("[TORRENTS UPDATES] Torrent updates inserted into DB."); + if tracker_spawn_updates.config.tracker_config.clone().unwrap().whitelist_enabled.unwrap() { + info!("[WHITELIST UPDATES] Start updating whitelists into the DB."); + let _ = tracker_spawn_updates.save_whitelist_updates(tracker_spawn_updates.clone()).await; + info!("[WHITELIST UPDATES] Whitelist updates inserted into DB."); + } + + if tracker_spawn_updates.config.tracker_config.clone().unwrap().blacklist_enabled.unwrap() { + info!("[BLACKLIST UPDATES] Start updating blacklists into the DB."); + let _ = tracker_spawn_updates.save_blacklist_updates(tracker_spawn_updates.clone()).await; + info!("[BLACKLIST UPDATES] Blacklist updates inserted into DB."); + } + + if tracker_spawn_updates.config.tracker_config.clone().unwrap().keys_enabled.unwrap() { + info!("[KEY UPDATES] Start updating keys into the DB."); + let _ = tracker_spawn_updates.save_key_updates(tracker_spawn_updates.clone()).await; + info!("[KEY UPDATES] Key updates inserted into DB."); + } + if tracker_spawn_updates.config.tracker_config.clone().unwrap().users_enabled.unwrap() { info!("[USERS UPDATES] Start updating users into the DB."); let _ = tracker_spawn_updates.save_user_updates(tracker_spawn_updates.clone()).await; @@ -299,13 +317,13 @@ async fn main() -> std::io::Result<()> info!("Saving data to the database..."); let _ = tracker.save_torrent_updates(tracker.clone()).await; if tracker.config.tracker_config.clone().unwrap().whitelist_enabled.unwrap() { - let _ = tracker.save_whitelist(tracker.clone(), tracker.get_whitelist()).await; + let _ = tracker.save_whitelist_updates(tracker.clone()).await; } if tracker.config.tracker_config.clone().unwrap().blacklist_enabled.unwrap() { - let _ = tracker.save_blacklist(tracker.clone(), tracker.get_blacklist()).await; + let _ = tracker.save_blacklist_updates(tracker.clone()).await; } if tracker.config.tracker_config.clone().unwrap().keys_enabled.unwrap() { - let _ = tracker.save_keys(tracker.clone(), tracker.get_keys()).await; + let _ = tracker.save_key_updates(tracker.clone()).await; } if tracker.config.tracker_config.clone().unwrap().users_enabled.unwrap() { let _ = tracker.save_user_updates(tracker.clone()).await; diff --git a/src/stats/enums/stats_event.rs b/src/stats/enums/stats_event.rs index a6eb451..a45c2ac 100644 ---
a/src/stats/enums/stats_event.rs +++ b/src/stats/enums/stats_event.rs @@ -4,21 +4,23 @@ use serde::{Deserialize, Serialize}; pub enum StatsEvent { Torrents, TorrentsUpdates, - TorrentsShadow, Users, UsersUpdates, - UsersShadow, TimestampSave, TimestampTimeout, TimestampConsole, TimestampKeysTimeout, - MaintenanceMode, Seeds, Peers, Completed, + WhitelistEnabled, Whitelist, + WhitelistUpdates, + BlacklistEnabled, Blacklist, + BlacklistUpdates, Key, + KeyUpdates, Tcp4NotFound, Tcp4Failure, Tcp4ConnectionsHandled, @@ -40,7 +42,5 @@ pub enum StatsEvent { Udp6InvalidRequest, Udp6ConnectionsHandled, Udp6AnnouncesHandled, - Udp6ScrapesHandled, - TestCounter, - TestCounterUdp, + Udp6ScrapesHandled } \ No newline at end of file diff --git a/src/stats/impls/torrent_tracker.rs b/src/stats/impls/torrent_tracker.rs index de2cf38..fa73457 100644 --- a/src/stats/impls/torrent_tracker.rs +++ b/src/stats/impls/torrent_tracker.rs @@ -14,20 +14,20 @@ impl TorrentTracker { timestamp_run_keys_timeout: self.stats.timestamp_run_keys_timeout.load(Ordering::SeqCst), torrents: self.stats.torrents.load(Ordering::SeqCst), torrents_updates: self.stats.torrents_updates.load(Ordering::SeqCst), - torrents_shadow: self.stats.torrents_shadow.load(Ordering::SeqCst), users: self.stats.users.load(Ordering::SeqCst), users_updates: self.stats.users_updates.load(Ordering::SeqCst), - users_shadow: self.stats.users_shadow.load(Ordering::SeqCst), - maintenance_mode: self.stats.maintenance_mode.load(Ordering::SeqCst), seeds: self.stats.seeds.load(Ordering::SeqCst), peers: self.stats.peers.load(Ordering::SeqCst), completed: self.stats.completed.load(Ordering::SeqCst), whitelist_enabled: self.stats.whitelist_enabled.load(Ordering::SeqCst), whitelist: self.stats.whitelist.load(Ordering::SeqCst), + whitelist_updates: self.stats.whitelist_updates.load(Ordering::SeqCst), blacklist_enabled: self.stats.blacklist_enabled.load(Ordering::SeqCst), blacklist: self.stats.blacklist.load(Ordering::SeqCst), + blacklist_updates: self.stats.blacklist_updates.load(Ordering::SeqCst), keys_enabled: self.stats.keys_enabled.load(Ordering::SeqCst), keys: self.stats.keys.load(Ordering::SeqCst), + keys_updates: self.stats.keys_updates.load(Ordering::SeqCst), tcp4_not_found: self.stats.tcp4_not_found.load(Ordering::SeqCst), tcp4_failure: self.stats.tcp4_failure.load(Ordering::SeqCst), tcp4_connections_handled: self.stats.tcp4_connections_handled.load(Ordering::SeqCst), @@ -50,8 +50,6 @@ impl TorrentTracker { udp6_connections_handled: self.stats.udp6_connections_handled.load(Ordering::SeqCst), udp6_announces_handled: self.stats.udp6_announces_handled.load(Ordering::SeqCst), udp6_scrapes_handled: self.stats.udp6_scrapes_handled.load(Ordering::SeqCst), - test_counter: self.stats.test_counter.load(Ordering::SeqCst), - test_counter_udp: self.stats.test_counter_udp.load(Ordering::SeqCst), } } @@ -66,10 +64,6 @@ impl TorrentTracker { if value > 0 { self.stats.torrents_updates.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.torrents_updates.fetch_sub(-value, Ordering::SeqCst); } } - StatsEvent::TorrentsShadow => { - if value > 0 { self.stats.torrents_shadow.fetch_add(value, Ordering::SeqCst); } - if value < 0 { self.stats.torrents_shadow.fetch_sub(-value, Ordering::SeqCst); } - } StatsEvent::Users => { if value > 0 { self.stats.users.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.users.fetch_sub(-value, Ordering::SeqCst); } @@ -78,10 +72,6 @@ impl TorrentTracker { if value > 0 { self.stats.users_updates.fetch_add(value, 
Ordering::SeqCst); } if value < 0 { self.stats.users_updates.fetch_sub(-value, Ordering::SeqCst); } } - StatsEvent::UsersShadow => { - if value > 0 { self.stats.users_shadow.fetch_add(value, Ordering::SeqCst); } - if value < 0 { self.stats.users_shadow.fetch_sub(-value, Ordering::SeqCst); } - } StatsEvent::TimestampSave => { if value > 0 { self.stats.timestamp_run_save.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.timestamp_run_save.fetch_sub(-value, Ordering::SeqCst); } @@ -98,10 +88,6 @@ impl TorrentTracker { if value > 0 { self.stats.timestamp_run_keys_timeout.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.timestamp_run_keys_timeout.fetch_sub(-value, Ordering::SeqCst); } } - StatsEvent::MaintenanceMode => { - if value > 0 { self.stats.maintenance_mode.fetch_add(value, Ordering::SeqCst); } - if value < 0 { self.stats.maintenance_mode.fetch_sub(-value, Ordering::SeqCst); } - } StatsEvent::Seeds => { if value > 0 { self.stats.seeds.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.seeds.fetch_sub(-value, Ordering::SeqCst); } @@ -114,18 +100,38 @@ impl TorrentTracker { if value > 0 { self.stats.completed.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.completed.fetch_sub(-value, Ordering::SeqCst); } } + StatsEvent::WhitelistEnabled => { + if value > 0 { self.stats.whitelist_enabled.store(true, Ordering::SeqCst); } + if value < 0 { self.stats.whitelist_enabled.store(false, Ordering::SeqCst); } + } StatsEvent::Whitelist => { if value > 0 { self.stats.whitelist.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.whitelist.fetch_sub(-value, Ordering::SeqCst); } } + StatsEvent::WhitelistUpdates => { + if value > 0 { self.stats.whitelist_updates.fetch_add(value, Ordering::SeqCst); } + if value < 0 { self.stats.whitelist_updates.fetch_sub(-value, Ordering::SeqCst); } + } + StatsEvent::BlacklistEnabled => { + if value > 0 { self.stats.blacklist_enabled.store(true, Ordering::SeqCst); } + if value < 0 { self.stats.blacklist_enabled.store(false, Ordering::SeqCst); } + } StatsEvent::Blacklist => { if value > 0 { self.stats.blacklist.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.blacklist.fetch_sub(-value, Ordering::SeqCst); } } + StatsEvent::BlacklistUpdates => { + if value > 0 { self.stats.blacklist_updates.fetch_add(value, Ordering::SeqCst); } + if value < 0 { self.stats.blacklist_updates.fetch_sub(-value, Ordering::SeqCst); } + } StatsEvent::Key => { if value > 0 { self.stats.keys.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.keys.fetch_sub(-value, Ordering::SeqCst); } } + StatsEvent::KeyUpdates => { + if value > 0 { self.stats.keys_updates.fetch_add(value, Ordering::SeqCst); } + if value < 0 { self.stats.keys_updates.fetch_sub(-value, Ordering::SeqCst); } + } StatsEvent::Tcp4NotFound => { if value > 0 { self.stats.tcp4_not_found.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.tcp4_not_found.fetch_sub(-value, Ordering::SeqCst); } @@ -214,14 +220,6 @@ impl TorrentTracker { if value > 0 { self.stats.udp6_scrapes_handled.fetch_add(value, Ordering::SeqCst); } if value < 0 { self.stats.udp6_scrapes_handled.fetch_sub(-value, Ordering::SeqCst); } } - StatsEvent::TestCounter => { - if value > 0 { self.stats.test_counter.fetch_add(value, Ordering::SeqCst); } - if value < 0 { self.stats.test_counter.fetch_sub(-value, Ordering::SeqCst); } - } - StatsEvent::TestCounterUdp => { - if value > 0 { self.stats.test_counter_udp.fetch_add(value, Ordering::SeqCst); } - if value < 0 { 
self.stats.test_counter_udp.fetch_sub(-value, Ordering::SeqCst); } - } }; self.get_stats() } @@ -235,18 +233,12 @@ impl TorrentTracker { StatsEvent::TorrentsUpdates => { self.stats.torrents_updates.store(value, Ordering::SeqCst); } - StatsEvent::TorrentsShadow => { - self.stats.torrents_shadow.store(value, Ordering::SeqCst); - } StatsEvent::Users => { self.stats.users.store(value, Ordering::SeqCst); } StatsEvent::UsersUpdates => { self.stats.users_updates.store(value, Ordering::SeqCst); } - StatsEvent::UsersShadow => { - self.stats.users_shadow.store(value, Ordering::SeqCst); - } StatsEvent::TimestampSave => { self.stats.timestamp_run_save.store(value, Ordering::SeqCst); } @@ -259,9 +251,6 @@ impl TorrentTracker { StatsEvent::TimestampKeysTimeout => { self.stats.timestamp_run_keys_timeout.store(value, Ordering::SeqCst); } - StatsEvent::MaintenanceMode => { - self.stats.maintenance_mode.store(value, Ordering::SeqCst); - } StatsEvent::Seeds => { self.stats.seeds.store(value, Ordering::SeqCst); } @@ -271,15 +260,32 @@ impl TorrentTracker { StatsEvent::Completed => { self.stats.completed.store(value, Ordering::SeqCst); } + StatsEvent::WhitelistEnabled => { + if value > 0 { self.stats.whitelist_enabled.store(true, Ordering::SeqCst); } + if value < 0 { self.stats.whitelist_enabled.store(false, Ordering::SeqCst); } + } StatsEvent::Whitelist => { self.stats.whitelist.store(value, Ordering::SeqCst); } + StatsEvent::WhitelistUpdates => { + self.stats.whitelist_updates.store(value, Ordering::SeqCst); + } + StatsEvent::BlacklistEnabled => { + if value > 0 { self.stats.blacklist_enabled.store(true, Ordering::SeqCst); } + if value < 0 { self.stats.blacklist_enabled.store(false, Ordering::SeqCst); } + } StatsEvent::Blacklist => { self.stats.blacklist.store(value, Ordering::SeqCst); } + StatsEvent::BlacklistUpdates => { + self.stats.blacklist_updates.store(value, Ordering::SeqCst); + } StatsEvent::Key => { self.stats.keys.store(value, Ordering::SeqCst); } + StatsEvent::KeyUpdates => { + self.stats.keys_updates.store(value, Ordering::SeqCst); + } StatsEvent::Tcp4NotFound => { self.stats.tcp4_not_found.store(value, Ordering::SeqCst); } @@ -346,12 +352,6 @@ impl TorrentTracker { StatsEvent::Udp6ScrapesHandled => { self.stats.udp6_scrapes_handled.store(value, Ordering::SeqCst); } - StatsEvent::TestCounter => { - self.stats.test_counter.store(value, Ordering::SeqCst); - } - StatsEvent::TestCounterUdp => { - self.stats.test_counter_udp.store(value, Ordering::SeqCst); - } }; self.get_stats() } diff --git a/src/stats/structs/stats.rs b/src/stats/structs/stats.rs index 9c08c46..91873aa 100644 --- a/src/stats/structs/stats.rs +++ b/src/stats/structs/stats.rs @@ -9,20 +9,20 @@ pub struct Stats { pub timestamp_run_keys_timeout: i64, pub torrents: i64, pub torrents_updates: i64, - pub torrents_shadow: i64, pub users: i64, pub users_updates: i64, - pub users_shadow: i64, - pub maintenance_mode: i64, pub seeds: i64, pub peers: i64, pub completed: i64, pub whitelist_enabled: bool, pub whitelist: i64, + pub whitelist_updates: i64, pub blacklist_enabled: bool, pub blacklist: i64, + pub blacklist_updates: i64, pub keys_enabled: bool, pub keys: i64, + pub keys_updates: i64, pub tcp4_not_found: i64, pub tcp4_failure: i64, pub tcp4_connections_handled: i64, @@ -45,6 +45,4 @@ pub struct Stats { pub udp6_connections_handled: i64, pub udp6_announces_handled: i64, pub udp6_scrapes_handled: i64, - pub test_counter: i64, - pub test_counter_udp: i64, } \ No newline at end of file diff --git a/src/stats/structs/stats_atomics.rs 
b/src/stats/structs/stats_atomics.rs index 4908589..a27eea1 100644 --- a/src/stats/structs/stats_atomics.rs +++ b/src/stats/structs/stats_atomics.rs @@ -10,20 +10,20 @@ pub struct StatsAtomics { pub timestamp_run_keys_timeout: AtomicI64, pub torrents: AtomicI64, pub torrents_updates: AtomicI64, - pub torrents_shadow: AtomicI64, pub users: AtomicI64, pub users_updates: AtomicI64, - pub users_shadow: AtomicI64, - pub maintenance_mode: AtomicI64, pub seeds: AtomicI64, pub peers: AtomicI64, pub completed: AtomicI64, pub whitelist_enabled: AtomicBool, pub whitelist: AtomicI64, + pub whitelist_updates: AtomicI64, pub blacklist_enabled: AtomicBool, pub blacklist: AtomicI64, + pub blacklist_updates: AtomicI64, pub keys_enabled: AtomicBool, pub keys: AtomicI64, + pub keys_updates: AtomicI64, pub tcp4_not_found: AtomicI64, pub tcp4_failure: AtomicI64, pub tcp4_connections_handled: AtomicI64, @@ -46,6 +46,4 @@ pub struct StatsAtomics { pub udp6_connections_handled: AtomicI64, pub udp6_announces_handled: AtomicI64, pub udp6_scrapes_handled: AtomicI64, - pub test_counter: AtomicI64, - pub test_counter_udp: AtomicI64, } \ No newline at end of file diff --git a/src/tracker/enums.rs b/src/tracker/enums.rs index b2026b8..4e4aca9 100644 --- a/src/tracker/enums.rs +++ b/src/tracker/enums.rs @@ -1,3 +1,4 @@ pub mod announce_event; pub mod announce_event_def; -pub mod torrent_peers_type; \ No newline at end of file +pub mod torrent_peers_type; +pub mod updates_action; \ No newline at end of file diff --git a/src/tracker/enums/updates_action.rs b/src/tracker/enums/updates_action.rs new file mode 100644 index 0000000..e6ce054 --- /dev/null +++ b/src/tracker/enums/updates_action.rs @@ -0,0 +1,8 @@ +use serde::Deserialize; + +#[derive(Deserialize, PartialEq, Eq, Hash, Clone, Copy, Debug)] +pub enum UpdatesAction { + Add, + Remove, + Update, +} \ No newline at end of file diff --git a/src/tracker/impls.rs b/src/tracker/impls.rs index b684c29..6186077 100644 --- a/src/tracker/impls.rs +++ b/src/tracker/impls.rs @@ -20,4 +20,7 @@ pub mod announce_event; pub mod torrent_sharding; pub mod torrent_tracker_import; pub mod torrent_tracker_export; -pub mod torrent_tracker_cert_gen; \ No newline at end of file +pub mod torrent_tracker_cert_gen; +pub mod torrent_tracker_torrents_blacklist_updates; +pub mod torrent_tracker_torrents_whitelist_updates; +pub mod torrent_tracker_keys_updates; \ No newline at end of file diff --git a/src/tracker/impls/torrent_tracker.rs b/src/tracker/impls/torrent_tracker.rs index 81a6265..f6eb6bf 100644 --- a/src/tracker/impls/torrent_tracker.rs +++ b/src/tracker/impls/torrent_tracker.rs @@ -16,8 +16,11 @@ impl TorrentTracker { torrents_sharding: Arc::new(Default::default()), torrents_updates: Arc::new(RwLock::new(HashMap::new())), torrents_whitelist: Arc::new(RwLock::new(Vec::new())), + torrents_whitelist_updates: Arc::new(RwLock::new(HashMap::new())), torrents_blacklist: Arc::new(RwLock::new(Vec::new())), + torrents_blacklist_updates: Arc::new(RwLock::new(HashMap::new())), keys: Arc::new(RwLock::new(BTreeMap::new())), + keys_updates: Arc::new(RwLock::new(HashMap::new())), stats: Arc::new(StatsAtomics { started: AtomicI64::new(Utc::now().timestamp()), timestamp_run_save: AtomicI64::new(0), @@ -26,20 +29,20 @@ impl TorrentTracker { timestamp_run_keys_timeout: AtomicI64::new(0), torrents: AtomicI64::new(0), torrents_updates: AtomicI64::new(0), - torrents_shadow: AtomicI64::new(0), users: AtomicI64::new(0), users_updates: AtomicI64::new(0), - users_shadow: AtomicI64::new(0), - maintenance_mode: 
AtomicI64::new(0), seeds: AtomicI64::new(0), peers: AtomicI64::new(0), completed: AtomicI64::new(0), whitelist_enabled: AtomicBool::new(config.tracker_config.clone().unwrap().whitelist_enabled.unwrap()), whitelist: AtomicI64::new(0), + whitelist_updates: AtomicI64::new(0), blacklist_enabled: AtomicBool::new(config.tracker_config.clone().unwrap().blacklist_enabled.unwrap()), blacklist: AtomicI64::new(0), + blacklist_updates: AtomicI64::new(0), keys_enabled: AtomicBool::new(config.tracker_config.clone().unwrap().keys_enabled.unwrap()), keys: AtomicI64::new(0), + keys_updates: AtomicI64::new(0), tcp4_connections_handled: AtomicI64::new(0), tcp4_api_handled: AtomicI64::new(0), tcp4_announces_handled: AtomicI64::new(0), @@ -62,8 +65,6 @@ impl TorrentTracker { udp6_connections_handled: AtomicI64::new(0), udp6_announces_handled: AtomicI64::new(0), udp6_scrapes_handled: AtomicI64::new(0), - test_counter: AtomicI64::new(0), - test_counter_udp: AtomicI64::new(0) }), users: Arc::new(RwLock::new(BTreeMap::new())), users_updates: Arc::new(RwLock::new(HashMap::new())), diff --git a/src/tracker/impls/torrent_tracker_handlers.rs b/src/tracker/impls/torrent_tracker_handlers.rs index 0c6c17d..1673590 100644 --- a/src/tracker/impls/torrent_tracker_handlers.rs +++ b/src/tracker/impls/torrent_tracker_handlers.rs @@ -6,6 +6,7 @@ use log::debug; use crate::common::structs::custom_error::CustomError; use crate::common::structs::number_of_bytes::NumberOfBytes; use crate::tracker::enums::announce_event::AnnounceEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::announce_query_request::AnnounceQueryRequest; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::peer_id::PeerId; @@ -268,7 +269,8 @@ impl TorrentTracker { if data.config.database.clone().unwrap().persistent { let _ = data.add_torrent_update( announce_query.info_hash, - torrent_entry.1.clone() + torrent_entry.1.clone(), + UpdatesAction::Add ); } @@ -278,7 +280,7 @@ impl TorrentTracker { user.torrents_active.insert(announce_query.info_hash, SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs()); data.add_user(user_key.unwrap(), user.clone()); if data.config.database.clone().unwrap().persistent { - data.add_user_update(user_key.unwrap(), user); + data.add_user_update(user_key.unwrap(), user, UpdatesAction::Add); } } } @@ -312,7 +314,7 @@ impl TorrentTracker { user.torrents_active.remove(&announce_query.info_hash); data.add_user(user_key.unwrap(), user.clone()); if data.config.database.clone().unwrap().persistent { - data.add_user_update(user_key.unwrap(), user); + data.add_user_update(user_key.unwrap(), user, UpdatesAction::Add); } } } @@ -326,7 +328,8 @@ impl TorrentTracker { if data.config.database.clone().unwrap().persistent { let _ = data.add_torrent_update( announce_query.info_hash, - torrent_entry.clone() + torrent_entry.clone(), + UpdatesAction::Add ); } @@ -347,7 +350,8 @@ impl TorrentTracker { if data.config.database.clone().unwrap().persistent { let _ = data.add_torrent_update( announce_query.info_hash, - torrent_entry.1.clone() + torrent_entry.1.clone(), + UpdatesAction::Add ); } @@ -357,7 +361,7 @@ impl TorrentTracker { user.updated = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); data.add_user(user_key.unwrap(), user.clone()); if data.config.database.clone().unwrap().persistent { - data.add_user_update(user_key.unwrap(), user); + data.add_user_update(user_key.unwrap(), user, UpdatesAction::Add); } } } diff --git 
a/src/tracker/impls/torrent_tracker_import.rs b/src/tracker/impls/torrent_tracker_import.rs index 88ae7c4..f18b20b 100644 --- a/src/tracker/impls/torrent_tracker_import.rs +++ b/src/tracker/impls/torrent_tracker_import.rs @@ -1,12 +1,16 @@ +use std::collections::BTreeMap; use std::fs; use std::process::exit; use std::sync::Arc; use log::{error, info}; use serde_json::Value; use crate::structs::Cli; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_entry::TorrentEntry; use crate::tracker::structs::torrent_tracker::TorrentTracker; +use crate::tracker::structs::user_entry_item::UserEntryItem; +use crate::tracker::structs::user_id::UserId; impl TorrentTracker { pub async fn import(&self, args: &Cli, tracker: Arc) @@ -31,7 +35,7 @@ impl TorrentTracker { peers: Default::default(), completed, updated: std::time::Instant::now(), - }); + }, UpdatesAction::Add); } match tracker.save_torrent_updates(tracker.clone()).await { Ok(_) => {} @@ -56,9 +60,9 @@ impl TorrentTracker { Ok(hash_result) => { InfoHash(<[u8; 20]>::try_from(hash_result[0..20].as_ref()).unwrap()) } Err(_) => { panic!("[IMPORT] Torrent hash is not hex or invalid!"); } }; - tracker.add_whitelist(info_hash); + tracker.add_whitelist_update(info_hash, UpdatesAction::Add); } - match tracker.save_whitelist(tracker.clone(), tracker.get_whitelist()).await { + match tracker.save_whitelist_updates(tracker.clone()).await { Ok(_) => {} Err(_) => { panic!("[IMPORT] Unable to save whitelist to the database!"); @@ -82,9 +86,9 @@ impl TorrentTracker { Ok(hash_result) => { InfoHash(<[u8; 20]>::try_from(hash_result[0..20].as_ref()).unwrap()) } Err(_) => { panic!("[IMPORT] Torrent hash is not hex or invalid!"); } }; - tracker.add_blacklist(info_hash); + tracker.add_blacklist_update(info_hash, UpdatesAction::Add); } - match tracker.save_blacklist(tracker.clone(), tracker.get_blacklist()).await { + match tracker.save_blacklist_updates(tracker.clone()).await { Ok(_) => {} Err(_) => { panic!("[IMPORT] Unable to save blacklist to the database!"); } } @@ -101,20 +105,86 @@ impl TorrentTracker { match fs::read(args.import_file_keys.as_str()) { Ok(data) => { let keys: Value = serde_json::from_str(String::from_utf8(data).unwrap().as_str()).unwrap(); - for value in keys.as_array().unwrap() { - let info_hash = match hex::decode(value.as_str().unwrap()) { + for (key, value) in keys.as_object().unwrap() { + let timeout = match value.as_i64() { + None => { panic!("[IMPORT] timeout value doesn't exist or is missing!"); } + Some(timeout) => { timeout } + }; + let hash = match hex::decode(key.as_str()) { Ok(hash_result) => { InfoHash(<[u8; 20]>::try_from(hash_result[0..20].as_ref()).unwrap()) } - Err(_) => { panic!("[IMPORT] Torrent hash is not hex or invalid!"); } + Err(_) => { panic!("[IMPORT] Key hash is not hex or invalid!"); } }; - tracker.add_blacklist(info_hash); + tracker.add_key_update(hash, timeout, UpdatesAction::Add); } - match tracker.save_blacklist(tracker.clone(), tracker.get_blacklist()).await { + match tracker.save_key_updates(tracker.clone()).await { Ok(_) => {} - Err(_) => { panic!("[IMPORT] Unable to save blacklist to the database!"); } + Err(_) => { panic!("[IMPORT] Unable to save keys to the database!"); } } } Err(error) => { - error!("[IMPORT] The blacklists file {} could not be imported!", args.import_file_blacklists.as_str()); + error!("[IMPORT] The keys file {} could not be imported!", args.import_file_keys.as_str()); + panic!("[IMPORT] {}", error) + 
} + } + } + + if tracker.config.tracker_config.clone().unwrap().users_enabled.unwrap() { + info!("[IMPORT] Importing users to memory {}", args.import_file_users.as_str()); + match fs::read(args.import_file_users.as_str()) { + Ok(data) => { + let users: Value = serde_json::from_str(String::from_utf8(data).unwrap().as_str()).unwrap(); + for (key, value) in users.as_object().unwrap() { + let user_hash = match hex::decode(key.as_str()) { + Ok(hash_result) => { UserId(<[u8; 20]>::try_from(hash_result[0..20].as_ref()).unwrap()) } + Err(_) => { panic!("[IMPORT] User hash is not hex or invalid!"); } + }; + let key_hash = match hex::decode(value["key"].as_str().unwrap()) { + Ok(hash_result) => { UserId(<[u8; 20]>::try_from(hash_result[0..20].as_ref()).unwrap()) } + Err(_) => { panic!("[IMPORT] Key hash is not hex or invalid!"); } + }; + let user_id = value["user_id"].as_u64(); + let user_uuid = value["user_uuid"].as_str().map(String::from); + let uploaded = match value["uploaded"].as_u64() { + None => { panic!("[IMPORT] 'uploaded' field doesn't exist or is missing!"); } + Some(uploaded) => { uploaded } + }; + let downloaded = match value["downloaded"].as_u64() { + None => { panic!("[IMPORT] 'downloaded' field doesn't exist or is missing!"); } + Some(downloaded) => { downloaded } + }; + let completed = match value["completed"].as_u64() { + None => { panic!("[IMPORT] 'completed' field doesn't exist or is missing!"); } + Some(completed) => { completed } + }; + let updated = match value["updated"].as_u64() { + None => { panic!("[IMPORT] 'updated' field doesn't exist or is missing!"); } + Some(updated) => { updated } + }; + let active = match value["active"].as_u64() { + None => { panic!("[IMPORT] 'active' field doesn't exist or is missing!"); } + Some(active) => { active as u8 } + }; + let _ = tracker.add_user_update(user_hash, UserEntryItem { + key: key_hash, + user_id, + user_uuid, + uploaded, + downloaded, + completed, + updated, + active, + torrents_active: BTreeMap::new() + }, UpdatesAction::Add); + } + match tracker.save_user_updates(tracker.clone()).await { + Ok(_) => {} + Err(_) => { + panic!("[IMPORT] Unable to save users to the database!"); + } + } + } + Err(error) => { + error!("[IMPORT] The users file {} could not be imported!", args.import_file_users.as_str()); panic!("[IMPORT] {}", error) } } diff --git a/src/tracker/impls/torrent_tracker_keys.rs b/src/tracker/impls/torrent_tracker_keys.rs index 6a967b9..8356dde 100644 --- a/src/tracker/impls/torrent_tracker_keys.rs +++ b/src/tracker/impls/torrent_tracker_keys.rs @@ -5,6 +5,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use chrono::{TimeZone, Utc}; use log::{error, info}; use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_tracker::TorrentTracker; @@ -16,15 +17,15 @@ impl TorrentTracker { } } - pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, i64>) -> Result<(), ()> + pub async fn save_keys(&self, tracker: Arc<TorrentTracker>, keys: BTreeMap<InfoHash, (i64, UpdatesAction)>) -> Result<(), ()> { match self.sqlx.save_keys(tracker.clone(), keys.clone()).await { Ok(keys_count) => { - info!("[SAVE BLACKLIST] Saved {} keys", keys_count); + info!("[SYNC KEYS] Synced {} keys", keys_count); Ok(()) } Err(_) => { - error!("[SAVE BLACKLIST] Unable to save {} keys", keys.len()); + error!("[SYNC KEYS] Unable to sync {} keys", keys.len()); Err(()) } } diff --git a/src/tracker/impls/torrent_tracker_keys_updates.rs
b/src/tracker/impls/torrent_tracker_keys_updates.rs new file mode 100644 index 0000000..d9f2d56 --- /dev/null +++ b/src/tracker/impls/torrent_tracker_keys_updates.rs @@ -0,0 +1,85 @@ +use std::collections::{BTreeMap, HashMap}; +use std::collections::hash_map::Entry; +use std::sync::Arc; +use std::time::SystemTime; +use log::{error, info}; +use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; +use crate::tracker::structs::info_hash::InfoHash; +use crate::tracker::structs::torrent_tracker::TorrentTracker; + +impl TorrentTracker { + pub fn add_key_update(&self, info_hash: InfoHash, timeout: i64, updates_action: UpdatesAction) -> bool + { + let map = self.keys_updates.clone(); + let mut lock = map.write(); + match lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (info_hash, timeout, updates_action)) { + None => { + self.update_stats(StatsEvent::KeyUpdates, 1); + true + } + Some(_) => { + false + } + } + } + + pub fn get_key_updates(&self) -> HashMap<u128, (InfoHash, i64, UpdatesAction)> + { + let map = self.keys_updates.clone(); + let lock = map.read_recursive(); + lock.clone() + } + + pub fn remove_key_update(&self, timestamp: &u128) -> bool + { + let map = self.keys_updates.clone(); + let mut lock = map.write(); + match lock.remove(timestamp) { + None => { false } + Some(_) => { + self.update_stats(StatsEvent::KeyUpdates, -1); + true + } + } + } + + pub fn clear_key_updates(&self) + { + let map = self.keys_updates.clone(); + let mut lock = map.write(); + lock.clear(); + self.set_stats(StatsEvent::KeyUpdates, 0); + } + + pub async fn save_key_updates(&self, torrent_tracker: Arc<TorrentTracker>) -> Result<(), ()> + { + let mut mapping: HashMap<InfoHash, (u128, i64, UpdatesAction)> = HashMap::new(); + for (timestamp, (info_hash, timeout, updates_action)) in self.get_key_updates().iter() { + match mapping.entry(*info_hash) { + Entry::Occupied(mut o) => { + o.insert((o.get().0, *timeout, *updates_action)); + self.remove_key_update(timestamp); + } + Entry::Vacant(v) => { + v.insert((*timestamp, *timeout, *updates_action)); + } + } + } + match self.save_keys(torrent_tracker.clone(), mapping.clone().into_iter().map(|(info_hash, (_, timeout, updates_action))| { + (info_hash, (timeout, updates_action)) + }).collect::<BTreeMap<InfoHash, (i64, UpdatesAction)>>()).await { + Ok(_) => { + info!("[SYNC KEY UPDATES] Synced {} keys", mapping.len()); + for (_, (timestamp, _, _)) in mapping.into_iter() { + self.remove_key_update(&timestamp); + } + Ok(()) + } + Err(_) => { + error!("[SYNC KEY UPDATES] Unable to sync {} keys", mapping.len()); + Err(()) + } + } + } +} \ No newline at end of file
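The `*_updates` journals introduced by this file all share one idea: every queued mutation is keyed by a `u128` nanosecond timestamp, so repeated changes to the same hash each get their own slot until the next flush. A minimal standalone sketch of that journaling step, using a plain byte array and an `i64` timeout as stand-ins for the tracker's `InfoHash`/payload types (not the crate's actual API):

use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};

// Queue one mutation under a fresh nanosecond key, mirroring add_key_update.
fn queue_update(journal: &mut HashMap<u128, ([u8; 20], i64)>, hash: [u8; 20], timeout: i64) -> bool {
    let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos();
    // insert() returning None means the slot was free; Some means two mutations
    // landed on the same nanosecond, which the real method reports as `false`.
    journal.insert(now, (hash, timeout)).is_none()
}

fn main() {
    let mut journal: HashMap<u128, ([u8; 20], i64)> = HashMap::new();
    assert!(queue_update(&mut journal, [0u8; 20], 60));
    queue_update(&mut journal, [0u8; 20], 120); // same hash, separate journal slot
    assert!(!journal.is_empty());
}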
diff --git a/src/tracker/impls/torrent_tracker_torrents.rs b/src/tracker/impls/torrent_tracker_torrents.rs index 34ec85d..bdcf84d 100644 --- a/src/tracker/impls/torrent_tracker_torrents.rs +++ b/src/tracker/impls/torrent_tracker_torrents.rs @@ -3,6 +3,7 @@ use std::collections::btree_map::Entry; use std::sync::Arc; use log::{error, info}; use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_entry::TorrentEntry; use crate::tracker::structs::torrent_tracker::TorrentTracker; @@ -15,15 +16,15 @@ impl TorrentTracker { } } - pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, TorrentEntry>) -> Result<(), ()> + pub async fn save_torrents(&self, tracker: Arc<TorrentTracker>, torrents: BTreeMap<InfoHash, (TorrentEntry, UpdatesAction)>) -> Result<(), ()> { match self.sqlx.save_torrents(tracker.clone(), torrents.clone()).await { Ok(_) => { - info!("[SAVE TORRENTS] Saved {} torrents", torrents.len()); + info!("[SYNC TORRENTS] Synced {} torrents", torrents.len()); Ok(()) } Err(_) => { - error!("[SAVE TORRENTS] Unable to save {} torrents", torrents.len()); + error!("[SYNC TORRENTS] Unable to sync {} torrents", torrents.len()); Err(()) } } @@ -50,10 +51,23 @@ impl TorrentTracker { match lock.entry(info_hash) { Entry::Vacant(v) => { self.update_stats(StatsEvent::Torrents, 1); + self.update_stats(StatsEvent::Completed, torrent_entry.completed as i64); + self.update_stats(StatsEvent::Seeds, torrent_entry.seeds.len() as i64); + self.update_stats(StatsEvent::Peers, torrent_entry.peers.len() as i64); (v.insert(torrent_entry).clone(), true) } - Entry::Occupied(o) => { - (o.get().clone(), false) + Entry::Occupied(mut o) => { + self.update_stats(StatsEvent::Completed, 0i64 - o.get().completed as i64); + self.update_stats(StatsEvent::Completed, torrent_entry.completed as i64); + o.get_mut().completed = torrent_entry.completed; + self.update_stats(StatsEvent::Seeds, 0i64 - o.get().seeds.len() as i64); + self.update_stats(StatsEvent::Seeds, torrent_entry.seeds.len() as i64); + o.get_mut().seeds = torrent_entry.seeds.clone(); + self.update_stats(StatsEvent::Peers, 0i64 - o.get().peers.len() as i64); + self.update_stats(StatsEvent::Peers, torrent_entry.peers.len() as i64); + o.get_mut().peers = torrent_entry.peers.clone(); + o.get_mut().updated = torrent_entry.updated; + (torrent_entry.clone(), false) } } } diff --git a/src/tracker/impls/torrent_tracker_torrents_blacklist.rs b/src/tracker/impls/torrent_tracker_torrents_blacklist.rs index da38273..80d3eaa 100644 --- a/src/tracker/impls/torrent_tracker_torrents_blacklist.rs +++ b/src/tracker/impls/torrent_tracker_torrents_blacklist.rs @@ -1,6 +1,7 @@ use std::sync::Arc; use log::{error, info}; use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_tracker::TorrentTracker; @@ -12,15 +13,15 @@ impl TorrentTracker { } } - pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, hashes: Vec<InfoHash>) -> Result<(), ()> + pub async fn save_blacklist(&self, tracker: Arc<TorrentTracker>, hashes: Vec<(InfoHash, UpdatesAction)>) -> Result<(), ()> { match self.sqlx.save_blacklist(tracker.clone(), hashes.clone()).await { Ok(_) => { - info!("[SAVE BLACKLIST] Saved {} blacklists", hashes.len()); + info!("[SYNC BLACKLIST] Synced {} blacklists", hashes.len()); Ok(()) } Err(_) => { - error!("[SAVE BLACKLIST] Unable to save {} blacklists", hashes.len()); + error!("[SYNC BLACKLIST] Unable to sync {} blacklists", hashes.len()); Err(()) } } diff --git a/src/tracker/impls/torrent_tracker_torrents_blacklist_updates.rs b/src/tracker/impls/torrent_tracker_torrents_blacklist_updates.rs new file mode 100644 index 0000000..f55f59d --- /dev/null +++ b/src/tracker/impls/torrent_tracker_torrents_blacklist_updates.rs @@ -0,0 +1,94 @@ +use std::collections::HashMap; +use std::collections::hash_map::Entry; +use std::sync::Arc; +use std::time::SystemTime; +use log::{error, info}; +use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; +use crate::tracker::structs::info_hash::InfoHash; +use crate::tracker::structs::torrent_tracker::TorrentTracker; + +impl TorrentTracker { + pub fn add_blacklist_update(&self, info_hash: InfoHash, updates_action: UpdatesAction) -> bool + { + let map = self.torrents_blacklist_updates.clone(); + let mut lock = map.write(); + match
lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (info_hash, updates_action)) { + None => { + self.update_stats(StatsEvent::BlacklistUpdates, 1); + true + } + Some(_) => { + false + } + } + } + + pub fn add_blacklist_updates(&self, hashes: Vec<(InfoHash, UpdatesAction)>) -> Vec<(InfoHash, bool)> + { + let mut returned_data = Vec::new(); + for (info_hash, updates_action) in hashes { + returned_data.push((info_hash, self.add_blacklist_update(info_hash, updates_action))); + } + returned_data + } + + pub fn get_blacklist_updates(&self) -> HashMap<u128, (InfoHash, UpdatesAction)> + { + let map = self.torrents_blacklist_updates.clone(); + let lock = map.read_recursive(); + lock.clone() + } + + pub fn remove_blacklist_update(&self, timestamp: &u128) -> bool + { + let map = self.torrents_blacklist_updates.clone(); + let mut lock = map.write(); + match lock.remove(timestamp) { + None => { false } + Some(_) => { + self.update_stats(StatsEvent::BlacklistUpdates, -1); + true + } + } + } + + pub fn clear_blacklist_updates(&self) + { + let map = self.torrents_blacklist_updates.clone(); + let mut lock = map.write(); + lock.clear(); + self.set_stats(StatsEvent::BlacklistUpdates, 0); + } + + pub async fn save_blacklist_updates(&self, torrent_tracker: Arc<TorrentTracker>) -> Result<(), ()> + { + let mut mapping: HashMap<InfoHash, (u128, UpdatesAction)> = HashMap::new(); + for (timestamp, (info_hash, updates_action)) in self.get_blacklist_updates().iter() { + match mapping.entry(*info_hash) { + Entry::Occupied(mut o) => { + o.insert((o.get().0, *updates_action)); + self.remove_blacklist_update(timestamp); + } + Entry::Vacant(v) => { + v.insert((*timestamp, *updates_action)); + } + } + } + match self.save_blacklist(torrent_tracker.clone(), mapping.clone().into_iter().map(|(info_hash, (_, updates_action))| { + (info_hash, updates_action) + }).collect::<Vec<(InfoHash, UpdatesAction)>>()).await { + Ok(_) => { + info!("[SYNC BLACKLIST UPDATES] Synced {} blacklists", mapping.len()); + for (_, (timestamp, _)) in mapping.into_iter() { + self.remove_blacklist_update(&timestamp); + } + Ok(()) + } + Err(_) => { + error!("[SYNC BLACKLIST UPDATES] Unable to sync {} blacklists", mapping.len()); + Err(()) + } + } + } +} \ No newline at end of file
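Before each flush, `save_blacklist_updates` collapses the timestamp-keyed journal into one pending action per hash, keeping the first timestamp it sees so the surviving journal entry can be dropped once the batch commits. A standalone sketch of that dedup step, with a plain byte array standing in for `InfoHash` (simplified types, not the crate's actual API):

use std::collections::HashMap;
use std::collections::hash_map::Entry;

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum UpdatesAction { Add, Remove, Update }

// Collapse the journal to one (timestamp, action) pair per hash.
fn collapse(journal: &HashMap<u128, ([u8; 20], UpdatesAction)>) -> HashMap<[u8; 20], (u128, UpdatesAction)> {
    let mut mapping: HashMap<[u8; 20], (u128, UpdatesAction)> = HashMap::new();
    for (timestamp, (hash, action)) in journal {
        match mapping.entry(*hash) {
            Entry::Occupied(mut o) => {
                // Keep the stored timestamp; whichever duplicate is visited
                // later supplies the action (HashMap order is arbitrary).
                let kept_timestamp = o.get().0;
                o.insert((kept_timestamp, *action));
            }
            Entry::Vacant(v) => { v.insert((*timestamp, *action)); }
        }
    }
    mapping
}

fn main() {
    let mut journal = HashMap::new();
    journal.insert(1u128, ([0u8; 20], UpdatesAction::Add));
    journal.insert(2u128, ([0u8; 20], UpdatesAction::Remove));
    assert_eq!(collapse(&journal).len(), 1); // two journal slots, one pending action
}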
diff --git a/src/tracker/impls/torrent_tracker_torrents_updates.rs b/src/tracker/impls/torrent_tracker_torrents_updates.rs index b220eb0..42187b1 100644 --- a/src/tracker/impls/torrent_tracker_torrents_updates.rs +++ b/src/tracker/impls/torrent_tracker_torrents_updates.rs @@ -1,18 +1,20 @@ use std::collections::{BTreeMap, HashMap}; +use std::collections::hash_map::Entry; use std::sync::Arc; use std::time::SystemTime; use log::{error, info}; use crate::stats::enums::stats_event::StatsEvent; +use crate::tracker::enums::updates_action::UpdatesAction; use crate::tracker::structs::info_hash::InfoHash; use crate::tracker::structs::torrent_entry::TorrentEntry; use crate::tracker::structs::torrent_tracker::TorrentTracker; impl TorrentTracker { - pub fn add_torrent_update(&self, info_hash: InfoHash, torrent_entry: TorrentEntry) -> bool + pub fn add_torrent_update(&self, info_hash: InfoHash, torrent_entry: TorrentEntry, updates_action: UpdatesAction) -> bool { let map = self.torrents_updates.clone(); let mut lock = map.write(); - match lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (info_hash, torrent_entry.clone())) { + match lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (info_hash, torrent_entry.clone(), updates_action)) { None => { self.update_stats(StatsEvent::TorrentsUpdates, 1); true @@ -23,17 +25,17 @@ impl TorrentTracker { } } - pub fn add_torrent_updates(&self, hashes: HashMap<u128, (InfoHash, TorrentEntry)>) -> BTreeMap<InfoHash, bool> + pub fn add_torrent_updates(&self, hashes: HashMap<u128, (InfoHash, TorrentEntry, UpdatesAction)>) -> BTreeMap<InfoHash, bool> { let mut returned_data = BTreeMap::new(); - for (timestamp, (info_hash, torrent_entry)) in hashes.iter() { - returned_data.insert(*info_hash, self.add_torrent_update(*info_hash, torrent_entry.clone())); + for (timestamp, (info_hash, torrent_entry, updates_action)) in hashes.iter() { + returned_data.insert(*info_hash, self.add_torrent_update(*info_hash, torrent_entry.clone(), *updates_action)); let _ = self.remove_torrent_update(timestamp); } returned_data } - pub fn get_torrent_updates(&self) -> HashMap<u128, (InfoHash, TorrentEntry)> + pub fn get_torrent_updates(&self) -> HashMap<u128, (InfoHash, TorrentEntry, UpdatesAction)> { let map = self.torrents_updates.clone(); let lock = map.read_recursive(); @@ -63,40 +65,30 @@ impl TorrentTracker { pub async fn save_torrent_updates(&self, torrent_tracker: Arc<TorrentTracker>) -> Result<(), ()> { - let mut hashmapping: HashMap<InfoHash, (Vec<u128>, TorrentEntry)> = HashMap::new(); - let mut hashmap: BTreeMap<InfoHash, TorrentEntry> = BTreeMap::new(); - let updates = self.get_torrent_updates(); - - // Build the actually updates for SQL, adding the timestamps into a vector for removal afterward. - for (timestamp, (info_hash, torrent_entry)) in updates.iter() { - match hashmapping.get_mut(info_hash) { - None => { - hashmapping.insert(*info_hash, (vec![*timestamp], torrent_entry.clone())); - hashmap.insert(*info_hash, torrent_entry.clone()); + let mut mapping: HashMap<InfoHash, (u128, TorrentEntry, UpdatesAction)> = HashMap::new(); + for (timestamp, (info_hash, torrent_entry, updates_action)) in self.get_torrent_updates().iter() { + match mapping.entry(*info_hash) { + Entry::Occupied(mut o) => { + o.insert((o.get().0, torrent_entry.clone(), *updates_action)); + self.remove_torrent_update(timestamp); } - Some((timestamps, _)) => { - if !timestamps.contains(timestamp) { - timestamps.push(*timestamp); - } - hashmap.insert(*info_hash, torrent_entry.clone()); + Entry::Vacant(v) => { + v.insert((*timestamp, torrent_entry.clone(), *updates_action)); } } } - - // Now we're going to save the torrents in a list, and depending on what we get returned, we remove them from the updates list. - match self.save_torrents(torrent_tracker.clone(), hashmap).await { + match self.save_torrents(torrent_tracker.clone(), mapping.clone().into_iter().map(|(info_hash, (_, torrent_entry, updates_action))| { + (info_hash, (torrent_entry.clone(), updates_action)) + }).collect::<BTreeMap<InfoHash, (TorrentEntry, UpdatesAction)>>()).await { Ok(_) => { - // We can remove the updates keys, since they are updated.
- for (_, (timestamps, _)) in hashmapping.iter() { - for timestamp in timestamps.iter() { - self.remove_torrent_update(timestamp); - } + info!("[SYNC TORRENT UPDATES] Synced {} torrents", mapping.len()); + for (_, (timestamp, _, _)) in mapping.into_iter() { + self.remove_torrent_update(&timestamp); } - info!("[SAVE TORRENTS UPDATES] Saved {} torrents", hashmapping.len()); Ok(()) } Err(_) => { - error!("[SAVE TORRENTS UPDATES] Unable to save {} torrents", hashmapping.len()); + error!("[SYNC TORRENT UPDATES] Unable to sync {} torrents", mapping.len()); Err(()) } }
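The `UpdatesAction` carried through these journals is what the database connectors branch on, as in the SQLite `save_users` hunk earlier: `Remove` becomes a `DELETE`, while `Add` and `Update` share the upsert path. A minimal sketch of that branch shape, with hypothetical table and column names rather than the configurable `structure` fields the connectors actually read:

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum UpdatesAction { Add, Update, Remove }

// Map a pending action to the kind of SQL statement it produces.
fn statement_for(action: UpdatesAction, key: &str) -> String {
    match action {
        UpdatesAction::Remove => format!("DELETE FROM `users` WHERE `uuid`='{}'", key),
        UpdatesAction::Add | UpdatesAction::Update => format!(
            "INSERT INTO `users` (`uuid`) VALUES ('{}') ON CONFLICT (`uuid`) DO NOTHING", key
        ),
    }
}

fn main() {
    println!("{}", statement_for(UpdatesAction::Remove, "abcdef"));
    println!("{}", statement_for(UpdatesAction::Add, "abcdef"));
}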
+                self.update_stats(StatsEvent::WhitelistUpdates, -1);
+                true
+            }
+        }
+    }
+
+    pub fn clear_whitelist_updates(&self)
+    {
+        let map = self.torrents_whitelist_updates.clone();
+        let mut lock = map.write();
+        lock.clear();
+        self.set_stats(StatsEvent::WhitelistUpdates, 0);
+    }
+
+    pub async fn save_whitelist_updates(&self, torrent_tracker: Arc<TorrentTracker>) -> Result<(), ()>
+    {
+        let mut mapping: HashMap<InfoHash, (u128, UpdatesAction)> = HashMap::new();
+        for (timestamp, (info_hash, updates_action)) in self.get_whitelist_updates().iter() {
+            match mapping.entry(*info_hash) {
+                Entry::Occupied(mut o) => {
+                    o.insert((o.get().0, *updates_action));
+                    self.remove_whitelist_update(timestamp);
+                }
+                Entry::Vacant(v) => {
+                    v.insert((*timestamp, *updates_action));
+                }
+            }
+        }
+        match self.save_whitelist(torrent_tracker.clone(), mapping.clone().into_iter().map(|(info_hash, (_, updates_action))| {
+            (info_hash, updates_action)
+        }).collect::<Vec<(InfoHash, UpdatesAction)>>()).await {
+            Ok(_) => {
+                info!("[SYNC WHITELIST UPDATES] Synced {} whitelists", mapping.len());
+                for (_, (timestamp, _)) in mapping.into_iter() {
+                    self.remove_whitelist_update(&timestamp);
+                }
+                Ok(())
+            }
+            Err(_) => {
+                error!("[SYNC WHITELIST UPDATES] Unable to sync {} whitelists", mapping.len());
+                Err(())
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/tracker/impls/torrent_tracker_users.rs b/src/tracker/impls/torrent_tracker_users.rs
index 77e89d5..c2c995f 100644
--- a/src/tracker/impls/torrent_tracker_users.rs
+++ b/src/tracker/impls/torrent_tracker_users.rs
@@ -5,6 +5,7 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use chrono::{TimeZone, Utc};
 use log::{error, info};
 use crate::stats::enums::stats_event::StatsEvent;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
 use crate::tracker::structs::torrent_tracker::TorrentTracker;
 use crate::tracker::structs::user_entry_item::UserEntryItem;
@@ -18,34 +19,20 @@ impl TorrentTracker {
         }
     }
 
-    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, UserEntryItem>) -> Result<(), ()>
+    pub async fn save_users(&self, tracker: Arc<TorrentTracker>, users: BTreeMap<UserId, (UserEntryItem, UpdatesAction)>) -> Result<(), ()>
     {
         match self.sqlx.save_users(tracker.clone(), users.clone()).await {
             Ok(_) => {
-                info!("[SAVE USERS] Saved {} users", users.len());
+                info!("[SYNC USERS] Synced {} users", users.len());
                 Ok(())
             }
             Err(_) => {
-                error!("[SAVE USERS] Unable to save {} users", users.len());
+                error!("[SYNC USERS] Unable to sync {} users", users.len());
                 Err(())
             }
         }
     }
 
-    pub fn get_user(&self, id: UserId) -> Option<UserEntryItem>
-    {
-        let map = self.users.clone();
-        let lock = map.read_recursive();
-        lock.get(&id).cloned()
-    }
-
-    pub fn get_users(&self) -> BTreeMap<UserId, UserEntryItem>
-    {
-        let map = self.users.clone();
-        let lock = map.read_recursive();
-        lock.clone()
-    }
-
     pub fn add_user(&self, user_id: UserId, user_entry_item: UserEntryItem) -> bool
     {
         let map = self.users.clone();
@@ -80,6 +67,20 @@ impl TorrentTracker {
         }
     }
 
+    pub fn get_user(&self, id: UserId) -> Option<UserEntryItem>
+    {
+        let map = self.users.clone();
+        let lock = map.read_recursive();
+        lock.get(&id).cloned()
+    }
+
+    pub fn get_users(&self) -> BTreeMap<UserId, UserEntryItem>
+    {
+        let map = self.users.clone();
+        let lock = map.read_recursive();
+        lock.clone()
+    }
+
     pub fn remove_user(&self, user_id: UserId) -> Option<UserEntryItem>
     {
         let map = self.users.clone();
diff --git a/src/tracker/impls/torrent_tracker_users_updates.rs b/src/tracker/impls/torrent_tracker_users_updates.rs
index ee59a1d..3143be4 100644
--- a/src/tracker/impls/torrent_tracker_users_updates.rs
+++ b/src/tracker/impls/torrent_tracker_users_updates.rs
@@ -1,17 +1,20 @@
 use std::collections::{BTreeMap, HashMap};
+use std::collections::hash_map::Entry;
 use std::sync::Arc;
 use std::time::SystemTime;
+use log::{error, info};
 use crate::stats::enums::stats_event::StatsEvent;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::torrent_tracker::TorrentTracker;
 use crate::tracker::structs::user_entry_item::UserEntryItem;
 use crate::tracker::structs::user_id::UserId;
 
 impl TorrentTracker {
-    pub fn add_user_update(&self, user_id: UserId, user_entry_item: UserEntryItem) -> (UserEntryItem, bool)
+    pub fn add_user_update(&self, user_id: UserId, user_entry_item: UserEntryItem, updates_action: UpdatesAction) -> (UserEntryItem, bool)
     {
         let map = self.users_updates.clone();
         let mut lock = map.write();
-        match lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (user_id, user_entry_item.clone())) {
+        match lock.insert(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_nanos(), (user_id, user_entry_item.clone(), updates_action)) {
             None => {
                 self.update_stats(StatsEvent::UsersUpdates, 1);
                 (user_entry_item, true)
@@ -22,7 +25,7 @@ impl TorrentTracker {
         }
     }
 
-    pub fn get_user_updates(&self) -> HashMap<u128, (UserId, UserEntryItem)>
+    pub fn get_user_updates(&self) -> HashMap<u128, (UserId, UserEntryItem, UpdatesAction)>
     {
         let map = self.users_updates.clone();
         let lock = map.read_recursive();
@@ -50,35 +53,33 @@ impl TorrentTracker {
         self.set_stats(StatsEvent::UsersUpdates, 0);
     }
 
-    pub async fn save_user_updates(&self, torrent_tracker: Arc<TorrentTracker>)
+    pub async fn save_user_updates(&self, torrent_tracker: Arc<TorrentTracker>) -> Result<(), ()>
     {
-        let mut hashmapping: HashMap<UserId, (Vec<u128>, UserEntryItem)> = HashMap::new();
-        let mut hashmap: BTreeMap<UserId, UserEntryItem> = BTreeMap::new();
-        let updates = self.get_user_updates();
-
-        // Build the actually updates for SQL, adding the timestamps into a vector for removal afterward.
-        for (timestamp, (user_id, user_entry_item)) in updates.iter() {
-            match hashmapping.get_mut(user_id) {
-                None => {
-                    hashmapping.insert(*user_id, (vec![*timestamp], user_entry_item.clone()));
-                    hashmap.insert(*user_id, user_entry_item.clone());
+        let mut mapping: HashMap<UserId, (u128, UserEntryItem, UpdatesAction)> = HashMap::new();
+        for (timestamp, (user_id, user_entry_item, updates_action)) in self.get_user_updates().iter() {
+            match mapping.entry(*user_id) {
+                Entry::Occupied(mut o) => {
+                    o.insert((o.get().0, user_entry_item.clone(), *updates_action));
+                    self.remove_user_update(timestamp);
                 }
-                Some((timestamps, _)) => {
-                    if !timestamps.contains(timestamp) {
-                        timestamps.push(*timestamp);
-                    }
-                    hashmap.insert(*user_id, user_entry_item.clone());
+                Entry::Vacant(v) => {
+                    v.insert((*timestamp, user_entry_item.clone(), *updates_action));
                 }
             }
         }
-
-        // Now we're going to save the torrents in a list, and depending on what we get returned, we remove them from the updates list.
-        if self.save_users(torrent_tracker.clone(), hashmap).await.is_ok() {
-            // We can remove the updates keys, since they are updated.
-            for (_, (timestamps, _)) in hashmapping.iter() {
-                for timestamp in timestamps.iter() {
-                    self.remove_user_update(timestamp);
+        match self.save_users(torrent_tracker.clone(), mapping.clone().into_iter().map(|(user_id, (_, user_entry_item, updates_action))| {
+            (user_id, (user_entry_item.clone(), updates_action))
+        }).collect::<BTreeMap<UserId, (UserEntryItem, UpdatesAction)>>()).await {
+            Ok(_) => {
+                info!("[SYNC USER UPDATES] Synced {} users", mapping.len());
+                for (_, (timestamp, _, _)) in mapping.into_iter() {
+                    self.remove_user_update(&timestamp);
                 }
+                Ok(())
+            }
+            Err(_) => {
+                error!("[SYNC USER UPDATES] Unable to sync {} users", mapping.len());
+                Err(())
             }
         }
     }
diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs
index aae7dea..ca4419a 100644
--- a/src/tracker/mod.rs
+++ b/src/tracker/mod.rs
@@ -1,4 +1,5 @@
 pub mod enums;
 pub mod impls;
 pub mod structs;
+pub mod types;
 pub mod tests;
\ No newline at end of file
diff --git a/src/tracker/structs/torrent_tracker.rs b/src/tracker/structs/torrent_tracker.rs
index 4cbb287..f6c9313 100644
--- a/src/tracker/structs/torrent_tracker.rs
+++ b/src/tracker/structs/torrent_tracker.rs
@@ -4,21 +4,27 @@ use parking_lot::RwLock;
 use crate::config::structs::configuration::Configuration;
 use crate::database::structs::database_connector::DatabaseConnector;
 use crate::stats::structs::stats_atomics::StatsAtomics;
+use crate::tracker::enums::updates_action::UpdatesAction;
 use crate::tracker::structs::info_hash::InfoHash;
-use crate::tracker::structs::torrent_entry::TorrentEntry;
 use crate::tracker::structs::torrent_sharding::TorrentSharding;
 use crate::tracker::structs::user_entry_item::UserEntryItem;
 use crate::tracker::structs::user_id::UserId;
+use crate::tracker::types::keys_updates::KeysUpdates;
+use crate::tracker::types::torrents_updates::TorrentsUpdates;
+use crate::tracker::types::users_updates::UsersUpdates;
 
 pub struct TorrentTracker {
     pub config: Arc<Configuration>,
     pub sqlx: DatabaseConnector,
     pub torrents_sharding: Arc<TorrentSharding>,
-    pub torrents_updates: Arc<RwLock<HashMap<u128, (InfoHash, TorrentEntry)>>>,
+    pub torrents_updates: TorrentsUpdates,
     pub torrents_whitelist: Arc<RwLock<Vec<InfoHash>>>,
+    pub torrents_whitelist_updates: Arc<RwLock<HashMap<u128, (InfoHash, UpdatesAction)>>>,
     pub torrents_blacklist: Arc<RwLock<Vec<InfoHash>>>,
+    pub torrents_blacklist_updates: Arc<RwLock<HashMap<u128, (InfoHash, UpdatesAction)>>>,
     pub keys: Arc<RwLock<BTreeMap<InfoHash, i64>>>,
+    pub keys_updates: KeysUpdates,
     pub users: Arc<RwLock<BTreeMap<UserId, UserEntryItem>>>,
-    pub users_updates: Arc<RwLock<HashMap<u128, (UserId, UserEntryItem)>>>,
+    pub users_updates: UsersUpdates,
     pub stats: Arc<StatsAtomics>,
 }
\ No newline at end of file
diff --git a/src/tracker/types.rs b/src/tracker/types.rs
new file mode 100644
index 0000000..1548b61
--- /dev/null
+++ b/src/tracker/types.rs
@@ -0,0 +1,3 @@
+pub mod torrents_updates;
+pub mod keys_updates;
+pub mod users_updates;
\ No newline at end of file
diff --git a/src/tracker/types/keys_updates.rs b/src/tracker/types/keys_updates.rs
new file mode 100644
index 0000000..8bd7c2e
--- /dev/null
+++ b/src/tracker/types/keys_updates.rs
@@ -0,0 +1,7 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+use parking_lot::RwLock;
+use crate::tracker::enums::updates_action::UpdatesAction;
+use crate::tracker::structs::info_hash::InfoHash;
+
+pub type KeysUpdates = Arc<RwLock<HashMap<u128, (InfoHash, i64, UpdatesAction)>>>;
\ No newline at end of file
diff --git a/src/tracker/types/torrents_updates.rs b/src/tracker/types/torrents_updates.rs
new file mode 100644
index 0000000..823d586
--- /dev/null
+++ b/src/tracker/types/torrents_updates.rs
@@ -0,0 +1,8 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+use parking_lot::RwLock;
+use crate::tracker::enums::updates_action::UpdatesAction;
+use crate::tracker::structs::info_hash::InfoHash;
+use crate::tracker::structs::torrent_entry::TorrentEntry;
+
+pub type TorrentsUpdates = Arc<RwLock<HashMap<u128, (InfoHash, TorrentEntry, UpdatesAction)>>>;
\ No newline at end of file
diff --git a/src/tracker/types/users_updates.rs b/src/tracker/types/users_updates.rs
new file mode 100644
index 0000000..3bcff75
--- /dev/null
+++ b/src/tracker/types/users_updates.rs
@@ -0,0 +1,8 @@
+use std::collections::HashMap;
+use std::sync::Arc;
+use parking_lot::RwLock;
+use crate::tracker::enums::updates_action::UpdatesAction;
+use crate::tracker::structs::user_entry_item::UserEntryItem;
+use crate::tracker::structs::user_id::UserId;
+
+pub type UsersUpdates = Arc<RwLock<HashMap<u128, (UserId, UserEntryItem, UpdatesAction)>>>;
\ No newline at end of file
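
Reviewer note, not part of the patch: a minimal sketch of how the new delta-queue API introduced above is meant to be driven, using the whitelist as the example. The add_whitelist_update / save_whitelist_updates signatures and the torrents_whitelist_updates field come from this diff; the InfoHash tuple-struct constructor, the UpdatesAction::Add variant, and the flush_whitelist helper are assumptions for illustration, since updates_action.rs itself is not shown in these hunks.

    use std::sync::Arc;
    use crate::tracker::enums::updates_action::UpdatesAction;
    use crate::tracker::structs::info_hash::InfoHash;
    use crate::tracker::structs::torrent_tracker::TorrentTracker;

    // Hypothetical driver for the new whitelist delta queue.
    async fn flush_whitelist(tracker: Arc<TorrentTracker>) {
        // Queue a delta; the key is the current UNIX time in nanoseconds,
        // so repeated changes to the same info-hash accumulate until a flush.
        let info_hash = InfoHash([0u8; 20]); // assumed tuple-struct constructor
        tracker.add_whitelist_update(info_hash, UpdatesAction::Add); // assumed variant

        // save_whitelist_updates() collapses the queue to one
        // (timestamp, action) entry per info-hash, syncs the result through
        // save_whitelist(), and removes the flushed timestamps only on
        // success, so a failed sync keeps the deltas for the next pass
        // of the periodic save loop in main.rs.
        if tracker.save_whitelist_updates(tracker.clone()).await.is_err() {
            log::error!("whitelist sync failed; deltas kept for retry");
        }
    }

Note also that the dedup pass in save_whitelist_updates drops superseded timestamps as it iterates (the Entry::Occupied branch), so repeated updates to the same info-hash do not pile up in the queue between flushes.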