Added scrape support + fixed some warnings

This commit is contained in:
Naim A 2018-12-11 02:46:57 +02:00
parent 0f0210f6b8
commit 04a0faa604
No known key found for this signature in database
GPG key ID: FD7948915D9EF8B9
4 changed files with 85 additions and 5 deletions

View file

@@ -5,7 +5,7 @@ authors = ["Naim A. <naim94a@github.com>"]
description = "High performance torrent tracker"
[profile.release]
lto = true
lto = "thin"
[dependencies]
serde = "1.0.80"

View file

@@ -152,7 +152,7 @@ fn main() {
std::thread::spawn(move || {
loop {
std::thread::sleep_ms(1000 * 120);
std::thread::sleep(std::time::Duration::new(120, 0));
debug!("periodically saving database.");
tracker_clone.periodic_task(db_p.as_str());
debug!("database saved.");

View file

@@ -98,6 +98,13 @@ struct UDPAnnounceResponse {
seeders: u32,
}
#[derive(Serialize)]
// A single per-torrent stats entry of a UDP scrape response; one entry is
// packed into the reply for every info_hash in the request (see handle_scrape).
// Field declaration order is the on-the-wire order produced by `pack_into`
// (serde serializes struct fields in declaration order).
struct UDPScrapeResponseEntry {
// values come from TorrentEntry::get_stats(): (seeders, completed, leechers)
seeders: u32,
completed: u32,
leechers: u32,
}
pub struct UDPTracker {
server: std::net::UdpSocket,
tracker: std::sync::Arc<tracker::TorrentTracker>,
@@ -281,12 +288,72 @@ impl UDPTracker {
}
}
fn handle_scrape(&self, remote_addr: &SocketAddr, header: &UDPRequestHeader, _payload: &[u8]) {
// Handles a UDP tracker `scrape` request: for each 20-byte info_hash in the
// request payload, appends a (seeders, completed, leechers) entry to the
// response packet and sends it back to `remote_addr`.
fn handle_scrape(&self, remote_addr: &SocketAddr, header: &UDPRequestHeader, payload: &[u8]) {
// A scrape is only honored on an established "connection": the request's
// connection_id must match the one this tracker derives for the sender.
if header.connection_id != self.get_connection_id(remote_addr) {
return;
}
// NOTE(review): this line looks like the removed pre-implementation stub
// left over from a diff transcript — as written the handler would send an
// error response and then still build and send a full scrape response.
// Confirm against the repository that this line was deleted.
self.send_error(remote_addr, header, "scrape not yet implemented");
// Upper bound on torrents served per scrape request — presumably chosen so
// the 8 + 74*12 byte response fits in a single UDP packet; TODO confirm.
const MAX_SCRAPE: usize = 74;
// Response layout: 8-byte header followed by one 12-byte entry per torrent.
let mut response_buffer = [0u8; 8 + MAX_SCRAPE * 12];
let mut response = StackVec::from(&mut response_buffer);
if pack_into(&mut response, &UDPResponseHeader{
action: Actions::Scrape,
transaction_id: header.transaction_id,
}).is_err() {
// not much we can do...
error!("failed to encode udp scrape response header.");
return;
}
// skip first 16 bytes for header...
let info_hash_array = &payload[16..];
// A well-formed payload is a multiple of 20 bytes (one info_hash each);
// odd lengths are only logged — complete 20-byte prefixes are still served.
if info_hash_array.len() % 20 != 0 {
trace!("received weird length for scrape info_hash array (!mod20).");
}
let db = self.tracker.get_database();
for torrent_index in 0..MAX_SCRAPE {
let info_hash_start = torrent_index * 20;
let info_hash_end = (torrent_index + 1) * 20;
// Stop once the payload has no further complete info_hash.
if info_hash_end > info_hash_array.len() {
break;
}
let info_hash = &info_hash_array[info_hash_start..info_hash_end];
let ih = tracker::InfoHash::from(info_hash);
// Unknown torrents get an all-zero entry rather than being omitted, so
// response entries stay aligned with the request's info_hash order.
let result = match db.get(&ih) {
Some(torrent_info) => {
let (seeders, completed, leechers) = torrent_info.get_stats();
UDPScrapeResponseEntry{
seeders,
completed,
leechers,
}
},
None => {
UDPScrapeResponseEntry{
seeders: 0,
completed: 0,
leechers: 0,
}
}
};
if pack_into(&mut response, &result).is_err() {
debug!("failed to encode scrape entry.");
return;
}
}
// if sending fails, not much we can do...
let _ = self.send_packet(&remote_addr, &response.as_slice());
}
fn get_connection_id(&self, remote_address: &SocketAddr) -> u64 {

View file

@@ -40,6 +40,17 @@ impl std::cmp::PartialOrd<InfoHash> for InfoHash {
}
}
impl std::convert::From<&[u8]> for InfoHash {
    /// Builds an `InfoHash` from a raw byte slice.
    ///
    /// # Panics
    /// Panics if `data` is not exactly 20 bytes long (the SHA-1 digest size).
    fn from(data: &[u8]) -> InfoHash {
        assert_eq!(data.len(), 20);
        let mut ret = InfoHash {
            info_hash: [0u8; 20],
        };
        // `u8` is `Copy`, so `copy_from_slice` (compiles to a memcpy) is the
        // idiomatic choice over `clone_from_slice`.
        ret.info_hash.copy_from_slice(data);
        ret
    }
}
impl std::convert::Into<InfoHash> for [u8; 20] {
fn into(self) -> InfoHash {
InfoHash { info_hash: self }
@@ -425,7 +436,9 @@ impl TorrentTracker {
return;
}
Ok(mut file) => {
self.save_database(&mut file);
if let Err(err) = self.save_database(&mut file) {
error!("failed saving database. {}", err);
}
}
}
}