use std::{
    fs::{self, File},
    io::{BufReader, BufWriter, Read, Write},
    path::{Path, PathBuf},
    sync::mpsc::channel,
    thread,
    time::Duration,
};

use anyhow::{anyhow, Context, Result};
use clap::Clap;
use evtclib::{Compression, Encounter, Log};
use log::{debug, error, info, warn};
use notify::{self, DebouncedEvent, RecursiveMode, Watcher};
use regex::Regex;
use serde::Deserialize;
use zip::{CompressionMethod, ZipArchive, ZipWriter};

mod categories;
use categories::Categorizable;
mod config;
use config::Config;
mod discord;
mod logbag;
mod matrix;

const DPS_REPORT_API: &str = "https://dps.report/uploadContent";
const WATCH_DELAY_SECONDS: u64 = 2;
const RETRY_DELAY: Duration = Duration::from_secs(5);

#[derive(Clap, Debug, Clone, PartialEq, Eq, Hash)]
#[clap(version = "0.1")]
struct Opts {
    /// The configuration file path.
    #[clap(short, long, default_value = "ezau.toml")]
    config: PathBuf,
    #[clap(subcommand)]
    subcmd: SubCommand,
}

#[derive(Clap, Debug, Clone, PartialEq, Eq, Hash)]
enum SubCommand {
    Watch(Watch),
    Upload(Upload),
}

/// Use the watch mode to automatically handle new logs.
///
/// This watches the given directory for new files and then zips and uploads them.
#[derive(Clap, Debug, Clone, PartialEq, Eq, Hash)]
struct Watch {
    /// The directory to watch.
    dirname: PathBuf,
}

/// Upload a single log, as it would be done by the automatic watcher.
#[derive(Clap, Debug, Clone, PartialEq, Eq, Hash)]
struct Upload {
    /// The log to upload.
    path: PathBuf,
}

fn main() {
    pretty_env_logger::init();
    let opts = Opts::parse();

    if let Err(e) = inner_main(&opts) {
        error!("{}", e);
        e.chain()
            .skip(1)
            .for_each(|cause| error!("... because: {}", cause));
        std::process::exit(1);
    }
}

fn inner_main(opts: &Opts) -> Result<()> {
    let config = config::load(&opts.config).context("Could not load configuration")?;

    match &opts.subcmd {
        SubCommand::Watch(w) => watch(w, &config)?,
        SubCommand::Upload(u) => {
            let permalink = upload_log(&u.path)?;
            println!("{}", permalink);

            let log = load_log(&u.path)?;
            if let Some(d) = &config.discord {
                discord::post_link(&d.auth_token, d.channel_id, &log, &permalink)
                    .context("Could not post link to Discord")?;
            }
            if let Some(m) = &config.matrix {
                matrix::post_link(m.clone().into(), &m.room_id, &log, &permalink)
                    .context("Could not post link to Matrix")?;
            }
        }
    }
    Ok(())
}

fn watch(watch: &Watch, config: &Config) -> Result<()> {
    let raw_evtc_re = Regex::new(r"\d{8}-\d{6}(\.evtc)?$").unwrap();
    let zip_evtc_re = Regex::new(r"(\.zip|\.zevtc)$").unwrap();

    let (tx, rx) = channel();
    let mut watcher = notify::watcher(tx, Duration::from_secs(WATCH_DELAY_SECONDS))?;
    watcher
        .watch(&watch.dirname, RecursiveMode::Recursive)
        .context("Could not watch the given directory")?;
    info!("Watcher set up, watching {:?}", watch.dirname);

    loop {
        let event = rx.recv()?;
        debug!("Event: {:?}", event);
        if let DebouncedEvent::Create(path) = event {
            let path_str = path.to_str().unwrap();
            // Check if we need to zip it first.
            if config.zip && raw_evtc_re.is_match(path_str) {
                info!("Zipping up {}", path_str);
                zip_file(&path)?;
            } else if zip_evtc_re.is_match(path_str) {
                handle_file(config, &path)?;
            }
        }
    }
}

fn zip_file(filepath: &Path) -> Result<()> {
    let evtc_content = fs::read(filepath)?;
    let filename = filepath
        .file_name()
        .ok_or_else(|| anyhow!("Path does not have a file name"))?
        .to_str()
        .ok_or_else(|| anyhow!("Filename is invalid utf-8"))?;

    let outname = filepath.with_extension("zevtc");
    let outfile = BufWriter::new(File::create(&outname)?);
    let mut zip = ZipWriter::new(outfile);
    let options =
        zip::write::FileOptions::default().compression_method(CompressionMethod::Deflated);
    zip.start_file(filename, options)?;
    zip.write_all(&evtc_content)?;
    zip.finish()?.flush()?;

    if !verify_zip(filepath, &outname)? {
        warn!("ZIP content mismatch, keeping original file");
        return Ok(());
    }

    fs::remove_file(filepath)?;
    Ok(())
}

fn verify_zip(original: &Path, zip_path: &Path) -> Result<bool> {
    let expected_content = fs::read(original)?;
    let mut archive = ZipArchive::new(BufReader::new(File::open(zip_path)?))?;
    let mut inner = archive.by_index(0)?;
    let mut actual_content = Vec::new();
    inner.read_to_end(&mut actual_content)?;
    Ok(expected_content == actual_content)
}

fn handle_file(config: &Config, filename: &Path) -> Result<()> {
    if !config.upload {
        return Ok(());
    }

    let log = load_log(filename)?;
    info!("Loaded log from category {}", log.category());

    if !should_upload(config, &log) {
        info!("Skipping log, not uploading");
        return Ok(());
    }

    let mut try_counter = 0;
    let permalink = loop {
        let result = upload_log(filename);
        if let Ok(link) = result {
            break link;
        }
        warn!(
            "Upload try {} failed, retrying {} more times. Reason: {}",
            try_counter + 1,
            config.retries - try_counter,
            result.as_ref().unwrap_err(),
        );
        if try_counter == config.retries {
            return Err(result.unwrap_err());
        }
        try_counter += 1;
        thread::sleep(RETRY_DELAY);
    };
    info!("Uploaded log, available at {}", permalink);

    if let Some(d) = &config.discord {
        discord::post_link(&d.auth_token, d.channel_id, &log, &permalink)
            .context("Could not post link to Discord")?;
        info!("Posted link to Discord");
    }
    if let Some(m) = &config.matrix {
        matrix::post_link(m.clone().into(), &m.room_id, &log, &permalink)
            .context("Could not post link to Matrix")?;
        info!("Posted link to Matrix");
    }
    Ok(())
}

fn should_upload(config: &Config, log: &Log) -> bool {
    // Only upload known logs
    if log.encounter().is_none() && !config.upload_unknown {
        return false;
    }
    // Only upload Skorvald if it actually was in 100 CM (and not in lower-tier or non-CM).
    if log.encounter() == Some(Encounter::Skorvald) && !log.is_cm() {
        return false;
    }
    // Only upload logs that are long enough
    if log.span() < config.minimum_duration {
        return false;
    }
    true
}

fn load_log(path: &Path) -> Result<Log> {
    evtclib::process_file(path, Compression::Zip).map_err(Into::into)
}

fn upload_log(file: &Path) -> Result<String> {
    #[derive(Debug, Deserialize)]
    struct ApiResponse {
        permalink: String,
    }

    let client = reqwest::blocking::Client::new();
    let form = reqwest::blocking::multipart::Form::new().file("file", file)?;

    let resp: ApiResponse = client
        .post(DPS_REPORT_API)
        .query(&[("json", 1)])
        .multipart(form)
        .send()?
        .error_for_status()?
        .json()?;

    Ok(resp.permalink)
}
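
// A minimal test sketch for `verify_zip`, mirroring what `zip_file` does when it
// writes the archive. Assumptions: it is acceptable to create temporary files under
// std::env::temp_dir(), and the file names used here ("ezau-verify-zip-test.*") are
// arbitrary and exist only for this test.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn verify_zip_detects_matching_content() -> Result<()> {
        let dir = std::env::temp_dir();
        let original = dir.join("ezau-verify-zip-test.evtc");
        let zipped = dir.join("ezau-verify-zip-test.zevtc");

        // Write some dummy "evtc" content as the original file.
        fs::write(&original, b"dummy evtc content")?;

        // Build a zip archive containing the same bytes, as zip_file would.
        let outfile = BufWriter::new(File::create(&zipped)?);
        let mut zip = ZipWriter::new(outfile);
        let options =
            zip::write::FileOptions::default().compression_method(CompressionMethod::Deflated);
        zip.start_file("ezau-verify-zip-test.evtc", options)?;
        zip.write_all(b"dummy evtc content")?;
        zip.finish()?.flush()?;

        // The archived content matches the original, so verification should pass.
        assert!(verify_zip(&original, &zipped)?);

        // Clean up the temporary files.
        fs::remove_file(&original)?;
        fs::remove_file(&zipped)?;
        Ok(())
    }
}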