Diffstat (limited to 'src')
-rw-r--r--  src/filters/mod.rs        |  6
-rw-r--r--  src/guilds.rs             | 32
-rw-r--r--  src/main.rs               | 28
-rw-r--r--  src/output/aggregators.rs | 43
-rw-r--r--  src/output/formats.rs     | 10
-rw-r--r--  src/output/pipeline.rs    | 10
-rw-r--r--  src/paths.rs              |  6
7 files changed, 83 insertions, 52 deletions
diff --git a/src/filters/mod.rs b/src/filters/mod.rs
index 7ab0d42..668c5e1 100644
--- a/src/filters/mod.rs
+++ b/src/filters/mod.rs
@@ -26,7 +26,7 @@ impl ops::Not for Inclusion {
     type Output = Self;

     fn not(self) -> Self::Output {
-        Inclusion::from_i8(-(self as i8)).unwrap()
+        Inclusion::from_i8(-(self as i8)).expect("i8 is from pre-existing Inclusion")
     }
 }

@@ -34,7 +34,7 @@ impl ops::BitAnd<Inclusion> for Inclusion {
     type Output = Self;

     fn bitand(self, rhs: Inclusion) -> Self::Output {
-        Inclusion::from_i8((self as i8).min(rhs as i8)).unwrap()
+        Inclusion::from_i8((self as i8).min(rhs as i8)).expect("i8 is from pre-existing Inclusion")
     }
 }

@@ -42,7 +42,7 @@ impl ops::BitOr<Inclusion> for Inclusion {
     type Output = Self;

     fn bitor(self, rhs: Inclusion) -> Self::Output {
-        Inclusion::from_i8((self as i8).max(rhs as i8)).unwrap()
+        Inclusion::from_i8((self as i8).max(rhs as i8)).expect("i8 is from pre-existing Inclusion")
     }
 }

diff --git a/src/guilds.rs b/src/guilds.rs
index e847841..115de42 100644
--- a/src/guilds.rs
+++ b/src/guilds.rs
@@ -5,6 +5,8 @@ use std::collections::HashMap;
 use std::fs::File;
 use std::sync::RwLock;

+use anyhow::Result;
+use log::{error, warn};
 use once_cell::sync::Lazy;
 use serde::{Deserialize, Serialize};

@@ -51,19 +53,31 @@ pub fn lookup(api_id: &str) -> Option<Guild> {
 /// Loads the cache from the file system.
 pub fn prepare_cache() {
     let path = paths::cache_path();
-    if !path.is_file() {
-        return;
-    }
+    if let Some(path) = path {
+        if !path.is_file() {
+            return;
+        }

-    let file = File::open(path).expect("Unable to read cache file");
-    let cache = serde_json::from_reader(file).expect("Cache file has invalid format");
-    let mut target = CACHE.write().unwrap();
-    *target = cache;
+        let file = File::open(path).expect("Unable to read cache file");
+        let cache = serde_json::from_reader(file).expect("Cache file has invalid format");
+        let mut target = CACHE.write().unwrap();
+        *target = cache;
+    } else {
+        warn!("Could not determine the cache path, the persistent guild cache is disabled");
+    }
 }

 /// Saves the cache to the file system
 pub fn save_cache() {
     let path = paths::cache_path();
-    let file = File::create(path).expect("Cannot open cache for writing");
-    serde_json::to_writer(file, &*CACHE.read().unwrap()).unwrap();
+    // We already warned about that earlier
+    if let Some(path) = path {
+        let result: Result<()> = try {
+            let file = File::create(path)?;
+            serde_json::to_writer(file, &*CACHE.read().unwrap())?;
+        };
+        if let Err(e) = result {
+            error!("Error saving the cache: {}", e);
+        }
+    }
 }
diff --git a/src/main.rs b/src/main.rs
index 27ad285..242e285 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,4 +1,4 @@
-#![feature(trait_alias)]
+#![feature(trait_alias, try_blocks)]
 use std::collections::HashMap;
 use std::fmt;
 use std::fs::{self, File};
@@ -10,7 +10,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
 use anyhow::{anyhow, Context, Error, Result};
 use chrono::{DateTime, Duration, TimeZone, Utc};
 use colored::Colorize;
-use log::debug;
+use log::{debug, error};
 use regex::Regex;
 use rustyline::Editor;
 use structopt::StructOpt;
@@ -253,23 +253,23 @@ pub struct EarlyLogResult {
 }

 enum ZipWrapper<R: Read + Seek> {
-    Raw(Option<R>),
+    Raw(R),
     Zipped(zip::ZipArchive<R>),
 }

 impl<R: Read + Seek> ZipWrapper<R> {
     pub fn raw(input: R) -> Self {
-        ZipWrapper::Raw(Some(input))
+        ZipWrapper::Raw(input)
     }

-    pub fn zipped(input: R) -> Self {
-        ZipWrapper::Zipped(zip::ZipArchive::new(input).unwrap())
+    pub fn zipped(input: R) -> Result<Self> {
+        Ok(ZipWrapper::Zipped(zip::ZipArchive::new(input)?))
     }

-    pub fn get_stream<'a>(&'a mut self) -> Box<(dyn Read + 'a)> {
+    pub fn get_stream<'a>(&'a mut self) -> Result<Box<(dyn Read + 'a)>> {
         match *self {
-            ZipWrapper::Raw(ref mut o) => Box::new(o.take().unwrap()),
-            ZipWrapper::Zipped(ref mut z) => Box::new(z.by_index(0).unwrap()),
+            ZipWrapper::Raw(ref mut o) => Ok(Box::new(o)),
+            ZipWrapper::Zipped(ref mut z) => Ok(Box::new(z.by_index(0)?)),
         }
     }
 }
@@ -471,7 +471,9 @@ fn grep(opt: &Opt, filter: &dyn LogFilter) -> Result<bool> {
                         Ok(None) => (),
                         Ok(Some(result)) => {
                             found_something.store(true, Ordering::Relaxed);
-                            pipeline_ref.push_item(result);
+                            if let Err(e) = pipeline_ref.push_item(result) {
+                                error!("Error writing item to output: {}", e);
+                            }
                         }
                         Err(err) => {
                             debug!("Runtime error while scanning {:?}: {}", entry.path(), err);
@@ -483,7 +485,7 @@ fn grep(opt: &Opt, filter: &dyn LogFilter) -> Result<bool> {
         Ok(())
     });
     result?;
-    pipeline.finish();
+    pipeline.finish()?;

     Ok(found_something.load(Ordering::Relaxed))
 }
@@ -512,11 +514,11 @@ fn search_entry(entry: &DirEntry, filter: &dyn LogFilter) -> Result<Option<LogRe
 fn search_file(path: &Path, is_zip: bool, filter: &dyn LogFilter) -> Result<Option<LogResult>> {
     let file_stream = BufReader::new(File::open(path)?);
     let mut wrapper = if is_zip {
-        ZipWrapper::zipped(file_stream)
+        ZipWrapper::zipped(file_stream)?
     } else {
         ZipWrapper::raw(file_stream)
     };
-    let mut stream = wrapper.get_stream();
+    let mut stream = wrapper.get_stream()?;
     let partial = evtclib::raw::parser::parse_partial_file(&mut stream)?;

     let early_log = EarlyLogResult {
diff --git a/src/output/aggregators.rs b/src/output/aggregators.rs
index 34ecea4..c3d971a 100644
--- a/src/output/aggregators.rs
+++ b/src/output/aggregators.rs
@@ -14,10 +14,13 @@ use std::{
     sync::Mutex,
 };

+use anyhow::Result;
+
 pub trait Aggregator: Sync {
-    fn push_item(&self, item: LogResult, format: &dyn Format, stream: &mut dyn Write);
+    fn push_item(&self, item: LogResult, format: &dyn Format, stream: &mut dyn Write)
+        -> Result<()>;
     // When the `unsized_locals` feature is stable, we could rewrite this to finish(self, ...).
-    fn finish(self: Box<Self>, format: &dyn Format, stream: &mut dyn Write);
+    fn finish(self: Box<Self>, format: &dyn Format, stream: &mut dyn Write) -> Result<()>;
 }

 /// An aggregator that just pushes through each item to the output stream without any sorting or
@@ -25,13 +28,21 @@ pub trait Aggregator: Sync {
 pub struct WriteThrough;

 impl Aggregator for WriteThrough {
-    fn push_item(&self, item: LogResult, format: &dyn Format, stream: &mut dyn Write) {
+    fn push_item(
+        &self,
+        item: LogResult,
+        format: &dyn Format,
+        stream: &mut dyn Write,
+    ) -> Result<()> {
         let text = format.format_result(&item);
-        stream.write_all(text.as_bytes()).unwrap();
-        stream.flush().unwrap();
+        stream.write_all(text.as_bytes())?;
+        stream.flush()?;
+        Ok(())
     }

-    fn finish(self: Box<Self>, _: &dyn Format, _: &mut dyn Write) {}
+    fn finish(self: Box<Self>, _: &dyn Format, _: &mut dyn Write) -> Result<()> {
+        Ok(())
+    }
 }

 /// An aggregator that keeps all found logs in memory and sorts them before outputting them.
@@ -51,19 +62,21 @@ impl SortedOutput {
 }

 impl Aggregator for SortedOutput {
-    fn push_item(&self, item: LogResult, _: &dyn Format, _: &mut dyn Write) {
-        self.items.lock().unwrap().push(item)
+    fn push_item(&self, item: LogResult, _: &dyn Format, _: &mut dyn Write) -> Result<()> {
+        self.items.lock().unwrap().push(item);
+        Ok(())
     }

-    fn finish(self: Box<Self>, format: &dyn Format, stream: &mut dyn Write) {
+    fn finish(self: Box<Self>, format: &dyn Format, stream: &mut dyn Write) -> Result<()> {
         let SortedOutput { sorting, items } = *self;
         let mut items = items.into_inner().unwrap();
         items.sort_unstable_by(|a, b| sorting.cmp(a, b));
         for item in items {
             let text = format.format_result(&item);
-            stream.write_all(text.as_bytes()).unwrap();
+            stream.write_all(text.as_bytes())?;
         }
-        stream.flush().unwrap();
+        stream.flush()?;
+        Ok(())
     }
 }

@@ -82,11 +95,13 @@ impl CountingOutput {
 }

 impl Aggregator for CountingOutput {
-    fn push_item(&self, _: LogResult, _: &dyn Format, _: &mut dyn Write) {
+    fn push_item(&self, _: LogResult, _: &dyn Format, _: &mut dyn Write) -> Result<()> {
         self.counter.fetch_add(1, Ordering::SeqCst);
+        Ok(())
     }

-    fn finish(self: Box<Self>, _: &dyn Format, stream: &mut dyn Write) {
-        writeln!(stream, "{}", self.counter.into_inner()).unwrap();
+    fn finish(self: Box<Self>, _: &dyn Format, stream: &mut dyn Write) -> Result<()> {
+        writeln!(stream, "{}", self.counter.into_inner())?;
+        Ok(())
     }
 }
diff --git a/src/output/formats.rs b/src/output/formats.rs
index 7225da2..d22ab19 100644
--- a/src/output/formats.rs
+++ b/src/output/formats.rs
@@ -29,7 +29,7 @@ impl Format for HumanReadable {
             "File".green(),
             item.log_file.to_string_lossy()
         )
-        .unwrap();
+        .expect("writing to String failed");
         let outcome = match item.outcome {
             FightOutcome::Success => "SUCCESS".green(),
             FightOutcome::Wipe => "WIPE".red(),
@@ -49,7 +49,7 @@ impl Format for HumanReadable {
             outcome,
             humantime::Duration::from(item.duration.to_std().unwrap()),
         )
-        .unwrap();
+        .expect("writing to String failed");
         for player in &item.players {
             write!(
                 result,
@@ -59,7 +59,7 @@ impl Format for HumanReadable {
                 player.character_name.cyan(),
                 player.profession,
             )
-            .unwrap();
+            .expect("writing to String failed");
             if self.show_guilds {
                 let guild = player.guild_id.as_ref().and_then(|id| guilds::lookup(id));
                 if let Some(guild) = guild {
@@ -69,10 +69,10 @@ impl Format for HumanReadable {
                         guild.tag().magenta(),
                         guild.name().magenta(),
                     )
-                    .unwrap();
+                    .expect("writing to String failed");
                 }
             }
-            writeln!(result).unwrap();
+            writeln!(result).expect("writing to String failed");
         }
         result
     }
diff --git a/src/output/pipeline.rs b/src/output/pipeline.rs
index 9b7f461..3db0d56 100644
--- a/src/output/pipeline.rs
+++ b/src/output/pipeline.rs
@@ -3,6 +3,8 @@ use super::{aggregators::Aggregator, formats::Format};

 use std::{io::Write, sync::Mutex};

+use anyhow::Result;
+
 pub struct Pipeline {
     format: Box<dyn Format>,
     aggregator: Box<dyn Aggregator>,
@@ -22,13 +24,13 @@ impl Pipeline {
         }
     }

-    pub fn push_item(&self, item: LogResult) {
+    pub fn push_item(&self, item: LogResult) -> Result<()> {
         let mut writer = self.writer.lock().unwrap();
-        self.aggregator.push_item(item, &*self.format, &mut *writer);
+        self.aggregator.push_item(item, &*self.format, &mut *writer)
     }

-    pub fn finish(self) {
+    pub fn finish(self) -> Result<()> {
         let mut writer = self.writer.lock().unwrap();
-        self.aggregator.finish(&*self.format, &mut *writer);
+        self.aggregator.finish(&*self.format, &mut *writer)
     }
 }
diff --git a/src/paths.rs b/src/paths.rs
index f219dc4..bebe0ee 100644
--- a/src/paths.rs
+++ b/src/paths.rs
@@ -4,10 +4,8 @@ use super::APP_NAME;
 use std::path::PathBuf;

 /// Returns the path that should be used for the cache.
-pub fn cache_path() -> PathBuf {
-    let mut cache_path = dirs::cache_dir().unwrap();
-    cache_path.push(APP_NAME);
-    cache_path
+pub fn cache_path() -> Option<PathBuf> {
+    dirs::cache_dir().map(|p| p.join(APP_NAME))
 }

 /// Returns the path that should be used for the REPL history.
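Note (illustration, not part of the commit): the new save_cache relies on the unstable `try_blocks` feature, which is why main.rs now enables it alongside `trait_alias`. A minimal, self-contained sketch of the same pattern, assuming a nightly toolchain and `anyhow` as a dependency; the function name and path below are hypothetical stand-ins:

// Sketch of the try-block pattern used in save_cache: fallible steps use `?`
// inside `try { ... }`, and the collected error is logged instead of panicking.
#![feature(try_blocks)]

use std::fs::File;
use std::io::Write;

use anyhow::Result;

fn save_to(path: &std::path::Path, data: &[u8]) {
    let result: Result<()> = try {
        let mut file = File::create(path)?;
        file.write_all(data)?;
    };
    if let Err(e) = result {
        // Mirrors the commit's choice of logging the error rather than unwrapping.
        eprintln!("error saving {}: {}", path.display(), e);
    }
}

fn main() {
    save_to(std::path::Path::new("/tmp/example.bin"), b"hello");
}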

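Note (illustration, not part of the commit): the new expect() messages in filters/mod.rs rely on the discriminant set being closed under negation, min, and max, so `from_i8` cannot return None for a value derived from an existing Inclusion. A small sketch of that invariant, assuming `from_i8` comes from `num_traits::FromPrimitive` derived via `num_derive`; the `Tri` enum, its variant names, and its discriminants are placeholders, not the crate's actual Inclusion definition:

// Placeholder enum with discriminants {-1, 0, 1}; requires the num-traits and
// num-derive crates as dependencies.
use num_derive::FromPrimitive;
use num_traits::FromPrimitive as _;

#[derive(Debug, Clone, Copy, PartialEq, FromPrimitive)]
enum Tri {
    Exclude = -1,
    Unknown = 0,
    Include = 1,
}

fn main() {
    let value = Tri::Include;
    // Negating a discriminant from {-1, 0, 1} always lands back in the set,
    // so from_i8 returns Some here and the expect() never fires.
    let negated = Tri::from_i8(-(value as i8)).expect("i8 is from pre-existing Tri");
    assert_eq!(negated, Tri::Exclude);

    // The same argument covers min/max, which back the BitAnd/BitOr impls in the diff.
    let both = Tri::from_i8((Tri::Unknown as i8).min(Tri::Exclude as i8))
        .expect("i8 is from pre-existing Tri");
    assert_eq!(both, Tri::Exclude);
}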