Included showing the puzzle input file in the benchmark report
Tidied up the benchmark report
parent 4970f247f3
commit 1d0154122b
8 changed files with 225 additions and 78 deletions
@@ -1,43 +1,55 @@
pub use benchmark_result::BenchmarkResult;
pub use conducted_benchmark::ConductedBenchmark;
pub use file_to_benchmark::FileToBenchmark;

mod benchmark_result;
use row_builder::COLUMN_NUMBER;
use row_building::MatrixReport;

mod conducted_benchmark;
mod file_to_benchmark;
mod row_builder;
mod row_building;

use crate::{cli::BenchmarkCli, solving_given::solve_given, AppResult};
use crate::{
    cli::BenchmarkCli,
    solving_given::{solve_given, NoSolutionFound},
    AppError, AppResult,
};
use anyhow::anyhow;
use colonnade::Colonnade;
use std::time::{Duration, Instant};

fn calc_average(sum: Duration, nanos_count: u64) -> Duration {
    let average_raw = sum.div_duration_f64(Duration::from_nanos(nanos_count));
    Duration::from_nanos(average_raw.round() as u64)
}
pub type BenchmarkResult = Result<ConductedBenchmark, NoSolutionFound>;

pub fn execute_benchmark(args: &BenchmarkCli) -> AppResult<String> {
    let loaded = load_benchmarks(args)?;
    let benchmarked = loaded
        .into_iter()
        .map(solve_and_keep_track_of_runtime)
        .collect::<AppResult<Vec<BenchmarkResult>>>()?;
    let sum: Duration = benchmarked.iter().map(|result| result.how_long()).sum();
    let average = calc_average(sum, benchmarked.len() as u64);

    let benchmarked = loaded.into_iter().map(solve_and_keep_track_of_runtime);

    let mut sum: Duration = Duration::ZERO;
    let mut count: u64 = 0;
    let after_header = row_building::create_rows_for_every_solutions(benchmarked, |to_add| {
        sum += to_add;
        count += 1;
    });
    let average = calc_average(sum, count);
    let header = row_building::create_header();
    let after_header = row_building::create_rows_for_every_solutions(benchmarked);
    let table: Vec<Vec<String>> = after_header

    let table: MatrixReport = after_header
        .into_iter()
        .chain(row_building::create_sum_row(sum))
        .chain(row_building::create_average_row(average))
        .chain(header)
        .collect();
    let lines = Colonnade::new(5, 100).unwrap().tabulate(table)?;
    let lines = Colonnade::new(COLUMN_NUMBER, 100)
        .unwrap()
        .tabulate(table)?;

    Ok(lines.join("\n"))
}

fn solve_and_keep_track_of_runtime(
    (benchmark, content): (FileToBenchmark, String),
) -> AppResult<BenchmarkResult> {
) -> Result<ConductedBenchmark, NoSolutionFound> {
    let (day, task) = (benchmark.given_day(), benchmark.given_task());
    let before = Instant::now();
    let actual_ouput = solve_given(day, task, &content)?;

@@ -45,15 +57,14 @@ fn solve_and_keep_track_of_runtime(

    let how_long = after - before;

    let result = BenchmarkResult::new(benchmark, actual_ouput, how_long);
    let result = ConductedBenchmark::new(benchmark, actual_ouput, how_long);
    Ok(result)
}

fn load_benchmarks(args: &BenchmarkCli) -> AppResult<Vec<(FileToBenchmark, String)>> {
    let benchmarks = args
        .files()
    args.files()
        .iter()
        .map(|path| {
        .flat_map(|path| {
            let content = std::fs::read_to_string(path).map_err(|error| {
                anyhow!(
                    "Could not read benchmark file at {:?}\n\

@@ -70,14 +81,9 @@ fn load_benchmarks(args: &BenchmarkCli) -> AppResult<Vec<(FileToBenchmark, Strin
                    error
                )
            })?;
            Ok(parsed)
            Ok::<_, AppError>(parsed)
        })
        .collect::<AppResult<Vec<Vec<FileToBenchmark>>>>()?
        .into_iter()
        .flatten();

    benchmarks
        .into_iter()
        .flatten()
        .map(|benchmark| {
            let where_to_look = benchmark.where_to_look();
            let content = std::fs::read_to_string(where_to_look).map_err(|error| {

@@ -92,5 +98,10 @@ fn load_benchmarks(args: &BenchmarkCli) -> AppResult<Vec<(FileToBenchmark, Strin
            })?;
            Ok((benchmark, content))
        })
        .collect::<AppResult<Vec<(FileToBenchmark, String)>>>()
        .collect()
}

fn calc_average(sum: Duration, nanos_count: u64) -> Duration {
    let average_raw = sum.div_duration_f64(Duration::from_nanos(nanos_count));
    Duration::from_nanos(average_raw.round() as u64)
}
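The calc_average helper above divides the summed runtime by the benchmark count by handing div_duration_f64 a divisor of count nanoseconds: sum_ns / count_ns gives the average in nanoseconds, which is then rounded back into a Duration. A standalone check of that arithmetic (function body copied from the hunk above; the values in main are illustrative, and div_duration_f64 needs Rust 1.80 or newer):

```rust
use std::time::Duration;

// Copied from the diff above: average = sum / count, computed by treating the
// count as a nanosecond-denominated divisor Duration.
fn calc_average(sum: Duration, nanos_count: u64) -> Duration {
    let average_raw = sum.div_duration_f64(Duration::from_nanos(nanos_count));
    Duration::from_nanos(average_raw.round() as u64)
}

fn main() {
    // 3 s total across 2 benchmarks -> 1.5 s average (3_000_000_000 ns / 2).
    let average = calc_average(Duration::from_secs(3), 2);
    assert_eq!(average, Duration::from_millis(1500));
}
```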
@@ -1,21 +1,25 @@
use std::time::Duration;
use std::{
    path::{Path, PathBuf},
    time::Duration,
};

use crate::cli::{GivenDay, GivenTask};

use super::FileToBenchmark;

#[derive(Debug)]
pub struct BenchmarkResult {
pub struct ConductedBenchmark {
    task: GivenTask,
    day: GivenDay,
    how_long: Duration,
    maybe_different_actual_output: Option<String>,
    expected_output: String,
    path_of_input: PathBuf,
}

impl BenchmarkResult {
impl ConductedBenchmark {
    pub fn new(benchmarking: FileToBenchmark, actual_ouput: String, how_long: Duration) -> Self {
        let (day, task, expected_output) = benchmarking.into();
        let (day, task, expected_output, path_of_input) = benchmarking.into();
        let maybe_different_actual_output = if actual_ouput == expected_output {
            None
        } else {

@@ -27,6 +31,7 @@ impl BenchmarkResult {
            how_long,
            maybe_different_actual_output,
            expected_output,
            path_of_input,
        }
    }

@@ -63,4 +68,8 @@ impl BenchmarkResult {
        let mili_secs = how_long.as_nanos() % 1_000_000_000;
        format!("{}:{}", secs, mili_secs)
    }

    pub fn path_of_input(&self) -> &Path {
        &self.path_of_input
    }
}
@@ -19,9 +19,14 @@ pub struct FileToBenchmark {
    expected_output: String,
}

impl From<FileToBenchmark> for (GivenDay, GivenTask, String) {
impl From<FileToBenchmark> for (GivenDay, GivenTask, String, PathBuf) {
    fn from(val: FileToBenchmark) -> Self {
        (val.given_day, val.given_task, val.expected_output)
        (
            val.given_day,
            val.given_task,
            val.expected_output,
            val.where_to_look,
        )
    }
}
crates/cli/src/benchmarking/row_builder.rs (new file, 124 lines)

@@ -0,0 +1,124 @@
use std::{path::PathBuf, time::Duration};

const AVERAGE: &str = "Average";
const TOTAL: &str = "total";

use crate::{
    benchmarking::ConductedBenchmark,
    cli::{GivenDay, GivenTask},
    constants,
    solving_given::NoSolutionFound,
};

pub const COLUMN_NUMBER: usize = 6;

#[derive(Debug, Default)]
pub struct RowBuilder {
    day: Option<String>,
    task: Option<String>,
    taken_time: Option<Duration>,
    actual_result: Option<String>,
    expected_result: Option<String>,
    path_to_input: Option<String>,
}

impl From<NoSolutionFound> for RowBuilder {
    fn from(value: NoSolutionFound) -> Self {
        match value {
            NoSolutionFound::DayNotFound(day) => Self::no_day_found(day),
            NoSolutionFound::TaskNotFound { day, task } => Self::no_task_found(day, task),
        }
    }
}

impl From<ConductedBenchmark> for RowBuilder {
    fn from(value: ConductedBenchmark) -> Self {
        Self::new(
            value.day(),
            value.task(),
            value.how_long(),
            value
                .maybe_different_actual_output()
                .map(ToString::to_string)
                .unwrap_or_default(),
            value.expected_output().to_string(),
            value.path_of_input().to_path_buf(),
        )
    }
}

impl RowBuilder {
    pub fn new(
        day: GivenDay,
        task: GivenTask,
        taken_time: Duration,
        actual_result: String,
        expected_result: String,
        path: PathBuf,
    ) -> Self {
        Self {
            day: Some(day.to_string()),
            task: Some(task.to_string()),
            taken_time: Some(taken_time),
            actual_result: Some(actual_result),
            expected_result: Some(expected_result),
            path_to_input: Some(path.to_string_lossy().to_string()),
        }
    }

    pub fn no_day_found(wrong_day: GivenDay) -> Self {
        let day = Some(format!("Solution found for day {}", wrong_day));
        Self {
            day,
            ..Default::default()
        }
    }

    pub fn no_task_found(right_day: GivenDay, wrong_task: GivenTask) -> Self {
        let day = Some(right_day.to_string());
        let task = Some(format!("No solution found for task {}", wrong_task));
        Self {
            day,
            task,
            ..Default::default()
        }
    }

    pub fn average(average: Duration) -> Self {
        Self::new_aggreate(AVERAGE, average)
    }

    pub fn total(total: Duration) -> Self {
        Self::new_aggreate(TOTAL, total)
    }

    pub fn into_row(self) -> Vec<String> {
        fn this_or_placeholder(value: Option<String>) -> String {
            value.unwrap_or_else(|| constants::PLACEHOLDER_IN_BENCHMARK_REPORTS.to_string())
        }

        let (day, task, taken_time, actual_result, expected_result, path) = (
            this_or_placeholder(self.day),
            this_or_placeholder(self.task),
            this_or_placeholder(
                self.taken_time
                    .map(ConductedBenchmark::convert_duration_to_secs_and_mili_txt),
            ),
            this_or_placeholder(self.actual_result),
            this_or_placeholder(self.expected_result),
            this_or_placeholder(self.path_to_input),
        );
        vec![day, task, taken_time, actual_result, expected_result, path]
    }

    fn new_aggreate(label: &str, aggregate: Duration) -> Self {
        let (day, task) = (Some(label.to_string()), Some(label.to_string()));
        let taken_time = Some(aggregate);
        Self {
            day,
            task,
            taken_time,
            ..Default::default()
        }
    }
}
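RowBuilder::into_row fills any column that was never set with the report placeholder, which is how the error rows built by no_day_found and no_task_found still come out with the full six columns. A stripped-down standalone sketch of that fallback (the placeholder constant and the two-column row here are stand-ins; the real builder has six columns and pulls the placeholder from the crate's constants module):

```rust
// Stand-in for constants::PLACEHOLDER_IN_BENCHMARK_REPORTS from the diff above.
const PLACEHOLDER: &str = "-";

// Same fallback RowBuilder::into_row uses for columns that were never set.
fn this_or_placeholder(value: Option<String>) -> String {
    value.unwrap_or_else(|| PLACEHOLDER.to_string())
}

fn main() {
    // An error row only knows the day; every other cell falls back to "-".
    let (day, task) = (Some("26".to_string()), None);
    let row = vec![this_or_placeholder(day), this_or_placeholder(task)];
    assert_eq!(row, vec!["26".to_string(), "-".to_string()]);
}
```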
@@ -1,7 +1,8 @@
use std::time::Duration;
pub type MatrixReport = Vec<Vec<String>>;
pub type SingleRow = std::iter::Once<Vec<String>>;

use super::BenchmarkResult;
type MatrixReport = Vec<Vec<String>>;
use super::{row_builder::RowBuilder, BenchmarkResult};
use std::time::Duration;

pub fn create_header() -> std::iter::Once<Vec<String>> {
    std::iter::once(vec![

@@ -10,45 +11,30 @@ pub fn create_header() -> std::iter::Once<Vec<String>> {
        "How long (Seconds:Mili)".to_string(),
        "Expected".to_string(),
        "Actual".to_string(),
        "Used input file".to_string(),
    ])
}

pub fn create_sum_row(sum: Duration) -> std::iter::Once<Vec<String>> {
    let passed_txt = BenchmarkResult::convert_duration_to_secs_and_mili_txt(sum);
    std::iter::once(vec![
        "Total".to_string(),
        "Total".to_string(),
        passed_txt,
        "-".to_string(),
        "-".to_string(),
    ])
pub fn create_sum_row(sum: Duration) -> SingleRow {
    std::iter::once(RowBuilder::total(sum).into_row())
}

pub fn create_average_row(average: Duration) -> std::iter::Once<Vec<String>> {
    let passed_txt = BenchmarkResult::convert_duration_to_secs_and_mili_txt(average);
    std::iter::once(vec![
        "Average".to_string(),
        "Average".to_string(),
        passed_txt,
        "-".to_string(),
        "-".to_string(),
    ])
pub fn create_average_row(average: Duration) -> SingleRow {
    std::iter::once(RowBuilder::average(average).into_row())
}

pub fn create_rows_for_every_solutions(loaded: Vec<BenchmarkResult>) -> MatrixReport {
pub fn create_rows_for_every_solutions(
    loaded: impl IntoIterator<Item = BenchmarkResult>,
    mut on_ok: impl FnMut(Duration),
) -> MatrixReport {
    loaded
        .into_iter()
        .map(|result| {
            vec![
                result.day().to_string(),
                result.task().to_string(),
                result.how_long_as_string(),
                result.expected_output().to_string(),
                result
                    .maybe_different_actual_output()
                    .map(|unowned| unowned.to_string())
                    .unwrap_or_else(|| result.expected_output().to_string()),
            ]
                .inspect(|on_success| on_ok(on_success.how_long()))
                .map(RowBuilder::from)
                .unwrap_or_else(RowBuilder::from)
        })
        .map(RowBuilder::into_row)
        .collect()
}
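create_rows_for_every_solutions now takes any iterator of results plus an on_ok callback, so execute_benchmark can accumulate the total runtime and the row count in one pass while each Ok or Err is turned into a row through RowBuilder::from. A standalone sketch of that shape with simplified stand-ins (Result<Duration, String> instead of BenchmarkResult, plain string cells instead of RowBuilder):

```rust
use std::time::Duration;

// Walk a stream of results, let a callback observe each Ok value,
// and render both Ok and Err into a display row.
fn rows_with_callback(
    results: impl IntoIterator<Item = Result<Duration, String>>,
    mut on_ok: impl FnMut(Duration),
) -> Vec<Vec<String>> {
    results
        .into_iter()
        .map(|result| {
            result
                .inspect(|taken| on_ok(*taken))
                .map(|taken| vec!["ok".to_string(), format!("{taken:?}")])
                .unwrap_or_else(|error| vec!["error".to_string(), error])
        })
        .collect()
}

fn main() {
    let mut total = Duration::ZERO;
    let mut count = 0u64;
    let rows = rows_with_callback(
        [Ok(Duration::from_millis(3)), Err("no solution".to_string())],
        |taken| {
            total += taken;
            count += 1;
        },
    );
    assert_eq!((total, count), (Duration::from_millis(3), 1));
    assert_eq!(rows[1], vec!["error".to_string(), "no solution".to_string()]);
}
```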
@@ -1,2 +1,3 @@
pub const MAX_DAY: u32 = 25;
pub const MAX_TASK: u32 = 2;
pub const PLACEHOLDER_IN_BENCHMARK_REPORTS: &str = "-";
@@ -7,16 +7,23 @@ use crate::{
    AppResult,
};

pub fn solve_given(given_day: GivenDay, given_task: GivenTask, content: &str) -> AppResult<String> {
pub fn solve_given(
    given_day: GivenDay,
    given_task: GivenTask,
    content: &str,
) -> Result<String, NoSolutionFound> {
    let found_task = {
        let day: u32 = given_day.into();
        let task: u32 = given_task.into();
        let found_day = ALL_SOLUTIONS
            .get(day.saturating_sub(1) as usize)
            .ok_or_else(|| CouldNotSolveError::DayNotFound(day))?;
            .ok_or(NoSolutionFound::DayNotFound(given_day))?;
        found_day
            .get(task.saturating_sub(1) as usize)
            .ok_or_else(|| CouldNotSolveError::TaskNotFound { day, task })
            .ok_or(NoSolutionFound::TaskNotFound {
                day: given_day,
                task: given_task,
            })
    }?;

    let solved = (found_task)(content);

@@ -40,11 +47,17 @@ fn try_read_from_file_if_demanded(args: &CliSolutionToSolve) -> io::Result<Strin
}

#[derive(Debug, Error)]
enum CouldNotSolveError {
    #[error("There is no solution for the day {0}")]
    DayNotFound(u32),
    #[error("There is not solution for task {task} under the day {day}")]
    TaskNotFound { day: u32, task: u32 },
pub enum CouldNotSolveError {
    #[error("{0}")]
    NotFound(#[from] NoSolutionFound),
    #[error("Could not read puzzel input from the given file\n {0}")]
    CouldNotReadFromFile(#[from] io::Error),
}

#[derive(Debug, Error)]
pub enum NoSolutionFound {
    #[error("There is no solution for the day {0}")]
    DayNotFound(GivenDay),
    #[error("There is not solution for task {task} under the day {day}")]
    TaskNotFound { day: GivenDay, task: GivenTask },
}
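The error handling is split into two layers here: NoSolutionFound carries the typed day and task, while CouldNotSolveError absorbs it, or an I/O failure, through #[from], so the ? operator converts automatically. A standalone sketch of that layering, assuming the thiserror crate (already implied by the derive(Error) attributes in the diff) and using u32 stand-ins for the crate's GivenDay/GivenTask newtypes:

```rust
use std::io;
use thiserror::Error; // assumes thiserror as a dependency

// Mirror of the two-layer error split shown in the hunk above, with u32
// stand-ins instead of the crate's GivenDay/GivenTask newtypes.
#[derive(Debug, Error)]
pub enum NoSolutionFound {
    #[error("There is no solution for the day {0}")]
    DayNotFound(u32),
    #[error("There is no solution for task {task} under the day {day}")]
    TaskNotFound { day: u32, task: u32 },
}

#[derive(Debug, Error)]
pub enum CouldNotSolveError {
    #[error("{0}")]
    NotFound(#[from] NoSolutionFound),
    #[error("Could not read puzzle input from the given file\n {0}")]
    CouldNotReadFromFile(#[from] io::Error),
}

// Hypothetical solver that never finds a solution for the given day.
fn solve(day: u32) -> Result<String, NoSolutionFound> {
    Err(NoSolutionFound::DayNotFound(day))
}

fn run(day: u32, path: &str) -> Result<String, CouldNotSolveError> {
    let _input = std::fs::read_to_string(path)?; // io::Error -> CouldNotSolveError
    let answer = solve(day)?; // NoSolutionFound -> CouldNotSolveError
    Ok(answer)
}

fn main() {
    match run(26, "does-not-exist.txt") {
        Ok(answer) => println!("{answer}"),
        Err(error) => eprintln!("{error}"),
    }
}
```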
@@ -23,9 +23,7 @@ pub fn blocks_of_lines_seperated_by<'a>(
        None
    })
}
pub fn blocks_of_lines_seperated_by_empty_lines<'a>(
    input: &'a str,
) -> impl Iterator<Item = Vec<&'a str>> {
pub fn blocks_of_lines_seperated_by_empty_lines(input: &str) -> impl Iterator<Item = Vec<&str>> {
    blocks_of_lines_seperated_by(input, |line| line.trim().is_empty())
}