HUGE refactor and splitting up
This commit is contained in:
parent
726dfefadf
commit
3fbc0a2ff8
File diff suppressed because it is too large
Load Diff
|
@ -8,12 +8,12 @@ edition = "2021"
|
|||
|
||||
[dependencies]
|
||||
calamine = "0.18.0"
|
||||
chrono = { version = "0.4.22", features = ["wasmbind"] }
|
||||
chrono = { version = "0.4.23", features = ["wasmbind"] }
|
||||
console_error_panic_hook = "0.1.7"
|
||||
dioxus = { version = "0.2.4", features = ["desktop"] }
|
||||
iced = { version = "0.4", features = ["pure"]}
|
||||
fast-float = "0.2.0"
|
||||
palette = "0.6.1"
|
||||
plotly = { version = "0.8.1", features = ["wasm"] }
|
||||
serde = "1.0.144"
|
||||
serde_derive = "1.0.144"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
toml = "0.5.9"
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
shot_dir = "shots"
|
||||
brew_dir = "beanconqueror/brews"
|
||||
output_dir = "images"
|
||||
main_json = "beanconqueror/Beanconqueror.json"
|
||||
|
||||
width = 1600
|
||||
height = 900
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
use serde::Deserialize;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// One espresso-shot entry from config.toml (`[shots.N]`).
#[derive(Deserialize)]
pub struct Shot {
    /// Spreadsheet (.xlsx) filename inside `shot_dir`; used when `json` is absent.
    pub filename: Option<String>,
    /// Beanconqueror export basename inside `brew_dir`; the plotter appends
    /// "_flow_profile.json" to it.
    pub json: Option<String>,
    /// Trace label shown in the plot legend.
    pub title: String,
    /// Drop data points at or after this many seconds; -1.0 means "no cutoff".
    pub cutoff: Option<f64>,
    /// When true the shot is skipped while plotting.
    pub disable: Option<bool>,
}
|
||||
|
||||
/// One chart definition from config.toml (`[charts.N]`): which shots to draw
/// together and the intended axis bounds.
#[derive(Deserialize)]
pub struct Chart {
    /// Chart title; also used as the output HTML filename stem.
    pub title: String,
    /// Keys into `Config::shots` (shot numbers) to include in this chart.
    pub shots: Vec<u64>,

    // Axis bounds — NOTE(review): not yet referenced by the visible plotting code.
    pub max_time: u64,
    pub max_weight: u64,
    pub max_flow: u64,
}
|
||||
|
||||
/// Top-level deserialized form of config.toml.
#[derive(Deserialize)]
pub struct Config {
    /// Shot definitions, keyed by shot number (TOML table keys are strings).
    pub shots: HashMap<String, Shot>,
    /// Chart definitions, keyed by chart number.
    pub charts: HashMap<String, Chart>,

    /// Directory containing the .xlsx shot spreadsheets.
    pub shot_dir: String,
    /// Directory containing the "*_flow_profile.json" Beanconqueror exports.
    pub brew_dir: String,
    /// Directory the rendered HTML plots are written to.
    pub output_dir: String,
    /// Path to the Beanconqueror database export ("Beanconqueror.json").
    pub main_json: String,

    // Output image dimensions in pixels.
    pub width: u32,
    pub height: u32,
}
|
||||
|
||||
// Worksheet indices inside the shot .xlsx export.
pub const WGHT_SHEET: usize = 0;
pub const FLOW_SHEET: usize = 1;

// Column indices within those worksheets.
pub const TIME_COL: usize = 0;
pub const WGHT_COL: usize = 5;
pub const FLOW_COL: usize = 2;
|
||||
|
||||
/// Time-series extracted from a shot spreadsheet.
/// Each tuple is (seconds since the first sample, measured value).
pub struct Data {
    pub weight: Vec<(f64, f64)>,
    pub flowrate: Vec<(f64, f64)>,
}
|
|
@ -0,0 +1,87 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::fs;
|
||||
|
||||
/// Structs for handling the Beanconqueror database "Beanconqueror.json" export data
|
||||
|
||||
/// Root of the Beanconqueror database export ("Beanconqueror.json").
/// The JSON uses ALL-CAPS section names, hence the renames.
#[derive(Serialize, Deserialize, Debug)]
pub struct Database {
    #[serde(rename = "BEANS")]
    pub beans: Vec<Bean>,

    #[serde(rename = "BREWS")]
    pub brews: Vec<Brew>,

    #[serde(rename = "MILL")]
    pub mill: Vec<Mill>,

    #[serde(rename = "PREPARATION")]
    pub preparation: Vec<Preparation>,
}
|
||||
|
||||
/// A coffee-bean entry; identified by `config.uuid`.
#[derive(Serialize, Deserialize, Debug)]
pub struct Bean {
    pub name: String,
    pub config: Config,
}
|
||||
|
||||
/// A single brew entry.
#[derive(Serialize, Deserialize, Debug)]
pub struct Brew {
    /// UUID of the `Bean` this brew was made with (see `Database::brews_for_bean`).
    pub bean: String,
    pub config: Config,
    /// Relative path/name of the associated flow-profile export, if recorded.
    pub flow_profile: Option<String>,
}
|
||||
|
||||
/// A grinder (mill) entry.
#[derive(Serialize, Deserialize, Debug)]
pub struct Mill {
    pub name: String,
    pub config: Config,
}
|
||||
|
||||
/// A preparation-method entry (machine/method plus its tools).
#[derive(Serialize, Deserialize, Debug)]
pub struct Preparation {
    pub name: String,
    pub config: Config,
    pub tools: Tools,
}
|
||||
|
||||
/// Accessory/tool metadata attached to a `Preparation`.
#[derive(Serialize, Deserialize, Debug)]
pub struct Tools {
    pub name: String,
    pub config: Config,
}
|
||||
|
||||
/// Common per-entry metadata shared by all Beanconqueror records.
#[derive(Serialize, Deserialize, Debug)]
pub struct Config {
    /// Stable identifier used for cross-references between records.
    pub uuid: String,
    /// Creation time as a unix timestamp (units not shown here — presumably
    /// milliseconds; TODO confirm against the export).
    pub unix_timestamp: i64,
}
|
||||
|
||||
impl Database {
|
||||
pub fn from_file(path: &str) -> Database {
|
||||
let file = fs::File::open(path)
|
||||
.unwrap_or_else(|_| panic!("Cannot open file at path \"{}\"", path));
|
||||
let database: Database = serde_json::from_reader(file)
|
||||
.unwrap_or_else(|_| panic!("Cannot deserialize file at path \"{}\"", path));
|
||||
|
||||
database
|
||||
}
|
||||
|
||||
pub fn bean_with_uuid(&self, uuid: &str) -> Option<&Bean> {
|
||||
self.beans.iter().find(|bean| bean.config.uuid == uuid.to_string())
|
||||
}
|
||||
|
||||
pub fn brew_with_uuid(&self, uuid: &str) -> Option<&Brew> {
|
||||
self.brews.iter().find(|brew| brew.config.uuid == uuid.to_string())
|
||||
}
|
||||
|
||||
pub fn bean_for_brew(&self, brew: &Brew) -> Option<&Bean> {
|
||||
self.beans.iter().find(|bean| bean.config.uuid == brew.config.uuid)
|
||||
}
|
||||
|
||||
pub fn brews_for_bean(&self, bean: &Bean) -> Vec<&Brew> {
|
||||
self.brews.iter().filter(|brew| brew.bean == bean.config.uuid).collect()
|
||||
}
|
||||
|
||||
// TODO: Preparation, Tools, maybe transiton to HashMaps?
|
||||
}
|
|
@ -0,0 +1,163 @@
|
|||
use crate::time::*;
|
||||
|
||||
use chrono::NaiveTime;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::fs;
|
||||
|
||||
/// Structs for handling the Beanconqueror "*_flow_profile.json" export data
|
||||
|
||||
/// Deserialized form of a Beanconqueror "*_flow_profile.json" export,
/// plus runtime-only processing state (the `#[serde(skip)]` fields).
#[derive(Serialize, Deserialize, Debug)]
pub struct FlowProfile {
    pub weight: Vec<WeightData>,
    #[serde(rename = "waterFlow")]
    pub water_flow: Vec<FlowData>,
    #[serde(rename = "realtimeFlow")]
    pub realtime_flow: Vec<RtFlowData>,

    /// Optional time cutoff in seconds (-1.0 = disabled); set by `from_file`,
    /// consumed by `process_json`.
    #[serde(skip)]
    pub cutoff: Option<f64>,
    /// Processed (time, value) series; populated by the `preprocess_json*` methods.
    #[serde(skip)]
    pub data_collection: Option<DataCollection>,
}
|
||||
|
||||
/// One raw scale sample from the export's `weight` array.
#[derive(Serialize, Deserialize, Debug)]
pub struct WeightData {
    /// Wall-clock time string ("HH:MM:SS.mmm" — parsed with "%T%.3f").
    pub timestamp: String,
    pub brew_time: String,
    /// Only `actual_weight` is used by `process_json`; the rest are carried through.
    pub actual_weight: f64,
    pub old_weight: f64,
    pub actual_smoothed_weight: f64,
    pub old_smoothed_weight: f64,
}
|
||||
|
||||
/// One raw sample from the export's `waterFlow` array.
#[derive(Serialize, Deserialize, Debug)]
pub struct FlowData {
    pub brew_time: String,
    /// Wall-clock time string ("HH:MM:SS.mmm").
    pub timestamp: String,
    pub value: f64,
}
|
||||
|
||||
/// One raw sample from the export's `realtimeFlow` array.
#[derive(Serialize, Deserialize, Debug)]
pub struct RtFlowData {
    pub brew_time: String,
    /// Wall-clock time string ("HH:MM:SS.mmm").
    pub timestamp: String,
    pub smoothed_weight: f64,
    /// Only `flow_value` is used by `process_json`.
    pub flow_value: f64,
}
|
||||
|
||||
/// Plot-ready series derived from a `FlowProfile`.
/// Each tuple is (seconds since `reference_time`, value).
#[derive(Default, Debug)]
pub struct DataCollection {
    /// Earliest timestamp across the three raw series; the zero point of the x axis.
    pub reference_time: NaiveTime,
    pub weight: Vec<(f64, f64)>,
    pub flow: Vec<(f64, f64)>,
    pub rt_flow: Vec<(f64, f64)>,
}
|
||||
|
||||
impl FlowProfile {
|
||||
pub fn from_file(path: &str, cutoff: Option<f64>) -> FlowProfile {
|
||||
let file = fs::File::open(path)
|
||||
.unwrap_or_else(|_| panic!("Cannot open file at path \"{}\"", path));
|
||||
let mut brew: FlowProfile = serde_json::from_reader(file)
|
||||
.unwrap_or_else(|_| panic!("Cannot deserialize file at path \"{}\"", path));
|
||||
brew.cutoff = cutoff;
|
||||
|
||||
brew
|
||||
}
|
||||
|
||||
pub fn preprocess_json_mut(&mut self) {
|
||||
self.data_collection = self.process_json();
|
||||
}
|
||||
|
||||
pub fn preprocess_json(self) -> FlowProfile {
|
||||
let data_collection = self.process_json();
|
||||
FlowProfile {
|
||||
weight: self.weight,
|
||||
water_flow: self.water_flow,
|
||||
realtime_flow: self.realtime_flow,
|
||||
cutoff: None,
|
||||
data_collection,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn process_json(&self) -> Option<DataCollection> {
|
||||
let weight_starting_time: NaiveTime = str_to_naivetime(&(self.weight[0].timestamp));
|
||||
let flow_starting_time: NaiveTime = str_to_naivetime(&(self.water_flow[0].timestamp));
|
||||
let rt_flow_starting_time: NaiveTime = str_to_naivetime(&(self.realtime_flow[0].timestamp));
|
||||
|
||||
let reference_time: NaiveTime = weight_starting_time
|
||||
.min(flow_starting_time)
|
||||
.min(rt_flow_starting_time);
|
||||
|
||||
let mut weight_tuples: Vec<(f64, f64)> = self
|
||||
.weight
|
||||
.iter()
|
||||
.map(
|
||||
|WeightData {
|
||||
ref timestamp,
|
||||
ref actual_weight,
|
||||
..
|
||||
}| {
|
||||
let deltatime = deltatime(str_to_naivetime(timestamp.as_str()), reference_time);
|
||||
let std_duration = deltatime.to_std().unwrap();
|
||||
|
||||
(std_duration.as_secs_f64(), actual_weight.clone())
|
||||
},
|
||||
)
|
||||
.collect();
|
||||
|
||||
let mut flow_tuples: Vec<(f64, f64)> = self
|
||||
.water_flow
|
||||
.iter()
|
||||
.map(
|
||||
|FlowData {
|
||||
ref timestamp,
|
||||
ref value,
|
||||
..
|
||||
}| {
|
||||
let deltatime = deltatime(str_to_naivetime(timestamp.as_str()), reference_time);
|
||||
let std_duration = deltatime.to_std().unwrap();
|
||||
|
||||
(std_duration.as_secs_f64(), value.clone())
|
||||
},
|
||||
)
|
||||
.collect();
|
||||
|
||||
let mut rt_flow_tuples: Vec<(f64, f64)> = self
|
||||
.realtime_flow
|
||||
.iter()
|
||||
.map(
|
||||
|RtFlowData {
|
||||
ref timestamp,
|
||||
ref flow_value,
|
||||
..
|
||||
}| {
|
||||
let deltatime = deltatime(str_to_naivetime(timestamp.as_str()), reference_time);
|
||||
let std_duration = deltatime.to_std().unwrap();
|
||||
|
||||
(std_duration.as_secs_f64(), flow_value.clone())
|
||||
},
|
||||
)
|
||||
.collect();
|
||||
|
||||
if let Some(cutoff_val) = self.cutoff {
|
||||
if cutoff_val != -1.0 {
|
||||
weight_tuples.retain(|tuple| tuple.0 < cutoff_val);
|
||||
flow_tuples.retain(|tuple| tuple.0 < cutoff_val);
|
||||
rt_flow_tuples.retain(|tuple| tuple.0 < cutoff_val);
|
||||
}
|
||||
}
|
||||
|
||||
if !weight_tuples.is_empty() && !flow_tuples.is_empty() && !rt_flow_tuples.is_empty() {
|
||||
Some(DataCollection {
|
||||
reference_time,
|
||||
weight: weight_tuples,
|
||||
flow: flow_tuples,
|
||||
rt_flow: rt_flow_tuples,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
use iced::pure::widget::{Button, Column, Container, Text};
|
||||
use iced::pure::Sandbox;
|
||||
// use iced::Settings;
|
||||
|
||||
/// Minimal iced demo application state: a single counter value.
pub struct Counter {
    count: i32,
}
|
||||
|
||||
/// UI messages produced by the counter's buttons.
#[derive(Debug, Clone, Copy)]
pub enum CounterMessage {
    Increment,
    Decrement,
}
|
||||
|
||||
impl Sandbox for Counter {
|
||||
type Message = CounterMessage;
|
||||
|
||||
fn new() -> Self {
|
||||
Counter { count: 0 }
|
||||
}
|
||||
|
||||
fn title(&self) -> String {
|
||||
String::from("RustyBeans")
|
||||
}
|
||||
|
||||
fn update(&mut self, message: Self::Message) {
|
||||
match message {
|
||||
CounterMessage::Increment => self.count += 1,
|
||||
CounterMessage::Decrement => self.count -= 1,
|
||||
}
|
||||
}
|
||||
|
||||
fn view(&self) -> iced::pure::Element<'_, Self::Message> {
|
||||
let label = Text::new(format!("Count: {}", self.count));
|
||||
|
||||
let incr = Button::new("Increment").on_press(CounterMessage::Increment);
|
||||
let decr = Button::new("Decrement").on_press(CounterMessage::Decrement);
|
||||
|
||||
let col = Column::new().push(incr).push(label).push(decr);
|
||||
|
||||
Container::new(col)
|
||||
.center_x()
|
||||
.center_y()
|
||||
.width(iced::Length::Fill)
|
||||
.height(iced::Length::Fill)
|
||||
.into()
|
||||
}
|
||||
}
|
221
src/main.rs
221
src/main.rs
|
@ -1,203 +1,32 @@
|
|||
mod database;
|
||||
mod config;
|
||||
mod flow_profile;
|
||||
mod iced;
|
||||
mod plot;
|
||||
mod sheets;
|
||||
mod time;
|
||||
|
||||
use crate::flow_profile::FlowProfile;
|
||||
use crate::plot::generate_plots;
|
||||
|
||||
extern crate console_error_panic_hook;
|
||||
use std::panic;
|
||||
|
||||
use calamine::{open_workbook, Reader, Xlsx};
|
||||
use chrono::{Duration, NaiveTime};
|
||||
use serde_derive::Deserialize;
|
||||
|
||||
use plotly::{
|
||||
common::{Mode, Title},
|
||||
Layout, Plot, Scatter,
|
||||
};
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
|
||||
use dioxus::prelude::*;
|
||||
|
||||
// Pre-refactor version of `Shot` (superseded by src/config.rs in this commit;
// `filename` was still mandatory and there was no `json` field).
#[derive(Deserialize)]
struct Shot {
    filename: String,
    title: String,
    cutoff: Option<f64>,
    disable: Option<bool>,
}
|
||||
|
||||
// Pre-refactor version of `Chart` (superseded by src/config.rs in this commit).
#[derive(Deserialize)]
struct Chart {
    title: String,
    shots: Vec<u64>,

    max_time: u64,
    max_weight: u64,
    max_flow: u64,
}
|
||||
|
||||
// Pre-refactor version of `Config` (superseded by src/config.rs in this commit;
// lacked `brew_dir` and `main_json`).
#[derive(Deserialize)]
struct Config {
    shots: HashMap<String, Shot>,
    charts: HashMap<String, Chart>,

    shot_dir: String,
    output_dir: String,

    width: u32,
    height: u32,
}
|
||||
|
||||
// Pre-refactor copies of the worksheet/column indices (now in src/config.rs).
const WGHT_SHEET: usize = 0;
const FLOW_SHEET: usize = 1;

const TIME_COL: usize = 0;
const WGHT_COL: usize = 5;
const FLOW_COL: usize = 2;
|
||||
|
||||
// Pre-refactor version of `Data` (superseded by src/config.rs in this commit).
// Each tuple is (seconds since the first sample, measured value).
struct Data {
    weight: Vec<(f64, f64)>,
    flowrate: Vec<(f64, f64)>,
}
|
||||
|
||||
// Pre-refactor copy (now in src/time.rs).
// Parses an "HH:MM:SS.mmm" string ("%T%.3f"); panics on malformed input.
fn str_to_naivetime(unix_str: &str) -> NaiveTime {
    NaiveTime::parse_from_str(unix_str, "%T%.3f").expect("Couldn't parse timestamp")
}
|
||||
|
||||
// Pre-refactor copy (now in src/time.rs).
// Unwraps an optional cell string and parses it; panics if absent or malformed.
fn cell_to_naivetime(cell: Option<&str>) -> NaiveTime {
    str_to_naivetime(cell.expect("Timestamp is not a string!"))
}
|
||||
|
||||
// Pre-refactor copy (now in src/time.rs).
// Signed duration elapsed from `start` to `time`.
fn deltatime(time: NaiveTime, start: NaiveTime) -> Duration {
    time - start
}
|
||||
|
||||
// Pre-refactor copy (now in src/sheets.rs).
// Reads one worksheet of a shot .xlsx and returns (seconds-since-first-row, value)
// tuples, pairing `time_col` against `data_col`. Row 0 is assumed to be a header.
fn process_sheet(
    path: &str,
    worksheet: usize,
    time_col: usize,
    data_col: usize,
) -> Vec<(f64, f64)> {
    let mut workbook: Xlsx<_> =
        open_workbook(path).unwrap_or_else(|_| panic!("Cannot open file at path \"{}\"", path));

    if let Some(Ok(range)) = workbook.worksheet_range_at(worksheet) {
        // First data row supplies the zero point of the time axis.
        let starting_time: NaiveTime = cell_to_naivetime(range[(1, time_col)].get_string());

        let time_range = range.range(
            (1, time_col as u32),
            (range.height() as u32 - 1, time_col as u32),
        );
        let weight_range = range.range(
            (1, data_col as u32),
            (range.height() as u32 - 1, data_col as u32),
        );

        // println!("time column cells: {:?}", time_range.cells().next());
        // println!("time column strings: {:?}", time_range.cells().map(|c| c.2.get_string().unwrap()).collect::<Vec<&str>>());

        let map_time_range = time_range.cells().map(|c| {
            let timestamp = cell_to_naivetime(c.2.get_string());
            let deltatime = deltatime(timestamp, starting_time);
            let std_duration = deltatime.to_std().unwrap();

            // NOTE(review): truncates to f32 before widening — loses precision;
            // fixed to as_secs_f64() in the split-out src/sheets.rs version.
            std_duration.as_secs_f32() as f64
        });

        let map_weight_range = weight_range.cells().map(|c| {
            c.2.get_float().unwrap_or_else(|| {
                panic!(
                    "Can't get float value of weight column at position ({},{})",
                    c.0, c.1
                )
            })
        });

        map_time_range.zip(map_weight_range).collect()
    } else {
        // Missing/unreadable worksheet yields an empty series.
        vec![]
    }
}
|
||||
|
||||
// Pre-refactor copy (now in src/sheets.rs).
// Loads weight and flow-rate series from the spreadsheet at `path`, applies the
// optional cutoff (-1.0 = disabled), returns None when either series is empty.
fn load_data(path: &str, cutoff: Option<f64>) -> Option<Data> {
    let mut w = process_sheet(path, WGHT_SHEET, TIME_COL, WGHT_COL);
    let mut fr = process_sheet(path, FLOW_SHEET, TIME_COL, FLOW_COL);

    if let Some(cutoff_val) = cutoff {
        if cutoff_val != -1.0 {
            w.retain(|x| x.0 < cutoff_val);
            fr.retain(|x| x.0 < cutoff_val);
        }
    }

    if !w.is_empty() && !fr.is_empty() {
        let data = Data {
            weight: w,
            flowrate: fr,
        };
        Some(data)
    } else {
        None
    }
}
|
||||
|
||||
// Pre-refactor copy (now in src/plot.rs).
// Builds one Plotly chart per config.toml [charts.*] entry from spreadsheet
// data only, writes each to <output_dir>/<title>.html, returns (title, plot) pairs.
pub fn generate_plots() -> Vec<(String, Plot)> {
    let config_file = fs::read_to_string("config.toml").expect("Can't read config.toml");
    let config: Config = toml::from_str(&config_file).expect("Can't deserialize config.toml");

    let mut result: Vec<(String, Plot)> = Vec::with_capacity(config.charts.len());

    // `chart` is a (key, Chart) pair from the HashMap, hence the `.1` accesses.
    for chart in config.charts {
        // println!("Chart: {}\n", chart.1.title);

        let filename = format!("{}/{}.html", config.output_dir, &chart.1.title);
        let mut plot = Plot::new();

        let _shot_count = chart.1.shots.len();

        for shot_nr in chart.1.shots {
            if let Some(shot) = config.shots.get(&shot_nr.to_string()) {
                // println!("\tShot: {}n", shot.title);

                if let Some(data) = load_data(
                    &format!("{}/{}", config.shot_dir, shot.filename),
                    shot.cutoff,
                ) {
                    // NOTE(review): the disable flag is only honored *after*
                    // the data was already loaded — wasted work for disabled shots.
                    if let Some(disable) = shot.disable {
                        if disable {
                            continue;
                        }
                    }

                    let (x, y): (Vec<_>, Vec<_>) = data.weight.into_iter().unzip();
                    let trace = Scatter::new(x, y).name(&shot.title).mode(Mode::Lines);
                    plot.add_trace(trace);
                }
            }
        }

        let layout = Layout::new().title(Title::new(&chart.1.title));
        plot.set_layout(layout);

        plot.use_local_plotly();
        plot.write_html(filename);
        result.push((chart.1.title, plot));
    }

    result
}
|
||||
|
||||
//fn main() -> Result<(), iced::Error> {
|
||||
fn main() {
    // Route panic messages to the browser console (useful on wasm targets).
    panic::set_hook(Box::new(console_error_panic_hook::hook));
    // NOTE(review): a desktop launch() presumably blocks until the window
    // closes, so generate_plots() would only run after the UI exits — confirm
    // that ordering is intended.
    dioxus::desktop::launch(app);
    generate_plots();
}
|
||||
|
||||
// Dioxus root component rendering the placeholder UI.
fn app(cx: Scope) -> Element {
    cx.render(rsx! (
        section { class: "rustybeans",
            style { [include_str!("style.css")] }
            div { class: "container row",
                div { "Hello, world!" }
                button { "Click me!" }
            }
        }
    ))
    // NOTE(review): everything below is ill-formed — statements cannot follow
    // the `cx.render(...)` tail expression. This looks like diff residue that
    // merged the old main() body into app(); it will not compile as-is and
    // should be removed or moved back into main().
    let mut brew = FlowProfile::from_file("brews/test.json", None);

    println!("Brew: {:?}\n", &brew);
    println!("process_json(): {:?}\n", &brew.process_json());
    brew.preprocess_json_mut();
    println!("preprocess_json_mut(): {:?}\n", &brew);

    brew = brew.preprocess_json();
    println!("preprocess_json: {:?}\n", &brew);

    generate_plots();

    // Counter::run(Settings::default())
}
|
||||
|
|
|
@ -0,0 +1,83 @@
|
|||
use crate::config::Config;
|
||||
use crate::database::{Database};
|
||||
use crate::flow_profile::FlowProfile;
|
||||
use crate::sheets::load_data;
|
||||
|
||||
use plotly::{
|
||||
common::{Mode, Title},
|
||||
Layout, Plot, Scatter,
|
||||
};
|
||||
|
||||
use std::fs;
|
||||
|
||||
pub fn generate_plots() -> Vec<(String, Plot)> {
|
||||
let config_file = fs::read_to_string("config.toml").expect("Can't read config.toml");
|
||||
let config: Config = toml::from_str(&config_file).expect("Can't deserialize config.toml");
|
||||
|
||||
let mut result: Vec<(String, Plot)> = Vec::with_capacity(config.charts.len());
|
||||
|
||||
for chart in config.charts {
|
||||
// println!("Chart: {}\n", chart.1.title);
|
||||
|
||||
let filename = format!("{}/{}.html", config.output_dir, &chart.1.title);
|
||||
let mut plot = Plot::new();
|
||||
|
||||
let _shot_count = chart.1.shots.len();
|
||||
|
||||
for shot_nr in chart.1.shots {
|
||||
if let Some(shot) = config.shots.get(&shot_nr.to_string()) {
|
||||
// println!("\tShot: {}n", shot.title);
|
||||
|
||||
if let Some(shot_json) = &shot.json {
|
||||
let brew = FlowProfile::from_file(
|
||||
&format!("{}/{}_flow_profile.json", config.brew_dir, shot_json),
|
||||
shot.cutoff,
|
||||
)
|
||||
.preprocess_json();
|
||||
|
||||
let (x, y): (Vec<_>, Vec<_>) = brew
|
||||
.data_collection
|
||||
.unwrap_or_else(|| {
|
||||
panic!("No data_collection present for shot_json: {}", shot_json)
|
||||
})
|
||||
.weight
|
||||
.iter()
|
||||
.cloned()
|
||||
.unzip();
|
||||
let trace = Scatter::new(x, y).name(&shot.title).mode(Mode::Lines);
|
||||
plot.add_trace(trace);
|
||||
} else if let Some(shot_filename) = &shot.filename {
|
||||
if let Some(data) = load_data(
|
||||
&format!("{}/{}", config.shot_dir, shot_filename),
|
||||
shot.cutoff,
|
||||
) {
|
||||
if let Some(disable) = shot.disable {
|
||||
if disable {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let (x, y): (Vec<_>, Vec<_>) = data.weight.into_iter().unzip();
|
||||
let trace = Scatter::new(x, y).name(&shot.title).mode(Mode::Lines);
|
||||
plot.add_trace(trace);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let layout = Layout::new().title(Title::new(&chart.1.title));
|
||||
plot.set_layout(layout);
|
||||
|
||||
plot.use_local_plotly();
|
||||
plot.write_html(filename);
|
||||
result.push((chart.1.title, plot));
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Stub: intended to derive charts from the Beanconqueror database export.
/// Currently always returns an empty list.
pub fn database_plots() -> Vec<(String, Plot)> {
    vec![]
}
|
|
@ -0,0 +1,75 @@
|
|||
use crate::config::{Data, FLOW_COL, FLOW_SHEET, TIME_COL, WGHT_COL, WGHT_SHEET};
|
||||
use crate::time::{cell_to_naivetime, deltatime};
|
||||
|
||||
use calamine::{open_workbook, Reader, Xlsx};
|
||||
|
||||
use chrono::NaiveTime;
|
||||
|
||||
pub fn process_sheet(
|
||||
path: &str,
|
||||
worksheet: usize,
|
||||
time_col: usize,
|
||||
data_col: usize,
|
||||
) -> Vec<(f64, f64)> {
|
||||
let mut workbook: Xlsx<_> =
|
||||
open_workbook(path).unwrap_or_else(|_| panic!("Cannot open file at path \"{}\"", path));
|
||||
|
||||
if let Some(Ok(range)) = workbook.worksheet_range_at(worksheet) {
|
||||
let starting_time: NaiveTime = cell_to_naivetime(range[(1, time_col)].get_string());
|
||||
|
||||
let time_range = range.range(
|
||||
(1, time_col as u32),
|
||||
(range.height() as u32 - 1, time_col as u32),
|
||||
);
|
||||
let weight_range = range.range(
|
||||
(1, data_col as u32),
|
||||
(range.height() as u32 - 1, data_col as u32),
|
||||
);
|
||||
|
||||
// println!("time column cells: {:?}", time_range.cells().next());
|
||||
// println!("time column strings: {:?}", time_range.cells().map(|c| c.2.get_string().unwrap()).collect::<Vec<&str>>());
|
||||
|
||||
let map_time_range = time_range.cells().map(|c| {
|
||||
let timestamp = cell_to_naivetime(c.2.get_string());
|
||||
let deltatime = deltatime(timestamp, starting_time);
|
||||
let std_duration = deltatime.to_std().unwrap();
|
||||
|
||||
std_duration.as_secs_f32() as f64
|
||||
});
|
||||
|
||||
let map_weight_range = weight_range.cells().map(|c| {
|
||||
c.2.get_float().unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Can't get float value of weight column at position ({},{})",
|
||||
c.0, c.1
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
map_time_range.zip(map_weight_range).collect()
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_data(path: &str, cutoff: Option<f64>) -> Option<Data> {
|
||||
let mut w = process_sheet(path, WGHT_SHEET, TIME_COL, WGHT_COL);
|
||||
let mut fr = process_sheet(path, FLOW_SHEET, TIME_COL, FLOW_COL);
|
||||
|
||||
if let Some(cutoff_val) = cutoff {
|
||||
if cutoff_val != -1.0 {
|
||||
w.retain(|x| x.0 < cutoff_val);
|
||||
fr.retain(|x| x.0 < cutoff_val);
|
||||
}
|
||||
}
|
||||
|
||||
if !w.is_empty() && !fr.is_empty() {
|
||||
let data = Data {
|
||||
weight: w,
|
||||
flowrate: fr,
|
||||
};
|
||||
Some(data)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
|
@ -0,0 +1,25 @@
|
|||
use chrono::{Duration, NaiveDateTime, NaiveTime};
|
||||
|
||||
pub fn str_to_naivetime(unix_str: &str) -> NaiveTime {
|
||||
NaiveTime::parse_from_str(unix_str, "%T%.3f").expect("Couldn't parse timestamp")
|
||||
}
|
||||
|
||||
pub fn cell_to_naivetime(cell: Option<&str>) -> NaiveTime {
|
||||
str_to_naivetime(cell.expect("Timestamp is not a string!"))
|
||||
}
|
||||
|
||||
/// Signed duration elapsed from `start` to `time` (negative when `time < start`).
pub fn deltatime(time: NaiveTime, start: NaiveTime) -> Duration {
    // Same operation the `Sub` impl performs, spelled explicitly.
    time.signed_duration_since(start)
}
|
||||
|
||||
pub fn unix_to_naivetime(unix_timestamp: i64) -> Option<NaiveTime> {
|
||||
if let Some(date_time) = NaiveDateTime::from_timestamp_millis(unix_timestamp) {
|
||||
Some(date_time.time())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// NOTE(review): broken as written — `NaiveTime` carries no calendar date, so
/// formatting it with the date specifiers "%Y:%m:%d" cannot succeed; chrono's
/// `DelayedFormat` errors and `.to_string()` panics at runtime. A meaningful
/// "same day" check needs `NaiveDateTime`/`NaiveDate` arguments; flagging here
/// rather than changing the public signature.
pub fn is_same_day(time_1: NaiveTime, time_2: NaiveTime) -> bool {
    time_1.format("%Y:%m:%d").to_string() == time_2.format("%Y:%m:%d").to_string()
}
|
Loading…
Reference in New Issue