2021-08-09 10:13:51 -05:00
|
|
|
use std::convert::TryFrom;
|
2021-08-25 14:30:27 -05:00
|
|
|
use std::io::{BufRead, Write};
|
2021-08-09 10:13:51 -05:00
|
|
|
|
|
|
|
use clap::ArgMatches;
|
2022-05-06 08:48:00 -05:00
|
|
|
use chrono::{DateTime, Utc, Duration};
|
2021-08-09 10:13:51 -05:00
|
|
|
use regex::Regex;
|
|
|
|
|
2021-08-11 20:25:32 -05:00
|
|
|
use crate::database::Database;
|
2021-08-09 10:13:51 -05:00
|
|
|
use crate::error::{Error, Result};
|
2021-08-25 14:30:27 -05:00
|
|
|
use crate::commands::{Command, Facts};
|
2022-07-30 09:46:22 -05:00
|
|
|
use crate::timeparse::{parse_time, parse_hours};
|
2021-08-11 20:25:32 -05:00
|
|
|
use crate::old::{entries_or_warning, time_or_warning};
|
2021-08-09 10:13:51 -05:00
|
|
|
use crate::formatters::text;
|
|
|
|
use crate::regex::parse_regex;
|
2021-08-11 20:25:32 -05:00
|
|
|
use crate::interactive::ask;
|
2021-08-25 14:30:27 -05:00
|
|
|
use crate::io::Streams;
|
2022-07-30 09:46:22 -05:00
|
|
|
use crate::models::Entry;
|
2021-08-09 10:13:51 -05:00
|
|
|
|
|
|
|
/// Command-line arguments accepted by the archive command.
#[derive(Default)]
pub struct Args {
    // Only consider entries that started at or after this time.
    start: Option<DateTime<Utc>>,
    // Only consider entries that started at or before this time.
    end: Option<DateTime<Utc>>,
    // Only consider entries whose note matches this pattern.
    grep: Option<Regex>,
    // Archive only the oldest entries adding up to this many hours
    // (set from the `time` CLI flag; see the TryFrom impl below).
    hours: Option<u16>,
    // When set, only print what would be archived without touching anything.
    fake: bool,
    // Sheet to archive from; falls back to the current sheet when `None`.
    sheet: Option<String>,
}
|
|
|
|
|
2023-02-13 18:17:47 -06:00
|
|
|
impl<'a> TryFrom<&'a ArgMatches> for Args {
    type Error = Error;

    /// Build `Args` from the clap matches. Any time, regex or hours value
    /// that fails to parse is propagated as an `Error` via `?`.
    fn try_from(matches: &'a ArgMatches) -> Result<Self> {
        Ok(Args {
            start: matches.value_of("start").map(parse_time).transpose()?,
            end: matches.value_of("end").map(parse_time).transpose()?,
            grep: matches.value_of("grep").map(parse_regex).transpose()?,
            // NOTE: the CLI argument is named "time" even though the field
            // is `hours` — keep them in sync with the clap definition.
            hours: matches.value_of("time").map(parse_hours).transpose()?,
            fake: matches.is_present("fake"),
            sheet: matches.value_of("sheet").map(|s| s.to_owned()),
        })
    }
}
|
|
|
|
|
2022-07-30 09:46:22 -05:00
|
|
|
/// Modify the given Entry such that it only lasts the given `time`, and return
|
|
|
|
/// the data needed to create a new entry with mostly the same attributes such
|
|
|
|
/// that it accounts for the time substracted from the original.
|
|
|
|
fn split_entry(entry: &mut Entry, time: Duration) -> (DateTime<Utc>, Option<DateTime<Utc>>, Option<String>, String) {
|
|
|
|
let Entry {
|
|
|
|
id: _, note, start, end, sheet,
|
|
|
|
} = entry.clone();
|
|
|
|
|
|
|
|
let old_entry_end = start + time;
|
|
|
|
let new_entry_start = old_entry_end;
|
|
|
|
|
|
|
|
entry.end = Some(old_entry_end);
|
|
|
|
|
|
|
|
(new_entry_start, end, note, sheet)
|
|
|
|
}
|
|
|
|
|
2021-08-09 10:13:51 -05:00
|
|
|
/// The archive command: moves finished entries out of a sheet by renaming
/// their sheet to `_<sheet>` (see `handle` below).
pub struct ArchiveCommand {}
|
|
|
|
|
|
|
|
impl<'a> Command<'a> for ArchiveCommand {
|
|
|
|
type Args = Args;
|
|
|
|
|
2021-08-25 14:30:27 -05:00
|
|
|
fn handle<D, I, O, E>(args: Args, streams: &mut Streams<D, I, O, E>, facts: &Facts) -> Result<()>
|
2021-08-09 10:13:51 -05:00
|
|
|
where
|
|
|
|
D: Database,
|
2021-08-25 14:30:27 -05:00
|
|
|
I: BufRead,
|
2021-08-09 10:13:51 -05:00
|
|
|
O: Write,
|
|
|
|
E: Write,
|
|
|
|
{
|
2022-07-30 09:46:22 -05:00
|
|
|
// Get all entries from the database that match the filter criteria
|
|
|
|
// given from the command line: start time, end time and sheet.
|
|
|
|
let entries = {
|
|
|
|
let started_after = args.start.map(|s| time_or_warning(s, &streams.db)).transpose()?.map(|s| s.0);
|
|
|
|
let started_before = args.end.map(|e| time_or_warning(e, &streams.db)).transpose()?.map(|e| e.0);
|
|
|
|
let sheet = args.sheet.map(Ok).unwrap_or_else(|| streams.db.current_sheet())?;
|
2021-08-11 20:25:32 -05:00
|
|
|
|
2022-07-30 09:46:22 -05:00
|
|
|
let mut entries = streams.db.entries_by_sheet(&sheet, started_after, started_before)?;
|
2021-08-11 20:25:32 -05:00
|
|
|
|
2022-07-30 09:46:22 -05:00
|
|
|
// only archive those entries that are finished.
|
|
|
|
entries.retain(|e| e.end.is_some());
|
2021-08-11 20:25:32 -05:00
|
|
|
|
2022-07-30 09:46:22 -05:00
|
|
|
if let Some(re) = args.grep {
|
|
|
|
entries.retain(|e| re.is_match(&e.note.clone().unwrap_or_default()));
|
|
|
|
}
|
|
|
|
|
|
|
|
entries
|
|
|
|
};
|
|
|
|
|
|
|
|
// If the user requested to archive entries by a total time then not all
|
|
|
|
// entries will be archived, and instead just those oldest ones that
|
|
|
|
// accumulate the given time will be. If the total time of the filtered
|
|
|
|
// entries is more than the requested time the last one will be split
|
|
|
|
// into two pieces.
|
|
|
|
let (time, entries, new, extra_msg) = if let Some(hours) = args.hours {
|
|
|
|
let requested_time = Duration::hours(hours as i64);
|
2022-05-06 08:48:00 -05:00
|
|
|
// archive the maximum amount of consecutive entries whose
|
|
|
|
// accumulated time is not bigger that `time`.
|
2022-07-30 09:46:22 -05:00
|
|
|
let mut selected_entries = Vec::with_capacity(entries.len());
|
|
|
|
let mut accumulated_time = Duration::seconds(0);
|
|
|
|
let mut new = None;
|
|
|
|
|
|
|
|
for entry in entries {
|
|
|
|
// Can unwrap because only entries with an end time get this far
|
|
|
|
let timespan = entry.timespan().unwrap();
|
|
|
|
|
|
|
|
if accumulated_time < requested_time {
|
|
|
|
if accumulated_time + timespan > requested_time {
|
|
|
|
// should split the last entry
|
|
|
|
let missing_time = requested_time - accumulated_time;
|
|
|
|
let mut entry = entry;
|
|
|
|
let parts = split_entry(&mut entry, missing_time);
|
|
|
|
new.replace(parts);
|
|
|
|
selected_entries.push(entry);
|
|
|
|
accumulated_time = accumulated_time + missing_time;
|
|
|
|
} else {
|
|
|
|
// fits perfectly, just add it
|
|
|
|
selected_entries.push(entry);
|
|
|
|
accumulated_time = accumulated_time + timespan;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// accumulated_time is equal or higher than requested_time,
|
|
|
|
// no more entries are admitted
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-01 08:58:56 -05:00
|
|
|
let msg = if new.is_some() {
|
|
|
|
String::from("\nAdditionally an entry will be split so that the total archived time is exact.")
|
|
|
|
} else if accumulated_time < requested_time {
|
|
|
|
let requested_time_str = text::format_hours(requested_time);
|
|
|
|
let missing_time_str = text::format_hours(requested_time - accumulated_time);
|
|
|
|
format!("\nThere were not enough entries to fulfill the requested time of {requested_time_str} (difference: {missing_time_str}).")
|
|
|
|
} else {
|
|
|
|
String::new()
|
|
|
|
};
|
|
|
|
|
|
|
|
(text::format_hours(accumulated_time), selected_entries, new, msg)
|
2022-07-30 09:46:22 -05:00
|
|
|
} else {
|
|
|
|
(text::format_hours(
|
|
|
|
entries
|
|
|
|
.iter()
|
|
|
|
.filter_map(|e| e.end.map(|end| end - e.start))
|
|
|
|
.fold(Duration::seconds(0), |acc, new| {
|
|
|
|
acc + new
|
|
|
|
})
|
|
|
|
), entries, None, String::from(""))
|
|
|
|
};
|
|
|
|
|
|
|
|
let n = entries.len();
|
2022-08-01 08:58:56 -05:00
|
|
|
let n_entries = if n == 1 {
|
|
|
|
String::from("1 entry")
|
|
|
|
} else {
|
|
|
|
format!("{n} entries")
|
|
|
|
};
|
2022-05-06 08:48:00 -05:00
|
|
|
|
2021-08-11 20:25:32 -05:00
|
|
|
if args.fake {
|
2021-08-25 14:30:27 -05:00
|
|
|
let (entries, _) = entries_or_warning(entries, &streams.db)?;
|
2021-08-11 20:25:32 -05:00
|
|
|
|
2022-08-01 08:58:56 -05:00
|
|
|
writeln!(streams.out, "These entries would be archived:\n")?;
|
|
|
|
|
2021-08-11 20:25:32 -05:00
|
|
|
text::print_formatted(
|
|
|
|
entries,
|
2021-08-25 14:30:27 -05:00
|
|
|
&mut streams.out,
|
2021-08-25 14:43:50 -05:00
|
|
|
facts,
|
2021-08-11 20:25:32 -05:00
|
|
|
true,
|
|
|
|
)?;
|
2022-08-01 08:58:56 -05:00
|
|
|
} else if ask(streams, &format!("A total of {n_entries} accounting for {time} will be archived.{extra_msg}\nProceed?"))? {
|
2021-08-11 20:35:24 -05:00
|
|
|
for entry in entries {
|
2021-08-25 14:30:27 -05:00
|
|
|
streams.db.entry_update(entry.id, entry.start, entry.end, entry.note, &format!("_{}", entry.sheet))?;
|
2021-08-11 20:25:32 -05:00
|
|
|
}
|
2022-07-30 09:46:22 -05:00
|
|
|
|
|
|
|
if let Some((start, end, note, sheet)) = new {
|
|
|
|
streams.db.entry_insert(start, end, note, &sheet)?;
|
|
|
|
}
|
2021-08-11 20:35:24 -05:00
|
|
|
} else {
|
2021-08-25 14:30:27 -05:00
|
|
|
writeln!(streams.out, "Ok, they're still there")?;
|
2021-08-11 20:25:32 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
2021-08-09 10:13:51 -05:00
|
|
|
}
|
|
|
|
}
|
2022-07-25 00:09:04 -05:00
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    use pretty_assertions::{assert_eq, assert_str_eq};
    use chrono::TimeZone;

    use crate::models::Entry;

    use super::*;

    // Default args: all finished entries of the current sheet get archived.
    #[test]
    fn archive_archives() {
        let args: Args = Default::default();
        let mut streams = Streams::fake(b"y\n");
        let facts = Facts::new();

        streams.db.set_current_sheet("foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::hours(3), Some(facts.now - Duration::minutes(90)), Some("first".into()), "foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::minutes(90), Some(facts.now), Some("second".into()), "foo").unwrap();

        ArchiveCommand::handle(args, &mut streams, &facts).unwrap();

        let remaining = streams.db.entries_by_sheet("foo", None, None).unwrap();
        let archived = streams.db.entries_by_sheet("_foo", None, None).unwrap();

        assert_eq!(String::from_utf8_lossy(&streams.out), "A total of 2 entries accounting for 3h will be archived.\nProceed? [y/N] ");

        // Both finished entries are moved wholesale to the `_foo` sheet;
        // nothing is left behind.
        assert_eq!(archived, vec![
            Entry {
                id: 1,
                note: Some("first".into()),
                start: facts.now - Duration::hours(3),
                end: Some(facts.now - Duration::minutes(90)),
                sheet: "_foo".into(),
            },
            Entry {
                id: 2,
                note: Some("second".into()),
                start: facts.now - Duration::minutes(90),
                end: Some(facts.now),
                sheet: "_foo".into(),
            },
        ]);
        assert_eq!(remaining, vec![]);
    }

    #[test]
    fn no_running_entry_is_archived() {
        let args: Args = Default::default();
        let mut streams = Streams::fake(b"y\n");
        let facts = Facts::new();

        streams.db.set_current_sheet("foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::hours(3), Some(facts.now - Duration::minutes(90)), Some("first".into()), "foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::minutes(90), None, Some("running".into()), "foo").unwrap();

        ArchiveCommand::handle(args, &mut streams, &facts).unwrap();

        let remaining = streams.db.entries_by_sheet("foo", None, None).unwrap();
        let archived = streams.db.entries_by_sheet("_foo", None, None).unwrap();

        assert_eq!(String::from_utf8_lossy(&streams.out), "A total of 1 entry accounting for 1h 30m will be archived.\nProceed? [y/N] ");

        // Only the finished entry is archived; the entry that is still
        // running (no end time) stays untouched on the original sheet.
        assert_eq!(archived, vec![
            Entry {
                id: 1,
                note: Some("first".into()),
                start: facts.now - Duration::hours(3),
                end: Some(facts.now - Duration::minutes(90)),
                sheet: "_foo".into(),
            },
        ]);
        assert_eq!(remaining, vec![
            Entry {
                id: 2,
                note: Some("running".into()),
                start: facts.now - Duration::minutes(90),
                end: None,
                sheet: "foo".into(),
            },
        ]);
    }

    // Unit test for the split helper: the original entry is truncated to the
    // given duration and the returned tuple describes the remainder.
    #[test]
    fn entries_are_split_properly() {
        let mut old_entry = Entry {
            id: 1,
            start: Utc.with_ymd_and_hms(2022, 7, 29, 10, 0, 0).unwrap(),
            end: Some(Utc.with_ymd_and_hms(2022, 7, 29, 11, 0, 0).unwrap()),
            note: Some("an entry".to_string()),
            sheet: "foo".to_string(),
        };

        assert_eq!(split_entry(&mut old_entry, Duration::minutes(25)), (
            Utc.with_ymd_and_hms(2022, 7, 29, 10, 25, 0).unwrap(),
            Some(Utc.with_ymd_and_hms(2022, 7, 29, 11, 0, 0).unwrap()),
            Some("an entry".to_string()),
            "foo".to_string(),
        ));
        assert_eq!(old_entry, Entry {
            id: 1,
            start: Utc.with_ymd_and_hms(2022, 7, 29, 10, 0, 0).unwrap(),
            end: Some(Utc.with_ymd_and_hms(2022, 7, 29, 10, 25, 0).unwrap()),
            note: Some("an entry".to_string()),
            sheet: "foo".to_string(),
        });
    }

    #[test]
    fn archive_by_hours() {
        let args = Args {
            hours: Some(2),
            ..Default::default()
        };
        let mut streams = Streams::fake(b"y\n");
        let facts = Facts::new();

        streams.db.set_current_sheet("foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::hours(3), Some(facts.now - Duration::minutes(90)), Some("first".into()), "foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::minutes(90), Some(facts.now), Some("second".into()), "foo").unwrap();

        ArchiveCommand::handle(args, &mut streams, &facts).unwrap();

        let remaining = streams.db.entries_by_sheet("foo", None, None).unwrap();
        let archived = streams.db.entries_by_sheet("_foo", None, None).unwrap();

        assert_str_eq!(String::from_utf8_lossy(&streams.out), "A total of 2 entries accounting for 2h will be archived.
Additionally an entry will be split so that the total archived time is exact.
Proceed? [y/N] ");

        // First entry gets archived whole, second entry gets split in two:
        // - a 30 minute piece to complete the requested 2 hour span
        // - a 1 hour piece that remains unarchived
        assert_eq!(archived, vec![
            Entry {
                id: 1,
                note: Some("first".into()),
                start: facts.now - Duration::hours(3),
                end: Some(facts.now - Duration::minutes(90)),
                sheet: "_foo".into(),
            },
            Entry {
                id: 2,
                note: Some("second".into()),
                start: facts.now - Duration::minutes(90),
                end: Some(facts.now - Duration::hours(1)),
                sheet: "_foo".into(),
            },
        ]);
        assert_eq!(remaining, vec![
            Entry {
                id: 3,
                note: Some("second".into()),
                start: facts.now - Duration::hours(1),
                end: Some(facts.now),
                sheet: "foo".into(),
            },
        ]);
    }

    #[test]
    fn not_enough_entries_to_archive_time() {
        let args = Args {
            hours: Some(2),
            ..Default::default()
        };
        let mut streams = Streams::fake(b"y\n");
        let facts = Facts::new();

        streams.db.set_current_sheet("foo").unwrap();
        streams.db.entry_insert(facts.now - Duration::hours(3), Some(facts.now - Duration::minutes(90)), Some("first".into()), "foo").unwrap();

        ArchiveCommand::handle(args, &mut streams, &facts).unwrap();

        let remaining = streams.db.entries_by_sheet("foo", None, None).unwrap();
        let archived = streams.db.entries_by_sheet("_foo", None, None).unwrap();

        assert_str_eq!(String::from_utf8_lossy(&streams.out), "A total of 1 entry accounting for 1h 30m will be archived.
There were not enough entries to fulfill the requested time of 2h (difference: 30m).
Proceed? [y/N] ");

        // The only entry (1h 30m) cannot cover the requested 2h, so it is
        // archived whole, the shortfall is reported, and nothing is split.
        assert_eq!(archived, vec![
            Entry {
                id: 1,
                note: Some("first".into()),
                start: facts.now - Duration::hours(3),
                end: Some(facts.now - Duration::minutes(90)),
                sheet: "_foo".into(),
            },
        ]);
        assert_eq!(remaining, vec![]);
    }

    #[test]
    fn fake_and_split_work_well_together() {
        // Pin the local timezone so the formatted times below are deterministic.
        std::env::set_var("TZ", "CST+6");

        let args = Args {
            hours: Some(2),
            fake: true,
            ..Default::default()
        };
        let mut streams = Streams::fake(b"y\n");
        let facts = Facts::new();
        let time_a = Utc.with_ymd_and_hms(2022, 8, 1, 10, 0, 0).unwrap();
        let time_b = time_a + Duration::minutes(90);
        let time_d = time_a + Duration::hours(3);

        streams.db.set_current_sheet("foo").unwrap();
        streams.db.entry_insert(time_a, Some(time_b), Some("first".into()), "foo").unwrap();
        streams.db.entry_insert(time_b, Some(time_d), Some("second".into()), "foo").unwrap();

        ArchiveCommand::handle(args, &mut streams, &facts).unwrap();

        let remaining = streams.db.entries_by_sheet("foo", None, None).unwrap();
        let archived = streams.db.entries_by_sheet("_foo", None, None).unwrap();

        // The preview shows the 2h selection, with the second entry shown
        // already split down to its 30 minute piece.
        assert_str_eq!(String::from_utf8_lossy(&streams.out), "These entries would be archived:

Timesheet: foo
ID Day Start End Duration Notes
1 Mon Aug 01, 2022 04:00:00 - 05:30:00 1:30:00 first
2 05:30:00 - 06:00:00 0:30:00 second
2:00:00
------------------------------------------------------------
Total 2:00:00
");

        // `fake` means nothing is actually archived or split: the database
        // still holds the two original entries on the original sheet.
        assert_eq!(archived, vec![]);
        assert_eq!(remaining, vec![
            Entry {
                id: 1,
                note: Some("first".into()),
                start: time_a,
                end: Some(time_b),
                sheet: "foo".into(),
            },
            Entry {
                id: 2,
                note: Some("second".into()),
                start: time_b,
                end: Some(time_d),
                sheet: "foo".into(),
            },
        ]);
    }
}
|