Merge pull request #16 from djmitche/issue15

remove taskwarrior compatibility stuff
Dustin J. Mitchell 2020-11-21 17:54:01 -05:00 committed by GitHub
commit 8ed54e9728
7 changed files with 0 additions and 919 deletions


@@ -1,273 +0,0 @@
//! Re-implementation of TaskWarrior's A2 module.
use crate::util::lexer::*;
use std::collections::{HashMap, HashSet};
use std::fmt;
/// A2 represents a single argument.
#[derive(Clone)]
pub(crate) struct A2 {
pub(crate) lextype: Type,
tags: HashSet<String>,
attributes: HashMap<String, String>,
}
impl A2 {
pub(crate) fn new<S: Into<String>>(raw: S, lextype: Type) -> A2 {
let mut attributes = HashMap::new();
attributes.insert("raw".into(), raw.into());
let mut rv = A2 {
lextype,
tags: HashSet::new(),
attributes,
};
rv.decompose();
rv
}
/// Return true if the given tag exists in this argument.
pub(crate) fn has_tag<S: AsRef<str>>(&self, tag: S) -> bool {
self.tags.contains(tag.as_ref())
}
/// Add the given tag to this argument.
pub(crate) fn tag<S: Into<String>>(&mut self, tag: S) {
self.tags.insert(tag.into());
}
/// Remove the given tag from this argument.
pub(crate) fn untag<S: AsRef<str>>(&mut self, tag: S) {
self.tags.remove(tag.as_ref());
}
/// Set the given attribute
pub(crate) fn set_attribute<S1: Into<String>, S2: Into<String>>(
&mut self,
name: S1,
value: S2,
) {
self.attributes.insert(name.into(), value.into());
}
/// Get the given attribute
pub(crate) fn get_attribute<S: AsRef<str>>(&self, name: S) -> Option<&str> {
self.attributes.get(name.as_ref()).map(|s| s.as_ref())
}
/// Get either the canonical or raw form (attribute)
pub(crate) fn get_token(&self) -> &str {
self.attributes
.get("canonical")
.or_else(|| self.attributes.get("raw"))
.unwrap()
.as_ref()
}
/// Decompose the raw form into tags and attributes based on the lextype:
///
/// * Tag
/// - "name" is the tag name
/// - "sign" is the sign (`+` or `-`)
/// * Substitution
/// - "from" is the first part
/// - "to" is the second part
/// - "flags' is the substitution flag, or empty string
/// * Pair
/// - "name", "modifier", "separator", and "value" are the four parts of the pair
/// - tag "RC" is set if the name is "rc" with no modifier
/// - tag "CONFIG" is set if the name is "rc" with a modifier
/// * Pattern
/// - "pattern" is the pattern value
/// - "flags" is the pattern flag, or empty string
///
/// all other types are left unchanged
pub(crate) fn decompose(&mut self) {
let raw = self.get_attribute("raw").unwrap();
match self.lextype {
Type::Tag => {
let (sign, name) = (raw[..1].to_string(), raw[1..].to_string());
self.set_attribute("sign", sign);
self.set_attribute("name", name);
}
Type::Substitution => {
let DecomposedSubstitution { from, to, flags } =
decompose_substitution(raw).unwrap();
self.set_attribute("from", from);
self.set_attribute("to", to);
self.set_attribute("flags", flags);
}
Type::Pair => {
let DecomposedPair {
name,
modifier,
separator,
value,
} = decompose_pair(raw).unwrap();
if name == "rc" {
if !modifier.is_empty() {
self.tag("CONFIG");
} else {
self.tag("RC");
}
}
self.set_attribute("name", name);
self.set_attribute("modifier", modifier);
self.set_attribute("separator", separator);
self.set_attribute("value", value);
}
Type::Pattern => {
let DecomposedPattern { pattern, flags } = decompose_pattern(raw).unwrap();
self.set_attribute("pattern", pattern);
self.set_attribute("flags", flags);
}
_ => (),
}
}
}
impl fmt::Debug for A2 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "A2{}{:?}", "{", self.lextype)?;
let mut tags = self.tags.iter().collect::<Vec<_>>();
tags.sort();
for tag in tags {
write!(f, ", {}", tag)?;
}
let mut attributes = self.attributes.iter().collect::<Vec<_>>();
attributes.sort();
for (name, value) in attributes {
write!(f, ", {}={:?}", name, value)?;
}
write!(f, "{}", "}")?;
Ok(())
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn tags() {
let mut a2 = A2::new("ident", Type::Identifier);
assert!(!a2.has_tag("foo"));
a2.tag("foo");
assert!(a2.has_tag("foo"));
a2.untag("foo");
assert!(!a2.has_tag("foo"));
}
#[test]
fn raw_attribute() {
let a2 = A2::new("ident", Type::Identifier);
assert_eq!(a2.get_attribute("raw"), Some("ident"));
}
#[test]
fn set_get_attribute() {
let mut a2 = A2::new("ident", Type::Identifier);
assert_eq!(a2.get_attribute("foo"), None);
a2.set_attribute("foo", "bar");
assert_eq!(a2.get_attribute("foo"), Some("bar"));
a2.set_attribute("foo", "bing");
assert_eq!(a2.get_attribute("foo"), Some("bing"));
}
#[test]
fn get_token_raw() {
let a2 = A2::new("ident", Type::Identifier);
assert_eq!(a2.get_token(), "ident");
}
#[test]
fn get_token_canonical() {
let mut a2 = A2::new("ident", Type::Identifier);
a2.set_attribute("canonical", "identifier");
assert_eq!(a2.get_token(), "identifier");
}
#[test]
fn decompose_tag() {
let mut a2 = A2::new("+foo", Type::Tag);
a2.decompose();
assert_eq!(a2.get_attribute("sign"), Some("+"));
assert_eq!(a2.get_attribute("name"), Some("foo"));
}
#[test]
fn decompose_substitution() {
let mut a2 = A2::new("/foo/bar/g", Type::Substitution);
a2.decompose();
assert_eq!(a2.get_attribute("from"), Some("foo"));
assert_eq!(a2.get_attribute("to"), Some("bar"));
assert_eq!(a2.get_attribute("flags"), Some("g"));
}
#[test]
fn decompose_pair() {
let mut a2 = A2::new("thing.foo:bar", Type::Pair);
a2.decompose();
assert_eq!(a2.get_attribute("name"), Some("thing"));
assert_eq!(a2.get_attribute("modifier"), Some("foo"));
assert_eq!(a2.get_attribute("separator"), Some(":"));
assert_eq!(a2.get_attribute("value"), Some("bar"));
assert!(!a2.has_tag("RC"));
assert!(!a2.has_tag("CONFIG"));
}
#[test]
fn decompose_pair_rc() {
let mut a2 = A2::new("rc:bar", Type::Pair);
a2.decompose();
assert_eq!(a2.get_attribute("name"), Some("rc"));
assert_eq!(a2.get_attribute("modifier"), Some(""));
assert_eq!(a2.get_attribute("separator"), Some(":"));
assert_eq!(a2.get_attribute("value"), Some("bar"));
assert!(a2.has_tag("RC"));
assert!(!a2.has_tag("CONFIG"));
}
#[test]
fn decompose_pair_config() {
let mut a2 = A2::new("rc.foo:bar", Type::Pair);
a2.decompose();
assert_eq!(a2.get_attribute("name"), Some("rc"));
assert_eq!(a2.get_attribute("modifier"), Some("foo"));
assert_eq!(a2.get_attribute("separator"), Some(":"));
assert_eq!(a2.get_attribute("value"), Some("bar"));
assert!(!a2.has_tag("RC"));
assert!(a2.has_tag("CONFIG"));
}
#[test]
fn decompose_pattern() {
let mut a2 = A2::new("/foobar/g", Type::Pattern);
a2.decompose();
assert_eq!(a2.get_attribute("pattern"), Some("foobar"));
assert_eq!(a2.get_attribute("flags"), Some("g"));
}
#[test]
fn decompose_other() {
let mut a2 = A2::new("123", Type::Number);
a2.decompose();
assert_eq!(a2.get_attribute("raw"), Some("123"));
}
#[test]
fn debug() {
let mut a2 = A2::new("/ab/g", Type::Pattern);
a2.decompose();
a2.tag("FOO");
assert_eq!(
format!("{:?}", a2),
"A2{Pattern, FOO, flags=\"g\", pattern=\"ab\", raw=\"/ab/g\"}"
);
}
}
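As a rough illustration of the Pair case described in the `decompose` doc comment above, here is a minimal standalone sketch; `split_pair` is a hypothetical helper and not the crate's `decompose_pair` (which comes from `util::lexer` and is not part of this diff). It shows how a `name[.modifier](:|=)value` argument splits into the four attributes stored for `Type::Pair`.

// Hypothetical sketch; std-only, not the removed implementation.
fn split_pair(raw: &str) -> Option<(String, String, String, String)> {
    // Find the separator (':' or '='), then split the left-hand side on '.'
    // into a name and an optional modifier.
    let sep_idx = raw.find(|c| c == ':' || c == '=')?;
    let (lhs, rest) = raw.split_at(sep_idx);
    let separator = rest[..1].to_string();
    let value = rest[1..].to_string();
    let (name, modifier) = match lhs.find('.') {
        Some(dot) => (lhs[..dot].to_string(), lhs[dot + 1..].to_string()),
        None => (lhs.to_string(), String::new()),
    };
    Some((name, modifier, separator, value))
}

fn main() {
    // "rc.gc=0": name "rc" with a modifier, so A2::decompose would also tag it CONFIG.
    assert_eq!(
        split_pair("rc.gc=0"),
        Some(("rc".into(), "gc".into(), "=".into(), "0".into()))
    );
    // "due:tomorrow": ordinary attribute pair with an empty modifier.
    assert_eq!(
        split_pair("due:tomorrow"),
        Some(("due".into(), String::new(), ":".into(), "tomorrow".into()))
    );
}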


@@ -1,345 +0,0 @@
//! Reimplementation of the CLI2 class in TaskWarrior.
//!
//! This class is sparsely tested in TaskWarrior, but the intent is to replicate its functionality
//! reliably enough that any command-line accepted by TaskWarrior will also be accepted by this
//! implementation.
use super::a2::A2;
use crate::util::lexer::{dequote, read_word_quoted, was_quoted, Lexer, Type};
use std::collections::{HashMap, HashSet};
#[derive(Default)]
pub(crate) struct CLI2 {
entities: HashMap<String, HashSet<String>>,
aliases: HashMap<String, String>,
original_args: Vec<A2>,
args: Vec<A2>,
id_ranges: Vec<(String, String)>,
uuid_list: Vec<String>,
context_filter_added: bool,
}
impl CLI2 {
pub(crate) fn new() -> CLI2 {
CLI2 {
..Default::default()
}
}
/// Add an alias
pub(crate) fn alias<S1: Into<String>, S2: Into<String>>(&mut self, name: S1, value: S2) {
self.aliases.insert(name.into(), value.into());
}
/// Register an entity name under the given category (e.g., a command name under "cmd").
pub(crate) fn entity<S1: Into<String>, S2: Into<String>>(&mut self, category: S1, name: S2) {
self.entities
.entry(category.into())
.or_insert_with(HashSet::new)
.insert(name.into());
}
/// Capture a single argument, tagged as ORIGINAL
pub(crate) fn add<S: Into<String>>(&mut self, argument: S) {
let mut arg = A2::new(argument, Type::Word);
arg.tag("ORIGINAL");
self.original_args.push(arg);
self.args.clear();
}
/// Capture a set of arguments, inserted immediately after the binary.
/// There must be at least one argument set already. The new args are not
/// tagged as ORIGINAL.
///
/// Note that this is in no way equivalent to calling `add` in a loop!
pub(crate) fn add_args<S: Into<String>>(&mut self, arguments: Vec<S>) {
let mut replacement = vec![self.original_args[0].clone()];
for arg in arguments {
replacement.push(A2::new(arg, Type::Word));
}
for arg in self.original_args.drain(1..) {
replacement.push(arg);
}
self.original_args = replacement;
self.args.clear();
}
/// Perform the command-line analysis after arguments are added with `add` and `add_args`.
pub(crate) fn analyze(&mut self) {
self.args.clear();
self.handle_arg0();
self.lex_arguments();
// self.alias_expansion(); - TODO
if !self.find_command() {
self.default_command();
assert!(self.find_command()); // default_command guarantees this
}
// self.demotion(); - TODO
// self.canonicalizeNames(); - TODO
// self.categorizeArgs(); - TODO
// self.parenthesizeOriginalFilter(); - TODO
}
/// Handle the first argument, indicating the invoked binary.
fn handle_arg0(&mut self) {
// NOTE: this omits the special handling for "cal" and "calendar"
self.original_args[0].tag("BINARY");
}
/// Use the lexer to process all arguments (except the first, handled by handle_arg0).
///
/// All arguments must be individually and wholly recognized by the Lexer. Any argument not
/// recognized is considered a lexer::Type::Word.
///
/// As a side effect, tags all arguments after a terminator ('--') with TERMINATED.
fn lex_arguments(&mut self) {
let mut terminated = false;
// Note: starts iterating at index 1, because handle_arg0 has already
// processed the first argument.
for arg in &self.original_args[1..] {
let raw = arg.get_attribute("raw").unwrap();
let quoted = was_quoted(raw);
// Process single-token arguments.
let mut lex = Lexer::new(raw);
match lex.token() {
// if we got a token and it goes to EOS (quoted pairs automatically go to EOS)
Some((lexeme, mut lextype))
if lex.is_eos() || (quoted && lextype == Type::Pair) =>
{
if !terminated && lextype == Type::Separator {
terminated = true;
} else if terminated {
lextype = Type::Word;
}
let mut lexed_arg = A2::new(raw, lextype);
if terminated {
lexed_arg.tag("TERMINATED");
}
if quoted {
lexed_arg.tag("QUOTED");
}
if arg.has_tag("ORIGINAL") {
lexed_arg.tag("ORIGINAL");
}
self.args.push(lexed_arg)
}
// ..otherwise, process "multiple-token" arguments
_ => {
// TODO: this is kind of insane and almost certainly wrong, but
// implements what the C++ code does..
let quote = "'";
let escaped = format!("'{}'", raw.replace(quote, "\\'"));
let mut lexed_arg;
if let Some((word, _)) = read_word_quoted(&escaped, quote, 0) {
let word = dequote(&word, "'\"");
lexed_arg = A2::new(word, Type::Word);
} else {
// "This branch may have no use-case"!
lexed_arg = A2::new(raw, Type::Word);
lexed_arg.tag("UNKNOWN");
}
if quoted {
lexed_arg.tag("QUOTED");
}
if arg.has_tag("ORIGINAL") {
lexed_arg.tag("ORIGINAL");
}
self.args.push(lexed_arg)
}
}
}
/*
println!("lexed args:");
for arg in &self.args {
println!("{:?}", arg);
}
*/
}
/// Scan all arguments and if any are an exact match for a command name, then tag as CMD. If an
/// argument is an exact match for an attribute, despite being an inexact match for a command,
/// then it is not a command.
fn find_command(&mut self) -> bool {
for (i, arg) in self.args.iter().enumerate() {
let raw = arg.get_attribute("raw").unwrap();
let canonical;
if self.exact_match_entity("cmd", raw) {
canonical = raw.into();
} else if self.exact_match_entity("attribute", raw) {
continue;
} else if let Some(canon) = self.canonicalize_entity("cmd", raw) {
canonical = canon;
} else {
continue;
}
let mut arg = arg.clone();
arg.set_attribute("canonical", canonical);
arg.tag("CMD");
// TODO: apply "command DNA"
self.args[i] = arg;
return true;
}
false
}
/// Set a default command argument. Look for situations that require defaults:
///
/// 1. If no command was found, and no ID/UUID, and if rc.default.command is
/// configured, inject the lexed tokens from rc.default.command.
///
/// 2. If no command was found, but an ID/UUID was found, then assume a command
/// of 'information'.
fn default_command(&mut self) {
let mut found_command = false;
let mut found_sequence = false;
for arg in &self.args {
if arg.has_tag("CMD") {
found_command = true;
}
if arg.lextype == Type::Uuid || arg.lextype == Type::Number {
found_sequence = true;
}
}
if !found_command {
if !found_sequence {
unreachable!(); // TODO (requires default.command, context, etc.)
} else {
let mut info = A2::new("information", Type::Word);
info.tag("ASSUMED");
info.tag("CMD");
self.args.insert(0, info);
}
}
}
/// Search for 'value' in _entities category, return canonicalized value.
fn canonicalize_entity(&self, category: &str, value: &str) -> Option<String> {
// TODO: for the moment this only accepts exact matches
if let Some(names) = self.entities.get(category) {
if names.contains(value) {
Some(value.into())
} else {
None
}
} else {
None
}
}
/// Search for exact 'value' in _entities category.
fn exact_match_entity(&self, category: &str, value: &str) -> bool {
if let Some(names) = self.entities.get(category) {
names.contains(value)
} else {
false
}
}
}
#[cfg(test)]
mod test {
use super::*;
fn assert_args(args: &[A2], exp: Vec<&str>) {
assert_eq!(
args.iter().map(|a| format!("{:?}", a)).collect::<Vec<_>>(),
exp.iter().map(|s| s.to_string()).collect::<Vec<_>>(),
);
}
#[test]
fn alias() {
let mut c = CLI2::new();
c.alias("foo", "bar");
assert_eq!(c.aliases.get("foo"), Some(&"bar".to_string()));
}
#[test]
fn entities() {
let mut c = CLI2::new();
c.entity("cat", "foo");
c.entity("cat", "bar");
let mut exp = HashSet::new();
exp.insert("foo".to_string());
exp.insert("bar".to_string());
assert_eq!(c.entities.get("cat"), Some(&exp));
}
#[test]
fn add() {
let mut c = CLI2::new();
c.add("foo");
c.add("bar");
assert_eq!(
c.original_args
.iter()
.map(|a| format!("{:?}", a))
.collect::<Vec<_>>(),
vec![
"A2{Word, ORIGINAL, raw=\"foo\"}",
"A2{Word, ORIGINAL, raw=\"bar\"}"
]
);
}
#[test]
fn add_args() {
let mut c = CLI2::new();
c.add("0");
c.add("1");
c.add("2");
c.add_args(vec!["foo", "bar"]);
assert_args(
&c.original_args,
vec![
"A2{Word, ORIGINAL, raw=\"0\"}",
"A2{Word, raw=\"foo\"}",
"A2{Word, raw=\"bar\"}",
"A2{Word, ORIGINAL, raw=\"1\"}",
"A2{Word, ORIGINAL, raw=\"2\"}",
],
);
}
#[test]
fn analyze_example_cmdline() {
let mut c = CLI2::new();
c.entity("cmd", "next");
c.add("arg0");
c.add("rc.gc=0");
c.add("next");
c.add("+PENDING");
c.add("due:tomorrow");
c.analyze();
assert_args(
&c.args,
vec![
"A2{Pair, CONFIG, ORIGINAL, modifier=\"gc\", name=\"rc\", raw=\"rc.gc=0\", separator=\"=\", value=\"0\"}",
"A2{Identifier, CMD, ORIGINAL, canonical=\"next\", raw=\"next\"}",
"A2{Tag, ORIGINAL, name=\"PENDING\", raw=\"+PENDING\", sign=\"+\"}",
"A2{Pair, ORIGINAL, modifier=\"\", name=\"due\", raw=\"due:tomorrow\", separator=\":\", value=\"tomorrow\"}",
],
);
}
#[test]
fn exact_match_entity() {
let mut c = CLI2::new();
c.entity("cmd", "next");
c.entity("cmd", "list");
assert!(c.exact_match_entity("cmd", "next"));
assert!(!c.exact_match_entity("cmd", "bar"));
assert!(!c.exact_match_entity("foo", "bar"));
}
}
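The `default_command` rule described above (no argument tagged CMD, but an ID/UUID sequence present, implies the `information` command) can be sketched standalone; `Arg` and `effective_command` are hypothetical stand-ins, not the removed crate-private types.

// Hypothetical sketch of the default-command decision, not the removed implementation.
#[derive(Debug, PartialEq)]
enum Arg {
    Cmd(&'static str),  // an argument already tagged CMD
    Id(u64),            // an ID/UUID-like sequence element
    Word(&'static str), // anything else
}

fn effective_command(args: &[Arg]) -> Option<&'static str> {
    // An explicit command always wins.
    if let Some(cmd) = args.iter().find_map(|a| match a {
        Arg::Cmd(c) => Some(*c),
        _ => None,
    }) {
        return Some(cmd);
    }
    // No command, but an ID/UUID sequence: assume `information`.
    if args.iter().any(|a| matches!(a, Arg::Id(_))) {
        return Some("information");
    }
    // Otherwise the removed code would fall back to rc.default.command (left as a TODO there).
    None
}

fn main() {
    assert_eq!(
        effective_command(&[Arg::Cmd("next"), Arg::Word("+PENDING")]),
        Some("next")
    );
    assert_eq!(effective_command(&[Arg::Id(42)]), Some("information"));
    assert_eq!(effective_command(&[Arg::Word("+PENDING")]), None);
}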


@@ -1,2 +0,0 @@
mod a2;
mod cli2;


@@ -5,7 +5,6 @@
#[macro_use]
extern crate failure;
mod cli;
mod errors;
mod operation;
mod replica;
@@ -13,7 +12,6 @@ mod server;
mod task;
mod taskdb;
pub mod taskstorage;
mod tdb2;
mod util;
pub use operation::Operation;
@@ -23,11 +21,3 @@ pub use task::Priority;
pub use task::Status;
pub use task::Task;
pub use taskdb::DB;
use failure::Fallible;
use std::io::BufRead;
// TODO: remove (artifact of merging projects)
pub fn parse(filename: &str, reader: impl BufRead) -> Fallible<Vec<Task>> {
Ok(tdb2::parse(filename, reader)?)
}


@@ -1,245 +0,0 @@
use std::str;
use crate::task::{Task, TaskBuilder};
use crate::util::pig::Pig;
use failure::Fallible;
/// Rust implementation of part of utf8_codepoint from Taskwarrior's src/utf8.cpp
///
/// Note that the original function will return garbage for invalid hex sequences;
/// this returns an error instead.
fn hex_to_unicode(value: &[u8]) -> Fallible<String> {
if value.len() < 4 {
bail!("too short");
}
fn nyb(c: u8) -> Fallible<u16> {
match c {
b'0'..=b'9' => Ok((c - b'0') as u16),
b'a'..=b'f' => Ok((c - b'a' + 10) as u16),
b'A'..=b'F' => Ok((c - b'A' + 10) as u16),
_ => bail!("invalid hex character"),
}
}
let words = [nyb(value[0])? << 12 | nyb(value[1])? << 8 | nyb(value[2])? << 4 | nyb(value[3])?];
Ok(String::from_utf16(&words[..])?)
}
/// Rust implementation of JSON::decode in Taskwarrior's src/JSON.cpp
///
/// Decode the given byte slice into a string using Taskwarrior JSON's escaping. The slice is
/// assumed to be ASCII; unicode escapes within it will be expanded.
fn json_decode(value: &[u8]) -> Fallible<String> {
let length = value.len();
let mut rv = String::with_capacity(length);
let mut pos = 0;
while pos < length {
let v = value[pos];
if v == b'\\' {
pos += 1;
if pos == length {
rv.push(v as char);
break;
}
let v = value[pos];
match v {
b'"' | b'\\' | b'/' => rv.push(v as char),
b'b' => rv.push('\x08'),
b'f' => rv.push('\x0c'),
b'n' => rv.push('\n'),
b'r' => rv.push('\r'),
b't' => rv.push('\t'),
b'u' => {
let unicode = hex_to_unicode(&value[pos + 1..pos + 5]).map_err(|_| {
let esc = &value[pos - 1..pos + 5];
match str::from_utf8(esc) {
Ok(s) => format_err!("invalid unicode escape `{}`", s),
Err(_) => format_err!("invalid unicode escape bytes {:?}", esc),
}
})?;
rv.push_str(&unicode);
pos += 4;
}
_ => {
rv.push(b'\\' as char);
rv.push(v as char);
}
}
} else {
rv.push(v as char)
}
pos += 1;
}
Ok(rv)
}
/// Rust implementation of Task::decode in Taskwarrior's src/Task.cpp
///
/// Note that the docstring for the C++ function does not match the
/// implementation!
fn decode(value: String) -> String {
if value.contains('&') {
return value.replace("&open;", "[").replace("&close;", "]");
}
value
}
/// Parse an "FF4" formatted task line. From Task::parse in Taskwarrior's src/Task.cpp.
///
/// While Taskwarrior supports additional formats, this is the only format supported by
/// taskwarrior_rust.
pub(super) fn parse_ff4(line: &str) -> Fallible<Task> {
let mut pig = Pig::new(line.as_bytes());
let mut builder = TaskBuilder::new();
pig.skip(b'[')?;
let line = pig.get_until(b']')?;
let mut subpig = Pig::new(line);
while !subpig.depleted() {
let name = subpig.get_until(b':')?;
let name = str::from_utf8(name)?;
subpig.skip(b':')?;
let value = subpig.get_quoted(b'"')?;
let value = json_decode(value)?;
let value = decode(value);
builder = builder.set(name, value);
subpig.skip(b' ').ok(); // ignore if not found..
}
pig.skip(b']')?;
if !pig.depleted() {
bail!("trailing characters on line");
}
Ok(builder.finish())
}
#[cfg(test)]
mod test {
use super::{decode, hex_to_unicode, json_decode, parse_ff4};
use crate::task::Pending;
#[test]
fn test_hex_to_unicode_digits() {
assert_eq!(hex_to_unicode(b"1234").unwrap(), "\u{1234}");
}
#[test]
fn test_hex_to_unicode_lower() {
assert_eq!(hex_to_unicode(b"abcd").unwrap(), "\u{abcd}");
}
#[test]
fn test_hex_to_unicode_upper() {
assert_eq!(hex_to_unicode(b"ABCD").unwrap(), "\u{abcd}");
}
#[test]
fn test_hex_to_unicode_too_short() {
assert!(hex_to_unicode(b"AB").is_err());
}
#[test]
fn test_hex_to_unicode_invalid() {
assert!(hex_to_unicode(b"defg").is_err());
}
#[test]
fn test_json_decode_no_change() {
assert_eq!(json_decode(b"abcd").unwrap(), "abcd");
}
#[test]
fn test_json_decode_escape_quote() {
assert_eq!(json_decode(b"ab\\\"cd").unwrap(), "ab\"cd");
}
#[test]
fn test_json_decode_escape_backslash() {
assert_eq!(json_decode(b"ab\\\\cd").unwrap(), "ab\\cd");
}
#[test]
fn test_json_decode_escape_frontslash() {
assert_eq!(json_decode(b"ab\\/cd").unwrap(), "ab/cd");
}
#[test]
fn test_json_decode_escape_b() {
assert_eq!(json_decode(b"ab\\bcd").unwrap(), "ab\x08cd");
}
#[test]
fn test_json_decode_escape_f() {
assert_eq!(json_decode(b"ab\\fcd").unwrap(), "ab\x0ccd");
}
#[test]
fn test_json_decode_escape_n() {
assert_eq!(json_decode(b"ab\\ncd").unwrap(), "ab\ncd");
}
#[test]
fn test_json_decode_escape_r() {
assert_eq!(json_decode(b"ab\\rcd").unwrap(), "ab\rcd");
}
#[test]
fn test_json_decode_escape_t() {
assert_eq!(json_decode(b"ab\\tcd").unwrap(), "ab\tcd");
}
#[test]
fn test_json_decode_escape_other() {
assert_eq!(json_decode(b"ab\\xcd").unwrap(), "ab\\xcd");
}
#[test]
fn test_json_decode_escape_eos() {
assert_eq!(json_decode(b"ab\\").unwrap(), "ab\\");
}
#[test]
fn test_json_decode_escape_unicode() {
assert_eq!(json_decode(b"ab\\u1234").unwrap(), "ab\u{1234}");
}
#[test]
fn test_json_decode_escape_unicode_bad() {
let rv = json_decode(b"ab\\uwxyz");
assert_eq!(
rv.unwrap_err().to_string(),
"invalid unicode escape `\\uwxyz`"
);
}
#[test]
fn test_decode_no_change() {
let s = "abcd &quot; efgh &".to_string();
assert_eq!(decode(s.clone()), s);
}
#[test]
fn test_decode_multi() {
let s = "abcd &open; efgh &close; &open".to_string();
assert_eq!(decode(s), "abcd [ efgh ] &open".to_string());
}
#[test]
fn test_parse_ff4() {
let s = "[description:\"desc\" entry:\"1437855511\" modified:\"1479480556\" \
priority:\"L\" project:\"lists\" status:\"pending\" tags:\"watch\" \
uuid:\"83ce989e-8634-4d62-841c-eb309383ff1f\"]";
let task = parse_ff4(s).unwrap();
assert_eq!(task.status, Pending);
assert_eq!(task.description, "desc");
}
#[test]
fn test_parse_ff4_fail() {
assert!(parse_ff4("abc:10]").is_err());
assert!(parse_ff4("[abc:10").is_err());
assert!(parse_ff4("[abc:10 123:123]").is_err());
}
}
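As a rough sketch of the two-stage value decoding described above, where `json_decode` unescapes JSON-style sequences and `decode` then substitutes `&open;`/`&close;`: `unescape_value` is a hypothetical helper that handles only a couple of the escapes for brevity.

// Hypothetical sketch; the real json_decode also handles \/ \b \f \r \t and \uXXXX.
fn unescape_value(v: &str) -> String {
    // Stage 1: JSON-style unescaping (only \" and \n shown here).
    let json_decoded = v.replace("\\\"", "\"").replace("\\n", "\n");
    // Stage 2: decode() rewrites only when a '&' is present.
    if json_decoded.contains('&') {
        json_decoded.replace("&open;", "[").replace("&close;", "]")
    } else {
        json_decoded
    }
}

fn main() {
    assert_eq!(unescape_value("say \\\"hi\\\""), "say \"hi\"");
    assert_eq!(unescape_value("&open;taskgraph&close; tidy up"), "[taskgraph] tidy up");
}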


@@ -1,24 +0,0 @@
//! TDB2 is Taskwarrior's on-disk database format. This module implements
//! support for the data structure as a compatibility layer.
mod ff4;
use self::ff4::parse_ff4;
use crate::task::Task;
use failure::Fallible;
use std::io::BufRead;
pub(crate) fn parse(filename: &str, reader: impl BufRead) -> Fallible<Vec<Task>> {
let mut tasks = vec![];
for (i, line) in reader.lines().enumerate() {
tasks.push(parse_ff4(&line?).map_err(|e| {
format_err!(
"TDB2 Error at {}:{}: {}",
filename,
i + 1,
e
)
})?);
}
Ok(tasks)
}
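A standalone sketch of the per-line error-context pattern that `parse` uses above; `parse_line`, `parse_all`, and the `pending.data` filename are hypothetical, and plain `String` errors stand in for the crate's `failure::Fallible`.

// Hypothetical sketch, not the removed implementation.
use std::io::{BufRead, BufReader, Cursor};

fn parse_line(line: &str) -> Result<String, String> {
    // Stand-in for parse_ff4: require the FF4-style [ ... ] wrapper.
    if line.starts_with('[') && line.ends_with(']') {
        Ok(line[1..line.len() - 1].to_string())
    } else {
        Err("missing [ ... ] wrapper".to_string())
    }
}

fn parse_all(filename: &str, reader: impl BufRead) -> Result<Vec<String>, String> {
    let mut out = vec![];
    for (i, line) in reader.lines().enumerate() {
        let line = line.map_err(|e| e.to_string())?;
        // Wrap any per-line failure with "<filename>:<1-based line number>".
        out.push(
            parse_line(&line)
                .map_err(|e| format!("TDB2 Error at {}:{}: {}", filename, i + 1, e))?,
        );
    }
    Ok(out)
}

fn main() {
    let data = Cursor::new("[a]\n[b]\nbad line\n");
    let err = parse_all("pending.data", BufReader::new(data)).unwrap_err();
    assert_eq!(err, "TDB2 Error at pending.data:3: missing [ ... ] wrapper");
}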


@@ -1,20 +0,0 @@
use chrono::prelude::*;
use std::fs::File;
use std::io::BufReader;
#[test]
fn test_parse() {
let filename = "tests/data/tdb2-test.data";
let file = File::open(filename).unwrap();
let tasks = taskwarrior_rust::parse(filename, BufReader::new(file)).unwrap();
assert_eq!(
tasks[0].description,
"https://phabricator.services.example.com/D7364 [taskgraph] Download debian packages"
);
assert_eq!(tasks[0].entry, Utc.timestamp(1538520624, 0));
assert_eq!(tasks[0].udas.get("phabricatorid").unwrap(), "D7364");
assert_eq!(tasks[1].annotations[0].entry, Utc.timestamp(1541461824, 0));
assert!(tasks[1].annotations[0]
.description
.starts_with("https://github.com",));
}