Merge branch '2.6.0'

commit c010855bac
456 changed files with 17264 additions and 12446 deletions
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -142,19 +142,19 @@ class TestBug1687(TestCase):
         """1687: The named date 'som' should take precedence over 'someday', for an exact match"""
         self.t("rc.abbreviation.minimum=2 add one due:som")
         code, out, err = self.t("_get 1.due.year")
-        self.assertNotEqual("2038\n", out)
+        self.assertNotEqual("9999\n", out)

         self.t("rc.abbreviation.minimum=3 add two due:som")
         code, out, err = self.t("_get 2.due.year")
-        self.assertNotEqual("2038\n", out)
+        self.assertNotEqual("9999\n", out)

         self.t("rc.abbreviation.minimum=4 add three due:som")
         code, out, err = self.t("_get 3.due.year")
-        self.assertNotEqual("2038\n", out)
+        self.assertNotEqual("9999\n", out)

         self.t("rc.abbreviation.minimum=4 add three due:some")
         code, out, err = self.t("_get 4.due.year")
-        self.assertEqual("2038\n", out)
+        self.assertEqual("9999\n", out)


 if __name__ == "__main__":
@@ -191,7 +191,7 @@ class Test1549(TestCase):
         """

         # This command will hang and therefore timeout in 2.4.1.
-        code, out, err = self.t('add 1e x')
+        code, out, err = self.t('rc.verbose:new-id add 1e x')
         self.assertIn("Created task 1.", out)

@@ -53,6 +53,7 @@ class Task(object):
         with open(self.taskrc, 'w') as rc:
             rc.write("data.location={0}\n"
                      "hooks=off\n"
+                     "news.version=2.6.0\n"
                      "".format(self.datadir))

         # Setup configuration to talk to taskd automatically
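The added news.version entry appears to pre-acknowledge the 2.6.0 release notes so that the new `task news` reminder does not leak into test output (my reading of the change; the diff itself only shows the extra write). With a hypothetical data directory, the generated taskrc would read roughly:

expected_taskrc = (
    "data.location=/tmp/task-data\n"   # made-up path standing in for self.datadir
    "hooks=off\n"
    "news.version=2.6.0\n"             # marks the 2.6.0 news as already seen
)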
@@ -351,7 +351,7 @@ def release_port(port):


 def memoize(obj):
-    """Keep an in-memory cache of function results given it's inputs
+    """Keep an in-memory cache of function results given its inputs
     """
     cache = obj.cache = {}

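For readers unfamiliar with the helper being touched here: memoize is the test harness's result cache. A minimal sketch of such a decorator (illustrative, not necessarily identical to the harness's own implementation) looks like this:

import functools

def memoize(obj):
    """Keep an in-memory cache of function results given its inputs"""
    cache = obj.cache = {}

    @functools.wraps(obj)
    def memoizer(*args, **kwargs):
        # Cache key built from the call arguments (assumes they have stable reprs)
        key = str(args) + str(kwargs)
        if key not in cache:
            cache[key] = obj(*args, **kwargs)
        return cache[key]
    return memoizer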
@@ -60,8 +60,10 @@ def prepare_tasksh(t):
     for line in fh:
         line = line.rstrip()

-        if line == "taskcommand='task rc.verbose:nothing rc.confirmation:no rc.hooks:off'":
-            line = "taskcommand='{0} rc.verbose:nothing rc.confirmation:no rc.hooks:off rc:{1}'".format(t.taskw, t.taskrc)
+        if line == "taskbin='task'":
+            line = "taskbin='{0}'".format(t.taskw)
+        if line == "taskrc=''":
+            line = "taskrc='rc:{0}'".format(t.taskrc)

         tasksh.append(line)
@@ -1,13 +1,20 @@
 #!/usr/bin/env bash
 # For more information, see https://github.com/wbsch/bash_tap
-# Subject to the MIT License. See LICENSE file or http://opensource.org/licenses/MIT
-# Copyright (c) 2015 - 2021 Wilhelm Schürmann
+# Subject to the MIT License. See LICENSE file or https://opensource.org/licenses/MIT
+# Copyright (c) 2015 - 2021, Wilhelm Schürmann

 function bashtap_on_error {
     # A command in the parent script failed, interpret this as a test failure.
     # $bashtap_line contains the last executed line, or an error.
     echo -n "$bashtap_output"
-    echo "not ok 1 - ${bashtap_line}"
+
+    # Determine if this failure was expected
+    if [[ ! -z "$EXPFAIL" ]]
+    then
+        todo_suffix=" # TODO"
+    fi
+
+    echo "not ok 1 - ${bashtap_line}${todo_suffix}"
     bashtap_clean_tmpdir
 }
@@ -8,8 +8,8 @@
 # "taskrc" is a file set up in bash_tap_tw.sh:setup_taskrc(), and can be
 # appended to or changed as needed.
 #
-# Subject to the MIT License. See LICENSE file or http://opensource.org/licenses/MIT
-# Copyright (c) 2015 - 2021 Wilhelm Schürmann
+# Subject to the MIT License. See LICENSE file or https://opensource.org/licenses/MIT
+# Copyright (c) 2015 - 2021, Wilhelm Schürmann

 function setup_taskrc {
     # Configuration
@@ -35,7 +35,7 @@ function find_task_binary {
     for t in "${bashtap_org_pwd}/task" "${bashtap_org_pwd}/src/task" "${bashtap_org_pwd}/../task" "${bashtap_org_pwd}/../src/task" "${bashtap_org_pwd}/../build/src/task"; do
         if [ -f "$t" ] && [ -x "$t" ]; then
             t_abs=$(bashtap_get_absolute_path "$t")
-            eval "function task { ${t_abs} rc:taskrc \"\$@\"; }"
+            eval "function task { '${t_abs}' rc:taskrc \"\$@\"; }"
             return 0
         fi
     done
@@ -64,6 +64,15 @@ class TestBurndownCommand(TestCase):
         self.assertIn("+", out)
         self.assertIn("X", out)

+    def test_burndown_daily_non_cumulative(self):
+        """Ensure burndown.daily in non-cumulative mode generates a chart"""
+        self.t.config("burndown.cumulative", "0")
+        code, out, err = self.t("burndown.daily")
+        self.assertIn("Daily Burndown", out)
+        self.assertIn(".", out)
+        self.assertIn("+", out)
+        self.assertIn("X", out)
+
     def test_burndown_daily_color(self):
         """Ensure burndown.daily with color, generates a chart"""
         code, out, err = self.t("burndown.daily rc._forcecolor:on")
@@ -29,17 +29,27 @@
 import sys
 import os
 import unittest
+from datetime import datetime, timedelta
 # Ensure python finds the local simpletap module
 sys.path.append(os.path.dirname(os.path.abspath(__file__)))

 from basetest import Task, TestCase


+def timestamp_in_holiday_format(time):
+    return time.strftime("%Y%m%d")
+
 class TestCalendarCommandLine(TestCase):
     def setUp(self):
         """Executed before each test in the class"""
         self.t = Task()

+        tomorrow = datetime.now() + timedelta(days=1)
+        next_month = datetime.now() + timedelta(days=32)
+
+        self.tomorrow = timestamp_in_holiday_format(tomorrow)
+        self.next_month = timestamp_in_holiday_format(next_month)
+
     def test_basic_command(self):
         """Verify 'calendar' does not fail"""
         code, out, err = self.t("calendar")
@@ -77,6 +87,16 @@ class TestCalendarCommandLine(TestCase):
         code, out, err = self.t("calendar rc.calendar.holidays:full")
         self.assertIn("Date Holiday", out)

+    def test_basic_command_single_holiday(self):
+        """Verify 'calendar rc.holiday.test.name:donkeyday rc.holiday.test.date:[tomorrws date] rc.calendar.holidays:full' does not fail"""
+        code, out, err = self.t("calendar rc.holiday.test.name:donkeyday rc.holliday.test.date:{0} rc.calendar.holidays:full".format(self.tomorrow))
+        self.assertRegex(out, "Date +Holiday")
+
+    def test_basic_command_multiday_holiday(self):
+        """Verify 'calendar rc.holiday.test.name:donkeyday rc.holiday.test.start:[tomorrws date] rc.holiday.test.end:[date a month later] rc.calendar.holidays:full' does not fail"""
+        code, out, err = self.t("calendar rc.holiday.test.name:donkeyday rc.holiday.test.start:{0} rc.holiday.test.end:{1} rc.calendar.holidays:full".format(self.tomorrow, self.next_month))
+        self.assertRegex(out, "Date +Holiday")
+
     def test_y_argument(self):
         """Verify 'calendar y' does not fail"""
         code, out, err = self.t("calendar y")
@@ -1,6 +1,6 @@
 ////////////////////////////////////////////////////////////////////////////////
 //
-// Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+// Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 //
 // Permission is hereby granted, free of charge, to any person obtaining a copy
 // of this software and associated documentation files (the "Software"), to deal
@@ -81,16 +81,17 @@ class TestColorRules(TestCase):
         cls.t('3 modify depends:4')
         cls.t('add tomorrow due:tomorrow') # 5
         cls.t('add yesterday due:yesterday') # 6
-        cls.t('add someday due:yesterday') # 7
-        cls.t('add project_x project:x') # 8
-        cls.t('add pri_h priority:H') # 9
-        cls.t('add pri_m priority:M') # 10
-        cls.t('add pri_l priority:L') # 11
-        cls.t('add keyword') # 12
-        cls.t('add tag_x +x') # 13
-        cls.t('add uda_xxx_1 xxx:1') # 14
-        cls.t('add uda_xxx_4 xxx:4') # 15
-        cls.t('add recurring due:tomorrow recur:1week') # 16 # Keep this last
+        cls.t('add anhourago due:now-1h') # 7
+        cls.t('add someday due:yesterday') # 8
+        cls.t('add project_x project:x') # 9
+        cls.t('add pri_h priority:H') # 10
+        cls.t('add pri_m priority:M') # 11
+        cls.t('add pri_l priority:L') # 12
+        cls.t('add keyword') # 13
+        cls.t('add tag_x +x') # 14
+        cls.t('add uda_xxx_1 xxx:1') # 15
+        cls.t('add uda_xxx_4 xxx:4') # 16
+        cls.t('add recurring due:tomorrow recur:1week') # 17 Keep this last

     def test_control(self):
         """No color on control task."""
@@ -122,6 +123,12 @@ class TestColorRules(TestCase):
         code, out, err = self.t('/yesterday/ info')
         self.assertIn('\x1b[34m', out)

+    def test_due_anhourago(self):
+        """Overdue color rule from an hour ago."""
+        code, out, err = self.t('/anhourago/ info')
+        # Match 4-bit or 8-bit blue color code
+        self.assertRegex(out, '\x1b\[(38;5;4|34)m')
+
     def test_due_tomorrow(self):
         """Due tomorrow color rule."""
         code, out, err = self.t('/tomorrow/ info')
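As background for that regex: "\x1b[34m" is the classic 4-bit SGR code for blue and "\x1b[38;5;4m" is the 256-color ("8-bit") form, so the alternation accepts either. A quick self-contained check:

import re

BLUE_4BIT = "\x1b[34m"       # 16-color foreground blue
BLUE_8BIT = "\x1b[38;5;4m"   # 256-color palette index 4 (blue)

pattern = r"\x1b\[(38;5;4|34)m"
assert re.search(pattern, BLUE_4BIT + "overdue task")
assert re.search(pattern, BLUE_8BIT + "overdue task")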
@@ -139,7 +146,7 @@ class TestColorRules(TestCase):

     def test_color_header(self):
         """Header color."""
-        code, out, err = self.t('rc.verbose=header /control/')
+        code, out, err = self.t('rc.verbose=header,default /control/')
         self.assertIn('\x1b[34m', err)

     def test_color_footnote(self):
@@ -390,8 +390,8 @@ class TestDateFormats(TestCase):
     def test_date_format_countdown(self):
         """Verify due.countdown formatting"""
         code, out, err = self.t("xxx rc.report.xxx.columns:id,due.countdown")
-        self.assertRegex(out, r'1\s+\d+\S+')
-        self.assertRegex(out, r'2\s+')
+        self.assertRegex(out, r'1\s+')
+        self.assertRegex(out, r'2\s+\d+\S+')

     def test_date_format_unrecognized(self):
         """Verify due.donkey formatting fails"""
@@ -42,14 +42,14 @@ class TestCommands(TestCase):
     def test_command_dna(self):
         """Verify 'add', 'modify', 'list' dna"""
         code, out, err = self.t("commands")
-        self.assertRegex(out, "add\s+operation\s+RW\s+Mods\s+Adds a new task")
+        self.assertRegex(out, "add\s+operation\s+RW\s+Ctxt\s+Mods\s+Adds a new task")
         self.assertRegex(out, "list\s+report\s+RO\s+ID\s+GC\s+Ctxt\s+Filt\s+Most details of")
         self.assertRegex(out, "modify\s+operation\s+RW\s+Filt\s+Mods\s+Modifies the")

     def test_command_dna_color(self):
         """Verify 'add', 'modify', 'list' dna"""
         code, out, err = self.t("commands rc._forcecolor:on")
-        self.assertRegex(out, "add\s+operation\s+RW\s+Mods\s+Adds a new task")
+        self.assertRegex(out, "add\s+operation\s+RW\s+Ctxt\s+Mods\s+Adds a new task")
         self.assertRegex(out, "list\s+report\s+RO\s+ID\s+GC\s+Ctxt\s+Filt\s+Most details of")
         self.assertRegex(out, "modify\s+operation\s+RW\s+Filt\s+Mods\s+Modifies the")
test/context.t (197 changed lines)
@@ -41,96 +41,161 @@ class ContextManagementTest(TestCase):
     def setUp(self):
         self.t = Task()

         self.t.config("confirmation", "off")

     def test_context_define_confirmation(self):
         """With confirmation active, prompt if context filter matches no tasks"""
         self.t.config("confirmation", "on")

-        code, out, err = self.t.runError('context define work project:Work', input="y\n")
+        code, out, err = self.t.runError('context define work project:Work', input="y\nn\n")
         self.assertIn("The filter 'project:Work' matches 0 pending tasks.", out)
-        self.assertNotIn("Context 'work' defined.", out)
+        self.assertNotIn("Context 'work' defined", out)

         # Assert the config contains context definition
         self.assertNotIn('context.work=project:Work\n', self.t.taskrc_content)

     def test_context_define(self):
         """Test simple context definition."""
-        code, out, err = self.t('context define work project:Work', input="y\n")
-        self.assertIn("Context 'work' defined.", out)
+        code, out, err = self.t('context define work project:Work', input="y\ny\nn\n")
+        self.assertIn("Context 'work' defined", out)

-        # Assert the config contains context definition
-        context_line = 'context.work=project:Work\n'
+        # Assert the config contains read context definition
+        context_line = 'context.work.read=project:Work\n'
         self.assertIn(context_line, self.t.taskrc_content)

         # Assert that it contains the definition only once
         self.assertEqual(self.t.taskrc_content.count(context_line), 1)

+        # Assert the config does not contain write context definition
+        context_line = 'context.work.write=project:Work\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
+        # Assert that legacy style was not used
+        # Assert the config contains read context definition
+        context_line = 'context.work=project:Work\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
     def test_context_redefine_same_definition(self):
         """Test re-defining the context with the same definition."""
-        self.t('context define work project:Work')
-        code, out, err = self.t('context define work project:Work')
-        self.assertIn("Context 'work' defined.", out)
+        self.t('context define work project:Work', input='y\ny\ny\n')
+        code, out, err = self.t('context define work project:Work', input='y\ny\ny\n')
+        self.assertIn("Context 'work' defined (read, write).", out)

         # Assert the config contains context definition
-        context_line = 'context.work=project:Work\n'
+        context_line = 'context.work.read=project:Work\n'
         self.assertIn(context_line, self.t.taskrc_content)
+        self.assertEqual(self.t.taskrc_content.count(context_line), 1)

         # Assert that it contains the definition only once
+        context_line = 'context.work.write=project:Work\n'
+        self.assertIn(context_line, self.t.taskrc_content)
         self.assertEqual(self.t.taskrc_content.count(context_line), 1)

     def test_context_redefine_different_definition(self):
         """Test re-defining the context with different definition."""
-        self.t('context define work project:Work')
-        code, out, err = self.t('context define work +work')
-        self.assertIn("Context 'work' defined.", out)
+        self.t('context define work project:Work', input='y\ny\ny\n')
+        code, out, err = self.t('context define work +work', input='y\ny\ny\n')
+        self.assertIn("Context 'work' defined", out)

         # Assert the config does not contain the old context definition
-        self.assertNotIn('context.work=project:Work\n', self.t.taskrc_content)
+        self.assertNotIn('context.work.read=project:Work\n', self.t.taskrc_content)
+        self.assertNotIn('context.work.write=project:Work\n', self.t.taskrc_content)

         # Assert the config contains context definition
-        context_line = 'context.work=+work\n'
+        context_line = 'context.work.read=+work\n'
         self.assertIn(context_line, self.t.taskrc_content)
         self.assertEqual(self.t.taskrc_content.count(context_line), 1)

+        context_line = 'context.work.write=+work\n'
+        self.assertIn(context_line, self.t.taskrc_content)
+        self.assertEqual(self.t.taskrc_content.count(context_line), 1)
+
+    def test_context_define_invalid_for_write_due_to_modifier(self):
+        """Test definition of a context that is not a valid write context."""
+        self.t.config("confirmation", "off")
+        code, out, err = self.t('context define urgent due.before:today')
+        self.assertIn("Context 'urgent' defined", out)
+
+        # Assert the config contains read context definition
+        context_line = 'context.urgent.read=due.before:today\n'
+        self.assertIn(context_line, self.t.taskrc_content)
+
+        # Assert that it contains the definition only once
+        self.assertEqual(self.t.taskrc_content.count(context_line), 1)
+
+        # Assert the config does not contain write context definition
+        context_line = 'context.work.write=due.before:today\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
+        # Assert that the write context was not set at all
+        self.assertNotIn('context.work.write=', self.t.taskrc_content)
+
+        # Assert that legacy style was not used
+        # Assert the config contains read context definition
+        context_line = 'context.work=due.before:today\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
+    def test_context_define_invalid_for_write_due_to_operator(self):
+        """Test definition of a context that is not a valid write context because it uses an OR operator."""
+        self.t.config("confirmation", "off")
+        code, out, err = self.t('context define urgent due:today or +next')
+        self.assertIn("Context 'urgent' defined", out)
+
+        # Assert the config contains read context definition
+        context_line = 'context.urgent.read=due:today or +next\n'
+        self.assertIn(context_line, self.t.taskrc_content)
+
+        # Assert that it contains the definition only once
+        self.assertEqual(self.t.taskrc_content.count(context_line), 1)
+
+        # Assert the config does not contain write context definition
+        context_line = 'context.work.write=due:today or +next\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
+        # Assert that the write context was not set at all
+        self.assertNotIn('context.work.write=', self.t.taskrc_content)
+
+        # Assert that legacy style was not used
+        # Assert the config contains read context definition
+        context_line = 'context.work=due:today or +next\n'
+        self.assertNotIn(context_line, self.t.taskrc_content)
+
     def test_context_delete(self):
         """Test simple context deletion."""
-        self.t('context define work project:Work')
-        code, out, err = self.t('context delete work')
+        self.t('context define work project:Work', input='y\ny\n')
+        code, out, err = self.t('context delete work', input='y\ny\n')
         self.assertIn("Context 'work' deleted.", out)

         # Assert that taskrc does not countain context work definition
-        self.assertFalse(any('context.work=' in line for line in self.t.taskrc_content))
+        self.assertFalse(any('context.work' in line for line in self.t.taskrc_content))

     def test_context_delete_undefined(self):
         """Test deletion of undefined context."""
-        code, out, err = self.t.runError('context delete work')
-        self.assertIn("Context 'work' not deleted.", err)
+        code, out, err = self.t.runError('context delete foo', input='y\n')
+        self.assertIn("Context 'foo' not found.", err)

         # Assert that taskrc does not countain context work definition
-        self.assertFalse(any('context.work=' in line for line in self.t.taskrc_content))
+        self.assertFalse(any('context.foo.read=' in line for line in self.t.taskrc_content))
+        self.assertFalse(any('context.foo.write=' in line for line in self.t.taskrc_content))

     def test_context_delete_unset_after_removal(self):
         """Test that context is unset if its definition has been removed."""
-        self.t('context define work project:Work')
+        self.t('context define work project:Work', input='y\ny\n')
         self.t('context work')
-        code, out, err = self.t('context delete work')
+        code, out, err = self.t('context delete work', input='y\n\y\n')
         self.assertIn("Context 'work' deleted.", out)

         # Assert that taskrc does not countain context work definition
-        self.assertFalse(any('context.work=' in line for line in self.t.taskrc_content))
+        self.assertFalse(any('context.work.read=' in line for line in self.t.taskrc_content))
+        self.assertFalse(any('context.work.write=' in line for line in self.t.taskrc_content))

         # Aseert that the context is not set
         code, out, err = self.t('context show')
         self.assertIn('No context is currently applied.', out)
-        self.assertFalse(any(re.search("^context=", line) for line in self.t.taskrc_content))
+        self.assertFalse(any(re.search(r"^context(\.(read|write))?=", line) for line in self.t.taskrc_content))

     def test_context_list_active(self):
         """Test the 'context list' command."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')
         self.t('context home')
         code, out, err = self.t('context list')
         contains_work = lambda line: 'work' in line and 'project:Work' in line and 'no' in line
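To summarize what these assertions are checking (a paraphrase, not part of the diff): since 2.6.0 a context is stored as a pair of settings, a read filter applied to reports and a write filter applied when creating tasks, instead of the single legacy context.<name> line. A sketch mirroring the expectations for the 'work' context used above:

# Hypothetical helper reflecting the assertions in the hunk above.
expected = [
    "context.work.read=project:Work\n",   # filter used when listing/reporting
    "context.work.write=project:Work\n",  # defaults applied when adding tasks
]
legacy = "context.work=project:Work\n"    # pre-2.6.0 single-filter form, no longer written

def looks_converted(taskrc_content):
    return all(line in taskrc_content for line in expected) and legacy not in taskrc_content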
@@ -143,8 +208,8 @@ class ContextManagementTest(TestCase):

     def test_context_initially_empty(self):
         """Test that no context is set initially."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         code, out, err = self.t('context show')
         self.assertIn('No context is currently applied.', out)

@@ -152,8 +217,8 @@ class ContextManagementTest(TestCase):

     def test_context_setting(self):
         """Test simple context setting."""
-        self.t('context define work project:Work')
-        self.t('context define home home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home home', input='y\ny\n')

         code, out, err = self.t('context home')
         self.assertIn("Context 'home' set.", out)

@@ -161,8 +226,8 @@ class ContextManagementTest(TestCase):

     def test_context_resetting(self):
         """Test resetting the same context."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         self.t('context home')
         code, out, err = self.t('context home')

@@ -173,8 +238,8 @@ class ContextManagementTest(TestCase):

     def test_context_switching(self):
         """Test changing the context."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         # Switch to home context
         code, out, err = self.t('context home')

@@ -195,8 +260,8 @@ class ContextManagementTest(TestCase):

     def test_context_unsetting(self):
         """Test removing the context."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         self.t('context home')
         code, out, err = self.t('context none')

@@ -214,8 +279,8 @@ class ContextManagementTest(TestCase):

     def test_context_unsetting_after_switching(self):
         """Test unsetting the context after changing the context around."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         # Switch to contexts around
         self.t('context home')

@@ -238,8 +303,8 @@ class ContextManagementTest(TestCase):

     def test_context_unsetting_with_no_context_set(self):
         """Test removing the context when no context is set."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         code, out, err = self.t.runError('context none')


@@ -256,8 +321,8 @@ class ContextManagementTest(TestCase):

     def test_context(self):
         """Test the _context command."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')
         code, out, err = self.t('_context')

         # Assert expected output.

@@ -267,8 +332,8 @@ class ContextManagementTest(TestCase):

     def test_context_completion(self):
         """Test the _context command with some context set."""
-        self.t('context define work project:Work')
-        self.t('context define home +home')
+        self.t('context define work project:Work', input='y\ny\n')
+        self.t('context define home +home', input='y\ny\n')

         # Activate some context
         self.t('context work')

@@ -478,6 +543,33 @@ class ContextEvaluationTest(TestCase):
         self.assertNotIn("work today task", output)
         self.assertNotIn("home today task", output)

+    def test_context_ignored(self):
+        """Test the context is not applied with report list command if
+        report.list.context is set to 0."""
+
+        # Turn off context for this report
+        self.t.config("report.list.context", "0")
+
+        # Get the tasks
+        code, out, err = self.t('list')
+
+        # Assert all the tasks are present in the output
+        self.assertIn("work task", out)
+        self.assertIn("home task", out)
+        self.assertIn("work today task", out)
+        self.assertIn("home today task", out)
+
+        # Set the home context and rerun the report
+        self.t('context home')
+
+        code, out, err = self.t('list')
+
+        # Assert nothing changed - all the tasks are present in the output
+        self.assertIn("work task", out)
+        self.assertIn("home task", out)
+        self.assertIn("work today task", out)
+        self.assertIn("home today task", out)
+
+
 class ContextErrorHandling(TestCase):
     def setUp(self):

@@ -517,13 +609,16 @@ class ContextErrorHandling(TestCase):
         """Verify 'task context show' with contexts works"""
         self.t.config("confirmation", "off")
         code, out, err = self.t("context define work +work")
-        self.assertIn("Context 'work' defined. Use 'task context work' to activate.", out)
+        self.assertIn("Context 'work' defined (read, write). Use 'task context work' to activate.", out)

         code, out, err = self.t("context work")
         self.assertIn("Context 'work' set. Use 'task context none' to remove.", out)

         code, out, err = self.t("context show")
-        self.assertIn("Context 'work' with filter '+work' is currently applied.", out)
+        self.assertIn("Context 'work' with", out)
+        self.assertIn("read filter: '+work'", out)
+        self.assertIn("write filter: '+work'", out)
+        self.assertIn("is currently applied", out)

         code, out, err = self.t("context none")
         self.assertIn("Context unset.", out)

@@ -536,7 +631,7 @@ class TestBug1734(TestCase):
         self.t = Task()
         self.t("add zero")
         self.t("add one +tag")
-        self.t("context define foo +tag", input="y\n")
+        self.t("context define foo +tag", input="y\nn\n")

     def test_calendar(self):
         """The 'calendar' command should not fail when a context is active"""
@@ -107,6 +107,43 @@ class TestBug1620(TestCase):
         code, out, err = self.t ('long')
         self.assertIn("20150601-1415", out)

+
+class TestCapitalizedDays(TestCase):
+    """Make sure capitalized names such as 'Friday' work.
+
+    Requested in:
+    * https://github.com/GothenburgBitFactory/taskwarrior/issues/2160
+    * https://github.com/GothenburgBitFactory/taskwarrior/issues/2364
+
+    Implemented in libshared:
+    * https://github.com/GothenburgBitFactory/libshared/pull/33
+    """
+
+    def setUp(self):
+        """Executed before each test in the class"""
+        self.t = Task()
+
+    def test_dateformat_capitalized(self):
+        """Verify upper case days and months work"""
+        # Lower case:
+        code, out, err = self.t('add sometask due:mon')
+        code, out, err = self.t('add sometask due:monday')
+        code, out, err = self.t('add sometask due:jan')
+        code, out, err = self.t('add sometask due:january')
+        # Upper case days of the week
+        code, out, err = self.t('add sometask due:Tue')
+        code, out, err = self.t('add sometask due:Tuesday')
+        code, out, err = self.t('add sometask due:Thu')
+        code, out, err = self.t('add sometask due:Thursday')
+        # Upper case months:
+        code, out, err = self.t('add sometask due:Jan')
+        code, out, err = self.t('add sometask due:January')
+        code, out, err = self.t('add sometask due:Jun')
+        code, out, err = self.t('add sometask due:June')
+        code, out, err = self.t('add sometask due:May')
+
+        # Incorrect:
+        code, out, err = self.t.runError('add sometask due:Yo')
+        code, out, err = self.t.runError('add sometask due:TU')
+
 if __name__ == "__main__":
     from simpletap import TAPTestRunner
@@ -46,6 +46,7 @@ class TestCMD(TestCase):

     def test_default_command(self):
         """default command"""
+        self.t.config("verbose", "on")
         code, out, err = self.t()
         self.assertIn("task list]", err)

@@ -26,6 +26,7 @@
 #
 ###############################################################################

+import string
 import sys
 import os
 import unittest

@@ -231,7 +232,6 @@ class TestBug697(TestCase):
         self.assertEqual("BLOCKED\n", out)


-@unittest.skip("WaitingFor TW-1262")
 class TestBug1262(TestCase):
     @classmethod
     def setUpClass(cls):

@@ -241,8 +241,9 @@ class TestBug1262(TestCase):
         cls.t('add "Buy apples"')

         cls.DEPS = ("1", "2")
-        cls.t("add dep:" + ",".join(cls.DEPS) + '"Make fruit salad!"')
+        cls.t("add dep:" + ",".join(cls.DEPS) + ' "Make fruit salad!"')

+    @unittest.skip # Skipping due to undeterminism
     def test_dependency_contains_matches_ID(self):
         """1262: dep.contains matches task IDs"""
         # NOTE: A more robust test is needed as alternative to this

@@ -254,13 +255,14 @@ class TestBug1262(TestCase):

     def test_dependency_contains_not_matches_other(self):
         """1262: dep.contains matches other characters not present in ID nor UUID"""
-        for char in set(string.letters).difference(string.hexdigits):
+        for char in set(string.ascii_letters).difference(string.hexdigits):
             self.t.runError("list dep.contains:{0}".format(char))

+    @unittest.expectedFailure
     def test_dependency_contains_not_UUID(self):
         """1262: dep.contains matches characters in the tasks' UUIDs"""
         # Get the UUID of the task with description "Buy"
-        code, out, err = self.t("uuid Buy")
+        code, out, err = self.t("uuids Buy")

         # Get only characters that show up in the UUID
         uuid = {chr for chr in out.splitlines()[0] if chr in string.hexdigits}
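One note on the for-loop change (not stated in the diff itself): string.letters was a Python 2-only attribute; Python 3 exposes string.ascii_letters instead, so the set of non-hex letters is now built like this:

import string

# Letters that can never appear in a hexadecimal ID/UUID
non_hex_letters = set(string.ascii_letters).difference(string.hexdigits)
assert "g" in non_hex_letters and "a" not in non_hex_letters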
@@ -58,6 +58,11 @@ class TestDiagnostics(TestCase):
         self.assertIn("edlin", out)
         self.assertIn("strict", out)

+    def test_64bit_time_t(self):
+        """Test that time_t has size of 64 bits"""
+        code, out, err = self.t.diag()
+        self.assertIn("+time_t64", out)
+

 if __name__ == "__main__":
     from simpletap import TAPTestRunner
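This new diagnostics check ties in with the date changes earlier in the diff: with a 64-bit time_t, dates beyond the 32-bit rollover (2038-01-19) become representable, which is presumably why 'someday' can resolve to year 9999 in the TestBug1687 hunk above. A tiny illustration:

from datetime import datetime, timezone

rollover_32bit = datetime.fromtimestamp(2**31 - 1, tz=timezone.utc)  # 2038-01-19 03:14:07 UTC
someday = datetime(9999, 12, 30, tzinfo=timezone.utc)
assert someday > rollover_32bit  # only storable once time_t is 64 bits wide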
@@ -15,12 +15,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -19,12 +19,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN source scl_source enable devtoolset-7; cmake -DCMAKE_BUILD_TYPE=debug .
-RUN source scl_source enable devtoolset-7; make -j2
+RUN source scl_source enable devtoolset-7; make -j8
 RUN source scl_source enable devtoolset-7; make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN source scl_source enable devtoolset-7; make
+RUN source scl_source enable devtoolset-7; make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -3,8 +3,8 @@ FROM centos:8
 RUN dnf update -y
 RUN dnf install python3 git gcc gcc-c++ make gnutls-devel libuuid-devel glibc-langpack-en -y
 RUN dnf install epel-release -y
-RUN dnf install which cmake3 libfaketime -y
-RUN gcc --version; cmake3 --version
+RUN dnf install which cmake libfaketime -y
+RUN gcc --version; cmake --version

 # Setup language environment
 ENV LC_ALL en_US.UTF-8

@@ -18,12 +18,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -16,12 +16,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -16,12 +16,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -15,12 +15,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -15,12 +15,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -15,12 +15,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]
test/docker/fedora34 (new file, 26 lines)
@@ -0,0 +1,26 @@
+FROM fedora:34
+
+RUN dnf update -y
+RUN dnf install python3 git gcc gcc-c++ cmake make gnutls-devel libuuid-devel libfaketime glibc-langpack-en -y
+
+# Setup language environment
+ENV LC_ALL en_US.UTF-8
+ENV LANG en_US.UTF-8
+ENV LANGUAGE en_US.UTF-8
+
+# Setup taskwarrior
+ADD . /root/code/
+WORKDIR /root/code/
+RUN git clean -dfx
+RUN git submodule init
+RUN git submodule update
+RUN cmake -DCMAKE_BUILD_TYPE=debug .
+RUN make -j8
+RUN make install
+RUN task --version
+
+# Setup tests
+WORKDIR /root/code/test/
+RUN make -j8
+
+CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]
@@ -21,12 +21,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -14,12 +14,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -16,12 +16,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]

@@ -16,12 +16,12 @@ RUN git clean -dfx
 RUN git submodule init
 RUN git submodule update
 RUN cmake -DCMAKE_BUILD_TYPE=debug .
-RUN make -j2
+RUN make -j8
 RUN make install
 RUN task --version

 # Setup tests
 WORKDIR /root/code/test/
-RUN make
+RUN make -j8

 CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]
27 test/docker/ubuntu2104 Normal file
@ -0,0 +1,27 @@
FROM ubuntu:21.04

RUN apt-get update
RUN DEBIAN_FRONTEND="noninteractive" apt-get install -y build-essential cmake git uuid-dev libgnutls28-dev faketime locales python3

# Setup language environment
RUN locale-gen en_US.UTF-8
ENV LC_ALL en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US.UTF-8

# Setup taskwarrior
ADD . /root/code/
WORKDIR /root/code/
RUN git clean -dfx
RUN git submodule init
RUN git submodule update
RUN cmake -DCMAKE_BUILD_TYPE=debug .
RUN make -j8
RUN make install
RUN task --version

# Setup tests
WORKDIR /root/code/test/
RUN make -j8

CMD ["bash", "-c", "./run_all -v ; cat all.log | grep 'not ok' ; ./problems"]
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
21 test/due.t
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -118,6 +118,25 @@ class TestBug418(TestCase):
        self.assertNotIn("nine", out)


class TestBug2519(TestCase):
    def setUp(self):
        self.t = Task()

    def test_due_today_includes_eod(self):
        """Verify that virtual tag +TODAY matches a task due eod"""
        self.t("add zero due:eod")

        code, out, err = self.t("+TODAY ls")
        self.assertIn("zero", out)

    def test_eoy_is_not_before_eoy(self):
        """Verify that end of year is not before end of year"""
        self.t("add zero due:eoy")

        code, out, err = self.t.runError("due.before:eoy")
        self.assertNotIn("1", out)


if __name__ == "__main__":
    from simpletap import TAPTestRunner
    unittest.main(testRunner=TAPTestRunner())
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -146,7 +146,6 @@ class TestExportCommand(TestCase):
        self.t(('add', 'everything depends on me task'))
        self.t(('add', 'wrong, everything depends on me task'))
        self.t('1 modify depends:2,3')
        self.t.config('json.depends.array', 'on')

        deps = self.export(1)['depends']
        self.assertType(deps, list)
@ -155,19 +154,6 @@ class TestExportCommand(TestCase):
        for uuid in deps:
            self.assertString(uuid, UUID_REGEXP, regexp=True)

    def test_export_depends_oldformat(self):
        self.t(('add', 'everything depends on me task'))
        self.t(('add', 'wrong, everything depends on me task'))
        self.t('1 modify depends:2,3')

        code, out, err = self.t("rc.json.array=off rc.json.depends.array=off 1 export")
        deps = json.loads(out)["depends"]
        self.assertString(deps)
        self.assertEqual(len(deps.split(",")), 2)

        for uuid in deps.split(','):
            self.assertString(uuid, UUID_REGEXP, regexp=True)

    def test_export_urgency(self):
        self.t('add urgent task +urgent')
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -659,7 +659,6 @@ class TestBug1600(TestCase):
    def setUp(self):
        self.t = Task()

    @unittest.expectedFailure
    def test_filter_plus_in_descriptions(self):
        """filter - description contains +"""
        self.t("add foobar1")
@ -671,7 +670,7 @@ class TestBug1600(TestCase):
        self.assertIn("foobar1", out)
        self.assertIn("foobar2", out)

        code, out, err = self.t("all description.contains:'foobar\\+'")
        code, out, err = self.t(r"all description.contains:\'foobar\\+\'")
        self.assertIn("foobar+", out)
        self.assertNotIn("foobar1", out)
        self.assertNotIn("foobar2", out)
@ -837,6 +836,34 @@ class TestBefore(TestCase):
        self.assertIn("2", out)


class TestBy(TestCase):
    def setUp(self):
        self.t = Task()

    def test_by_eoy_includes_eoy(self):
        """ Verify by-end-of-year includes task due *at* end-of-year """
        self.t("add zero due:eoy")

        code, out, err = self.t("due.by:eoy")
        self.assertIn("zero", out)

    def test_by_tomorrow_includes_tomorrow(self):
        """ Verify that by-tomorrow also includes tomorrow itself """
        self.t.faketime("2021-07-16 21:00:00")
        self.t("add zero due:2021-07-17")

        code, out, err = self.t("due.by:tomorrow")
        self.assertIn("zero", out)

    def test_by_yesterday_does_not_include_today(self):
        """ Verify that by-yesterday does not include today """
        self.t("add zero")

        code, out, err = self.t.runError("entry.by:yesterday")
        self.assertIn("No matches", err)
        self.assertNotIn("zero", out)


class Test1424(TestCase):
    def setUp(self):
        self.t = Task()
@ -875,14 +902,14 @@ class Test1452(TestCase):
        self.task_uuid = self.t.export_one()['uuid']

    def test_get_task_by_uuid_with_prefix(self):
        """1452: Tries to filter task simply by it's uuid, using uuid: prefix."""
        """1452: Tries to filter task simply by its uuid, using uuid: prefix."""
        output = self.t.export_one('uuid:%s' % self.task_uuid)

        # Sanity check it is the correct one
        self.assertEqual(output['uuid'], self.task_uuid)

    def test_get_task_by_uuid_without_prefix(self):
        """1452: Tries to filter task simply by it's uuid, without using uuid: prefix."""
        """1452: Tries to filter task simply by its uuid, without using uuid: prefix."""
        output = self.t.export_one(self.task_uuid)

        # Sanity check it is the correct one
@ -967,7 +994,6 @@ class TestBug1609(TestCase):
        self.assertIn("two", out)


@unittest.expectedFailure
class TestBug1630(TestCase):
    def setUp(self):
        """Executed before each test in the class"""
@ -1096,6 +1122,7 @@ class TestBug1915(TestCase):
        self.assertIn("thingB", out)
        self.assertNotIn("thingC", out)

    @unittest.expectedFailure
    def test_complex_and_or_query_variant_eight(self):
        """1915: Make sure parser handles complex and-or queries correctly (8)"""
        code, out, err = self.t("rc.verbose:nothing status:pending and \\(project:A or project:B\\) all")
@ -1103,6 +1130,24 @@ class TestBug1915(TestCase):
        self.assertIn("thingB", out)
        self.assertNotIn("thingC", out)


class Test2577(TestCase):
    def setUp(self):
        self.t = Task()

    def test_filtering_for_datetime_like(self):
        """2577: Check that filtering for datetime-like project names works"""
        self.t('add one pro:sat')  # looks like "saturday"
        self.t('add two pro:whatever')

        # This should not fail (fails on 2.5.3)
        code, out, err = self.t('pro:sat')

        # Assert expected output, but the crucial part of this test is success
        # of the call above
        self.assertIn("one", out)


if __name__ == "__main__":
    from simpletap import TAPTestRunner
    unittest.main(testRunner=TAPTestRunner())
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -49,7 +49,6 @@ class TestHyphenation(TestCase):
        code, out, err = self.t("ls")
        self.assertIn("1 AAAAAAAAAA\n", out)

    @unittest.expectedFailure
    def test_hyphenation(self):
        """Verify hyphenation in the absence of white space"""
        self.t("add AAAAAAAAAABBBBBBBBBBCCCCCCCCCC")
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -187,15 +187,6 @@ class TestImport(TestCase):
        self.assertIn("Imported 3 tasks", err)
        self.assertData1()

    def test_import_old_depend(self):
        """One dependency used to be a plain string"""
        _data = """{"uuid":"a0000000-a000-a000-a000-a00000000000","depends":"a1111111-a111-a111-a111-a11111111111","description":"zero","project":"A","status":"pending","entry":"1234567889"}"""
        self.t("import", input=self.data1)
        self.t("import", input=_data)
        self.t.config("json.depends.array", "0")
        _t = self.t.export("a0000000-a000-a000-a000-a00000000000")[0]
        self.assertEqual(_t["depends"], "a1111111-a111-a111-a111-a11111111111")

    def test_import_old_depends(self):
        """Several dependencies used to be a comma seperated string"""
        _data = """{"uuid":"a0000000-a000-a000-a000-a00000000000","depends":"a1111111-a111-a111-a111-a11111111111,a2222222-a222-a222-a222-a22222222222","description":"zero","project":"A","status":"pending","entry":"1234567889"}"""
@ -207,7 +198,6 @@ class TestImport(TestCase):

    def test_import_new_depend(self):
        """One dependency is a single array element"""
        self.t.config('json.depends.array', 'on')
        _data = """{"uuid":"a0000000-a000-a000-a000-a00000000000","depends":["a1111111-a111-a111-a111-a11111111111"],"description":"zero","project":"A","status":"pending","entry":"1234567889"}"""
        self.t("import", input=self.data1)
        self.t("import", input=_data)
@ -216,7 +206,6 @@ class TestImport(TestCase):

    def test_import_new_depends(self):
        """Several dependencies are an array"""
        self.t.config('json.depends.array', 'on')
        _data = """{"uuid":"a0000000-a000-a000-a000-a00000000000","depends":["a1111111-a111-a111-a111-a11111111111","a2222222-a222-a222-a222-a22222222222"],"description":"zero","project":"A","status":"pending","entry":"1234567889"}"""
        self.t("import", input=self.data1)
        self.t("import", input=_data)
@ -303,6 +292,12 @@ class TestImportValidate(TestCase):
        code, out, err = self.t.runError("import", input=j)
        self.assertIn("The status 'foo' is not valid.", err)

    def test_import_malformed_annotation(self):
        """Verify invalid 'annnotations' is caught"""
        j = '{"description": "bad", "annotations": "bad"}'
        code, out, err = self.t.runError("import", input=j)
        self.assertIn('Annotations is malformed: "bad"', err)


class TestImportWithoutISO(TestCase):
    def setUp(self):
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -103,6 +103,10 @@ class TestInfoCommand(TestCase):
        self.assertIn("U_ONE", out)
        self.assertIn("U_TWO", out)

        # TW-#2060: Make sure UDA attributes are formatted
        self.assertRegex(out, r"U_ONE\s+\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
        self.assertRegex(out, r"U_TWO\s+P1D")

class TestBug425(TestCase):
    def setUp(self):
        self.t = Task()
324 test/lexer.t.cpp
@ -99,13 +99,13 @@ int main (int, char**)
t.notok (l1.token (token, type), "' \\t ' --> no tokens");

// \u20ac = Euro symbol.
Lexer l2 (" one 'two \\'three\\''+456-(1.3*2 - 0x12) 1.2e-3.4 foo.bar and '\\u20ac'");
Lexer l2 (R"( one 'two \'three\''+456-(1.3*2 - 0x12) 1.2e-3.4 foo.bar and '\u20ac')");

tokens.clear ();
while (l2.token (token, type))
{
std::cout << "# «" << token << "» " << Lexer::typeName (type) << "\n";
tokens.push_back (std::pair <std::string, Lexer::Type> (token, type));
tokens.emplace_back (token, type);
}

t.is (tokens[0].first, "one", "tokens[0] = 'one'"); // 30
@ -147,7 +147,7 @@ int main (int, char**)
while (l3.token (token, type))
{
std::cout << "# «" << token << "» " << Lexer::typeName (type) << "\n";
tokens.push_back (std::pair <std::string, Lexer::Type> (token, type));
tokens.emplace_back (token, type);
}

t.is ((int)tokens.size (), 7, "7 tokens");
@ -246,13 +246,13 @@ int main (int, char**)

std::string text = "one 'two' three\\ four";
cursor = 0;
t.ok (Lexer::readWord (text, cursor, word), "readWord \"one 'two' three\\ four\" --> true");
t.ok (Lexer::readWord (text, cursor, word), R"(readWord "one 'two' three\ four" --> true)");
t.is (word, "one", " word '" + word + "'");
cursor++;
t.ok (Lexer::readWord (text, cursor, word), "readWord \"one 'two' three\\ four\" --> true");
t.ok (Lexer::readWord (text, cursor, word), R"(readWord "one 'two' three\ four" --> true)");
t.is (word, "'two'", " word '" + word + "'");
cursor++;
t.ok (Lexer::readWord (text, cursor, word), "readWord \"one 'two' three\\ four\" --> true");
t.ok (Lexer::readWord (text, cursor, word), R"(readWord "one 'two' three\ four" --> true)");
t.is (word, "three four", " word '" + word + "'");

text = "one ";
@ -298,219 +298,221 @@ int main (int, char**)
{
const char* token;
Lexer::Type type;
bool expfail_token = false;
bool expfail_type = false;
} results[5];
} lexerTests[] =
{
// Pattern
{ "/foo/", { { "/foo/", Lexer::Type::pattern }, NO, NO, NO, NO }, },
{ "/a\\/b/", { { "/a\\/b/", Lexer::Type::pattern }, NO, NO, NO, NO }, },
{ "/'/", { { "/'/", Lexer::Type::pattern }, NO, NO, NO, NO }, },
{ "/foo/", { { "/foo/", Lexer::Type::pattern }, NO, NO, NO, NO }, },
{ "/a\\/b/", { { "/a\\/b/", Lexer::Type::pattern }, NO, NO, NO, NO }, },
{ "/'/", { { "/'/", Lexer::Type::pattern }, NO, NO, NO, NO }, },

// Substitution
{ "/from/to/g", { { "/from/to/g", Lexer::Type::substitution }, NO, NO, NO, NO }, },
{ "/from/to/", { { "/from/to/", Lexer::Type::substitution }, NO, NO, NO, NO }, },
{ "/from/to/g", { { "/from/to/g", Lexer::Type::substitution }, NO, NO, NO, NO }, },
{ "/from/to/", { { "/from/to/", Lexer::Type::substitution }, NO, NO, NO, NO }, },

// Tag
{ "+tag", { { "+tag", Lexer::Type::tag }, NO, NO, NO, NO }, },
{ "-tag", { { "-tag", Lexer::Type::tag }, NO, NO, NO, NO }, },
{ "+@tag", { { "+@tag", Lexer::Type::tag }, NO, NO, NO, NO }, },
{ "+tag", { { "+tag", Lexer::Type::tag }, NO, NO, NO, NO }, },
{ "-tag", { { "-tag", Lexer::Type::tag }, NO, NO, NO, NO }, },
{ "+@tag", { { "+@tag", Lexer::Type::tag }, NO, NO, NO, NO }, },

// Path
{ "/long/path/to/file.txt", { { "/long/path/to/file.txt", Lexer::Type::path }, NO, NO, NO, NO }, },
{ "/long/path/to/file.txt", { { "/long/path/to/file.txt", Lexer::Type::path }, NO, NO, NO, NO }, },

// Word
{ "1.foo.bar", { { "1.foo.bar", Lexer::Type::word }, NO, NO, NO, NO }, },
{ "1.foo.bar", { { "1.foo.bar", Lexer::Type::word }, NO, NO, NO, NO }, },

// Identifier
{ "foo", { { "foo", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "Çirçös", { { "Çirçös", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "☺", { { "☺", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "name", { { "name", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "f1", { { "f1", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "foo.bar", { { "foo.bar", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "a1a1a1a1_a1a1_a1a1_a1a1_a1a1a1a1a1a1", { { "a1a1a1a1_a1a1_a1a1_a1a1_a1a1a1a1a1a1", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "foo", { { "foo", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "Çirçös", { { "Çirçös", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "☺", { { "☺", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "name", { { "name", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "f1", { { "f1", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "foo.bar", { { "foo.bar", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "a1a1a1a1_a1a1_a1a1_a1a1_a1a1a1a1a1a1", { { "a1a1a1a1_a1a1_a1a1_a1a1_a1a1a1a1a1a1", Lexer::Type::identifier }, NO, NO, NO, NO }, },

// Word that starts wih 'or', which is an operator, but should be ignored.
{ "ordinary", { { "ordinary", Lexer::Type::identifier }, NO, NO, NO, NO }, },
{ "ordinary", { { "ordinary", Lexer::Type::identifier }, NO, NO, NO, NO }, },

// DOM
{ "due", { { "due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.tags", { { "123.tags", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.tags.PENDING", { { "123.tags.PENDING", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.description", { { "123.description", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.description", { { "123.annotations.1.description", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.entry", { { "123.annotations.1.entry", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.entry.year", { { "123.annotations.1.entry.year", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e7520b749.due", { { "a360fc44-315c-4366-b70c-ea7e7520b749.due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "12345678-1234-1234-1234-123456789012.due", { { "12345678-1234-1234-1234-123456789012.due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "system.os", { { "system.os", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "rc.foo", { { "rc.foo", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "due", { { "due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.tags", { { "123.tags", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.tags.PENDING", { { "123.tags.PENDING", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.description", { { "123.description", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.description", { { "123.annotations.1.description", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.entry", { { "123.annotations.1.entry", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "123.annotations.1.entry.year", { { "123.annotations.1.entry.year", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e7520b749.due", { { "a360fc44-315c-4366-b70c-ea7e7520b749.due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "12345678-1234-1234-1234-123456789012.due", { { "12345678-1234-1234-1234-123456789012.due", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "system.os", { { "system.os", Lexer::Type::dom }, NO, NO, NO, NO }, },
{ "rc.foo", { { "rc.foo", Lexer::Type::dom }, NO, NO, NO, NO }, },

// URL
{ "http://example.com", { { "http://example.com", Lexer::Type::url }, NO, NO, NO, NO }, },
{ "https://foo.example.com", { { "https://foo.example.com", Lexer::Type::url }, NO, NO, NO, NO }, },
{ "http://example.com", { { "http://example.com", Lexer::Type::url }, NO, NO, NO, NO }, },
{ "https://foo.example.com", { { "https://foo.example.com", Lexer::Type::url }, NO, NO, NO, NO }, },

// String
{ "'one two'", { { "'one two'", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"three\"", { { "\"three\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "'\\''", { { "'''", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"\\\"\"", { { "\"\"\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"\tfoo\t\"", { { "\"\tfoo\t\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"\\u20A43\"", { { "\"₤3\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"U+20AC4\"", { { "\"€4\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "'one two'", { { "'one two'", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"three\"", { { "\"three\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "'\\''", { { "'''", Lexer::Type::string }, NO, NO, NO, NO }, },
{R"("\"")", { {R"(""")", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"\tfoo\t\"", { { "\"\tfoo\t\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{R"("\u20A43")", { { "\"₤3\"", Lexer::Type::string }, NO, NO, NO, NO }, },
{ "\"U+20AC4\"", { { "\"€4\"", Lexer::Type::string }, NO, NO, NO, NO }, },

// Number
{ "1", { { "1", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "3.14", { { "3.14", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "6.02217e23", { { "6.02217e23", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "1.2e-3.4", { { "1.2e-3.4", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "0x2f", { { "0x2f", Lexer::Type::hex }, NO, NO, NO, NO }, },
{ "1", { { "1", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "3.14", { { "3.14", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "6.02217e23", { { "6.02217e23", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "1.2e-3.4", { { "1.2e-3.4", Lexer::Type::number }, NO, NO, NO, NO }, },
{ "0x2f", { { "0x2f", Lexer::Type::hex }, NO, NO, NO, NO }, },

// Set (1,2,4-7,9)
{ "1,2", { { "1,2", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2", { { "1-2", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4", { { "1-2,4", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4,6-8", { { "1-2,4,6-8", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4,6-8,10-12", { { "1-2,4,6-8,10-12", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1,2", { { "1,2", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2", { { "1-2", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4", { { "1-2,4", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4,6-8", { { "1-2,4,6-8", Lexer::Type::set }, NO, NO, NO, NO }, },
{ "1-2,4,6-8,10-12", { { "1-2,4,6-8,10-12", Lexer::Type::set }, NO, NO, NO, NO }, },

// Pair
{ "name:value", { { "name:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name=value", { { "name=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:=value", { { "name:=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod:value", { { "name.mod:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod=value", { { "name.mod=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:", { { "name:", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name=", { { "name=", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod:", { { "name.mod:", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod=", { { "name.mod=", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "pro:'P 1'", { { "pro:'P 1'", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc:x", { { "rc:x", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name:value", { { "rc.name:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name=value", { { "rc.name=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name:=value", { { "rc.name:=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "due:='eow - 2d'", { { "due:='eow - 2d'", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:'foo\nbar'", { { "name:'foo\nbar'", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:value", { { "name:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name=value", { { "name=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:=value", { { "name:=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod:value", { { "name.mod:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod=value", { { "name.mod=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:", { { "name:", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name=", { { "name=", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod:", { { "name.mod:", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name.mod=", { { "name.mod=", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "pro:'P 1'", { { "pro:'P 1'", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc:x", { { "rc:x", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name:value", { { "rc.name:value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name=value", { { "rc.name=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "rc.name:=value", { { "rc.name:=value", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "due:='eow - 2d'", { { "due:='eow - 2d'", Lexer::Type::pair }, NO, NO, NO, NO }, },
{ "name:'foo\nbar'", { { "name:'foo\nbar'", Lexer::Type::pair }, NO, NO, NO, NO }, },

// Operator - complete set
{ "^", { { "^", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!", { { "!", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_neg_", { { "_neg_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_pos_", { { "_pos_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_hastag_", { { "_hastag_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_notag_", { { "_notag_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "*", { { "*", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "/", { { "/", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "%", { { "%", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "+", { { "+", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "-", { { "-", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "<=", { { "<=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ">=", { { ">=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ">", { { ">", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "<", { { "<", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "=", { { "=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "==", { { "==", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!=", { { "!=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!==", { { "!==", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "~", { { "~", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!~", { { "!~", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "and", { { "and", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "or", { { "or", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "xor", { { "xor", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "(", { { "(", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ")", { { ")", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "^", { { "^", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!", { { "!", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_neg_", { { "_neg_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_pos_", { { "_pos_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_hastag_", { { "_hastag_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "_notag_", { { "_notag_", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "*", { { "*", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "/", { { "/", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "%", { { "%", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "+", { { "+", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "-", { { "-", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "<=", { { "<=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ">=", { { ">=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ">", { { ">", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "<", { { "<", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "=", { { "=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "==", { { "==", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!=", { { "!=", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!==", { { "!==", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "~", { { "~", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "!~", { { "!~", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "and", { { "and", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "or", { { "or", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "xor", { { "xor", Lexer::Type::op }, NO, NO, NO, NO }, },
{ "(", { { "(", Lexer::Type::op }, NO, NO, NO, NO }, },
{ ")", { { ")", Lexer::Type::op }, NO, NO, NO, NO }, },

// UUID
{ "ffffffff-ffff-ffff-ffff-ffffffffffff", { { "ffffffff-ffff-ffff-ffff-ffffffffffff", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000-0000-0000-0000000", { { "00000000-0000-0000-0000-0000000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000-0000-0000", { { "00000000-0000-0000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000-0000", { { "00000000-0000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000", { { "00000000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000", { { "00000000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e7520b749", { { "a360fc44-315c-4366-b70c-ea7e7520b749", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e752", { { "a360fc44-315c-4366-b70c-ea7e752", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c", { { "a360fc44-315c-4366-b70c", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366", { { "a360fc44-315c-4366", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c", { { "a360fc44-315c", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44", { { "a360fc44", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "ffffffff-ffff-ffff-ffff-ffffffffffff", { { "ffffffff-ffff-ffff-ffff-ffffffffffff", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "0000000d-0000-0000-0000-000000000000", { { "0000000d-0000-0000-0000-000000000000", Lexer::Type::uuid, true, true }, NO, NO, NO, NO }, },
{ "00000000-0000-0000-0000-0000000", { { "00000000-0000-0000-0000-0000000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000-0000-0000", { { "00000000-0000-0000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000-0000", { { "00000000-0000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000-0000", { { "00000000-0000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "00000000", { { "00000000", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e7520b749", { { "a360fc44-315c-4366-b70c-ea7e7520b749", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c-ea7e752", { { "a360fc44-315c-4366-b70c-ea7e752", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366-b70c", { { "a360fc44-315c-4366-b70c", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c-4366", { { "a360fc44-315c-4366", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44-315c", { { "a360fc44-315c", Lexer::Type::uuid }, NO, NO, NO, NO }, },
{ "a360fc44", { { "a360fc44", Lexer::Type::uuid }, NO, NO, NO, NO }, },

// Date
{ "2015-W01", { { "2015-W01", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "2015-02-17", { { "2015-02-17", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "2013-11-29T22:58:00Z", { { "2013-11-29T22:58:00Z", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "20131129T225800Z", { { "20131129T225800Z", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "2015-W01", { { "2015-W01", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "2015-02-17", { { "2015-02-17", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "2013-11-29T22:58:00Z", { { "2013-11-29T22:58:00Z", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "20131129T225800Z", { { "20131129T225800Z", Lexer::Type::date }, NO, NO, NO, NO }, },
#ifdef PRODUCT_TASKWARRIOR
{ "9th", { { "9th", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "10th", { { "10th", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "today", { { "today", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "9th", { { "9th", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "10th", { { "10th", Lexer::Type::date }, NO, NO, NO, NO }, },
{ "today", { { "today", Lexer::Type::date }, NO, NO, NO, NO }, },
#endif

// Duration
{ "year", { { "year", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "4weeks", { { "4weeks", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "PT23H", { { "PT23H", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1second", { { "1second", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1s", { { "1s", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1minute", { { "1minute", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "2hour", { { "2hour", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "3 days", { { "3 days", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "4w", { { "4w", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "5mo", { { "5mo", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "6 years", { { "6 years", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "P1Y", { { "P1Y", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "PT1H", { { "PT1H", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "P1Y1M1DT1H1M1S", { { "P1Y1M1DT1H1M1S", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "year", { { "year", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "4weeks", { { "4weeks", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "PT23H", { { "PT23H", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1second", { { "1second", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1s", { { "1s", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "1minute", { { "1minute", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "2hour", { { "2hour", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "3 days", { { "3 days", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "4w", { { "4w", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "5mo", { { "5mo", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "6 years", { { "6 years", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "P1Y", { { "P1Y", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "PT1H", { { "PT1H", Lexer::Type::duration }, NO, NO, NO, NO }, },
{ "P1Y1M1DT1H1M1S", { { "P1Y1M1DT1H1M1S", Lexer::Type::duration }, NO, NO, NO, NO }, },

// Misc
{ "--", { { "--", Lexer::Type::separator }, NO, NO, NO, NO }, },
{ "--", { { "--", Lexer::Type::separator }, NO, NO, NO, NO }, },

// Expression
// due:eom-2w
// due < eom + 1w + 1d
// ( /pattern/ or 8ad2e3db-914d-4832-b0e6-72fa04f6e331,3b6218f9-726a-44fc-aa63-889ff52be442 )
{ "(1+2)", { { "(", Lexer::Type::op },
{ "1", Lexer::Type::number },
{ "+", Lexer::Type::op },
{ "2", Lexer::Type::number },
{ ")", Lexer::Type::op }, }, },
{ "description~pattern", { { "description", Lexer::Type::dom },
{ "~", Lexer::Type::op },
{ "pattern", Lexer::Type::identifier }, NO, NO }, },
{ "(+tag)", { { "(", Lexer::Type::op },
{ "+tag", Lexer::Type::tag },
{ ")", Lexer::Type::op }, NO, NO }, },
{ "(name:value)", { { "(", Lexer::Type::op },
{ "name:value", Lexer::Type::pair },
{ ")", Lexer::Type::op }, NO, NO }, },
{ "(1+2)", { { "(", Lexer::Type::op },
{ "1", Lexer::Type::number },
{ "+", Lexer::Type::op },
{ "2", Lexer::Type::number },
{ ")", Lexer::Type::op }, }, },
{ "description~pattern", { { "description", Lexer::Type::dom },
{ "~", Lexer::Type::op },
{ "pattern", Lexer::Type::identifier }, NO, NO }, },
{ "(+tag)", { { "(", Lexer::Type::op },
{ "+tag", Lexer::Type::tag },
{ ")", Lexer::Type::op }, NO, NO }, },
{ "(name:value)", { { "(", Lexer::Type::op },
{ "name:value", Lexer::Type::pair },
{ ")", Lexer::Type::op }, NO, NO }, },
};
#define NUM_TESTS (sizeof (lexerTests) / sizeof (lexerTests[0]))

for (unsigned int i = 0; i < NUM_TESTS; i++)
for (const auto& lexerTest : lexerTests)
{
// The isolated test puts the input string directly into the Lexer.
Lexer isolated (lexerTests[i].input);
Lexer isolated (lexerTest.input);

for (int j = 0; j < 5; j++)
for (const auto& result : lexerTest.results)
{
if (lexerTests[i].results[j].token[0])
if (result.token[0])
{
// Isolated: "<token>"
t.ok (isolated.token (token, type), "Isolated Lexer::token(...) --> true");
t.is (token, lexerTests[i].results[j].token, " token --> " + token);
t.is ((int)type, (int)lexerTests[i].results[j].type, " type --> Lexer::Type::" + Lexer::typeToString (type));
t.is (token, result.token, " token --> " + token, result.expfail_token);
t.is ((int)type, (int)result.type, " type --> Lexer::Type::" + Lexer::typeToString (type), result.expfail_type);
}
}

// The embedded test surrounds the input string with a space.
Lexer embedded (std::string (" ") + lexerTests[i].input + " ");
Lexer embedded (std::string (" ") + lexerTest.input + " ");

for (int j = 0; j < 5; j++)
for (const auto& result : lexerTest.results)
{
if (lexerTests[i].results[j].token[0])
if (result.token[0])
{
// Embedded: "<token>"
t.ok (embedded.token (token, type), "Embedded Lexer::token(...) --> true");
t.is (token, lexerTests[i].results[j].token, " token --> " + token);
t.is ((int)type, (int)lexerTests[i].results[j].type, " type --> Lexer::Type::" + Lexer::typeToString (type));
t.is (token, result.token, " token --> " + token, result.expfail_token);
t.is ((int)type, (int)result.type, " type --> Lexer::Type::" + Lexer::typeToString (type), result.expfail_type);
}
}
}
@ -571,8 +573,8 @@ int main (int, char**)
t.is (Lexer::trimLeft ("", " \t"), "", "Lexer::trimLeft '' -> ''");
t.is (Lexer::trimLeft ("xxx"), "xxx", "Lexer::trimLeft 'xxx' -> 'xxx'");
t.is (Lexer::trimLeft ("xxx", " \t"), "xxx", "Lexer::trimLeft 'xxx' -> 'xxx'");
t.is (Lexer::trimLeft (" \t xxx \t "), "\t xxx \t ", "Lexer::trimLeft ' \\t xxx \\t ' -> '\\t xxx \\t '");
t.is (Lexer::trimLeft (" \t xxx \t ", " \t"), "xxx \t ", "Lexer::trimLeft ' \\t xxx \\t ' -> 'xxx \\t '");
t.is (Lexer::trimLeft (" \t xxx \t "), "\t xxx \t ",R"(Lexer::trimLeft ' \t xxx \t ' -> '\t xxx \t ')");
t.is (Lexer::trimLeft (" \t xxx \t ", " \t"), "xxx \t ", R"(Lexer::trimLeft ' \t xxx \t ' -> 'xxx \t ')");

// std::string Lexer::trimRight (const std::string& in, const std::string& t /*= " "*/)
t.is (Lexer::trimRight (""), "", "Lexer::trimRight '' -> ''");
@ -580,8 +582,8 @@ int main (int, char**)
t.is (Lexer::trimRight ("", " \t"), "", "Lexer::trimRight '' -> ''");
t.is (Lexer::trimRight ("xxx"), "xxx", "Lexer::trimRight 'xxx' -> 'xxx'");
t.is (Lexer::trimRight ("xxx", " \t"), "xxx", "Lexer::trimRight 'xxx' -> 'xxx'");
t.is (Lexer::trimRight (" \t xxx \t "), " \t xxx \t", "Lexer::trimRight ' \\t xxx \\t ' -> ' \\t xxx \\t'");
t.is (Lexer::trimRight (" \t xxx \t ", " \t"), " \t xxx", "Lexer::trimRight ' \\t xxx \\t ' -> ' \\t xxx'");
t.is (Lexer::trimRight (" \t xxx \t "), " \t xxx \t", R"(Lexer::trimRight ' \t xxx \t ' -> ' \t xxx \t')");
t.is (Lexer::trimRight (" \t xxx \t ", " \t"), " \t xxx", R"(Lexer::trimRight ' \t xxx \t ' -> ' \t xxx')");

// std::string Lexer::trim (const std::string& in, const std::string& t /*= " "*/)
t.is (Lexer::trim (""), "", "Lexer::trim '' -> ''");
@ -589,7 +591,7 @@ int main (int, char**)
t.is (Lexer::trim ("", " \t"), "", "Lexer::trim '' -> ''");
t.is (Lexer::trim ("xxx"), "xxx", "Lexer::trim 'xxx' -> 'xxx'");
t.is (Lexer::trim ("xxx", " \t"), "xxx", "Lexer::trim 'xxx' -> 'xxx'");
t.is (Lexer::trim (" \t xxx \t "), "\t xxx \t", "Lexer::trim ' \\t xxx \\t ' -> '\\t xxx \\t'");
t.is (Lexer::trim (" \t xxx \t "), "\t xxx \t",R"(Lexer::trim ' \t xxx \t ' -> '\t xxx \t')");
t.is (Lexer::trim (" \t xxx \t ", " \t"), "xxx", "Lexer::trim ' \\t xxx \\t ' -> 'xxx'");

return 0;
@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal

@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@ -44,7 +44,7 @@ class TestMath(TestCase):
        cls.t.config("dateformat", "YYYY-MM-DD")

        # YYYY-12-21.
        cls.when = "%d-12-22T00:00:00\n" % datetime.now().year
        cls.when = "%d-12-21T23:59:59\n" % datetime.now().year

        # Different ways of specifying YYYY-12-21.
        cls.t("add one due:eoy-10days")
@ -52,7 +52,7 @@ class TestMath(TestCase):
        cls.t("add three 'due:eoy-10days'")
        cls.t("add four due:'eoy - 10days'")
        cls.t("add five 'due:eoy - 10days'")
        cls.t("add six 'due:{}-01-01T00:00:00 - 10days'".format (datetime.now().year + 1))
        cls.t("add six 'due:{}-12-31T23:59:59 - 10days'".format (datetime.now().year))

    def test_compact_unquoted(self):
        """compact unquoted"""
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

test/nag.t

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -91,6 +91,70 @@ class TestNagging(TestCase):
         code, out, err = self.t("1 done")
         self.assertNotIn("NAG", err)

+    def test_nagging_bulk(self):
+        """Verify that only one nag message occurs when completing multiple tasks"""
+        self.t("add one")
+        self.t.faketime("+1d")
+        self.t("add two")
+        self.t("add two")
+
+        code, out, err = self.t("2 done")
+
+        self.assertEqual(err.count("NAG"), 1)
+
+    def test_nagging_active(self):
+        """Bug 2163: Verify that nagging does not occur when completing the most urgent active task"""
+        self.t("add one")
+        self.t.faketime("+1d")
+        self.t("add two")
+        self.t("2 start")
+
+        code, out, err = self.t("2 done")
+
+        # Taskwarrior should not nag about more urgent tasks
+        self.assertNotIn("NAG", err)
+
+    def test_nagging_start_only_task(self):
+        """Verify that nagging does not occur when there are no other tasks while starting a task"""
+        self.t("add one")
+
+        code, out, err = self.t("1 start")
+
+        self.assertNotIn("NAG", err)
+
+    def test_nagging_start(self):
+        """Verify that nagging occurs when there are READY tasks of higher urgency while starting a task"""
+        self.t("add one")
+        self.t.faketime("+10d")
+        self.t("add two")
+
+        code, out, err = self.t("2 start")
+
+        self.assertIn("NAG", err)
+
+    def test_nagging_nonag(self):
+        """Verify that nagging does not occur when a task has the nonag tag"""
+        self.t("add one +other")
+        self.t.faketime("+10d")
+        self.t("add two +nonag")
+
+        code, out, err = self.t("2 done")
+
+        self.assertNotIn("NAG", err)
+
+    def test_nagging_nonag_bulk(self):
+        """Verify that nagging occurs even if some tasks in a bulk operation have a nonag tag"""
+        self.t("add one +other")
+        self.t.faketime("+10d")
+        self.t("add two +other")
+        self.t.faketime("+10d")
+        self.t("add three +nonag")
+
+        code, out, err = self.t("2-3 done")
+
+        self.assertIn("NAG", err)
+
+
 if __name__ == "__main__":
     from simpletap import TAPTestRunner
     unittest.main(testRunner=TAPTestRunner())

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 ###############################################################################
 #
-# Copyright 2006 - 2021, Paul Beckingham, Federico Hernandez.
+# Copyright 2006 - 2021, Tomas Babej, Paul Beckingham, Federico Hernandez.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal

Some files were not shown because too many files have changed in this diff.