Unit Tests

- Now that the 'diagnostics' command can detect duplicate UUID
  values, it makes sense to incorporate that into unit tests that
  employ multiple recurring tasks.
Paul Beckingham 2012-03-03 10:05:27 -05:00
parent 173d24b3fb
commit 524f7f0919
10 changed files with 40 additions and 10 deletions
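
Every affected test script receives the same two-line addition, plus a one-test bump to its Test::More plan. A minimal standalone sketch of the pattern (using recur.rc, one of the rc files from the diffs below, and assuming the usual in-tree ../src/task binary used by these tests):

    use strict;
    use warnings;
    use Test::More tests => 1;

    # Run the diagnostics report after the recurrence checks and assert
    # that it reports no duplicate UUIDs among the generated tasks.
    my $output = qx{../src/task rc:recur.rc diag};
    like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');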

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 6;
+use Test::More tests => 7;
 # Create the rc file.
 if (open my $fh, '>', 'bug.rc')
@@ -66,6 +66,9 @@ my ($id) = $output =~ /(\d+)\s+nonrecurring/;
 $output = qx{../src/task rc:bug.rc $id modify due:};
 unlike ($output, qr/You cannot remove the due date from a recurring task./ms, 'Can remove due date from a non-recurring task');
+$output = qx{../src/task rc:bug.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key bug.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 11;
+use Test::More tests => 12;
 # Create the rc file.
 if (open my $fh, '>', 'bug.rc')
@@ -61,6 +61,9 @@ like ($output, qr/2\s+P.+H.+R/ms, 'Found modified child 0 (propagated from paren
 like ($output, qr/3\s+P.+H.+R/ms, 'Found modified child 1 (propagated from parent)');
 like ($output, qr/4\s+P.+H.+R/ms, 'Found modified child 2 (propagated from parent)');
+$output = qx{../src/task rc:bug.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key bug.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 12;
+use Test::More tests => 13;
 # Create the rc file.
 if (open my $fh, '>', 'annual.rc')
@@ -68,6 +68,9 @@ like ($output, qr/9\s+1\/1\/2007\s+(?:-|\d+\ssecs?)\s+foo/, 'synthetic 9 no cre
 like ($output, qr/10\s+1\/1\/2008\s+(?:-|\d+\ssecs?)\s+foo/, 'synthetic 10 no creep');
 like ($output, qr/11\s+1\/1\/2009\s+(?:-|\d+\ssecs?)\s+foo/, 'synthetic 11 no creep');
+$output = qx{../src/task rc:annual.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key annual.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 40;
+use Test::More tests => 41;
 # Create the rc file.
 if (open my $fh, '>', 'period.rc')
@@ -153,6 +153,9 @@ like ($output, qr/\b2m\b/, 'verify 2m');
 like ($output, qr/\b2q\b/, 'verify 2q');
 like ($output, qr/\b2y\b/, 'verify 2y');
+$output = qx{../src/task rc:period.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key period.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 4;
+use Test::More tests => 5;
 # Create the rc file.
 if (open my $fh, '>', 'uuid.rc')
@@ -81,6 +81,9 @@ $unique_uuids{$uuid} = undef;
 is (scalar (@all_uuids), 6, '6 tasks created');
 is (scalar (keys %unique_uuids), 6, '6 unique UUIDs');
+$output = qx{../src/task rc:uuid.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key uuid.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 4;
+use Test::More tests => 5;
 # Create the rc file.
 if (open my $fh, '>', 'recur.rc')
@@ -50,6 +50,9 @@ $output = qx{../src/task rc:recur.rc rc.recurrence.limit:4 long};
 @tasks = $output =~ /(ONE)/g;
 is (scalar @tasks, 4, 'recurrence.limit override to 4');
+$output = qx{../src/task rc:recur.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key recur.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 4;
+use Test::More tests => 5;
 # Create the rc file.
 if (open my $fh, '>', 'recur.rc')
@@ -53,6 +53,9 @@ like ($output, qr/first .* third .* second/msx, 'daily 3d weekly');
 $output = qx{../src/task rc:recur.rc desc};
 like ($output, qr/second .* third .* first/msx, 'weekly 3d daily');
+$output = qx{../src/task rc:recur.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key recur.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 10;
+use Test::More tests => 11;
 # Create the rc file.
 if (open my $fh, '>', 'recur.rc')
@@ -90,6 +90,9 @@ like ($output, qr/Deleted 1 task\./, '3 deleted');
 # TODO Duplicate a recurring child task
 # TODO Duplicate a recurring parent task
+$output = qx{../src/task rc:recur.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key recur.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 7;
+use Test::More tests => 8;
 # Create the rc file.
 if (open my $fh, '>', 'recur.rc')
@@ -55,6 +55,9 @@ qx{../src/task rc:recur.rc 5 do};
 $output = qx{../src/task rc:recur.rc list};
 like ($output, qr/and has been deleted/, 'Parent task deleted');
+$output = qx{../src/task rc:recur.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key recur.rc);
 ok (! -r 'pending.data' &&

@@ -28,7 +28,7 @@
 use strict;
 use warnings;
-use Test::More tests => 4;
+use Test::More tests => 5;
 # Create the rc file.
 if (open my $fh, '>', 'recur.rc')
@@ -49,6 +49,9 @@ like ($output, qr/Recurrence\s+weekdays/, 'task recurs every weekday');
 qx{../src/task rc:recur.rc 1 do};
 $output = qx{../src/task rc:recur.rc list};
+$output = qx{../src/task rc:recur.rc diag};
+like ($output, qr/No duplicates found/, 'No duplicate UUIDs detected');
 # Cleanup.
 unlink qw(pending.data completed.data undo.data backlog.data synch.key recur.rc);
 ok (! -r 'pending.data' &&