        self.assert_('@@' in lines[2][2:])
        ## "Unterminated hunk header for patch:\n%s" % "".join(lines)

    def test_binary_lines(self):
        self.assertRaises(BinaryFile, udiff_lines, [1023 * 'a' + '\x00'], [])
        self.assertRaises(BinaryFile, udiff_lines, [], [1023 * 'a' + '\x00'])
        udiff_lines([1023 * 'a' + '\x00'], [], allow_binary=True)
        udiff_lines([], [1023 * 'a' + '\x00'], allow_binary=True)
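        # The '\x00' byte is what makes these inputs look binary to
        # udiff_lines; with allow_binary=True the same inputs are accepted
        # and the calls only need to complete without raising BinaryFile.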

    def test_external_diff(self):
        lines = external_udiff_lines(['boo\n'], ['goo\n'])
        self.check_patch(lines)
        self.assertEqual('\n', lines[-1])

    def test_external_diff_no_fileno(self):
        # Make sure that we can handle not having a fileno, even
        # if the diff is large
        lines = external_udiff_lines(['boo\n']*10000,
                                     ['goo\n']*10000)
        self.check_patch(lines)

    def test_external_diff_binary_lang_c(self):
        orig_lang = os.environ.get('LANG')
        try:
            os.environ['LANG'] = 'C'
            lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])
            # Older versions of diffutils say "Binary files", newer
            # versions just say "Files".
            self.assertContainsRe(lines[0],
                                  '(Binary f|F)iles old and new differ\n')
            self.assertEquals(lines[1:], ['\n'])
        finally:
            if orig_lang is None:
                del os.environ['LANG']
            else:
                os.environ['LANG'] = orig_lang
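        # Forcing LANG=C above pins diff's messages to English, so the
        # "(Binary f|F)iles ... differ" pattern matches regardless of the
        # locale the test suite happens to run under.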

    def test_no_external_diff(self):
        """Check that NoDiff is raised when diff is not available"""
        # Use os.environ['PATH'] to make sure no 'diff' command is available
        orig_path = os.environ['PATH']
        try:
            os.environ['PATH'] = ''
            self.assertRaises(NoDiff, external_diff,
                              'old', ['boo\n'], 'new', ['goo\n'],
                              StringIO(), diff_opts=['-u'])
        finally:
            os.environ['PATH'] = orig_path

    def test_internal_diff_default(self):
        # Default internal diff encoding is utf8
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_utf8(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='utf8')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_iso_8859_1(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='iso-8859-1')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xb5\n',
                           '+++ new_\xe5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_returns_bytes(self):
        import StringIO
        output = StringIO.StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        self.failUnless(isinstance(output.getvalue(), str),
            'internal_diff should return bytestrings')
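        # The unicode file names above get encoded into the diff header
        # (utf8 by default, or the requested path_encoding), so the whole
        # diff is expected to be a plain bytestring rather than unicode.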


class TestDiffFiles(TestCaseInTempDir):

    def test_external_diff_binary(self):
        """The output when using external diff should use diff's i18n error"""
        # Make sure external_diff doesn't fail in the current LANG
        lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])

        cmd = ['diff', '-u', 'old', 'new']
        open('old', 'wb').write('\x00foobar\n')
        open('new', 'wb').write('foo\x00bar\n')
        pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        out, err = pipe.communicate()
        # Diff returns '2' on Binary files.
        self.assertEqual(2, pipe.returncode)
        # We should output whatever diff tells us, plus a trailing newline
        self.assertEqual(out.splitlines(True) + ['\n'], lines)


class TestDiffDates(TestCaseWithTransport):

    def setUp(self):
        super(TestDiffDates, self).setUp()
        self.wt = self.make_branch_and_tree('.')
        self.b = self.wt.branch
        self.build_tree_contents([
            ('file1', 'file1 contents at rev 1\n'),
            ('file2', 'file2 contents at rev 1\n')
            ])
        self.wt.add(['file1', 'file2'])
        self.wt.commit(
            message='Revision 1',
            timestamp=1143849600, # 2006-04-01 00:00:00 UTC
            timezone=0,
            rev_id='rev-1')
        self.build_tree_contents([('file1', 'file1 contents at rev 2\n')])
        self.wt.commit(
            message='Revision 2',
            timestamp=1143936000, # 2006-04-02 00:00:00 UTC
            timezone=0,
            rev_id='rev-2')
        self.build_tree_contents([('file2', 'file2 contents at rev 3\n')])
        self.wt.commit(
            message='Revision 3',
            timestamp=1144022400, # 2006-04-03 00:00:00 UTC
            timezone=0,
            rev_id='rev-3')
        self.wt.remove(['file2'])
        self.wt.commit(
            message='Revision 4',
            timestamp=1144108800, # 2006-04-04 00:00:00 UTC
            timezone=0,
            rev_id='rev-4')
        self.build_tree_contents([
            ('file1', 'file1 contents in working tree\n')
            ])
        # set the date stamps for files in the working tree to known values
        os.utime('file1', (1144195200, 1144195200)) # 2006-04-05 00:00:00 UTC

    def get_diff(self, tree1, tree2, specific_files=None, working_tree=None):
        output = StringIO()
        if working_tree is not None:
            extra_trees = (working_tree,)
        else:
            extra_trees = ()
        show_diff_trees(tree1, tree2, output, specific_files=specific_files,
                        extra_trees=extra_trees, old_label='old/',
                        new_label='new/')
        return output.getvalue()
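    # extra_trees gives show_diff_trees additional trees to look file names
    # up in; here it lets a working-tree name such as 'file1b' identify a
    # file (see test_show_diff_specified below), even though the diff itself
    # is taken between the two trees passed in.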

    def test_diff_rev_tree_working_tree(self):
        output = self.get_diff(self.wt.basis_tree(), self.wt)
        # note that the date for old/file1 is from rev 2 rather than from
        # the basis revision (rev 4)
        self.assertEqualDiff(output, '''\
=== modified file 'file1'
--- old/file1\t2006-04-02 00:00:00 +0000
+++ new/file1\t2006-04-05 00:00:00 +0000
@@ -1,1 +1,1 @@
-file1 contents at rev 2
+file1 contents in working tree

''')

    def test_diff_rev_tree_rev_tree(self):
        tree1 = self.b.repository.revision_tree('rev-2')
        tree2 = self.b.repository.revision_tree('rev-3')
        output = self.get_diff(tree1, tree2)
        self.assertEqualDiff(output, '''\
=== modified file 'file2'
--- old/file2\t2006-04-01 00:00:00 +0000
+++ new/file2\t2006-04-03 00:00:00 +0000
@@ -1,1 +1,1 @@
-file2 contents at rev 1
+file2 contents at rev 3

''')

    def test_diff_add_files(self):
        tree1 = self.b.repository.revision_tree(None)
        tree2 = self.b.repository.revision_tree('rev-1')
        output = self.get_diff(tree1, tree2)
        # the files have the epoch time stamp for the tree in which
        # they don't exist.
        self.assertEqualDiff(output, '''\
=== added file 'file1'
--- old/file1\t1970-01-01 00:00:00 +0000
+++ new/file1\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file1 contents at rev 1

=== added file 'file2'
--- old/file2\t1970-01-01 00:00:00 +0000
+++ new/file2\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file2 contents at rev 1

''')

    def test_diff_remove_files(self):
        tree1 = self.b.repository.revision_tree('rev-3')
        tree2 = self.b.repository.revision_tree('rev-4')
        output = self.get_diff(tree1, tree2)
        # the file has the epoch time stamp for the tree in which
        # it doesn't exist.
        self.assertEqualDiff(output, '''\
=== removed file 'file2'
--- old/file2\t2006-04-03 00:00:00 +0000
+++ new/file2\t1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-file2 contents at rev 3

''')

    def test_show_diff_specified(self):
        """A working tree filename can be used to identify a file"""
        self.wt.rename_one('file1', 'file1b')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['file1b'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')

    def test_recursive_diff(self):
        """Children of directories are matched"""
        os.mkdir('dir1')
        os.mkdir('dir2')
        self.wt.add(['dir1', 'dir2'])
        self.wt.rename_one('file1', 'dir1/file1')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir1'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir2'],
                            working_tree=self.wt)
        self.assertNotContainsRe(out, 'file1\t')


class TestPatienceDiffLib(TestCase):

    def test_unique_lcs(self):
        unique_lcs = bzrlib.patiencediff.unique_lcs
        self.assertEquals(unique_lcs('', ''), [])
        self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
        self.assertEquals(unique_lcs('a', 'b'), [])
        self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
        self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
        self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
        self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
                                                         (3,3), (4,4)])
        self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])
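        # unique_lcs pairs up only the elements that occur exactly once in
        # each input and then takes their longest common subsequence: in the
        # last case 'a' and 'c' repeat within 'acbac', so just the unique
        # 'b' survives, giving the single match (2, 1).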

    def test_recurse_matches(self):
        def test_one(a, b, matches):
            test_matches = []
            bzrlib.patiencediff.recurse_matches(a, b, 0, 0, len(a), len(b),
                test_matches, 10)
            self.assertEquals(test_matches, matches)

        test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
                 [(0, 0), (2, 2), (4, 4)])
        test_one(['a', 'c', 'b', 'a', 'c'], ['a', 'b', 'c'],
                 [(0, 0), (2, 1), (4, 2)])

        # recurse_matches doesn't match non-unique
        # lines surrounded by bogus text.
        # The update has been done in patiencediff.SequenceMatcher instead

        # This is what it could be
        #test_one('aBccDe', 'abccde', [(0,0), (2,2), (3,3), (5,5)])

        # This is what it currently gives:
        test_one('aBccDe', 'abccde', [(0,0), (5,5)])

    def test_matching_blocks(self):
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            self.assertEquals((len(a), len(b), 0), blocks[-1])
            self.assertEquals(expected_blocks, blocks[:-1])

        # Some basic matching tests
        chk_blocks('', '', [])
        chk_blocks([], [], [])
        chk_blocks('abcd', 'abcd', [(0, 0, 4)])
        chk_blocks('abcd', 'abce', [(0, 0, 3)])
        chk_blocks('eabc', 'abce', [(1, 0, 3)])
        chk_blocks('eabce', 'abce', [(1, 0, 4)])
        chk_blocks('abcde', 'abXde', [(0, 0, 2), (3, 3, 2)])
        chk_blocks('abcde', 'abXYZde', [(0, 0, 2), (3, 5, 2)])
        chk_blocks('abde', 'abXYZde', [(0, 0, 2), (2, 5, 2)])
        # This may check too much, but it checks to see that
        # a copied block stays attached to the previous section,
        # not the later one.
        # difflib would tend to grab the trailing longest match
        # which would make the diff not look right
        chk_blocks('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                   [(0, 0, 6), (6, 11, 10)])
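        # For comparison, a plain difflib.SequenceMatcher on the same pair
        # tends to grab the long trailing match first, giving blocks roughly
        # like [(0, 0, 3), (3, 8, 13)], i.e. the diff would insert 'defxy'
        # after 'abc' instead of 'xydef' after 'abcdef'.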

        # make sure it supports passing in lists
        chk_blocks(['hello there\n',
                    'world\n',
                    'how are you today?\n'],
                   ['hello there\n',
                    'how are you today?\n'],
                   [(0, 0, 1), (2, 1, 1)])

        # non unique lines surrounded by non-matching lines
        # won't be found
        chk_blocks('aBccDe', 'abccde', [(0,0,1), (5,5,1)])

        # But they only need to be locally unique
        chk_blocks('aBcDec', 'abcdec', [(0,0,1), (2,2,1), (4,4,2)])

        # non unique blocks won't be matched
        chk_blocks('aBcdEcdFg', 'abcdecdfg', [(0,0,1), (8,8,1)])

        # but locally unique ones will
        chk_blocks('aBcdEeXcdFg', 'abcdecdfg', [(0,0,1), (2,2,2),
                                                (5,4,1), (7,5,2), (10,8,1)])

        chk_blocks('abbabbXd', 'cabbabxd', [(7,7,1)])
        chk_blocks('abbabbbb', 'cabbabbc', [])
        chk_blocks('bbbbbbbb', 'cbbbbbbc', [])
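        # In these last three cases almost nothing is unique on both sides:
        # only the lone 'd' can be anchored in the first pair, and the
        # all-'b' strings have no unique element at all, so no blocks are
        # matched even though the texts look superficially similar.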

    def test_opcodes(self):
        def chk_ops(a, b, expected_codes):
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            self.assertEquals(expected_codes, s.get_opcodes())

        chk_ops('', '', [])
        chk_ops([], [], [])
        chk_ops('abcd', 'abcd', [('equal', 0,4, 0,4)])
        chk_ops('abcd', 'abce', [('equal', 0,3, 0,3),
                                 ('replace', 3,4, 3,4)
                                ])
        chk_ops('eabc', 'abce', [('delete', 0,1, 0,0),
                                 ('equal', 1,4, 0,3),
                                 ('insert', 4,4, 3,4)
                                ])
        chk_ops('eabce', 'abce', [('delete', 0,1, 0,0),
                                  ('equal', 1,5, 0,4)
                                 ])
        chk_ops('abcde', 'abXde', [('equal', 0,2, 0,2),
                                   ('replace', 2,3, 2,3),
                                   ('equal', 3,5, 3,5)
                                  ])
        chk_ops('abcde', 'abXYZde', [('equal', 0,2, 0,2),
                                     ('replace', 2,3, 2,5),
                                     ('equal', 3,5, 5,7)
                                    ])
        chk_ops('abde', 'abXYZde', [('equal', 0,2, 0,2),
                                    ('insert', 2,2, 2,5),
                                    ('equal', 2,4, 5,7)
                                   ])
        chk_ops('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                [('equal', 0,6, 0,6),
                 ('insert', 6,6, 6,11),
                 ('equal', 6,16, 11,21)
                ])
        chk_ops(['hello there\n'
                 , 'world\n'
                 , 'how are you today?\n'],
                ['hello there\n'
                 , 'how are you today?\n'],
                [('equal', 0,1, 0,1),
                 ('delete', 1,2, 1,1),
                 ('equal', 2,3, 1,2)
                ])
        chk_ops('aBccDe', 'abccde',
                [('equal', 0,1, 0,1),
                 ('replace', 1,5, 1,5),
                 ('equal', 5,6, 5,6)
                ])
        chk_ops('aBcDec', 'abcdec',
                [('equal', 0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal', 2,3, 2,3),
                 ('replace', 3,4, 3,4),
                 ('equal', 4,6, 4,6)
                ])
        chk_ops('aBcdEcdFg', 'abcdecdfg',
                [('equal', 0,1, 0,1),
                 ('replace', 1,8, 1,8),
                 ('equal', 8,9, 8,9)
                ])
        chk_ops('aBcdEeXcdFg', 'abcdecdfg',
                [('equal', 0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal', 2,4, 2,4),
                 ('delete', 4,5, 4,4),
                 ('equal', 5,6, 4,5),
                 ('delete', 6,7, 5,5),
                 ('equal', 7,9, 5,7),
                 ('replace', 9,10, 7,8),
                 ('equal', 10,11, 8,9)
                ])

    def test_multiple_ranges(self):
        # There was an earlier bug where we used a bad set of ranges,
        # this triggers that specific bug, to make sure it doesn't regress
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            x = blocks.pop()
            self.assertEquals(x, (len(a), len(b), 0))
            self.assertEquals(expected_blocks, blocks)

        chk_blocks('abcdefghijklmnop'
                   , 'abcXghiYZQRSTUVWXYZijklmnop'
                   , [(0, 0, 3), (6, 4, 3), (9, 20, 7)])

        chk_blocks('ABCd efghIjk L'
                   , 'AxyzBCn mo pqrstuvwI1 2 L'
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

        # These are rot13 code snippets.
        chk_blocks('''\
trg nqqrq jura lbh nqq n svyr va gur qverpgbel.

gnxrf_netf = ['svyr*']
gnxrf_bcgvbaf = ['ab-erphefr']

qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr):
sebz omeyvo.nqq vzcbeg fzneg_nqq, nqq_ercbegre_cevag, nqq_ercbegre_ahyy
ercbegre = nqq_ercbegre_ahyy
ercbegre = nqq_ercbegre_cevag
fzneg_nqq(svyr_yvfg, abg ab_erphefr, ercbegre)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True), '''\
trg nqqrq jura lbh nqq n svyr va gur qverpgbel.

--qel-eha jvyy fubj juvpu svyrf jbhyq or nqqrq, ohg abg npghnyyl
gnxrf_netf = ['svyr*']
gnxrf_bcgvbaf = ['ab-erphefr', 'qel-eha']

qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr, qel_eha=Snyfr):
# Guvf vf cbvagyrff, ohg V'q engure abg envfr na reebe
npgvba = omeyvo.nqq.nqq_npgvba_ahyy
npgvba = omeyvo.nqq.nqq_npgvba_cevag
npgvba = omeyvo.nqq.nqq_npgvba_nqq
npgvba = omeyvo.nqq.nqq_npgvba_nqq_naq_cevag
omeyvo.nqq.fzneg_nqq(svyr_yvfg, abg ab_erphefr, npgvba)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True)
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

    def test_patience_unified_diff(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        unified_diff = bzrlib.patiencediff.unified_diff
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals([ '--- \n',
                            '+++ \n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
                            '-world\n',
                            ' how are you today?\n'
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        # This is the result with LongestCommonSubstring matching
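        # (i.e. the stock difflib matcher: it anchors on 'abc' and reports
        # the insertion as 'defxy', so the hunk starts at line 1, whereas
        # the patience matcher below keeps 'abcdef' together and reports
        # the insertion as 'xydef', so its hunk starts at line 4.)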
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                          ]
                          , list(unified_diff(txt_a, txt_b)))
        # And the patience diff
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))


class TestPatienceDiffLibFiles(TestCaseInTempDir):

    def test_patience_unified_diff_files(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        open('a1', 'wb').writelines(txt_a)
        open('b1', 'wb').writelines(txt_b)

        unified_diff_files = bzrlib.patiencediff.unified_diff_files
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals(['--- a1 \n',
                           '+++ b1 \n',
                           '@@ -1,3 +1,2 @@\n',
                           ' hello there\n',
                           '-world\n',
                           ' how are you today?\n',
                          ]
                          , list(unified_diff_files('a1', 'b1',
                                 sequencematcher=psm)))

        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        open('a2', 'wb').writelines(txt_a)
        open('b2', 'wb').writelines(txt_b)

        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                          ]
                          , list(unified_diff_files('a2', 'b2')))

        # And the patience diff
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff_files('a2', 'b2',
                                 sequencematcher=psm)))