# Copyright (C) 2005, 2006 Canonical Development Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import subprocess

from cStringIO import StringIO
from tempfile import TemporaryFile

import bzrlib.patiencediff
from bzrlib.diff import internal_diff, external_diff, show_diff_trees
from bzrlib.errors import BinaryFile, NoDiff
# NOTE(review): legacy import kept from the older interleaved revision;
# confirm bzrlib.selftest still exists before relying on it.
from bzrlib.selftest import TestBase
from bzrlib.tests import (TestCase, TestCaseWithTransport,
                          TestCaseInTempDir, TestSkipped)
def udiff_lines(old, new, allow_binary=False):
    """Run internal_diff over `old` and `new` line lists, return diff lines.

    :param old: list of lines for the 'old' side
    :param new: list of lines for the 'new' side
    :param allow_binary: passed through to internal_diff; when False a
        BinaryFile error is raised for binary-looking input
    :return: the unified diff as a list of lines
    """
    # NOTE(review): this region was a corrupted interleave of two revisions
    # (duplicate signatures, missing buffer setup); reconstructed — verify
    # against VCS history.
    output = StringIO()
    internal_diff('old', old, 'new', new, output, allow_binary)
    # Rewind so callers get the whole diff, not an empty tail.
    output.seek(0, 0)
    return output.readlines()
def external_udiff_lines(old, new, use_stringio=False):
    """Run the external 'diff -u' command over two line lists.

    :param old: list of lines for the 'old' side
    :param new: list of lines for the 'new' side
    :param use_stringio: use a StringIO buffer instead of a real file;
        StringIO has no fileno, so it tests a different codepath
    :return: the diff output as a list of lines
    :raises TestSkipped: when no external 'diff' binary is available
    """
    # NOTE(review): reconstructed from a corrupted interleave — the
    # if/else, try/except and seek lines were missing; verify against VCS.
    if use_stringio:
        # StringIO has no fileno, so it tests a different codepath
        output = StringIO()
    else:
        output = TemporaryFile()
    try:
        external_diff('old', old, 'new', new, output, diff_opts=['-u'])
    except NoDiff:
        raise TestSkipped('external "diff" not present to test')
    output.seek(0, 0)
    lines = output.readlines()
    output.close()
    return lines
# NOTE(review): this class was a corrupted interleave of two revisions (the
# obsolete AddNL/AddNL2/RemoveNL TestBase classes were spliced into it, and
# several try/finally and buffer-setup lines were dropped).  Reconstructed
# to the newer TestCase-based revision — verify against VCS history.
class TestDiff(TestCase):
    """Tests for internal_diff and external_diff behaviour."""

    def test_add_nl(self):
        """diff generates a valid diff for patches that add a newline"""
        lines = udiff_lines(['boo'], ['boo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_add_nl_2(self):
        """diff generates a valid diff for patches that change last line and
        add a newline
        """
        lines = udiff_lines(['boo'], ['goo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_remove_nl(self):
        """diff generates a valid diff for patches that change last line and
        add a newline
        """
        lines = udiff_lines(['boo\n'], ['boo'])
        self.check_patch(lines)
        self.assertEquals(lines[5], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[5]

    def check_patch(self, lines):
        """Assert that `lines` form a structurally valid unified diff."""
        self.assert_(len(lines) > 1)
            ## "Not enough lines for a file header for patch:\n%s" % "".join(lines)
        self.assert_(lines[0].startswith ('---'))
            ## 'No orig line for patch:\n%s' % "".join(lines)
        self.assert_(lines[1].startswith ('+++'))
            ## 'No mod line for patch:\n%s' % "".join(lines)
        self.assert_(len(lines) > 2)
            ## "No hunks for patch:\n%s" % "".join(lines)
        self.assert_(lines[2].startswith('@@'))
            ## "No hunk header for patch:\n%s" % "".join(lines)
        self.assert_('@@' in lines[2][2:])
            ## "Unterminated hunk header for patch:\n%s" % "".join(lines)

    def test_binary_lines(self):
        # Binary input raises unless allow_binary is passed.
        self.assertRaises(BinaryFile, udiff_lines, [1023 * 'a' + '\x00'], [])
        self.assertRaises(BinaryFile, udiff_lines, [], [1023 * 'a' + '\x00'])
        udiff_lines([1023 * 'a' + '\x00'], [], allow_binary=True)
        udiff_lines([], [1023 * 'a' + '\x00'], allow_binary=True)

    def test_external_diff(self):
        lines = external_udiff_lines(['boo\n'], ['goo\n'])
        self.check_patch(lines)
        self.assertEqual('\n', lines[-1])

    def test_external_diff_no_fileno(self):
        # Make sure that we can handle not having a fileno, even
        # if the diff is large
        lines = external_udiff_lines(['boo\n']*10000,
                                     ['goo\n']*10000,
                                     use_stringio=True)
        self.check_patch(lines)

    def test_external_diff_binary_lang_c(self):
        # Force LANG=C so diff's binary-file message is predictable.
        orig_lang = os.environ.get('LANG')
        try:
            os.environ['LANG'] = 'C'
            lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])
            self.assertEqual(['Binary files old and new differ\n', '\n'],
                             lines)
        finally:
            if orig_lang is None:
                del os.environ['LANG']
            else:
                os.environ['LANG'] = orig_lang

    def test_no_external_diff(self):
        """Check that NoDiff is raised when diff is not available"""
        # Use os.environ['PATH'] to make sure no 'diff' command is available
        orig_path = os.environ['PATH']
        try:
            os.environ['PATH'] = ''
            self.assertRaises(NoDiff, external_diff,
                              'old', ['boo\n'], 'new', ['goo\n'],
                              StringIO(), diff_opts=['-u'])
        finally:
            os.environ['PATH'] = orig_path

    def test_internal_diff_default(self):
        # Default internal diff encoding is utf8
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_utf8(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='utf8')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_iso_8859_1(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='iso-8859-1')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xb5\n',
                           '+++ new_\xe5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_returns_bytes(self):
        # Use the pure-python StringIO, which accepts both str and unicode,
        # to check that only bytestrings are written out.
        import StringIO
        output = StringIO.StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        self.failUnless(isinstance(output.getvalue(), str),
            'internal_diff should return bytestrings')
class TestDiffFiles(TestCaseInTempDir):
    """Tests that compare our diff output against the external diff tool."""

    def test_external_diff_binary(self):
        """The output when using external diff should use diff's i18n error"""
        # Make sure external_diff doesn't fail in the current LANG
        lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])

        cmd = ['diff', '-u', 'old', 'new']
        open('old', 'wb').write('\x00foobar\n')
        open('new', 'wb').write('foo\x00bar\n')
        pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                     stdin=subprocess.PIPE)
        out, err = pipe.communicate()
        # Diff returns '2' on Binary files.
        self.assertEqual(2, pipe.returncode)
        # We should output whatever diff tells us, plus a trailing newline
        self.assertEqual(out.splitlines(True) + ['\n'], lines)
# NOTE(review): reconstructed from a corrupted interleave — the commit()
# call lines, '@@' hunk headers and closing triple-quotes were missing.
# Timezone arguments assumed 0 (all expected output shows '+0000');
# verify against VCS history.
class TestDiffDates(TestCaseWithTransport):
    """Tests for the dates shown in diff file headers."""

    def setUp(self):
        super(TestDiffDates, self).setUp()
        self.wt = self.make_branch_and_tree('.')
        self.b = self.wt.branch
        self.build_tree_contents([
            ('file1', 'file1 contents at rev 1\n'),
            ('file2', 'file2 contents at rev 1\n')
            ])
        self.wt.add(['file1', 'file2'])
        self.wt.commit(
            message='Revision 1',
            timestamp=1143849600, # 2006-04-01 00:00:00 UTC
            timezone=0,
            rev_id='rev-1')
        self.build_tree_contents([('file1', 'file1 contents at rev 2\n')])
        self.wt.commit(
            message='Revision 2',
            timestamp=1143936000, # 2006-04-02 00:00:00 UTC
            timezone=0,
            rev_id='rev-2')
        self.build_tree_contents([('file2', 'file2 contents at rev 3\n')])
        self.wt.commit(
            message='Revision 3',
            timestamp=1144022400, # 2006-04-03 00:00:00 UTC
            timezone=0,
            rev_id='rev-3')
        self.wt.remove(['file2'])
        self.wt.commit(
            message='Revision 4',
            timestamp=1144108800, # 2006-04-04 00:00:00 UTC
            timezone=0,
            rev_id='rev-4')
        self.build_tree_contents([
            ('file1', 'file1 contents in working tree\n')
            ])
        # set the date stamps for files in the working tree to known values
        os.utime('file1', (1144195200, 1144195200)) # 2006-04-05 00:00:00 UTC

    def get_diff(self, tree1, tree2, specific_files=None, working_tree=None):
        """Return show_diff_trees output between the two trees as a string."""
        output = StringIO()
        if working_tree is not None:
            extra_trees = (working_tree,)
        else:
            extra_trees = ()
        show_diff_trees(tree1, tree2, output, specific_files=specific_files,
                        extra_trees=extra_trees, old_label='old/',
                        new_label='new/')
        return output.getvalue()

    def test_diff_rev_tree_working_tree(self):
        output = self.get_diff(self.wt.basis_tree(), self.wt)
        # note that the date for old/file1 is from rev 2 rather than from
        # the basis revision (rev 4)
        self.assertEqualDiff(output, '''\
=== modified file 'file1'
--- old/file1\t2006-04-02 00:00:00 +0000
+++ new/file1\t2006-04-05 00:00:00 +0000
@@ -1,1 +1,1 @@
-file1 contents at rev 2
+file1 contents in working tree

''')

    def test_diff_rev_tree_rev_tree(self):
        tree1 = self.b.repository.revision_tree('rev-2')
        tree2 = self.b.repository.revision_tree('rev-3')
        output = self.get_diff(tree1, tree2)
        self.assertEqualDiff(output, '''\
=== modified file 'file2'
--- old/file2\t2006-04-01 00:00:00 +0000
+++ new/file2\t2006-04-03 00:00:00 +0000
@@ -1,1 +1,1 @@
-file2 contents at rev 1
+file2 contents at rev 3

''')

    def test_diff_add_files(self):
        tree1 = self.b.repository.revision_tree(None)
        tree2 = self.b.repository.revision_tree('rev-1')
        output = self.get_diff(tree1, tree2)
        # the files have the epoch time stamp for the tree in which
        # they don't exist.
        self.assertEqualDiff(output, '''\
=== added file 'file1'
--- old/file1\t1970-01-01 00:00:00 +0000
+++ new/file1\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file1 contents at rev 1

=== added file 'file2'
--- old/file2\t1970-01-01 00:00:00 +0000
+++ new/file2\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file2 contents at rev 1

''')

    def test_diff_remove_files(self):
        tree1 = self.b.repository.revision_tree('rev-3')
        tree2 = self.b.repository.revision_tree('rev-4')
        output = self.get_diff(tree1, tree2)
        # the file has the epoch time stamp for the tree in which
        # it doesn't exist.
        self.assertEqualDiff(output, '''\
=== removed file 'file2'
--- old/file2\t2006-04-03 00:00:00 +0000
+++ new/file2\t1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-file2 contents at rev 3

''')

    def test_show_diff_specified(self):
        """A working tree filename can be used to identify a file"""
        self.wt.rename_one('file1', 'file1b')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['file1b'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')

    def test_recursive_diff(self):
        """Children of directories are matched"""
        os.mkdir('dir1')
        os.mkdir('dir2')
        self.wt.add(['dir1', 'dir2'])
        self.wt.rename_one('file1', 'dir1/file1')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir1'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir2'],
                            working_tree=self.wt)
        self.assertNotContainsRe(out, 'file1\t')
# NOTE(review): reconstructed from a corrupted interleave — many expected
# value lines and rot13 snippet lines were missing; the fills below decode
# consistently with the visible rot13 lines, but verify against VCS history.
class TestPatienceDiffLib(TestCase):
    """Tests for the patience diff algorithm helpers."""

    def test_unique_lcs(self):
        unique_lcs = bzrlib.patiencediff.unique_lcs
        self.assertEquals(unique_lcs('', ''), [])
        self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
        self.assertEquals(unique_lcs('a', 'b'), [])
        self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
        self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
        self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
        self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
                                                         (3,3), (4,4)])
        self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])

    def test_recurse_matches(self):
        def test_one(a, b, matches):
            test_matches = []
            bzrlib.patiencediff.recurse_matches(a, b, 0, 0, len(a), len(b),
                test_matches, 10)
            self.assertEquals(test_matches, matches)

        test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
                 [(0, 0), (2, 2), (4, 4)])
        test_one(['a', 'c', 'b', 'a', 'c'], ['a', 'b', 'c'],
                 [(0, 0), (2, 1), (4, 2)])

        # recurse_matches doesn't match non-unique
        # lines surrounded by bogus text.
        # The update has been done in patiencediff.SequenceMatcher instead

        # This is what it could be
        #test_one('aBccDe', 'abccde', [(0,0), (2,2), (3,3), (5,5)])

        # This is what it currently gives:
        test_one('aBccDe', 'abccde', [(0,0), (5,5)])

    def test_matching_blocks(self):
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            self.assertEquals((len(a), len(b), 0), blocks[-1])
            self.assertEquals(expected_blocks, blocks[:-1])

        # Some basic matching tests
        chk_blocks('', '', [])
        chk_blocks([], [], [])
        chk_blocks('abcd', 'abcd', [(0, 0, 4)])
        chk_blocks('abcd', 'abce', [(0, 0, 3)])
        chk_blocks('eabc', 'abce', [(1, 0, 3)])
        chk_blocks('eabce', 'abce', [(1, 0, 4)])
        chk_blocks('abcde', 'abXde', [(0, 0, 2), (3, 3, 2)])
        chk_blocks('abcde', 'abXYZde', [(0, 0, 2), (3, 5, 2)])
        chk_blocks('abde', 'abXYZde', [(0, 0, 2), (2, 5, 2)])
        # This may check too much, but it checks to see that
        # a copied block stays attached to the previous section,
        # not the later one.
        # difflib would tend to grab the trailing longest match
        # which would make the diff not look right
        chk_blocks('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                   [(0, 0, 6), (6, 11, 10)])

        # make sure it supports passing in lists
        chk_blocks(
                   ['hello there\n',
                    'world\n',
                    'how are you today?\n'],
                   ['hello there\n',
                    'how are you today?\n'],
                   [(0, 0, 1), (2, 1, 1)])

        # non unique lines surrounded by non-matching lines
        # won't be found
        chk_blocks('aBccDe', 'abccde', [(0,0,1), (5,5,1)])

        # But they only need to be locally unique
        chk_blocks('aBcDec', 'abcdec', [(0,0,1), (2,2,1), (4,4,2)])

        # non unique blocks won't be matched
        chk_blocks('aBcdEcdFg', 'abcdecdfg', [(0,0,1), (8,8,1)])

        # but locally unique ones will
        chk_blocks('aBcdEeXcdFg', 'abcdecdfg', [(0,0,1), (2,2,2),
                                                (5,4,1), (7,5,2), (10,8,1)])

        chk_blocks('abbabbXd', 'cabbabxd', [(7,7,1)])
        chk_blocks('abbabbbb', 'cabbabbc', [])
        chk_blocks('bbbbbbbb', 'cbbbbbbc', [])

    def test_opcodes(self):
        def chk_ops(a, b, expected_codes):
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            self.assertEquals(expected_codes, s.get_opcodes())

        chk_ops('', '', [])
        chk_ops([], [], [])
        chk_ops('abcd', 'abcd', [('equal',    0,4, 0,4)])
        chk_ops('abcd', 'abce', [('equal',   0,3, 0,3),
                                 ('replace', 3,4, 3,4)
                                ])
        chk_ops('eabc', 'abce', [('delete', 0,1, 0,0),
                                 ('equal',  1,4, 0,3),
                                 ('insert', 4,4, 3,4)
                                ])
        chk_ops('eabce', 'abce', [('delete', 0,1, 0,0),
                                  ('equal',  1,5, 0,4)
                                 ])
        chk_ops('abcde', 'abXde', [('equal',   0,2, 0,2),
                                   ('replace', 2,3, 2,3),
                                   ('equal',   3,5, 3,5)
                                  ])
        chk_ops('abcde', 'abXYZde', [('equal',   0,2, 0,2),
                                     ('replace', 2,3, 2,5),
                                     ('equal',   3,5, 5,7)
                                    ])
        chk_ops('abde', 'abXYZde', [('equal',  0,2, 0,2),
                                    ('insert', 2,2, 2,5),
                                    ('equal',  2,4, 5,7)
                                   ])
        chk_ops('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                [('equal',  0,6,  0,6),
                 ('insert', 6,6,  6,11),
                 ('equal',  6,16, 11,21)
                ])
        chk_ops(['hello there\n',
                 'world\n'
                 , 'how are you today?\n'],
                ['hello there\n'
                 , 'how are you today?\n'],
                [('equal',  0,1, 0,1),
                 ('delete', 1,2, 1,1),
                 ('equal',  2,3, 1,2),
                ])
        chk_ops('aBccDe', 'abccde',
                [('equal',   0,1, 0,1),
                 ('replace', 1,5, 1,5),
                 ('equal',   5,6, 5,6),
                ])
        chk_ops('aBcDec', 'abcdec',
                [('equal',   0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal',   2,3, 2,3),
                 ('replace', 3,4, 3,4),
                 ('equal',   4,6, 4,6),
                ])
        chk_ops('aBcdEcdFg', 'abcdecdfg',
                [('equal',   0,1, 0,1),
                 ('replace', 1,8, 1,8),
                 ('equal',   8,9, 8,9)
                ])
        chk_ops('aBcdEeXcdFg', 'abcdecdfg',
                [('equal',   0,1,  0,1),
                 ('replace', 1,2,  1,2),
                 ('equal',   2,4,  2,4),
                 ('delete',  4,5,  4,4),
                 ('equal',   5,6,  4,5),
                 ('delete',  6,7,  5,5),
                 ('equal',   7,9,  5,7),
                 ('replace', 9,10, 7,8),
                 ('equal',   10,11, 8,9)
                ])

    def test_multiple_ranges(self):
        # There was an earlier bug where we used a bad set of ranges,
        # this triggers that specific bug, to make sure it doesn't regress
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            x = blocks.pop()
            self.assertEquals(x, (len(a), len(b), 0))
            self.assertEquals(expected_blocks, blocks)

        chk_blocks('abcdefghijklmnop'
                   , 'abcXghiYZQRSTUVWXYZijklmnop'
                   , [(0, 0, 3), (6, 4, 3), (9, 20, 7)])

        chk_blocks('ABCd efghIjk L'
                   , 'AxyzBCn mo pqrstuvwI1 2 L'
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

        # These are rot13 code snippets.
        chk_blocks('''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr']
    
    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr):
        sebz omeyvo.nqq vzcbeg fzneg_nqq, nqq_ercbegre_cevag, nqq_ercbegre_ahyy
        vs vf_dhvrg():
            ercbegre = nqq_ercbegre_ahyy
        ryfr:
            ercbegre = nqq_ercbegre_cevag
        fzneg_nqq(svyr_yvfg, abg ab_erphefr, ercbegre)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True), '''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.

    --qel-eha jvyy fubj juvpu svyrf jbhyq or nqqrq, ohg abg npghnyyl
    nqq gurz.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr', 'qel-eha']

    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr, qel_eha=Snyfr):
        vzcbeg omeyvo.nqq

        vs qel_eha:
            vs vf_dhvrg():
                # Guvf vf cbvagyrff, ohg V'q engure abg envfr na reebe
                npgvba = omeyvo.nqq.nqq_npgvba_ahyy
            ryfr:
                npgvba = omeyvo.nqq.nqq_npgvba_cevag
        ryvs vf_dhvrg():
            npgvba = omeyvo.nqq.nqq_npgvba_nqq
        ryfr:
            npgvba = omeyvo.nqq.nqq_npgvba_nqq_naq_cevag

        omeyvo.nqq.fzneg_nqq(svyr_yvfg, abg ab_erphefr, npgvba)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True)
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

    def test_patience_unified_diff(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        unified_diff = bzrlib.patiencediff.unified_diff
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals([ '--- \n',
                            '+++ \n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
                            '-world\n',
                            ' how are you today?\n'
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff(txt_a, txt_b)))
        # And the patience diff
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
# NOTE(review): reconstructed from a corrupted interleave — expected diff
# lines were missing, and a stray duplicated fragment of the obsolete
# RemoveNL class trailed this block; verify against VCS history.
class TestPatienceDiffLibFiles(TestCaseInTempDir):
    """Tests for the file-based patience diff entry point."""

    def test_patience_unified_diff_files(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        open('a1', 'wb').writelines(txt_a)
        open('b1', 'wb').writelines(txt_b)

        unified_diff_files = bzrlib.patiencediff.unified_diff_files
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals(['--- a1 \n',
                           '+++ b1 \n',
                           '@@ -1,3 +1,2 @@\n',
                           ' hello there\n',
                           '-world\n',
                           ' how are you today?\n',
                          ]
                          , list(unified_diff_files('a1', 'b1',
                                 sequencematcher=psm)))

        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        open('a2', 'wb').writelines(txt_a)
        open('b2', 'wb').writelines(txt_b)

        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff_files('a2', 'b2')))

        # And the patience diff
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n']
                          , list(unified_diff_files('a2', 'b2',
                                 sequencematcher=psm)))