~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_diff.py

  • Committer: Patch Queue Manager
  • Date: 2016-02-01 19:13:13 UTC
  • mfrom: (6614.2.2 trunk)
  • Revision ID: pqm@pqm.ubuntu.com-20160201191313-wdfvmfff1djde6oq
(vila) Release 2.7.0 (Vincent Ladeuil)

@@ -1,4 +1,4 @@
-# Copyright (C) 2005-2014 Canonical Ltd
+# Copyright (C) 2005-2012, 2014, 2016 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -94,7 +94,7 @@
         """diff generates a valid diff for patches that add a newline"""
         lines = udiff_lines(['boo'], ['boo\n'])
         self.check_patch(lines)
-        self.assertEquals(lines[4], '\\ No newline at end of file\n')
+        self.assertEqual(lines[4], '\\ No newline at end of file\n')
             ## "expected no-nl, got %r" % lines[4]
 
     def test_add_nl_2(self):
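
Most of this diff is mechanical: the deprecated unittest aliases assertEquals and assert_ are replaced by their supported spellings assertEqual and assertTrue. A minimal standalone illustration of the modern names (my own example, not part of bzrlib):

import unittest

class ModernAssertNames(unittest.TestCase):
    # assertEquals/assert_ are legacy aliases kept for backwards
    # compatibility; assertEqual/assertTrue are the supported names,
    # which is all this branch switches to.

    def test_equal(self):
        self.assertEqual(1 + 1, 2)

    def test_true(self):
        self.assertTrue('@@' in '@@ -1,1 +1,1 @@')

if __name__ == '__main__':
    unittest.main()
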
@@ -103,7 +103,7 @@
         """
         lines = udiff_lines(['boo'], ['goo\n'])
         self.check_patch(lines)
-        self.assertEquals(lines[4], '\\ No newline at end of file\n')
+        self.assertEqual(lines[4], '\\ No newline at end of file\n')
             ## "expected no-nl, got %r" % lines[4]
 
     def test_remove_nl(self):
@@ -112,21 +112,21 @@
         """
         lines = udiff_lines(['boo\n'], ['boo'])
         self.check_patch(lines)
-        self.assertEquals(lines[5], '\\ No newline at end of file\n')
+        self.assertEqual(lines[5], '\\ No newline at end of file\n')
             ## "expected no-nl, got %r" % lines[5]
 
     def check_patch(self, lines):
-        self.assert_(len(lines) > 1)
+        self.assertTrue(len(lines) > 1)
             ## "Not enough lines for a file header for patch:\n%s" % "".join(lines)
-        self.assert_(lines[0].startswith ('---'))
+        self.assertTrue(lines[0].startswith ('---'))
             ## 'No orig line for patch:\n%s' % "".join(lines)
-        self.assert_(lines[1].startswith ('+++'))
+        self.assertTrue(lines[1].startswith ('+++'))
             ## 'No mod line for patch:\n%s' % "".join(lines)
-        self.assert_(len(lines) > 2)
+        self.assertTrue(len(lines) > 2)
             ## "No hunks for patch:\n%s" % "".join(lines)
-        self.assert_(lines[2].startswith('@@'))
+        self.assertTrue(lines[2].startswith('@@'))
             ## "No hunk header for patch:\n%s" % "".join(lines)
-        self.assert_('@@' in lines[2][2:])
+        self.assertTrue('@@' in lines[2][2:])
             ## "Unterminated hunk header for patch:\n%s" % "".join(lines)
 
     def test_binary_lines(self):
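
check_patch() only verifies the shape of a unified diff: a '---' line, then a '+++' line, then at least one terminated '@@ ... @@' hunk header. The same invariants can be demonstrated on a patch produced with the standard library's difflib (used here purely for illustration in place of bzrlib's udiff_lines helper):

import difflib

old = ['boo\n']
new = ['goo\n']
patch = list(difflib.unified_diff(old, new, fromfile='old', tofile='new'))

# The structural checks mirrored from check_patch():
assert patch[0].startswith('---')   # original-file header
assert patch[1].startswith('+++')   # modified-file header
assert patch[2].startswith('@@')    # hunk header ...
assert '@@' in patch[2][2:]         # ... and it is terminated
print(''.join(patch))
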
@@ -157,7 +157,7 @@
         # Older versions of diffutils say "Binary files", newer
         # versions just say "Files".
         self.assertContainsRe(lines[0], '(Binary f|F)iles old and new differ\n')
-        self.assertEquals(lines[1:], ['\n'])
+        self.assertEqual(lines[1:], ['\n'])
 
     def test_no_external_diff(self):
         """Check that NoDiff is raised when diff is not available"""
@@ -175,7 +175,7 @@
                            u'new_\xe5', ['new_text\n'], output)
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old_\xc2\xb5\n',
+        self.assertEqual(['--- old_\xc2\xb5\n',
                            '+++ new_\xc3\xa5\n',
                            '@@ -1,1 +1,1 @@\n',
                            '-old_text\n',
@@ -191,7 +191,7 @@
                            path_encoding='utf8')
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old_\xc2\xb5\n',
+        self.assertEqual(['--- old_\xc2\xb5\n',
                            '+++ new_\xc3\xa5\n',
                            '@@ -1,1 +1,1 @@\n',
                            '-old_text\n',
@@ -207,7 +207,7 @@
                            path_encoding='iso-8859-1')
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old_\xb5\n',
+        self.assertEqual(['--- old_\xb5\n',
                            '+++ new_\xe5\n',
                            '@@ -1,1 +1,1 @@\n',
                            '-old_text\n',
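
The expected '---'/'+++' lines in these assertions are byte strings, so the utf8 and iso-8859-1 variants differ only in how the non-ASCII file names get encoded. The bytes involved, as a quick worked example (plain Python, for illustration):

# MICRO SIGN (U+00B5) and LATIN SMALL LETTER A WITH RING ABOVE (U+00E5):
print(repr(u'old_\xb5'.encode('utf8')))        # 'old_\xc2\xb5'  (the utf8 cases)
print(repr(u'new_\xe5'.encode('utf8')))        # 'new_\xc3\xa5'
print(repr(u'old_\xb5'.encode('iso-8859-1')))  # 'old_\xb5'      (the iso-8859-1 case)
print(repr(u'new_\xe5'.encode('iso-8859-1')))  # 'new_\xe5'
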
@@ -244,7 +244,7 @@
                            'same_text\n','same_text\n','new_text\n'], output)
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old\n',
+        self.assertEqual(['--- old\n',
                            '+++ new\n',
                            '@@ -3,4 +3,4 @@\n',
                            ' same_text\n',
@@ -265,7 +265,7 @@
                            context_lines=0)
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old\n',
+        self.assertEqual(['--- old\n',
                            '+++ new\n',
                            '@@ -6,1 +6,1 @@\n',
                            '-old_text\n',
@@ -283,7 +283,7 @@
                            context_lines=4)
         lines = output.getvalue().splitlines(True)
         self.check_patch(lines)
-        self.assertEquals(['--- old\n',
+        self.assertEqual(['--- old\n',
                            '+++ new\n',
                            '@@ -2,5 +2,5 @@\n',
                            ' same_text\n',
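
These hunks exercise the context_lines parameter: with no context the hunk shrinks to just the changed line ('@@ -6,1 +6,1 @@'), and with four lines of context it widens to '@@ -2,5 +2,5 @@'. The standard library exposes the same knob as the n argument of difflib.unified_diff; shown only as an analogy (difflib abbreviates a one-line range to '-6' where bzrlib writes '-6,1'):

import difflib

old = ['same_text\n'] * 5 + ['old_text\n'] + ['same_text\n'] * 4
new = ['same_text\n'] * 5 + ['new_text\n'] + ['same_text\n'] * 4

print(list(difflib.unified_diff(old, new, n=0))[2])  # '@@ -6 +6 @@'
print(list(difflib.unified_diff(old, new, n=4))[2])  # '@@ -2,9 +2,9 @@'
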
@@ -864,28 +864,28 @@
         b = ''.join([unichr(i) for i in range(4300, 4800, 2)])
         sm = self._PatienceSequenceMatcher(None, a, b)
         mb = sm.get_matching_blocks()
-        self.assertEquals(35, len(mb))
+        self.assertEqual(35, len(mb))
 
     def test_unique_lcs(self):
         unique_lcs = self._unique_lcs
-        self.assertEquals(unique_lcs('', ''), [])
-        self.assertEquals(unique_lcs('', 'a'), [])
-        self.assertEquals(unique_lcs('a', ''), [])
-        self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
-        self.assertEquals(unique_lcs('a', 'b'), [])
-        self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
-        self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
-        self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
-        self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
+        self.assertEqual(unique_lcs('', ''), [])
+        self.assertEqual(unique_lcs('', 'a'), [])
+        self.assertEqual(unique_lcs('a', ''), [])
+        self.assertEqual(unique_lcs('a', 'a'), [(0,0)])
+        self.assertEqual(unique_lcs('a', 'b'), [])
+        self.assertEqual(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
+        self.assertEqual(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
+        self.assertEqual(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
+        self.assertEqual(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
                                                          (3,3), (4,4)])
-        self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])
+        self.assertEqual(unique_lcs('acbac', 'abc'), [(2,1)])
 
     def test_recurse_matches(self):
         def test_one(a, b, matches):
             test_matches = []
             self._recurse_matches(
                 a, b, 0, 0, len(a), len(b), test_matches, 10)
-            self.assertEquals(test_matches, matches)
+            self.assertEqual(test_matches, matches)
 
         test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
                  [(0, 0), (2, 2), (4, 4)])
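
test_unique_lcs documents the heart of patience diff: only lines that occur exactly once in each input are considered, and the result is the longest common subsequence of those unique lines, returned as (a_position, b_position) pairs. The following standalone sketch (my own illustration, not bzrlib's _patiencediff_py implementation) reproduces the expectations asserted above:

from bisect import bisect_left
from collections import Counter

def unique_lcs_sketch(a, b):
    """Longest common subsequence restricted to lines unique in both a and b.

    Returns [(apos, bpos), ...] in increasing order, like the values the
    test above asserts.
    """
    a_counts, b_counts = Counter(a), Counter(b)
    a_index = dict((line, i) for i, line in enumerate(a) if a_counts[line] == 1)
    # Candidate matches, walked in b order.
    pairs = [(a_index[line], j) for j, line in enumerate(b)
             if b_counts[line] == 1 and line in a_index]
    # Patience sorting: longest strictly increasing run of a-positions.
    tails, back = [], {}
    for apos, bpos in pairs:
        k = bisect_left([t[0] for t in tails], apos)
        back[(apos, bpos)] = tails[k - 1] if k else None
        if k == len(tails):
            tails.append((apos, bpos))
        else:
            tails[k] = (apos, bpos)
    result, node = [], (tails[-1] if tails else None)
    while node is not None:
        result.append(node)
        node = back[node]
    return result[::-1]

assert unique_lcs_sketch('abcde', 'cdeab') == [(2, 0), (3, 1), (4, 2)]
assert unique_lcs_sketch('acbac', 'abc') == [(2, 1)]
assert unique_lcs_sketch('abXde', 'abYde') == [(0, 0), (1, 1), (3, 3), (4, 4)]
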
@@ -996,7 +996,7 @@
     def test_opcodes(self):
         def chk_ops(a, b, expected_codes):
             s = self._PatienceSequenceMatcher(None, a, b)
-            self.assertEquals(expected_codes, s.get_opcodes())
+            self.assertEqual(expected_codes, s.get_opcodes())
 
         chk_ops('', '', [])
         chk_ops([], [], [])
@@ -1072,7 +1072,7 @@
     def test_grouped_opcodes(self):
         def chk_ops(a, b, expected_codes, n=3):
             s = self._PatienceSequenceMatcher(None, a, b)
-            self.assertEquals(expected_codes, list(s.get_grouped_opcodes(n)))
+            self.assertEqual(expected_codes, list(s.get_grouped_opcodes(n)))
 
         chk_ops('', '', [])
         chk_ops([], [], [])
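
PatienceSequenceMatcher follows the difflib.SequenceMatcher API, so the expected_codes compared here use the standard (tag, i1, i2, j1, j2) opcode format. For reference, the same calls against the standard library matcher:

import difflib

s = difflib.SequenceMatcher(None, 'abcd', 'abXcd')
print(s.get_opcodes())
# [('equal', 0, 2, 0, 2), ('insert', 2, 2, 2, 3), ('equal', 2, 4, 3, 5)]
print(list(s.get_grouped_opcodes(1)))
# groups of changes, each trimmed to at most one line of surrounding context
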
@@ -1172,7 +1172,7 @@
                  'how are you today?\n']
         unified_diff = patiencediff.unified_diff
         psm = self._PatienceSequenceMatcher
-        self.assertEquals(['--- \n',
+        self.assertEqual(['--- \n',
                            '+++ \n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
@@ -1184,7 +1184,7 @@
         txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
         txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
         # This is the result with LongestCommonSubstring matching
-        self.assertEquals(['--- \n',
+        self.assertEqual(['--- \n',
                            '+++ \n',
                            '@@ -1,6 +1,11 @@\n',
                            ' a\n',
@@ -1200,7 +1200,7 @@
                            ' f\n']
                           , list(unified_diff(txt_a, txt_b)))
         # And the patience diff
-        self.assertEquals(['--- \n',
+        self.assertEqual(['--- \n',
                            '+++ \n',
                            '@@ -4,6 +4,11 @@\n',
                            ' d\n',
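
This test contrasts the two matchers on the same input: plain longest-common-substring matching starts its hunk at line 1 ('@@ -1,6 +1,11 @@'), while patience matching anchors on unique lines and produces the tighter '@@ -4,6 +4,11 @@' hunk. A usage sketch, assuming bzrlib.patiencediff is importable and exposes unified_diff and PatienceSequenceMatcher as used above:

from bzrlib import patiencediff

txt_a = [c + '\n' for c in 'abcdefghijklmnop']
txt_b = [c + '\n' for c in 'abcdefxydefghijklmnop']

# Default (difflib-style) matching:
print(''.join(patiencediff.unified_diff(txt_a, txt_b)))
# Patience matching, as exercised by the assertion above:
print(''.join(patiencediff.unified_diff(
    txt_a, txt_b, sequencematcher=patiencediff.PatienceSequenceMatcher)))
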
@@ -1226,7 +1226,7 @@
                  'how are you today?\n']
         unified_diff = patiencediff.unified_diff
         psm = self._PatienceSequenceMatcher
-        self.assertEquals(['--- a\t2008-08-08\n',
+        self.assertEqual(['--- a\t2008-08-08\n',
                            '+++ b\t2008-09-09\n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
@@ -1284,7 +1284,7 @@
 
         unified_diff_files = patiencediff.unified_diff_files
         psm = self._PatienceSequenceMatcher
-        self.assertEquals(['--- a1\n',
+        self.assertEqual(['--- a1\n',
                            '+++ b1\n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
@@ -1300,7 +1300,7 @@
         with open('b2', 'wb') as f: f.writelines(txt_b)
 
         # This is the result with LongestCommonSubstring matching
-        self.assertEquals(['--- a2\n',
+        self.assertEqual(['--- a2\n',
                            '+++ b2\n',
                            '@@ -1,6 +1,11 @@\n',
                            ' a\n',
@@ -1317,23 +1317,22 @@
                           , list(unified_diff_files('a2', 'b2')))
 
         # And the patience diff
-        self.assertEquals(['--- a2\n',
-                           '+++ b2\n',
-                           '@@ -4,6 +4,11 @@\n',
-                           ' d\n',
-                           ' e\n',
-                           ' f\n',
-                           '+x\n',
-                           '+y\n',
-                           '+d\n',
-                           '+e\n',
-                           '+f\n',
-                           ' g\n',
-                           ' h\n',
-                           ' i\n',
-                          ]
-                          , list(unified_diff_files('a2', 'b2',
-                                 sequencematcher=psm)))
+        self.assertEqual(['--- a2\n',
+                          '+++ b2\n',
+                          '@@ -4,6 +4,11 @@\n',
+                          ' d\n',
+                          ' e\n',
+                          ' f\n',
+                          '+x\n',
+                          '+y\n',
+                          '+d\n',
+                          '+e\n',
+                          '+f\n',
+                          ' g\n',
+                          ' h\n',
+                          ' i\n'],
+                         list(unified_diff_files('a2', 'b2',
+                                                 sequencematcher=psm)))
 
 
 class TestPatienceDiffLibFiles_c(TestPatienceDiffLibFiles):
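
unified_diff_files reads its two inputs from disk by name rather than taking lists of lines, which is why the test above writes a1/b1 and a2/b2 first. A minimal usage sketch under the same assumption as before (bzrlib.patiencediff importable; the file names are just examples):

from bzrlib import patiencediff

with open('a2', 'w') as f:
    f.writelines(c + '\n' for c in 'abcdefghijklmnop')
with open('b2', 'w') as f:
    f.writelines(c + '\n' for c in 'abcdefxydefghijklmnop')

print(''.join(patiencediff.unified_diff_files(
    'a2', 'b2', sequencematcher=patiencediff.PatienceSequenceMatcher)))
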
@@ -1500,24 +1499,22 @@
                                     None, None, None)
         for _, scenario in EncodingAdapter.encoding_scenarios:
             encoding = scenario['encoding']
-            dirname  = scenario['info']['directory']
+            dirname = scenario['info']['directory']
             filename = scenario['info']['filename']
 
             self.overrideAttr(diffobj, '_fenc', lambda: encoding)
             relpath = dirname + u'/' + filename
             fullpath = diffobj._safe_filename('safe', relpath)
-            self.assertEqual(
-                    fullpath,
-                    fullpath.encode(encoding).decode(encoding)
-                    )
-            self.assert_(fullpath.startswith(diffobj._root + '/safe'))
+            self.assertEqual(fullpath,
+                             fullpath.encode(encoding).decode(encoding))
+            self.assertTrue(fullpath.startswith(diffobj._root + '/safe'))
 
     def test_unencodable_filename(self):
         diffobj = diff.DiffFromTool(['dummy', '@old_path', '@new_path'],
                                     None, None, None)
         for _, scenario in EncodingAdapter.encoding_scenarios:
             encoding = scenario['encoding']
-            dirname  = scenario['info']['directory']
+            dirname = scenario['info']['directory']
             filename = scenario['info']['filename']
 
             if encoding == 'iso-8859-1':
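
Both _safe_filename tests assert the same two properties: the path handed to the external diff tool must round-trip through the tree encoding without loss, and it must start with diffobj._root + '/safe' (the prefix passed in). The round-trip idea in isolation, as a plain-Python sketch independent of DiffFromTool:

def roundtrips(path, encoding):
    # True when encoding the path and decoding it back is lossless.
    try:
        return path == path.encode(encoding).decode(encoding)
    except UnicodeError:
        return False

assert roundtrips(u'safe/\xb5', 'iso-8859-1')        # U+00B5 fits in latin-1
assert not roundtrips(u'safe/\u1234', 'iso-8859-1')  # U+1234 does not
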
@@ -1528,11 +1525,9 @@
             self.overrideAttr(diffobj, '_fenc', lambda: encoding)
             relpath = dirname + u'/' + filename
             fullpath = diffobj._safe_filename('safe', relpath)
-            self.assertEqual(
-                    fullpath,
-                    fullpath.encode(encoding).decode(encoding)
-                    )
-            self.assert_(fullpath.startswith(diffobj._root + '/safe'))
+            self.assertEqual(fullpath,
+                             fullpath.encode(encoding).decode(encoding))
+            self.assertTrue(fullpath.startswith(diffobj._root + '/safe'))
 
 
 class TestGetTreesAndBranchesToDiffLocked(tests.TestCaseWithTransport):