Copyright (C) 2007 Canonical Ltd

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

Function equate_lines based on bdiff.c from Mercurial.
Copyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>

Functions unique_lcs/recurse_matches based on _patiencediff_py.py.
Copyright (C) 2005 Bram Cohen, Copyright (C) 2005, 2006 Canonical Ltd
/* http://www.python.org/dev/peps/pep-0353/ */
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN

# define inline __inline__
#elif defined(_MSC_VER)
# define inline __inline

#define MIN(a, b) (((a) > (b)) ? (b) : (a))
#define MAX(a, b) (((a) > (b)) ? (a) : (b))
/* values from this array need to correspond to the order of the enum above */
static char *opcode_names[] = {

int hash; /* hash code of the string */
Py_ssize_t next; /* next line from the same equivalence class */
Py_ssize_t equiv; /* equivalence class */

Py_ssize_t a_head; /* first item in `a` from this equivalence class */
Py_ssize_t b_head; /* first item in `b` from this equivalence class */
Py_ssize_t last_a_pos;
Py_ssize_t last_b_pos;

struct matching_line {
Py_ssize_t a; /* index of the line in `a` */
Py_ssize_t b; /* index of the line in `b` */

struct matching_block {
Py_ssize_t a; /* index of the first line in `a` */
Py_ssize_t b; /* index of the first line in `b` */
Py_ssize_t len; /* length of the block */

struct matching_blocks {
struct matching_block *matches;

struct hashtable hashtable;
Py_ssize_t *backpointers;
} PatienceSequenceMatcher;
static inline Py_ssize_t
bisect_left(Py_ssize_t *list, Py_ssize_t item, Py_ssize_t lo, Py_ssize_t hi)
Py_ssize_t mid = lo / 2 + hi / 2 + (lo % 2 + hi % 2) / 2;
if (list[mid] < item)
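/* The split expression above equals (lo + hi) / 2 for the
   non-negative indexes used here, but avoids the overflow that the
   intermediate sum lo + hi could cause for very large values. */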
compare_lines(struct line *a, struct line *b)
return ((a->hash != b->hash) || (a->len != b->len) ||
memcmp(a->data, b->data, a->len));
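/* find_equivalence_class below uses open addressing with linear
   probing; `hsize` serves as a bitmask (the table size minus one,
   compare result->size = hsize + 1 in equate_lines), so the initial
   index `hash & hsize` and the probe step `(j + 1) & hsize` always
   stay inside the table. Probing stops at the first empty bucket or
   at a bucket whose b-chain holds an identical line. */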
find_equivalence_class(struct bucket *hashtable, Py_ssize_t hsize,
struct line *lines, struct line *ref_lines,
for (j = lines[i].hash & hsize; hashtable[j].b_head != SENTINEL; j = (j + 1) & hsize) {
if (!compare_lines(lines + i, ref_lines + hashtable[j].b_head)) {
equate_lines(struct hashtable *result,
struct line *lines_a, struct line *lines_b,
Py_ssize_t asize, Py_ssize_t bsize)
Py_ssize_t i, j, hsize;
struct bucket *hashtable;

/* check for overflow; we need the table to be at least bsize+1 */
if (bsize == PY_SSIZE_T_MAX) {
PyErr_SetNone(PyExc_OverflowError);

/* build a hash table sized to the next highest power of 2 */
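/* For example, bsize == 5 yields hsize == 8. Sizing the table to at
   least bsize + 1 buckets guarantees at least one empty bucket, so
   the linear probing in find_equivalence_class always terminates. */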
while (hsize < bsize + 1)

hashtable = (struct bucket *)malloc(sizeof(struct bucket) * hsize);
if (hashtable == NULL) {

/* initialise the hashtable */
for (i = 0; i < hsize; i++) {
hashtable[i].a_count = 0;
hashtable[i].b_count = 0;
hashtable[i].a_head = SENTINEL;
hashtable[i].b_head = SENTINEL;
/* add lines from lines_b to the hash table chains, iterating
backwards so the matching lines are sorted into the linked list
by line number (because we are adding new lines to the head of the list) */
for (i = bsize - 1; i >= 0; i--) {
/* find the first hashtable entry, which is either empty or contains
the same line as lines_b[i] */
j = find_equivalence_class(hashtable, hsize, lines_b, lines_b, i);

/* set the equivalence class */
lines_b[i].equiv = j;

/* add to the head of the equivalence class */
lines_b[i].next = hashtable[j].b_head;
hashtable[j].b_head = i;
hashtable[j].b_count++;
/* match items from lines_a to their equivalence class in lines_b,
again iterating backwards so the linked lists end up in the right order */
for (i = asize - 1; i >= 0; i--) {
/* find the first hash entry, which is either empty or contains
the same line as lines_a[i] */
j = find_equivalence_class(hashtable, hsize, lines_a, lines_b, i);

/* set the equivalence class, even if we are not interested in this
line, because the values are not pre-filled */
lines_a[i].equiv = j;

/* we are not interested in lines which are not also in lines_b */
if (hashtable[j].b_head == SENTINEL)

/* add to the head of the equivalence class */
lines_a[i].next = hashtable[j].a_head;
hashtable[j].a_head = i;
hashtable[j].a_count++;

result->last_a_pos = -1;
result->last_b_pos = -1;
result->size = hsize + 1;
result->table = hashtable;
/* Finds the longest common subsequence of unique lines in a[alo:ahi] and b[blo:bhi].

Parameter backpointers must have allocated memory for at least
4 * (bhi - blo) ints. */
unique_lcs(struct matching_line *answer,
struct hashtable *hashtable, Py_ssize_t *backpointers,
struct line *lines_a, struct line *lines_b,
Py_ssize_t alo, Py_ssize_t blo, Py_ssize_t ahi, Py_ssize_t bhi)
Py_ssize_t i, k, equiv, apos, bpos, norm_apos, norm_bpos, bsize, stacksize;
Py_ssize_t *stacks, *lasts, *btoa;

h = hashtable->table;

/* "unpack" the allocated memory */
stacks = backpointers + bsize;
lasts = stacks + bsize;
btoa = lasts + bsize;
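/* The single 4 * bsize allocation is split into four consecutive
   arrays of bsize entries each:
   [ backpointers | stacks | lasts | btoa ]. */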
/* initialise the backpointers */
for (i = 0; i < bsize; i++)
backpointers[i] = SENTINEL;
if (hashtable->last_a_pos == -1 || hashtable->last_a_pos > alo)
for (i = 0; i < hashtable->size; i++)
h[i].a_pos = h[i].a_head;
hashtable->last_a_pos = alo;

if (hashtable->last_b_pos == -1 || hashtable->last_b_pos > blo)
for (i = 0; i < hashtable->size; i++)
h[i].b_pos = h[i].b_head;
hashtable->last_b_pos = blo;
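/* a_pos/b_pos are cached cursors into the per-class chains left over
   from previous calls; they are reset to the chain heads on the first
   call (last_*_pos == -1) or whenever this call starts before the
   range processed last time. */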
for (bpos = blo; bpos < bhi; bpos++) {
equiv = lines_b[bpos].equiv;

/* no lines in a or b */
if (h[equiv].a_count == 0 || h[equiv].b_count == 0)

/* find a unique line in lines_a that matches lines_b[bpos];
if we find more than one line within the range alo:ahi,
jump to the next line from lines_b immediately */

/* loop through all lines in the linked list */
for (i = h[equiv].a_pos; i != SENTINEL; i = lines_a[i].next) {
/* the index is lower than alo, move on to the next line */

/* the index is higher than ahi, stop searching */

/* if the line is within our range, check if it's a duplicate */
if (apos != SENTINEL)

/* save index to the line */

/* this line has no equivalent in lines_a[alo:ahi] */
if (apos == SENTINEL)

/* check for duplicates of this line in lines_b[blo:bhi] */
/* loop through all lines in the linked list */
for (i = h[equiv].b_pos; i != SENTINEL; i = lines_b[i].next) {
/* the index is lower than blo, move on to the next line */

/* the index is higher than bhi, stop searching */

/* if this isn't the line we started with and it's within
our range, it's a duplicate */

/* use normalised indexes ([0,ahi-alo) instead of [alo,ahi))
for the patience sorting algorithm */
norm_bpos = bpos - blo;
norm_apos = apos - alo;
btoa[norm_bpos] = norm_apos;
Ok, how does this work...

We have a list of matching lines from two lists, a and b. These
matches are stored in the variable `btoa`. As we are iterating over this
table by bpos, the lines from b already form an increasing sequence.
We also need to "sort" the lines from a using the patience sorting
algorithm, ignoring the lines which would need to be swapped.

http://en.wikipedia.org/wiki/Patience_sorting

For each pair of lines, we need to place the line from a either on
an existing pile whose top has a higher value, or create a new
pile. The variable `stacks` represents the tops of these piles, and in
the variable `lasts` we store the lines from b that correspond to the
lines from a in `stacks`.

Whenever we place a new line on top of a pile, we store a
backpointer to the line (b) from the top of the previous pile. This means
that after the loop, the variable `backpointers` will contain an index
to the previous matching line that forms an increasing sequence
(over both indexes a and b) with the current matching line. If
either index a or b of the previous matching line would be higher
than the indexes of the current one, or if the indexes of the current
one are 0, it will contain SENTINEL.

To construct the LCS, we will just need to follow these backpointers
from the top of the last pile and stop when we reach SENTINEL.
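For illustration (with made-up values): suppose the matches found so
far, as norm_bpos -> norm_apos, are 0->3, 1->1, 2->4, 3->0 and 4->5.
Scanning them in bpos order, the pile tops held in `stacks` evolve as
[3], [1], [1,4], [0,4], [0,4,5]. Following the backpointers from the
top of the last pile then yields (a=5,b=4), (a=4,b=2), (a=1,b=1),
i.e. the longest chain that is increasing in both a and b.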
/* as an optimization, check if the next line comes at the end,
because it usually does */
if (stacksize && stacks[stacksize - 1] < norm_apos)

/* as an optimization, check if the next line comes right after
the previous line, because it usually does */
else if (stacksize && (stacks[k] < norm_apos) &&
(k == stacksize - 1 || stacks[k + 1] > norm_apos))

k = bisect_left(stacks, norm_apos, 0, stacksize);

backpointers[norm_bpos] = lasts[k - 1];

stacks[k] = norm_apos;
lasts[k] = norm_bpos;

stacks[stacksize] = norm_apos;
lasts[stacksize] = norm_bpos;

/* backtrace the structures to find the LCS */
k = lasts[stacksize - 1];
while (k != SENTINEL) {
answer[i].a = btoa[k];
/* Adds a new line to the list of matching blocks, either extending the
current block or adding a new one. */
add_matching_line(struct matching_blocks *answer, Py_ssize_t a, Py_ssize_t b)
Py_ssize_t last_index = answer->count - 1;
if ((last_index >= 0) &&
(a == answer->matches[last_index].a +
answer->matches[last_index].len) &&
(b == answer->matches[last_index].b +
answer->matches[last_index].len)) {
/* enlarge the last block */
answer->matches[last_index].len++;

/* create a new block */
answer->matches[last_index].a = a;
answer->matches[last_index].b = b;
answer->matches[last_index].len = 1;
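/* For example, consecutive calls with (3, 5), (4, 6), (5, 7) grow a
   single block to {a = 3, b = 5, len = 3}, while a later
   non-contiguous call such as (9, 2) starts a new block of length 1. */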
recurse_matches(struct matching_blocks *answer, struct hashtable *hashtable,
Py_ssize_t *backpointers, struct line *a, struct line *b,
Py_ssize_t alo, Py_ssize_t blo, Py_ssize_t ahi, Py_ssize_t bhi,
Py_ssize_t new, last_a_pos, last_b_pos, lcs_size, nahi, nbhi, i, apos, bpos;
struct matching_line *lcs;

if (maxrecursion < 0)

if (alo == ahi || blo == bhi)

last_a_pos = alo - 1;
last_b_pos = blo - 1;

lcs = (struct matching_line *)malloc(sizeof(struct matching_line) * (bhi - blo));

lcs_size = unique_lcs(lcs, hashtable, backpointers, a, b, alo, blo, ahi, bhi);
/* recurse between lines which are unique in each file and match */
for (i = lcs_size - 1; i >= 0; i--) {
apos = alo + lcs[i].a;
bpos = blo + lcs[i].b;
if (last_a_pos + 1 != apos || last_b_pos + 1 != bpos) {
res = recurse_matches(answer, hashtable,
last_a_pos + 1, last_b_pos + 1,
apos, bpos, maxrecursion - 1);

add_matching_line(answer, apos, bpos);

/* find matches between the last match and the end */
res = recurse_matches(answer, hashtable,
last_a_pos + 1, last_b_pos + 1,
ahi, bhi, maxrecursion - 1);
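/* Each unique-line match from the LCS acts as an anchor: the loop
   above first recurses into the gap before an anchor and then records
   the anchor itself with add_matching_line, while the call just above
   handles the remaining range after the last anchor. */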
/* find matching lines at the very beginning */
else if (a[alo].equiv == b[blo].equiv) {
while (alo < ahi && blo < bhi && a[alo].equiv == b[blo].equiv)
add_matching_line(answer, alo++, blo++);
res = recurse_matches(answer, hashtable,
alo, blo, ahi, bhi, maxrecursion - 1);

/* find matching lines at the very end */
else if (a[ahi - 1].equiv == b[bhi - 1].equiv) {
while (nahi > alo && nbhi > blo && a[nahi - 1].equiv == b[nbhi - 1].equiv) {
res = recurse_matches(answer, hashtable,
last_a_pos + 1, last_b_pos + 1,
nahi, nbhi, maxrecursion - 1);

for (i = 0; i < ahi - nahi; i++)
add_matching_line(answer, nahi + i, nbhi + i);
load_lines(PyObject *orig, struct line **lines)
Py_ssize_t size, i, j;

PyObject *seq, *item;

seq = PySequence_Fast(orig, "sequence expected");

size = PySequence_Fast_GET_SIZE(seq);

line = *lines = (struct line *)malloc(sizeof(struct line) * size);

for (i = 0; i < size; i++) {
item = PySequence_Fast_GET_ITEM(seq, i);
if (!PyString_Check(item)) {
PyErr_Format(PyExc_TypeError,
"sequence item %zd: expected string,"
i, item->ob_type->tp_name);

line->len = PyString_GET_SIZE(item);
line->data = p = PyString_AS_STRING(item);
/* 'djb2' hash. This gives us a nice compromise between a fast hash
function and a hash with fewer collisions. The algorithm doesn't
use the hash for actual lookups, only for building the table,
so a better hash function wouldn't bring us much benefit, but
would make this loading code slower. */
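/* The loop below is the djb2 update step, h = h * 33 + c, written as
   ((h << 5) + h) + c. */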
for (j = 0; j < line->len; j++)
h = ((h << 5) + h) + *p++;

line->next = SENTINEL;
py_unique_lcs(PyObject *self, PyObject *args)
PyObject *aseq, *bseq, *res, *item;
Py_ssize_t asize, bsize, i, nmatches, *backpointers = NULL;
struct line *a = NULL, *b = NULL;
struct matching_line *matches = NULL;
struct hashtable hashtable;

if (!PyArg_ParseTuple(args, "OO", &aseq, &bseq))

hashtable.table = NULL;

asize = load_lines(aseq, &a);
bsize = load_lines(bseq, &b);
if (asize == -1 || bsize == -1)

if (!equate_lines(&hashtable, a, b, asize, bsize))

matches = (struct matching_line *)malloc(sizeof(struct matching_line) * bsize);

backpointers = (Py_ssize_t *)malloc(sizeof(Py_ssize_t) * bsize * 4);
if (backpointers == NULL)

nmatches = unique_lcs(matches, &hashtable, backpointers, a, b, 0, 0, asize, bsize);

res = PyList_New(nmatches);
for (i = 0; i < nmatches; i++) {
#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("ii", matches[nmatches - i - 1].a, matches[nmatches - i - 1].b);
item = Py_BuildValue("nn", matches[nmatches - i - 1].a, matches[nmatches - i - 1].b);

if (PyList_SetItem(res, i, item) != 0)

free(hashtable.table);

free(hashtable.table);
py_recurse_matches(PyObject *self, PyObject *args)
PyObject *aseq, *bseq, *item, *answer;
int maxrecursion, res;
Py_ssize_t i, j, asize, bsize, alo, blo, ahi, bhi;
Py_ssize_t *backpointers = NULL;
struct line *a = NULL, *b = NULL;
struct hashtable hashtable;
struct matching_blocks matches;

#if PY_VERSION_HEX < 0x02050000
if (!PyArg_ParseTuple(args, "OOiiiiOi", &aseq, &bseq, &alo, &blo,
&ahi, &bhi, &answer, &maxrecursion))
if (!PyArg_ParseTuple(args, "OOnnnnOi", &aseq, &bseq, &alo, &blo,
&ahi, &bhi, &answer, &maxrecursion))

hashtable.table = NULL;
matches.matches = NULL;
asize = load_lines(aseq, &a);
bsize = load_lines(bseq, &b);
if (asize == -1 || bsize == -1)

if (!equate_lines(&hashtable, a, b, asize, bsize))

matches.matches = (struct matching_block *)malloc(sizeof(struct matching_block) * bsize);
if (matches.matches == NULL)

backpointers = (Py_ssize_t *)malloc(sizeof(Py_ssize_t) * bsize * 4);
if (backpointers == NULL)

res = recurse_matches(&matches, &hashtable, backpointers,
a, b, alo, blo, ahi, bhi, maxrecursion);

for (i = 0; i < matches.count; i++) {
for (j = 0; j < matches.matches[i].len; j++) {
#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("ii", matches.matches[i].a + j,
matches.matches[i].b + j);
item = Py_BuildValue("nn", matches.matches[i].a + j,
matches.matches[i].b + j);

if (PyList_Append(answer, item) != 0)

free(matches.matches);
free(hashtable.table);

free(matches.matches);
free(hashtable.table);
PatienceSequenceMatcher_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyObject *junk, *a, *b;
PatienceSequenceMatcher *self;

self = (PatienceSequenceMatcher *)type->tp_alloc(type, 0);

if (!PyArg_ParseTuple(args, "OOO", &junk, &a, &b)) {

self->asize = load_lines(a, &(self->a));
self->bsize = load_lines(b, &(self->b));

if (self->asize == -1 || self->bsize == -1) {

if (!equate_lines(&self->hashtable, self->a, self->b, self->asize, self->bsize)) {

self->backpointers = (Py_ssize_t *)malloc(sizeof(Py_ssize_t) * self->bsize * 4);
if (self->backpointers == NULL) {

return (PyObject *)self;
PatienceSequenceMatcher_dealloc(PatienceSequenceMatcher* self)
free(self->backpointers);
free(self->hashtable.table);

self->ob_type->tp_free((PyObject *)self);

static char PatienceSequenceMatcher_get_matching_blocks_doc[] =
"Return list of triples describing matching subsequences.\n"

"Each triple is of the form (i, j, n), and means that\n"
"a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in\n"

"The last triple is a dummy, (len(a), len(b), 0), and is the only\n"
"triple with n==0.\n"

">>> s = PatienceSequenceMatcher(None, \"abxcd\", \"abcd\")\n"
">>> s.get_matching_blocks()\n"
"[(0, 0, 2), (3, 2, 2), (5, 4, 0)]\n";
PatienceSequenceMatcher_get_matching_blocks(PatienceSequenceMatcher* self)
PyObject *answer, *item;

struct matching_blocks matches;

matches.matches = (struct matching_block *)malloc(sizeof(struct matching_block) * self->bsize);
if (matches.matches == NULL)
return PyErr_NoMemory();

res = recurse_matches(&matches, &self->hashtable, self->backpointers,
self->a, self->b, 0, 0,
self->asize, self->bsize, 10);

free(matches.matches);
return PyErr_NoMemory();
answer = PyList_New(matches.count + 1);
if (answer == NULL) {
free(matches.matches);

for (i = 0; i < matches.count; i++) {
#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("iii", matches.matches[i].a,
matches.matches[i].b, matches.matches[i].len);
item = Py_BuildValue("nnn", matches.matches[i].a,
matches.matches[i].b, matches.matches[i].len);

if (PyList_SetItem(answer, i, item) != 0)

#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("iii", self->asize, self->bsize, 0);
item = Py_BuildValue("nnn", self->asize, self->bsize, 0);

if (PyList_SetItem(answer, i, item) != 0)

free(matches.matches);

free(matches.matches);
static char PatienceSequenceMatcher_get_opcodes_doc[] =
"Return list of 5-tuples describing how to turn a into b.\n"

"Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple\n"
"has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the\n"
"tuple preceding it, and likewise for j1 == the previous j2.\n"

"The tags are strings, with these meanings:\n"

"'replace': a[i1:i2] should be replaced by b[j1:j2]\n"
"'delete': a[i1:i2] should be deleted.\n"
" Note that j1==j2 in this case.\n"
"'insert': b[j1:j2] should be inserted at a[i1:i1].\n"
" Note that i1==i2 in this case.\n"
"'equal': a[i1:i2] == b[j1:j2]\n"

">>> a = \"qabxcd\"\n"
">>> b = \"abycdf\"\n"
">>> s = PatienceSequenceMatcher(None, a, b)\n"
">>> for tag, i1, i2, j1, j2 in s.get_opcodes():\n"
"... print (\"%7s a[%d:%d] (%s) b[%d:%d] (%s)\" %\n"
"... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))\n"
" delete a[0:1] (q) b[0:0] ()\n"
" equal a[1:3] (ab) b[0:2] (ab)\n"
"replace a[3:4] (x) b[2:3] (y)\n"
" equal a[4:6] (cd) b[3:5] (cd)\n"
" insert a[6:6] () b[5:6] (f)\n";
PatienceSequenceMatcher_get_opcodes(PatienceSequenceMatcher* self)
PyObject *answer, *item;
Py_ssize_t i, j, k, ai, bj;

struct matching_blocks matches;

matches.matches = (struct matching_block *)malloc(sizeof(struct matching_block) * (self->bsize + 1));
if (matches.matches == NULL)
return PyErr_NoMemory();

res = recurse_matches(&matches, &self->hashtable, self->backpointers,
self->a, self->b, 0, 0,
self->asize, self->bsize, 10);

free(matches.matches);
return PyErr_NoMemory();

matches.matches[matches.count].a = self->asize;
matches.matches[matches.count].b = self->bsize;
matches.matches[matches.count].len = 0;
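/* The zero-length dummy match appended above (at asize, bsize) makes
   the loop below emit an opcode for any unmatched tail of a and b;
   the len > 0 check further down keeps the dummy itself from
   producing an 'equal' opcode. */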
answer = PyList_New(0);
if (answer == NULL) {
free(matches.matches);

for (k = 0; k < matches.count; k++) {
ai = matches.matches[k].a;
bj = matches.matches[k].b;
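/* (i, j) is where the previous match ended and (ai, bj) is where the
   current one starts; as described in the docstring above, a gap in
   both sequences is tagged 'replace', a gap only in a is 'delete',
   and a gap only in b is 'insert'. */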
if (i < ai && j < bj)

#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("siiii", opcode_names[tag], i, ai, j, bj);
item = Py_BuildValue("snnnn", opcode_names[tag], i, ai, j, bj);

if (PyList_Append(answer, item) != 0)

i = ai + matches.matches[k].len;
j = bj + matches.matches[k].len;

if (matches.matches[k].len > 0) {
#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("siiii", opcode_names[OP_EQUAL], ai, i, bj, j);
item = Py_BuildValue("snnnn", opcode_names[OP_EQUAL], ai, i, bj, j);

if (PyList_Append(answer, item) != 0)

free(matches.matches);

free(matches.matches);
static char PatienceSequenceMatcher_get_grouped_opcodes_doc[] =
"Isolate change clusters by eliminating ranges with no changes.\n"

"Return a list of groups with up to n lines of context.\n"
"Each group is in the same format as returned by get_opcodes().\n"

">>> from pprint import pprint\n"
">>> a = map(str, range(1,40))\n"

">>> b[8:8] = ['i'] # Make an insertion\n"
">>> b[20] += 'x' # Make a replacement\n"
">>> b[23:28] = [] # Make a deletion\n"
">>> b[30] += 'y' # Make another replacement\n"
">>> pprint(PatienceSequenceMatcher(None,a,b).get_grouped_opcodes())\n"
"[[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)],\n"
" [('equal', 16, 19, 17, 20),\n"
" ('replace', 19, 20, 20, 21),\n"
" ('equal', 20, 22, 21, 23),\n"
" ('delete', 22, 27, 23, 23),\n"
" ('equal', 27, 30, 23, 26)],\n"
" [('equal', 31, 34, 27, 30),\n"
" ('replace', 34, 35, 30, 31),\n"
" ('equal', 35, 38, 31, 34)]]\n";
PatienceSequenceMatcher_get_grouped_opcodes(PatienceSequenceMatcher* self,
PyObject *answer, *group, *item;
Py_ssize_t i, j, k, ai, bj, size, ncodes, tag;
Py_ssize_t i1, i2, j1, j2;

struct matching_blocks matches;
struct opcode *codes;

if (!PyArg_ParseTuple(args, "|i", &n))

matches.matches = (struct matching_block *)malloc(sizeof(struct matching_block) * (self->bsize + 1));
if (matches.matches == NULL)
return PyErr_NoMemory();
res = recurse_matches(&matches, &self->hashtable, self->backpointers,
self->a, self->b, 0, 0,
self->asize, self->bsize, 10);

free(matches.matches);
return PyErr_NoMemory();

matches.matches[matches.count].a = self->asize;
matches.matches[matches.count].b = self->bsize;
matches.matches[matches.count].len = 0;

codes = (struct opcode *)malloc(sizeof(struct opcode) * matches.count * 2);
if (codes == NULL) {
free(matches.matches);
return PyErr_NoMemory();

for (k = 0; k < matches.count; k++) {
ai = matches.matches[k].a;
bj = matches.matches[k].b;

if (i < ai && j < bj)

codes[ncodes].tag = tag;
codes[ncodes].i1 = i;
codes[ncodes].i2 = ai;
codes[ncodes].j1 = j;
codes[ncodes].j2 = bj;

i = ai + matches.matches[k].len;
j = bj + matches.matches[k].len;

if (matches.matches[k].len > 0) {
codes[ncodes].tag = OP_EQUAL;
codes[ncodes].i1 = ai;
codes[ncodes].i2 = i;
codes[ncodes].j1 = bj;
codes[ncodes].j2 = j;

codes[ncodes].tag = OP_EQUAL;
codes[ncodes].i1 = 0;
codes[ncodes].i2 = 1;
codes[ncodes].j1 = 0;
codes[ncodes].j2 = 1;
/* fixup leading and trailing groups if they show no changes. */
if (codes[0].tag == OP_EQUAL) {
codes[0].i1 = MAX(codes[0].i1, codes[0].i2 - n);
codes[0].j1 = MAX(codes[0].j1, codes[0].j2 - n);

if (codes[ncodes - 1].tag == OP_EQUAL) {
codes[ncodes - 1].i2 = MIN(codes[ncodes - 1].i2,
codes[ncodes - 1].i1 + n);
codes[ncodes - 1].j2 = MIN(codes[ncodes - 1].j2,
codes[ncodes - 1].j1 + n);
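/* For example, with n == 3 lines of context a leading equal opcode
   covering a[0:50] is trimmed to a[47:50], and a trailing one covering
   a[80:120] is trimmed to a[80:83], so only n unchanged lines survive
   at either end. */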
answer = PyList_New(0);

group = PyList_New(0);

for (i = 0; i < ncodes; i++) {

/* end the current group and start a new one whenever
there is a large range with no changes. */
if (tag == OP_EQUAL && i2 - i1 > nn) {
#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("siiii", opcode_names[tag],
i1, MIN(i2, i1 + n), j1, MIN(j2, j1 + n));
item = Py_BuildValue("snnnn", opcode_names[tag],
i1, MIN(i2, i1 + n), j1, MIN(j2, j1 + n));

if (PyList_Append(group, item) != 0)

if (PyList_Append(answer, group) != 0)

group = PyList_New(0);

i1 = MAX(i1, i2 - n);
j1 = MAX(j1, j2 - n);

#if PY_VERSION_HEX < 0x02050000
item = Py_BuildValue("siiii", opcode_names[tag], i1, i2, j1, j2);
item = Py_BuildValue("snnnn", opcode_names[tag], i1, i2, j1, j2);

if (PyList_Append(group, item) != 0)

size = PyList_Size(group);
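/* A group whose only entry is a single 'equal' opcode contains no
   actual changes, so the check below drops it instead of appending it
   to the answer. */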
if (size > 0 && !(size == 1 && tag == OP_EQUAL)) {
if (PyList_Append(answer, group) != 0)

free(matches.matches);

free(matches.matches);
static PyMethodDef PatienceSequenceMatcher_methods[] = {
{"get_matching_blocks",
(PyCFunction)PatienceSequenceMatcher_get_matching_blocks,
PatienceSequenceMatcher_get_matching_blocks_doc},
(PyCFunction)PatienceSequenceMatcher_get_opcodes,
PatienceSequenceMatcher_get_opcodes_doc},
{"get_grouped_opcodes",
(PyCFunction)PatienceSequenceMatcher_get_grouped_opcodes,
PatienceSequenceMatcher_get_grouped_opcodes_doc},

static char PatienceSequenceMatcher_doc[] =
"C implementation of PatienceSequenceMatcher";
static PyTypeObject PatienceSequenceMatcherType = {
PyObject_HEAD_INIT(NULL)
"PatienceSequenceMatcher", /* tp_name */
sizeof(PatienceSequenceMatcher), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)PatienceSequenceMatcher_dealloc, /* tp_dealloc */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
PatienceSequenceMatcher_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iternext */
PatienceSequenceMatcher_methods, /* tp_methods */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
PatienceSequenceMatcher_new, /* tp_new */
static PyMethodDef cpatiencediff_methods[] = {
{"unique_lcs_c", py_unique_lcs, METH_VARARGS},
{"recurse_matches_c", py_recurse_matches, METH_VARARGS},

init_patiencediff_c(void)
if (PyType_Ready(&PatienceSequenceMatcherType) < 0)

m = Py_InitModule3("_patiencediff_c", cpatiencediff_methods,
"C implementation of PatienceSequenceMatcher");

Py_INCREF(&PatienceSequenceMatcherType);
PyModule_AddObject(m, "PatienceSequenceMatcher_c",
(PyObject *)&PatienceSequenceMatcherType);