1
/* Copyright (C) 2009 Canonical Ltd
3
* This program is free software; you can redistribute it and/or modify
4
* it under the terms of the GNU General Public License as published by
5
* the Free Software Foundation; either version 2 of the License, or
6
* (at your option) any later version.
8
* This program is distributed in the hope that it will be useful,
9
* but WITHOUT ANY WARRANTY; without even the implied warranty of
10
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
* GNU General Public License for more details.
13
* You should have received a copy of the GNU General Public License
14
* along with this program; if not, write to the Free Software
15
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
/* Must be defined before importing _static_tuple_c.h so that we get the right
21
#define STATIC_TUPLE_MODULE
24
#include "python-compat.h"
26
#include "_static_tuple_c.h"
27
#include "_export_c_api.h"
29
/* Pyrex 0.9.6.4 exports _simple_set_pyx_api as
30
* import__simple_set_pyx(), while Pyrex 0.9.8.5 and Cython 0.11.3 export them
31
* as import_bzrlib___simple_set_pyx(). As such, we just #define one to be
32
* equivalent to the other in our internal code.
34
#define import__simple_set_pyx import_bzrlib___simple_set_pyx
35
#include "_simple_set_pyx_api.h"
38
# define inline __inline__
39
#elif defined(_MSC_VER)
40
# define inline __inline
46
/* The one and only StaticTuple with no values */
47
static StaticTuple *_empty_tuple = NULL;
48
static PyObject *_interned_tuples = NULL;
52
_StaticTuple_is_interned(StaticTuple *self)
54
return self->flags & STATIC_TUPLE_INTERNED_FLAG;
60
StaticTuple_as_tuple(StaticTuple *self)
62
PyObject *tpl = NULL, *obj = NULL;
66
tpl = PyTuple_New(len);
71
for (i = 0; i < len; ++i) {
72
obj = (PyObject *)self->items[i];
74
PyTuple_SET_ITEM(tpl, i, obj);
80
static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";
83
StaticTuple_Intern(StaticTuple *self)
85
PyObject *canonical_tuple = NULL;
87
if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
91
/* SimpleSet_Add returns whatever object is present at self
92
* or the new object if it needs to add it.
94
canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
95
if (!canonical_tuple) {
96
// Some sort of exception, propagate it.
99
if (canonical_tuple != (PyObject *)self) {
100
// There was already a tuple with that value
101
return (StaticTuple *)canonical_tuple;
103
self->flags |= STATIC_TUPLE_INTERNED_FLAG;
104
// The two references in the dict do not count, so that the StaticTuple
105
// object does not become immortal just because it was interned.
106
Py_REFCNT(self) -= 1;
110
static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
111
"Return a 'canonical' StaticTuple object.\n"
112
"Similar to intern() for strings, this makes sure there\n"
113
"is only one StaticTuple object for a given value\n."
115
" key = StaticTuple('foo', 'bar').intern()\n";
119
StaticTuple_dealloc(StaticTuple *self)
123
if (_StaticTuple_is_interned(self)) {
124
/* revive dead object temporarily for Discard */
126
if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
127
Py_FatalError("deletion of interned StaticTuple failed");
128
self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
131
for (i = 0; i < len; ++i) {
132
Py_XDECREF(self->items[i]);
134
Py_TYPE(self)->tp_free((PyObject *)self);
138
/* Similar to PyTuple_New() */
140
StaticTuple_New(Py_ssize_t size)
144
PyErr_BadInternalCall();
148
if (size < 0 || size > 255) {
149
/* Too big or too small */
150
PyErr_SetString(PyExc_ValueError, "StaticTuple(...)"
151
" takes from 0 to 255 items");
154
if (size == 0 && _empty_tuple != NULL) {
155
Py_INCREF(_empty_tuple);
158
/* Note that we use PyObject_NewVar because we want to allocate a variable
159
* width entry. However we *aren't* truly a PyVarObject because we don't
160
* use a long for ob_size. Instead we use a plain 'size' that is an int,
161
* and will be overloaded with flags in the future.
162
* As such we do the alloc, and then have to clean up anything it does
165
stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
166
if (stuple == NULL) {
171
stuple->_unused0 = 0;
172
stuple->_unused1 = 0;
174
memset(stuple->items, 0, sizeof(PyObject *) * size);
176
#if STATIC_TUPLE_HAS_HASH
184
StaticTuple_FromSequence(PyObject *sequence)
186
StaticTuple *new = NULL;
187
PyObject *as_tuple = NULL;
191
if (StaticTuple_CheckExact(sequence)) {
193
return (StaticTuple *)sequence;
195
if (!PySequence_Check(sequence)) {
196
as_tuple = PySequence_Tuple(sequence);
197
if (as_tuple == NULL)
201
size = PySequence_Size(sequence);
205
new = StaticTuple_New(size);
209
for (i = 0; i < size; ++i) {
210
// This returns a new reference, which we then 'steal' with
211
// StaticTuple_SET_ITEM
212
item = PySequence_GetItem(sequence, i);
218
StaticTuple_SET_ITEM(new, i, item);
221
Py_XDECREF(as_tuple);
222
return (StaticTuple *)new;
226
StaticTuple_from_sequence(PyObject *self, PyObject *args, PyObject *kwargs)
229
if (!PyArg_ParseTuple(args, "O", &sequence))
231
return StaticTuple_FromSequence(sequence);
235
/* Check that all items we point to are 'valid' */
237
StaticTuple_check_items(StaticTuple *self)
242
for (i = 0; i < self->size; ++i) {
243
obj = self->items[i];
245
PyErr_SetString(PyExc_RuntimeError, "StaticTuple(...)"
246
" should not have a NULL entry.");
249
if (PyString_CheckExact(obj)
250
|| StaticTuple_CheckExact(obj)
253
|| PyInt_CheckExact(obj)
254
|| PyLong_CheckExact(obj)
255
|| PyFloat_CheckExact(obj)
256
|| PyUnicode_CheckExact(obj)
258
PyErr_Format(PyExc_TypeError, "StaticTuple(...)"
259
" requires that all items are one of"
260
" str, StaticTuple, None, bool, int, long, float, or unicode"
261
" not %s.", Py_TYPE(obj)->tp_name);
268
StaticTuple_new_constructor(PyTypeObject *type, PyObject *args, PyObject *kwds)
271
PyObject *obj = NULL;
272
Py_ssize_t i, len = 0;
274
if (type != &StaticTuple_Type) {
275
PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
278
if (!PyTuple_CheckExact(args)) {
279
PyErr_SetString(PyExc_TypeError, "args must be a tuple");
282
len = PyTuple_GET_SIZE(args);
283
self = (StaticTuple *)StaticTuple_New(len);
287
for (i = 0; i < len; ++i) {
288
obj = PyTuple_GET_ITEM(args, i);
290
self->items[i] = obj;
292
if (!StaticTuple_check_items(self)) {
293
type->tp_dealloc((PyObject *)self);
296
return (PyObject *)self;
300
StaticTuple_repr(StaticTuple *self)
302
PyObject *as_tuple, *tuple_repr, *result;
304
as_tuple = StaticTuple_as_tuple(self);
305
if (as_tuple == NULL) {
308
tuple_repr = PyObject_Repr(as_tuple);
310
if (tuple_repr == NULL) {
313
result = PyString_FromFormat("StaticTuple%s",
314
PyString_AsString(tuple_repr));
319
StaticTuple_hash(StaticTuple *self)
321
/* adapted from tuplehash(), is the specific hash value considered
325
Py_ssize_t len = self->size;
327
long mult = 1000003L;
329
#if STATIC_TUPLE_HAS_HASH
330
if (self->hash != -1) {
336
// TODO: We could set specific flags if we know that, for example, all the
337
// items are strings. I haven't seen a real-world benefit to that
340
y = PyObject_Hash(*p++);
341
if (y == -1) /* failure */
344
/* the cast might truncate len; that doesn't change hash stability */
345
mult += (long)(82520L + len + len);
350
#if STATIC_TUPLE_HAS_HASH
357
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
360
PyObject *result = NULL;
362
vt = StaticTuple_as_tuple((StaticTuple *)v);
366
if (!PyTuple_Check(wt)) {
367
PyErr_BadInternalCall();
370
/* Now we have 2 tuples to compare, do it */
371
result = PyTuple_Type.tp_richcompare(vt, wt, op);
377
/** Compare two objects to determine if they are equivalent.
378
* The basic flow is as follows
379
* 1) First make sure that both objects are StaticTuple instances. If they
380
* aren't then cast self to a tuple, and have the tuple do the comparison.
381
* 2) Special case comparison to Py_None, because it happens to occur fairly
382
* often in the test suite.
383
* 3) Special case when v and w are the same pointer. As we know the answer to
384
* all queries without walking individual items.
385
* 4) For all operations, we then walk the items to find the first paired
386
* items that are not equal.
387
* 5) If all items found are equal, we then check the length of self and
388
* other to determine equality.
389
* 6) If an item differs, then we apply "op" to those last two items. (eg.
390
* StaticTuple(A, B) > StaticTuple(A, C) iff B > C)
394
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
396
StaticTuple *v_st, *w_st;
397
Py_ssize_t vlen, wlen, min_len, i;
398
PyObject *v_obj, *w_obj;
399
richcmpfunc string_richcompare;
401
if (!StaticTuple_CheckExact(v)) {
402
/* This has never triggered, according to python-dev it seems this
403
* might trigger if '__op__' is defined but '__rop__' is not, sort of
404
* case. Such as "None == StaticTuple()"
406
fprintf(stderr, "self is not StaticTuple\n");
407
Py_INCREF(Py_NotImplemented);
408
return Py_NotImplemented;
410
v_st = (StaticTuple *)v;
411
if (StaticTuple_CheckExact(w)) {
412
/* The most common case */
413
w_st = (StaticTuple*)w;
414
} else if (PyTuple_Check(w)) {
415
/* One of v or w is a tuple, so we go the 'slow' route and cast up to
418
/* TODO: This seems to be triggering more than I thought it would...
419
* We probably want to optimize comparing self to other when
422
return StaticTuple_richcompare_to_tuple(v_st, w, op);
423
} else if (w == Py_None) {
424
// None is always less than the object
426
case Py_NE:case Py_GT:case Py_GE:
429
case Py_EQ:case Py_LT:case Py_LE:
432
default: // Should never happen
433
return Py_NotImplemented;
436
/* We don't special case this comparison, we just let python handle
439
Py_INCREF(Py_NotImplemented);
440
return Py_NotImplemented;
442
/* Now we know that we have 2 StaticTuple objects, so let's compare them.
443
* This code is inspired from tuplerichcompare, except we know our
444
* objects are limited in scope, so we can inline some comparisons.
447
/* Identical pointers, we can shortcut this easily. */
449
case Py_EQ:case Py_LE:case Py_GE:
452
case Py_NE:case Py_LT:case Py_GT:
458
&& _StaticTuple_is_interned(v_st)
459
&& _StaticTuple_is_interned(w_st))
461
/* If both objects are interned, we know they are different if the
462
* pointer is not the same, which would have been handled by the
463
* previous if. No need to compare the entries.
469
/* The only time we are likely to compare items of different lengths is in
470
* something like the interned_keys set. However, the hash is good enough
471
* that it is rare. Note that 'tuple_richcompare' also does not compare
476
min_len = (vlen < wlen) ? vlen : wlen;
477
string_richcompare = PyString_Type.tp_richcompare;
478
for (i = 0; i < min_len; i++) {
479
PyObject *result = NULL;
480
v_obj = StaticTuple_GET_ITEM(v_st, i);
481
w_obj = StaticTuple_GET_ITEM(w_st, i);
482
if (v_obj == w_obj) {
483
/* Shortcut case, these must be identical */
486
if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
487
result = string_richcompare(v_obj, w_obj, Py_EQ);
488
} else if (StaticTuple_CheckExact(v_obj) &&
489
StaticTuple_CheckExact(w_obj))
491
/* Both are StaticTuple types, so recurse */
492
result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
494
/* Fall back to generic richcompare */
495
result = PyObject_RichCompare(v_obj, w_obj, Py_EQ);
497
if (result == NULL) {
498
return NULL; /* There seems to be an error */
500
if (result == Py_False) {
501
// This entry is not identical, Shortcut for Py_EQ
508
if (result != Py_True) {
509
/* We don't know *what* richcompare is returning, but it
510
* isn't something we recognize
512
PyErr_BadInternalCall();
519
/* We walked off one of the lists, but everything compared equal so
520
* far. Just compare the size.
525
case Py_LT: cmp = vlen < wlen; break;
526
case Py_LE: cmp = vlen <= wlen; break;
527
case Py_EQ: cmp = vlen == wlen; break;
528
case Py_NE: cmp = vlen != wlen; break;
529
case Py_GT: cmp = vlen > wlen; break;
530
case Py_GE: cmp = vlen >= wlen; break;
531
default: return NULL; /* cannot happen */
540
/* The last item differs, shortcut the Py_NE case */
545
/* It is some other comparison, go ahead and do the real check. */
546
if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
548
return string_richcompare(v_obj, w_obj, op);
549
} else if (StaticTuple_CheckExact(v_obj) &&
550
StaticTuple_CheckExact(w_obj))
552
/* Both are StaticTuple types, so recurse */
553
return StaticTuple_richcompare(v_obj, w_obj, op);
555
return PyObject_RichCompare(v_obj, w_obj, op);
561
StaticTuple_length(StaticTuple *self)
568
StaticTuple__is_interned(StaticTuple *self)
570
if (_StaticTuple_is_interned(self)) {
578
static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
579
"Check to see if this tuple has been interned.\n";
583
StaticTuple_reduce(StaticTuple *self)
585
PyObject *result = NULL, *as_tuple = NULL;
587
result = PyTuple_New(2);
591
as_tuple = StaticTuple_as_tuple(self);
592
if (as_tuple == NULL) {
596
Py_INCREF(&StaticTuple_Type);
597
PyTuple_SET_ITEM(result, 0, (PyObject *)&StaticTuple_Type);
598
PyTuple_SET_ITEM(result, 1, as_tuple);
602
static char StaticTuple_reduce_doc[] = "__reduce__() => tuple\n";
606
StaticTuple_add(PyObject *v, PyObject *w)
608
Py_ssize_t i, len_v, len_w;
611
/* StaticTuples and plain tuples may be added (concatenated) to
614
if (StaticTuple_CheckExact(v)) {
615
len_v = ((StaticTuple*)v)->size;
616
} else if (PyTuple_Check(v)) {
617
len_v = PyTuple_GET_SIZE(v);
619
Py_INCREF(Py_NotImplemented);
620
return Py_NotImplemented;
622
if (StaticTuple_CheckExact(w)) {
623
len_w = ((StaticTuple*)w)->size;
624
} else if (PyTuple_Check(w)) {
625
len_w = PyTuple_GET_SIZE(w);
627
Py_INCREF(Py_NotImplemented);
628
return Py_NotImplemented;
630
result = StaticTuple_New(len_v + len_w);
633
for (i = 0; i < len_v; ++i) {
634
// This returns a new reference, which we then 'steal' with
635
// StaticTuple_SET_ITEM
636
item = PySequence_GetItem(v, i);
641
StaticTuple_SET_ITEM(result, i, item);
643
for (i = 0; i < len_w; ++i) {
644
item = PySequence_GetItem(w, i);
649
StaticTuple_SET_ITEM(result, i+len_v, item);
651
if (!StaticTuple_check_items(result)) {
655
return (PyObject *)result;
659
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
662
/* We cast to (int) to avoid worrying about whether Py_ssize_t is a
663
* long long, etc. offsets should never be >2**31 anyway.
666
PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
667
" negative indices: %d\n", (int)offset);
668
} else if (offset >= self->size) {
669
PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
670
" %d >= %d", (int)offset, (int)self->size);
673
obj = (PyObject *)self->items[offset];
679
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
681
PyObject *as_tuple, *result;
683
as_tuple = StaticTuple_as_tuple(self);
684
if (as_tuple == NULL) {
687
result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
693
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
696
for (i = self->size; --i >= 0;) {
697
Py_VISIT(self->items[i]);
702
static char StaticTuple_doc[] =
703
"C implementation of a StaticTuple structure."
704
"\n This is used as StaticTuple(item1, item2, item3)"
705
"\n This is similar to tuple, less flexible in what it"
706
"\n supports, but also lighter memory consumption."
707
"\n Note that the constructor mimics the () form of tuples"
708
"\n Rather than the 'tuple()' constructor."
709
"\n eg. StaticTuple(a, b) == (a, b) == tuple((a, b))";
711
static PyMethodDef StaticTuple_methods[] = {
712
{"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
713
{"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
714
{"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
715
StaticTuple__is_interned_doc},
716
{"from_sequence", (PyCFunction)StaticTuple_from_sequence,
717
METH_STATIC | METH_VARARGS,
718
"Create a StaticTuple from a given sequence. This functions"
719
" the same as the tuple() constructor."},
720
{"__reduce__", (PyCFunction)StaticTuple_reduce, METH_NOARGS, StaticTuple_reduce_doc},
721
{NULL, NULL} /* sentinel */
725
static PyNumberMethods StaticTuple_as_number = {
726
(binaryfunc) StaticTuple_add, /* nb_add */
730
0, /* nb_remainder */
747
static PySequenceMethods StaticTuple_as_sequence = {
748
(lenfunc)StaticTuple_length, /* sq_length */
751
(ssizeargfunc)StaticTuple_item, /* sq_item */
752
(ssizessizeargfunc)StaticTuple_slice, /* sq_slice */
754
0, /* sq_ass_slice */
758
/* TODO: Implement StaticTuple_as_mapping.
759
* The only thing we really want to support from there is mp_subscript,
760
* so that we could support extended slicing (foo[::2]). Not worth it
765
PyTypeObject StaticTuple_Type = {
766
PyObject_HEAD_INIT(NULL)
768
"bzrlib._static_tuple_c.StaticTuple", /* tp_name */
769
sizeof(StaticTuple), /* tp_basicsize */
770
sizeof(PyObject *), /* tp_itemsize */
771
(destructor)StaticTuple_dealloc, /* tp_dealloc */
776
(reprfunc)StaticTuple_repr, /* tp_repr */
777
&StaticTuple_as_number, /* tp_as_number */
778
&StaticTuple_as_sequence, /* tp_as_sequence */
779
0, /* tp_as_mapping */
780
(hashfunc)StaticTuple_hash, /* tp_hash */
785
0, /* tp_as_buffer */
786
/* Py_TPFLAGS_CHECKTYPES tells the number operations that they shouldn't
787
 * try to 'coerce' but instead stuff like 'add' will check its arguments.
789
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES, /* tp_flags*/
790
StaticTuple_doc, /* tp_doc */
791
/* gc.get_referents checks the IS_GC flag before it calls tp_traverse
792
* And we don't include this object in the garbage collector because we
793
* know it doesn't create cycles. However, 'meliae' will follow
794
* tp_traverse, even if the object isn't GC, and we want that.
796
(traverseproc)StaticTuple_traverse, /* tp_traverse */
798
StaticTuple_richcompare, /* tp_richcompare */
799
0, /* tp_weaklistoffset */
800
// without implementing tp_iter, Python will fall back to PySequence*
801
// which seems to work ok, we may need something faster/lighter in the
805
StaticTuple_methods, /* tp_methods */
810
0, /* tp_descr_get */
811
0, /* tp_descr_set */
812
0, /* tp_dictoffset */
815
StaticTuple_new_constructor, /* tp_new */
819
static PyMethodDef static_tuple_c_methods[] = {
825
setup_interned_tuples(PyObject *m)
827
_interned_tuples = (PyObject *)SimpleSet_New();
828
if (_interned_tuples != NULL) {
829
Py_INCREF(_interned_tuples);
830
PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
836
setup_empty_tuple(PyObject *m)
839
if (_interned_tuples == NULL) {
840
fprintf(stderr, "You need to call setup_interned_tuples() before"
841
" setup_empty_tuple, because we intern it.\n");
843
// We need to create the empty tuple
844
stuple = (StaticTuple *)StaticTuple_New(0);
845
_empty_tuple = StaticTuple_Intern(stuple);
846
assert(_empty_tuple == stuple);
847
// At this point, refcnt is 2: 1 from New(), and 1 from the return from
848
// intern(). We will keep 1 for the _empty_tuple global, and use the other
849
// for the module reference.
850
PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
854
_StaticTuple_CheckExact(PyObject *obj)
856
return StaticTuple_CheckExact(obj);
860
setup_c_api(PyObject *m)
862
_export_function(m, "StaticTuple_New", StaticTuple_New,
863
"StaticTuple *(Py_ssize_t)");
864
_export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
865
"StaticTuple *(StaticTuple *)");
866
_export_function(m, "StaticTuple_FromSequence", StaticTuple_FromSequence,
867
"StaticTuple *(PyObject *)");
868
_export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
874
_workaround_pyrex_096(void)
876
/* Work around an incompatibility in how pyrex 0.9.6 exports a module,
877
* versus how pyrex 0.9.8 and cython 0.11 export it.
878
* Namely 0.9.6 exports import__simple_set_pyx and tries to
879
* "import _simple_set_pyx" but it is available only as
880
* "import bzrlib._simple_set_pyx"
881
* It is a shame to hack up sys.modules, but that is what we've got to do.
883
PyObject *sys_module = NULL, *modules = NULL, *set_module = NULL;
886
/* Clear out the current ImportError exception, and try again. */
888
/* Note that this only seems to work if somewhere else imports
889
* bzrlib._simple_set_pyx before importing bzrlib._static_tuple_c
891
set_module = PyImport_ImportModule("bzrlib._simple_set_pyx");
892
if (set_module == NULL) {
895
/* Add the _simple_set_pyx into sys.modules at the appropriate location. */
896
sys_module = PyImport_ImportModule("sys");
897
if (sys_module == NULL) {
900
modules = PyObject_GetAttrString(sys_module, "modules");
901
if (modules == NULL || !PyDict_Check(modules)) {
904
PyDict_SetItemString(modules, "_simple_set_pyx", set_module);
905
/* Now that we have hacked it in, try the import again. */
906
retval = import_bzrlib___simple_set_pyx();
908
Py_XDECREF(set_module);
909
Py_XDECREF(sys_module);
916
init_static_tuple_c(void)
920
StaticTuple_Type.tp_getattro = PyObject_GenericGetAttr;
921
if (PyType_Ready(&StaticTuple_Type) < 0)
924
m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
925
"C implementation of a StaticTuple structure");
929
Py_INCREF(&StaticTuple_Type);
930
PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
931
if (import_bzrlib___simple_set_pyx() == -1
932
&& _workaround_pyrex_096() == -1)
936
setup_interned_tuples(m);
937
setup_empty_tuple(m);
941
// vim: tabstop=4 sw=4 expandtab