/* Copyright (C) 2009, 2010 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include <Python.h>
#include "python-compat.h"

#include "_static_tuple_c.h"
#include "_export_c_api.h"

/* Pyrex 0.9.6.4 exports _simple_set_pyx_api as
 * import__simple_set_pyx(), while Pyrex 0.9.8.5 and Cython 0.11.3 export them
 * as import_bzrlib___simple_set_pyx(). As such, we just #define one to be
 * equivalent to the other in our internal code.
 */
#define import__simple_set_pyx import_bzrlib___simple_set_pyx
#include "_simple_set_pyx_api.h"

#if defined(__GNUC__)
#   define inline __inline__
#elif defined(_MSC_VER)
#   define inline __inline
#else
#   define inline
#endif

/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;

static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}

static PyObject *
StaticTuple_as_tuple(StaticTuple *self)
{
    PyObject *tpl = NULL, *obj = NULL;
    int i, len;

    len = self->size;
    tpl = PyTuple_New(len);
    if (!tpl) {
        /* Malloc failure */
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = (PyObject *)self->items[i];
        /* PyTuple_SET_ITEM steals a reference, and items[] owns its refs */
        Py_INCREF(obj);
        PyTuple_SET_ITEM(tpl, i, obj);
    }
    return tpl;
}

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";

static StaticTuple *
StaticTuple_Intern(StaticTuple *self)
{
    PyObject *canonical_tuple = NULL;

    if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
        Py_INCREF(self);
        return self;
    }
    /* SimpleSet_Add returns whatever object is present at self
     * or the new object if it needs to add it.
     */
    canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
    if (!canonical_tuple) {
        // Some sort of exception, propagate it.
        return NULL;
    }
    if (canonical_tuple != (PyObject *)self) {
        // There was already a tuple with that value
        return (StaticTuple *)canonical_tuple;
    }
    self->flags |= STATIC_TUPLE_INTERNED_FLAG;
    // The two references in the interned set do not count, so that the
    // StaticTuple object does not become immortal just because it was
    // interned.
    Py_REFCNT(self) -= 1;
    return self;
}

static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
    "Return a 'canonical' StaticTuple object.\n"
    "Similar to intern() for strings, this makes sure there\n"
    "is only one StaticTuple object for a given value.\n"
    "Common usage is:\n"
    "\n"
    "    key = StaticTuple('foo', 'bar').intern()\n";

static void
StaticTuple_dealloc(StaticTuple *self)
{
    int i, len = self->size;

    if (_StaticTuple_is_interned(self)) {
        /* revive dead object temporarily for Discard */
        Py_REFCNT(self) = 2;
        if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
            Py_FatalError("deletion of interned StaticTuple failed");
        self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
    }
    for (i = 0; i < len; ++i) {
        Py_XDECREF(self->items[i]);
    }
    Py_TYPE(self)->tp_free((PyObject *)self);
}

/* Similar to PyTuple_New() */
static StaticTuple *
StaticTuple_New(Py_ssize_t size)
{
    StaticTuple *stuple;

    if (size < 0 || size > 255) {
        /* Too big or too small */
        PyErr_SetString(PyExc_ValueError, "StaticTuple(...)"
            " takes from 0 to 255 items");
        return NULL;
    }
    if (size == 0 && _empty_tuple != NULL) {
        Py_INCREF(_empty_tuple);
        return _empty_tuple;
    }
    /* Note that we use PyObject_NewVar because we want to allocate a variable
     * width entry. However we *aren't* truly a PyVarObject because we don't
     * use a long for ob_size. Instead we use a plain 'size' that is an int,
     * and will be overloaded with flags in the future.
     * As such we do the alloc, and then have to clean up anything it does
     * incorrectly.
     */
    stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
    if (stuple == NULL) {
        return NULL;
    }
    stuple->size = size;
    stuple->flags = 0;
    stuple->_unused0 = 0;
    stuple->_unused1 = 0;
    if (size > 0) {
        memset(stuple->items, 0, sizeof(PyObject *) * size);
    }
#if STATIC_TUPLE_HAS_HASH
    stuple->hash = -1;
#endif
    return stuple;
}
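
/* A minimal usage sketch, not part of the original module: it shows how the
 * allocator above is meant to be combined with StaticTuple_SET_ITEM, which
 * steals a reference to each item. The STATIC_TUPLE_EXAMPLES guard is a
 * hypothetical macro so the sketch stays out of the real build.
 */
#ifdef STATIC_TUPLE_EXAMPLES
static StaticTuple *
example_build_pair(PyObject *first, PyObject *second)
{
    StaticTuple *pair = StaticTuple_New(2);
    if (pair == NULL)
        return NULL;
    /* The arguments are borrowed references, so take our own before the
     * SET_ITEM macro steals them. */
    Py_INCREF(first);
    StaticTuple_SET_ITEM(pair, 0, first);
    Py_INCREF(second);
    StaticTuple_SET_ITEM(pair, 1, second);
    return pair;
}
#endif /* STATIC_TUPLE_EXAMPLES */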

static StaticTuple *
StaticTuple_FromSequence(PyObject *sequence)
{
    StaticTuple *new = NULL;
    PyObject *as_tuple = NULL;
    PyObject *item;
    Py_ssize_t i, size;

    if (StaticTuple_CheckExact(sequence)) {
        Py_INCREF(sequence);
        return (StaticTuple *)sequence;
    }
    if (!PySequence_Check(sequence)) {
        as_tuple = PySequence_Tuple(sequence);
        if (as_tuple == NULL)
            goto done;
        /* We know that tuples are sequences */
        sequence = as_tuple;
    }
    size = PySequence_Size(sequence);
    if (size == -1) {
        goto done;
    }
    new = StaticTuple_New(size);
    if (new == NULL) {
        goto done;
    }
    for (i = 0; i < size; ++i) {
        // This returns a new reference, which we then 'steal' with
        // StaticTuple_SET_ITEM
        item = PySequence_GetItem(sequence, i);
        if (item == NULL) {
            Py_DECREF(new);
            new = NULL;
            goto done;
        }
        StaticTuple_SET_ITEM(new, i, item);
    }
done:
    Py_XDECREF(as_tuple);
    return (StaticTuple *)new;
}

static PyObject *
StaticTuple_from_sequence(PyObject *self, PyObject *args, PyObject *kwargs)
{
    PyObject *sequence;
    if (!PyArg_ParseTuple(args, "O", &sequence))
        return NULL;
    return (PyObject *)StaticTuple_FromSequence(sequence);
}

/* Check that all items we point to are 'valid' */
static int
StaticTuple_check_items(StaticTuple *self)
{
    int i;
    PyObject *obj;

    for (i = 0; i < self->size; ++i) {
        obj = self->items[i];
        if (obj == NULL) {
            PyErr_SetString(PyExc_RuntimeError, "StaticTuple(...)"
                " should not have a NULL entry.");
            return 0;
        }
        if (PyString_CheckExact(obj)
            || StaticTuple_CheckExact(obj)
            || obj == Py_None
            || PyBool_Check(obj)
            || PyInt_CheckExact(obj)
            || PyLong_CheckExact(obj)
            || PyFloat_CheckExact(obj)
            || PyUnicode_CheckExact(obj)
            ) continue;
        PyErr_Format(PyExc_TypeError, "StaticTuple(...)"
            " requires that all items are one of"
            " str, StaticTuple, None, bool, int, long, float, or unicode"
            " not %s.", Py_TYPE(obj)->tp_name);
        return 0;
    }
    return 1;
}

static PyObject *
StaticTuple_new_constructor(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    StaticTuple *self = NULL;
    PyObject *obj = NULL;
    Py_ssize_t i, len = 0;

    if (type != &StaticTuple_Type) {
        PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
        return NULL;
    }
    if (!PyTuple_CheckExact(args)) {
        PyErr_SetString(PyExc_TypeError, "args must be a tuple");
        return NULL;
    }
    len = PyTuple_GET_SIZE(args);
    if (len < 0 || len > 255) {
        /* Check the length here so we can raise a TypeError instead of
         * StaticTuple_New's ValueError.
         */
        PyErr_SetString(PyExc_TypeError, "StaticTuple(...)"
            " takes from 0 to 255 items");
        return NULL;
    }
    self = (StaticTuple *)StaticTuple_New(len);
    if (self == NULL) {
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = PyTuple_GET_ITEM(args, i);
        /* GET_ITEM only borrows, but items[] holds a real reference */
        Py_INCREF(obj);
        self->items[i] = obj;
    }
    if (!StaticTuple_check_items(self)) {
        type->tp_dealloc((PyObject *)self);
        return NULL;
    }
    return (PyObject *)self;
}

static PyObject *
StaticTuple_repr(StaticTuple *self)
{
    PyObject *as_tuple, *tuple_repr, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    tuple_repr = PyObject_Repr(as_tuple);
    Py_DECREF(as_tuple);
    if (tuple_repr == NULL) {
        return NULL;
    }
    result = PyString_FromFormat("StaticTuple%s",
                                 PyString_AsString(tuple_repr));
    Py_DECREF(tuple_repr);
    return result;
}

static long
StaticTuple_hash(StaticTuple *self)
{
    /* adapted from tuplehash(), is the specific hash value considered
     * 'stable'?
     */
    long x, y;
    Py_ssize_t len = self->size;
    PyObject **p = self->items;
    long mult = 1000003L;

#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
        return self->hash;
    }
#endif
    x = 0x345678L;
    // TODO: We could set specific flags if we know that, for example, all the
    //       items are strings. I haven't seen a real-world benefit to that
    //       yet, though.
    while (--len >= 0) {
        y = PyObject_Hash(*p++);
        if (y == -1) /* failure */
            return -1;
        x = (x ^ y) * mult;
        /* the cast might truncate len; that doesn't change hash stability */
        mult += (long)(82520L + len + len);
    }
    x += 97531L;
    if (x == -1)
        x = -2;
#if STATIC_TUPLE_HAS_HASH
    self->hash = x;
#endif
    return x;
}
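
/* A minimal sketch of the accumulator above, applied to pre-computed item
 * hashes (illustrative only, compiled out via the hypothetical
 * STATIC_TUPLE_EXAMPLES macro). It mirrors CPython 2's tuplehash(): same
 * seed, multiplier update, and -1/-2 fixup, which is what keeps a
 * StaticTuple hashing the same as a tuple with equal contents.
 */
#ifdef STATIC_TUPLE_EXAMPLES
static long
example_combine_hashes(const long *hashes, Py_ssize_t len)
{
    long x = 0x345678L;
    long mult = 1000003L;
    while (--len >= 0) {
        x = (x ^ *hashes++) * mult;
        mult += (long)(82520L + len + len);
    }
    x += 97531L;
    return (x == -1) ? -2 : x;
}
#endif /* STATIC_TUPLE_EXAMPLES */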

static PyObject *
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
{
    PyObject *vt;
    PyObject *result = NULL;

    vt = StaticTuple_as_tuple((StaticTuple *)v);
    if (vt == NULL) {
        goto done;
    }
    if (!PyTuple_Check(wt)) {
        PyErr_BadInternalCall();
        goto done;
    }
    /* Now we have 2 tuples to compare, do it */
    result = PyTuple_Type.tp_richcompare(vt, wt, op);
done:
    Py_XDECREF(vt);
    return result;
}

/** Compare two objects to determine if they are equivalent.
 * The basic flow is as follows:
 *  1) First make sure that both objects are StaticTuple instances. If they
 *     aren't, then cast self to a tuple and have the tuple do the comparison.
 *  2) Special case comparison to Py_None, because it happens to occur fairly
 *     often in the test suite.
 *  3) Special case when v and w are the same pointer, as we know the answer
 *     to all queries without walking individual items.
 *  4) For all operations, we then walk the items to find the first paired
 *     items that are not equal.
 *  5) If all items found are equal, we then check the length of self and
 *     other to determine equality.
 *  6) If an item differs, then we apply "op" to those last two items. (eg.
 *     StaticTuple(A, B) > StaticTuple(A, C) iff B > C)
 * (A short behavioural sketch of these rules follows the function below.)
 */
static PyObject *
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
{
    StaticTuple *v_st, *w_st;
    Py_ssize_t vlen, wlen, min_len, i;
    PyObject *v_obj, *w_obj;
    richcmpfunc string_richcompare;

    if (!StaticTuple_CheckExact(v)) {
        /* This has never triggered, but according to python-dev it might
         * trigger if '__op__' is defined while '__rop__' is not, as in
         * "None == StaticTuple()".
         */
        fprintf(stderr, "self is not StaticTuple\n");
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    v_st = (StaticTuple *)v;
    if (StaticTuple_CheckExact(w)) {
        /* The most common case */
        w_st = (StaticTuple*)w;
    } else if (PyTuple_Check(w)) {
        /* One of v or w is a tuple, so we go the 'slow' route and cast up to
         * tuples to compare.
         */
        /* TODO: This seems to be triggering more than I thought it would...
         *       We probably want to optimize comparing self to other when
         *       other is a plain tuple.
         */
        return StaticTuple_richcompare_to_tuple(v_st, w, op);
    } else if (w == Py_None) {
        // None is always less than the object
        switch (op) {
        case Py_NE:case Py_GT:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_EQ:case Py_LT:case Py_LE:
            Py_INCREF(Py_False);
            return Py_False;
        default: // Should never happen
            Py_INCREF(Py_NotImplemented);
            return Py_NotImplemented;
        }
    } else {
        /* We don't special case this comparison, we just let python handle
         * it.
         */
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    /* Now we know that we have 2 StaticTuple objects, so let's compare them.
     * This code is inspired from tuplerichcompare, except we know our
     * objects are limited in scope, so we can inline some comparisons.
     */
    if (v == w) {
        /* Identical pointers, we can shortcut this easily. */
        switch (op) {
        case Py_EQ:case Py_LE:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_NE:case Py_LT:case Py_GT:
            Py_INCREF(Py_False);
            return Py_False;
        }
    }
    if (op == Py_EQ
        && _StaticTuple_is_interned(v_st)
        && _StaticTuple_is_interned(w_st))
    {
        /* If both objects are interned, we know they are different if the
         * pointer is not the same, which would have been handled by the
         * previous if. No need to compare the entries.
         */
        Py_INCREF(Py_False);
        return Py_False;
    }
    /* The only time we are likely to compare items of different lengths is in
     * something like the interned_keys set. However, the hash is good enough
     * that it is rare. Note that 'tuple_richcompare' also does not compare
     * lengths here.
     */
    vlen = v_st->size;
    wlen = w_st->size;
    min_len = (vlen < wlen) ? vlen : wlen;
    string_richcompare = PyString_Type.tp_richcompare;
    for (i = 0; i < min_len; i++) {
        PyObject *result = NULL;
        v_obj = StaticTuple_GET_ITEM(v_st, i);
        w_obj = StaticTuple_GET_ITEM(w_st, i);
        if (v_obj == w_obj) {
            /* Shortcut case, these must be identical */
            continue;
        }
        if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
            result = string_richcompare(v_obj, w_obj, Py_EQ);
        } else if (StaticTuple_CheckExact(v_obj) &&
                   StaticTuple_CheckExact(w_obj))
        {
            /* Both are StaticTuple types, so recurse */
            result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
        } else {
            /* Fall back to generic richcompare */
            result = PyObject_RichCompare(v_obj, w_obj, Py_EQ);
        }
        if (result == NULL) {
            return NULL; /* There seems to be an error */
        }
        if (result == Py_False) {
            // This entry is not identical, shortcut for Py_EQ
            if (op == Py_EQ) {
                return result;
            }
            Py_DECREF(result);
            break;
        }
        if (result != Py_True) {
            /* We don't know *what* richcompare is returning, but it
             * isn't something we recognize
             */
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        Py_DECREF(result);
    }
    if (i >= vlen || i >= wlen) {
        /* We walked off one of the lists, but everything compared equal so
         * far. Just compare the size.
         */
        int cmp;
        PyObject *res;
        switch (op) {
        case Py_LT: cmp = vlen < wlen; break;
        case Py_LE: cmp = vlen <= wlen; break;
        case Py_EQ: cmp = vlen == wlen; break;
        case Py_NE: cmp = vlen != wlen; break;
        case Py_GT: cmp = vlen > wlen; break;
        case Py_GE: cmp = vlen >= wlen; break;
        default: return NULL; /* cannot happen */
        }
        res = cmp ? Py_True : Py_False;
        Py_INCREF(res);
        return res;
    }
    /* The last item differs, shortcut the Py_NE case */
    if (op == Py_NE) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    /* It is some other comparison, go ahead and do the real check. */
    if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
    {
        return string_richcompare(v_obj, w_obj, op);
    } else if (StaticTuple_CheckExact(v_obj) &&
               StaticTuple_CheckExact(w_obj))
    {
        /* Both are StaticTuple types, so recurse */
        return StaticTuple_richcompare(v_obj, w_obj, op);
    } else {
        return PyObject_RichCompare(v_obj, w_obj, op);
    }
}
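
/* A behavioural sketch of the rules documented above (illustrative only,
 * written as Python-level pseudocode; st stands for the StaticTuple
 * constructor):
 *
 *   st('a', 'b') == ('a', 'b')    # True, via StaticTuple_richcompare_to_tuple
 *   st('a', 'b') < st('a', 'c')   # True, first differing pair: 'b' < 'c'
 *   st('a') < st('a', 'b')        # True, equal prefix, shorter compares less
 *   st('a', 'b') != None          # True, None is always less than the object
 */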

static Py_ssize_t
StaticTuple_length(StaticTuple *self)
{
    return self->size;
}


static PyObject *
StaticTuple__is_interned(StaticTuple *self)
{
    if (_StaticTuple_is_interned(self)) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    Py_INCREF(Py_False);
    return Py_False;
}

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
    "Check to see if this tuple has been interned.\n";

static PyObject *
StaticTuple_reduce(StaticTuple *self)
{
    PyObject *result = NULL, *as_tuple = NULL;

    result = PyTuple_New(2);
    if (!result) {
        return NULL;
    }
    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        Py_DECREF(result);
        return NULL;
    }
    Py_INCREF(&StaticTuple_Type);
    PyTuple_SET_ITEM(result, 0, (PyObject *)&StaticTuple_Type);
    PyTuple_SET_ITEM(result, 1, as_tuple);
    return result;
}

static char StaticTuple_reduce_doc[] = "__reduce__() => tuple\n";
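
/* Behavioural sketch (illustrative only): the pair built above follows the
 * standard __reduce__ protocol, roughly
 *
 *     st.__reduce__() == (StaticTuple, st.as_tuple())
 *
 * so pickle.loads(pickle.dumps(st)) rebuilds an equal StaticTuple by calling
 * the constructor with the plain-tuple contents.
 */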

static PyObject *
StaticTuple_add(PyObject *v, PyObject *w)
{
    Py_ssize_t i, len_v, len_w;
    PyObject *item;
    StaticTuple *result;

    /* StaticTuples and plain tuples may be added (concatenated) to
     * StaticTuples.
     */
    if (StaticTuple_CheckExact(v)) {
        len_v = ((StaticTuple*)v)->size;
    } else if (PyTuple_Check(v)) {
        len_v = PyTuple_GET_SIZE(v);
    } else {
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    if (StaticTuple_CheckExact(w)) {
        len_w = ((StaticTuple*)w)->size;
    } else if (PyTuple_Check(w)) {
        len_w = PyTuple_GET_SIZE(w);
    } else {
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    result = StaticTuple_New(len_v + len_w);
    if (result == NULL) {
        return NULL;
    }
    for (i = 0; i < len_v; ++i) {
        // This returns a new reference, which we then 'steal' with
        // StaticTuple_SET_ITEM
        item = PySequence_GetItem(v, i);
        if (item == NULL) {
            Py_DECREF(result);
            return NULL;
        }
        StaticTuple_SET_ITEM(result, i, item);
    }
    for (i = 0; i < len_w; ++i) {
        item = PySequence_GetItem(w, i);
        if (item == NULL) {
            Py_DECREF(result);
            return NULL;
        }
        StaticTuple_SET_ITEM(result, i+len_v, item);
    }
    if (!StaticTuple_check_items(result)) {
        Py_DECREF(result);
        return NULL;
    }
    return (PyObject *)result;
}

static PyObject *
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
{
    PyObject *obj;
    /* We cast to (int) to avoid worrying about whether Py_ssize_t is a
     * long long, etc. offsets should never be >2**31 anyway.
     */
    if (offset < 0) {
        PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
            " negative indices: %d\n", (int)offset);
        return NULL;
    } else if (offset >= self->size) {
        PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
            " %d >= %d", (int)offset, (int)self->size);
        return NULL;
    }
    obj = (PyObject *)self->items[offset];
    Py_INCREF(obj);
    return obj;
}

static PyObject *
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
{
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
    Py_DECREF(as_tuple);
    return result;
}

static int
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
{
    Py_ssize_t i;
    for (i = self->size; --i >= 0;) {
        Py_VISIT(self->items[i]);
    }
    return 0;
}

static PyObject *
StaticTuple_sizeof(StaticTuple *self)
{
    Py_ssize_t res;

    res = _PyObject_SIZE(&StaticTuple_Type) + (int)self->size * sizeof(void*);
    return PyInt_FromSsize_t(res);
}

static char StaticTuple_doc[] =
    "C implementation of a StaticTuple structure."
    "\n This is used as StaticTuple(item1, item2, item3)"
    "\n This is similar to tuple, but less flexible in what it"
    "\n supports, with lighter memory consumption in exchange."
    "\n Note that the constructor mimics the () form of tuples,"
    "\n rather than the 'tuple()' constructor."
    "\n  eg. StaticTuple(a, b) == (a, b) == tuple((a, b))";

static PyMethodDef StaticTuple_methods[] = {
    {"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
    {"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
    {"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
     StaticTuple__is_interned_doc},
    {"from_sequence", (PyCFunction)StaticTuple_from_sequence,
     METH_STATIC | METH_VARARGS,
     "Create a StaticTuple from a given sequence. This functions"
     " the same as the tuple() constructor."},
    {"__reduce__", (PyCFunction)StaticTuple_reduce, METH_NOARGS, StaticTuple_reduce_doc},
    {"__sizeof__", (PyCFunction)StaticTuple_sizeof, METH_NOARGS},
    {NULL, NULL} /* sentinel */
};

static PyNumberMethods StaticTuple_as_number = {
    (binaryfunc) StaticTuple_add,   /* nb_add */
    0,                              /* nb_subtract */
    0,                              /* nb_multiply */
    0,                              /* nb_divide */
    0,                              /* nb_remainder */
    /* the remaining number slots are unused and stay zero-initialized */
};

static PySequenceMethods StaticTuple_as_sequence = {
    (lenfunc)StaticTuple_length,            /* sq_length */
    0,                                      /* sq_concat */
    0,                                      /* sq_repeat */
    (ssizeargfunc)StaticTuple_item,         /* sq_item */
    (ssizessizeargfunc)StaticTuple_slice,   /* sq_slice */
    0,                                      /* sq_ass_item */
    0,                                      /* sq_ass_slice */
    0,                                      /* sq_contains */
};

/* TODO: Implement StaticTuple_as_mapping.
 *       The only thing we really want to support from there is mp_subscript,
 *       so that we could support extended slicing (foo[::2]). Not worth it
 *       yet, though.
 */

PyTypeObject StaticTuple_Type = {
    PyObject_HEAD_INIT(NULL)
    0,                                           /* ob_size */
    "bzrlib._static_tuple_c.StaticTuple",        /* tp_name */
    sizeof(StaticTuple),                         /* tp_basicsize */
    sizeof(PyObject *),                          /* tp_itemsize */
    (destructor)StaticTuple_dealloc,             /* tp_dealloc */
    0,                                           /* tp_print */
    0,                                           /* tp_getattr */
    0,                                           /* tp_setattr */
    0,                                           /* tp_compare */
    (reprfunc)StaticTuple_repr,                  /* tp_repr */
    &StaticTuple_as_number,                      /* tp_as_number */
    &StaticTuple_as_sequence,                    /* tp_as_sequence */
    0,                                           /* tp_as_mapping */
    (hashfunc)StaticTuple_hash,                  /* tp_hash */
    0,                                           /* tp_call */
    0,                                           /* tp_str */
    0,                                           /* tp_getattro */
    0,                                           /* tp_setattro */
    0,                                           /* tp_as_buffer */
    /* Py_TPFLAGS_CHECKTYPES tells the number operations that they shouldn't
     * try to 'coerce'; instead operations like 'add' will check their own
     * arguments.
     */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES,  /* tp_flags */
    StaticTuple_doc,                             /* tp_doc */
    /* gc.get_referents checks the IS_GC flag before it calls tp_traverse,
     * and we don't include this object in the garbage collector because we
     * know it doesn't create cycles. However, 'meliae' will follow
     * tp_traverse, even if the object isn't GC, and we want that.
     */
    (traverseproc)StaticTuple_traverse,          /* tp_traverse */
    0,                                           /* tp_clear */
    StaticTuple_richcompare,                     /* tp_richcompare */
    0,                                           /* tp_weaklistoffset */
    // without implementing tp_iter, Python will fall back to PySequence*
    // which seems to work ok, we may need something faster/lighter in the
    // future.
    0,                                           /* tp_iter */
    0,                                           /* tp_iternext */
    StaticTuple_methods,                         /* tp_methods */
    0,                                           /* tp_members */
    0,                                           /* tp_getset */
    0,                                           /* tp_base */
    0,                                           /* tp_dict */
    0,                                           /* tp_descr_get */
    0,                                           /* tp_descr_set */
    0,                                           /* tp_dictoffset */
    0,                                           /* tp_init */
    0,                                           /* tp_alloc */
    StaticTuple_new_constructor,                 /* tp_new */
};

static PyMethodDef static_tuple_c_methods[] = {
    {NULL, NULL} /* sentinel */
};

static void
setup_interned_tuples(PyObject *m)
{
    _interned_tuples = (PyObject *)SimpleSet_New();
    if (_interned_tuples != NULL) {
        /* PyModule_AddObject steals a reference, and we also keep the
         * global pointer alive. */
        Py_INCREF(_interned_tuples);
        PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
    }
}

static void
setup_empty_tuple(PyObject *m)
{
    StaticTuple *stuple;
    if (_interned_tuples == NULL) {
        fprintf(stderr, "You need to call setup_interned_tuples() before"
            " setup_empty_tuple, because we intern it.\n");
    }
    // We need to create the empty tuple
    stuple = (StaticTuple *)StaticTuple_New(0);
    _empty_tuple = StaticTuple_Intern(stuple);
    assert(_empty_tuple == stuple);
    // At this point, refcnt is 2: 1 from New(), and 1 from the return from
    // intern(). We will keep 1 for the _empty_tuple global, and use the other
    // for the module reference.
    PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
}

static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}

static void
setup_c_api(PyObject *m)
{
    _export_function(m, "StaticTuple_New", StaticTuple_New,
        "StaticTuple *(Py_ssize_t)");
    _export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
        "StaticTuple *(StaticTuple *)");
    _export_function(m, "StaticTuple_FromSequence", StaticTuple_FromSequence,
        "StaticTuple *(PyObject *)");
    _export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
        "int(PyObject *)");
}

static int
_workaround_pyrex_096(void)
{
    /* Work around an incompatibility in how pyrex 0.9.6 exports a module,
     * versus how pyrex 0.9.8 and cython 0.11 export it.
     * Namely 0.9.6 exports import__simple_set_pyx and tries to
     * "import _simple_set_pyx" but it is available only as
     * "import bzrlib._simple_set_pyx"
     * It is a shame to hack up sys.modules, but that is what we've got to do.
     */
    PyObject *sys_module = NULL, *modules = NULL, *set_module = NULL;
    int retval = -1;

    /* Clear out the current ImportError exception, and try again. */
    PyErr_Clear();
    /* Note that this only seems to work if somewhere else imports
     * bzrlib._simple_set_pyx before importing bzrlib._static_tuple_c
     */
    set_module = PyImport_ImportModule("bzrlib._simple_set_pyx");
    if (set_module == NULL) {
        goto end;
    }
    /* Add the _simple_set_pyx into sys.modules at the appropriate location. */
    sys_module = PyImport_ImportModule("sys");
    if (sys_module == NULL) {
        goto end;
    }
    modules = PyObject_GetAttrString(sys_module, "modules");
    if (modules == NULL || !PyDict_Check(modules)) {
        goto end;
    }
    PyDict_SetItemString(modules, "_simple_set_pyx", set_module);
    /* Now that we have hacked it in, try the import again. */
    retval = import_bzrlib___simple_set_pyx();
end:
    Py_XDECREF(set_module);
    Py_XDECREF(sys_module);
    Py_XDECREF(modules);
    return retval;
}

PyMODINIT_FUNC
init_static_tuple_c(void)
{
    PyObject* m;

    StaticTuple_Type.tp_getattro = PyObject_GenericGetAttr;
    if (PyType_Ready(&StaticTuple_Type) < 0)
        return;

    m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
                       "C implementation of a StaticTuple structure");
    if (m == NULL)
        return;

    Py_INCREF(&StaticTuple_Type);
    PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
    if (import_bzrlib___simple_set_pyx() == -1
        && _workaround_pyrex_096() == -1)
    {
        return;
    }
    setup_interned_tuples(m);
    setup_empty_tuple(m);
    setup_c_api(m);
}

// vim: tabstop=4 sw=4 expandtab