/* Copyright (C) 2009 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include "_static_tuple_c.h"
#include "_export_c_api.h"

/* Pyrex 0.9.6.4 exports _simple_set_pyx_api as
 * import__simple_set_pyx(), while Pyrex 0.9.8.5 and Cython 0.11.3 export them
 * as import_bzrlib___simple_set_pyx(). As such, we just #define one to be
 * equivalent to the other in our internal code.
 */
#define import__simple_set_pyx import_bzrlib___simple_set_pyx
#include "_simple_set_pyx_api.h"

#include "python-compat.h"

#if defined(__GNUC__)
# define inline __inline__
#elif defined(_MSC_VER)
# define inline __inline
#else
# define inline
#endif
/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;


static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}

static PyObject *
StaticTuple_as_tuple(StaticTuple *self)
{
    PyObject *tpl = NULL, *obj = NULL;
    Py_ssize_t i, len;

    len = self->size;
    tpl = PyTuple_New(len);
    if (!tpl) {
        /* Malloc failure */
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = (PyObject *)self->items[i];
        /* PyTuple_SET_ITEM steals a reference, so take one for the tuple */
        Py_INCREF(obj);
        PyTuple_SET_ITEM(tpl, i, obj);
    }
    return tpl;
}

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";

static StaticTuple *
StaticTuple_Intern(StaticTuple *self)
{
    PyObject *canonical_tuple = NULL;

    if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
        Py_INCREF(self);
        return self;
    }
    /* SimpleSet_Add returns whatever object is present at self
     * or the new object if it needs to add it.
     */
    canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
    if (!canonical_tuple) {
        // Some sort of exception, propagate it.
        return NULL;
    }
    if (canonical_tuple != (PyObject *)self) {
        // There was already a tuple with that value
        return (StaticTuple *)canonical_tuple;
    }
    self->flags |= STATIC_TUPLE_INTERNED_FLAG;
    // The two references in the set do not count, so that the StaticTuple
    // object does not become immortal just because it was interned.
    Py_REFCNT(self) -= 1;
    return self;
}

static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
    "Return a 'canonical' StaticTuple object.\n"
    "Similar to intern() for strings, this makes sure there\n"
    "is only one StaticTuple object for a given value.\n"
    "Common usage is:\n"
    "    key = StaticTuple('foo', 'bar').intern()\n";

static void
StaticTuple_dealloc(StaticTuple *self)
{
    int i, len = self->size;

    if (_StaticTuple_is_interned(self)) {
        /* revive dead object temporarily for Discard */
        Py_REFCNT(self) = 2;
        if (SimpleSet_Discard(_interned_tuples, (PyObject *)self) != 1)
            Py_FatalError("deletion of interned StaticTuple failed");
        self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
    }
    for (i = 0; i < len; ++i) {
        Py_XDECREF(self->items[i]);
    }
    Py_TYPE(self)->tp_free((PyObject *)self);
}

/* Similar to PyTuple_New() */
static StaticTuple *
StaticTuple_New(Py_ssize_t size)
{
    StaticTuple *stuple;

    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (size == 0 && _empty_tuple != NULL) {
        Py_INCREF(_empty_tuple);
        return _empty_tuple;
    }
    /* Note that we use PyObject_NewVar because we want to allocate a variable
     * width entry. However we *aren't* truly a PyVarObject because we don't
     * use a long for ob_size. Instead we use a plain 'size' that is an int,
     * and will be overloaded with flags in the future.
     * As such we do the alloc, and then have to clean up anything it does
     * incorrectly.
     */
    stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
    if (stuple == NULL) {
        return NULL;
    }
    stuple->size = size;
    stuple->flags = 0;
    stuple->_unused0 = 0;
    stuple->_unused1 = 0;
    if (size > 0) {
        memset(stuple->items, 0, sizeof(PyObject *) * size);
    }
#if STATIC_TUPLE_HAS_HASH
    stuple->hash = -1;
#endif
    return stuple;
}

static PyObject *
StaticTuple_new_constructor(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    StaticTuple *self = NULL;
    PyObject *obj = NULL;
    Py_ssize_t i, len = 0;

    if (type != &StaticTuple_Type) {
        PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
        return NULL;
    }
    if (!PyTuple_CheckExact(args)) {
        PyErr_SetString(PyExc_TypeError, "args must be a tuple");
        return NULL;
    }
    len = PyTuple_GET_SIZE(args);
    if (len < 0 || len > 255) {
        /* Too big or too small */
        PyErr_SetString(PyExc_ValueError, "StaticTuple.__init__(...)"
            " takes from 0 to 255 items");
        return NULL;
    }
    self = (StaticTuple *)StaticTuple_New(len);
    if (self == NULL) {
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = PyTuple_GET_ITEM(args, i);
        if (!PyString_CheckExact(obj)) {
            if (!StaticTuple_CheckExact(obj)) {
                PyErr_SetString(PyExc_TypeError, "StaticTuple.__init__(...)"
                    " requires that all items are strings or StaticTuple.");
                type->tp_dealloc((PyObject *)self);
                return NULL;
            }
        }
        /* PyTuple_GET_ITEM only borrows; take our own reference */
        Py_INCREF(obj);
        self->items[i] = obj;
    }
    return (PyObject *)self;
}

static PyObject *
StaticTuple_repr(StaticTuple *self)
{
    PyObject *as_tuple, *tuple_repr, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    tuple_repr = PyObject_Repr(as_tuple);
    Py_DECREF(as_tuple);
    if (tuple_repr == NULL) {
        return NULL;
    }
    result = PyString_FromFormat("%s%s", Py_TYPE(self)->tp_name,
                                 PyString_AsString(tuple_repr));
    Py_DECREF(tuple_repr);
    return result;
}

static long
StaticTuple_hash(StaticTuple *self)
{
    /* adapted from tuplehash(), is the specific hash value considered
     * 'stable'?
     */
    register long x, y;
    Py_ssize_t len = self->size;
    PyObject **p;
    long mult = 1000003L;

#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
        return self->hash;
    }
#endif
    x = 0x345678L;
    p = self->items;
    // TODO: We could set specific flags if we know that, for example, all the
    //       items are strings. I haven't seen a real-world benefit to that
    //       yet.
    while (--len >= 0) {
        y = PyObject_Hash(*p++);
        if (y == -1) /* failure */
            return -1;
        x = (x ^ y) * mult;
        /* the cast might truncate len; that doesn't change hash stability */
        mult += (long)(82520L + len + len);
    }
    x += 97531L;
    if (x == -1)
        x = -2;
#if STATIC_TUPLE_HAS_HASH
    self->hash = x;
#endif
    return x;
}
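
/* For reference, the loop above follows the same recurrence as CPython's
 * tuplehash(): starting from x = 0x345678, each item folds in as
 *     x = (x ^ hash(item)) * mult
 * with mult advanced per item. A StaticTuple should therefore hash
 * identically to the equivalent plain tuple, which keeps mixed
 * tuple/StaticTuple dict and set lookups consistent.
 */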

static PyObject *
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
{
    PyObject *vt;
    PyObject *result = NULL;

    vt = StaticTuple_as_tuple((StaticTuple *)v);
    if (vt == NULL) {
        goto done;
    }
    if (!PyTuple_Check(wt)) {
        PyErr_BadInternalCall();
        goto done;
    }
    /* Now we have 2 tuples to compare, do it */
    result = PyTuple_Type.tp_richcompare(vt, wt, op);
done:
    Py_XDECREF(vt);
    return result;
}
296
/** Compare two objects to determine if they are equivalent.
297
* The basic flow is as follows
298
* 1) First make sure that both objects are StaticTuple instances. If they
299
* aren't then cast self to a tuple, and have the tuple do the comparison.
300
* 2) Special case comparison to Py_None, because it happens to occur fairly
301
* often in the test suite.
302
* 3) Special case when v and w are the same pointer. As we know the answer to
303
* all queries without walking individual items.
304
* 4) For all operations, we then walk the items to find the first paired
305
* items that are not equal.
306
* 5) If all items found are equal, we then check the length of self and
307
* other to determine equality.
308
* 6) If an item differs, then we apply "op" to those last two items. (eg.
309
* StaticTuple(A, B) > StaticTuple(A, C) iff B > C)
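
/* For example, by step 6, StaticTuple('a', 'b') < StaticTuple('a', 'c') iff
 * 'b' < 'c'; and by step 5, with an equal prefix the shorter tuple compares
 * less, so StaticTuple('a',) < StaticTuple('a', 'b'), just as with tuples.
 */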

static PyObject *
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
{
    StaticTuple *v_st, *w_st;
    Py_ssize_t vlen, wlen, min_len, i;
    PyObject *v_obj, *w_obj;
    richcmpfunc string_richcompare;

    if (!StaticTuple_CheckExact(v)) {
        /* This has never triggered, but according to python-dev it might
         * trigger if '__op__' is defined but '__rop__' is not, for a case
         * such as "None == StaticTuple()".
         */
        fprintf(stderr, "self is not StaticTuple\n");
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    v_st = (StaticTuple *)v;
    if (StaticTuple_CheckExact(w)) {
        /* The most common case */
        w_st = (StaticTuple *)w;
    } else if (PyTuple_Check(w)) {
        /* One of v or w is a tuple, so we go the 'slow' route and cast up to
         * tuples to compare.
         */
        /* TODO: This seems to be triggering more than I thought it would...
         *       We probably want to optimize comparing self to other when
         *       other is a plain tuple.
         */
        return StaticTuple_richcompare_to_tuple(v_st, w, op);
    } else if (w == Py_None) {
        // None is always less than the object
        switch (op) {
        case Py_NE:case Py_GT:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_EQ:case Py_LT:case Py_LE:
            Py_INCREF(Py_False);
            return Py_False;
        default: // Should never happen
            Py_INCREF(Py_NotImplemented);
            return Py_NotImplemented;
        }
    } else {
        /* We don't special case this comparison, we just let python handle
         * it.
         */
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    /* Now we know that we have 2 StaticTuple objects, so let's compare them.
     * This code is inspired by tuplerichcompare, except we know our
     * objects are limited in scope, so we can inline some comparisons.
     */
    if (v == w) {
        /* Identical pointers, we can shortcut this easily. */
        switch (op) {
        case Py_EQ:case Py_LE:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_NE:case Py_LT:case Py_GT:
            Py_INCREF(Py_False);
            return Py_False;
        }
    }
    if (op == Py_EQ
        && _StaticTuple_is_interned(v_st)
        && _StaticTuple_is_interned(w_st))
    {
        /* If both objects are interned, we know they are different if the
         * pointer is not the same, which would have been handled by the
         * previous if. No need to compare the entries.
         */
        Py_INCREF(Py_False);
        return Py_False;
    }

    /* The only time we are likely to compare items of different lengths is in
     * something like the interned_keys set. However, the hash is good enough
     * that it is rare. Note that 'tuple_richcompare' also does not compare
     * lengths here.
     */
    vlen = v_st->size;
    wlen = w_st->size;
    min_len = (vlen < wlen) ? vlen : wlen;
    string_richcompare = PyString_Type.tp_richcompare;
    for (i = 0; i < min_len; i++) {
        PyObject *result = NULL;
        v_obj = StaticTuple_GET_ITEM(v_st, i);
        w_obj = StaticTuple_GET_ITEM(w_st, i);
        if (v_obj == w_obj) {
            /* Shortcut case, these must be identical */
            continue;
        }
        if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
            result = string_richcompare(v_obj, w_obj, Py_EQ);
        } else if (StaticTuple_CheckExact(v_obj) &&
                   StaticTuple_CheckExact(w_obj))
        {
            /* Both are StaticTuple types, so recurse */
            result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
        } else {
            /* Not the same type, obviously they won't compare equal */
            break;
        }
        if (result == NULL) {
            return NULL; /* There seems to be an error */
        }
        if (result == Py_NotImplemented) {
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        if (result == Py_False) {
            /* This entry is not identical; shortcut the Py_EQ case by
             * returning the False we already hold.
             */
            if (op == Py_EQ) {
                return result;
            }
            Py_DECREF(result);
            break;
        }
        if (result != Py_True) {
            /* We don't know *what* richcompare is returning, but it
             * isn't something we recognize
             */
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        Py_DECREF(result);
    }
    if (i >= vlen || i >= wlen) {
        /* We walked off one of the lists, but everything compared equal so
         * far. Just compare the size.
         */
        int cmp;
        PyObject *res;
        switch (op) {
        case Py_LT: cmp = vlen <  wlen; break;
        case Py_LE: cmp = vlen <= wlen; break;
        case Py_EQ: cmp = vlen == wlen; break;
        case Py_NE: cmp = vlen != wlen; break;
        case Py_GT: cmp = vlen >  wlen; break;
        case Py_GE: cmp = vlen >= wlen; break;
        default: return NULL; /* cannot happen */
        }
        res = cmp ? Py_True : Py_False;
        Py_INCREF(res);
        return res;
    }
    /* The last item differs, shortcut the Py_NE case */
    if (op == Py_NE) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    /* It is some other comparison, go ahead and do the real check. */
    if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
    {
        return string_richcompare(v_obj, w_obj, op);
    } else if (StaticTuple_CheckExact(v_obj) &&
               StaticTuple_CheckExact(w_obj))
    {
        /* Both are StaticTuple types, so recurse */
        return StaticTuple_richcompare(v_obj, w_obj, op);
    } else {
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
}

static Py_ssize_t
StaticTuple_length(StaticTuple *self)
{
    return self->size;
}


static PyObject *
StaticTuple__is_interned(StaticTuple *self)
{
    if (_StaticTuple_is_interned(self)) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    Py_INCREF(Py_False);
    return Py_False;
}

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
    "Check to see if this tuple has been interned.\n";

static PyObject *
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
{
    PyObject *obj;
    /* We cast to (int) to avoid worrying about whether Py_ssize_t is a
     * long long, etc. offsets should never be >2**31 anyway.
     */
    if (offset < 0) {
        PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
            " negative indices: %d\n", (int)offset);
        return NULL;
    } else if (offset >= self->size) {
        PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
            " %d >= %d", (int)offset, (int)self->size);
        return NULL;
    }
    obj = (PyObject *)self->items[offset];
    Py_INCREF(obj);
    return obj;
}

static PyObject *
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
{
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
    Py_DECREF(as_tuple);
    return result;
}

static int
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
{
    Py_ssize_t i;
    for (i = self->size; --i >= 0;) {
        Py_VISIT(self->items[i]);
    }
    return 0;
}

static char StaticTuple_doc[] =
    "C implementation of a StaticTuple structure."
    "\n This is used as StaticTuple(item1, item2, item3)"
    "\n This is similar to tuple, less flexible in what it"
    "\n supports, but with lighter memory consumption."
    "\n Note that the constructor mimics the () form of tuples"
    "\n rather than the 'tuple()' constructor."
    "\n  eg. StaticTuple(a, b) == (a, b) == tuple((a, b))";

static PyMethodDef StaticTuple_methods[] = {
    {"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
    {"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
    {"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
     StaticTuple__is_interned_doc},
    {NULL, NULL} /* sentinel */
};

static PySequenceMethods StaticTuple_as_sequence = {
    (lenfunc)StaticTuple_length,           /* sq_length */
    0,                                     /* sq_concat */
    0,                                     /* sq_repeat */
    (ssizeargfunc)StaticTuple_item,        /* sq_item */
    (ssizessizeargfunc)StaticTuple_slice,  /* sq_slice */
    0,                                     /* sq_ass_item */
    0,                                     /* sq_ass_slice */
    0,                                     /* sq_contains */
};

/* TODO: Implement StaticTuple_as_mapping.
 *       The only thing we really want to support from there is mp_subscript,
 *       so that we could support extended slicing (foo[::2]). Not worth it
 *       yet, though.
 */
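
/* A rough sketch of what that could look like, delegating to as_tuple() the
 * same way StaticTuple_slice does (hypothetical; not currently compiled in):
 *
 *     static PyObject *
 *     StaticTuple_subscript(StaticTuple *self, PyObject *key)
 *     {
 *         PyObject *as_tuple, *result;
 *         as_tuple = StaticTuple_as_tuple(self);
 *         if (as_tuple == NULL)
 *             return NULL;
 *         result = PyObject_GetItem(as_tuple, key);
 *         Py_DECREF(as_tuple);
 *         return result;
 *     }
 */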

PyTypeObject StaticTuple_Type = {
    PyObject_HEAD_INIT(NULL)
    0,                                       /* ob_size */
    "StaticTuple",                           /* tp_name */
    sizeof(StaticTuple),                     /* tp_basicsize */
    sizeof(PyObject *),                      /* tp_itemsize */
    (destructor)StaticTuple_dealloc,         /* tp_dealloc */
    0,                                       /* tp_print */
    0,                                       /* tp_getattr */
    0,                                       /* tp_setattr */
    0,                                       /* tp_compare */
    (reprfunc)StaticTuple_repr,              /* tp_repr */
    0,                                       /* tp_as_number */
    &StaticTuple_as_sequence,                /* tp_as_sequence */
    0,                                       /* tp_as_mapping */
    (hashfunc)StaticTuple_hash,              /* tp_hash */
    0,                                       /* tp_call */
    0,                                       /* tp_str */
    PyObject_GenericGetAttr,                 /* tp_getattro */
    0,                                       /* tp_setattro */
    0,                                       /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,                      /* tp_flags */
    StaticTuple_doc,                         /* tp_doc */
    /* gc.get_referents checks the IS_GC flag before it calls tp_traverse,
     * and we don't include this object in the garbage collector because we
     * know it doesn't create cycles. However, 'meliae' will follow
     * tp_traverse, even if the object isn't GC, and we want that.
     */
    (traverseproc)StaticTuple_traverse,      /* tp_traverse */
    0,                                       /* tp_clear */
    StaticTuple_richcompare,                 /* tp_richcompare */
    0,                                       /* tp_weaklistoffset */
    // without implementing tp_iter, Python will fall back to PySequence*,
    // which seems to work ok; we may need something faster/lighter in the
    // future.
    0,                                       /* tp_iter */
    0,                                       /* tp_iternext */
    StaticTuple_methods,                     /* tp_methods */
    0,                                       /* tp_members */
    0,                                       /* tp_getset */
    0,                                       /* tp_base */
    0,                                       /* tp_dict */
    0,                                       /* tp_descr_get */
    0,                                       /* tp_descr_set */
    0,                                       /* tp_dictoffset */
    0,                                       /* tp_init */
    0,                                       /* tp_alloc */
    StaticTuple_new_constructor,             /* tp_new */
};

static PyMethodDef static_tuple_c_methods[] = {
    {NULL, NULL} /* sentinel */
};


static void
setup_interned_tuples(PyObject *m)
{
    _interned_tuples = (PyObject *)SimpleSet_New();
    if (_interned_tuples != NULL) {
        /* One reference for the module, one for the global */
        Py_INCREF(_interned_tuples);
        PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
    }
}

static void
setup_empty_tuple(PyObject *m)
{
    StaticTuple *stuple;
    if (_interned_tuples == NULL) {
        fprintf(stderr, "You need to call setup_interned_tuples() before"
            " setup_empty_tuple, because we intern it.\n");
    }
    // We need to create the empty tuple
    stuple = (StaticTuple *)StaticTuple_New(0);
    _empty_tuple = StaticTuple_Intern(stuple);
    assert(_empty_tuple == stuple);
    // At this point, refcnt is 2: 1 from New(), and 1 from the return from
    // intern(). We will keep 1 for the _empty_tuple global, and use the other
    // for the module reference.
    PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
}

static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}


static void
setup_c_api(PyObject *m)
{
    _export_function(m, "StaticTuple_New", StaticTuple_New,
        "StaticTuple *(Py_ssize_t)");
    _export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
        "StaticTuple *(StaticTuple *)");
    _export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
        "int(PyObject *)");
}
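
/* Other C extensions are expected to pick these function pointers up at
 * runtime through the import helper in _static_tuple_c.h (the side of that
 * header compiled without STATIC_TUPLE_MODULE), rather than linking against
 * this module directly. */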

static int
_workaround_pyrex_096(void)
{
    /* Work around an incompatibility in how pyrex 0.9.6 exports a module,
     * versus how pyrex 0.9.8 and cython 0.11 export it.
     * Namely 0.9.6 exports import__simple_set_pyx and tries to
     * "import _simple_set_pyx", but it is available only as
     * "import bzrlib._simple_set_pyx".
     * It is a shame to hack up sys.modules, but that is what we've got to do.
     */
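    /* Roughly the C equivalent of:
     *     import sys, bzrlib._simple_set_pyx
     *     sys.modules['_simple_set_pyx'] = bzrlib._simple_set_pyx
     * followed by retrying the API import.
     */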
    PyObject *sys_module = NULL, *modules = NULL, *set_module = NULL;
    int retval = -1;

    /* Clear out the current ImportError exception, and try again. */
    PyErr_Clear();
    /* Note that this only seems to work if somewhere else imports
     * bzrlib._simple_set_pyx before importing bzrlib._static_tuple_c
     */
    set_module = PyImport_ImportModule("bzrlib._simple_set_pyx");
    if (set_module == NULL) {
        // fprintf(stderr, "Failed to import bzrlib._simple_set_pyx\n");
        goto end;
    }
    /* Add the _simple_set_pyx into sys.modules at the appropriate location. */
    sys_module = PyImport_ImportModule("sys");
    if (sys_module == NULL) {
        // fprintf(stderr, "Failed to import sys\n");
        goto end;
    }
    modules = PyObject_GetAttrString(sys_module, "modules");
    if (modules == NULL || !PyDict_Check(modules)) {
        // fprintf(stderr, "Failed to find sys.modules\n");
        goto end;
    }
    PyDict_SetItemString(modules, "_simple_set_pyx", set_module);
    /* Now that we have hacked it in, try the import again. */
    retval = import_bzrlib___simple_set_pyx();
end:
    Py_XDECREF(set_module);
    Py_XDECREF(sys_module);
    Py_XDECREF(modules);
    return retval;
}

PyMODINIT_FUNC
init_static_tuple_c(void)
{
    PyObject *m;

    if (PyType_Ready(&StaticTuple_Type) < 0)
        return;

    m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
                       "C implementation of a StaticTuple structure");
    if (m == NULL)
        return;

    Py_INCREF(&StaticTuple_Type);
    PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
    if (import_bzrlib___simple_set_pyx() == -1
        && _workaround_pyrex_096() == -1)
    {
        return;
    }
    setup_interned_tuples(m);
    setup_empty_tuple(m);
    setup_c_api(m);
}