/* Copyright (C) 2009 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <Python.h>
#include <assert.h>

/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include "_static_tuple_c.h"
#include "_export_c_api.h"

#include "python-compat.h"

#if defined(__GNUC__)
#   define inline __inline__
#elif defined(_MSC_VER)
#   define inline __inline
#else
#   define inline
#endif

/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;

static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}

static PyObject *
StaticTuple_as_tuple(StaticTuple *self)
{
    PyObject *tpl = NULL, *obj = NULL;
    Py_ssize_t i, len;

    len = self->size;
    tpl = PyTuple_New(len);
    if (tpl == NULL) {
        /* Malloc failure */
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = (PyObject *)self->items[i];
        /* PyTuple_SET_ITEM steals a reference, so take one first */
        Py_INCREF(obj);
        PyTuple_SET_ITEM(tpl, i, obj);
    }
    return tpl;
}

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";

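/* Illustrative sketch (not part of the original source): given a populated
 * StaticTuple 'st', as_tuple() hands back a new reference to a plain tuple
 * that the caller owns:
 *
 *     PyObject *t = StaticTuple_as_tuple(st);
 *     if (t == NULL)
 *         return NULL;  // propagate the allocation failure
 *     // ... use t anywhere a real PyTupleObject is required ...
 *     Py_DECREF(t);
 */
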
static StaticTuple *
StaticTuple_Intern(StaticTuple *self)
{
    PyObject *unique_key = NULL;

    if (_interned_tuples == NULL) {
        // No intern dict yet, just hand back self
        Py_INCREF(self);
        return self;
    }
    if (_StaticTuple_is_interned(self)) {
        // Already interned, self is already the canonical object
        Py_INCREF(self);
        return self;
    }
    unique_key = PyDict_GetItem((PyObject *)_interned_tuples, (PyObject *)self);
    if (unique_key != NULL) {
        // An entry already existed, return it, instead of self
        Py_INCREF(unique_key);
        return (StaticTuple *)unique_key;
    }
    // An entry did not exist, make 'self' the unique item
    if (PyDict_SetItem(_interned_tuples, (PyObject *)self, (PyObject *)self) < 0) {
        // Suppress the error and return self unmodified
        PyErr_Clear();
        Py_INCREF(self);
        return self;
    }
    // self was added to the dict, return it (a new reference for the caller).
    Py_INCREF(self);
    self->flags |= STATIC_TUPLE_INTERNED_FLAG;
    // The two references in the dict do not count, so that the StaticTuple object
    // does not become immortal just because it was interned.
    Py_REFCNT(self) -= 2;
    return self;
}

static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
    "Return a 'canonical' StaticTuple object.\n"
    "Similar to intern() for strings, this makes sure there\n"
    "is only one StaticTuple object for a given value.\n"
    "For example:\n"
    " key = StaticTuple('foo', 'bar').intern()\n";

static void
StaticTuple_dealloc(StaticTuple *self)
{
    Py_ssize_t i, len = self->size;

    if (_StaticTuple_is_interned(self)) {
        /* revive dead object temporarily for DelItem */
        Py_REFCNT(self) = 2;
        if (PyDict_DelItem(_interned_tuples, (PyObject *)self) != 0) {
            Py_FatalError("deletion of interned StaticTuple failed");
        }
    }
    for (i = 0; i < len; ++i) {
        Py_XDECREF(self->items[i]);
    }
    Py_TYPE(self)->tp_free((PyObject *)self);
}

/* Similar to PyTuple_New() */
static StaticTuple *
StaticTuple_New(Py_ssize_t size)
{
    StaticTuple *stuple;

    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (size == 0 && _empty_tuple != NULL) {
        Py_INCREF(_empty_tuple);
        return _empty_tuple;
    }
    /* Note that we use PyObject_NewVar because we want to allocate a variable
     * width entry. However we *aren't* truly a PyVarObject because we don't
     * use a long for ob_size. Instead we use a plain 'size' that is an int,
     * and will be overloaded with flags in the future.
     * As such we do the alloc, and then have to clean up anything it does
     * incorrectly.
     */
    stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
    if (stuple == NULL) {
        return NULL;
    }
    stuple->size = size;
    stuple->flags = 0;
    stuple->_unused0 = 0;
    stuple->_unused1 = 0;
    memset(stuple->items, 0, sizeof(PyObject *) * size);
#if STATIC_TUPLE_HAS_HASH
    stuple->hash = -1;
#endif
    return stuple;
}

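/* Illustrative sketch (not part of the original source): C-API construction
 * via StaticTuple_New(). New() zeroes the item slots, so the caller fills
 * them with owned references before sharing the object (error handling for
 * the PyString_FromString calls is elided here):
 *
 *     StaticTuple *st = StaticTuple_New(2);
 *     if (st == NULL)
 *         return NULL;
 *     st->items[0] = PyString_FromString("foo");  // new refs, st owns them
 *     st->items[1] = PyString_FromString("bar");
 *     st->flags |= STATIC_TUPLE_ALL_STRING;       // safe: both items are str
 */
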
static PyObject *
StaticTuple_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    StaticTuple *self = NULL;
    PyObject *obj = NULL;
    Py_ssize_t i, len = 0;
    int is_all_str = 1;

    if (type != &StaticTuple_Type) {
        PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
        return NULL;
    }
    if (!PyTuple_CheckExact(args)) {
        PyErr_SetString(PyExc_TypeError, "args must be a tuple");
        return NULL;
    }
    len = PyTuple_GET_SIZE(args);
    if (len < 0 || len > 255) {
        /* Too big or too small */
        PyErr_SetString(PyExc_ValueError, "StaticTuple.__init__(...)"
            " takes from 0 to 255 key bits");
        return NULL;
    }
    self = (StaticTuple *)StaticTuple_New(len);
    if (self == NULL) {
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = PyTuple_GET_ITEM(args, i);
        if (!PyString_CheckExact(obj)) {
            is_all_str = 0;
            if (!StaticTuple_CheckExact(obj)) {
                PyErr_SetString(PyExc_TypeError, "StaticTuple.__init__(...)"
                    " requires that all key bits are strings or StaticTuple.");
                /* TODO: What is the proper way to dealloc ? */
                type->tp_dealloc((PyObject *)self);
                return NULL;
            }
        }
        /* PyTuple_GET_ITEM returns a borrowed reference; we hold our own */
        Py_INCREF(obj);
        self->items[i] = obj;
    }
    if (is_all_str) {
        self->flags |= STATIC_TUPLE_ALL_STRING;
    }
    return (PyObject *)self;
}

static PyObject *
StaticTuple_repr(StaticTuple *self)
{
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    result = PyObject_Repr(as_tuple);
    Py_DECREF(as_tuple);
    return result;
}

static long
StaticTuple_hash(StaticTuple *self)
{
    /* adapted from tuplehash(); is the specific hash value considered
     * 'stable'?
     */
    register long x, y;
    Py_ssize_t len = self->size;
    PyObject **p;
    long mult = 1000003L;

#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
        return self->hash;
    }
#endif
    x = 0x345678L;
    p = self->items;
    if (self->flags & STATIC_TUPLE_ALL_STRING
        && self->flags & STATIC_TUPLE_DID_HASH) {
        /* If we know that we only reference strings, and we've already
         * computed the hash one time before, then we know all the strings will
         * have valid hash entries, and we can just compute, no branching
         * logic.
         */
        while (--len >= 0) {
            y = ((PyStringObject*)(*p))->ob_shash;
            p++;
            x = (x ^ y) * mult;
            /* the cast might truncate len; that doesn't change hash stability */
            mult += (long)(82520L + len + len);
        }
    } else {
        while (--len >= 0) {
            y = PyObject_Hash(*p++);
            if (y == -1) /* failure */
                return -1;
            x = (x ^ y) * mult;
            /* the cast might truncate len; that doesn't change hash stability */
            mult += (long)(82520L + len + len);
        }
    }
    x += 97531L;
    if (x == -1)
        x = -2;
#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1 && self->hash != x) {
        fprintf(stderr, "hash changed: %ld => %ld\n", self->hash, x);
    }
    self->hash = x;
#endif
    self->flags |= STATIC_TUPLE_DID_HASH;
    return x;
}

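/* Worked example (illustrative, not part of the original source): for a
 * 2-item tuple with item hashes y0 and y1, the loop above computes
 *
 *     x = 0x345678
 *     x = (x ^ y0) * 1000003            // then mult += 82520 + 2*1
 *     x = (x ^ y1) * 1082525            // then mult += 82520 + 2*0
 *     x += 97531; if (x == -1) x = -2;  // -1 is reserved for "error"
 *
 * This matches CPython 2's tuplehash(), so a StaticTuple and its as_tuple()
 * equivalent hash identically and can be mixed as dict keys.
 */
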
static PyObject *
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
{
    PyObject *vt;
    PyObject *result = NULL;

    vt = StaticTuple_as_tuple((StaticTuple *)v);
    if (vt == NULL) {
        goto done;
    }
    if (!PyTuple_Check(wt)) {
        PyErr_BadInternalCall();
        goto done;
    }
    /* Now we have 2 tuples to compare, do it */
    result = PyTuple_Type.tp_richcompare(vt, wt, op);
done:
    Py_XDECREF(vt);
    return result;
}

static PyObject *
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
{
    StaticTuple *vk, *wk;
    Py_ssize_t vlen, wlen, min_len, i;
    PyObject *v_obj, *w_obj;
    richcmpfunc string_richcompare;

    if (!StaticTuple_CheckExact(v)) {
        /* This has never triggered, according to python-dev it seems this
         * might trigger if '__op__' is defined but '__rop__' is not, sort of
         * case. Such as "None == StaticTuple()"
         */
        fprintf(stderr, "self is not StaticTuple\n");
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    vk = (StaticTuple *)v;
    if (StaticTuple_CheckExact(w)) {
        /* The most common case */
        wk = (StaticTuple*)w;
    } else if (PyTuple_Check(w)) {
        /* One of v or w is a tuple, so we go the 'slow' route and cast up to
         * tuples to compare.
         */
        /* TODO: This seems to be triggering more than I thought it would...
         * We probably want to optimize comparing self to other when
         * other is a tuple.
         */
        return StaticTuple_richcompare_to_tuple(vk, w, op);
    } else if (w == Py_None) {
        // None is always less than the object
        switch (op) {
        case Py_NE:case Py_GT:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_EQ:case Py_LT:case Py_LE:
            Py_INCREF(Py_False);
            return Py_False;
        default: // Should never happen
            Py_INCREF(Py_NotImplemented);
            return Py_NotImplemented;
        }
    } else {
        /* We don't special case this comparison, we just let python handle
         * it.
         */
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    /* Now we know that we have 2 StaticTuple objects, so let's compare them.
     * This code is somewhat borrowed from tuplerichcompare, except we know our
     * objects are limited in scope, so we can inline some comparisons.
     */
    if (v == w) {
        /* Identical pointers, we can shortcut this easily. */
        switch (op) {
        case Py_EQ:case Py_LE:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_NE:case Py_LT:case Py_GT:
            Py_INCREF(Py_False);
            return Py_False;
        }
    }
    /* TODO: if STATIC_TUPLE_INTERNED_FLAG is set on both objects and they are
     * not the same pointer, then we know they aren't the same object
     * without having to do sub-by-sub comparison.
     */

    /* It will be rare that we compare tuples of different lengths, so we don't
     * start by optimizing the length comparision, same as the tuple code
     * TODO: Interning may change this, because we'll be comparing lots of
     *       different StaticTuple objects in the intern dict
     */
    vlen = vk->size;
    wlen = wk->size;
    min_len = (vlen < wlen) ? vlen : wlen;
    string_richcompare = PyString_Type.tp_richcompare;
    for (i = 0; i < min_len; i++) {
        PyObject *result = NULL;
        v_obj = StaticTuple_GET_ITEM(vk, i);
        w_obj = StaticTuple_GET_ITEM(wk, i);
        if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
            result = string_richcompare(v_obj, w_obj, Py_EQ);
        } else if (StaticTuple_CheckExact(v_obj) &&
                   StaticTuple_CheckExact(w_obj))
        {
            /* Both are StaticTuple types, so recurse */
            result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
        } else {
            /* Not the same type, obviously they won't compare equal */
            break;
        }
        if (result == NULL) {
            return NULL; /* There seems to be an error */
        }
        if (result == Py_NotImplemented) {
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        if (result == Py_False) {
            /* This entry is not identical; shortcut for Py_EQ */
            if (op == Py_EQ) {
                return result;
            }
            Py_DECREF(result);
            break;
        }
        if (result != Py_True) {
            /* We don't know *what* richcompare is returning, but it
             * isn't something we recognize
             */
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        Py_DECREF(result);
    }
    if (i >= vlen || i >= wlen) {
        /* We walked off one of the lists, but everything compared equal so
         * far. Just compare the size.
         */
        int cmp;
        PyObject *res;

        switch (op) {
        case Py_LT: cmp = vlen < wlen; break;
        case Py_LE: cmp = vlen <= wlen; break;
        case Py_EQ: cmp = vlen == wlen; break;
        case Py_NE: cmp = vlen != wlen; break;
        case Py_GT: cmp = vlen > wlen; break;
        case Py_GE: cmp = vlen >= wlen; break;
        default: return NULL; /* cannot happen */
        }
        res = cmp ? Py_True : Py_False;
        Py_INCREF(res);
        return res;
    }
    /* The last item differs, shortcut the Py_NE case */
    if (op == Py_NE) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    /* It is some other comparison, go ahead and do the real check. */
    if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
        return string_richcompare(v_obj, w_obj, op);
    } else if (StaticTuple_CheckExact(v_obj) &&
               StaticTuple_CheckExact(w_obj))
    {
        /* Both are StaticTuple types, so recurse */
        return StaticTuple_richcompare(v_obj, w_obj, op);
    } else {
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
}

static Py_ssize_t
StaticTuple_length(StaticTuple *self)
{
    return self->size;
}

static PyObject *
StaticTuple__is_interned(StaticTuple *self)
{
    if (_StaticTuple_is_interned(self)) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    Py_INCREF(Py_False);
    return Py_False;
}

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
    "Check to see if this key has been interned.\n";

static PyObject *
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
{
    PyObject *obj;

    if (offset < 0 || offset >= self->size) {
        PyErr_SetString(PyExc_IndexError, "StaticTuple index out of range");
        return NULL;
    }
    obj = (PyObject *)self->items[offset];
    Py_INCREF(obj);
    return obj;
}

static PyObject *
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
{
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
    Py_DECREF(as_tuple);
    return result;
}

static int
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
{
    Py_ssize_t i;

    for (i = self->size; --i >= 0;) {
        Py_VISIT(self->items[i]);
    }
    return 0;
}

static char StaticTuple_doc[] =
    "C implementation of a StaticTuple structure."
    "\n This is used as StaticTuple(key_bit_1, key_bit_2, key_bit_3, ...)"
    "\n This is similar to tuple, just less flexible in what it"
    "\n supports, but with lighter memory consumption.";

static PyMethodDef StaticTuple_methods[] = {
    {"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
    {"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
    {"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
     StaticTuple__is_interned_doc},
    {NULL, NULL} /* sentinel */
};

static PySequenceMethods StaticTuple_as_sequence = {
    (lenfunc)StaticTuple_length,            /* sq_length */
    0,                                      /* sq_concat */
    0,                                      /* sq_repeat */
    (ssizeargfunc)StaticTuple_item,         /* sq_item */
    (ssizessizeargfunc)StaticTuple_slice,   /* sq_slice */
    0,                                      /* sq_ass_item */
    0,                                      /* sq_ass_slice */
    0,                                      /* sq_contains */
};

PyTypeObject StaticTuple_Type = {
    PyObject_HEAD_INIT(NULL)
    0,                                      /* ob_size */
    "StaticTuple",                          /* tp_name */
    sizeof(StaticTuple),                    /* tp_basicsize */
    sizeof(PyObject *),                     /* tp_itemsize */
    (destructor)StaticTuple_dealloc,        /* tp_dealloc */
    0,                                      /* tp_print */
    0,                                      /* tp_getattr */
    0,                                      /* tp_setattr */
    0,                                      /* tp_compare */
    (reprfunc)StaticTuple_repr,             /* tp_repr */
    0,                                      /* tp_as_number */
    &StaticTuple_as_sequence,               /* tp_as_sequence */
    0,                                      /* tp_as_mapping */
    (hashfunc)StaticTuple_hash,             /* tp_hash */
    0,                                      /* tp_call */
    0,                                      /* tp_str */
    PyObject_GenericGetAttr,                /* tp_getattro */
    0,                                      /* tp_setattro */
    0,                                      /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,                     /* tp_flags */
    StaticTuple_doc,                        /* tp_doc */
    /* gc.get_referents checks the IS_GC flag before it calls tp_traverse
     * And we don't include this object in the garbage collector because we
     * know it doesn't create cycles. However, 'meliae' will follow
     * tp_traverse, even if the object isn't GC, and we want that.
     */
    (traverseproc)StaticTuple_traverse,     /* tp_traverse */
    0,                                      /* tp_clear */
    // richcompare handles comparison to a plain tuple as well as to another
    // StaticTuple object.
    StaticTuple_richcompare,                /* tp_richcompare */
    0,                                      /* tp_weaklistoffset */
    // We could implement this as returning tuples of keys...
    0,                                      /* tp_iter */
    0,                                      /* tp_iternext */
    StaticTuple_methods,                    /* tp_methods */
    0,                                      /* tp_members */
    0,                                      /* tp_getset */
    0,                                      /* tp_base */
    0,                                      /* tp_dict */
    0,                                      /* tp_descr_get */
    0,                                      /* tp_descr_set */
    0,                                      /* tp_dictoffset */
    0,                                      /* tp_init */
    0,                                      /* tp_alloc */
    StaticTuple_new,                        /* tp_new */
};

static char KeyIntern_doc[] = "";

static PyMethodDef KeyIntern_methods[] = {
    // {"as_tuple", (PyCFunction)Keys_as_tuple, METH_NOARGS, Keys_as_tuple_doc},
    {NULL, NULL} /* sentinel */
};

// static PySequenceMethods KeyIntern_as_sequence = {
//     0, //(lenfunc)Keys_length,        /* sq_length */
//     0,                                /* sq_concat */
//     0,                                /* sq_repeat */
//     0, //(ssizeargfunc)Keys_item,     /* sq_item */
//     0,                                /* sq_slice */
//     0,                                /* sq_ass_item */
//     0,                                /* sq_ass_slice */
//     0,                                /* sq_contains */
// };
//
// static PyTypeObject KeyIntern_Type = {
//     PyObject_HEAD_INIT(NULL)
//     0,                                /* ob_size */
//     "KeyIntern",                      /* tp_name */
//     sizeof(KeyIntern) - sizeof(Key *),/* tp_basicsize */
//     sizeof(Key *),                    /* tp_itemsize */
//     0, //(destructor)Keys_dealloc,    /* tp_dealloc */
//     0,                                /* tp_print */
//     0,                                /* tp_getattr */
//     0,                                /* tp_setattr */
//     0,                                /* tp_compare */
//     // TODO: implement repr() and possibly str()
//     0, //(reprfunc)Keys_repr,         /* tp_repr */
//     0,                                /* tp_as_number */
//     &KeyIntern_as_sequence,           /* tp_as_sequence */
//     0,                                /* tp_as_mapping */
//     0, //(hashfunc)Keys_hash,         /* tp_hash */
//     0,                                /* tp_call */
//     0,                                /* tp_str */
//     PyObject_GenericGetAttr,          /* tp_getattro */
//     0,                                /* tp_setattro */
//     0,                                /* tp_as_buffer */
//     Py_TPFLAGS_DEFAULT,               /* tp_flags */
//     0, // Keys_doc,                   /* tp_doc */
//     /* See Key_traverse for why we have this, even though we aren't GC */
//     0, //(traverseproc)Keys_traverse, /* tp_traverse */
//     0,                                /* tp_clear */
//     // TODO: implement richcompare, we should probably be able to compare
//     // vs a tuple, as well as versus another Keys object.
//     0, //Keys_richcompare,            /* tp_richcompare */
//     0,                                /* tp_weaklistoffset */
//     // We could implement this as returning tuples of keys...
//     0,                                /* tp_iter */
//     0,                                /* tp_iternext */
//     KeyIntern_methods,                /* tp_methods */
//     0,                                /* tp_members */
//     0,                                /* tp_getset */
//     0,                                /* tp_base */
//     0,                                /* tp_dict */
//     0,                                /* tp_descr_get */
//     0,                                /* tp_descr_set */
//     0,                                /* tp_dictoffset */
//     0,                                /* tp_init */
//     0,                                /* tp_alloc */
//     0, //Keys_new,                    /* tp_new */
// };

static PyMethodDef static_tuple_c_methods[] = {
    // {"unique_lcs_c", py_unique_lcs, METH_VARARGS},
    // {"recurse_matches_c", py_recurse_matches, METH_VARARGS},
    {NULL, NULL} /* sentinel */
};

static void
setup_interned_tuples(PyObject *m)
{
    _interned_tuples = PyDict_New();
    if (_interned_tuples != NULL) {
        // Hold our own reference; the module owns the other one
        Py_INCREF(_interned_tuples);
        PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
    }
}

static void
setup_empty_tuple(PyObject *m)
{
    StaticTuple *stuple;

    if (_interned_tuples == NULL) {
        fprintf(stderr, "You need to call setup_interned_tuples() before"
                " setup_empty_tuple, because we intern it.\n");
        return;
    }
    // We need to create the empty tuple
    stuple = (StaticTuple *)StaticTuple_New(0);
    stuple->flags = STATIC_TUPLE_ALL_STRING;
    _empty_tuple = StaticTuple_Intern(stuple);
    assert(_empty_tuple == stuple);
    // At this point, refcnt is 2: 1 from New(), and 1 from the return from
    // intern(). We will keep 1 for the _empty_tuple global, and use the other
    // for the module reference.
    PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
}

static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}

static void
setup_c_api(PyObject *m)
{
    _export_function(m, "StaticTuple_New", StaticTuple_New,
        "StaticTuple *(Py_ssize_t)");
    _export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
        "StaticTuple *(StaticTuple *)");
    _export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
        "int(PyObject *)");
}

PyMODINIT_FUNC
init_static_tuple_c(void)
{
    PyObject *m;

    if (PyType_Ready(&StaticTuple_Type) < 0)
        return;
    //if (PyType_Ready(&KeyIntern_Type) < 0)
    //    return;

    m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
                       "C implementation of a StaticTuple structure");
    if (m == NULL)
        return;

    Py_INCREF(&StaticTuple_Type);
    PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
    setup_interned_tuples(m);
    setup_empty_tuple(m);
    setup_c_api(m);
}