# Copyright (C) 2007-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for indices."""
27
class TestGraphIndexBuilder(tests.TestCaseWithMemoryTransport):
    """Tests for GraphIndexBuilder: serialising nodes into index format.

    Each test builds an index in memory and checks the exact byte output of
    ``finish()``, or that invalid keys/values/reference-lists are rejected.
    """

    def test_build_index_empty(self):
        builder = index.GraphIndexBuilder()
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_empty_two_element_keys(self):
        builder = index.GraphIndexBuilder(key_elements=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=0\n\n",
            contents)

    def test_build_index_one_reference_list_empty(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_two_reference_list_empty(self):
        builder = index.GraphIndexBuilder(reference_lists=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_one_node_no_refs(self):
        builder = index.GraphIndexBuilder()
        builder.add_node(('akey', ), 'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            "akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_no_refs_accepts_empty_reflist(self):
        builder = index.GraphIndexBuilder()
        builder.add_node(('akey', ), 'data', ())
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            "akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_2_element_keys(self):
        # multipart keys are separated by \x00 - because they are fixed length,
        # not variable this does not cause any issues, and seems clearer to the
        # author.
        builder = index.GraphIndexBuilder(key_elements=2)
        builder.add_node(('akey', 'secondpart'), 'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=1\n"
            "akey\x00secondpart\x00\x00\x00data\n\n", contents)

    def test_add_node_empty_value(self):
        builder = index.GraphIndexBuilder()
        builder.add_node(('akey', ), '')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            "akey\x00\x00\x00\n\n", contents)

    def test_build_index_nodes_sorted(self):
        # the highest sorted node comes first.
        builder = index.GraphIndexBuilder()
        # use three to have a good chance of glitching dictionary hash
        # lookups etc. Insert in randomish order that is not correct
        # and not the reverse of the correct order.
        builder.add_node(('2002', ), 'data')
        builder.add_node(('2000', ), 'data')
        builder.add_node(('2001', ), 'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=3\n"
            "2000\x00\x00\x00data\n"
            "2001\x00\x00\x00data\n"
            "2002\x00\x00\x00data\n"
            "\n", contents)

    def test_build_index_2_element_key_nodes_sorted(self):
        # multiple element keys are sorted first-key, second-key.
        builder = index.GraphIndexBuilder(key_elements=2)
        # use three values of each key element, to have a good chance of
        # glitching dictionary hash lookups etc. Insert in randomish order that
        # is not correct and not the reverse of the correct order.
        builder.add_node(('2002', '2002'), 'data')
        builder.add_node(('2002', '2000'), 'data')
        builder.add_node(('2002', '2001'), 'data')
        builder.add_node(('2000', '2002'), 'data')
        builder.add_node(('2000', '2000'), 'data')
        builder.add_node(('2000', '2001'), 'data')
        builder.add_node(('2001', '2002'), 'data')
        builder.add_node(('2001', '2000'), 'data')
        builder.add_node(('2001', '2001'), 'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=9\n"
            "2000\x002000\x00\x00\x00data\n"
            "2000\x002001\x00\x00\x00data\n"
            "2000\x002002\x00\x00\x00data\n"
            "2001\x002000\x00\x00\x00data\n"
            "2001\x002001\x00\x00\x00data\n"
            "2001\x002002\x00\x00\x00data\n"
            "2002\x002000\x00\x00\x00data\n"
            "2002\x002001\x00\x00\x00data\n"
            "2002\x002002\x00\x00\x00data\n"
            "\n", contents)

    def test_build_index_reference_lists_are_included_one(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        builder.add_node(('key', ), 'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            "key\x00\x00\x00data\n"
            "\n", contents)

    def test_build_index_reference_lists_with_2_element_keys(self):
        builder = index.GraphIndexBuilder(reference_lists=1, key_elements=2)
        builder.add_node(('key', 'key2'), 'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=2\nlen=1\n"
            "key\x00key2\x00\x00\x00data\n"
            "\n", contents)

    def test_build_index_reference_lists_are_included_two(self):
        builder = index.GraphIndexBuilder(reference_lists=2)
        builder.add_node(('key', ), 'data', ([], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            "key\x00\x00\t\x00data\n"
            "\n", contents)

    def test_clear_cache(self):
        builder = index.GraphIndexBuilder(reference_lists=2)
        # This is a no-op, but the api should exist
        builder.clear_cache()

    def test_node_references_are_byte_offsets(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        builder.add_node(('reference', ), 'data', ([], ))
        builder.add_node(('key', ), 'data', ([('reference', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=2\n"
            "key\x00\x0072\x00data\n"
            "reference\x00\x00\x00data\n"
            "\n", contents)

    def test_node_references_are_cr_delimited(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        builder.add_node(('reference', ), 'data', ([], ))
        builder.add_node(('reference2', ), 'data', ([], ))
        builder.add_node(('key', ), 'data',
                         ([('reference', ), ('reference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=3\n"
            "key\x00\x00077\r094\x00data\n"
            "reference\x00\x00\x00data\n"
            "reference2\x00\x00\x00data\n"
            "\n", contents)

    def test_multiple_reference_lists_are_tab_delimited(self):
        builder = index.GraphIndexBuilder(reference_lists=2)
        builder.add_node(('keference', ), 'data', ([], []))
        builder.add_node(('rey', ), 'data',
                         ([('keference', )], [('keference', )]))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=2\n"
            "keference\x00\x00\t\x00data\n"
            "rey\x00\x0059\t59\x00data\n"
            "\n", contents)

    def test_add_node_referencing_missing_key_makes_absent(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        builder.add_node(('rey', ), 'data',
                         ([('beference', ), ('aeference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            "aeference2\x00a\x00\x00\n"
            "beference\x00a\x00\x00\n"
            "rey\x00\x00074\r059\x00data\n"
            "\n", contents)

    def test_node_references_three_digits(self):
        # test the node digit expands as needed.
        builder = index.GraphIndexBuilder(reference_lists=1)
        references = [(str(val), ) for val in reversed(range(9))]
        builder.add_node(('2-key', ), '', (references, ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqualDiff(
            "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            "0\x00a\x00\x00\n"
            "1\x00a\x00\x00\n"
            "2\x00a\x00\x00\n"
            "2-key\x00\x00151\r145\r139\r133\r127\r121\r071\r065\r059\x00\n"
            "3\x00a\x00\x00\n"
            "4\x00a\x00\x00\n"
            "5\x00a\x00\x00\n"
            "6\x00a\x00\x00\n"
            "7\x00a\x00\x00\n"
            "8\x00a\x00\x00\n"
            "\n", contents)

    def test_absent_has_no_reference_overhead(self):
        # the offsets after an absent record should be correct when there are
        # >1 reference lists.
        builder = index.GraphIndexBuilder(reference_lists=2)
        builder.add_node(('parent', ), '', ([('aail', ), ('zther', )], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            "aail\x00a\x00\x00\n"
            "parent\x00\x0059\r84\t\x00\n"
            "zther\x00a\x00\x00\n"
            "\n", contents)

    def test_add_node_bad_key(self):
        builder = index.GraphIndexBuilder()
        for bad_char in '\t\n\x0b\x0c\r\x00 ':
            self.assertRaises(errors.BadIndexKey, builder.add_node,
                ('a%skey' % bad_char, ), 'data')
        self.assertRaises(errors.BadIndexKey, builder.add_node,
                ('', ), 'data')
        self.assertRaises(errors.BadIndexKey, builder.add_node,
                'not-a-tuple', 'data')
        # not enough length
        self.assertRaises(errors.BadIndexKey, builder.add_node,
                (), 'data')
        # too long
        self.assertRaises(errors.BadIndexKey, builder.add_node,
                ('primary', 'secondary'), 'data')
        # secondary key elements get checked too:
        builder = index.GraphIndexBuilder(key_elements=2)
        for bad_char in '\t\n\x0b\x0c\r\x00 ':
            self.assertRaises(errors.BadIndexKey, builder.add_node,
                ('prefix', 'a%skey' % bad_char), 'data')

    def test_add_node_bad_data(self):
        builder = index.GraphIndexBuilder()
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data\naa')
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data\x00aa')

    def test_add_node_bad_mismatched_ref_lists_length(self):
        builder = index.GraphIndexBuilder()
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa', ([], ))
        builder = index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa')
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa', (), )
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa', ([], []))
        builder = index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa')
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa', ([], ))
        self.assertRaises(errors.BadIndexValue, builder.add_node, ('akey', ),
            'data aa', ([], [], []))

    def test_add_node_bad_key_in_reference_lists(self):
        # first list, first key - trivial
        builder = index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', ([('a key', )], ))
        # references keys must be tuples too
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', (['not-a-tuple'], ))
        # not enough length
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', ([()], ))
        # too long
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', ([('primary', 'secondary')], ))
        # need to check more than the first key in the list
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', ([('agoodkey', ), ('that is a bad key', )], ))
        # and if there is more than one list it should be getting checked
        # too
        builder = index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(errors.BadIndexKey, builder.add_node, ('akey', ),
            'data aa', ([], ['a bad key']))

    def test_add_duplicate_key(self):
        builder = index.GraphIndexBuilder()
        builder.add_node(('key', ), 'data')
        self.assertRaises(errors.BadIndexDuplicateKey,
                          builder.add_node, ('key', ), 'data')

    def test_add_duplicate_key_2_elements(self):
        builder = index.GraphIndexBuilder(key_elements=2)
        builder.add_node(('key', 'key'), 'data')
        self.assertRaises(errors.BadIndexDuplicateKey, builder.add_node,
                          ('key', 'key'), 'data')

    def test_add_key_after_referencing_key(self):
        builder = index.GraphIndexBuilder(reference_lists=1)
        builder.add_node(('key', ), 'data', ([('reference', )], ))
        builder.add_node(('reference', ), 'data', ([],))

    def test_add_key_after_referencing_key_2_elements(self):
        builder = index.GraphIndexBuilder(reference_lists=1, key_elements=2)
        builder.add_node(('k', 'ey'), 'data', ([('reference', 'tokey')], ))
        builder.add_node(('reference', 'tokey'), 'data', ([],))

    def test_set_optimize(self):
        builder = index.GraphIndexBuilder(reference_lists=1, key_elements=2)
        builder.set_optimize(for_size=True)
        self.assertTrue(builder._optimize_for_size)
        builder.set_optimize(for_size=False)
        self.assertFalse(builder._optimize_for_size)
371
class TestGraphIndex(tests.TestCaseWithMemoryTransport):
373
def make_key(self, number):
374
return (str(number) + 'X'*100,)
376
def make_value(self, number):
377
return str(number) + 'Y'*100
379
def make_nodes(self, count=64):
380
# generate a big enough index that we only read some of it on a typical
383
for counter in range(count):
384
nodes.append((self.make_key(counter), self.make_value(counter), ()))
387
def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
388
builder = index.GraphIndexBuilder(ref_lists, key_elements=key_elements)
389
for key, value, references in nodes:
390
builder.add_node(key, value, references)
391
stream = builder.finish()
392
trans = transport.get_transport_from_url('trace+' + self.get_url())
393
size = trans.put_file('index', stream)
394
return index.GraphIndex(trans, 'index', size)
396
def make_index_with_offset(self, ref_lists=0, key_elements=1, nodes=[],
398
builder = index.GraphIndexBuilder(ref_lists, key_elements=key_elements)
399
for key, value, references in nodes:
400
builder.add_node(key, value, references)
401
content = builder.finish().read()
403
trans = self.get_transport()
404
trans.put_bytes('index', (' '*offset) + content)
405
return index.GraphIndex(trans, 'index', size, offset=offset)
407
def test_clear_cache(self):
408
index = self.make_index()
409
# For now, we just want to make sure the api is available. As this is
410
# old code, we don't really worry if it *does* anything.
413
def test_open_bad_index_no_error(self):
414
trans = self.get_transport()
415
trans.put_bytes('name', "not an index\n")
416
idx = index.GraphIndex(trans, 'name', 13)
418
def test_with_offset(self):
419
nodes = self.make_nodes(200)
420
idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
421
self.assertEqual(200, idx.key_count())
423
def test_buffer_all_with_offset(self):
424
nodes = self.make_nodes(200)
425
idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
427
self.assertEqual(200, idx.key_count())
429
def test_side_effect_buffering_with_offset(self):
430
nodes = self.make_nodes(20)
431
index = self.make_index_with_offset(offset=1234567, nodes=nodes)
432
index._transport.recommended_page_size = lambda:64*1024
433
subset_nodes = [nodes[0][0], nodes[10][0], nodes[19][0]]
434
entries = [n[1] for n in index.iter_entries(subset_nodes)]
435
self.assertEqual(sorted(subset_nodes), sorted(entries))
436
self.assertEqual(20, index.key_count())
438
def test_open_sets_parsed_map_empty(self):
439
index = self.make_index()
440
self.assertEqual([], index._parsed_byte_map)
441
self.assertEqual([], index._parsed_key_map)
443
def test_key_count_buffers(self):
444
index = self.make_index(nodes=self.make_nodes(2))
445
# reset the transport log
446
del index._transport._activity[:]
447
self.assertEqual(2, index.key_count())
448
# We should have requested reading the header bytes
450
('readv', 'index', [(0, 200)], True, index._size),
452
index._transport._activity)
453
# And that should have been enough to trigger reading the whole index
455
self.assertIsNot(None, index._nodes)
457
def test_lookup_key_via_location_buffers(self):
458
index = self.make_index()
459
# reset the transport log
460
del index._transport._activity[:]
461
# do a _lookup_keys_via_location call for the middle of the file, which
462
# is what bisection uses.
463
result = index._lookup_keys_via_location(
464
[(index._size // 2, ('missing', ))])
465
# this should have asked for a readv request, with adjust_for_latency,
466
# and two regions: the header, and half-way into the file.
468
('readv', 'index', [(30, 30), (0, 200)], True, 60),
470
index._transport._activity)
471
# and the result should be that the key cannot be present, because this
472
# is a trivial index.
473
self.assertEqual([((index._size // 2, ('missing', )), False)],
475
# And this should have caused the file to be fully buffered
476
self.assertIsNot(None, index._nodes)
477
self.assertEqual([], index._parsed_byte_map)
479
def test_first_lookup_key_via_location(self):
480
# We need enough data so that the _HEADER_READV doesn't consume the
481
# whole file. We always read 800 bytes for every key, and the local
482
# transport natural expansion is 4096 bytes. So we have to have >8192
483
# bytes or we will trigger "buffer_all".
484
# We also want the 'missing' key to fall within the range that *did*
487
index = self.make_index(nodes=self.make_nodes(64))
488
# reset the transport log
489
del index._transport._activity[:]
490
# do a _lookup_keys_via_location call for the middle of the file, which
491
# is what bisection uses.
492
start_lookup = index._size // 2
493
result = index._lookup_keys_via_location(
494
[(start_lookup, ('40missing', ))])
495
# this should have asked for a readv request, with adjust_for_latency,
496
# and two regions: the header, and half-way into the file.
499
[(start_lookup, 800), (0, 200)], True, index._size),
501
index._transport._activity)
502
# and the result should be that the key cannot be present, because this
503
# is a trivial index.
504
self.assertEqual([((start_lookup, ('40missing', )), False)],
506
# And this should not have caused the file to be fully buffered
507
self.assertIs(None, index._nodes)
508
# And the regions of the file that have been parsed should be in the
509
# parsed_byte_map and the parsed_key_map
510
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
511
self.assertEqual([(None, self.make_key(26)),
512
(self.make_key(31), self.make_key(48))],
513
index._parsed_key_map)
515
def test_parsing_non_adjacent_data_trims(self):
516
index = self.make_index(nodes=self.make_nodes(64))
517
result = index._lookup_keys_via_location(
518
[(index._size // 2, ('40', ))])
519
# and the result should be that the key cannot be present, because key is
520
# in the middle of the observed data from a 4K read - the smallest transport
521
# will do today with this api.
522
self.assertEqual([((index._size // 2, ('40', )), False)],
524
# and we should have a parse map that includes the header and the
525
# region that was parsed after trimming.
526
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
527
self.assertEqual([(None, self.make_key(26)),
528
(self.make_key(31), self.make_key(48))],
529
index._parsed_key_map)
531
def test_parsing_data_handles_parsed_contained_regions(self):
532
# the following patten creates a parsed region that is wholly within a
533
# single result from the readv layer:
534
# .... single-read (readv-minimum-size) ...
535
# which then trims the start and end so the parsed size is < readv
537
# then a dual lookup (or a reference lookup for that matter) which
538
# abuts or overlaps the parsed region on both sides will need to
539
# discard the data in the middle, but parse the end as well.
541
# we test this by doing a single lookup to seed the data, then
542
# a lookup for two keys that are present, and adjacent -
543
# we except both to be found, and the parsed byte map to include the
544
# locations of both keys.
545
index = self.make_index(nodes=self.make_nodes(128))
546
result = index._lookup_keys_via_location(
547
[(index._size // 2, ('40', ))])
548
# and we should have a parse map that includes the header and the
549
# region that was parsed after trimming.
550
self.assertEqual([(0, 4045), (11759, 15707)], index._parsed_byte_map)
551
self.assertEqual([(None, self.make_key(116)),
552
(self.make_key(35), self.make_key(51))],
553
index._parsed_key_map)
554
# now ask for two keys, right before and after the parsed region
555
result = index._lookup_keys_via_location(
556
[(11450, self.make_key(34)), (15707, self.make_key(52))])
558
((11450, self.make_key(34)),
559
(index, self.make_key(34), self.make_value(34))),
560
((15707, self.make_key(52)),
561
(index, self.make_key(52), self.make_value(52))),
564
self.assertEqual([(0, 4045), (9889, 17993)], index._parsed_byte_map)
566
def test_lookup_missing_key_answers_without_io_when_map_permits(self):
567
# generate a big enough index that we only read some of it on a typical
569
index = self.make_index(nodes=self.make_nodes(64))
570
# lookup the keys in the middle of the file
571
result =index._lookup_keys_via_location(
572
[(index._size // 2, ('40', ))])
573
# check the parse map, this determines the test validity
574
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
575
self.assertEqual([(None, self.make_key(26)),
576
(self.make_key(31), self.make_key(48))],
577
index._parsed_key_map)
578
# reset the transport log
579
del index._transport._activity[:]
580
# now looking up a key in the portion of the file already parsed should
581
# not create a new transport request, and should return False (cannot
582
# be in the index) - even when the byte location we ask for is outside
584
result = index._lookup_keys_via_location(
586
self.assertEqual([((4000, ('40', )), False)],
588
self.assertEqual([], index._transport._activity)
590
def test_lookup_present_key_answers_without_io_when_map_permits(self):
591
# generate a big enough index that we only read some of it on a typical
593
index = self.make_index(nodes=self.make_nodes(64))
594
# lookup the keys in the middle of the file
595
result =index._lookup_keys_via_location(
596
[(index._size // 2, ('40', ))])
597
# check the parse map, this determines the test validity
598
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
599
self.assertEqual([(None, self.make_key(26)),
600
(self.make_key(31), self.make_key(48))],
601
index._parsed_key_map)
602
# reset the transport log
603
del index._transport._activity[:]
604
# now looking up a key in the portion of the file already parsed should
605
# not create a new transport request, and should return False (cannot
606
# be in the index) - even when the byte location we ask for is outside
609
result = index._lookup_keys_via_location([(4000, self.make_key(40))])
611
[((4000, self.make_key(40)),
612
(index, self.make_key(40), self.make_value(40)))],
614
self.assertEqual([], index._transport._activity)
616
def test_lookup_key_below_probed_area(self):
617
# generate a big enough index that we only read some of it on a typical
619
index = self.make_index(nodes=self.make_nodes(64))
620
# ask for the key in the middle, but a key that is located in the
621
# unparsed region before the middle.
622
result =index._lookup_keys_via_location(
623
[(index._size // 2, ('30', ))])
624
# check the parse map, this determines the test validity
625
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
626
self.assertEqual([(None, self.make_key(26)),
627
(self.make_key(31), self.make_key(48))],
628
index._parsed_key_map)
629
self.assertEqual([((index._size // 2, ('30', )), -1)],
632
def test_lookup_key_above_probed_area(self):
633
# generate a big enough index that we only read some of it on a typical
635
index = self.make_index(nodes=self.make_nodes(64))
636
# ask for the key in the middle, but a key that is located in the
637
# unparsed region after the middle.
638
result =index._lookup_keys_via_location(
639
[(index._size // 2, ('50', ))])
640
# check the parse map, this determines the test validity
641
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
642
self.assertEqual([(None, self.make_key(26)),
643
(self.make_key(31), self.make_key(48))],
644
index._parsed_key_map)
645
self.assertEqual([((index._size // 2, ('50', )), +1)],
648
def test_lookup_key_resolves_references(self):
649
# generate a big enough index that we only read some of it on a typical
652
for counter in range(99):
653
nodes.append((self.make_key(counter), self.make_value(counter),
654
((self.make_key(counter + 20),),) ))
655
index = self.make_index(ref_lists=1, nodes=nodes)
656
# lookup a key in the middle that does not exist, so that when we can
657
# check that the referred-to-keys are not accessed automatically.
658
index_size = index._size
659
index_center = index_size // 2
660
result = index._lookup_keys_via_location(
661
[(index_center, ('40', ))])
662
# check the parse map - only the start and middle should have been
664
self.assertEqual([(0, 4027), (10198, 14028)], index._parsed_byte_map)
665
self.assertEqual([(None, self.make_key(17)),
666
(self.make_key(44), self.make_key(5))],
667
index._parsed_key_map)
668
# and check the transport activity likewise.
670
[('readv', 'index', [(index_center, 800), (0, 200)], True,
672
index._transport._activity)
673
# reset the transport log for testing the reference lookup
674
del index._transport._activity[:]
675
# now looking up a key in the portion of the file already parsed should
676
# only perform IO to resolve its key references.
677
result = index._lookup_keys_via_location([(11000, self.make_key(45))])
679
[((11000, self.make_key(45)),
680
(index, self.make_key(45), self.make_value(45),
681
((self.make_key(65),),)))],
683
self.assertEqual([('readv', 'index', [(15093, 800)], True, index_size)],
684
index._transport._activity)
686
def test_lookup_key_can_buffer_all(self):
688
for counter in range(64):
689
nodes.append((self.make_key(counter), self.make_value(counter),
690
((self.make_key(counter + 20),),) ))
691
index = self.make_index(ref_lists=1, nodes=nodes)
692
# lookup a key in the middle that does not exist, so that when we can
693
# check that the referred-to-keys are not accessed automatically.
694
index_size = index._size
695
index_center = index_size // 2
696
result = index._lookup_keys_via_location([(index_center, ('40', ))])
697
# check the parse map - only the start and middle should have been
699
self.assertEqual([(0, 3890), (6444, 10274)], index._parsed_byte_map)
700
self.assertEqual([(None, self.make_key(25)),
701
(self.make_key(37), self.make_key(52))],
702
index._parsed_key_map)
703
# and check the transport activity likewise.
705
[('readv', 'index', [(index_center, 800), (0, 200)], True,
707
index._transport._activity)
708
# reset the transport log for testing the reference lookup
709
del index._transport._activity[:]
710
# now looking up a key in the portion of the file already parsed should
711
# only perform IO to resolve its key references.
712
result = index._lookup_keys_via_location([(7000, self.make_key(40))])
714
[((7000, self.make_key(40)),
715
(index, self.make_key(40), self.make_value(40),
716
((self.make_key(60),),)))],
718
# Resolving the references would have required more data read, and we
719
# are already above the 50% threshold, so it triggered a _buffer_all
720
self.assertEqual([('get', 'index')], index._transport._activity)
722
def test_iter_all_entries_empty(self):
723
index = self.make_index()
724
self.assertEqual([], list(index.iter_all_entries()))
726
def test_iter_all_entries_simple(self):
727
index = self.make_index(nodes=[(('name', ), 'data', ())])
728
self.assertEqual([(index, ('name', ), 'data')],
729
list(index.iter_all_entries()))
731
def test_iter_all_entries_simple_2_elements(self):
732
index = self.make_index(key_elements=2,
733
nodes=[(('name', 'surname'), 'data', ())])
734
self.assertEqual([(index, ('name', 'surname'), 'data')],
735
list(index.iter_all_entries()))
737
def test_iter_all_entries_references_resolved(self):
738
index = self.make_index(1, nodes=[
739
(('name', ), 'data', ([('ref', )], )),
740
(('ref', ), 'refdata', ([], ))])
741
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),)),
742
(index, ('ref', ), 'refdata', ((), ))]),
743
set(index.iter_all_entries()))
745
def test_iter_entries_buffers_once(self):
746
index = self.make_index(nodes=self.make_nodes(2))
747
# reset the transport log
748
del index._transport._activity[:]
749
self.assertEqual(set([(index, self.make_key(1), self.make_value(1))]),
750
set(index.iter_entries([self.make_key(1)])))
751
# We should have requested reading the header bytes
752
# But not needed any more than that because it would have triggered a
755
('readv', 'index', [(0, 200)], True, index._size),
757
index._transport._activity)
758
# And that should have been enough to trigger reading the whole index
760
self.assertIsNot(None, index._nodes)
762
def test_iter_entries_buffers_by_bytes_read(self):
763
index = self.make_index(nodes=self.make_nodes(64))
764
list(index.iter_entries([self.make_key(10)]))
765
# The first time through isn't enough to trigger a buffer all
766
self.assertIs(None, index._nodes)
767
self.assertEqual(4096, index._bytes_read)
768
# Grabbing a key in that same page won't trigger a buffer all, as we
769
# still haven't read 50% of the file
770
list(index.iter_entries([self.make_key(11)]))
771
self.assertIs(None, index._nodes)
772
self.assertEqual(4096, index._bytes_read)
773
# We haven't read more data, so reading outside the range won't trigger
774
# a buffer all right away
775
list(index.iter_entries([self.make_key(40)]))
776
self.assertIs(None, index._nodes)
777
self.assertEqual(8192, index._bytes_read)
778
# On the next pass, we will not trigger buffer all if the key is
779
# available without reading more
780
list(index.iter_entries([self.make_key(32)]))
781
self.assertIs(None, index._nodes)
782
# But if we *would* need to read more to resolve it, then we will
784
list(index.iter_entries([self.make_key(60)]))
785
self.assertIsNot(None, index._nodes)
787
def test_iter_entries_references_resolved(self):
788
index = self.make_index(1, nodes=[
789
(('name', ), 'data', ([('ref', ), ('ref', )], )),
790
(('ref', ), 'refdata', ([], ))])
791
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),('ref',)),)),
792
(index, ('ref', ), 'refdata', ((), ))]),
793
set(index.iter_entries([('name',), ('ref',)])))
795
def test_iter_entries_references_2_refs_resolved(self):
796
index = self.make_index(2, nodes=[
797
(('name', ), 'data', ([('ref', )], [('ref', )])),
798
(('ref', ), 'refdata', ([], []))])
799
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),), (('ref',),))),
800
(index, ('ref', ), 'refdata', ((), ()))]),
801
set(index.iter_entries([('name',), ('ref',)])))
803
def test_iteration_absent_skipped(self):
804
index = self.make_index(1, nodes=[
805
(('name', ), 'data', ([('ref', )], ))])
806
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),))]),
807
set(index.iter_all_entries()))
808
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),))]),
809
set(index.iter_entries([('name', )])))
810
self.assertEqual([], list(index.iter_entries([('ref', )])))
812
def test_iteration_absent_skipped_2_element_keys(self):
813
index = self.make_index(1, key_elements=2, nodes=[
814
(('name', 'fin'), 'data', ([('ref', 'erence')], ))])
815
self.assertEqual(set([(index, ('name', 'fin'), 'data', ((('ref', 'erence'),),))]),
816
set(index.iter_all_entries()))
817
self.assertEqual(set([(index, ('name', 'fin'), 'data', ((('ref', 'erence'),),))]),
818
set(index.iter_entries([('name', 'fin')])))
819
self.assertEqual([], list(index.iter_entries([('ref', 'erence')])))
821
def test_iter_all_keys(self):
    """Requesting every present key yields every node exactly once."""
    index = self.make_index(1, nodes=[
        (('name', ), 'data', ([('ref', )], )),
        (('ref', ), 'refdata', ([], ))])
    self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),)),
        (index, ('ref', ), 'refdata', ((), ))]),
        set(index.iter_entries([('name', ), ('ref', )])))
829
def test_iter_nothing_empty(self):
    """iter_entries([]) on an empty index yields nothing."""
    index = self.make_index()
    self.assertEqual([], list(index.iter_entries([])))
833
def test_iter_missing_entry_empty(self):
    """Looking up an absent key in an empty index yields nothing."""
    index = self.make_index()
    self.assertEqual([], list(index.iter_entries([('a', )])))
837
def test_iter_missing_entry_empty_no_size(self):
    """Lookups still work when the index was opened without a known size."""
    idx = self.make_index()
    # Re-open the same file with size=None to exercise the unsized path.
    idx = index.GraphIndex(idx._transport, 'index', None)
    self.assertEqual([], list(idx.iter_entries([('a', )])))
842
def test_iter_key_prefix_1_element_key_None(self):
    """A None element in a 1-element prefix key is rejected."""
    index = self.make_index()
    self.assertRaises(errors.BadIndexKey, list,
        index.iter_entries_prefix([(None, )]))
847
def test_iter_key_prefix_wrong_length(self):
    """Prefix keys whose width disagrees with key_elements are rejected."""
    index = self.make_index()
    self.assertRaises(errors.BadIndexKey, list,
        index.iter_entries_prefix([('foo', None)]))
    index = self.make_index(key_elements=2)
    # Too short and too long are both errors for a 2-element index.
    self.assertRaises(errors.BadIndexKey, list,
        index.iter_entries_prefix([('foo', )]))
    self.assertRaises(errors.BadIndexKey, list,
        index.iter_entries_prefix([('foo', None, None)]))
857
def test_iter_key_prefix_1_key_element_no_refs(self):
    """Exact 1-element prefix lookups return the matching nodes."""
    index = self.make_index(nodes=[
        (('name', ), 'data', ()),
        (('ref', ), 'refdata', ())])
    self.assertEqual(set([(index, ('name', ), 'data'),
        (index, ('ref', ), 'refdata')]),
        set(index.iter_entries_prefix([('name', ), ('ref', )])))
865
def test_iter_key_prefix_1_key_element_refs(self):
    """Prefix lookups resolve reference lists like iter_entries does."""
    index = self.make_index(1, nodes=[
        (('name', ), 'data', ([('ref', )], )),
        (('ref', ), 'refdata', ([], ))])
    self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),)),
        (index, ('ref', ), 'refdata', ((), ))]),
        set(index.iter_entries_prefix([('name', ), ('ref', )])))
873
def test_iter_key_prefix_2_key_element_no_refs(self):
    """2-element prefixes: exact keys match one node; None wildcards."""
    index = self.make_index(key_elements=2, nodes=[
        (('name', 'fin1'), 'data', ()),
        (('name', 'fin2'), 'beta', ()),
        (('ref', 'erence'), 'refdata', ())])
    self.assertEqual(set([(index, ('name', 'fin1'), 'data'),
        (index, ('ref', 'erence'), 'refdata')]),
        set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
    # A trailing None matches every key sharing the leading elements.
    self.assertEqual(set([(index, ('name', 'fin1'), 'data'),
        (index, ('name', 'fin2'), 'beta')]),
        set(index.iter_entries_prefix([('name', None)])))
885
def test_iter_key_prefix_2_key_element_refs(self):
    """2-element prefix lookups also resolve reference lists."""
    index = self.make_index(1, key_elements=2, nodes=[
        (('name', 'fin1'), 'data', ([('ref', 'erence')], )),
        (('name', 'fin2'), 'beta', ([], )),
        (('ref', 'erence'), 'refdata', ([], ))])
    self.assertEqual(
        set([(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
             (index, ('ref', 'erence'), 'refdata', ((), ))]),
        set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
    # Wildcard second element matches both 'name' nodes.
    self.assertEqual(
        set([(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
             (index, ('name', 'fin2'), 'beta', ((), ))]),
        set(index.iter_entries_prefix([('name', None)])))
897
def test_key_count_empty(self):
    """key_count() is 0 for an empty index."""
    index = self.make_index()
    self.assertEqual(0, index.key_count())
901
def test_key_count_one(self):
    """key_count() counts a single node."""
    index = self.make_index(nodes=[(('name', ), '', ())])
    self.assertEqual(1, index.key_count())
905
def test_key_count_two(self):
    """key_count() counts multiple nodes."""
    index = self.make_index(nodes=[
        (('name', ), '', ()), (('foo', ), '', ())])
    self.assertEqual(2, index.key_count())
910
def test_read_and_parse_tracks_real_read_value(self):
911
index = self.make_index(nodes=self.make_nodes(10))
912
del index._transport._activity[:]
913
index._read_and_parse([(0, 200)])
915
('readv', 'index', [(0, 200)], True, index._size),
917
index._transport._activity)
918
# The readv expansion code will expand the initial request to 4096
919
# bytes, which is more than enough to read the entire index, and we
920
# will track the fact that we read that many bytes.
921
self.assertEqual(index._size, index._bytes_read)
923
def test_read_and_parse_triggers_buffer_all(self):
    """Reading the whole file via _read_and_parse buffers all nodes."""
    index = self.make_index(key_elements=2, nodes=[
        (('name', 'fin1'), 'data', ()),
        (('name', 'fin2'), 'beta', ()),
        (('ref', 'erence'), 'refdata', ())])
    self.assertTrue(index._size > 0)
    # Nothing is parsed until we ask for data.
    self.assertIs(None, index._nodes)
    index._read_and_parse([(0, index._size)])
    # A full-size read should have populated the in-memory node dict.
    self.assertIsNot(None, index._nodes)
933
def test_validate_bad_index_errors(self):
    """validate() rejects a file without the index format signature."""
    trans = self.get_transport()
    trans.put_bytes('name', "not an index\n")
    idx = index.GraphIndex(trans, 'name', 13)
    self.assertRaises(errors.BadIndexFormatSignature, idx.validate)
939
def test_validate_bad_node_refs(self):
    """validate() reports corrupted header options."""
    idx = self.make_index(2)
    trans = self.get_transport()
    content = trans.get_bytes('index')
    # Corrupt the options line so it ends with 'a' rather than a
    # parseable number.
    new_content = content[:-2] + 'a\n\n'
    trans.put_bytes('index', new_content)
    self.assertRaises(errors.BadIndexOptions, idx.validate)
948
def test_validate_missing_end_line_empty(self):
    """A truncated empty index fails validation with BadIndexData."""
    index = self.make_index(2)
    trans = self.get_transport()
    content = trans.get_bytes('index')
    # Drop the final byte: the terminating newline is required.
    trans.put_bytes('index', content[:-1])
    self.assertRaises(errors.BadIndexData, index.validate)
956
def test_validate_missing_end_line_nonempty(self):
    """A truncated non-empty index fails validation with BadIndexData."""
    index = self.make_index(2, nodes=[(('key', ), '', ([], []))])
    trans = self.get_transport()
    content = trans.get_bytes('index')
    # Drop the final byte: the terminating newline is required.
    trans.put_bytes('index', content[:-1])
    self.assertRaises(errors.BadIndexData, index.validate)
964
def test_validate_empty(self):
965
index = self.make_index()
968
def test_validate_no_refs_content(self):
969
index = self.make_index(nodes=[(('key', ), 'value', ())])
972
# XXX: The external_references tests are duplicated in test_btree_index. We
# probably should have per_graph_index tests instead...
974
def test_external_references_no_refs(self):
    """external_references() is invalid on an index without ref lists."""
    index = self.make_index(ref_lists=0, nodes=[])
    self.assertRaises(ValueError, index.external_references, 0)
978
def test_external_references_no_results(self):
    """No external references when every reference list is empty."""
    index = self.make_index(ref_lists=1, nodes=[
        (('key',), 'value', ([],))])
    self.assertEqual(set(), index.external_references(0))
983
def test_external_references_missing_ref(self):
    """A referenced key that was never added is reported as external."""
    missing_key = ('missing',)
    index = self.make_index(ref_lists=1, nodes=[
        (('key',), 'value', ([missing_key],))])
    self.assertEqual(set([missing_key]), index.external_references(0))
989
def test_external_references_multiple_ref_lists(self):
    """external_references() only inspects the requested ref list."""
    missing_key = ('missing',)
    index = self.make_index(ref_lists=2, nodes=[
        (('key',), 'value', ([], [missing_key]))])
    # List 0 is empty; the missing key only appears in list 1.
    self.assertEqual(set([]), index.external_references(0))
    self.assertEqual(set([missing_key]), index.external_references(1))
996
def test_external_references_two_records(self):
    """References satisfied within the index are not external."""
    index = self.make_index(ref_lists=1, nodes=[
        (('key-1',), 'value', ([('key-2',)],)),
        (('key-2',), 'value', ([],)),
        ])
    self.assertEqual(set([]), index.external_references(0))
1003
def test__find_ancestors(self):
1006
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1007
(key1, 'value', ([key2],)),
1008
(key2, 'value', ([],)),
1011
missing_keys = set()
1012
search_keys = index._find_ancestors([key1], 0, parent_map, missing_keys)
1013
self.assertEqual({key1: (key2,)}, parent_map)
1014
self.assertEqual(set(), missing_keys)
1015
self.assertEqual(set([key2]), search_keys)
1016
search_keys = index._find_ancestors(search_keys, 0, parent_map,
1018
self.assertEqual({key1: (key2,), key2: ()}, parent_map)
1019
self.assertEqual(set(), missing_keys)
1020
self.assertEqual(set(), search_keys)
1022
def test__find_ancestors_w_missing(self):
1026
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1027
(key1, 'value', ([key2],)),
1028
(key2, 'value', ([],)),
1031
missing_keys = set()
1032
search_keys = index._find_ancestors([key2, key3], 0, parent_map,
1034
self.assertEqual({key2: ()}, parent_map)
1035
self.assertEqual(set([key3]), missing_keys)
1036
self.assertEqual(set(), search_keys)
1038
def test__find_ancestors_dont_search_known(self):
1042
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1043
(key1, 'value', ([key2],)),
1044
(key2, 'value', ([key3],)),
1045
(key3, 'value', ([],)),
1047
# We already know about key2, so we won't try to search for key3
1048
parent_map = {key2: (key3,)}
1049
missing_keys = set()
1050
search_keys = index._find_ancestors([key1], 0, parent_map,
1052
self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
1053
self.assertEqual(set(), missing_keys)
1054
self.assertEqual(set(), search_keys)
1056
def test_supports_unlimited_cache(self):
1057
builder = index.GraphIndexBuilder(0, key_elements=1)
1058
stream = builder.finish()
1059
trans = self.get_transport()
1060
size = trans.put_file('index', stream)
1061
# It doesn't matter what unlimited_cache does here, just that it can be
1063
idx = index.GraphIndex(trans, 'index', size, unlimited_cache=True)
1066
class TestCombinedGraphIndex(tests.TestCaseWithMemoryTransport):
1068
def make_index(self, name, ref_lists=0, key_elements=1, nodes=[]):
    """Build a GraphIndex file called ``name`` and return it.

    :param name: Transport-relative file name for the index.
    :param ref_lists: Number of reference lists per node.
    :param key_elements: Number of elements in each key tuple.
    :param nodes: Iterable of (key, value, references) tuples to add.
        (The mutable default is safe: it is only iterated, never mutated.)
    :return: A GraphIndex reading the freshly written file.
    """
    builder = index.GraphIndexBuilder(ref_lists, key_elements=key_elements)
    for key, value, references in nodes:
        builder.add_node(key, value, references)
    stream = builder.finish()
    trans = self.get_transport()
    size = trans.put_file(name, stream)
    return index.GraphIndex(trans, name, size)
1077
def make_combined_index_with_missing(self, missing=['1', '2']):
1078
"""Create a CombinedGraphIndex which will have missing indexes.
1080
This creates a CGI which thinks it has 2 indexes, however they have
1081
been deleted. If CGI._reload_func() is called, then it will repopulate
1084
:param missing: The underlying indexes to delete
1085
:return: (CombinedGraphIndex, reload_counter)
1087
idx1 = self.make_index('1', nodes=[(('1',), '', ())])
1088
idx2 = self.make_index('2', nodes=[(('2',), '', ())])
1089
idx3 = self.make_index('3', nodes=[
1093
# total_reloads, num_changed, num_unchanged
1094
reload_counter = [0, 0, 0]
1096
reload_counter[0] += 1
1097
new_indices = [idx3]
1098
if idx._indices == new_indices:
1099
reload_counter[2] += 1
1101
reload_counter[1] += 1
1102
idx._indices[:] = new_indices
1104
idx = index.CombinedGraphIndex([idx1, idx2], reload_func=reload)
1105
trans = self.get_transport()
1106
for fname in missing:
1108
return idx, reload_counter
1110
def test_open_missing_index_no_error(self):
1111
trans = self.get_transport()
1112
idx1 = index.GraphIndex(trans, 'missing', 100)
1113
idx = index.CombinedGraphIndex([idx1])
1115
def test_add_index(self):
1116
idx = index.CombinedGraphIndex([])
1117
idx1 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
1118
idx.insert_index(0, idx1)
1119
self.assertEqual([(idx1, ('key', ), '')],
1120
list(idx.iter_all_entries()))
1122
def test_clear_cache(self):
    """CombinedGraphIndex.clear_cache() is forwarded to every child."""
    log = []

    class ClearCacheProxy(object):
        """Wrap an index, recording clear_cache() calls in ``log``."""

        def __init__(self, index):
            self._index = index

        def __getattr__(self, name):
            # BUG FIX: the original read ``getattr(self._index)`` which
            # raises TypeError (getattr needs the attribute name) the
            # moment any attribute is proxied.
            return getattr(self._index, name)

        def clear_cache(self):
            log.append(self._index)
            return self._index.clear_cache()

    idx = index.CombinedGraphIndex([])
    idx1 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
    idx.insert_index(0, ClearCacheProxy(idx1))
    idx2 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
    idx.insert_index(1, ClearCacheProxy(idx2))
    # CombinedGraphIndex should call 'clear_cache()' on all children
    idx.clear_cache()
    self.assertEqual(sorted([idx1, idx2]), sorted(log))
1146
def test_iter_all_entries_empty(self):
1147
idx = index.CombinedGraphIndex([])
1148
self.assertEqual([], list(idx.iter_all_entries()))
1150
def test_iter_all_entries_children_empty(self):
1151
idx1 = self.make_index('name')
1152
idx = index.CombinedGraphIndex([idx1])
1153
self.assertEqual([], list(idx.iter_all_entries()))
1155
def test_iter_all_entries_simple(self):
1156
idx1 = self.make_index('name', nodes=[(('name', ), 'data', ())])
1157
idx = index.CombinedGraphIndex([idx1])
1158
self.assertEqual([(idx1, ('name', ), 'data')],
1159
list(idx.iter_all_entries()))
1161
def test_iter_all_entries_two_indices(self):
1162
idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
1163
idx2 = self.make_index('name2', nodes=[(('2', ), '', ())])
1164
idx = index.CombinedGraphIndex([idx1, idx2])
1165
self.assertEqual([(idx1, ('name', ), 'data'),
1166
(idx2, ('2', ), '')],
1167
list(idx.iter_all_entries()))
1169
def test_iter_entries_two_indices_dup_key(self):
1170
idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
1171
idx2 = self.make_index('name2', nodes=[(('name', ), 'data', ())])
1172
idx = index.CombinedGraphIndex([idx1, idx2])
1173
self.assertEqual([(idx1, ('name', ), 'data')],
1174
list(idx.iter_entries([('name', )])))
1176
def test_iter_all_entries_two_indices_dup_key(self):
1177
idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
1178
idx2 = self.make_index('name2', nodes=[(('name', ), 'data', ())])
1179
idx = index.CombinedGraphIndex([idx1, idx2])
1180
self.assertEqual([(idx1, ('name', ), 'data')],
1181
list(idx.iter_all_entries()))
1183
def test_iter_key_prefix_2_key_element_refs(self):
1184
idx1 = self.make_index('1', 1, key_elements=2, nodes=[
1185
(('name', 'fin1'), 'data', ([('ref', 'erence')], ))])
1186
idx2 = self.make_index('2', 1, key_elements=2, nodes=[
1187
(('name', 'fin2'), 'beta', ([], )),
1188
(('ref', 'erence'), 'refdata', ([], ))])
1189
idx = index.CombinedGraphIndex([idx1, idx2])
1190
self.assertEqual(set([(idx1, ('name', 'fin1'), 'data',
1191
((('ref', 'erence'),),)),
1192
(idx2, ('ref', 'erence'), 'refdata', ((), ))]),
1193
set(idx.iter_entries_prefix([('name', 'fin1'),
1194
('ref', 'erence')])))
1195
self.assertEqual(set([(idx1, ('name', 'fin1'), 'data',
1196
((('ref', 'erence'),),)),
1197
(idx2, ('name', 'fin2'), 'beta', ((), ))]),
1198
set(idx.iter_entries_prefix([('name', None)])))
1200
def test_iter_nothing_empty(self):
1201
idx = index.CombinedGraphIndex([])
1202
self.assertEqual([], list(idx.iter_entries([])))
1204
def test_iter_nothing_children_empty(self):
1205
idx1 = self.make_index('name')
1206
idx = index.CombinedGraphIndex([idx1])
1207
self.assertEqual([], list(idx.iter_entries([])))
1209
def test_iter_all_keys(self):
1210
idx1 = self.make_index('1', 1, nodes=[(('name', ), 'data',
1212
idx2 = self.make_index('2', 1, nodes=[(('ref', ), 'refdata', ((), ))])
1213
idx = index.CombinedGraphIndex([idx1, idx2])
1214
self.assertEqual(set([(idx1, ('name', ), 'data', ((('ref', ), ), )),
1215
(idx2, ('ref', ), 'refdata', ((), ))]),
1216
set(idx.iter_entries([('name', ), ('ref', )])))
1218
def test_iter_all_keys_dup_entry(self):
1219
idx1 = self.make_index('1', 1, nodes=[(('name', ), 'data',
1221
(('ref', ), 'refdata', ([], ))])
1222
idx2 = self.make_index('2', 1, nodes=[(('ref', ), 'refdata', ([], ))])
1223
idx = index.CombinedGraphIndex([idx1, idx2])
1224
self.assertEqual(set([(idx1, ('name', ), 'data', ((('ref',),),)),
1225
(idx1, ('ref', ), 'refdata', ((), ))]),
1226
set(idx.iter_entries([('name', ), ('ref', )])))
1228
def test_iter_missing_entry_empty(self):
1229
idx = index.CombinedGraphIndex([])
1230
self.assertEqual([], list(idx.iter_entries([('a', )])))
1232
def test_iter_missing_entry_one_index(self):
1233
idx1 = self.make_index('1')
1234
idx = index.CombinedGraphIndex([idx1])
1235
self.assertEqual([], list(idx.iter_entries([('a', )])))
1237
def test_iter_missing_entry_two_index(self):
1238
idx1 = self.make_index('1')
1239
idx2 = self.make_index('2')
1240
idx = index.CombinedGraphIndex([idx1, idx2])
1241
self.assertEqual([], list(idx.iter_entries([('a', )])))
1243
def test_iter_entry_present_one_index_only(self):
1244
idx1 = self.make_index('1', nodes=[(('key', ), '', ())])
1245
idx2 = self.make_index('2', nodes=[])
1246
idx = index.CombinedGraphIndex([idx1, idx2])
1247
self.assertEqual([(idx1, ('key', ), '')],
1248
list(idx.iter_entries([('key', )])))
1249
# and in the other direction
1250
idx = index.CombinedGraphIndex([idx2, idx1])
1251
self.assertEqual([(idx1, ('key', ), '')],
1252
list(idx.iter_entries([('key', )])))
1254
def test_key_count_empty(self):
1255
idx1 = self.make_index('1', nodes=[])
1256
idx2 = self.make_index('2', nodes=[])
1257
idx = index.CombinedGraphIndex([idx1, idx2])
1258
self.assertEqual(0, idx.key_count())
1260
def test_key_count_sums_index_keys(self):
1261
idx1 = self.make_index('1', nodes=[
1264
idx2 = self.make_index('2', nodes=[(('1',), '', ())])
1265
idx = index.CombinedGraphIndex([idx1, idx2])
1266
self.assertEqual(3, idx.key_count())
1268
def test_validate_bad_child_index_errors(self):
1269
trans = self.get_transport()
1270
trans.put_bytes('name', "not an index\n")
1271
idx1 = index.GraphIndex(trans, 'name', 13)
1272
idx = index.CombinedGraphIndex([idx1])
1273
self.assertRaises(errors.BadIndexFormatSignature, idx.validate)
1275
def test_validate_empty(self):
    """validate() succeeds on a CombinedGraphIndex with no children."""
    idx = index.CombinedGraphIndex([])
    idx.validate()
1279
def test_key_count_reloads(self):
1280
idx, reload_counter = self.make_combined_index_with_missing()
1281
self.assertEqual(2, idx.key_count())
1282
self.assertEqual([1, 1, 0], reload_counter)
1284
def test_key_count_no_reload(self):
1285
idx, reload_counter = self.make_combined_index_with_missing()
1286
idx._reload_func = None
1287
# Without a _reload_func we just raise the exception
1288
self.assertRaises(errors.NoSuchFile, idx.key_count)
1290
def test_key_count_reloads_and_fails(self):
1291
# We have deleted all underlying indexes, so we will try to reload, but
1292
# still fail. This is mostly to test we don't get stuck in an infinite
1293
# loop trying to reload
1294
idx, reload_counter = self.make_combined_index_with_missing(
1296
self.assertRaises(errors.NoSuchFile, idx.key_count)
1297
self.assertEqual([2, 1, 1], reload_counter)
1299
def test_iter_entries_reloads(self):
1300
index, reload_counter = self.make_combined_index_with_missing()
1301
result = list(index.iter_entries([('1',), ('2',), ('3',)]))
1302
index3 = index._indices[0]
1303
self.assertEqual([(index3, ('1',), ''), (index3, ('2',), '')],
1305
self.assertEqual([1, 1, 0], reload_counter)
1307
def test_iter_entries_reloads_midway(self):
1308
# The first index still looks present, so we get interrupted mid-way
1310
index, reload_counter = self.make_combined_index_with_missing(['2'])
1311
index1, index2 = index._indices
1312
result = list(index.iter_entries([('1',), ('2',), ('3',)]))
1313
index3 = index._indices[0]
1314
# We had already yielded '1', so we just go on to the next, we should
1315
# not yield '1' twice.
1316
self.assertEqual([(index1, ('1',), ''), (index3, ('2',), '')],
1318
self.assertEqual([1, 1, 0], reload_counter)
1320
def test_iter_entries_no_reload(self):
1321
index, reload_counter = self.make_combined_index_with_missing()
1322
index._reload_func = None
1323
# Without a _reload_func we just raise the exception
1324
self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])
1326
def test_iter_entries_reloads_and_fails(self):
1327
index, reload_counter = self.make_combined_index_with_missing(
1329
self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])
1330
self.assertEqual([2, 1, 1], reload_counter)
1332
def test_iter_all_entries_reloads(self):
1333
index, reload_counter = self.make_combined_index_with_missing()
1334
result = list(index.iter_all_entries())
1335
index3 = index._indices[0]
1336
self.assertEqual([(index3, ('1',), ''), (index3, ('2',), '')],
1338
self.assertEqual([1, 1, 0], reload_counter)
1340
def test_iter_all_entries_reloads_midway(self):
1341
index, reload_counter = self.make_combined_index_with_missing(['2'])
1342
index1, index2 = index._indices
1343
result = list(index.iter_all_entries())
1344
index3 = index._indices[0]
1345
# We had already yielded '1', so we just go on to the next, we should
1346
# not yield '1' twice.
1347
self.assertEqual([(index1, ('1',), ''), (index3, ('2',), '')],
1349
self.assertEqual([1, 1, 0], reload_counter)
1351
def test_iter_all_entries_no_reload(self):
1352
index, reload_counter = self.make_combined_index_with_missing()
1353
index._reload_func = None
1354
self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)
1356
def test_iter_all_entries_reloads_and_fails(self):
1357
index, reload_counter = self.make_combined_index_with_missing(
1359
self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)
1361
def test_iter_entries_prefix_reloads(self):
1362
index, reload_counter = self.make_combined_index_with_missing()
1363
result = list(index.iter_entries_prefix([('1',)]))
1364
index3 = index._indices[0]
1365
self.assertEqual([(index3, ('1',), '')], result)
1366
self.assertEqual([1, 1, 0], reload_counter)
1368
def test_iter_entries_prefix_reloads_midway(self):
1369
index, reload_counter = self.make_combined_index_with_missing(['2'])
1370
index1, index2 = index._indices
1371
result = list(index.iter_entries_prefix([('1',)]))
1372
index3 = index._indices[0]
1373
# We had already yielded '1', so we just go on to the next, we should
1374
# not yield '1' twice.
1375
self.assertEqual([(index1, ('1',), '')], result)
1376
self.assertEqual([1, 1, 0], reload_counter)
1378
def test_iter_entries_prefix_no_reload(self):
1379
index, reload_counter = self.make_combined_index_with_missing()
1380
index._reload_func = None
1381
self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
1384
def test_iter_entries_prefix_reloads_and_fails(self):
1385
index, reload_counter = self.make_combined_index_with_missing(
1387
self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
1391
def make_index_with_simple_nodes(self, name, num_nodes=1):
1392
"""Make an index named after 'name', with keys named after 'name' too.
1394
Nodes will have a value of '' and no references.
1397
(('index-%s-key-%s' % (name, n),), '', ())
1398
for n in range(1, num_nodes+1)]
1399
return self.make_index('index-%s' % name, 0, nodes=nodes)
1401
def test_reorder_after_iter_entries(self):
1402
# Four indices: [key1] in idx1, [key2,key3] in idx2, [] in idx3,
1404
idx = index.CombinedGraphIndex([])
1405
idx.insert_index(0, self.make_index_with_simple_nodes('1'), '1')
1406
idx.insert_index(1, self.make_index_with_simple_nodes('2'), '2')
1407
idx.insert_index(2, self.make_index_with_simple_nodes('3'), '3')
1408
idx.insert_index(3, self.make_index_with_simple_nodes('4'), '4')
1409
idx1, idx2, idx3, idx4 = idx._indices
1410
# Query a key from idx4 and idx2.
1411
self.assertLength(2, list(idx.iter_entries(
1412
[('index-4-key-1',), ('index-2-key-1',)])))
1413
# Now idx2 and idx4 should be moved to the front (and idx1 should
1414
# still be before idx3).
1415
self.assertEqual([idx2, idx4, idx1, idx3], idx._indices)
1416
self.assertEqual(['2', '4', '1', '3'], idx._index_names)
1418
def test_reorder_propagates_to_siblings(self):
1419
# Two CombinedGraphIndex objects, with the same number of indicies with
1421
cgi1 = index.CombinedGraphIndex([])
1422
cgi2 = index.CombinedGraphIndex([])
1423
cgi1.insert_index(0, self.make_index_with_simple_nodes('1-1'), 'one')
1424
cgi1.insert_index(1, self.make_index_with_simple_nodes('1-2'), 'two')
1425
cgi2.insert_index(0, self.make_index_with_simple_nodes('2-1'), 'one')
1426
cgi2.insert_index(1, self.make_index_with_simple_nodes('2-2'), 'two')
1427
index2_1, index2_2 = cgi2._indices
1428
cgi1.set_sibling_indices([cgi2])
1429
# Trigger a reordering in cgi1. cgi2 will be reordered as well.
1430
list(cgi1.iter_entries([('index-1-2-key-1',)]))
1431
self.assertEqual([index2_2, index2_1], cgi2._indices)
1432
self.assertEqual(['two', 'one'], cgi2._index_names)
1434
def test_validate_reloads(self):
1435
idx, reload_counter = self.make_combined_index_with_missing()
1437
self.assertEqual([1, 1, 0], reload_counter)
1439
def test_validate_reloads_midway(self):
1440
idx, reload_counter = self.make_combined_index_with_missing(['2'])
1443
def test_validate_no_reload(self):
1444
idx, reload_counter = self.make_combined_index_with_missing()
1445
idx._reload_func = None
1446
self.assertRaises(errors.NoSuchFile, idx.validate)
1448
def test_validate_reloads_and_fails(self):
1449
idx, reload_counter = self.make_combined_index_with_missing(
1451
self.assertRaises(errors.NoSuchFile, idx.validate)
1453
def test_find_ancestors_across_indexes(self):
1458
index1 = self.make_index('12', ref_lists=1, nodes=[
1459
(key1, 'value', ([],)),
1460
(key2, 'value', ([key1],)),
1462
index2 = self.make_index('34', ref_lists=1, nodes=[
1463
(key3, 'value', ([key2],)),
1464
(key4, 'value', ([key3],)),
1466
c_index = index.CombinedGraphIndex([index1, index2])
1467
parent_map, missing_keys = c_index.find_ancestry([key1], 0)
1468
self.assertEqual({key1: ()}, parent_map)
1469
self.assertEqual(set(), missing_keys)
1470
# Now look for a key from index2 which requires us to find the key in
1471
# the second index, and then continue searching for parents in the
1473
parent_map, missing_keys = c_index.find_ancestry([key3], 0)
1474
self.assertEqual({key1: (), key2: (key1,), key3: (key2,)}, parent_map)
1475
self.assertEqual(set(), missing_keys)
1477
def test_find_ancestors_missing_keys(self):
1482
index1 = self.make_index('12', ref_lists=1, nodes=[
1483
(key1, 'value', ([],)),
1484
(key2, 'value', ([key1],)),
1486
index2 = self.make_index('34', ref_lists=1, nodes=[
1487
(key3, 'value', ([key2],)),
1489
c_index = index.CombinedGraphIndex([index1, index2])
1490
# Searching for a key which is actually not present at all should
1491
# eventually converge
1492
parent_map, missing_keys = c_index.find_ancestry([key4], 0)
1493
self.assertEqual({}, parent_map)
1494
self.assertEqual(set([key4]), missing_keys)
1496
def test_find_ancestors_no_indexes(self):
1497
c_index = index.CombinedGraphIndex([])
1499
parent_map, missing_keys = c_index.find_ancestry([key1], 0)
1500
self.assertEqual({}, parent_map)
1501
self.assertEqual(set([key1]), missing_keys)
1503
def test_find_ancestors_ghost_parent(self):
1508
index1 = self.make_index('12', ref_lists=1, nodes=[
1509
(key1, 'value', ([],)),
1510
(key2, 'value', ([key1],)),
1512
index2 = self.make_index('34', ref_lists=1, nodes=[
1513
(key4, 'value', ([key2, key3],)),
1515
c_index = index.CombinedGraphIndex([index1, index2])
1516
# Searching for a key which is actually not present at all should
1517
# eventually converge
1518
parent_map, missing_keys = c_index.find_ancestry([key4], 0)
1519
self.assertEqual({key4: (key2, key3), key2: (key1,), key1: ()},
1521
self.assertEqual(set([key3]), missing_keys)
1523
def test__find_ancestors_empty_index(self):
1524
idx = self.make_index('test', ref_lists=1, key_elements=1, nodes=[])
1526
missing_keys = set()
1527
search_keys = idx._find_ancestors([('one',), ('two',)], 0, parent_map,
1529
self.assertEqual(set(), search_keys)
1530
self.assertEqual({}, parent_map)
1531
self.assertEqual(set([('one',), ('two',)]), missing_keys)
1534
class TestInMemoryGraphIndex(tests.TestCaseWithMemoryTransport):
1536
def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
1537
result = index.InMemoryGraphIndex(ref_lists, key_elements=key_elements)
1538
result.add_nodes(nodes)
1541
def test_add_nodes_no_refs(self):
    """add_nodes() accumulates (key, value) nodes across calls."""
    index = self.make_index(0)
    index.add_nodes([(('name', ), 'data')])
    index.add_nodes([(('name2', ), ''), (('name3', ), '')])
    self.assertEqual(set([
        (index, ('name', ), 'data'),
        (index, ('name2', ), ''),
        (index, ('name3', ), ''),
        ]), set(index.iter_all_entries()))
1551
def test_add_nodes(self):
    """add_nodes() with reference lists stores and resolves references."""
    index = self.make_index(1)
    index.add_nodes([(('name', ), 'data', ([],))])
    index.add_nodes([(('name2', ), '', ([],)),
                     (('name3', ), '', ([('r', )],))])
    self.assertEqual(set([
        (index, ('name', ), 'data', ((),)),
        (index, ('name2', ), '', ((),)),
        (index, ('name3', ), '', ((('r', ), ), )),
        ]), set(index.iter_all_entries()))
1561
def test_iter_all_entries_empty(self):
1562
index = self.make_index()
1563
self.assertEqual([], list(index.iter_all_entries()))
1565
def test_iter_all_entries_simple(self):
1566
index = self.make_index(nodes=[(('name', ), 'data')])
1567
self.assertEqual([(index, ('name', ), 'data')],
1568
list(index.iter_all_entries()))
1570
def test_iter_all_entries_references(self):
1571
index = self.make_index(1, nodes=[
1572
(('name', ), 'data', ([('ref', )], )),
1573
(('ref', ), 'refdata', ([], ))])
1574
self.assertEqual(set([(index, ('name', ), 'data', ((('ref', ),),)),
1575
(index, ('ref', ), 'refdata', ((), ))]),
1576
set(index.iter_all_entries()))
1578
def test_iteration_absent_skipped(self):
1579
index = self.make_index(1, nodes=[
1580
(('name', ), 'data', ([('ref', )], ))])
1581
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),))]),
1582
set(index.iter_all_entries()))
1583
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),))]),
1584
set(index.iter_entries([('name', )])))
1585
self.assertEqual([], list(index.iter_entries([('ref', )])))
1587
def test_iter_all_keys(self):
1588
index = self.make_index(1, nodes=[
1589
(('name', ), 'data', ([('ref', )], )),
1590
(('ref', ), 'refdata', ([], ))])
1591
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),)),
1592
(index, ('ref', ), 'refdata', ((), ))]),
1593
set(index.iter_entries([('name', ), ('ref', )])))
1595
def test_iter_key_prefix_1_key_element_no_refs(self):
1596
index = self.make_index( nodes=[
1597
(('name', ), 'data'),
1598
(('ref', ), 'refdata')])
1599
self.assertEqual(set([(index, ('name', ), 'data'),
1600
(index, ('ref', ), 'refdata')]),
1601
set(index.iter_entries_prefix([('name', ), ('ref', )])))
1603
def test_iter_key_prefix_1_key_element_refs(self):
1604
index = self.make_index(1, nodes=[
1605
(('name', ), 'data', ([('ref', )], )),
1606
(('ref', ), 'refdata', ([], ))])
1607
self.assertEqual(set([(index, ('name', ), 'data', ((('ref',),),)),
1608
(index, ('ref', ), 'refdata', ((), ))]),
1609
set(index.iter_entries_prefix([('name', ), ('ref', )])))
1611
def test_iter_key_prefix_2_key_element_no_refs(self):
1612
index = self.make_index(key_elements=2, nodes=[
1613
(('name', 'fin1'), 'data'),
1614
(('name', 'fin2'), 'beta'),
1615
(('ref', 'erence'), 'refdata')])
1616
self.assertEqual(set([(index, ('name', 'fin1'), 'data'),
1617
(index, ('ref', 'erence'), 'refdata')]),
1618
set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
1619
self.assertEqual(set([(index, ('name', 'fin1'), 'data'),
1620
(index, ('name', 'fin2'), 'beta')]),
1621
set(index.iter_entries_prefix([('name', None)])))
1623
def test_iter_key_prefix_2_key_element_refs(self):
1624
index = self.make_index(1, key_elements=2, nodes=[
1625
(('name', 'fin1'), 'data', ([('ref', 'erence')], )),
1626
(('name', 'fin2'), 'beta', ([], )),
1627
(('ref', 'erence'), 'refdata', ([], ))])
1628
self.assertEqual(set([(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
1629
(index, ('ref', 'erence'), 'refdata', ((), ))]),
1630
set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
1631
self.assertEqual(set([(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
1632
(index, ('name', 'fin2'), 'beta', ((), ))]),
1633
set(index.iter_entries_prefix([('name', None)])))
1635
def test_iter_nothing_empty(self):
1636
index = self.make_index()
1637
self.assertEqual([], list(index.iter_entries([])))
1639
def test_iter_missing_entry_empty(self):
1640
index = self.make_index()
1641
self.assertEqual([], list(index.iter_entries(['a'])))
1643
def test_key_count_empty(self):
1644
index = self.make_index()
1645
self.assertEqual(0, index.key_count())
1647
def test_key_count_one(self):
1648
index = self.make_index(nodes=[(('name', ), '')])
1649
self.assertEqual(1, index.key_count())
1651
def test_key_count_two(self):
1652
index = self.make_index(nodes=[(('name', ), ''), (('foo', ), '')])
1653
self.assertEqual(2, index.key_count())
1655
def test_validate_empty(self):
    """validate() succeeds on an empty InMemoryGraphIndex."""
    index = self.make_index()
    index.validate()
1659
def test_validate_no_refs_content(self):
1660
index = self.make_index(nodes=[(('key', ), 'value')])
1664
class TestGraphIndexPrefixAdapter(tests.TestCaseWithMemoryTransport):

    def make_index(self, ref_lists=1, key_elements=2, nodes=[],
        add_callback=False):
        """Build an InMemoryGraphIndex and a prefix adapter over it.

        :param ref_lists: Number of reference lists for the backing index.
        :param key_elements: Width of the backing index's keys.
        :param nodes: Nodes to pre-populate the backing index with.
        :param add_callback: If True, wire the adapter's add_nodes_callback
            to the backing index's add_nodes so the adapter can add nodes.
        :return: A (index, adapter) tuple.
        """
        result = index.InMemoryGraphIndex(ref_lists, key_elements=key_elements)
        result.add_nodes(nodes)
        # NOTE(review): the if/else lines were lost from this copy of the
        # file (original-line gaps 1669->1671->1673), which left the callback
        # assigned and then unconditionally clobbered with None. Restored.
        if add_callback:
            add_nodes_callback = result.add_nodes
        else:
            add_nodes_callback = None
        adapter = index.GraphIndexPrefixAdapter(
            result, ('prefix', ), key_elements - 1,
            add_nodes_callback=add_nodes_callback)
        return result, adapter
def test_add_node(self):
1680
index, adapter = self.make_index(add_callback=True)
1681
adapter.add_node(('key',), 'value', ((('ref',),),))
1682
self.assertEqual(set([(index, ('prefix', 'key'), 'value',
1683
((('prefix', 'ref'),),))]),
1684
set(index.iter_all_entries()))
1686
def test_add_nodes(self):
1687
index, adapter = self.make_index(add_callback=True)
1689
(('key',), 'value', ((('ref',),),)),
1690
(('key2',), 'value2', ((),)),
1692
self.assertEqual(set([
1693
(index, ('prefix', 'key2'), 'value2', ((),)),
1694
(index, ('prefix', 'key'), 'value', ((('prefix', 'ref'),),))
1696
set(index.iter_all_entries()))
1698
def test_construct(self):
1699
idx = index.InMemoryGraphIndex()
1700
adapter = index.GraphIndexPrefixAdapter(idx, ('prefix', ), 1)
1702
def test_construct_with_callback(self):
1703
idx = index.InMemoryGraphIndex()
1704
adapter = index.GraphIndexPrefixAdapter(idx, ('prefix', ), 1,
1707
def test_iter_all_entries_cross_prefix_map_errors(self):
1708
index, adapter = self.make_index(nodes=[
1709
(('prefix', 'key1'), 'data1', ((('prefixaltered', 'key2'),),))])
1710
self.assertRaises(errors.BadIndexData, list, adapter.iter_all_entries())
1712
def test_iter_all_entries(self):
1713
index, adapter = self.make_index(nodes=[
1714
(('notprefix', 'key1'), 'data', ((), )),
1715
(('prefix', 'key1'), 'data1', ((), )),
1716
(('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
1717
self.assertEqual(set([(index, ('key1', ), 'data1', ((),)),
1718
(index, ('key2', ), 'data2', ((('key1',),),))]),
1719
set(adapter.iter_all_entries()))
1721
def test_iter_entries(self):
1722
index, adapter = self.make_index(nodes=[
1723
(('notprefix', 'key1'), 'data', ((), )),
1724
(('prefix', 'key1'), 'data1', ((), )),
1725
(('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
1726
# ask for many - get all
1727
self.assertEqual(set([(index, ('key1', ), 'data1', ((),)),
1728
(index, ('key2', ), 'data2', ((('key1', ),),))]),
1729
set(adapter.iter_entries([('key1', ), ('key2', )])))
1730
# ask for one, get one
1731
self.assertEqual(set([(index, ('key1', ), 'data1', ((),))]),
1732
set(adapter.iter_entries([('key1', )])))
1733
# ask for missing, get none
1734
self.assertEqual(set(),
1735
set(adapter.iter_entries([('key3', )])))
1737
def test_iter_entries_prefix(self):
1738
index, adapter = self.make_index(key_elements=3, nodes=[
1739
(('notprefix', 'foo', 'key1'), 'data', ((), )),
1740
(('prefix', 'prefix2', 'key1'), 'data1', ((), )),
1741
(('prefix', 'prefix2', 'key2'), 'data2', ((('prefix', 'prefix2', 'key1'),),))])
1742
# ask for a prefix, get the results for just that prefix, adjusted.
1743
self.assertEqual(set([(index, ('prefix2', 'key1', ), 'data1', ((),)),
1744
(index, ('prefix2', 'key2', ), 'data2', ((('prefix2', 'key1', ),),))]),
1745
set(adapter.iter_entries_prefix([('prefix2', None)])))
1747
def test_key_count_no_matching_keys(self):
1748
index, adapter = self.make_index(nodes=[
1749
(('notprefix', 'key1'), 'data', ((), ))])
1750
self.assertEqual(0, adapter.key_count())
1752
def test_key_count_some_keys(self):
1753
index, adapter = self.make_index(nodes=[
1754
(('notprefix', 'key1'), 'data', ((), )),
1755
(('prefix', 'key1'), 'data1', ((), )),
1756
(('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
1757
self.assertEqual(2, adapter.key_count())
1759
def test_validate(self):
1760
index, adapter = self.make_index()
1763
calls.append('called')
1764
index.validate = validate
1766
self.assertEqual(['called'], calls)