~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Martin Pool
  • Date: 2010-01-29 14:09:05 UTC
  • mto: This revision was merged to the branch mainline in revision 4992.
  • Revision ID: mbp@sourcefrog.net-20100129140905-2uiarb6p8di1ywsr
Correction to URL

from review: https://code.edge.launchpad.net/~mbp/bzr/doc/+merge/18250

  1       # Copyright (C) 2005, 2007, 2010 Canonical Ltd
       1  # Copyright (C) 2005, 2007 Canonical Ltd
  2    2  #
  3    3  # This program is free software; you can redistribute it and/or modify
  4    4  # it under the terms of the GNU General Public License as published by

 31   31  from bzrlib.bzrdir import BzrDir
 32   32  from bzrlib.repofmt import knitrepo
 33   33  from bzrlib.tests import TestCaseWithTransport
      34  from bzrlib.tests.http_utils import TestCaseWithWebserver
 34   35  from bzrlib.tests.test_revision import make_branches
 35   36  from bzrlib.trace import mutter
 36   37  from bzrlib.upgrade import Convert

247  248                      rev_id).get_file_text('this-file-id'), text)
248  249
249  250
     251  class TestHttpFetch(TestCaseWithWebserver):
     252      # FIXME RBC 20060124 this really isn't web specific, perhaps an
     253      # instrumented readonly transport? Can we do an instrumented
     254      # adapter and use self.get_readonly_url ?
     255
     256      def test_fetch(self):
     257          #highest indices a: 5, b: 7
     258          br_a, br_b = make_branches(self)
     259          br_rem_a = Branch.open(self.get_readonly_url('branch1'))
     260          fetch_steps(self, br_rem_a, br_b, br_a)
     261
     262      def _count_log_matches(self, target, logs):
     263          """Count the number of times the target file pattern was fetched in an http log"""
     264          get_succeeds_re = re.compile(
     265              '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
     266              (     target,                    bzrlib.__version__))
     267          c = 0
     268          for line in logs:
     269              if get_succeeds_re.match(line):
     270                  c += 1
     271          return c
     272
     273      def test_weaves_are_retrieved_once(self):
     274          self.build_tree(("source/", "source/file", "target/"))
     275          # This test depends on knit dasta storage.
     276          wt = self.make_branch_and_tree('source', format='dirstate-tags')
     277          branch = wt.branch
     278          wt.add(["file"], ["id"])
     279          wt.commit("added file")
     280          open("source/file", 'w').write("blah\n")
     281          wt.commit("changed file")
     282          target = BzrDir.create_branch_and_repo("target/")
     283          source = Branch.open(self.get_readonly_url("source/"))
     284          target.fetch(source)
     285          # this is the path to the literal file. As format changes
     286          # occur it needs to be updated. FIXME: ask the store for the
     287          # path.
     288          self.log("web server logs are:")
     289          http_logs = self.get_readonly_server().logs
     290          self.log('\n'.join(http_logs))
     291          # unfortunately this log entry is branch format specific. We could
     292          # factor out the 'what files does this format use' to a method on the
     293          # repository, which would let us to this generically. RBC 20060419
     294          # RBC 20080408: Or perhaps we can assert that no files are fully read
     295          # twice?
     296          self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
     297          self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
     298          self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
     299          # this r-h check test will prevent regressions, but it currently already
     300          # passes, before the patch to cache-rh is applied :[
     301          self.assertTrue(1 >= self._count_log_matches('revision-history',
     302                                                       http_logs))
     303          self.assertTrue(1 >= self._count_log_matches('last-revision',
     304                                                       http_logs))
     305          # FIXME naughty poking in there.
     306          self.get_readonly_server().logs = []
     307          # check there is nothing more to fetch.  We take care to re-use the
     308          # existing transport so that the request logs we're about to examine
     309          # aren't cluttered with redundant probes for a smart server.
     310          # XXX: Perhaps this further parameterisation: test http with smart
     311          # server, and test http without smart server?
     312          source = Branch.open(
     313              self.get_readonly_url("source/"),
     314              possible_transports=[source.bzrdir.root_transport])
     315          target.fetch(source)
     316          # should make just two requests
     317          http_logs = self.get_readonly_server().logs
     318          self.log("web server logs are:")
     319          self.log('\n'.join(http_logs))
     320          self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
     321          self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
     322          self.assertEqual(1, self._count_log_matches('repository/format',
     323              http_logs))
     324          self.assertEqual(1, self._count_log_matches('revisions.kndx',
     325              http_logs))
     326          self.assertTrue(1 >= self._count_log_matches('revision-history',
     327                                                       http_logs))
     328          self.assertTrue(1 >= self._count_log_matches('last-revision',
     329                                                       http_logs))
     330          self.assertLength(5, http_logs)
     331
     332
250  333  class TestKnitToPackFetch(TestCaseWithTransport):
251  334
252  335      def find_get_record_stream(self, calls, expected_count=1):