# Copyright (C) 2005 Canonical Ltd

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA


## XXX: This is pretty slow on high-latency connections because it
## doesn't keep the HTTP connection alive.  If you have a smart local
## proxy it may be much better.  Eventually I want to switch to
## urlgrabber, which should use HTTP much more efficiently.


import urllib2, gzip, zlib

from errors import BzrError
from revision import Revision
from cStringIO import StringIO


# h = HTTPConnection('localhost:8000')
# h = HTTPConnection('bazaar-ng.org')
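
# A possible keep-alive variant (a sketch, not the current implementation):
# httplib.HTTPConnection speaks HTTP/1.1, so one connection can serve several
# fetches instead of paying the setup cost on every request, provided the
# server keeps the connection open and each response is read in full.
# get_url_keepalive and the host/path below are illustrative, not part of bzr.
#
# import httplib
#
# def get_url_keepalive(conn, path):
#     """Fetch a branch-relative path over an open connection; return the body."""
#     conn.request('GET', '/bzr/main' + path)
#     return conn.getresponse().read()
#
# conn = httplib.HTTPConnection('bazaar-ng.org')
# history_data = get_url_keepalive(conn, '/.bzr/revision-history')
# conn.close()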

prefix = 'http://bazaar-ng.org/bzr/main'


def get_url(path):
    """Open a branch-relative path under prefix and return the HTTP response."""
    try:
        url = prefix + path
        return urllib2.urlopen(url)
    except urllib2.URLError, e:
        raise BzrError("remote fetch failed: %r: %s" % (url, e))

history = get_url('/.bzr/revision-history').read().split('\n')

for i, rev_id in enumerate(history):
    if not rev_id:
        # the history file ends with a newline, so skip the empty entry
        continue

    print 'read revision %d' % i

    comp_f = get_url('/.bzr/revision-store/%s.gz' % rev_id)
    comp_data = comp_f.read()

    # python gzip needs a seekable file (!!) but the HTTP response
    # isn't, so we need to buffer it

    uncomp_f = gzip.GzipFile(fileobj=StringIO(comp_data))

    rev = Revision.read_xml(uncomp_f)