# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

## XXX: This is pretty slow on high-latency connections because it
## doesn't keep the HTTP connection alive.  If you have a smart local
## proxy it may be much better.  Eventually I want to switch to
## urlgrabber which should use HTTP much more efficiently.
24
import urllib2, gzip, zlib
20
"""Proxy object for access to remote branches.
22
At the moment remote branches are only for HTTP and only for read
25
29
from sets import Set
26
30
from cStringIO import StringIO
28
from errors import BzrError
29
from revision import Revision
30
from inventory import Inventory
32
# h = HTTPConnection('localhost:8000')
33
# h = HTTPConnection('bazaar-ng.org')
32
from errors import BzrError, BzrCheckError
33
from branch import Branch
35
35
# velocitynet.com.au transparently proxies connections and thereby
36
36
# breaks keep-alive -- sucks!
39
import urlgrabber.keepalive
40
urlgrabber.keepalive.DEBUG = 2
44
prefix = 'http://localhost:8000'
45
# prefix = 'http://bazaar-ng.org/bzr/main/'
47
def get_url(path, compressed=False):
52
url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
56
return gzip.GzipFile(fileobj=StringIO(url_f.read()))
57
except urllib2.URLError, e:
58
raise BzrError("remote fetch failed: %r: %s" % (url, e))
65
history = get_url('/.bzr/revision-history').readlines()
66
num_revs = len(history)
67
for i, rev_id in enumerate(history):
68
rev_id = rev_id.rstrip()
69
print 'read revision %d/%d' % (i, num_revs)
71
# python gzip needs a seekable file (!!) but the HTTP response
72
# isn't, so we need to buffer it
40
ENABLE_URLGRABBER = False
42
def get_url(url, compressed=False):
46
url_f = urllib2.urlopen(url)
48
return gzip.GzipFile(fileobj=StringIO(url_f.read()))
54
import urlgrabber.keepalive
55
urlgrabber.keepalive.DEBUG = 0
56
def get_url(path, compressed=False):
61
url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
65
return gzip.GzipFile(fileobj=StringIO(url_f.read()))
66
except urllib2.URLError, e:
67
raise BzrError("remote fetch failed: %r: %s" % (url, e))
70
class RemoteBranch(Branch):
    """Proxy object giving read-only access to a branch over HTTP.

    Only control files, revisions and related stores can be read;
    any operation that needs a write lock raises BzrError.
    """

    def __init__(self, baseurl):
        """Create new proxy for a remote branch.

        baseurl -- URL of the branch root, without a trailing slash.
        """
        self.baseurl = baseurl

    def controlfile(self, filename, mode):
        """Return a read-only file-like object for a branch control file.

        Only the read modes 'rb', 'rt' and 'r' are supported; remote
        branches cannot be written to.
        """
        if mode not in ('rb', 'rt', 'r'):
            raise BzrError("file mode %r not supported for remote branches" % mode)
        return get_url(self.baseurl + '/.bzr/' + filename, False)

    def _need_readlock(self):
        # remote branch always safe for read
        pass

    def _need_writelock(self):
        raise BzrError("cannot get write lock on HTTP remote branch")

    def get_revision(self, revision_id):
        """Fetch and parse the Revision object for revision_id.

        Raises BzrCheckError if the revision retrieved from the store
        does not actually contain the requested revision id.
        """
        from revision import Revision
        # revision-store entries are stored gzip-compressed, hence the
        # compressed=True fetch.
        revf = get_url(self.baseurl + '/.bzr/revision-store/' + revision_id,
                       True)
        r = Revision.read_xml(revf)
        if r.revision_id != revision_id:
            raise BzrCheckError('revision stored as {%s} actually contains {%s}'
                                % (revision_id, r.revision_id))
        return r
74
rev_f = get_url('/.bzr/revision-store/%s' % rev_id,
77
rev = Revision.read_xml(rev_f)
79
inv_id = rev.inventory_id
80
if inv_id not in got_invs:
81
print 'get inventory %s' % inv_id
82
inv_f = get_url('/.bzr/inventory-store/%s' % inv_id,
100
from revision import Revision
101
from branch import Branch
102
from inventory import Inventory
108
history = get_url('/.bzr/revision-history').readlines()
109
num_revs = len(history)
110
for i, rev_id in enumerate(history):
111
rev_id = rev_id.rstrip()
112
print 'read revision %d/%d' % (i, num_revs)
114
# python gzip needs a seekable file (!!) but the HTTP response
115
# isn't, so we need to buffer it
117
rev_f = get_url('/.bzr/revision-store/%s' % rev_id,
84
inv = Inventory.read_xml(inv_f)
85
print '%4d inventory entries' % len(inv)
87
for path, ie in inv.iter_entries():
91
if text_id in got_texts:
93
print ' fetch %s text {%s}' % (path, text_id)
94
text_f = get_url('/.bzr/text-store/%s' % text_id,
96
got_texts.add(text_id)
120
rev = Revision.read_xml(rev_f)
122
inv_id = rev.inventory_id
123
if inv_id not in got_invs:
124
print 'get inventory %s' % inv_id
125
inv_f = get_url('/.bzr/inventory-store/%s' % inv_id,
127
inv = Inventory.read_xml(inv_f)
128
print '%4d inventory entries' % len(inv)
130
for path, ie in inv.iter_entries():
134
if text_id in got_texts:
136
print ' fetch %s text {%s}' % (path, text_id)
137
text_f = get_url('/.bzr/text-store/%s' % text_id,
139
got_texts.add(text_id)
147
BASE_URL = 'http://bazaar-ng.org/bzr/bzr.dev/'
148
b = RemoteBranch(BASE_URL)
149
## print '\n'.join(b.revision_history())
150
from log import show_log
154
if __name__ == '__main__':