# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Proxy object for access to remote branches.

At the moment remote branches are only for HTTP and only for read
access.
"""

## XXX: This is pretty slow on high-latency connections because it
## doesn't keep the HTTP connection alive. If you have a smart local
## proxy it may be much better. Eventually I want to switch to
## urlgrabber which should use HTTP much more efficiently.

import urllib2, gzip, zlib

from sets import Set
from cStringIO import StringIO

# NOTE(review): the two `from errors import ...` lines in the pasted
# original are merged here; no imported name has been dropped.
from errors import BzrError, BzrCheckError
from revision import Revision
from inventory import Inventory
from branch import Branch, BZR_BRANCH_FORMAT
from trace import mutter

# h = HTTPConnection('localhost:8000')
# h = HTTPConnection('bazaar-ng.org')
# velocitynet.com.au transparently proxies connections and thereby
36
37
# breaks keep-alive -- sucks!
39
import urlgrabber.keepalive
40
urlgrabber.keepalive.DEBUG = 2
44
prefix = 'http://localhost:8000'
45
# prefix = 'http://bazaar-ng.org/bzr/main/'
47
def get_url(path, compressed=False):
40
ENABLE_URLGRABBER = True
45
import urlgrabber.keepalive
46
urlgrabber.keepalive.DEBUG = 0
47
def get_url(path, compressed=False):
52
mutter("grab url %s" % url)
53
url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
57
return gzip.GzipFile(fileobj=StringIO(url_f.read()))
58
except urllib2.URLError, e:
59
raise BzrError("remote fetch failed: %r: %s" % (url, e))
61
def get_url(url, compressed=False):
52
url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
65
mutter("get_url %s" % url)
66
url_f = urllib2.urlopen(url)
68
return gzip.GzipFile(fileobj=StringIO(url_f.read()))
74
def _find_remote_root(url):
    """Return the prefix URL that corresponds to the branch root.

    Starting at *url*, probe for .bzr/branch-format and walk up one
    path component at a time until it is found.

    Raises BzrError if no branch root exists at or above *url*, or if
    the branch format found there is not supported.
    """
    orig_url = url
    while True:
        try:
            ff = get_url(url + '/.bzr/branch-format')
            fmt = ff.read()
            # tolerate newline differences between the local constant
            # and the remote file
            fmt = fmt.rstrip('\r\n')
            if fmt != BZR_BRANCH_FORMAT.rstrip('\r\n'):
                raise BzrError("sorry, branch format %r not supported at url %s"
                               % (fmt, url))
            return url
        except urllib2.URLError:
            pass
        # not a branch root; strip the last path component and retry.
        # NOTE(review): parent-stepping loop reconstructed from
        # fragments -- confirm against upstream history.
        try:
            idx = url.rindex('/')
        except ValueError:
            raise BzrError('no branch root found for URL %s' % orig_url)
        url = url[:idx]
class RemoteBranch(Branch):
    """Read-only proxy for a branch accessed over HTTP."""

    def __init__(self, baseurl, find_root=True, lock_mode='r'):
        """Create new proxy for a remote branch.

        baseurl   -- URL of (or below) the branch.
        find_root -- if true, walk up from baseurl to locate the real
                     branch root; otherwise use baseurl as given.
        lock_mode -- must be '' or 'r': remote branches are read-only.
        """
        if lock_mode not in ('', 'r'):
            raise BzrError('lock mode %r is not supported for remote branches'
                           % lock_mode)
        if find_root:
            self.baseurl = _find_remote_root(baseurl)
        else:
            self.baseurl = baseurl

    def __str__(self):
        return '%s(%r)' % (self.__class__.__name__, self.baseurl)

    # NOTE(review): repr aliased to str per the single formatting
    # fragment in the original -- confirm against upstream history.
    __repr__ = __str__

    def controlfile(self, filename, mode):
        """Return a read-only file-like object for a control file."""
        if mode not in ('rb', 'rt', 'r'):
            raise BzrError("file mode %r not supported for remote branches" % mode)
        # control files are stored uncompressed
        return get_url(self.baseurl + '/.bzr/' + filename, False)

    def _need_readlock(self):
        # remote branch always safe for read
        pass

    def _need_writelock(self):
        raise BzrError("cannot get write lock on HTTP remote branch")

    def relpath(self, path):
        """Return the portion of *path* below the branch base URL."""
        if not path.startswith(self.baseurl):
            raise BzrError('path %r is not under base URL %r'
                           % (path, self.baseurl))
        pl = len(self.baseurl)
        return path[pl:].lstrip('/')

    def get_revision(self, revision_id):
        """Fetch and parse one revision from the remote revision-store.

        Raises BzrCheckError if the stored revision claims a
        different id than was asked for.
        """
        from revision import Revision
        # revision-store entries are gzip-compressed on the server
        revf = get_url(self.baseurl + '/.bzr/revision-store/' + revision_id,
                       True)
        r = Revision.read_xml(revf)
        if r.revision_id != revision_id:
            raise BzrCheckError('revision stored as {%s} actually contains {%s}'
                                % (revision_id, r.revision_id))
        return r
rev_f = get_url('/.bzr/revision-store/%s' % rev_id,
77
rev = Revision.read_xml(rev_f)
79
inv_id = rev.inventory_id
80
if inv_id not in got_invs:
81
print 'get inventory %s' % inv_id
82
inv_f = get_url('/.bzr/inventory-store/%s' % inv_id,
151
from revision import Revision
152
from branch import Branch
153
from inventory import Inventory
159
history = get_url('/.bzr/revision-history').readlines()
160
num_revs = len(history)
161
for i, rev_id in enumerate(history):
162
rev_id = rev_id.rstrip()
163
print 'read revision %d/%d' % (i, num_revs)
165
# python gzip needs a seekable file (!!) but the HTTP response
166
# isn't, so we need to buffer it
168
rev_f = get_url('/.bzr/revision-store/%s' % rev_id,
84
inv = Inventory.read_xml(inv_f)
85
print '%4d inventory entries' % len(inv)
87
for path, ie in inv.iter_entries():
91
if text_id in got_texts:
93
print ' fetch %s text {%s}' % (path, text_id)
94
text_f = get_url('/.bzr/text-store/%s' % text_id,
96
got_texts.add(text_id)
171
rev = Revision.read_xml(rev_f)
173
inv_id = rev.inventory_id
174
if inv_id not in got_invs:
175
print 'get inventory %s' % inv_id
176
inv_f = get_url('/.bzr/inventory-store/%s' % inv_id,
178
inv = Inventory.read_xml(inv_f)
179
print '%4d inventory entries' % len(inv)
181
for path, ie in inv.iter_entries():
185
if text_id in got_texts:
187
print ' fetch %s text {%s}' % (path, text_id)
188
text_f = get_url('/.bzr/text-store/%s' % text_id,
190
got_texts.add(text_id)
198
BASE_URL = 'http://bazaar-ng.org/bzr/bzr.dev/'
199
b = RemoteBranch(BASE_URL)
200
## print '\n'.join(b.revision_history())
201
from log import show_log
205
if __name__ == '__main__':