# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Proxy object for access to remote branches.

At the moment remote branches are only for HTTP and only for read
access.
"""

## XXX: This is pretty slow on high-latency connections because it
## doesn't keep the HTTP connection alive. If you have a smart local
## proxy it may be much better. Eventually I want to switch to
## urlgrabber which should use HTTP much more efficiently.

import urllib2, gzip, zlib

from cStringIO import StringIO

from errors import BzrError, BzrCheckError
from revision import Revision
from inventory import Inventory
from branch import Branch, BZR_BRANCH_FORMAT
from trace import mutter

# h = HTTPConnection('localhost:8000')
# h = HTTPConnection('bazaar-ng.org')


# velocitynet.com.au transparently proxies connections and thereby
# breaks keep-alive -- sucks!
ENABLE_URLGRABBER = True

prefix = 'http://localhost:8000'
# prefix = 'http://bazaar-ng.org/bzr/main/'

if ENABLE_URLGRABBER:
    import urlgrabber
    import urlgrabber.keepalive
    urlgrabber.keepalive.DEBUG = 0

    def get_url(url, compressed=False):
        try:
            if compressed:
                url += '.gz'
            mutter("grab url %s" % url)
            url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
            if not compressed:
                return url_f
            return gzip.GzipFile(fileobj=StringIO(url_f.read()))
        except urllib2.URLError, e:
            raise BzrError("remote fetch failed: %r: %s" % (url, e))
else:
    def get_url(url, compressed=False):
        if compressed:
            url += '.gz'
        mutter("get_url %s" % url)
        url_f = urllib2.urlopen(url)
        if compressed:
            return gzip.GzipFile(fileobj=StringIO(url_f.read()))
        return url_f


def simple_walk():
    """Experimentally fetch every revision, inventory and text under `prefix`."""
    got_invs = set()
    got_texts = set()

    history = get_url(prefix + '/.bzr/revision-history').readlines()
    num_revs = len(history)
    for i, rev_id in enumerate(history):
        rev_id = rev_id.rstrip()
        print 'read revision %d/%d' % (i, num_revs)

        # python gzip needs a seekable file (!!) but the HTTP response
        # isn't, so we need to buffer it
        rev_f = get_url(prefix + '/.bzr/revision-store/%s' % rev_id,
                        compressed=True)

        rev = Revision.read_xml(rev_f)
        inv_id = rev.inventory_id
        if inv_id not in got_invs:
            print 'get inventory %s' % inv_id
            inv_f = get_url(prefix + '/.bzr/inventory-store/%s' % inv_id,
                            compressed=True)
            inv = Inventory.read_xml(inv_f)
            print '%4d inventory entries' % len(inv)

            for path, ie in inv.iter_entries():
                text_id = ie.text_id
                if text_id is None:
                    # directories carry no text
                    continue
                if text_id in got_texts:
                    continue
                print '  fetch %s text {%s}' % (path, text_id)
                # fetched only to exercise the transport; content is discarded
                text_f = get_url(prefix + '/.bzr/text-store/%s' % text_id,
                                 compressed=True)
                got_texts.add(text_id)

            got_invs.add(inv_id)
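
# One way to exercise simple_walk() from the command line; the __main__
# guard is an illustrative sketch, not part of the module's interface:
#
#   if __name__ == '__main__':
#       simple_walk()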


def _find_remote_root(url):
    """Return the prefix URL that corresponds to the branch root."""
    orig_url = url
    while True:
        try:
            ff = get_url(url + '/.bzr/branch-format')
            fmt = ff.read().rstrip('\r\n')
            if fmt != BZR_BRANCH_FORMAT.rstrip('\r\n'):
                raise BzrError("sorry, branch format %r not supported at url %s"
                               % (fmt, url))
            return url
        except urllib2.URLError:
            if '/' not in url:
                raise BzrError('no branch root found for URL %s' % orig_url)
            url = url[:url.rindex('/')]   # strip last path component and retry
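
# Illustrative sketch (hypothetical URL): starting below a branch root,
# _find_remote_root() keeps dropping the last path component until
# .bzr/branch-format can be fetched:
#
#   _find_remote_root('http://bazaar-ng.org/bzr/main/doc')
#   # -> 'http://bazaar-ng.org/bzr/main', assuming that is the branch root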


class RemoteBranch(Branch):
    def __init__(self, baseurl, find_root=True):
        """Create new proxy for a remote branch."""
        if find_root:
            self.baseurl = _find_remote_root(baseurl)
        else:
            self.baseurl = baseurl

        self.inventory_store = RemoteStore(baseurl + '/.bzr/inventory-store/')
        self.text_store = RemoteStore(baseurl + '/.bzr/text-store/')
        self.revision_store = RemoteStore(baseurl + '/.bzr/revision-store/')

    def __str__(self):
        b = getattr(self, 'baseurl', 'undefined')
        return '%s(%r)' % (self.__class__.__name__, b)

    __repr__ = __str__

    def controlfile(self, filename, mode):
        if mode not in ('rb', 'rt', 'r'):
            raise BzrError("file mode %r not supported for remote branches"
                           % mode)
        return get_url(self.baseurl + '/.bzr/' + filename, False)

    # no locking for remote branches yet
    def lock_write(self):
        from errors import LockError
        raise LockError("write lock not supported for remote branch %s"
                        % self.baseurl)

    def relpath(self, path):
        if not path.startswith(self.baseurl):
            raise BzrError('path %r is not under base URL %r'
                           % (path, self.baseurl))
        pl = len(self.baseurl)
        return path[pl:].lstrip('/')
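
    # Illustrative (hypothetical path): relpath() maps an absolute URL under
    # the branch back to a branch-relative path:
    #
    #   b.relpath(b.baseurl + '/doc/index.txt')   # -> 'doc/index.txt'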

    def get_revision(self, revision_id):
        from bzrlib.revision import Revision
        from bzrlib.xml import unpack_xml

        revf = self.revision_store[revision_id]
        r = unpack_xml(Revision, revf)
        if r.revision_id != revision_id:
            raise BzrCheckError('revision stored as {%s} actually contains {%s}'
                                % (revision_id, r.revision_id))
        return r
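
# Hedged usage sketch for RemoteBranch; the URL is an assumption borrowed
# from the commented-out prefix above:
#
#   b = RemoteBranch('http://bazaar-ng.org/bzr/main/')
#   last_rev = b.controlfile('revision-history', 'rb').readlines()[-1].rstrip()
#   print b.get_revision(last_rev).message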


class RemoteStore(object):
    def __init__(self, baseurl):
        self._baseurl = baseurl

    def _path(self, name):
        if '/' in name:
            raise ValueError('invalid store id', name)
        return self._baseurl + '/' + name

    def __getitem__(self, fileid):
        p = self._path(fileid)
        return get_url(p, compressed=True)
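
# Hedged sketch of RemoteStore on its own; the store URL and file id are
# illustrative only:
#
#   store = RemoteStore('http://localhost:8000/.bzr/text-store/')
#   text_f = store['TEXT-ID']   # file-like object, transparently gunzipped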