# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Proxy object for access to remote branches.

At the moment remote branches are only for HTTP and only for read
access.  A small usage example appears at the end of this module.
"""

## XXX: This is pretty slow on high-latency connections because it
## doesn't keep the HTTP connection alive.  If you have a smart local
## proxy it may be much better.  Eventually I want to switch to
## urlgrabber which should use HTTP much more efficiently.

import gzip
import os
import urllib2
import urlparse

from cStringIO import StringIO

from bzrlib.errors import BzrError, BzrCheckError, NoSuchRevision
from bzrlib.branch import Branch, LocalBranch, BZR_BRANCH_FORMAT
from bzrlib.trace import mutter
from bzrlib.xml import serializer_v4


ENABLE_URLGRABBER = False


class GetFailed(BzrError):
    def __init__(self, url, status):
        BzrError.__init__(self, "Get %s failed with status %s" % (url, status))


if ENABLE_URLGRABBER:
    import util.urlgrabber
    import util.urlgrabber.keepalive
    util.urlgrabber.keepalive.DEBUG = 0

    def get_url(path, compressed=False):
        try:
            url = path
            if compressed:
                url += '.gz'
            mutter("grab url %s" % url)
            url_f = util.urlgrabber.urlopen(url, keepalive=1, close_connection=0)
            if url_f.status != 200:
                raise GetFailed(url, url_f.status)
            if not compressed:
                return url_f
            else:
                # gzip needs a seekable file, so buffer the whole response
                return gzip.GzipFile(fileobj=StringIO(url_f.read()))
        except urllib2.URLError, e:
            raise BzrError("remote fetch failed: %r: %s" % (url, e))
else:
    # velocitynet.com.au transparently proxies connections and thereby
    # breaks keep-alive -- sucks!  So the plain urllib2 fetcher below is
    # the default; set ENABLE_URLGRABBER above to use keep-alive instead.
    def get_url(url, compressed=False):
        if compressed:
            url += '.gz'
        mutter("get_url %s" % url)
        url_f = urllib2.urlopen(url)
        if compressed:
            # gzip needs a seekable file, so buffer the whole response
            return gzip.GzipFile(fileobj=StringIO(url_f.read()))
        else:
            return url_f

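
# For example, asking get_url for a compressed object with
#     get_url('http://host/branch/.bzr/revision-store/REV-ID', compressed=True)
# actually requests .../REV-ID.gz and returns a file-like object yielding
# the decompressed bytes.  (The host and id here are purely illustrative.)
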
def _find_remote_root(url):
    """Return the prefix URL that corresponds to the branch root."""
    orig_url = url
    while True:
        try:
            fmt_url = url + '/.bzr/branch-format'
            fmt = get_url(fmt_url).read()
            fmt = fmt.rstrip('\r\n')
            if fmt != BZR_BRANCH_FORMAT.rstrip('\r\n'):
                raise BzrError("sorry, branch format %r not supported at url %s"
                               % (fmt, url))
            return url
        except urllib2.URLError:
            pass

        scheme, host, path = list(urlparse.urlparse(url))[:3]
        # discard params, query, fragment

        # strip off one component of the path
        idx = path.rfind('/')
        if idx == -1 or path == '/':
            raise BzrError('no branch root found for URL %s'
                           ' or enclosing directories' % orig_url)
        path = path[:idx]
        url = urlparse.urlunparse((scheme, host, path, '', '', ''))


def simple_fetch(prefix):
    """Crudely fetch every revision, inventory and text below a branch URL.

    This is mostly useful as a smoke test of the remote stores; prefix is
    the branch URL, e.g. 'http://localhost:8000' or
    'http://bazaar-ng.org/bzr/main/'.
    """
    from bzrlib.revision import Revision
    from bzrlib.inventory import Inventory

    got_invs = set()
    got_texts = set()

    history = get_url(prefix + '/.bzr/revision-history').readlines()
    num_revs = len(history)
    for i, rev_id in enumerate(history):
        rev_id = rev_id.rstrip()
        print 'read revision %d/%d' % (i, num_revs)

        # python gzip needs a seekable file (!!) but the HTTP response
        # isn't, so we need to buffer it

        rev_f = get_url(prefix + '/.bzr/revision-store/%s' % rev_id,
                        compressed=True)
        rev = Revision.read_xml(rev_f)

        inv_id = rev.inventory_id
        if inv_id not in got_invs:
            print 'get inventory %s' % inv_id
            inv_f = get_url(prefix + '/.bzr/inventory-store/%s' % inv_id,
                            compressed=True)
            inv = Inventory.read_xml(inv_f)
            print '%4d inventory entries' % len(inv)
            got_invs.add(inv_id)

            for path, ie in inv.iter_entries():
                text_id = ie.text_id
                if text_id is None or text_id in got_texts:
                    continue
                print '  fetch %s text {%s}' % (path, text_id)
                text_f = get_url(prefix + '/.bzr/text-store/%s' % text_id,
                                 compressed=True)
                got_texts.add(text_id)

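
# Everything above reads from a branch's published .bzr control directory.
# The files and subdirectories this module touches are:
#
#   .bzr/branch-format      -- format marker checked by _find_remote_root
#   .bzr/revision-history   -- newline-separated list of revision ids
#   .bzr/revision-store/    -- gzipped revision XML, one file per revision
#   .bzr/inventory-store/   -- gzipped inventory XML
#   .bzr/text-store/        -- gzipped file texts
#
# RemoteBranch below exposes the same layout through the usual Branch
# interface (controlfile, get_revision and the three stores).
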
class RemoteBranch(LocalBranch):
    def __init__(self, baseurl, find_root=True):
        """Create new proxy for a remote branch."""
        if find_root:
            self.base = _find_remote_root(baseurl)
        else:
            self.base = baseurl

        self.inventory_store = RemoteStore(baseurl + '/.bzr/inventory-store/')
        self.text_store = RemoteStore(baseurl + '/.bzr/text-store/')
        self.revision_store = RemoteStore(baseurl + '/.bzr/revision-store/')

    def __str__(self):
        b = getattr(self, 'baseurl', 'undefined')
        return '%s(%r)' % (self.__class__.__name__, b)

    __repr__ = __str__

    def setup_caching(self, cache_root):
        """Set up cached stores located under cache_root"""
        from bzrlib.meta_store import CachedStore
        for store_name in ('inventory_store', 'text_store', 'revision_store'):
            if not isinstance(getattr(self, store_name), CachedStore):
                cache_path = os.path.join(cache_root, store_name)
                os.mkdir(cache_path)
                new_store = CachedStore(getattr(self, store_name), cache_path)
                setattr(self, store_name, new_store)

    def controlfile(self, filename, mode):
        if mode not in ('rb', 'rt', 'r'):
            raise BzrError("file mode %r not supported for remote branches" % mode)
        return get_url(self.base + '/.bzr/' + filename, False)

    def lock_read(self):
        # no locking for remote branches yet
        pass

    def lock_write(self):
        from bzrlib.errors import LockError
        raise LockError("write lock not supported for remote branch %s"
                        % self.base)

    def relpath(self, path):
        if not path.startswith(self.base):
            raise BzrError('path %r is not under base URL %r'
                           % (path, self.base))
        pl = len(self.base)
        return path[pl:].lstrip('/')

    def get_revision(self, revision_id):
        try:
            revf = self.revision_store[revision_id]
        except KeyError:
            raise NoSuchRevision(self, revision_id)
        r = serializer_v4.read_revision(revf)
        if r.revision_id != revision_id:
            raise BzrCheckError('revision stored as {%s} actually contains {%s}'
                                % (revision_id, r.revision_id))
        return r

class RemoteStore(object):
    def __init__(self, baseurl):
        self._baseurl = baseurl

    def _path(self, name):
        if '/' in name:
            raise ValueError('invalid store id', name)
        return self._baseurl + '/' + name

    def __getitem__(self, fileid):
        p = self._path(fileid)
        try:
            return get_url(p, compressed=True)
        except urllib2.URLError:
            raise KeyError(fileid)
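

# A minimal usage sketch: open a remote branch read-only and pull one
# revision through the remote stores.  The default URL is only an
# example; pass the URL of a real published branch on the command line.
if __name__ == '__main__':
    import sys

    if len(sys.argv) > 1:
        url = sys.argv[1]
    else:
        url = 'http://bazaar-ng.org/bzr/main/'   # example only

    branch = RemoteBranch(url)
    print 'branch root found at %s' % branch.base

    history = branch.controlfile('revision-history', 'r').read().split()
    print '%d revisions in remote history' % len(history)

    if history:
        rev = branch.get_revision(history[-1])
        print 'latest revision: %s' % rev.revision_id
        # the stores behave like read-only dictionaries keyed by id
        inv_f = branch.inventory_store[rev.inventory_id]
        print 'inventory %s is %d bytes of XML' % (rev.inventory_id,
                                                   len(inv_f.read()))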