~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/remotebranch.py

  • Committer: Martin Pool
  • Date: 2005-07-08 06:54:58 UTC
  • Revision ID: mbp@sourcefrog.net-20050708065458-2af06c3659faf1d8
- use new path-based hashcache for WorkingTree; squash mtime/ctime to whole seconds; update and, if necessary, write out the hashcache when the WorkingTree object is created.

Show diffs side-by-side

added added

removed removed

Lines of Context:
38
38
 
39
39
ENABLE_URLGRABBER = True
40
40
 
41
 
from bzrlib.errors import BzrError
42
 
 
43
 
class GetFailed(BzrError):
    """Raised when fetching a remote URL returns a non-200 HTTP status."""

    def __init__(self, url, status):
        # Build the user-visible message first, then record the details
        # so callers can inspect which URL failed and with what status.
        BzrError.__init__(self, "Get %s failed with status %s" % (url, status))
        self.url = url
        self.status = status
48
41
 
49
42
if ENABLE_URLGRABBER:
50
 
    import util.urlgrabber
51
 
    import util.urlgrabber.keepalive
52
 
    util.urlgrabber.keepalive.DEBUG = 0
 
43
    import urlgrabber
 
44
    import urlgrabber.keepalive
 
45
    urlgrabber.keepalive.DEBUG = 0
53
46
    def get_url(path, compressed=False):
54
47
        try:
55
48
            url = path
56
49
            if compressed:
57
50
                url += '.gz'
58
51
            mutter("grab url %s" % url)
59
 
            url_f = util.urlgrabber.urlopen(url, keepalive=1, close_connection=0)
60
 
            if url_f.status != 200:
61
 
                raise GetFailed(url, url_f.status)
 
52
            url_f = urlgrabber.urlopen(url, keepalive=1, close_connection=0)
62
53
            if not compressed:
63
54
                return url_f
64
55
            else:
176
167
        
177
168
    def __getitem__(self, fileid):
178
169
        p = self._path(fileid)
179
 
        try:
180
 
            return get_url(p, compressed=True)
181
 
        except:
182
 
            raise KeyError(fileid)
 
170
        return get_url(p, compressed=True)
183
171
    
184
172
 
185
173