urldata_cache[fn] = urldata
return urldata
+def try_premirror(u, ud, d):
+    """
+    Should we try premirrors for this url, u?
+    We should if forcefetch is set or the localfile and md5 don't exist
+    """
+    # NOTE(review): the call sites in go() invoke this as
+    # m.try_premirror(u, ud, d) -- i.e. as a method on ud.method -- while this
+    # is a module-level function; confirm it is also bound on the Fetch base
+    # class, otherwise fetchers without their own override will fail lookup.
+    if ud.method.forcefetch(u, ud, d):
+        return True
+    elif os.path.exists(ud.md5) and os.path.exists(ud.localfile):
+        # Both the fetched file and its md5 stamp are present: the fetch is
+        # already complete, so there is no point consulting premirrors.
+        return False
+    else:
+        return True
+
def go(d, urls = None):
"""
Fetch all urls
ud = urldata[u]
m = ud.method
if ud.localfile:
- if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5) and os.path.exists(ud.localfile):
+ if not m.try_premirror(u, ud, d):
# File already present along with md5 stamp file
# Touch md5 file to show activity
try:
pass
continue
lf = bb.utils.lockfile(ud.lockfile)
- if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5) and os.path.exists(ud.localfile):
+ if not m.try_premirror(u, ud, d):
# If someone else fetched this before we got the lock,
# notice and don't try again
try:
return True
return False
+    def try_premirror(self, u, ud, d):
+        """
+        Should we try premirrors for this git url?
+        Skip them when a local clone directory already exists.
+        """
+        # BUGFIX: parameter order must match the call sites in go(), which
+        # invoke m.try_premirror(u, ud, d). The previous signature
+        # (self, d, ud) took only two arguments after self, so the three-arg
+        # call raised TypeError (and would have bound d/ud swapped anyway).
+        if os.path.exists(ud.clonedir):
+            return False
+
+        return True
+
def go(self, loc, ud, d):
"""Fetch url"""