except OSError:
pass
raise FetchError(ud.module)
+
+ def clean(self, ud, d):
+ """ Clean the CVS module checkout under ${CVSDIR} and the fetched tarball """
+
+ pkg = data.expand('${PN}', d)
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+ pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+ bb.utils.remove(pkgdir, True)
+ bb.utils.remove(ud.localpath)
"""
def __init__(self, url, d):
# localpath is the location of a downloaded result. If not set, the file is local.
+ self.donestamp = None
self.localfile = ""
self.localpath = None
self.lockfile = None
return
+ def clean(self, urldata, d):
+ """
+ Clean any existing full or partial download for this url; default removes urldata.localpath
+ """
+ bb.utils.remove(urldata.localpath)
+
def try_premirror(self, url, urldata, d):
"""
Should premirrors be used?
if ud.lockfile:
bb.utils.unlockfile(lf)
+ def clean(self, urls = None):
+ """
+ Clean files that the fetcher gets or places, plus any donestamp, for the given urls (default: all of self.urls)
+ """
+
+ if not urls:
+ urls = self.urls
+
+ for url in urls:
+ if url not in self.ud:
+ self.ud[url] = FetchData(url, self.d)
+ ud = self.ud[url]
+ ud.setup_localpath(self.d)
+
+ if not ud.localfile or ud.localpath is None:
+ continue
+
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ ud.method.clean(ud, self.d)
+ if ud.donestamp:
+ bb.utils.remove(ud.donestamp)
+
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
+
from . import cvs
from . import git
from . import local
runfetchcmd(cmd, d, cleanup = [ud.localpath])
+ def clean(self, ud, d):
+ """ Clean CVS Files and tarballs: removes ${CVSDIR}/${PN} and the downloaded tarball """
+
+ pkg = data.expand('${PN}', d)
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+ pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+
+ bb.utils.remove(pkgdir, True)
+ bb.utils.remove(ud.localpath)
+
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
return True
+ def clean(self, ud, d):
+ """ Clean the git directory: removes the local clone dir and the mirror tarball """
+
+ bb.utils.remove(ud.localpath, True)
+ bb.utils.remove(ud.fullmirror)
+
def supports_srcrev(self):
return True
if os.path.exists(urldata.localpath):
return True
return False
+
+ def clean(self, urldata, d):
+ return # local files are used in place, never downloaded, so there is nothing to remove
+
# tar them up to a defined filename
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+ def clean(self, ud, d):
+ """ Clean SVN specific files and dirs: the downloaded tarball and the module checkout dir """
+
+ bb.utils.remove(ud.localpath)
+ bb.utils.remove(ud.moddir, True)
+
+
def supports_srcrev(self):
return True
python do_cleanall() {
sstate_clean_cachefiles(d)
+ src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+ if len(src_uri) == 0:
+ return
+
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
- dl_dir = bb.data.getVar('DL_DIR', localdata, True)
- dl_dir = os.path.realpath(dl_dir)
-
- src_uri = (bb.data.getVar('SRC_URI', localdata, True) or "").split()
- if len(src_uri) == 0:
- return
- fetcher = bb.fetch2.Fetch(src_uri, localdata)
- for url in src_uri:
- local = fetcher.localpath(url)
- if local is None:
- continue
- local = os.path.realpath(local)
- if local.startswith(dl_dir):
- bb.note("Removing %s*" % local)
- oe.path.remove(local + "*")
+ try:
+ fetcher = bb.fetch2.Fetch(src_uri, localdata)
+ fetcher.clean() # delegate per-URL removal to the fetch2 backend clean() methods
+ except bb.fetch2.BBFetchException, e:
+ raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"