code.ossystems Code Review - openembedded-core.git/commitdiff
Revert "persist_data: cache connection and use cursor"
authorChris Larson <chris_larson@mentor.com>
Fri, 19 Nov 2010 02:51:51 +0000 (19:51 -0700)
committerRichard Purdie <rpurdie@linux.intel.com>
Wed, 5 Jan 2011 00:58:23 +0000 (00:58 +0000)
Caching the database connection can cause serious issues if it results in
multiple processes (e.g. multiple tasks) simultaneously using the same
connection.

This reverts commit 8a6876752b90efd81d92f0947bfc9527d8260969.

(Bitbake rev: 60b9b18eafad5ac46c7cf1048d749d673c2ee0ad)

Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
bitbake/lib/bb/fetch/__init__.py
bitbake/lib/bb/persist_data.py

index 668b7886980dd8625604bba957ff4f4fa5ba5809..d8f5f167fc91b7f4e4ca466fcaa6fdd3b8e7f737 100644 (file)
@@ -144,14 +144,13 @@ def uri_replace(uri, uri_find, uri_replace, d):
 methods = []
 urldata_cache = {}
 saved_headrevs = {}
-persistent_database_connection = {}
 
 def fetcher_init(d):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
     if srcrev_policy == "cache":
@@ -180,7 +179,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
 
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     data = pd.getKeyValues("BB_URI_HEADREVS")
     data2 = bb.fetch.saved_headrevs
 
@@ -758,7 +757,7 @@ class Fetch(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         rev = pd.getValue("BB_URI_HEADREVS", key)
         if rev != None:
@@ -775,7 +774,7 @@ class Fetch(object):
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
 
         latest_rev = self._build_revision(url, ud, d)
index 76bff16658651afae774b07a79e8364ea26184cd..9558e712833f461bd69ec661d1d4f36e5fcd72dd 100644 (file)
@@ -47,10 +47,7 @@ class PersistData:
 
     Why sqlite? It handles all the locking issues for us.
     """
-    def __init__(self, d, persistent_database_connection):
-        if "connection" in persistent_database_connection:
-            self.cursor = persistent_database_connection["connection"].cursor()
-            return
+    def __init__(self, d):
         self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
         if self.cachedir in [None, '']:
             bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
@@ -62,9 +59,7 @@ class PersistData:
         self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
         logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
 
-        connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
-        persistent_database_connection["connection"] = connection
-        self.cursor = persistent_database_connection["connection"].cursor()
+        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
 
     def addDomain(self, domain):
         """
@@ -127,7 +122,7 @@ class PersistData:
         count = 0
         while True:
             try:
-                return self.cursor.execute(*query)
+                return self.connection.execute(*query)
             except sqlite3.OperationalError as e:
                 if 'database is locked' in str(e) and count < 500:
                     count = count + 1