From 60b9b18eafad5ac46c7cf1048d749d673c2ee0ad Mon Sep 17 00:00:00 2001 From: Chris Larson Date: Thu, 18 Nov 2010 19:51:51 -0700 Subject: [PATCH] Revert "persist_data: cache connection and use cursor" Caching the database connection can cause serious issues if it results in multiple processes (e.g. multiple tasks) simultaneously using the same connection. This reverts commit 8a6876752b90efd81d92f0947bfc9527d8260969. Signed-off-by: Chris Larson --- lib/bb/fetch/__init__.py | 9 ++++----- lib/bb/persist_data.py | 17 ++++++----------- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/lib/bb/fetch/__init__.py b/lib/bb/fetch/__init__.py index e5838ee6820..7ba868c990c 100644 --- a/lib/bb/fetch/__init__.py +++ b/lib/bb/fetch/__init__.py @@ -144,14 +144,13 @@ def uri_replace(uri, uri_find, uri_replace, d): methods = [] urldata_cache = {} saved_headrevs = {} -persistent_database_connection = {} def fetcher_init(d): """ Called to initialize the fetchers once the configuration data is known. Calls before this must not hit the cache. """ - pd = persist_data.PersistData(d, persistent_database_connection) + pd = persist_data.PersistData(d) # When to drop SCM head revisions controlled by user policy srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" if srcrev_policy == "cache": @@ -180,7 +179,7 @@ def fetcher_compare_revisons(d): return true/false on whether they've changed. """ - pd = persist_data.PersistData(d, persistent_database_connection) + pd = persist_data.PersistData(d) data = pd.getKeyValues("BB_URI_HEADREVS") data2 = bb.fetch.saved_headrevs @@ -681,7 +680,7 @@ class Fetch(object): if not hasattr(self, "_latest_revision"): raise ParameterError - pd = persist_data.PersistData(d, persistent_database_connection) + pd = persist_data.PersistData(d) key = self.generate_revision_key(url, ud, d) rev = pd.getValue("BB_URI_HEADREVS", key) if rev != None: @@ -698,7 +697,7 @@ class Fetch(object): if hasattr(self, "_sortable_revision"): return self._sortable_revision(url, ud, d) - pd = persist_data.PersistData(d, persistent_database_connection) + pd = persist_data.PersistData(d) key = self.generate_revision_key(url, ud, d) latest_rev = self._build_revision(url, ud, d) diff --git a/lib/bb/persist_data.py b/lib/bb/persist_data.py index 3c61d9573e8..b574b45de04 100644 --- a/lib/bb/persist_data.py +++ b/lib/bb/persist_data.py @@ -47,10 +47,7 @@ class PersistData: Why sqlite? It handles all the locking issues for us. """ - def __init__(self, d, persistent_database_connection): - if "connection" in persistent_database_connection: - self.cursor = persistent_database_connection["connection"].cursor() - return + def __init__(self, d): self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True) if self.cachedir in [None, '']: bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.") @@ -62,29 +59,27 @@ class PersistData: self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3") logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile) - connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None) - persistent_database_connection["connection"] = connection - self.cursor = persistent_database_connection["connection"].cursor() + self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None) def addDomain(self, domain): """ Should be called before any domain is used Creates it if it doesn't exist. 
""" - self.cursor.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain) + self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain) def delDomain(self, domain): """ Removes a domain and all the data it contains """ - self.cursor.execute("DROP TABLE IF EXISTS %s;" % domain) + self.connection.execute("DROP TABLE IF EXISTS %s;" % domain) def getKeyValues(self, domain): """ Return a list of key + value pairs for a domain """ ret = {} - data = self.cursor.execute("SELECT key, value from %s;" % domain) + data = self.connection.execute("SELECT key, value from %s;" % domain) for row in data: ret[str(row[0])] = str(row[1]) @@ -120,7 +115,7 @@ class PersistData: def _execute(self, *query): while True: try: - return self.cursor.execute(*query) + return self.connection.execute(*query) except sqlite3.OperationalError as e: if 'database is locked' in str(e): continue -- 2.47.3