-import logging
-
from ... import create_engine
from ... import exc
from ...testing.provision import create_db
from ...testing.provision import drop_db
+from ...testing.provision import log
from ...testing.provision import run_reap_dbs
from ...testing.provision import update_db_opts
-log = logging.getLogger(__name__)
-
-
@update_db_opts.for_db("mssql")
def _mssql_update_db_opts(db_url, db_opts):
db_opts["legacy_schema_aliasing"] = False
-import logging
-
from ... import create_engine
from ... import exc
from ...engine import url as sa_url
from ...testing.provision import create_db
from ...testing.provision import drop_db
from ...testing.provision import follower_url_from_main
+from ...testing.provision import log
from ...testing.provision import run_reap_dbs
from ...testing.provision import temp_table_keyword_args
from ...testing.provision import update_db_opts
-log = logging.getLogger(__name__)
-
-
@create_db.for_db("oracle")
def _oracle_create_db(cfg, eng, ident):
# NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
-import logging
import time
from ... import exc
from ... import text
from ...testing.provision import create_db
from ...testing.provision import drop_db
+from ...testing.provision import log
from ...testing.provision import temp_table_keyword_args
-log = logging.getLogger(__name__)
-
-
@create_db.for_db("postgresql")
def _pg_create_db(cfg, eng, ident):
template_db = cfg.options.postgresql_templatedb
from ...testing.provision import create_db
from ...testing.provision import drop_db
from ...testing.provision import follower_url_from_main
+from ...testing.provision import log
from ...testing.provision import post_configure_engine
+from ...testing.provision import run_reap_dbs
from ...testing.provision import temp_table_keyword_args
# use file DBs in all cases; memory acts kind of strangely
# as an attached database
if not follower_ident:
+ # note this test_schema.db gets created for all test runs.
+ # there's no dedicated cleanup step for it; in some ways it
+ # corresponds to the "test.test_schema" schema that's expected
+ # to be present already, so for now it just stays in a given
+ # checkout directory.
dbapi_connection.execute(
'ATTACH DATABASE "test_schema.db" AS test_schema'
)
@drop_db.for_db("sqlite")
def _sqlite_drop_db(cfg, eng, ident):
- if ident:
- os.remove("%s_test_schema.db" % ident)
- else:
- os.remove("%s.db" % ident)
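+ # a file-based run can leave behind both the main database file
+ # and the attached test_schema file; remove whichever of them exist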
+ for path in ["%s.db" % ident, "%s_test_schema.db" % ident]:
+ if os.path.exists(path):
+ log.info("deleting SQLite database file: %s" % path)
+ os.remove(path)
@temp_table_keyword_args.for_db("sqlite")
def _sqlite_temp_table_keyword_args(cfg, eng):
return {"prefixes": ["TEMPORARY"]}
+
+
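+# SQLite databases are plain files, so reaping leftover test databases
+# just means removing any files named for the recorded identifiers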
+@run_reap_dbs.for_db("sqlite")
+def _reap_sqlite_dbs(url, idents):
+ log.info("db reaper connecting to %r", url)
+
+ log.info("identifiers in file: %s", ", ".join(idents))
+ for ident in idents:
+ # we don't have a config so we can't call _sqlite_drop_db due to the
+ # decorator
+ for path in ["%s.db" % ident, "%s_test_schema.db" % ident]:
+ if os.path.exists(path):
+ log.info("deleting SQLite database file: %s" % path)
+ os.remove(path)
return sqlite
@classmethod
- def get_pool_class(cls, url):
+ def _is_url_file_db(cls, url):
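+ # a URL refers to a file-based database when it names a database
+ # path other than the ":memory:" marker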
if url.database and url.database != ":memory:":
+ return True
+ else:
+ return False
+
+ @classmethod
+ def get_pool_class(cls, url):
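+ # file databases can use NullPool; an in-memory database needs
+ # SingletonThreadPool so its single connection, and with it the
+ # database contents, stays open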
+ if cls._is_url_file_db(url):
return pool.NullPool
else:
return pool.SingletonThreadPool
dbapi_key = config.db.name + "_" + config.db.driver
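+ # give file-based SQLite its own key so its recorded call counts
+ # are kept separate from the in-memory runs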
+ if config.db.name == "sqlite" and config.db.dialect._is_url_file_db(
+ config.db.url
+ ):
+ dbapi_key += "_file"
+
# keep it at 2.7, 3.1, 3.2, etc. for now.
py_version = ".".join([str(v) for v in sys.version_info[0:2]])
urls = collections.defaultdict(set)
idents = collections.defaultdict(set)
+ dialects = {}
with open(idents_file) as file_:
for line in file_:
line = line.strip()
db_name, db_url = line.split(" ")
url_obj = sa_url.make_url(db_url)
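+ # load the provisioning module for each dialect seen in the file
+ # so that its reap hooks are registered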
+ if db_name not in dialects:
+ dialects[db_name] = url_obj.get_dialect()
+ dialects[db_name].load_provisioning()
url_key = (url_obj.get_backend_name(), url_obj.host)
urls[url_key].add(db_url)
idents[url_key].add(db_name)
# down from 185 on this; this is a small slice of a usually
# bigger operation so using a small variance
- @profiling.function_call_count(variance=0.10)
+ @profiling.function_call_count(variance=0.20)
def go1():
return sess2.merge(p1, load=False)
# third call, merge object already present. almost no calls.
- @profiling.function_call_count(variance=0.10, warmup=1)
+ @profiling.function_call_count(variance=0.20, warmup=1)
def go2():
return sess2.merge(p2, load=False)
options={"_initialize": False, "pool_reset_on_return": None}
)
engine.dialect.dbapi = dbapi
- engine.execute("%s something table something" % keyword)
- if expected:
- eq_(dbapi.connect().mock_calls, [call.cursor(), call.commit()])
- else:
- eq_(dbapi.connect().mock_calls, [call.cursor()])
+
+ with engine.connect() as conn:
+ conn.execute("%s something table something" % keyword)
+
+ if expected:
+ eq_(dbapi.connect().mock_calls, [call.cursor(), call.commit()])
+ else:
+ eq_(dbapi.connect().mock_calls, [call.cursor()])
def test_update(self):
self._test_keyword("UPDATE")
self.no_param_engine = engines.testing_engine(
options={"echo": True, "hide_parameters": True}
)
- self.eng.execute("create table foo (data string)")
- self.no_param_engine.execute("create table foo (data string)")
+ self.eng.execute("create table if not exists foo (data string)")
+ self.no_param_engine.execute(
+ "create table if not exists foo (data string)"
+ )
self.buf = logging.handlers.BufferingHandler(100)
for log in [logging.getLogger("sqlalchemy.engine")]:
log.addHandler(self.buf)
def teardown(self):
- self.eng.execute("drop table foo")
+ self.eng.execute("drop table if exists foo")
for log in [logging.getLogger("sqlalchemy.engine")]:
log.removeHandler(self.buf)
@property
def temporary_tables(self):
"""target database supports temporary tables"""
- return skip_if(["mssql", "firebird"], "not supported (?)")
+ return skip_if(
+ ["mssql", "firebird", self._sqlite_file_db], "not supported (?)"
+ )
@property
def temp_table_reflection(self):
def temp_table_names(self):
"""target dialect supports listing of temporary table names"""
- return only_on(["sqlite", "oracle"])
+ return only_on(["sqlite", "oracle"]) + skip_if(self._sqlite_file_db)
@property
def temporary_views(self):
"""target database supports temporary views"""
- return only_on(["sqlite", "postgresql"])
+ return only_on(["sqlite", "postgresql"]) + skip_if(
+ self._sqlite_file_db
+ )
@property
def update_nowait(self):
]
)
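+ # helpers to distinguish a file-backed SQLite database from the
+ # default in-memory database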
+ def _sqlite_file_db(self, config):
+ return against(config, "sqlite") and config.db.dialect._is_url_file_db(
+ config.db.url
+ )
+
+ def _sqlite_memory_db(self, config):
+ return not self._sqlite_file_db(config)
+
def _sqlite_json(self, config):
if not against(config, "sqlite >= 3.9"):
return False
@property
def python_profiling_backend(self):
- return only_on(["sqlite"])
+ return only_on([self._sqlite_memory_db])
nocext: DISABLE_SQLALCHEMY_CEXT=1
cov: COVERAGE={[testenv]cov_args}
sqlite: SQLITE={env:TOX_SQLITE:--db sqlite}
- sqlite_file: BASECOMMAND=/bin/sh -c 'echo "sqlite_file backend tests not enabled for this version"'; /bin/true
+ sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file}
postgresql: POSTGRESQL={env:TOX_POSTGRESQL:--db postgresql}
mysql: MYSQL={env:TOX_MYSQL:--db mysql --db pymysql}
oracle,oracle6,oracle5: ORACLE={env:TOX_ORACLE:--db oracle}
mssql: MSSQL={env:TOX_MSSQL:--db mssql}
- oracle,oracle6,oracle5,mssql: IDENTS=--write-idents db_idents.txt
- oracle,oracle6,oracle5,mssql: NOMEMORY=--nomemory
+ oracle,oracle6,oracle5,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt
+ oracle,oracle6,oracle5,mssql,sqlite_file: NOMEMORY=--nomemory
backendonly: BACKENDONLY=--backend-only
# tox as of 2.0 blocks all environment variables from the
# outside, unless they are here (or in TOX_TESTENV_PASSENV,
# wildcards OK). Need at least these
-passenv=ORACLE_HOME NLS_LANG TOX_POSTGRESQL TOX_MYSQL TOX_ORACLE TOX_MSSQL TOX_SQLITE TOX_WORKERS
+passenv=ORACLE_HOME NLS_LANG TOX_POSTGRESQL TOX_MYSQL TOX_ORACLE TOX_MSSQL TOX_SQLITE TOX_SQLITE_FILE TOX_WORKERS
# for nocext, we rm *.so in lib in case we are doing usedevelop=True
commands=
nocext: sh -c "rm -f lib/sqlalchemy/*.so"
{env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:IDENTS:} {env:NOMEMORY:} {env:COVERAGE:} {posargs}
- oracle,oracle6,oracle5,mssql: python reap_dbs.py db_idents.txt
+ oracle,oracle6,oracle5,mssql,sqlite_file: python reap_dbs.py db_idents.txt
# thanks to https://julien.danjou.info/the-best-flake8-extensions/
[testenv:pep8]