git.ipfire.org Git - location/debian/libloc.git/commitdiff
New upstream version 0.9.16 upstream/0.9.16
author Jochen Sprickerhof <git@jochen.sprickerhof.de>
Sat, 29 Oct 2022 16:09:19 +0000 (18:09 +0200)
committer Jochen Sprickerhof <git@jochen.sprickerhof.de>
Sat, 29 Oct 2022 16:09:19 +0000 (18:09 +0200)
22 files changed:
Makefile.am
configure.ac
data/database.db [moved from tests/data/location-2022-03-30.db with 80% similarity]
data/signing-key.pem [moved from src/signing-key.pem with 100% similarity]
man/libloc.txt
man/location.txt
src/database.c
src/python/location/__init__.py
src/python/location/database.py
src/python/location/downloader.py
src/python/location/export.py
src/python/location/i18n.py
src/python/location/importer.py
src/python/location/logger.py
src/python/writer.c
src/scripts/location-importer.in
src/systemd/location-update.service.in
src/test-signature.c
src/writer.c
tests/data/signing-key.pem [deleted symlink]
tests/python/test-database.py
tests/python/test-export.py

index 81bd992d639ead903a08b416f4d8ce1fa5c1a01d..7f0d8d045285df5bf234e3665ef0797a7c4480b6 100644 (file)
@@ -56,8 +56,7 @@ databasedir = $(localstatedir)/lib/location
 pkgconfigdir = $(libdir)/pkgconfig
 
 # Overwrite Python path
-#pkgpythondir = $(pythondir)/location
-pkgpythondir = /usr/lib/python3/dist-packages/location
+pkgpythondir = $(pythondir)/location
 
 %: %.in Makefile
        $(SED_PROCESS)
@@ -233,8 +232,7 @@ EXTRA_DIST += \
        src/perl/t/Location.t \
        src/perl/typemap
 
-.PHONY: build-perl
-build-perl:
+build-perl: src/libloc.la
        @mkdir -p $(builddir)/src/perl/{lib,t}
        @test -e $(builddir)/src/perl/Location.xs || ln -s --relative $(srcdir)/src/perl/Location.xs $(builddir)/src/perl/
        @test -e $(builddir)/src/perl/MANIFEST || ln -s --relative $(srcdir)/src/perl/MANIFEST $(builddir)/src/perl/
@@ -243,30 +241,33 @@ build-perl:
        @test -e $(builddir)/src/perl/t/Location.t || ln -s --relative $(srcdir)/src/perl/t/Location.t $(builddir)/src/perl/t/
        @test -e $(builddir)/src/perl/typemap || ln -s --relative $(srcdir)/src/perl/typemap $(builddir)/src/perl/
 
-       cd $(builddir)/src/perl && $(PERL) Makefile.PL PREFIX="$(prefix)" \
+       cd $(builddir)/src/perl && $(PERL) Makefile.PL NO_PACKLIST=1 NO_PERLLOCAL=1 \
+               INSTALLDIRS=vendor \
                INC="-I$(abs_srcdir)/src" LIBS="-L$(abs_builddir)/src/.libs -lloc"
-       cd $(builddir)/src/perl && $(MAKE) LD_RUN_PATH=
+       cd $(builddir)/src/perl && $(MAKE)
+       touch build-perl
 
 .PHONY: check-perl
-check-perl: testdata.db
+check-perl: testdata.db build-perl
        cd $(builddir)/src/perl && $(MAKE) LD_LIBRARY_PATH="$(abs_builddir)/src/.libs" test \
                database="../../$<" keyfile="$(abs_srcdir)/examples/public-key.pem"
 
 .PHONY: install-perl
-install-perl:
-       cd $(builddir)/src/perl && $(MAKE) install DESTIDR=$(DESTDIR)
+install-perl: build-perl
+       cd $(builddir)/src/perl && $(MAKE) install DESTDIR=$(DESTDIR)
 
 .PHONY: clean-perl
 clean-perl:
        cd $(builddir)/src/perl && $(MAKE) distclean
+       rm -f build-perl
 
 .PHONY: uninstall-perl
 uninstall-perl:
-       rm -rvf \
-               $(DESTDIR)/$(prefix)/lib/*/perl/*/Location.pm \
-               $(DESTDIR)/$(prefix)/lib/*/perl/*/auto/Location \
-               $(DESTDIR)/$(prefix)/lib/*/perl/*/perllocal.pod \
-               $(DESTDIR)/$(prefix)/man/man3/Location.3pm
+       rm -vf \
+               $(DESTDIR)/@PERL_MODPATH@/Location.pm \
+               $(DESTDIR)/@PERL_MODPATH@/auto/Location/Location.so \
+               $(DESTDIR)/@PERL_MANPATH@/Location.3pm
+       -rmdir $(DESTDIR)/@PERL_MODPATH@/auto/Location
 
 bin_SCRIPTS = \
        src/scripts/location \
@@ -311,7 +312,15 @@ EXTRA_DIST += \
 # ------------------------------------------------------------------------------
 
 dist_database_DATA = \
-       src/signing-key.pem
+       data/database.db \
+       data/signing-key.pem
+
+install-data-hook:
+       chmod 444 $(DESTDIR)$(databasedir)/database.db
+
+.PHONY: update-database
+update-database:
+       curl https://location.ipfire.org/databases/1/location.db.xz | xz -d > data/database.db
 
 # ------------------------------------------------------------------------------
 
@@ -326,16 +335,12 @@ TESTS_LDADD = \
 
 TESTS_ENVIRONMENT = \
        PYTHONPATH=$(abs_srcdir)/src/python:$(abs_builddir)/src/python/.libs \
-       TEST_DATA_DIR="$(abs_top_srcdir)/tests/data"
+       TEST_DATA_DIR="$(abs_top_srcdir)/data"
 
 TESTS = \
        $(check_PROGRAMS) \
        $(dist_check_SCRIPTS)
 
-EXTRA_DIST += \
-       tests/data/location-2022-03-30.db \
-       tests/data/signing-key.pem
-
 CLEANFILES += \
        testdata.db
 
@@ -444,7 +449,7 @@ src_test_address_LDADD = \
 
 MANPAGES = \
        $(MANPAGES_3) \
-       $(MANPAGES_8)
+       $(MANPAGES_1)
 
 MANPAGES_3 = \
        man/libloc.3 \
@@ -458,12 +463,12 @@ MANPAGES_3 = \
        man/loc_set_log_fn.3 \
        man/loc_set_log_priority.3
 
-MANPAGES_8 = \
-       man/location.8
+MANPAGES_1 = \
+       man/location.1
 
-MANPAGES_TXT   = $(MANPAGES_TXT_3) $(MANPAGES_TXT_8)
+MANPAGES_TXT   = $(MANPAGES_TXT_3) $(MANPAGES_TXT_1)
 MANPAGES_TXT_3 = $(patsubst %.3,%.txt,$(MANPAGES_3))
-MANPAGES_TXT_8 = $(patsubst %.8,%.txt,$(MANPAGES_8))
+MANPAGES_TXT_1 = $(patsubst %.1,%.txt,$(MANPAGES_1))
 MANPAGES_HTML  = $(patsubst %.txt,%.html,$(MANPAGES_TXT))
 MANPAGES_XML   = $(patsubst %.txt,%.xml,$(MANPAGES_TXT))
 
@@ -511,7 +516,7 @@ man/%.xml: man/%.txt man/asciidoc.conf
 man/%.3: man/%.xml
        $(XSLTPROC_COMMAND_MAN)
 
-man/%.8: man/%.xml
+man/%.1: man/%.xml
        $(XSLTPROC_COMMAND_MAN)
 
 man/%.html: man/%.txt man/asciidoc.conf
index 9fad1b97dc7e7a4fc45602c66117eb2adde70cc0..96e6b0e3cf4d35d10ae6475099e00843ffd017b9 100644 (file)
@@ -1,6 +1,6 @@
 AC_PREREQ(2.60)
 AC_INIT([libloc],
-        [0.9.15],
+        [0.9.16],
         [location@lists.ipfire.org],
         [libloc],
         [https://location.ipfire.org/])
@@ -122,7 +122,7 @@ CC_CHECK_FLAGS_APPEND([my_LDFLAGS], [LDFLAGS], [-fno-semantic-interposition])
 
 AC_ARG_WITH([database-path],
        AS_HELP_STRING([--with-database-path], [The default database path]),
-       [], [with_database_path=/var/lib/${PACKAGE_NAME}/database.db]
+       [], [with_database_path=/var/lib/location/database.db]
 )
 
 if test -z "${with_database_path}"; then
@@ -175,10 +175,18 @@ PKG_CHECK_MODULES([PYTHON], [python-${PYTHON_VERSION}])
 AC_PATH_PROG(PERL, perl, no)
 AC_SUBST(PERL)
 
-AX_PROG_PERL_MODULES(ExtUtils::MakeMaker,, AC_MSG_WARN(Need some Perl modules))
+AX_PROG_PERL_MODULES(Config ExtUtils::MakeMaker,, AC_MSG_WARN(Need some Perl modules))
 
 AC_ARG_ENABLE(perl, AS_HELP_STRING([--disable-perl], [do not build the perl modules]), [],[enable_perl=yes])
 AM_CONDITIONAL(ENABLE_PERL, test "$enable_perl" = "yes")
+AS_IF([test "$enable_perl" = "yes"],
+      [
+       PERL_MODPATH=$($PERL -MConfig -e 'print $Config{installvendorarch}')
+       PERL_MANPATH=$($PERL -MConfig -e 'print $Config{installvendorman3dir}')
+       AC_SUBST(PERL_MODPATH)
+       AC_SUBST(PERL_MANPATH)
+       ],
+)
 
 dnl Checking for libresolv
 case "${host}" in
@@ -223,5 +231,7 @@ AC_MSG_RESULT([
         systemd support:        ${have_systemd}
 
        Bindings:
-         perl:                 ${enable_perl}
+         Perl:                 ${enable_perl}
+         Perl module path:     ${PERL_MODPATH}
+         Perl manual path:     ${PERL_MANPATH}
 ])
similarity index 80%
rename from tests/data/location-2022-03-30.db
rename to data/database.db
index fff8d34a41e4ff42f09368936553a8baa814036c..b36cc8a8bc58db0913a8b34fe87beb27c45460b3 100644 (file)
Binary files a/tests/data/location-2022-03-30.db and b/data/database.db differ
similarity index 100%
rename from src/signing-key.pem
rename to data/signing-key.pem
index baf98c16c98b0074c27a22d8e5fa08f4480fa595..ec14e166a864b8cefd75e3468abba3eb2061b16a 100644 (file)
@@ -39,7 +39,7 @@ either version 2.1 of the License, or (at your option) any later version.
 
 == See Also
 
-link:location[8]
+link:location[1]
 
 == Bug Reports
 
index 3dfddf511acce35869123a8efecf1b8ad180abd7..70352d2be23648e0ce772e7aa4519ee732df031b 100644 (file)
@@ -1,4 +1,4 @@
-= location(8)
+= location(1)
 
 == NAME
 location - Query the location database
index be60aa468f4afdf9fc77682d731c4d2529cc3906..617b61eb5a8fe5966136aa54c7cc690579fa2bba 100644 (file)
@@ -641,8 +641,11 @@ LOC_EXPORT int loc_database_verify(struct loc_database* db, FILE* f) {
                }
        }
 
+       int sig1_valid = 0;
+       int sig2_valid = 0;
+
        // Check first signature
-       if (db->signature1.data) {
+       if (db->signature1.length) {
                hexdump(db->ctx, db->signature1.data, db->signature1.length);
 
                r = EVP_DigestVerifyFinal(mdctx,
@@ -650,19 +653,19 @@ LOC_EXPORT int loc_database_verify(struct loc_database* db, FILE* f) {
 
                if (r == 0) {
                        DEBUG(db->ctx, "The first signature is invalid\n");
-                       r = 1;
                } else if (r == 1) {
                        DEBUG(db->ctx, "The first signature is valid\n");
-                       r = 0;
+                       sig1_valid = 1;
                } else {
                        ERROR(db->ctx, "Error verifying the first signature: %s\n",
                                ERR_error_string(ERR_get_error(), NULL));
                        r = -1;
+                       goto CLEANUP;
                }
        }
 
        // Check second signature only when the first one was invalid
-       if (r && db->signature2.data) {
+       if (db->signature2.length) {
                hexdump(db->ctx, db->signature2.data, db->signature2.length);
 
                r = EVP_DigestVerifyFinal(mdctx,
@@ -670,14 +673,14 @@ LOC_EXPORT int loc_database_verify(struct loc_database* db, FILE* f) {
 
                if (r == 0) {
                        DEBUG(db->ctx, "The second signature is invalid\n");
-                       r = 1;
                } else if (r == 1) {
                        DEBUG(db->ctx, "The second signature is valid\n");
-                       r = 0;
+                       sig2_valid = 1;
                } else {
                        ERROR(db->ctx, "Error verifying the second signature: %s\n",
                                ERR_error_string(ERR_get_error(), NULL));
                        r = -1;
+                       goto CLEANUP;
                }
        }
 
@@ -685,6 +688,12 @@ LOC_EXPORT int loc_database_verify(struct loc_database* db, FILE* f) {
        INFO(db->ctx, "Signature checked in %.4fms\n",
                (double)(end - start) / CLOCKS_PER_SEC * 1000);
 
+       // Check if at least one signature was okay
+       if (sig1_valid || sig2_valid)
+               r = 0;
+       else
+               r = 1;
+
 CLEANUP:
        // Cleanup
        EVP_MD_CTX_free(mdctx);
index f63573775f3c67729a85cf2df695897aef7d354f..e0ba510e6cefe7b328449e7310b360177c855279 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
@@ -19,6 +18,7 @@
 
 # Import everything from the C module
 from _location import *
+from _location import __version__
 
 # Initialise logging
 from . import logger
index 5d79941515cf830f81d4aff4a1897c4525d99e82..b97d93a844ffd323797c8be1388df790b0e87d81 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 """
        A lightweight wrapper around psycopg2.
 
index b9e0c22ca9c4a80908c3a89d16b6e6f2834f4107..3618968cfecc4ca874be8ce5393b3cad7b425cdf 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
index f5ed37f7f2d51e73ff47f1f8b4479d8787203c5e..25a532e5438c6ef749fbbcc21e1bc1b0db1c6b8a 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
@@ -48,6 +47,9 @@ class OutputWriter(object):
                self.family = family
                self.directory = directory
 
+               # Tag
+               self.tag = self._make_tag()
+
                # Open output file
                if f:
                        self.f = f
@@ -58,9 +60,6 @@ class OutputWriter(object):
                else:
                        self.f = io.StringIO()
 
-               # Tag
-               self.tag = self._make_tag()
-
                # Call any custom initialization
                self.init()
 
index 2161aa67c9b52122e6ec114d90ce28f7f219362a..c97c51c5e8aac041ce520b223f25a508114d3df8 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
index 96f2218762fd21f4142f2b6980906c5fdb01b8d0..d2851627dd568e1b3cb8dc468decf2cbcffacb79 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
index 0bdf9ec45d0f7ca7b86d7a1e0b44b29f7666f516..62ad8fbc268dffa10e227aa4bd0a68ffb52ad6ff 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/python3
 ###############################################################################
 #                                                                             #
 # libloc - A library to determine the location of someone on the Internet     #
index 5d8027c88c57fde19e7af9610df7a5b83dfb1c60..1c06384a326b5a8f26ed1a560a2c2e1205d3e1dc 100644 (file)
@@ -67,8 +67,8 @@ static int Writer_init(WriterObject* self, PyObject* args, PyObject* kwargs) {
                        return -1;
 
                // Re-open file descriptor
-               f2 = fdopen(fd, "r");
-               if (!f2) {
+               f1 = fdopen(fd, "r");
+               if (!f1) {
                        PyErr_SetFromErrno(PyExc_IOError);
                        return -1;
                }
index 8d4749709ea72acd8ebd1396b25f165251f84167..9faf23b6b7fde58c485da56af5b837a972fb3681 100644 (file)
@@ -1341,8 +1341,10 @@ class CLI(object):
                                "ap-southeast-2": "AU",
                                "ap-southeast-3": "MY",
                                "ap-southeast-4": "AU",
+                               "ap-southeast-6": "AP", # XXX: Precise location not documented anywhere
                                "ap-northeast-1": "JP",
                                "ca-central-1": "CA",
+                               "ca-west-1": "CA",
                                "eu-central-1": "DE",
                                "eu-central-2": "CH",
                                "eu-west-1": "IE",
@@ -1427,37 +1429,34 @@ class CLI(object):
        def _update_overrides_for_spamhaus_drop(self):
                downloader = location.importer.Downloader()
 
-               ip_urls = [
-                                       "https://www.spamhaus.org/drop/drop.txt",
-                                       "https://www.spamhaus.org/drop/edrop.txt",
-                                       "https://www.spamhaus.org/drop/dropv6.txt"
+               ip_lists = [
+                                       ("SPAMHAUS-DROP", "https://www.spamhaus.org/drop/drop.txt"),
+                                       ("SPAMHAUS-EDROP", "https://www.spamhaus.org/drop/edrop.txt"),
+                                       ("SPAMHAUS-DROPV6", "https://www.spamhaus.org/drop/dropv6.txt")
                                ]
 
-               asn_urls = [
-                                       "https://www.spamhaus.org/drop/asndrop.txt"
+               asn_lists = [
+                                       ("SPAMHAUS-ASNDROP", "https://www.spamhaus.org/drop/asndrop.txt")
                                ]
 
-               for url in ip_urls:
-                       # Fetch IP list
+               for name, url in ip_lists:
+                       # Fetch IP list from given URL
                        f = downloader.retrieve(url)
 
                        # Split into lines
                        fcontent = f.readlines()
 
-                       # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
-                       # downloads.
-                       if len(fcontent) > 10:
-                               self.db.execute("""
-                                       DELETE FROM autnum_overrides WHERE source = 'Spamhaus ASN-DROP list';
-                                       DELETE FROM network_overrides WHERE source = 'Spamhaus DROP lists';
-                               """)
-                       else:
-                               log.error("Spamhaus DROP URL %s returned likely bogus file, ignored" % url)
-                               continue
-
-                       # Iterate through every line, filter comments and add remaining networks to
-                       # the override table in case they are valid...
                        with self.db.transaction():
+                               # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+                               # downloads.
+                               if len(fcontent) > 10:
+                                       self.db.execute("DELETE FROM network_overrides WHERE source = %s", name)
+                               else:
+                                       log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+                                       continue
+
+                               # Iterate through every line, filter comments and add remaining networks to
+                               # the override table in case they are valid...
                                for sline in fcontent:
                                        # The response is assumed to be encoded in UTF-8...
                                        sline = sline.decode("utf-8")
@@ -1475,8 +1474,8 @@ class CLI(object):
 
                                        # Sanitize parsed networks...
                                        if not self._check_parsed_network(network):
-                                               log.warning("Skipping bogus network found in Spamhaus DROP URL %s: %s" % \
-                                                       (url, network))
+                                               log.warning("Skipping bogus network found in %s (%s): %s" % \
+                                                       (name, url, network))
                                                continue
 
                                        # Conduct SQL statement...
@@ -1488,17 +1487,28 @@ class CLI(object):
                                                ) VALUES (%s, %s, %s)
                                                ON CONFLICT (network) DO UPDATE SET is_drop = True""",
                                                "%s" % network,
-                                               "Spamhaus DROP lists",
+                                               name,
                                                True
                                        )
 
-               for url in asn_urls:
+               for name, url in asn_lists:
                        # Fetch URL
                        f = downloader.retrieve(url)
 
-                       # Iterate through every line, filter comments and add remaining ASNs to
-                       # the override table in case they are valid...
+                       # Split into lines
+                       fcontent = f.readlines()
+
                        with self.db.transaction():
+                               # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+                               # downloads.
+                               if len(fcontent) > 10:
+                                       self.db.execute("DELETE FROM autnum_overrides WHERE source = %s", name)
+                               else:
+                                       log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+                                       continue
+
+                               # Iterate through every line, filter comments and add remaining ASNs to
+                               # the override table in case they are valid...
                                for sline in f.readlines():
                                        # The response is assumed to be encoded in UTF-8...
                                        sline = sline.decode("utf-8")
@@ -1518,8 +1528,8 @@ class CLI(object):
 
                                        # Filter invalid ASNs...
                                        if not self._check_parsed_asn(asn):
-                                               log.warning("Skipping bogus ASN found in Spamhaus DROP URL %s: %s" % \
-                                                       (url, asn))
+                                               log.warning("Skipping bogus ASN found in %s (%s): %s" % \
+                                                       (name, url, asn))
                                                continue
 
                                        # Conduct SQL statement...
@@ -1531,7 +1541,7 @@ class CLI(object):
                                                ) VALUES (%s, %s, %s)
                                                ON CONFLICT (number) DO UPDATE SET is_drop = True""",
                                                "%s" % asn,
-                                               "Spamhaus ASN-DROP list",
+                                               name,
                                                True
                                        )
 
index 1c8e1165b065b84f1b1f9ac3a3b1df8201b00ba9..50ba01f1018958b17de3bd2b136a63b00b6db964 100644 (file)
@@ -1,6 +1,6 @@
 [Unit]
 Description=Automatic Location Database Updater
-Documentation=man:location(8) https://man-pages.ipfire.org/libloc/location.html
+Documentation=man:location(1) https://man-pages.ipfire.org/libloc/location.html
 Requires=network.target
 
 [Service]
index 9af9236b86a7b54a8fd1eac75896db2948e2fc4e..e1be5b1889122d6b4ff9f2ad51eeb135307aaeb5 100644 (file)
@@ -94,7 +94,7 @@ int main(int argc, char** argv) {
        }
 
        // Open another public key
-       public_key = freopen(ABS_SRCDIR "/src/signing-key.pem", "r", public_key);
+       public_key = freopen(ABS_SRCDIR "/data/signing-key.pem", "r", public_key);
        if (!public_key) {
                fprintf(stderr, "Could not open public key file: %m\n");
                exit(EXIT_FAILURE);
index 51e9a8ec2a60957037daba637cf41f0c65428ba5..beffcf289dd11380a01b9b9df6d895d4b3d74431 100644 (file)
@@ -102,6 +102,13 @@ LOC_EXPORT int loc_writer_new(struct loc_ctx* ctx, struct loc_writer** writer,
                return r;
        }
 
+       // Add an empty string to the stringpool
+       r = loc_stringpool_add(w->pool, "");
+       if (r) {
+               loc_writer_unref(w);
+               return r;
+       }
+
        // Initialize the network tree
        r = loc_network_tree_new(ctx, &w->networks);
        if (r) {
@@ -740,7 +747,7 @@ LOC_EXPORT int loc_writer_write(struct loc_writer* writer, FILE* f, enum loc_dat
 
        if (writer->signature2_length) {
                DEBUG(writer->ctx, "Copying second signature of %zu byte(s)\n",
-                       writer->signature1_length);
+                       writer->signature2_length);
 
                memcpy(header.signature2, writer->signature2, writer->signature2_length);
                header.signature2_length = htobe16(writer->signature2_length);
diff --git a/tests/data/signing-key.pem b/tests/data/signing-key.pem
deleted file mode 120000 (symlink)
index b1da823..0000000
+++ /dev/null
@@ -1 +0,0 @@
-../../src/signing-key.pem
\ No newline at end of file
index 4846e7a2df6eea2c70f975897de95b7ac3d9073a..1c3448bd2d970e328d542cb036199ee209b0165d 100755 (executable)
@@ -25,7 +25,7 @@ TEST_DATA_DIR = os.environ["TEST_DATA_DIR"]
 
 class Test(unittest.TestCase):
        def setUp(self):
-               path = os.path.join(TEST_DATA_DIR, "location-2022-03-30.db")
+               path = os.path.join(TEST_DATA_DIR, "database.db")
 
                # Load the database
                self.db = location.Database(path)
@@ -45,7 +45,7 @@ class Test(unittest.TestCase):
                self.assertEqual(self.db.license, "CC BY-SA 4.0")
 
                # Created At
-               self.assertEqual(self.db.created_at, 1648619023)
+               self.assertIsInstance(self.db.created_at, int)
 
        def test_fetch_network(self):
                """
index 419b10500ab95db3eaee168bce1131257cc6162c..69218612928a376f5da136d741acdec44237b2a4 100755 (executable)
@@ -25,7 +25,7 @@ TEST_DATA_DIR = os.environ["TEST_DATA_DIR"]
 
 class Test(unittest.TestCase):
        def setUp(self):
-               path = os.path.join(TEST_DATA_DIR, "location-2022-03-30.db")
+               path = os.path.join(TEST_DATA_DIR, "database.db")
 
                # Load the database
                self.db = location.Database(path)