author     Tim Peters <tim.peters@gmail.com>
           Mon, 20 Mar 2006 06:06:07 +0000 (06:06 +0000)
committer  Tim Peters <tim.peters@gmail.com>
           Mon, 20 Mar 2006 06:06:07 +0000 (06:06 +0000)

The new fetch_data_files.py downloads all the input data files
used by encoding tests.  Fiddled the Windows buildbot helper
scripts to invoke this if needed.  Note that this isn't needed
on the trunk (the encoding tests download input files automatically
in 2.5).
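
The automatic download on the trunk goes through test_support.open_urlresource(),
which the new fetch_data_files.py (below) adapts.  A rough sketch of that helper,
simplified for illustration and reconstructed from memory rather than copied from
the 2.5 source, looks like:

    # Simplified approximation of test.test_support.open_urlresource() on the
    # 2.5 trunk; illustrative only, not the exact stdlib code.
    def open_urlresource(url):
        import urllib, urlparse
        import os.path

        # Split on '/' (not os.sep): `url` is a URL, not a filesystem path.
        filename = urlparse.urlparse(url)[2].split('/')[-1]
        if os.path.exists(filename):
            return open(filename)   # reuse a previously downloaded copy
        print '\tfetching %s ...' % url
        fn, _ = urllib.urlretrieve(url, filename)
        return open(fn)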

Tools/buildbot/README.txt [new file with mode: 0644]
Tools/buildbot/build.bat
Tools/buildbot/external.bat
Tools/buildbot/fetch_data_files.py [new file with mode: 0644]

diff --git a/Tools/buildbot/README.txt b/Tools/buildbot/README.txt
new file mode 100644 (file)
index 0000000..7c6e05a
--- /dev/null
@@ -0,0 +1,14 @@
+Helpers used by buildbot-driven core Python testing.
+
+external.bat
+build.bat
+test.bat
+clean.bat
+    On Windows, these scripts are executed by the code sent
+    from the buildbot master to the slaves.
+
+fetch_data_files.py
+    Download all the input files various encoding tests want.  This is
+    used by build.bat on Windows (but could be used on any platform).
+    Note that in Python >= 2.5, the encoding tests download input files
+    automatically.
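
As the build.bat change below shows, the Windows buildbots cd into PCbuild and
run the script with no argument, so the data files land next to python_d.exe.
On another platform an equivalent invocation would be along these lines (the
interpreter name and target directory here are only examples):

    python Tools/buildbot/fetch_data_files.py .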
diff --git a/Tools/buildbot/build.bat b/Tools/buildbot/build.bat
index e3b77be2f12b491c9f212fc753fd3f813f2bfd23..ef9b09298310eec838d92f6a58766a8cc4140a47 100644 (file)
@@ -1,4 +1,7 @@
 @rem Used by the buildbot "compile" step.
 cmd /c Tools\buildbot\external.bat
 call "%VS71COMNTOOLS%vsvars32.bat"
-devenv.com /useenv /build Debug PCbuild\pcbuild.sln
+cd PCbuild
+devenv.com /useenv /build Debug pcbuild.sln
+@rem Fetch encoding test files.  Note that python_d needs to be built first.
+if not exist BIG5.TXT python_d.exe ..\Tools\buildbot\fetch_data_files.py
\ No newline at end of file
diff --git a/Tools/buildbot/external.bat b/Tools/buildbot/external.bat
index 5dd11140b7619b063ee4b1396522b2e130382721..fff0af23a5e8792a58b7621ea43c3edad4772450 100644 (file)
@@ -4,5 +4,4 @@
 cd ..\r
 \r
 @rem bzip\r
-if not exist bzip2-1.0.3 svn export http://svn.python.org/projects/external/bzip2-1.0.3\r
-\r
+if not exist bzip2-1.0.3 svn export http://svn.python.org/projects/external/bzip2-1.0.3
\ No newline at end of file
diff --git a/Tools/buildbot/fetch_data_files.py b/Tools/buildbot/fetch_data_files.py
new file mode 100644 (file)
index 0000000..f4b6096
--- /dev/null
@@ -0,0 +1,61 @@
+"""A helper to download input files needed by assorted encoding tests.
+
+fetch_data_files.py [directory]
+
+Files are downloaded to directory `directory`.  If a directory isn't given,
+it defaults to the current directory (.).
+"""
+
+DATA_URLS = """
+    http://people.freebsd.org/~perky/i18n/BIG5HKSCS.TXT
+    http://people.freebsd.org/~perky/i18n/EUC-CN.TXT
+    http://people.freebsd.org/~perky/i18n/EUC-JISX0213.TXT
+    http://people.freebsd.org/~perky/i18n/EUC-JP.TXT
+    http://people.freebsd.org/~perky/i18n/EUC-KR.TXT
+    http://people.freebsd.org/~perky/i18n/SHIFT_JISX0213.TXT
+
+    http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT
+    http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP936.TXT
+    http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP949.TXT
+    http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
+
+    http://www.unicode.org/Public/3.2-Update/NormalizationTest-3.2.0.txt
+
+    http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/JIS/SHIFTJIS.TXT
+    http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/KSC/JOHAB.TXT
+    http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT
+"""
+
+# Adapted from test_support.open_urlresource() in Python 2.5.
+# Fetch the file given by `url` off the web, and store it in directory
+# `directory`.  The file name is extracted from the last URL component.
+# If the file already exists, it's not fetched again.
+def fetch_file_from_url(url, directory):
+    import urllib, urlparse
+    import os.path
+
+    filename = urlparse.urlparse(url)[2].split('/')[-1] # '/': it's a URL!
+    target = os.path.join(directory, filename)
+    if os.path.exists(target):
+        print "\tskipping %r -- already exists" % target
+    else:
+        print "\tfetching %s ..." % url
+        urllib.urlretrieve(url, target)
+
+def main(urls, directory):
+    print "Downloading data files to %r" % directory
+    for url in urls.split():
+        fetch_file_from_url(url, directory)
+
+if __name__ == "__main__":
+    import sys
+
+    n = len(sys.argv)
+    if n == 1:
+        directory = "."
+    elif n == 2:
+        directory = sys.argv[1]
+    else:
+        raise ValueError("no more than one argument allowed")
+
+    main(DATA_URLS, directory)
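
The helper can also be reused from another script.  The snippet below is a
hypothetical example (not part of this commit) that fetches a single mapping
file into a temporary directory; it assumes Tools\buildbot is on sys.path so
the module can be imported:

    import tempfile
    import fetch_data_files   # assumes Tools\buildbot is on sys.path

    # Fetch one mapping file into a fresh temporary directory.
    tmpdir = tempfile.mkdtemp()
    fetch_data_files.fetch_file_from_url(
        "http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT",
        tmpdir)

A second call with the same directory prints the script's
"skipping ... -- already exists" message instead of re-downloading, since the
target file is found on disk.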