-C Modify\ssources\sto\sthat\sthey\scan\sbe\scombined\sinto\sa\ssingle\ssqlite3.c\ssource\nfile.\s\sEliminate\sall\stype-pruned\spointer\swarnings.\s(CVS\s3722)
-D 2007-03-26T22:05:01
+C Get\sthe\s-DSQLITE_OMIT_XFER_OPT\soption\sworking.\s\sRun\sspeed\stests\son\sa\sfull\nregression.\s\sAdd\sthe\sscript\sfor\sgenerating\ssqlite3.c.\s(CVS\s3723)
+D 2007-03-27T12:04:05
F Makefile.in 1fe3d0b46e40fd684e1e61f8e8056cefed16de9f
F Makefile.linux-gcc 2d8574d1ba75f129aba2019f0b959db380a90935
F README 9c4e2d6706bdcc3efdd773ce752a8cdab4f90028
F src/func.c 94372fe3cf26b81d4dcdc15f98ff240c37c8c708
F src/hash.c 449f3d6620193aa557f5d86cbc5cc6b87702b185
F src/hash.h 1b3f7e2609141fd571f62199fc38687d262e9564
-F src/insert.c 72cb64b698796f2005c0158e098124d9490868bb
+F src/insert.c 9dbb62ba053769de20cf6b4ac73ceeb81ffc24f3
F src/legacy.c 2631df6a861f830d6b1c0fe92b9fdd745b2c0cd6
F src/loadext.c c186ad5c9e8a0aaa73d0caf5f604d112e45e8b89
F src/main.c af8922e0205cf618392de2836c9efad71786d0d6
F src/where.c 0825fabc1a185af1567dc82e63683cd9b47d9e22
F tclinstaller.tcl 046e3624671962dc50f0481d7c25b38ef803eb42
F test/aggerror.test a867e273ef9e3d7919f03ef4f0e8c0d2767944f2
-F test/all.test 7da67663cb5af0c95ecd4805d888459023fb8cfd
+F test/all.test 60267b055e82de4fb8b841eabb014bc2f836a4eb
F test/alter.test a2cc30e844cb3b5d203416962f56f78fc11b1978
F test/alter2.test 8b2d81eae944471d473de99ab25ba6d9cda73cd3
F test/alter3.test a6eec8f454be9b6ce73d8d7dc711453675a10ce7
F test/shared_err.test 841f7341eb07ed97c713bf89960a4e9199717193
F test/sort.test 0e4456e729e5a92a625907c63dcdedfbe72c5dc5
F test/speed1.test 0ab227eae013d064f2205adcb9ee6f3c325b5bf4
-F test/speed2.test a6a9e3598cd957045e94385bebdfc64d0d57359e
+F test/speed2.test 31296ea46516ad6093e39f8622a4cbcc766923ec
F test/subquery.test ae324ee928c5fb463a3ce08a8860d6e7f1ca5797
F test/subselect.test 2d13fb7f450db3595adcdd24079a0dd1d2d6abc2
F test/sync.test d05397b8f89f423dd6dba528692019ab036bc1c3
F tool/memleak3.tcl 7707006ee908cffff210c98158788d85bb3fcdbf
F tool/mkkeywordhash.c eb7cd7244b9725e9e77270d0572de8b22a96e83a
F tool/mkopts.tcl 66ac10d240cc6e86abd37dc908d50382f84ff46e x
+F tool/mksqlite3c.tcl ca566a1482d187433f3aea97a83a1ca9fb6af071
F tool/omittest.tcl e6b3d6a1285f9813bc1dea53bb522b4b72774710
F tool/opcodeDoc.awk b3a2a3d5d3075b8bd90b7afe24283efdd586659c
F tool/report1.txt 9eae07f26a8fc53889b45fc833a66a33daa22816
F www/vdbe.tcl 87a31ace769f20d3627a64fa1fade7fed47b90d0
F www/version3.tcl 890248cf7b70e60c383b0e84d77d5132b3ead42b
F www/whentouse.tcl 97e2b5cd296f7d8057e11f44427dea8a4c2db513
-P 8fe317054982969ad539cdbf1b996b97d86ec4f2
-R c60b5a581f8ad645c254d239aa450c65
+P 0b832e218ec12b0eb559e407d80aba6709e2ea85
+R 76a8b8a54e51e640933974bfc36cdc29
U drh
-Z d6b37215b7fed9fea082ff01114bbc2c
+Z 06e3558004d52759f1e96dadb9b6ab93
-0b832e218ec12b0eb559e407d80aba6709e2ea85
\ No newline at end of file
+42c038518c4ba0ef827a5717d450f95165b3c729
\ No newline at end of file
** This file contains C code routines that are called by the parser
** to handle INSERT statements in SQLite.
**
-** $Id: insert.c,v 1.176 2007/02/24 15:18:50 drh Exp $
+** $Id: insert.c,v 1.177 2007/03/27 12:04:05 drh Exp $
*/
#include "sqliteInt.h"
}
}
+
+#ifdef SQLITE_TEST
+/*
+** The following global variable is incremented whenever the
+** transfer optimization is used. This is used for testing
+** purposes only - to make sure the transfer optimization really
+** is happening when it is supposed to.
+*/
+int sqlite3_xferopt_count;
+#endif /* SQLITE_TEST */
+
+
#ifndef SQLITE_OMIT_XFER_OPT
/*
** Check two collation names to see if they are compatible.
return 1;
}
-#ifdef SQLITE_TEST
-/*
-** The following global variable is incremented whenever the
-** transfer optimization is used. This is used for testing
-** purposes only - to make sure the transfer optimization really
-** is happening when it is suppose to.
-*/
-int sqlite3_xferopt_count;
-#endif /* SQLITE_TEST */
-
/*
** Attempt the transfer optimization on INSERTs of the form
**
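The counter above gives the test suite a direct way to confirm that the transfer
optimization actually ran instead of falling back to a row-by-row copy. As a rough
illustration only (not part of this check-in, with a made-up schema), a small program
built against a library compiled with -DSQLITE_TEST could watch the counter across an
INSERT of the form the optimization targets; whether it increments depends on the
source and destination tables being compatible, which is what checks like the
collation comparison above help decide.

/* Sketch only: hypothetical table names and schema, library built
** with -DSQLITE_TEST so that sqlite3_xferopt_count is defined.
*/
#include <stdio.h>
#include "sqlite3.h"

extern int sqlite3_xferopt_count;   /* the test counter defined above */

int main(void){
  sqlite3 *db;
  int before, after;

  sqlite3_open(":memory:", &db);
  sqlite3_exec(db, "CREATE TABLE tab1(a INTEGER PRIMARY KEY, b TEXT)", 0, 0, 0);
  sqlite3_exec(db, "CREATE TABLE tab2(a INTEGER PRIMARY KEY, b TEXT)", 0, 0, 0);
  sqlite3_exec(db, "INSERT INTO tab2 VALUES(1, 'one')", 0, 0, 0);
  sqlite3_exec(db, "INSERT INTO tab2 VALUES(2, 'two')", 0, 0, 0);

  before = sqlite3_xferopt_count;
  sqlite3_exec(db, "INSERT INTO tab1 SELECT * FROM tab2", 0, 0, 0);
  after = sqlite3_xferopt_count;

  printf("transfer optimization used: %s\n", after>before ? "yes" : "no");
  sqlite3_close(db);
  return 0;
}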
#***********************************************************************
# This file runs all tests.
#
-# $Id: all.test,v 1.37 2007/03/17 10:26:59 danielk1977 Exp $
+# $Id: all.test,v 1.38 2007/03/27 12:04:06 drh Exp $
set testdir [file dirname $argv0]
source $testdir/tester.tcl
malloc.test
misuse.test
memleak.test
- speed1.test
}
# Files to include in the test. If this list is empty then everything
# This file implements regression tests for SQLite library. The
# focus of this script is measuring executing speed.
#
-# $Id: speed2.test,v 1.2 2007/03/26 16:30:16 drh Exp $
+# $Id: speed2.test,v 1.3 2007/03/27 12:04:06 drh Exp $
#
set testdir [file dirname $argv0]
# Set a uniform random seed
expr srand(0)
-set sqlout [open speed1.txt w]
+set sqlout [open speed2.txt w]
proc tracesql {sql} {
puts $::sqlout $sql\;
}
# Create a database schema.
#
-do_test speed1-1.0 {
+do_test speed2-1.0 {
execsql {
PRAGMA page_size=1024;
PRAGMA cache_size=8192;
append sql "INSERT INTO t1 VALUES($i,$r,'[number_name $r]');\n"
}
db eval BEGIN
-speed_trial speed1-insert1 50000 row $sql
+speed_trial speed2-insert1 50000 row $sql
db eval COMMIT
# 50000 INSERTs on an indexed table
append sql "INSERT INTO t2 VALUES($i,$r,'[number_name $r]');\n"
}
db eval BEGIN
-speed_trial speed1-insert2 50000 row $sql
+speed_trial speed2-insert2 50000 row $sql
db eval COMMIT
set upr [expr {($i+10)*100}]
append sql "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
}
-speed_trial speed1-select1 [expr {50*50000}] row $sql
+speed_trial speed2-select1 [expr {50*50000}] row $sql
# 50 SELECTs on a LIKE comparison. There is no index so a full
# table scan is required.
append sql \
"SELECT count(*), avg(b) FROM t1 WHERE c LIKE '%[number_name $i]%';"
}
-speed_trial speed1-select2 [expr {50*50000}] row $sql
+speed_trial speed2-select2 [expr {50*50000}] row $sql
# Create indices
#
db eval BEGIN
-speed_trial speed1-createidx 150000 row {
+speed_trial speed2-createidx 150000 row {
CREATE INDEX i1a ON t1(a);
CREATE INDEX i1b ON t1(b);
CREATE INDEX i1c ON t1(c);
set upr [expr {($i+10)*100}]
append sql "SELECT count(*), avg(b) FROM t1 WHERE b>=$lwr AND b<$upr;"
}
-speed_trial speed1-select3 5000 stmt $sql
+speed_trial speed2-select3 5000 stmt $sql
# 100000 random SELECTs against rowid.
#
set id [expr {int(rand()*50000)+1}]
append sql "SELECT c=='hi' FROM t1 WHERE rowid=$id;\n"
}
-speed_trial speed1-select4 100000 row $sql
+speed_trial speed2-select4 100000 row $sql
# 100000 random SELECTs against a unique indexed column.
#
set id [expr {int(rand()*50000)+1}]
append sql "SELECT c FROM t1 WHERE a=$id;"
}
-speed_trial speed1-select5 100000 row $sql
+speed_trial speed2-select5 100000 row $sql
# 50000 random SELECTs against an indexed text column
#
db eval {SELECT c FROM t1 ORDER BY random() LIMIT 50000} {
append sql "SELECT c FROM t1 WHERE c='$c';"
}
-speed_trial speed1-select6 50000 row $sql
+speed_trial speed2-select6 50000 row $sql
# Vacuum
-speed_trial speed1-vacuum 100000 row VACUUM
+speed_trial speed2-vacuum 100000 row VACUUM
# 5000 updates of ranges where the field being compared is indexed.
#
append sql "UPDATE t1 SET b=b*2 WHERE a>=$lwr AND a<$upr;"
}
db eval BEGIN
-speed_trial speed1-update1 5000 stmt $sql
+speed_trial speed2-update1 5000 stmt $sql
db eval COMMIT
# 50000 single-row updates. An index is used to find the row quickly.
append sql "UPDATE t1 SET b=$r WHERE a=$i;"
}
db eval BEGIN
-speed_trial speed1-update2 50000 row $sql
+speed_trial speed2-update2 50000 row $sql
db eval COMMIT
# 1 big text update that touches every row in the table.
#
-speed_trial speed1-update3 50000 row {
+speed_trial speed2-update3 50000 row {
UPDATE t1 SET c=a;
}
append sql "UPDATE t1 SET c='[number_name $r]' WHERE a=$i;"
}
db eval BEGIN
-speed_trial speed1-update4 50000 row $sql
+speed_trial speed2-update4 50000 row $sql
db eval COMMIT
# Delete all content in a table.
#
-speed_trial speed1-delete1 50000 row {DELETE FROM t1}
+speed_trial speed2-delete1 50000 row {DELETE FROM t1}
# Copy one table into another
#
-speed_trial speed1-copy1 50000 row {INSERT INTO t1 SELECT * FROM t2}
+speed_trial speed2-copy1 50000 row {INSERT INTO t1 SELECT * FROM t2}
# Delete all content in a table, one row at a time.
#
-speed_trial speed1-delete2 50000 row {DELETE FROM t1 WHERE 1}
+speed_trial speed2-delete2 50000 row {DELETE FROM t1 WHERE 1}
# Refill the table yet again
#
-speed_trial speed1-copy2 50000 row {INSERT INTO t1 SELECT * FROM t2}
+speed_trial speed2-copy2 50000 row {INSERT INTO t1 SELECT * FROM t2}
# Drop the table and recreate it without its indices.
#
db eval BEGIN
-speed_trial speed1-drop1 50000 row {
+speed_trial speed2-drop1 50000 row {
DROP TABLE t1;
CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
}
# Refill the table yet again. This copy should be faster because
# there are no indices to deal with.
#
-speed_trial speed1-copy3 50000 row {INSERT INTO t1 SELECT * FROM t2}
+speed_trial speed2-copy3 50000 row {INSERT INTO t1 SELECT * FROM t2}
# Select 20000 rows from the table at random.
#
-speed_trial speed1-random1 50000 row {
+speed_trial speed2-random1 50000 row {
SELECT rowid FROM t1 ORDER BY random() LIMIT 20000
}
# Delete 20000 random rows from the table.
#
-speed_trial speed1-random-del1 20000 row {
+speed_trial speed2-random-del1 20000 row {
DELETE FROM t1 WHERE rowid IN
(SELECT rowid FROM t1 ORDER BY random() LIMIT 20000)
}
-do_test speed1-1.1 {
+do_test speed2-1.1 {
db one {SELECT count(*) FROM t1}
} 30000
# Delete 20000 more rows at random from the table.
#
-speed_trial speed1-random-del2 20000 row {
+speed_trial speed2-random-del2 20000 row {
DELETE FROM t1 WHERE rowid IN
(SELECT rowid FROM t1 ORDER BY random() LIMIT 20000)
}
-do_test speed1-1.2 {
+do_test speed2-1.2 {
db one {SELECT count(*) FROM t1}
} 10000
--- /dev/null
+#!/usr/bin/tclsh
+#
+# To build a single huge source file holding all of SQLite (or at
+# least the core components; the test harness, shell, and TCL
+# interface are omitted), first do
+#
+# make target_source
+#
+# Then run this script
+#
+# tclsh tool/mksqlite3c.tcl
+#
+# The combined SQLite source code will be written into sqlite3.c
+#
+
+# Open the output file and write a header comment at the beginning
+# of the file.
+#
+set out [open sqlite3.c w]
+puts $out \
+"/******************************************************************************
+** This file is an amalgamation of many separate source files from SQLite. By
+** pulling all the source files into this single unified source file, the
+** entire code can be compiled as a single translation unit, which allows the
+** compiler to do a better job of optimizing.
+*/"
+
+# These are the header files used by SQLite. The first time any of these
+# files are seen in a #include statement in the C code, include the complete
+# text of the file in-line. The file only needs to be included once.
+#
+foreach hdr {
+ btree.h
+ hash.h
+ keywordhash.h
+ opcodes.h
+ os_common.h
+ os.h
+ os_os2.h
+ pager.h
+ parse.h
+ sqlite3ext.h
+ sqlite3.h
+ sqliteInt.h
+ vdbe.h
+ vdbeInt.h
+} {
+ set available_hdr($hdr) 1
+}
+
+# 78 stars used for comment formatting.
+set s78 \
+{*****************************************************************************}
+
+# Insert a comment into the code
+#
+proc section_comment {text} {
+ global out s78
+ set n [string length $text]
+ set nstar [expr {60 - $n}]
+ set stars [string range $s78 0 $nstar]
+ puts $out "/************** $text $stars/"
+}
+
+# Read the source file named $filename and write it into the
+# sqlite3.c output file. If any #include statements are seen,
+** process them appropriately.
+#
+proc copy_file {filename} {
+ global seen_hdr available_hdr out
+ set tail [file tail $filename]
+ section_comment "Begin file $tail"
+ set in [open $filename r]
+ while {![eof $in]} {
+ set line [gets $in]
+ if {[regexp {^#\s*include\s+["<]([^">]+)[">]} $line all hdr]} {
+ if {[info exists available_hdr($hdr)]} {
+ if {$available_hdr($hdr)} {
+ if {$hdr!="os_common.h"} {
+ set available_hdr($hdr) 0
+ }
+ section_comment "Include $hdr in the middle of $tail"
+ copy_file tsrc/$hdr
+ section_comment "Continuing where we left off in $tail"
+ }
+ } elseif {![info exists seen_hdr($hdr)]} {
+ set seen_hdr($hdr) 1
+ puts $out $line
+ }
+ } elseif {[regexp {^#ifdef __cplusplus} $line]} {
+ puts $out "#if 0"
+ } elseif {[regexp {^#line} $line]} {
+ # Skip #line directives.
+ } else {
+ puts $out $line
+ }
+ }
+ close $in
+ section_comment "End of $tail"
+}
+
+
+# Process the source files. Process files containing commonly
+# used subroutines first in order to help the compiler find
+# inlining opportunities.
+#
+foreach file {
+ printf.c
+ random.c
+ utf.c
+ util.c
+ hash.c
+ opcodes.c
+
+ os.c
+ os_os2.c
+ os_unix.c
+ os_win.c
+
+ pager.c
+
+ btree.c
+
+ vdbefifo.c
+ vdbemem.c
+ vdbeaux.c
+ vdbeapi.c
+ vdbe.c
+
+ expr.c
+ alter.c
+ analyze.c
+ attach.c
+ auth.c
+ build.c
+ callback.c
+ complete.c
+ date.c
+ delete.c
+ func.c
+ insert.c
+ legacy.c
+ loadext.c
+ pragma.c
+ prepare.c
+ select.c
+ table.c
+ trigger.c
+ update.c
+ vacuum.c
+ vtab.c
+ where.c
+
+ parse.c
+
+ tokenize.c
+
+ main.c
+} {
+ copy_file tsrc/$file
+}
+
+if 0 {
+puts $out "#ifdef SQLITE_TEST"
+foreach file {
+ test1.c
+ test2.c
+ test3.c
+ test4.c
+ test5.c
+ test6.c
+ test7.c
+ test8.c
+ test_async.c
+ test_autoext.c
+ test_loadext.c
+ test_md5.c
+ test_schema.c
+ test_server.c
+ test_tclvar.c
+} {
+ copy_file ../sqlite/src/$file
+}
+puts $out "#endif /* SQLITE_TEST */"
+puts $out "#ifdef SQLITE_TCL"
+copy_file ../sqlite/src/tclsqlite.c
+puts $out "#endif /* SQLITE_TCL */"
+}
+
+close $out
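As a quick sanity check of the generated file (a sketch only, not something this
script does), the amalgamation can be compiled together with a trivial program,
for example something like "cc app.c sqlite3.c" plus whatever thread and dl
libraries the platform needs, assuming the sqlite3.h produced by
"make target_source" is on the include path:

/* app.c -- minimal sketch, not part of this check-in, for verifying
** that the single-file sqlite3.c builds and links on its own.
*/
#include <stdio.h>
#include "sqlite3.h"

int main(void){
  sqlite3 *db;
  if( sqlite3_open(":memory:", &db)!=SQLITE_OK ){
    fprintf(stderr, "cannot open an in-memory database\n");
    return 1;
  }
  printf("amalgamation works: SQLite version %s\n", sqlite3_libversion());
  sqlite3_close(db);
  return 0;
}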