OLD | NEW |
1 # 2007 June 21 | 1 # 2007 June 21 |
2 # | 2 # |
3 # The author disclaims copyright to this source code. In place of | 3 # The author disclaims copyright to this source code. In place of |
4 # a legal notice, here is a blessing: | 4 # a legal notice, here is a blessing: |
5 # | 5 # |
6 # May you do good and not evil. | 6 # May you do good and not evil. |
7 # May you find forgiveness for yourself and forgive others. | 7 # May you find forgiveness for yourself and forgive others. |
8 # May you share freely, never taking more than you give. | 8 # May you share freely, never taking more than you give. |
9 # | 9 # |
10 #************************************************************************* | 10 #************************************************************************* |
11 # This file implements regression tests for SQLite library. The focus | 11 # This file implements regression tests for SQLite library. The focus |
12 # of this script is testing the pluggable tokenizer feature of the | 12 # of this script is testing the pluggable tokenizer feature of the |
13 # FTS3 module. | 13 # FTS3 module. |
14 # | 14 # |
15 # $Id: fts3atoken.test,v 1.1 2007/08/20 17:38:42 shess Exp $ | 15 # $Id: fts3atoken.test,v 1.1 2007/08/20 17:38:42 shess Exp $ |
16 # | 16 # |
17 | 17 |
18 set testdir [file dirname $argv0] | 18 set testdir [file dirname $argv0] |
19 source $testdir/tester.tcl | 19 source $testdir/tester.tcl |
20 | 20 |
21 # If SQLITE_ENABLE_FTS3 is not defined, omit this file. | 21 # If SQLITE_ENABLE_FTS3 is not defined, omit this file. |
22 ifcapable !fts3 { | 22 ifcapable !fts3 { |
23 finish_test | 23 finish_test |
24 return | 24 return |
25 } | 25 } |
26 | 26 |
27 set ::testprefix fts3token | 27 set ::testprefix fts3atoken |
28 | 28 |
29 proc escape_string {str} { | 29 proc escape_string {str} { |
30 set out "" | 30 set out "" |
31 foreach char [split $str ""] { | 31 foreach char [split $str ""] { |
32 scan $char %c i | 32 scan $char %c i |
33 if {$i<=127} { | 33 if {$i<=127} { |
34 append out $char | 34 append out $char |
35 } else { | 35 } else { |
36 append out [format {\x%.4x} $i] | 36 append out [format {\x%.4x} $i] |
37 } | 37 } |
38 } | 38 } |
39 set out | 39 set out |
40 } | 40 } |
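
A quick check of the escape_string helper (hypothetical test name, illustrative only): codepoints above 127 come out as \xNNNN escapes, while plain ASCII passes through unchanged.

    do_test fts3atoken-escape-demo {
      escape_string "a\u0e2d"
    } {a\x0e2d}
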
41 | 41 |
42 #-------------------------------------------------------------------------- | 42 #-------------------------------------------------------------------------- |
43 # Test cases fts3token-1.* form the warm-body test for the SQL scalar | 43 # Test cases fts3atoken-1.* form the warm-body test for the SQL scalar |
44 # function fts3_tokenizer(). The procedure is as follows: | 44 # function fts3_tokenizer(). The procedure is as follows: |
45 # | 45 # |
46 # 1: Verify that there is no such fts3 tokenizer as 'blah'. | 46 # 1: Verify that there is no such fts3 tokenizer as 'blah'. |
47 # | 47 # |
48 # 2: Query for the built-in tokenizer 'simple'. Insert a copy of the | 48 # 2: Query for the built-in tokenizer 'simple'. Insert a copy of the |
49 # retrieved value as tokenizer 'blah'. | 49 # retrieved value as tokenizer 'blah'. |
50 # | 50 # |
51 # 3: Test that the value returned for tokenizer 'blah' is now the | 51 # 3: Test that the value returned for tokenizer 'blah' is now the |
52 # same as that retrieved for 'simple'. | 52 # same as that retrieved for 'simple'. |
53 # | 53 # |
54 # 4: Test that it is now possible to create an fts3 table using | 54 # 4: Test that it is now possible to create an fts3 table using |
55 # tokenizer 'blah' (it was not possible in step 1). | 55 # tokenizer 'blah' (it was not possible in step 1). |
56 # | 56 # |
57 # 5: Test that the table created to use tokenizer 'blah' is usable. | 57 # 5: Test that the table created to use tokenizer 'blah' is usable. |
58 # | 58 # |
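
Condensed, steps 2 through 4 come down to the sketch below (table name t1copy is hypothetical, and the two-argument form must be enabled as done just below). The blob returned by fts3_tokenizer('simple') encodes a native pointer to that tokenizer's sqlite3_tokenizer_module structure; handing the blob back as the second argument registers the same module under the new name.

    db eval { SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) }
    db eval { CREATE VIRTUAL TABLE t1copy USING fts3(content, tokenize blah) }
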
59 do_test fts3token-1.1 { | 59 sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1 |
| 60 do_test fts3atoken-1.1 { |
60 catchsql { | 61 catchsql { |
61 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah); | 62 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah); |
62 } | 63 } |
63 } {1 {unknown tokenizer: blah}} | 64 } {1 {unknown tokenizer: blah}} |
64 do_test fts3token-1.2 { | 65 do_test fts3atoken-1.2 { |
65 execsql { | 66 execsql { |
66 SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL; | 67 SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL; |
67 } | 68 } |
68 } {0} | 69 } {0} |
69 do_test fts3token-1.3 { | 70 do_test fts3atoken-1.3 { |
70 execsql { | 71 execsql { |
71 SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple'); | 72 SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple'); |
72 } | 73 } |
73 } {1} | 74 } {1} |
74 do_test fts3token-1.4 { | 75 do_test fts3atoken-1.4 { |
75 catchsql { | 76 catchsql { |
76 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah); | 77 CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah); |
77 } | 78 } |
78 } {0 {}} | 79 } {0 {}} |
79 do_test fts3token-1.5 { | 80 do_test fts3atoken-1.5 { |
80 execsql { | 81 execsql { |
81 INSERT INTO t1(content) VALUES('There was movement at the station'); | 82 INSERT INTO t1(content) VALUES('There was movement at the station'); |
82 INSERT INTO t1(content) VALUES('For the word has passed around'); | 83 INSERT INTO t1(content) VALUES('For the word has passed around'); |
83 INSERT INTO t1(content) VALUES('That the colt from ol regret had got away'); | 84 INSERT INTO t1(content) VALUES('That the colt from ol regret had got'); |
84 SELECT content FROM t1 WHERE content MATCH 'movement' | 85 SELECT content FROM t1 WHERE content MATCH 'movement' |
85 } | 86 } |
86 } {{There was movement at the station}} | 87 } {{There was movement at the station}} |
87 | 88 |
| 89 sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 0 |
| 90 do_catchsql_test 1.6 { |
| 91 SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL; |
| 92 } {1 {fts3tokenize disabled}} |
| 93 |
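
Test 1.6 exercises the gate on the two-argument form of fts3_tokenizer(): because that form installs a raw pointer packed into a blob, it is disabled by default and must be enabled per connection with SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER. A minimal sketch of the on/off behaviour, mirroring tests 1.2 and 1.6:

    sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 0
    catchsql { SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL }
    # => {1 {fts3tokenize disabled}}
    sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1
    catchsql { SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL }
    # => {0 0}
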
| 94 |
88 #-------------------------------------------------------------------------- | 95 #-------------------------------------------------------------------------- |
89 # Test cases fts3token-2.* test error cases in the scalar function based | 96 # Test cases fts3atoken-2.* test error cases in the scalar function based |
90 # API for getting and setting tokenizers. | 97 # API for getting and setting tokenizers. |
91 # | 98 # |
92 do_test fts3token-2.1 { | 99 do_test fts3atoken-2.1 { |
93 catchsql { | 100 catchsql { |
94 SELECT fts3_tokenizer('nosuchtokenizer'); | 101 SELECT fts3_tokenizer('nosuchtokenizer'); |
95 } | 102 } |
96 } {1 {unknown tokenizer: nosuchtokenizer}} | 103 } {1 {unknown tokenizer: nosuchtokenizer}} |
97 | 104 |
98 #-------------------------------------------------------------------------- | 105 #-------------------------------------------------------------------------- |
99 # Test cases fts3token-3.* test the three built-in tokenizers with a | 106 # Test cases fts3atoken-3.* test the three built-in tokenizers with a |
100 # simple input string via the built-in test function. This is as much | 107 # simple input string via the built-in test function. This is as much |
101 # to test the test function as the tokenizer implementations. | 108 # to test the test function as the tokenizer implementations. |
102 # | 109 # |
103 do_test fts3token-3.1 { | 110 do_test fts3atoken-3.1 { |
104 execsql { | 111 execsql { |
105 SELECT fts3_tokenizer_test('simple', 'I don''t see how'); | 112 SELECT fts3_tokenizer_test('simple', 'I don''t see how'); |
106 } | 113 } |
107 } {{0 i I 1 don don 2 t t 3 see see 4 how how}} | 114 } {{0 i I 1 don don 2 t t 3 see see 4 how how}} |
108 do_test fts3token-3.2 { | 115 do_test fts3atoken-3.2 { |
109 execsql { | 116 execsql { |
110 SELECT fts3_tokenizer_test('porter', 'I don''t see how'); | 117 SELECT fts3_tokenizer_test('porter', 'I don''t see how'); |
111 } | 118 } |
112 } {{0 i I 1 don don 2 t t 3 see see 4 how how}} | 119 } {{0 i I 1 don don 2 t t 3 see see 4 how how}} |
113 ifcapable icu { | 120 ifcapable icu { |
114 do_test fts3token-3.3 { | 121 do_test fts3atoken-3.3 { |
115 execsql { | 122 execsql { |
116 SELECT fts3_tokenizer_test('icu', 'I don''t see how'); | 123 SELECT fts3_tokenizer_test('icu', 'I don''t see how'); |
117 } | 124 } |
118 } {{0 i I 1 don't don't 2 see see 3 how how}} | 125 } {{0 i I 1 don't don't 2 see see 3 how how}} |
119 } | 126 } |
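
fts3_tokenizer_test() returns one triple per token: its position, the token as it would be indexed, and the input text it was derived from, which is why 'simple' and 'porter' split don't at the apostrophe while ICU keeps it intact. None of the inputs above trigger any stemming; a hypothetical extra case (assuming standard Porter behaviour) would:

    do_test fts3atoken-3.4 {
      execsql { SELECT fts3_tokenizer_test('porter', 'running runs') }
    } {{0 run running 1 run runs}}
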
120 | 127 |
121 #-------------------------------------------------------------------------- | 128 #-------------------------------------------------------------------------- |
122 # Test cases fts3token-4.* test the ICU tokenizer. In practice, this | 129 # Test cases fts3atoken-4.* test the ICU tokenizer. In practice, this |
123 # tokenizer only has two modes - "thai" and "everybody else". Some other | 130 # tokenizer only has two modes - "thai" and "everybody else". Some other |
124 # Asian languages (Lao, Khmer etc.) require the same special treatment as | 131 # Asian languages (Lao, Khmer etc.) require the same special treatment as |
125 # Thai, but ICU doesn't support them yet. | 132 # Thai, but ICU doesn't support them yet. |
126 # | 133 # |
127 ifcapable icu { | 134 ifcapable icu { |
128 | 135 |
129 proc do_icu_test {name locale input output} { | 136 proc do_icu_test {name locale input output} { |
130 set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }] | 137 set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }] |
131 do_test $name { | 138 do_test $name { |
132 lindex $::out 0 | 139 lindex $::out 0 |
133 } $output | 140 } $output |
134 } | 141 } |
135 | 142 |
136 do_icu_test fts3token-4.1 en_US {} {} | 143 do_icu_test fts3atoken-4.1 en_US {} {} |
137 do_icu_test fts3token-4.2 en_US {Test cases fts3} [list \ | 144 do_icu_test fts3atoken-4.2 en_US {Test cases fts3} [list \ |
138 0 test Test 1 cases cases 2 fts3 fts3 | 145 0 test Test 1 cases cases 2 fts3 fts3 |
139 ] | 146 ] |
140 | 147 |
141 # The following test shows that ICU is smart enough to recognise | 148 # The following test shows that ICU is smart enough to recognise |
142 # Thai characters, even when the locale is set to English/United | 149 # Thai characters, even when the locale is set to English/United |
143 # States. | 150 # States. |
144 # | 151 # |
145 set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a" | 152 set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a" |
146 set output "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 " | 153 set output "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 " |
147 append output "1 \u0e19\u0e30 \u0e19\u0e30 " | 154 append output "1 \u0e19\u0e30 \u0e19\u0e30 " |
148 append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a" | 155 append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a" |
149 | 156 |
150 do_icu_test fts3token-4.3 th_TH $input $output | 157 do_icu_test fts3atoken-4.3 th_TH $input $output |
151 do_icu_test fts3token-4.4 en_US $input $output | 158 do_icu_test fts3atoken-4.4 en_US $input $output |
152 | 159 |
153 # ICU handles an unknown locale by falling back to the default. | 160 # ICU handles an unknown locale by falling back to the default. |
154 # So this is not an error. | 161 # So this is not an error. |
155 do_icu_test fts3token-4.5 MiddleOfTheOcean $input $output | 162 do_icu_test fts3atoken-4.5 MiddleOfTheOcean $input $output |
156 | 163 |
157 set longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire" | 164 set longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire" |
158 append longtoken "AReallocInTheIcuTokenizerCode" | 165 append longtoken "AReallocInTheIcuTokenizerCode" |
159 | 166 |
160 set input "short tokens then " | 167 set input "short tokens then " |
161 append input $longtoken | 168 append input $longtoken |
162 set output "0 short short " | 169 set output "0 short short " |
163 append output "1 tokens tokens " | 170 append output "1 tokens tokens " |
164 append output "2 then then " | 171 append output "2 then then " |
165 append output "3 [string tolower $longtoken] $longtoken" | 172 append output "3 [string tolower $longtoken] $longtoken" |
166 | 173 |
167 do_icu_test fts3token-4.6 MiddleOfTheOcean $input $output | 174 do_icu_test fts3atoken-4.6 MiddleOfTheOcean $input $output |
168 do_icu_test fts3token-4.7 th_TH $input $output | 175 do_icu_test fts3atoken-4.7 th_TH $input $output |
169 do_icu_test fts3token-4.8 en_US $input $output | 176 do_icu_test fts3atoken-4.8 en_US $input $output |
170 | 177 |
171 do_execsql_test 5.1 { | 178 do_execsql_test 5.1 { |
172 CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US); | 179 CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US); |
173 INSERT INTO x1(name) VALUES(NULL); | 180 INSERT INTO x1(name) VALUES(NULL); |
174 INSERT INTO x1(name) VALUES(NULL); | 181 INSERT INTO x1(name) VALUES(NULL); |
175 DELETE FROM x1; | 182 DELETE FROM x1; |
176 } | 183 } |
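
Test 5.1 also shows how tokenizer arguments are passed in a table declaration: everything after the tokenizer name (here the ICU locale en_US) goes through to the tokenizer's xCreate method. A hypothetical variant selecting the Thai locale:

    db eval { CREATE VIRTUAL TABLE x2 USING fts3(doc, TOKENIZE icu th_TH) }
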
177 | 184 |
178 proc cp_to_str {codepoint_list} { | 185 proc cp_to_str {codepoint_list} { |
179 set fmt [string repeat %c [llength $codepoint_list]] | 186 set fmt [string repeat %c [llength $codepoint_list]] |
180 eval [list format $fmt] $codepoint_list | 187 eval [list format $fmt] $codepoint_list |
181 } | 188 } |
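
cp_to_str builds a format string of one %c per list element, turning a list of Unicode codepoints into the corresponding string, which test 5.2 then uses to construct CJK input. For illustration:

    cp_to_str {72 105}   ;# returns "Hi" (codepoints for "H" and "i")
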
182 | 189 |
183 do_test 5.2 { | 190 do_test 5.2 { |
184 set str [cp_to_str {19968 26085 32822 32645 27874 23433 20986}] | 191 set str [cp_to_str {19968 26085 32822 32645 27874 23433 20986}] |
185 execsql { INSERT INTO x1 VALUES($str) } | 192 execsql { INSERT INTO x1 VALUES($str) } |
186 } {} | 193 } {} |
187 } | 194 } |
188 | 195 |
189 do_test fts3token-internal { | 196 do_test fts3atoken-internal { |
190 execsql { SELECT fts3_tokenizer_internal_test() } | 197 execsql { SELECT fts3_tokenizer_internal_test() } |
191 } {ok} | 198 } {ok} |
192 | 199 |
193 #------------------------------------------------------------------------- | 200 #------------------------------------------------------------------------- |
194 # Test empty tokenizer names. | 201 # Test empty tokenizer names. |
195 # | 202 # |
196 do_catchsql_test 6.1.1 { | 203 do_catchsql_test 6.1.1 { |
197 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=""); | 204 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=""); |
198 } {1 {unknown tokenizer: }} | 205 } {1 {unknown tokenizer: }} |
199 do_catchsql_test 6.1.2 { | 206 do_catchsql_test 6.1.2 { |
200 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=); | 207 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=); |
201 } {1 {unknown tokenizer: }} | 208 } {1 {unknown tokenizer: }} |
202 do_catchsql_test 6.1.3 { | 209 do_catchsql_test 6.1.3 { |
203 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=" "); | 210 CREATE VIRTUAL TABLE t3 USING fts4(tokenize=" "); |
204 } {1 {unknown tokenizer: }} | 211 } {1 {unknown tokenizer: }} |
205 | 212 |
206 do_catchsql_test 6.2.1 { | 213 do_catchsql_test 6.2.1 { |
207 SELECT fts3_tokenizer(NULL); | 214 SELECT fts3_tokenizer(NULL); |
208 } {1 {unknown tokenizer: }} | 215 } {1 {unknown tokenizer: }} |
| 216 |
| 217 sqlite3_db_config db SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER 1 |
209 do_catchsql_test 6.2.2 { | 218 do_catchsql_test 6.2.2 { |
210 SELECT fts3_tokenizer(NULL, X'1234567812345678'); | 219 SELECT fts3_tokenizer(NULL, X'1234567812345678'); |
211 } {1 {argument type mismatch}} | 220 } {1 {argument type mismatch}} |
212 do_catchsql_test 6.2.3 { | 221 do_catchsql_test 6.2.3 { |
213 SELECT fts3_tokenizer(NULL, X'12345678'); | 222 SELECT fts3_tokenizer(NULL, X'12345678'); |
214 } {1 {argument type mismatch}} | 223 } {1 {argument type mismatch}} |
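
Both 6.2.2 and 6.2.3 fail on the NULL first argument; trying an 8-byte and then a 4-byte blob additionally ensures that, whatever the platform's pointer width, one of the two also has the wrong size for the sizeof(void*) check on the second argument (a reading of the C-side guard, offered tentatively). With a valid name, the width check alone produces the same error on a mismatched platform:

    catchsql { SELECT fts3_tokenizer('blah', X'12345678') }
    # => {1 {argument type mismatch}}  (hypothetical; 64-bit build)
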
215 | 224 |
216 | 225 |
217 finish_test | 226 finish_test |