persistentstorage/sqlite3api/TEST/TclScript/tokenize.test
changeset 0 08ec8eefde2f
equal deleted inserted replaced
-1:000000000000 0:08ec8eefde2f
       
     1 # 2008 July 7
       
     2 #
       
     3 # The author disclaims copyright to this source code.  In place of
       
     4 # a legal notice, here is a blessing:
       
     5 #
       
     6 #    May you do good and not evil.
       
     7 #    May you find forgiveness for yourself and forgive others.
       
     8 #    May you share freely, never taking more than you give.
       
     9 #
       
    10 #***********************************************************************
       
    11 # This file implements regression tests for SQLite library.  The
       
    12 # focus of this script is testing the tokenizer
       
    13 #
       
    14 # $Id: tokenize.test,v 1.1 2008/07/08 00:06:51 drh Exp $
       
    15 #
       
    16 
       
    17 set testdir [file dirname $argv0]
       
    18 source $testdir/tester.tcl
       
    19 
       
    20 do_test tokenize-1.1 {
       
    21   catchsql {SELECT 1.0e+}
       
    22 } {1 {unrecognized token: "1.0e"}}
       
    23 do_test tokenize-1.2 {
       
    24   catchsql {SELECT 1.0E+}
       
    25 } {1 {unrecognized token: "1.0E"}}
       
    26 do_test tokenize-1.3 {
       
    27   catchsql {SELECT 1.0e-}
       
    28 } {1 {unrecognized token: "1.0e"}}
       
    29 do_test tokenize-1.4 {
       
    30   catchsql {SELECT 1.0E-}
       
    31 } {1 {unrecognized token: "1.0E"}}
       
    32 do_test tokenize-1.5 {
       
    33   catchsql {SELECT 1.0e+/}
       
    34 } {1 {unrecognized token: "1.0e"}}
       
    35 do_test tokenize-1.6 {
       
    36   catchsql {SELECT 1.0E+:}
       
    37 } {1 {unrecognized token: "1.0E"}}
       
    38 do_test tokenize-1.7 {
       
    39   catchsql {SELECT 1.0e-:}
       
    40 } {1 {unrecognized token: "1.0e"}}
       
    41 do_test tokenize-1.8 {
       
    42   catchsql {SELECT 1.0E-/}
       
    43 } {1 {unrecognized token: "1.0E"}}
       
    44 do_test tokenize-1.9 {
       
    45   catchsql {SELECT 1.0F+5}
       
    46 } {1 {unrecognized token: "1.0F"}}
       
    47 do_test tokenize-1.10 {
       
    48   catchsql {SELECT 1.0d-10}
       
    49 } {1 {unrecognized token: "1.0d"}}
       
    50 do_test tokenize-1.11 {
       
    51   catchsql {SELECT 1.0e,5}
       
    52 } {1 {unrecognized token: "1.0e"}}
       
    53 do_test tokenize-1.12 {
       
    54   catchsql {SELECT 1.0E.10}
       
    55 } {1 {unrecognized token: "1.0E"}}
       
    56 
       
    57 do_test tokenize-2.1 {
       
    58   catchsql {SELECT 1, 2 /*}
       
    59 } {1 {near "*": syntax error}}
       
    60 do_test tokenize-2.2 {
       
    61   catchsql {SELECT 1, 2 /* }
       
    62 } {0 {1 2}}
       
    63 
       
    64 
       
    65 finish_test