ReactOS 0.4.14-dev-583-g2a1ba2c
tokenize.c File Reference
#include <ctype.h>
#include <stdarg.h>
#include <stdlib.h>
#include "windef.h"
#include "winbase.h"
#include "wine/unicode.h"
#include "query.h"
#include "sql.tab.h"
Include dependency graph for tokenize.c:

Go to the source code of this file.

Classes

struct  Keyword
 

Macros

#define MAX_TOKEN_LEN   11
 

Typedefs

typedef struct Keyword Keyword
 

Functions

static int compKeyword (const void *m1, const void *m2)
 
static int sqliteKeywordCode (const WCHAR *z, int n)
 
static int isDigit (WCHAR c)
 
static int isSpace (WCHAR c)
 
int sqliteGetToken (const WCHAR *z, int *tokenType, int *skip)
 

Variables

static const WCHAR addW [] = {'A','D','D'}
 
static const WCHAR alterW [] = {'A','L','T','E','R'}
 
static const WCHAR andW [] = {'A','N','D'}
 
static const WCHAR byW [] = {'B','Y'}
 
static const WCHAR charW [] = {'C','H','A','R'}
 
static const WCHAR characterW [] = {'C','H','A','R','A','C','T','E','R'}
 
static const WCHAR createW [] = {'C','R','E','A','T','E'}
 
static const WCHAR deleteW [] = {'D','E','L','E','T','E'}
 
static const WCHAR distinctW [] = {'D','I','S','T','I','N','C','T'}
 
static const WCHAR dropW [] = {'D','R','O','P'}
 
static const WCHAR freeW [] = {'F','R','E','E'}
 
static const WCHAR fromW [] = {'F','R','O','M'}
 
static const WCHAR holdW [] = {'H','O','L','D'}
 
static const WCHAR insertW [] = {'I','N','S','E','R','T'}
 
static const WCHAR intW [] = {'I','N','T'}
 
static const WCHAR integerW [] = {'I','N','T','E','G','E','R'}
 
static const WCHAR intoW [] = {'I','N','T','O'}
 
static const WCHAR isW [] = {'I','S'}
 
static const WCHAR keyW [] = {'K','E','Y'}
 
static const WCHAR likeW [] = {'L','I','K','E'}
 
static const WCHAR localizableW [] = {'L','O','C','A','L','I','Z','A','B','L','E'}
 
static const WCHAR longW [] = {'L','O','N','G'}
 
static const WCHAR longcharW [] = {'L','O','N','G','C','H','A','R'}
 
static const WCHAR notW [] = {'N','O','T'}
 
static const WCHAR nullW [] = {'N','U','L','L'}
 
static const WCHAR objectW [] = {'O','B','J','E','C','T'}
 
static const WCHAR orW [] = {'O','R'}
 
static const WCHAR orderW [] = {'O','R','D','E','R'}
 
static const WCHAR primaryW [] = {'P','R','I','M','A','R','Y'}
 
static const WCHAR selectW [] = {'S','E','L','E','C','T'}
 
static const WCHAR setW [] = {'S','E','T'}
 
static const WCHAR shortW [] = {'S','H','O','R','T'}
 
static const WCHAR tableW [] = {'T','A','B','L','E'}
 
static const WCHAR temporaryW [] = {'T','E','M','P','O','R','A','R','Y'}
 
static const WCHAR updateW [] = {'U','P','D','A','T','E'}
 
static const WCHAR valuesW [] = {'V','A','L','U','E','S'}
 
static const WCHAR whereW [] = {'W','H','E','R','E'}
 
static const Keyword aKeywordTable []
 
static const char isIdChar []
 

Macro Definition Documentation

◆ MAX_TOKEN_LEN

#define MAX_TOKEN_LEN   11

Definition at line 40 of file tokenize.c.

Typedef Documentation

◆ Keyword

Definition at line 33 of file tokenize.c.

Function Documentation

◆ compKeyword()

static int compKeyword ( const void *  m1,
const void *  m2 
)
static

Definition at line 127 of file tokenize.c.

{
    const Keyword *k1 = m1, *k2 = m2;
    int ret, len = min( k1->len, k2->len );

    if ((ret = memicmpW( k1->name, k2->name, len ))) return ret;
    if (k1->len < k2->len) return -1;
    else if (k1->len > k2->len) return 1;
    return 0;
}

Referenced by sqliteKeywordCode().
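
compKeyword() compares two entries case-insensitively over the shorter of the two names and, when one name is a prefix of the other, puts the shorter entry first. aKeywordTable must already be sorted in exactly this order for the bsearch() call in sqliteKeywordCode() to find matches. Below is a self-contained sketch of the same ordering rule, using narrow strings and POSIX strncasecmp() in place of Wine's memicmpW(); the struct, table, and names are illustrative only and are not part of tokenize.c.

#include <stdio.h>
#include <stdlib.h>
#include <strings.h>   /* strncasecmp (POSIX) */

/* Narrow-char stand-in for the Keyword entries of tokenize.c. */
struct kw { const char *name; unsigned int len; int token; };

/* Same rule as compKeyword(): case-insensitive over the shorter length,
 * then the shorter name sorts first. */
static int comp_kw( const void *m1, const void *m2 )
{
    const struct kw *k1 = m1, *k2 = m2;
    unsigned int len = k1->len < k2->len ? k1->len : k2->len;
    int ret = strncasecmp( k1->name, k2->name, len );

    if (ret) return ret;
    if (k1->len < k2->len) return -1;
    if (k1->len > k2->len) return 1;
    return 0;
}

int main(void)
{
    /* The table must already be in comp_kw() order, which is why
     * "INT" precedes "INTEGER", which precedes "INTO". */
    static const struct kw table[] = {
        { "INT", 3, 1 }, { "INTEGER", 7, 2 }, { "INTO", 4, 3 },
    };
    struct kw key = { "integer", 7, 0 };
    const struct kw *r = bsearch( &key, table, sizeof(table)/sizeof(table[0]),
                                  sizeof(table[0]), comp_kw );

    printf( "%s\n", r ? "keyword" : "identifier" );   /* prints "keyword" */
    return 0;
}

The length tie-break matters because the case-insensitive comparison only looks at the first len characters: without it, INT and INTEGER would compare equal and the table could not be searched reliably.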

◆ isDigit()

static int isDigit ( WCHAR  c)
inline static

Definition at line 191 of file tokenize.c.

{
    return c >= '0' && c <= '9';
}

Referenced by sqliteGetToken().

◆ isSpace()

static int isSpace ( WCHAR  c)
inline static

Definition at line 199 of file tokenize.c.

{
    return c == ' ' || c == '\t' || c == '\n' || c == '\f';
}

Referenced by sqliteGetToken().

◆ sqliteGetToken()

int sqliteGetToken ( const WCHAR *  z,
int *  tokenType,
int *  skip 
)

Definition at line 209 of file tokenize.c.

{
  int i;

  *skip = 0;
  switch( *z ){
    case ' ': case '\t': case '\n': case '\f':
      for(i=1; isSpace(z[i]); i++){}
      *tokenType = TK_SPACE;
      return i;
    case '-':
      if( z[1]==0 ) return -1;
      *tokenType = TK_MINUS;
      return 1;
    case '(':
      *tokenType = TK_LP;
      return 1;
    case ')':
      *tokenType = TK_RP;
      return 1;
    case '*':
      *tokenType = TK_STAR;
      return 1;
    case '=':
      *tokenType = TK_EQ;
      return 1;
    case '<':
      if( z[1]=='=' ){
        *tokenType = TK_LE;
        return 2;
      }else if( z[1]=='>' ){
        *tokenType = TK_NE;
        return 2;
      }else{
        *tokenType = TK_LT;
        return 1;
      }
    case '>':
      if( z[1]=='=' ){
        *tokenType = TK_GE;
        return 2;
      }else{
        *tokenType = TK_GT;
        return 1;
      }
    case '!':
      if( z[1]!='=' ){
        *tokenType = TK_ILLEGAL;
        return 2;
      }else{
        *tokenType = TK_NE;
        return 2;
      }
    case '?':
      *tokenType = TK_WILDCARD;
      return 1;
    case ',':
      *tokenType = TK_COMMA;
      return 1;
    case '`': case '\'': {
      int delim = z[0];
      for(i=1; z[i]; i++){
        if( z[i]==delim )
          break;
      }
      if( z[i] ) i++;
      if( delim == '`' )
        *tokenType = TK_ID;
      else
        *tokenType = TK_STRING;
      return i;
    }
    case '.':
      if( !isDigit(z[1]) ){
        *tokenType = TK_DOT;
        return 1;
      }
      /* Fall through */
    case '0': case '1': case '2': case '3': case '4':
    case '5': case '6': case '7': case '8': case '9':
      *tokenType = TK_INTEGER;
      for(i=1; isDigit(z[i]); i++){}
      return i;
    case '[':
      for(i=1; z[i] && z[i-1]!=']'; i++){}
      *tokenType = TK_ID;
      return i;
    default:
      if( !isIdChar[*z] ){
        break;
      }
      for(i=1; isIdChar[z[i]]; i++){}
      *tokenType = sqliteKeywordCode(z, i);
      if( *tokenType == TK_ID && z[i] == '`' ) *skip = 1;
      return i;
  }
  *tokenType = TK_ILLEGAL;
  return 1;
}

Referenced by sql_lex().
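
sql_lex() calls sqliteGetToken() in a loop, discarding whitespace tokens and advancing past each token plus any skip characters. The sketch below shows that calling pattern, written as if it lived in the same translation unit (sqliteGetToken() is file-local); handle_token() is a hypothetical consumer, TK_SPACE comes from sql.tab.h, and this is not the actual sql_lex() implementation.

/* Hedged sketch of a driver loop over sqliteGetToken(). */
static void tokenize_query( const WCHAR *query,
                            void (*handle_token)( int type, const WCHAR *text, int len ) )
{
    const WCHAR *p = query;
    int tokenType, skip;

    while (*p)
    {
        int len = sqliteGetToken( p, &tokenType, &skip );
        if (len <= 0) break;                  /* e.g. a lone trailing '-' returns -1 */

        if (tokenType != TK_SPACE)            /* the real lexer also drops whitespace */
            handle_token( tokenType, p, len );

        p += len + skip;                      /* skip steps over a trailing '`' after an ID */
    }
}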

◆ sqliteKeywordCode()

static int sqliteKeywordCode ( const WCHAR *  z,
int  n 
)
static

Definition at line 142 of file tokenize.c.

{
    Keyword key, *r;

    if( n>MAX_TOKEN_LEN )
        return TK_ID;

    key.tokenType = 0;
    key.name = z;
    key.len = n;
    r = bsearch( &key, aKeywordTable, ARRAY_SIZE(aKeywordTable),
                 sizeof(Keyword), compKeyword );
    if( r )
        return r->tokenType;
    return TK_ID;
}

Referenced by sqliteGetToken().

Variable Documentation

◆ addW

const WCHAR addW[] = {'A','D','D'}
static

Definition at line 42 of file tokenize.c.

◆ aKeywordTable

const Keyword aKeywordTable[]
static

Definition at line 84 of file tokenize.c.

Referenced by sqliteKeywordCode().

◆ alterW

const WCHAR alterW[] = {'A','L','T','E','R'}
static

Definition at line 43 of file tokenize.c.

◆ andW

const WCHAR andW[] = {'A','N','D'}
static

Definition at line 44 of file tokenize.c.

◆ byW

const WCHAR byW[] = {'B','Y'}
static

Definition at line 45 of file tokenize.c.

◆ characterW

const WCHAR characterW[] = {'C','H','A','R','A','C','T','E','R'}
static

Definition at line 47 of file tokenize.c.

Referenced by string_to_unit().

◆ charW

const WCHAR charW[] = {'C','H','A','R'}
static

Definition at line 46 of file tokenize.c.

◆ createW

const WCHAR createW[] = {'C','R','E','A','T','E'}
static

Definition at line 48 of file tokenize.c.

◆ deleteW

const WCHAR deleteW[] = {'D','E','L','E','T','E'}
static

Definition at line 49 of file tokenize.c.

◆ distinctW

const WCHAR distinctW[] = {'D','I','S','T','I','N','C','T'}
static

Definition at line 50 of file tokenize.c.

◆ dropW

const WCHAR dropW[] = {'D','R','O','P'}
static

Definition at line 51 of file tokenize.c.

◆ freeW

const WCHAR freeW[] = {'F','R','E','E'}
static

Definition at line 52 of file tokenize.c.

Referenced by parse_com_class_threadingmodel().

◆ fromW

const WCHAR fromW[] = {'F','R','O','M'}
static

Definition at line 53 of file tokenize.c.

◆ holdW

const WCHAR holdW[] = {'H','O','L','D'}
static

Definition at line 54 of file tokenize.c.

◆ insertW

const WCHAR insertW[] = {'I','N','S','E','R','T'}
static

Definition at line 55 of file tokenize.c.

◆ integerW

const WCHAR integerW[] = {'I','N','T','E','G','E','R'}
static

Definition at line 57 of file tokenize.c.

◆ intoW

const WCHAR intoW[] = {'I','N','T','O'}
static

Definition at line 58 of file tokenize.c.

◆ intW

const WCHAR intW[] = {'I','N','T'}
static

Definition at line 56 of file tokenize.c.

◆ isIdChar

const char isIdChar[]
static
Initial value:
= {
0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
}

Definition at line 168 of file tokenize.c.

Referenced by sqliteGetToken().
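
The table is indexed by character code: a nonzero entry means the character may appear in an unquoted identifier (letters, digits, '-', '_', and every value from 0x80 upward), which is what drives the default: arm of sqliteGetToken(). Below is a standalone illustration of the main rule the table encodes, with a predicate standing in for the 256-entry array; it is not the actual lookup code.

#include <stdio.h>

/* Stand-in predicate for indexing isIdChar[]; encodes the same main rule. */
static int is_id_char( unsigned int c )
{
    if (c >= 0x80) return 1;                       /* all high values are id chars */
    return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') ||
           (c >= '0' && c <= '9') || c == '-' || c == '_';
}

int main(void)
{
    /* "My_Table-1" scans as one identifier; the space ends it. */
    const char *s = "My_Table-1 WHERE";
    int i;

    for (i = 0; is_id_char( (unsigned char)s[i] ); i++) {}
    printf( "identifier length: %d\n", i );        /* prints 10 */
    return 0;
}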

◆ isW

const WCHAR isW[] = {'I','S'}
static

Definition at line 59 of file tokenize.c.

◆ keyW

const WCHAR keyW[] = {'K','E','Y'}
static

Definition at line 60 of file tokenize.c.

◆ likeW

const WCHAR likeW[] = {'L','I','K','E'}
static

Definition at line 61 of file tokenize.c.

◆ localizableW

const WCHAR localizableW[] = {'L','O','C','A','L','I','Z','A','B','L','E'}
static

Definition at line 62 of file tokenize.c.

◆ longcharW

const WCHAR longcharW[] = {'L','O','N','G','C','H','A','R'}
static

Definition at line 64 of file tokenize.c.

◆ longW

const WCHAR longW[] = {'L','O','N','G'}
static

Definition at line 63 of file tokenize.c.

Referenced by test_PathYetAnotherMakeUniqueName().

◆ notW

const WCHAR notW[] = {'N','O','T'}
static

Definition at line 65 of file tokenize.c.

◆ nullW

const WCHAR nullW[] = {'N','U','L','L'}
static

Definition at line 66 of file tokenize.c.

◆ objectW

const WCHAR objectW[] = {'O','B','J','E','C','T'}
static

Definition at line 67 of file tokenize.c.

◆ orderW

const WCHAR orderW[] = {'O','R','D','E','R'}
static

Definition at line 69 of file tokenize.c.

◆ orW

const WCHAR orW[] = {'O','R'}
static

Definition at line 68 of file tokenize.c.

◆ primaryW

const WCHAR primaryW[] = {'P','R','I','M','A','R','Y'}
static

Definition at line 70 of file tokenize.c.

◆ selectW

const WCHAR selectW[] = {'S','E','L','E','C','T'}
static

Definition at line 71 of file tokenize.c.

◆ setW

const WCHAR setW[] = {'S','E','T'}
static

Definition at line 72 of file tokenize.c.

◆ shortW

const WCHAR shortW[] = {'S','H','O','R','T'}
static

Definition at line 73 of file tokenize.c.

Referenced by test_PathYetAnotherMakeUniqueName().

◆ tableW

const WCHAR tableW[] = {'T','A','B','L','E'}
static

Definition at line 74 of file tokenize.c.

◆ temporaryW

const WCHAR temporaryW[] = {'T','E','M','P','O','R','A','R','Y'}
static

Definition at line 75 of file tokenize.c.

◆ updateW

const WCHAR updateW[] = {'U','P','D','A','T','E'}
static

Definition at line 76 of file tokenize.c.

◆ valuesW

const WCHAR valuesW[] = {'V','A','L','U','E','S'}
static

Definition at line 77 of file tokenize.c.

◆ whereW

const WCHAR whereW[] = {'W','H','E','R','E'}
static

Definition at line 78 of file tokenize.c.