--[[
   Copyright (C) 2007 MySQL AB

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; version 2 of the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
--]]
module("proxy.tokenizer", package.seeall)
-- Normalize a query so that differently-parameterized executions of the
-- same statement collapse into one canonical string:
--
-- * strip comments
-- * turn constants (strings, integers, floats) into ?
-- * quote literals (identifiers) with backticks
-- * turn all other tokens into uppercase
--
-- @param tokens a array of tokens, as produced by tokenize()
-- @return normalized SQL query as a string
function normalize(tokens)
	local n_q = ""

	for i, token in ipairs(tokens) do
		-- normalize the query
		if token["token_name"] == "TK_COMMENT" then
			-- comments are dropped entirely
		elseif token["token_name"] == "TK_LITERAL" then
			-- identifiers are backtick-quoted, case preserved
			n_q = n_q .. "`" .. token.text .. "` "
		elseif token["token_name"] == "TK_STRING" then
			n_q = n_q .. "? "
		elseif token["token_name"] == "TK_INTEGER" then
			n_q = n_q .. "? "
		elseif token["token_name"] == "TK_FLOAT" then
			n_q = n_q .. "? "
		elseif token["token_name"] == "TK_FUNCTION" then
			-- no trailing space: the opening "(" follows directly
			n_q = n_q .. token.text:upper()
		else
			n_q = n_q .. token.text:upper() .. " "
		end
	end

	return n_q
end
-- call the included tokenizer
--
-- this function is only a wrapper around the tokenizer built into the
-- proxy core, and exists mostly for consistency and documentation reasons
--
-- @param packet the SQL statement to tokenize
-- @return an array of tokens (as consumed by normalize(), bare_tokens(), ...)
function tokenize(packet)
	return proxy.tokenize(packet)
end
-- return the first command token of a statement
--
-- * strips the leading comments
--
-- @param tokens an array of tokens, as produced by tokenize()
-- @return the first non-comment token, or nil if the statement has none
function first_stmt_token(tokens)
	for i, token in ipairs(tokens) do
		-- normalize the query
		if token["token_name"] == "TK_COMMENT" then
			-- skip leading comments
		elseif token["token_name"] == "TK_LITERAL" then
			-- commit and rollback are LITERALs
			return token
		else
			-- TK_SQL_* are normal tokens
			return token
		end
	end

	return nil
end
--[[
	returns an array of simple token values
	without id and name, and stripping all comments

	@param tokens an array of tokens, as produced by the tokenize() function
	@param quote_strings : if set, the string tokens will be quoted
--]]
function bare_tokens (tokens, quote_strings)
	local simple_tokens = {}
	for i, token in ipairs(tokens) do
		if (token['token_name'] == 'TK_STRING') and quote_strings then
			-- %q re-quotes the string so it is safe to embed in a query
			table.insert(simple_tokens, string.format('%q', token['text'] ))
		elseif (token['token_name'] ~= 'TK_COMMENT') then
			table.insert(simple_tokens, token['text'])
		end
	end
	return simple_tokens
end
--[[
	Returns a text query from an array of tokens, stripping off comments

	@param tokens an array of tokens, as produced by the tokenize() function
	@param start_item ignores tokens before this one (default: 1)
	@param end_item ignores token after this one (default: #tokens)
--]]
function tokens_to_query ( tokens , start_item, end_item )
	if not start_item then
		start_item = 1
	end
	if not end_item then
		end_item = #tokens
	end

	local new_query = ''
	local counter = 0
	for i, token in ipairs(tokens) do
		counter = counter + 1
		if (counter >= start_item and counter <= end_item ) then
			if (token['token_name'] == 'TK_STRING') then
				-- re-quote strings so the rebuilt query stays valid
				new_query = new_query .. string.format('%q', token['text'] )
			elseif token['token_name'] ~= 'TK_COMMENT' then
				new_query = new_query .. token['text']
			end
			-- no space after a function name (its "(" follows directly)
			-- and none for stripped comments
			if (token['token_name'] ~= 'TK_FUNCTION')
			   and
			   (token['token_name'] ~= 'TK_COMMENT')
			then
				new_query = new_query .. ' '
			end
		end
	end

	return new_query
end
--[[
	returns an array of tokens, stripping off all comments

	NOTE(review): despite the name, each entry is the token's bare text
	(token['text']), not the full token table — callers rely on this.

	@param tokens an array of tokens, as produced by the tokenize() function
	@see tokenize, bare_tokens
--]]
function tokens_without_comments (tokens)
	local new_tokens = {}
	for i, token in ipairs(tokens) do
		if (token['token_name'] ~= 'TK_COMMENT') then
			table.insert(new_tokens, token['text'])
		end
	end
	return new_tokens
end