--[[
  Copyright (c) 2007, 2008, Oracle and/or its affiliates. All rights reserved.

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; version 2 of the
  License.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
  02110-1301  USA
--]]
require("mysql.tokenizer")
module("proxy.tokenizer", package.seeall)
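-- Usage sketch (an assumption, not part of this module): a mysql-proxy Lua
-- script would normally pull this module in with
--
--   local tokenizer = require("proxy.tokenizer")
--   local tokens    = tokenizer.tokenize(query)
--
-- where `query` is the statement text of a COM_QUERY packet (the packet
-- without its leading command byte).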
    -- see http://www.lua.org/pil/11.6.html for more
    local stack = {}

    -- literals that are SQL commands if they appear at the start
    local literal_keywords = {
        -- (further start-keyword entries are elided in this excerpt)
        ["START"] = { "TRANSACTION" },
    }

    for i = 1, #tokens do
        local token = tokens[i]
        -- normalize the query
        if token["token_name"] == "TK_COMMENT" then
            -- plain comments are dropped
        elseif token["token_name"] == "TK_COMMENT_MYSQL" then
            -- a /*!... */ comment
            --
            -- we can't look into the comment as we don't know which server-version
            -- we will talk to, pass it on verbatim
            table.insert(stack, "/*!" .. token.text .. "*/ ")
        elseif token["token_name"] == "TK_LITERAL" then
            if token.text:sub(1, 1) == "@" then
                -- append session variables as is
                table.insert(stack, token.text .. " ")
            elseif #stack == 0 then -- nothing is on the stack yet
                local u_text = token.text:upper()

                if literal_keywords[u_text] then
                    table.insert(stack, u_text .. " ")
                else
                    table.insert(stack, "`" .. token.text .. "` ")
                end
            elseif #stack == 1 then
                local u_text = token.text:upper()
                -- stack entries carry a trailing space, strip it for the lookup
                local starting_keyword = stack[1]:sub(1, -2)

                if literal_keywords[starting_keyword] and
                   literal_keywords[starting_keyword][1] == u_text then
                    table.insert(stack, u_text .. " ")
                else
                    table.insert(stack, "`" .. token.text .. "` ")
                end
            else
                table.insert(stack, "`" .. token.text .. "` ")
            end
        elseif token["token_name"] == "TK_STRING" or
               token["token_name"] == "TK_INTEGER" or
               token["token_name"] == "TK_FLOAT" then
-- this function is only a wrapper and exists mostly
-- for consistency and documentation reasons
function tokenize(packet)
    return tokenizer.tokenize(packet)
end
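-- Usage sketch (assumed; the query text below is only illustrative):
--
--   local tokens = tokenize("SELECT 1")
--   -- each entry is a Lua table carrying at least
--   --   token["token_name"]  e.g. "TK_LITERAL", "TK_INTEGER", "TK_COMMENT"
--   --   token["text"]        the raw text of the token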
-- * strips the leading comments
function first_stmt_token(tokens)
    for i = 1, #tokens do
        local token = tokens[i]

        if token["token_name"] == "TK_COMMENT" then
            -- skip leading comments
        elseif token["token_name"] == "TK_LITERAL" then
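            -- Usage sketch (assumed behaviour: the elided remainder of this
            -- loop presumably returns the first non-comment token):
            --
            --   local token = first_stmt_token(tokenize("/* hint */ COMMIT"))
            --   -- token.text would be "COMMIT"; the leading comment is skipped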
function bare_tokens (tokens, quote_strings)
    local simple_tokens = {}

    for i = 1, #tokens do
        local token = tokens[i]

        if (token['token_name'] == 'TK_STRING') and quote_strings then
            table.insert(simple_tokens, string.format('%q', token['text']))
        elseif (token['token_name'] ~= 'TK_COMMENT') then
            table.insert(simple_tokens, token['text'])
        end
    end

    return simple_tokens
end
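-- Usage sketch (assumed): flatten a token list back into plain strings,
-- optionally re-quoting string literals via string.format('%q'):
--
--   local words = bare_tokens(tokenize("SELECT 'a' FROM t"), true)
--   -- words would be roughly { "SELECT", '"a"', "FROM", "t" }, comments dropped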
function tokens_without_comments (tokens)
    local new_tokens = {}

    for i = 1, #tokens do
        local token = tokens[i]

        if (token['token_name'] ~= 'TK_COMMENT') then
            table.insert(new_tokens, token['text'])
        end
    end

    return new_tokens
end
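-- Usage sketch (assumed): like bare_tokens(), but string tokens keep their
-- bare text and only comments are filtered out:
--
--   local words = tokens_without_comments(tokenize("SELECT /* hint */ 1"))
--   -- words would be roughly { "SELECT", "1" }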