--- Normalize a tokenized query into a canonical fingerprint string.
-- Comments are dropped, identifiers are backquoted, string/number
-- literals are replaced by "?", and everything else (keywords,
-- operators, function names) is upper-cased.
--
-- @tparam table tokens array of { token_name = ..., text = ... } tables
--   as produced by the proxy tokenizer
-- @treturn string the normalized query (tokens separated by spaces)
function normalize_query(tokens)
    local n_q = ""

    for _, token in ipairs(tokens) do
        local name = token["token_name"]

        if name == "TK_COMMENT" then
            -- comments carry no semantics for the fingerprint; skip them
        elseif name == "TK_LITERAL" then
            -- identifier: quote it so the fingerprint is unambiguous
            n_q = n_q .. "`" .. token.text .. "` "
        elseif name == "TK_STRING"
            or name == "TK_INTEGER"
            or name == "TK_FLOAT" then
            -- literal value: replace with a placeholder so queries that
            -- differ only in constants normalize to the same string
            n_q = n_q .. "? "
        elseif name == "TK_FUNCTION" then
            -- function names get no trailing space: the "(" token follows
            n_q = n_q .. token.text:upper()
        else
            -- keywords, operators, punctuation
            n_q = n_q .. token.text:upper() .. " "
        end
    end

    return n_q
end
-- Tokenizer helper module shipped with MySQL Proxy; provides
-- tokenize() and normalize() used by read_query() below.
local tokenizer = require("proxy.tokenizer")
function read_query(packet)
49
27
if packet:byte() == proxy.COM_QUERY then
50
local tokens = proxy.tokenize(packet:sub(2))
28
local tokens = tokenizer.tokenize(packet:sub(2))
53
for i, token in ipairs(tokens) do
54
32
-- print the token and what we know about it
55
local txt = token["text"]
56
if token["token_name"] == 'TK_STRING' then
57
txt = string.format("%q", txt)
33
local token = tokens[i]
34
local txt = token["text"]
35
if token["token_name"] == 'TK_STRING' then
36
txt = string.format("%q", txt)
59
38
-- print(i .. ": " .. " { " .. token["token_name"] .. ", " .. token["text"] .. " }" )
60
39
print(i .. ": " .. " { " .. token["token_name"] .. ", " .. txt .. " }" )
63
print("normalized query: " .. normalize_query(tokens))
44
print("normalized query: " .. tokenizer.normalize(tokens))