~diego-fmpwizard/mysql-proxy/fmpwizard

Viewing changes to examples/tutorial-tokenize.lua

  • Committer: Diego Medina
  • Date: 2009-02-07 03:57:48 UTC
  • mfrom: (565.2.5 tutorials)
  • Revision ID: diego.medina@sun.com-20090207035748-f1xs4mbq1x584us0
merge from trunk

--- examples/tutorial-tokenize.lua
+++ examples/tutorial-tokenize.lua
@@ -21,46 +21,27 @@
 
 --]]
 
-function normalize_query(tokens)
-        local n_q = ""
-
-        for i, token in ipairs(tokens) do
-                -- normalize the query
-                if token["token_name"] == "TK_COMMENT" then
-                elseif token["token_name"] == "TK_LITERAL" then
-                        n_q = n_q .. "`" .. token.text .. "` "
-                elseif token["token_name"] == "TK_STRING" then
-                        n_q = n_q .. "? "
-                elseif token["token_name"] == "TK_INTEGER" then
-                        n_q = n_q .. "? "
-                elseif token["token_name"] == "TK_FLOAT" then
-                        n_q = n_q .. "? "
-                elseif token["token_name"] == "TK_FUNCTION" then
-                        n_q = n_q .. token.text:upper()
-                else
-                        n_q = n_q .. token.text:upper() .. " "
-                end
-        end
-
-        return n_q
-end
+local tokenizer = require("proxy.tokenizer")
 
 function read_query(packet)
         if packet:byte() == proxy.COM_QUERY then
-                local tokens = proxy.tokenize(packet:sub(2))
+                local tokens = tokenizer.tokenize(packet:sub(2))
 
                 -- just for debug
-                for i, token in ipairs(tokens) do
+                for i = 1, #tokens do
                         -- print the token and what we know about it
-                        local txt = token["text"]
-                        if token["token_name"] == 'TK_STRING' then
-                                txt = string.format("%q", txt)
-                        end
+                        local token = tokens[i]
+                        local txt = token["text"]
+                        if token["token_name"] == 'TK_STRING' then
+                                txt = string.format("%q", txt)
+                        end
                         -- print(i .. ": " .. " { " .. token["token_name"] .. ", " .. token["text"] .. " }" )
                         print(i .. ": " .. " { " .. token["token_name"] .. ", " .. txt .. " }" )
+
+
                 end
 
-                print("normalized query: " .. normalize_query(tokens))
+                print("normalized query: " .. tokenizer.normalize(tokens))
                 print("")
         end
 end
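
For reference, here is a minimal sketch of examples/tutorial-tokenize.lua as it reads after this merge, reconstructed from the new side of the hunk above. The header comment block that precedes the hunk is outside the diff and is elided; the explanatory comments are additions for this note, not part of the revision.

local tokenizer = require("proxy.tokenizer")

function read_query(packet)
        -- only inspect COM_QUERY packets; the first byte is the command type
        if packet:byte() == proxy.COM_QUERY then
                -- strip the command byte, then tokenize the raw query text
                local tokens = tokenizer.tokenize(packet:sub(2))

                -- just for debug: dump every token and its type
                for i = 1, #tokens do
                        local token = tokens[i]
                        local txt = token["text"]
                        if token["token_name"] == 'TK_STRING' then
                                -- %q quotes the string so embedded whitespace is visible
                                txt = string.format("%q", txt)
                        end
                        print(i .. ": " .. " { " .. token["token_name"] .. ", " .. txt .. " }" )
                end

                -- normalization now comes from the shared proxy.tokenizer module
                -- rather than the local normalize_query() this revision removes
                print("normalized query: " .. tokenizer.normalize(tokens))
                print("")
        end
end

Judging by the normalize_query() body removed above, normalization upper-cases keywords, wraps literals in backticks, and replaces string and numeric constants with ?. Assuming tokenizer.normalize() behaves the same way, a query such as select * from t where id = 1 should print roughly: normalized query: SELECT * FROM `t` WHERE `id` = ?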