#include "sql-tokenizer.h"

/* expand a string literal into a (string, length) argument pair */
#define C(x) x, sizeof(x) - 1
START_TEST(test_tokenizer) {
14
GPtrArray *tokens = NULL;
17
tokens = g_ptr_array_new();
19
sql_tokenizer(tokens, C("SELEcT \"qq-end\"\"\", \"\"\"qq-start\", \"'\"`qq-mixed''\" FROM a AS `b`, `ABC``FOO` "));
21
for (i = 0; i < tokens->len; i++) {
22
sql_token *token = tokens->pdata[i];
24
#define T(t_id, t_text) \
25
fail_unless(token->token_id == t_id, "token[%d].token_id should be '%s', got '%s'", i, sql_token_get_name(t_id), sql_token_get_name(token->token_id)); \
26
fail_unless(0 == strcmp(token->text->str, t_text), "token[%d].text should be '%s', got '%s'", i, t_text, token->text->str); \
29
case 0: T(TK_SQL_SELECT, "SELEcT"); break;
30
case 1: T(TK_STRING, "qq-end\""); break;
31
case 2: T(TK_COMMA, ","); break;
32
case 3: T(TK_STRING, "\"qq-start"); break;
33
case 4: T(TK_COMMA, ","); break;
34
case 5: T(TK_STRING, "'\"`qq-mixed''"); break;
35
case 6: T(TK_SQL_FROM, "FROM"); break;
36
case 7: T(TK_LITERAL, "a"); break;
37
case 8: T(TK_SQL_AS, "AS"); break;
38
case 9: T(TK_LITERAL, "b"); break;
39
case 10: T(TK_COMMA, ","); break;
40
case 11: T(TK_LITERAL, "ABC`FOO"); break;
44
* a self-writing test-case
46
printf("case %"G_GSIZE_FORMAT": T(%s, \"%s\"); break;\n", i, sql_token_get_name(token->token_id), token->text->str);
51
for (i = 0; i < tokens->len; i++) {
52
sql_token *token = tokens->pdata[i];
54
sql_token_free(token);
56
g_ptr_array_free(tokens, TRUE);
61
Suite *sql_tokenizer_suite(void) {
62
Suite *s = suite_create("sql-tokenizer");
63
TCase *tc_core = tcase_create("Core");
65
suite_add_tcase (s, tc_core);
66
tcase_add_test(tc_core, test_tokenizer);
73
Suite *s = sql_tokenizer_suite();
74
SRunner *sr = srunner_create(s);
76
srunner_run_all(sr, CK_ENV);
78
nf = srunner_ntests_failed(sr);
82
return (nf == 0) ? EXIT_SUCCESS : EXIT_FAILURE;