|
5 | 5 |
|
6 | 6 | import sqlparse |
7 | 7 | from sqlparse import sql, tokens as T |
| 8 | +from sqlparse.lexer import Lexer |
8 | 9 |
|
9 | 10 |
|
10 | 11 | def test_parse_tokenize(): |
@@ -489,3 +490,45 @@ def test_parenthesis(): |
489 | 490 | T.Newline, |
490 | 491 | T.Newline, |
491 | 492 | T.Punctuation] |
| 493 | + |
| 494 | + |
def test_configurable_syntax():
    """Keywords registered via ``Lexer.add_keywords`` change tokenization.

    Before registration, BACON/SPAM/EGGS are folded into plain identifier
    tokens; after registration they tokenize as the configured ttypes.
    The lexer is restored in a ``finally`` block so a failing assertion
    cannot leak the custom syntax into later tests.
    """
    # NOTE: deliberately not named ``sql`` -- that would shadow the
    # ``sqlparse.sql`` module imported at the top of this file.
    statement = """select * from foo BACON SPAM EGGS;"""
    tokens = sqlparse.parse(statement)[0]

    # Default syntax: the unknown words are lumped into identifier tokens.
    assert [
        (t.ttype, t.value)
        for t in tokens
        if t.ttype not in sqlparse.tokens.Whitespace
    ] == [
        (sqlparse.tokens.Keyword.DML, "select"),
        (sqlparse.tokens.Wildcard, "*"),
        (sqlparse.tokens.Keyword, "from"),
        (None, "foo BACON"),
        (None, "SPAM EGGS"),
        (sqlparse.tokens.Punctuation, ";"),
    ]

    try:
        Lexer().add_keywords(
            {
                "BACON": sqlparse.tokens.Name.Builtin,
                "SPAM": sqlparse.tokens.Keyword,
                "EGGS": sqlparse.tokens.Keyword,
            }
        )

        tokens = sqlparse.parse(statement)[0]

        # Custom syntax: each registered keyword is now its own token.
        assert [
            (t.ttype, t.value)
            for t in tokens
            if t.ttype not in sqlparse.tokens.Whitespace
        ] == [
            (sqlparse.tokens.Keyword.DML, "select"),
            (sqlparse.tokens.Wildcard, "*"),
            (sqlparse.tokens.Keyword, "from"),
            (None, "foo"),
            (sqlparse.tokens.Name.Builtin, "BACON"),
            (sqlparse.tokens.Keyword, "SPAM"),
            (sqlparse.tokens.Keyword, "EGGS"),
            (sqlparse.tokens.Punctuation, ";"),
        ]
    finally:
        # Reset the syntax so later tests see the default lexer, even if
        # one of the assertions above failed.
        Lexer().default_initialization()
0 commit comments