import fs from 'fs';
import path from 'path';
import { SparkSQL } from 'src/parser/spark';
import { CaretPosition, EntityContextType } from 'src/parser/common/types';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'completeAfterSyntaxError.sql'),
    'utf-8'
);

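// Verifies that code completion keeps working when the input contains
// statements with syntax errors (see fixtures/completeAfterSyntaxError.sql).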
describe('SparkSQL Complete After Error Statement', () => {
    const spark = new SparkSQL();

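    // Statement-level keywords expected when completing at the start of a new statement.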
    const keywordResult = [
        'WITH',
        'SELECT',
        'MAP',
        'REDUCE',
        'FROM',
        'TABLE',
        'VALUES',
        'INSERT',
        'DELETE',
        'UPDATE',
        'MERGE',
        'USE',
        'SET',
        'CREATE',
        'ALTER',
        'DROP',
        'SHOW',
        'REPLACE',
        'ANALYZE',
        'DECLARE',
        'EXPLAIN',
        'DESC',
        'DESCRIBE',
        'COMMENT',
        'REFRESH',
        'CACHE',
        'UNCACHE',
        'CLEAR',
        'LOAD',
        'TRUNCATE',
        'MSCK',
        'REPAIR',
        'ADD',
        'LIST',
        'RESET',
        'OPTIMIZE',
        'GRANT',
        'REVOKE',
        'EXPORT',
        'IMPORT',
        'LOCK',
        'UNLOCK',
        'START',
        'COMMIT',
        'ROLLBACK',
        'DFS',
    ];

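    // The caret positions below point into fixtures/completeAfterSyntaxError.sql.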
    test('Syntax error but end with semi and in multiline', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 2,
        };
        const keywords = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.keywords;
        expect(keywords).toMatchUnorderedArrary(keywordResult);
    });

    test('Syntax error but end with semi and in single line', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 20,
        };
        const keywords = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.keywords;
        expect(keywords).toMatchUnorderedArrary(keywordResult);
    });

    test('Syntax error but start with keyword and in multiline', () => {
        const pos: CaretPosition = {
            lineNumber: 10,
            column: 13,
        };
        const suggestion = spark.getSuggestionAtCaretPosition(syntaxSql, pos);
        expect(suggestion).not.toBeUndefined();

        // entity suggestions: a table name is expected at the caret
        const syntaxes = suggestion?.syntax;
        expect(syntaxes.length).toBe(1);
        expect(syntaxes[0].syntaxContextType).toBe(EntityContextType.TABLE);

        const keywords = suggestion?.keywords;
        expect(keywords.length).toBe(1);
        expect(keywords[0]).toBe('TABLE');
    });

    test('Syntax error but start with keyword and in single line', () => {
        const pos: CaretPosition = {
            lineNumber: 12,
            column: 30,
        };
        const suggestion = spark.getSuggestionAtCaretPosition(syntaxSql, pos);
        expect(suggestion).not.toBeUndefined();

        // entity suggestions: a table name is expected at the caret
        const syntaxes = suggestion?.syntax;
        expect(syntaxes.length).toBe(1);
        expect(syntaxes[0].syntaxContextType).toBe(EntityContextType.TABLE);

        const keywords = suggestion?.keywords;
        expect(keywords.length).toBe(1);
        expect(keywords[0]).toBe('TABLE');
    });
});