// grammar-spec.js — specs for the Atom `language-sql` grammar package.
  1. describe("SQL grammar", () => {
  2. let grammar = null;
  3. beforeEach(() => {
  4. waitsForPromise(() => atom.packages.activatePackage("language-sql"));
  5. runs(() => grammar = atom.grammars.grammarForScopeName("source.sql"));
  6. });
  7. it("parses the grammar", () => {
  8. expect(grammar).toBeDefined();
  9. expect(grammar.scopeName).toBe("source.sql");
  10. });
  11. it("uses not as a keyword", () => {
  12. const {tokens} = grammar.tokenizeLine('NOT');
  13. expect(tokens[0]).toEqual({value: 'NOT', scopes: ['source.sql', 'keyword.other.not.sql']});
  14. });
  15. it('tokenizes integers', () => {
  16. const {tokens} = grammar.tokenizeLine('12345');
  17. expect(tokens[0]).toEqual({value: '12345', scopes: ['source.sql', 'constant.numeric.sql']});
  18. });
  19. it('tokenizes integers ending words', () => {
  20. let {tokens} = grammar.tokenizeLine('field1');
  21. expect(tokens[0]).toEqual({value: 'field1', scopes: ['source.sql']});
  22. ({tokens} = grammar.tokenizeLine('2field'));
  23. expect(tokens[0]).toEqual({value: '2field', scopes: ['source.sql']});
  24. ({tokens} = grammar.tokenizeLine('link_from_1_to_2'));
  25. expect(tokens[0]).toEqual({value: 'link_from_1_to_2', scopes: ['source.sql']});
  26. ({tokens} = grammar.tokenizeLine('create table t1'));
  27. expect(tokens[4]).toEqual({value: 't1', scopes: ['source.sql', 'meta.create.sql', 'entity.name.function.sql']});
  28. });
  29. it('tokenizes numbers with decimals in them', () => {
  30. let {tokens} = grammar.tokenizeLine('123.45');
  31. expect(tokens[0]).toEqual({value: '123.45', scopes: ['source.sql', 'constant.numeric.sql']});
  32. ({tokens} = grammar.tokenizeLine('123.'));
  33. expect(tokens[0]).toEqual({value: '123.', scopes: ['source.sql', 'constant.numeric.sql']});
  34. ({tokens} = grammar.tokenizeLine('.123'));
  35. expect(tokens[0]).toEqual({value: '.123', scopes: ['source.sql', 'constant.numeric.sql']});
  36. });
  37. it('tokenizes add', () => {
  38. const {tokens} = grammar.tokenizeLine('ADD CONSTRAINT');
  39. expect(tokens[0]).toEqual({value: 'ADD', scopes: ['source.sql', 'meta.add.sql', 'keyword.other.create.sql']});
  40. });
  41. it('tokenizes create', () => {
  42. const {tokens} = grammar.tokenizeLine('CREATE TABLE');
  43. expect(tokens[0]).toEqual({value: 'CREATE', scopes: ['source.sql', 'meta.create.sql', 'keyword.other.create.sql']});
  44. });
  45. it('does not tokenize create for non-SQL keywords', () => {
  46. const {tokens} = grammar.tokenizeLine('CREATE TABLEOHNO');
  47. expect(tokens[0]).toEqual({value: 'CREATE TABLEOHNO', scopes: ['source.sql']});
  48. });
  49. it('tokenizes create if not exists', () => {
  50. const {tokens} = grammar.tokenizeLine('CREATE TABLE IF NOT EXISTS t1');
  51. expect(tokens[0]).toEqual({value: 'CREATE', scopes: ['source.sql', 'meta.create.sql', 'keyword.other.create.sql']});
  52. expect(tokens[2]).toEqual({value: 'TABLE', scopes: ['source.sql', 'meta.create.sql', 'keyword.other.sql' ]});
  53. expect(tokens[4]).toEqual({value: 'IF NOT EXISTS', scopes: ['source.sql', 'meta.create.sql', 'keyword.other.DML.sql' ]});
  54. expect(tokens[6]).toEqual({value: 't1', scopes: ['source.sql', 'meta.create.sql', 'entity.name.function.sql' ]});
  55. });
  56. it('tokenizes drop', () => {
  57. const {tokens} = grammar.tokenizeLine('DROP CONSTRAINT');
  58. expect(tokens[0]).toEqual({value: 'DROP', scopes: ['source.sql', 'meta.drop.sql', 'keyword.other.drop.sql']});
  59. });
  60. it('does not tokenize drop for non-SQL keywords', () => {
  61. const {tokens} = grammar.tokenizeLine('DROP CONSTRAINTOHNO');
  62. expect(tokens[0]).toEqual({value: 'DROP CONSTRAINTOHNO', scopes: ['source.sql']});
  63. });
  64. it('tokenizes drop if exists', () => {
  65. const {tokens} = grammar.tokenizeLine('DROP TABLE IF EXISTS t1');
  66. expect(tokens[0]).toEqual({value: 'DROP', scopes: ['source.sql', 'meta.drop.sql', 'keyword.other.drop.sql']});
  67. expect(tokens[2]).toEqual({value: 'TABLE', scopes: ['source.sql', 'meta.drop.sql', 'keyword.other.sql' ]});
  68. expect(tokens[4]).toEqual({value: 'IF EXISTS', scopes: ['source.sql', 'meta.drop.sql', 'keyword.other.DML.sql' ]});
  69. expect(tokens[6]).toEqual({value: 't1', scopes: ['source.sql', 'meta.drop.sql', 'entity.name.function.sql' ]});
  70. });
  71. it('tokenizes with', () => {
  72. const {tokens} = grammar.tokenizeLine('WITH field');
  73. expect(tokens[0]).toEqual({value: 'WITH', scopes: ['source.sql', 'keyword.other.DML.sql']});
  74. });
  75. it('tokenizes conditional expressions', () => {
  76. let {tokens} = grammar.tokenizeLine('COALESCE(a,b)');
  77. expect(tokens[0]).toEqual({value: 'COALESCE', scopes: ['source.sql', 'keyword.other.conditional.sql']});
  78. ({tokens} = grammar.tokenizeLine('NVL(a,b)'));
  79. expect(tokens[0]).toEqual({value: 'NVL', scopes: ['source.sql', 'keyword.other.conditional.sql']});
  80. ({tokens} = grammar.tokenizeLine('NULLIF(a,b)'));
  81. expect(tokens[0]).toEqual({value: 'NULLIF', scopes: ['source.sql', 'keyword.other.conditional.sql']});
  82. });
  83. it('tokenizes unique', () => {
  84. const {tokens} = grammar.tokenizeLine('UNIQUE(id)');
  85. expect(tokens[0]).toEqual({value: 'UNIQUE', scopes: ['source.sql', 'storage.modifier.sql']});
  86. });
  87. it('tokenizes scalar functions', () => {
  88. const {tokens} = grammar.tokenizeLine('SELECT CURRENT_DATE');
  89. expect(tokens[2]).toEqual({value: 'CURRENT_DATE', scopes: ['source.sql', 'support.function.scalar.sql']});
  90. });
  91. it('tokenizes math functions', () => {
  92. const {tokens} = grammar.tokenizeLine('SELECT ABS(-4)');
  93. expect(tokens[2]).toEqual({value: 'ABS', scopes: ['source.sql', 'support.function.math.sql']});
  94. });
  95. it('tokenizes window functions', () => {
  96. const {tokens} = grammar.tokenizeLine('SELECT ROW_NUMBER()');
  97. expect(tokens[2]).toEqual({value: 'ROW_NUMBER', scopes: ['source.sql', 'support.function.window.sql']});
  98. });
  99. it("quotes strings", () => {
  100. const {tokens} = grammar.tokenizeLine('"Test"');
  101. expect(tokens[0]).toEqual({value: '"', scopes: ['source.sql', 'string.quoted.double.sql', 'punctuation.definition.string.begin.sql']});
  102. expect(tokens[1]).toEqual({value: 'Test', scopes: ['source.sql', 'string.quoted.double.sql']});
  103. expect(tokens[2]).toEqual({value: '"', scopes: ['source.sql', 'string.quoted.double.sql', 'punctuation.definition.string.end.sql']});
  104. });
  105. it('tokenizes storage types', () => {
  106. const lines = grammar.tokenizeLines(`\
  107. datetime
  108. double precision
  109. integer\
  110. `);
  111. expect(lines[0][0]).toEqual({value: 'datetime', scopes: ['source.sql', 'storage.type.sql']});
  112. expect(lines[1][0]).toEqual({value: 'double precision', scopes: ['source.sql', 'storage.type.sql']});
  113. expect(lines[2][0]).toEqual({value: 'integer', scopes: ['source.sql', 'storage.type.sql']});
  114. });
  115. it('tokenizes storage types with an optional argument', () => {
  116. const lines = grammar.tokenizeLines(`\
  117. bit varying
  118. int()
  119. timestamptz(1)\
  120. `);
  121. expect(lines[0][0]).toEqual({value: 'bit varying', scopes: ['source.sql', 'storage.type.sql']});
  122. expect(lines[1][0]).toEqual({value: 'int', scopes: ['source.sql', 'storage.type.sql']});
  123. expect(lines[1][1]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.begin.sql']});
  124. expect(lines[1][2]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.end.sql']});
  125. expect(lines[2][0]).toEqual({value: 'timestamptz', scopes: ['source.sql', 'storage.type.sql']});
  126. expect(lines[2][1]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.begin.sql']});
  127. expect(lines[2][2]).toEqual({value: '1', scopes: ['source.sql', 'constant.numeric.sql']});
  128. expect(lines[2][3]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.end.sql']});
  129. });
  130. it('tokenizes storage types with two optional arguments', () => {
  131. const lines = grammar.tokenizeLines(`\
  132. decimal
  133. decimal(1)
  134. numeric(1,1)\
  135. `);
  136. expect(lines[0][0]).toEqual({value: 'decimal', scopes: ['source.sql', 'storage.type.sql']});
  137. expect(lines[1][0]).toEqual({value: 'decimal', scopes: ['source.sql', 'storage.type.sql']});
  138. expect(lines[1][1]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.begin.sql']});
  139. expect(lines[1][2]).toEqual({value: '1', scopes: ['source.sql', 'constant.numeric.sql']});
  140. expect(lines[1][3]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.end.sql']});
  141. expect(lines[2][0]).toEqual({value: 'numeric', scopes: ['source.sql', 'storage.type.sql']});
  142. expect(lines[2][1]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.begin.sql']});
  143. expect(lines[2][2]).toEqual({value: '1', scopes: ['source.sql', 'constant.numeric.sql']});
  144. expect(lines[2][3]).toEqual({value: ',', scopes: ['source.sql', 'punctuation.separator.parameters.comma.sql']});
  145. expect(lines[2][4]).toEqual({value: '1', scopes: ['source.sql', 'constant.numeric.sql']});
  146. expect(lines[2][5]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.end.sql']});
  147. });
  148. it('tokenizes storage types with time zones', () => {
  149. const lines = grammar.tokenizeLines(`\
  150. time
  151. time(1) with time zone
  152. timestamp without time zone\
  153. `);
  154. expect(lines[0][0]).toEqual({value: 'time', scopes: ['source.sql', 'storage.type.sql']});
  155. expect(lines[1][0]).toEqual({value: 'time', scopes: ['source.sql', 'storage.type.sql']});
  156. expect(lines[1][1]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.begin.sql']});
  157. expect(lines[1][2]).toEqual({value: '1', scopes: ['source.sql', 'constant.numeric.sql']});
  158. expect(lines[1][3]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.parameters.bracket.round.end.sql']});
  159. expect(lines[1][5]).toEqual({value: 'with time zone', scopes: ['source.sql', 'storage.type.sql']});
  160. expect(lines[2][0]).toEqual({value: 'timestamp', scopes: ['source.sql', 'storage.type.sql']});
  161. expect(lines[2][2]).toEqual({value: 'without time zone', scopes: ['source.sql', 'storage.type.sql']});
  162. });
  163. it('tokenizes comments', () => {
  164. let {tokens} = grammar.tokenizeLine('-- comment');
  165. expect(tokens[0]).toEqual({value: '--', scopes: ['source.sql', 'comment.line.double-dash.sql', 'punctuation.definition.comment.sql']});
  166. expect(tokens[1]).toEqual({value: ' comment', scopes: ['source.sql', 'comment.line.double-dash.sql']});
  167. ({tokens} = grammar.tokenizeLine('AND -- WITH'));
  168. expect(tokens[0]).toEqual({value: 'AND', scopes: ['source.sql', 'keyword.other.DML.sql']});
  169. expect(tokens[2]).toEqual({value: '--', scopes: ['source.sql', 'comment.line.double-dash.sql', 'punctuation.definition.comment.sql']});
  170. expect(tokens[3]).toEqual({value: ' WITH', scopes: ['source.sql', 'comment.line.double-dash.sql']});
  171. ({tokens} = grammar.tokenizeLine('/* comment */'));
  172. expect(tokens[0]).toEqual({value: '/*', scopes: ['source.sql', 'comment.block.sql', 'punctuation.definition.comment.sql']});
  173. expect(tokens[1]).toEqual({value: ' comment ', scopes: ['source.sql', 'comment.block.sql']});
  174. expect(tokens[2]).toEqual({value: '*/', scopes: ['source.sql', 'comment.block.sql', 'punctuation.definition.comment.sql']});
  175. ({tokens} = grammar.tokenizeLine('SELECT /* WITH */ AND'));
  176. expect(tokens[0]).toEqual({value: 'SELECT', scopes: ['source.sql', 'keyword.other.DML.sql']});
  177. expect(tokens[2]).toEqual({value: '/*', scopes: ['source.sql', 'comment.block.sql', 'punctuation.definition.comment.sql']});
  178. expect(tokens[3]).toEqual({value: ' WITH ', scopes: ['source.sql', 'comment.block.sql']});
  179. expect(tokens[4]).toEqual({value: '*/', scopes: ['source.sql', 'comment.block.sql', 'punctuation.definition.comment.sql']});
  180. expect(tokens[6]).toEqual({value: 'AND', scopes: ['source.sql', 'keyword.other.DML.sql']});
  181. });
  182. describe('punctuation', () => {
  183. it('tokenizes parentheses', () => {
  184. const {tokens} = grammar.tokenizeLine('WHERE salary > (SELECT avg(salary) FROM employees)');
  185. expect(tokens[0]).toEqual({value: 'WHERE', scopes: ['source.sql', 'keyword.other.DML.sql']});
  186. expect(tokens[1]).toEqual({value: ' salary ', scopes: ['source.sql']});
  187. expect(tokens[2]).toEqual({value: '>', scopes: ['source.sql', 'keyword.operator.comparison.sql']});
  188. expect(tokens[4]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.section.bracket.round.begin.sql']});
  189. expect(tokens[5]).toEqual({value: 'SELECT', scopes: ['source.sql', 'keyword.other.DML.sql']});
  190. expect(tokens[7]).toEqual({value: 'avg', scopes: ['source.sql', 'support.function.aggregate.sql']});
  191. expect(tokens[8]).toEqual({value: '(', scopes: ['source.sql', 'punctuation.definition.section.bracket.round.begin.sql']});
  192. expect(tokens[9]).toEqual({value: 'salary', scopes: ['source.sql']});
  193. expect(tokens[10]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.section.bracket.round.end.sql']});
  194. expect(tokens[12]).toEqual({value: 'FROM', scopes: ['source.sql', 'keyword.other.DML.sql']});
  195. expect(tokens[13]).toEqual({value: ' employees', scopes: ['source.sql']});
  196. expect(tokens[14]).toEqual({value: ')', scopes: ['source.sql', 'punctuation.definition.section.bracket.round.end.sql']});
  197. });
  198. it('tokenizes commas', () => {
  199. const {tokens} = grammar.tokenizeLine('name, year');
  200. expect(tokens[0]).toEqual({value: 'name', scopes: ['source.sql']});
  201. expect(tokens[1]).toEqual({value: ',', scopes: ['source.sql', 'punctuation.separator.comma.sql']});
  202. expect(tokens[2]).toEqual({value: ' year', scopes: ['source.sql']});
  203. });
  204. it('tokenizes periods', () => {
  205. let {tokens} = grammar.tokenizeLine('.');
  206. expect(tokens[0]).toEqual({value: '.', scopes: ['source.sql', 'punctuation.separator.period.sql']});
  207. ({tokens} = grammar.tokenizeLine('database.table'));
  208. expect(tokens[0]).toEqual({value: 'database', scopes: ['source.sql', 'constant.other.database-name.sql']});
  209. expect(tokens[1]).toEqual({value: '.', scopes: ['source.sql', 'punctuation.separator.period.sql']});
  210. expect(tokens[2]).toEqual({value: 'table', scopes: ['source.sql', 'constant.other.table-name.sql']});
  211. });
  212. it('tokenizes semicolons', () => {
  213. const {tokens} = grammar.tokenizeLine('ORDER BY year;');
  214. expect(tokens[0]).toEqual({value: 'ORDER BY', scopes: ['source.sql', 'keyword.other.DML.sql']});
  215. expect(tokens[1]).toEqual({value: ' year', scopes: ['source.sql']});
  216. expect(tokens[2]).toEqual({value: ';', scopes: ['source.sql', 'punctuation.terminator.statement.semicolon.sql']});
  217. });
  218. });
  219. });