diff --git a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeExprParser.java b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeExprParser.java index 4398606d6c..efb2d7fcd8 100644 --- a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeExprParser.java +++ b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeExprParser.java @@ -1,10 +1,67 @@ package com.alibaba.druid.sql.dialect.snowflake; +import com.alibaba.druid.DbType; +import com.alibaba.druid.sql.ast.SQLExpr; +import com.alibaba.druid.sql.ast.SQLLimit; import com.alibaba.druid.sql.parser.Lexer; import com.alibaba.druid.sql.parser.SQLExprParser; import com.alibaba.druid.sql.parser.SQLParserFeature; +import com.alibaba.druid.sql.parser.SQLSelectParser; +import com.alibaba.druid.sql.parser.Token; +import com.alibaba.druid.util.FnvHash; public class SnowflakeExprParser extends SQLExprParser { + private static final String[] AGGREGATE_FUNCTIONS = { + "ANY_VALUE", + "APPROX_COUNT_DISTINCT", + "APPROX_PERCENTILE", + "APPROX_TOP_K", + "ARRAY_AGG", + "ARRAY_UNIQUE_AGG", + "AVG", + "BITAND_AGG", + "BITOR_AGG", + "BITXOR_AGG", + "BOOLAND_AGG", + "BOOLOR_AGG", + "BOOLXOR_AGG", + "COUNT", + "COUNT_IF", + "COVAR_POP", + "COVAR_SAMP", + "GROUPING", + "GROUPING_ID", + "HASH_AGG", + "KURTOSIS", + "LISTAGG", + "MAX", + "MAX_BY", + "MEDIAN", + "MIN", + "MIN_BY", + "MODE", + "PERCENTILE_CONT", + "PERCENTILE_DISC", + "RANK", + "DENSE_RANK", + "ROW_NUMBER", + "NTILE", + "SKEW", + "STDDEV", + "STDDEV_POP", + "STDDEV_SAMP", + "SUM", + "VAR_POP", + "VAR_SAMP", + "VARIANCE" + }; + + private static final long[] AGGREGATE_FUNCTIONS_CODES; + + static { + AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(AGGREGATE_FUNCTIONS, true); + } + public SnowflakeExprParser(String sql) { this(new SnowflakeLexer(sql)); this.lexer.nextToken(); @@ -16,6 +73,30 @@ public SnowflakeExprParser(String sql, SQLParserFeature... 
features) { } public SnowflakeExprParser(Lexer lexer) { - super(lexer); + super(lexer, DbType.snowflake); + this.aggregateFunctions = AGGREGATE_FUNCTIONS; + this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES; + } + + @Override + public SQLSelectParser createSelectParser() { + return new SnowflakeSelectParser(this, null); + } + + @Override + public SQLLimit parseLimit() { + SQLLimit limit = new SQLLimit(); + + lexer.nextIf(Token.LIMIT); // LIMIT may already be consumed by SnowflakeSelectParser.query(); accept(Token.LIMIT) would throw here + + SQLExpr expr = this.expr(); + limit.setRowCount(expr); + + if (lexer.nextIf(Token.OFFSET)) { + SQLExpr offset = this.expr(); + limit.setOffset(offset); + } + + return limit; } } diff --git a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeLexer.java b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeLexer.java index d0ab05924a..d3ecbb4f7c 100644 --- a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeLexer.java +++ b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeLexer.java @@ -1,10 +1,102 @@ package com.alibaba.druid.sql.dialect.snowflake; import com.alibaba.druid.DbType; +import com.alibaba.druid.sql.parser.Keywords; import com.alibaba.druid.sql.parser.Lexer; import com.alibaba.druid.sql.parser.SQLParserFeature; +import com.alibaba.druid.sql.parser.Token; + +import java.util.HashMap; +import java.util.Map; public class SnowflakeLexer extends Lexer { + public static final Keywords SNOWFLAKE_KEYWORDS; + + static { + Map<String, Token> map = new HashMap<>(); + + map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords()); + + // Snowflake specific keywords + map.put("BEGIN", Token.BEGIN); + map.put("CASCADE", Token.CASCADE); + map.put("CLONE", Token.CLONE); + map.put("CONTINUE", Token.CONTINUE); + map.put("CURRENT", Token.CURRENT); + map.put("FETCH", Token.FETCH); + map.put("FIRST", Token.FIRST); + map.put("IDENTITY", Token.IDENTITY); + map.put("LIMIT", Token.LIMIT); + map.put("NEXT", Token.NEXT); + map.put("NOWAIT", Token.NOWAIT); + map.put("OF", Token.OF); + 
map.put("OFFSET", Token.OFFSET); + map.put("ONLY", Token.ONLY); + map.put("QUALIFY", Token.QUALIFY); + map.put("RECURSIVE", Token.RECURSIVE); + map.put("RESTART", Token.RESTART); + map.put("RESTRICT", Token.RESTRICT); + map.put("RETURNING", Token.RETURNING); + map.put("ROW", Token.ROW); + map.put("ROWS", Token.ROWS); + map.put("SHARE", Token.SHARE); + map.put("SHOW", Token.SHOW); + map.put("START", Token.START); + map.put("TRUE", Token.TRUE); + map.put("FALSE", Token.FALSE); + map.put("ARRAY", Token.ARRAY); + map.put("IF", Token.IF); + map.put("TYPE", Token.TYPE); + map.put("ILIKE", Token.ILIKE); + map.put("MERGE", Token.MERGE); + map.put("MATCHED", Token.MATCHED); + map.put("PARTITION", Token.PARTITION); + map.put("INTERVAL", Token.INTERVAL); + map.put("LANGUAGE", Token.LANGUAGE); + map.put("LOCAL", Token.LOCAL); + map.put("TOP", Token.TOP); + map.put("USING", Token.USING); + map.put("WINDOW", Token.WINDOW); + map.put("TABLESAMPLE", Token.TABLESAMPLE); + map.put("REPEATABLE", Token.REPEATABLE); + map.put("STAGE", Token.STAGE); + map.put("PIPE", Token.PIPE); + map.put("STREAM", Token.STREAM); + map.put("TASK", Token.TASK); + map.put("WAREHOUSE", Token.WAREHOUSE); + map.put("COPY", Token.COPY); + map.put("OVERWRITE", Token.OVERWRITE); + map.put("PIVOT", Token.PIVOT); + map.put("UNPIVOT", Token.UNPIVOT); + map.put("LATERAL", Token.LATERAL); + map.put("AT", Token.AT); + map.put("BEFORE", Token.BEFORE); + map.put("STATEMENT", Token.STATEMENT); + map.put("TIMESTAMP", Token.TIMESTAMP); + map.put("UNDROP", Token.UNDROP); + map.put("MASKING", Token.MASKING); + map.put("POLICY", Token.POLICY); + map.put("NETWORK", Token.NETWORK); + map.put("INTEGRATION", Token.INTEGRATION); + map.put("NOTIFICATION", Token.NOTIFICATION); + map.put("SECURITY", Token.SECURITY); + map.put("API", Token.API); + map.put("POOL", Token.POOL); + map.put("MONITOR", Token.MONITOR); + map.put("ALERT", Token.ALERT); + map.put("APPLICATION", Token.APPLICATION); + map.put("SERVICE", Token.SERVICE); + 
map.put("IMAGE", Token.IMAGE); + map.put("MINING", Token.MINING); + + SNOWFLAKE_KEYWORDS = new Keywords(map); + } + + @Override + protected Keywords loadKeywords() { + return SNOWFLAKE_KEYWORDS; + } + public SnowflakeLexer(String input, SQLParserFeature... features) { super(input); dbType = DbType.snowflake; diff --git a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeSelectParser.java b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeSelectParser.java new file mode 100644 index 0000000000..d062d94bdd --- /dev/null +++ b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeSelectParser.java @@ -0,0 +1,53 @@ +package com.alibaba.druid.sql.dialect.snowflake; + +import com.alibaba.druid.DbType; +import com.alibaba.druid.sql.ast.SQLObject; +import com.alibaba.druid.sql.ast.SQLOrderBy; +import com.alibaba.druid.sql.ast.SQLSetQuantifier; +import com.alibaba.druid.sql.ast.statement.SQLSelectQuery; +import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock; +import com.alibaba.druid.sql.parser.SQLExprParser; +import com.alibaba.druid.sql.parser.SQLSelectListCache; +import com.alibaba.druid.sql.parser.SQLSelectParser; +import com.alibaba.druid.sql.parser.Token; + +public class SnowflakeSelectParser extends SQLSelectParser { + public SnowflakeSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) { + super(exprParser, selectListCache); + dbType = DbType.snowflake; + } + + @Override + public SQLSelectQuery query(SQLObject parent, boolean acceptUnion) { + SQLSelectQueryBlock queryBlock = new SQLSelectQueryBlock(dbType); + + if (lexer.nextIf(Token.SELECT)) { + if (lexer.nextIf(Token.DISTINCT)) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); + } else if (lexer.nextIf(Token.ALL)) { + queryBlock.setDistionOption(SQLSetQuantifier.ALL); + } + + parseSelectList(queryBlock); + } + + parseFrom(queryBlock); + + parseWhere(queryBlock); + + parseGroupBy(queryBlock); + + qualify(queryBlock); + + 
SQLOrderBy orderBy = parseOrderBy(); + if (orderBy != null) { + queryBlock.setOrderBy(orderBy); + } + + if (lexer.nextIf(Token.LIMIT)) { + queryBlock.setLimit(this.exprParser.parseLimit()); + } + + return queryRest(queryBlock, acceptUnion); + } +} diff --git a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeStatementParser.java b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeStatementParser.java index 7145627915..ac3a483df8 100644 --- a/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeStatementParser.java +++ b/core/src/main/java/com/alibaba/druid/sql/dialect/snowflake/SnowflakeStatementParser.java @@ -1,8 +1,18 @@ package com.alibaba.druid.sql.dialect.snowflake; +import com.alibaba.druid.DbType; +import com.alibaba.druid.sql.ast.SQLStatement; +import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement; +import com.alibaba.druid.sql.ast.statement.SQLCreateViewStatement; +import com.alibaba.druid.sql.ast.statement.SQLSelect; +import com.alibaba.druid.sql.ast.statement.SQLSelectStatement; import com.alibaba.druid.sql.parser.Lexer; import com.alibaba.druid.sql.parser.SQLParserFeature; import com.alibaba.druid.sql.parser.SQLStatementParser; +import com.alibaba.druid.sql.parser.Token; +import com.alibaba.druid.util.FnvHash; + +import java.util.List; public class SnowflakeStatementParser extends SQLStatementParser { public SnowflakeStatementParser(String sql) { @@ -16,4 +26,140 @@ public SnowflakeStatementParser(String sql, SQLParserFeature... 
features) { public SnowflakeStatementParser(Lexer lexer) { super(new SnowflakeExprParser(lexer)); } + + @Override + public SnowflakeSelectParser createSQLSelectParser() { + return new SnowflakeSelectParser(this.exprParser, null); + } + + @Override + public SQLSelectStatement parseSelect() { + SnowflakeSelectParser selectParser = createSQLSelectParser(); + SQLSelect select = selectParser.select(); + return new SQLSelectStatement(select, DbType.snowflake); + } + + @Override + public boolean parseStatementListDialect(List<SQLStatement> statementList) { + if (lexer.nextIf(Token.UNDROP)) { + lexer.nextIf(Token.TABLE); + lexer.nextIf(Token.SCHEMA); + lexer.nextIf(Token.DATABASE); + return true; + } + + if (lexer.token() == Token.CREATE) { + Lexer.SavePoint mark = lexer.mark(); + lexer.nextToken(); + if (lexer.token() == Token.STAGE + || lexer.token() == Token.WAREHOUSE + || lexer.token() == Token.FILE + || lexer.token() == Token.PIPE + || lexer.token() == Token.STREAM + || lexer.token() == Token.TASK) { + lexer.reset(mark); + lexer.nextToken(); + lexer.nextToken(); + return true; + } + lexer.reset(mark); + } + + if (lexer.nextIf(Token.USE)) { + return true; + } + + if (lexer.nextIf(Token.COPY)) { + return true; + } + + return false; + } + + @Override + public SQLCreateTableStatement parseCreateTable() { + SQLCreateTableStatement stmt = new SQLCreateTableStatement(dbType); + + accept(Token.CREATE); + + if (lexer.nextIf(Token.OR)) { + accept(Token.REPLACE); + stmt.setReplace(true); + } + + if (lexer.nextIf(Token.TEMPORARY)) { + stmt.setTemporary(true); + } + + accept(Token.TABLE); + + if (lexer.nextIf(Token.IF)) { + accept(Token.NOT); + accept(Token.EXISTS); + stmt.setIfNotExists(true); + } + + stmt.setName(this.exprParser.name()); + + // Handle CLONE clause for Snowflake; CLONE lexes as Token.CLONE (registered keyword), never as an identifier + if (lexer.nextIf(Token.CLONE)) { + stmt.setLike(this.exprParser.name()); + } + + if (lexer.nextIf(Token.LPAREN)) { + for (;;) { + if (lexer.token() == Token.IDENTIFIER || lexer.token() == 
Token.LITERAL_ALIAS) { + stmt.addColumn(this.exprParser.parseColumn()); + if (lexer.nextIfComma()) { + continue; + } + } + break; + } + accept(Token.RPAREN); + } + + return stmt; + } + + @Override + public SQLCreateViewStatement parseCreateView() { + SQLCreateViewStatement stmt = new SQLCreateViewStatement(dbType); + + accept(Token.CREATE); + + if (lexer.nextIf(Token.OR)) { + accept(Token.REPLACE); + stmt.setOrReplace(true); + } + + accept(Token.VIEW); + + if (lexer.nextIf(Token.IF)) { + accept(Token.NOT); + accept(Token.EXISTS); + stmt.setIfNotExists(true); + } + + stmt.setName(this.exprParser.name()); + + if (lexer.nextIf(Token.LPAREN)) { + for (;;) { + if (lexer.token() == Token.IDENTIFIER || lexer.token() == Token.LITERAL_ALIAS) { + stmt.addColumn(this.exprParser.parseColumn()); + if (lexer.nextIfComma()) { + continue; + } + } + break; + } + accept(Token.RPAREN); + } + + if (lexer.nextIf(Token.AS)) { + stmt.setSubQuery(new SQLSelect(createSQLSelectParser().query())); + } + + return stmt; + } } diff --git a/core/src/main/java/com/alibaba/druid/sql/parser/Token.java b/core/src/main/java/com/alibaba/druid/sql/parser/Token.java index 9dd24b7fe9..e5f01f7677 100644 --- a/core/src/main/java/com/alibaba/druid/sql/parser/Token.java +++ b/core/src/main/java/com/alibaba/druid/sql/parser/Token.java @@ -340,6 +340,43 @@ public enum Token { REPEATABLE("REPEATABLE"), TABLESAMPLE("TABLESAMPLE"), + // Snowflake + CLONE("CLONE"), + STAGE("STAGE"), + PIPE("PIPE"), + STREAM("STREAM"), + TASK("TASK"), + WAREHOUSE("WAREHOUSE"), + FILE("FILE"), + COPY("COPY"), + URL("URL"), + CREDENTIALS("CREDENTIALS"), + ENCRYPTION("ENCRYPTION"), + VALIDATION_MODE("VALIDATION_MODE"), + PATTERN("PATTERN"), + FILES("FILES"), + PIVOT("PIVOT"), + UNPIVOT("UNPIVOT"), + AT("AT"), + BEFORE("BEFORE"), + STATEMENT("STATEMENT"), + TIMESTAMP("TIMESTAMP"), + UNDROP("UNDROP"), + MASKING("MASKING"), + POLICY("POLICY"), + NETWORK("NETWORK"), + INTEGRATION("INTEGRATION"), + NOTIFICATION("NOTIFICATION"), + 
SECURITY("SECURITY"), + API("API"), + POOL("POOL"), + MONITOR("MONITOR"), + ALERT("ALERT"), + APPLICATION("APPLICATION"), + SERVICE("SERVICE"), + IMAGE("IMAGE"), + MINING("MINING"), + LPAREN("("), RPAREN(")"), LBRACE("{"), diff --git a/core/src/main/java/com/alibaba/druid/util/FnvHash.java b/core/src/main/java/com/alibaba/druid/util/FnvHash.java index 6854b3ec02..883489a3ab 100644 --- a/core/src/main/java/com/alibaba/druid/util/FnvHash.java +++ b/core/src/main/java/com/alibaba/druid/util/FnvHash.java @@ -1160,6 +1160,32 @@ public interface Constants { long INSTALL = fnv1a_64_lower("INSTALL"); long UNLOAD = fnv1a_64_lower("UNLOAD"); + // Snowflake specific + long STAGE = fnv1a_64_lower("STAGE"); + long PIPE = fnv1a_64_lower("PIPE"); + long STREAM = fnv1a_64_lower("STREAM"); + long TASK = fnv1a_64_lower("TASK"); + long WAREHOUSE = fnv1a_64_lower("WAREHOUSE"); + long URL = fnv1a_64_lower("URL"); + long VALIDATION_MODE = fnv1a_64_lower("VALIDATION_MODE"); + long PATTERN = fnv1a_64_lower("PATTERN"); + long FILES = fnv1a_64_lower("FILES"); + long UNPIVOT = fnv1a_64_lower("UNPIVOT"); + long UNDROP = fnv1a_64_lower("UNDROP"); + long MASKING = fnv1a_64_lower("MASKING"); + long NETWORK = fnv1a_64_lower("NETWORK"); + long INTEGRATION = fnv1a_64_lower("INTEGRATION"); + long NOTIFICATION = fnv1a_64_lower("NOTIFICATION"); + long SECURITY = fnv1a_64_lower("SECURITY"); + long API = fnv1a_64_lower("API"); + long POOL = fnv1a_64_lower("POOL"); + long MONITOR = fnv1a_64_lower("MONITOR"); + long ALERT = fnv1a_64_lower("ALERT"); + long APPLICATION = fnv1a_64_lower("APPLICATION"); + long SERVICE = fnv1a_64_lower("SERVICE"); + long IMAGE = fnv1a_64_lower("IMAGE"); + long MINING = fnv1a_64_lower("MINING"); + long AGGREGATE = fnv1a_64_lower("AGGREGATE"); long UNIQUE = fnv1a_64_lower("UNIQUE"); long PRIMARY = fnv1a_64_lower("PRIMARY"); diff --git a/core/src/test/resources/bvt/parser/snowflake/1.txt b/core/src/test/resources/bvt/parser/snowflake/1.txt new file mode 100644 index 
0000000000..97290af018 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/1.txt @@ -0,0 +1,11 @@ +SELECT * FROM employees QUALIFY ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) = 1; +-------------------- +SELECT * +FROM employees +QUALIFY ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) = 1; +------------------------------------------------------------------------------------------------------------------------ +SELECT * FROM table1 LIMIT 10 OFFSET 5; +-------------------- +SELECT * +FROM table1 +LIMIT 10 OFFSET 5; diff --git a/core/src/test/resources/bvt/parser/snowflake/10.txt b/core/src/test/resources/bvt/parser/snowflake/10.txt new file mode 100644 index 0000000000..5d8f61d135 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/10.txt @@ -0,0 +1,35 @@ +SELECT o.order_id, c.customer_name FROM orders o INNER JOIN customers c ON o.customer_id = c.customer_id; +-------------------- +SELECT o.order_id, c.customer_name +FROM orders o + INNER JOIN customers c ON o.customer_id = c.customer_id; +------------------------------------------------------------------------------------------------------------------------ +SELECT c.customer_name, o.order_id FROM customers c LEFT JOIN orders o ON c.customer_id = o.customer_id; +-------------------- +SELECT c.customer_name, o.order_id +FROM customers c + LEFT JOIN orders o ON c.customer_id = o.customer_id; +------------------------------------------------------------------------------------------------------------------------ +SELECT o.order_id, c.customer_name FROM orders o RIGHT JOIN customers c ON o.customer_id = c.customer_id; +-------------------- +SELECT o.order_id, c.customer_name +FROM orders o + RIGHT JOIN customers c ON o.customer_id = c.customer_id; +------------------------------------------------------------------------------------------------------------------------ +SELECT c.customer_name, o.order_id FROM customers c FULL OUTER JOIN orders o ON 
c.customer_id = o.customer_id; +-------------------- +SELECT c.customer_name, o.order_id +FROM customers c + FULL JOIN orders o ON c.customer_id = o.customer_id; +------------------------------------------------------------------------------------------------------------------------ +SELECT a.name, b.category FROM table_a a CROSS JOIN table_b b; +-------------------- +SELECT a.name, b.category +FROM table_a a + CROSS JOIN table_b b; +------------------------------------------------------------------------------------------------------------------------ +SELECT e1.employee_name AS employee, e2.employee_name AS manager FROM employees e1 JOIN employees e2 ON e1.manager_id = e2.employee_id; +-------------------- +SELECT e1.employee_name AS employee, e2.employee_name AS manager +FROM employees e1 + JOIN employees e2 ON e1.manager_id = e2.employee_id; diff --git a/core/src/test/resources/bvt/parser/snowflake/11.txt b/core/src/test/resources/bvt/parser/snowflake/11.txt new file mode 100644 index 0000000000..4642bcae70 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/11.txt @@ -0,0 +1,37 @@ +SELECT department_id, COUNT(*) AS employee_count, AVG(salary) AS avg_salary FROM employees GROUP BY department_id; +-------------------- +SELECT department_id, COUNT(*) AS employee_count, AVG(salary) AS avg_salary +FROM employees +GROUP BY department_id; +------------------------------------------------------------------------------------------------------------------------ +SELECT department_id, SUM(salary) AS total_salary FROM employees GROUP BY department_id HAVING SUM(salary) > 1000000; +-------------------- +SELECT department_id, SUM(salary) AS total_salary +FROM employees +GROUP BY department_id +HAVING SUM(salary) > 1000000; +------------------------------------------------------------------------------------------------------------------------ +SELECT category, COUNT(*) AS product_count FROM products GROUP BY category ORDER BY product_count DESC; 
+-------------------- +SELECT category, COUNT(*) AS product_count +FROM products +GROUP BY category +ORDER BY product_count DESC; +------------------------------------------------------------------------------------------------------------------------ +SELECT department_id, job_title, COUNT(*) AS cnt FROM employees GROUP BY department_id, job_title; +-------------------- +SELECT department_id, job_title, COUNT(*) AS cnt +FROM employees +GROUP BY department_id, job_title; +------------------------------------------------------------------------------------------------------------------------ +SELECT year, month, SUM(amount) AS total FROM sales GROUP BY ROLLUP (year, month); +-------------------- +SELECT year, month, SUM(amount) AS total +FROM sales +GROUP BY ROLLUP (year, month); +------------------------------------------------------------------------------------------------------------------------ +SELECT year, month, SUM(amount) AS total FROM sales GROUP BY CUBE (year, month); +-------------------- +SELECT year, month, SUM(amount) AS total +FROM sales +GROUP BY CUBE (year, month); diff --git a/core/src/test/resources/bvt/parser/snowflake/12.txt b/core/src/test/resources/bvt/parser/snowflake/12.txt new file mode 100644 index 0000000000..864f2b82bd --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/12.txt @@ -0,0 +1,19 @@ +SELECT employee_id, salary, ROW_NUMBER() OVER (ORDER BY salary DESC) AS row_num FROM employees; +-------------------- +SELECT employee_id, salary, ROW_NUMBER() OVER (ORDER BY salary DESC) AS row_num +FROM employees; +------------------------------------------------------------------------------------------------------------------------ +SELECT employee_id, department_id, salary, ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) AS rank_in_dept FROM employees; +-------------------- +SELECT employee_id, department_id, salary, ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) AS rank_in_dept +FROM 
employees; +------------------------------------------------------------------------------------------------------------------------ +SELECT employee_id, salary, RANK() OVER (ORDER BY salary DESC) AS rnk FROM employees; +-------------------- +SELECT employee_id, salary, RANK() OVER (ORDER BY salary DESC) AS rnk +FROM employees; +------------------------------------------------------------------------------------------------------------------------ +SELECT order_id, amount, LAG(amount) OVER (ORDER BY order_id) AS prev_amount FROM orders; +-------------------- +SELECT order_id, amount, LAG(amount) OVER (ORDER BY order_id) AS prev_amount +FROM orders; diff --git a/core/src/test/resources/bvt/parser/snowflake/13.txt b/core/src/test/resources/bvt/parser/snowflake/13.txt new file mode 100644 index 0000000000..d0432a60bf --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/13.txt @@ -0,0 +1,15 @@ +SELECT id, name FROM table_a UNION SELECT id, name FROM table_b; +-------------------- +SELECT id, name +FROM table_a +UNION +SELECT id, name +FROM table_b; +------------------------------------------------------------------------------------------------------------------------ +SELECT id, name FROM table_a UNION ALL SELECT id, name FROM table_b; +-------------------- +SELECT id, name +FROM table_a +UNION ALL +SELECT id, name +FROM table_b; diff --git a/core/src/test/resources/bvt/parser/snowflake/14.txt b/core/src/test/resources/bvt/parser/snowflake/14.txt new file mode 100644 index 0000000000..4af092c160 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/14.txt @@ -0,0 +1,16 @@ +SELECT id, array_col[0] AS first_element FROM arrays_table; +-------------------- +SELECT id, array_col[0] AS first_element +FROM arrays_table; +------------------------------------------------------------------------------------------------------------------------ +SELECT PARSE_JSON('{"name": "John"}') AS json_data; +-------------------- +SELECT PARSE_JSON('{"name": "John"}') 
AS json_data; +------------------------------------------------------------------------------------------------------------------------ +SELECT ARRAY_CONSTRUCT(1, 2, 3) AS numbers; +-------------------- +SELECT ARRAY_CONSTRUCT(1, 2, 3) AS numbers; +------------------------------------------------------------------------------------------------------------------------ +SELECT OBJECT_CONSTRUCT('name', 'Alice') AS person; +-------------------- +SELECT OBJECT_CONSTRUCT('name', 'Alice') AS person; diff --git a/core/src/test/resources/bvt/parser/snowflake/2.txt b/core/src/test/resources/bvt/parser/snowflake/2.txt new file mode 100644 index 0000000000..fbeaac9b0b --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/2.txt @@ -0,0 +1,24 @@ +CREATE TABLE my_table (id NUMBER, name VARCHAR); +-------------------- +CREATE TABLE my_table ( + id NUMBER, + name VARCHAR +); +------------------------------------------------------------------------------------------------------------------------ +CREATE TABLE my_clone_table CLONE my_table; +-------------------- +CREATE TABLE my_clone_table +CLONE my_table; +------------------------------------------------------------------------------------------------------------------------ +CREATE TEMPORARY TABLE temp_table (id NUMBER, value VARCHAR); +-------------------- +CREATE TEMPORARY TABLE temp_table ( + id NUMBER, + value VARCHAR +); +------------------------------------------------------------------------------------------------------------------------ +CREATE TABLE IF NOT EXISTS my_table (id NUMBER); +-------------------- +CREATE TABLE IF NOT EXISTS my_table ( + id NUMBER +); diff --git a/core/src/test/resources/bvt/parser/snowflake/3.txt b/core/src/test/resources/bvt/parser/snowflake/3.txt new file mode 100644 index 0000000000..bdda3acadf --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/3.txt @@ -0,0 +1,6 @@ +CREATE OR REPLACE VIEW my_view AS SELECT id, name FROM my_table; +-------------------- +CREATE OR 
REPLACE VIEW my_view +AS +SELECT id, name +FROM my_table; diff --git a/core/src/test/resources/bvt/parser/snowflake/4.txt b/core/src/test/resources/bvt/parser/snowflake/4.txt new file mode 100644 index 0000000000..383f4a8b2e --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/4.txt @@ -0,0 +1,38 @@ +CREATE TABLE simple_table (id NUMBER, name VARCHAR(100)); +-------------------- +CREATE TABLE simple_table ( + id NUMBER, + name VARCHAR(100) +); +------------------------------------------------------------------------------------------------------------------------ +CREATE TABLE cloned_table CLONE source_table; +-------------------- +CREATE TABLE cloned_table +CLONE source_table; +------------------------------------------------------------------------------------------------------------------------ +CREATE TEMPORARY TABLE temp_session_table (session_id VARCHAR, data VARIANT); +-------------------- +CREATE TEMPORARY TABLE temp_session_table ( + session_id VARCHAR, + data VARIANT +); +------------------------------------------------------------------------------------------------------------------------ +CREATE TABLE IF NOT EXISTS optional_table (id NUMBER); +-------------------- +CREATE TABLE IF NOT EXISTS optional_table ( + id NUMBER +); +------------------------------------------------------------------------------------------------------------------------ +CREATE OR REPLACE TABLE replaceable_table (id NUMBER, status VARCHAR); +-------------------- +CREATE OR REPLACE TABLE replaceable_table ( + id NUMBER, + status VARCHAR +); +------------------------------------------------------------------------------------------------------------------------ +CREATE TABLE constrained_table (id NUMBER NOT NULL, name VARCHAR(100) NOT NULL); +-------------------- +CREATE TABLE constrained_table ( + id NUMBER NOT NULL, + name VARCHAR(100) NOT NULL +); diff --git a/core/src/test/resources/bvt/parser/snowflake/5.txt b/core/src/test/resources/bvt/parser/snowflake/5.txt new 
file mode 100644 index 0000000000..0c034195f3 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/5.txt @@ -0,0 +1,21 @@ +CREATE VIEW simple_view AS SELECT id, name FROM source_table; +-------------------- +CREATE VIEW simple_view +AS +SELECT id, name +FROM source_table; +------------------------------------------------------------------------------------------------------------------------ +CREATE OR REPLACE VIEW updatable_view AS SELECT * FROM source_table WHERE active = true; +-------------------- +CREATE OR REPLACE VIEW updatable_view +AS +SELECT * +FROM source_table +WHERE active = true; +------------------------------------------------------------------------------------------------------------------------ +CREATE VIEW IF NOT EXISTS optional_view AS SELECT id FROM source_table; +-------------------- +CREATE VIEW IF NOT EXISTS optional_view +AS +SELECT id +FROM source_table; diff --git a/core/src/test/resources/bvt/parser/snowflake/6.txt b/core/src/test/resources/bvt/parser/snowflake/6.txt new file mode 100644 index 0000000000..3611e65220 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/6.txt @@ -0,0 +1,43 @@ +INSERT INTO target_table VALUES (1, 'test'); +-------------------- +INSERT INTO target_table +VALUES (1, 'test'); +------------------------------------------------------------------------------------------------------------------------ +INSERT INTO target_table (id, name) VALUES (1, 'Alice'), (2, 'Bob'); +-------------------- +INSERT INTO target_table (id, name) +VALUES (1, 'Alice'), (2, 'Bob'); +------------------------------------------------------------------------------------------------------------------------ +INSERT INTO target_table SELECT * FROM source_table; +-------------------- +INSERT INTO target_table +SELECT * +FROM source_table; +------------------------------------------------------------------------------------------------------------------------ +INSERT INTO archive_table SELECT * FROM active_table WHERE 
created_at < '2024-01-01'; +-------------------- +INSERT INTO archive_table +SELECT * +FROM active_table +WHERE created_at < '2024-01-01'; +------------------------------------------------------------------------------------------------------------------------ +UPDATE target_table SET name = 'Updated' WHERE id = 1; +-------------------- +UPDATE target_table +SET name = 'Updated' +WHERE id = 1; +------------------------------------------------------------------------------------------------------------------------ +UPDATE target_table SET name = 'New', status = 'active' WHERE id = 1; +-------------------- +UPDATE target_table +SET name = 'New', status = 'active' +WHERE id = 1; +------------------------------------------------------------------------------------------------------------------------ +DELETE FROM target_table WHERE id = 1; +-------------------- +DELETE FROM target_table +WHERE id = 1; +------------------------------------------------------------------------------------------------------------------------ +TRUNCATE TABLE target_table; +-------------------- +TRUNCATE TABLE target_table; diff --git a/core/src/test/resources/bvt/parser/snowflake/7.txt b/core/src/test/resources/bvt/parser/snowflake/7.txt new file mode 100644 index 0000000000..c86a82c04d --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/7.txt @@ -0,0 +1,19 @@ +MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN MATCHED THEN UPDATE SET t.name = s.name; +-------------------- +MERGE INTO target_table t +USING source_table s ON t.id = s.id +WHEN MATCHED THEN UPDATE +SET t.name = s.name; +------------------------------------------------------------------------------------------------------------------------ +MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN NOT MATCHED THEN INSERT (id, name) VALUES (s.id, s.name); +-------------------- +MERGE INTO target_table t +USING source_table s ON t.id = s.id +WHEN NOT MATCHED THEN INSERT (id, name) +VALUES 
(s.id, s.name); +------------------------------------------------------------------------------------------------------------------------ +MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN MATCHED AND s.deleted = true THEN DELETE; +-------------------- +MERGE INTO target_table t +USING source_table s ON t.id = s.id +WHEN MATCHED AND s.deleted = true THEN DELETE; diff --git a/core/src/test/resources/bvt/parser/snowflake/8.txt b/core/src/test/resources/bvt/parser/snowflake/8.txt new file mode 100644 index 0000000000..7d6441cc69 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/8.txt @@ -0,0 +1,44 @@ +SELECT * FROM employees QUALIFY ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) = 1; +-------------------- +SELECT * +FROM employees +QUALIFY ROW_NUMBER() OVER (PARTITION BY department_id ORDER BY salary DESC) = 1; +------------------------------------------------------------------------------------------------------------------------ +SELECT employee_id, department_id, salary FROM employees QUALIFY RANK() OVER (PARTITION BY department_id ORDER BY salary DESC) <= 3; +-------------------- +SELECT employee_id, department_id, salary +FROM employees +QUALIFY RANK() OVER (PARTITION BY department_id ORDER BY salary DESC) <= 3; +------------------------------------------------------------------------------------------------------------------------ +SELECT * FROM sales QUALIFY DENSE_RANK() OVER (ORDER BY amount DESC) <= 10; +-------------------- +SELECT * +FROM sales +QUALIFY DENSE_RANK() OVER (ORDER BY amount DESC) <= 10; +------------------------------------------------------------------------------------------------------------------------ +SELECT * FROM large_table LIMIT 100 OFFSET 50; +-------------------- +SELECT * +FROM large_table +LIMIT 100 OFFSET 50; +------------------------------------------------------------------------------------------------------------------------ +SELECT * FROM large_table LIMIT 1000; 
+-------------------- +SELECT * +FROM large_table +LIMIT 1000; +------------------------------------------------------------------------------------------------------------------------ +SELECT DISTINCT category FROM products; +-------------------- +SELECT DISTINCT category +FROM products; +------------------------------------------------------------------------------------------------------------------------ +SELECT DISTINCT department_id, job_title FROM employees; +-------------------- +SELECT DISTINCT department_id, job_title +FROM employees; +------------------------------------------------------------------------------------------------------------------------ +SELECT ALL category FROM products; +-------------------- +SELECT ALL category +FROM products; diff --git a/core/src/test/resources/bvt/parser/snowflake/9.txt b/core/src/test/resources/bvt/parser/snowflake/9.txt new file mode 100644 index 0000000000..b3516efdc0 --- /dev/null +++ b/core/src/test/resources/bvt/parser/snowflake/9.txt @@ -0,0 +1,10 @@ +WITH regional_sales AS (SELECT region, SUM(amount) AS total_sales FROM orders GROUP BY region) SELECT * FROM regional_sales WHERE total_sales > 100000; +-------------------- +WITH regional_sales AS ( + SELECT region, SUM(amount) AS total_sales + FROM orders + GROUP BY region + ) +SELECT * +FROM regional_sales +WHERE total_sales > 100000;