From 046718324c67855e3040c19b2b254c94340a99e3 Mon Sep 17 00:00:00 2001 From: lloyd tabb Date: Sat, 9 Mar 2024 12:02:02 -0800 Subject: [PATCH] Fix upload script, fix time formatting (#1647) * Fix upload script * badly implemented cross join * ignore the snowflake log * add type for object * .fix time tests. * Rewrite the way we get information schema. * prettier fixes * Pipeline changes * add more snowflake limitations. * Finally passing tests. --- .gitignore | 1 + .../src/snowflake_connection.ts | 52 +++-------- .../src/snowflake_executor.ts | 11 ++- packages/malloy/src/dialect/dialect.ts | 6 ++ .../malloy/src/dialect/snowflake/snowflake.ts | 21 +++-- packages/malloy/src/malloy.ts | 6 +- test/snowflake/uploaddata.sql | 9 +- test/src/databases/all/lenses.spec.ts | 89 ++++++++++--------- test/src/databases/all/nomodel.spec.ts | 55 +++++++----- test/src/databases/all/time.spec.ts | 9 +- 10 files changed, 132 insertions(+), 127 deletions(-) diff --git a/.gitignore b/.gitignore index ec429a601..1d1f0830b 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,4 @@ demo/malloy-demo-composer/env .env test/data/duckdb/duckdb_test.db.wal monospace.json +snowflake.log diff --git a/packages/malloy-db-snowflake/src/snowflake_connection.ts b/packages/malloy-db-snowflake/src/snowflake_connection.ts index fe73919ae..6b12a5abe 100644 --- a/packages/malloy-db-snowflake/src/snowflake_connection.ts +++ b/packages/malloy-db-snowflake/src/snowflake_connection.ts @@ -131,13 +131,9 @@ export class SnowflakeConnection await this.executor.done(); } - private getTempTableName(sqlCommand: string): string { + private getTempViewName(sqlCommand: string): string { const hash = crypto.createHash('md5').update(sqlCommand).digest('hex'); - let tableName = `tt${hash}`; - if (this.scratchSpace) { - tableName = `${this.scratchSpace.database}.${this.scratchSpace.schema}.${tableName}`; - } - return tableName; + return `tt${hash}`; } public async runSQL( @@ -179,10 +175,12 @@ export class 
SnowflakeConnection ): Promise { const rows = await this.executor.batch(infoQuery); for (const row of rows) { - const snowflakeDataType = row['DATA_TYPE'] as string; + // data types look like `VARCHAR(1234)` + let snowflakeDataType = row['type'] as string; + snowflakeDataType = snowflakeDataType.toLocaleLowerCase().split('(')[0]; const s = structDef; const malloyType = this.dialect.sqlTypeToMalloyType(snowflakeDataType); - const name = row['COLUMN_NAME'] as string; + const name = row['name'] as string; if (malloyType) { s.fields.push({...malloyType, name}); } else { @@ -199,15 +197,6 @@ export class SnowflakeConnection tableKey: string, tablePath: string ): Promise { - // looks like snowflake:schemaName.tableName - tableKey = tableKey.toLowerCase(); - - let [schema, tableName] = ['', tablePath]; - const schema_and_table = tablePath.split('.'); - if (schema_and_table.length === 2) { - [schema, tableName] = schema_and_table; - } - const structDef: StructDef = { type: 'struct', dialect: 'snowflake', @@ -231,16 +220,7 @@ export class SnowflakeConnection // GROUP BY 1,2 // ORDER BY PATH - const infoQuery = ` - SELECT - column_name, -- LOWER(COLUMN_NAME) AS column_name, - LOWER(DATA_TYPE) as data_type - FROM - INFORMATION_SCHEMA.COLUMNS - WHERE - table_schema = UPPER('${schema}') - AND table_name = UPPER('${tableName}'); - `; + const infoQuery = `DESCRIBE TABLE ${tablePath}`; await this.schemaFromQuery(infoQuery, structDef); return structDef; @@ -301,24 +281,14 @@ export class SnowflakeConnection }; // create temp table with same schema as the query - const tempTableName = this.getTempTableName(sqlRef.selectStr); + const tempTableName = this.getTempViewName(sqlRef.selectStr); this.runSQL( ` - CREATE OR REPLACE TEMP TABLE ${tempTableName} as SELECT * FROM ( - ${sqlRef.selectStr} - ) as x WHERE false; + CREATE OR REPLACE TEMP VIEW ${tempTableName} as ${sqlRef.selectStr}; ` ); - const infoQuery = ` - SELECT - column_name, -- LOWER(column_name) as column_name, - 
LOWER(data_type) as data_type - FROM - INFORMATION_SCHEMA.COLUMNS - WHERE - table_name = UPPER('${tempTableName}'); - `; + const infoQuery = `DESCRIBE TABLE ${tempTableName}`; await this.schemaFromQuery(infoQuery, structDef); return structDef; } @@ -351,7 +321,7 @@ export class SnowflakeConnection } public async manifestTemporaryTable(sqlCommand: string): Promise { - const tableName = this.getTempTableName(sqlCommand); + const tableName = this.getTempViewName(sqlCommand); const cmd = `CREATE OR REPLACE TEMP TABLE ${tableName} AS (${sqlCommand});`; await this.runSQL(cmd); return tableName; diff --git a/packages/malloy-db-snowflake/src/snowflake_executor.ts b/packages/malloy-db-snowflake/src/snowflake_executor.ts index fa09da62b..16750b4fb 100644 --- a/packages/malloy-db-snowflake/src/snowflake_executor.ts +++ b/packages/malloy-db-snowflake/src/snowflake_executor.ts @@ -170,16 +170,15 @@ export class SnowflakeExecutor { private async _setSessionParams(conn: Connection) { // set some default session parameters - // this is quite imporant for snowflake because malloy tends to add quotes to all database identifiers - // and snowflake is case sensitive by with quotes but matches against all caps identifiers without quotes - // await this._execute( - // 'ALTER SESSION SET QUOTED_IDENTIFIERS_IGNORE_CASE = true;', - // conn - // ); // set utc as the default timezone which is the malloy convention await this._execute("ALTER SESSION SET TIMEZONE = 'UTC';", conn); // ensure week starts on Sunday which is the malloy convention await this._execute('ALTER SESSION SET WEEK_START = 7;', conn); + // so javascript can parse the dates + await this._execute( + "ALTER SESSION SET TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DDTHH24:MI:SS.FF3TZH:TZM';", + conn + ); } public async batch(sqlText: string): Promise { diff --git a/packages/malloy/src/dialect/dialect.ts b/packages/malloy/src/dialect/dialect.ts index 69809d420..5e4b5d304 100644 --- a/packages/malloy/src/dialect/dialect.ts +++ 
b/packages/malloy/src/dialect/dialect.ts @@ -110,6 +110,12 @@ export abstract class Dialect { // StandardSQL dialects can't partition on expression in window functions cantPartitionWindowFunctionsOnExpressions = false; + // Snowflake can't yet support pipelines in nested views. + supportsPipelinesInViews = true; + + // Some dialects don't support arrays. + supportsArraysInData = true; + // return the definition of a function with the given name abstract getGlobalFunctionDef( name: string diff --git a/packages/malloy/src/dialect/snowflake/snowflake.ts b/packages/malloy/src/dialect/snowflake/snowflake.ts index 3375ef04e..dc053bff6 100644 --- a/packages/malloy/src/dialect/snowflake/snowflake.ts +++ b/packages/malloy/src/dialect/snowflake/snowflake.ts @@ -108,6 +108,8 @@ export class SnowflakeDialect extends Dialect { dontUnionIndex = false; supportsQualify = false; supportsNesting = true; + supportsPipelinesInViews = false; + supportsArraysInData = false; // don't mess with the table pathing. quoteTablePath(tablePath: string): string { @@ -115,7 +117,9 @@ export class SnowflakeDialect extends Dialect { } sqlGroupSetTable(groupSetCount: number): string { - return `SELECT index as group_set FROM TABLE(FLATTEN(ARRAY_GENERATE_RANGE(0, ${groupSetCount})))`; + return `CROSS JOIN (SELECT index as group_set FROM TABLE(FLATTEN(ARRAY_GENERATE_RANGE(0, ${ groupSetCount + 1 }))))`; } sqlAnyValue(groupSet: number, fieldName: string): string { @@ -142,7 +146,7 @@ export class SnowflakeDialect extends Dialect { ): string { const fields = this.mapFieldsForObjectConstruct(fieldList); const orderByClause = orderBy ?
` WITHIN GROUP (${orderBy})` : ''; - const aggClause = `ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT(${fields}) END)${orderByClause}`; + const aggClause = `ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT_KEEP_NULL(${fields}) END)${orderByClause}`; if (limit === undefined) { return `COALESCE(${aggClause}, [])`; } @@ -151,7 +155,7 @@ export class SnowflakeDialect extends Dialect { sqlAnyValueTurtle(groupSet: number, fieldList: DialectFieldList): string { const fields = this.mapFieldsForObjectConstruct(fieldList); - return `(ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT(${fields}) END) WITHIN GROUP (ORDER BY 1 ASC NULLS LAST))[0]`; + return `(ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT_KEEP_NULL(${fields}) END) WITHIN GROUP (ORDER BY 1 ASC NULLS LAST))[0]`; } sqlAnyValueLastTurtle( @@ -170,7 +174,7 @@ export class SnowflakeDialect extends Dialect { const nullValues = fieldList .map(f => `'${f.sqlOutputName}', NULL`) .join(', '); - return `COALESCE(ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT(${fields}) END)[0], OBJECT_CONSTRUCT_KEEP_NULL(${nullValues}))`; + return `COALESCE(ARRAY_AGG(CASE WHEN group_set=${groupSet} THEN OBJECT_CONSTRUCT_KEEP_NULL(${fields}) END)[0], OBJECT_CONSTRUCT_KEEP_NULL(${nullValues}))`; } sqlUnnestAlias( @@ -243,6 +247,8 @@ export class SnowflakeDialect extends Dialect { let snowflakeType = fieldType; if (fieldType === 'string') { snowflakeType = 'varchar'; + } else if (fieldType === 'struct') { + snowflakeType = 'variant'; } return `${alias}.value:"${fieldName}"::${snowflakeType}`; } @@ -263,12 +269,13 @@ export class SnowflakeDialect extends Dialect { throw new Error('not implemented yet'); } - sqlCreateFunctionCombineLastStage(lastStageName: string): string { - return `SELECT ARRAY_AGG(OBJECT_CONSTRUCT(*)) FROM ${lastStageName}`; + sqlCreateFunctionCombineLastStage(_lastStageName: string): string { + throw new Error('not implemented yet'); + // return 
`SELECT ARRAY_AGG(OBJECT_CONSTRUCT(*)) FROM ${lastStageName}`; } sqlSelectAliasAsStruct(alias: string): string { - return `OBJECT_CONSTRUCT(${alias}.*)`; + return `OBJECT_CONSTRUCT_KEEP_NULL(${alias}.*)`; } sqlMaybeQuoteIdentifier(identifier: string): string { return `"${identifier}"`; diff --git a/packages/malloy/src/malloy.ts b/packages/malloy/src/malloy.ts index 467c924dc..de29ebf01 100644 --- a/packages/malloy/src/malloy.ts +++ b/packages/malloy/src/malloy.ts @@ -80,7 +80,7 @@ import { } from './runtime_types'; import {DateTime} from 'luxon'; import {Tag, TagParse, TagParseSpec, Taggable} from './tags'; -import {getDialect} from './dialect'; +import {Dialect, getDialect} from './dialect'; export interface Loggable { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -2477,6 +2477,10 @@ export class SingleConnectionRuntime< ); } + get dialect(): Dialect { + return getDialect(this.connection.dialectName); + } + getQuoter(): (arg: TemplateStringsArray) => string { return (x: TemplateStringsArray) => this.quote(x.toString()); } diff --git a/test/snowflake/uploaddata.sql b/test/snowflake/uploaddata.sql index 8dc4ccecb..9f40fcc50 100644 --- a/test/snowflake/uploaddata.sql +++ b/test/snowflake/uploaddata.sql @@ -4,15 +4,16 @@ -- snowsql -f uploadddate.sql -drop database malloytestdb; -create database malloytestdb; +drop database malloytest; +create database malloytest; -use malloytestdb; +use malloytest; create schema malloytest; CREATE OR REPLACE FILE FORMAT PARQUET_SCHEMA_DETECTION TYPE = PARQUET - BINARY_AS_TEXT = FALSE; + BINARY_AS_TEXT = FALSE + USE_LOGICAL_TYPE = TRUE; PUT file://../data/duckdb/aircraft.parquet @~/staged; diff --git a/test/src/databases/all/lenses.spec.ts b/test/src/databases/all/lenses.spec.ts index 9b015bd36..f1558b38c 100644 --- a/test/src/databases/all/lenses.spec.ts +++ b/test/src/databases/all/lenses.spec.ts @@ -36,9 +36,10 @@ afterAll(async () => { }); runtimes.runtimeMap.forEach((runtime, databaseName) => { + const q = 
runtime.getQuoter(); it(`named view plus named view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } view: m is { aggregate: c is count() } } @@ -47,7 +48,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`named view plus measure - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } measure: c is count() } @@ -56,7 +57,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`dimension plus named view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> n + m @@ -64,7 +65,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`where headed - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> { where: true } + m @@ -72,7 +73,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`named view plus named view in source - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } view: m is { aggregate: c is count() } view: y is d + m @@ -82,7 +83,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`dimension plus named view in source - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { 
view: m is { aggregate: c is count() } view: y is n + m } @@ -91,7 +92,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`named view plus dimension in source - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } view: y is m + n } @@ -100,7 +101,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`literal view plus named view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> { group_by: n } + m @@ -108,7 +109,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`literal view plus measure - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() } run: x -> { group_by: n } + c @@ -116,7 +117,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`measure plus literal view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() } run: x -> c + { group_by: n } @@ -124,7 +125,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`literal view plus named view in source - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } view: y is { group_by: n } + m } @@ -133,7 +134,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`literal view plus measure in source - ${databaseName}`, async () => { await 
expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() view: y is { group_by: n } + c } @@ -142,7 +143,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`named view plus literal view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } } run: x -> d + { aggregate: c is count() } @@ -150,13 +151,13 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`literal view plus literal view - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') run: x -> { group_by: n } + { aggregate: c is count() } `).malloyResultMatches(runtime, {n: 1, c: 1}); }); it(`three named views - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d1 is { group_by: n1 is n } view: d2 is { group_by: n2 is n } view: m is { aggregate: c is count() } @@ -166,7 +167,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`nested no name - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } view: m is { aggregate: c is count() } } @@ -177,7 +178,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`nested with name - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: d is { group_by: n } view: m is { aggregate: c is count() } } @@ -188,7 +189,7 @@ runtimes.runtimeMap.forEach((runtime, 
databaseName) => { }); it(`nested no name with dimension head - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> { @@ -198,7 +199,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`nest dimension only - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> { @@ -208,8 +209,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`joined dimension in middle of refinements - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> m + y.n + { limit: 1 } @@ -217,8 +218,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`nest joined dimension refined - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 1 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 1 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> { @@ -228,8 +229,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`joined dimension refined - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is 
${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> y.n + { limit: 1 } @@ -237,8 +238,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`nest joined dimension bare - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> { @@ -248,8 +249,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`joined dimension bare - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> y.n @@ -257,8 +258,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`joined dimension nest refinement - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is { aggregate: c is count() } } run: x -> { nest: m + y.n } @@ -266,7 +267,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it.skip(`nest measure only in second stage - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> m -> { @@ -276,7 +277,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); 
it(`nest dimension only in refinement - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is { aggregate: c is count() } } run: x -> m + { @@ -286,7 +287,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`view dimension only - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: m is n } run: x -> m @@ -294,8 +295,8 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`view join dimension only - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { - join_one: y is ${databaseName}.sql("SELECT 2 AS n") on true + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { + join_one: y is ${databaseName}.sql('SELECT 2 as ${q`n`}') on true view: m is y.n } run: x -> m @@ -303,26 +304,26 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`run dimension only - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') run: x -> n `).malloyResultMatches(runtime, {n: 1}); }); it.skip(`second stage refinement chain - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') run: x -> n -> n + { aggregate: c is count() } `).malloyResultMatches(runtime, {n: 1, c: 1}); }); it.skip(`second stage refinement chain in nest - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: v is n -> n + { aggregate: c is count() } } `).malloyResultMatches(runtime, {n: 1, c: 1}); }); it(`copy of view with lens - 
${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { view: metrics is { aggregate: c is count() } view: v is { group_by: n } + metrics view: v2 is v @@ -332,7 +333,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`aggregate copy bug with only old refinement - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() } run: x -> c + { @@ -342,7 +343,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`aggregate copy bug with only old old refinement - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() view: v is { aggregate: c } } @@ -353,7 +354,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`but still need to be able to use as output field - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() view: v is { aggregate: c } } @@ -364,7 +365,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); it(`aggregate copy bug - ${databaseName}`, async () => { await expect(` - source: x is ${databaseName}.sql("SELECT 1 AS n") extend { + source: x is ${databaseName}.sql('SELECT 1 as ${q`n`}') extend { measure: c is count() } run: x -> n + c + { diff --git a/test/src/databases/all/nomodel.spec.ts b/test/src/databases/all/nomodel.spec.ts index 75412ec46..d9597076b 100644 --- a/test/src/databases/all/nomodel.spec.ts +++ b/test/src/databases/all/nomodel.spec.ts @@ -527,7 +527,7 @@ runtimes.runtimeMap.forEach((runtime, databaseName) => { }); test( - `number as null- 
${databaseName}`, + `number as null 2 - ${databaseName}`, onlyIf(runtime.supportsNesting, async () => { // a cross join produces a Many to Many result. // symmetric aggregate are needed on both sides of the join @@ -845,21 +845,26 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); test( `single value to udf - ${databaseName}`, - onlyIf(runtime.supportsNesting, async () => { - await expect(` + onlyIf( + runtime.supportsNesting && runtime.dialect.supportsPipelinesInViews, + async () => { + await expect(` run: ${databaseName}.table('malloytest.state_facts') extend { view: fun is { aggregate: t is count() } -> { select: t1 is t+1 } } -> { nest: fun } `).malloyResultMatches(runtime, {'fun.t1': 52}); - }) + } + ) ); test( `Multi value to udf - ${databaseName}`, - onlyIf(runtime.supportsNesting, async () => { - await expect(` + onlyIf( + runtime.supportsNesting && runtime.dialect.supportsPipelinesInViews, + async () => { + await expect(` run: ${databaseName}.table('malloytest.state_facts') extend { view: fun is { group_by: one is 1 @@ -869,13 +874,16 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); nest: fun } `).malloyResultMatches(runtime, {'fun.t1': 52}); - }) + } + ) ); test( `Multi value to udf group by - ${databaseName}`, - onlyIf(runtime.supportsNesting, async () => { - await expect(` + onlyIf( + runtime.supportsNesting && runtime.dialect.supportsPipelinesInViews, + async () => { + await expect(` run: ${databaseName}.table('malloytest.state_facts') extend { view: fun is { group_by: one is 1 @@ -885,7 +893,8 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); nest: fun } `).malloyResultMatches(runtime, {'fun.t1': 52}); - }) + } + ) ); const sql1234 = `${databaseName}.sql('SELECT 1 as ${q`a`}, 2 as ${q`b`} UNION ALL SELECT 3, 4')`; @@ -980,9 +989,11 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); test( `array unnest - ${databaseName}`, - onlyIf(runtime.supportsNesting, async () => 
{ - const splitFN = getSplitFunction(databaseName); - await expect(` + onlyIf( + runtime.supportsNesting && runtime.dialect.supportsArraysInData, + async () => { + const splitFN = getSplitFunction(databaseName); + await expect(` run: ${databaseName}.sql(""" SELECT ${q`city`}, @@ -994,15 +1005,18 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); aggregate: c is count() } `).malloyResultMatches(runtime, {c: 145}); - }) + } + ) ); // make sure we can count the total number of elements when fanning out. test( `array unnest x 2 - ${databaseName}`, - onlyIf(runtime.supportsNesting, async () => { - const splitFN = getSplitFunction(databaseName); - await expect(` + onlyIf( + runtime.supportsNesting && runtime.dialect.supportsArraysInData, + async () => { + const splitFN = getSplitFunction(databaseName); + await expect(` run: ${databaseName}.sql(""" SELECT ${q`city`}, @@ -1016,7 +1030,8 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); c is words.count() a is abreak.count() }`).malloyResultMatches(runtime, {b: 3552, c: 4586, a: 6601}); - }) + } + ) ); test( @@ -1158,14 +1173,14 @@ SELECT row_to_json(finalStage) as row FROM __stage0 AS finalStage`); const back = '\\'; test('backslash quote', async () => { await expect(` - run: ${databaseName}.sql('SELECT 1') -> { + run: ${databaseName}.sql('SELECT 1 as one') -> { select: tick is '${back}${tick}' } `).malloyResultMatches(runtime, {tick}); }); test('backslash backslash', async () => { await expect(` - run: ${databaseName}.sql("SELECT 1") -> { + run: ${databaseName}.sql("SELECT 1 as one") -> { select: back is '${back}${back}' } `).malloyResultMatches(runtime, {back}); diff --git a/test/src/databases/all/time.spec.ts b/test/src/databases/all/time.spec.ts index 7028dd972..5f489449c 100644 --- a/test/src/databases/all/time.spec.ts +++ b/test/src/databases/all/time.spec.ts @@ -34,11 +34,11 @@ import {DateTime as LuxonDateTime} from 'luxon'; const runtimes = new 
RuntimeList(databasesFromEnvironmentOr(allDatabases)); -const timeSQL = - "SELECT DATE '2021-02-24' as t_date, TIMESTAMP '2021-02-24 03:05:06' as t_timestamp"; - // MTOY todo look at this list for timezone problems, I know there are some describe.each(runtimes.runtimeList)('%s date and time', (dbName, runtime) => { + const q = runtime.getQuoter(); + + const timeSQL = `SELECT DATE '2021-02-24' as ${q`t_date`}, TIMESTAMP '2021-02-24 03:05:06' as ${q`t_timestamp`} `; const sqlEq = mkSqlEqWith(runtime, dbName, {sql: timeSQL}); describe('interval measurement', () => { @@ -654,6 +654,7 @@ describe.each(runtimes.runtimeList)('%s: tz literals', (dbName, runtime) => { }); describe.each(runtimes.runtimeList)('%s: query tz', (dbName, runtime) => { + const q = runtime.getQuoter(); test('literal timestamps', async () => { const query = runtime.loadQuery( ` @@ -708,7 +709,7 @@ describe.each(runtimes.runtimeList)('%s: query tz', (dbName, runtime) => { test('cast date to timestamp', async () => { await expect( - `run: ${dbName}.sql(" SELECT DATE '2020-02-20' AS mex_20") -> { + `run: ${dbName}.sql(""" SELECT DATE '2020-02-20' AS ${q`mex_20`} """) -> { timezone: '${zone}' select: mex_ts is mex_20::timestamp }`