Skip to content

fix(schema-compiler): Fix BigQuery queries datetime/timestamp comparisons #9683

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 10 commits into from
Jun 19, 2025
21 changes: 16 additions & 5 deletions packages/cubejs-schema-compiler/src/adapter/BaseQuery.js
Original file line number Diff line number Diff line change
Expand Up @@ -958,8 +958,7 @@ export class BaseQuery {
.map(
d => [
d,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
(dateFrom, dateTo, dateField, dimensionDateFrom, dimensionDateTo) => `${dateField} >= ${dimensionDateFrom} AND ${dateField} <= ${dateTo}`
(_dateFrom, dateTo, dateField, dimensionDateFrom, _dimensionDateTo) => `${dateField} >= ${dimensionDateFrom} AND ${dateField} <= ${dateTo}`
]
);
}
Expand All @@ -970,7 +969,7 @@ export class BaseQuery {
.map(
d => [
d,
(dateFrom, dateTo, dateField, dimensionDateFrom, dimensionDateTo, isFromStartToEnd) => `${dateField} >= ${this.timeGroupedColumn(granularity, dateFrom)} AND ${dateField} <= ${dateTo}`
(dateFrom, dateTo, dateField, _dimensionDateFrom, _dimensionDateTo, _isFromStartToEnd) => `${dateField} >= ${this.timeGroupedColumn(granularity, dateFrom)} AND ${dateField} <= ${dateTo}`
]
);
}
Expand All @@ -980,7 +979,7 @@ export class BaseQuery {
return this.timeDimensions
.filter(td => td.granularity)
.map(
d => [d, (dateFrom, dateTo, dateField, dimensionDateFrom, dimensionDateTo, isFromStartToEnd) => {
d => [d, (dateFrom, dateTo, dateField, _dimensionDateFrom, _dimensionDateTo, isFromStartToEnd) => {
// dateFrom based window
const conditions = [];
if (trailingInterval !== 'unbounded') {
Expand Down Expand Up @@ -1788,6 +1787,13 @@ export class BaseQuery {
const dateJoinConditionSql =
dateJoinCondition.map(
([d, f]) => f(
// Time-series table is generated differently in different dialects,
// but some dialects (like BigQuery) require strict date types and can not automatically convert
// between date and timestamp for comparisons, at the same time, time dimensions are expected to be
// timestamps, so we need to align types for join conditions/comparisons.
// But we can't do it here, as it would break interval maths used in some types of
// rolling window join conditions in some dialects (like Redshift), so we need to
// do casts granularly in rolling window join conditions functions.
`${d.dateSeriesAliasName()}.${this.escapeColumnName('date_from')}`,
`${d.dateSeriesAliasName()}.${this.escapeColumnName('date_to')}`,
`${baseQueryAlias}.${d.aliasName()}`,
Expand Down Expand Up @@ -1822,9 +1828,13 @@ export class BaseQuery {
.join(', ');
}

/**
* BigQuery has strict date type and can not automatically convert between date
* and timestamp, so we override dateFromStartToEndConditionSql() in BigQuery Dialect
* @protected
*/
dateFromStartToEndConditionSql(dateJoinCondition, fromRollup, isFromStartToEnd) {
return dateJoinCondition.map(
// TODO these weird conversions to be strict typed for big query.
// TODO Consider adding strict definitions of local and UTC time type
([d, f]) => ({
filterToWhere: () => {
Expand Down Expand Up @@ -3906,6 +3916,7 @@ export class BaseQuery {
like_escape: '{{ like_expr }} ESCAPE {{ escape_char }}',
within_group: '{{ fun_sql }} WITHIN GROUP (ORDER BY {{ within_group_concat }})',
concat_strings: '{{ strings | join(\' || \' ) }}',
rolling_window_expr_timestamp_cast: '{{ value }}'
},
tesseract: {
ilike: '{{ expr }} {% if negated %}NOT {% endif %}ILIKE {{ pattern }}', // May require different overloads in Tesseract than the ilike from expressions used in SQLAPI.
Expand Down
117 changes: 101 additions & 16 deletions packages/cubejs-schema-compiler/src/adapter/BigqueryQuery.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ export class BigqueryQuery extends BaseQuery {
}

public convertTz(field) {
return `DATETIME(${this.timeStampCast(field)}, '${this.timezone}')`;
return `TIMESTAMP(DATETIME(${field}), '${this.timezone}')`;
}

public timeStampCast(value) {
Expand All @@ -58,7 +58,7 @@ export class BigqueryQuery extends BaseQuery {
}

public timeGroupedColumn(granularity, dimension) {
return `DATETIME_TRUNC(${dimension}, ${GRANULARITY_TO_INTERVAL[granularity]})`;
return this.timeStampCast(`DATETIME_TRUNC(${dimension}, ${GRANULARITY_TO_INTERVAL[granularity]})`);
}

/**
Expand All @@ -72,7 +72,7 @@ export class BigqueryQuery extends BaseQuery {

return `(${this.dateTimeCast(`'${origin}'`)} + INTERVAL ${intervalFormatted} *
CAST(FLOOR(
DATETIME_DIFF(${source}, ${this.dateTimeCast(`'${origin}'`)}, ${timeUnit}) /
DATETIME_DIFF(${this.dateTimeCast(source)}, ${this.dateTimeCast(`'${origin}'`)}, ${timeUnit}) /
DATETIME_DIFF(${beginOfTime} + INTERVAL ${intervalFormatted}, ${beginOfTime}, ${timeUnit})
) AS INT64))`;
}
Expand Down Expand Up @@ -182,31 +182,31 @@ export class BigqueryQuery extends BaseQuery {
}

public subtractInterval(date, interval) {
return `DATETIME_SUB(${date}, INTERVAL ${this.formatInterval(interval)[0]})`;
}

public addInterval(date, interval) {
return `DATETIME_ADD(${date}, INTERVAL ${this.formatInterval(interval)[0]})`;
}

public subtractTimestampInterval(date, interval) {
const [intervalFormatted, timeUnit] = this.formatInterval(interval);
if (['YEAR', 'MONTH', 'QUARTER'].includes(timeUnit)) {
if (['YEAR', 'MONTH', 'QUARTER'].includes(timeUnit) || intervalFormatted.includes('WEEK')) {
return this.timeStampCast(`DATETIME_SUB(DATETIME(${date}), INTERVAL ${intervalFormatted})`);
}

return `TIMESTAMP_SUB(${date}, INTERVAL ${intervalFormatted})`;
}

public addTimestampInterval(date, interval) {
public addInterval(date, interval) {
const [intervalFormatted, timeUnit] = this.formatInterval(interval);
if (['YEAR', 'MONTH', 'QUARTER'].includes(timeUnit)) {
if (['YEAR', 'MONTH', 'QUARTER'].includes(timeUnit) || intervalFormatted.includes('WEEK')) {
return this.timeStampCast(`DATETIME_ADD(DATETIME(${date}), INTERVAL ${intervalFormatted})`);
}

return `TIMESTAMP_ADD(${date}, INTERVAL ${intervalFormatted})`;
}

public subtractTimestampInterval(timestamp, interval) {
  // Timestamp arithmetic is handled by the same strict-type-aware helper as
  // generic date arithmetic in this dialect, so simply delegate.
  const shifted = this.subtractInterval(timestamp, interval);
  return shifted;
}

public addTimestampInterval(timestamp, interval) {
  // Mirror of subtractTimestampInterval: delegate to the shared helper that
  // already accounts for BigQuery's strict timestamp/datetime typing.
  const shifted = this.addInterval(timestamp, interval);
  return shifted;
}

public nowTimestampSql() {
  // SQL expression producing the current timestamp in BigQuery.
  const currentTimestampFn = 'CURRENT_TIMESTAMP()';
  return currentTimestampFn;
}
Expand All @@ -215,6 +215,90 @@ export class BigqueryQuery extends BaseQuery {
return `UNIX_SECONDS(${this.nowTimestampSql()})`;
}

/**
* Should be protected, but BaseQuery is in js
* Overridden from BaseQuery to support BigQuery strict data types for
* joining conditions (note timeStampCast)
*/
public override runningTotalDateJoinCondition() {
  // Builds one [timeDimension, conditionBuilder] pair per time dimension.
  // The upper bound goes through timeStampCast() because BigQuery does not
  // implicitly convert between datetime and timestamp in comparisons.
  return this.timeDimensions.map(td => {
    const buildCondition = (
      _dateFrom: string,
      dateTo: string,
      dateField: string,
      dimensionDateFrom: string,
      _dimensionDateTo: string
    ) => `${dateField} >= ${dimensionDateFrom} AND ${dateField} <= ${this.timeStampCast(dateTo)}`;
    return [td, buildCondition];
  });
}

/**
* Should be protected, but BaseQuery is in js
* Overridden from BaseQuery to support BigQuery strict data types for
* joining conditions (note timeStampCast)
*/
public override rollingWindowToDateJoinCondition(granularity) {
  // Only time dimensions with a granularity participate. The lower bound is
  // truncated to the requested granularity; the upper bound is cast to a
  // timestamp to satisfy BigQuery's strict comparison typing.
  return this.timeDimensions
    .filter(td => td.granularity)
    .map(td => {
      const buildCondition = (
        dateFrom: string,
        dateTo: string,
        dateField: string,
        _dimensionDateFrom: string,
        _dimensionDateTo: string,
        _isFromStartToEnd: boolean
      ) => `${dateField} >= ${this.timeGroupedColumn(granularity, dateFrom)} AND ${dateField} <= ${this.timeStampCast(dateTo)}`;
      return [td, buildCondition];
    });
}

/**
* Should be protected, but BaseQuery is in js
* Overridden from BaseQuery to support BigQuery strict data types for
* joining conditions (note timeStampCast)
*/
public override rollingWindowDateJoinCondition(trailingInterval, leadingInterval, offset) {
  offset = offset || 'end';
  return this.timeDimensions
    .filter(td => td.granularity)
    .map(td => [td, (dateFrom: string, dateTo: string, dateField: string, _dimensionDateFrom: string, _dimensionDateTo: string, isFromStartToEnd: boolean) => {
      const parts: string[] = [];

      // Lower bound of the window — omitted when the trailing side is unbounded.
      if (trailingInterval !== 'unbounded') {
        let windowStart;
        if (isFromStartToEnd || offset === 'start') {
          windowStart = dateFrom;
        } else {
          windowStart = dateTo;
        }
        const lowerBound = trailingInterval ? this.subtractInterval(windowStart, trailingInterval) : windowStart;
        const cmp = offset === 'start' ? '>=' : '>';
        // timeStampCast aligns the bound with the timestamp-typed dateField.
        parts.push(`${dateField} ${cmp} ${this.timeStampCast(lowerBound)}`);
      }

      // Upper bound of the window — omitted when the leading side is unbounded.
      if (leadingInterval !== 'unbounded') {
        let windowEnd;
        if (isFromStartToEnd || offset === 'end') {
          windowEnd = dateTo;
        } else {
          windowEnd = dateFrom;
        }
        const upperBound = leadingInterval ? this.addInterval(windowEnd, leadingInterval) : windowEnd;
        const cmp = offset === 'end' ? '<=' : '<';
        parts.push(`${dateField} ${cmp} ${this.timeStampCast(upperBound)}`);
      }

      // Both sides unbounded — emit an always-true join condition.
      return parts.length ? parts.join(' AND ') : '1 = 1';
    }]
    );
}

// Should be protected, but BaseQuery is in js
public override dateFromStartToEndConditionSql(dateJoinCondition, fromRollup, isFromStartToEnd) {
  return dateJoinCondition.map(([dimension, conditionFn]) => ({
    filterToWhere: () => {
      const series = dimension.timeSeries();
      // Pick the join-window bounds; evaluation order matches the original
      // argument order so query-param allocation stays deterministic.
      let windowFrom;
      let windowTo;
      if (isFromStartToEnd) {
        // Bounds come from the generated time series and need an explicit
        // timestamp cast for BigQuery's strict comparison typing.
        windowFrom = this.timeStampCast(this.paramAllocator.allocateParam(series[0][0]));
        windowTo = this.timeStampCast(this.paramAllocator.allocateParam(series[series.length - 1][1]));
      } else {
        windowFrom = `${this.timeStampInClientTz(dimension.dateFromParam())}`;
        windowTo = `${this.timeStampInClientTz(dimension.dateToParam())}`;
      }
      const fieldSql = `${fromRollup ? this.dimensionSql(dimension) : dimension.convertedToTz()}`;
      return conditionFn(
        windowFrom,
        windowTo,
        fieldSql,
        `${this.timeStampInClientTz(dimension.dateFromParam())}`,
        `${this.timeStampInClientTz(dimension.dateToParam())}`,
        isFromStartToEnd
      );
    }
  }));
}

// eslint-disable-next-line no-unused-vars
public preAggregationLoadSql(cube, preAggregation, tableName) {
return this.preAggregationSql(cube, preAggregation);
Expand Down Expand Up @@ -250,7 +334,7 @@ export class BigqueryQuery extends BaseQuery {
const templates = super.sqlTemplates();
templates.quotes.identifiers = '`';
templates.quotes.escape = '\\`';
templates.functions.DATETRUNC = 'DATETIME_TRUNC(CAST({{ args[1] }} AS DATETIME), {% if date_part|upper == \'WEEK\' %}{{ \'WEEK(MONDAY)\' }}{% else %}{{ date_part }}{% endif %})';
templates.functions.DATETRUNC = 'TIMESTAMP(DATETIME_TRUNC(CAST({{ args[1] }} AS DATETIME), {% if date_part|upper == \'WEEK\' %}{{ \'WEEK(MONDAY)\' }}{% else %}{{ date_part }}{% endif %}))';
templates.functions.LOG = 'LOG({{ args_concat }}{% if args[1] is undefined %}, 10{% endif %})';
templates.functions.BTRIM = 'TRIM({{ args_concat }})';
templates.functions.STRPOS = 'STRPOS({{ args_concat }})';
Expand All @@ -263,7 +347,8 @@ export class BigqueryQuery extends BaseQuery {
templates.expressions.binary = '{% if op == \'%\' %}MOD({{ left }}, {{ right }}){% else %}({{ left }} {{ op }} {{ right }}){% endif %}';
templates.expressions.interval = 'INTERVAL {{ interval }}';
templates.expressions.extract = 'EXTRACT({% if date_part == \'DOW\' %}DAYOFWEEK{% elif date_part == \'DOY\' %}DAYOFYEAR{% else %}{{ date_part }}{% endif %} FROM {{ expr }})';
templates.expressions.timestamp_literal = 'DATETIME(TIMESTAMP(\'{{ value }}\'))';
templates.expressions.timestamp_literal = 'TIMESTAMP(\'{{ value }}\')';
templates.expressions.rolling_window_expr_timestamp_cast = 'TIMESTAMP({{ value }})';
delete templates.expressions.ilike;
delete templates.expressions.like_escape;
templates.filters.like_pattern = 'CONCAT({% if start_wild %}\'%\'{% else %}\'\'{% endif %}, LOWER({{ value }}), {% if end_wild %}\'%\'{% else %}\'\'{% endif %})';
Expand Down
4 changes: 3 additions & 1 deletion packages/cubejs-testing-drivers/fixtures/mssql.json
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,8 @@
"SQL API: Extended nested Rollup over asterisk",
"SQL API: ungrouped pre-agg",
"SQL API: NULLS FIRST/LAST SQL push down",
"SQL API: SQL push down push to cube quoted alias"
"SQL API: SQL push down push to cube quoted alias",
"SQL API: Date/time comparison with SQL push down",
"SQL API: Date/time comparison with date_trunc with SQL push down"
]
}
3 changes: 2 additions & 1 deletion packages/cubejs-testing-drivers/fixtures/mysql.json
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,7 @@
"SQL API: Nested Rollup with aliases",
"SQL API: Nested Rollup over asterisk",
"SQL API: Extended nested Rollup over asterisk",
"SQL API: SQL push down push to cube quoted alias"
"SQL API: SQL push down push to cube quoted alias",
"SQL API: Date/time comparison with date_trunc with SQL push down"
]
}
20 changes: 20 additions & 0 deletions packages/cubejs-testing-drivers/src/tests/testQueries.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2118,5 +2118,25 @@ from
`);
expect(res.rows).toMatchSnapshot();
});

// Regression test: a rolling-window measure filtered with a direct
// timestamp comparison must survive SQL push-down — dialects with strict
// date types (e.g. BigQuery) need explicit casts in generated conditions.
executePg('SQL API: Date/time comparison with SQL push down', async (connection) => {
const res = await connection.query(`
SELECT MEASURE(BigECommerce.rollingCountBy2Day)
FROM BigECommerce
WHERE BigECommerce.orderDate < CAST('2021-01-01' AS TIMESTAMP) AND
LOWER("city") = 'columbus'
`);
// Result rows are pinned via per-driver snapshots.
expect(res.rows).toMatchSnapshot();
});

// Regression test: same comparison as above but routed through date_trunc,
// which exercises the dialect's DATETRUNC template and its timestamp cast
// when pushed down to the underlying database.
executePg('SQL API: Date/time comparison with date_trunc with SQL push down', async (connection) => {
const res = await connection.query(`
SELECT MEASURE(BigECommerce.rollingCountBy2Week)
FROM BigECommerce
WHERE date_trunc('day', BigECommerce.orderDate) < CAST('2021-01-01' AS TIMESTAMP) AND
LOWER("city") = 'columbus'
`);
// Result rows are pinned via per-driver snapshots.
expect(res.rows).toMatchSnapshot();
});
});
}
Original file line number Diff line number Diff line change
Expand Up @@ -8007,3 +8007,19 @@ Array [
},
]
`;

exports[`Queries with the @cubejs-backend/athena-driver SQL API: Date/time comparison with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Day)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/athena-driver SQL API: Date/time comparison with date_trunc with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Week)": "12",
},
]
`;
Original file line number Diff line number Diff line change
@@ -1,5 +1,21 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Queries with the @cubejs-backend/bigquery-driver SQL API: Date/time comparison with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Day)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/bigquery-driver SQL API: Date/time comparison with date_trunc with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Week)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/bigquery-driver SQL API: NULLS FIRST/LAST SQL push down: nulls_first_last_sql_push_down 1`] = `
Array [
Object {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8952,3 +8952,19 @@ Array [
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver export-bucket-s3 SQL API: Date/time comparison with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Day)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver export-bucket-s3 SQL API: Date/time comparison with date_trunc with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Week)": "12",
},
]
`;
Original file line number Diff line number Diff line change
Expand Up @@ -8952,3 +8952,19 @@ Array [
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver export-bucket-s3-prefix SQL API: Date/time comparison with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Day)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver export-bucket-s3-prefix SQL API: Date/time comparison with date_trunc with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Week)": "12",
},
]
`;
Original file line number Diff line number Diff line change
Expand Up @@ -8952,3 +8952,19 @@ Array [
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver SQL API: Date/time comparison with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Day)": "12",
},
]
`;

exports[`Queries with the @cubejs-backend/clickhouse-driver SQL API: Date/time comparison with date_trunc with SQL push down 1`] = `
Array [
Object {
"measure(BigECommerce.rollingCountBy2Week)": "12",
},
]
`;
Loading
Loading
close