diff --git a/db-service/lib/cqn2sql.js b/db-service/lib/cqn2sql.js
index 0ab9bf8f2..942efe888 100644
--- a/db-service/lib/cqn2sql.js
+++ b/db-service/lib/cqn2sql.js
@@ -263,7 +263,13 @@ class CQN2SQLRenderer {
    * @returns {string} SQL
    */
   SELECT_columns(q) {
-    return (q.SELECT.columns ?? ['*']).map(x => this.column_expr(x, q))
+    const ret = []
+    const arr = q.SELECT.columns ?? ['*']
+    for (const x of arr) {
+      if (x.SELECT?.count) arr.push(this.SELECT_count(x))
+      ret.push(this.column_expr(x, q))
+    }
+    return ret
   }
 
   /**
@@ -292,24 +298,12 @@ class CQN2SQLRenderer {
       ? x => {
         const name = this.column_name(x)
         const escaped = `${name.replace(/"/g, '""')}`
-        let col = `${this.output_converter4(x.element, this.quote(name))} AS "${escaped}"`
-        if (x.SELECT?.count) {
-          // Return both the sub select and the count for @odata.count
-          const qc = cds.ql.clone(x, { columns: [{ func: 'count' }], one: 1, limit: 0, orderBy: 0 })
-          return [col, `${this.expr(qc)} AS "${escaped}@odata.count"`]
-        }
-        return col
+        return `${this.output_converter4(x.element, this.quote(name))} AS "${escaped}"`
       }
       : x => {
         const name = this.column_name(x)
         const escaped = `${name.replace(/"/g, '""')}`
-        let col = `'$."${escaped}"',${this.output_converter4(x.element, this.quote(name))}`
-        if (x.SELECT?.count) {
-          // Return both the sub select and the count for @odata.count
-          const qc = cds.ql.clone(x, { columns: [{ func: 'count' }], one: 1, limit: 0, orderBy: 0 })
-          return [col, `'$."${escaped}@odata.count"',${this.expr(qc)}`]
-        }
-        return col
+        return `'$."${escaped}"',${this.output_converter4(x.element, this.quote(name))}`
       }).flat()
 
     if (isSimple) return `SELECT ${cols} FROM (${sql})`
@@ -322,6 +316,17 @@ class CQN2SQLRenderer {
     return `SELECT ${isRoot || SELECT.one ? obj.replace('jsonb', 'json') : `jsonb_group_array(${obj})`} as _json_ FROM (${sql})`
   }
 
+  SELECT_count(q) {
+    const countQuery = cds.ql.clone(q, {
+      columns: [{ func: 'count' }],
+      one: 0, limit: 0, orderBy: 0, expand: 0, count: 0
+    })
+    countQuery.as = q.as + '@odata.count'
+    countQuery.elements = undefined
+    countQuery.element = cds.builtin.types.Int64
+    return countQuery
+  }
+
   /**
    * Renders a SELECT column expression into generic SQL
    * @param {import('./infer/cqn').col} x
diff --git a/db-service/lib/cqn4sql.js b/db-service/lib/cqn4sql.js
index 8843bc209..88c4a8064 100644
--- a/db-service/lib/cqn4sql.js
+++ b/db-service/lib/cqn4sql.js
@@ -825,7 +825,7 @@ function cqn4sql(originalQuery, model) {
     const subqueryBase = {}
     const queryModifiers = { ...column }
     for (const [key, value] of Object.entries(queryModifiers)) {
-      if (key in { limit: 1, orderBy: 1, groupBy: 1, excluding: 1, where: 1, having: 1 }) subqueryBase[key] = value
+      if (key in { limit: 1, orderBy: 1, groupBy: 1, excluding: 1, where: 1, having: 1, count: 1 }) subqueryBase[key] = value
     }
 
     const subquery = {
diff --git a/hana/lib/HANAService.js b/hana/lib/HANAService.js
index fd31f7a3c..7bc9d71aa 100644
--- a/hana/lib/HANAService.js
+++ b/hana/lib/HANAService.js
@@ -515,35 +515,89 @@ class HANAService extends SQLService {
   SELECT_columns(q) {
     const { SELECT, src } = q
     if (!SELECT.columns) return '*'
+    if (SELECT.expand !== 'root') {
+      const ret = []
+      for (const x of q.SELECT.columns) {
+        if (x.elements && x.element?.isAssociation) continue
+        ret.push(this.column_expr(x, q))
+      }
+      return ret
+    }
     const structures = []
     const blobrefs = []
     let expands = {}
     let blobs = {}
     let hasBooleans = false
     let path
-    let sql = SELECT.columns
-      .map(
-        SELECT.expand === 'root'
-          ? x => {
-              if (x === '*') return '*'
-              // means x is a sub select expand
-              if (x.elements && x.element?.isAssociation) {
-                expands[this.column_name(x)] = x.SELECT.one ? null : []
-
-                const parent = src
-                this.extractForeignKeys(x.SELECT.where, parent.as, []).forEach(ref => {
-                  const columnName = this.column_name(ref)
-                  if (!parent.SELECT.columns.find(c => this.column_name(c) === columnName)) {
-                    parent.SELECT.columns.push(ref)
+    let sql = []
+
+    // Remove sub expands and track special return column types
+    for (const x of SELECT.columns) {
+      if (x === '*') sql.push('*')
+      // means x is a sub select expand
+      if (x.elements && x.element?.isAssociation) {
+        if (x.SELECT?.count) {
+          // Add count query to src query and output query
+          const cq = this.SELECT_count(x)
+          src.SELECT.columns.push(cq)
+          if (q !== src) q.SELECT.columns.push({ ref: [cq.as], element: cq.element })
+        }
+
+        expands[this.column_name(x)] = x.SELECT.one ? null : []
+
+        const parent = src
+        this.extractForeignKeys(x.SELECT.where, parent.as, []).forEach(ref => {
+          const columnName = this.column_name(ref)
+          if (!parent.SELECT.columns.find(c => this.column_name(c) === columnName)) {
+            parent.SELECT.columns.push(ref)
+          }
+        })
+
+        if (x.SELECT.from) {
+          x.SELECT.from = {
+            join: 'inner',
+            args: [x.SELECT.from, { ref: [parent.alias], as: parent.as }],
+            on: x.SELECT.where,
+            as: x.SELECT.from.as,
+          }
+        } else {
+          x.SELECT.from = { ref: [parent.alias], as: parent.as }
+          x.SELECT.columns.forEach(col => {
+            // if (col.ref?.length === 1) { col.ref.unshift(parent.as) }
+            if (col.ref?.length > 1) {
+              const colName = this.column_name(col)
+              if (!parent.SELECT.columns.some(c => this.column_name(c) === colName)) {
+                const isSource = from => {
+                  if (from.as === col.ref[0]) return true
+                  return from.args?.some(a => {
+                    if (a.args) return isSource(a)
+                    return a.as === col.ref[0]
+                  })
                 }
-                  })
-                  if (x.SELECT.from) {
-                    x.SELECT.from = {
-                      join: 'inner',
-                      args: [x.SELECT.from, { ref: [parent.alias], as: parent.as }],
-                      on: x.SELECT.where,
-                      as: x.SELECT.from.as,
+                // Inject foreign columns into parent selects (recursively)
+                const as = `$$${col.ref.join('.')}$$`
+                let rename = col.ref[0] !== parent.as
+                let curPar = parent
+                while (curPar) {
+                  if (isSource(curPar.SELECT.from)) {
+                    if (curPar.SELECT.columns.find(c => c.as === as)) {
+                      rename = true
+                    } else {
+                      rename = rename || curPar === parent
+                      curPar.SELECT.columns.push(rename ? { __proto__: col, ref: col.ref, as } : { __proto__: col, ref: [...col.ref] })
+                    }
+                    break
+                  } else {
+                    curPar.SELECT.columns.push({ __proto__: col, ref: [curPar.SELECT.parent.as, as], as })
+                    curPar = curPar.SELECT.parent
+                  }
+                }
+                if (rename) {
+                  col.as = colName
+                  col.ref = [parent.as, as]
+                } else {
+                  col.ref = [parent.as, colName]
                 }
               } else {
                 x.SELECT.from = { ref: [parent.alias], as: parent.as }
@@ -590,98 +644,91 @@ class HANAService extends SQLService {
             }
           })
         }
-
-                x.SELECT.where = undefined
-                x.SELECT.expand = 'root'
-                x.SELECT.parent = parent
-
-                const values = this.values
-                this.values = []
-                parent.SELECT.expand = true
-                this.SELECT(x)
-                this.values = values
-                return false
-              }
-              if (x.element?.type in this.BINARY_TYPES) {
-                blobrefs.push(x)
-                blobs[this.column_name(x)] = null
-                return false
               }
-              if (x.element?.elements || x.element?.items) {
-                // support for structured types and arrays
-                structures.push(x)
-                return false
-              }
-              const columnName = this.column_name(x)
-              if (columnName === '_path_') {
-                path = this.expr(x)
-                return false
-              }
-              if (x.element?.type === 'cds.Boolean') hasBooleans = true
-              const converter = x.element?.[this.class._convertOutput] || (e => e)
-              const sql = x.param !== true && typeof x.val === 'number' ? this.expr({ param: false, __proto__: x }) : this.expr(x)
-              return `${converter(sql, x.element)} as "${columnName.replace(/"/g, '""')}"`
-            }
-          : x => {
-              if (x === '*') return '*'
-              // means x is a sub select expand
-              if (x.elements && x.element?.isAssociation) return false
-              return this.column_expr(x)
-            },
-      )
-      .filter(a => a)
+          })
+        }
 
-    if (SELECT.expand === 'root') {
-      this._blobs = blobs
-      const blobColumns = Object.keys(blobs)
-      this.blobs.push(...blobColumns.filter(b => !this.blobs.includes(b)))
-      if (
-        cds.env.features.sql_simple_queries &&
-        (cds.env.features.sql_simple_queries > 1 || !hasBooleans) &&
-        structures.length + ObjectKeys(expands).length + ObjectKeys(blobs).length === 0 &&
-        !q?.src?.SELECT?.parent &&
-        this.temporary.length === 0
-      ) {
-        return `${sql}`
-      }
+        x.SELECT.where = undefined
+        x.SELECT.expand = 'root'
+        x.SELECT.parent = parent
 
-      expands = this.string(JSON.stringify(expands))
-      blobs = this.string(JSON.stringify(blobs))
-      // When using FOR JSON the whole dataset is put into a single blob
-      // To increase the potential maximum size of the result set every row is converted to a JSON
-      // Making each row a maximum size of 2gb instead of the whole result set to be 2gb
-      // Excluding binary columns as they are not supported by FOR JSON and themselves can be 2gb
-      const rawJsonColumn = sql.length
-        ? `(SELECT ${path ? sql : sql.map(c => c.slice(c.lastIndexOf(' as "') + 4))} FROM JSON_TABLE('{}', '$' COLUMNS("'$$FaKeDuMmYCoLuMn$$'" FOR ORDINALITY)) FOR JSON ('format'='no', 'omitnull'='no', 'arraywrap'='no') RETURNS NVARCHAR(2147483647))`
-        : `'{}'`
-
-      let jsonColumn = rawJsonColumn
-      if (structures.length) {
-        // Appending the structured columns to prevent them from being quoted and escaped
-        // In case of the deep JSON select queries the deep columns depended on a REGEXP_REPLACE which will probably be slower
-        const structuresConcat = structures
-          .map((x, i) => {
-            const name = this.column_name(x)
-            return `'${i ? ',' : '{'}"${name}":' || COALESCE(${this.quote(name)},'null')`
-          })
-          .join(' || ')
-        jsonColumn = sql.length
-          ? `${structuresConcat} || ',' || SUBSTRING(${rawJsonColumn}, 2)`
-          : `${structuresConcat} || '}'`
+        const values = this.values
+        this.values = []
+        parent.SELECT.expand = true
+        this.SELECT(x)
+        this.values = values
+        continue
       }
-
-      // Calculate final output columns once
-      let outputColumns = ''
-      outputColumns = `${path ? this.quote('_path_') : `'$['`} as "_path_",${blobs} as "_blobs_",${expands} as "_expands_",${jsonColumn} as "_json_"`
-      if (blobColumns.length)
-        outputColumns = `${outputColumns},${blobColumns.map(b => `${this.quote(b)} as "${b.replace(/"/g, '""')}"`)}`
-      this._outputColumns = outputColumns
-      if (path) {
-        sql = `*,${path} as ${this.quote('_path_')}`
-      } else {
-        structures.forEach(x => sql.push(this.column_expr(x)))
-        blobrefs.forEach(x => sql.push(this.column_expr(x)))
+      if (x.element?.type in this.BINARY_TYPES) {
+        blobrefs.push(x)
+        blobs[this.column_name(x)] = null
+        continue
+      }
+      if (x.element?.elements || x.element?.items) {
+        // support for structured types and arrays
+        structures.push(x)
+        continue
+      }
+      const columnName = this.column_name(x)
+      if (columnName === '_path_') {
+        path = this.expr(x)
+        continue
       }
+      if (x.element?.type === 'cds.Boolean') hasBooleans = true
+      const converter = x.element?.[this.class._convertOutput] || (e => e)
+      const s = x.param !== true && typeof x.val === 'number' ? this.expr({ param: false, __proto__: x }) : this.expr(x)
+      sql.push(`${converter(s, x.element)} as "${columnName.replace(/"/g, '""')}"`)
+    }
+
+    this._blobs = blobs
+    const blobColumns = Object.keys(blobs)
+    this.blobs.push(...blobColumns.filter(b => !this.blobs.includes(b)))
+    if (
+      cds.env.features.sql_simple_queries &&
+      (cds.env.features.sql_simple_queries > 1 || !hasBooleans) &&
+      structures.length + ObjectKeys(expands).length + ObjectKeys(blobs).length === 0 &&
+      !q?.src?.SELECT?.parent &&
+      this.temporary.length === 0
+    ) {
+      return `${sql}`
+    }
+
+    expands = this.string(JSON.stringify(expands))
+    blobs = this.string(JSON.stringify(blobs))
+    // When using FOR JSON the whole dataset is put into a single blob
+    // To increase the potential maximum size of the result set every row is converted to a JSON
+    // Making each row a maximum size of 2gb instead of the whole result set to be 2gb
+    // Excluding binary columns as they are not supported by FOR JSON and themselves can be 2gb
+    const rawJsonColumn = sql.length
+      ? `(SELECT ${path ? sql : sql.map(c => c.slice(c.lastIndexOf(' as "') + 4))} FROM JSON_TABLE('{}', '$' COLUMNS("'$$FaKeDuMmYCoLuMn$$'" FOR ORDINALITY)) FOR JSON ('format'='no', 'omitnull'='no', 'arraywrap'='no') RETURNS NVARCHAR(2147483647))`
+      : `'{}'`
+
+    let jsonColumn = rawJsonColumn
+    if (structures.length) {
+      // Appending the structured columns to prevent them from being quoted and escaped
+      // In case of the deep JSON select queries the deep columns depended on a REGEXP_REPLACE which will probably be slower
+      const structuresConcat = structures
+        .map((x, i) => {
+          const name = this.column_name(x)
+          return `'${i ? ',' : '{'}"${name}":' || COALESCE(${this.quote(name)},'null')`
+        })
+        .join(' || ')
+      jsonColumn = sql.length
+        ? `${structuresConcat} || ',' || SUBSTRING(${rawJsonColumn}, 2)`
+        : `${structuresConcat} || '}'`
+    }
+
+    // Calculate final output columns once
+    let outputColumns = ''
+    outputColumns = `${path ? this.quote('_path_') : `'$['`} as "_path_",${blobs} as "_blobs_",${expands} as "_expands_",${jsonColumn} as "_json_"`
+    if (blobColumns.length)
+      outputColumns = `${outputColumns},${blobColumns.map(b => `${this.quote(b)} as "${b.replace(/"/g, '""')}"`)}`
+    this._outputColumns = outputColumns
+    if (path) {
+      sql = `*,${path} as ${this.quote('_path_')}`
+    } else {
+      structures.forEach(x => sql.push(this.column_expr(x)))
+      blobrefs.forEach(x => sql.push(this.column_expr(x)))
     }
     return sql
   }
@@ -690,6 +737,15 @@ class HANAService extends SQLService {
     return sql
   }
 
+  SELECT_count(q) {
+    const countQuery = super.SELECT_count(q)
+    countQuery.SELECT.from = countQuery.SELECT.from
+    countQuery.SELECT.where = countQuery.SELECT.where
+    // Ensure that the query is not considered an expand query
+    countQuery.SELECT.parent = undefined
+    return countQuery
+  }
+
   from_dummy() {
     return ' FROM DUMMY'
   }
diff --git a/postgres/lib/PostgresService.js b/postgres/lib/PostgresService.js
index 27cc832c5..e43567d16 100644
--- a/postgres/lib/PostgresService.js
+++ b/postgres/lib/PostgresService.js
@@ -389,14 +389,7 @@ GROUP BY k
     const cols = SELECT.columns.map(x => {
       const name = this.column_name(x)
       const outputConverter = this.output_converter4(x.element, `${queryAlias}.${this.quote(name)}`)
-      let col = `${outputConverter} as ${this.doubleQuote(name)}`
-
-      if (x.SELECT?.count) {
-        // Return both the sub select and the count for @odata.count
-        const qc = cds.ql.clone(x, { columns: [{ func: 'count' }], one: 1, limit: 0, orderBy: 0 })
-        col += `,${this.expr(qc)} as ${this.doubleQuote(`${name}@odata.count`)}`
-      }
-      return col
+      return `${outputConverter} as ${this.doubleQuote(name)}`
     })
     const isRoot = SELECT.expand === 'root'
     const isSimple = cds.env.features.sql_simple_queries &&
diff --git a/test/scenarios/bookshop/read.test.js b/test/scenarios/bookshop/read.test.js
index fde08c5e7..c826a4d5e 100644
--- a/test/scenarios/bookshop/read.test.js
+++ b/test/scenarios/bookshop/read.test.js
@@ -39,6 +39,28 @@ describe('Bookshop - Read', () => {
     expect(res.data['@odata.count']).to.be.eq(5)
   })
 
+  test('Books $count in expand', async () => {
+    const res = await GET(
+      `/admin/Authors?$select=name&$expand=books($count=true)`, admin
+    )
+    expect(res.status).to.be.eq(200)
+    for (const row of res.data.value) {
+      expect(row['books@odata.count']).to.be.eq(row.books.length + '')
+    }
+  })
+
+  test.skip('Books $count in orderby', async () => {
+    const res = await GET(
+      `/admin/Authors?$select=name&$expand=books($count=true)&$orderby=books/$count desc`, admin
+    )
+  })
+
+  test.skip('Books $count in filter', async () => {
+    const res = await GET(
+      `/admin/Authors?$select=name&$expand=books($count=true)&$filter=books/$count eq 2`, admin
+    )
+  })
+
   test('Books with groupby with path expression and expand result', async () => {
     const res = await GET(
       '/admin/Books?$apply=filter(title%20ne%20%27bar%27)/groupby((author/name),aggregate(price with sum as totalAmount))',