});
const sqlSettings: Array<SQL> = [];
if (localSettings.length > 0) {
// Later settings should win, so we're going to loop backwards and not
// add settings for keys we've already seen.
const seenKeys: Array<string> = [];
// TODO:perf: looping backwards is slow
for (let i = localSettings.length - 1; i >= 0; i--) {
const [key, value] = localSettings[i];
if (!seenKeys.includes(key)) {
seenKeys.push(key);
// Make sure that the third config is always `true` so that we are only
// ever setting variables on the transaction.
// Also, we're using `unshift` to undo the reverse-looping we're doing
sqlSettings.unshift(sql.fragment`set_config(${sql.value(key)}, ${sql.value(value)}, true)`);
}
}
}
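// Illustrative walkthrough (example values, not from the original source):
// given localSettings = [['role', 'admin'], ['role', 'viewer']], the backwards
// loop sees 'role' at index 1 first and skips index 0, and `unshift` restores
// the original ordering, so sqlSettings ends up with a single fragment
// equivalent to set_config('role', 'viewer', true), i.e. later settings win.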
const sqlSettingsQuery =
sqlSettings.length > 0 ? sql.compile(sql.query`select ${sql.join(sqlSettings, ', ')}`) : null;
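// Sketch of the compiled output (assuming pg-sql2's usual numbered
// placeholders): with two settings the query text is roughly
//   select set_config($1, $2, true), set_config($3, $4, true)
// and the keys/values travel as bind parameters rather than being inlined.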
// If we can avoid transactions, we get greater performance.
const needTransaction =
pgForceTransaction ||
!!sqlSettingsQuery ||
(operationType !== 'query' && operationType !== 'subscription');
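// For example: a plain `query` with no settings and pgForceTransaction unset
// can run directly on a pooled client, whereas a `mutation`, or any operation
// that has to apply set_config, is wrapped in a transaction so the settings
// stay local to that transaction.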
// Now we've caught as many errors as we can at this stage, let's create a DB connection.
const withAuthenticatedPgClient: WithAuthenticatedPgClientFunction = !needTransaction
? simpleWithPgClient(pgPool)
* `EXISTS(select 1 from (${sqlCommon} OFFSET ${first}) __random_table_alias__)`.
*
* We could see if there's at least one row in sqlCommon that's not
* already in our chosen result set.
*
* We've chosen the latter approach here because it doesn't place a limit
* on queryHasBefore.
*/
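// Roughly, the fragment built below expands to SQL of this shape (the names
// here are placeholders, not the real aliases):
//   exists(
//     <sqlCommon, with the `first` limit dropped>
//     and <cursor expression>::text not in (select __cursor::text from <query alias>)
//     offset <offset>   -- omitted when offset is 0
//   )
// i.e. "is there a matching row whose cursor we haven't already returned?"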
// Drop the `first` limit, see if there are any records that aren't
// already in the list we've fetched.
return sql.fragment`\
exists(
${sqlCommon}
and (${queryBuilder.getSelectCursor() ||
sql.null})::text not in (select __cursor::text from ${sqlQueryAlias})
${offset === 0 ? sql.blank : sql.fragment`offset ${sql.value(offset)}`}
)`;
}
} else {
assert(!invert || offset === 0); // isForwardOrSymmetric
assert(!canHaveCursorInWhere);
// We're dealing with LIMIT/OFFSET pagination here, which means `natural`
// cursors, so the `queryBuilder` factors the before/after, first/last
// into the limit / offset.
const { limit } = queryBuilder.getFinalLimitAndOffset();
if (limit == null) {
// If paginating backwards, then offset > 0 has already been dealt
// with. Unbounded, so there's no next page.
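// e.g. a connection queried with neither `first` nor `last`: no limit was
// applied, every matching row is already in the result set, so the predicate
// is a constant false.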
return sql.fragment`false`;
} else if (invert) {
assert(offset === 0);
if (addNotDistinctFromNullCase) {
/*
* `is null` is not sufficient here because the record might exist but
* have null as each of its values; so we use `is not distinct from null`
* to assert that the record itself doesn't exist. This is typically used
* with column values.
*/
buildObject = sql.fragment`(case when (${this.getTableAlias()} is not distinct from null) then null else ${buildObject} end)`;
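// A sketch of the distinction this relies on (standard Postgres behaviour,
// not taken from this file):
//   select row(null, null) is null;                   -- true: every column is null
//   select row(null, null) is not distinct from null; -- false: a row value still exists
// so only a genuinely absent record makes this `case` yield null.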
} else if (addNullCase) {
/*
* `is null` is probably used here because it's the result of a function;
* functions seem to have trouble differentiating between `null::my_type`
* and `(null,null,null)::my_type`, always opting for the latter which
* then causes issues with the `GraphQLNonNull`s in the schema.
*/
buildObject = sql.fragment`(case when (${this.getTableAlias()} is null) then null else ${buildObject} end)`;
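// e.g., per the note above, a `returns my_type` function that produces no
// real row tends to surface as `(null,null,null)::my_type` rather than
// `null::my_type`; the plain `is null` check maps that to null so the
// schema's non-null fields aren't violated.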
}
return buildObject;
}
buildWhereBoundClause(isLower: boolean): SQL {