diff --git a/.changeset/fifty-bobcats-jog.md b/.changeset/fifty-bobcats-jog.md new file mode 100644 index 00000000000..ba41f5dbb74 --- /dev/null +++ b/.changeset/fifty-bobcats-jog.md @@ -0,0 +1,14 @@ +--- +'@graphql-tools/executor': major +'@graphql-tools/utils': minor +--- + +Upgrade to non-duplicating Incremental Delivery format + +## Description + +GraphQL Incremental Delivery is moving to a [new response format without duplication](https://github.com/graphql/defer-stream-wg/discussions/69). + +This PR updates the executor within graphql-tools to follow the new format, which is a BREAKING CHANGE. + +Incremental Delivery has now been disabled for subscriptions, which is also a BREAKING CHANGE. The GraphQL Working Group has decided to disable incremental delivery support for subscriptions (1) to gather more information about use cases and (2) to explore how to interleave the incremental response streams generated from different source events into one overall subscription response stream. diff --git a/packages/executor/src/execution/AccumulatorMap.ts b/packages/executor/src/execution/AccumulatorMap.ts new file mode 100644 index 00000000000..156fe71c207 --- /dev/null +++ b/packages/executor/src/execution/AccumulatorMap.ts @@ -0,0 +1,17 @@ +/** + * ES6 Map with additional `add` method to accumulate items. + */ +export class AccumulatorMap<K, T> extends Map<K, Array<T>> { + get [Symbol.toStringTag]() { + return 'AccumulatorMap'; + } + + add(key: K, item: T): void { + const group = this.get(key); + if (group === undefined) { + this.set(key, [item]); + } else { + group.push(item); + } + } +} diff --git a/packages/executor/src/execution/BoxedPromiseOrValue.ts b/packages/executor/src/execution/BoxedPromiseOrValue.ts new file mode 100644 index 00000000000..630d1e6fcf8 --- /dev/null +++ b/packages/executor/src/execution/BoxedPromiseOrValue.ts @@ -0,0 +1,25 @@ +import { isPromise } from '@graphql-tools/utils'; +import type { MaybePromise } from '@graphql-tools/utils'; + +/** + * A BoxedPromiseOrValue is a container for a value or promise where the value + * will be updated when the promise resolves. + * + * A BoxedPromiseOrValue may only be used with promises whose possible + * rejection has already been handled, otherwise this will lead to unhandled + * promise rejections.
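// A minimal sketch of the non-duplicating response format described in the
// changeset above, with shapes taken from the test expectations later in this
// PR: deferred and streamed payloads now reference a `pending` entry by `id`
// (optionally narrowed with `subPath`) instead of repeating the full `path`
// and re-sending parent fields, and completion is reported via `completed`.
const exampleInitialResult = {
  data: { hero: { id: '1' } },
  pending: [{ id: '0', path: ['hero'] }],
  hasNext: true,
};
const exampleSubsequentResult = {
  incremental: [{ data: { name: 'Luke' }, id: '0' }],
  completed: [{ id: '0' }],
  hasNext: false,
};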
+ * + * @internal + * */ +export class BoxedPromiseOrValue { + value: MaybePromise; + + constructor(value: MaybePromise) { + this.value = value; + if (isPromise(value)) { + value.then(resolved => { + this.value = resolved; + }); + } + } +} diff --git a/packages/executor/src/execution/IncrementalGraph.ts b/packages/executor/src/execution/IncrementalGraph.ts new file mode 100644 index 00000000000..0a4dc5a5632 --- /dev/null +++ b/packages/executor/src/execution/IncrementalGraph.ts @@ -0,0 +1,314 @@ +import type { GraphQLError } from 'graphql'; +import { isPromise } from '@graphql-tools/utils'; +import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; +import { invariant } from './invariant.js'; +import { promiseWithResolvers } from './promiseWithResolvers.js'; +import type { + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + DeferredGroupedFieldSetResult, + IncrementalDataRecord, + IncrementalDataRecordResult, + ReconcilableDeferredGroupedFieldSetResult, + StreamItemRecord, + StreamRecord, + SubsequentResultRecord, +} from './types.js'; +import { isDeferredFragmentRecord, isDeferredGroupedFieldSetRecord } from './types.js'; + +/** + * @internal + */ +export class IncrementalGraph { + private _rootNodes: Set; + + private _completedQueue: Array; + private _nextQueue: Array< + (iterable: IteratorResult>) => void + >; + + constructor() { + this._rootNodes = new Set(); + this._completedQueue = []; + this._nextQueue = []; + } + + getNewPending( + incrementalDataRecords: ReadonlyArray, + ): ReadonlyArray { + const initialResultChildren = new Set(); + this._addIncrementalDataRecords(incrementalDataRecords, undefined, initialResultChildren); + return this._promoteNonEmptyToRoot(initialResultChildren); + } + + addCompletedReconcilableDeferredGroupedFieldSet( + reconcilableResult: ReconcilableDeferredGroupedFieldSetResult, + ): void { + for (const deferredFragmentRecord of reconcilableResult.deferredGroupedFieldSetRecord + .deferredFragmentRecords) { + deferredFragmentRecord.deferredGroupedFieldSetRecords.delete( + reconcilableResult.deferredGroupedFieldSetRecord, + ); + deferredFragmentRecord.reconcilableResults.add(reconcilableResult); + } + + const incrementalDataRecords = reconcilableResult.incrementalDataRecords; + if (incrementalDataRecords !== undefined) { + this._addIncrementalDataRecords( + incrementalDataRecords, + reconcilableResult.deferredGroupedFieldSetRecord.deferredFragmentRecords, + ); + } + } + + completedIncrementalData() { + return { + [Symbol.asyncIterator]() { + return this; + }, + next: (): Promise>> => { + const firstResult = this._completedQueue.shift(); + if (firstResult !== undefined) { + return Promise.resolve({ + value: this._yieldCurrentCompletedIncrementalData(firstResult), + done: false, + }); + } + const { promise, resolve } = + promiseWithResolvers>>(); + this._nextQueue.push(resolve); + return promise; + }, + return: (): Promise>> => { + for (const resolve of this._nextQueue) { + resolve({ value: undefined, done: true }); + } + return Promise.resolve({ value: undefined, done: true }); + }, + }; + } + + hasNext(): boolean { + return this._rootNodes.size > 0; + } + + completeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): + | { + newPending: ReadonlyArray; + reconcilableResults: ReadonlyArray; + } + | undefined { + if ( + !this._rootNodes.has(deferredFragmentRecord) || + deferredFragmentRecord.deferredGroupedFieldSetRecords.size > 0 + ) { + return; + } + const reconcilableResults = Array.from(deferredFragmentRecord.reconcilableResults); 
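// Usage sketch for the BoxedPromiseOrValue class introduced above (assuming,
// per its doc comment, that any rejection of the wrapped promise is already
// handled): the box can be read synchronously, and once the promise settles
// `.value` holds the resolved value instead of the promise.
const boxed = new BoxedPromiseOrValue(Promise.resolve(42));
// right after construction: boxed.value is still a Promise
// after the promise resolves: boxed.value === 42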
+ this._removePending(deferredFragmentRecord); + for (const reconcilableResult of reconcilableResults) { + for (const otherDeferredFragmentRecord of reconcilableResult.deferredGroupedFieldSetRecord + .deferredFragmentRecords) { + otherDeferredFragmentRecord.reconcilableResults.delete(reconcilableResult); + } + } + const newPending = this._promoteNonEmptyToRoot(deferredFragmentRecord.children); + return { newPending, reconcilableResults }; + } + + removeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): boolean { + if (!this._rootNodes.has(deferredFragmentRecord)) { + return false; + } + this._removePending(deferredFragmentRecord); + return true; + } + + removeStream(streamRecord: StreamRecord): void { + this._removePending(streamRecord); + } + + private _removePending(subsequentResultRecord: SubsequentResultRecord): void { + this._rootNodes.delete(subsequentResultRecord); + if (this._rootNodes.size === 0) { + for (const resolve of this._nextQueue) { + resolve({ value: undefined, done: true }); + } + } + } + + private _addIncrementalDataRecords( + incrementalDataRecords: ReadonlyArray, + parents: ReadonlyArray | undefined, + initialResultChildren?: Set | undefined, + ): void { + for (const incrementalDataRecord of incrementalDataRecords) { + if (isDeferredGroupedFieldSetRecord(incrementalDataRecord)) { + for (const deferredFragmentRecord of incrementalDataRecord.deferredFragmentRecords) { + this._addDeferredFragment(deferredFragmentRecord, initialResultChildren); + deferredFragmentRecord.deferredGroupedFieldSetRecords.add(incrementalDataRecord); + } + if (this._hasPendingFragment(incrementalDataRecord)) { + this._onDeferredGroupedFieldSet(incrementalDataRecord); + } + } else if (parents === undefined) { + invariant(initialResultChildren !== undefined); + initialResultChildren.add(incrementalDataRecord); + } else { + for (const parent of parents) { + this._addDeferredFragment(parent, initialResultChildren); + parent.children.add(incrementalDataRecord); + } + } + } + } + + private _promoteNonEmptyToRoot( + maybeEmptyNewPending: Set, + ): ReadonlyArray { + const newPending: Array = []; + for (const subsequentResultRecord of maybeEmptyNewPending) { + if (isDeferredFragmentRecord(subsequentResultRecord)) { + if (subsequentResultRecord.deferredGroupedFieldSetRecords.size > 0) { + subsequentResultRecord.setAsPending(); + for (const deferredGroupedFieldSetRecord of subsequentResultRecord.deferredGroupedFieldSetRecords) { + if (!this._hasPendingFragment(deferredGroupedFieldSetRecord)) { + this._onDeferredGroupedFieldSet(deferredGroupedFieldSetRecord); + } + } + this._rootNodes.add(subsequentResultRecord); + newPending.push(subsequentResultRecord); + continue; + } + for (const child of subsequentResultRecord.children) { + maybeEmptyNewPending.add(child); + } + } else { + this._rootNodes.add(subsequentResultRecord); + newPending.push(subsequentResultRecord); + + this._onStreamItems(subsequentResultRecord); + } + } + return newPending; + } + + private _hasPendingFragment( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + ): boolean { + return deferredGroupedFieldSetRecord.deferredFragmentRecords.some(deferredFragmentRecord => + this._rootNodes.has(deferredFragmentRecord), + ); + } + + private _addDeferredFragment( + deferredFragmentRecord: DeferredFragmentRecord, + subsequentResultRecords: Set | undefined, + ): void { + if (this._rootNodes.has(deferredFragmentRecord)) { + return; + } + const parent = deferredFragmentRecord.parent; + if (parent === undefined) { + 
invariant(subsequentResultRecords !== undefined); + subsequentResultRecords.add(deferredFragmentRecord); + return; + } + parent.children.add(deferredFragmentRecord); + this._addDeferredFragment(parent, subsequentResultRecords); + } + + private _onDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + ): void { + const result = ( + deferredGroupedFieldSetRecord.result as BoxedPromiseOrValue + ).value; + if (isPromise(result)) { + result.then(resolved => this._enqueue(resolved)); + } else { + this._enqueue(result); + } + } + + private async _onStreamItems(streamRecord: StreamRecord): Promise { + let items: Array = []; + let errors: Array = []; + let incrementalDataRecords: Array = []; + const streamItemQueue = streamRecord.streamItemQueue; + let streamItemRecord: StreamItemRecord | undefined; + while ((streamItemRecord = streamItemQueue.shift()) !== undefined) { + let result = + streamItemRecord instanceof BoxedPromiseOrValue + ? streamItemRecord.value + : streamItemRecord().value; + if (isPromise(result)) { + if (items.length > 0) { + this._enqueue({ + streamRecord, + result: + // TODO add additional test case or rework for coverage + errors.length > 0 /* c8 ignore start */ + ? { items, errors } /* c8 ignore stop */ + : { items }, + incrementalDataRecords, + }); + items = []; + errors = []; + incrementalDataRecords = []; + } + result = await result; + // wait an additional tick to coalesce resolving additional promises + // within the queue + await Promise.resolve(); + } + if (result.item === undefined) { + if (items.length > 0) { + this._enqueue({ + streamRecord, + result: errors.length > 0 ? { items, errors } : { items }, + incrementalDataRecords, + }); + } + this._enqueue( + result.errors === undefined + ? { streamRecord } + : { + streamRecord, + errors: result.errors, + }, + ); + return; + } + items.push(result.item); + if (result.errors !== undefined) { + errors.push(...result.errors); + } + if (result.incrementalDataRecords !== undefined) { + incrementalDataRecords.push(...result.incrementalDataRecords); + } + } + } + + private *_yieldCurrentCompletedIncrementalData( + first: IncrementalDataRecordResult, + ): Generator { + yield first; + let completed; + while ((completed = this._completedQueue.shift()) !== undefined) { + yield completed; + } + } + + private _enqueue(completed: IncrementalDataRecordResult): void { + const next = this._nextQueue.shift(); + if (next !== undefined) { + next({ + value: this._yieldCurrentCompletedIncrementalData(completed), + done: false, + }); + return; + } + this._completedQueue.push(completed); + } +} diff --git a/packages/executor/src/execution/IncrementalPublisher.ts b/packages/executor/src/execution/IncrementalPublisher.ts new file mode 100644 index 00000000000..02e63a2e414 --- /dev/null +++ b/packages/executor/src/execution/IncrementalPublisher.ts @@ -0,0 +1,372 @@ +import type { GraphQLError } from 'graphql'; +import { pathToArray } from '@graphql-tools/utils'; +import { IncrementalGraph } from './IncrementalGraph.js'; +import { invariant } from './invariant.js'; +import type { + CancellableStreamRecord, + CompletedResult, + DeferredFragmentRecord, + DeferredGroupedFieldSetResult, + IncrementalDataRecord, + IncrementalDataRecordResult, + IncrementalDeferResult, + IncrementalExecutionResults, + IncrementalResult, + IncrementalStreamResult, + InitialIncrementalExecutionResult, + PendingResult, + StreamItemsResult, + SubsequentIncrementalExecutionResult, + SubsequentResultRecord, +} from './types.js'; +import { + 
isCancellableStreamRecord, + isDeferredGroupedFieldSetResult, + isNonReconcilableDeferredGroupedFieldSetResult, +} from './types.js'; + +export function buildIncrementalResponse( + context: IncrementalPublisherContext, + result: TData, + errors: ReadonlyArray | undefined, + incrementalDataRecords: ReadonlyArray, +): IncrementalExecutionResults { + const incrementalPublisher = new IncrementalPublisher(context); + return incrementalPublisher.buildResponse(result, errors, incrementalDataRecords); +} + +interface IncrementalPublisherContext { + signal: AbortSignal | undefined; + cancellableStreams: Set | undefined; +} + +interface SubsequentIncrementalExecutionResultContext { + pending: Array; + incremental: Array>; + completed: Array; +} + +/** + * This class is used to publish incremental results to the client, enabling semi-concurrent + * execution while preserving result order. + * + * @internal + */ +class IncrementalPublisher { + private _context: IncrementalPublisherContext; + private _nextId: number; + private _incrementalGraph: IncrementalGraph; + + constructor(context: IncrementalPublisherContext) { + this._context = context; + this._nextId = 0; + this._incrementalGraph = new IncrementalGraph(); + } + + buildResponse( + data: TData, + errors: ReadonlyArray | undefined, + incrementalDataRecords: ReadonlyArray, + ): IncrementalExecutionResults { + const newPending = this._incrementalGraph.getNewPending(incrementalDataRecords); + + const pending = this._pendingSourcesToResults(newPending); + + const initialResult: InitialIncrementalExecutionResult = + errors === undefined + ? { data, pending, hasNext: true } + : { errors, data, pending, hasNext: true }; + + return { + initialResult, + subsequentResults: this._subscribe(), + }; + } + + private _pendingSourcesToResults( + newPending: ReadonlyArray, + ): Array { + const pendingResults: Array = []; + for (const pendingSource of newPending) { + const id = String(this._getNextId()); + pendingSource.id = id; + const pendingResult: PendingResult = { + id, + path: pathToArray(pendingSource.path), + }; + if (pendingSource.label !== undefined) { + pendingResult.label = pendingSource.label; + } + pendingResults.push(pendingResult); + } + return pendingResults; + } + + private _getNextId(): string { + return String(this._nextId++); + } + + private _subscribe(): AsyncGenerator< + SubsequentIncrementalExecutionResult, + void, + void + > { + let isDone = false; + + this._context.signal?.addEventListener('abort', () => { + this._incrementalGraph.completedIncrementalData().return(); + }); + + const _next = async (): Promise< + IteratorResult, void> + > => { + if (isDone) { + await this._returnAsyncIteratorsIgnoringErrors(); + return { value: undefined, done: true }; + } + + const context: SubsequentIncrementalExecutionResultContext = { + pending: [], + incremental: [], + completed: [], + }; + + const completedIncrementalData = this._incrementalGraph.completedIncrementalData(); + // use the raw iterator rather than 'for await ... 
of' so as not to trigger the + // '.return()' method on the iterator when exiting the loop with the next value + const asyncIterator = completedIncrementalData[Symbol.asyncIterator](); + let iteration = await asyncIterator.next(); + while (!iteration.done) { + for (const completedResult of iteration.value) { + this._handleCompletedIncrementalData(completedResult, context); + } + + const { incremental, completed } = context; + if (incremental.length > 0 || completed.length > 0) { + const hasNext = this._incrementalGraph.hasNext(); + + if (!hasNext) { + isDone = true; + } + + const subsequentIncrementalExecutionResult: SubsequentIncrementalExecutionResult = + { + hasNext, + }; + + const pending = context.pending; + if (pending.length > 0) { + subsequentIncrementalExecutionResult.pending = pending; + } + if (incremental.length > 0) { + subsequentIncrementalExecutionResult.incremental = incremental; + } + if (completed.length > 0) { + subsequentIncrementalExecutionResult.completed = completed; + } + + return { value: subsequentIncrementalExecutionResult, done: false }; + } + + iteration = await asyncIterator.next(); + } + + if (this._context.signal?.aborted) { + throw this._context.signal.reason; + } + + await this._returnAsyncIteratorsIgnoringErrors(); + return { value: undefined, done: true }; + }; + + const _return = async (): Promise< + IteratorResult, void> + > => { + isDone = true; + await this._returnAsyncIterators(); + return { value: undefined, done: true }; + }; + + const _throw = async ( + error?: unknown, + ): Promise, void>> => { + isDone = true; + await this._returnAsyncIterators(); + return Promise.reject(error); + }; + + return { + [Symbol.asyncIterator]() { + return this; + }, + next: _next, + return: _return, + throw: _throw, + }; + } + + private _handleCompletedIncrementalData( + completedIncrementalData: IncrementalDataRecordResult, + context: SubsequentIncrementalExecutionResultContext, + ): void { + if (isDeferredGroupedFieldSetResult(completedIncrementalData)) { + this._handleCompletedDeferredGroupedFieldSet(completedIncrementalData, context); + } else { + this._handleCompletedStreamItems(completedIncrementalData, context); + } + } + + private _handleCompletedDeferredGroupedFieldSet( + deferredGroupedFieldSetResult: DeferredGroupedFieldSetResult, + context: SubsequentIncrementalExecutionResultContext, + ): void { + if (isNonReconcilableDeferredGroupedFieldSetResult(deferredGroupedFieldSetResult)) { + for (const deferredFragmentRecord of deferredGroupedFieldSetResult + .deferredGroupedFieldSetRecord.deferredFragmentRecords) { + const id = deferredFragmentRecord.id; + if (!this._incrementalGraph.removeDeferredFragment(deferredFragmentRecord)) { + // This can occur if multiple deferred grouped field sets error for a fragment. 
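// Sketch of the iteration pattern noted in the _subscribe comment above
// (plain JavaScript async-iteration semantics, not an API from this PR):
// leaving a `for await ... of` loop early invokes the iterator's `.return()`,
// which here would close the completed-data queue, so the publisher steps the
// iterator manually with `.next()` instead.
async function takeWithoutClosing<T>(source: AsyncIterable<T>, count: number): Promise<Array<T>> {
  const iterator = source[Symbol.asyncIterator]();
  const taken: Array<T> = [];
  while (taken.length < count) {
    const step = await iterator.next();
    if (step.done) {
      break;
    }
    taken.push(step.value);
  }
  // no iterator.return() call here, so the underlying source stays open
  return taken;
}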
+ continue; + } + invariant(id !== undefined); + context.completed.push({ + id, + errors: deferredGroupedFieldSetResult.errors, + }); + } + return; + } + + this._incrementalGraph.addCompletedReconcilableDeferredGroupedFieldSet( + deferredGroupedFieldSetResult, + ); + + for (const deferredFragmentRecord of deferredGroupedFieldSetResult.deferredGroupedFieldSetRecord + .deferredFragmentRecords) { + const completion = this._incrementalGraph.completeDeferredFragment(deferredFragmentRecord); + if (completion === undefined) { + continue; + } + const id = deferredFragmentRecord.id; + invariant(id !== undefined); + const incremental = context.incremental; + const { newPending, reconcilableResults } = completion; + context.pending.push(...this._pendingSourcesToResults(newPending)); + for (const reconcilableResult of reconcilableResults) { + const { bestId, subPath } = this._getBestIdAndSubPath( + id, + deferredFragmentRecord, + reconcilableResult, + ); + const incrementalEntry: IncrementalDeferResult = { + ...reconcilableResult.result, + id: bestId, + }; + if (subPath !== undefined) { + incrementalEntry.subPath = subPath; + } + incremental.push(incrementalEntry); + } + context.completed.push({ id }); + } + } + + private _handleCompletedStreamItems( + streamItemsResult: StreamItemsResult, + context: SubsequentIncrementalExecutionResultContext, + ): void { + const streamRecord = streamItemsResult.streamRecord; + const id = streamRecord.id; + invariant(id !== undefined); + if (streamItemsResult.errors !== undefined) { + context.completed.push({ + id, + errors: streamItemsResult.errors, + }); + this._incrementalGraph.removeStream(streamRecord); + if (isCancellableStreamRecord(streamRecord)) { + invariant(this._context.cancellableStreams !== undefined); + this._context.cancellableStreams.delete(streamRecord); + streamRecord.earlyReturn().catch(() => { + /* c8 ignore next 1 */ + // ignore error + }); + } + } else if (streamItemsResult.result === undefined) { + context.completed.push({ id }); + this._incrementalGraph.removeStream(streamRecord); + if (isCancellableStreamRecord(streamRecord)) { + invariant(this._context.cancellableStreams !== undefined); + this._context.cancellableStreams.delete(streamRecord); + } + } else { + const incrementalEntry: IncrementalStreamResult = { + id, + ...streamItemsResult.result, + }; + + context.incremental.push(incrementalEntry); + + const incrementalDataRecords = streamItemsResult.incrementalDataRecords; + if (incrementalDataRecords !== undefined) { + const newPending = this._incrementalGraph.getNewPending(incrementalDataRecords); + context.pending.push(...this._pendingSourcesToResults(newPending)); + } + } + } + + private _getBestIdAndSubPath( + initialId: string, + initialDeferredFragmentRecord: DeferredFragmentRecord, + deferredGroupedFieldSetResult: DeferredGroupedFieldSetResult, + ): { bestId: string; subPath: ReadonlyArray | undefined } { + let maxLength = pathToArray(initialDeferredFragmentRecord.path).length; + let bestId = initialId; + + for (const deferredFragmentRecord of deferredGroupedFieldSetResult.deferredGroupedFieldSetRecord + .deferredFragmentRecords) { + if (deferredFragmentRecord === initialDeferredFragmentRecord) { + continue; + } + const id = deferredFragmentRecord.id; + // TODO: add test case for when an fragment has not been released, but might be processed for the shortest path. 
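// Worked example of the id + subPath addressing computed in _getBestIdAndSubPath
// above (values mirror expectations in defer-test.ts; an illustration, not an
// exhaustive spec): among the pending fragments a result belongs to, the one
// with the deepest path that already has an id is chosen, and only the
// remaining path segments are emitted as subPath.
const exampleDeferredEntry = {
  // result path ['hero', 'nestedObject', 'deeperObject'] delivered under a
  // pending fragment at path ['hero'] with id '0':
  id: '0',
  subPath: ['nestedObject', 'deeperObject'],
  data: { bar: 'bar' },
};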
+ /* c8 ignore next 3 */ + if (id === undefined) { + continue; + } + const fragmentPath = pathToArray(deferredFragmentRecord.path); + const length = fragmentPath.length; + if (length > maxLength) { + maxLength = length; + bestId = id; + } + } + const subPath = deferredGroupedFieldSetResult.path.slice(maxLength); + return { + bestId, + subPath: subPath.length > 0 ? subPath : undefined, + }; + } + + private async _returnAsyncIterators(): Promise { + await this._incrementalGraph.completedIncrementalData().return(); + + const cancellableStreams = this._context.cancellableStreams; + if (cancellableStreams === undefined) { + return; + } + const promises: Array> = []; + for (const streamRecord of cancellableStreams) { + if (streamRecord.earlyReturn !== undefined) { + promises.push(streamRecord.earlyReturn()); + } + } + await Promise.all(promises); + } + + private async _returnAsyncIteratorsIgnoringErrors(): Promise { + await this._returnAsyncIterators().catch(() => { + // Ignore errors + }); + } +} diff --git a/packages/executor/src/execution/__tests__/abort-signal.test.ts b/packages/executor/src/execution/__tests__/abort-signal.test.ts index 920d7c95165..cee8391aea9 100644 --- a/packages/executor/src/execution/__tests__/abort-signal.test.ts +++ b/packages/executor/src/execution/__tests__/abort-signal.test.ts @@ -143,7 +143,7 @@ describe('Abort Signal', () => { Mutation: { first() { didInvokeFirstFn = true; - return true; + return Promise.resolve(true); }, second() { didInvokeSecondFn = true; @@ -168,7 +168,7 @@ describe('Abort Signal', () => { `), signal: controller.signal, }); - expect(result$).rejects.toMatchInlineSnapshot(`DOMException {}`); + await expect(result$).rejects.toMatchInlineSnapshot(`DOMException {}`); expect(didInvokeFirstFn).toBe(true); expect(didInvokeSecondFn).toBe(true); expect(didInvokeThirdFn).toBe(false); @@ -275,6 +275,7 @@ describe('Abort Signal', () => { data: { counter: [], }, + pending: [{ id: '0', path: ['counter'] }], hasNext: true, }, }); @@ -356,6 +357,10 @@ describe('Abort Signal', () => { counter1: [], counter2: [], }, + pending: [ + { id: '0', path: ['counter1'] }, + { id: '1', path: ['counter2'] }, + ], hasNext: true, }, }); @@ -433,6 +438,14 @@ describe('Abort Signal', () => { "root": {}, }, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [ + "root", + ], + }, + ], } `); const next$ = iterator.next(); @@ -442,6 +455,89 @@ describe('Abort Signal', () => { await expect(next$).rejects.toThrow('This operation was aborted'); expect(bResolverGotInvoked).toBe(false); }); + it('stops pending stream execution for never-returning incremental delivery (@defer)', async () => { + const aResolverGotInvokedD = createDeferred(); + const requestGotCancelledD = createDeferred(); + let bResolverGotInvoked = false; + + const schema = makeExecutableSchema({ + typeDefs: /* GraphQL */ ` + type Query { + root: A! + } + type A { + a: B! + } + type B { + b: String + } + `, + resolvers: { + Query: { + async root() { + return {}; + }, + }, + A: { + async a() { + aResolverGotInvokedD.resolve(); + await requestGotCancelledD.promise; + return {}; + }, + }, + B: { + b() { + bResolverGotInvoked = true; + return new Promise(() => {}); + }, + }, + }, + }); + const controller = new AbortController(); + const result = await normalizedExecutor({ + schema, + document: parse(/* GraphQL */ ` + query { + root { + ... 
@defer { + a { + b + } + } + } + } + `), + signal: controller.signal, + }); + + if (!isAsyncIterable(result)) { + throw new Error('Result is not an async iterable'); + } + + const iterator = result[Symbol.asyncIterator](); + const next = await iterator.next(); + expect(next.value).toMatchInlineSnapshot(` +{ + "data": { + "root": {}, + }, + "hasNext": true, + "pending": [ + { + "id": "0", + "path": [ + "root", + ], + }, + ], +} +`); + const next$ = iterator.next(); + await aResolverGotInvokedD.promise; + controller.abort(); + await expect(next$).rejects.toThrow('This operation was aborted'); + expect(bResolverGotInvoked).toBe(false); + }); it('stops promise execution', async () => { const controller = new AbortController(); const d = createDeferred(); diff --git a/packages/executor/src/execution/__tests__/backpressure.test.ts b/packages/executor/src/execution/__tests__/backpressure.test.ts index 9d8b8c78ec7..9d2124979d1 100644 --- a/packages/executor/src/execution/__tests__/backpressure.test.ts +++ b/packages/executor/src/execution/__tests__/backpressure.test.ts @@ -69,6 +69,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -93,6 +94,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -120,6 +122,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -128,7 +131,7 @@ describe('Defer Stream cancellation', () => { incremental: [ { items: [3], - path: ['countdownStream', 0], + id: '0', }, ], hasNext: true, @@ -158,6 +161,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [3], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; diff --git a/packages/executor/src/execution/__tests__/defer-test.ts b/packages/executor/src/execution/__tests__/defer-test.ts index bf577a7320a..104e0c42ba4 100644 --- a/packages/executor/src/execution/__tests__/defer-test.ts +++ b/packages/executor/src/execution/__tests__/defer-test.ts @@ -10,20 +10,18 @@ import { } from 'graphql'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; +import { execute } from '../execute.js'; +import { promiseWithResolvers } from '../promiseWithResolvers.js'; import type { InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, -} from '../execute.js'; -import { execute } from '../execute.js'; +} from '../types.js'; const friendType = new GraphQLObjectType({ fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, - promiseNonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => Promise.resolve(null), - }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, }, name: 'Friend', }); @@ -34,64 +32,114 @@ const friends = [ { name: 'C-3PO', id: 4 }, ]; +const deeperObject = new GraphQLObjectType({ + fields: { + foo: { type: GraphQLString }, + bar: { type: GraphQLString }, + baz: { type: GraphQLString }, + bak: { type: GraphQLString }, + }, + name: 'DeeperObject', +}); + +const nestedObject = new GraphQLObjectType({ + fields: { + deeperObject: { type: deeperObject }, + name: { type: GraphQLString }, + }, + name: 'NestedObject', +}); + +const anotherNestedObject = new GraphQLObjectType({ + fields: { + deeperObject: 
{ type: deeperObject }, + }, + name: 'AnotherNestedObject', +}); + +const hero = { + name: 'Luke', + id: 1, + friends, + nestedObject, + anotherNestedObject, +}; + +const c = new GraphQLObjectType({ + fields: { + d: { type: GraphQLString }, + nonNullErrorField: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: 'c', +}); + +const e = new GraphQLObjectType({ + fields: { + f: { type: GraphQLString }, + }, + name: 'e', +}); + +const b = new GraphQLObjectType({ + fields: { + c: { type: c }, + e: { type: e }, + }, + name: 'b', +}); + +const a = new GraphQLObjectType({ + fields: { + b: { type: b }, + someField: { type: GraphQLString }, + }, + name: 'a', +}); + +const g = new GraphQLObjectType({ + fields: { + h: { type: GraphQLString }, + }, + name: 'g', +}); + const heroType = new GraphQLObjectType({ fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, - slowField: { - type: GraphQLString, - resolve: async () => { - await resolveOnNextTick(); - return 'slow'; - }, - }, - errorField: { - type: GraphQLString, - resolve: () => { - throw new Error('bad'); - }, - }, - nonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => null, - }, - promiseNonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => Promise.resolve(null), - }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, friends: { type: new GraphQLList(friendType), - resolve: () => friends, - }, - asyncFriends: { - type: new GraphQLList(friendType), - async *resolve() { - yield await Promise.resolve(friends[0]); - }, }, + nestedObject: { type: nestedObject }, + anotherNestedObject: { type: anotherNestedObject }, }, name: 'Hero', }); -const hero = { name: 'Luke', id: 1 }; - const query = new GraphQLObjectType({ fields: { hero: { type: heroType, - resolve: () => hero, }, + a: { type: a }, + g: { type: g }, }, name: 'Query', }); const schema = new GraphQLSchema({ query }); -async function complete(document: DocumentNode) { +async function complete( + document: DocumentNode, + rootValue: unknown = { hero }, + enableEarlyExecution = false, +) { const result = await execute({ schema, document, - rootValue: {}, + rootValue, + enableEarlyExecution, }); if ('initialResult' in result) { @@ -107,7 +155,7 @@ async function complete(document: DocumentNode) { describe('Execute: defer directive', () => { it('Can defer fragments containing scalar types', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -115,39 +163,37 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - id name } `); - const result = await complete(document); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { hero: { id: '1', }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { - id: '1', name: 'Luke', }, - path: ['hero'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can disable defer using if argument', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -158,7 +204,6 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); expectJSON(result).toDeepEqual({ @@ -170,9 +215,8 @@ describe('Execute: defer directive', () => { }, }); }); - it('Does not disable defer with null if argument', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery($shouldDefer: Boolean) { hero { id 
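// Condensed variant of the `complete` helper above, highlighting the new
// `enableEarlyExecution` executor option exercised in this PR (the behavioral
// description is inferred from the test names in this file): when enabled,
// deferred grouped field sets start executing immediately; otherwise they
// execute only once their fragment has been released as pending.
async function completeEagerly(document: DocumentNode) {
  return execute({ schema, document, rootValue: { hero }, enableEarlyExecution: true });
}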
@@ -183,27 +227,139 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { name: 'Luke' }, - path: ['hero'], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Does not execute deferred fragments early when not specified', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await complete(document, { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push('slow-id'); + return hero.id; + }, + name: () => { + order.push('fast-name'); + return hero.name; + }, + }, + }); + + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: '1', + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: 'Luke', + }, + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); + expect(order).toEqual(['slow-id', 'fast-name']); }); + it('Does execute deferred fragments early when specified', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await complete( + document, + { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push('slow-id'); + return hero.id; + }, + name: () => { + order.push('fast-name'); + return hero.name; + }, + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: '1', + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: 'Luke', + }, + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual(['fast-name', 'slow-id']); + }); it('Can defer fragments on the top level Query field', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } @@ -213,12 +369,12 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ id: '0', path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -229,32 +385,38 @@ describe('Execute: defer directive', () => { id: '1', }, }, - path: [], - label: 'DeferQuery', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can defer fragments with errors on the top level Query field', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } fragment QueryFragment on Query { hero { - errorField + name } } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + name: () => { + throw new Error('bad'); + }, + }, + }); expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ id: '0', path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -262,35 +424,33 @@ describe('Execute: defer directive', () => { { data: { hero: { - errorField: null, + name: null, }, }, errors: [ { message: 'bad', locations: [{ line: 7, column: 11 }], - path: ['hero', 'errorField'], 
+ path: ['hero', 'name'], }, ], - path: [], - label: 'DeferQuery', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can defer a fragment within an already deferred fragment', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") } } fragment TopFragment on Hero { - name + id ...NestedFragment @defer(label: "DeferNested") } fragment NestedFragment on Hero { @@ -299,45 +459,41 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { - hero: { - id: '1', - }, + hero: {}, }, + pending: [{ id: '0', path: ['hero'], label: 'DeferTop' }], hasNext: true, }, { + pending: [{ id: '1', path: ['hero'], label: 'DeferNested' }], incremental: [ { data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + id: '1', }, - path: ['hero'], - label: 'DeferNested', + id: '0', }, { data: { - name: 'Luke', + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, - path: ['hero'], - label: 'DeferTop', + id: '1', }, ], + completed: [{ id: '0' }, { id: '1' }], hasNext: false, }, ]); }); - it('Can defer a fragment that is also not deferred, deferred fragment is first', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") ...TopFragment } @@ -346,38 +502,19 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: '1', - name: 'Luke', - }, + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: 'Luke', }, - hasNext: true, - }, - { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], - hasNext: false, }, - ]); + }); }); - it('Can defer a fragment that is also not deferred, non-deferred fragment is first', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment ...TopFragment @defer(label: "DeferTop") } @@ -386,35 +523,18 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: '1', - name: 'Luke', - }, + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: 'Luke', }, - hasNext: true, - }, - { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], - hasNext: false, }, - ]); + }); }); it('Can defer an inline fragment', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -424,102 +544,1637 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'], label: 'InlineDeferred' }], hasNext: true, }, { - incremental: [{ data: { name: 'Luke' }, path: ['hero'], label: 'InlineDeferred' }], + incremental: [{ data: { name: 'Luke' }, id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + it('Does not emit empty defer fragments', async () => { + const document = parse(` query HeroNameQuery { hero { - id - ...NameFragment @defer + ... 
@defer { + name @skip(if: true) + } } } - fragment NameFragment on Hero { - errorField + fragment TopFragment on Hero { + name } `); + const result = await complete(document); + expectJSON(result).toDeepEqual({ + data: { + hero: {}, + }, + }); + }); + it('Emits children of empty defer fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer { + ... @defer { + name + } + } + } + } + `); const result = await complete(document); expectJSON(result).toDeepEqual([ { - data: { hero: { id: '1' } }, + data: { + hero: {}, + }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { - incremental: [ - { - data: { errorField: null }, - path: ['hero'], - errors: [ - { - message: 'bad', - locations: [{ line: 9, column: 9 }], - path: ['hero', 'errorField'], - }, - ], - }, - ], + incremental: [{ data: { name: 'Luke' }, id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles non-nullable errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + it('Can separately emit defer fragments with different labels with varying fields', async () => { + const document = parse(` query HeroNameQuery { hero { - id - ...NameFragment @defer + ... @defer(label: "DeferID") { + id + } + ... @defer(label: "DeferName") { + name + } } } - fragment NameFragment on Hero { - nonNullErrorField - } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { - data: { hero: { id: '1' } }, + data: { + hero: {}, + }, + pending: [ + { id: '0', path: ['hero'], label: 'DeferID' }, + { id: '1', path: ['hero'], label: 'DeferName' }, + ], hasNext: true, }, { incremental: [ { - data: null, - path: ['hero'], - errors: [ - { - message: 'Cannot return null for non-nullable field Hero.nonNullErrorField.', - locations: [{ line: 9, column: 9 }], - path: ['hero', 'nonNullErrorField'], - }, - ], + data: { + id: '1', + }, + id: '0', }, - ], + { + data: { + name: 'Luke', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { name: 'Luke' }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields that return promises', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... 
@defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document, { + hero: { + id: () => Promise.resolve('1'), + name: () => Promise.resolve('Luke'), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { name: 'Luke' }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with varying subfields of same priorities but different level of defers', async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [ + { id: '0', path: ['hero'], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + id: '1', + }, + id: '0', + }, + { + data: { + name: 'Luke', + }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits nested defer fragments with varying subfields of same priorities but different level of defers', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... @defer(label: "DeferID") { + id + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: '0', path: [], label: 'DeferName' }], + hasNext: true, + }, + { + pending: [{ id: '1', path: ['hero'], label: 'DeferID' }], + incremental: [ + { + data: { + hero: { + name: 'Luke', + }, + }, + id: '0', + }, + { + data: { + id: '1', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Initiates all deferred grouped field sets immediately if and only if they have been released as pending', async () => { + const document = parse(` + query { + ... @defer { + a { + ... @defer { + b { + c { d } + } + } + } + } + ... @defer { + a { + someField + ... 
@defer { + b { + e { f } + } + } + } + } + } + `); + + const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); + let cResolverCalled = false; + let eResolverCalled = false; + const executeResult = execute({ + schema, + document, + rootValue: { + a: { + someField: slowFieldPromise, + b: { + c: () => { + cResolverCalled = true; + return { d: 'd' }; + }, + e: () => { + eResolverCalled = true; + return { f: 'f' }; + }, + }, + }, + }, + enableEarlyExecution: false, + }); + + expect('initialResult' in executeResult).toBeTruthy(); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const result1 = executeResult.initialResult; + expectJSON(result1).toDeepEqual({ + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + + expect(cResolverCalled).toBe(false); + expect(eResolverCalled).toBe(false); + + const result2 = await iterator.next(); + expectJSON(result2).toDeepEqual({ + value: { + pending: [{ id: '2', path: ['a'] }], + incremental: [ + { + data: { a: {} }, + id: '0', + }, + { + data: { b: {} }, + id: '2', + }, + { + data: { c: { d: 'd' } }, + id: '2', + subPath: ['b'], + }, + ], + completed: [{ id: '0' }, { id: '2' }], + hasNext: true, + }, + done: false, + }); + + expect(cResolverCalled).toBe(true); + expect(eResolverCalled).toBe(false); + + resolveSlowField('someField'); + + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ + value: { + pending: [{ id: '3', path: ['a'] }], + incremental: [ + { + data: { someField: 'someField' }, + id: '1', + subPath: ['a'], + }, + { + data: { e: { f: 'f' } }, + id: '3', + subPath: ['b'], + }, + ], + completed: [{ id: '1' }, { id: '3' }], + hasNext: false, + }, + done: false, + }); + + expect(eResolverCalled).toBe(true); + + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ + value: undefined, + done: true, + }); + }); + + it('Can deduplicate multiple defers on the same object', async () => { + const document = parse(` + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + `); + const result = await complete(document); + + expectJSON(result).toDeepEqual([ + { + data: { hero: { friends: [{}, {}, {}] } }, + pending: [ + { id: '0', path: ['hero', 'friends', 0] }, + { id: '1', path: ['hero', 'friends', 1] }, + { id: '2', path: ['hero', 'friends', 2] }, + ], + hasNext: true, + }, + { + incremental: [ + { data: { id: '2', name: 'Han' }, id: '0' }, + { data: { id: '3', name: 'Leia' }, id: '1' }, + { data: { id: '4', name: 'C-3PO' }, id: '2' }, + ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields present in the initial payload', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... 
@defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } }, + anotherNestedObject: { deeperObject: { foo: 'foo' } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + anotherNestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { bar: 'bar' }, + id: '0', + subPath: ['nestedObject', 'deeperObject'], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields present in a parent defer payload', async () => { + const document = parse(` + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + pending: [{ id: '1', path: ['hero', 'nestedObject', 'deeperObject'] }], + incremental: [ + { + data: { + nestedObject: { + deeperObject: { foo: 'foo' }, + }, + }, + id: '0', + }, + { + data: { + bar: 'bar', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields with deferred fragments at multiple levels', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... @defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: { + deeperObject: { foo: 'foo', bar: 'bar', baz: 'baz', bak: 'bak' }, + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + pending: [ + { id: '1', path: ['hero', 'nestedObject'] }, + { id: '2', path: ['hero', 'nestedObject', 'deeperObject'] }, + ], + incremental: [ + { + data: { bar: 'bar' }, + id: '0', + subPath: ['nestedObject', 'deeperObject'], + }, + { + data: { baz: 'baz' }, + id: '1', + subPath: ['deeperObject'], + }, + { + data: { bak: 'bak' }, + id: '2', + }, + ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates multiple fields from deferred fragments from different branches occurring at the same level', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... 
@defer { + foo + bar + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: {}, + }, + }, + }, + pending: [ + { id: '0', path: ['hero', 'nestedObject', 'deeperObject'] }, + { id: '1', path: ['hero', 'nestedObject', 'deeperObject'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + foo: 'foo', + }, + id: '0', + }, + { + data: { + bar: 'bar', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () => { + const document = parse(` + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... @defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + `); + const result = await complete(document, { + a: { + b: { + c: { d: 'd' }, + e: { f: 'f' }, + }, + }, + g: { h: 'h' }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: { + b: { + c: { + d: 'd', + }, + }, + }, + }, + pending: [ + { id: '0', path: ['a', 'b'] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { e: { f: 'f' } }, + id: '0', + }, + { + data: { g: { h: 'h' } }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Correctly bundles varying subfields into incremental data records unique by defer combination, ignoring fields in a fragment masked by a parent defer', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer { + hero { + id + } + } + ... @defer { + hero { + name + shouldBeWithNameDespiteAdditionalDefer: name + ... @defer { + shouldBeWithNameDespiteAdditionalDefer: name + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { + name: 'Luke', + shouldBeWithNameDespiteAdditionalDefer: 'Luke', + }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Nulls cross defer boundaries, null first', async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + `); + const result = await complete(document, { + a: { b: { c: { d: 'd' } }, someField: 'someField' }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '1', + subPath: ['b', 'c'], + }, + ], + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 8, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + { id: '1' }, + ], + hasNext: false, + }, + ]); + }); + + it('Nulls cross defer boundaries, value first', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + d + } + } + } + } + a { + ... 
@defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { d: 'd' }, nonNullErrorFIeld: null }, + someField: 'someField', + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'c'], + }, + ], + completed: [ + { id: '0' }, + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 17, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Handles multiple erroring deferred grouped field sets', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + someError: nonNullErrorField + } + } + } + } + ... @defer { + a { + b { + c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 7, column: 17 }], + path: ['a', 'b', 'c', 'someError'], + }, + ], + }, + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 16, column: 17 }], + path: ['a', 'b', 'c', 'anotherError'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Handles multiple erroring deferred grouped field sets for the same fragment', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + someC: c { + d: d + } + anotherC: c { + d: d + } + } + } + } + ... @defer { + a { + b { + someC: c { + someError: nonNullErrorField + } + anotherC: c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { d: 'd', nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { a: { b: { someC: {}, anotherC: {} } } }, + id: '0', + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'someC'], + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'anotherC'], + }, + ], + completed: [ + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 19, column: 17 }], + path: ['a', 'b', 'someC', 'someError'], + }, + ], + }, + { id: '0' }, + ], + hasNext: false, + }, + ]); + }); + + it('filters a payload with a null that cannot be merged', async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + `); + const result = await complete( + document, + { + a: { + b: { + c: { + d: 'd', + nonNullErrorField: async () => { + await resolveOnNextTick(); + return null; + }, + }, + }, + someField: 'someField', + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '1', + subPath: ['b', 'c'], + }, + ], + completed: [{ id: '1' }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 8, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Cancels deferred fields when initial result exhibits null bubbling', async () => { + const document = parse(` + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + `); + const result = await complete( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true, + ); + expectJSON(result).toDeepEqual({ + data: { + hero: null, + }, + errors: [ + { + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 4, column: 11 }], + path: ['hero', 'nonNullName'], + }, + ], + }); + }); + + it('Cancels deferred fields when deferred result exhibits null bubbling', async () => { + const document = parse(` + query { + ... @defer { + hero { + nonNullName + name + } + } + } + `); + const result = await complete( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: '0', path: [] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + hero: null, + }, + errors: [ + { + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 5, column: 13 }], + path: ['hero', 'nonNullName'], + }, + ], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual({ + data: { + hero: { + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + }, + }, + }); + }); + + it('Deduplicates async iterable list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + friends: async function* resolve() { + yield await Promise.resolve(friends[0]); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [{ name: 'Han' }] } }, + }); + }); + + it('Deduplicates empty async iterable list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... 
@defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + // eslint-disable-next-line require-yield + friends: async function* resolve() { + await resolveOnNextTick(); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it('Does not deduplicate list fields with non-overlapping fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { id: '2' }, + id: '0', + subPath: ['friends', 0], + }, + { + data: { id: '3' }, + id: '0', + subPath: ['friends', 1], + }, + { + data: { id: '4' }, + id: '0', + subPath: ['friends', 2], + }, + ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); + it('Deduplicates list fields that return empty lists', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + friends: () => [], + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it('Deduplicates null object fields', async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + nestedObject: () => null, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: null } }, + }); + }); + + it('Deduplicates promise object fields', async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... 
@defer { + nestedObject { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: () => Promise.resolve({ name: 'foo' }), + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: { name: 'foo' } } }, + }); + }); + + it('Handles errors thrown in deferred fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const result = await complete(document, { + hero: { + ...hero, + name: () => { + throw new Error('bad'); + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { name: null }, + id: '0', + errors: [ + { + message: 'bad', + locations: [{ line: 9, column: 9 }], + path: ['hero', 'name'], + }, + ], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles non-nullable errors thrown in deferred fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + `); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 9, column: 9 }], + path: ['hero', 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); it('Handles non-nullable errors thrown outside deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - nonNullErrorField + nonNullName ...NameFragment @defer } } @@ -527,19 +2182,23 @@ describe('Execute: defer directive', () => { id } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); expectJSON(result).toDeepEqual({ errors: [ { - message: 'Cannot return null for non-nullable field Hero.nonNullErrorField.', + message: 'Cannot return null for non-nullable field Hero.nonNullName.', locations: [ { line: 4, column: 11, }, ], - path: ['hero', 'nonNullErrorField'], + path: ['hero', 'nonNullName'], }, ], data: { @@ -547,9 +2206,8 @@ describe('Execute: defer directive', () => { }, }); }); - it('Handles async non-nullable errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -557,26 +2215,30 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - promiseNonNullErrorField + nonNullName } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => Promise.resolve(null), + }, + }); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { - incremental: [ + completed: [ { - data: null, - path: ['hero'], + id: '0', errors: [ { - message: 'Cannot return null for non-nullable field Hero.promiseNonNullErrorField.', + message: 'Cannot return null for non-nullable field Hero.nonNullName.', locations: [{ line: 9, column: 9 }], - path: ['hero', 'promiseNonNullErrorField'], + path: 
['hero', 'nonNullName'], }, ], }, @@ -585,9 +2247,8 @@ describe('Execute: defer directive', () => { }, ]); }); - it('Returns payloads in correct order', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -595,7 +2256,7 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - slowField + name friends { ...NestedFragment @defer } @@ -604,111 +2265,130 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + name: async () => { + await resolveOnNextTick(); + return 'slow'; + }, + }, + }); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { + pending: [ + { id: '1', path: ['hero', 'friends', 0] }, + { id: '2', path: ['hero', 'friends', 1] }, + { id: '3', path: ['hero', 'friends', 2] }, + ], incremental: [ { - data: { slowField: 'slow', friends: [{}, {}, {}] }, - path: ['hero'], + data: { name: 'slow', friends: [{}, {}, {}] }, + id: '0', }, + { data: { name: 'Han' }, id: '1' }, + { data: { name: 'Leia' }, id: '2' }, + { data: { name: 'C-3PO' }, id: '3' }, ], - hasNext: true, - }, - { - incremental: [ - { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, - ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }, { id: '3' }], hasNext: false, }, ]); }); - it('Returns payloads from synchronous data in correct order', async () => { - const document = parse(/* GraphQL */ ` - query HeroNameQuery { - hero { - id - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - name - friends { - ...NestedFragment @defer - } + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer } - fragment NestedFragment on Friend { - name + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer } - `); - + } + fragment NestedFragment on Friend { + name + } + `); const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { + pending: [ + { id: '1', path: ['hero', 'friends', 0] }, + { id: '2', path: ['hero', 'friends', 1] }, + { id: '3', path: ['hero', 'friends', 2] }, + ], incremental: [ { data: { name: 'Luke', friends: [{}, {}, {}], }, - path: ['hero'], + id: '0', }, + { data: { name: 'Han' }, id: '1' }, + { data: { name: 'Leia' }, id: '2' }, + { data: { name: 'C-3PO' }, id: '3' }, ], - hasNext: true, - }, - { - incremental: [ - { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, - ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }, { id: '3' }], hasNext: false, }, ]); }); it('Filters deferred payloads when a list item returned by an async iterable is nulled', async () => { - const document = parse(/* GraphQL */ ` - query { - hero { - asyncFriends { - promiseNonNullErrorField - ...NameFragment @defer - } + const document = parse(` + query { + hero { + friends { + nonNullName + ...NameFragment @defer } } - fragment NameFragment on Friend { - name - } - `); - - const result = await complete(document); + } + fragment NameFragment on Friend { + name + } + `); + const result = await complete(document, { + hero: { + ...hero, + 
async *friends() { + yield await Promise.resolve({ + ...friends[0], + nonNullName: () => Promise.resolve(null), + }); + }, + }, + }); expectJSON(result).toDeepEqual({ data: { hero: { - asyncFriends: [null], + friends: [null], }, }, errors: [ { - message: 'Cannot return null for non-nullable field Friend.promiseNonNullErrorField.', - locations: [{ line: 5, column: 13 }], - path: ['hero', 'asyncFriends', 0, 'promiseNonNullErrorField'], + message: 'Cannot return null for non-nullable field Friend.nonNullName.', + locations: [{ line: 5, column: 11 }], + path: ['hero', 'friends', 0, 'nonNullName'], }, ], }); diff --git a/packages/executor/src/execution/__tests__/flattenAsyncIterable-test.ts b/packages/executor/src/execution/__tests__/flattenAsyncIterable-test.ts deleted file mode 100644 index adff7786eac..00000000000 --- a/packages/executor/src/execution/__tests__/flattenAsyncIterable-test.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { flattenAsyncIterable } from '../flattenAsyncIterable.js'; - -describe('flattenAsyncIterable', () => { - it('flatten nested async generators', async () => { - async function* source() { - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(1.1); - yield await Promise.resolve(1.2); - })(), - ); - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(2.1); - yield await Promise.resolve(2.2); - })(), - ); - } - - const doubles = flattenAsyncIterable(source()); - - const result = []; - for await (const x of doubles) { - result.push(x); - } - expect(result).toEqual([1.1, 1.2, 2.1, 2.2]); - }); - - it('allows returning early from a nested async generator', async () => { - async function* source() { - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(1.1); - yield await Promise.resolve(1.2); - })(), - ); - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(2.1); /* c8 ignore start */ - // Not reachable, early return - yield await Promise.resolve(2.2); - })(), - ); - // Not reachable, early return - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(3.1); - yield await Promise.resolve(3.2); - })(), - ); - } - /* c8 ignore stop */ - - const doubles = flattenAsyncIterable(source()); - - expect(await doubles.next()).toEqual({ value: 1.1, done: false }); - expect(await doubles.next()).toEqual({ value: 1.2, done: false }); - expect(await doubles.next()).toEqual({ value: 2.1, done: false }); - - // Early return - expect(await doubles.return()).toEqual({ - value: undefined, - done: true, - }); - - // Subsequent next calls - expect(await doubles.next()).toEqual({ - value: undefined, - done: true, - }); - expect(await doubles.next()).toEqual({ - value: undefined, - done: true, - }); - }); - - it('allows throwing errors from a nested async generator', async () => { - async function* source() { - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(1.1); - yield await Promise.resolve(1.2); - })(), - ); - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(2.1); /* c8 ignore start */ - // Not reachable, early return - yield await Promise.resolve(2.2); - })(), - ); - // Not reachable, early return - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(3.1); - yield await 
Promise.resolve(3.2); - })(), - ); - } - /* c8 ignore stop */ - - const doubles = flattenAsyncIterable(source()); - - expect(await doubles.next()).toEqual({ value: 1.1, done: false }); - expect(await doubles.next()).toEqual({ value: 1.2, done: false }); - expect(await doubles.next()).toEqual({ value: 2.1, done: false }); - - // Throw error - let caughtError; - try { - await doubles.throw('ouch'); /* c8 ignore start */ - } catch (e) { - caughtError = e; - } - expect(caughtError).toEqual('ouch'); - }); - it('completely yields sub-iterables even when next() called in parallel', async () => { - async function* source() { - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(1.1); - yield await Promise.resolve(1.2); - })(), - ); - yield await Promise.resolve( - (async function* nested(): AsyncGenerator { - yield await Promise.resolve(2.1); - yield await Promise.resolve(2.2); - })(), - ); - } - - const result = flattenAsyncIterable(source()); - - const promise1 = result.next(); - const promise2 = result.next(); - expect(await promise1).toEqual({ value: 1.1, done: false }); - expect(await promise2).toEqual({ value: 1.2, done: false }); - expect(await result.next()).toEqual({ value: 2.1, done: false }); - expect(await result.next()).toEqual({ value: 2.2, done: false }); - expect(await result.next()).toEqual({ - value: undefined, - done: true, - }); - }); -}); diff --git a/packages/executor/src/execution/__tests__/lists-test.ts b/packages/executor/src/execution/__tests__/lists-test.ts index eca608b712c..18c64ffe2a0 100644 --- a/packages/executor/src/execution/__tests__/lists-test.ts +++ b/packages/executor/src/execution/__tests__/lists-test.ts @@ -2,6 +2,7 @@ import { buildSchema, GraphQLFieldResolver, GraphQLList, + GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString, @@ -27,7 +28,7 @@ describe('Execute: Accepts any iterable as list value', () => { }); }); - it('Accepts an Generator function as a List value', () => { + it('Accepts a Generator function as a List value', () => { function* listField() { yield 'one'; yield 2; @@ -91,7 +92,7 @@ describe('Execute: Accepts async iterables as list value', () => { name: 'ObjectWrapper', fields: { index: { - type: GraphQLString, + type: new GraphQLNonNull(GraphQLString), resolve, }, }, @@ -127,12 +128,12 @@ describe('Execute: Accepts async iterables as list value', () => { } expectJSON(await complete({ listField })).toDeepEqual({ - data: { listField: ['two', '4', null] }, + data: { listField: null }, errors: [ { message: 'bad', locations: [{ line: 1, column: 3 }], - path: ['listField', 2], + path: ['listField'], }, ], }); @@ -190,7 +191,7 @@ describe('Execute: Accepts async iterables as list value', () => { return Promise.resolve(index); }), ).toDeepEqual({ - data: { listField: [{ index: '0' }, { index: '1' }, { index: null }] }, + data: { listField: [{ index: '0' }, { index: '1' }, null] }, errors: [ { message: 'bad', diff --git a/packages/executor/src/execution/__tests__/mutations-test.ts b/packages/executor/src/execution/__tests__/mutations-test.ts index c4b2d36f2dc..94e5441e14b 100644 --- a/packages/executor/src/execution/__tests__/mutations-test.ts +++ b/packages/executor/src/execution/__tests__/mutations-test.ts @@ -189,7 +189,6 @@ describe('Execute: Handles mutation execution ordering', () => { ], }); }); - it('Mutation fields with @defer do not block next mutation', async () => { const document = parse(` mutation M { @@ -221,29 +220,29 @@ describe('Execute: Handles mutation execution 
ordering', () => { patches.push(patch); } - expectJSON(patches).toDeepEqual([ + expect(patches).toEqual([ { data: { first: {}, second: { theNumber: 2 }, }, + pending: [{ id: '0', path: ['first'], label: 'defer-label' }], hasNext: true, }, { incremental: [ { - label: 'defer-label', - path: ['first'], + id: '0', data: { promiseToGetTheNumber: 2, }, }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Mutation inside of a fragment', async () => { const document = parse(` mutation M { @@ -262,14 +261,13 @@ describe('Execute: Handles mutation execution ordering', () => { const rootValue = new Root(6); const mutationResult = await execute({ schema, document, rootValue }); - expectJSON(mutationResult).toDeepEqual({ + expect(mutationResult).toEqual({ data: { first: { theNumber: 1 }, second: { theNumber: 2 }, }, }); }); - it('Mutation with @defer is not executed serially', async () => { const document = parse(` mutation M { @@ -306,13 +304,13 @@ describe('Execute: Handles mutation execution ordering', () => { data: { second: { theNumber: 2 }, }, + pending: [{ id: '0', path: [], label: 'defer-label' }], hasNext: true, }, { incremental: [ { - label: 'defer-label', - path: [], + id: '0', data: { first: { theNumber: 1, @@ -320,6 +318,7 @@ describe('Execute: Handles mutation execution ordering', () => { }, }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); diff --git a/packages/executor/src/execution/__tests__/nonnull-test.ts b/packages/executor/src/execution/__tests__/nonnull-test.ts index fb4eead69bd..78bc207e026 100644 --- a/packages/executor/src/execution/__tests__/nonnull-test.ts +++ b/packages/executor/src/execution/__tests__/nonnull-test.ts @@ -247,6 +247,16 @@ describe('Execute: handles non-nullable types', () => { path: ['syncNest', 'syncNest', 'sync'], locations: [{ line: 6, column: 22 }], }, + { + message: promiseError.message, + path: ['syncNest', 'promise'], + locations: [{ line: 5, column: 11 }], + }, + { + message: promiseError.message, + path: ['syncNest', 'syncNest', 'promise'], + locations: [{ line: 6, column: 27 }], + }, { message: syncError.message, path: ['syncNest', 'promiseNest', 'sync'], @@ -262,21 +272,6 @@ describe('Execute: handles non-nullable types', () => { path: ['promiseNest', 'syncNest', 'sync'], locations: [{ line: 12, column: 22 }], }, - { - message: promiseError.message, - path: ['syncNest', 'promise'], - locations: [{ line: 5, column: 11 }], - }, - { - message: promiseError.message, - path: ['syncNest', 'syncNest', 'promise'], - locations: [{ line: 6, column: 27 }], - }, - { - message: syncError.message, - path: ['promiseNest', 'promiseNest', 'sync'], - locations: [{ line: 13, column: 25 }], - }, { message: promiseError.message, path: ['syncNest', 'promiseNest', 'promise'], @@ -292,6 +287,11 @@ describe('Execute: handles non-nullable types', () => { path: ['promiseNest', 'syncNest', 'promise'], locations: [{ line: 12, column: 27 }], }, + { + message: syncError.message, + path: ['promiseNest', 'promiseNest', 'sync'], + locations: [{ line: 13, column: 25 }], + }, { message: promiseError.message, path: ['promiseNest', 'promiseNest', 'promise'], @@ -521,7 +521,7 @@ describe('Execute: handles non-nullable types', () => { type: new GraphQLNonNull(GraphQLString), }, }, - resolve: (_, args: any) => 'Passed: ' + String(args.cannotBeNull), + resolve: (_, { cannotBeNull }) => 'Passed: ' + String(cannotBeNull), }, }, }), diff --git a/packages/executor/src/execution/__tests__/stream-test.ts b/packages/executor/src/execution/__tests__/stream-test.ts index 
183cbf8b05f..51acf8f323c 100644 --- a/packages/executor/src/execution/__tests__/stream-test.ts +++ b/packages/executor/src/execution/__tests__/stream-test.ts @@ -10,11 +10,13 @@ import { } from 'graphql'; import { MaybePromise } from '@graphql-tools/utils'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; +import { execute } from '../execute.js'; +import { promiseWithResolvers } from '../promiseWithResolvers.js'; import type { InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, -} from '../execute.js'; -import { execute } from '../execute.js'; +} from '../types.js'; const friendType = new GraphQLObjectType({ fields: { @@ -76,11 +78,16 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); -async function complete(document: DocumentNode, rootValue: unknown = {}) { +async function complete( + document: DocumentNode, + rootValue: unknown = {}, + enableEarlyExecution = false, +) { const result = await execute({ schema, document, rootValue, + enableEarlyExecution, }); if ('initialResult' in result) { @@ -117,65 +124,47 @@ async function completeAsync(document: DocumentNode, numCalls: number, rootValue return Promise.all(promises); } -function createResolvablePromise(): [Promise, (value?: T) => void] { - let resolveFn; - const promise = new Promise(resolve => { - resolveFn = resolve; - }); - return [promise, resolveFn as unknown as (value?: T) => void]; -} - describe('Execute: stream directive', () => { it('Can stream a list field', async () => { const document = parse('{ scalarList @stream(initialCount: 1) }'); const result = await complete(document, { scalarList: () => ['apple', 'banana', 'coconut'], }); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { scalarList: ['apple'], }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], - hasNext: true, - }, - { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['banana', 'coconut'], id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can use default value of initialCount', async () => { const document = parse('{ scalarList @stream }'); const result = await complete(document, { scalarList: () => ['apple', 'banana', 'coconut'], }); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { scalarList: [], }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['apple'], path: ['scalarList', 0] }], - hasNext: true, - }, - { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], - hasNext: true, - }, - { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['apple', 'banana', 'coconut'], id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Negative values of initialCount throw field errors', async () => { const document = parse('{ scalarList @stream(initialCount: -2) }'); const result = await complete(document, { @@ -199,7 +188,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Returns label from stream directive', async () => { const document = parse('{ scalarList @stream(initialCount: 1, label: "scalar-stream") }'); const result = await complete(document, { @@ -210,31 +198,21 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple'], }, + pending: [{ id: '0', path: ['scalarList'], label: 
'scalar-stream' }], hasNext: true, }, { incremental: [ { - items: ['banana'], - path: ['scalarList', 1], - label: 'scalar-stream', - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: ['coconut'], - path: ['scalarList', 2], - label: 'scalar-stream', + items: ['banana', 'coconut'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can disable @stream using if argument', async () => { const document = parse('{ scalarList @stream(initialCount: 0, if: false) }'); const result = await complete(document, { @@ -244,7 +222,6 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple', 'banana', 'coconut'] }, }); }); - it('Does not disable stream with null if argument', async () => { const document = parse( 'query ($shouldStream: Boolean) { scalarList @stream(initialCount: 2, if: $shouldStream) }', @@ -255,15 +232,16 @@ describe('Execute: stream directive', () => { expectJSON(result).toDeepEqual([ { data: { scalarList: ['apple', 'banana'] }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream multi-dimensional lists', async () => { const document = parse('{ scalarListList @stream(initialCount: 1) }'); const result = await complete(document, { @@ -278,29 +256,24 @@ describe('Execute: stream directive', () => { data: { scalarListList: [['apple', 'apple', 'apple']], }, + pending: [{ id: '0', path: ['scalarListList'] }], hasNext: true, }, { incremental: [ { - items: [['banana', 'banana', 'banana']], - path: ['scalarListList', 1], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: [['coconut', 'coconut', 'coconut']], - path: ['scalarListList', 2], + items: [ + ['banana', 'banana', 'banana'], + ['coconut', 'coconut', 'coconut'], + ], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns a list of promises', async () => { const document = parse(/* GraphQL */ ` query { @@ -327,6 +300,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { @@ -338,14 +312,14 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream in correct order with lists of promises', async () => { const document = parse(/* GraphQL */ ` query { @@ -363,13 +337,14 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + id: '0', }, ], hasNext: true, @@ -378,7 +353,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -387,14 +362,172 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Does not execute early if not specified', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const result = await complete(document, { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + 
const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(i); + return f.id; + }, + })), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([0, 1, 2]); + }); + it('Executes early if specified', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const result = await complete( + document, + { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(i); + return f.id; + }, + })), + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }, { id: '2' }, { id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([2, 1, 0]); + }); + it('Can stream a field that returns a list with nested promises', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `); + const result = await complete(document, { + friendList: () => + friends.map(f => ({ + name: Promise.resolve(f.name), + id: Promise.resolve(f.id), + })), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [ + { + name: 'Luke', + id: '1', + }, + { + name: 'Han', + id: '2', + }, + ], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [ + { + name: 'Leia', + id: '3', + }, + ], + id: '0', + }, + ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles rejections in a field that returns a list of promises before initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -425,20 +558,21 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }, null], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles rejections in a field that returns a list of promises after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -462,13 +596,14 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'bad', @@ -477,16 +612,21 @@ describe('Execute: stream directive', () => { }, ], }, + ], + hasNext: true, + }, + { + incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns an async iterable', async () => { const document = parse(/* GraphQL */ ` query { @@ -508,13 +648,14 @@ describe('Execute: stream 
directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + id: '0', }, ], hasNext: true, @@ -523,7 +664,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -532,17 +673,17 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], hasNext: true, }, { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns an async iterable, using a non-zero initialCount', async () => { const document = parse(/* GraphQL */ ` query { @@ -567,20 +708,24 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Negative values of initialCount throw field errors on a field that returns an async iterable', async () => { const document = parse(/* GraphQL */ ` query { @@ -606,7 +751,125 @@ describe('Execute: stream directive', () => { }, }); }); - + it('Does not execute early if not specified, when streaming from an async iterable', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const slowFriend = async (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(n); + return friends[n].id; + }, + }); + const result = await complete(document, { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '3' }], + id: '0', + }, + ], + hasNext: true, + }, + { + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([0, 1, 2]); + }); + it('Executes early if specified when streaming from an async iterable', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const slowFriend = (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(n); + return friends[n].id; + }, + }); + const result = await complete( + document, + { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }, { id: '2' }, { id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + 
expect(order).toEqual([2, 1, 0]); + }); it('Can handle concurrent calls to .next() without waiting', async () => { const document = parse(/* GraphQL */ ` query { @@ -633,6 +896,7 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, }, @@ -642,17 +906,22 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + hasNext: true, + }, + }, + { + done: false, + value: { + completed: [{ id: '0' }], hasNext: false, }, }, { done: true, value: undefined }, - { done: true, value: undefined }, ]); }); - it('Handles error thrown in async iterable before initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -673,15 +942,14 @@ describe('Execute: stream directive', () => { { message: 'bad', locations: [{ line: 3, column: 9 }], - path: ['friendList', 1], + path: ['friendList'], }, ], data: { - friendList: [{ name: 'Luke', id: '1' }, null], + friendList: null, }, }); }); - it('Handles error thrown in async iterable after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -702,18 +970,18 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'bad', locations: [{ line: 3, column: 9 }], - path: ['friendList', 1], + path: ['friendList'], }, ], }, @@ -722,7 +990,6 @@ describe('Execute: stream directive', () => { }, ]); }); - it('Handles null returned in non-null list items after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -732,7 +999,7 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - nonNullFriendList: () => [friends[0], null], + nonNullFriendList: () => [friends[0], null, friends[1]], }); expectJSON(result).toDeepEqual([ @@ -740,13 +1007,13 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ name: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', @@ -760,7 +1027,6 @@ describe('Execute: stream directive', () => { }, ]); }); - it('Handles null returned in non-null async iterable list items after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -785,20 +1051,202 @@ describe('Execute: stream directive', () => { expectJSON(result).toDeepEqual([ { data: { - nonNullFriendList: [{ name: 'Luke' }], + nonNullFriendList: [{ name: 'Luke' }], + }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', + locations: [{ line: 3, column: 9 }], + path: ['nonNullFriendList', 1], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles errors thrown by completeValue after initialCount is reached', async () => { + const document = parse(/* GraphQL */ ` + query { + scalarList @stream(initialCount: 1) + } + `); + const result = await complete(document, { + scalarList: () => [friends[0].name, {}], + }); + 
expectJSON(result).toDeepEqual([ + { + data: { + scalarList: ['Luke'], + }, + pending: [{ id: '0', path: ['scalarList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [null], + id: '0', + errors: [ + { + message: 'String cannot represent value: {}', + locations: [{ line: 3, column: 9 }], + path: ['scalarList', 1], + }, + ], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + friendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error('Oops')), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [null], + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['friendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ nonNullName: 'Han' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles nested async errors thrown by completeValue after initialCount is reached', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + friendList: () => [ + { nonNullName: Promise.resolve(friends[0].name) }, + { nonNullName: Promise.reject(new Error('Oops')) }, + { nonNullName: Promise.resolve(friends[1].name) }, + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [null], + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['friendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ nonNullName: 'Han' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached for a non-nullable list', async () => { + const document = parse(/* GraphQL */ ` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + nonNullFriendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error('Oops')), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { - message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', - locations: [{ line: 3, column: 9 }], - path: ['nonNullFriendList', 1], + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], }, ], }, @@ -807,33 +1255,38 @@ describe('Execute: stream directive', () => { }, ]); }); - - 
it('Handles errors thrown by completeValue after initialCount is reached', async () => { + it('Handles nested async errors thrown by completeValue after initialCount is reached for a non-nullable list', async () => { const document = parse(/* GraphQL */ ` query { - scalarList @stream(initialCount: 1) + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } } `); const result = await complete(document, { - scalarList: () => [friends[0].name, {}], + nonNullFriendList: () => [ + { nonNullName: Promise.resolve(friends[0].name) }, + { nonNullName: Promise.reject(new Error('Oops')) }, + { nonNullName: Promise.resolve(friends[1].name) }, + ], }); expectJSON(result).toDeepEqual([ { data: { - scalarList: ['Luke'], + nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: [null], - path: ['scalarList', 1], + id: '0', errors: [ { - message: 'String cannot represent value: {}', - locations: [{ line: 3, column: 9 }], - path: ['scalarList', 1], + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], }, ], }, @@ -842,8 +1295,7 @@ describe('Execute: stream directive', () => { }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable', async () => { const document = parse(/* GraphQL */ ` query { friendList @stream(initialCount: 1) { @@ -852,26 +1304,27 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - friendList: () => [ - Promise.resolve({ nonNullName: friends[0].name }), - Promise.resolve({ + async *friendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ nonNullName: () => Promise.reject(new Error('Oops')), - }), - Promise.resolve({ nonNullName: friends[1].name }), - ], + }); + yield await Promise.resolve({ nonNullName: friends[1].name }); + }, }); expectJSON(result).toDeepEqual([ { data: { friendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -887,15 +1340,18 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + id: '0', }, ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached for a non-nullable list', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async generator for a non-nullable list', async () => { const document = parse(/* GraphQL */ ` query { nonNullFriendList @stream(initialCount: 1) { @@ -904,26 +1360,25 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - nonNullFriendList: () => [ - Promise.resolve({ nonNullName: friends[0].name }), - Promise.resolve({ + async *nonNullFriendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ nonNullName: () => Promise.reject(new Error('Oops')), - }), - Promise.resolve({ nonNullName: friends[1].name }), - ], + }); /* c8 ignore start */ + } /* c8 ignore stop */, }); expectJSON(result).toDeepEqual([ { data: { nonNullFriendList: [{ nonNullName: 'Luke' 
}], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -937,63 +1392,70 @@ describe('Execute: stream directive', () => { }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached from async iterable', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable does not provide a return method) ', async () => { const document = parse(/* GraphQL */ ` query { - friendList @stream(initialCount: 1) { + nonNullFriendList @stream(initialCount: 1) { nonNullName } } `); + let count = 0; const result = await complete(document, { - async *friendList() { - yield await Promise.resolve({ nonNullName: friends[0].name }); - yield await Promise.resolve({ - nonNullName: () => Promise.reject(new Error('Oops')), - }); - yield await Promise.resolve({ nonNullName: friends[1].name }); + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + // Not reached + /* c8 ignore next 5 */ + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + } + }, + }), }, }); expectJSON(result).toDeepEqual([ { data: { - friendList: [{ nonNullName: 'Luke' }], + nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'Oops', locations: [{ line: 4, column: 11 }], - path: ['friendList', 1, 'nonNullName'], + path: ['nonNullFriendList', 1, 'nonNullName'], }, ], }, ], - hasNext: true, - }, - { - incremental: [ - { - items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], - }, - ], - hasNext: true, - }, - { hasNext: false, }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable provides concurrent next/return methods and has a slow return ', async () => { const document = parse(/* GraphQL */ ` query { nonNullFriendList @stream(initialCount: 1) { @@ -1001,29 +1463,58 @@ describe('Execute: stream directive', () => { } } `); + let count = 0; + let returned = false; const result = await complete(document, { - async *nonNullFriendList() { - yield await Promise.resolve({ nonNullName: friends[0].name }); - yield await Promise.resolve({ - nonNullName: () => Promise.reject(new Error('Oops')), - }); - yield await Promise.resolve({ - nonNullName: friends[1].name, - }); /* c8 ignore start */ - } /* c8 ignore stop */, + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + /* c8 ignore next 3 */ + if (returned) { + return Promise.resolve({ done: true }); + } + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + 
}); + // Not reached + /* c8 ignore next 5 */ + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + } + }, + return: async () => { + await resolveOnNextTick(); + returned = true; + return { done: true }; + }, + }), + }, }); expectJSON(result).toDeepEqual([ { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -1036,8 +1527,8 @@ describe('Execute: stream directive', () => { hasNext: false, }, ]); + expect(returned).toBeTruthy(); }); - it('Filters payloads that are nulled', async () => { const document = parse(/* GraphQL */ ` query { @@ -1070,7 +1561,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Filters payloads that are nulled by a later synchronous error', async () => { const document = parse(/* GraphQL */ ` query { @@ -1103,7 +1593,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Does not filter payloads when null error is in a different path', async () => { const document = parse(/* GraphQL */ ` query { @@ -1133,13 +1622,26 @@ describe('Execute: stream directive', () => { otherNestedObject: {}, nestedObject: { nestedFriendList: [] }, }, + pending: [ + { id: '0', path: ['otherNestedObject'] }, + { id: '1', path: ['nestedObject', 'nestedFriendList'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ name: 'Luke' }], + id: '1', + }, + ], hasNext: true, }, { incremental: [ { data: { scalarField: null }, - path: ['otherNestedObject'], + id: '0', errors: [ { message: 'Oops', @@ -1148,16 +1650,12 @@ describe('Execute: stream directive', () => { }, ], }, - { - items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], - }, ], + completed: [{ id: '0' }, { id: '1' }], hasNext: false, }, ]); }); - it('Filters stream payloads that are nulled in a deferred payload', async () => { const document = parse(/* GraphQL */ ` query { @@ -1188,6 +1686,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }, { @@ -1196,7 +1695,7 @@ describe('Execute: stream directive', () => { data: { deeperNestedObject: null, }, - path: ['nestedObject'], + id: '0', errors: [ { message: @@ -1207,11 +1706,11 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Filters defer payloads that are nulled in a stream response', async () => { const document = parse(/* GraphQL */ ` query { @@ -1236,13 +1735,14 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 0], + id: '0', errors: [ { message: 'Cannot return null for non-nullable field Friend.nonNullName.', @@ -1255,6 +1755,7 @@ describe('Execute: stream directive', () => { hasNext: true, }, { + completed: [{ id: '0' }], hasNext: false, }, ]); @@ -1266,11 +1767,11 @@ describe('Execute: stream directive', () => { const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { + /* c8 ignore start */ if (requested) { - /* c8 ignore next 3 */ - // Not reached, iterator should end immediately. - expect('Not reached').toBeFalsy(); - } + // stream is filtered, next is not called, and so this is not reached. 
+ return Promise.reject(new Error('Oops')); + } /* c8 ignore stop */ requested = true; const friend = friends[0]; return Promise.resolve({ @@ -1283,6 +1784,7 @@ describe('Execute: stream directive', () => { }, return: () => { returned = true; + // Ignores errors from return. return Promise.reject(new Error('Oops')); }, }), @@ -1314,6 +1816,7 @@ describe('Execute: stream directive', () => { }, }, }, + enableEarlyExecution: true, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1325,6 +1828,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }); @@ -1337,7 +1841,7 @@ describe('Execute: stream directive', () => { data: { deeperNestedObject: null, }, - path: ['nestedObject'], + id: '0', errors: [ { message: @@ -1348,6 +1852,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ id: '0' }], hasNext: false, }, }); @@ -1357,7 +1862,6 @@ describe('Execute: stream directive', () => { expect(returned).toBeTruthy(); }); - it('Handles promises returned by completeValue after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -1382,13 +1886,14 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1', name: 'Luke' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -1397,19 +1902,79 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '3', name: 'Leia' }], - path: ['friendList', 2], + id: '0', }, ], hasNext: true, }, { + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles overlapping deferred and non-deferred streams', async () => { + const document = parse(/* GraphQL */ ` + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... 
@defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + `); + const result = await complete(document, { + nestedObject: { + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nestedObject: { + nestedFriendList: [], + }, + }, + pending: [{ id: '0', path: ['nestedObject', 'nestedFriendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1', name: 'Luke' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2', name: 'Han' }], + id: '0', + }, + ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Returns payloads in correct order when parent deferred fragment resolves slower than stream', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); const document = parse(/* GraphQL */ ` query { nestedObject { @@ -1435,6 +2000,7 @@ describe('Execute: stream directive', () => { }, }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1446,6 +2012,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }); @@ -1454,45 +2021,53 @@ describe('Execute: stream directive', () => { const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { + pending: [{ id: '1', path: ['nestedObject', 'nestedFriendList'] }], incremental: [ { data: { scalarField: 'slow', nestedFriendList: [] }, - path: ['nestedObject'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, }); + const result3 = await iterator.next(); expectJSON(result3).toDeepEqual({ value: { incremental: [ { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + id: '1', }, ], hasNext: true, }, done: false, }); + const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ value: { incremental: [ { items: [{ name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + id: '1', }, ], hasNext: true, }, done: false, }); + const result5 = await iterator.next(); expectJSON(result5).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ id: '1' }], + hasNext: false, + }, done: false, }); const result6 = await iterator.next(); @@ -1501,10 +2076,10 @@ describe('Execute: stream directive', () => { done: true, }); }); - it('Can @defer fields that are resolved after async iterable is complete', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); - const [iterableCompletionPromise, resolveIterableCompletion] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); + const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion } = + promiseWithResolvers(); const document = parse(/* GraphQL */ ` query { @@ -1531,6 +2106,7 @@ describe('Execute: stream directive', () => { await iterableCompletionPromise; }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1542,26 +2118,25 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { id: '0', path: ['friendList', 0], label: 'DeferName' }, + { id: '1', path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); 
const result2Promise = iterator.next(); - resolveIterableCompletion(); + resolveIterableCompletion(null); const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { incremental: [ { data: { name: 'Luke' }, - path: ['friendList', 0], - label: 'DeferName', - }, - { - items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, @@ -1572,27 +2147,49 @@ describe('Execute: stream directive', () => { const result3 = await result3Promise; expectJSON(result3).toDeepEqual({ value: { + pending: [{ id: '2', path: ['friendList', 1], label: 'DeferName' }], incremental: [ { - data: { name: 'Han' }, - path: ['friendList', 1], - label: 'DeferName', + items: [{ id: '2' }], + id: '1', }, ], - hasNext: false, + hasNext: true, }, done: false, }); const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ + value: { + completed: [{ id: '1' }], + hasNext: true, + }, + done: false, + }); + const result5 = await iterator.next(); + expectJSON(result5).toDeepEqual({ + value: { + incremental: [ + { + data: { name: 'Han' }, + id: '2', + }, + ], + completed: [{ id: '2' }], + hasNext: false, + }, + done: false, + }); + const result6 = await iterator.next(); + expectJSON(result6).toDeepEqual({ value: undefined, done: true, }); }); - it('Can @defer fields that are resolved before async iterable is complete', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); - const [iterableCompletionPromise, resolveIterableCompletion] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = promiseWithResolvers(); + const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion } = + promiseWithResolvers(); const document = parse(/* GraphQL */ ` query { @@ -1619,6 +2216,7 @@ describe('Execute: stream directive', () => { await iterableCompletionPromise; }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine @@ -1630,6 +2228,10 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { id: '0', path: ['friendList', 0], label: 'DeferName' }, + { id: '1', path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); @@ -1641,15 +2243,10 @@ describe('Execute: stream directive', () => { incremental: [ { data: { name: 'Luke' }, - path: ['friendList', 0], - label: 'DeferName', - }, - { - items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, @@ -1658,32 +2255,50 @@ describe('Execute: stream directive', () => { const result3 = await iterator.next(); expectJSON(result3).toDeepEqual({ value: { + pending: [{ id: '2', path: ['friendList', 1], label: 'DeferName' }], incremental: [ { - data: { name: 'Han' }, - path: ['friendList', 1], - label: 'DeferName', + items: [{ id: '2' }], + id: '1', }, ], hasNext: true, }, done: false, }); - const result4Promise = iterator.next(); - resolveIterableCompletion(); - const result4 = await result4Promise; + + const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ - value: { hasNext: false }, + value: { + incremental: [ + { + data: { name: 'Han' }, + id: '2', + }, + ], + completed: [{ id: '2' }], + hasNext: true, + }, done: false, }); - const result5 = await iterator.next(); + const result5Promise = 
iterator.next(); + resolveIterableCompletion(null); + const result5 = await result5Promise; expectJSON(result5).toDeepEqual({ + value: { + completed: [{ id: '1' }], + hasNext: false, + }, + done: false, + }); + + const result6 = await iterator.next(); + expectJSON(result6).toDeepEqual({ value: undefined, done: true, }); }); - it('Returns underlying async iterables when returned generator is returned', async () => { let returned = false; let index = 0; @@ -1691,7 +2306,7 @@ describe('Execute: stream directive', () => { [Symbol.asyncIterator]: () => ({ next: () => { const friend = friends[index++]; - if (!friend) { + if (friend == null) { return Promise.resolve({ done: true, value: undefined }); } return Promise.resolve({ done: false, value: friend }); @@ -1733,6 +2348,10 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [ + { id: '0', path: ['friendList', 0] }, + { id: '1', path: ['friendList'] }, + ], hasNext: true, }); const returnPromise = iterator.return(); @@ -1745,14 +2364,13 @@ describe('Execute: stream directive', () => { await returnPromise; expect(returned).toBeTruthy(); }); - it('Can return async iterable when underlying iterable does not have a return method', async () => { let index = 0; const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { const friend = friends[index++]; - if (!friend) { + if (friend == null) { return Promise.resolve({ done: true, value: undefined }); } return Promise.resolve({ done: false, value: friend }); @@ -1790,6 +2408,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }); @@ -1802,7 +2421,6 @@ describe('Execute: stream directive', () => { }); await returnPromise; }); - it('Returns underlying async iterables when returned generator is thrown', async () => { let index = 0; let returned = false; @@ -1810,7 +2428,7 @@ describe('Execute: stream directive', () => { [Symbol.asyncIterator]: () => ({ next: () => { const friend = friends[index++]; - if (!friend) { + if (friend == null) { return Promise.resolve({ done: true, value: undefined }); } return Promise.resolve({ done: false, value: friend }); @@ -1851,6 +2469,10 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [ + { id: '0', path: ['friendList', 0] }, + { id: '1', path: ['friendList'] }, + ], hasNext: true, }); diff --git a/packages/executor/src/execution/__tests__/subscribe-test.ts b/packages/executor/src/execution/__tests__/subscribe-test.ts index bf17d40dae7..08f8db05ea9 100644 --- a/packages/executor/src/execution/__tests__/subscribe-test.ts +++ b/packages/executor/src/execution/__tests__/subscribe-test.ts @@ -10,6 +10,7 @@ import { import { makeExecutableSchema } from '@graphql-tools/schema'; import { ExecutionResult, isAsyncIterable, isPromise, MaybePromise } from '@graphql-tools/utils'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { expectPromise } from '../../__testUtils__/expectPromise.js'; import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; import { assertAsyncIterable } from '../../../../loaders/url/tests/test-utils.js'; import { ExecutionArgs, subscribe } from '../execute.js'; @@ -85,9 +86,15 @@ const emailSchema = new GraphQLSchema({ function createSubscription( pubsub: SimplePubSub, variableValues?: { readonly [variable: string]: unknown }, + errorWithIncrementalSubscription = true, ) { const document = parse(` - subscription ($priority: Int = 0, $shouldDefer: Boolean = false, $asyncResolver: Boolean = false) { + 
subscription ( + $priority: Int = 0 + $shouldDefer: Boolean = false + $shouldStream: Boolean = false + $asyncResolver: Boolean = false + ) { importantEmail(priority: $priority) { email { from @@ -98,6 +105,7 @@ function createSubscription( } ... @defer(if: $shouldDefer) { inbox { + emails @include(if: $shouldStream) @stream(if: $shouldStream) unread total } @@ -135,32 +143,10 @@ function createSubscription( document, rootValue: data, variableValues, + errorWithIncrementalSubscription, }); } -// TODO: consider adding this method to testUtils (with tests) -function expectPromise(maybePromise: unknown) { - expect(isPromise(maybePromise)).toBeTruthy(); - - return { - toResolve() { - return maybePromise; - }, - async toRejectWith(message: string) { - let caughtError: Error; - - try { - /* c8 ignore next 2 */ - await maybePromise; - } catch (error) { - caughtError = error as Error; - expect(caughtError).toBeInstanceOf(Error); - expect(caughtError).toHaveProperty('message', message); - } - }, - }; -} - const DummyQueryType = new GraphQLObjectType({ name: 'Query', fields: { @@ -721,7 +707,7 @@ describe('Subscription Publish Phase', () => { }); }); - it('produces additional payloads for subscriptions with @defer', async () => { + it('subscribe function returns errors with @defer', async () => { const pubsub = new SimplePubSub(); const subscription = await createSubscription(pubsub, { shouldDefer: true, @@ -741,6 +727,76 @@ describe('Subscription Publish Phase', () => { }), ).toBeTruthy(); + const errorPayload = { + done: false, + value: { + errors: [ + { + message: + '`@defer` directive not supported on subscription operations. Disable `@defer` by setting the `if` argument to `false`.', + locations: [{ line: 8, column: 7 }], + path: ['importantEmail'], + }, + ], + data: { importantEmail: null }, + }, + }; + + // The previously waited on payload now has a value. + expectJSON(await payload).toDeepEqual(errorPayload); + + // Another new email arrives, after all incrementally delivered payloads are received. + expect( + pubsub.emit({ + from: 'hyo@graphql.org', + subject: 'Tools', + message: 'I <3 making things', + unread: true, + }), + ).toBeTruthy(); + + // The next waited on payload will have a value. + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual(errorPayload); + + // @ts-expect-error + expectJSON(await subscription.return()).toDeepEqual({ + done: true, + value: undefined, + }); + + // Awaiting a subscription after closing it results in completed results. + // @ts-expect-error + expectJSON(await subscription.next()).toDeepEqual({ + done: true, + value: undefined, + }); + }); + + it('produces additional payloads for subscriptions with @defer if allowed', async () => { + const pubsub = new SimplePubSub(); + const subscription = await createSubscription( + pubsub, + { + shouldDefer: true, + }, + false, + ); + expect(isAsyncIterable(subscription)).toBeTruthy(); + // Wait for the next subscription payload. + // @ts-expect-error we have asserted it is an async iterable + const payload = subscription.next(); + + // A new email arrives! + expect( + pubsub.emit({ + from: 'yuzhi@graphql.org', + subject: 'Alright', + message: 'Tests are good', + unread: true, + }), + ).toBeTruthy(); + // The previously waited on payload now has a value. 
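// With the third createSubscription argument set to `false`, incremental delivery is
// permitted for this subscription, so the deferred fragment is announced via `pending`
// and delivered by id rather than producing an error payload.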
expectJSON(await payload).toDeepEqual({ done: false, @@ -753,6 +809,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -770,9 +827,10 @@ describe('Subscription Publish Phase', () => { total: 2, }, }, - path: ['importantEmail'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, }); @@ -800,6 +858,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -827,9 +886,10 @@ describe('Subscription Publish Phase', () => { total: 3, }, }, - path: ['importantEmail'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, }); @@ -847,6 +907,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -866,6 +927,93 @@ describe('Subscription Publish Phase', () => { }); }); + it('subscribe function returns errors with @stream', async () => { + const pubsub = new SimplePubSub(); + const subscription = await createSubscription(pubsub, { + shouldStream: true, + }); + expect(isAsyncIterable(subscription)).toBeTruthy(); + // Wait for the next subscription payload. + // @ts-expect-error + const payload = subscription.next(); + + // A new email arrives! + expect( + pubsub.emit({ + from: 'yuzhi@graphql.org', + subject: 'Alright', + message: 'Tests are good', + unread: true, + }), + ).toBeTruthy(); + + // The previously waited on payload now has a value. + expectJSON(await payload).toDeepEqual({ + done: false, + value: { + errors: [ + { + message: + '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', + locations: [{ line: 18, column: 13 }], + path: ['importantEmail', 'inbox', 'emails'], + }, + ], + data: { + importantEmail: { + email: { from: 'yuzhi@graphql.org', subject: 'Alright' }, + inbox: { emails: null, unread: 1, total: 2 }, + }, + }, + }, + }); + + // Another new email arrives, after all incrementally delivered payloads are received. + expect( + pubsub.emit({ + from: 'hyo@graphql.org', + subject: 'Tools', + message: 'I <3 making things', + unread: true, + }), + ).toBeTruthy(); + + // The next waited on payload will have a value. + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual({ + done: false, + value: { + errors: [ + { + message: + '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', + locations: [{ line: 18, column: 13 }], + path: ['importantEmail', 'inbox', 'emails'], + }, + ], + data: { + importantEmail: { + email: { from: 'hyo@graphql.org', subject: 'Tools' }, + inbox: { emails: null, unread: 2, total: 3 }, + }, + }, + }, + }); + + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.return()).toDeepEqual({ + done: true, + value: undefined, + }); + + // Awaiting a subscription after closing it results in completed results. 
+ // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual({ + done: true, + value: undefined, + }); + }); + it('produces a payload when there are multiple events', async () => { const pubsub = new SimplePubSub(); const subscription = createSubscription(pubsub); diff --git a/packages/executor/src/execution/buildFieldPlan.ts b/packages/executor/src/execution/buildFieldPlan.ts new file mode 100644 index 00000000000..c03a4b66a0e --- /dev/null +++ b/packages/executor/src/execution/buildFieldPlan.ts @@ -0,0 +1,82 @@ +import type { DeferUsage, FieldGroup, GroupedFieldSet } from './collectFields.js'; +import { getBySet } from './getBySet.js'; +import { isSameSet } from './isSameSet.js'; + +export type DeferUsageSet = ReadonlySet; + +export interface FieldPlan { + groupedFieldSet: GroupedFieldSet; + newGroupedFieldSets: Map; +} + +export function buildFieldPlan( + originalGroupedFieldSet: GroupedFieldSet, + parentDeferUsages: DeferUsageSet = new Set(), +): FieldPlan { + const groupedFieldSet = new Map(); + + const newGroupedFieldSets = new Map>(); + + const map = new Map< + string, + { + deferUsageSet: DeferUsageSet; + fieldGroup: FieldGroup; + } + >(); + + for (const [responseKey, fieldGroup] of originalGroupedFieldSet) { + const deferUsageSet = new Set(); + let inOriginalResult = false; + for (const fieldDetails of fieldGroup) { + const deferUsage = fieldDetails.deferUsage; + if (deferUsage === undefined) { + inOriginalResult = true; + continue; + } + deferUsageSet.add(deferUsage); + } + if (inOriginalResult) { + deferUsageSet.clear(); + } else { + deferUsageSet.forEach(deferUsage => { + const ancestors = getAncestors(deferUsage); + for (const ancestor of ancestors) { + if (deferUsageSet.has(ancestor)) { + deferUsageSet.delete(deferUsage); + } + } + }); + } + map.set(responseKey, { deferUsageSet, fieldGroup }); + } + + for (const [responseKey, { deferUsageSet, fieldGroup }] of map) { + if (isSameSet(deferUsageSet, parentDeferUsages)) { + groupedFieldSet.set(responseKey, fieldGroup); + continue; + } + + let newGroupedFieldSet = getBySet(newGroupedFieldSets, deferUsageSet); + if (newGroupedFieldSet === undefined) { + newGroupedFieldSet = new Map(); + newGroupedFieldSets.set(deferUsageSet, newGroupedFieldSet); + } + newGroupedFieldSet.set(responseKey, fieldGroup); + } + + return { + groupedFieldSet, + newGroupedFieldSets, + }; +} + +function getAncestors(deferUsage: DeferUsage): ReadonlyArray { + const ancestors: Array = []; + let parentDeferUsage: DeferUsage | undefined = deferUsage.parentDeferUsage; + while (parentDeferUsage !== undefined) { + ancestors.unshift(parentDeferUsage); + parentDeferUsage = parentDeferUsage.parentDeferUsage; + } + return ancestors; +} diff --git a/packages/executor/src/execution/collectFields.ts b/packages/executor/src/execution/collectFields.ts new file mode 100644 index 00000000000..9d109fa8422 --- /dev/null +++ b/packages/executor/src/execution/collectFields.ts @@ -0,0 +1,320 @@ +import type { + FieldNode, + FragmentDefinitionNode, + FragmentSpreadNode, + GraphQLObjectType, + GraphQLSchema, + InlineFragmentNode, + SelectionSetNode, +} from 'graphql'; +import { + getDirectiveValues, + GraphQLIncludeDirective, + GraphQLSkipDirective, + isAbstractType, + Kind, + typeFromAST, +} from 'graphql'; +import { GraphQLDeferDirective } from '@graphql-tools/utils'; +import { AccumulatorMap } from './AccumulatorMap.js'; +import { invariant } from './invariant.js'; + +export interface DeferUsage { + label: string | undefined; 
+ parentDeferUsage: DeferUsage | undefined; +} + +export interface FieldDetails { + node: FieldNode; + deferUsage: DeferUsage | undefined; +} + +export type FieldGroup = ReadonlyArray; + +export type GroupedFieldSet = ReadonlyMap; + +interface CollectFieldsContext { + schema: GraphQLSchema; + fragments: Record; + variableValues: TVariables; + errorWithIncrementalSubscription: boolean; + runtimeType: GraphQLObjectType; + visitedFragmentNames: Set; +} + +/** + * Given a selectionSet, collects all of the fields and returns them. + * + * CollectFields requires the "runtime type" of an object. For a field that + * returns an Interface or Union type, the "runtime type" will be the actual + * object type returned by that field. + * + * @internal + */ +export function collectFields( + schema: GraphQLSchema, + fragments: Record, + variableValues: TVariables, + runtimeType: GraphQLObjectType, + selectionSet: SelectionSetNode, + errorWithIncrementalSubscription: boolean, +): { + groupedFieldSet: GroupedFieldSet; + newDeferUsages: ReadonlyArray; +} { + const groupedFieldSet = new AccumulatorMap(); + const newDeferUsages: Array = []; + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType, + errorWithIncrementalSubscription, + visitedFragmentNames: new Set(), + }; + + collectFieldsImpl(context, selectionSet, groupedFieldSet, newDeferUsages); + return { groupedFieldSet, newDeferUsages }; +} + +/** + * Given an array of field nodes, collects all of the subfields of the passed + * in fields, and returns them at the end. + * + * CollectSubFields requires the "return type" of an object. For a field that + * returns an Interface or Union type, the "return type" will be the actual + * object type returned by that field. + * + * @internal + */ +export function collectSubfields( + schema: GraphQLSchema, + fragments: Record, + variableValues: { [variable: string]: unknown }, + errorWithIncrementalSubscription: boolean, + returnType: GraphQLObjectType, + fieldGroup: FieldGroup, +): { + groupedFieldSet: GroupedFieldSet; + newDeferUsages: ReadonlyArray; +} { + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType: returnType, + errorWithIncrementalSubscription, + visitedFragmentNames: new Set(), + }; + const subGroupedFieldSet = new AccumulatorMap(); + const newDeferUsages: Array = []; + + for (const fieldDetail of fieldGroup) { + const node = fieldDetail.node; + if (node.selectionSet) { + collectFieldsImpl( + context, + node.selectionSet, + subGroupedFieldSet, + newDeferUsages, + fieldDetail.deferUsage, + ); + } + } + + return { + groupedFieldSet: subGroupedFieldSet, + newDeferUsages, + }; +} + +function collectFieldsImpl( + context: CollectFieldsContext, + selectionSet: SelectionSetNode, + groupedFieldSet: AccumulatorMap, + newDeferUsages: Array, + deferUsage?: DeferUsage, +): void { + const { + schema, + fragments, + variableValues, + runtimeType, + errorWithIncrementalSubscription, + visitedFragmentNames, + } = context; + + for (const selection of selectionSet.selections) { + switch (selection.kind) { + case Kind.FIELD: { + if (!shouldIncludeNode(variableValues, selection)) { + continue; + } + groupedFieldSet.add(getFieldEntryKey(selection), { + node: selection, + deferUsage, + }); + break; + } + case Kind.INLINE_FRAGMENT: { + if ( + !shouldIncludeNode(variableValues, selection) || + !doesFragmentConditionMatch(schema, selection, runtimeType) + ) { + continue; + } + + const newDeferUsage = getDeferUsage( + 
errorWithIncrementalSubscription, + variableValues, + selection, + deferUsage, + ); + + if (!newDeferUsage) { + collectFieldsImpl( + context, + selection.selectionSet, + groupedFieldSet, + newDeferUsages, + deferUsage, + ); + } else { + newDeferUsages.push(newDeferUsage); + collectFieldsImpl( + context, + selection.selectionSet, + groupedFieldSet, + newDeferUsages, + newDeferUsage, + ); + } + + break; + } + case Kind.FRAGMENT_SPREAD: { + const fragName = selection.name.value; + + const newDeferUsage = getDeferUsage( + errorWithIncrementalSubscription, + variableValues, + selection, + deferUsage, + ); + + if ( + !newDeferUsage && + (visitedFragmentNames.has(fragName) || !shouldIncludeNode(variableValues, selection)) + ) { + continue; + } + + const fragment = fragments[fragName]; + if (fragment == null || !doesFragmentConditionMatch(schema, fragment, runtimeType)) { + continue; + } + if (!newDeferUsage) { + visitedFragmentNames.add(fragName); + collectFieldsImpl( + context, + fragment.selectionSet, + groupedFieldSet, + newDeferUsages, + deferUsage, + ); + } else { + newDeferUsages.push(newDeferUsage); + collectFieldsImpl( + context, + fragment.selectionSet, + groupedFieldSet, + newDeferUsages, + newDeferUsage, + ); + } + break; + } + } + } +} + +/** + * Returns an object containing the `@defer` arguments if a field should be + * deferred based on the experimental flag, defer directive present and + * not disabled by the "if" argument. + */ +function getDeferUsage( + errorWithIncrementalSubscription: boolean, + variableValues: { [variable: string]: unknown }, + node: FragmentSpreadNode | InlineFragmentNode, + parentDeferUsage: DeferUsage | undefined, +): DeferUsage | undefined { + const defer = getDirectiveValues(GraphQLDeferDirective, node, variableValues); + + if (!defer) { + return; + } + + if (defer['if'] === false) { + return; + } + + invariant( + !errorWithIncrementalSubscription, + '`@defer` directive not supported on subscription operations. Disable `@defer` by setting the `if` argument to `false`.', + ); + + return { + label: typeof defer['label'] === 'string' ? defer['label'] : undefined, + parentDeferUsage, + }; +} + +/** + * Determines if a field should be included based on the `@include` and `@skip` + * directives, where `@skip` has higher precedence than `@include`. + */ +function shouldIncludeNode( + variableValues: { [variable: string]: unknown }, + node: FragmentSpreadNode | FieldNode | InlineFragmentNode, +): boolean { + const skip = getDirectiveValues(GraphQLSkipDirective, node, variableValues); + if (skip?.['if'] === true) { + return false; + } + + const include = getDirectiveValues(GraphQLIncludeDirective, node, variableValues); + if (include?.['if'] === false) { + return false; + } + return true; +} + +/** + * Determines if a fragment is applicable to the given type. + */ +function doesFragmentConditionMatch( + schema: GraphQLSchema, + fragment: FragmentDefinitionNode | InlineFragmentNode, + type: GraphQLObjectType, +): boolean { + const typeConditionNode = fragment.typeCondition; + if (!typeConditionNode) { + return true; + } + const conditionalType = typeFromAST(schema, typeConditionNode); + if (conditionalType === type) { + return true; + } + if (isAbstractType(conditionalType)) { + return schema.isSubType(conditionalType, type); + } + return false; +} + +/** + * Implements the logic to compute the key of a given field's entry + */ +function getFieldEntryKey(node: FieldNode): string { + return node.alias ? 
node.alias.value : node.name.value; +} diff --git a/packages/executor/src/execution/execute.ts b/packages/executor/src/execution/execute.ts index f452a4ec75f..d9e84781581 100644 --- a/packages/executor/src/execution/execute.ts +++ b/packages/executor/src/execution/execute.ts @@ -8,7 +8,6 @@ import { GraphQLError, GraphQLField, GraphQLFieldResolver, - GraphQLFormattedError, GraphQLLeafType, GraphQLList, GraphQLObjectType, @@ -24,16 +23,14 @@ import { Kind, locatedError, OperationDefinitionNode, + OperationTypeNode, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, versionInfo, } from 'graphql'; -import { ValueOrPromise } from 'value-or-promise'; import { - collectSubFields as _collectSubfields, addPath, - collectFields, createGraphQLError, getArgumentValues, getDefinedRootType, @@ -53,31 +50,50 @@ import { promiseReduce, } from '@graphql-tools/utils'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; +import { buildFieldPlan, DeferUsageSet, FieldPlan } from './buildFieldPlan.js'; import { coerceError } from './coerceError.js'; +import { + collectSubfields as _collectSubfields, + collectFields, + DeferUsage, + FieldGroup, + GroupedFieldSet, +} from './collectFields.js'; import { flattenAsyncIterable } from './flattenAsyncIterable.js'; +import { buildIncrementalResponse } from './IncrementalPublisher.js'; import { invariant } from './invariant.js'; import { promiseForObject } from './promiseForObject.js'; +import { + CancellableStreamRecord, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + DeferredGroupedFieldSetResult, + IncrementalDataRecord, + IncrementalExecutionResults, + InitialIncrementalExecutionResult, + SingularExecutionResult, + StreamItemRecord, + StreamItemResult, + StreamRecord, + SubsequentIncrementalExecutionResult, +} from './types.js'; import { getVariableValues } from './values.js'; -export interface SingularExecutionResult { - errors?: ReadonlyArray; - data?: TData | null; - extensions?: TExtensions; -} - /** * A memoized collection of relevant subfields with regard to the return * type. Memoizing ensures the subfields are not repeatedly calculated, which * saves overhead when resolving lists of values. 
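 * The cache is keyed on the execution context, the return type, and the field group
 * passed to the memoize3 call below.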
*/ const collectSubfields = memoize3( - (exeContext: ExecutionContext, returnType: GraphQLObjectType, fieldNodes: Array) => + (exeContext: ExecutionContext, returnType: GraphQLObjectType, fieldGroup: FieldGroup) => _collectSubfields( exeContext.schema, exeContext.fragments, exeContext.variableValues, + exeContext.errorWithIncrementalSubscription, returnType, - fieldNodes, + fieldGroup, ), ); @@ -120,118 +136,20 @@ export interface ExecutionContext { fieldResolver: GraphQLFieldResolver; typeResolver: GraphQLTypeResolver; subscribeFieldResolver: GraphQLFieldResolver; - errors: Array; - subsequentPayloads: Set; - signal?: AbortSignal; -} - -export interface FormattedExecutionResult< - TData = Record, - TExtensions = Record, -> { - errors?: ReadonlyArray; - data?: TData | null; - extensions?: TExtensions; -} - -export interface IncrementalExecutionResults< - TData = Record, - TExtensions = Record, -> { - initialResult: InitialIncrementalExecutionResult; - subsequentResults: AsyncGenerator< - SubsequentIncrementalExecutionResult, - void, - void - >; -} - -export interface InitialIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> extends SingularExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface FormattedInitialIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> extends FormattedExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface SubsequentIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface FormattedSubsequentIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface IncrementalDeferResult< - TData = Record, - TExtensions = Record, -> extends SingularExecutionResult { - path?: ReadonlyArray; - label?: string; -} - -export interface FormattedIncrementalDeferResult< - TData = Record, - TExtensions = Record, -> extends FormattedExecutionResult { - path?: ReadonlyArray; - label?: string; -} - -export interface IncrementalStreamResult< - TData = Array, - TExtensions = Record, -> { - errors?: ReadonlyArray; - items?: TData | null; - path?: ReadonlyArray; - label?: string; - extensions?: TExtensions; + enableEarlyExecution: boolean; + errorWithIncrementalSubscription: boolean; + signal: AbortSignal | undefined; + errors: Map | undefined; + cancellableStreams: Set | undefined; + incrementalDataRecords: Array | undefined; } -export interface FormattedIncrementalStreamResult< - TData = Array, - TExtensions = Record, -> { - errors?: ReadonlyArray; - items?: TData | null; - path?: ReadonlyArray; - label?: string; - extensions?: TExtensions; +interface IncrementalContext { + errors: Map | undefined; + deferUsageSet?: DeferUsageSet | undefined; + incrementalDataRecords: Array | undefined; } -export type IncrementalResult< - TData = Record, - TExtensions = Record, -> = IncrementalDeferResult | IncrementalStreamResult; - -export type FormattedIncrementalResult< - TData = Record, - TExtensions = Record, -> = - | FormattedIncrementalDeferResult - | FormattedIncrementalStreamResult; - export interface ExecutionArgs { schema: GraphQLSchema; document: TypedDocumentNode; @@ -242,9 +160,17 @@ export interface ExecutionArgs { fieldResolver?: Maybe>; typeResolver?: Maybe>; subscribeFieldResolver?: 
Maybe>; + enableEarlyExecution?: Maybe; + errorWithIncrementalSubscription?: Maybe; signal?: AbortSignal; } +interface StreamUsage { + label: string | undefined; + initialCount: number; + fieldGroup: FieldGroup; +} + /** * Implements the "Executing requests" section of the GraphQL specification, * including `@defer` and `@stream` as proposed in @@ -282,55 +208,7 @@ export function execute( }; } - return executeImpl(exeContext); -} - -function executeImpl( - exeContext: ExecutionContext, -): MaybePromise | IncrementalExecutionResults> { - if (exeContext.signal?.aborted) { - throw exeContext.signal.reason; - } - - // Return a Promise that will eventually resolve to the data described by - // The "Response" section of the GraphQL specification. - // - // If errors are encountered while executing a GraphQL field, only that - // field and its descendants will be omitted, and sibling fields will still - // be executed. An execution which encounters errors will still result in a - // resolved Promise. - // - // Errors from sub-fields of a NonNull type may propagate to the top level, - // at which point we still log the error and null the parent field, which - // in this case is the entire response. - const result = new ValueOrPromise(() => executeOperation(exeContext)) - .then( - data => { - const initialResult = buildResponse(data, exeContext.errors); - if (exeContext.subsequentPayloads.size > 0) { - return { - initialResult: { - ...initialResult, - hasNext: true, - }, - subsequentResults: yieldSubsequentPayloads(exeContext), - }; - } - - return initialResult; - }, - (error: any) => { - if (exeContext.signal?.aborted) { - throw exeContext.signal.reason; - } - - exeContext.errors.push(error); - return buildResponse(null, exeContext.errors); - }, - ) - .resolve()!; - - return result; + return executeOperation(exeContext); } /** @@ -353,13 +231,80 @@ export function executeSync(args: ExecutionArgs): SingularExecutionResult { * Given a completed execution context and data, build the `{ errors, data }` * response defined by the "Response" section of the GraphQL specification. */ -function buildResponse( - data: TData | null, - errors: ReadonlyArray, +function buildDataResponse( + exeContext: ExecutionContext, + data: TData, +): SingularExecutionResult | IncrementalExecutionResults { + const { errors, incrementalDataRecords } = exeContext; + if (incrementalDataRecords === undefined) { + return buildSingleResult(data, errors); + } + + if (errors === undefined) { + return buildIncrementalResponse(exeContext, data, undefined, incrementalDataRecords); + } + + const filteredIncrementalDataRecords = filterIncrementalDataRecords( + undefined, + errors, + incrementalDataRecords, + ); + + if (filteredIncrementalDataRecords.length === 0) { + return buildSingleResult(data, errors); + } + + return buildIncrementalResponse( + exeContext, + data, + Array.from(errors.values()), + filteredIncrementalDataRecords, + ); +} + +function buildSingleResult( + data: TData, + errors: ReadonlyMap | undefined, ): SingularExecutionResult { - return errors.length === 0 ? { data } : { errors, data }; + return errors !== undefined ? 
{ errors: Array.from(errors.values()), data } : { data }; } +function filterIncrementalDataRecords( + initialPath: Path | undefined, + errors: ReadonlyMap, + incrementalDataRecords: ReadonlyArray, +): ReadonlyArray { + const filteredIncrementalDataRecords: Array = []; + for (const incrementalDataRecord of incrementalDataRecords) { + let currentPath = incrementalDataRecord.path; + + if (errors.has(currentPath)) { + continue; + } + + const paths: Array = [currentPath]; + let filtered = false; + while (currentPath !== initialPath) { + // Because currentPath leads to initialPath or is undefined, and the + // loop will exit if initialPath is undefined, currentPath must be + // defined. + // TODO: Consider, however, adding an invariant. + + currentPath = currentPath!.prev; + if (errors.has(currentPath)) { + filtered = true; + break; + } + paths.push(currentPath); + } + + if (!filtered) { + filteredIncrementalDataRecords.push(incrementalDataRecord); + } + } + + return filteredIncrementalDataRecords; +} /** * Essential assertions before executing to provide developer feedback for * improper use of the GraphQL library. @@ -417,6 +362,8 @@ export function buildExecutionContext( exeContext: ExecutionContext, -): MaybePromise { - const { operation, schema, fragments, variableValues, rootValue } = exeContext; - const rootType = getDefinedRootType(schema, operation.operation, [operation]); - if (rootType == null) { - createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, { - nodes: operation, - }); +): MaybePromise | IncrementalExecutionResults> { + if (exeContext.signal?.aborted) { + throw exeContext.signal.reason; } - const { fields: rootFields, patches } = collectFields( - schema, - fragments, - variableValues, - rootType, - operation.selectionSet, - ); - const path = undefined; - let result: MaybePromise; + try { + const { + operation, + schema, + fragments, + variableValues, + rootValue, + errorWithIncrementalSubscription, + } = exeContext; + const rootType = getDefinedRootType(schema, operation.operation, [operation]); + if (rootType == null) { + createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, { + nodes: operation, + }); + } + + const collectedFields = collectFields( + schema, + fragments, + variableValues, + rootType, + operation.selectionSet, + errorWithIncrementalSubscription, + ); + let groupedFieldSet = collectedFields.groupedFieldSet; + const newDeferUsages = collectedFields.newDeferUsages; + let data: MaybePromise; + if (newDeferUsages.length === 0) { + data = executeRootGroupedFieldSet( + exeContext, + operation.operation, + rootType, + rootValue, + groupedFieldSet, + undefined, + ); + } else { + const fieldPLan = buildFieldPlan(groupedFieldSet); + groupedFieldSet = fieldPLan.groupedFieldSet; + const newGroupedFieldSets = fieldPLan.newGroupedFieldSets; + const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map()); + + data = executeRootGroupedFieldSet( + exeContext, + operation.operation, + rootType, + rootValue, + groupedFieldSet, + newDeferMap, + ); + + if (newGroupedFieldSets.size > 0) { + const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets( + exeContext, + rootType, + rootValue, + undefined, + undefined, + newGroupedFieldSets, + newDeferMap, + ); + + addIncrementalDataRecords(exeContext, newDeferredGroupedFieldSetRecords); + } + } + if (isPromise(data)) { + return data.then( + resolved => buildDataResponse(exeContext, resolved), + error => { + if 
(exeContext.signal?.aborted) { + throw exeContext.signal.reason; + } + return { + data: null, + errors: withError(exeContext.errors, error), + }; + }, + ); + } + return buildDataResponse(exeContext, data); + } catch (error) { + if (exeContext.signal?.aborted) { + throw exeContext.signal.reason; + } + return { data: null, errors: withError(exeContext.errors, error as GraphQLError) }; + } +} - if (operation.operation === 'mutation') { - result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields); +function executeRootGroupedFieldSet( + exeContext: ExecutionContext, + operation: OperationTypeNode, + rootType: GraphQLObjectType, + rootValue: unknown, + groupedFieldSet: GroupedFieldSet, + deferMap: ReadonlyMap | undefined, +): MaybePromise { + let result: MaybePromise; + if (operation === 'mutation') { + result = executeFieldsSerially( + exeContext, + rootType, + rootValue, + undefined, + groupedFieldSet, + undefined, + deferMap, + ); } else { - result = executeFields(exeContext, rootType, rootValue, path, rootFields) as TData; + result = executeFields( + exeContext, + rootType, + rootValue, + undefined, + groupedFieldSet, + undefined, + deferMap, + ) as MaybePromise; } + return result; +} - for (const patch of patches) { - const { label, fields: patchFields } = patch; - executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path); +function addIncrementalDataRecords( + context: ExecutionContext | IncrementalContext, + newIncrementalDataRecords: ReadonlyArray, +): void { + const incrementalDataRecords = context.incrementalDataRecords; + if (incrementalDataRecords === undefined) { + context.incrementalDataRecords = [...newIncrementalDataRecords]; + return; } + incrementalDataRecords.push(...newIncrementalDataRecords); +} - return result; +function withError( + errors: ReadonlyMap | undefined, + error: GraphQLError, +): ReadonlyArray { + return errors === undefined ? [error] : [...errors.values(), error]; } /** * Implements the "Executing selection sets" section of the spec * for fields that must be executed serially. 
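 * Serial execution is used for the root fields of mutation operations: each root
 * field is resolved to completion before execution of the next one begins.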
*/ -function executeFieldsSerially( +function executeFieldsSerially( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, path: Path | undefined, - fields: Map>, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise { return promiseReduce( - fields, - (results, [responseName, fieldNodes]) => { + groupedFieldSet, + (results, [responseName, fieldGroup]) => { const fieldPath = addPath(path, responseName, parentType.name); if (exeContext.signal?.aborted) { throw exeContext.signal.reason; } - return new ValueOrPromise(() => - executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath), - ).then(result => { - if (result === undefined) { - return results; - } - - results[responseName] = result; - + const result = executeField( + exeContext, + parentType, + sourceValue, + fieldGroup, + fieldPath, + incrementalContext, + deferMap, + ); + if (result === undefined) { return results; - }); + } + if (isPromise(result)) { + return result.then(resolved => { + results[responseName] = resolved; + return results; + }); + } + results[responseName] = result; + return results; }, Object.create(null), - ).resolve(); + ); } /** @@ -581,14 +654,15 @@ function executeFields( parentType: GraphQLObjectType, sourceValue: unknown, path: Path | undefined, - fields: Map>, - asyncPayloadRecord?: AsyncPayloadRecord, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const results = Object.create(null); let containsPromise = false; try { - for (const [responseName, fieldNodes] of fields) { + for (const [responseName, fieldGroup] of groupedFieldSet) { if (exeContext.signal?.aborted) { throw exeContext.signal.reason; } @@ -598,9 +672,10 @@ function executeFields( exeContext, parentType, sourceValue, - fieldNodes, + fieldGroup, fieldPath, - asyncPayloadRecord, + incrementalContext, + deferMap, ); if (result !== undefined) { @@ -615,12 +690,12 @@ function executeFields( // Ensure that any promises returned by other fields are handled, as they may also reject. return promiseForObject(results, exeContext.signal).finally(() => { throw error; - }); + }) as never; } throw error; } - // If there are no promises, we can just return the object + // If there are no promises, we can just return the object and any incrementalDataRecords if (!containsPromise) { return results; } @@ -631,6 +706,10 @@ function executeFields( return promiseForObject(results, exeContext.signal); } +function toNodes(fieldGroup: FieldGroup): Array { + return fieldGroup.map(fieldDetails => fieldDetails.node); +} + /** * Implements the "Executing fields" section of the spec * In particular, this function figures out the value that the field returns by @@ -641,12 +720,12 @@ function executeField( exeContext: ExecutionContext, parentType: GraphQLObjectType, source: unknown, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, - asyncPayloadRecord?: AsyncPayloadRecord, -): MaybePromise { - const errors = asyncPayloadRecord?.errors ?? 
exeContext.errors; - const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]); + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): MaybePromise | undefined { + const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node); if (!fieldDef) { return; } @@ -654,14 +733,14 @@ function executeField( const returnType = fieldDef.type; const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver; - const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path); + const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path); // Get the resolve function, regardless of if its result is normal or abrupt (error). try { // Build a JS object of arguments from the field.arguments AST, using the // variables scope to fulfill any variable references. // TODO: find a way to memoize, in case this field is within a List type. - const args = getArgumentValues(fieldDef, fieldNodes[0], exeContext.variableValues); + const args = getArgumentValues(fieldDef, fieldGroup[0].node, exeContext.variableValues); // The resolve function's optional third argument is a context value that // is provided to every resolve function within an execution. It is commonly @@ -670,41 +749,42 @@ function executeField( const result = resolveFn(source, args, contextValue, info); - let completed; if (isPromise(result)) { - completed = result.then(resolved => - completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord), - ); - } else { - completed = completeValue( + return completePromisedValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } + const completed = completeValue( + exeContext, + returnType, + fieldGroup, + info, + path, + result, + incrementalContext, + deferMap, + ); + if (isPromise(completed)) { // Note: we don't rely on a `catch` method, but we do expect "thenable" // to take a second callback for the error case. return completed.then(undefined, rawError => { - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(path)); - const handledError = handleFieldError(error, returnType, errors); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return handledError; + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; }); } return completed; } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(path)); - const handledError = handleFieldError(error, returnType, errors); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return handledError; + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; } } @@ -736,10 +816,15 @@ export function buildResolveInfo( } function handleFieldError( - error: GraphQLError, + rawError: unknown, + exeContext: ExecutionContext, returnType: GraphQLOutputType, - errors: Array, -): null { + fieldGroup: FieldGroup, + path: Path, + incrementalContext: IncrementalContext | undefined, +): void { + const error = locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path)); + // If the field type is non-nullable, then it is resolved without any // protection from errors, however it still properly locates the error. 
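// For nullable fields, the error is instead recorded below in a per-path map;
// incremental data records nested under a failed path are later filtered out
// before the response is built.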
if (isNonNullType(returnType)) { @@ -748,8 +833,13 @@ function handleFieldError( // Otherwise, error protection is applied, logging the error and resolving // a null value for this field if one is encountered. - errors.push(error); - return null; + const context = incrementalContext ?? exeContext; + let errors = context.errors; + if (errors === undefined) { + errors = new Map(); + context.errors = errors; + } + errors.set(path, error); } /** @@ -776,11 +866,12 @@ function handleFieldError( function completeValue( exeContext: ExecutionContext, returnType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise { // If result is an Error, throw a located error. if (result instanceof Error) { @@ -793,13 +884,14 @@ function completeValue( const completed = completeValue( exeContext, returnType.ofType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); - if (completed === null) { + if (completed == null) { throw new Error( `Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`, ); @@ -817,11 +909,12 @@ function completeValue( return completeListValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -837,11 +930,12 @@ function completeValue( return completeAbstractValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -850,43 +944,78 @@ function completeValue( return completeObjectValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } /* c8 ignore next 6 */ // Not reachable, all possible output types have been considered. - console.assert(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType)); + invariant(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType)); +} + +async function completePromisedValue( + exeContext: ExecutionContext, + returnType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + result: PromiseLike, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): Promise { + try { + const resolved = await result; + let completed = completeValue( + exeContext, + returnType, + fieldGroup, + info, + path, + resolved, + incrementalContext, + deferMap, + ); + + if (isPromise(completed)) { + completed = await completed; + } + return completed; + } catch (rawError) { + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; + } } /** - * Returns an object containing the `@stream` arguments if a field should be + * Returns an object containing info for streaming if a field should be * streamed based on the experimental flag, stream directive present and * not disabled by the "if" argument. 
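 * The computed stream usage is cached on the field group so repeated completions
 * of the same list field can reuse it.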
*/ -function getStreamValues( +function getStreamUsage( exeContext: ExecutionContext, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, -): - | undefined - | { - initialCount: number | undefined; - label: string | undefined; - } { +): StreamUsage | undefined { // do not stream inner lists of multi-dimensional lists if (typeof path.key === 'number') { return; } + // TODO: add test for this case (a streamed list nested under a list). + /* c8 ignore next 7 */ + if ((fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage !== undefined) { + return (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage; + } + // validation only allows equivalent streams on multiple fields, so it is // safe to only check the first fieldNode for the stream directive const stream = getDirectiveValues( GraphQLStreamDirective, - fieldNodes[0], + fieldGroup[0].node, exeContext.variableValues, ) as { initialCount: number; @@ -906,10 +1035,25 @@ function getStreamValues( invariant(stream['initialCount'] >= 0, 'initialCount must be a positive integer'); - return { + invariant( + !exeContext.errorWithIncrementalSubscription, + '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', + ); + + const streamedFieldGroup: FieldGroup = fieldGroup.map(fieldDetails => ({ + node: fieldDetails.node, + deferUsage: undefined, + })); + + const streamUsage = { initialCount: stream['initialCount'], label: typeof stream['label'] === 'string' ? stream['label'] : undefined, + fieldGroup: streamedFieldGroup, }; + + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage = streamUsage; + + return streamUsage; } /** @@ -919,69 +1063,114 @@ function getStreamValues( async function completeAsyncIteratorValue( exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, - iterator: AsyncIterator, - asyncPayloadRecord?: AsyncPayloadRecord, + asyncIterator: AsyncIterator, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): Promise> { exeContext.signal?.addEventListener('abort', () => { - iterator.return?.(); + asyncIterator.return?.(); }); - const errors = asyncPayloadRecord?.errors ?? exeContext.errors; - const stream = getStreamValues(exeContext, fieldNodes, path); let containsPromise = false; const completedResults: Array = []; let index = 0; - + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); while (true) { - if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) { - executeStreamIterator( + if (streamUsage && index >= streamUsage.initialCount) { + const streamItemQueue = buildAsyncStreamItemQueue( index, - iterator, + path, + asyncIterator, exeContext, - fieldNodes, + streamUsage.fieldGroup, info, itemType, - path, - stream.label, - asyncPayloadRecord, ); + + const returnFn = asyncIterator.return; + let streamRecord: StreamRecord | CancellableStreamRecord; + if (returnFn === undefined) { + streamRecord = { + label: streamUsage.label, + path, + streamItemQueue, + }; + } else { + streamRecord = { + label: streamUsage.label, + path, + streamItemQueue, + earlyReturn: returnFn.bind(asyncIterator), + }; + if (exeContext.cancellableStreams === undefined) { + exeContext.cancellableStreams = new Set(); + } + exeContext.cancellableStreams.add(streamRecord); + } + + const context = incrementalContext ?? 
exeContext; + addIncrementalDataRecords(context, [streamRecord]); break; } const itemPath = addPath(path, index, undefined); let iteration; try { - iteration = await iterator.next(); - if (iteration.done) { - break; - } + iteration = await asyncIterator.next(); } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - completedResults.push(handleFieldError(error, itemType, errors)); + throw locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path)); + } + + // TODO: add test case for stream returning done before initialCount + /* c8 ignore next 3 */ + if (iteration.done) { break; } - if ( + const item = iteration.value; + // TODO: add tests for stream backed by asyncIterator that returns a promise + /* c8 ignore start */ + if (isPromise(item)) { + completedResults.push( + completePromisedListItemValue( + item, + exeContext, + itemType, + fieldGroup, + info, + itemPath, + incrementalContext, + deferMap, + ), + ); + containsPromise = true; + } else if ( + /* c8 ignore stop */ completeListItemValue( - iteration.value, + item, completedResults, - errors, exeContext, itemType, - fieldNodes, + fieldGroup, info, itemPath, - asyncPayloadRecord, + incrementalContext, + deferMap, ) + // TODO: add tests for stream backed by asyncIterator that completes to a promise + /* c8 ignore start */ ) { containsPromise = true; } - index += 1; + /* c8 ignore stop */ + index++; } - return containsPromise ? Promise.all(completedResults) : completedResults; + + return containsPromise + ? /* c8 ignore start */ Promise.all(completedResults) + : /* c8 ignore stop */ completedResults; } /** @@ -991,26 +1180,27 @@ async function completeAsyncIteratorValue( function completeListValue( exeContext: ExecutionContext, returnType: GraphQLList, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const itemType = returnType.ofType; - const errors = asyncPayloadRecord?.errors ?? exeContext.errors; if (isAsyncIterable(result)) { - const iterator = result[Symbol.asyncIterator](); + const asyncIterator = result[Symbol.asyncIterator](); return completeAsyncIteratorValue( exeContext, itemType, - fieldNodes, + fieldGroup, info, path, - iterator, - asyncPayloadRecord, + asyncIterator, + incrementalContext, + deferMap, ); } @@ -1020,52 +1210,96 @@ function completeListValue( ); } - const stream = getStreamValues(exeContext, fieldNodes, path); + return completeIterableValue( + exeContext, + itemType, + fieldGroup, + info, + path, + result, + incrementalContext, + deferMap, + ); +} +function completeIterableValue( + exeContext: ExecutionContext, + itemType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + items: Iterable, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): MaybePromise> { // This is specified as a simple map, however we're optimizing the path // where the list contains no Promises by avoiding creating another Promise. let containsPromise = false; - let previousAsyncPayloadRecord = asyncPayloadRecord; const completedResults: Array = []; let index = 0; - for (const item of result) { - // No need to modify the info object containing the path, - // since from here on it is not ever accessed by resolver functions. 
- const itemPath = addPath(path, index, undefined); - - if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) { - previousAsyncPayloadRecord = executeStreamField( + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); + const iterator = items[Symbol.iterator](); + let iteration = iterator.next(); + while (!iteration.done) { + const item = iteration.value; + + if (streamUsage && index >= streamUsage.initialCount) { + const streamRecord: StreamRecord = { + label: streamUsage.label, path, - itemPath, - item, - exeContext, - fieldNodes, - info, - itemType, - stream.label, - previousAsyncPayloadRecord, - ); - index++; - continue; + streamItemQueue: buildSyncStreamItemQueue( + item, + index, + path, + iterator, + exeContext, + streamUsage.fieldGroup, + info, + itemType, + ), + }; + + const context = incrementalContext ?? exeContext; + addIncrementalDataRecords(context, [streamRecord]); + break; } - if ( + // No need to modify the info object containing the path, + // since from here on it is not ever accessed by resolver functions. + const itemPath = addPath(path, index, undefined); + + if (isPromise(item)) { + completedResults.push( + completePromisedListItemValue( + item, + exeContext, + itemType, + fieldGroup, + info, + itemPath, + incrementalContext, + deferMap, + ), + ); + containsPromise = true; + } else if ( completeListItemValue( item, completedResults, - errors, exeContext, itemType, - fieldNodes, + fieldGroup, info, itemPath, - asyncPayloadRecord, + incrementalContext, + deferMap, ) ) { containsPromise = true; } - index++; + + iteration = iterator.next(); } return containsPromise ? Promise.all(completedResults) : completedResults; @@ -1079,68 +1313,85 @@ function completeListValue( function completeListItemValue( item: unknown, completedResults: Array, - errors: Array, exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemPath: Path, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): boolean { try { - let completedItem; - if (isPromise(item)) { - completedItem = item.then(resolved => - completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - resolved, - asyncPayloadRecord, - ), - ); - } else { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - } + const completedItem = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + deferMap, + ); if (isPromise(completedItem)) { // Note: we don't rely on a `catch` method, but we do expect "thenable" // to take a second callback for the error case. 
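// A rejected list item is recorded against its item path and resolves to null,
// so sibling items in the list are preserved.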
completedResults.push( completedItem.then(undefined, rawError => { - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; + handleFieldError( + rawError, + exeContext, + itemType, + fieldGroup, + itemPath, + incrementalContext, + ); + return null; }), ); - return true; } completedResults.push(completedItem); } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - completedResults.push(handledError); + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + completedResults.push(null); } - return false; } +async function completePromisedListItemValue( + item: unknown, + exeContext: ExecutionContext, + itemType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemPath: Path, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): Promise { + try { + const resolved = await item; + let completed = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + resolved, + incrementalContext, + deferMap, + ); + if (isPromise(completed)) { + completed = await completed; + } + return completed; + } catch (rawError) { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + return null; + } +} + /** * Complete a Scalar or Enum by serializing to a valid value, returning * null if serialization is not possible. @@ -1177,11 +1428,12 @@ function completeLeafValue(returnType: GraphQLLeafType, result: unknown): unknow function completeAbstractValue( exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver; const contextValue = exeContext.contextValue; @@ -1195,27 +1447,29 @@ function completeAbstractValue( resolvedRuntimeType, exeContext, returnType, - fieldNodes, + fieldGroup, info, result, ), - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ), ); } return completeObjectValue( exeContext, - ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), - fieldNodes, + ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -1223,14 +1477,14 @@ function ensureValidRuntimeType( runtimeTypeName: unknown, exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, result: unknown, ): GraphQLObjectType { if (runtimeTypeName == null) { throw createGraphQLError( `Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". 
Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1256,21 +1510,21 @@ function ensureValidRuntimeType( if (runtimeType == null) { throw createGraphQLError( `Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } if (!isObjectType(runtimeType)) { throw createGraphQLError( `Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } if (!exeContext.schema.isSubType(returnType, runtimeType)) { throw createGraphQLError( `Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1283,11 +1537,12 @@ function ensureValidRuntimeType( function completeObjectValue( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { // If there is an isTypeOf predicate function, call it with the // current result. If isTypeOf returns false, then raise an error rather @@ -1298,87 +1553,148 @@ function completeObjectValue( if (isPromise(isTypeOf)) { return isTypeOf.then(resolvedIsTypeOf => { if (!resolvedIsTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldNodes); + throw invalidReturnTypeError(returnType, result, fieldGroup); } return collectAndExecuteSubfields( exeContext, returnType, - fieldNodes, + fieldGroup, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); }); } if (!isTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldNodes); + throw invalidReturnTypeError(returnType, result, fieldGroup); } } return collectAndExecuteSubfields( exeContext, returnType, - fieldNodes, + fieldGroup, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } function invalidReturnTypeError( returnType: GraphQLObjectType, result: unknown, - fieldNodes: Array, + fieldGroup: FieldGroup, ): GraphQLError { return createGraphQLError( `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, - { - nodes: fieldNodes, - }, + { nodes: toNodes(fieldGroup) }, ); } +function addNewDeferredFragments( + newDeferUsages: ReadonlyArray, + newDeferMap: Map, + path?: Path | undefined, +): ReadonlyMap { + // For each new deferUsage object: + for (const newDeferUsage of newDeferUsages) { + const parentDeferUsage = newDeferUsage.parentDeferUsage; + + const parent = + parentDeferUsage === undefined + ? undefined + : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap); + + // Instantiate the new record. + const deferredFragmentRecord = new DeferredFragmentRecord(path, newDeferUsage.label, parent); + + // Update the map. 
+ newDeferMap.set(newDeferUsage, deferredFragmentRecord); + } + + return newDeferMap; +} + +function deferredFragmentRecordFromDeferUsage( + deferUsage: DeferUsage, + deferMap: ReadonlyMap, +): DeferredFragmentRecord { + return deferMap.get(deferUsage)!; +} + function collectAndExecuteSubfields( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { // Collect sub-fields to execute to complete this value. - const { fields: subFieldNodes, patches: subPatches } = collectSubfields( - exeContext, - returnType, - fieldNodes, - ); + const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup); + let groupedFieldSet = collectedSubfields.groupedFieldSet; + const newDeferUsages = collectedSubfields.newDeferUsages; + if (deferMap === undefined && newDeferUsages.length === 0) { + return executeFields( + exeContext, + returnType, + result, + path, + groupedFieldSet, + incrementalContext, + undefined, + ); + } + const subFieldPlan = buildSubFieldPlan(groupedFieldSet, incrementalContext?.deferUsageSet); + + groupedFieldSet = subFieldPlan.groupedFieldSet; + const newGroupedFieldSets = subFieldPlan.newGroupedFieldSets; + const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path); const subFields = executeFields( exeContext, returnType, result, path, - subFieldNodes, - asyncPayloadRecord, + groupedFieldSet, + incrementalContext, + newDeferMap, ); - for (const subPatch of subPatches) { - const { label, fields: subPatchFieldNodes } = subPatch; - executeDeferredFragment( + if (newGroupedFieldSets.size > 0) { + const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets( exeContext, returnType, result, - subPatchFieldNodes, - label, path, - asyncPayloadRecord, + incrementalContext?.deferUsageSet, + newGroupedFieldSets, + newDeferMap, ); - } + const context = incrementalContext ?? exeContext; + addIncrementalDataRecords(context, newDeferredGroupedFieldSetRecords); + } return subFields; } +function buildSubFieldPlan( + originalGroupedFieldSet: GroupedFieldSet, + deferUsageSet: DeferUsageSet | undefined, +): FieldPlan { + let fieldPlan = (originalGroupedFieldSet as unknown as { _fieldPlan: FieldPlan })._fieldPlan; + if (fieldPlan !== undefined) { + return fieldPlan; + } + fieldPlan = buildFieldPlan(originalGroupedFieldSet, deferUsageSet); + (originalGroupedFieldSet as unknown as { _fieldPlan: FieldPlan })._fieldPlan = fieldPlan; + return fieldPlan; +} + /** * If a resolveType function is not given, then a default resolve behavior is * used which attempts two strategies: @@ -1484,6 +1800,25 @@ export const defaultFieldResolver: GraphQLFieldResolver = func * * Accepts an object with named arguments. 
*/ +export function subscribe( + args: ExecutionArgs & { + errorWithIncrementalSubscription: true | undefined | null; + }, +): MaybePromise< + AsyncGenerator, void, void> | SingularExecutionResult +>; +export function subscribe( + args: ExecutionArgs, +): MaybePromise< + | AsyncGenerator< + | SingularExecutionResult + | InitialIncrementalExecutionResult + | SubsequentIncrementalExecutionResult, + void, + void + > + | SingularExecutionResult +>; export function subscribe( args: ExecutionArgs, ): MaybePromise< @@ -1613,7 +1948,9 @@ function mapSourceToResponse( mapAsyncIterator( resultOrStream[Symbol.asyncIterator](), async (payload: unknown) => - ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), + ensureAsyncIterable( + await executeOperation(buildPerEventExecutionContext(exeContext, payload)), + ), (error: Error) => { const wrappedError = createGraphQLError(error.message, { originalError: error, @@ -1641,7 +1978,14 @@ function createSourceEventStreamImpl( } function executeSubscription(exeContext: ExecutionContext): MaybePromise> { - const { schema, fragments, operation, variableValues, rootValue } = exeContext; + const { + schema, + fragments, + operation, + variableValues, + rootValue, + errorWithIncrementalSubscription, + } = exeContext; const rootType = schema.getSubscriptionType(); if (rootType == null) { @@ -1650,25 +1994,27 @@ function executeSubscription(exeContext: ExecutionContext): MaybePromise { - throw locatedError(error, fieldNodes, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); }); } return assertEventStream(result, exeContext.signal); } catch (error) { - throw locatedError(error, fieldNodes, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); } } @@ -1722,495 +2068,456 @@ function assertEventStream(result: unknown, signal?: AbortSignal): AsyncIterable }; } -function executeDeferredFragment( +function executeDeferredGroupedFieldSets( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, - fields: Map>, - label?: string, - path?: Path, - parentContext?: AsyncPayloadRecord, -): void { - const asyncPayloadRecord = new DeferredFragmentRecord({ - label, - path, - parentContext, - exeContext, - }); - let promiseOrData; - try { - promiseOrData = executeFields( - exeContext, - parentType, - sourceValue, + path: Path | undefined, + parentDeferUsages: DeferUsageSet | undefined, + newGroupedFieldSets: Map, + deferMap: ReadonlyMap, +): ReadonlyArray { + const newDeferredGroupedFieldSetRecords: Array = []; + + for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) { + const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap); + + const deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord = { path, - fields, - asyncPayloadRecord, - ); + deferredFragmentRecords, + result: undefined as unknown as BoxedPromiseOrValue, + }; - if (isPromise(promiseOrData)) { - promiseOrData = promiseOrData.then(null, e => { - asyncPayloadRecord.errors.push(e); - return null; - }); + const executor = () => + executeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + exeContext, + parentType, + sourceValue, + path, + groupedFieldSet, + { + errors: undefined, + deferUsageSet, + incrementalDataRecords: undefined, + }, + deferMap, + ); + + if (exeContext.enableEarlyExecution) { + deferredGroupedFieldSetRecord.result = new BoxedPromiseOrValue( + shouldDefer(parentDeferUsages, deferUsageSet) + ? 
Promise.resolve().then(executor) + : executor(), + ); + } else { + deferredGroupedFieldSetRecord.result = () => new BoxedPromiseOrValue(executor()); + const resolveThunk = () => { + const maybeThunk = deferredGroupedFieldSetRecord.result; + if (!(maybeThunk instanceof BoxedPromiseOrValue)) { + deferredGroupedFieldSetRecord.result = maybeThunk(); + } + }; + for (const deferredFragmentRecord of deferredFragmentRecords) { + deferredFragmentRecord.onPending(resolveThunk); + } } - } catch (e) { - asyncPayloadRecord.errors.push(e as GraphQLError); - promiseOrData = null; + + newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord); } - asyncPayloadRecord.addData(promiseOrData); + + return newDeferredGroupedFieldSetRecords; } -function executeStreamField( - path: Path, - itemPath: Path, - item: MaybePromise, +function shouldDefer( + parentDeferUsages: undefined | DeferUsageSet, + deferUsages: DeferUsageSet, +): boolean { + // If we have a new child defer usage, defer. + // Otherwise, this defer usage was already deferred when it was initially + // encountered, and is now in the midst of executing early, so the new + // deferred grouped fields set can be executed immediately. + return ( + parentDeferUsages === undefined || + !Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)) + ); +} + +function executeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, exeContext: ExecutionContext, - fieldNodes: Array, - info: GraphQLResolveInfo, - itemType: GraphQLOutputType, - label?: string, - parentContext?: AsyncPayloadRecord, -): AsyncPayloadRecord { - const asyncPayloadRecord = new StreamRecord({ - label, - path: itemPath, - parentContext, - exeContext, - }); - let completedItem: MaybePromise; + parentType: GraphQLObjectType, + sourceValue: unknown, + path: Path | undefined, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext, + deferMap: ReadonlyMap, +): MaybePromise { + let result; try { - try { - if (isPromise(item)) { - completedItem = item.then(resolved => - completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - resolved, - asyncPayloadRecord, - ), - ); - } else { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - } - - if (isPromise(completedItem)) { - // Note: we don't rely on a `catch` method, but we do expect "thenable" - // to take a second callback for the error case. 
- completedItem = completedItem.then(undefined, rawError => { - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; - }); - } - } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - } + result = executeFields( + exeContext, + parentType, + sourceValue, + path, + groupedFieldSet, + incrementalContext, + deferMap, + ); } catch (error) { - asyncPayloadRecord.errors.push(error as GraphQLError); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - asyncPayloadRecord.addItems(null); - return asyncPayloadRecord; - } - - let completedItems: MaybePromise | null>; - if (isPromise(completedItem)) { - completedItems = completedItem.then( - value => [value], - error => { - asyncPayloadRecord.errors.push(error); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return null; - }, + return { + deferredGroupedFieldSetRecord, + path: pathToArray(path), + errors: withError(incrementalContext.errors, error as GraphQLError), + }; + } + + if (isPromise(result)) { + return result.then( + resolved => + buildDeferredGroupedFieldSetResult( + incrementalContext, + deferredGroupedFieldSetRecord, + path, + resolved, + ), + error => ({ + deferredGroupedFieldSetRecord, + path: pathToArray(path), + errors: withError(incrementalContext.errors, error as GraphQLError), + }), ); - } else { - completedItems = [completedItem]; } - asyncPayloadRecord.addItems(completedItems); - return asyncPayloadRecord; + return buildDeferredGroupedFieldSetResult( + incrementalContext, + deferredGroupedFieldSetRecord, + path, + result, + ); } -async function executeStreamIteratorItem( - iterator: AsyncIterator, - exeContext: ExecutionContext, - fieldNodes: Array, - info: GraphQLResolveInfo, - itemType: GraphQLOutputType, - asyncPayloadRecord: StreamRecord, - itemPath: Path, -): Promise> { - let item; - try { - const { value, done } = await iterator.next(); - if (done) { - asyncPayloadRecord.setIsCompletedIterator(); - return { done, value: undefined }; - } - item = value; - } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - const value = handleFieldError(error, itemType, asyncPayloadRecord.errors); - // don't continue if iterator throws - return { done: true, value }; +function buildDeferredGroupedFieldSetResult( + incrementalContext: IncrementalContext, + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + path: Path | undefined, + data: Record, +): DeferredGroupedFieldSetResult { + const { errors, incrementalDataRecords } = incrementalContext; + if (incrementalDataRecords === undefined) { + return { + deferredGroupedFieldSetRecord, + path: pathToArray(path), + result: errors === undefined ? 
{ data } : { data, errors: [...errors.values()] }, + incrementalDataRecords, + }; } - let completedItem; - try { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - if (isPromise(completedItem)) { - completedItem = completedItem.then(undefined, rawError => { - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; - }); - } - return { done: false, value: completedItem }; - } catch (rawError) { - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const value = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return { done: false, value }; + if (errors === undefined) { + return { + deferredGroupedFieldSetRecord, + path: pathToArray(path), + result: { data }, + incrementalDataRecords, + }; } + + return { + deferredGroupedFieldSetRecord, + path: pathToArray(path), + result: { data, errors: [...errors.values()] }, + incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords), + }; +} + +function getDeferredFragmentRecords( + deferUsages: DeferUsageSet, + deferMap: ReadonlyMap, +): ReadonlyArray { + return Array.from(deferUsages).map(deferUsage => + deferredFragmentRecordFromDeferUsage(deferUsage, deferMap), + ); } -async function executeStreamIterator( +function buildSyncStreamItemQueue( + initialItem: MaybePromise, initialIndex: number, - iterator: AsyncIterator, + streamPath: Path, + iterator: Iterator, exeContext: ExecutionContext, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: GraphQLOutputType, - path: Path, - label?: string, - parentContext?: AsyncPayloadRecord, -): Promise { - let index = initialIndex; - let previousAsyncPayloadRecord = parentContext ?? 
undefined; - while (true) { - const itemPath = addPath(path, index, undefined); - const asyncPayloadRecord = new StreamRecord({ - label, - path: itemPath, - parentContext: previousAsyncPayloadRecord, - iterator, - exeContext, - }); - - let iteration; - try { - iteration = await executeStreamIteratorItem( - iterator, +): Array { + const streamItemQueue: Array = []; + + const enableEarlyExecution = exeContext.enableEarlyExecution; + + const firstExecutor = () => { + const initialPath = addPath(streamPath, initialIndex, undefined); + const firstStreamItem = new BoxedPromiseOrValue( + completeStreamItem( + streamPath, + initialPath, + initialItem, exeContext, - fieldNodes, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, info, itemType, - asyncPayloadRecord, - itemPath, - ); - } catch (error) { - asyncPayloadRecord.errors.push(error as GraphQLError); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - asyncPayloadRecord.addItems(null); - // entire stream has errored and bubbled upwards - if (iterator?.return) { - iterator.return().catch(() => { - // ignore errors - }); - } - return; - } + ), + ); - const { done, value: completedItem } = iteration; + let iteration = iterator.next(); + let currentIndex = initialIndex + 1; + let currentStreamItem: + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue) = firstStreamItem; + while (!iteration.done) { + // TODO: add test case for early sync termination + /* c8 ignore next 6 */ + if (currentStreamItem instanceof BoxedPromiseOrValue) { + const result = currentStreamItem.value; + if (!isPromise(result) && result.errors !== undefined) { + break; + } + } - let completedItems: MaybePromise | null>; - if (isPromise(completedItem)) { - completedItems = completedItem.then( - value => [value], - error => { - asyncPayloadRecord.errors.push(error); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return null; - }, - ); - } else { - completedItems = [completedItem]; - } + const itemPath = addPath(streamPath, currentIndex, undefined); - asyncPayloadRecord.addItems(completedItems); + const value = iteration.value; - if (done) { - break; - } - previousAsyncPayloadRecord = asyncPayloadRecord; - index++; - } -} + const currentExecutor = () => + completeStreamItem( + streamPath, + itemPath, + value, + exeContext, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, + info, + itemType, + ); -function filterSubsequentPayloads( - exeContext: ExecutionContext, - nullPath: Path, - currentAsyncRecord: AsyncPayloadRecord | undefined, -): void { - const nullPathArray = pathToArray(nullPath); - exeContext.subsequentPayloads.forEach(asyncRecord => { - if (asyncRecord === currentAsyncRecord) { - // don't remove payload from where error originates - return; - } - for (let i = 0; i < nullPathArray.length; i++) { - if (asyncRecord.path[i] !== nullPathArray[i]) { - // asyncRecord points to a path unaffected by this payload - return; - } - } - // asyncRecord path points to nulled error field - if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) { - asyncRecord.iterator.return().catch(() => { - // ignore error - }); - } - exeContext.subsequentPayloads.delete(asyncRecord); - }); -} + currentStreamItem = enableEarlyExecution + ? 
new BoxedPromiseOrValue(currentExecutor()) + : () => new BoxedPromiseOrValue(currentExecutor()); -function getCompletedIncrementalResults(exeContext: ExecutionContext): Array { - const incrementalResults: Array = []; - for (const asyncPayloadRecord of exeContext.subsequentPayloads) { - const incrementalResult: IncrementalResult = {}; - if (!asyncPayloadRecord.isCompleted) { - continue; - } - exeContext.subsequentPayloads.delete(asyncPayloadRecord); - if (isStreamPayload(asyncPayloadRecord)) { - const items = asyncPayloadRecord.items; - if (asyncPayloadRecord.isCompletedIterator) { - // async iterable resolver just finished but there may be pending payloads - continue; - } - (incrementalResult as IncrementalStreamResult).items = items; - } else { - const data = asyncPayloadRecord.data; - (incrementalResult as IncrementalDeferResult).data = data ?? null; - } + streamItemQueue.push(currentStreamItem); - incrementalResult.path = asyncPayloadRecord.path; - if (asyncPayloadRecord.label) { - incrementalResult.label = asyncPayloadRecord.label; - } - if (asyncPayloadRecord.errors.length > 0) { - incrementalResult.errors = asyncPayloadRecord.errors; + iteration = iterator.next(); + currentIndex = initialIndex + 1; } - incrementalResults.push(incrementalResult); - } - return incrementalResults; -} -function yieldSubsequentPayloads( - exeContext: ExecutionContext, -): AsyncGenerator { - let isDone = false; + streamItemQueue.push(new BoxedPromiseOrValue({ path: streamPath })); - const abortPromise = new Promise((_, reject) => { - exeContext.signal?.addEventListener('abort', () => { - isDone = true; - reject(exeContext.signal?.reason); - }); - }); - - async function next(): Promise> { - if (isDone) { - return { value: undefined, done: true }; - } + return firstStreamItem.value; + }; - await Promise.race([ - abortPromise, - ...Array.from(exeContext.subsequentPayloads).map(p => p.promise), - ]); + streamItemQueue.push( + enableEarlyExecution + ? new BoxedPromiseOrValue(Promise.resolve().then(firstExecutor)) + : () => new BoxedPromiseOrValue(firstExecutor()), + ); - if (isDone) { - // a different call to next has exhausted all payloads - return { value: undefined, done: true }; - } + return streamItemQueue; +} - const incremental = getCompletedIncrementalResults(exeContext); - const hasNext = exeContext.subsequentPayloads.size > 0; +function buildAsyncStreamItemQueue( + initialIndex: number, + streamPath: Path, + asyncIterator: AsyncIterator, + exeContext: ExecutionContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): Array { + const streamItemQueue: Array = []; + const executor = () => + getNextAsyncStreamItemResult( + streamItemQueue, + streamPath, + initialIndex, + asyncIterator, + exeContext, + fieldGroup, + info, + itemType, + ); - if (!incremental.length && hasNext) { - return next(); - } + streamItemQueue.push( + exeContext.enableEarlyExecution + ? new BoxedPromiseOrValue(executor()) + : () => new BoxedPromiseOrValue(executor()), + ); - if (!hasNext) { - isDone = true; - } + return streamItemQueue; +} +async function getNextAsyncStreamItemResult( + streamItemQueue: Array, + streamPath: Path, + index: number, + asyncIterator: AsyncIterator, + exeContext: ExecutionContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): Promise { + let iteration; + try { + iteration = await asyncIterator.next(); + } catch (error) { return { - value: incremental.length ? 
{ incremental, hasNext } : { hasNext }, - done: false, + path: streamPath, + errors: [locatedError(error, toNodes(fieldGroup), pathToArray(streamPath))], }; } - function returnStreamIterators() { - const promises: Array>> = []; - exeContext.subsequentPayloads.forEach(asyncPayloadRecord => { - if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) { - promises.push(asyncPayloadRecord.iterator.return()); - } - }); - return Promise.all(promises); + if (iteration.done) { + return { path: streamPath }; } - return { - [Symbol.asyncIterator]() { - return this; - }, - next, - async return(): Promise> { - await returnStreamIterators(); - isDone = true; - return { value: undefined, done: true }; - }, - async throw(error?: unknown): Promise> { - await returnStreamIterators(); - isDone = true; - return Promise.reject(error); - }, - }; -} + const itemPath = addPath(streamPath, index, undefined); -class DeferredFragmentRecord { - type: 'defer'; - errors: Array; - label: string | undefined; - path: Array; - promise: Promise; - data: Record | null; - parentContext: AsyncPayloadRecord | undefined; - isCompleted: boolean; - _exeContext: ExecutionContext; - _resolve?: (arg: MaybePromise | null>) => void; - constructor(opts: { - label: string | undefined; - path: Path | undefined; - parentContext: AsyncPayloadRecord | undefined; - exeContext: ExecutionContext; - }) { - this.type = 'defer'; - this.label = opts.label; - this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.errors = []; - this._exeContext = opts.exeContext; - this._exeContext.subsequentPayloads.add(this); - this.isCompleted = false; - this.data = null; - this.promise = new Promise | null>(resolve => { - this._resolve = MaybePromise => { - resolve(MaybePromise); - }; - }).then(data => { - this.data = data; - this.isCompleted = true; - }); - } + const result = completeStreamItem( + streamPath, + itemPath, + iteration.value, + exeContext, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, + info, + itemType, + ); - addData(data: MaybePromise | null>) { - const parentData = this.parentContext?.promise; - if (parentData) { - this._resolve?.(parentData.then(() => data)); - return; - } - this._resolve?.(data); - } + const executor = () => + getNextAsyncStreamItemResult( + streamItemQueue, + streamPath, + index, + asyncIterator, + exeContext, + fieldGroup, + info, + itemType, + ); + + streamItemQueue.push( + exeContext.enableEarlyExecution + ? 
new BoxedPromiseOrValue(executor()) + : () => new BoxedPromiseOrValue(executor()), + ); + + return result; } -class StreamRecord { - type: 'stream'; - errors: Array; - label: string | undefined; - path: Array; - items: Array | null; - promise: Promise; - parentContext: AsyncPayloadRecord | undefined; - iterator: AsyncIterator | undefined; - isCompletedIterator?: boolean; - isCompleted: boolean; - _exeContext: ExecutionContext; - _resolve?: (arg: MaybePromise | null>) => void; - constructor(opts: { - label: string | undefined; - path: Path | undefined; - iterator?: AsyncIterator; - parentContext: AsyncPayloadRecord | undefined; - exeContext: ExecutionContext; - }) { - this.type = 'stream'; - this.items = null; - this.label = opts.label; - this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.iterator = opts.iterator; - this.errors = []; - this._exeContext = opts.exeContext; - this._exeContext.subsequentPayloads.add(this); - this.isCompleted = false; - this.items = null; - this.promise = new Promise | null>(resolve => { - this._resolve = MaybePromise => { - resolve(MaybePromise); - }; - }).then(items => { - this.items = items; - this.isCompleted = true; - }); +function completeStreamItem( + streamPath: Path, + itemPath: Path, + item: unknown, + exeContext: ExecutionContext, + incrementalContext: IncrementalContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): MaybePromise { + if (isPromise(item)) { + return completePromisedValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + new Map(), + ).then( + resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), + error => ({ + path: streamPath, + errors: withError(incrementalContext.errors, error), + }), + ); } - addItems(items: MaybePromise | null>) { - const parentData = this.parentContext?.promise; - if (parentData) { - this._resolve?.(parentData.then(() => items)); - return; + let result: MaybePromise; + try { + try { + result = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + new Map(), + ); + } catch (rawError) { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + result = null; } - this._resolve?.(items); + } catch (error) { + return { + path: streamPath, + errors: withError(incrementalContext.errors, error as GraphQLError), + }; } - setIsCompletedIterator() { - this.isCompletedIterator = true; + if (isPromise(result)) { + return result + .then(undefined, rawError => { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + return null; + }) + .then( + resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), + error => ({ + path: streamPath, + errors: withError(incrementalContext.errors, error), + }), + ); } + + return buildStreamItemResult(incrementalContext, streamPath, result); } -type AsyncPayloadRecord = DeferredFragmentRecord | StreamRecord; +function buildStreamItemResult( + incrementalContext: IncrementalContext, + streamPath: Path, + item: unknown, +): StreamItemResult { + const { errors, incrementalDataRecords } = incrementalContext; + if (incrementalDataRecords === undefined) { + return { + path: streamPath, + item, + errors: errors === undefined ? 
undefined : [...errors.values()], + incrementalDataRecords, + }; + } + + if (errors === undefined) { + return { + path: streamPath, + item, + errors, + incrementalDataRecords, + }; + } -function isStreamPayload(asyncPayload: AsyncPayloadRecord): asyncPayload is StreamRecord { - return asyncPayload.type === 'stream'; + return { + path: streamPath, + item, + errors: [...errors.values()], + incrementalDataRecords: filterIncrementalDataRecords( + streamPath, + errors, + incrementalDataRecords, + ), + }; } - /** * This method looks up the field on the given type definition. * It has special casing for the three introspection fields, diff --git a/packages/executor/src/execution/getBySet.ts b/packages/executor/src/execution/getBySet.ts new file mode 100644 index 00000000000..4ddabd30021 --- /dev/null +++ b/packages/executor/src/execution/getBySet.ts @@ -0,0 +1,13 @@ +import { isSameSet } from './isSameSet.js'; + +export function getBySet( + map: ReadonlyMap, U>, + setToMatch: ReadonlySet, +): U | undefined { + for (const set of map.keys()) { + if (isSameSet(set, setToMatch)) { + return map.get(set); + } + } + return undefined; +} diff --git a/packages/executor/src/execution/isSameSet.ts b/packages/executor/src/execution/isSameSet.ts new file mode 100644 index 00000000000..f2837d848cd --- /dev/null +++ b/packages/executor/src/execution/isSameSet.ts @@ -0,0 +1,11 @@ +export function isSameSet(setA: ReadonlySet, setB: ReadonlySet): boolean { + if (setA.size !== setB.size) { + return false; + } + for (const item of setA) { + if (!setB.has(item)) { + return false; + } + } + return true; +} diff --git a/packages/executor/src/execution/promiseWithResolvers.ts b/packages/executor/src/execution/promiseWithResolvers.ts new file mode 100644 index 00000000000..eb533e7b4cf --- /dev/null +++ b/packages/executor/src/execution/promiseWithResolvers.ts @@ -0,0 +1,20 @@ +import { MaybePromise } from '@graphql-tools/utils'; + +/** + * Based on Promise.withResolvers proposal + * https://github.com/tc39/proposal-promise-with-resolvers + */ +export function promiseWithResolvers(): { + promise: Promise; + resolve: (value: T | MaybePromise) => void; + reject: (reason?: any) => void; +} { + // these are assigned synchronously within the Promise constructor + let resolve!: (value: T | MaybePromise) => void; + let reject!: (reason?: any) => void; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve, reject }; +} diff --git a/packages/executor/src/execution/types.ts b/packages/executor/src/execution/types.ts new file mode 100644 index 00000000000..c65a4805b4f --- /dev/null +++ b/packages/executor/src/execution/types.ts @@ -0,0 +1,287 @@ +import type { GraphQLError, GraphQLFormattedError } from 'graphql'; +import type { Path } from '@graphql-tools/utils'; +import type { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; + +/** + * The result of GraphQL execution. + * + * - `errors` is included when any errors occurred as a non-empty array. + * - `data` is the result of a successful execution of the query. + * - `hasNext` is true if a future payload is expected. + * - `extensions` is reserved for adding non-standard properties. + * - `incremental` is a list of the results from defer/stream directives. 
+ */ +export interface SingularExecutionResult { + errors?: ReadonlyArray; + data?: TData | null; + extensions?: TExtensions; +} + +export interface FormattedExecutionResult< + TData = Record, + TExtensions = Record, +> { + errors?: ReadonlyArray; + data?: TData | null; + extensions?: TExtensions; +} + +export interface IncrementalExecutionResults< + TData = unknown, + TExtensions = Record, +> { + initialResult: InitialIncrementalExecutionResult; + subsequentResults: AsyncGenerator< + SubsequentIncrementalExecutionResult, + void, + void + >; +} + +export interface InitialIncrementalExecutionResult< + TData = Record, + TExtensions = Record, +> extends SingularExecutionResult { + data: TData; + pending: ReadonlyArray; + hasNext: true; + extensions?: TExtensions; +} + +export interface FormattedInitialIncrementalExecutionResult< + TData = Record, + TExtensions = Record, +> extends FormattedExecutionResult { + data: TData; + pending: ReadonlyArray; + hasNext: boolean; + extensions?: TExtensions; +} + +export interface SubsequentIncrementalExecutionResult< + TData = unknown, + TExtensions = Record, +> { + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + hasNext: boolean; + extensions?: TExtensions; +} + +export interface FormattedSubsequentIncrementalExecutionResult< + TData = unknown, + TExtensions = Record, +> { + hasNext: boolean; + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + extensions?: TExtensions; +} + +interface BareDeferredGroupedFieldSetResult> { + errors?: ReadonlyArray; + data: TData; +} + +export interface IncrementalDeferResult< + TData = Record, + TExtensions = Record, +> extends BareDeferredGroupedFieldSetResult { + id: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +export interface FormattedIncrementalDeferResult< + TData = Record, + TExtensions = Record, +> { + errors?: ReadonlyArray; + data: TData; + id: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +interface BareStreamItemsResult> { + errors?: ReadonlyArray; + items: TData; +} + +export interface IncrementalStreamResult< + TData = ReadonlyArray, + TExtensions = Record, +> extends BareStreamItemsResult { + id: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +export interface FormattedIncrementalStreamResult< + TData = Array, + TExtensions = Record, +> { + errors?: ReadonlyArray; + items: TData; + id: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +export type IncrementalResult> = + | IncrementalDeferResult + | IncrementalStreamResult; + +export type FormattedIncrementalResult> = + | FormattedIncrementalDeferResult + | FormattedIncrementalStreamResult; + +export interface PendingResult { + id: string; + path: ReadonlyArray; + label?: string; +} + +export interface CompletedResult { + id: string; + errors?: ReadonlyArray; +} + +export interface FormattedCompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + +export function isDeferredGroupedFieldSetRecord( + incrementalDataRecord: IncrementalDataRecord, +): incrementalDataRecord is DeferredGroupedFieldSetRecord { + return 'deferredFragmentRecords' in incrementalDataRecord; +} + +export type DeferredGroupedFieldSetResult = + | ReconcilableDeferredGroupedFieldSetResult + | NonReconcilableDeferredGroupedFieldSetResult; + +export function isDeferredGroupedFieldSetResult( + subsequentResult: DeferredGroupedFieldSetResult | StreamItemsResult, +): subsequentResult is 
DeferredGroupedFieldSetResult { + return 'deferredGroupedFieldSetRecord' in subsequentResult; +} + +export interface ReconcilableDeferredGroupedFieldSetResult { + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord; + path: Array; + result: BareDeferredGroupedFieldSetResult; + incrementalDataRecords: ReadonlyArray | undefined; + errors?: never; +} + +interface NonReconcilableDeferredGroupedFieldSetResult { + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord; + path: Array; + errors: ReadonlyArray; + result?: never; +} + +export function isNonReconcilableDeferredGroupedFieldSetResult( + deferredGroupedFieldSetResult: DeferredGroupedFieldSetResult, +): deferredGroupedFieldSetResult is NonReconcilableDeferredGroupedFieldSetResult { + return deferredGroupedFieldSetResult.errors !== undefined; +} + +export interface DeferredGroupedFieldSetRecord { + path: Path | undefined; + deferredFragmentRecords: ReadonlyArray; + result: + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue); +} + +export type SubsequentResultRecord = DeferredFragmentRecord | StreamRecord; + +/** @internal */ +export class DeferredFragmentRecord { + path: Path | undefined; + label: string | undefined; + id?: string | undefined; + parent: DeferredFragmentRecord | undefined; + deferredGroupedFieldSetRecords: Set; + reconcilableResults: Set; + children: Set; + pending: boolean; + fns: Array<() => void>; + + constructor( + path: Path | undefined, + label: string | undefined, + parent: DeferredFragmentRecord | undefined, + ) { + this.path = path; + this.label = label; + this.parent = parent; + this.deferredGroupedFieldSetRecords = new Set(); + this.reconcilableResults = new Set(); + this.children = new Set(); + this.pending = false; + this.fns = []; + } + + onPending(fn: () => void): void { + this.fns.push(fn); + } + + setAsPending(): void { + this.pending = true; + for (const fn of this.fns) { + fn(); + } + } +} + +export function isDeferredFragmentRecord( + subsequentResultRecord: SubsequentResultRecord, +): subsequentResultRecord is DeferredFragmentRecord { + return subsequentResultRecord instanceof DeferredFragmentRecord; +} + +export interface StreamItemResult { + path: Path; + item?: unknown; + incrementalDataRecords?: ReadonlyArray | undefined; + errors?: ReadonlyArray | undefined; +} + +export type StreamItemRecord = + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue); + +export interface StreamRecord { + path: Path; + label: string | undefined; + id?: string | undefined; + streamItemQueue: Array; +} + +export interface StreamItemsResult { + streamRecord: StreamRecord; + result?: BareStreamItemsResult | undefined; + incrementalDataRecords?: ReadonlyArray | undefined; + errors?: ReadonlyArray | undefined; +} + +export interface CancellableStreamRecord extends StreamRecord { + earlyReturn: () => Promise; +} + +export function isCancellableStreamRecord( + subsequentResultRecord: SubsequentResultRecord, +): subsequentResultRecord is CancellableStreamRecord { + return 'earlyReturn' in subsequentResultRecord; +} + +export type IncrementalDataRecord = DeferredGroupedFieldSetRecord | StreamRecord; + +export type IncrementalDataRecordResult = DeferredGroupedFieldSetResult | StreamItemsResult; diff --git a/packages/federation/test/__snapshots__/defer.test.ts.snap b/packages/federation/test/__snapshots__/defer.test.ts.snap index c399c70c004..2840206a9e5 100644 --- a/packages/federation/test/__snapshots__/defer.test.ts.snap +++ b/packages/federation/test/__snapshots__/defer.test.ts.snap @@ -16,8 +16,39 @@ 
exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [], + }, + { + "id": "1", + "path": [ + "users", + 0, + ], + }, + { + "id": "2", + "path": [ + "users", + 1, + ], + }, + ], }, { + "completed": [ + { + "id": "0", + }, + { + "id": "3", + }, + { + "id": "4", + }, + ], "hasNext": true, "incremental": [ { @@ -51,12 +82,24 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [], + "id": "0", }, { "data": { "name": "Ada Lovelace", }, + "id": "3", + }, + { + "data": { + "name": "Alan Turing", + }, + "id": "4", + }, + ], + "pending": [ + { + "id": "3", "path": [ "posts", 0, @@ -64,9 +107,7 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "data": { - "name": "Alan Turing", - }, + "id": "4", "path": [ "posts", 1, @@ -76,7 +117,27 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "hasNext": true, + "completed": [ + { + "id": "1", + }, + { + "id": "2", + }, + { + "id": "5", + }, + { + "id": "6", + }, + { + "id": "7", + }, + { + "id": "8", + }, + ], + "hasNext": false, "incremental": [ { "data": { @@ -89,10 +150,7 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [ - "users", - 0, - ], + "id": "1", }, { "data": { @@ -105,20 +163,36 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [ - "users", - 1, - ], + "id": "2", }, - ], - }, - { - "hasNext": false, - "incremental": [ { "data": { "title": "Hello, World!", }, + "id": "5", + }, + { + "data": { + "name": "Ada Lovelace", + }, + "id": "6", + }, + { + "data": { + "title": "My Story", + }, + "id": "7", + }, + { + "data": { + "name": "Alan Turing", + }, + "id": "8", + }, + ], + "pending": [ + { + "id": "5", "path": [ "users", 0, @@ -127,32 +201,26 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "data": { - "title": "My Story", - }, + "id": "6", "path": [ "users", - 1, + 0, "posts", 0, + "author", ], }, { - "data": { - "name": "Ada Lovelace", - }, + "id": "7", "path": [ "users", - 0, + 1, "posts", 0, - "author", ], }, { - "data": { - "name": "Alan Turing", - }, + "id": "8", "path": [ "users", 1, @@ -171,8 +239,23 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` { "data": {}, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [], + }, + { + "id": "1", + "path": [], + }, + ], }, { + "completed": [ + { + "id": "0", + }, + ], "hasNext": true, "incremental": [ { @@ -208,11 +291,16 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` }, ], }, - "path": [], + "id": "0", }, ], }, { + "completed": [ + { + "id": "1", + }, + ], "hasNext": false, "incremental": [ { @@ -248,7 +336,7 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` }, ], }, - "path": [], + "id": "1", }, ], }, diff --git a/packages/federation/test/defer.test.ts b/packages/federation/test/defer.test.ts index 69a31d1c07a..1d47a924a88 100644 --- a/packages/federation/test/defer.test.ts +++ b/packages/federation/test/defer.test.ts @@ -1,3 +1,4 @@ +import { exec } from 'child_process'; import { inspect } from 'util'; import { GraphQLSchema, parse, print } from 'graphql'; import _ from 'lodash'; @@ -5,45 +6,14 @@ import { IntrospectAndCompose, LocalGraphQLDataSource } from '@apollo/gateway'; import { buildSubgraphSchema } from '@apollo/subgraph'; import { createDefaultExecutor } from '@graphql-tools/delegate'; import { normalizedExecutor } from 
'@graphql-tools/executor'; -import { ExecutionResult, mergeDeep } from '@graphql-tools/utils'; +import { ExecutionResult, mergeIncrementalResult } from '@graphql-tools/utils'; import { assertAsyncIterable } from '../../loaders/url/tests/test-utils'; import { getStitchedSchemaFromSupergraphSdl } from '../src/supergraph'; function mergeDeferredResults(values: ExecutionResult[]) { const result: ExecutionResult = {}; for (const value of values) { - if (value.data) { - if (!result.data) { - result.data = value.data; - } else { - result.data = mergeDeep([result.data, value.data]); - } - } - if (value.errors) { - result.errors = result.errors || []; - result.errors = [...result.errors, ...value.errors]; - } - if (value.incremental) { - for (const incremental of value.incremental) { - if (incremental.path) { - result.data = result.data || {}; - if (!incremental.path.length) { - result.data = mergeDeep([result.data, incremental.data]); - } else { - const existingData = _.get(result.data, incremental.path); - if (!existingData) { - _.set(result.data, incremental.path, incremental.data); - } else { - _.set(result.data, incremental.path, mergeDeep([existingData, incremental.data])); - } - } - } - if (incremental.errors) { - result.errors = result.errors || []; - result.errors = [...result.errors, ...incremental.errors]; - } - } - } + mergeIncrementalResult({ incrementalResult: value, executionResult: result }); } return result; } diff --git a/packages/utils/package.json b/packages/utils/package.json index eb6b12f5e4c..d2b60d75aa6 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -53,11 +53,13 @@ "dependencies": { "@graphql-typed-document-node/core": "^3.1.1", "cross-inspect": "1.0.0", + "dlv": "^1.1.3", "dset": "^3.1.2", "tslib": "^2.4.0" }, "devDependencies": { "@types/dateformat": "3.0.1", + "@types/dlv": "^1.1.4", "dateformat": "4.6.3", "graphql-scalars": "1.23.0" }, diff --git a/packages/utils/src/Interfaces.ts b/packages/utils/src/Interfaces.ts index 552cdb2da5a..9617b613d6d 100644 --- a/packages/utils/src/Interfaces.ts +++ b/packages/utils/src/Interfaces.ts @@ -66,6 +66,9 @@ export interface ExecutionResult { label?: string; path?: ReadonlyArray; items?: TData | null; + id?: string; + pending?: ReadonlyArray<{ id: string; path: ReadonlyArray }>; + completed?: ReadonlyArray<{ id: string; errors?: ReadonlyArray }>; } export interface ExecutionRequest< diff --git a/packages/utils/src/mergeIncrementalResult.ts b/packages/utils/src/mergeIncrementalResult.ts index 3851fddd0dc..16fe4f09d63 100644 --- a/packages/utils/src/mergeIncrementalResult.ts +++ b/packages/utils/src/mergeIncrementalResult.ts @@ -1,7 +1,10 @@ +import dlv from 'dlv'; import { dset } from 'dset/merge'; import { GraphQLError } from 'graphql'; import { ExecutionResult } from './Interfaces.js'; +const pathsMap = new WeakMap>>(); + export function mergeIncrementalResult({ incrementalResult, executionResult, @@ -9,17 +12,56 @@ export function mergeIncrementalResult({ incrementalResult: ExecutionResult; executionResult: ExecutionResult; }) { - const path = ['data', ...(incrementalResult.path ?? [])]; + let path: ReadonlyArray | undefined = [ + 'data', + ...(incrementalResult.path ?? 
[]), + ]; + + for (const result of [executionResult, incrementalResult]) { + if (result.pending) { + let paths = pathsMap.get(executionResult); + if (paths === undefined) { + paths = new Map(); + pathsMap.set(executionResult, paths); + } + + for (const { id, path } of result.pending) { + paths.set(id, ['data', ...path]); + } + } + } if (incrementalResult.items) { - for (const item of incrementalResult.items) { - dset(executionResult, path, item); - // Increment the last path segment (the array index) to merge the next item at the next index - (path[path.length - 1] as number)++; + if (incrementalResult.id) { + const id = incrementalResult.id; + + path = pathsMap.get(executionResult)?.get(id); + if (path === undefined) { + throw new Error('Invalid incremental delivery format.'); + } + + const list = dlv(executionResult, path as Array); + list.push(...incrementalResult.items); + } else { + const path = ['data', ...(incrementalResult.path ?? [])]; + for (const item of incrementalResult.items) { + dset(executionResult, path, item); + // Increment the last path segment (the array index) to merge the next item at the next index + (path[path.length - 1] as number)++; + } } } if (incrementalResult.data) { + if (incrementalResult.id) { + const id = incrementalResult.id; + if (id !== undefined) { + path = pathsMap.get(executionResult)?.get(id); + if (path === undefined) { + throw new Error('Invalid incremental delivery format.'); + } + } + } dset(executionResult, path, incrementalResult.data); } @@ -40,4 +82,16 @@ export function mergeIncrementalResult({ }); }); } + + if (incrementalResult.completed) { + // Remove tracking and add additional errors + for (const { id, errors } of incrementalResult.completed) { + pathsMap.get(executionResult)?.delete(id); + + if (errors) { + executionResult.errors = executionResult.errors || []; + (executionResult.errors as GraphQLError[]).push(...errors); + } + } + } } diff --git a/packages/utils/tests/mergeIncrementalResult.spec.ts b/packages/utils/tests/mergeIncrementalResult.spec.ts index 0313357a47b..378cfa34942 100644 --- a/packages/utils/tests/mergeIncrementalResult.spec.ts +++ b/packages/utils/tests/mergeIncrementalResult.spec.ts @@ -20,6 +20,15 @@ describe('mergeIncrementalResult', () => { expect(executionResult).toEqual({ data: { user: { age: 42, name: 'John' } } }); }); + it('should deep merge data with basic path with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + const incrementalResult = { incremental: [{ id: '0', data: { user: { age: 42 } } }] }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ user: { age: 42, name: 'John' } }); + }); + it('should merge data at path', () => { const executionResult = { data: { user: { name: 'John' } } }; const incrementalResult = { path: ['user'], data: { age: 42 } }; @@ -29,6 +38,18 @@ describe('mergeIncrementalResult', () => { expect(executionResult).toEqual({ data: { user: { age: 42, name: 'John' } } }); }); + it('should merge data at path with new format', () => { + const executionResult = { + data: { user: { name: 'John' } }, + pending: [{ id: '0', path: ['user'] }], + }; + const incrementalResult = { incremental: [{ id: '0', data: { age: 42 } }] }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ user: { age: 42, name: 'John' } }); + }); + it('should push items', () => { const executionResult = { data: { user: { name: 
'John' } } }; const incrementalResult = { @@ -69,6 +90,27 @@ describe('mergeIncrementalResult', () => { }); }); + it('should push items at path with new format', () => { + const executionResult = { + data: { + user: { name: 'John', comments: ['comment 1', 'comment 2'] }, + }, + pending: [{ id: '0', path: ['user', 'comments'] }], + }; + const incrementalResult = { + incremental: [{ id: '0', items: ['comment 3', 'comment 4'] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ + user: { + name: 'John', + comments: ['comment 1', 'comment 2', 'comment 3', 'comment 4'], + }, + }); + }); + it('should merge items at path', () => { const executionResult = { data: { @@ -113,6 +155,38 @@ describe('mergeIncrementalResult', () => { }); }); + it('should add errors with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + const incrementalResult = { + incremental: [ + { id: '0', errors: [new GraphQLError('error 1'), new GraphQLError('error 2')] }, + ], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1'), new GraphQLError('error 2')], + pending: [{ id: '0', path: [] }], + }); + }); + + it('should add completion errors with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + const incrementalResult = { + completed: [{ id: '0', errors: [new GraphQLError('error 1'), new GraphQLError('error 2')] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1'), new GraphQLError('error 2')], + pending: [{ id: '0', path: [] }], + }); + }); + it('should keep errors', () => { const executionResult = { errors: [new GraphQLError('error 1')] }; const incrementalResult = { data: { user: { name: 'John' } }, path: [] }; @@ -125,6 +199,24 @@ describe('mergeIncrementalResult', () => { }); }); + it('should keep errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }; + const incrementalResult = { + incremental: [{ id: '0', data: { user: { name: 'John' } }, path: [] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }); + }); + it('should merge errors', () => { const executionResult = { errors: [new GraphQLError('error 1')] }; @@ -143,6 +235,52 @@ describe('mergeIncrementalResult', () => { }); }); + it('should merge errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }; + + const incrementalResult = { + incremental: [ + { id: '0', errors: [new GraphQLError('error 2'), new GraphQLError('error 3')] }, + ], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + errors: [ + new GraphQLError('error 1'), + new GraphQLError('error 2'), + new GraphQLError('error 3'), + ], + pending: [{ id: '0', path: [] }], + }); + }); + + it('should merge completion errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', 
path: [] }], + }; + + const incrementalResult = { + completed: [{ id: '0', errors: [new GraphQLError('error 2'), new GraphQLError('error 3')] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + errors: [ + new GraphQLError('error 1'), + new GraphQLError('error 2'), + new GraphQLError('error 3'), + ], + pending: [{ id: '0', path: [] }], + }); + }); + it('should keep extensions', () => { const exeuctionResult = { data: { user: { name: 'John' } }, extensions: { foo: 'bar' } }; const incrementalResult = { data: { user: { age: 42 } }, path: [] }; diff --git a/yarn.lock b/yarn.lock index 7105d2ae675..9e315bfd0d7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2652,6 +2652,11 @@ dependencies: "@types/ms" "*" +"@types/dlv@^1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@types/dlv/-/dlv-1.1.4.tgz#e92f76b78adf2b118b5a807956f36434baefbab0" + integrity sha512-m8KmImw4Jt+4rIgupwfivrWEOnj1LzkmKkqbh075uG13eTQ1ZxHWT6T0vIdSQhLIjQCiR0n0lZdtyDOPO1x2Mw== + "@types/eslint-scope@^3.7.3": version "3.7.3" resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224"
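As an illustrative sketch of the new response format exercised by the tests above: a client holding the initial result can fold subsequent payloads in with `mergeIncrementalResult`. The payload values below are invented for illustration; the call shapes follow the unit tests in `mergeIncrementalResult.spec.ts`.

```ts
import { ExecutionResult, mergeIncrementalResult } from '@graphql-tools/utils';

// Initial payload: `pending` registers the ids that later `incremental` and
// `completed` entries refer to (paths are relative to `data`).
const executionResult: ExecutionResult = {
  data: { user: { name: 'John', comments: ['comment 1'] } },
  pending: [
    { id: '0', path: ['user'] },
    { id: '1', path: ['user', 'comments'] },
  ],
};

// A deferred object payload: deep-merged into `data.user` via the path
// registered for id '0'.
mergeIncrementalResult({
  executionResult,
  incrementalResult: { incremental: [{ id: '0', data: { age: 42 } }] },
});

// A streamed list payload: items are appended to `data.user.comments` (id '1'),
// and `completed` stops tracking both ids (surfacing any completion errors).
mergeIncrementalResult({
  executionResult,
  incrementalResult: {
    incremental: [{ id: '1', items: ['comment 2'] }],
    completed: [{ id: '0' }, { id: '1' }],
  },
});

// executionResult.data is now:
// { user: { name: 'John', age: 42, comments: ['comment 1', 'comment 2'] } }
```

Because later payloads reference an `id` instead of repeating a full path, the helper records each pending id's path when the initial (or a subsequent) payload announces it, and discards that bookkeeping again once a matching `completed` entry arrives.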