From 0c48fef4085ca02529503689829837f54b5d29c5 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Mon, 24 Apr 2023 16:25:28 +0300 Subject: [PATCH 1/6] incremental delivery: without branching, with deduplication --- src/execution/IncrementalPublisher.ts | 499 ++++++++++---- src/execution/__tests__/defer-test.ts | 485 ++++++------- src/execution/__tests__/mutations-test.ts | 4 +- src/execution/__tests__/stream-test.ts | 349 +++++++--- src/execution/collectFields.ts | 380 ++++++++--- src/execution/execute.ts | 638 ++++++++++++------ src/jsutils/OrderedSet.ts | 93 +++ src/jsutils/__tests__/OrderedSet-test.ts | 34 + .../rules/SingleFieldSubscriptionsRule.ts | 18 +- 9 files changed, 1670 insertions(+), 830 deletions(-) create mode 100644 src/jsutils/OrderedSet.ts create mode 100644 src/jsutils/__tests__/OrderedSet-test.ts diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index ac1f8f0d72..8e40c90819 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -8,6 +8,13 @@ import type { GraphQLFormattedError, } from '../error/GraphQLError.js'; +import type { GroupedFieldSet } from './collectFields.js'; + +interface IncrementalUpdate> { + incremental: ReadonlyArray>; + completed: ReadonlyArray; +} + /** * The result of GraphQL execution. * @@ -36,7 +43,7 @@ export interface FormattedExecutionResult< } export interface ExperimentalIncrementalExecutionResults< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > { initialResult: InitialIncrementalExecutionResult; @@ -51,8 +58,8 @@ export interface InitialIncrementalExecutionResult< TData = ObjMap, TExtensions = ObjMap, > extends ExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; + data: TData; + hasNext: true; extensions?: TExtensions; } @@ -60,26 +67,26 @@ export interface FormattedInitialIncrementalExecutionResult< TData = ObjMap, TExtensions = ObjMap, > extends FormattedExecutionResult { + data: TData; hasNext: boolean; - incremental?: ReadonlyArray>; extensions?: TExtensions; } export interface SubsequentIncrementalExecutionResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, -> { +> extends Partial> { hasNext: boolean; - incremental?: ReadonlyArray>; extensions?: TExtensions; } export interface FormattedSubsequentIncrementalExecutionResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > { hasNext: boolean; incremental?: ReadonlyArray>; + completed?: ReadonlyArray; extensions?: TExtensions; } @@ -88,9 +95,8 @@ export interface IncrementalDeferResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - data?: TData | null; + data: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -99,9 +105,8 @@ export interface FormattedIncrementalDeferResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - data?: TData | null; + data: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -110,9 +115,8 @@ export interface IncrementalStreamResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - items?: TData | null; + items: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } @@ -121,44 +125,52 @@ export interface FormattedIncrementalStreamResult< TExtensions = ObjMap, > { errors?: ReadonlyArray; - items?: TData | null; + items: TData; path?: ReadonlyArray; - label?: string; extensions?: TExtensions; } -export type IncrementalResult< - TData = ObjMap, - TExtensions = ObjMap, -> = +export type IncrementalResult> = | IncrementalDeferResult | 
IncrementalStreamResult; export type FormattedIncrementalResult< - TData = ObjMap, + TData = unknown, TExtensions = ObjMap, > = | FormattedIncrementalDeferResult | FormattedIncrementalStreamResult; +export interface CompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + +export interface FormattedCompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + /** * This class is used to publish incremental results to the client, enabling semi-concurrent * execution while preserving result order. * * The internal publishing state is managed as follows: * - * '_released': the set of Incremental Data records that are ready to be sent to the client, + * '_released': the set of Subsequent Result records that are ready to be sent to the client, * i.e. their parents have completed and they have also completed. * - * `_pending`: the set of Incremental Data records that are definitely pending, i.e. their - * parents have completed so that they can no longer be filtered. This includes all Incremental - * Data records in `released`, as well as Incremental Data records that have not yet completed. + * `_pending`: the set of Subsequent Result records that are definitely pending, i.e. their + * parents have completed so that they can no longer be filtered. This includes all Subsequent + * Result records in `released`, as well as the records that have not yet completed. * * @internal */ export class IncrementalPublisher { - private _released: Set; - private _pending: Set; + private _released: Set; + private _pending: Set; // these are assigned within the Promise executor called synchronously within the constructor private _signalled!: Promise; @@ -170,60 +182,98 @@ export class IncrementalPublisher { this._reset(); } - prepareInitialResultRecord(): InitialResultRecord { - return { - errors: [], - children: new Set(), - }; + reportNewDeferFragmentRecord( + deferredFragmentRecord: DeferredFragmentRecord, + parentIncrementalResultRecord: + | InitialResultRecord + | DeferredFragmentRecord + | StreamItemsRecord, + ): void { + parentIncrementalResultRecord.children.add(deferredFragmentRecord); } - prepareNewDeferredFragmentRecord(opts: { - label: string | undefined; - path: Path | undefined; - parentContext: IncrementalDataRecord; - }): DeferredFragmentRecord { - const deferredFragmentRecord = new DeferredFragmentRecord(opts); - - const parentContext = opts.parentContext; - parentContext.children.add(deferredFragmentRecord); - - return deferredFragmentRecord; + reportNewDeferredGroupedFieldSetRecord( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + ): void { + for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { + deferredFragmentRecord._pending.add(deferredGroupedFieldSetRecord); + deferredFragmentRecord.deferredGroupedFieldSetRecords.add( + deferredGroupedFieldSetRecord, + ); + } } - prepareNewStreamItemsRecord(opts: { - label: string | undefined; - path: Path | undefined; - asyncIterator?: AsyncIterator; - parentContext: IncrementalDataRecord; - }): StreamItemsRecord { - const streamItemsRecord = new StreamItemsRecord(opts); + reportNewStreamItemsRecord( + streamItemsRecord: StreamItemsRecord, + parentIncrementalDataRecord: IncrementalDataRecord, + ): void { + if (isDeferredGroupedFieldSetRecord(parentIncrementalDataRecord)) { + for (const parent of parentIncrementalDataRecord.deferredFragmentRecords) { + parent.children.add(streamItemsRecord); + } + } else { + 
parentIncrementalDataRecord.children.add(streamItemsRecord); + } + } - const parentContext = opts.parentContext; - parentContext.children.add(streamItemsRecord); + completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + data: ObjMap, + ): void { + deferredGroupedFieldSetRecord.data = data; + for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { + deferredFragmentRecord._pending.delete(deferredGroupedFieldSetRecord); + if (deferredFragmentRecord._pending.size === 0) { + this.completeDeferredFragmentRecord(deferredFragmentRecord); + } + } + } - return streamItemsRecord; + markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + error: GraphQLError, + ): void { + for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { + deferredFragmentRecord.errors.push(error); + this.completeDeferredFragmentRecord(deferredFragmentRecord); + } } completeDeferredFragmentRecord( deferredFragmentRecord: DeferredFragmentRecord, - data: ObjMap | null, ): void { - deferredFragmentRecord.data = data; - deferredFragmentRecord.isCompleted = true; this._release(deferredFragmentRecord); } completeStreamItemsRecord( streamItemsRecord: StreamItemsRecord, - items: Array | null, + items: Array, ) { streamItemsRecord.items = items; streamItemsRecord.isCompleted = true; this._release(streamItemsRecord); } + markErroredStreamItemsRecord( + streamItemsRecord: StreamItemsRecord, + error: GraphQLError, + ) { + streamItemsRecord.streamRecord.errors.push(error); + this.setIsFinalRecord(streamItemsRecord); + streamItemsRecord.isCompleted = true; + streamItemsRecord.streamRecord.earlyReturn?.().catch(() => { + // ignore error + }); + this._release(streamItemsRecord); + } + + setIsFinalRecord(streamItemsRecord: StreamItemsRecord) { + streamItemsRecord.isFinalRecord = true; + } + setIsCompletedAsyncIterator(streamItemsRecord: StreamItemsRecord) { streamItemsRecord.isCompletedAsyncIterator = true; + this.setIsFinalRecord(streamItemsRecord); } addFieldError( @@ -267,31 +317,31 @@ export class IncrementalPublisher { return { data: null, errors }; } - filter(nullPath: Path, erroringIncrementalDataRecord: IncrementalDataRecord) { + filter( + nullPath: Path | undefined, + erroringIncrementalDataRecord: IncrementalDataRecord, + ): void { const nullPathArray = pathToArray(nullPath); - const asyncIterators = new Set>(); + const streams = new Set(); - const descendants = this._getDescendants( - erroringIncrementalDataRecord.children, - ); + const children = this._getChildren(erroringIncrementalDataRecord); + const descendants = this._getDescendants(children); for (const child of descendants) { - if (!this._matchesPath(child.path, nullPathArray)) { + if (!this._nullsChildSubsequentResultRecord(child, nullPathArray)) { continue; } child.filtered = true; if (isStreamItemsRecord(child)) { - if (child.asyncIterator !== undefined) { - asyncIterators.add(child.asyncIterator); - } + streams.add(child.streamRecord); } } - asyncIterators.forEach((asyncIterator) => { - asyncIterator.return?.().catch(() => { + streams.forEach((stream) => { + stream.earlyReturn?.().catch(() => { // ignore error }); }); @@ -335,13 +385,17 @@ export class IncrementalPublisher { }; const returnStreamIterators = async (): Promise => { - const promises: Array>> = []; - this._pending.forEach((incrementalDataRecord) => { - if ( - isStreamItemsRecord(incrementalDataRecord) && - 
incrementalDataRecord.asyncIterator?.return - ) { - promises.push(incrementalDataRecord.asyncIterator.return()); + const streams = new Set(); + const descendants = this._getDescendants(this._pending); + for (const subsequentResultRecord of descendants) { + if (isStreamItemsRecord(subsequentResultRecord)) { + streams.add(subsequentResultRecord.streamRecord); + } + } + const promises: Array> = []; + streams.forEach((streamRecord) => { + if (streamRecord.earlyReturn) { + promises.push(streamRecord.earlyReturn()); } }); await Promise.all(promises); @@ -387,79 +441,162 @@ export class IncrementalPublisher { this._signalled = signalled; } - private _introduce(item: SubsequentDataRecord) { + private _introduce(item: SubsequentResultRecord) { this._pending.add(item); } - private _release(item: SubsequentDataRecord): void { + private _release(item: SubsequentResultRecord): void { if (this._pending.has(item)) { this._released.add(item); this._trigger(); } } - private _push(item: SubsequentDataRecord): void { + private _push(item: SubsequentResultRecord): void { this._released.add(item); this._pending.add(item); this._trigger(); } private _getIncrementalResult( - completedRecords: ReadonlySet, + completedRecords: ReadonlySet, ): SubsequentIncrementalExecutionResult | undefined { + const { incremental, completed } = this._processPending(completedRecords); + + const hasNext = this._pending.size > 0; + if (incremental.length === 0 && completed.length === 0 && hasNext) { + return undefined; + } + + const result: SubsequentIncrementalExecutionResult = { hasNext }; + if (incremental.length) { + result.incremental = incremental; + } + if (completed.length) { + result.completed = completed; + } + + return result; + } + + private _processPending( + completedRecords: ReadonlySet, + ): IncrementalUpdate { const incrementalResults: Array = []; - let encounteredCompletedAsyncIterator = false; - for (const incrementalDataRecord of completedRecords) { - const incrementalResult: IncrementalResult = {}; - for (const child of incrementalDataRecord.children) { + const completedResults: Array = []; + for (const subsequentResultRecord of completedRecords) { + for (const child of subsequentResultRecord.children) { if (child.filtered) { continue; } this._publish(child); } - if (isStreamItemsRecord(incrementalDataRecord)) { - const items = incrementalDataRecord.items; - if (incrementalDataRecord.isCompletedAsyncIterator) { + if (isStreamItemsRecord(subsequentResultRecord)) { + if (subsequentResultRecord.isFinalRecord) { + completedResults.push( + this._completedRecordToResult(subsequentResultRecord.streamRecord), + ); + } + if (subsequentResultRecord.isCompletedAsyncIterator) { // async iterable resolver just finished but there may be pending payloads - encounteredCompletedAsyncIterator = true; continue; } - (incrementalResult as IncrementalStreamResult).items = items; + if (subsequentResultRecord.streamRecord.errors.length > 0) { + continue; + } + const incrementalResult: IncrementalStreamResult = { + items: subsequentResultRecord.items, + path: subsequentResultRecord.streamRecord.path, + }; + if (subsequentResultRecord.errors.length > 0) { + incrementalResult.errors = subsequentResultRecord.errors; + } + incrementalResults.push(incrementalResult); } else { - const data = incrementalDataRecord.data; - (incrementalResult as IncrementalDeferResult).data = data ?? 
null; - } - - incrementalResult.path = incrementalDataRecord.path; - if (incrementalDataRecord.label != null) { - incrementalResult.label = incrementalDataRecord.label; - } - if (incrementalDataRecord.errors.length > 0) { - incrementalResult.errors = incrementalDataRecord.errors; + completedResults.push( + this._completedRecordToResult(subsequentResultRecord), + ); + if (subsequentResultRecord.errors.length > 0) { + continue; + } + for (const deferredGroupedFieldSetRecord of subsequentResultRecord.deferredGroupedFieldSetRecords) { + if (!deferredGroupedFieldSetRecord.sent) { + deferredGroupedFieldSetRecord.sent = true; + const incrementalResult: IncrementalDeferResult = { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + data: deferredGroupedFieldSetRecord.data!, + path: deferredGroupedFieldSetRecord.path, + }; + if (deferredGroupedFieldSetRecord.errors.length > 0) { + incrementalResult.errors = deferredGroupedFieldSetRecord.errors; + } + incrementalResults.push(incrementalResult); + } + } } - incrementalResults.push(incrementalResult); } - const hasNext = this._pending.size > 0; - return incrementalResults.length - ? { incremental: incrementalResults, hasNext } - : encounteredCompletedAsyncIterator && !hasNext - ? { hasNext: false } - : undefined; + return { + incremental: incrementalResults, + completed: completedResults, + }; } - private _publish(subsequentResultRecord: SubsequentDataRecord) { - if (subsequentResultRecord.isCompleted) { + private _completedRecordToResult( + completedRecord: DeferredFragmentRecord | StreamRecord, + ): CompletedResult { + const result: CompletedResult = { + path: completedRecord.path, + }; + if (completedRecord.label !== undefined) { + result.label = completedRecord.label; + } + if (completedRecord.errors.length > 0) { + result.errors = completedRecord.errors; + } + return result; + } + + private _publish(subsequentResultRecord: SubsequentResultRecord): void { + if (isStreamItemsRecord(subsequentResultRecord)) { + if (subsequentResultRecord.isCompleted) { + this._push(subsequentResultRecord); + return; + } + + this._introduce(subsequentResultRecord); + return; + } + + if (subsequentResultRecord._pending.size === 0) { this._push(subsequentResultRecord); } else { this._introduce(subsequentResultRecord); } } + private _getChildren( + erroringIncrementalDataRecord: IncrementalDataRecord, + ): ReadonlySet { + const children = new Set(); + if (isDeferredGroupedFieldSetRecord(erroringIncrementalDataRecord)) { + for (const erroringIncrementalResultRecord of erroringIncrementalDataRecord.deferredFragmentRecords) { + for (const child of erroringIncrementalResultRecord.children) { + children.add(child); + } + } + } else { + for (const child of erroringIncrementalDataRecord.children) { + children.add(child); + } + } + return children; + } + private _getDescendants( - children: ReadonlySet, - descendants = new Set(), - ): ReadonlySet { + children: ReadonlySet, + descendants = new Set(), + ): ReadonlySet { for (const child of children) { descendants.add(child); this._getDescendants(child.children, descendants); @@ -467,9 +604,26 @@ export class IncrementalPublisher { return descendants; } + private _nullsChildSubsequentResultRecord( + subsequentResultRecord: SubsequentResultRecord, + nullPath: ReadonlyArray, + ): boolean { + const incrementalDataRecords = isStreamItemsRecord(subsequentResultRecord) + ? 
[subsequentResultRecord] + : subsequentResultRecord.deferredGroupedFieldSetRecords; + + for (const incrementalDataRecord of incrementalDataRecords) { + if (this._matchesPath(incrementalDataRecord.path, nullPath)) { + return true; + } + } + + return false; + } + private _matchesPath( - testPath: Array, - basePath: Array, + testPath: ReadonlyArray, + basePath: ReadonlyArray, ): boolean { for (let i = 0; i < basePath.length; i++) { if (basePath[i] !== testPath[i]) { @@ -481,65 +635,118 @@ export class IncrementalPublisher { } } -export interface InitialResultRecord { +function isDeferredGroupedFieldSetRecord( + incrementalDataRecord: unknown, +): incrementalDataRecord is DeferredGroupedFieldSetRecord { + return incrementalDataRecord instanceof DeferredGroupedFieldSetRecord; +} + +function isStreamItemsRecord( + subsequentResultRecord: unknown, +): subsequentResultRecord is StreamItemsRecord { + return subsequentResultRecord instanceof StreamItemsRecord; +} + +/** @internal */ +export class InitialResultRecord { errors: Array; - children: Set; + children: Set; + constructor() { + this.errors = []; + this.children = new Set(); + } } /** @internal */ -export class DeferredFragmentRecord { +export class DeferredGroupedFieldSetRecord { + path: ReadonlyArray; + deferredFragmentRecords: ReadonlyArray; + groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; errors: Array; + data: ObjMap | undefined; + sent: boolean; + + constructor(opts: { + path: Path | undefined; + deferredFragmentRecords: ReadonlyArray; + groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; + }) { + this.path = pathToArray(opts.path); + this.deferredFragmentRecords = opts.deferredFragmentRecords; + this.groupedFieldSet = opts.groupedFieldSet; + this.shouldInitiateDefer = opts.shouldInitiateDefer; + this.errors = []; + this.sent = false; + } +} + +/** @internal */ +export class DeferredFragmentRecord { + path: ReadonlyArray; label: string | undefined; - path: Array; - data: ObjMap | null; - children: Set; - isCompleted: boolean; + children: Set; + deferredGroupedFieldSetRecords: Set; + errors: Array; filtered: boolean; - constructor(opts: { label: string | undefined; path: Path | undefined }) { - this.label = opts.label; + _pending: Set; + + constructor(opts: { path: Path | undefined; label: string | undefined }) { this.path = pathToArray(opts.path); - this.errors = []; + this.label = opts.label; this.children = new Set(); - this.isCompleted = false; this.filtered = false; - this.data = null; + this.deferredGroupedFieldSetRecords = new Set(); + this.errors = []; + this._pending = new Set(); } } /** @internal */ -export class StreamItemsRecord { - errors: Array; +export class StreamRecord { label: string | undefined; - path: Array; - items: Array | null; - children: Set; - asyncIterator: AsyncIterator | undefined; - isCompletedAsyncIterator?: boolean; - isCompleted: boolean; - filtered: boolean; + path: ReadonlyArray; + errors: Array; + earlyReturn?: (() => Promise) | undefined; constructor(opts: { label: string | undefined; - path: Path | undefined; - asyncIterator?: AsyncIterator; + path: Path; + earlyReturn?: (() => Promise) | undefined; }) { - this.items = null; this.label = opts.label; this.path = pathToArray(opts.path); - this.asyncIterator = opts.asyncIterator; this.errors = []; + this.earlyReturn = opts.earlyReturn; + } +} + +/** @internal */ +export class StreamItemsRecord { + errors: Array; + streamRecord: StreamRecord; + path: ReadonlyArray; + items: Array; + children: Set; + isFinalRecord?: 
boolean; + isCompletedAsyncIterator?: boolean; + isCompleted: boolean; + filtered: boolean; + + constructor(opts: { streamRecord: StreamRecord; path: Path | undefined }) { + this.streamRecord = opts.streamRecord; + this.path = pathToArray(opts.path); this.children = new Set(); + this.errors = []; this.isCompleted = false; this.filtered = false; - this.items = null; + this.items = []; } } -export type SubsequentDataRecord = DeferredFragmentRecord | StreamItemsRecord; +export type IncrementalDataRecord = + | InitialResultRecord + | DeferredGroupedFieldSetRecord + | StreamItemsRecord; -export type IncrementalDataRecord = InitialResultRecord | SubsequentDataRecord; - -function isStreamItemsRecord( - subsequentResultRecord: SubsequentDataRecord, -): subsequentResultRecord is StreamItemsRecord { - return subsequentResultRecord instanceof StreamItemsRecord; -} +type SubsequentResultRecord = DeferredFragmentRecord | StreamItemsRecord; diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index 72b03d29d4..7fca565f31 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -187,6 +187,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -239,6 +240,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -270,9 +272,9 @@ describe('Execute: defer directive', () => { }, }, path: [], - label: 'DeferQuery', }, ], + completed: [{ path: [], label: 'DeferQuery' }], hasNext: false, }, ]); @@ -318,9 +320,9 @@ describe('Execute: defer directive', () => { }, ], path: [], - label: 'DeferQuery', }, ], + completed: [{ path: [], label: 'DeferQuery' }], hasNext: false, }, ]); @@ -355,19 +357,21 @@ describe('Execute: defer directive', () => { incremental: [ { data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + id: '1', }, path: ['hero'], - label: 'DeferNested', }, { data: { - id: '1', + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, path: ['hero'], - label: 'DeferTop', }, ], + completed: [ + { path: ['hero'], label: 'DeferTop' }, + { path: ['hero'], label: 'DeferNested' }, + ], hasNext: false, }, ]); @@ -395,15 +399,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], + completed: [{ path: ['hero'], label: 'DeferTop' }], hasNext: false, }, ]); @@ -431,15 +427,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], + completed: [{ path: ['hero'], label: 'DeferTop' }], hasNext: false, }, ]); @@ -464,15 +452,14 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { data: { name: 'Luke' }, path: ['hero'], label: 'InlineDeferred' }, - ], + incremental: [{ data: { name: 'Luke' }, path: ['hero'] }], + completed: [{ path: ['hero'], label: 'InlineDeferred' }], hasNext: false, }, ]); }); - it('Emits empty defer fragments', async () => { + it('Does not emit empty defer fragments', async () => { const document = parse(` query HeroNameQuery { hero { @@ -494,12 +481,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: {}, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -533,16 +515,18 @@ describe('Execute: 
defer directive', () => { id: '1', }, path: ['hero'], - label: 'DeferID', }, { data: { name: 'Luke', }, path: ['hero'], - label: 'DeferName', }, ], + completed: [ + { path: ['hero'], label: 'DeferID' }, + { path: ['hero'], label: 'DeferName' }, + ], hasNext: false, }, ]); @@ -572,24 +556,72 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - hero: { - id: '1', - }, - }, + data: { hero: {} }, path: [], - label: 'DeferID', }, { - data: { - hero: { - name: 'Luke', - }, - }, + data: { id: '1' }, + path: ['hero'], + }, + { + data: { name: 'Luke' }, + path: ['hero'], + }, + ], + completed: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields that return promises', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document, { + hero: { + id: () => Promise.resolve('1'), + name: () => Promise.resolve('Luke'), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, path: [], - label: 'DeferName', + }, + { + data: { id: '1' }, + path: ['hero'], + }, + { + data: { name: 'Luke' }, + path: ['hero'], }, ], + completed: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], hasNext: false, }, ]); @@ -625,18 +657,18 @@ describe('Execute: defer directive', () => { id: '1', }, path: ['hero'], - label: 'DeferID', }, { data: { - hero: { - name: 'Luke', - }, + name: 'Luke', }, - path: [], - label: 'DeferName', + path: ['hero'], }, ], + completed: [ + { path: ['hero'], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], hasNext: false, }, ]); @@ -670,9 +702,9 @@ describe('Execute: defer directive', () => { }, }, path: [], - label: 'DeferName', }, ], + completed: [{ path: [], label: 'DeferName' }], hasNext: true, }, { @@ -682,15 +714,15 @@ describe('Execute: defer directive', () => { id: '1', }, path: ['hero'], - label: 'DeferID', }, ], + completed: [{ path: ['hero'], label: 'DeferID' }], hasNext: false, }, ]); }); - it('Does not deduplicate multiple defers on the same object', async () => { + it('Can deduplicate multiple defers on the same object', async () => { const document = parse(` query { hero { @@ -725,25 +757,30 @@ describe('Execute: defer directive', () => { }, { incremental: [ - { data: {}, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 0] }, { data: { id: '2', name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: {}, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 1] }, { data: { id: '3', name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: {}, path: ['hero', 'friends', 2] }, - { data: {}, path: ['hero', 'friends', 2] }, - { data: {}, path: ['hero', 'friends', 2] }, { data: { id: '4', name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 0] }, + { path: 
['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); }); - it('Does not deduplicate fields present in the initial payload', async () => { + it('Deduplicates fields present in the initial payload', async () => { const document = parse(` query { hero { @@ -799,27 +836,17 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - nestedObject: { - deeperObject: { - bar: 'bar', - }, - }, - anotherNestedObject: { - deeperObject: { - foo: 'foo', - }, - }, - }, - path: ['hero'], + data: { bar: 'bar' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate fields present in a parent defer payload', async () => { + it('Deduplicates fields present in a parent defer payload', async () => { const document = parse(` query { hero { @@ -852,32 +879,31 @@ describe('Execute: defer directive', () => { { data: { nestedObject: { - deeperObject: { - foo: 'foo', - }, + deeperObject: { foo: 'foo' }, }, }, path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { - foo: 'foo', bar: 'bar', }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not deduplicate fields with deferred fragments at multiple levels', async () => { + it('Deduplicates fields with deferred fragments at multiple levels', async () => { const document = parse(` query { hero { @@ -933,52 +959,37 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - nestedObject: { - deeperObject: { - foo: 'foo', - bar: 'bar', - }, - }, - }, - path: ['hero'], + data: { bar: 'bar' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { incremental: [ { - data: { - deeperObject: { - foo: 'foo', - bar: 'bar', - baz: 'baz', - }, - }, - path: ['hero', 'nestedObject'], + data: { baz: 'baz' }, + path: ['hero', 'nestedObject', 'deeperObject'], }, ], hasNext: true, + completed: [{ path: ['hero', 'nestedObject'] }], }, { incremental: [ { - data: { - foo: 'foo', - bar: 'bar', - baz: 'baz', - bak: 'bak', - }, + data: { bak: 'bak' }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not combine multiple fields from deferred fragments from different branches occurring at the same level', async () => { + it('Deduplicates multiple fields from deferred fragments from different branches occurring at the same level', async () => { const document = parse(` query { hero { @@ -1024,14 +1035,10 @@ describe('Execute: defer directive', () => { }, path: ['hero', 'nestedObject', 'deeperObject'], }, - { - data: { - nestedObject: { - deeperObject: {}, - }, - }, - path: ['hero'], - }, + ], + completed: [ + { path: ['hero'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, ], hasNext: true, }, @@ -1039,18 +1046,18 @@ describe('Execute: defer directive', () => { incremental: [ { data: { - foo: 'foo', bar: 'bar', }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], + completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], hasNext: false, }, ]); }); - it('Does not deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () => { + it('Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () 
=> { const document = parse(` query { a { @@ -1104,35 +1111,21 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - e: { - f: 'f', - }, - }, + data: { e: { f: 'f' } }, path: ['a', 'b'], }, { - data: { - a: { - b: { - e: { - f: 'f', - }, - }, - }, - g: { - h: 'h', - }, - }, + data: { g: { h: 'h' } }, path: [], }, ], + completed: [{ path: ['a', 'b'] }, { path: [] }], hasNext: false, }, ]); }); - it('Preserves error boundaries, null first', async () => { + it('Nulls cross defer boundaries, null first', async () => { const document = parse(` query { ... @defer { @@ -1169,24 +1162,17 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: { - d: 'd', - }, - }, - }, + data: { b: { c: {} } }, path: ['a'], }, { - data: { - a: { - b: { - c: null, - }, - someField: 'someField', - }, - }, + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, + ], + completed: [ + { + path: [], errors: [ { message: @@ -1195,15 +1181,15 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'nonNullErrorField'], }, ], - path: [], }, + { path: ['a'] }, ], hasNext: false, }, ]); }); - it('Preserves error boundaries, value first', async () => { + it('Nulls cross defer boundaries, value first', async () => { const document = parse(` query { ... @defer { @@ -1243,12 +1229,17 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: null, - }, - someField: 'someField', - }, + data: { b: { c: {} } }, + path: ['a'], + }, + { + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, + ], + completed: [ + { + path: ['a'], errors: [ { message: @@ -1257,27 +1248,15 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'nonNullErrorField'], }, ], - path: ['a'], - }, - { - data: { - a: { - b: { - c: { - d: 'd', - }, - }, - }, - }, - path: [], }, + { path: [] }, ], hasNext: false, }, ]); }); - it('Correctly handle a slow null', async () => { + it('filters a payload with a null that cannot be merged', async () => { const document = parse(` query { ... 
@defer { @@ -1325,29 +1304,21 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - b: { - c: { - d: 'd', - }, - }, - }, + data: { b: { c: {} } }, path: ['a'], }, + { + data: { d: 'd' }, + path: ['a', 'b', 'c'], + }, ], + completed: [{ path: ['a'] }], hasNext: true, }, { - incremental: [ + completed: [ { - data: { - a: { - b: { - c: null, - }, - someField: 'someField', - }, - }, + path: [], errors: [ { message: @@ -1356,7 +1327,6 @@ describe('Execute: defer directive', () => { path: ['a', 'b', 'c', 'nonNullErrorField'], }, ], - path: [], }, ], hasNext: false, @@ -1383,35 +1353,19 @@ describe('Execute: defer directive', () => { nonNullName: () => null, }, }); - expectJSON(result).toDeepEqual([ - { - data: { - hero: null, - }, - errors: [ - { - message: - 'Cannot return null for non-nullable field Hero.nonNullName.', - locations: [{ line: 4, column: 11 }], - path: ['hero', 'nonNullName'], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - data: { - hero: { - name: 'Luke', - }, - }, - path: [], - }, - ], - hasNext: false, + expectJSON(result).toDeepEqual({ + data: { + hero: null, }, - ]); + errors: [ + { + message: + 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 4, column: 11 }], + path: ['hero', 'nonNullName'], + }, + ], + }); }); it('Cancels deferred fields when deferred result exhibits null bubbling', async () => { @@ -1453,12 +1407,13 @@ describe('Execute: defer directive', () => { path: [], }, ], + completed: [{ path: [] }], hasNext: false, }, ]); }); - it('Does not deduplicate list fields', async () => { + it('Deduplicates list fields', async () => { const document = parse(` query { hero { @@ -1484,20 +1439,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], - }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate async iterable list fields', async () => { + it('Deduplicates async iterable list fields', async () => { const document = parse(` query { hero { @@ -1526,18 +1474,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [{ name: 'Han' }] }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate empty async iterable list fields', async () => { + it('Deduplicates empty async iterable list fields', async () => { const document = parse(` query { hero { @@ -1567,12 +1510,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [] }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1606,18 +1544,25 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { - friends: [{ id: '2' }, { id: '3' }, { id: '4' }], - }, - path: ['hero'], + data: { id: '2' }, + path: ['hero', 'friends', 0], + }, + { + data: { id: '3' }, + path: ['hero', 'friends', 1], + }, + { + data: { id: '4' }, + path: ['hero', 'friends', 2], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate list fields that return empty lists', async () => { + it('Deduplicates list fields that return empty lists', async () => { const document = parse(` query { hero { @@ -1644,18 +1589,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { friends: [] }, - path: ['hero'], - 
}, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate null object fields', async () => { + it('Deduplicates null object fields', async () => { const document = parse(` query { hero { @@ -1682,18 +1622,13 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { nestedObject: null }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); }); - it('Does not deduplicate promise object fields', async () => { + it('Deduplicates promise object fields', async () => { const document = parse(` query { hero { @@ -1719,12 +1654,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ - { - data: { nestedObject: { name: 'foo' } }, - path: ['hero'], - }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1769,6 +1699,7 @@ describe('Execute: defer directive', () => { ], }, ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1797,9 +1728,8 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - data: null, path: ['hero'], errors: [ { @@ -1876,9 +1806,8 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - data: null, path: ['hero'], errors: [ { @@ -1935,6 +1864,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { @@ -1943,6 +1873,11 @@ describe('Execute: defer directive', () => { { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); @@ -1983,6 +1918,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], + completed: [{ path: ['hero'] }], hasNext: true, }, { @@ -1991,6 +1927,11 @@ describe('Execute: defer directive', () => { { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], + completed: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: false, }, ]); diff --git a/src/execution/__tests__/mutations-test.ts b/src/execution/__tests__/mutations-test.ts index fa533c75ea..64262ea020 100644 --- a/src/execution/__tests__/mutations-test.ts +++ b/src/execution/__tests__/mutations-test.ts @@ -242,13 +242,13 @@ describe('Execute: Handles mutation execution ordering', () => { { incremental: [ { - label: 'defer-label', path: ['first'], data: { promiseToGetTheNumber: 2, }, }, ], + completed: [{ path: ['first'], label: 'defer-label' }], hasNext: false, }, ]); @@ -317,7 +317,6 @@ describe('Execute: Handles mutation execution ordering', () => { { incremental: [ { - label: 'defer-label', path: [], data: { first: { @@ -326,6 +325,7 @@ describe('Execute: Handles mutation execution ordering', () => { }, }, ], + completed: [{ path: [], label: 'defer-label' }], hasNext: false, }, ]); diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index e3f39acff5..be1e96be5a 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -1,7 +1,8 @@ -import { assert } from 'chai'; +import { assert, expect } from 'chai'; import { describe, it } from 'mocha'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { resolveOnNextTick } from 
'../../__testUtils__/resolveOnNextTick.js'; import type { PromiseOrValue } from '../../jsutils/PromiseOrValue.js'; import { promiseWithResolvers } from '../../jsutils/promiseWithResolvers.js'; @@ -144,11 +145,12 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], + incremental: [{ items: ['banana'], path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -166,15 +168,16 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['apple'], path: ['scalarList', 0] }], + incremental: [{ items: ['apple'], path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], + incremental: [{ items: ['banana'], path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -220,8 +223,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: ['banana'], - path: ['scalarList', 1], - label: 'scalar-stream', + path: ['scalarList'], }, ], hasNext: true, @@ -230,10 +232,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: ['coconut'], - path: ['scalarList', 2], - label: 'scalar-stream', + path: ['scalarList'], }, ], + completed: [{ path: ['scalarList'], label: 'scalar-stream' }], hasNext: false, }, ]); @@ -262,7 +264,8 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], path: ['scalarList'] }], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -287,7 +290,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [['banana', 'banana', 'banana']], - path: ['scalarListList', 1], + path: ['scalarListList'], }, ], hasNext: true, @@ -296,9 +299,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [['coconut', 'coconut', 'coconut']], - path: ['scalarListList', 2], + path: ['scalarListList'], }, ], + completed: [{ path: ['scalarListList'] }], hasNext: false, }, ]); @@ -340,9 +344,10 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -370,7 +375,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + path: ['friendList'], }, ], hasNext: true, @@ -379,7 +384,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -388,9 +393,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -436,9 +442,10 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -479,9 +486,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: 
'3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -515,7 +523,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'bad', @@ -531,9 +539,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -565,7 +574,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + path: ['friendList'], }, ], hasNext: true, @@ -574,7 +583,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -583,12 +592,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -623,12 +633,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -694,13 +705,19 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, }, - { done: false, value: { hasNext: false } }, + { + done: false, + value: { + completed: [{ path: ['friendList'] }], + hasNext: false, + }, + }, { done: true, value: undefined }, ]); }); @@ -755,10 +772,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'bad', @@ -792,10 +808,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: @@ -839,10 +854,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: @@ -877,7 +891,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['scalarList', 1], + path: ['scalarList'], errors: [ { message: 'String cannot represent value: {}', @@ -887,6 +901,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['scalarList'] }], hasNext: false, }, ]); @@ -919,7 +934,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -935,9 +950,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -968,7 +984,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -984,9 +1000,10 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], 
- path: ['friendList', 2], + path: ['friendList'], }, ], + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1016,10 +1033,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1056,10 +1072,9 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1101,7 +1116,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 1], + path: ['friendList'], errors: [ { message: 'Oops', @@ -1117,12 +1132,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1154,10 +1170,153 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ + completed: [ + { + path: ['nonNullFriendList'], + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable does not provide a return method) ', async () => { + const document = parse(` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + let count = 0; + const result = await complete(document, { + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + // Not reached + /* c8 ignore next 5 */ + case 3: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[2].name }, + }); + } + }, + }), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + hasNext: true, + }, + { + completed: [ + { + path: ['nonNullFriendList'], + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable provides concurrent next/return methods and has a slow return ', async () => { + const document = parse(` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + let count = 0; + let returned = false; + const result = await complete(document, { + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + /* c8 ignore next 3 */ + if (returned) { + return Promise.resolve({ done: true }); + } + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + case 2: + 
return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + // Not reached + /* c8 ignore next 5 */ + case 3: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[2].name }, + }); + } + }, + return: async () => { + await resolveOnNextTick(); + returned = true; + return { done: true }; + }, + }), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + hasNext: true, + }, + { + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + path: ['nonNullFriendList'], errors: [ { message: 'Oops', @@ -1170,6 +1329,7 @@ describe('Execute: stream directive', () => { hasNext: false, }, ]); + expect(returned).to.equal(true); }); it('Filters payloads that are nulled', async () => { const document = parse(` @@ -1283,12 +1443,14 @@ describe('Execute: stream directive', () => { }, { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['otherNestedObject'] }], hasNext: true, }, { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], hasNext: false, }, ]); @@ -1346,6 +1508,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: false, }, ]); @@ -1380,7 +1543,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [null], - path: ['friendList', 0], + path: ['friendList'], errors: [ { message: @@ -1394,6 +1557,7 @@ describe('Execute: stream directive', () => { hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1405,10 +1569,11 @@ describe('Execute: stream directive', () => { const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { + /* c8 ignore start */ if (requested) { - // Ignores further errors when filtered. + // stream is filtered, next is not called, and so this is not reached. 
return Promise.reject(new Error('Oops')); - } + } /* c8 ignore stop */ requested = true; const friend = friends[0]; return Promise.resolve({ @@ -1489,6 +1654,7 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: false, }, }); @@ -1528,7 +1694,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['friendList', 1], + path: ['friendList'], }, ], hasNext: true, @@ -1537,12 +1703,13 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '3', name: 'Leia' }], - path: ['friendList', 2], + path: ['friendList'], }, ], hasNext: true, }, { + completed: [{ path: ['friendList'] }], hasNext: false, }, ]); @@ -1584,42 +1751,25 @@ describe('Execute: stream directive', () => { }, { incremental: [ - { - items: [{ id: '1' }], - path: ['nestedObject', 'nestedFriendList', 0], - }, - { - data: { - nestedFriendList: [], - }, - path: ['nestedObject'], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: [{ id: '2' }], - path: ['nestedObject', 'nestedFriendList', 1], - }, { items: [{ id: '1', name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, }, { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], hasNext: false, }, ]); @@ -1675,6 +1825,7 @@ describe('Execute: stream directive', () => { path: ['nestedObject'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, done: false, @@ -1685,7 +1836,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, @@ -1698,7 +1849,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + path: ['nestedObject', 'nestedFriendList'], }, ], hasNext: true, @@ -1707,7 +1858,10 @@ describe('Execute: stream directive', () => { }); const result5 = await iterator.next(); expectJSON(result5).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ path: ['nestedObject', 'nestedFriendList'] }], + hasNext: false, + }, done: false, }); const result6 = await iterator.next(); @@ -1770,14 +1924,13 @@ describe('Execute: stream directive', () => { { data: { name: 'Luke' }, path: ['friendList', 0], - label: 'DeferName', }, { items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + path: ['friendList'], }, ], + completed: [{ path: ['friendList', 0], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1787,20 +1940,28 @@ describe('Execute: stream directive', () => { resolveSlowField('Han'); const result3 = await result3Promise; expectJSON(result3).toDeepEqual({ + value: { + completed: [{ path: ['friendList'], label: 'stream-label' }], + hasNext: true, + }, + done: false, + }); + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ value: { incremental: [ { data: { name: 'Han' }, path: ['friendList', 1], - label: 'DeferName', }, ], + completed: [{ path: ['friendList', 1], label: 'DeferName' }], hasNext: false, }, done: false, }); - const result4 = await iterator.next(); - expectJSON(result4).toDeepEqual({ + const result5 = await 
iterator.next(); + expectJSON(result5).toDeepEqual({ value: undefined, done: true, }); @@ -1859,14 +2020,13 @@ describe('Execute: stream directive', () => { { data: { name: 'Luke' }, path: ['friendList', 0], - label: 'DeferName', }, { items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + path: ['friendList'], }, ], + completed: [{ path: ['friendList', 0], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1879,9 +2039,9 @@ describe('Execute: stream directive', () => { { data: { name: 'Han' }, path: ['friendList', 1], - label: 'DeferName', }, ], + completed: [{ path: ['friendList', 1], label: 'DeferName' }], hasNext: true, }, done: false, @@ -1890,7 +2050,10 @@ describe('Execute: stream directive', () => { resolveIterableCompletion(null); const result4 = await result4Promise; expectJSON(result4).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ path: ['friendList'], label: 'stream-label' }], + hasNext: false, + }, done: false, }); diff --git a/src/execution/collectFields.ts b/src/execution/collectFields.ts index af263112ec..43b36343eb 100644 --- a/src/execution/collectFields.ts +++ b/src/execution/collectFields.ts @@ -1,6 +1,8 @@ import { AccumulatorMap } from '../jsutils/AccumulatorMap.js'; import { invariant } from '../jsutils/invariant.js'; import type { ObjMap } from '../jsutils/ObjMap.js'; +import type { ReadonlyOrderedSet } from '../jsutils/OrderedSet.js'; +import { OrderedSet } from '../jsutils/OrderedSet.js'; import type { FieldNode, @@ -26,18 +28,52 @@ import { typeFromAST } from '../utilities/typeFromAST.js'; import { getDirectiveValues } from './values.js'; -export type FieldGroup = ReadonlyArray; +export interface DeferUsage { + label: string | undefined; + ancestors: ReadonlyArray; +} + +export const NON_DEFERRED_TARGET_SET = new OrderedSet([ + undefined, +]).freeze(); + +export type Target = DeferUsage | undefined; +export type TargetSet = ReadonlyOrderedSet; +export type DeferUsageSet = ReadonlyOrderedSet; + +export interface FieldDetails { + node: FieldNode; + target: Target; +} + +export interface FieldGroup { + fields: ReadonlyArray; + targets: TargetSet; +} export type GroupedFieldSet = Map; -export interface PatchFields { - label: string | undefined; +export interface GroupedFieldSetDetails { groupedFieldSet: GroupedFieldSet; + shouldInitiateDefer: boolean; } -export interface FieldsAndPatches { +export interface CollectFieldsResult { groupedFieldSet: GroupedFieldSet; - patches: Array; + newGroupedFieldSetDetails: Map; + newDeferUsages: ReadonlyArray; +} + +interface CollectFieldsContext { + schema: GraphQLSchema; + fragments: ObjMap; + variableValues: { [variable: string]: unknown }; + operation: OperationDefinitionNode; + runtimeType: GraphQLObjectType; + targetsByKey: Map>; + fieldsByTarget: Map>; + newDeferUsages: Array; + visitedFragmentNames: Set; } /** @@ -55,21 +91,25 @@ export function collectFields( variableValues: { [variable: string]: unknown }, runtimeType: GraphQLObjectType, operation: OperationDefinitionNode, -): FieldsAndPatches { - const groupedFieldSet = new AccumulatorMap(); - const patches: Array = []; - collectFieldsImpl( +): CollectFieldsResult { + const context: CollectFieldsContext = { schema, fragments, variableValues, - operation, runtimeType, - operation.selectionSet, - groupedFieldSet, - patches, - new Set(), - ); - return { groupedFieldSet, patches }; + operation, + fieldsByTarget: new Map(), + targetsByKey: new Map(), + newDeferUsages: [], + visitedFragmentNames: new Set(), + }; + + 
collectFieldsImpl(context, operation.selectionSet); + + return { + ...buildGroupedFieldSets(context.targetsByKey, context.fieldsByTarget), + newDeferUsages: context.newDeferUsages, + }; } /** @@ -90,53 +130,74 @@ export function collectSubfields( operation: OperationDefinitionNode, returnType: GraphQLObjectType, fieldGroup: FieldGroup, -): FieldsAndPatches { - const subGroupedFieldSet = new AccumulatorMap(); - const visitedFragmentNames = new Set(); - - const subPatches: Array = []; - const subFieldsAndPatches = { - groupedFieldSet: subGroupedFieldSet, - patches: subPatches, +): CollectFieldsResult { + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType: returnType, + operation, + fieldsByTarget: new Map(), + targetsByKey: new Map(), + newDeferUsages: [], + visitedFragmentNames: new Set(), }; - for (const node of fieldGroup) { + for (const fieldDetails of fieldGroup.fields) { + const node = fieldDetails.node; if (node.selectionSet) { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - returnType, - node.selectionSet, - subGroupedFieldSet, - subPatches, - visitedFragmentNames, - ); + collectFieldsImpl(context, node.selectionSet, fieldDetails.target); } } - return subFieldsAndPatches; + + return { + ...buildGroupedFieldSets( + context.targetsByKey, + context.fieldsByTarget, + fieldGroup.targets, + ), + newDeferUsages: context.newDeferUsages, + }; } -// eslint-disable-next-line max-params function collectFieldsImpl( - schema: GraphQLSchema, - fragments: ObjMap, - variableValues: { [variable: string]: unknown }, - operation: OperationDefinitionNode, - runtimeType: GraphQLObjectType, + context: CollectFieldsContext, selectionSet: SelectionSetNode, - groupedFieldSet: AccumulatorMap, - patches: Array, - visitedFragmentNames: Set, + parentTarget?: Target, + newTarget?: Target, ): void { + const { + schema, + fragments, + variableValues, + runtimeType, + operation, + targetsByKey, + fieldsByTarget, + newDeferUsages, + visitedFragmentNames, + } = context; + for (const selection of selectionSet.selections) { switch (selection.kind) { case Kind.FIELD: { if (!shouldIncludeNode(variableValues, selection)) { continue; } - groupedFieldSet.add(getFieldEntryKey(selection), selection); + const key = getFieldEntryKey(selection); + const target = newTarget ?? parentTarget; + let keyTargets = targetsByKey.get(key); + if (keyTargets === undefined) { + keyTargets = new Set(); + targetsByKey.set(key, keyTargets); + } + keyTargets.add(target); + let targetFields = fieldsByTarget.get(target); + if (targetFields === undefined) { + targetFields = new AccumulatorMap(); + fieldsByTarget.set(target, targetFields); + } + targetFields.add(key, selection); break; } case Kind.INLINE_FRAGMENT: { @@ -149,36 +210,25 @@ function collectFieldsImpl( const defer = getDeferValues(operation, variableValues, selection); - if (defer) { - const patchFields = new AccumulatorMap(); - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - selection.selectionSet, - patchFields, - patches, - visitedFragmentNames, - ); - patches.push({ - label: defer.label, - groupedFieldSet: patchFields, - }); + let target: Target; + if (!defer) { + target = newTarget; } else { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - selection.selectionSet, - groupedFieldSet, - patches, - visitedFragmentNames, - ); + const ancestors = + parentTarget === undefined + ? 
[parentTarget] + : [parentTarget, ...parentTarget.ancestors]; + target = { ...defer, ancestors }; + newDeferUsages.push(target); } + + collectFieldsImpl( + context, + selection.selectionSet, + parentTarget, + target, + ); + break; } case Kind.FRAGMENT_SPREAD: { @@ -201,40 +251,20 @@ function collectFieldsImpl( continue; } + let target: Target; if (!defer) { visitedFragmentNames.add(fragName); - } - - if (defer) { - const patchFields = new AccumulatorMap(); - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - fragment.selectionSet, - patchFields, - patches, - visitedFragmentNames, - ); - patches.push({ - label: defer.label, - groupedFieldSet: patchFields, - }); + target = newTarget; } else { - collectFieldsImpl( - schema, - fragments, - variableValues, - operation, - runtimeType, - fragment.selectionSet, - groupedFieldSet, - patches, - visitedFragmentNames, - ); + const ancestors = + parentTarget === undefined + ? [parentTarget] + : [parentTarget, ...parentTarget.ancestors]; + target = { ...defer, ancestors }; + newDeferUsages.push(target); } + + collectFieldsImpl(context, fragment.selectionSet, parentTarget, target); break; } } @@ -323,3 +353,143 @@ function doesFragmentConditionMatch( function getFieldEntryKey(node: FieldNode): string { return node.alias ? node.alias.value : node.name.value; } + +function buildGroupedFieldSets( + targetsByKey: Map>, + fieldsByTarget: Map>>, + parentTargets = NON_DEFERRED_TARGET_SET, +): { + groupedFieldSet: GroupedFieldSet; + newGroupedFieldSetDetails: Map; +} { + const { parentTargetKeys, targetSetDetailsMap } = getTargetSetDetails( + targetsByKey, + parentTargets, + ); + + const groupedFieldSet = + parentTargetKeys.size > 0 + ? getOrderedGroupedFieldSet( + parentTargetKeys, + parentTargets, + targetsByKey, + fieldsByTarget, + ) + : new Map(); + + const newGroupedFieldSetDetails = new Map< + DeferUsageSet, + GroupedFieldSetDetails + >(); + + for (const [maskingTargets, targetSetDetails] of targetSetDetailsMap) { + const { keys, shouldInitiateDefer } = targetSetDetails; + + const newGroupedFieldSet = getOrderedGroupedFieldSet( + keys, + maskingTargets, + targetsByKey, + fieldsByTarget, + ); + + // All TargetSets that causes new grouped field sets consist only of DeferUsages + // and have shouldInitiateDefer defined + newGroupedFieldSetDetails.set(maskingTargets as DeferUsageSet, { + groupedFieldSet: newGroupedFieldSet, + shouldInitiateDefer, + }); + } + + return { + groupedFieldSet, + newGroupedFieldSetDetails, + }; +} + +interface TargetSetDetails { + keys: Set; + shouldInitiateDefer: boolean; +} + +function getTargetSetDetails( + targetsByKey: Map>, + parentTargets: TargetSet, +): { + parentTargetKeys: ReadonlySet; + targetSetDetailsMap: Map; +} { + const parentTargetKeys = new Set(); + const targetSetDetailsMap = new Map(); + + for (const [responseKey, targets] of targetsByKey) { + const maskingTargetList: Array = []; + for (const target of targets) { + if ( + target === undefined || + target.ancestors.every((ancestor) => !targets.has(ancestor)) + ) { + maskingTargetList.push(target); + } + } + + const maskingTargets = new OrderedSet(maskingTargetList).freeze(); + if (maskingTargets === parentTargets) { + parentTargetKeys.add(responseKey); + continue; + } + + let targetSetDetails = targetSetDetailsMap.get(maskingTargets); + if (targetSetDetails === undefined) { + targetSetDetails = { + keys: new Set(), + shouldInitiateDefer: maskingTargetList.some( + (deferUsage) => !parentTargets.has(deferUsage), + ), + }; + 
targetSetDetailsMap.set(maskingTargets, targetSetDetails); + } + targetSetDetails.keys.add(responseKey); + } + + return { + parentTargetKeys, + targetSetDetailsMap, + }; +} + +function getOrderedGroupedFieldSet( + keys: ReadonlySet, + maskingTargets: TargetSet, + targetsByKey: Map>, + fieldsByTarget: Map>>, +): GroupedFieldSet { + const groupedFieldSet = new Map< + string, + { fields: Array; targets: TargetSet } + >(); + + const firstTarget = maskingTargets.values().next().value as Target; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const firstFields = fieldsByTarget.get(firstTarget)!; + for (const [key] of firstFields) { + if (keys.has(key)) { + let fieldGroup = groupedFieldSet.get(key); + if (fieldGroup === undefined) { + fieldGroup = { fields: [], targets: maskingTargets }; + groupedFieldSet.set(key, fieldGroup); + } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + for (const target of targetsByKey.get(key)!) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const fieldsForTarget = fieldsByTarget.get(target)!; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const nodes = fieldsForTarget.get(key)!; + // the following line is an optional minor optimization + fieldsForTarget.delete(key); + fieldGroup.fields.push(...nodes.map((node) => ({ node, target }))); + } + } + } + + return groupedFieldSet; +} diff --git a/src/execution/execute.ts b/src/execution/execute.ts index 8e3db0f59c..55bba806f5 100644 --- a/src/execution/execute.ts +++ b/src/execution/execute.ts @@ -18,6 +18,7 @@ import { locatedError } from '../error/locatedError.js'; import type { DocumentNode, + FieldNode, FragmentDefinitionNode, OperationDefinitionNode, } from '../language/ast.js'; @@ -46,20 +47,31 @@ import { GraphQLStreamDirective } from '../type/directives.js'; import type { GraphQLSchema } from '../type/schema.js'; import { assertValidSchema } from '../type/validate.js'; -import type { FieldGroup, GroupedFieldSet } from './collectFields.js'; +import type { + DeferUsage, + DeferUsageSet, + FieldGroup, + GroupedFieldSet, + GroupedFieldSetDetails, +} from './collectFields.js'; import { collectFields, collectSubfields as _collectSubfields, + NON_DEFERRED_TARGET_SET, } from './collectFields.js'; import type { ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalDataRecord, +} from './IncrementalPublisher.js'; +import { + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + IncrementalPublisher, InitialResultRecord, StreamItemsRecord, - SubsequentDataRecord, + StreamRecord, } from './IncrementalPublisher.js'; -import { IncrementalPublisher } from './IncrementalPublisher.js'; import { mapAsyncIterable } from './mapAsyncIterable.js'; import { getArgumentValues, @@ -143,6 +155,12 @@ export interface ExecutionArgs { subscribeFieldResolver?: Maybe>; } +export interface StreamUsage { + label: string | undefined; + initialCount: number; + fieldGroup: FieldGroup; +} + const UNEXPECTED_EXPERIMENTAL_DIRECTIVES = 'The provided schema unexpectedly contains experimental directives (@defer or @stream). These directives may only be utilized if experimental execution features are explicitly enabled.'; @@ -232,7 +250,7 @@ function executeImpl( // at which point we still log the error and null the parent field, which // in this case is the entire response. 
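
The grouped-field-set partitioning in collectFields.ts above can be traced by hand. Below is a minimal, standalone sketch (plain data and a hypothetical ancestorsOf helper, not the library's OrderedSet or record classes) of how targetsByKey drives deduplication: a field selected both at the top level and inside a @defer fragment is masked by its parent and stays in the initial grouped field set, while a field selected only inside the fragment ends up in a new deferred grouped field set.

// Hand trace of the masking-target partition for:
//   { a, ... @defer(label: "D") { a, b } }
// `undefined` stands for the non-deferred target; 'D' stands for the DeferUsage
// whose only ancestor is the non-deferred target.
type Tgt = 'D' | undefined;

const targetsByKey = new Map<string, Set<Tgt>>([
  ['a', new Set<Tgt>([undefined, 'D'])], // selected at both levels
  ['b', new Set<Tgt>(['D'])], // selected only under @defer
]);

const parentTargets = new Set<Tgt>([undefined]);
const ancestorsOf = (t: Tgt): ReadonlyArray<Tgt> => (t === 'D' ? [undefined] : []);

const initialKeys: Array<string> = [];
const deferredKeys: Array<string> = [];

for (const [key, targets] of targetsByKey) {
  // a target "masks" the key unless one of its ancestors is also among the key's targets
  const masking = [...targets].filter(
    (t) => t === undefined || ancestorsOf(t).every((a) => !targets.has(a)),
  );
  const sameAsParent =
    masking.length === parentTargets.size &&
    masking.every((t) => parentTargets.has(t));
  (sameAsParent ? initialKeys : deferredKeys).push(key);
}

console.log(initialKeys); // ['a'] -- deduplicated into the initial result
console.log(deferredKeys); // ['b'] -- delivered via a DeferredGroupedFieldSetRecord
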
const incrementalPublisher = exeContext.incrementalPublisher; - const initialResultRecord = incrementalPublisher.prepareInitialResultRecord(); + const initialResultRecord = new InitialResultRecord(); try { const data = executeOperation(exeContext, initialResultRecord); if (isPromise(data)) { @@ -371,8 +389,14 @@ function executeOperation( exeContext: ExecutionContext, initialResultRecord: InitialResultRecord, ): PromiseOrValue> { - const { operation, schema, fragments, variableValues, rootValue } = - exeContext; + const { + operation, + schema, + fragments, + variableValues, + rootValue, + incrementalPublisher, + } = exeContext; const rootType = schema.getRootType(operation.operation); if (rootType == null) { throw new GraphQLError( @@ -381,16 +405,25 @@ function executeOperation( ); } - const { groupedFieldSet, patches } = collectFields( - schema, - fragments, - variableValues, - rootType, - operation, + const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = + collectFields(schema, fragments, variableValues, rootType, operation); + + const newDeferMap = addNewDeferredFragments( + incrementalPublisher, + newDeferUsages, + initialResultRecord, ); + const path = undefined; - let result; + const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( + incrementalPublisher, + newGroupedFieldSetDetails, + newDeferMap, + path, + ); + + let result; switch (operation.operation) { case OperationTypeNode.QUERY: result = executeFields( @@ -400,6 +433,7 @@ function executeOperation( path, groupedFieldSet, initialResultRecord, + newDeferMap, ); break; case OperationTypeNode.MUTATION: @@ -410,6 +444,7 @@ function executeOperation( path, groupedFieldSet, initialResultRecord, + newDeferMap, ); break; case OperationTypeNode.SUBSCRIPTION: @@ -422,21 +457,18 @@ function executeOperation( path, groupedFieldSet, initialResultRecord, + newDeferMap, ); } - for (const patch of patches) { - const { label, groupedFieldSet: patchGroupedFieldSet } = patch; - executeDeferredFragment( - exeContext, - rootType, - rootValue, - patchGroupedFieldSet, - initialResultRecord, - label, - path, - ); - } + executeDeferredGroupedFieldSets( + exeContext, + rootType, + rootValue, + path, + newDeferredGroupedFieldSetRecords, + newDeferMap, + ); return result; } @@ -452,6 +484,7 @@ function executeFieldsSerially( path: Path | undefined, groupedFieldSet: GroupedFieldSet, incrementalDataRecord: InitialResultRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { return promiseReduce( groupedFieldSet, @@ -464,6 +497,7 @@ function executeFieldsSerially( fieldGroup, fieldPath, incrementalDataRecord, + deferMap, ); if (result === undefined) { return results; @@ -492,6 +526,7 @@ function executeFields( path: Path | undefined, groupedFieldSet: GroupedFieldSet, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { const results = Object.create(null); let containsPromise = false; @@ -506,6 +541,7 @@ function executeFields( fieldGroup, fieldPath, incrementalDataRecord, + deferMap, ); if (result !== undefined) { @@ -536,6 +572,10 @@ function executeFields( return promiseForObject(results); } +function toNodes(fieldGroup: FieldGroup): ReadonlyArray { + return fieldGroup.fields.map((fieldDetails) => fieldDetails.node); +} + /** * Implements the "Executing fields" section of the spec * In particular, this function figures out the value that the field returns by @@ -549,8 +589,9 @@ function executeField( fieldGroup: FieldGroup, path: Path, incrementalDataRecord: IncrementalDataRecord, 
+ deferMap: ReadonlyMap, ): PromiseOrValue { - const fieldName = fieldGroup[0].name.value; + const fieldName = fieldGroup.fields[0].node.name.value; const fieldDef = exeContext.schema.getField(parentType, fieldName); if (!fieldDef) { return; @@ -574,7 +615,7 @@ function executeField( // TODO: find a way to memoize, in case this field is within a List type. const args = getArgumentValues( fieldDef, - fieldGroup[0], + fieldGroup.fields[0].node, exeContext.variableValues, ); @@ -594,6 +635,7 @@ function executeField( path, result, incrementalDataRecord, + deferMap, ); } @@ -605,6 +647,7 @@ function executeField( path, result, incrementalDataRecord, + deferMap, ); if (isPromise(completed)) { @@ -653,7 +696,7 @@ export function buildResolveInfo( // information about the current execution state. return { fieldName: fieldDef.name, - fieldNodes: fieldGroup, + fieldNodes: toNodes(fieldGroup), returnType: fieldDef.type, parentType, path, @@ -673,7 +716,7 @@ function handleFieldError( path: Path, incrementalDataRecord: IncrementalDataRecord, ): void { - const error = locatedError(rawError, fieldGroup, pathToArray(path)); + const error = locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); // If the field type is non-nullable, then it is resolved without any // protection from errors, however it still properly locates the error. @@ -715,6 +758,7 @@ function completeValue( path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue { // If result is an Error, throw a located error. if (result instanceof Error) { @@ -732,6 +776,7 @@ function completeValue( path, result, incrementalDataRecord, + deferMap, ); if (completed === null) { throw new Error( @@ -756,6 +801,7 @@ function completeValue( path, result, incrementalDataRecord, + deferMap, ); } @@ -776,6 +822,7 @@ function completeValue( path, result, incrementalDataRecord, + deferMap, ); } @@ -789,6 +836,7 @@ function completeValue( path, result, incrementalDataRecord, + deferMap, ); } /* c8 ignore next 6 */ @@ -807,6 +855,7 @@ async function completePromisedValue( path: Path, result: Promise, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): Promise { try { const resolved = await result; @@ -818,6 +867,7 @@ async function completePromisedValue( path, resolved, incrementalDataRecord, + deferMap, ); if (isPromise(completed)) { completed = await completed; @@ -838,30 +888,35 @@ async function completePromisedValue( } /** - * Returns an object containing the `@stream` arguments if a field should be + * Returns an object containing info for streaming if a field should be * streamed based on the experimental flag, stream directive present and * not disabled by the "if" argument. */ -function getStreamValues( +function getStreamUsage( exeContext: ExecutionContext, fieldGroup: FieldGroup, path: Path, -): - | undefined - | { - initialCount: number | undefined; - label: string | undefined; - } { +): StreamUsage | undefined { // do not stream inner lists of multi-dimensional lists if (typeof path.key === 'number') { return; } + // TODO: add test for this case (a streamed list nested under a list). 
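
The cache check that follows computes the stream usage once per field group and stashes it on the group object itself. A rough standalone sketch of the same memoization using a WeakMap keyed by the field-group object; SimpleStreamUsage and memoizedStreamUsage are illustrative names only, not part of this module.

// Sketch only: memoize derived stream info per field-group object so that the
// list-completion code paths never re-parse the @stream arguments.
interface SimpleStreamUsage {
  label: string | undefined;
  initialCount: number;
}

const streamUsageCache = new WeakMap<object, SimpleStreamUsage | null>();

function memoizedStreamUsage(
  fieldGroup: object,
  compute: () => SimpleStreamUsage | undefined,
): SimpleStreamUsage | undefined {
  if (streamUsageCache.has(fieldGroup)) {
    return streamUsageCache.get(fieldGroup) ?? undefined;
  }
  const usage = compute();
  // store null for "no @stream" so a miss is also cached
  streamUsageCache.set(fieldGroup, usage ?? null);
  return usage;
}
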
+ /* c8 ignore next 7 */ + if ( + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage !== + undefined + ) { + return (fieldGroup as unknown as { _streamUsage: StreamUsage }) + ._streamUsage; + } + // validation only allows equivalent streams on multiple fields, so it is // safe to only check the first fieldNode for the stream directive const stream = getDirectiveValues( GraphQLStreamDirective, - fieldGroup[0], + fieldGroup.fields[0].node, exeContext.variableValues, ); @@ -888,12 +943,25 @@ function getStreamValues( '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', ); - return { + const streamedFieldGroup: FieldGroup = { + fields: fieldGroup.fields.map((fieldDetails) => ({ + node: fieldDetails.node, + target: undefined, + })), + targets: NON_DEFERRED_TARGET_SET, + }; + + const streamUsage = { initialCount: stream.initialCount, label: typeof stream.label === 'string' ? stream.label : undefined, + fieldGroup: streamedFieldGroup, }; -} + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage = + streamUsage; + + return streamUsage; +} /** * Complete a async iterator value by completing the result and calling * recursively until all the results are completed. @@ -906,29 +974,35 @@ async function completeAsyncIteratorValue( path: Path, asyncIterator: AsyncIterator, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): Promise> { - const stream = getStreamValues(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); let containsPromise = false; const completedResults: Array = []; let index = 0; // eslint-disable-next-line no-constant-condition while (true) { - if ( - stream && - typeof stream.initialCount === 'number' && - index >= stream.initialCount - ) { + if (streamUsage && index >= streamUsage.initialCount) { + const earlyReturn = asyncIterator.return; + const streamRecord = new StreamRecord({ + label: streamUsage.label, + path, + earlyReturn: + earlyReturn === undefined + ? undefined + : earlyReturn.bind(asyncIterator), + }); // eslint-disable-next-line @typescript-eslint/no-floating-promises executeStreamAsyncIterator( index, asyncIterator, exeContext, - fieldGroup, + streamUsage.fieldGroup, info, itemType, path, incrementalDataRecord, - stream.label, + streamRecord, ); break; } @@ -942,7 +1016,7 @@ async function completeAsyncIteratorValue( break; } } catch (rawError) { - throw locatedError(rawError, fieldGroup, pathToArray(path)); + throw locatedError(rawError, toNodes(fieldGroup), pathToArray(path)); } if ( @@ -955,6 +1029,7 @@ async function completeAsyncIteratorValue( info, itemPath, incrementalDataRecord, + deferMap, ) ) { containsPromise = true; @@ -976,6 +1051,7 @@ function completeListValue( path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { const itemType = returnType.ofType; @@ -990,6 +1066,7 @@ function completeListValue( path, asyncIterator, incrementalDataRecord, + deferMap, ); } @@ -999,34 +1076,34 @@ function completeListValue( ); } - const stream = getStreamValues(exeContext, fieldGroup, path); + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); // This is specified as a simple map, however we're optimizing the path // where the list contains no Promises by avoiding creating another Promise. 
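
completeListValue below applies to synchronous lists the same initialCount split that completeAsyncIteratorValue above applies to async iterators: items before initialCount are completed inline, the remainder are emitted later as stream items addressed by the list path (no index), as in the test expectations above. A standalone sketch of that split, with a hypothetical splitStream helper that is not part of this module:

// Illustration of the @stream(initialCount: n) split for a plain array.
function splitStream<T>(
  items: ReadonlyArray<T>,
  initialCount: number,
): { initial: ReadonlyArray<T>; streamed: ReadonlyArray<T> } {
  return {
    initial: items.slice(0, initialCount),
    streamed: items.slice(initialCount), // emitted later as StreamItemsRecords
  };
}

const { initial, streamed } = splitStream(['Luke', 'Han', 'Leia'], 1);
console.log(initial); // ['Luke'] -- appears in the initial result
console.log(streamed); // ['Han', 'Leia'] -- arrive as incremental items at the list path
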
let containsPromise = false; - let previousIncrementalDataRecord = incrementalDataRecord; + let currentParents = incrementalDataRecord; const completedResults: Array = []; let index = 0; + let streamRecord: StreamRecord | undefined; for (const item of result) { // No need to modify the info object containing the path, // since from here on it is not ever accessed by resolver functions. const itemPath = addPath(path, index, undefined); - if ( - stream && - typeof stream.initialCount === 'number' && - index >= stream.initialCount - ) { - previousIncrementalDataRecord = executeStreamField( + if (streamUsage && index >= streamUsage.initialCount) { + if (streamRecord === undefined) { + streamRecord = new StreamRecord({ label: streamUsage.label, path }); + } + currentParents = executeStreamField( path, itemPath, item, exeContext, - fieldGroup, + streamUsage.fieldGroup, info, itemType, - previousIncrementalDataRecord, - stream.label, + currentParents, + streamRecord, ); index++; continue; @@ -1042,6 +1119,7 @@ function completeListValue( info, itemPath, incrementalDataRecord, + deferMap, ) ) { containsPromise = true; @@ -1050,6 +1128,12 @@ function completeListValue( index++; } + if (streamRecord !== undefined) { + exeContext.incrementalPublisher.setIsFinalRecord( + currentParents as StreamItemsRecord, + ); + } + return containsPromise ? Promise.all(completedResults) : completedResults; } @@ -1067,6 +1151,7 @@ function completeListItemValue( info: GraphQLResolveInfo, itemPath: Path, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): boolean { if (isPromise(item)) { completedResults.push( @@ -1078,6 +1163,7 @@ function completeListItemValue( itemPath, item, incrementalDataRecord, + deferMap, ), ); @@ -1093,6 +1179,7 @@ function completeListItemValue( itemPath, item, incrementalDataRecord, + deferMap, ); if (isPromise(completedItem)) { @@ -1166,6 +1253,7 @@ function completeAbstractValue( path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver; const contextValue = exeContext.contextValue; @@ -1188,6 +1276,7 @@ function completeAbstractValue( path, result, incrementalDataRecord, + deferMap, ), ); } @@ -1207,6 +1296,7 @@ function completeAbstractValue( path, result, incrementalDataRecord, + deferMap, ); } @@ -1221,7 +1311,7 @@ function ensureValidRuntimeType( if (runtimeTypeName == null) { throw new GraphQLError( `Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". 
Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1244,21 +1334,21 @@ function ensureValidRuntimeType( if (runtimeType == null) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } if (!isObjectType(runtimeType)) { throw new GraphQLError( `Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } if (!exeContext.schema.isSubType(returnType, runtimeType)) { throw new GraphQLError( `Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1276,6 +1366,7 @@ function completeObjectValue( path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { // If there is an isTypeOf predicate function, call it with the // current result. If isTypeOf returns false, then raise an error rather @@ -1295,6 +1386,7 @@ function completeObjectValue( path, result, incrementalDataRecord, + deferMap, ); }); } @@ -1311,6 +1403,7 @@ function completeObjectValue( path, result, incrementalDataRecord, + deferMap, ); } @@ -1321,7 +1414,96 @@ function invalidReturnTypeError( ): GraphQLError { return new GraphQLError( `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, + ); +} + +function addNewDeferredFragments( + incrementalPublisher: IncrementalPublisher, + newDeferUsages: ReadonlyArray, + incrementalDataRecord: IncrementalDataRecord, + deferMap?: ReadonlyMap, + path?: Path | undefined, +): ReadonlyMap { + let newDeferMap; + if (newDeferUsages.length === 0) { + newDeferMap = deferMap ?? new Map(); + } else { + newDeferMap = + deferMap === undefined + ? new Map() + : new Map(deferMap); + for (const deferUsage of newDeferUsages) { + const parentDeferUsage = deferUsage.ancestors[0]; + + const parent = + parentDeferUsage === undefined + ? 
(incrementalDataRecord as InitialResultRecord | StreamItemsRecord) + : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap); + + const deferredFragmentRecord = new DeferredFragmentRecord({ + path, + label: deferUsage.label, + }); + + incrementalPublisher.reportNewDeferFragmentRecord( + deferredFragmentRecord, + parent, + ); + + newDeferMap.set(deferUsage, deferredFragmentRecord); + } + } + + return newDeferMap; +} + +function deferredFragmentRecordFromDeferUsage( + deferUsage: DeferUsage, + deferMap: ReadonlyMap, +): DeferredFragmentRecord { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return deferMap.get(deferUsage)!; +} + +function addNewDeferredGroupedFieldSets( + incrementalPublisher: IncrementalPublisher, + newGroupedFieldSetDetails: Map, + deferMap: ReadonlyMap, + path?: Path | undefined, +): ReadonlyArray { + const newDeferredGroupedFieldSetRecords: Array = + []; + + for (const [ + newGroupedFieldSetDeferUsages, + { groupedFieldSet, shouldInitiateDefer }, + ] of newGroupedFieldSetDetails) { + const deferredFragmentRecords = getDeferredFragmentRecords( + newGroupedFieldSetDeferUsages, + deferMap, + ); + const deferredGroupedFieldSetRecord = new DeferredGroupedFieldSetRecord({ + path, + deferredFragmentRecords, + groupedFieldSet, + shouldInitiateDefer, + }); + incrementalPublisher.reportNewDeferredGroupedFieldSetRecord( + deferredGroupedFieldSetRecord, + ); + newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord); + } + + return newDeferredGroupedFieldSetRecords; +} + +function getDeferredFragmentRecords( + deferUsages: DeferUsageSet, + deferMap: ReadonlyMap, +): ReadonlyArray { + return Array.from(deferUsages).map((deferUsage) => + deferredFragmentRecordFromDeferUsage(deferUsage, deferMap), ); } @@ -1332,32 +1514,47 @@ function collectAndExecuteSubfields( path: Path, result: unknown, incrementalDataRecord: IncrementalDataRecord, + deferMap: ReadonlyMap, ): PromiseOrValue> { // Collect sub-fields to execute to complete this value. 
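
addNewDeferredFragments above resolves each new DeferUsage's parent through ancestors[0]: a top-level defer reports to the initial (or stream items) record, while a nested defer reports to the record already created for its parent usage earlier in the same pass. A minimal standalone sketch of that parent lookup, with simplified Usage/FragmentRec shapes standing in for the publisher's record classes:

// Simplified stand-ins for DeferUsage / DeferredFragmentRecord.
interface Usage {
  label: string;
  ancestors: ReadonlyArray<Usage | undefined>; // [parent, grandparent, ..., undefined]
}
interface FragmentRec {
  label: string | undefined;
  children: Array<FragmentRec>;
}

function buildDeferMap(
  newUsages: ReadonlyArray<Usage>,
  rootRecord: FragmentRec,
  deferMap: Map<Usage, FragmentRec> = new Map(),
): Map<Usage, FragmentRec> {
  for (const usage of newUsages) {
    const parentUsage = usage.ancestors[0];
    const parentRecord =
      parentUsage === undefined ? rootRecord : deferMap.get(parentUsage)!;
    const record: FragmentRec = { label: usage.label, children: [] };
    parentRecord.children.push(record); // analogous to reportNewDeferFragmentRecord
    deferMap.set(usage, record);
  }
  return deferMap;
}

// Usage: an outer defer 'A' with a nested defer 'B'.
const a: Usage = { label: 'A', ancestors: [undefined] };
const b: Usage = { label: 'B', ancestors: [a, undefined] };
const root: FragmentRec = { label: undefined, children: [] };
const map = buildDeferMap([a, b], root);
console.log(map.get(b)!.label); // 'B'
console.log(root.children[0].children[0].label); // 'B', parented under the record for 'A'
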
- const { groupedFieldSet: subGroupedFieldSet, patches: subPatches } = + const { groupedFieldSet, newGroupedFieldSetDetails, newDeferUsages } = collectSubfields(exeContext, returnType, fieldGroup); + const incrementalPublisher = exeContext.incrementalPublisher; + + const newDeferMap = addNewDeferredFragments( + incrementalPublisher, + newDeferUsages, + incrementalDataRecord, + deferMap, + path, + ); + + const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( + incrementalPublisher, + newGroupedFieldSetDetails, + newDeferMap, + path, + ); + const subFields = executeFields( exeContext, returnType, result, path, - subGroupedFieldSet, + groupedFieldSet, incrementalDataRecord, + newDeferMap, ); - for (const subPatch of subPatches) { - const { label, groupedFieldSet: subPatchGroupedFieldSet } = subPatch; - executeDeferredFragment( - exeContext, - returnType, - result, - subPatchGroupedFieldSet, - incrementalDataRecord, - label, - path, - ); - } + executeDeferredGroupedFieldSets( + exeContext, + returnType, + result, + path, + newDeferredGroupedFieldSetRecords, + newDeferMap, + ); return subFields; } @@ -1583,15 +1780,18 @@ function executeSubscription( operation, ); - const firstRootField = groupedFieldSet.entries().next().value; + const firstRootField = groupedFieldSet.entries().next().value as [ + string, + FieldGroup, + ]; const [responseName, fieldGroup] = firstRootField; - const fieldName = fieldGroup[0].name.value; + const fieldName = fieldGroup.fields[0].node.name.value; const fieldDef = schema.getField(rootType, fieldName); if (!fieldDef) { throw new GraphQLError( `The subscription field "${fieldName}" is not defined.`, - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1610,7 +1810,11 @@ function executeSubscription( // Build a JS object of arguments from the field.arguments AST, using the // variables scope to fulfill any variable references. - const args = getArgumentValues(fieldDef, fieldGroup[0], variableValues); + const args = getArgumentValues( + fieldDef, + fieldGroup.fields[0].node, + variableValues, + ); // The resolve function's optional third argument is a context value that // is provided to every resolve function within an execution. 
It is commonly @@ -1624,13 +1828,13 @@ function executeSubscription( if (isPromise(result)) { return result.then(assertEventStream).then(undefined, (error) => { - throw locatedError(error, fieldGroup, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); }); } return assertEventStream(result); } catch (error) { - throw locatedError(error, fieldGroup, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); } } @@ -1650,60 +1854,84 @@ function assertEventStream(result: unknown): AsyncIterable { return result; } -function executeDeferredFragment( +function executeDeferredGroupedFieldSets( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, - fields: GroupedFieldSet, - parentContext: IncrementalDataRecord, - label?: string, - path?: Path, + path: Path | undefined, + newDeferredGroupedFieldSetRecords: ReadonlyArray, + deferMap: ReadonlyMap, ): void { - const incrementalPublisher = exeContext.incrementalPublisher; - const incrementalDataRecord = - incrementalPublisher.prepareNewDeferredFragmentRecord({ - label, + for (const deferredGroupedFieldSetRecord of newDeferredGroupedFieldSetRecords) { + if (deferredGroupedFieldSetRecord.shouldInitiateDefer) { + // eslint-disable-next-line @typescript-eslint/no-floating-promises + Promise.resolve().then(() => + executeDeferredGroupedFieldSet( + exeContext, + parentType, + sourceValue, + path, + deferredGroupedFieldSetRecord, + deferMap, + ), + ); + continue; + } + + executeDeferredGroupedFieldSet( + exeContext, + parentType, + sourceValue, path, - parentContext, - }); + deferredGroupedFieldSetRecord, + deferMap, + ); + } +} - let promiseOrData; +function executeDeferredGroupedFieldSet( + exeContext: ExecutionContext, + parentType: GraphQLObjectType, + sourceValue: unknown, + path: Path | undefined, + deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, + deferMap: ReadonlyMap, +): void { try { - promiseOrData = executeFields( + const incrementalResult = executeFields( exeContext, parentType, sourceValue, path, - fields, - incrementalDataRecord, + deferredGroupedFieldSetRecord.groupedFieldSet, + deferredGroupedFieldSetRecord, + deferMap, ); - if (isPromise(promiseOrData)) { - promiseOrData = promiseOrData.then( + if (isPromise(incrementalResult)) { + incrementalResult.then( (resolved) => - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, + exeContext.incrementalPublisher.completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, resolved, ), - (e) => { - incrementalPublisher.addFieldError(incrementalDataRecord, e); - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - null, - ); - }, - ); - } else { - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - promiseOrData, + (error) => + exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + error, + ), ); + return; } - } catch (e) { - incrementalPublisher.addFieldError(incrementalDataRecord, e); - incrementalPublisher.completeDeferredFragmentRecord( - incrementalDataRecord, - null, + + exeContext.incrementalPublisher.completeDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + incrementalResult, + ); + } catch (error) { + exeContext.incrementalPublisher.markErroredDeferredGroupedFieldSet( + deferredGroupedFieldSetRecord, + error, ); } } @@ -1716,16 +1944,18 @@ function executeStreamField( fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: 
GraphQLOutputType, - parentContext: IncrementalDataRecord, - label?: string, -): SubsequentDataRecord { + incrementalDataRecord: IncrementalDataRecord, + streamRecord: StreamRecord, +): StreamItemsRecord { const incrementalPublisher = exeContext.incrementalPublisher; - const incrementalDataRecord = - incrementalPublisher.prepareNewStreamItemsRecord({ - label, - path: itemPath, - parentContext, - }); + const streamItemsRecord = new StreamItemsRecord({ + streamRecord, + path: itemPath, + }); + incrementalPublisher.reportNewStreamItemsRecord( + streamItemsRecord, + incrementalDataRecord, + ); if (isPromise(item)) { completePromisedValue( @@ -1735,24 +1965,23 @@ function executeStreamField( info, itemPath, item, - incrementalDataRecord, + streamItemsRecord, + new Map(), ).then( (value) => - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ value, ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); - return null; }, ); - return incrementalDataRecord; + return streamItemsRecord; } let completedItem: PromiseOrValue; @@ -1765,7 +1994,8 @@ function executeStreamField( info, itemPath, item, - incrementalDataRecord, + streamItemsRecord, + new Map(), ); } catch (rawError) { handleFieldError( @@ -1774,16 +2004,15 @@ function executeStreamField( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); completedItem = null; - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + incrementalPublisher.filter(itemPath, streamItemsRecord); } } catch (error) { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, null); - return incrementalDataRecord; + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord(streamItemsRecord, error); + return streamItemsRecord; } if (isPromise(completedItem)) { @@ -1795,34 +2024,32 @@ function executeStreamField( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + incrementalPublisher.filter(itemPath, streamItemsRecord); return null; }) .then( (value) => - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - [value], - ), + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ + value, + ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); }, ); - return incrementalDataRecord; + return streamItemsRecord; } - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ completedItem, ]); - return incrementalDataRecord; + return streamItemsRecord; } async function executeStreamAsyncIteratorItem( @@ -1831,23 
+2058,28 @@ async function executeStreamAsyncIteratorItem( fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: GraphQLOutputType, - incrementalDataRecord: StreamItemsRecord, - path: Path, + streamItemsRecord: StreamItemsRecord, itemPath: Path, ): Promise> { let item; try { - const { value, done } = await asyncIterator.next(); - - if (done) { + const iteration = await asyncIterator.next(); + if (streamItemsRecord.streamRecord.errors.length > 0) { + return { done: true, value: undefined }; + } + if (iteration.done) { exeContext.incrementalPublisher.setIsCompletedAsyncIterator( - incrementalDataRecord, + streamItemsRecord, ); return { done: true, value: undefined }; } - item = value; + item = iteration.value; } catch (rawError) { - throw locatedError(rawError, fieldGroup, pathToArray(path)); + throw locatedError( + rawError, + toNodes(fieldGroup), + streamItemsRecord.streamRecord.path, + ); } let completedItem; try { @@ -1858,7 +2090,8 @@ async function executeStreamAsyncIteratorItem( info, itemPath, item, - incrementalDataRecord, + streamItemsRecord, + new Map(), ); if (isPromise(completedItem)) { @@ -1869,9 +2102,9 @@ async function executeStreamAsyncIteratorItem( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord); return null; }); } @@ -1883,9 +2116,9 @@ async function executeStreamAsyncIteratorItem( itemType, fieldGroup, itemPath, - incrementalDataRecord, + streamItemsRecord, ); - exeContext.incrementalPublisher.filter(itemPath, incrementalDataRecord); + exeContext.incrementalPublisher.filter(itemPath, streamItemsRecord); return { done: false, value: null }; } } @@ -1898,22 +2131,23 @@ async function executeStreamAsyncIterator( info: GraphQLResolveInfo, itemType: GraphQLOutputType, path: Path, - parentContext: IncrementalDataRecord, - label?: string, + incrementalDataRecord: IncrementalDataRecord, + streamRecord: StreamRecord, ): Promise { const incrementalPublisher = exeContext.incrementalPublisher; let index = initialIndex; - let previousIncrementalDataRecord = parentContext; + let currentIncrementalDataRecord = incrementalDataRecord; // eslint-disable-next-line no-constant-condition while (true) { const itemPath = addPath(path, index, undefined); - const incrementalDataRecord = - incrementalPublisher.prepareNewStreamItemsRecord({ - label, - path: itemPath, - parentContext: previousIncrementalDataRecord, - asyncIterator, - }); + const streamItemsRecord = new StreamItemsRecord({ + streamRecord, + path: itemPath, + }); + incrementalPublisher.reportNewStreamItemsRecord( + streamItemsRecord, + currentIncrementalDataRecord, + ); let iteration; try { @@ -1924,23 +2158,15 @@ async function executeStreamAsyncIterator( fieldGroup, info, itemType, - incrementalDataRecord, - path, + streamItemsRecord, itemPath, ); } catch (error) { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); - // entire stream has errored and bubbled upwards - if (asyncIterator?.return) { - asyncIterator.return().catch(() => { - // ignore errors - }); - } return; } @@ -1949,21 +2175,19 @@ async function executeStreamAsyncIterator( if 
(isPromise(completedItem)) { completedItem.then( (value) => - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - [value], - ), + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ + value, + ]), (error) => { - incrementalPublisher.addFieldError(incrementalDataRecord, error); - incrementalPublisher.filter(path, incrementalDataRecord); - incrementalPublisher.completeStreamItemsRecord( - incrementalDataRecord, - null, + incrementalPublisher.filter(path, streamItemsRecord); + incrementalPublisher.markErroredStreamItemsRecord( + streamItemsRecord, + error, ); }, ); } else { - incrementalPublisher.completeStreamItemsRecord(incrementalDataRecord, [ + incrementalPublisher.completeStreamItemsRecord(streamItemsRecord, [ completedItem, ]); } @@ -1971,7 +2195,7 @@ async function executeStreamAsyncIterator( if (done) { break; } - previousIncrementalDataRecord = incrementalDataRecord; + currentIncrementalDataRecord = streamItemsRecord; index++; } } diff --git a/src/jsutils/OrderedSet.ts b/src/jsutils/OrderedSet.ts new file mode 100644 index 0000000000..3cb97977bb --- /dev/null +++ b/src/jsutils/OrderedSet.ts @@ -0,0 +1,93 @@ +const setContainingUndefined = new Set([undefined]); +const setsContainingOneItem = new WeakMap>(); +const setsAppendedByUndefined = new WeakMap< + ReadonlySet, + Set +>(); +const setsAppendedByDefined = new WeakMap< + ReadonlySet, + WeakMap> +>(); + +function createOrderedSet( + item: T, +): ReadonlySet { + if (item === undefined) { + return setContainingUndefined; + } + + let set = setsContainingOneItem.get(item); + if (set === undefined) { + set = new Set([item]); + set.add(item); + setsContainingOneItem.set(item, set); + } + return set as ReadonlyOrderedSet; +} + +function appendToOrderedSet( + set: ReadonlySet, + item: T | undefined, +): ReadonlySet { + if (set.has(item)) { + return set; + } + + if (item === undefined) { + let appendedSet = setsAppendedByUndefined.get(set); + if (appendedSet === undefined) { + appendedSet = new Set(set); + appendedSet.add(undefined); + setsAppendedByUndefined.set(set, appendedSet); + } + return appendedSet as ReadonlySet; + } + + let appendedSets = setsAppendedByDefined.get(set); + if (appendedSets === undefined) { + appendedSets = new WeakMap(); + setsAppendedByDefined.set(set, appendedSets); + const appendedSet = new Set(set); + appendedSet.add(item); + appendedSets.set(item, appendedSet); + return appendedSet as ReadonlySet; + } + + let appendedSet: Set | undefined = appendedSets.get(item); + if (appendedSet === undefined) { + appendedSet = new Set(set); + appendedSet.add(item); + appendedSets.set(item, appendedSet); + } + + return appendedSet as ReadonlySet; +} + +export type ReadonlyOrderedSet = ReadonlySet; + +const emptySet = new Set(); + +/** + * A set that when frozen can be directly compared for equality. + * + * Sets are limited to JSON serializable values. 
+ * + * @internal + */ +export class OrderedSet { + _set: ReadonlySet = emptySet as ReadonlySet; + constructor(items: Iterable) { + for (const item of items) { + if (this._set === emptySet) { + this._set = createOrderedSet(item); + continue; + } + + this._set = appendToOrderedSet(this._set, item); + } + } + + freeze(): ReadonlyOrderedSet { + return this._set as ReadonlyOrderedSet; + } +} diff --git a/src/jsutils/__tests__/OrderedSet-test.ts b/src/jsutils/__tests__/OrderedSet-test.ts new file mode 100644 index 0000000000..445053a32a --- /dev/null +++ b/src/jsutils/__tests__/OrderedSet-test.ts @@ -0,0 +1,34 @@ +import { expect } from 'chai'; +import { describe, it } from 'mocha'; + +import { OrderedSet } from '../OrderedSet.js'; + +describe('OrderedSet', () => { + it('empty sets are equal', () => { + const orderedSetA = new OrderedSet([]).freeze(); + const orderedSetB = new OrderedSet([]).freeze(); + + expect(orderedSetA).to.equal(orderedSetB); + }); + + it('sets with members in different orders or numbers are equal', () => { + const a = { a: 'a' }; + const b = { b: 'b' }; + const c = { c: 'c' }; + const orderedSetA = new OrderedSet([a, b, c, a, undefined]).freeze(); + const orderedSetB = new OrderedSet([undefined, b, a, b, c]).freeze(); + + expect(orderedSetA).to.not.equal(orderedSetB); + }); + + it('sets with members in different orders or numbers are equal', () => { + const a = { a: 'a' }; + const b = { b: 'b' }; + const c = { c: 'c' }; + const d = { c: 'd' }; + const orderedSetA = new OrderedSet([a, b, c, a, undefined]).freeze(); + const orderedSetB = new OrderedSet([undefined, b, a, b, d]).freeze(); + + expect(orderedSetA).to.not.equal(orderedSetB); + }); +}); diff --git a/src/validation/rules/SingleFieldSubscriptionsRule.ts b/src/validation/rules/SingleFieldSubscriptionsRule.ts index c6cd93ab58..c0d1031103 100644 --- a/src/validation/rules/SingleFieldSubscriptionsRule.ts +++ b/src/validation/rules/SingleFieldSubscriptionsRule.ts @@ -3,16 +3,22 @@ import type { ObjMap } from '../../jsutils/ObjMap.js'; import { GraphQLError } from '../../error/GraphQLError.js'; import type { + FieldNode, FragmentDefinitionNode, OperationDefinitionNode, } from '../../language/ast.js'; import { Kind } from '../../language/kinds.js'; import type { ASTVisitor } from '../../language/visitor.js'; +import type { FieldGroup } from '../../execution/collectFields.js'; import { collectFields } from '../../execution/collectFields.js'; import type { ValidationContext } from '../ValidationContext.js'; +function toNodes(fieldGroup: FieldGroup): ReadonlyArray { + return fieldGroup.fields.map((fieldDetails) => fieldDetails.node); +} + /** * Subscriptions must only include a non-introspection field. 
* @@ -49,9 +55,11 @@ export function SingleFieldSubscriptionsRule( node, ); if (groupedFieldSet.size > 1) { - const fieldSelectionLists = [...groupedFieldSet.values()]; - const extraFieldSelectionLists = fieldSelectionLists.slice(1); - const extraFieldSelections = extraFieldSelectionLists.flat(); + const fieldGroups = [...groupedFieldSet.values()]; + const extraFieldGroups = fieldGroups.slice(1); + const extraFieldSelections = extraFieldGroups.flatMap( + (fieldGroup) => toNodes(fieldGroup), + ); context.reportError( new GraphQLError( operationName != null @@ -62,14 +70,14 @@ export function SingleFieldSubscriptionsRule( ); } for (const fieldGroup of groupedFieldSet.values()) { - const fieldName = fieldGroup[0].name.value; + const fieldName = toNodes(fieldGroup)[0].name.value; if (fieldName.startsWith('__')) { context.reportError( new GraphQLError( operationName != null ? `Subscription "${operationName}" must not select an introspection top level field.` : 'Anonymous Subscription must not select an introspection top level field.', - { nodes: fieldGroup }, + { nodes: toNodes(fieldGroup) }, ), ); } From 6b6f344602f88734d7f1db1da4b17c736be6d8a3 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Mon, 15 May 2023 12:57:58 +0300 Subject: [PATCH 2/6] add pending notifications --- src/execution/IncrementalPublisher.ts | 56 +++++++++++++++- src/execution/__tests__/defer-test.ts | 82 +++++++++++++++++++++++ src/execution/__tests__/mutations-test.ts | 2 + src/execution/__tests__/stream-test.ts | 52 ++++++++++++++ 4 files changed, 190 insertions(+), 2 deletions(-) diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index 8e40c90819..9dc4bc957a 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -11,6 +11,7 @@ import type { import type { GroupedFieldSet } from './collectFields.js'; interface IncrementalUpdate> { + pending: ReadonlyArray; incremental: ReadonlyArray>; completed: ReadonlyArray; } @@ -59,6 +60,7 @@ export interface InitialIncrementalExecutionResult< TExtensions = ObjMap, > extends ExecutionResult { data: TData; + pending: ReadonlyArray; hasNext: true; extensions?: TExtensions; } @@ -68,6 +70,7 @@ export interface FormattedInitialIncrementalExecutionResult< TExtensions = ObjMap, > extends FormattedExecutionResult { data: TData; + pending: ReadonlyArray; hasNext: boolean; extensions?: TExtensions; } @@ -85,6 +88,7 @@ export interface FormattedSubsequentIncrementalExecutionResult< TExtensions = ObjMap, > { hasNext: boolean; + pending?: ReadonlyArray; incremental?: ReadonlyArray>; completed?: ReadonlyArray; extensions?: TExtensions; @@ -141,6 +145,11 @@ export type FormattedIncrementalResult< | FormattedIncrementalDeferResult | FormattedIncrementalStreamResult; +export interface PendingResult { + path: ReadonlyArray; + label?: string; +} + export interface CompletedResult { path: ReadonlyArray; label?: string; @@ -296,10 +305,20 @@ export class IncrementalPublisher { const errors = initialResultRecord.errors; const initialResult = errors.length === 0 ? { data } : { errors, data }; - if (this._pending.size > 0) { + const pending = this._pending; + if (pending.size > 0) { + const pendingSources = new Set(); + for (const subsequentResultRecord of pending) { + const pendingSource = isStreamItemsRecord(subsequentResultRecord) + ? 
subsequentResultRecord.streamRecord + : subsequentResultRecord; + pendingSources.add(pendingSource); + } + return { initialResult: { ...initialResult, + pending: this.pendingSourcesToResults(pendingSources), hasNext: true, }, subsequentResults: this._subscribe(), @@ -347,6 +366,23 @@ export class IncrementalPublisher { }); } + pendingSourcesToResults( + pendingSources: ReadonlySet, + ): Array { + const pendingResults: Array = []; + for (const pendingSource of pendingSources) { + pendingSource.pendingSent = true; + const pendingResult: PendingResult = { + path: pendingSource.path, + }; + if (pendingSource.label !== undefined) { + pendingResult.label = pendingSource.label; + } + pendingResults.push(pendingResult); + } + return pendingResults; + } + private _subscribe(): AsyncGenerator< SubsequentIncrementalExecutionResult, void, @@ -461,7 +497,8 @@ export class IncrementalPublisher { private _getIncrementalResult( completedRecords: ReadonlySet, ): SubsequentIncrementalExecutionResult | undefined { - const { incremental, completed } = this._processPending(completedRecords); + const { pending, incremental, completed } = + this._processPending(completedRecords); const hasNext = this._pending.size > 0; if (incremental.length === 0 && completed.length === 0 && hasNext) { @@ -469,6 +506,9 @@ export class IncrementalPublisher { } const result: SubsequentIncrementalExecutionResult = { hasNext }; + if (pending.length) { + result.pending = pending; + } if (incremental.length) { result.incremental = incremental; } @@ -482,6 +522,7 @@ export class IncrementalPublisher { private _processPending( completedRecords: ReadonlySet, ): IncrementalUpdate { + const newPendingSources = new Set(); const incrementalResults: Array = []; const completedResults: Array = []; for (const subsequentResultRecord of completedRecords) { @@ -489,10 +530,17 @@ export class IncrementalPublisher { if (child.filtered) { continue; } + const pendingSource = isStreamItemsRecord(child) + ? 
child.streamRecord + : child; + if (!pendingSource.pendingSent) { + newPendingSources.add(pendingSource); + } this._publish(child); } if (isStreamItemsRecord(subsequentResultRecord)) { if (subsequentResultRecord.isFinalRecord) { + newPendingSources.delete(subsequentResultRecord.streamRecord); completedResults.push( this._completedRecordToResult(subsequentResultRecord.streamRecord), ); @@ -513,6 +561,7 @@ export class IncrementalPublisher { } incrementalResults.push(incrementalResult); } else { + newPendingSources.delete(subsequentResultRecord); completedResults.push( this._completedRecordToResult(subsequentResultRecord), ); @@ -537,6 +586,7 @@ export class IncrementalPublisher { } return { + pending: this.pendingSourcesToResults(newPendingSources), incremental: incrementalResults, completed: completedResults, }; @@ -690,6 +740,7 @@ export class DeferredFragmentRecord { deferredGroupedFieldSetRecords: Set; errors: Array; filtered: boolean; + pendingSent?: boolean; _pending: Set; constructor(opts: { path: Path | undefined; label: string | undefined }) { @@ -709,6 +760,7 @@ export class StreamRecord { path: ReadonlyArray; errors: Array; earlyReturn?: (() => Promise) | undefined; + pendingSent?: boolean; constructor(opts: { label: string | undefined; path: Path; diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index 7fca565f31..33c310523b 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -176,6 +176,7 @@ describe('Execute: defer directive', () => { id: '1', }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -231,6 +232,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -261,6 +263,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -302,6 +305,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -351,6 +355,10 @@ describe('Execute: defer directive', () => { data: { hero: {}, }, + pending: [ + { path: ['hero'], label: 'DeferTop' }, + { path: ['hero'], label: 'DeferNested' }, + ], hasNext: true, }, { @@ -396,6 +404,7 @@ describe('Execute: defer directive', () => { name: 'Luke', }, }, + pending: [{ path: ['hero'], label: 'DeferTop' }], hasNext: true, }, { @@ -424,6 +433,7 @@ describe('Execute: defer directive', () => { name: 'Luke', }, }, + pending: [{ path: ['hero'], label: 'DeferTop' }], hasNext: true, }, { @@ -449,6 +459,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ path: ['hero'], label: 'InlineDeferred' }], hasNext: true, }, { @@ -478,6 +489,7 @@ describe('Execute: defer directive', () => { data: { hero: {}, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -506,6 +518,10 @@ describe('Execute: defer directive', () => { data: { hero: {}, }, + pending: [ + { path: ['hero'], label: 'DeferID' }, + { path: ['hero'], label: 'DeferName' }, + ], hasNext: true, }, { @@ -551,6 +567,10 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: {}, + pending: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], hasNext: true, }, { @@ -601,6 +621,10 @@ describe('Execute: defer directive', () => { 
expectJSON(result).toDeepEqual([ { data: {}, + pending: [ + { path: [], label: 'DeferID' }, + { path: [], label: 'DeferName' }, + ], hasNext: true, }, { @@ -648,6 +672,10 @@ describe('Execute: defer directive', () => { data: { hero: {}, }, + pending: [ + { path: [], label: 'DeferName' }, + { path: ['hero'], label: 'DeferID' }, + ], hasNext: true, }, { @@ -691,9 +719,11 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ path: [], label: 'DeferName' }], hasNext: true, }, { + pending: [{ path: ['hero'], label: 'DeferID' }], incremental: [ { data: { @@ -753,6 +783,20 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { friends: [{}, {}, {}] } }, + pending: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + { path: ['hero', 'friends', 2] }, + ], hasNext: true, }, { @@ -831,6 +875,7 @@ describe('Execute: defer directive', () => { }, }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -872,9 +917,11 @@ describe('Execute: defer directive', () => { data: { hero: {}, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { + pending: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], incremental: [ { data: { @@ -954,9 +1001,11 @@ describe('Execute: defer directive', () => { }, }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { + pending: [{ path: ['hero', 'nestedObject'] }], incremental: [ { data: { bar: 'bar' }, @@ -967,6 +1016,7 @@ describe('Execute: defer directive', () => { hasNext: true, }, { + pending: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], incremental: [ { data: { baz: 'baz' }, @@ -1025,9 +1075,14 @@ describe('Execute: defer directive', () => { }, }, }, + pending: [ + { path: ['hero'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, + ], hasNext: true, }, { + pending: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], incremental: [ { data: { @@ -1106,6 +1161,7 @@ describe('Execute: defer directive', () => { }, }, }, + pending: [{ path: [] }, { path: ['a', 'b'] }], hasNext: true, }, { @@ -1157,6 +1213,7 @@ describe('Execute: defer directive', () => { data: { a: {}, }, + pending: [{ path: [] }, { path: ['a'] }], hasNext: true, }, { @@ -1224,6 +1281,7 @@ describe('Execute: defer directive', () => { data: { a: {}, }, + pending: [{ path: [] }, { path: ['a'] }], hasNext: true, }, { @@ -1299,6 +1357,7 @@ describe('Execute: defer directive', () => { data: { a: {}, }, + pending: [{ path: [] }, { path: ['a'] }], hasNext: true, }, { @@ -1388,6 +1447,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ path: [] }], hasNext: true, }, { @@ -1436,6 +1496,7 @@ describe('Execute: defer directive', () => { friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1471,6 +1532,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { friends: [{ name: 'Han' }] } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1507,6 +1569,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { friends: [] } }, + 
pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1539,6 +1602,7 @@ describe('Execute: defer directive', () => { friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1586,6 +1650,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { friends: [] } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1619,6 +1684,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { nestedObject: null } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1651,6 +1717,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { nestedObject: { name: 'foo' } } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1683,6 +1750,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1725,6 +1793,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1803,6 +1872,7 @@ describe('Execute: defer directive', () => { expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ path: ['hero'] }], hasNext: true, }, { @@ -1855,9 +1925,15 @@ describe('Execute: defer directive', () => { data: { hero: { id: '1' }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { + pending: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], incremental: [ { data: { name: 'slow', friends: [{}, {}, {}] }, @@ -1906,9 +1982,15 @@ describe('Execute: defer directive', () => { data: { hero: { id: '1' }, }, + pending: [{ path: ['hero'] }], hasNext: true, }, { + pending: [ + { path: ['hero', 'friends', 0] }, + { path: ['hero', 'friends', 1] }, + { path: ['hero', 'friends', 2] }, + ], incremental: [ { data: { diff --git a/src/execution/__tests__/mutations-test.ts b/src/execution/__tests__/mutations-test.ts index 64262ea020..13003f7d6b 100644 --- a/src/execution/__tests__/mutations-test.ts +++ b/src/execution/__tests__/mutations-test.ts @@ -237,6 +237,7 @@ describe('Execute: Handles mutation execution ordering', () => { first: {}, second: { theNumber: 2 }, }, + pending: [{ path: ['first'], label: 'defer-label' }], hasNext: true, }, { @@ -312,6 +313,7 @@ describe('Execute: Handles mutation execution ordering', () => { data: { second: { theNumber: 2 }, }, + pending: [{ path: [], label: 'defer-label' }], hasNext: true, }, { diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index be1e96be5a..12d4ddd43f 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -142,6 +142,7 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple'], }, + pending: [{ path: ['scalarList'] }], hasNext: true, }, { @@ -165,6 +166,7 @@ describe('Execute: stream directive', () => { data: { scalarList: [], }, + pending: [{ path: ['scalarList'] }], hasNext: true, }, { @@ -217,6 +219,7 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple'], }, + pending: [{ path: ['scalarList'], label: 'scalar-stream' }], hasNext: true, }, { @@ -261,6 +264,7 @@ describe('Execute: stream directive', () => { expectJSON(result).toDeepEqual([ { data: { scalarList: ['apple', 'banana'] }, + pending: [{ path: ['scalarList'] }], hasNext: 
true, }, { @@ -284,6 +288,7 @@ describe('Execute: stream directive', () => { data: { scalarListList: [['apple', 'apple', 'apple']], }, + pending: [{ path: ['scalarListList'] }], hasNext: true, }, { @@ -333,6 +338,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -369,6 +375,7 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -431,6 +438,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -480,6 +488,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }, null], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -517,6 +526,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -568,6 +578,7 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -627,6 +638,7 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -696,6 +708,7 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, }, @@ -769,6 +782,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -805,6 +819,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ name: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -851,6 +866,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ name: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -885,6 +901,7 @@ describe('Execute: stream directive', () => { data: { scalarList: ['Luke'], }, + pending: [{ path: ['scalarList'] }], hasNext: true, }, { @@ -928,6 +945,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -978,6 +996,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -1030,6 +1049,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -1069,6 +1089,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -1110,6 +1131,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -1167,6 +1189,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -1234,6 +1257,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -1311,6 +1335,7 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ nonNullName: 'Luke' 
}], }, + pending: [{ path: ['nonNullFriendList'] }], hasNext: true, }, { @@ -1426,6 +1451,10 @@ describe('Execute: stream directive', () => { otherNestedObject: {}, nestedObject: { nestedFriendList: [] }, }, + pending: [ + { path: ['otherNestedObject'] }, + { path: ['nestedObject', 'nestedFriendList'] }, + ], hasNext: true, }, { @@ -1485,6 +1514,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ path: ['nestedObject'] }], hasNext: true, }, { @@ -1537,6 +1567,7 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -1627,6 +1658,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ path: ['nestedObject'] }], hasNext: true, }); @@ -1688,6 +1720,7 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1', name: 'Luke' }], }, + pending: [{ path: ['friendList'] }], hasNext: true, }, { @@ -1747,6 +1780,10 @@ describe('Execute: stream directive', () => { nestedFriendList: [], }, }, + pending: [ + { path: ['nestedObject'] }, + { path: ['nestedObject', 'nestedFriendList'] }, + ], hasNext: true, }, { @@ -1811,6 +1848,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ path: ['nestedObject'] }], hasNext: true, }); @@ -1819,6 +1857,7 @@ describe('Execute: stream directive', () => { const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { + pending: [{ path: ['nestedObject', 'nestedFriendList'] }], incremental: [ { data: { scalarField: 'slow', nestedFriendList: [] }, @@ -1912,6 +1951,10 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { path: ['friendList', 0], label: 'DeferName' }, + { path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); @@ -1920,6 +1963,7 @@ describe('Execute: stream directive', () => { const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { + pending: [{ path: ['friendList', 1], label: 'DeferName' }], incremental: [ { data: { name: 'Luke' }, @@ -2008,6 +2052,10 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { path: ['friendList', 0], label: 'DeferName' }, + { path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); @@ -2016,6 +2064,7 @@ describe('Execute: stream directive', () => { const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { + pending: [{ path: ['friendList', 1], label: 'DeferName' }], incremental: [ { data: { name: 'Luke' }, @@ -2111,6 +2160,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ path: ['friendList', 0] }, { path: ['friendList'] }], hasNext: true, }); const returnPromise = iterator.return(); @@ -2166,6 +2216,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ path: ['friendList'] }], hasNext: true, }); @@ -2225,6 +2276,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ path: ['friendList', 0] }, { path: ['friendList'] }], hasNext: true, }); From e62ca02391e9cd1c5e185d7bd8162956130ce433 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Fri, 16 Jun 2023 11:57:32 +0300 Subject: [PATCH 3/6] add helpers --- src/execution/IncrementalPublisher.ts | 50 ++++++- src/execution/__tests__/defer-test.ts | 174 ++++++++++++++++++++++- src/execution/__tests__/executor-test.ts | 22 ++- src/execution/collectFields.ts | 52 ++++--- src/execution/execute.ts | 58 ++++++-- 
src/type/definition.ts | 18 ++- 6 files changed, 337 insertions(+), 37 deletions(-) diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index 9dc4bc957a..7c415f8b8e 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -615,12 +615,16 @@ export class IncrementalPublisher { } this._introduce(subsequentResultRecord); + subsequentResultRecord.publish(); return; } if (subsequentResultRecord._pending.size === 0) { this._push(subsequentResultRecord); } else { + for (const deferredGroupedFieldSetRecord of subsequentResultRecord.deferredGroupedFieldSetRecords) { + deferredGroupedFieldSetRecord.publish(); + } this._introduce(subsequentResultRecord); } } @@ -701,33 +705,56 @@ function isStreamItemsRecord( export class InitialResultRecord { errors: Array; children: Set; + priority: number; + deferPriority: number; + published: true; constructor() { this.errors = []; this.children = new Set(); + this.priority = 0; + this.deferPriority = 0; + this.published = true; } } /** @internal */ export class DeferredGroupedFieldSetRecord { path: ReadonlyArray; + priority: number; + deferPriority: number; deferredFragmentRecords: ReadonlyArray; groupedFieldSet: GroupedFieldSet; shouldInitiateDefer: boolean; errors: Array; data: ObjMap | undefined; + published: true | Promise; + publish: () => void; sent: boolean; constructor(opts: { path: Path | undefined; + priority: number; + deferPriority: number; deferredFragmentRecords: ReadonlyArray; groupedFieldSet: GroupedFieldSet; shouldInitiateDefer: boolean; }) { this.path = pathToArray(opts.path); + this.priority = opts.priority; + this.deferPriority = opts.deferPriority; this.deferredFragmentRecords = opts.deferredFragmentRecords; this.groupedFieldSet = opts.groupedFieldSet; this.shouldInitiateDefer = opts.shouldInitiateDefer; this.errors = []; + // promiseWithResolvers uses void only as a generic type parameter + // see: https://typescript-eslint.io/rules/no-invalid-void-type/ + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + const { promise: published, resolve } = promiseWithResolvers(); + this.published = published; + this.publish = () => { + resolve(); + this.published = true; + }; this.sent = false; } } @@ -778,21 +805,42 @@ export class StreamItemsRecord { errors: Array; streamRecord: StreamRecord; path: ReadonlyArray; + priority: number; + deferPriority: number; items: Array; children: Set; isFinalRecord?: boolean; isCompletedAsyncIterator?: boolean; isCompleted: boolean; filtered: boolean; + published: true | Promise; + publish: () => void; + sent: boolean; - constructor(opts: { streamRecord: StreamRecord; path: Path | undefined }) { + constructor(opts: { + streamRecord: StreamRecord; + path: Path | undefined; + priority: number; + }) { this.streamRecord = opts.streamRecord; this.path = pathToArray(opts.path); + this.priority = opts.priority; + this.deferPriority = 0; this.children = new Set(); this.errors = []; this.isCompleted = false; this.filtered = false; this.items = []; + // promiseWithResolvers uses void only as a generic type parameter + // see: https://typescript-eslint.io/rules/no-invalid-void-type/ + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + const { promise: published, resolve } = promiseWithResolvers(); + this.published = published; + this.publish = () => { + resolve(); + this.published = true; + }; + this.sent = false; } } diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts 
index 33c310523b..d430142f7c 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -1,13 +1,17 @@ -import { expect } from 'chai'; +import { assert, expect } from 'chai'; import { describe, it } from 'mocha'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; import { expectPromise } from '../../__testUtils__/expectPromise.js'; import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; +import { isPromise } from '../../jsutils/isPromise.js'; + import type { DocumentNode } from '../../language/ast.js'; +import { Kind } from '../../language/kinds.js'; import { parse } from '../../language/parser.js'; +import type { FieldDetails } from '../../type/definition.js'; import { GraphQLList, GraphQLNonNull, @@ -216,6 +220,174 @@ describe('Execute: defer directive', () => { }, }); }); + it('Can provides correct info about deferred execution state when resolver could defer', async () => { + let fieldDetails: ReadonlyArray | undefined; + let deferPriority; + let published; + let resumed; + + const SomeType = new GraphQLObjectType({ + name: 'SomeType', + fields: { + someField: { + type: GraphQLString, + resolve: () => Promise.resolve('someField'), + }, + deferredField: { + type: GraphQLString, + resolve: async (_parent, _args, _context, info) => { + fieldDetails = info.fieldDetails; + deferPriority = info.deferPriority; + published = info.published; + await published; + resumed = true; + }, + }, + }, + }); + + const someSchema = new GraphQLSchema({ query: SomeType }); + + const document = parse(` + query { + someField + ... @defer { + deferredField + } + } + `); + + const operation = document.definitions[0]; + assert(operation.kind === Kind.OPERATION_DEFINITION); + const fragment = operation.selectionSet.selections[1]; + assert(fragment.kind === Kind.INLINE_FRAGMENT); + const field = fragment.selectionSet.selections[0]; + + const result = experimentalExecuteIncrementally({ + schema: someSchema, + document, + }); + + expect(fieldDetails).to.equal(undefined); + expect(deferPriority).to.equal(undefined); + expect(published).to.equal(undefined); + expect(resumed).to.equal(undefined); + + const initialPayload = await result; + assert('initialResult' in initialPayload); + const iterator = initialPayload.subsequentResults[Symbol.asyncIterator](); + await iterator.next(); + + assert(fieldDetails !== undefined); + expect(fieldDetails[0].node).to.equal(field); + expect(fieldDetails[0].target?.deferPriority).to.equal(1); + expect(deferPriority).to.equal(1); + expect(isPromise(published)).to.equal(true); + expect(resumed).to.equal(true); + }); + it('Can provides correct info about deferred execution state when deferred field is masked by non-deferred field', async () => { + let fieldDetails: ReadonlyArray | undefined; + let deferPriority; + let published; + + const SomeType = new GraphQLObjectType({ + name: 'SomeType', + fields: { + someField: { + type: GraphQLString, + resolve: (_parent, _args, _context, info) => { + fieldDetails = info.fieldDetails; + deferPriority = info.deferPriority; + published = info.published; + return 'someField'; + }, + }, + }, + }); + + const someSchema = new GraphQLSchema({ query: SomeType }); + + const document = parse(` + query { + someField + ... 
@defer { + someField + } + } + `); + + const operation = document.definitions[0]; + assert(operation.kind === Kind.OPERATION_DEFINITION); + const node1 = operation.selectionSet.selections[0]; + const fragment = operation.selectionSet.selections[1]; + assert(fragment.kind === Kind.INLINE_FRAGMENT); + const node2 = fragment.selectionSet.selections[0]; + + const result = experimentalExecuteIncrementally({ + schema: someSchema, + document, + }); + + const initialPayload = await result; + assert('initialResult' in initialPayload); + expect(initialPayload.initialResult).to.deep.equal({ + data: { + someField: 'someField', + }, + pending: [{ path: [] }], + hasNext: true, + }); + + assert(fieldDetails !== undefined); + expect(fieldDetails[0].node).to.equal(node1); + expect(fieldDetails[0].target).to.equal(undefined); + expect(fieldDetails[1].node).to.equal(node2); + expect(fieldDetails[1].target?.deferPriority).to.equal(1); + expect(deferPriority).to.equal(0); + expect(published).to.equal(true); + }); + it('Can provides correct info about deferred execution state when resolver need not defer', async () => { + let deferPriority; + let published; + const SomeType = new GraphQLObjectType({ + name: 'SomeType', + fields: { + deferredField: { + type: GraphQLString, + resolve: (_parent, _args, _context, info) => { + deferPriority = info.deferPriority; + published = info.published; + }, + }, + }, + }); + + const someSchema = new GraphQLSchema({ query: SomeType }); + + const document = parse(` + query { + ... @defer { + deferredField + } + } + `); + + const result = experimentalExecuteIncrementally({ + schema: someSchema, + document, + }); + + expect(deferPriority).to.equal(undefined); + expect(published).to.equal(undefined); + + const initialPayload = await result; + assert('initialResult' in initialPayload); + const iterator = initialPayload.subsequentResults[Symbol.asyncIterator](); + await iterator.next(); + + expect(deferPriority).to.equal(1); + expect(published).to.equal(true); + }); it('Does not disable defer with null if argument', async () => { const document = parse(` query HeroNameQuery($shouldDefer: Boolean) { diff --git a/src/execution/__tests__/executor-test.ts b/src/execution/__tests__/executor-test.ts index c29b4ae60d..9132ca36bd 100644 --- a/src/execution/__tests__/executor-test.ts +++ b/src/execution/__tests__/executor-test.ts @@ -9,6 +9,7 @@ import { inspect } from '../../jsutils/inspect.js'; import { Kind } from '../../language/kinds.js'; import { parse } from '../../language/parser.js'; +import type { GraphQLResolveInfo } from '../../type/definition.js'; import { GraphQLInterfaceType, GraphQLList, @@ -191,7 +192,7 @@ describe('Execute: Handles basic execution tasks', () => { }); it('provides info about current execution state', () => { - let resolvedInfo; + let resolvedInfo: GraphQLResolveInfo | undefined; const testType = new GraphQLObjectType({ name: 'Test', fields: { @@ -213,7 +214,7 @@ describe('Execute: Handles basic execution tasks', () => { expect(resolvedInfo).to.have.all.keys( 'fieldName', - 'fieldNodes', + 'fieldDetails', 'returnType', 'parentType', 'path', @@ -222,6 +223,9 @@ describe('Execute: Handles basic execution tasks', () => { 'rootValue', 'operation', 'variableValues', + 'priority', + 'deferPriority', + 'published', ); const operation = document.definitions[0]; @@ -234,14 +238,24 @@ describe('Execute: Handles basic execution tasks', () => { schema, rootValue, operation, + priority: 0, + deferPriority: 0, + published: true, }); - const field = 
operation.selectionSet.selections[0]; expect(resolvedInfo).to.deep.include({ - fieldNodes: [field], path: { prev: undefined, key: 'result', typename: 'Test' }, variableValues: { var: 'abc' }, }); + + const fieldDetails = resolvedInfo?.fieldDetails; + assert(fieldDetails !== undefined); + + const field = operation.selectionSet.selections[0]; + expect(fieldDetails[0]).to.deep.include({ + node: field, + target: undefined, + }); }); it('populates path correctly with complex types', () => { diff --git a/src/execution/collectFields.ts b/src/execution/collectFields.ts index 43b36343eb..dc63ff803e 100644 --- a/src/execution/collectFields.ts +++ b/src/execution/collectFields.ts @@ -15,7 +15,12 @@ import type { import { OperationTypeNode } from '../language/ast.js'; import { Kind } from '../language/kinds.js'; -import type { GraphQLObjectType } from '../type/definition.js'; +import type { + DeferUsage, + FieldDetails, + GraphQLObjectType, + Target, +} from '../type/definition.js'; import { isAbstractType } from '../type/definition.js'; import { GraphQLDeferDirective, @@ -28,24 +33,13 @@ import { typeFromAST } from '../utilities/typeFromAST.js'; import { getDirectiveValues } from './values.js'; -export interface DeferUsage { - label: string | undefined; - ancestors: ReadonlyArray; -} - export const NON_DEFERRED_TARGET_SET = new OrderedSet([ undefined, ]).freeze(); -export type Target = DeferUsage | undefined; export type TargetSet = ReadonlyOrderedSet; export type DeferUsageSet = ReadonlyOrderedSet; -export interface FieldDetails { - node: FieldNode; - target: Target; -} - export interface FieldGroup { fields: ReadonlyArray; targets: TargetSet; @@ -213,12 +207,19 @@ function collectFieldsImpl( let target: Target; if (!defer) { target = newTarget; + } else if (parentTarget === undefined) { + target = { + ...defer, + ancestors: [parentTarget], + deferPriority: 1, + }; + newDeferUsages.push(target); } else { - const ancestors = - parentTarget === undefined - ? [parentTarget] - : [parentTarget, ...parentTarget.ancestors]; - target = { ...defer, ancestors }; + target = { + ...defer, + ancestors: [parentTarget, ...parentTarget.ancestors], + deferPriority: parentTarget.deferPriority + 1, + }; newDeferUsages.push(target); } @@ -255,12 +256,19 @@ function collectFieldsImpl( if (!defer) { visitedFragmentNames.add(fragName); target = newTarget; + } else if (parentTarget === undefined) { + target = { + ...defer, + ancestors: [parentTarget], + deferPriority: 1, + }; + newDeferUsages.push(target); } else { - const ancestors = - parentTarget === undefined - ? 
[parentTarget] - : [parentTarget, ...parentTarget.ancestors]; - target = { ...defer, ancestors }; + target = { + ...defer, + ancestors: [parentTarget, ...parentTarget.ancestors], + deferPriority: parentTarget.deferPriority + 1, + }; newDeferUsages.push(target); } diff --git a/src/execution/execute.ts b/src/execution/execute.ts index 55bba806f5..1683aaffae 100644 --- a/src/execution/execute.ts +++ b/src/execution/execute.ts @@ -26,6 +26,7 @@ import { OperationTypeNode } from '../language/ast.js'; import { Kind } from '../language/kinds.js'; import type { + DeferUsage, GraphQLAbstractType, GraphQLField, GraphQLFieldResolver, @@ -48,7 +49,6 @@ import type { GraphQLSchema } from '../type/schema.js'; import { assertValidSchema } from '../type/validate.js'; import type { - DeferUsage, DeferUsageSet, FieldGroup, GroupedFieldSet, @@ -419,6 +419,7 @@ function executeOperation( const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( incrementalPublisher, newGroupedFieldSetDetails, + initialResultRecord, newDeferMap, path, ); @@ -606,6 +607,7 @@ function executeField( fieldGroup, parentType, path, + incrementalDataRecord, ); // Get the resolve function, regardless of if its result is normal or abrupt (error). @@ -691,12 +693,31 @@ export function buildResolveInfo( fieldGroup: FieldGroup, parentType: GraphQLObjectType, path: Path, + incrementalDataRecord?: IncrementalDataRecord | undefined, ): GraphQLResolveInfo { // The resolve function's optional fourth argument is a collection of // information about the current execution state. + if (incrementalDataRecord === undefined) { + return { + fieldName: fieldDef.name, + fieldDetails: fieldGroup.fields, + returnType: fieldDef.type, + parentType, + path, + schema: exeContext.schema, + fragments: exeContext.fragments, + rootValue: exeContext.rootValue, + operation: exeContext.operation, + variableValues: exeContext.variableValues, + priority: 0, + deferPriority: 0, + published: true, + }; + } + return { fieldName: fieldDef.name, - fieldNodes: toNodes(fieldGroup), + fieldDetails: fieldGroup.fields, returnType: fieldDef.type, parentType, path, @@ -705,6 +726,12 @@ export function buildResolveInfo( rootValue: exeContext.rootValue, operation: exeContext.operation, variableValues: exeContext.variableValues, + priority: incrementalDataRecord.priority, + deferPriority: incrementalDataRecord.deferPriority, + published: + incrementalDataRecord.published === true + ? true + : incrementalDataRecord.published, }; } @@ -1469,6 +1496,7 @@ function deferredFragmentRecordFromDeferUsage( function addNewDeferredGroupedFieldSets( incrementalPublisher: IncrementalPublisher, newGroupedFieldSetDetails: Map, + incrementalDataRecord: IncrementalDataRecord, deferMap: ReadonlyMap, path?: Path | undefined, ): ReadonlyArray { @@ -1483,12 +1511,23 @@ function addNewDeferredGroupedFieldSets( newGroupedFieldSetDeferUsages, deferMap, ); - const deferredGroupedFieldSetRecord = new DeferredGroupedFieldSetRecord({ - path, - deferredFragmentRecords, - groupedFieldSet, - shouldInitiateDefer, - }); + const deferredGroupedFieldSetRecord = shouldInitiateDefer + ? 
new DeferredGroupedFieldSetRecord({ + path, + priority: incrementalDataRecord.priority + 1, + deferPriority: incrementalDataRecord.deferPriority + 1, + deferredFragmentRecords, + groupedFieldSet, + shouldInitiateDefer: true, + }) + : new DeferredGroupedFieldSetRecord({ + path, + priority: incrementalDataRecord.priority, + deferPriority: incrementalDataRecord.deferPriority, + deferredFragmentRecords, + groupedFieldSet, + shouldInitiateDefer: false, + }); incrementalPublisher.reportNewDeferredGroupedFieldSetRecord( deferredGroupedFieldSetRecord, ); @@ -1533,6 +1572,7 @@ function collectAndExecuteSubfields( const newDeferredGroupedFieldSetRecords = addNewDeferredGroupedFieldSets( incrementalPublisher, newGroupedFieldSetDetails, + incrementalDataRecord, newDeferMap, path, ); @@ -1951,6 +1991,7 @@ function executeStreamField( const streamItemsRecord = new StreamItemsRecord({ streamRecord, path: itemPath, + priority: incrementalDataRecord.priority + 1, }); incrementalPublisher.reportNewStreamItemsRecord( streamItemsRecord, @@ -2143,6 +2184,7 @@ async function executeStreamAsyncIterator( const streamItemsRecord = new StreamItemsRecord({ streamRecord, path: itemPath, + priority: incrementalDataRecord.priority + 1, }); incrementalPublisher.reportNewStreamItemsRecord( streamItemsRecord, diff --git a/src/type/definition.ts b/src/type/definition.ts index 0ca4152bd2..1da73124eb 100644 --- a/src/type/definition.ts +++ b/src/type/definition.ts @@ -885,9 +885,22 @@ export type GraphQLFieldResolver< info: GraphQLResolveInfo, ) => TResult; +export interface DeferUsage { + label: string | undefined; + ancestors: ReadonlyArray; + deferPriority: number; +} + +export type Target = DeferUsage | undefined; + +export interface FieldDetails { + node: FieldNode; + target: Target; +} + export interface GraphQLResolveInfo { readonly fieldName: string; - readonly fieldNodes: ReadonlyArray; + readonly fieldDetails: ReadonlyArray; readonly returnType: GraphQLOutputType; readonly parentType: GraphQLObjectType; readonly path: Path; @@ -896,6 +909,9 @@ export interface GraphQLResolveInfo { readonly rootValue: unknown; readonly operation: OperationDefinitionNode; readonly variableValues: { [variable: string]: unknown }; + readonly priority: number; + readonly deferPriority: number; + readonly published: true | Promise; } /** From ce5930475fa3cea860c0b46d0b0c948a3dd71a79 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Sun, 14 May 2023 18:43:28 +0300 Subject: [PATCH 4/6] consolidate payloads --- src/execution/IncrementalPublisher.ts | 102 +++++++++++++---------- src/execution/__tests__/defer-test.ts | 48 +++-------- src/execution/__tests__/stream-test.ts | 108 +++++-------------------- 3 files changed, 91 insertions(+), 167 deletions(-) diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index 7c415f8b8e..cc3c40b207 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -162,6 +162,12 @@ export interface FormattedCompletedResult { errors?: ReadonlyArray; } +interface IncrementalAggregate { + newPendingSources: Set; + incrementalResults: Array; + completedResults: Array; +} + /** * This class is used to publish incremental results to the client, enabling semi-concurrent * execution while preserving result order. 
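To make the intent of this refactor easier to follow, here is a minimal, self-contained sketch of the drain-and-reduce shape that the rewritten subscription loop below uses: released records are drained in batches, each batch is folded into a single aggregate by an initializer/reducer pair, and a finalizer turns the aggregate into exactly one subsequent payload. All names in the sketch (SketchAggregate, initializer, reducer, finalizer, and the string "records") are illustrative stand-ins, not graphql-js internals.

// Simplified stand-in for IncrementalAggregate above.
interface SketchAggregate {
  incremental: Array<string>;
  completed: Array<string>;
}

function initializer(): SketchAggregate {
  return { incremental: [], completed: [] };
}

function reducer(
  aggregate: SketchAggregate,
  released: ReadonlySet<string>,
): SketchAggregate {
  for (const record of released) {
    // In the real publisher this is where incremental and completed entries
    // for each drained record are appended to the shared aggregate.
    aggregate.incremental.push(`result for ${record}`);
    aggregate.completed.push(record);
  }
  return aggregate;
}

function finalizer(aggregate: SketchAggregate, hasNext: boolean) {
  return { ...aggregate, hasNext };
}

// Usage: two records released before the consumer reads are folded into a
// single payload instead of two.
const pending = new Set(['a', 'b']);
const releasedBatches = [new Set(['a']), new Set(['b'])];

let aggregate = initializer();
for (const released of releasedBatches) {
  for (const record of released) {
    pending.delete(record);
  }
  aggregate = reducer(aggregate, released);
}
console.log(finalizer(aggregate, pending.size > 0));
// -> { incremental: [ 'result for a', 'result for b' ],
//      completed: [ 'a', 'b' ], hasNext: false }

The loop in the hunk below keeps reducing for as long as new records are released, so completions that land during a single tick collapse into one SubsequentIncrementalExecutionResult rather than several.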
@@ -399,20 +405,28 @@ export class IncrementalPublisher { return { value: undefined, done: true }; } - for (const item of this._released) { - this._pending.delete(item); - } - const released = this._released; - this._released = new Set(); + if (this._released.size > 0) { + let aggregate = this._incrementalInitializer(); + do { + for (const item of this._released) { + this._pending.delete(item); + } + const released = this._released; + this._released = new Set(); - const result = this._getIncrementalResult(released); + aggregate = this._incrementalReducer(aggregate, released); + } while (this._released.size > 0); - if (this._pending.size === 0) { - isDone = true; - } + const hasNext = this._pending.size > 0; + + if (!hasNext) { + isDone = true; + } - if (result !== undefined) { - return { value: result, done: false }; + return { + value: this._incrementalFinalizer(aggregate), + done: false, + }; } // eslint-disable-next-line no-await-in-loop @@ -494,37 +508,20 @@ export class IncrementalPublisher { this._trigger(); } - private _getIncrementalResult( - completedRecords: ReadonlySet, - ): SubsequentIncrementalExecutionResult | undefined { - const { pending, incremental, completed } = - this._processPending(completedRecords); - - const hasNext = this._pending.size > 0; - if (incremental.length === 0 && completed.length === 0 && hasNext) { - return undefined; - } - - const result: SubsequentIncrementalExecutionResult = { hasNext }; - if (pending.length) { - result.pending = pending; - } - if (incremental.length) { - result.incremental = incremental; - } - if (completed.length) { - result.completed = completed; - } - - return result; + private _incrementalInitializer(): IncrementalAggregate { + return { + newPendingSources: new Set(), + incrementalResults: [], + completedResults: [], + }; } - private _processPending( + private _incrementalReducer( + aggregate: IncrementalAggregate, completedRecords: ReadonlySet, - ): IncrementalUpdate { - const newPendingSources = new Set(); - const incrementalResults: Array = []; - const completedResults: Array = []; + ): IncrementalAggregate { + const { newPendingSources, incrementalResults, completedResults } = + aggregate; for (const subsequentResultRecord of completedRecords) { for (const child of subsequentResultRecord.children) { if (child.filtered) { @@ -585,11 +582,30 @@ export class IncrementalPublisher { } } - return { - pending: this.pendingSourcesToResults(newPendingSources), - incremental: incrementalResults, - completed: completedResults, + return aggregate; + } + + private _incrementalFinalizer( + aggregate: IncrementalAggregate, + ): SubsequentIncrementalExecutionResult { + const { newPendingSources, incrementalResults, completedResults } = + aggregate; + const pendingResults = this.pendingSourcesToResults(newPendingSources); + + const result: SubsequentIncrementalExecutionResult = { + hasNext: this._pending.size > 0, }; + if (pendingResults.length) { + result.pending = pendingResults; + } + if (incrementalResults.length) { + result.incremental = incrementalResults; + } + if (completedResults.length) { + result.completed = completedResults; + } + + return result; } private _completedRecordToResult( diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index d430142f7c..5ef4b866b8 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -1177,35 +1177,25 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - pending: [{ path: ['hero', 'nestedObject'] }], 
incremental: [ { data: { bar: 'bar' }, path: ['hero', 'nestedObject', 'deeperObject'], }, - ], - completed: [{ path: ['hero'] }], - hasNext: true, - }, - { - pending: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], - incremental: [ { data: { baz: 'baz' }, path: ['hero', 'nestedObject', 'deeperObject'], }, - ], - hasNext: true, - completed: [{ path: ['hero', 'nestedObject'] }], - }, - { - incremental: [ { data: { bak: 'bak' }, path: ['hero', 'nestedObject', 'deeperObject'], }, ], - completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], + completed: [ + { path: ['hero'] }, + { path: ['hero', 'nestedObject'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, + ], hasNext: false, }, ]); @@ -1254,7 +1244,6 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - pending: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], incremental: [ { data: { @@ -1262,15 +1251,6 @@ describe('Execute: defer directive', () => { }, path: ['hero', 'nestedObject', 'deeperObject'], }, - ], - completed: [ - { path: ['hero'] }, - { path: ['hero', 'nestedObject', 'deeperObject'] }, - ], - hasNext: true, - }, - { - incremental: [ { data: { bar: 'bar', @@ -1278,7 +1258,11 @@ describe('Execute: defer directive', () => { path: ['hero', 'nestedObject', 'deeperObject'], }, ], - completed: [{ path: ['hero', 'nestedObject', 'deeperObject'] }], + completed: [ + { path: ['hero'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, + { path: ['hero', 'nestedObject', 'deeperObject'] }, + ], hasNext: false, }, ]); @@ -2101,27 +2085,17 @@ describe('Execute: defer directive', () => { hasNext: true, }, { - pending: [ - { path: ['hero', 'friends', 0] }, - { path: ['hero', 'friends', 1] }, - { path: ['hero', 'friends', 2] }, - ], incremental: [ { data: { name: 'slow', friends: [{}, {}, {}] }, path: ['hero'], }, - ], - completed: [{ path: ['hero'] }], - hasNext: true, - }, - { - incremental: [ { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], completed: [ + { path: ['hero'] }, { path: ['hero', 'friends', 0] }, { path: ['hero', 'friends', 1] }, { path: ['hero', 'friends', 2] }, diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index 12d4ddd43f..1dd97f7263 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -146,11 +146,10 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList'] }], - hasNext: true, - }, - { - incremental: [{ items: ['coconut'], path: ['scalarList'] }], + incremental: [ + { items: ['banana'], path: ['scalarList'] }, + { items: ['coconut'], path: ['scalarList'] }, + ], completed: [{ path: ['scalarList'] }], hasNext: false, }, @@ -170,15 +169,11 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [{ items: ['apple'], path: ['scalarList'] }], - hasNext: true, - }, - { - incremental: [{ items: ['banana'], path: ['scalarList'] }], - hasNext: true, - }, - { - incremental: [{ items: ['coconut'], path: ['scalarList'] }], + incremental: [ + { items: ['apple'], path: ['scalarList'] }, + { items: ['banana'], path: ['scalarList'] }, + { items: ['coconut'], path: ['scalarList'] }, + ], completed: [{ path: ['scalarList'] }], hasNext: false, }, @@ -228,11 +223,6 @@ describe('Execute: stream directive', () => { items: ['banana'], path: ['scalarList'], }, - ], - hasNext: true, - }, 
- { - incremental: [ { items: ['coconut'], path: ['scalarList'], @@ -297,11 +287,6 @@ describe('Execute: stream directive', () => { items: [['banana', 'banana', 'banana']], path: ['scalarListList'], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [['coconut', 'coconut', 'coconut']], path: ['scalarListList'], @@ -384,20 +369,10 @@ describe('Execute: stream directive', () => { items: [{ name: 'Luke', id: '1' }], path: ['friendList'], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ name: 'Han', id: '2' }], path: ['friendList'], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ name: 'Leia', id: '3' }], path: ['friendList'], @@ -542,11 +517,6 @@ describe('Execute: stream directive', () => { }, ], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ name: 'Leia', id: '3' }], path: ['friendList'], @@ -961,11 +931,6 @@ describe('Execute: stream directive', () => { }, ], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ nonNullName: 'Han' }], path: ['friendList'], @@ -1012,11 +977,6 @@ describe('Execute: stream directive', () => { }, ], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ nonNullName: 'Han' }], path: ['friendList'], @@ -1147,11 +1107,6 @@ describe('Execute: stream directive', () => { }, ], }, - ], - hasNext: true, - }, - { - incremental: [ { items: [{ nonNullName: 'Han' }], path: ['friendList'], @@ -1475,11 +1430,10 @@ describe('Execute: stream directive', () => { path: ['nestedObject', 'nestedFriendList'], }, ], - completed: [{ path: ['otherNestedObject'] }], - hasNext: true, - }, - { - completed: [{ path: ['nestedObject', 'nestedFriendList'] }], + completed: [ + { path: ['otherNestedObject'] }, + { path: ['nestedObject', 'nestedFriendList'] }, + ], hasNext: false, }, ]); @@ -1585,9 +1539,6 @@ describe('Execute: stream directive', () => { ], }, ], - hasNext: true, - }, - { completed: [{ path: ['friendList'] }], hasNext: false, }, @@ -1739,9 +1690,6 @@ describe('Execute: stream directive', () => { path: ['friendList'], }, ], - hasNext: true, - }, - { completed: [{ path: ['friendList'] }], hasNext: false, }, @@ -1792,17 +1740,12 @@ describe('Execute: stream directive', () => { items: [{ id: '1', name: 'Luke' }], path: ['nestedObject', 'nestedFriendList'], }, - ], - completed: [{ path: ['nestedObject'] }], - hasNext: true, - }, - { - incremental: [ { items: [{ id: '2', name: 'Han' }], path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, { @@ -1863,27 +1806,18 @@ describe('Execute: stream directive', () => { data: { scalarField: 'slow', nestedFriendList: [] }, path: ['nestedObject'], }, - ], - completed: [{ path: ['nestedObject'] }], - hasNext: true, - }, - done: false, - }); - const result3 = await iterator.next(); - expectJSON(result3).toDeepEqual({ - value: { - incremental: [ { items: [{ name: 'Luke' }], path: ['nestedObject', 'nestedFriendList'], }, ], + completed: [{ path: ['nestedObject'] }], hasNext: true, }, done: false, }); - const result4 = await iterator.next(); - expectJSON(result4).toDeepEqual({ + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ value: { incremental: [ { @@ -1895,16 +1829,16 @@ describe('Execute: stream directive', () => { }, done: false, }); - const result5 = await iterator.next(); - expectJSON(result5).toDeepEqual({ + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ value: { completed: [{ path: ['nestedObject', 'nestedFriendList'] }], hasNext: false, }, done: false, }); - 
const result6 = await iterator.next(); - expectJSON(result6).toDeepEqual({ + const result5 = await iterator.next(); + expectJSON(result5).toDeepEqual({ value: undefined, done: true, }); From 26672e49903294c708ef3b60d27efe392d9a8dd3 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Thu, 6 Jul 2023 15:11:48 +0300 Subject: [PATCH 5/6] inline child records that are ready except for the initial result, which is left alone! --- src/execution/IncrementalPublisher.ts | 268 ++++++++++++++++++++++--- src/execution/__tests__/defer-test.ts | 8 +- src/execution/__tests__/stream-test.ts | 259 ++++++++++++++++++------ src/execution/execute.ts | 1 + 4 files changed, 450 insertions(+), 86 deletions(-) diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index cc3c40b207..113784752b 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -162,10 +162,18 @@ export interface FormattedCompletedResult { errors?: ReadonlyArray; } +interface IncrementalStreamTarget { + errors?: Array; + items: Array; +} + interface IncrementalAggregate { newPendingSources: Set; incrementalResults: Array; completedResults: Array; + deferParents: Map; + initialStreams: Map; + streamTargets: Map; } /** @@ -199,12 +207,17 @@ export class IncrementalPublisher { reportNewDeferFragmentRecord( deferredFragmentRecord: DeferredFragmentRecord, + parentIncrementalDataRecord: + | InitialResultRecord + | DeferredGroupedFieldSetRecord + | StreamItemsRecord, parentIncrementalResultRecord: | InitialResultRecord | DeferredFragmentRecord | StreamItemsRecord, ): void { parentIncrementalResultRecord.children.add(deferredFragmentRecord); + parentIncrementalDataRecord.childDefers.add(deferredFragmentRecord); } reportNewDeferredGroupedFieldSetRecord( @@ -225,9 +238,21 @@ export class IncrementalPublisher { if (isDeferredGroupedFieldSetRecord(parentIncrementalDataRecord)) { for (const parent of parentIncrementalDataRecord.deferredFragmentRecords) { parent.children.add(streamItemsRecord); + parentIncrementalDataRecord.childStreams.add( + streamItemsRecord.streamRecord, + ); + } + } else if (isStreamItemsRecord(parentIncrementalDataRecord)) { + const streamRecord = streamItemsRecord.streamRecord; + if (streamRecord !== parentIncrementalDataRecord.streamRecord) { + parentIncrementalDataRecord.childStreams.add(streamRecord); } + parentIncrementalDataRecord.children.add(streamItemsRecord); } else { parentIncrementalDataRecord.children.add(streamItemsRecord); + parentIncrementalDataRecord.childStreams.add( + streamItemsRecord.streamRecord, + ); } } @@ -235,7 +260,14 @@ export class IncrementalPublisher { deferredGroupedFieldSetRecord: DeferredGroupedFieldSetRecord, data: ObjMap, ): void { - deferredGroupedFieldSetRecord.data = data; + deferredGroupedFieldSetRecord.result = { + data, + path: deferredGroupedFieldSetRecord.path, + }; + const errors = deferredGroupedFieldSetRecord.errors; + if (errors.length > 0) { + deferredGroupedFieldSetRecord.result.errors = errors; + } for (const deferredFragmentRecord of deferredGroupedFieldSetRecord.deferredFragmentRecords) { deferredFragmentRecord._pending.delete(deferredGroupedFieldSetRecord); if (deferredFragmentRecord._pending.size === 0) { @@ -264,7 +296,14 @@ export class IncrementalPublisher { streamItemsRecord: StreamItemsRecord, items: Array, ) { - streamItemsRecord.items = items; + streamItemsRecord.result = { + items, + path: streamItemsRecord.streamRecord.path, + }; + const errors = streamItemsRecord.errors; + if (errors.length > 
0) { + streamItemsRecord.result.errors = errors; + } streamItemsRecord.isCompleted = true; this._release(streamItemsRecord); } @@ -513,6 +552,9 @@ export class IncrementalPublisher { newPendingSources: new Set(), incrementalResults: [], completedResults: [], + deferParents: new Map(), + initialStreams: new Map(), + streamTargets: new Map(), }; } @@ -520,8 +562,13 @@ export class IncrementalPublisher { aggregate: IncrementalAggregate, completedRecords: ReadonlySet, ): IncrementalAggregate { - const { newPendingSources, incrementalResults, completedResults } = - aggregate; + const { + newPendingSources, + incrementalResults, + completedResults, + deferParents, + initialStreams, + } = aggregate; for (const subsequentResultRecord of completedRecords) { for (const child of subsequentResultRecord.children) { if (child.filtered) { @@ -549,14 +596,59 @@ export class IncrementalPublisher { if (subsequentResultRecord.streamRecord.errors.length > 0) { continue; } - const incrementalResult: IncrementalStreamResult = { - items: subsequentResultRecord.items, - path: subsequentResultRecord.streamRecord.path, - }; - if (subsequentResultRecord.errors.length > 0) { - incrementalResult.errors = subsequentResultRecord.errors; + this._updateTargets(subsequentResultRecord, aggregate); + const streamRecord = subsequentResultRecord.streamRecord; + const initialStream = initialStreams.get(streamRecord); + if (initialStream === undefined) { + initialStreams.set(streamRecord, subsequentResultRecord); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + incrementalResults.push(subsequentResultRecord.result!); + } else if (isStreamItemsRecord(initialStream)) { + if (initialStream.streamRecord === streamRecord) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const items = subsequentResultRecord.result!.items; + if (items.length > 0) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + initialStream.result!.items.push(...items); + } + this._updateTargetErrors( + initialStream, + subsequentResultRecord.errors, + ); + } else { + const target = this._findTargetFromStreamPath( + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + initialStream.result!.items, + initialStream.path, + streamRecord.path, + ) as Array; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const items = subsequentResultRecord.result!.items; + if (items.length > 0) { + target.push(...items); + } + this._updateTargetErrors( + initialStream, + subsequentResultRecord.errors, + ); + } + } else { + const target = this._findTarget( + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + initialStream.result!.data, + initialStream.path, + streamRecord.path, + ) as Array; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const items = subsequentResultRecord.result!.items; + if (items.length > 0) { + target.push(...items); + } + this._updateTargetErrors( + initialStream, + subsequentResultRecord.errors, + ); } - incrementalResults.push(incrementalResult); } else { newPendingSources.delete(subsequentResultRecord); completedResults.push( @@ -565,18 +657,52 @@ export class IncrementalPublisher { if (subsequentResultRecord.errors.length > 0) { continue; } + const parent = deferParents.get(subsequentResultRecord); for (const deferredGroupedFieldSetRecord of subsequentResultRecord.deferredGroupedFieldSetRecords) { if (!deferredGroupedFieldSetRecord.sent) { + this._updateTargets(deferredGroupedFieldSetRecord, 
aggregate); deferredGroupedFieldSetRecord.sent = true; - const incrementalResult: IncrementalDeferResult = { + if (parent === undefined) { + const incrementalResult: IncrementalDeferResult = { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + data: deferredGroupedFieldSetRecord.result!.data, + path: deferredGroupedFieldSetRecord.path, + }; + if (deferredGroupedFieldSetRecord.errors.length > 0) { + incrementalResult.errors = deferredGroupedFieldSetRecord.errors; + } + incrementalResults.push(incrementalResult); + } else { + const deferredFragmentTarget = isStreamItemsRecord(parent) + ? this._findTargetFromStreamPath( + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + parent.result!.items, + parent.path, + deferredGroupedFieldSetRecord.path, + ) + : this._findTarget( + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + parent.result!.data, + parent.path, + deferredGroupedFieldSetRecord.path, + ); + + const deferredGroupedFieldSetTarget = this._findTarget( + deferredFragmentTarget, + subsequentResultRecord.path, + deferredGroupedFieldSetRecord.path, + ); // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - data: deferredGroupedFieldSetRecord.data!, - path: deferredGroupedFieldSetRecord.path, - }; - if (deferredGroupedFieldSetRecord.errors.length > 0) { - incrementalResult.errors = deferredGroupedFieldSetRecord.errors; + const data = deferredGroupedFieldSetRecord.result!.data; + for (const key of Object.keys(data)) { + (deferredGroupedFieldSetTarget as ObjMap)[key] = + data[key]; + } + this._updateTargetErrors( + parent, + deferredGroupedFieldSetRecord.errors, + ); } - incrementalResults.push(incrementalResult); } } } @@ -585,6 +711,74 @@ export class IncrementalPublisher { return aggregate; } + private _updateTargets( + subsequentDataRecord: SubsequentDataRecord, + aggregate: IncrementalAggregate, + ): void { + const { childDefers, childStreams } = subsequentDataRecord; + const { deferParents, initialStreams } = aggregate; + for (const childDefer of childDefers) { + deferParents.set(childDefer, subsequentDataRecord); + } + for (const childStream of childStreams) { + initialStreams.set(childStream, subsequentDataRecord); + } + } + + private _findTarget( + data: ObjMap | Array, + dataPath: ReadonlyArray, + targetPath: ReadonlyArray, + ): ObjMap | Array { + let i = 0; + while (i < dataPath.length) { + i++; + } + let dataOrItems = data; + while (i < targetPath.length) { + const key = targetPath[i++]; + const value = (dataOrItems as ObjMap)[key as string]; + dataOrItems = value as ObjMap; + } + return dataOrItems; + } + + private _findTargetFromStreamPath( + data: ObjMap | Array, + dataPath: ReadonlyArray, + targetPath: ReadonlyArray, + ): ObjMap | Array { + const pathToStream = [...dataPath]; + const start = pathToStream.pop() as number; + let i = 0; + while (i < pathToStream.length) { + i++; + } + const adjustedIndex = (targetPath[i++] as number) - start; + let dataOrItems = (data as Array)[adjustedIndex]; + while (i < targetPath.length) { + const key = targetPath[i++]; + const value = (dataOrItems as ObjMap)[key as string]; + dataOrItems = value as ObjMap; + } + return dataOrItems as ObjMap | Array; + } + + private _updateTargetErrors( + subsequentDataRecord: SubsequentDataRecord, + errors: ReadonlyArray, + ): void { + for (const error of errors) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const result = subsequentDataRecord.result!; + if (result.errors === undefined) { + 
result.errors = [error]; + } else { + result.errors.push(error); + } + } + } + private _incrementalFinalizer( aggregate: IncrementalAggregate, ): SubsequentIncrementalExecutionResult { @@ -722,12 +916,16 @@ export class InitialResultRecord { errors: Array; children: Set; priority: number; + childDefers: Set; + childStreams: Set; deferPriority: number; published: true; constructor() { this.errors = []; this.children = new Set(); this.priority = 0; + this.childDefers = new Set(); + this.childStreams = new Set(); this.deferPriority = 0; this.published = true; } @@ -739,10 +937,19 @@ export class DeferredGroupedFieldSetRecord { priority: number; deferPriority: number; deferredFragmentRecords: ReadonlyArray; + childDefers: Set; + childStreams: Set; groupedFieldSet: GroupedFieldSet; shouldInitiateDefer: boolean; errors: Array; - data: ObjMap | undefined; + result: + | { + errors?: Array; + data: ObjMap; + path: ReadonlyArray; + } + | undefined; + published: true | Promise; publish: () => void; sent: boolean; @@ -759,6 +966,8 @@ export class DeferredGroupedFieldSetRecord { this.priority = opts.priority; this.deferPriority = opts.deferPriority; this.deferredFragmentRecords = opts.deferredFragmentRecords; + this.childDefers = new Set(); + this.childStreams = new Set(); this.groupedFieldSet = opts.groupedFieldSet; this.shouldInitiateDefer = opts.shouldInitiateDefer; this.errors = []; @@ -819,12 +1028,21 @@ export class StreamRecord { /** @internal */ export class StreamItemsRecord { errors: Array; + result: + | { + errors?: Array; + items: Array; + path: ReadonlyArray; + } + | undefined; + streamRecord: StreamRecord; path: ReadonlyArray; priority: number; deferPriority: number; - items: Array; children: Set; + childDefers: Set; + childStreams: Set; isFinalRecord?: boolean; isCompletedAsyncIterator?: boolean; isCompleted: boolean; @@ -843,10 +1061,11 @@ export class StreamItemsRecord { this.priority = opts.priority; this.deferPriority = 0; this.children = new Set(); + this.childDefers = new Set(); + this.childStreams = new Set(); this.errors = []; this.isCompleted = false; this.filtered = false; - this.items = []; // promiseWithResolvers uses void only as a generic type parameter // see: https://typescript-eslint.io/rules/no-invalid-void-type/ // eslint-disable-next-line @typescript-eslint/no-invalid-void-type @@ -860,9 +1079,8 @@ export class StreamItemsRecord { } } -export type IncrementalDataRecord = - | InitialResultRecord - | DeferredGroupedFieldSetRecord - | StreamItemsRecord; +export type IncrementalDataRecord = InitialResultRecord | SubsequentDataRecord; + +type SubsequentDataRecord = DeferredGroupedFieldSetRecord | StreamItemsRecord; type SubsequentResultRecord = DeferredFragmentRecord | StreamItemsRecord; diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index 5ef4b866b8..ce19fca3a9 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -2087,12 +2087,12 @@ describe('Execute: defer directive', () => { { incremental: [ { - data: { name: 'slow', friends: [{}, {}, {}] }, + data: { + name: 'slow', + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + }, path: ['hero'], }, - { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, ], completed: [ { path: ['hero'] }, diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index 
1dd97f7263..32f7e310ca 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -11,6 +11,7 @@ import type { DocumentNode } from '../../language/ast.js'; import { parse } from '../../language/parser.js'; import { + GraphQLEnumType, GraphQLList, GraphQLNonNull, GraphQLObjectType, @@ -24,19 +25,35 @@ import type { SubsequentIncrementalExecutionResult, } from '../IncrementalPublisher.js'; +const episodeEnum = new GraphQLEnumType({ + name: 'Episode', + values: { + NEW_HOPE: { + value: 4, + }, + EMPIRE: { + value: 5, + }, + JEDI: { + value: 6, + }, + }, +}); + const friendType = new GraphQLObjectType({ fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, nonNullName: { type: new GraphQLNonNull(GraphQLString) }, + appearsIn: { type: new GraphQLList(episodeEnum) }, }, name: 'Friend', }); const friends = [ - { name: 'Luke', id: 1 }, - { name: 'Han', id: 2 }, - { name: 'Leia', id: 3 }, + { name: 'Luke', id: 1, appearsIn: [4, 5, 6] }, + { name: 'Han', id: 2, appearsIn: [4, 5, 6] }, + { name: 'Leia', id: 3, appearsIn: [4, 5, 6] }, ]; const query = new GraphQLObjectType({ @@ -146,10 +163,7 @@ describe('Execute: stream directive', () => { hasNext: true, }, { - incremental: [ - { items: ['banana'], path: ['scalarList'] }, - { items: ['coconut'], path: ['scalarList'] }, - ], + incremental: [{ items: ['banana', 'coconut'], path: ['scalarList'] }], completed: [{ path: ['scalarList'] }], hasNext: false, }, @@ -170,9 +184,7 @@ describe('Execute: stream directive', () => { }, { incremental: [ - { items: ['apple'], path: ['scalarList'] }, - { items: ['banana'], path: ['scalarList'] }, - { items: ['coconut'], path: ['scalarList'] }, + { items: ['apple', 'banana', 'coconut'], path: ['scalarList'] }, ], completed: [{ path: ['scalarList'] }], hasNext: false, @@ -220,11 +232,7 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: ['banana'], - path: ['scalarList'], - }, - { - items: ['coconut'], + items: ['banana', 'coconut'], path: ['scalarList'], }, ], @@ -284,15 +292,127 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [['banana', 'banana', 'banana']], + items: [ + ['banana', 'banana', 'banana'], + ['coconut', 'coconut', 'coconut'], + ], path: ['scalarListList'], }, + ], + completed: [{ path: ['scalarListList'] }], + hasNext: false, + }, + ]); + }); + it('Can nest stream directives', async () => { + const document = parse(` + query { + friendList @stream(initialCount: 1) { + name + appearsIn @stream(initialCount: 1) + } + } + `); + const result = await complete(document, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [ + { + name: 'Luke', + appearsIn: ['NEW_HOPE'], + }, + ], + }, + pending: [ + { path: ['friendList'] }, + { path: ['friendList', 0, 'appearsIn'] }, + ], + hasNext: true, + }, + { + incremental: [ { - items: [['coconut', 'coconut', 'coconut']], - path: ['scalarListList'], + items: [ + { + name: 'Han', + appearsIn: ['NEW_HOPE', 'EMPIRE', 'JEDI'], + }, + { + name: 'Leia', + appearsIn: ['NEW_HOPE', 'EMPIRE', 'JEDI'], + }, + ], + path: ['friendList'], + }, + { + items: ['EMPIRE', 'JEDI'], + path: ['friendList', 0, 'appearsIn'], }, ], - completed: [{ path: ['scalarListList'] }], + completed: [ + { path: ['friendList'] }, + { path: ['friendList', 0, 'appearsIn'] }, + { path: ['friendList', 1, 'appearsIn'] }, + { path: ['friendList', 2, 'appearsIn'] }, + ], + hasNext: false, + }, + ]); + }); + it('Can nest stream 
directives under defer', async () => { + const document = parse(` + query { + friendList @stream(initialCount: 1) { + ... @defer { + name + appearsIn @stream(initialCount: 1) + } + } + } + `); + const result = await complete(document, { + friendList: () => friends.map((f) => Promise.resolve(f)), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{}], + }, + pending: [{ path: ['friendList'] }, { path: ['friendList', 0] }], + hasNext: true, + }, + { + incremental: [ + { + items: [ + { + name: 'Han', + appearsIn: ['NEW_HOPE', 'EMPIRE', 'JEDI'], + }, + { + name: 'Leia', + appearsIn: ['NEW_HOPE', 'EMPIRE', 'JEDI'], + }, + ], + path: ['friendList'], + }, + { + data: { name: 'Luke', appearsIn: ['NEW_HOPE', 'EMPIRE', 'JEDI'] }, + path: ['friendList', 0], + }, + ], + completed: [ + { path: ['friendList', 0] }, + { path: ['friendList'] }, + { path: ['friendList', 1] }, + { path: ['friendList', 2] }, + { path: ['friendList', 0, 'appearsIn'] }, + { path: ['friendList', 1, 'appearsIn'] }, + { path: ['friendList', 2, 'appearsIn'] }, + ], hasNext: false, }, ]); @@ -366,15 +486,11 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [{ name: 'Luke', id: '1' }], - path: ['friendList'], - }, - { - items: [{ name: 'Han', id: '2' }], - path: ['friendList'], - }, - { - items: [{ name: 'Leia', id: '3' }], + items: [ + { name: 'Luke', id: '1' }, + { name: 'Han', id: '2' }, + { name: 'Leia', id: '3' }, + ], path: ['friendList'], }, ], @@ -481,7 +597,7 @@ describe('Execute: stream directive', () => { it('Handles rejections in a field that returns a list of promises after initialCount is reached', async () => { const document = parse(` query { - friendList @stream(initialCount: 1) { + friendList @stream { name id } @@ -499,7 +615,7 @@ describe('Execute: stream directive', () => { expectJSON(result).toDeepEqual([ { data: { - friendList: [{ name: 'Luke', id: '1' }], + friendList: [], }, pending: [{ path: ['friendList'] }], hasNext: true, @@ -507,7 +623,7 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [null], + items: [{ name: 'Luke', id: '1' }, null, { name: 'Leia', id: '3' }], path: ['friendList'], errors: [ { @@ -517,9 +633,55 @@ describe('Execute: stream directive', () => { }, ], }, + ], + completed: [{ path: ['friendList'] }], + hasNext: false, + }, + ]); + }); + it('Handles multiple rejections in a field that returns a list of promises after initialCount is reached', async () => { + const document = parse(` + query { + friendList @stream(initialCount: 1) { + name + id + } + } + `); + const result = await complete(document, { + friendList: () => + friends.map((f, i) => { + if (i >= 1) { + return Promise.reject(new Error('bad')); + } + return Promise.resolve(f); + }), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{ name: 'Luke', id: '1' }], + }, + pending: [{ path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ { - items: [{ name: 'Leia', id: '3' }], + items: [null, null], path: ['friendList'], + errors: [ + { + message: 'bad', + locations: [{ line: 3, column: 9 }], + path: ['friendList', 1], + }, + { + message: 'bad', + locations: [{ line: 3, column: 9 }], + path: ['friendList', 2], + }, + ], }, ], completed: [{ path: ['friendList'] }], @@ -921,7 +1083,7 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [null], + items: [null, { nonNullName: 'Han' }], path: ['friendList'], errors: [ { @@ -931,10 +1093,6 @@ describe('Execute: stream directive', () => { }, ], }, - { - items: [{ 
nonNullName: 'Han' }], - path: ['friendList'], - }, ], completed: [{ path: ['friendList'] }], hasNext: false, @@ -967,7 +1125,7 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [null], + items: [null, { nonNullName: 'Han' }], path: ['friendList'], errors: [ { @@ -977,10 +1135,6 @@ describe('Execute: stream directive', () => { }, ], }, - { - items: [{ nonNullName: 'Han' }], - path: ['friendList'], - }, ], completed: [{ path: ['friendList'] }], hasNext: false, @@ -1097,7 +1251,7 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [null], + items: [null, { nonNullName: 'Han' }], path: ['friendList'], errors: [ { @@ -1107,10 +1261,6 @@ describe('Execute: stream directive', () => { }, ], }, - { - items: [{ nonNullName: 'Han' }], - path: ['friendList'], - }, ], hasNext: true, }, @@ -1737,11 +1887,10 @@ describe('Execute: stream directive', () => { { incremental: [ { - items: [{ id: '1', name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList'], - }, - { - items: [{ id: '2', name: 'Han' }], + items: [ + { id: '1', name: 'Luke' }, + { id: '2', name: 'Han' }, + ], path: ['nestedObject', 'nestedFriendList'], }, ], @@ -1803,13 +1952,9 @@ describe('Execute: stream directive', () => { pending: [{ path: ['nestedObject', 'nestedFriendList'] }], incremental: [ { - data: { scalarField: 'slow', nestedFriendList: [] }, + data: { scalarField: 'slow', nestedFriendList: [{ name: 'Luke' }] }, path: ['nestedObject'], }, - { - items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList'], - }, ], completed: [{ path: ['nestedObject'] }], hasNext: true, diff --git a/src/execution/execute.ts b/src/execution/execute.ts index 1683aaffae..9395727a2a 100644 --- a/src/execution/execute.ts +++ b/src/execution/execute.ts @@ -1475,6 +1475,7 @@ function addNewDeferredFragments( incrementalPublisher.reportNewDeferFragmentRecord( deferredFragmentRecord, + incrementalDataRecord, parent, ); From 4a38de2d20b8fcac71678899e69957c3a0e3d403 Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Thu, 6 Jul 2023 22:21:58 +0300 Subject: [PATCH 6/6] don't send completed if pending was not sent as now we hae true inlining --- src/execution/IncrementalPublisher.ts | 24 ++++++++++++++++-------- src/execution/__tests__/defer-test.ts | 14 ++------------ src/execution/__tests__/stream-test.ts | 12 +----------- 3 files changed, 19 insertions(+), 31 deletions(-) diff --git a/src/execution/IncrementalPublisher.ts b/src/execution/IncrementalPublisher.ts index 113784752b..0292d7caad 100644 --- a/src/execution/IncrementalPublisher.ts +++ b/src/execution/IncrementalPublisher.ts @@ -584,10 +584,15 @@ export class IncrementalPublisher { } if (isStreamItemsRecord(subsequentResultRecord)) { if (subsequentResultRecord.isFinalRecord) { - newPendingSources.delete(subsequentResultRecord.streamRecord); - completedResults.push( - this._completedRecordToResult(subsequentResultRecord.streamRecord), - ); + if (newPendingSources.has(subsequentResultRecord.streamRecord)) { + newPendingSources.delete(subsequentResultRecord.streamRecord); + } else { + completedResults.push( + this._completedRecordToResult( + subsequentResultRecord.streamRecord, + ), + ); + } } if (subsequentResultRecord.isCompletedAsyncIterator) { // async iterable resolver just finished but there may be pending payloads @@ -650,10 +655,13 @@ export class IncrementalPublisher { ); } } else { - newPendingSources.delete(subsequentResultRecord); - completedResults.push( - this._completedRecordToResult(subsequentResultRecord), - ); + if 
(newPendingSources.has(subsequentResultRecord)) { + newPendingSources.delete(subsequentResultRecord); + } else { + completedResults.push( + this._completedRecordToResult(subsequentResultRecord), + ); + } if (subsequentResultRecord.errors.length > 0) { continue; } diff --git a/src/execution/__tests__/defer-test.ts b/src/execution/__tests__/defer-test.ts index ce19fca3a9..558c1c0994 100644 --- a/src/execution/__tests__/defer-test.ts +++ b/src/execution/__tests__/defer-test.ts @@ -1191,11 +1191,7 @@ describe('Execute: defer directive', () => { path: ['hero', 'nestedObject', 'deeperObject'], }, ], - completed: [ - { path: ['hero'] }, - { path: ['hero', 'nestedObject'] }, - { path: ['hero', 'nestedObject', 'deeperObject'] }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); @@ -1261,7 +1257,6 @@ describe('Execute: defer directive', () => { completed: [ { path: ['hero'] }, { path: ['hero', 'nestedObject', 'deeperObject'] }, - { path: ['hero', 'nestedObject', 'deeperObject'] }, ], hasNext: false, }, @@ -2094,12 +2089,7 @@ describe('Execute: defer directive', () => { path: ['hero'], }, ], - completed: [ - { path: ['hero'] }, - { path: ['hero', 'friends', 0] }, - { path: ['hero', 'friends', 1] }, - { path: ['hero', 'friends', 2] }, - ], + completed: [{ path: ['hero'] }], hasNext: false, }, ]); diff --git a/src/execution/__tests__/stream-test.ts b/src/execution/__tests__/stream-test.ts index 32f7e310ca..a1b217815a 100644 --- a/src/execution/__tests__/stream-test.ts +++ b/src/execution/__tests__/stream-test.ts @@ -355,8 +355,6 @@ describe('Execute: stream directive', () => { completed: [ { path: ['friendList'] }, { path: ['friendList', 0, 'appearsIn'] }, - { path: ['friendList', 1, 'appearsIn'] }, - { path: ['friendList', 2, 'appearsIn'] }, ], hasNext: false, }, @@ -404,15 +402,7 @@ describe('Execute: stream directive', () => { path: ['friendList', 0], }, ], - completed: [ - { path: ['friendList', 0] }, - { path: ['friendList'] }, - { path: ['friendList', 1] }, - { path: ['friendList', 2] }, - { path: ['friendList', 0, 'appearsIn'] }, - { path: ['friendList', 1, 'appearsIn'] }, - { path: ['friendList', 2, 'appearsIn'] }, - ], + completed: [{ path: ['friendList', 0] }, { path: ['friendList'] }], hasNext: false, }, ]);
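
The updated test expectations above reflect the newer wire shape: the initial payload announces pending defer/stream paths, each subsequent payload carries deduplicated incremental entries keyed by path, and completed notices are emitted only for records whose pending entry was actually sent (per the patch subject, results inlined into a parent are no longer separately completed). A minimal consumer sketch, not part of the patch, assuming the experimentalExecuteIncrementally export and the payload fields exercised by the tests above:

    import { experimentalExecuteIncrementally } from 'graphql';
    import type { ExecutionArgs } from 'graphql';

    // Hypothetical helper: drains an experimental incremental execution into an
    // ordered list of payloads, roughly as the test harness above is assumed to do.
    async function drainIncrementalResult(args: ExecutionArgs) {
      const result = await experimentalExecuteIncrementally(args);
      if (!('initialResult' in result)) {
        // No @defer/@stream was reached; a single ExecutionResult is returned.
        return [result];
      }
      const payloads: Array<unknown> = [result.initialResult];
      for await (const payload of result.subsequentResults) {
        // payload.incremental holds merged data/items for each path;
        // payload.completed lists only defers/streams previously announced
        // via the initial payload's pending entries (or a later pending list).
        payloads.push(payload);
      }
      return payloads;
    }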