diff --git a/.changeset/fuzzy-elephants-smash.md b/.changeset/fuzzy-elephants-smash.md new file mode 100644 index 0000000000..1cbf781bbb --- /dev/null +++ b/.changeset/fuzzy-elephants-smash.md @@ -0,0 +1,5 @@ +--- +'houdini': minor +--- + +Add @optimisticKey decorator diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fc118bddeb..0013e13819 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -148,7 +148,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - framework: [e2e-svelte, e2e-kit] + framework: [e2e-svelte, e2e-kit, e2e-react] runs-on: ${{ matrix.os }} steps: @@ -160,7 +160,7 @@ jobs: - name: Install Node.js uses: actions/setup-node@v3 with: - node-version: 16.17.0 + node-version: 21.7.3 # pnpm @@ -209,8 +209,13 @@ jobs: - name: Build packages run: pnpm run build - - name: End-to-End Tests ${{ matrix.framework }} + - name: End-to-End Tests (Svelte) run: pnpm run --filter ${{ matrix.framework }} build && pnpm --filter ${{ matrix.framework }} tests + if: matrix.framework != 'e2e-react' + + - name: End-to-End Tests (React) + run: pnpm --filter ${{ matrix.framework }} tests + if: matrix.framework == 'e2e-react' e2e_sveltekit_linter: name: End-to-End Linter diff --git a/e2e/_api/graphql.mjs b/e2e/_api/graphql.mjs index 2a893fc501..6aadc6d1af 100644 --- a/e2e/_api/graphql.mjs +++ b/e2e/_api/graphql.mjs @@ -4,17 +4,266 @@ import { GraphQLError } from 'graphql' import { GraphQLScalarType, Kind } from 'graphql' import { createPubSub } from 'graphql-yoga' import path from 'path' -import url from 'url' import { connectionFromArray } from './util.mjs' const pubSub = createPubSub() -const sourceFiles = ['schema.graphql', 'schema-hello.graphql'] -export const typeDefs = sourceFiles.map((filepath) => { - const filepathToUse = path.join(path.dirname(url.fileURLToPath(import.meta.url)), filepath) - return fs.readFileSync(path.resolve(filepathToUse), 'utf-8') -}) +export const typeDefs = /* GraphQL */ ` + """ + Date custom 
scalar type + """ + scalar DateTime + scalar File + + """ + Can be Value1 or Value2. + """ + enum MyEnum { + "The first value" + Value1 + "The second value" + Value2 @deprecated(reason: "Use Value1 instead") + } + + enum TypeOfUser { + NICE + COOL + } + + enum ForceReturn { + "Normal" + NORMAL + "No value" + NULL + "Some error" + ERROR + } + + type Mutation { + addUser( + """ + The users birth date + """ + birthDate: DateTime! + name: String! + snapshot: String! + enumValue: MyEnum + types: [TypeOfUser!] + delay: Int + force: ForceReturn + ): User + addNonNullUser( + birthDate: DateTime! + name: String! + snapshot: String! + enumValue: MyEnum + types: [TypeOfUser!] + delay: Int + force: ForceReturn + ): User! + updateUser( + id: ID! + name: String + snapshot: String! + birthDate: DateTime + delay: Int + avatarURL: String + ): User! + updateUserByID( + id: ID! + name: String + snapshot: String! + birthDate: DateTime + delay: Int + avatarURL: String + ): User! + singleUpload(file: File!): String! + multipleUpload(files: [File!]!): [String!]! + addCity(name: String!): City! + addLibrary(city: ID!, name: String!): Library! + addBook(library: ID!, title: String!): Book! + deleteCity(city: ID!): City! + deleteLibrary(library: ID!): Library! + deleteBook(book: ID!, delay: Int, force: ForceReturn): Book + updateRentedBook(userId: String!, bookId: Int!, rate: Int!): RentedBook + createA(a: String!): A! + createB(b: String!): B! + } + + """ + A node. + """ + interface Node { + id: ID! + } + + type PageInfo { + endCursor: String + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + } + + input UserNameFilter { + name: String! + } + + union UnionAorB = A | B + + type Query { + hello: String + aOrB: [UnionAorB!]! + avgYearsBirthDate: Float! + node(id: ID!): Node + user(id: ID!, snapshot: String!, tmp: Boolean, delay: Int, forceNullDate: Boolean): User! + usersConnection( + after: String + before: String + first: Int + last: Int + snapshot: String! 
+ ): UserConnection! + usersList(limit: Int = 4, offset: Int, snapshot: String!): [User!]! + userNodes(limit: Int = 4, offset: Int, snapshot: String!): UserNodes! + userSearch(filter: UserNameFilter!, snapshot: String!): [User!]! + session: String + cities: [City]! + city(id: ID!, delay: Int): City + userNodesResult(snapshot: String!, forceMessage: Boolean!): UserNodesResult! + userResult(id: ID!, snapshot: String!, forceMessage: Boolean!): UserResult! + rentedBooks: [RentedBook!]! + animals: AnimalConnection! + monkeys: MonkeyConnection! + """ + Get a monkey by its id + """ + monkey(id: ID!): Monkey + } + + type Subscription { + userUpdate(id: ID!, snapshot: String): User + } + + type User implements Node { + birthDate: DateTime + friendsConnection(after: String, before: String, first: Int, last: Int): UserConnection! + "This is the same list as what's used globally. its here to tests fragments" + usersConnection(after: String, before: String, first: Int, last: Int): UserConnection! + usersConnectionSnapshot( + after: String + before: String + first: Int + last: Int + snapshot: String! + ): UserConnection! + "This is the same list as what's used globally. its here to tests fragments" + userSearch(filter: UserNameFilter!, snapshot: String!): [User!]! + friendsList(limit: Int, offset: Int): [User!]! + id: ID! + name: String! + enumValue: MyEnum + types: [TypeOfUser!]! + testField(someParam: Boolean!): String + avatarURL(size: Int): String! + } + + interface Animal implements Node { + id: ID! + name: String! + } + + """ + A monkey. + """ + type Monkey implements Node & Animal { + id: ID! + name: String! + """ + Whether the monkey has a banana or not + """ + hasBanana: Boolean! + """ + Whether the monkey has a banana or not + """ + oldHasBanana: Boolean @deprecated(reason: "Use hasBanana") + } + + interface AnimalConnection { + edges: [AnimalEdge!]! + pageInfo: PageInfo! 
+ } + + interface AnimalEdge { + cursor: String + node: Animal + } + + type MonkeyConnection implements AnimalConnection { + edges: [MonkeyEdge!]! + pageInfo: PageInfo! + } + + type MonkeyEdge implements AnimalEdge { + cursor: String + node: Monkey + } + + type UserConnection { + edges: [UserEdge!]! + pageInfo: PageInfo! + } + + type UserEdge { + cursor: String + node: User + } + + type UserNodes { + totalCount: Int + nodes: [User!]! + } + + type Book { + id: ID! + title: String! + } + + type Library { + id: ID! + name: String! + books: [Book]! + } + + type City { + id: ID! + name: String! + libraries: [Library]! + } + + type RentedBook { + userId: String! + bookId: Int! + rate: Int! + } + + type A { + id: ID! + a: String! + } + + type B { + id: ID! + b: String! + } + + union UserNodesResult = UserNodes | Message1 + union UserResult = User | Message1 + + type Message1 { + message: String! + } +` // Example Cities/Libraries/Books data // Assume a traditional relational database for storage - each table with unique ID. @@ -356,6 +605,7 @@ export const resolvers = { birthDate: args.birthDate, enumValue: args.enumValue, types: args.types ?? 
[], + avatarURL: '', } list.push(user) return user @@ -376,6 +626,33 @@ export const resolvers = { if (args.name) { list[userIndex].name = args.name } + if (args.avatarURL) { + list[userIndex].avatarURL = args.avatarURL + } + + pubSub.publish('userUpdate', args.id + ':' + args.snapshot, list[userIndex]) + + return list[userIndex] + }, + updateUserByID: async (_, args) => { + if (args.delay) { + await sleep(args.delay) + } + + const list = getUserSnapshot(args.snapshot) + const userIndex = list.findIndex((c) => c.id === args.id) + if (userIndex === -1) { + throw new GraphQLError('User not found', { code: 404 }) + } + if (args.birthDate) { + list[userIndex].birthDate = args.birthDate + } + if (args.name) { + list[userIndex].name = args.name + } + if (args.avatarURL) { + list[userIndex].avatarURL = args.avatarURL + } pubSub.publish('userUpdate', args.id + ':' + args.snapshot, list[userIndex]) diff --git a/e2e/_api/graphql.mjs.d.ts b/e2e/_api/graphql.mjs.d.ts index 2596b15315..bffc567edd 100644 --- a/e2e/_api/graphql.mjs.d.ts +++ b/e2e/_api/graphql.mjs.d.ts @@ -5,5 +5,7 @@ export type User = { avatarURL: string } -export const dataUsers: User[] = [] -export function getUserSnapshot(snapshot: string): User {} +export const dataUsers: User[] +export function getUserSnapshot(snapshot: string): User +export const resolvers: any +export const typeDefs: any diff --git a/e2e/_api/schema-hello.graphql b/e2e/_api/schema-hello.graphql deleted file mode 100644 index 836660f9d1..0000000000 --- a/e2e/_api/schema-hello.graphql +++ /dev/null @@ -1,3 +0,0 @@ -extend type Query { - hello: String -} diff --git a/e2e/_api/schema.graphql b/e2e/_api/schema.graphql index b72816845e..21bc432e95 100644 --- a/e2e/_api/schema.graphql +++ b/e2e/_api/schema.graphql @@ -30,7 +30,9 @@ enum ForceReturn { type Mutation { addUser( - """The users birth date""" + """ + The users birth date + """ birthDate: DateTime! name: String! snapshot: String! 
@@ -48,7 +50,22 @@ type Mutation { delay: Int force: ForceReturn ): User! - updateUser(id: ID!, name: String, snapshot: String!, birthDate: DateTime, delay: Int): User! + updateUser( + id: ID! + name: String + snapshot: String! + birthDate: DateTime + delay: Int + avatarURL: String + ): User! + updateUserByID( + id: ID! + name: String + snapshot: String! + birthDate: DateTime + delay: Int + avatarURL: String + ): User! singleUpload(file: File!): String! multipleUpload(files: [File!]!): [String!]! addCity(name: String!): City! @@ -83,6 +100,7 @@ input UserNameFilter { union UnionAorB = A | B type Query { + hello: String aOrB: [UnionAorB!]! avgYearsBirthDate: Float! node(id: ID!): Node diff --git a/e2e/_api/server.mjs b/e2e/_api/server.mjs index ce736ea311..f5dbaa48b1 100755 --- a/e2e/_api/server.mjs +++ b/e2e/_api/server.mjs @@ -58,7 +58,7 @@ async function main() { cors: { origin: ['*'], credentials: true, - methods: ['POST'], + methods: ['*'], }, maskedErrors: false, graphiql: { diff --git a/e2e/react/houdini.config.js b/e2e/react/houdini.config.js index 316b80b02c..458347bf7f 100644 --- a/e2e/react/houdini.config.js +++ b/e2e/react/houdini.config.js @@ -2,9 +2,6 @@ /// /** @type {import('houdini').ConfigFile} */ const config = { - watchSchema: { - url: 'http://localhost:4000/graphql', - }, defaultPartial: true, scalars: { DateTime: { diff --git a/e2e/react/package.json b/e2e/react/package.json index d8fa61f143..3f217ee706 100644 --- a/e2e/react/package.json +++ b/e2e/react/package.json @@ -4,15 +4,13 @@ "version": "0.0.0", "type": "module", "scripts": { - "api": "cross-env TZ=utc e2e-api", "build:": "cd ../../ && ((pnpm run build && cd -) || (cd - && exit 1))", "build:dev": "pnpm build: && pnpm dev", "build:web": "pnpm build: && pnpm web", "build:test": "pnpm build: && pnpm test", "build:generate": "pnpm build: && pnpm houdini generate", "build:build": "pnpm build: && pnpm build", - "web": "vite dev", - "dev": "concurrently \"pnpm run web\" \"pnpm run api\" -n 
\"web,api\" -c \"green,magenta\"", + "dev": "vite dev", "build": "vite build", "tests": "playwright test", "test": "npm run tests", @@ -32,8 +30,8 @@ "houdini": "workspace:^", "houdini-adapter-cloudflare": "workspace:^", "houdini-react": "workspace:^", - "react": "^18.3.0-canary-d6dcad6a8-20230914", - "react-dom": "^18.3.0-canary-d6dcad6a8-20230914", + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605", "react-streaming-compat": "^0.3.18" }, "devDependencies": { diff --git a/e2e/react/public/assets/output.css b/e2e/react/public/assets/output.css index 83ca340122..301b97f063 100644 --- a/e2e/react/public/assets/output.css +++ b/e2e/react/public/assets/output.css @@ -1264,7 +1264,7 @@ td, th { padding: 6px; text-align: left; - vertical-align: top; + vertical-align: middle; word-wrap: break-word; } @@ -1469,3 +1469,4 @@ body > footer { text-decoration: underline; } } + diff --git a/e2e/react/schema.graphql b/e2e/react/schema.graphql index 42c28bca36..6fea3d4a96 100644 --- a/e2e/react/schema.graphql +++ b/e2e/react/schema.graphql @@ -104,7 +104,7 @@ type Mutation { multipleUpload(files: [File!]!): [String!]! singleUpload(file: File!): String! updateRentedBook(bookId: Int!, rate: Int!, userId: String!): RentedBook - updateUser(birthDate: DateTime, delay: Int, id: ID!, name: String, snapshot: String!): User! + updateUser(avatarURL: String, birthDate: DateTime, delay: Int, id: ID!, name: String, snapshot: String!): User! 
} """Can be Value1 or Value2.""" diff --git a/e2e/react/src/+client.ts b/e2e/react/src/+client.ts index d144a7e115..6e9e951288 100644 --- a/e2e/react/src/+client.ts +++ b/e2e/react/src/+client.ts @@ -1,6 +1,4 @@ import { HoudiniClient } from '$houdini' // Export the Houdini client -export default new HoudiniClient({ - url: 'http://localhost:4000/graphql', -}) +export default new HoudiniClient() diff --git a/e2e/react/src/api/+schema.ts b/e2e/react/src/api/+schema.ts new file mode 100644 index 0000000000..f01f6ac1dd --- /dev/null +++ b/e2e/react/src/api/+schema.ts @@ -0,0 +1,7 @@ +import { typeDefs, resolvers } from 'e2e-api/graphql.mjs' +import { createSchema } from 'graphql-yoga' + +export default createSchema({ + typeDefs, + resolvers, +}) diff --git a/e2e/react/src/routes/handle/test.ts b/e2e/react/src/routes/handle/test.ts index 4fcf59997d..d2ef73e12c 100644 --- a/e2e/react/src/routes/handle/test.ts +++ b/e2e/react/src/routes/handle/test.ts @@ -36,34 +36,3 @@ test('handle fetch remembers server-side variables', async function ({ page }) { await expect(page.textContent('#result')).resolves.toContain(dataUsers[0].avatarURL) await expect(getVariables()).resolves.toEqual({ userID: '1', size: 51 }) }) - -test('handle survives navigation', async function ({ page }) { - // in this test, variables are stringified in the #variables div - const getVariables = async () => { - return JSON.parse((await page.textContent('#variables')) as string) - } - - // visit the page for user 2 - await goto(page, routes.handle_2) - await expect(page.textContent('#result')).resolves.toEqual(dataUsers[1].avatarURL) - await expect(getVariables()).resolves.toEqual({ userID: '2' }) - - // navigate to the page for user 1 by clicking on the nav link - await page.click(`a[href="${routes.handle_1}"]`) - // wait for the page to load - await page.waitForSelector('[data-user="1"]', { - timeout: 1000, - }) - await expect(page.textContent('#result')).resolves.toEqual(dataUsers[0].avatarURL) - await 
expect(getVariables()).resolves.toEqual({ userID: '1' }) - - // click on the larger button and wait for it to resolve - await page.click('#larger') - await page.waitForSelector('[data-size="51"]', { - timeout: 1000, - }) - - // make sure the and variables line up - await expect(page.textContent('#result')).resolves.toContain(dataUsers[0].avatarURL) - await expect(getVariables()).resolves.toEqual({ userID: '1', size: 51 }) -}) diff --git a/e2e/react/src/routes/optimistic-keys/+page.gql b/e2e/react/src/routes/optimistic-keys/+page.gql new file mode 100644 index 0000000000..8153088ae0 --- /dev/null +++ b/e2e/react/src/routes/optimistic-keys/+page.gql @@ -0,0 +1,11 @@ +query OptimisticKeyTest { + usersConnection(snapshot: "OptimisticKeyTest") @list(name: "OptimisticKeyTest") @required { + edges @required { + node @required { + id + name + avatarURL + } + } + } +} diff --git a/e2e/react/src/routes/optimistic-keys/+page.tsx b/e2e/react/src/routes/optimistic-keys/+page.tsx new file mode 100644 index 0000000000..9907513fb5 --- /dev/null +++ b/e2e/react/src/routes/optimistic-keys/+page.tsx @@ -0,0 +1,108 @@ +import { useMutation, graphql } from '$houdini' +import React from 'react' + +import { PageProps } from './$types' + +export default function OptimisticKeyTestView({ OptimisticKeyTest }: PageProps) { + const [error, setError] = React.useState('') + + const [_, update] = useMutation( + graphql(` + mutation OptimisticKeyTestUpdateMutation($id: ID!, $avatarURL: String!) { + updateUserByID( + id: $id + snapshot: "OptimisticKeyTest" + avatarURL: $avatarURL + delay: 500 + ) { + id + avatarURL + } + } + `) + ) + + const [__, create] = useMutation( + graphql(` + mutation OptimisticKeyTestCreateMutation($name: String!, $birthDate: DateTime!) { + addUser( + snapshot: "OptimisticKeyTest" + name: $name + birthDate: $birthDate + delay: 400 + ) { + id @optimisticKey + ...OptimisticKeyTest_insert @mask_disable @prepend + } + } + `) + ) + + return ( + <> + {error ?
{error}
: null} +
+ +
+ + + + + + + + + + + + {OptimisticKeyTest?.usersConnection.edges.map((edge, i) => ( + + + + + + + ))} + +
IDNameAvatar URL
{edge.node.id}{edge.node.name}{edge.node.avatarURL} + +
+ + ) +} diff --git a/e2e/react/src/routes/optimistic-keys/test.ts b/e2e/react/src/routes/optimistic-keys/test.ts new file mode 100644 index 0000000000..30a4ae99e9 --- /dev/null +++ b/e2e/react/src/routes/optimistic-keys/test.ts @@ -0,0 +1,42 @@ +import { expect, test } from '@playwright/test' +import { routes } from '~/utils/routes' +import { sleep } from '~/utils/sleep' +import { goto } from '~/utils/testsHelper.js' + +test('@optimisticKey', async ({ page }) => { + await goto(page, routes.optimistic_keys) + + // in order for this to work, we should be able to create a new user + // and then update it immediately + await page.click('[data-test-action="create"]') + + const getValue = async () => { + const elements = await page.getByTestId('target') + return await elements.textContent() + } + + // the value in the last row should be 'optimistic value 1' + expect(await getValue()).toBe('optimistic value 1') + + // click on the last list in the row + await page.click('[data-test-action="update"]') + + // wait a few seconds and make sure there are no errors + await sleep(300) + let found = false + try { + await page.waitForSelector('[data-error="true"]', { timeout: 100 }) + found = true + } catch {} + + expect(found).toBe(false) + + // the value in the last row should be 'optimistic value 2' + expect(await getValue()).toBe('optimistic value 2') + + // wait for the final mutation to resolve + await sleep(500) + + // the value in the last row should be 'final value' + expect(await getValue()).toBe('final value') +}) diff --git a/e2e/react/src/routes/pagination/query/offset/test.ts b/e2e/react/src/routes/pagination/query/offset/test.ts index eef83a7f48..7f046302a1 100644 --- a/e2e/react/src/routes/pagination/query/offset/test.ts +++ b/e2e/react/src/routes/pagination/query/offset/test.ts @@ -1,4 +1,4 @@ -import { expect, test } from '@playwright/test' +import { test } from '@playwright/test' import { routes } from '~/utils/routes.js' import { expect_1_gql, expect_to_be, 
goto } from '~/utils/testsHelper.js' diff --git a/e2e/react/src/routes/scalars/test.ts b/e2e/react/src/routes/scalars/test.ts index bdd68bad3b..6733fb8609 100644 --- a/e2e/react/src/routes/scalars/test.ts +++ b/e2e/react/src/routes/scalars/test.ts @@ -6,6 +6,6 @@ test('Scalars', async ({ page }) => { await goto(page, routes.scalars) await expect(page.textContent('#result')).resolves.toEqual( - 'Bruce Willis-3/18/1955Samuel Jackson-12/20/1948Morgan Freeman-5/30/1937Tom Hanks-7/8/1956' + 'Bruce Willis-3/19/1955Samuel Jackson-12/21/1948Morgan Freeman-5/31/1937Tom Hanks-7/9/1956' ) }) diff --git a/e2e/react/src/utils/routes.ts b/e2e/react/src/utils/routes.ts index 481cbbcf1f..7d31ca0c97 100644 --- a/e2e/react/src/utils/routes.ts +++ b/e2e/react/src/utils/routes.ts @@ -12,4 +12,5 @@ export const routes = { pagination_query_offset: '/pagination/query/offset', pagination_query_offset_singlepage: '/pagination/query/offset-singlepage', pagination_query_offset_variable: '/pagination/query/offset-variable/2', + optimistic_keys: '/optimistic-keys', } as const diff --git a/e2e/react/src/utils/testsHelper.ts b/e2e/react/src/utils/testsHelper.ts index 976cf5840f..f8d701ac1e 100644 --- a/e2e/react/src/utils/testsHelper.ts +++ b/e2e/react/src/utils/testsHelper.ts @@ -55,7 +55,7 @@ export async function expect_n_gql( async function fnRes(response: Response) { // console.log('<<', response.status(), response.url()); - if (response.url().endsWith('/graphql')) { + if (response.url().endsWith('/_api')) { timing.push(new Date().valueOf() - start) try { const json = await response.json() diff --git a/package.json b/package.json index 4280e47f65..51ba70b2fc 100755 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ }, "packageManager": "pnpm@8.6.7", "lint-staged": { - "*.ts": "prettier -w ", + "*.ts&!(*.d.ts)": "prettier -w ", "*.tsx": "prettier -w ", "*.js": "prettier -w ", "*.json": "prettier -w " diff --git a/packages/create-houdini/templates/react-typescript/package.json 
b/packages/create-houdini/templates/react-typescript/package.json index e34f760d63..6eea120fcd 100644 --- a/packages/create-houdini/templates/react-typescript/package.json +++ b/packages/create-houdini/templates/react-typescript/package.json @@ -12,8 +12,8 @@ "houdini": "^HOUDINI_VERSION", "houdini-react": "^HOUDINI_VERSION", "houdini-adapter-auto": "^HOUDINI_VERSION", - "react": "19.0.0-canary-2b036d3f1-20240327", - "react-dom": "19.0.0-canary-2b036d3f1-20240327", + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605", "graphql-yoga": "4.0.4", "graphql": "15.8.0", "@whatwg-node/server": "^0.9.14" @@ -27,7 +27,7 @@ }, "resolutions": { "graphql": "15.8.0", - "react": "19.0.0-canary-2b036d3f1-20240327", - "react-dom": "19.0.0-canary-2b036d3f1-20240327" + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605" } } diff --git a/packages/create-houdini/templates/react/package.json b/packages/create-houdini/templates/react/package.json index 9ca1789e72..1e0bcb4111 100644 --- a/packages/create-houdini/templates/react/package.json +++ b/packages/create-houdini/templates/react/package.json @@ -12,8 +12,8 @@ "houdini": "^HOUDINI_VERSION", "houdini-react": "^HOUDINI_VERSION", "houdini-adapter-auto": "^HOUDINI_VERSION", - "react": "19.0.0-canary-2b036d3f1-20240327", - "react-dom": "19.0.0-canary-2b036d3f1-20240327", + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605", "graphql-yoga": "4.0.4", "graphql": "15.8.0", "@whatwg-node/server": "^0.9.14" @@ -24,7 +24,7 @@ }, "resolutions": { "graphql": "15.8.0", - "react": "19.0.0-canary-2b036d3f1-20240327", - "react-dom": "19.0.0-canary-2b036d3f1-20240327" + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605" } } diff --git a/packages/houdini-react/package.json b/packages/houdini-react/package.json index 44b7a76b44..6d474870d8 100644 --- a/packages/houdini-react/package.json +++ 
b/packages/houdini-react/package.json @@ -42,8 +42,8 @@ "graphql": "^15.8.0", "graphql-yoga": "^4.0.4", "houdini": "workspace:^", - "react": "19.0.0-canary-2b036d3f1-20240327", - "react-dom": "19.0.0-canary-2b036d3f1-20240327", + "react": "19.0.0-rc-eb259b5d3b-20240605", + "react-dom": "19.0.0-rc-eb259b5d3b-20240605", "react-streaming-compat": "^0.3.18", "recast": "^0.23.1", "rollup": "^3.7.4", diff --git a/packages/houdini-react/src/plugin/index.ts b/packages/houdini-react/src/plugin/index.ts index 19f9f3a5a4..0cd73b1dea 100644 --- a/packages/houdini-react/src/plugin/index.ts +++ b/packages/houdini-react/src/plugin/index.ts @@ -4,12 +4,12 @@ import { plugin, fragmentKey, load_manifest, + processComponentFieldDirective, type ArtifactKinds, type Document, type Config, type Plugin, type ProjectManifest, - processComponentFieldDirective, } from 'houdini' import path from 'node:path' import { loadEnv } from 'vite' @@ -31,13 +31,11 @@ export const hooks: Plugin = async () => ({ // always make sure our definition of the manifest is up to date before // we generate anything async beforeGenerate({ config }) { - if (!manifest) { - try { - manifest = await load_manifest({ config }) - } catch (e) { - console.log('something went wrong: ' + (e as Error).message) - return - } + try { + manifest = await load_manifest({ config }) + } catch (e) { + console.log('something went wrong: ' + (e as Error).message) + return } }, diff --git a/packages/houdini-react/src/plugin/vite.tsx b/packages/houdini-react/src/plugin/vite.tsx index 8de6d609fe..88edff4307 100644 --- a/packages/houdini-react/src/plugin/vite.tsx +++ b/packages/houdini-react/src/plugin/vite.tsx @@ -11,7 +11,7 @@ import { import React from 'react' import { build, ConfigEnv, type BuildOptions, type Connect } from 'vite' -import { setManifest } from '.' +import { manifest, setManifest } from '.' 
import { writeTsconfig } from './codegen/typeRoot' let viteEnv: ConfigEnv @@ -30,12 +30,11 @@ let devServer = false // virtual:houdini/pages/[name] - An entry for every page // virtual:houdini/artifacts/[name] - An entry for loading an artifact and notifying the artifact cache -let manifest: ProjectManifest - export default { // we want to set up some vite aliases by default async config(config, env) { viteEnv = env + let manifest: ProjectManifest try { manifest = await load_manifest({ config, @@ -182,11 +181,14 @@ export default { // the filename is the true arg. the extension just tells vite how to transfrom. const parsedPath = path.parse(arg) - const queryName = parsedPath.name + const pageName = parsedPath.name // if we are rendering the virtual page if (which === 'pages') { - const page = manifest.pages[queryName] + const page = manifest.pages[pageName] + if (!page) { + throw new Error('unknown page' + pageName) + } // we need the list of queries that have loading states (and therefore create ssr signals) const pendingQueries = page.queries.filter((query) => { @@ -205,7 +207,7 @@ export default { import { Cache } from '$houdini/runtime/cache/cache' import { router_cache } from '$houdini' import client from '$houdini/plugins/houdini-react/runtime/client' - import Component from '$houdini/plugins/houdini-react/units/entries/${queryName}.jsx' + import Component from '$houdini/plugins/houdini-react/units/entries/${pageName}.jsx' import { injectComponents } from '$houdini/plugins/houdini-react/runtime/componentFields' // if there is pending data (or artifacts) then we should prime the caches @@ -223,7 +225,7 @@ export default { componentCache: window.__houdini__client__.componentCache, createComponent: (fn, props) => React.createElement(fn, props) }) - window.__houdini__hydration__layer__ ??= window.__houdini__cache__._internal_unstable.storage.createLayer(true) + window.__houdini__hydration__layer__ ??= 
window.__houdini__cache__._internal_unstable.storage.createLayer() // link up the cache we just created with the client window.__houdini__client__.setCache(window.__houdini__cache__) @@ -283,7 +285,7 @@ export default { initialVariables: window.__houdini__pending_variables__, initialArtifacts, components: { - '${queryName}': Component + '${pageName}': Component } }) } @@ -306,14 +308,14 @@ export default { if (which === 'artifacts') { // the arg is the name of the artifact const artifact = (await fs.readFile( - path.join(config.artifactDirectory, queryName + '.js') + path.join(config.artifactDirectory, pageName + '.js') ))!.replace('export default', 'const artifact = ') return ( artifact + ` -if (window.__houdini__nav_caches__ && window.__houdini__nav_caches__.artifact_cache && !window.__houdini__nav_caches__.artifact_cache.has("${queryName}")) { - window.__houdini__nav_caches__.artifact_cache.set(${JSON.stringify(queryName)}, artifact) +if (window.__houdini__nav_caches__ && window.__houdini__nav_caches__.artifact_cache && !window.__houdini__nav_caches__.artifact_cache.has("${pageName}")) { + window.__houdini__nav_caches__.artifact_cache.set(${JSON.stringify(pageName)}, artifact) } ` ) diff --git a/packages/houdini-react/src/runtime/hooks/useDocumentHandle.ts b/packages/houdini-react/src/runtime/hooks/useDocumentHandle.ts index fb712d5b70..049b8fc155 100644 --- a/packages/houdini-react/src/runtime/hooks/useDocumentHandle.ts +++ b/packages/houdini-react/src/runtime/hooks/useDocumentHandle.ts @@ -67,10 +67,17 @@ export function useDocumentHandle< // before we send the query, we need to figure out which variables are // actually useful for this document const usedVariables = Object.fromEntries( - Object.keys(observer.artifact.input?.fields ?? {}).map((fieldName) => [ - fieldName, - location.params[fieldName], - ]) + Object.keys(observer.artifact.input?.fields ?? 
{}).reduce<[string, any][]>( + (entries, fieldName) => { + // if the field is not a url parameter, skip it + if (!(fieldName in location.params)) { + return entries + } + + return [...entries, [fieldName, location.params[fieldName]]] + }, + [] + ) ) return observer.send({ diff --git a/packages/houdini-react/src/runtime/routing/Router.tsx b/packages/houdini-react/src/runtime/routing/Router.tsx index ebfe61e628..0e4033de29 100644 --- a/packages/houdini-react/src/runtime/routing/Router.tsx +++ b/packages/houdini-react/src/runtime/routing/Router.tsx @@ -73,7 +73,7 @@ export function Router({ const { component_cache, data_cache } = useRouterContext() const PageComponent = component_cache.get(page.id)! - // if we got this far then we're past the suspense + // if we got this far then we're past suspense // // Now that we know we aren't going to throw, let's set up the event listeners @@ -100,6 +100,7 @@ export function Router({ } }, []) + // the function to call to navigate to a url const goto = (url: string) => { // clear the data cache so that we refetch queries with the new session (will force a cache-lookup) data_cache.clear() @@ -212,7 +213,9 @@ function usePageData({ } // send the request - const observer = client.observe({ artifact, cache }) + const observer = data_cache.has(artifact.name) + ? data_cache.get(artifact.name)! 
+ : client.observe({ artifact, cache }) let resolve: () => void = () => {} let reject: (message: string) => void = () => {} @@ -244,7 +247,7 @@ function usePageData({ const artifactName = "${artifact.name}" const value = ${JSON.stringify( - await marshalSelection({ + marshalSelection({ selection: observer.artifact.selection, data: observer.state.data, }) @@ -357,7 +360,6 @@ function usePageData({ } // compare the last known variables with the current set - // const last = last_variables.get(artifact) let last: GraphQLVariables = {} let usedVariables: GraphQLVariables = {} for (const variable of Object.keys(pageVariables)) { diff --git a/packages/houdini/src/cmd/generate.ts b/packages/houdini/src/cmd/generate.ts index 82c0be4b61..ed046e1f3f 100644 --- a/packages/houdini/src/cmd/generate.ts +++ b/packages/houdini/src/cmd/generate.ts @@ -37,6 +37,7 @@ export async function generate( if (config.localSchema) { config.schema = await loadLocalSchema(config) } + // Pull the newest schema if the flag is set else if (args.pullSchema && (await config.apiURL())) { await pullSchema(args) diff --git a/packages/houdini/src/codegen/generators/artifacts/index.ts b/packages/houdini/src/codegen/generators/artifacts/index.ts index 368bb7b936..031e055d93 100644 --- a/packages/houdini/src/codegen/generators/artifacts/index.ts +++ b/packages/houdini/src/codegen/generators/artifacts/index.ts @@ -6,6 +6,7 @@ import type { Config, Document, DocumentArtifact, + MutationArtifact, QueryArtifact, SubscriptionSelection, } from '../../../lib' @@ -362,6 +363,18 @@ export default function artifactGenerator(stats: { } } + // mutations might have optimisticKeys we need to track + if (artifact.kind === 'HoudiniMutation') { + // look for the optimistic key directive + graphql.visit(doc.document, { + [graphql.Kind.DIRECTIVE](node) { + if (node.name.value === config.optimisticKeyDirective) { + ;(artifact as MutationArtifact).optimisticKeys = true + } + }, + }) + } + // adding artifactData of plugins 
(only if any information is present) artifact.pluginData = {} for (const plugin of config.plugins) { diff --git a/packages/houdini/src/codegen/generators/artifacts/selection.ts b/packages/houdini/src/codegen/generators/artifacts/selection.ts index 60b2950524..b6a4f531e2 100644 --- a/packages/houdini/src/codegen/generators/artifacts/selection.ts +++ b/packages/houdini/src/codegen/generators/artifacts/selection.ts @@ -197,6 +197,10 @@ function prepareSelection({ {} ), })) + + if (field.directives.find((d) => d.name.value === config.optimisticKeyDirective)) { + fieldObj.optimisticKey = true + } } if (keys.includes(field.name.value)) { diff --git a/packages/houdini/src/codegen/generators/artifacts/tests/artifacts.test.ts b/packages/houdini/src/codegen/generators/artifacts/tests/artifacts.test.ts index 283a273459..6f21173aba 100644 --- a/packages/houdini/src/codegen/generators/artifacts/tests/artifacts.test.ts +++ b/packages/houdini/src/codegen/generators/artifacts/tests/artifacts.test.ts @@ -2102,6 +2102,88 @@ describe('mutation artifacts', function () { `) }) + test('optimsticKey paths', async function () { + // the config to use in tests + const config = testConfig() + const docs = [ + mockCollectedDoc( + `mutation A { + addFriend { + friend { + id @optimisticKey + } + } + }` + ), + ] + + // execute the generator + await runPipeline(config, docs) + + expect(docs[0]).toMatchInlineSnapshot(` + export default { + "name": "A", + "kind": "HoudiniMutation", + "hash": "cec5c0890ba01a7c84acd987c7e7d797e8e08f2a4af3df33fa2a69f31230563c", + + "raw": \`mutation A { + addFriend { + friend { + id + } + } + } + \`, + + "rootType": "Mutation", + + "selection": { + "fields": { + "addFriend": { + "type": "AddFriendOutput", + "keyRaw": "addFriend", + + "selection": { + "fields": { + "friend": { + "type": "User", + "keyRaw": "friend", + + "selection": { + "fields": { + "id": { + "type": "ID", + "keyRaw": "id", + + "directives": [{ + "name": "optimisticKey", + "arguments": {} + }], + + 
"optimisticKey": true, + "visible": true + } + } + }, + + "visible": true + } + } + }, + + "visible": true + } + } + }, + + "pluginData": {}, + "optimisticKeys": true + }; + + "HoudiniHash=78181b23be8762c5db8fd2686b9fed3e8082853857f967e31990ba79350332f4"; + `) + }) + test('insert operation allList', async function () { // the config to use in tests const config = testConfig() diff --git a/packages/houdini/src/codegen/generators/definitions/schema.test.ts b/packages/houdini/src/codegen/generators/definitions/schema.test.ts index 6483c81cb5..528c219dc1 100644 --- a/packages/houdini/src/codegen/generators/definitions/schema.test.ts +++ b/packages/houdini/src/codegen/generators/definitions/schema.test.ts @@ -36,6 +36,9 @@ test('adds internal documents to schema', async function () { """@prepend is used to tell the runtime to add the result to the end of the list""" directive @prepend on FRAGMENT_SPREAD + """@optimisticKey is used to identify a field as an optimistic key""" + directive @optimisticKey on FIELD + """@append is used to tell the runtime to add the result to the start of the list""" directive @append on FRAGMENT_SPREAD @@ -125,6 +128,9 @@ test('list operations are included', async function () { """@prepend is used to tell the runtime to add the result to the end of the list""" directive @prepend on FRAGMENT_SPREAD + """@optimisticKey is used to identify a field as an optimistic key""" + directive @optimisticKey on FIELD + """@append is used to tell the runtime to add the result to the start of the list""" directive @append on FRAGMENT_SPREAD @@ -233,6 +239,9 @@ test('list operations are included but delete directive should not be in when we """@prepend is used to tell the runtime to add the result to the end of the list""" directive @prepend on FRAGMENT_SPREAD + """@optimisticKey is used to identify a field as an optimistic key""" + directive @optimisticKey on FIELD + """@append is used to tell the runtime to add the result to the start of the list""" 
directive @append on FRAGMENT_SPREAD @@ -354,6 +363,9 @@ test("writing twice doesn't duplicate definitions", async function () { """@prepend is used to tell the runtime to add the result to the end of the list""" directive @prepend on FRAGMENT_SPREAD + """@optimisticKey is used to identify a field as an optimistic key""" + directive @optimisticKey on FIELD + """@append is used to tell the runtime to add the result to the start of the list""" directive @append on FRAGMENT_SPREAD diff --git a/packages/houdini/src/codegen/transforms/schema.ts b/packages/houdini/src/codegen/transforms/schema.ts index b83694f185..369cc5cbc5 100644 --- a/packages/houdini/src/codegen/transforms/schema.ts +++ b/packages/houdini/src/codegen/transforms/schema.ts @@ -52,6 +52,11 @@ directive @${config.paginateDirective}(${config.listOrPaginateNameArg}: String, """ directive @${config.listPrependDirective} on FRAGMENT_SPREAD +""" + @${config.optimisticKeyDirective} is used to identify a field as an optimistic key +""" +directive @${config.optimisticKeyDirective} on FIELD + """ @${ config.listAppendDirective @@ -194,11 +199,11 @@ directive @${config.componentFieldDirective}(field: String!, prop: String, expor .join('\n')}` // newSchema holds the schema elements that we need to remove from queries (eg added by plugins) - config.newSchema = graphql.print(mergeTypeDefs([internalSchema, config.newSchema])) + config.newSchema = graphql.print(mergeTypeDefs([internalSchema])) // schemaString is the value that gets printed to disk to extend the user's schema // it gets updated when newSchema is set so we just need to add the extensions - config.schemaString += extensions + config.schemaString += extensions.replaceAll('extend type', 'type') // build up the full schema by mixing everything together config.schema = graphql.buildSchema( diff --git a/packages/houdini/src/codegen/validators/typeCheck.test.ts b/packages/houdini/src/codegen/validators/typeCheck.test.ts index 914c4c9763..c2bf439545 100755 --- 
a/packages/houdini/src/codegen/validators/typeCheck.test.ts +++ b/packages/houdini/src/codegen/validators/typeCheck.test.ts @@ -1162,6 +1162,125 @@ const table: Row[] = [ `, ], }, + { + title: '@optimisticKey on single key', + pass: true, + documents: [ + ` + mutation B { + updateUser { + id @optimisticKey + } + } + `, + ], + }, + { + title: '@optimisticKey on non-key', + pass: false, + documents: [ + ` + mutation B { + updateUser { + firstName @optimisticKey + } + } + `, + ` + mutation A { + updateUser { + firstName @optimisticKey + } + } + `, + ], + }, + { + title: '@optimisticKey on multiple key - missing', + pass: false, + documents: [ + ` + mutation A { + updateGhost { + aka @optimisticKey + } + } + `, + ` + mutation B { + updateGhost { + aka @optimisticKey + } + } + `, + ], + }, + { + title: '@optimisticKey on multiple key - found', + pass: true, + documents: [ + ` + mutation A { + updateGhost { + aka @optimisticKey + name @optimisticKey + } + } + `, + ` + mutation B { + updateGhost { + aka @optimisticKey + name @optimisticKey + } + } + `, + ], + }, + { + title: '@optimisticKey on non-mutation', + pass: false, + documents: [ + ` + query A { + ghost { + aka @optimisticKey + name @optimisticKey + } + } + `, + ` + query B { + ghost { + aka @optimisticKey + name @optimisticKey + } + } + `, + ], + }, + { + title: '@optimisticKey on object type', + pass: false, + documents: [ + ` + mutation A { + updateGhost @optimisticKey { + aka + name + } + } + `, + ` + mutation B { + updateGhost @optimisticKey { + aka + name + } + } + `, + ], + }, { title: '@required may not be used on non-nullable fields', pass: false, diff --git a/packages/houdini/src/codegen/validators/typeCheck.ts b/packages/houdini/src/codegen/validators/typeCheck.ts index af9ff9381e..e376048133 100755 --- a/packages/houdini/src/codegen/validators/typeCheck.ts +++ b/packages/houdini/src/codegen/validators/typeCheck.ts @@ -346,7 +346,9 @@ export default async function typeCheck(config: Config, docs: 
Document[]): Promi // make sure every argument defined in a fragment is used noUnusedFragmentArguments(config), // make sure that @loading is used correctly - validateLoadingDirective(config) + validateLoadingDirective(config), + // make sure @optimisticKey is used on any keys + validateOptimisticKeys(config) ) for (const { filename, document: parsed, originalString } of docs) { @@ -1200,6 +1202,65 @@ function validateLoadingDirective(config: Config) { } } +function validateOptimisticKeys(config: Config) { + return function (ctx: graphql.ValidationContext): graphql.ASTVisitor { + const typeInfo = new graphql.TypeInfo(config.schema) + return graphql.visitWithTypeInfo(typeInfo, { + [graphql.Kind.SELECTION_SET]: (node, _, __, ___, ancestors) => { + // track if we find an optimistic key directive + let found: string[] = [] + // look at every field in the selection set + for (const selection of node.selections) { + // if we find the directive, mark it + if ( + selection.kind === 'Field' && + selection.directives?.find( + (d) => d.name.value === config.optimisticKeyDirective + ) + ) { + // add the field to the list + found.push(selection.name.value) + } + } + + // if we did find a directive make sure that we found the directive on + // every key for the type + if (found.length > 0) { + const doc = ancestors[0] as graphql.DocumentNode + const operation = doc.definitions?.find( + (def) => def.kind === 'OperationDefinition' + ) + if ( + operation && + (operation as graphql.OperationDefinitionNode).operation !== 'mutation' + ) { + ctx.reportError( + new graphql.GraphQLError( + `@${config.optimisticKeyDirective} can only be in mutations` + ) + ) + return + } + + const parent = typeInfo.getParentType() + if (!parent) { + return + } + const keys = config.keyFieldsForType(parent.name) + // make sure that the two lists match + if (keys.length !== found.length || !keys.every((key) => found.includes(key))) { + ctx.reportError( + new graphql.GraphQLError( + 
`@${config.optimisticKeyDirective} must be applied to every key field for a type` + ) + ) + } + } + }, + }) + } +} + export function getAndVerifyNodeInterface(config: Config): graphql.GraphQLInterfaceType | null { const { schema } = config diff --git a/packages/houdini/src/lib/config.ts b/packages/houdini/src/lib/config.ts index 1e17117d80..ea5e5b4307 100644 --- a/packages/houdini/src/lib/config.ts +++ b/packages/houdini/src/lib/config.ts @@ -17,6 +17,7 @@ import * as fs from './fs' import { pullSchema } from './introspection' import * as path from './path' import { plugin } from './plugin' +import { loadLocalSchema } from './router' import type { LogLevels, PluginConfig, PluginHooks, PluginInit, ValueMap } from './types' import { LogLevel } from './types' @@ -368,6 +369,10 @@ export class Config { return path.join(this.sourceDir, 'api') } + get localSchemaPath() { + return path.join(this.localApiDir, '+schema') + } + get localAPIUrl() { return localApiEndpoint(this.configFile) } @@ -622,6 +627,10 @@ export class Config { return 'list' } + get optimisticKeyDirective() { + return 'optimisticKey' + } + get listPrependDirective() { return 'prepend' } @@ -1004,7 +1013,10 @@ export async function getConfig({ noSchema, forceReload, ...extraConfig -}: PluginConfig & { noSchema?: boolean; forceReload?: boolean } = {}): Promise { +}: PluginConfig & { + noSchema?: boolean + forceReload?: boolean +} = {}): Promise { // if we force a reload, we will bypass this part if (!forceReload) { if (_config) { @@ -1122,6 +1134,11 @@ export async function getConfig({ } catch {} _config.localSchema = localSchema + // if we have a local schema, then we should just build it if we haven't + if (localSchema) { + _config.schema = await loadLocalSchema(_config) + } + const apiURL = await _config.apiURL() // look up the schema if we need to diff --git a/packages/houdini/src/lib/router/conventions.ts b/packages/houdini/src/lib/router/conventions.ts index e524c113b9..dd433945a2 100644 --- 
a/packages/houdini/src/lib/router/conventions.ts +++ b/packages/houdini/src/lib/router/conventions.ts @@ -130,7 +130,7 @@ function fallbacks_units_dir(config: Config, which: 'page' | 'layout', base?: st return path.join(units_dir(config, base), 'fallbacks', which) } -function units_dir(config: Config, base: string = base_dir(config)) { +export function units_dir(config: Config, base: string = base_dir(config)) { return path.join(base, 'units') } diff --git a/packages/houdini/src/lib/router/server.ts b/packages/houdini/src/lib/router/server.ts index 988d84bd41..097f19eda4 100644 --- a/packages/houdini/src/lib/router/server.ts +++ b/packages/houdini/src/lib/router/server.ts @@ -1,6 +1,7 @@ import type * as graphql from 'graphql' import path from 'node:path' +import { fs } from '..' import type { Config } from '../config' import { localApiEndpoint, type ConfigFile } from '../types' @@ -18,21 +19,30 @@ export function internalRoutes(config: ConfigFile): string[] { } export async function buildLocalSchema(config: Config): Promise { - // before we build the local schcema, we need to generate the typescript config file - // so that we can resolve all of the necessary imports + // before we build the local schcema, we should check if it already exists + // so we dont do it again // load the current version of vite const { build } = await import('vite') + const schema = path.join(config.localApiDir, '+schema') + const outDir = path.join(config.rootDir, 'temp') + process.env.HOUDINI_SECONDARY_BUILD = 'true' - const schema = path.join(config.localApiDir, '+schema') + try { + await fs.remove(path.join(outDir, 'assets', 'schema.js')) + } catch {} + + try { + await fs.mkdir(outDir) + } catch {} // build the schema somewhere we can import from await build({ logLevel: 'silent', build: { - outDir: path.join(config.rootDir, 'temp'), + outDir, rollupOptions: { input: { schema, @@ -55,7 +65,9 @@ export async function buildLocalSchema(config: Config): Promise { } export async function 
loadLocalSchema(config: Config): Promise { - await buildLocalSchema(config) + if (!isSecondaryBuild()) { + await buildLocalSchema(config) + } // import the schema we just built const { default: schema } = await import( diff --git a/packages/houdini/src/runtime/cache/cache.ts b/packages/houdini/src/runtime/cache/cache.ts index 1a5db7137c..1ef490d907 100644 --- a/packages/houdini/src/runtime/cache/cache.ts +++ b/packages/houdini/src/runtime/cache/cache.ts @@ -145,6 +145,11 @@ export class Cache { return handler } + // when an optimistic key resolves, we might momentarily know the same record by different IDs + registerKeyMap(source: string | number, mapped: string | number) { + this._internal_unstable.storage.registerIDMapping(source, mapped) + } + // remove the record from the cache's store and unsubscribe from it delete(id: string, layer?: Layer) { // clean up any subscribers associated with the record before we destroy the actual values that will let us diff --git a/packages/houdini/src/runtime/cache/storage.ts b/packages/houdini/src/runtime/cache/storage.ts index 44971ff5bb..2a11cd84ce 100644 --- a/packages/houdini/src/runtime/cache/storage.ts +++ b/packages/houdini/src/runtime/cache/storage.ts @@ -7,9 +7,10 @@ import type { GraphQLValue } from '../lib/types' // ie: deleting a user should not slow down looking up a list of cats export class InMemoryStorage { - private data: Layer[] + data: Layer[] private idCount = 1 private rank = 0 + private idMaps: Record = {} constructor() { this.data = [] @@ -23,6 +24,10 @@ export class InMemoryStorage { return this.rank++ } + registerIDMapping(from: string | number, to: string | number) { + this.idMaps[from] = to + } + // create a layer and return its id createLayer(optimistic: boolean = false): Layer { // generate the next layer @@ -68,9 +73,8 @@ export class InMemoryStorage { layer.replaceID(replacement) } } - get( - id: string, + targetID: string, field: string, defaultValue?: any ): { @@ -90,93 +94,102 @@ export class 
InMemoryStorage { // the list of layers we used to build up the value const layerIDs: number[] = [] + // the record might be known by multiple ids and we need to look at every layer + // in the correct order + const recordIDs = [this.idMaps[targetID], targetID].filter(Boolean) as string[] + // go through the list of layers in reverse for (let i = this.data.length - 1; i >= 0; i--) { - const layer = this.data[i] - let [layerValue, kind] = layer.get(id, field) - const layerOperations = layer.getOperations(id, field) || [] - layer.deletedIDs.forEach((v) => { - // if the layer wants to undo a delete for the id - if (layer.operations[v]?.undoDeletesInList?.includes(field)) { - return + // consider every id that we know about + for (const id of recordIDs) { + const layer = this.data[i] + let [layerValue, kind] = layer.get(id, field) + + const layerOperations = layer.getOperations(id, field) || [] + layer.deletedIDs.forEach((v) => { + // if the layer wants to undo a delete for the id + if (layer.operations[v]?.undoDeletesInList?.includes(field)) { + return + } + operations.remove.add(v) + }) + + // if we don't have a value to return, we're done + if (typeof layerValue === 'undefined' && defaultValue) { + const targetLayer = this.topLayer + targetLayer.writeField(id, field, defaultValue) + layerValue = defaultValue } - operations.remove.add(v) - }) - - // if we don't have a value to return, we're done - if (typeof layerValue === 'undefined' && defaultValue) { - const targetLayer = this.topLayer - const layerID = targetLayer.id - targetLayer.writeField(id, field, defaultValue) - layerValue = defaultValue - } - // if the layer does not contain a value for the field, move on - if (typeof layerValue === 'undefined' && layerOperations.length === 0) { - if (layer.deletedIDs.size > 0) { - layerIDs.push(layer.id) + // if the layer does not contain a value for the field, move on + if (typeof layerValue === 'undefined' && layerOperations.length === 0) { + if (layer.deletedIDs.size > 
0) { + layerIDs.push(layer.id) + } + continue } - continue - } - // if the result isn't an array we can just use the value since we can't - // apply operations to the field - if (typeof layerValue !== 'undefined' && !Array.isArray(layerValue)) { - return { - value: layerValue, - kind, - displayLayers: [layer.id], + // if the result isn't an array we can just use the value since we can't + // apply operations to the field + if (typeof layerValue !== 'undefined' && !Array.isArray(layerValue)) { + return { + value: layerValue, + kind, + displayLayers: [layer.id], + } } - } - // if the layer contains operations or values add it to the list of relevant layers - // add the layer to the list - layerIDs.push(layer.id) - - // if we have an operation - if (layerOperations.length > 0) { - // process every operation - for (const op of layerOperations) { - // remove operation - if (isRemoveOperation(op)) { - operations.remove.add(op.id) - } - // inserts are sorted by location - if (isInsertOperation(op)) { - operations.insert[op.location].unshift(op.id) - } - // if we found a delete operation, we're done - if (isDeleteOperation(op)) { - return { - value: undefined, - kind: 'unknown', - displayLayers: [], + // if the layer contains operations or values add it to the list of relevant layers + // add the layer to the list + layerIDs.push(layer.id) + + // if we have an operation + if (layerOperations.length > 0) { + // process every operation + for (const op of layerOperations) { + // remove operation + if (isRemoveOperation(op)) { + operations.remove.add(op.id) + } + // inserts are sorted by location + if (isInsertOperation(op)) { + operations.insert[op.location].unshift(op.id) + } + // if we found a delete operation, we're done + if (isDeleteOperation(op)) { + return { + value: undefined, + kind: 'unknown', + displayLayers: [], + } } } } - } - // if we don't have a value to return, we're done - if (typeof layerValue === 'undefined') { - continue - } + // if we don't have a value 
to return, we're done + if (typeof layerValue === 'undefined') { + continue + } - // if there are no operations, move along - if ( - !operations.remove.size && - !operations.insert.start.length && - !operations.insert.end.length - ) { - return { value: layerValue, displayLayers: layerIDs, kind: 'link' } - } + // if there are no operations, move along + if ( + !operations.remove.size && + !operations.insert.start.length && + !operations.insert.end.length + ) { + return { value: layerValue, displayLayers: layerIDs, kind: 'link' } + } - // we have operations to apply to the list - return { - value: [...operations.insert.start, ...layerValue, ...operations.insert.end].filter( - (value) => !operations.remove.has(value as string) - ), - displayLayers: layerIDs, - kind, + // we have operations to apply to the list + return { + value: [ + ...operations.insert.start, + ...layerValue, + ...operations.insert.end, + ].filter((value) => !operations.remove.has(value as string)), + displayLayers: layerIDs, + kind, + } } } @@ -249,6 +262,12 @@ export class InMemoryStorage { // delete the layers we merged this.data.splice(startingIndex + 1, layerIndex - startingIndex - 1) + + // if everything had merged down then there are no optimistic layers left and we can + // reset any deleted id mappings + if (this.data.length === 1) { + this.idMaps = {} + } } get topLayer(): Layer { diff --git a/packages/houdini/src/runtime/client/index.ts b/packages/houdini/src/runtime/client/index.ts index d3abd7d55e..1d6d202d40 100644 --- a/packages/houdini/src/runtime/client/index.ts +++ b/packages/houdini/src/runtime/client/index.ts @@ -14,6 +14,7 @@ import { mutation as mutationPlugin, query as queryPlugin, throwOnError as throwOnErrorPlugin, + optimisticKeys, } from './plugins' import pluginsFromPlugins from './plugins/injectedPlugins' @@ -115,6 +116,7 @@ export class HoudiniClient { // to the standard set ( [ + optimisticKeys(this.cache ?? 
cacheRef), // make sure that documents always work queryPlugin(this.cache ?? cacheRef), mutationPlugin(this.cache ?? cacheRef), diff --git a/packages/houdini/src/runtime/client/plugins/cache.test.ts b/packages/houdini/src/runtime/client/plugins/cache.test.ts index 3524c2a774..cfcd388847 100644 --- a/packages/houdini/src/runtime/client/plugins/cache.test.ts +++ b/packages/houdini/src/runtime/client/plugins/cache.test.ts @@ -1,14 +1,11 @@ import { beforeEach, expect, test, vi } from 'vitest' -import { createPluginHooks, HoudiniClient, type HoudiniClientConstructorArgs } from '..' import { testConfigFile } from '../../../test' import { Cache } from '../../cache/cache' -import { CachePolicy, PendingValue, type QueryArtifact } from '../../lib' +import { CachePolicy, PendingValue } from '../../lib' import { setMockConfig } from '../../lib/config' -import { ArtifactKind, DataSource } from '../../lib/types' -import type { ClientPlugin } from '../documentStore' -import { DocumentStore } from '../documentStore' import { cachePolicy } from './cache' +import { createStore, fakeFetch } from './test' /** * Testing the cache plugin @@ -471,92 +468,3 @@ test('loading states when fetching is true', async function () { variables: null, }) }) - -/** - * Utilities for testing the cache plugin - */ -export function createStore( - args: Partial & { artifact?: QueryArtifact } = {} -): DocumentStore { - // if we dont have anything passed, just use the fake fetch as the plugin - if (!args.plugins && !args.pipeline) { - args.plugins = [fakeFetch({})] - } - - // instantiate the client - const client = new HoudiniClient({ - url: 'URL', - ...args, - }) - - return new DocumentStore({ - plugins: args.plugins ? createPluginHooks(client.plugins) : undefined, - pipeline: args.pipeline ? createPluginHooks(client.plugins) : undefined, - client, - artifact: args.artifact ?? 
{ - kind: ArtifactKind.Query, - hash: '7777', - raw: 'RAW_TEXT', - name: 'TestArtifact', - rootType: 'Query', - pluginData: {}, - enableLoadingState: 'local', - selection: { - fields: { - viewer: { - type: 'User', - visible: true, - keyRaw: 'viewer', - loading: { kind: 'continue' }, - selection: { - fields: { - id: { - type: 'ID', - visible: true, - keyRaw: 'id', - }, - firstName: { - type: 'String', - visible: true, - keyRaw: 'firstName', - loading: { kind: 'value' }, - }, - __typename: { - type: 'String', - visible: true, - keyRaw: '__typename', - }, - }, - }, - }, - }, - }, - }, - }) -} - -export function fakeFetch({ - result = { - data: { - viewer: { - id: '1', - firstName: 'bob', - __typename: 'User', - }, - }, - errors: null, - fetching: false, - variables: null, - source: DataSource.Network, - partial: false, - stale: false, - }, - spy = vi.fn(), -} = {}) { - return (() => ({ - network(ctx, { resolve }) { - spy(ctx) - resolve(ctx, { ...result }) - }, - })) as ClientPlugin -} diff --git a/packages/houdini/src/runtime/client/plugins/index.ts b/packages/houdini/src/runtime/client/plugins/index.ts index 2392e13fb8..be90ef87bc 100644 --- a/packages/houdini/src/runtime/client/plugins/index.ts +++ b/packages/houdini/src/runtime/client/plugins/index.ts @@ -6,3 +6,4 @@ export * from './mutation' export * from './subscription' export * from './throwOnError' export * from './fetchParams' +export { optimisticKeys } from './optimisticKeys' diff --git a/packages/houdini/src/runtime/client/plugins/mutation.ts b/packages/houdini/src/runtime/client/plugins/mutation.ts index 0f50245f22..c3150d607d 100644 --- a/packages/houdini/src/runtime/client/plugins/mutation.ts +++ b/packages/houdini/src/runtime/client/plugins/mutation.ts @@ -16,14 +16,14 @@ export const mutation = (cache: Cache) => // well-defined ordering to a subtle situation so that seems like a win const layerOptimistic = cache._internal_unstable.storage.createLayer(true) - // the optimistic response gets passed in 
the context's stuff bag - const optimisticResponse = ctx.stuff.optimisticResponse - // if there is an optimistic response then we need to write the value immediately // hold onto the list of subscribers that we updated because of the optimistic response // and make sure they are included in the final set of subscribers to notify let toNotify: SubscriptionSpec[] = [] + + // the optimistic response gets passed in the context's stuff bag + const optimisticResponse = ctx.stuff.optimisticResponse if (optimisticResponse) { toNotify = cache.write({ selection: ctx.artifact.selection, diff --git a/packages/houdini/src/runtime/client/plugins/optimisticKeys.test.ts b/packages/houdini/src/runtime/client/plugins/optimisticKeys.test.ts new file mode 100644 index 0000000000..8963411a75 --- /dev/null +++ b/packages/houdini/src/runtime/client/plugins/optimisticKeys.test.ts @@ -0,0 +1,200 @@ +import { sleep } from '@kitql/helpers' +import { beforeEach, expect, test } from 'vitest' + +import { testConfigFile } from '../../../test' +import { Cache } from '../../cache/cache' +import { setMockConfig } from '../../lib/config' +import { ArtifactKind, type QueryResult, type GraphQLObject } from '../../lib/types' +import { mutation } from './mutation' +import { optimisticKeys } from './optimisticKeys' +import { createStore, fakeFetch } from './test' + +/** + * Testing the cache plugin + */ +const config = testConfigFile() +beforeEach(async () => { + setMockConfig({}) +}) + +test('OptimisticKeys Plugin', async function () { + const callbacks = {} + const keys = {} + + // create a cache instance we can test against with the mutation plugin + const cache = new Cache({ ...config, disabled: false }) + + // we are going to block the mutation so we can look at the optimistic layers that are + // created before the mutation resolves + let resolveMutation: (() => void) | null = null + + const first = createStore({ + artifact: { + kind: ArtifactKind.Mutation, + hash: '7777', + raw: 'RAW_TEXT', + 
name: 'TestArtifact', + rootType: 'Mutation', + pluginData: {}, + optimisticKeys: true, + selection: { + fields: { + createUser: { + type: 'User', + visible: true, + keyRaw: 'createUser', + loading: { kind: 'continue' }, + selection: { + fields: { + id: { + type: 'ID', + visible: true, + keyRaw: 'id', + optimisticKey: true, + directives: [{ name: 'optimisticKey', arguments: {} }], + }, + firstName: { + type: 'String', + visible: true, + keyRaw: 'firstName', + loading: { kind: 'value' }, + }, + __typename: { + type: 'String', + visible: true, + keyRaw: '__typename', + }, + }, + }, + }, + }, + }, + input: { + fields: {}, + types: {}, + defaults: {}, + runtimeScalars: {}, + }, + }, + pipeline: [ + optimisticKeys(cache, callbacks, keys), + mutation(cache), + fakeFetch({ + data: { + createUser: { id: '1', firstName: 'Alice', __typename: 'User' }, + }, + onRequest: (variables, cb) => (resolveMutation = cb), + }), + ], + }) + + // send the first mutation (this should block) + first.send({ + stuff: { + optimisticResponse: { + createUser: { firstName: 'John' }, + }, + }, + }) + + // since we can't await the send we are going to have to be a little creative with our timing + await sleep(200) + expect(resolveMutation).not.toBeNull() + + // we should have added an ID to the cache + let optimisticLink = cache._internal_unstable.storage.data[0].links['_ROOT_']['createUser'] + expect(optimisticLink).toBeDefined() + const record = cache._internal_unstable.storage.data[0].fields[optimisticLink as string] + expect(record.id).toBeDefined() + + // now that we have an id, we can send a second mutation that will block until we resolve the first + let secondVariables: GraphQLObject | null = null + const second = createStore({ + artifact: { + kind: ArtifactKind.Mutation, + hash: '7777', + raw: 'RAW_TEXT', + name: 'TestArtifact', + rootType: 'Mutation', + pluginData: {}, + optimisticKeys: false, + input: { + fields: { + id: 'String', + }, + types: {}, + defaults: {}, + runtimeScalars: {}, 
+ }, + selection: { + fields: { + createUser: { + type: 'User', + visible: true, + keyRaw: 'createUser', + loading: { kind: 'continue' }, + selection: { + fields: { + id: { + type: 'ID', + visible: true, + keyRaw: 'id', + optimisticKey: false, + }, + firstName: { + type: 'String', + visible: true, + keyRaw: 'firstName', + loading: { kind: 'value' }, + }, + __typename: { + type: 'String', + visible: true, + keyRaw: '__typename', + }, + }, + }, + }, + }, + }, + }, + pipeline: [ + optimisticKeys(cache, callbacks, keys), + fakeFetch({ + data: { + createUser: { id: '2', firstName: 'Alice', __typename: 'User' }, + }, + onRequest: (variables, cb) => { + secondVariables = variables + cb() + }, + }), + ], + }) + + // sending the second mutation with the optimistic ID as an input should block + // until the first mutation resolves. + let secondResolved: QueryResult | null = null + second + .send({ + variables: { + id: record.id, + }, + }) + .then((val) => (secondResolved = val)) + + // wait for a bit, just to be sure + await sleep(200) + expect(secondResolved).toBeFalsy() + + // we can now resolve the first mutation (which will provide the ID for the second) + if (resolveMutation) { + // @ts-ignore + resolveMutation?.() + } + + // make sure we did get a value + await sleep(200) + expect(secondVariables).toEqual({ id: '1' }) + expect(secondResolved).toBeTruthy() +}) diff --git a/packages/houdini/src/runtime/client/plugins/optimisticKeys.ts b/packages/houdini/src/runtime/client/plugins/optimisticKeys.ts new file mode 100644 index 0000000000..656adc2186 --- /dev/null +++ b/packages/houdini/src/runtime/client/plugins/optimisticKeys.ts @@ -0,0 +1,452 @@ +import type { Cache } from '../../cache/cache' +import configFile from '../../imports/config' +import { computeID, getFieldsForType, keyFieldsForType, marshalSelection } from '../../lib' +import type { + GraphQLObject, + NestedList, + GraphQLValue, + SubscriptionSelection, +} from '../../lib/types' +import { ArtifactKind } from 
'../../lib/types' +import type { ClientPlugin } from '../documentStore' + +// This plugin is responsible for coordinating requests that have optimistic keys. +// When a mutation contains optimistically generated keys as inputs, we need to block +// the request pipeline until we have a true value for the key. This means that we need +// a way to keep track of the pending keys and then notify other request chains. +// +// The major constraint here is that a document could be invoked multiple times, each of which +// can put the corresponding chain into a pending state. A document can also contain multiple +// keys in its response so we need to keep track of the query path in our data where we encounter the key. +// +// Therefore, we have 2 different mappings we need to track: +// a mapping from optimistic key to the list of callbacks that need to be notified +// a mapping of invocation id and path to the generated optimistic key +// NOTE: we need 2 different indexes so even though ^ could be merged into a single map. 
+// since we need to know if an input is a generated key and if a path is a generated key + +export type CallbackMap = Record void>> +export type KeyMap = Record> +type OptimisticObjectIDMap = Record> + +const keys: KeyMap = {} +const callbacks: CallbackMap = {} +const objectIDMap: OptimisticObjectIDMap = {} + +export const optimisticKeys = + ( + cache: Cache, + callbackCache: CallbackMap = callbacks, + keyCache: KeyMap = keys, + objectIDs: OptimisticObjectIDMap = objectIDMap, + invocationCounter: number = 1 + ): ClientPlugin => + () => { + return { + async start(ctx, { next }) { + // the optimistic response gets passed in the context's stuff bag + const optimisticResponse = ctx.stuff.optimisticResponse + + const newCtx = { ...ctx } + + // if the request has an optimistic response with optimistic keys embedded inside, we need to + // add them to the response and register the values in our global state (only on the client) + if ( + optimisticResponse && + ctx.artifact.kind === ArtifactKind.Mutation && + ctx.artifact.optimisticKeys + ) { + newCtx.stuff.mutationID = invocationCounter++ + + // add the keys to the response + addKeysToResponse({ + selection: ctx.artifact.selection, + response: optimisticResponse, + callbackStore: callbackCache, + keyStore: keyCache, + objectIDs, + mutationID: newCtx.stuff.mutationID, + }) + + // use the updated optimistic response for the rest of the chain + newCtx.stuff.optimisticResponse = optimisticResponse + } + + // make sure we write to the correct layer in the cache + next(newCtx) + }, + // if a request has variables that contain an optimistic key we need to block the + // request before it is sent to the server + beforeNetwork(ctx, { next }) { + // if there are no optimistic keys, just move onto the next step + if (Object.keys(keyCache).length === 0) { + return next(ctx) + } + + // look through the outgoing variables for ones that we have flagged as optimistic + const pendingVariables: Record = extractInputKeys( + ctx.variables 
?? {}, + callbackCache + ) + + // if there aren't any pending variables in the query, just move along + if (Object.keys(pendingVariables).length === 0) { + return next(ctx) + } + + // we need to register a callback with each pending variable + for (const key of Object.keys(pendingVariables)) { + callbackCache[key].push((newID) => { + pendingVariables[key] = newID + + // if that was the last variable that we needed to wait for, we can move on + if (Object.values(pendingVariables).every((value) => value !== null)) { + // add the optimistic keys back into the input variables + next({ + ...ctx, + variables: replaceKeyWithVariable( + { ...ctx.variables }, + pendingVariables as Record + ), + }) + } + }) + } + }, + afterNetwork(ctx, { value, resolve }) { + // if the artifact contained optimistic keys we need to extract them from the response + // and notify any dependent chains + if ( + ctx.artifact.kind === ArtifactKind.Mutation && + ctx.artifact.optimisticKeys && + typeof ctx.stuff.mutationID !== 'undefined' + ) { + // look for any values in the response that correspond to values in the keyCache + extractResponseKeys( + cache, + value.data ?? 
{}, + ctx.artifact.selection, + keyCache, + ctx.stuff.mutationID, + { + onNewKey: (optimisticValue, realValue) => { + callbackCache[optimisticValue].forEach((cb) => { + cb(realValue) + }) + + // clean up the caches since we're done with this key + delete callbackCache[optimisticValue] + }, + onIDChange: (optimisticValue, realValue) => + cache.registerKeyMap(optimisticValue, realValue), + } + ) + } + + // we're done + resolve(ctx) + }, + + // when the mutation ends, we no longer have any dependents that we have to track + end(ctx, { resolve }) { + if (typeof ctx.stuff.mutationID !== 'undefined') { + delete keyCache[ctx.stuff.mutationID] + delete objectIDs[ctx.stuff.mutationID] + } + + resolve(ctx) + }, + } + } + +function addKeysToResponse(args: { + selection: SubscriptionSelection + response: GraphQLObject + callbackStore: CallbackMap + keyStore: KeyMap + type?: string + path?: string + mutationID: number + objectIDs: OptimisticObjectIDMap +}): any { + // we need to walk the selection and inject the optimistic keys into the response + // collect all of the fields that we need to write + let targetSelection = getFieldsForType( + args.selection, + args.response['__typename'] as string | undefined, + false + ) + const newKeys = [] + + // data is an object with fields that we need to write to the store + for (const [field, { type, selection: fieldSelection, optimisticKey }] of Object.entries( + targetSelection + )) { + const value = args.response[field] + const pathSoFar = `${args.path ?? 
''}.${field}` + + // if this field is marked as an optimistic key, add it to the obj + if (optimisticKey) { + // figure out the value we should use for the optimistic key + let keyValue + + // if there is a value already in the response then we should use that + if (value) { + // marshal the value into something we can use for an id + const { marshaled } = marshalSelection({ + data: { marshaled: value }, + selection: { + fields: { + value: { + type, + keyRaw: 'value', + }, + }, + }, + }) as { marshaled: string } + + // use the marshaled value as the key + keyValue = marshaled + } + // if the field isn't present in the optimistic payload then we need to come up + // with our own value for the key based on the type + else { + keyValue = generateKey(type) + } + + // we need to populate the various stores that we use to track the keys + newKeys.push(keyValue) + args.response[field] = keyValue + args.callbackStore[keyValue] = [] + args.keyStore[args.mutationID] = { + [pathSoFar]: keyValue, + } + } + + // keep walking down the selection + if (fieldSelection) { + if (Array.isArray(value)) { + for (const [index, item] of flattenList(value).entries()) { + if (item && typeof item === 'object' && !Array.isArray(item)) { + addKeysToResponse({ + ...args, + selection: fieldSelection, + response: item, + type, + path: `${pathSoFar}[${index}]`, + }) + } + } + } else if (value && typeof value == 'object') { + addKeysToResponse({ + ...args, + selection: fieldSelection, + response: value, + type, + path: pathSoFar, + }) + } + } + } + + // if there were optimistic keys added to the response, we need to + // track the ID holding the new value + if (newKeys.length > 0) { + const objID = `${args.type}:${computeID(configFile, args.type ?? 
'', args.response)}` + for (const key of newKeys) { + args.objectIDs[args.mutationID] = { + ...args.objectIDs[args.mutationID], + [key]: objID, + } + } + } + + return args.response +} + +function extractInputKeys( + obj: GraphQLObject, + store: CallbackMap, + found: Record = {} +) { + for (const value of Object.values(obj)) { + if (typeof value === 'string' && store[value]) { + found[value] = null + } + + if (Array.isArray(value)) { + for (const item of flattenList(value)) { + if (item && typeof item === 'object') { + extractInputKeys(item as GraphQLObject, store, found) + } + } + } else if (value && typeof value === 'object') { + extractInputKeys(value, store, found) + } + } + + return found +} + +function extractResponseKeys( + cache: Cache, + response: GraphQLObject, + selection: SubscriptionSelection, + keyMap: KeyMap, + mutationID: number, + events: { + onNewKey: (optimisticValue: string | number, realValue: string | number) => void + onIDChange: (optimisticValue: string | number, realValue: string | number) => void + }, + objectIDs: OptimisticObjectIDMap = objectIDMap, + path: string = '', + type: string = '' +) { + // collect all of the fields that we need to write + let targetSelection = getFieldsForType( + selection, + response['__typename'] as string | undefined, + false + ) + + let optimisticID: string | null = null + + // data is an object with fields that we need to write to the store + for (const [field, value] of Object.entries(response)) { + // if the path corresponds to an optimistic key + const pathSoFar = `${path ?? 
''}.${field}` + + if (typeof value === 'string' && keyMap[mutationID][pathSoFar]) { + const newKey = keyMap[mutationID][pathSoFar] + // notify the listeners that the key has changed + events.onNewKey(newKey, value) + + // grab the optimistic ID referenced by the path + optimisticID = objectIDs[mutationID][newKey] + } + + // grab the selection info we care about + if (!selection || !targetSelection[field]) { + continue + } + + // look up the field in our schema + let { type, selection: fieldSelection } = targetSelection[field] + + // walk down lists in the response + if (Array.isArray(value)) { + for (const [index, item] of flattenList(value).entries()) { + if (item && typeof item === 'object' && fieldSelection) { + extractResponseKeys( + cache, + item as GraphQLObject, + fieldSelection, + keyMap, + mutationID, + events, + objectIDs, + `${pathSoFar}[${index}]`, + type + ) + } + } + } + // walk down objects in the response + else if (value && typeof value === 'object' && fieldSelection) { + extractResponseKeys( + cache, + value, + fieldSelection, + keyMap, + mutationID, + events, + objectIDs, + pathSoFar, + type + ) + } + } + + // if we found an optimistic ID in the previous step + if (optimisticID) { + // once we're done walking down, we can compute the id + const id = computeID(configFile, type, response) + + // if the id has changed, we need to tell the cache that the two ids are the same + events.onIDChange(`${type}:${id}`, optimisticID) + + // we need to write new values for the key fields in the cache + // that are owned by the old key + cache.write({ + selection: { + fields: Object.fromEntries( + keyFieldsForType(configFile, type).map((key) => [ + key, + { + type: 'scalar', + keyRaw: key, + }, + ]) + ), + }, + parent: optimisticID, + data: response, + }) + } +} + +function flattenList(source: NestedList): Array { + const result: Array = [] + const left = [...source] + while (left.length > 0) { + const head = left.shift() + if (Array.isArray(head)) { + 
left.push(...head) + } else { + result.push(head) + } + } + + return result +} + +function replaceKeyWithVariable( + variables: GraphQLObject, + keys: Record +): GraphQLObject { + for (const [key, value] of Object.entries(variables)) { + if (typeof value === 'string' && keys[value]) { + variables[key] = keys[value] + } + + if (Array.isArray(value)) { + for (const item of flattenList(value)) { + if (item && typeof item === 'object') { + replaceKeyWithVariable(item as GraphQLObject, keys) + } + } + } else if (value && typeof value === 'object') { + replaceKeyWithVariable(value, keys) + } + } + + return variables +} + +function generateKey(type: string) { + if (type === 'Int') { + return new Date().getTime() + } + + if (type === 'String') { + return new Date().getTime().toString() + } + + if (type === 'ID') { + return new Date().getTime().toString() + } + + throw new Error( + `unsupported type for optimistic key: ${type}. Please provide a value in your mutation arguments.` + ) +} diff --git a/packages/houdini/src/runtime/client/plugins/query.test.ts b/packages/houdini/src/runtime/client/plugins/query.test.ts index 5201842b9d..f2c7785042 100644 --- a/packages/houdini/src/runtime/client/plugins/query.test.ts +++ b/packages/houdini/src/runtime/client/plugins/query.test.ts @@ -3,8 +3,8 @@ import { beforeEach, expect, test, vi } from 'vitest' import { testConfigFile } from '../../../test' import { Cache } from '../../cache/cache' import { setMockConfig } from '../../lib/config' -import { createStore, fakeFetch } from './cache.test' import { query } from './query' +import { createStore, fakeFetch } from './test' const config = testConfigFile() beforeEach(async () => { diff --git a/packages/houdini/src/runtime/client/plugins/test.ts b/packages/houdini/src/runtime/client/plugins/test.ts new file mode 100644 index 0000000000..3196dfe11f --- /dev/null +++ b/packages/houdini/src/runtime/client/plugins/test.ts @@ -0,0 +1,107 @@ +import { vi } from 'vitest' + +import { 
createPluginHooks, HoudiniClient, type HoudiniClientConstructorArgs } from '..' +import type { DocumentArtifact, GraphQLObject, QueryResult } from '../../lib' +import { ArtifactKind, DataSource } from '../../lib/types' +import type { ClientPlugin, ClientPluginContext } from '../documentStore' +import { DocumentStore } from '../documentStore' + +/** + * Utilities for testing the cache plugin + */ +export function createStore( + args: Partial & { artifact?: DocumentArtifact } = {} +): DocumentStore { + // if we dont have anything passed, just use the fake fetch as the plugin + if (!args.plugins && !args.pipeline) { + args.plugins = [fakeFetch({})] + } + + // instantiate the client + const client = new HoudiniClient({ + url: 'URL', + ...args, + }) + + return new DocumentStore({ + plugins: args.plugins ? createPluginHooks(client.plugins) : undefined, + pipeline: args.pipeline ? createPluginHooks(client.plugins) : undefined, + client, + artifact: args.artifact ?? { + kind: ArtifactKind.Query, + hash: '7777', + raw: 'RAW_TEXT', + name: 'TestArtifact', + rootType: 'Query', + pluginData: {}, + enableLoadingState: 'local', + selection: { + fields: { + viewer: { + type: 'User', + visible: true, + keyRaw: 'viewer', + loading: { kind: 'continue' }, + selection: { + fields: { + id: { + type: 'ID', + visible: true, + keyRaw: 'id', + }, + firstName: { + type: 'String', + visible: true, + keyRaw: 'firstName', + loading: { kind: 'value' }, + }, + __typename: { + type: 'String', + visible: true, + keyRaw: '__typename', + }, + }, + }, + }, + }, + }, + }, + }) +} + +export function fakeFetch({ + data, + spy = vi.fn(), + onRequest, +}: { + data?: any + spy?: (ctx: ClientPluginContext) => void + onRequest?: (variables: GraphQLObject, cb: () => void) => void +}) { + const result: QueryResult = { + data: data ?? 
{ + viewer: { + id: '1', + firstName: 'bob', + __typename: 'User', + }, + }, + errors: null, + fetching: false, + variables: null, + source: DataSource.Network, + partial: false, + stale: false, + } + + return (() => ({ + network(ctx, { resolve }) { + spy?.(ctx) + if (onRequest) { + onRequest(ctx.variables ?? {}, () => resolve(ctx, { ...result })) + } else { + resolve(ctx, { ...result }) + } + }, + })) as ClientPlugin +} diff --git a/packages/houdini/src/runtime/client/utils/documentPlugins.ts b/packages/houdini/src/runtime/client/utils/documentPlugins.ts index 5d23f9d85e..4403c1a301 100644 --- a/packages/houdini/src/runtime/client/utils/documentPlugins.ts +++ b/packages/houdini/src/runtime/client/utils/documentPlugins.ts @@ -42,6 +42,7 @@ export const documentPlugin = (kind: ArtifactKinds, source: () => ClientHooks): return { start: enterWrapper(sourceHandlers.start), network: enterWrapper(sourceHandlers.network), + beforeNetwork: enterWrapper(sourceHandlers.beforeNetwork), afterNetwork: exitWrapper(sourceHandlers.afterNetwork), end: exitWrapper(sourceHandlers.end), catch: sourceHandlers.catch diff --git a/packages/houdini/src/runtime/lib/scalars.test.ts b/packages/houdini/src/runtime/lib/scalars.test.ts index 66b6c9d983..47ec837ad1 100644 --- a/packages/houdini/src/runtime/lib/scalars.test.ts +++ b/packages/houdini/src/runtime/lib/scalars.test.ts @@ -341,12 +341,12 @@ describe('marshal selection', function () { ], } - await expect( + expect( marshalSelection({ selection: artifact.selection, data, }) - ).resolves.toEqual({ + ).toEqual({ items: [ { createdAt: date.getTime(), @@ -371,12 +371,12 @@ describe('marshal selection', function () { ], } - await expect( + expect( marshalSelection({ selection: artifact.selection, data, }) - ).resolves.toEqual({ + ).toEqual({ items: [ { dates: [date1.getTime(), date2.getTime()], @@ -394,12 +394,12 @@ describe('marshal selection', function () { ], } - await expect( + expect( marshalSelection({ selection: artifact.selection, 
data, }) - ).resolves.toEqual({ + ).toEqual({ items: [ { dates: [], @@ -427,12 +427,12 @@ describe('marshal selection', function () { ], } - await expect(() => + expect(() => marshalSelection({ selection: artifact.selection, data, }) - ).rejects.toThrow(/Scalar type DateTime is missing a `marshal` function/) + ).toThrow(/Scalar type DateTime is missing a `marshal` function/) }) test('undefined', async function () { @@ -458,12 +458,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ item: undefined, }) }) @@ -491,12 +491,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ item: null, }) }) @@ -551,12 +551,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ item: { createdAt: date.getTime(), creator: { @@ -580,12 +580,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ rootBool: true, }) }) @@ -604,12 +604,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ enumValue: 'Hello', }) }) @@ -628,12 +628,12 @@ describe('marshal selection', function () { }, } - await expect( + expect( marshalSelection({ selection, data, }) - ).resolves.toEqual({ + ).toEqual({ enumValue: ['Hello', 'World'], }) }) diff --git a/packages/houdini/src/runtime/lib/scalars.ts b/packages/houdini/src/runtime/lib/scalars.ts index 2638c57a03..2d0405d783 100644 --- a/packages/houdini/src/runtime/lib/scalars.ts +++ b/packages/houdini/src/runtime/lib/scalars.ts @@ -10,13 +10,13 @@ import { type SubscriptionSelection, } from './types' -export async function marshalSelection({ +export 
function marshalSelection({ selection, data, }: { selection: SubscriptionSelection data: any -}): Promise<{} | null | undefined> { +}): {} | null | undefined { const config = getCurrentConfig() if (data === null || typeof data === 'undefined') { @@ -26,52 +26,50 @@ export async function marshalSelection({ // if we are looking at a list if (Array.isArray(data)) { // unmarshal every entry in the list - return await Promise.all(data.map((val) => marshalSelection({ selection, data: val }))) + return data.map((val) => marshalSelection({ selection, data: val })) } const targetSelection = getFieldsForType(selection, data['__typename'] as string, false) // we're looking at an object, build it up from the current input return Object.fromEntries( - await Promise.all( - Object.entries(data as {}).map(async ([fieldName, value]) => { - // leave the fragment entry alone - if (fieldName === fragmentKey) { - return [fieldName, value] - } + Object.entries(data as {}).map(([fieldName, value]) => { + // leave the fragment entry alone + if (fieldName === fragmentKey) { + return [fieldName, value] + } - // look up the type for the field - const { type, selection } = targetSelection[fieldName] - // if we don't have type information for this field, just use it directly - // it's most likely a non-custom scalars or enums - if (!type) { - return [fieldName, value] - } + // look up the type for the field + const { type, selection } = targetSelection[fieldName] + // if we don't have type information for this field, just use it directly + // it's most likely a non-custom scalars or enums + if (!type) { + return [fieldName, value] + } - // if there is a sub selection, walk down the selection - if (selection) { - return [fieldName, await marshalSelection({ selection, data: value })] - } + // if there is a sub selection, walk down the selection + if (selection) { + return [fieldName, marshalSelection({ selection, data: value })] + } - // is the type something that requires marshaling - if 
(config!.scalars?.[type]) { - const marshalFn = config!.scalars[type].marshal - if (!marshalFn) { - throw new Error( - `Scalar type ${type} is missing a \`marshal\` function. See https://houdinigraphql.com/api/config#custom-scalars for help on configuring custom scalars.` - ) - } - if (Array.isArray(value)) { - return [fieldName, value.map(marshalFn)] - } - return [fieldName, marshalFn(value)] + // is the type something that requires marshaling + if (config!.scalars?.[type]) { + const marshalFn = config!.scalars[type].marshal + if (!marshalFn) { + throw new Error( + `Scalar type ${type} is missing a \`marshal\` function. See https://houdinigraphql.com/api/config#custom-scalars for help on configuring custom scalars.` + ) + } + if (Array.isArray(value)) { + return [fieldName, value.map(marshalFn)] } + return [fieldName, marshalFn(value)] + } - // if the type doesn't require marshaling and isn't a referenced type - // then the type is a scalar that doesn't require marshaling - return [fieldName, value] - }) - ) + // if the type doesn't require marshaling and isn't a referenced type + // then the type is a scalar that doesn't require marshaling + return [fieldName, value] + }) ) } diff --git a/packages/houdini/src/runtime/lib/types.ts b/packages/houdini/src/runtime/lib/types.ts index e212aef317..04314f25ba 100644 --- a/packages/houdini/src/runtime/lib/types.ts +++ b/packages/houdini/src/runtime/lib/types.ts @@ -32,6 +32,7 @@ declare global { optimisticResponse?: GraphQLObject parentID?: string silenceLoading?: boolean + mutationID?: number } } } @@ -78,7 +79,9 @@ export type QueryArtifact = BaseCompiledDocument<'HoudiniQuery'> & { enableLoadingState?: 'global' | 'local' } -export type MutationArtifact = BaseCompiledDocument<'HoudiniMutation'> +export type MutationArtifact = BaseCompiledDocument<'HoudiniMutation'> & { + optimisticKeys?: boolean +} export type FragmentArtifact = BaseCompiledDocument<'HoudiniFragment'> & { enableLoadingState?: 'global' | 'local' @@ 
-167,14 +170,9 @@ export type MutationOperation = { export type GraphQLObject = { [key: string]: GraphQLValue } -export type GraphQLValue = - | number - | string - | boolean - | null - | GraphQLObject - | GraphQLValue[] - | undefined +export type GraphQLDefaultScalar = string | number | boolean + +export type GraphQLValue = GraphQLDefaultScalar | null | GraphQLObject | GraphQLValue[] | undefined export type GraphQLVariables = { [key: string]: any } | null @@ -221,6 +219,7 @@ export type SubscriptionSelection = { fragment: string variables: ValueMap | null } + optimisticKey?: boolean } } abstractFields?: { diff --git a/packages/houdini/src/test/index.ts b/packages/houdini/src/test/index.ts index 37cbcce655..05cb21f70b 100644 --- a/packages/houdini/src/test/index.ts +++ b/packages/houdini/src/test/index.ts @@ -211,6 +211,7 @@ export function testConfigFile({ plugins, ...config }: Partial = {}) type Mutation { updateUser: User! + updateGhost: Ghost! addFriend: AddFriendOutput! believeIn: BelieveInOutput! deleteUser(id: ID!): DeleteUserOutput! 
diff --git a/packages/houdini/src/vite/houdini.ts b/packages/houdini/src/vite/houdini.ts index dab196730c..dbc0a625a7 100644 --- a/packages/houdini/src/vite/houdini.ts +++ b/packages/houdini/src/vite/houdini.ts @@ -1,3 +1,4 @@ +import type * as graphql from 'graphql' import type { SourceMapInput } from 'rollup' import type { Plugin as VitePlugin, UserConfig, ResolvedConfig, ConfigEnv } from 'vite' @@ -31,6 +32,7 @@ export default function Plugin(opts: PluginConfig = {}): VitePlugin { // add watch-and-run to their vite config async config(userConfig, env) { config = await getConfig(opts) + viteEnv = env let result: UserConfig = { @@ -68,6 +70,7 @@ export default function Plugin(opts: PluginConfig = {}): VitePlugin { if (!isSecondaryBuild()) { viteConfig = conf } + for (const plugin of config.plugins) { if (typeof plugin.vite?.configResolved !== 'function') { continue @@ -195,6 +198,14 @@ export default function Plugin(opts: PluginConfig = {}): VitePlugin { async configureServer(server) { devServer = true + // if there is a local schema we need to use that when generating + if (config.localSchema) { + config.schema = (await server.ssrLoadModule(config.localSchemaPath)) + .default as graphql.GraphQLSchema + // make sure we watch the file for changes + server.watcher.add(config.localSchemaPath) + } + for (const plugin of config.plugins) { if (typeof plugin.vite?.configureServer !== 'function') { continue @@ -206,11 +217,6 @@ export default function Plugin(opts: PluginConfig = {}): VitePlugin { }) } - // if there is a local schema we need to use that when generating - if (config.localSchema && !config.schema) { - config.schema = await loadLocalSchema(config) - } - process.env.HOUDINI_PORT = String(server.config.server.port ?? 
5173) try { diff --git a/packages/houdini/src/vite/index.ts b/packages/houdini/src/vite/index.ts index 33245fd2b5..510e7b6505 100644 --- a/packages/houdini/src/vite/index.ts +++ b/packages/houdini/src/vite/index.ts @@ -1,10 +1,11 @@ +import type * as graphql from 'graphql' import minimatch from 'minimatch' import type { Plugin } from 'vite' import watch_and_run from 'vite-plugin-watch-and-run' import generate from '../codegen' import type { PluginConfig } from '../lib' -import { getConfig, formatErrors, path, loadLocalSchema } from '../lib' +import { getConfig, formatErrors, path, isSecondaryBuild } from '../lib' import houdini_vite from './houdini' import { watch_local_schema, watch_remote_schema } from './schema' @@ -13,7 +14,7 @@ export * from './imports' export * from './schema' export * from './houdini' -export default function (opts?: PluginConfig): Plugin[] { +export default function (opts?: PluginConfig): (Plugin | null)[] { // we need some way for the graphql tag to detect that we are running on the server // so we don't get an error when importing. process.env.HOUDINI_PLUGIN = 'true' @@ -21,58 +22,62 @@ export default function (opts?: PluginConfig): Plugin[] { // a container of a list const watchSchemaListref = { list: [] as string[] } - return [ - houdini_vite(opts), - watch_remote_schema(opts), - watch_local_schema(watchSchemaListref), - watch_and_run([ - { - name: 'Houdini', - quiet: true, - async watchFile(filepath: string) { - // load the config file - const config = await getConfig(opts) + return isSecondaryBuild() + ? 
[] + : [ + houdini_vite(opts), + watch_remote_schema(opts), + watch_local_schema(watchSchemaListref), + watch_and_run([ + { + name: 'Houdini', + quiet: true, + async watchFile(filepath: string) { + // load the config file + const config = await getConfig(opts) - // we need to watch some specific files - if (config.localSchema) { - const toWatch = watchSchemaListref.list - if (toWatch.includes(filepath)) { - // if it's a schema change, let's reload the config - await getConfig({ ...opts, forceReload: true }) - return true - } - } else { - const schemaPath = path.join( - path.dirname(config.filepath), - config.schemaPath! - ) - if (minimatch(filepath, schemaPath)) { - // if it's a schema change, let's reload the config - await getConfig({ ...opts, forceReload: true }) - return true - } - } + // we need to watch some specific files + if (config.localSchema) { + const toWatch = watchSchemaListref.list + if (toWatch.includes(filepath)) { + // if it's a schema change, let's reload the config + await getConfig({ ...opts, forceReload: true }) + return true + } + } else { + const schemaPath = path.join( + path.dirname(config.filepath), + config.schemaPath! 
+ ) + if (minimatch(filepath, schemaPath)) { + // if it's a schema change, let's reload the config + await getConfig({ ...opts, forceReload: true }) + return true + } + } - return config.includeFile(filepath, { root: process.cwd() }) - }, - async run() { - // load the config file - const config = await getConfig(opts) - if (config.localSchema) { - // reload the schema - config.schema = await loadLocalSchema(config) - } + return config.includeFile(filepath, { root: process.cwd() }) + }, + async run(server) { + // load the config file + const config = await getConfig(opts) + if (config.localSchema) { + config.schema = (await server.ssrLoadModule(config.localSchemaPath)) + .default as graphql.GraphQLSchema + // reload the schema + // config.schema = await loadLocalSchema(config) + } - // make sure we behave as if we're generating from inside the plugin (changes logging behavior) - config.pluginMode = true + // make sure we behave as if we're generating from inside the plugin (changes logging behavior) + config.pluginMode = true - // generate the runtime - await generate(config) - }, - delay: 100, - watchKind: ['add', 'change', 'unlink'], - formatErrors, - }, - ]), - ] + // generate the runtime + await generate(config) + }, + delay: 100, + watchKind: ['add', 'change', 'unlink'], + formatErrors, + }, + ]), + ] } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0371b1d402..7c8d984ab1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -227,14 +227,14 @@ importers: specifier: workspace:^ version: link:../../packages/houdini-react react: - specifier: ^18.3.0-canary-d6dcad6a8-20230914 - version: 18.3.0-canary-d7a98a5e9-20230517 + specifier: 19.0.0-rc-eb259b5d3b-20240605 + version: 19.0.0-rc-eb259b5d3b-20240605 react-dom: - specifier: ^18.3.0-canary-d6dcad6a8-20230914 - version: 18.3.0-canary-d6dcad6a8-20230914(react@18.3.0-canary-d7a98a5e9-20230517) + specifier: 19.0.0-rc-eb259b5d3b-20240605 + version: 19.0.0-rc-eb259b5d3b-20240605(react@19.0.0-rc-eb259b5d3b-20240605) 
react-streaming-compat: specifier: ^0.3.18 - version: 0.3.18(react-dom@18.3.0-canary-d6dcad6a8-20230914)(react@18.3.0-canary-d7a98a5e9-20230517) + version: 0.3.18(react-dom@19.0.0-rc-eb259b5d3b-20240605)(react@19.0.0-rc-eb259b5d3b-20240605) devDependencies: '@playwright/test': specifier: 1.30.0 @@ -569,14 +569,14 @@ importers: specifier: workspace:^ version: link:../houdini react: - specifier: 19.0.0-canary-2b036d3f1-20240327 - version: 19.0.0-canary-2b036d3f1-20240327 + specifier: 19.0.0-rc-eb259b5d3b-20240605 + version: 19.0.0-rc-eb259b5d3b-20240605 react-dom: - specifier: 19.0.0-canary-2b036d3f1-20240327 - version: 19.0.0-canary-2b036d3f1-20240327(react@19.0.0-canary-2b036d3f1-20240327) + specifier: 19.0.0-rc-eb259b5d3b-20240605 + version: 19.0.0-rc-eb259b5d3b-20240605(react@19.0.0-rc-eb259b5d3b-20240605) react-streaming-compat: specifier: ^0.3.18 - version: 0.3.18(react-dom@19.0.0-canary-2b036d3f1-20240327)(react@19.0.0-canary-2b036d3f1-20240327) + version: 0.3.18(react-dom@19.0.0-rc-eb259b5d3b-20240605)(react@19.0.0-rc-eb259b5d3b-20240605) recast: specifier: ^0.23.1 version: 0.23.1 @@ -585,7 +585,7 @@ importers: version: 3.14.0 use-deep-compare-effect: specifier: ^1.8.1 - version: 1.8.1(react@19.0.0-canary-2b036d3f1-20240327) + version: 1.8.1(react@19.0.0-rc-eb259b5d3b-20240605) devDependencies: '@types/cookie-parser': specifier: ^1.4.3 @@ -610,7 +610,7 @@ importers: version: 18.0.11 next: specifier: ^13.0.1 - version: 13.1.1(@babel/core@7.20.7)(react-dom@19.0.0-canary-2b036d3f1-20240327)(react@19.0.0-canary-2b036d3f1-20240327) + version: 13.1.1(@babel/core@7.20.7)(react-dom@19.0.0-rc-eb259b5d3b-20240605)(react@19.0.0-rc-eb259b5d3b-20240605) scripts: specifier: workspace:^ version: link:../_scripts @@ -8254,6 +8254,7 @@ packages: hasBin: true dependencies: js-tokens: 4.0.0 + dev: true /loupe@2.3.6: resolution: {integrity: sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==} @@ -8986,7 +8987,7 @@ packages: engines: 
{node: '>= 0.6'} dev: false - /next@13.1.1(@babel/core@7.20.7)(react-dom@19.0.0-canary-2b036d3f1-20240327)(react@19.0.0-canary-2b036d3f1-20240327): + /next@13.1.1(@babel/core@7.20.7)(react-dom@19.0.0-rc-eb259b5d3b-20240605)(react@19.0.0-rc-eb259b5d3b-20240605): resolution: {integrity: sha512-R5eBAaIa3X7LJeYvv1bMdGnAVF4fVToEjim7MkflceFPuANY3YyvFxXee/A+acrSYwYPvOvf7f6v/BM/48ea5w==} engines: {node: '>=14.6.0'} hasBin: true @@ -9008,9 +9009,9 @@ packages: '@swc/helpers': 0.4.14 caniuse-lite: 1.0.30001441 postcss: 8.4.14 - react: 19.0.0-canary-2b036d3f1-20240327 - react-dom: 19.0.0-canary-2b036d3f1-20240327(react@19.0.0-canary-2b036d3f1-20240327) - styled-jsx: 5.1.1(@babel/core@7.20.7)(react@19.0.0-canary-2b036d3f1-20240327) + react: 19.0.0-rc-eb259b5d3b-20240605 + react-dom: 19.0.0-rc-eb259b5d3b-20240605(react@19.0.0-rc-eb259b5d3b-20240605) + styled-jsx: 5.1.1(@babel/core@7.20.7)(react@19.0.0-rc-eb259b5d3b-20240605) optionalDependencies: '@next/swc-android-arm-eabi': 13.1.1 '@next/swc-android-arm64': 13.1.1 @@ -9719,7 +9720,7 @@ packages: /puppeteer@1.20.0: resolution: {integrity: sha512-bt48RDBy2eIwZPrkgbcwHtb51mj2nKvHOPMaSH2IsWiv7lOG9k9zhaRzpDZafrk05ajMc3cu+lSQYYOfH2DkVQ==} engines: {node: '>=6.4.0'} - deprecated: < 21.9.0 is no longer supported + deprecated: < 22.5.0 is no longer supported requiresBuild: true dependencies: debug: 4.3.4(supports-color@9.3.1) @@ -9806,23 +9807,13 @@ packages: scheduler: 0.19.1 dev: true - /react-dom@18.3.0-canary-d6dcad6a8-20230914(react@18.3.0-canary-d7a98a5e9-20230517): - resolution: {integrity: sha512-KzS+Jy/WXC6I9bi9PtBU0+iMPHPNvNLdyIDJqgX91AiBP9IDDMjaDbgW0QKphi1qIOesYMeJz0uZkajhlfS8lg==} + /react-dom@19.0.0-rc-eb259b5d3b-20240605(react@19.0.0-rc-eb259b5d3b-20240605): + resolution: {integrity: sha512-KDnYQBjOsyBjUDb0obGL7K2H3rj/6qfKCtKS9biSyR6R+Rstl8nPBVPlyrbcA/96t8ZWavZkU2JfHFveo1cb5A==} peerDependencies: - react: 18.3.0-canary-d6dcad6a8-20230914 + react: 19.0.0-rc-eb259b5d3b-20240605 dependencies: - loose-envify: 1.4.0 - react: 
18.3.0-canary-d7a98a5e9-20230517 - scheduler: 0.24.0-canary-d6dcad6a8-20230914 - dev: false - - /react-dom@19.0.0-canary-2b036d3f1-20240327(react@19.0.0-canary-2b036d3f1-20240327): - resolution: {integrity: sha512-pxGk4bDSRFDqVa+hAdkTva+EdMbrxC0X1mR1QC1comx2U2EQfocy1SySSa//m3ivU674sYW7saKDo/fQV8rprw==} - peerDependencies: - react: 19.0.0-canary-2b036d3f1-20240327 - dependencies: - react: 19.0.0-canary-2b036d3f1-20240327 - scheduler: 0.25.0-canary-2b036d3f1-20240327 + react: 19.0.0-rc-eb259b5d3b-20240605 + scheduler: 0.25.0-rc-eb259b5d3b-20240605 /react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} @@ -9841,7 +9832,7 @@ packages: engines: {node: '>=0.10.0'} dev: true - /react-streaming-compat@0.3.18(react-dom@18.3.0-canary-d6dcad6a8-20230914)(react@18.3.0-canary-d7a98a5e9-20230517): + /react-streaming-compat@0.3.18(react-dom@19.0.0-rc-eb259b5d3b-20240605)(react@19.0.0-rc-eb259b5d3b-20240605): resolution: {integrity: sha512-KyvJHZ3JLQyNQSU/Rg+FYPaU/LGjSrdByE1zHS5DP/I6hxEmDUJYXf9eWdZZMi3lq+sUEwy1P7ije4blb0wF/A==} peerDependencies: react: '>=18' @@ -9850,21 +9841,8 @@ packages: '@brillout/import': 0.2.3 '@brillout/json-serializer': 0.5.6 isbot-fast: 1.2.0 - react: 18.3.0-canary-d7a98a5e9-20230517 - react-dom: 18.3.0-canary-d6dcad6a8-20230914(react@18.3.0-canary-d7a98a5e9-20230517) - dev: false - - /react-streaming-compat@0.3.18(react-dom@19.0.0-canary-2b036d3f1-20240327)(react@19.0.0-canary-2b036d3f1-20240327): - resolution: {integrity: sha512-KyvJHZ3JLQyNQSU/Rg+FYPaU/LGjSrdByE1zHS5DP/I6hxEmDUJYXf9eWdZZMi3lq+sUEwy1P7ije4blb0wF/A==} - peerDependencies: - react: '>=18' - react-dom: '>=18' - dependencies: - '@brillout/import': 0.2.3 - '@brillout/json-serializer': 0.5.6 - isbot-fast: 1.2.0 - react: 19.0.0-canary-2b036d3f1-20240327 - react-dom: 19.0.0-canary-2b036d3f1-20240327(react@19.0.0-canary-2b036d3f1-20240327) + react: 19.0.0-rc-eb259b5d3b-20240605 + react-dom: 
19.0.0-rc-eb259b5d3b-20240605(react@19.0.0-rc-eb259b5d3b-20240605) dev: false /react@16.14.0: @@ -9876,15 +9854,8 @@ packages: prop-types: 15.8.1 dev: true - /react@18.3.0-canary-d7a98a5e9-20230517: - resolution: {integrity: sha512-WCoMOYGg0OR7IoQ9YhubaJ4j7743LBTx4OOcaRuI4wZkshvPIOuVWrZNOarMuKRj8bm/5DKuAV/p2kd74AbQmg==} - engines: {node: '>=0.10.0'} - dependencies: - loose-envify: 1.4.0 - dev: false - - /react@19.0.0-canary-2b036d3f1-20240327: - resolution: {integrity: sha512-dI3DePzDBPIypHcn+84a1H/9IUX67XyK1kCi1KETaKIJrf3LciB1gKSQ5P0G7HEVEIeSKuvpq0QB0uLC3Ta+wA==} + /react@19.0.0-rc-eb259b5d3b-20240605: + resolution: {integrity: sha512-Ez1rbLWt97LP31Us+kGIJOjf8iN+pxaIgi8r+8HJ+LnMEndi1bc9JShu4CVstRZDKk2UDkn7a4Ewhk7fdajizQ==} engines: {node: '>=0.10.0'} /read-cache@1.0.0: @@ -10306,14 +10277,8 @@ packages: object-assign: 4.1.1 dev: true - /scheduler@0.24.0-canary-d6dcad6a8-20230914: - resolution: {integrity: sha512-tC/9jHWGULTtIk39bb16jrDYyqwz0BHlQlNa3kZYyyFx8JsxioqzT/WoaInIrbkwRaY/zjYzm8IUzE3zH2wKqg==} - dependencies: - loose-envify: 1.4.0 - dev: false - - /scheduler@0.25.0-canary-2b036d3f1-20240327: - resolution: {integrity: sha512-ZaJBj3+g9DPMfnsCrCvxQ4G+/6RcH3dRE1dSfB0/mGJB72ZE1agIvwNOGOgbf5wQw4Ka3w3vv0KcHt3jOAH6Lg==} + /scheduler@0.25.0-rc-eb259b5d3b-20240605: + resolution: {integrity: sha512-0uINpZJVamgAUdXJQ2kDSXY3c2LbrAFknJ0neXCzq7y1gfT03TCfxjH24gaTsSK84bdYpD6FOsbciYEfunjA1g==} /selfsigned@2.1.1: resolution: {integrity: sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==} @@ -10770,7 +10735,7 @@ packages: acorn: 8.10.0 dev: true - /styled-jsx@5.1.1(@babel/core@7.20.7)(react@19.0.0-canary-2b036d3f1-20240327): + /styled-jsx@5.1.1(@babel/core@7.20.7)(react@19.0.0-rc-eb259b5d3b-20240605): resolution: {integrity: sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==} engines: {node: '>= 12.0.0'} peerDependencies: @@ -10785,7 +10750,7 @@ packages: dependencies: '@babel/core': 
7.20.7
 client-only: 0.0.1
- react: 19.0.0-canary-2b036d3f1-20240327
+ react: 19.0.0-rc-eb259b5d3b-20240605
 dev: true

 /stylis@4.1.3:
@@ -11574,7 +11539,7 @@ packages:
 resolution: {integrity: sha512-WHN8KDQblxd32odxeIgo83rdVDE2bvdkb86it7bMhYZwWKJz0+O0RK/eZiHYnM+zgt/U7hAHOlCQGfjjvSkw2g==}
 dev: false

- /use-deep-compare-effect@1.8.1(react@19.0.0-canary-2b036d3f1-20240327):
+ /use-deep-compare-effect@1.8.1(react@19.0.0-rc-eb259b5d3b-20240605):
 resolution: {integrity: sha512-kbeNVZ9Zkc0RFGpfMN3MNfaKNvcLNyxOAAd9O4CBZ+kCBXXscn9s/4I+8ytUER4RDpEYs5+O6Rs4PqiZ+rHr5Q==}
 engines: {node: '>=10', npm: '>=6'}
 peerDependencies:
@@ -11582,7 +11547,7 @@ packages:
 dependencies:
 '@babel/runtime': 7.20.7
 dequal: 2.0.3
- react: 19.0.0-canary-2b036d3f1-20240327
+ react: 19.0.0-rc-eb259b5d3b-20240605
 dev: false

 /util-deprecate@1.0.2:
diff --git a/site/src/routes/api/mutation/+page.svx b/site/src/routes/api/mutation/+page.svx
index 63674eceee..26ebf19fd9 100644
--- a/site/src/routes/api/mutation/+page.svx
+++ b/site/src/routes/api/mutation/+page.svx
@@ -194,6 +194,88 @@ you don't have to provide a complete response for an optimistic value, the cache
 whatever information you give it (as long as its found in the mutation body). Because
 of this, the store value won't update until the mutation resolves.
 
+### Optimistic Keys
+
+Sometimes it's not possible to know the ID to provide an optimistic response beforehand. Most commonly,
+this happens when you are trying to create a new record and insert the result into a list. Take for example,
+the following mutation:
+
+```graphql
+mutation CreateTodoItem($text: String!) {
+	createItem(text: $text) {
+		item {
+			id
+			text
+			...All_Items_insert
+		}
+	}
+}
+```
+
+If you wanted to submit this mutation with an optimistic response, it might look something like:
+
+```javascript
+CreateTodoItem.mutate(
+	{
+		text: "My Item",
+	},
+	{
+		optimisticResponse: {
+			createItem: {
+				item: {
+					id: "????" // <--- what goes here?
+					text: "My Item",
+				}
+			}
+		}
+	}
+)
+```
+
+To support this situation, you can tell Houdini to generate a temporary ID for you using the `@optimisticKey`
+directive:
+
+```graphql
+mutation CreateTodoItem($text: String!) {
+	createItem(text: $text) {
+		item {
+			id @optimisticKey
+			text
+
+			...All_Items_insert
+		}
+	}
+}
+```
+
+And now you don't have to provide an ID to the optimistic response. Houdini will keep track of the generated value and
+replace it with the real one when the mutation resolves:
+
+```javascript
+CreateTodoItem.mutate(
+	{
+		text: "My Item",
+	},
+	{
+		optimisticResponse: {
+			createItem: {
+				item: {
+					text: "My Item",
+				}
+			}
+		}
+	}
+)
+```
+
+Now you might be asking how this is different from the regular situation where you just made up an id on your own. Well, the
+important distinction is that Houdini tracks these generated keys internally. If you were to use one as the input for another
+mutation before the create mutation resolves (say to mark the new item as complete), the second mutation would block while
+it waits for the valid ID value from the server.
+
+If your record's keys are a custom scalar that Houdini cannot support, you will have to provide a value in your `optimisticResponse`
+object.
+
 ### Why is typescript missing fields?
 
 If you are using typescript, you might notice that the generated types for optimistic
diff --git a/vite.config.ts b/vite.config.ts
index 601ec054a1..5adb7519ea 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -14,7 +14,7 @@ export default defineConfig({
 			houdini: path.resolve('./packages/houdini/src/lib'),
 		},
 		coverage: {
-			provider: 'c8',
+			provider: 'v8',
 		},
 	},
 })