Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 34 additions & 1 deletion __tests__/core/array.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ import {
IJsonPatch,
setLivelinessChecking,
detach,
cast
cast,
SnapshotIn
} from "../../src"
import { observable, autorun, configure } from "mobx"
import { expect, test } from "bun:test"
Expand Down Expand Up @@ -288,6 +289,38 @@ test("it should reconciliate keyed instances correctly", () => {
expect(store.todos[1] === coffee).toBe(true)
expect(store.todos[2] === biscuit).toBe(false)
})
// Reconciling a large keyed array should reuse the existing instances and
// complete well under a second even for thousands of items.
test("it should reconciliate large array instances efficiently", () => {
    const Todo = types.model("Task", {
        id: types.identifier,
        task: ""
    })
    const Store = types
        .model("Store", {
            todos: types.array(types.maybeNull(Todo))
        })
        .actions(self => ({
            setTodos(todos: SnapshotIn<typeof self.todos>) {
                self.todos = cast(todos)
            }
        }))

    const count = 5000
    const todos: Array<{ id: string; task: string }> = []
    for (let i = 0; i < count; i++) {
        todos.push({ id: `todo-${i}`, task: `task-${i}` })
    }
    const reversedTodos = todos.slice().reverse()

    const store = Store.create({ todos })

    // Capture the exact instances so reconciliation can be verified below.
    const todosBeforeSet = store.todos.slice()

    const start = Date.now()
    store.setTodos(reversedTodos)
    const elapsed = Date.now() - start
    expect(elapsed).toBeLessThan(1000)

    // Every position must now hold the very same instance as before the set,
    // just in reversed order — proving reuse rather than recreation.
    todosBeforeSet.forEach((instance, i) => {
        expect(store.todos[todos.length - i - 1]).toBe(instance)
    })
})
test("it correctly reconciliate when swapping", () => {
const Task = types.model("Task", {})
const Store = types.model({
Expand Down
52 changes: 46 additions & 6 deletions src/types/complex-types/array.ts
Original file line number Diff line number Diff line change
Expand Up @@ -350,6 +350,30 @@ export function array<IT extends IAnyType>(subtype: IT): IArrayType<IT> {
return new ArrayType<IT>(`${subtype.name}[]`, subtype)
}

function buildObjectByIdMap(nodes: AnyNode[]): [Set<string>, Map<string, Array<AnyNode>> | null] {
// Creates a map of node by identifier value.
// In theory, several nodes can have the same identifier, if the array contains different types, so every identifier is mapped to an array of nodes with the same values.
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This line of your comment is an interesting one. Basically, it states that we still have the same worst case complexity.

That being said, in those scenarios we're likely to fall back to the old code path. In the case where we have an array with models of the same type containing identifiers, we get a nice perf boost 🚀

(nothing to action here, I just like talking things through)

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

yes, I'm trying to understand when it's OK to use the map and when it isn't...

// In practice this in probably a rare case, so we can live with the performance hit.
//
// If not all nodes have identifier, we can't use the map for lookups, so we return null.
Comment on lines +354 to +358
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Love that you've thought through this!

I'd highly recommend capturing various edge cases in test (if we haven't already done so). Some cases that come to mind would be tests for arrays of:

  1. all scalars,
  2. model types without identifiers,
  3. model types with identifiers,
  4. union of two distinct model types with and without identifiers
  5. union of scalar and model type (I believe MST supports this, but double check).

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks, of course these should be tested. I'll try to add those soon

const identifierAttributes = new Set<string>()
const keyToObjectMap = new Map<string, Array<AnyObjectNode>>()

for (const node of nodes) {
if (node instanceof ObjectNode && node.identifierAttribute && node.identifier !== null) {
identifierAttributes.add(node.identifierAttribute)
const key = node.identifier
if (!keyToObjectMap.has(key)) {
keyToObjectMap.set(key, [])
}
keyToObjectMap.get(key)!.push(node)
} else {
return [identifierAttributes, null] // Not all nodes have identifier, so we can't use the map.
}
}
return [identifierAttributes, keyToObjectMap]
}

function reconcileArrayChildren<TT>(
parent: AnyObjectNode,
childType: IType<any, any, TT>,
Expand All @@ -359,6 +383,8 @@ function reconcileArrayChildren<TT>(
): AnyNode[] | null {
let nothingChanged = true

const [identifierAttributes, oldNodeMap] = buildObjectByIdMap(oldNodes)

for (let i = 0; ; i++) {
const hasNewNode = i <= newValues.length - 1
const oldNode = oldNodes[i]
Expand Down Expand Up @@ -404,14 +430,28 @@ function reconcileArrayChildren<TT>(
// No in-place match at index i: try to reuse an old node from elsewhere
// in the array (a reorder) before creating a brand-new one.
let oldMatch = undefined

// Fast path: look the candidate up by identifier value in the prebuilt
// map instead of scanning the remainder of oldNodes for every element.
if (oldNodeMap && typeof newValue === "object" && newValue !== null) {
    for (const identifierAttribute of identifierAttributes) {
        // The snapshot may not carry this identifier attribute at all
        // (e.g. mixed model types in the array); try the next one.
        if (!(identifierAttribute in newValue)) {
            continue
        }
        const identifierValue = (newValue as any)[identifierAttribute]
        const matchingNodes = oldNodeMap.get(identifierValue) || []

        // areSame() still decides the final match (type check etc.);
        // the map only narrows the candidate set.
        oldMatch = matchingNodes.find(node => areSame(node, newValue))
        if (oldMatch) {
            break
        }
    }
    // NOTE(review): unlike the fallback branch below, this path does NOT
    // splice the matched node out of oldNodes (nor out of its map bucket).
    // With duplicate identifier values the same old node could be matched
    // twice, and the reused node stays in oldNodes where later cleanup may
    // treat it as a leftover — confirm the surrounding loop handles this,
    // or remove the matched node here as the fallback path does.
} else {
    // Fallback: linear scan from i, removing the reused node so it cannot
    // be matched again by a later iteration.
    for (let j = i; j < oldNodes.length; j++) {
        if (areSame(oldNodes[j], newValue)) {
            oldMatch = oldNodes.splice(j, 1)[0]
            break
        }
    }
}

nothingChanged = false
// Create (or reconcile into oldMatch) the node for newValue and insert it
// at position i.
const newNode = valueAsNode(childType, parent, newPath, newValue, oldMatch)
oldNodes.splice(i, 0, newNode)
Expand Down