Generated by XState Viz: https://xstate.js.org/viz
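// Note: `Machine` and `assign` are provided as globals by the XState Viz
// editor. Outside the viz you would import them from xstate (v4 API), e.g.
// `import { Machine, assign } from "xstate"`.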
const noop = async () => {}

const initialize = noop
const customizeSchema = noop
const sourceNodes = noop
const buildSchema = noop
const createPages = noop
const createPagesStatefully = noop
const calculateDirtyQueries = noop
const extractQueries = noop
const runStaticQueries = noop
const runPageQueries = noop
const startWebpackServer = noop
const writeOutRequires = noop
const waitUntilAllJobsComplete = noop
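// Not defined in the viz export: `callRealApi` is invoked by the
// `committingBatch` service below, so it is stubbed here like the other
// handlers to keep the machine runnable.
const callRealApi = noop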
const MAX_RECURSION = 2
const NODE_MUTATION_BATCH_SIZE = 20
const NODE_MUTATION_BATCH_TIMEOUT = 5000

const assignAnything = assign({ recursionCount: 1 })
const assignMutatedNodes = assignAnything

const context = {
  recursionCount: 0,
  nodesMutatedDuringQueryRun: false,
  firstRun: true,
  nodeMutationBatch: [],
  runningBatch: [],
}

const rageAgainstTheStateMachine = async () => {
  console.error(`I won't do what you tell me!`)
}

const emitPageDataToWebsocket = () => {}
const emitStaticQueryDataToWebsocket = () => {}

/**
 * Event handler used in all states where we're not ready to process node
 * mutations. Instead we add them to a batch to process when we're next idle.
 */
const ADD_NODE_MUTATION = {
  actions: assignAnything,
}

/**
 * Event handler used in all states where we're not ready to process a file
 * change. Instead we add it to a batch to process when we're next idle.
 */
const SOURCE_FILE_CHANGED = {
  actions: assignAnything,
}

/**
 * When running queries we might add nodes (e.g. from resolvers). If so we'll
 * want to re-run queries and schema inference.
 */
const runMutationAndMarkDirty = {
  actions: assignAnything,
}

/**
 * Handler used in states where node mutations should be applied directly,
 * rather than deferred to a batch.
 */
const skipDeferredApi = {
  actions: assignAnything,
}

// eslint-disable-next-line new-cap
const machine = Machine(
  {
    id: `build`,
    initial: `initializing`,
    context,
    states: {
      initializing: {
        on: { ADD_NODE_MUTATION: skipDeferredApi },
        invoke: {
          src: initialize,
          onDone: {
            target: `customizingSchema`,
            actions: assignAnything,
          },
          onError: {
            target: `failed`,
          },
        },
      },
      customizingSchema: {
        on: { ADD_NODE_MUTATION: skipDeferredApi },
        invoke: {
          src: customizeSchema,
          id: `customizing-schema`,
          onDone: {
            target: `sourcingNodes`,
          },
          onError: {
            target: `idle`,
          },
        },
      },
      sourcingNodes: {
        on: {
          ADD_NODE_MUTATION: skipDeferredApi,
        },
        invoke: {
          src: sourceNodes,
          id: `sourcing-nodes`,
          onDone: {
            target: `buildingSchema`,
          },
          onError: {
            target: `idle`,
          },
        },
      },
      buildingSchema: {
        on: { ADD_NODE_MUTATION: skipDeferredApi },
        invoke: {
          id: `building-schema`,
          src: buildSchema,
          onDone: {
            target: `creatingPages`,
            actions: assignAnything,
          },
          onError: {
            target: `idle`,
          },
        },
      },
      creatingPages: {
        on: { ADD_NODE_MUTATION: runMutationAndMarkDirty },
        invoke: {
          id: `creating-pages`,
          src: createPages,
          onDone: [
            {
              target: `creatingPagesStatefully`,
              cond: context => context.firstRun,
            },
            {
              target: `extractingQueries`,
              actions: assignMutatedNodes,
            },
          ],
          onError: {
            target: `idle`,
          },
        },
      },
      extractingQueries: {
        on: { ADD_NODE_MUTATION },
        invoke: {
          id: `extracting-queries`,
          src: extractQueries,
          onDone: [
            {
              target: `writingRequires`,
            },
          ],
          onError: {
            target: `idle`,
          },
        },
      },
      writingRequires: {
        on: {
          ADD_NODE_MUTATION,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: writeOutRequires,
          id: `writing-requires`,
          onDone: {
            target: `calculatingDirtyQueries`,
          },
          onError: {
            target: `failed`,
          },
        },
      },
      calculatingDirtyQueries: {
        on: {
          "": [
            {
              cond: ctx => ctx.filesDirty,
              target: `extractingQueries`,
            },
          ],
          ADD_NODE_MUTATION,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          id: `calculating-dirty-queries`,
          src: calculateDirtyQueries,
          onDone: [
            {
              target: `runningStaticQueries`,
              actions: assignAnything,
            },
          ],
          onError: {
            target: `idle`,
          },
        },
      },
      creatingPagesStatefully: {
        on: {
          "": [
            {
              cond: ctx => ctx.filesDirty,
              target: `extractingQueries`,
            },
          ],
          ADD_NODE_MUTATION: runMutationAndMarkDirty,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: createPagesStatefully,
          id: `creating-pages-statefully`,
          onDone: {
            target: `extractingQueries`,
          },
          onError: {
            target: `idle`,
          },
        },
      },
      runningStaticQueries: {
        on: {
          "": [
            {
              cond: ctx => ctx.filesDirty,
              target: `extractingQueries`,
            },
          ],
          ADD_NODE_MUTATION: runMutationAndMarkDirty,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: runStaticQueries,
          id: `running-static-queries`,
          onDone: {
            target: `runningPageQueries`,
            actions: assignAnything,
          },
          onError: {
            target: `idle`,
          },
        },
      },
      runningPageQueries: {
        on: {
          "": [
            {
              cond: ctx => ctx.filesDirty,
              target: `extractingQueries`,
            },
          ],
          ADD_NODE_MUTATION: runMutationAndMarkDirty,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: runPageQueries,
          id: `running-page-queries`,
          onDone: [
            {
              target: `checkingForMutatedNodes`,
              actions: assignAnything,
            },
          ],
          onError: {
            target: `idle`,
          },
        },
      },
      checkingForMutatedNodes: {
        on: {
          "": [
            // Nothing was mutated, so move on to the next state
            {
              target: `waitingForJobs`,
              cond: context =>
                // console.log(
                //   `checking for mutated nodes`,
                //   context.nodesMutatedDuringQueryRun
                // )
                !context.nodesMutatedDuringQueryRun,
            },
            // Nodes were mutated, so start again
            {
              actions: assignAnything,
              target: `customizingSchema`,
              cond: ctx => ctx.recursionCount < MAX_RECURSION,
            },
            // We seem to be stuck in a loop, so bail out
            {
              actions: assignAnything,
              target: `idle`,
            },
          ],
        },
      },
      waitingForJobs: {
        on: {
          ADD_NODE_MUTATION,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: waitUntilAllJobsComplete,
          id: `waiting-for-jobs`,
          onDone: [
            {
              target: `runningWebpack`,
              cond: ctx => ctx.firstRun,
            },
            {
              target: `idle`,
            },
          ],
          onError: {
            target: `idle`,
          },
        },
      },
      // writingArtifacts: {
      //   invoke: {
      //     src: writingArtifacts,
      //     id: `writing-artifacts`,
      //     onDone: {
      //       target: `idle`,
      //     },
      //     onError: {
      //       target: `idle`,
      //     },
      //   },
      // },
      // batchingPageMutations: {
      //   invoke: {
      //     src: batchingPageMutations,
      //     id: `batchingPageMutations`,
      //     onDone: {
      //       target: `runningStaticQueries`,
      //     },
      //     onError: {
      //       target: `idle`,
      //     },
      //   },
      // },
      runningWebpack: {
        on: {
          ADD_NODE_MUTATION,
          SOURCE_FILE_CHANGED,
        },
        invoke: {
          src: startWebpackServer,
          id: `running-webpack`,
          onDone: {
            target: `idle`,
            actions: assignAnything,
          },
          onError: {
            target: `failed`,
          },
        },
      },
      // The bus is empty and the doors are closed
      idle: {
        entry: [assignAnything],
        on: {
          "": [
            // Node mutations are prioritised because we don't want
            // to run queries on data that is stale
            {
              cond: ctx => !!ctx.nodeMutationBatch.length,
              target: `batchingNodeMutations`,
            },
            {
              cond: ctx => ctx.filesDirty,
              target: `extractingQueries`,
            },
          ],
          WEBHOOK_RECEIVED: {
            target: `refreshing`,
            actions: assignAnything,
          },
          ADD_NODE_MUTATION: {
            ...ADD_NODE_MUTATION,
            target: `batchingNodeMutations`,
          },
          SOURCE_FILE_CHANGED: {
            target: `extractingQueries`,
          },
        },
      },
      refreshing: {
        on: { ADD_NODE_MUTATION },
        invoke: {
          src: async (ctx, event) => {},
          id: `refreshing`,
          onDone: {
            target: `customizingSchema`,
            actions: assignAnything,
          },
          onError: {
            target: `failed`,
          },
        },
      },
      // The doors are open for passengers to board
      batchingNodeMutations: {
        on: {
          // Check if the batch is already full on entry
          "": {
            cond: ctx =>
              ctx.nodeMutationBatch?.length >= NODE_MUTATION_BATCH_SIZE,
            target: `committingBatch`,
          },
          // More passengers board the same bus
          ADD_NODE_MUTATION: [
            // If this fills the batch then commit it
            {
              ...ADD_NODE_MUTATION,
              cond: ctx =>
                ctx.nodeMutationBatch?.length >= NODE_MUTATION_BATCH_SIZE,
              target: `committingBatch`,
            },
            // otherwise just add it to the batch
            ADD_NODE_MUTATION,
          ],
        },
        // Fallback: commit the batch once enough time has passed since the
        // first passenger boarded, even if the bus isn't full yet
        after: {
          [NODE_MUTATION_BATCH_TIMEOUT]: `committingBatch`,
        },
      },
      committingBatch: {
        on: { ADD_NODE_MUTATION },
        entry: [assignAnything],
        invoke: {
          src: async ({ runningBatch, store }) =>
            // Consume the entire batch and run the actions
            // console.log(`runningBatch`, runningBatch)
            Promise.all(
              runningBatch.map(payload => callRealApi(payload, store))
            ),
          onDone: {
            target: `buildingSchema`,
            actions: assignAnything,
          },
        },
      },
      failed: {
        invoke: {
          src: async (context, event) => {
            console.error(event)
          },
        },
      },
    },
  },
  {
    actions: {
      "rage-against-the-state-machine": rageAgainstTheStateMachine,
    },
  }
)
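// A minimal usage sketch (not part of the viz export): outside the viz the
// machine could be interpreted with xstate's `interpret` and driven by
// sending it the events handled above. The events and payload shown here are
// purely illustrative.
// import { interpret } from "xstate"
// const service = interpret(machine)
//   .onTransition(state => console.log(`state:`, state.value))
//   .start()
// service.send(`SOURCE_FILE_CHANGED`)
// service.send(`ADD_NODE_MUTATION`, { payload: {} })
// service.send(`WEBHOOK_RECEIVED`)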