Adding support for events #63

Open · wants to merge 3 commits into main
2 changes: 2 additions & 0 deletions .gitignore
@@ -7,3 +7,5 @@ node_modules
/coverage
*.iml
.idea/
.history
.parcel-cache
11 changes: 8 additions & 3 deletions example/example.ts
@@ -13,13 +13,18 @@ import {groqStore, Subscription} from '../src/browser'
populate()

let subscription: Subscription | null | undefined
const dataset = groqStore({
const store = groqStore({
projectId: 'groqstore',
dataset: 'fixture',
listen: true,
overlayDrafts: true,
})

store.on('datasetLoaded', ({dataset, documents}) => {
// eslint-disable-next-line no-console
console.info(`Dataset "${dataset}" loaded with ${documents.length} documents`)
})

function attach() {
clearBtnEl.addEventListener('click', clear, false)
executeBtnEl.addEventListener('click', execute, false)
@@ -39,7 +44,7 @@ import {groqStore, Subscription} from '../src/browser'
resultEl.value = '… querying …'
localStorage.setItem('groqStore', queryEl.value)
try {
onResult(await dataset.query(queryEl.value))
onResult(await store.query(queryEl.value))
} catch (err: any) {
onError(err.message || 'Unknown error')
}
@@ -56,7 +61,7 @@ import {groqStore, Subscription} from '../src/browser'
resultEl.value = '… querying …'
executeBtnEl.disabled = true
subscribeBtnEl.textContent = 'Unsubscribe'
subscription = dataset.subscribe(queryEl.value, {}, onResult)
subscription = store.subscribe(queryEl.value, {}, onResult)
}
}

2 changes: 1 addition & 1 deletion example/index.html
@@ -25,6 +25,6 @@ <h2>Result</h2>
</section>
</div>

<script src="example.ts"></script>
<script src="example.ts" type="module"></script>
</body>
</html>
4 changes: 2 additions & 2 deletions example/package.json
@@ -4,11 +4,11 @@
"private": true,
"description": "",
"license": "MIT",
"main": "src/example.ts",
"scripts": {
"start": "parcel index.html"
},
"devDependencies": {
"parcel-bundler": "^1.12.5"
}
},
"default": "example.ts"
}
1 change: 1 addition & 0 deletions package.json
@@ -58,6 +58,7 @@
"build": "pkg build --strict && pkg --strict",
"lint": "eslint .",
"prepublishOnly": "npm run build",
"prettify": "prettier --write .",
"start": "cd example && npm start",
"test": "tsdx test"
},
28 changes: 28 additions & 0 deletions src/events.ts
@@ -0,0 +1,28 @@
import {EventEmitter} from 'events'
import {SanityDocument} from '@sanity/types'

/** @public */
export type Events = {
/**
* Emitted after the dataset was loaded.
*/
datasetLoaded: {
dataset: string
Contributor:
What's the purpose of this field? It seems a bit artificial to have dataset (and not projectId) here. And whenever you set up the listener you probably already know which dataset/projectId it's applied to?
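
For illustration, the consumer side this comment alludes to might look like the sketch below (project/dataset values are hypothetical, not from this PR); the caller already knows the dataset it configured, so the field mostly echoes that back:

```ts
// Hypothetical consumer: the dataset name is already known at setup time.
const store = groqStore({projectId: 'abc123', dataset: 'production', listen: true})

store.on('datasetLoaded', ({dataset, documents}) => {
  // `dataset` here will always equal the 'production' passed in above.
  console.info(`Loaded ${documents.length} documents from ${dataset}`)
})
```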

documents: SanityDocument[]
}

/**
* Emitted each time the dataset changes. This happens when the dataset
* is initialised, and after mutations are applied through listeners.
*/
datasetChanged: {
dataset: string
documents: SanityDocument[]
}
}

/** @public */
export interface TypedEventEmitter extends EventEmitter {
on<K extends keyof Events>(s: K, listener: (v: Events[K]) => void): this
emit<K extends keyof Events>(eventName: K, params: Events[K]): boolean
}
24 changes: 23 additions & 1 deletion src/groqStore.ts
@@ -5,8 +5,11 @@ import {SanityDocument} from '@sanity/types'
import {parse, evaluate} from 'groq-js'
import {Config, EnvImplementations, GroqSubscription, GroqStore, Subscription} from './types'
import {getSyncingDataset} from './syncingDataset'
import {EventEmitter} from 'events'
import {TypedEventEmitter} from './events'

export function groqStore(config: Config, envImplementations: EnvImplementations): GroqStore {
const eventEmitter = new EventEmitter() as TypedEventEmitter
let documents: SanityDocument[] = []
const executeThrottled = throttle(config.subscriptionThrottleMs || 50, executeAllSubscriptions)
const activeSubscriptions: GroqSubscription[] = []
@@ -20,9 +23,20 @@ export function groqStore(config: Config, envImplementations: EnvImplementations
(docs) => {
documents = docs
executeThrottled()
eventEmitter.emit('datasetChanged', {
dataset: config.dataset,
documents: docs,
})
Member, commenting on lines +26 to +29:
I think this emitter should be throttled, the same way executeAllSubscriptions is.
This would avoid a mismatch where you see datasetChanged events fire, but store.query and such don't yet "see" the changes, since those are throttled.
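
A minimal sketch of that suggestion, assuming the same delay-first throttle helper already used for executeAllSubscriptions (the names below are illustrative, not part of this PR):

```ts
// Hypothetical: throttle the datasetChanged emit with the same delay as
// executeAllSubscriptions, so events and query results stay roughly in step.
const emitChangedThrottled = throttle(config.subscriptionThrottleMs || 50, () => {
  eventEmitter.emit('datasetChanged', {
    dataset: config.dataset,
    documents,
  })
})

// ...and inside the change handler, instead of emitting directly:
documents = docs
executeThrottled()
emitChangedThrottled()
```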

},
envImplementations
)

dataset.loaded.then(() => {
eventEmitter.emit('datasetLoaded', {
dataset: config.dataset,
documents,
})
})
}

await dataset.loaded
@@ -97,8 +111,16 @@ export function groqStore(config: Config, envImplementations: EnvImplementations

function close() {
executeThrottled.cancel()
eventEmitter.removeAllListeners()
return dataset ? dataset.unsubscribe() : Promise.resolve()
}

return {query, getDocument, getDocuments, subscribe, close}
return {
query,
getDocument,
getDocuments,
subscribe,
close,
on: (...args) => eventEmitter.on(...args),
}
}
1 change: 1 addition & 0 deletions src/index.ts
@@ -19,3 +19,4 @@ export function groqStore(config: Config): GroqStore {

export {default as groq} from 'groq'
export type {Subscription, GroqStore, Config, EnvImplementations} from './types'
export type {Events, TypedEventEmitter} from './events'
2 changes: 2 additions & 0 deletions src/types.ts
@@ -1,5 +1,6 @@
import {SanityDocument} from '@sanity/types'
import EventSourcePolyfill from 'eventsource'
import {TypedEventEmitter} from './events'

/** @public */
export interface Subscription {
@@ -89,6 +90,7 @@ export interface GroqStore {
callback: (err: Error | undefined, result?: R) => void
) => Subscription
close: () => Promise<void>
on: TypedEventEmitter['on']
}

export interface ApiError {
76 changes: 76 additions & 0 deletions test/events.test.ts
@@ -0,0 +1,76 @@
import EventSource from 'eventsource'
import {groqStore as groqStoreApi} from '../src/groqStore'
import {EnvImplementations, Config} from '../src/types'
import * as baseConfig from './config'
import * as listener from '../src/listen'
import {MutationEvent} from '@sanity/client'

describe('events', () => {
it('datasetLoaded fires when dataset is loaded', async () => {
const config: Config = {
...baseConfig,
token: 'my-token',
}

const getDocuments = jest.fn().mockResolvedValue([{_id: 'foo', value: 'bar'}])
const datasetLoadedCb = jest.fn()

const store = groqStoreApi(config, {
EventSource: EventSource as any as EnvImplementations['EventSource'],
getDocuments,
})

store.on('datasetLoaded', datasetLoadedCb)

await store.query('*')

expect(datasetLoadedCb).toBeCalledTimes(1)
expect(datasetLoadedCb).toBeCalledWith({
dataset: 'fixture',
documents: [{_id: 'foo', value: 'bar'}],
})
})

it('datasetChanged fires each time the dataset changes', async () => {
const config: Config = {
...baseConfig,
listen: true,
token: 'my-token',
}

jest.useFakeTimers()
jest.spyOn(listener, 'listen').mockImplementation((_esImpl, _config, handlers) => {
handlers.open()

// Call `next()` a little bit later to imitate a mutation received event
// eslint-disable-next-line max-nested-callbacks
setTimeout(() => handlers.next({} as any as MutationEvent), 50)

return {
unsubscribe: () => Promise.resolve(),
}
})

const getDocuments = jest.fn().mockResolvedValue([{_id: 'foo', value: 'bar'}])
const datasetChangedCb = jest.fn()

const store = groqStoreApi(config, {
EventSource: EventSource as any as EnvImplementations['EventSource'],
getDocuments,
})

store.on('datasetChanged', datasetChangedCb)

await store.query('*')

expect(datasetChangedCb).toBeCalledTimes(1)
expect(datasetChangedCb).toBeCalledWith({
dataset: 'fixture',
documents: [{_id: 'foo', value: 'bar'}],
})

jest.advanceTimersByTime(100)

expect(datasetChangedCb).toBeCalledTimes(2)
})
})