Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cache parameter queries and buckets #200

Merged
merged 30 commits into from
Mar 20, 2025
Merged
Changes from 18 commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
62d40f8
POC of incremental update lookups.
rkistner Feb 12, 2025
5837e14
Cache dynamic bucket lookups.
rkistner Mar 5, 2025
39e256a
Fix sizeCalculation.
rkistner Feb 12, 2025
4e52f86
Optimization: skip checking for bucket_parameters changes if there are
rkistner Feb 12, 2025
f8582c5
Fix tests.
rkistner Feb 13, 2025
c8e63be
Fix import.
rkistner Feb 25, 2025
74e66e3
Merge remote-tracking branch 'origin/main' into optimize-bucket-looku…
rkistner Mar 11, 2025
6ebefa9
Improve parameter query filtering.
rkistner Mar 11, 2025
27c7577
Track last_op for each bucket.
rkistner Mar 11, 2025
48846ad
Fix cache size calculation.
rkistner Mar 11, 2025
1f73456
Update bucket counts after compact.
rkistner Mar 11, 2025
357477f
Merge remote-tracking branch 'origin/main' into optimize-bucket-looku…
rkistner Mar 11, 2025
8f430db
Fix some tests.
rkistner Mar 12, 2025
dfbdf09
Use an explicit ParameterLookup class for better typing.
rkistner Mar 12, 2025
71309df
Fix sync-rules tests.
rkistner Mar 12, 2025
246c3b6
Fix another test.
rkistner Mar 12, 2025
b61b0ca
Add changeset.
rkistner Mar 12, 2025
95d98ee
Merge remote-tracking branch 'origin/main' into optimize-bucket-looku…
rkistner Mar 13, 2025
4dc78b8
Merge remote-tracking branch 'origin/main' into optimize-bucket-looku…
rkistner Mar 17, 2025
bdf9361
Remove op_count from bucket_state.
rkistner Mar 18, 2025
3cdf69d
Fix compact.
rkistner Mar 18, 2025
f28eaef
Cleanup and comments.
rkistner Mar 19, 2025
f1af3a1
Simplify type guard.
rkistner Mar 19, 2025
ec45ae4
Tweaks and tests for hasIntersection.
rkistner Mar 19, 2025
1a76b2c
Use set intersection.
rkistner Mar 19, 2025
66fed0f
Fix handling of checkpoints only containing a write checkpoint update.
rkistner Mar 19, 2025
7a6cff1
Use a Symbol instead of null for INVALIDATE_ALL_BUCKETS.
rkistner Mar 19, 2025
65889dd
Fix typo.
rkistner Mar 19, 2025
8f45ca5
Add tests; fix parameter query lookup issue.
rkistner Mar 19, 2025
80fbe0b
Update snapshots for postgres.
rkistner Mar 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions .changeset/swift-wolves-sleep.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
'@powersync/service-module-postgres-storage': minor
'@powersync/service-module-mongodb-storage': minor
'@powersync/service-core-tests': minor
'@powersync/service-core': minor
'@powersync/service-sync-rules': minor
---

Cache parameter queries and buckets to reduce incremental sync overhead
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import { migrations } from '@powersync/service-core';
import * as storage from '../../../storage/storage-index.js';
import { MongoStorageConfig } from '../../../types/types.js';

const INDEX_NAME = 'bucket_updates';

/**
 * Creates a compound index on bucket_state (_id.g, last_op), supporting
 * incremental lookups of buckets changed since a given checkpoint within
 * a sync rules instance (group).
 */
export const up: migrations.PowerSyncMigrationFunction = async (context) => {
const {
service_context: { configuration }
} = context;
const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);

try {
await db.bucket_state.createIndex(
{
'_id.g': 1,
last_op: 1
},
// NOTE(review): `unique: true` assumes no two buckets in the same group can
// ever share a last_op value — confirm op ids are unique per group.
{ name: INDEX_NAME, unique: true }
);
} finally {
// Always release the client connection, even if index creation failed.
await db.client.close();
}
};

/**
 * Reverses the migration: drops the bucket_state (_id.g, last_op) index,
 * if it is still present.
 */
export const down: migrations.PowerSyncMigrationFunction = async (context) => {
const configuration = context.service_context.configuration;
const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);

try {
const indexPresent = await db.bucket_state.indexExists(INDEX_NAME);
if (indexPresent) {
await db.bucket_state.dropIndex(INDEX_NAME);
}
} finally {
// Always release the client connection, even if the drop failed.
await db.client.close();
}
};
Original file line number Diff line number Diff line change
@@ -314,10 +314,12 @@ export class MongoCompactor {
let lastOpId: BucketDataKey | null = null;
let targetOp: bigint | null = null;
let gotAnOp = false;
let numberOfOpsToClear = 0;
for await (let op of query.stream()) {
if (op.op == 'MOVE' || op.op == 'REMOVE' || op.op == 'CLEAR') {
checksum = utils.addChecksums(checksum, op.checksum);
lastOpId = op._id;
numberOfOpsToClear += 1;
if (op.op != 'CLEAR') {
gotAnOp = true;
}
@@ -337,7 +339,7 @@ export class MongoCompactor {
return;
}

logger.info(`Flushing CLEAR at ${lastOpId?.o}`);
logger.info(`Flushing CLEAR for ${numberOfOpsToClear} ops at ${lastOpId?.o}`);
await this.db.bucket_data.deleteMany(
{
_id: {
@@ -362,6 +364,22 @@ export class MongoCompactor {
},
{ session }
);

// Note: This does not update anything if there is no existing state
await this.db.bucket_state.updateOne(
{
_id: {
g: this.group_id,
b: bucket
}
},
{
$inc: {
op_count: 1 - numberOfOpsToClear
}
},
{ session }
);
},
{
writeConcern: { w: 'majority' },
Original file line number Diff line number Diff line change
@@ -9,27 +9,29 @@ import {
} from '@powersync/lib-services-framework';
import {
BroadcastIterable,
CHECKPOINT_INVALIDATE_ALL,
CheckpointChanges,
GetCheckpointChangesOptions,
InternalOpId,
internalToExternalOpId,
ProtocolOpId,
ReplicationCheckpoint,
SourceTable,
storage,
utils,
WatchWriteCheckpointOptions
WatchWriteCheckpointOptions,
CHECKPOINT_INVALIDATE_ALL,
deserializeParameterLookup
} from '@powersync/service-core';
import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
import { SqliteJsonRow, ParameterLookup, SqlSyncRules } from '@powersync/service-sync-rules';
import * as bson from 'bson';
import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
import { LRUCache } from 'lru-cache';
import * as timers from 'timers/promises';
import { MongoBucketStorage } from '../MongoBucketStorage.js';
import { PowerSyncMongo } from './db.js';
import {
BucketDataDocument,
BucketDataKey,
BucketStateDocument,
SourceKey,
SourceTableDocument,
SyncRuleCheckpointState,
@@ -39,6 +41,7 @@ import { MongoBucketBatch } from './MongoBucketBatch.js';
import { MongoCompactor } from './MongoCompactor.js';
import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
import { idPrefixFilter, mapOpEntry, readSingleBatch } from './util.js';
import { JSONBig } from '@powersync/service-jsonbig';

export class MongoSyncBucketStorage
extends BaseObserver<storage.SyncRulesBucketStorageListener>
@@ -154,7 +157,7 @@ export class MongoSyncBucketStorage

await callback(batch);
await batch.flush();
if (batch.last_flushed_op) {
if (batch.last_flushed_op != null) {
return { flushed_op: batch.last_flushed_op };
} else {
return null;
@@ -252,7 +255,7 @@ export class MongoSyncBucketStorage
return result!;
}

async getParameterSets(checkpoint: utils.InternalOpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
async getParameterSets(checkpoint: utils.InternalOpId, lookups: ParameterLookup[]): Promise<SqliteJsonRow[]> {
const lookupFilter = lookups.map((lookup) => {
return storage.serializeLookup(lookup);
});
@@ -585,6 +588,13 @@ export class MongoSyncBucketStorage
{ maxTimeMS: lib_mongo.db.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
);

await this.db.bucket_state.deleteMany(
{
_id: idPrefixFilter<BucketStateDocument['_id']>({ g: this.group_id }, ['b'])
},
{ maxTimeMS: lib_mongo.db.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
);

await this.db.source_tables.deleteMany(
{
group_id: this.group_id
@@ -795,12 +805,7 @@ export class MongoSyncBucketStorage

const updates: CheckpointChanges =
lastCheckpoint == null
? {
invalidateDataBuckets: true,
invalidateParameterBuckets: true,
updatedDataBuckets: [],
updatedParameterBucketDefinitions: []
}
? CHECKPOINT_INVALIDATE_ALL
: await this.getCheckpointChanges({
lastCheckpoint: lastCheckpoint,
nextCheckpoint: checkpoint
@@ -869,7 +874,119 @@ export class MongoSyncBucketStorage
return pipeline;
}

private async getDataBucketChanges(
options: GetCheckpointChangesOptions
): Promise<Pick<CheckpointChanges, 'updatedDataBuckets' | 'invalidateDataBuckets'>> {
const bucketStateUpdates = await this.db.bucket_state
.find(
{
// We have an index on (_id.g, last_op).
'_id.g': this.group_id,
last_op: { $gt: BigInt(options.lastCheckpoint) }
},
{
projection: {
'_id.b': 1
},
limit: 1001,
batchSize: 1001,
singleBatch: true
}
)
.toArray();

const buckets = bucketStateUpdates.map((doc) => doc._id.b);
const invalidateDataBuckets = buckets.length > 1000;

return {
invalidateDataBuckets: invalidateDataBuckets,
updatedDataBuckets: invalidateDataBuckets ? [] : buckets
};
}

/**
 * Returns the serialized parameter lookups that changed between the two
 * checkpoints in `options`, or an invalidate-all flag when more than 1000
 * lookups changed.
 */
private async getParameterBucketChanges(
options: GetCheckpointChangesOptions
): Promise<Pick<CheckpointChanges, 'updatedParameterLookups' | 'invalidateParameterBuckets'>> {
// TODO: limit max query running time
const parameterUpdates = await this.db.bucket_parameters
.find(
{
// The delta between two checkpoints covers (lastCheckpoint, nextCheckpoint]:
// a checkpoint includes its own op id, so the upper bound must be inclusive.
// Was `$lt`, which silently dropped a parameter update landing exactly on
// nextCheckpoint (the data-bucket query has no upper bound and includes it).
_id: { $gt: BigInt(options.lastCheckpoint), $lte: BigInt(options.nextCheckpoint) },
'key.g': this.group_id
},
{
projection: {
lookup: 1
},
// Fetch up to 1001 documents in one batch: 1000 is the usable limit, and
// the extra document detects overflow.
limit: 1001,
batchSize: 1001,
singleBatch: true
}
)
.toArray();
const invalidateParameterUpdates = parameterUpdates.length > 1000;

return {
invalidateParameterBuckets: invalidateParameterUpdates,
// Lookups are serialized with JSONBig so bigint values round-trip.
updatedParameterLookups: invalidateParameterUpdates
? new Set<string>()
: new Set<string>(parameterUpdates.map((p) => JSONBig.stringify(deserializeParameterLookup(p.lookup))))
};
}

// TODO:
// We can optimize this by implementing it like ChecksumCache: We can use partial cache results to do
// more efficient lookups in some cases.
//
// Caches checkpoint deltas keyed by `${lastCheckpoint}_${nextCheckpoint}`, so
// repeated requests for the same checkpoint range avoid re-running the queries.
private checkpointChangesCache = new LRUCache<string, CheckpointChanges, { options: GetCheckpointChangesOptions }>({
// At most 50 entries, bounded to ~10MB of cached identifier strings.
max: 50,
maxSize: 10 * 1024 * 1024,
sizeCalculation: (value: CheckpointChanges) => {
// Rough size estimate: total length of the cached lookup and bucket-name
// strings, plus a fixed per-entry overhead.
const paramSize = [...value.updatedParameterLookups].reduce<number>((a, b) => a + b.length, 0);
const bucketSize = [...value.updatedDataBuckets].reduce<number>((a, b) => a + b.length, 0);
return 100 + paramSize + bucketSize;
},
// On cache miss, compute the changes from the database.
fetchMethod: async (_key, _staleValue, options) => {
return this.getCheckpointChangesInternal(options.context.options);
}
});

// Memoized result of hasDynamicBucketQueries(); the parsed sync rules for a
// storage instance do not change, so one evaluation suffices.
private _hasDynamicBucketsCached: boolean | undefined = undefined;

/** Returns whether the current sync rules contain any dynamic parameter queries. */
private hasDynamicBucketQueries(): boolean {
if (this._hasDynamicBucketsCached == null) {
// The default schema is irrelevant for this check.
const rules = this.getParsedSyncRules({
defaultSchema: 'default' // n/a
});
this._hasDynamicBucketsCached = rules.hasDynamicBucketQueries();
}
return this._hasDynamicBucketsCached;
}

/**
 * Computes which data buckets and parameter lookups changed between
 * options.lastCheckpoint and options.nextCheckpoint, using an LRU cache to
 * share results between concurrent requests for the same range.
 */
async getCheckpointChanges(options: GetCheckpointChangesOptions): Promise<CheckpointChanges> {
// Removed a stray `return CHECKPOINT_INVALIDATE_ALL;` that short-circuited
// this method, making everything below unreachable and defeating both the
// cache and the incremental change lookups.
if (!this.hasDynamicBucketQueries()) {
// Special case when we have no dynamic parameter queries.
// In this case, we can avoid doing any queries.
return {
invalidateDataBuckets: true,
updatedDataBuckets: [],
invalidateParameterBuckets: false,
updatedParameterLookups: new Set<string>()
};
}
const key = `${options.lastCheckpoint}_${options.nextCheckpoint}`;
const result = await this.checkpointChangesCache.fetch(key, { context: { options } });
return result!;
}

/**
 * Uncached implementation backing getCheckpointChanges / the LRU cache's
 * fetchMethod: combines data-bucket and parameter-lookup changes.
 */
private async getCheckpointChangesInternal(options: GetCheckpointChangesOptions): Promise<CheckpointChanges> {
// The two lookups query independent collections — run them concurrently
// instead of sequentially.
const [dataUpdates, parameterUpdates] = await Promise.all([
this.getDataBucketChanges(options),
this.getParameterBucketChanges(options)
]);

return {
...dataUpdates,
...parameterUpdates
};
}
}
Original file line number Diff line number Diff line change
@@ -11,6 +11,7 @@ import { PowerSyncMongo } from './db.js';
import {
BucketDataDocument,
BucketParameterDocument,
BucketStateDocument,
CurrentBucket,
CurrentDataDocument,
SourceKey
@@ -48,6 +49,7 @@ export class PersistedBatch {
bucketData: mongo.AnyBulkWriteOperation<BucketDataDocument>[] = [];
bucketParameters: mongo.AnyBulkWriteOperation<BucketParameterDocument>[] = [];
currentData: mongo.AnyBulkWriteOperation<CurrentDataDocument>[] = [];
bucketStates: Map<string, BucketStateUpdate> = new Map();

/**
* For debug logging only.
@@ -66,6 +68,19 @@ export class PersistedBatch {
this.currentSize = writtenSize;
}

/**
 * Records one more op written to `bucket` at `op_id` in the pending
 * per-bucket state updates for this batch.
 */
private incrementBucket(bucket: string, op_id: InternalOpId) {
const state = this.bucketStates.get(bucket);
if (state == null) {
// First op for this bucket in the current batch.
this.bucketStates.set(bucket, {
lastOp: op_id,
incrementCount: 1
});
return;
}
state.lastOp = op_id;
state.incrementCount += 1;
}

saveBucketData(options: {
op_seq: MongoIdSequence;
sourceKey: storage.ReplicaId;
@@ -120,6 +135,7 @@ export class PersistedBatch {
}
}
});
this.incrementBucket(k.bucket, op_id);
}

for (let bd of remaining_buckets.values()) {
@@ -147,6 +163,7 @@ export class PersistedBatch {
}
});
this.currentSize += 200;
this.incrementBucket(bd.bucket, op_id);
}
}

@@ -277,6 +294,14 @@ export class PersistedBatch {
});
}

if (this.bucketStates.size > 0) {
await db.bucket_state.bulkWrite(this.getBucketStateUpdates(), {
session,
// Per-bucket operation - order doesn't matter
ordered: false
});
}

const duration = performance.now() - startAt;
logger.info(
`powersync_${this.group_id} Flushed ${this.bucketData.length} + ${this.bucketParameters.length} + ${
@@ -287,7 +312,37 @@ export class PersistedBatch {
this.bucketData = [];
this.bucketParameters = [];
this.currentData = [];
this.bucketStates.clear();
this.currentSize = 0;
this.debugLastOpId = null;
}

/**
 * Converts the accumulated per-bucket state into bulk upserts against the
 * bucket_state collection: set last_op, increment op_count.
 */
private getBucketStateUpdates(): mongo.AnyBulkWriteOperation<BucketStateDocument>[] {
const operations: mongo.AnyBulkWriteOperation<BucketStateDocument>[] = [];
for (const [bucket, state] of this.bucketStates) {
operations.push({
updateOne: {
filter: {
_id: {
g: this.group_id,
b: bucket
}
},
update: {
$set: {
last_op: state.lastOp
},
$inc: {
op_count: state.incrementCount
}
},
// Create the state document on first write to the bucket.
upsert: true
}
});
}
return operations;
}
}

/** Pending in-memory bucket_state change for a single bucket within one batch. */
interface BucketStateUpdate {
// Highest op id written to the bucket so far in this batch.
lastOp: InternalOpId;
// Number of ops added to the bucket in this batch; applied via $inc on op_count.
incrementCount: number;
}
Original file line number Diff line number Diff line change
@@ -6,6 +6,7 @@ import { MongoStorageConfig } from '../../types/types.js';
import {
BucketDataDocument,
BucketParameterDocument,
BucketStateDocument,
CurrentDataDocument,
CustomWriteCheckpointDocument,
IdSequenceDocument,
@@ -33,6 +34,7 @@ export class PowerSyncMongo {
readonly write_checkpoints: mongo.Collection<WriteCheckpointDocument>;
readonly instance: mongo.Collection<InstanceDocument>;
readonly locks: mongo.Collection<lib_mongo.locks.Lock>;
readonly bucket_state: mongo.Collection<BucketStateDocument>;

readonly client: mongo.MongoClient;
readonly db: mongo.Db;
@@ -55,6 +57,7 @@ export class PowerSyncMongo {
this.write_checkpoints = db.collection('write_checkpoints');
this.instance = db.collection('instance');
this.locks = this.db.collection('locks');
this.bucket_state = this.db.collection('bucket_state');
}

/**
@@ -70,6 +73,7 @@ export class PowerSyncMongo {
await this.write_checkpoints.deleteMany({});
await this.instance.deleteOne({});
await this.locks.deleteMany({});
await this.bucket_state.deleteMany({});
}

/**
Original file line number Diff line number Diff line change
@@ -75,6 +75,15 @@ export interface SourceTableDocument {
snapshot_done: boolean | undefined;
}

/**
 * Per-bucket state, one document per (sync rules group, bucket name).
 * Maintained incrementally as operations are flushed, and queried to find
 * buckets that changed between two checkpoints.
 */
export interface BucketStateDocument {
_id: {
// Sync rules group id.
g: number;
// Bucket name.
b: string;
};
// Op id of the last operation written to this bucket.
last_op: bigint;
// Running count of ops in the bucket; incremented on writes, decremented on compact.
op_count: number;
}

export interface IdSequenceDocument {
_id: string;
op_id: bigint;
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import * as lib_postgres from '@powersync/lib-service-postgres';
import { ReplicationAssertionError } from '@powersync/lib-services-framework';
import {
BroadcastIterable,
CHECKPOINT_INVALIDATE_ALL,
@@ -14,23 +13,22 @@ import {
} from '@powersync/service-core';
import { JSONBig } from '@powersync/service-jsonbig';
import * as sync_rules from '@powersync/service-sync-rules';
import * as timers from 'timers/promises';
import * as uuid from 'uuid';
import { BIGINT_MAX } from '../types/codecs.js';
import { models, RequiredOperationBatchLimits } from '../types/types.js';
import { replicaIdToSubkey } from '../utils/bson.js';
import { mapOpEntry } from '../utils/bucket-data.js';
import * as timers from 'timers/promises';

import * as framework from '@powersync/lib-services-framework';
import { StatementParam } from '@powersync/service-jpgwire';
import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
import { SourceTableDecoded, StoredRelationId } from '../types/models/SourceTable.js';
import { pick } from '../utils/ts-codec.js';
import { PostgresBucketBatch } from './batch/PostgresBucketBatch.js';
import { PostgresWriteCheckpointAPI } from './checkpoints/PostgresWriteCheckpointAPI.js';
import { PostgresBucketStorageFactory } from './PostgresBucketStorageFactory.js';
import { PostgresCompactor } from './PostgresCompactor.js';
import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
import { Decoded } from 'ts-codec';

export type PostgresSyncRulesStorageOptions = {
factory: PostgresBucketStorageFactory;
@@ -354,7 +352,7 @@ export class PostgresSyncRulesStorage

async getParameterSets(
checkpoint: utils.InternalOpId,
lookups: sync_rules.SqliteJsonValue[][]
lookups: sync_rules.ParameterLookup[]
): Promise<sync_rules.SqliteJsonRow[]> {
const rows = await this.db.sql`
SELECT DISTINCT
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { getUuidReplicaIdentityBson, OplogEntry, storage } from '@powersync/service-core';
import { RequestParameters } from '@powersync/service-sync-rules';
import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
import { expect, test } from 'vitest';
import * as test_utils from '../test-utils/test-utils-index.js';

@@ -65,7 +65,9 @@ bucket_definitions:
});
});

const parameters = await bucketStorage.getParameterSets(result!.flushed_op, [['mybucket', '1', 'user1']]);
const parameters = await bucketStorage.getParameterSets(result!.flushed_op, [
ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
expect(parameters).toEqual([
{
group_id: 'group1a'
@@ -110,15 +112,19 @@ bucket_definitions:
});
});

const parameters = await bucketStorage.getParameterSets(result2!.flushed_op, [['mybucket', '1', 'user1']]);
const parameters = await bucketStorage.getParameterSets(result2!.flushed_op, [
ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
expect(parameters).toEqual([
{
group_id: 'group2'
}
]);

// Use the checkpoint to get older data if relevant
const parameters2 = await bucketStorage.getParameterSets(result1!.flushed_op, [['mybucket', '1', 'user1']]);
const parameters2 = await bucketStorage.getParameterSets(result1!.flushed_op, [
ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
expect(parameters2).toEqual([
{
group_id: 'group1'
@@ -183,8 +189,8 @@ bucket_definitions:
// There removal operation for the association of `list2`::`todo2` should not interfere with the new
// association of `list1`::`todo2`
const parameters = await bucketStorage.getParameterSets(result2!.flushed_op, [
['mybucket', '1', 'list1'],
['mybucket', '1', 'list2']
ParameterLookup.normalized('mybucket', '1', ['list1']),
ParameterLookup.normalized('mybucket', '1', ['list2'])
]);

expect(parameters.sort((a, b) => (a.todo_id as string).localeCompare(b.todo_id as string))).toEqual([
@@ -230,11 +236,17 @@ bucket_definitions:

const checkpoint = result!.flushed_op;

const parameters1 = await bucketStorage.getParameterSets(checkpoint, [['mybucket', '1', 314n, 314, 3.14]]);
const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
]);
expect(parameters1).toEqual([TEST_PARAMS]);
const parameters2 = await bucketStorage.getParameterSets(checkpoint, [['mybucket', '1', 314, 314n, 3.14]]);
const parameters2 = await bucketStorage.getParameterSets(checkpoint, [
ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
]);
expect(parameters2).toEqual([TEST_PARAMS]);
const parameters3 = await bucketStorage.getParameterSets(checkpoint, [['mybucket', '1', 314n, 314, 3]]);
const parameters3 = await bucketStorage.getParameterSets(checkpoint, [
ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
]);
expect(parameters3).toEqual([]);
});

@@ -286,7 +298,9 @@ bucket_definitions:

const checkpoint = result!.flushed_op;

const parameters1 = await bucketStorage.getParameterSets(checkpoint, [['mybucket', '1', 1152921504606846976n]]);
const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
]);
expect(parameters1).toEqual([TEST_PARAMS]);
});

@@ -387,7 +401,7 @@ bucket_definitions:
const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];

const lookups = q1.getLookups(parameters);
expect(lookups).toEqual([['by_workspace', '1', 'u1']]);
expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);

const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
@@ -457,7 +471,7 @@ bucket_definitions:
const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];

const lookups = q1.getLookups(parameters);
expect(lookups).toEqual([['by_public_workspace', '1']]);
expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);

const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
@@ -546,15 +560,15 @@ bucket_definitions:
// Test intermediate values - could be moved to sync_rules.test.ts
const q1 = sync_rules.bucket_descriptors[0].parameter_queries[0];
const lookups1 = q1.getLookups(parameters);
expect(lookups1).toEqual([['by_workspace', '1']]);
expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);

const parameter_sets1 = await bucketStorage.getParameterSets(checkpoint, lookups1);
parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);

const q2 = sync_rules.bucket_descriptors[0].parameter_queries[1];
const lookups2 = q2.getLookups(parameters);
expect(lookups2).toEqual([['by_workspace', '2', 'u1']]);
expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);

const parameter_sets2 = await bucketStorage.getParameterSets(checkpoint, lookups2);
parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
@@ -861,7 +875,9 @@ bucket_definitions:

const { checkpoint } = await bucketStorage.getCheckpoint();

const parameters = await bucketStorage.getParameterSets(checkpoint, [['mybucket', '1', 'user1']]);
const parameters = await bucketStorage.getParameterSets(checkpoint, [
ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
expect(parameters).toEqual([]);
});

9 changes: 5 additions & 4 deletions packages/service-core/src/storage/SyncRulesBucketStorage.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { ObserverClient } from '@powersync/lib-services-framework';
import { SqlSyncRules, SqliteJsonRow, SqliteJsonValue } from '@powersync/service-sync-rules';
import { ParameterLookup, SqlSyncRules, SqliteJsonRow } from '@powersync/service-sync-rules';
import * as util from '../util/util-index.js';
import { BucketStorageBatch, FlushedResult } from './BucketStorageBatch.js';
import { BucketStorageFactory } from './BucketStorageFactory.js';
@@ -71,7 +71,7 @@ export interface SyncRulesBucketStorage
/**
* Used to resolve "dynamic" parameter queries.
*/
getParameterSets(checkpoint: util.InternalOpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]>;
getParameterSets(checkpoint: util.InternalOpId, lookups: ParameterLookup[]): Promise<SqliteJsonRow[]>;

getCheckpointChanges(options: GetCheckpointChangesOptions): Promise<CheckpointChanges>;

@@ -253,13 +253,14 @@ export interface GetCheckpointChangesOptions {
export interface CheckpointChanges {
updatedDataBuckets: string[];
invalidateDataBuckets: boolean;
updatedParameterBucketDefinitions: string[];
/** Serialized using JSONBig */
updatedParameterLookups: Set<string>;
invalidateParameterBuckets: boolean;
}

export const CHECKPOINT_INVALIDATE_ALL: CheckpointChanges = {
updatedDataBuckets: [],
invalidateDataBuckets: true,
updatedParameterBucketDefinitions: [],
updatedParameterLookups: new Set<string>(),
invalidateParameterBuckets: true
};
22 changes: 10 additions & 12 deletions packages/service-core/src/storage/bson.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import * as bson from 'bson';

import { SqliteJsonValue } from '@powersync/service-sync-rules';
import { ParameterLookup, SqliteJsonValue } from '@powersync/service-sync-rules';
import { ReplicaId } from './BucketStorageBatch.js';

type NodeBuffer = Buffer<ArrayBuffer>;
@@ -24,23 +24,21 @@ export const BSON_DESERIALIZE_DATA_OPTIONS: bson.DeserializeOptions = {
* Lookup serialization must be number-agnostic. I.e. normalize numbers, instead of preserving numbers.
* @param lookup
*/
export const serializeLookupBuffer = (lookup: SqliteJsonValue[]): NodeBuffer => {
const normalized = lookup.map((value) => {
if (typeof value == 'number' && Number.isInteger(value)) {
return BigInt(value);
} else {
return value;
}
});
return bson.serialize({ l: normalized }) as NodeBuffer;
export const serializeLookupBuffer = (lookup: ParameterLookup): NodeBuffer => {
return bson.serialize({ l: lookup.values }) as NodeBuffer;
};

export const serializeLookup = (lookup: SqliteJsonValue[]) => {
export const serializeLookup = (lookup: ParameterLookup) => {
return new bson.Binary(serializeLookupBuffer(lookup));
};

export const getLookupBucketDefinitionName = (lookup: bson.Binary) => {
export const deserializeParameterLookup = (lookup: bson.Binary) => {
const parsed = bson.deserialize(lookup.buffer, BSON_DESERIALIZE_INTERNAL_OPTIONS).l as SqliteJsonValue[];
return parsed;
};

export const getLookupBucketDefinitionName = (lookup: bson.Binary) => {
const parsed = deserializeParameterLookup(lookup);
return parsed[0] as string;
};

76 changes: 54 additions & 22 deletions packages/service-core/src/sync/BucketChecksumState.ts
Original file line number Diff line number Diff line change
@@ -7,6 +7,8 @@ import { ErrorCode, logger, ServiceAssertionError, ServiceError } from '@powersy
import { BucketParameterQuerier } from '@powersync/service-sync-rules/src/BucketParameterQuerier.js';
import { BucketSyncState } from './sync.js';
import { SyncContext } from './SyncContext.js';
import { JSONBig } from '@powersync/service-jsonbig';
import { hasIntersection } from './util.js';

export interface BucketChecksumStateOptions {
syncContext: SyncContext;
@@ -268,6 +270,10 @@ export class BucketParameterState {
public readonly syncParams: RequestParameters;
private readonly querier: BucketParameterQuerier;
private readonly staticBuckets: Map<string, BucketDescription>;
private cachedDynamicBuckets: BucketDescription[] | null = null;
private cachedDynamicBucketSet: Set<string> | null = null;

private readonly lookups: Set<string>;

constructor(
context: SyncContext,
@@ -282,6 +288,7 @@ export class BucketParameterState {

this.querier = syncRules.getBucketParameterQuerier(this.syncParams);
this.staticBuckets = new Map<string, BucketDescription>(this.querier.staticBuckets.map((b) => [b.bucket, b]));
this.lookups = new Set<string>(this.querier.parameterQueryLookups.map((l) => JSONBig.stringify(l.values)));
}

async getCheckpointUpdate(checkpoint: storage.StorageCheckpointUpdate): Promise<CheckpointUpdate | null> {
@@ -361,36 +368,61 @@ export class BucketParameterState {
const staticBuckets = querier.staticBuckets;
const update = checkpoint.update;

let hasChange = false;
if (update.invalidateDataBuckets || update.updatedDataBuckets?.length > 0) {
hasChange = true;
} else if (update.invalidateParameterBuckets) {
hasChange = true;
let hasParameterChange = false;
let invalidateDataBuckets = false;
// If hasParameterChange == true, then invalidateDataBuckets = true
// If invalidateDataBuckets == true, we ignore updatedBuckets
let updatedBuckets = new Set<string>();

if (update.invalidateDataBuckets) {
invalidateDataBuckets = true;
}

if (update.invalidateParameterBuckets) {
hasParameterChange = true;
} else {
for (let bucket of update.updatedParameterBucketDefinitions ?? []) {
if (querier.dynamicBucketDefinitions.has(bucket)) {
hasChange = true;
break;
}
if (hasIntersection(this.lookups, update.updatedParameterLookups)) {
// This is a very coarse re-check of all queries
hasParameterChange = true;
}
}

if (!hasChange) {
return null;
}
let dynamicBuckets: BucketDescription[];
if (hasParameterChange || this.cachedDynamicBuckets == null || this.cachedDynamicBucketSet == null) {
dynamicBuckets = await querier.queryDynamicBucketDescriptions({
getParameterSets(lookups) {
return storage.getParameterSets(checkpoint.base.checkpoint, lookups);
}
});
this.cachedDynamicBuckets = dynamicBuckets;
this.cachedDynamicBucketSet = new Set<string>(dynamicBuckets.map((b) => b.bucket));
invalidateDataBuckets = true;
} else {
dynamicBuckets = this.cachedDynamicBuckets;

const dynamicBuckets = await querier.queryDynamicBucketDescriptions({
getParameterSets(lookups) {
return storage.getParameterSets(checkpoint.base.checkpoint, lookups);
if (!invalidateDataBuckets) {
// TODO: Do set intersection instead
for (let bucket of update.updatedDataBuckets ?? []) {
if (this.staticBuckets.has(bucket) || this.cachedDynamicBucketSet.has(bucket)) {
updatedBuckets.add(bucket);
}
}
}
});
}
const allBuckets = [...staticBuckets, ...dynamicBuckets];

return {
buckets: allBuckets,
// We cannot track individual bucket updates for dynamic lookups yet
updatedBuckets: null
};
if (invalidateDataBuckets) {
return {
buckets: allBuckets,
// We cannot track individual bucket updates for dynamic lookups yet
updatedBuckets: null
};
} else {
return {
buckets: allBuckets,
updatedBuckets: updatedBuckets
};
}
}
}

12 changes: 12 additions & 0 deletions packages/service-core/src/sync/util.ts
Original file line number Diff line number Diff line change
@@ -153,3 +153,15 @@ export function settledPromise<T>(promise: Promise<T>): Promise<PromiseSettledRe
}
);
}

/**
 * Returns true if the two sets share at least one element.
 *
 * Iterates over the smaller set and probes the larger one, so the cost is
 * O(min(|a|, |b|)) lookups. Does not mutate either set — the swap only
 * reassigns the local parameter bindings.
 */
export function hasIntersection<T>(a: Set<T>, b: Set<T>): boolean {
  if (a.size > b.size) {
    [a, b] = [b, a];
  }
  // Now, a is always smaller than b, so iterate over a
  for (let value of a) {
    if (b.has(value)) {
      return true;
    }
  }
  // Previously fell off the end here, implicitly returning undefined and making
  // the inferred return type boolean | undefined. Return false explicitly.
  return false;
}
17 changes: 9 additions & 8 deletions packages/service-core/test/src/sync/BucketChecksumState.test.ts
Original file line number Diff line number Diff line change
@@ -8,7 +8,8 @@ import {
SyncContext,
WatchFilterEvent
} from '@/index.js';
import { RequestParameters, SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
import { JSONBig } from '@powersync/service-jsonbig';
import { RequestParameters, SqliteJsonRow, ParameterLookup, SqlSyncRules } from '@powersync/service-sync-rules';
import { describe, expect, test } from 'vitest';

describe('BucketChecksumState', () => {
@@ -99,7 +100,7 @@ bucket_definitions:
update: {
updatedDataBuckets: ['global[]'],
invalidateDataBuckets: false,
updatedParameterBucketDefinitions: [],
updatedParameterLookups: new Set(),
invalidateParameterBuckets: false
}
}))!;
@@ -474,10 +475,10 @@ bucket_definitions:

storage.getParameterSets = async (
checkpoint: InternalOpId,
lookups: SqliteJsonValue[][]
lookups: ParameterLookup[]
): Promise<SqliteJsonRow[]> => {
expect(checkpoint).toEqual(1n);
expect(lookups).toEqual([['by_project', '1', 'u1']]);
expect(lookups).toEqual([ParameterLookup.normalized('by_project', '1', ['u1'])]);
return [{ id: 1 }, { id: 2 }];
};

@@ -519,10 +520,10 @@ bucket_definitions:

storage.getParameterSets = async (
checkpoint: InternalOpId,
lookups: SqliteJsonValue[][]
lookups: ParameterLookup[]
): Promise<SqliteJsonRow[]> => {
expect(checkpoint).toEqual(2n);
expect(lookups).toEqual([['by_project', '1', 'u1']]);
expect(lookups).toEqual([ParameterLookup.normalized('by_project', '1', ['u1'])]);
return [{ id: 1 }, { id: 2 }, { id: 3 }];
};

@@ -533,7 +534,7 @@ bucket_definitions:
update: {
invalidateDataBuckets: false,
updatedDataBuckets: [],
updatedParameterBucketDefinitions: ['by_project'],
updatedParameterLookups: new Set([JSONBig.stringify(['by_project', '1', 'u1'])]),
invalidateParameterBuckets: false
}
}))!;
@@ -580,7 +581,7 @@ class MockBucketChecksumStateStorage implements BucketChecksumStateStorage {
);
}

async getParameterSets(checkpoint: InternalOpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
async getParameterSets(checkpoint: InternalOpId, lookups: ParameterLookup[]): Promise<SqliteJsonRow[]> {
throw new Error('Method not implemented.');
}
}
35 changes: 29 additions & 6 deletions packages/sync-rules/src/BucketParameterQuerier.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { BucketDescription } from './BucketDescription.js';
import { RequestParameters, SqliteJsonRow, SqliteJsonValue } from './types.js';
import { normalizeParameterValue } from './utils.js';

/**
* Represents a set of parameter queries for a specific request.
@@ -19,11 +20,11 @@ export interface BucketParameterQuerier {
* True if there are dynamic buckets, meaning queryDynamicBucketDescriptions() should be used.
*
* If this is false, queryDynamicBucketDescriptions() will always return an empty array,
* and dynamicBucketDefinitions.size == 0.
* and parameterQueryLookups.length == 0.
*/
readonly hasDynamicBuckets: boolean;

readonly dynamicBucketDefinitions: Set<string>;
readonly parameterQueryLookups: ParameterLookup[];

/**
* These buckets depend on parameter storage, and needs to be retrieved dynamically for each checkpoint.
@@ -39,19 +40,19 @@ export interface BucketParameterQuerier {
}

export interface ParameterLookupSource {
getParameterSets: (lookups: SqliteJsonValue[][]) => Promise<SqliteJsonRow[]>;
getParameterSets: (lookups: ParameterLookup[]) => Promise<SqliteJsonRow[]>;
}

export interface QueryBucketDescriptorOptions extends ParameterLookupSource {
parameters: RequestParameters;
}

export function mergeBucketParameterQueriers(queriers: BucketParameterQuerier[]): BucketParameterQuerier {
const dynamicBucketDefinitions = new Set<string>(queriers.flatMap((q) => [...q.dynamicBucketDefinitions]));
const parameterQueryLookups = queriers.flatMap((q) => q.parameterQueryLookups);
return {
staticBuckets: queriers.flatMap((q) => q.staticBuckets),
hasDynamicBuckets: dynamicBucketDefinitions.size > 0,
dynamicBucketDefinitions,
hasDynamicBuckets: parameterQueryLookups.length > 0,
parameterQueryLookups: parameterQueryLookups,
async queryDynamicBucketDescriptions(source: ParameterLookupSource) {
let results: BucketDescription[] = [];
for (let q of queriers) {
@@ -63,3 +64,25 @@ export function mergeBucketParameterQueriers(queriers: BucketParameterQuerier[])
}
};
}

/**
 * An equality lookup produced by a parameter query.
 *
 * Only equality filters are supported at this time.
 */
export class ParameterLookup {
  /**
   * Build a lookup, normalizing each lookup value first
   * (integral numbers become bigint).
   */
  static normalized(bucketDefinitionName: string, queryIndex: string, values: SqliteJsonValue[]): ParameterLookup {
    const normalizedValues = values.map((value) => normalizeParameterValue(value));
    return new ParameterLookup([bucketDefinitionName, queryIndex, ...normalizedValues]);
  }

  /**
   * Layout: [bucket definition name, parameter query index, ...lookup values].
   *
   * @param values must be pre-normalized (any integer converted into bigint)
   */
  constructor(readonly values: SqliteJsonValue[]) {}
}
6 changes: 5 additions & 1 deletion packages/sync-rules/src/SqlBucketDescriptor.ts
Original file line number Diff line number Diff line change
@@ -113,7 +113,7 @@ export class SqlBucketDescriptor {
const staticQuerier = {
staticBuckets,
hasDynamicBuckets: false,
dynamicBucketDefinitions: new Set<string>(),
parameterQueryLookups: [],
queryDynamicBucketDescriptions: async () => []
} satisfies BucketParameterQuerier;

@@ -133,6 +133,10 @@ export class SqlBucketDescriptor {
return results;
}

hasDynamicBucketQueries(): boolean {
return this.parameter_queries.length > 0;
}

getSourceTables(): Set<TablePattern> {
let result = new Set<TablePattern>();
for (let query of this.parameter_queries) {
27 changes: 14 additions & 13 deletions packages/sync-rules/src/SqlParameterQuery.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { parse, SelectedColumn } from 'pgsql-ast-parser';
import { BucketDescription, BucketPriority, defaultBucketPriority } from './BucketDescription.js';
import { BucketParameterQuerier, ParameterLookupSource } from './BucketParameterQuerier.js';
import { BucketParameterQuerier, ParameterLookup, ParameterLookupSource } from './BucketParameterQuerier.js';
import { SqlRuleError } from './errors.js';
import { SourceTableInterface } from './SourceTableInterface.js';
import { SqlTools } from './sql_filters.js';
@@ -23,7 +23,7 @@ import {
SqliteJsonValue,
SqliteRow
} from './types.js';
import { filterJsonRow, getBucketId, isJsonValue, isSelectStatement } from './utils.js';
import { filterJsonRow, getBucketId, isJsonValue, isSelectStatement, normalizeParameterValue } from './utils.js';

/**
* Represents a parameter query, such as:
@@ -230,15 +230,15 @@ export class SqlParameterQuery {
let lookup: SqliteJsonValue[] = [this.descriptor_name!, this.id!];
lookup.push(
...this.input_parameters!.map((param) => {
return param.filteredRowToLookupValue(filterParamSet);
return normalizeParameterValue(param.filteredRowToLookupValue(filterParamSet));
})
);

const data = this.transformRows(row);

const role: EvaluatedParameters = {
bucket_parameters: data.map((row) => filterJsonRow(row)),
lookup: lookup
lookup: new ParameterLookup(lookup)
};
result.push(role);
}
@@ -297,7 +297,7 @@ export class SqlParameterQuery {
*
* Each lookup is [bucket definition name, parameter query index, ...lookup values]
*/
getLookups(parameters: RequestParameters): SqliteJsonValue[][] {
getLookups(parameters: RequestParameters): ParameterLookup[] {
if (!this.expanded_input_parameter) {
let lookup: SqliteJsonValue[] = [this.descriptor_name!, this.id!];

@@ -308,7 +308,7 @@ export class SqlParameterQuery {
const value = param.parametersToLookupValue(parameters);

if (isJsonValue(value)) {
return value;
return normalizeParameterValue(value);
} else {
valid = false;
return null;
@@ -318,7 +318,7 @@ export class SqlParameterQuery {
if (!valid) {
return [];
}
return [lookup];
return [new ParameterLookup(lookup)];
} else {
const arrayString = this.expanded_input_parameter.parametersToLookupValue(parameters);

@@ -339,17 +339,18 @@ export class SqlParameterQuery {
.map((expandedValue) => {
let lookup: SqliteJsonValue[] = [this.descriptor_name!, this.id!];
let valid = true;
const normalizedExpandedValue = normalizeParameterValue(expandedValue);
lookup.push(
...this.input_parameters!.map((param): SqliteJsonValue => {
if (param == this.expanded_input_parameter) {
// Expand array value
return expandedValue;
return normalizedExpandedValue;
} else {
// Scalar value
const value = param.parametersToLookupValue(parameters);

if (isJsonValue(value)) {
return value;
return normalizeParameterValue(value);
} else {
valid = false;
return null;
@@ -361,9 +362,9 @@ export class SqlParameterQuery {
return null;
}

return lookup;
return new ParameterLookup(lookup);
})
.filter((lookup) => lookup != null) as SqliteJsonValue[][];
.filter((lookup) => lookup != null) as ParameterLookup[];
}
}

@@ -375,15 +376,15 @@ export class SqlParameterQuery {
return {
staticBuckets: [],
hasDynamicBuckets: false,
dynamicBucketDefinitions: new Set<string>(),
parameterQueryLookups: [],
queryDynamicBucketDescriptions: async () => []
};
}

return {
staticBuckets: [],
hasDynamicBuckets: true,
dynamicBucketDefinitions: new Set<string>([this.descriptor_name!]),
parameterQueryLookups: lookups,
queryDynamicBucketDescriptions: async (source: ParameterLookupSource) => {
const bucketParameters = await source.getParameterSets(lookups);
return this.resolveBucketDescriptions(bucketParameters, requestParameters);
4 changes: 4 additions & 0 deletions packages/sync-rules/src/SqlSyncRules.ts
Original file line number Diff line number Diff line change
@@ -326,6 +326,10 @@ export class SqlSyncRules implements SyncRules {
return mergeBucketParameterQueriers(queriers);
}

hasDynamicBucketQueries() {
return this.bucket_descriptors.some((query) => query.hasDynamicBucketQueries());
}

getSourceTables(): TablePattern[] {
const sourceTables = new Map<String, TablePattern>();
for (const bucket of this.bucket_descriptors) {
1 change: 1 addition & 0 deletions packages/sync-rules/src/index.ts
Original file line number Diff line number Diff line change
@@ -21,3 +21,4 @@ export * from './TablePattern.js';
export * from './TsSchemaGenerator.js';
export * from './types.js';
export * from './utils.js';
export * from './BucketParameterQuerier.js';
5 changes: 3 additions & 2 deletions packages/sync-rules/src/types.ts
Original file line number Diff line number Diff line change
@@ -5,6 +5,7 @@ import { SyncRulesOptions } from './SqlSyncRules.js';
import { TablePattern } from './TablePattern.js';
import { toSyncRulesParameters } from './utils.js';
import { BucketDescription, BucketPriority } from './BucketDescription.js';
import { ParameterLookup } from './BucketParameterQuerier.js';

export interface SyncRules {
evaluateRow(options: EvaluateRowOptions): EvaluationResult[];
@@ -18,7 +19,7 @@ export interface QueryParseOptions extends SyncRulesOptions {
}

export interface EvaluatedParameters {
lookup: SqliteJsonValue[];
lookup: ParameterLookup;

/**
* Parameters used to generate bucket id. May be incomplete.
@@ -61,7 +62,7 @@ export function isEvaluatedRow(e: EvaluationResult): e is EvaluatedRow {
}

export function isEvaluatedParameters(e: EvaluatedParametersResult): e is EvaluatedParameters {
return Array.isArray((e as any).lookup);
return Array.isArray((e as EvaluatedParameters).lookup?.values);
}

export type EvaluationResult = EvaluatedRow | EvaluationError;
10 changes: 10 additions & 0 deletions packages/sync-rules/src/utils.ts
Original file line number Diff line number Diff line change
@@ -170,3 +170,13 @@ export const JSONBucketNameSerialize = {
return stringifyRaw(value, replacer, space)!;
}
};

/**
 * Normalize a value for use in a lookup, so that serialization is
 * number-agnostic: integral numbers are converted to bigint, and every
 * other value passes through unchanged.
 */
export function normalizeParameterValue(value: SqliteJsonValue): SqliteJsonValue {
  const isIntegralNumber = typeof value == 'number' && Number.isInteger(value);
  return isIntegralNumber ? BigInt(value) : value;
}
167 changes: 101 additions & 66 deletions packages/sync-rules/test/src/parameter_queries.test.ts

Large diffs are not rendered by default.

22 changes: 11 additions & 11 deletions packages/sync-rules/test/src/sync_rules.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { describe, expect, test } from 'vitest';
import { SqlSyncRules } from '../../src/index.js';
import { ParameterLookup, SqlSyncRules } from '../../src/index.js';

import { ASSETS, BASIC_SCHEMA, PARSE_OPTIONS, TestSourceTable, USERS, normalizeTokenParameters } from './util.js';

@@ -37,10 +37,10 @@ bucket_definitions:
bucket: 'mybucket[]'
}
]);
expect(rules.hasDynamicBucketQueries()).toBe(false);
expect(rules.getBucketParameterQuerier(normalizeTokenParameters({}))).toMatchObject({
staticBuckets: [{ bucket: 'mybucket[]', priority: 3 }],
hasDynamicBuckets: false,
dynamicBucketDefinitions: new Set()
hasDynamicBuckets: false
});
});

@@ -72,8 +72,7 @@ bucket_definitions:
});
expect(rules.getBucketParameterQuerier(normalizeTokenParameters({}))).toMatchObject({
staticBuckets: [],
hasDynamicBuckets: false,
dynamicBucketDefinitions: new Set()
hasDynamicBuckets: false
});
});

@@ -94,7 +93,7 @@ bucket_definitions:
expect(rules.evaluateParameterRow(USERS, { id: 'user1', is_admin: 1 })).toEqual([
{
bucket_parameters: [{}],
lookup: ['mybucket', '1', 'user1']
lookup: ParameterLookup.normalized('mybucket', '1', ['user1'])
}
]);
expect(rules.evaluateParameterRow(USERS, { id: 'user1', is_admin: 0 })).toEqual([]);
@@ -936,15 +935,16 @@ bucket_definitions:
);
const bucket = rules.bucket_descriptors[0];
expect(bucket.bucket_parameters).toEqual(['user_id']);
expect(rules.hasDynamicBucketQueries()).toBe(true);

expect(rules.getBucketParameterQuerier(normalizeTokenParameters({ user_id: 'user1' }))).toMatchObject({
hasDynamicBuckets: true,
dynamicBucketDefinitions: new Set([
'mybucket',
'by_list',
parameterQueryLookups: [
ParameterLookup.normalized('mybucket', '2', ['user1']),
ParameterLookup.normalized('by_list', '1', ['user1']),
// These are not filtered out yet, due to how the lookups are structured internally
'admin_only'
]),
ParameterLookup.normalized('admin_only', '1', [1])
],
staticBuckets: [
{
bucket: 'mybucket["user1"]',