@@ -7,7 +7,7 @@ use core::ffi::c_int;
 use core::slice;
 
 use sqlite::{Connection, Context, ResultCode, Value};
-use sqlite_nostd as sqlite;
+use sqlite_nostd::{self as sqlite, ManagedStmt};
 
 use crate::create_sqlite_text_fn;
 use crate::error::{PSResult, SQLiteError};
@@ -143,16 +143,7 @@ fn powersync_trigger_insert_sql_impl(
     let local_db = ctx.db_handle();
     let stmt2 = local_db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e")?;
     stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;
-
-    let mut column_names_quoted: Vec<String> = alloc::vec![];
-    while stmt2.step()? == ResultCode::ROW {
-        let name = stmt2.column_text(0)?;
-
-        let foo: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
-        column_names_quoted.push(foo);
-    }
-
-    let json_fragment = column_names_quoted.join(", ");
+    let json_fragment = json_object_fragment("NEW", &stmt2)?;
 
     return if !local_only && !insert_only {
         let trigger = format!("\
@@ -165,8 +156,8 @@ fn powersync_trigger_insert_sql_impl(
 THEN RAISE (FAIL, 'id is required')
 END;
 INSERT INTO {:}
-SELECT NEW.id, json_object({:});
-INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
+SELECT NEW.id, {:};
+INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
 INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
 INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
 END", trigger_name, quoted_name, internal_name, json_fragment, type_string, json_fragment, type_string, MAX_OP_ID);
@@ -178,7 +169,7 @@ fn powersync_trigger_insert_sql_impl(
 INSTEAD OF INSERT ON {:}
 FOR EACH ROW
 BEGIN
-INSERT INTO {:} SELECT NEW.id, json_object({:});
+INSERT INTO {:} SELECT NEW.id, {:};
 END",
             trigger_name, quoted_name, internal_name, json_fragment
         );
@@ -189,7 +180,7 @@ fn powersync_trigger_insert_sql_impl(
 INSTEAD OF INSERT ON {:}
 FOR EACH ROW
 BEGIN
-INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', json_object({:})))));
+INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PUT', 'type', {}, 'id', NEW.id, 'data', json(powersync_diff('{{}}', {:}))));
 END", trigger_name, quoted_name, type_string, json_fragment);
         Ok(trigger)
     } else {
@@ -224,20 +215,9 @@ fn powersync_trigger_update_sql_impl(
     let db = ctx.db_handle();
     let stmt2 = db.prepare_v2("select json_extract(e.value, '$.name') as name from json_each(json_extract(?, '$.columns')) e").into_db_result(db)?;
     stmt2.bind_text(1, table, sqlite::Destructor::STATIC)?;
-
-    let mut column_names_quoted_new: Vec<String> = alloc::vec![];
-    let mut column_names_quoted_old: Vec<String> = alloc::vec![];
-    while stmt2.step()? == ResultCode::ROW {
-        let name = stmt2.column_text(0)?;
-
-        let foo_new: String = format!("{:}, NEW.{:}", quote_string(name), quote_identifier(name));
-        column_names_quoted_new.push(foo_new);
-        let foo_old: String = format!("{:}, OLD.{:}", quote_string(name), quote_identifier(name));
-        column_names_quoted_old.push(foo_old);
-    }
-
-    let json_fragment_new = column_names_quoted_new.join(", ");
-    let json_fragment_old = column_names_quoted_old.join(", ");
+    let json_fragment_new = json_object_fragment("NEW", &stmt2)?;
+    stmt2.reset()?;
+    let json_fragment_old = json_object_fragment("OLD", &stmt2)?;
 
     return if !local_only && !insert_only {
         let trigger = format!("\
@@ -250,9 +230,9 @@ BEGIN
 THEN RAISE (FAIL, 'Cannot update id')
 END;
 UPDATE {:}
-SET data = json_object({:})
+SET data = {:}
 WHERE id = NEW.id;
-INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff(json_object({:}), json_object({:})))));
+INSERT INTO powersync_crud_(data) VALUES(json_object('op', 'PATCH', 'type', {:}, 'id', NEW.id, 'data', json(powersync_diff({:}, {:}))));
 INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES({:}, NEW.id);
 INSERT OR REPLACE INTO ps_buckets(name, last_op, target_op) VALUES('$local', 0, {:});
 END", trigger_name, quoted_name, internal_name, json_fragment_new, type_string, json_fragment_old, json_fragment_new, type_string, MAX_OP_ID);
@@ -269,7 +249,7 @@ BEGIN
 THEN RAISE (FAIL, 'Cannot update id')
 END;
 UPDATE {:}
-SET data = json_object({:})
+SET data = {:}
 WHERE id = NEW.id;
 END",
             trigger_name, quoted_name, internal_name, json_fragment_new
@@ -335,3 +315,45 @@ pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
 
     Ok(())
 }
+
+/// Given a query returning column names, return a JSON object fragment for a trigger.
+///
+/// Example output with prefix "NEW": "json_object('id', NEW.id, 'name', NEW.name, 'age', NEW.age)".
+fn json_object_fragment(prefix: &str, name_results: &ManagedStmt) -> Result<String, SQLiteError> {
+    // floor(SQLITE_MAX_FUNCTION_ARG / 2).
+    // To keep databases portable, we use the default limit of 100 args for this,
+    // and don't try to query the limit dynamically.
+    const MAX_ARG_COUNT: usize = 50;
+
+    let mut column_names_quoted: Vec<String> = alloc::vec![];
+    while name_results.step()? == ResultCode::ROW {
+        let name = name_results.column_text(0)?;
+
+        let quoted: String = format!(
+            "{:}, {:}.{:}",
+            quote_string(name),
+            prefix,
+            quote_identifier(name)
+        );
+        column_names_quoted.push(quoted);
+    }
+
+    // SQLITE_MAX_COLUMN - 1 (because of the id column)
+    if column_names_quoted.len() > 1999 {
+        return Err(SQLiteError::from(ResultCode::TOOBIG));
+    } else if column_names_quoted.len() <= MAX_ARG_COUNT {
+        // Small number of columns - use json_object() directly.
+        let json_fragment = column_names_quoted.join(", ");
+        return Ok(format!("json_object({:})", json_fragment));
+    } else {
+        // Too many columns to use json_object directly.
+        // Instead, we build up the JSON object in chunks,
+        // and merge using powersync_json_merge().
+        let mut fragments: Vec<String> = alloc::vec![];
+        for chunk in column_names_quoted.chunks(MAX_ARG_COUNT) {
+            let sub_fragment = chunk.join(", ");
+            fragments.push(format!("json_object({:})", sub_fragment));
+        }
+        return Ok(format!("powersync_json_merge({:})", fragments.join(", ")));
+    }
+}
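
For illustration only (not part of the diff above): a minimal standalone sketch of the chunking strategy that json_object_fragment uses, so the shape of its output is easy to see. fragment_sketch is a hypothetical helper; its inline quoting is a simplified stand-in for quote_string/quote_identifier and does no escaping.

fn fragment_sketch(prefix: &str, columns: &[&str]) -> String {
    // Same assumption as in the diff: floor(SQLITE_MAX_FUNCTION_ARG / 2) with the default limit of 100.
    const MAX_ARG_COUNT: usize = 50;

    // "'name', PREFIX."name"" pairs, one per column (simplified quoting, no escaping).
    let quoted: Vec<String> = columns
        .iter()
        .map(|name| format!("'{0}', {1}.\"{0}\"", name, prefix))
        .collect();

    if quoted.len() <= MAX_ARG_COUNT {
        // Few columns: a single json_object() call.
        format!("json_object({})", quoted.join(", "))
    } else {
        // Many columns: one json_object() per chunk of 50, merged with powersync_json_merge().
        let parts: Vec<String> = quoted
            .chunks(MAX_ARG_COUNT)
            .map(|chunk| format!("json_object({})", chunk.join(", ")))
            .collect();
        format!("powersync_json_merge({})", parts.join(", "))
    }
}

fn main() {
    // Prints: json_object('a', NEW."a", 'b', NEW."b", 'c', NEW."c")
    println!("{}", fragment_sketch("NEW", &["a", "b", "c"]));
    // With more than 50 columns, the result instead looks like
    // powersync_json_merge(json_object(...), json_object(...), ...).
}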