@@ -151,36 +151,30 @@ impl FsStore {
         Ok(())
     }
 
-    pub fn compact(&self, id: &DocumentId, full_doc: &[u8]) -> Result<(), Error> {
+    pub fn compact(
+        &self,
+        id: &DocumentId,
+        full_doc: &[u8],
+        new_heads: Vec<ChangeHash>,
+    ) -> Result<(), Error> {
         let paths = DocIdPaths::from(id);
 
         // Load all the data we have into a doc
         match Chunks::load(&self.root, id) {
             Ok(Some(chunks)) => {
-                let doc = chunks
-                    .to_doc()
-                    .map_err(|e| Error(ErrorKind::LoadDocToCompact(e)))?;
-
                 // Write the snapshot
-                let output_chunk_name = SavedChunkName::new_snapshot(doc.get_heads());
-                let chunk = doc.save();
-                write_chunk(&self.root, &paths, &chunk, output_chunk_name.clone())?;
+                let output_chunk_name = SavedChunkName::new_snapshot(new_heads);
+                write_chunk(&self.root, &paths, full_doc, output_chunk_name.clone())?;
 
                 // Remove all the old data
                 for incremental in chunks.incrementals.keys() {
                     let path = paths.chunk_path(&self.root, incremental);
                     std::fs::remove_file(&path)
                         .map_err(|e| Error(ErrorKind::DeleteChunk(path, e)))?;
                 }
-                let just_wrote = paths.chunk_path(&self.root, &output_chunk_name);
                 for snapshot in chunks.snapshots.keys() {
                     let path = paths.chunk_path(&self.root, snapshot);
 
-                    if path == just_wrote {
-                        tracing::trace!("Somehow trying to delete the same path we just wrote to. Not today Satan");
-                        continue;
-                    }
-
                     std::fs::remove_file(&path)
                         .map_err(|e| Error(ErrorKind::DeleteChunk(path, e)))?;
                 }
@@ -441,21 +435,6 @@ impl Chunks {
             incrementals,
         }))
     }
-
-    fn to_doc(&self) -> Result<automerge::Automerge, automerge::AutomergeError> {
-        let mut bytes = Vec::new();
-        for chunk in self.snapshots.values() {
-            bytes.extend(chunk);
-        }
-        for chunk in self.incrementals.values() {
-            bytes.extend(chunk);
-        }
-
-        automerge::Automerge::load_with_options(
-            &bytes,
-            automerge::LoadOptions::new().on_partial_load(automerge::OnPartialLoad::Ignore),
-        )
-    }
 }
 
 mod error {
@@ -499,8 +478,6 @@ mod error {
         ErrReadingChunkFile(PathBuf, std::io::Error),
         #[error("error creating level 2 path {0}: {1}")]
         CreateLevel2Path(PathBuf, std::io::Error),
-        #[error("error loading doc to compact: {0}")]
-        LoadDocToCompact(automerge::AutomergeError),
         #[error("error creating temp file: {0}")]
         CreateTempFile(std::io::Error),
         #[error("error writing temp file {0}: {1}")]