@@ -3,6 +3,7 @@ import { LMDBCache, Cache } from "@parcel/cache"
 import path from "path"
 import type { Diagnostic } from "@parcel/diagnostic"
 import reporter from "gatsby-cli/lib/reporter"
+import { WorkerPool } from "gatsby-worker"
 import { ensureDir, emptyDir, existsSync, remove, readdir } from "fs-extra"
 import telemetry from "gatsby-telemetry"
 import { isNearMatch } from "../is-near-match"
@@ -52,6 +53,28 @@ export function constructParcel(siteRoot: string, cache?: Cache): Parcel {
   })
 }

+interface IProcessBundle {
+  filePath: string
+  mainEntryPath?: string
+}
+
+type RunParcelReturn = Array<IProcessBundle>
+
+export async function runParcel(siteRoot: string): Promise<RunParcelReturn> {
+  const cache = new LMDBCache(getCacheDir(siteRoot)) as unknown as Cache
+  const parcel = constructParcel(siteRoot, cache)
+  const { bundleGraph } = await parcel.run()
+  const bundles = bundleGraph.getBundles()
+  // bundles are not serializable, so we extract only the data we need
+  // so that it can cross IPC boundaries
+  return bundles.map(bundle => {
+    return {
+      filePath: bundle.filePath,
+      mainEntryPath: bundle.getMainEntry()?.filePath,
+    }
+  })
+}
+
 /**
  * Compile known gatsby-* files (e.g. `gatsby-config`, `gatsby-node`)
  * and output in `<SITE_ROOT>/.cache/compiled`.
@@ -107,33 +130,59 @@ export async function compileGatsbyFiles(
       })
     }

+    const worker = new WorkerPool<typeof import("./compile-gatsby-files")>(
+      require.resolve(`./compile-gatsby-files`),
+      {
+        numWorkers: 1,
+      }
+    )
+
     const distDir = `${siteRoot}/${COMPILED_CACHE_DIR}`
     await ensureDir(distDir)
     await emptyDir(distDir)

     await exponentialBackoff(retry)

-    // for whatever reason TS thinks LMDBCache is some browser Cache and not actually Parcel's Cache
-    // so we force type it to Parcel's Cache
-    const cache = new LMDBCache(getCacheDir(siteRoot)) as unknown as Cache
-    const parcel = constructParcel(siteRoot, cache)
-    const { bundleGraph } = await parcel.run()
-    let cacheClosePromise = Promise.resolve()
+    let bundles: RunParcelReturn = []
     try {
-      // @ts-ignore store is a public field on the LMDBCache class, but the public interface for
-      // Cache doesn't have it. There doesn't seem to be a proper public API for this, so we have
-      // to resort to reaching into internals. Just in case parcel changes its internals in the
-      // future, this is wrapped in try/catch (closing the cache is only needed when retrying,
-      // so if that change happens we shouldn't fail on happy builds)
-      cacheClosePromise = cache.store.close()
-    } catch (e) {
-      reporter.verbose(`Failed to close parcel cache\n${e.toString()}`)
+      // sometimes parcel segfaults, which is not something we can recover from, so we run parcel
+      // in a child process and IF it fails we try to delete parcel's cache (this seems to "fix"
+      // the problem causing the segfaults?) and retry a few times.
+      // not ideal, but having gatsby segfault is really frustrating, and the common remedy is to
+      // clean the entire .cache for users, which is not ideal either, especially when we can just
+      // delete parcel's cache and recover automatically
+      bundles = await worker.single.runParcel(siteRoot)
+    } catch (error) {
+      if (error.diagnostics) {
+        handleErrors(error.diagnostics)
+        return
+      } else if (retry >= RETRY_COUNT) {
+        reporter.panic({
+          id: `11904`,
+          error,
+          context: {
+            siteRoot,
+            retries: RETRY_COUNT,
+            sourceMessage: error.message,
+          },
+        })
+      } else {
+        await exponentialBackoff(retry)
+        try {
+          await remove(getCacheDir(siteRoot))
+        } catch {
+          // on windows we might get "EBUSY" errors if LMDB failed to close, so this try/catch is
+          // here to prevent EBUSY errors from potentially hiding real import errors
+        }
+        await compileGatsbyFiles(siteRoot, retry + 1)
+        return
+      }
+    } finally {
+      worker.end()
     }

     await exponentialBackoff(retry)

-    const bundles = bundleGraph.getBundles()
-
     if (bundles.length === 0) return

     let compiledTSFilesCount = 0
@@ -150,7 +199,7 @@ export async function compileGatsbyFiles(
               siteRoot,
               retries: RETRY_COUNT,
               compiledFileLocation: bundle.filePath,
-              sourceFileLocation: bundle.getMainEntry()?.filePath,
+              sourceFileLocation: bundle.mainEntryPath,
             },
           })
         } else if (retry > 0) {
@@ -165,9 +214,6 @@ export async function compileGatsbyFiles(
           )
         }

-        // sometimes the parcel cache gets into a weird state and we need to clear it
-        await cacheClosePromise
-
         try {
           await remove(getCacheDir(siteRoot))
         } catch {
@@ -179,7 +225,7 @@ export async function compileGatsbyFiles(
         return
       }

-      const mainEntry = bundle.getMainEntry()?.filePath
+      const mainEntry = bundle.mainEntryPath
       // mainEntry won't exist for shared chunks
       if (mainEntry) {
         if (mainEntry.endsWith(`.ts`)) {
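
For readers who haven't used `gatsby-worker` before, here is a minimal sketch of the recovery pattern this diff introduces, shown in isolation: run the crash-prone step in a single-worker pool so a segfault only takes down the child process, and on failure clear the suspect on-disk cache, back off, and retry. The module path `./task`, the exported `runTask` function, `MAX_RETRIES`, and the cache path are illustrative placeholders, not part of this change.

```ts
import { WorkerPool } from "gatsby-worker"
import { remove } from "fs-extra"

// Hypothetical retry budget, used only for illustration.
const MAX_RETRIES = 3

// Sketch: `./task` is assumed to export `async function runTask(root: string): Promise<void>`.
async function runTaskWithRecovery(root: string, retry = 0): Promise<void> {
  const worker = new WorkerPool<typeof import("./task")>(
    require.resolve(`./task`),
    { numWorkers: 1 }
  )

  try {
    // `.single` proxies the worker module's exports onto a single child process,
    // so a native crash (e.g. a segfault) rejects this promise instead of
    // killing the parent process.
    await worker.single.runTask(root)
  } catch (error) {
    if (retry >= MAX_RETRIES) {
      throw error
    }
    // Clear the on-disk cache suspected of causing the crash (path is illustrative),
    // back off exponentially, then retry with a fresh worker pool.
    await remove(`${root}/.cache/example-cache`).catch(() => undefined)
    await new Promise(resolve => setTimeout(resolve, 2 ** retry * 1000))
    await runTaskWithRecovery(root, retry + 1)
  } finally {
    worker.end()
  }
}
```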