Skip to content

Commit 4a1ac00

Browse files
committed
fix(sync): remove nested transaction that deadlocks syncs of more than 10 files
sync.ts wraps the add/modify loop in engine.transaction(), and each importFromContent call inside opens another one. PGLite's _runExclusiveTransaction is a non-reentrant mutex — the nested call queues on the mutex that the outer transaction is still holding, and the process hangs forever in ep_poll. Reproduced with a 15-file commit: unpatched hangs indefinitely; patched completes in 3.4s. The fix drops the outer wrap; per-file atomicity is the correct granularity anyway (one file's failure should not roll back the others' successful imports).
1 parent b7e3005 commit 4a1ac00

1 file changed

Lines changed: 21 additions & 21 deletions

File tree

src/commands/sync.ts

Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -203,29 +203,29 @@ export async function performSync(engine: BrainEngine, opts: SyncOpts): Promise<
203203
pagesAffected.push(newSlug);
204204
}
205205

206-
// Process adds and modifies
207-
const useTransaction = (filtered.added.length + filtered.modified.length) > 10;
208-
const processAddsModifies = async () => {
209-
for (const path of [...filtered.added, ...filtered.modified]) {
210-
const filePath = join(repoPath, path);
211-
if (!existsSync(filePath)) continue;
212-
try {
213-
const result = await importFile(engine, filePath, path, { noEmbed });
214-
if (result.status === 'imported') {
215-
chunksCreated += result.chunks;
216-
pagesAffected.push(result.slug);
217-
}
218-
} catch (e: unknown) {
219-
const msg = e instanceof Error ? e.message : String(e);
220-
console.error(` Warning: skipped ${path}: ${msg}`);
206+
// Process adds and modifies.
207+
//
208+
// NOTE: do NOT wrap this loop in engine.transaction(). importFromContent
209+
// already opens its own inner transaction per file, and PGLite transactions
210+
// are not reentrant — they acquire the same _runExclusiveTransaction mutex,
211+
// so a nested call from inside a user callback queues forever on the mutex
212+
// the outer transaction is still holding. Result: incremental sync hangs in
213+
// ep_poll whenever the diff crosses the old > 10 threshold that used to
214+
// trigger the outer wrap. Per-file atomicity is also the right granularity:
215+
// one file's failure should not roll back the others' successful imports.
216+
for (const path of [...filtered.added, ...filtered.modified]) {
217+
const filePath = join(repoPath, path);
218+
if (!existsSync(filePath)) continue;
219+
try {
220+
const result = await importFile(engine, filePath, path, { noEmbed });
221+
if (result.status === 'imported') {
222+
chunksCreated += result.chunks;
223+
pagesAffected.push(result.slug);
221224
}
225+
} catch (e: unknown) {
226+
const msg = e instanceof Error ? e.message : String(e);
227+
console.error(` Warning: skipped ${path}: ${msg}`);
222228
}
223-
};
224-
225-
if (useTransaction) {
226-
await engine.transaction(async () => { await processAddsModifies(); });
227-
} else {
228-
await processAddsModifies();
229229
}
230230

231231
const elapsed = Date.now() - start;

0 commit comments

Comments
 (0)