Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 58 additions & 9 deletions app/build.bat
Original file line number Diff line number Diff line change
Expand Up @@ -610,6 +610,13 @@ if errorlevel 1 (
endlocal & exit /b 1
)

rem ── Pre-build verification (typecheck + tests across packages) ──
call :run_pre_build_tests
if errorlevel 1 (
for %%f in (!_all_expected!) do echo FAIL %%f>>"!BUILD_RESULTS_FILE!"
endlocal & exit /b 1
)

rem ── Build frontend ──
call :build_frontend
if errorlevel 1 (
Expand Down Expand Up @@ -721,6 +728,40 @@ call :elapsed_since !_icons_start! _icons_elapsed
echo === Icons done ^(!_icons_elapsed!^) ===
exit /b 0

rem ─── run_pre_build_tests ─────────────────────────────────────────────────
rem Pre-build verification: ensure source quality across packages before producing artifacts.
rem Backend unit tests run inside backend\build.bat, here we cover the rest.
rem Skip with SKIP_TESTS=1 only in emergencies (e.g. broken upstream tooling); CI must never set it.

:run_pre_build_tests
rem Typecheck shared/ and cli/ and run the frontend check before any artifact
rem is produced. Backend unit tests are NOT run here (see note above: they run
rem inside backend\build.bat). Returns 0 on success, 1 on the first failure.
rem NOTE(review): the early "exit /b 1" paths return with the working directory
rem still changed; the call sites pair this with setlocal/endlocal (see the
rem "endlocal & exit /b 1" at the caller), which should restore it - confirm.
if "%SKIP_TESTS%"=="1" (
rem Emergency escape hatch only; CI must never set SKIP_TESTS (see header note).
echo === Pre-build tests skipped ^(SKIP_TESTS=1^) ===
exit /b 0
)
rem Initialize then capture the start time for the elapsed-time report below.
set "_test_start=0"
call :get_timestamp _test_start
echo === Running pre-build verification ===
rem shared package: install exact lockfile deps, then typecheck.
rem "call bun" (not bare "bun") so control returns if bun resolves to a
rem .cmd/.bat shim - presumably why the diff adds "call"; confirm on Windows.
cd /d "!ROOT_DIR!\shared"
call bun install --frozen-lockfile
if errorlevel 1 exit /b 1
call bun run typecheck
if errorlevel 1 exit /b 1
rem cli package: same install + typecheck sequence.
cd /d "!ROOT_DIR!\cli"
call bun install --frozen-lockfile
if errorlevel 1 exit /b 1
call bun run typecheck
if errorlevel 1 exit /b 1
rem frontend package: install, then its "check" script (not "typecheck").
cd /d "!ROOT_DIR!\frontend"
call bun install --frozen-lockfile
if errorlevel 1 exit /b 1
call bun run check
if errorlevel 1 exit /b 1
rem Success path: return to the script directory and report elapsed time.
rem ^( and ^) escape parentheses inside unquoted echo arguments.
cd /d "!SCRIPT_DIR!"
set "_test_elapsed=0"
call :elapsed_since !_test_start! _test_elapsed
echo === Pre-build verification done ^(!_test_elapsed!^) ===
exit /b 0

rem ─── build_frontend ──────────────────────────────────────────────────────

:build_frontend
Expand All @@ -732,8 +773,11 @@ set "_fe_start=0"
call :get_timestamp _fe_start
echo === Building frontend ===
cd /d "!ROOT_DIR!\frontend"
call build.bat
call bun i --frozen-lockfile
if errorlevel 1 exit /b 1
call bun --bun run build
if errorlevel 1 exit /b 1
cd /d "!SCRIPT_DIR!"
set "_fe_elapsed=0"
call :elapsed_since !_fe_start! _fe_elapsed
echo === Frontend done ^(!_fe_elapsed!^) ===
Expand All @@ -749,10 +793,13 @@ call :get_timestamp _be_start
echo === Building backend ^(target: !BUN_TGT!^) ===
cd /d "!ROOT_DIR!\backend"
if exist build rmdir /s /q build
bun i --frozen-lockfile
call bun i --frozen-lockfile
if errorlevel 1 ( endlocal & exit /b 1 )
mkdir build
bun build --compile --target !BUN_TGT! src/app.ts --outfile build\lish-backend.exe
call bun build --compile --target !BUN_TGT! ./src/app.ts --outfile build\lish-backend.exe
if errorlevel 1 ( endlocal & exit /b 1 )
mkdir build\lish
call bun build ./src/lish/checksum-worker.ts --target bun --outfile build\lish\checksum-worker.js
if errorlevel 1 ( endlocal & exit /b 1 )
set "_be_elapsed=0"
call :elapsed_since !_be_start! _be_elapsed
Expand All @@ -764,19 +811,19 @@ rem ─── sync_product_info ────────────────

:sync_product_info
set "PRODUCT_JSON=!ROOT_DIR!\shared\src\product.json"
for /f "tokens=*" %%v in ('bun -e "process.stdout.write(require(process.argv[1]).version)" "!PRODUCT_JSON!"') do set "PRODUCT_VERSION=%%v"
for /f "tokens=*" %%n in ('bun -e "process.stdout.write(require(process.argv[1]).name)" "!PRODUCT_JSON!"') do set "PRODUCT_NAME=%%n"
for /f "tokens=*" %%d in ('bun -e "process.stdout.write(require(process.argv[1]).identifier)" "!PRODUCT_JSON!"') do set "PRODUCT_IDENTIFIER=%%d"
for /f "tokens=*" %%v in ('call bun -e "process.stdout.write(require(process.argv[1]).version)" "!PRODUCT_JSON!"') do set "PRODUCT_VERSION=%%v"
for /f "tokens=*" %%n in ('call bun -e "process.stdout.write(require(process.argv[1]).name)" "!PRODUCT_JSON!"') do set "PRODUCT_NAME=%%n"
for /f "tokens=*" %%d in ('call bun -e "process.stdout.write(require(process.argv[1]).identifier)" "!PRODUCT_JSON!"') do set "PRODUCT_IDENTIFIER=%%d"
echo Product: !PRODUCT_NAME! v!PRODUCT_VERSION! (!PRODUCT_IDENTIFIER!)

rem Sync tauri.conf.json
bun -e "var f=require('fs'),p=require(process.argv[1]),t=process.argv[2],c=JSON.parse(f.readFileSync(t,'utf8'));c.productName=p.name;c.mainBinaryName=p.name;c.version=p.version;c.identifier=p.identifier;c.bundle.windows.nsis.startMenuFolder=p.name;f.writeFileSync(t,JSON.stringify(c,null,'\t')+'\n')" "!PRODUCT_JSON!" "!SCRIPT_DIR!tauri.conf.json"
call bun -e "var f=require('fs'),p=require(process.argv[1]),t=process.argv[2],c=JSON.parse(f.readFileSync(t,'utf8'));c.productName=p.name;c.mainBinaryName=p.name;c.version=p.version;c.identifier=p.identifier;c.bundle.windows.nsis.startMenuFolder=p.name;f.writeFileSync(t,JSON.stringify(c,null,'\t')+'\n')" "!PRODUCT_JSON!" "!SCRIPT_DIR!tauri.conf.json"

rem Sync Cargo.toml version
bun -e "var f=require('fs'),v=process.argv[1],t=process.argv[2],s=f.readFileSync(t,'utf8').replace(/^version = \"[^\"]*\"/m,'version = \"'+v+'\"');f.writeFileSync(t,s)" "!PRODUCT_VERSION!" "!SCRIPT_DIR!Cargo.toml"
call bun -e "var f=require('fs'),v=process.argv[1],t=process.argv[2],s=f.readFileSync(t,'utf8').replace(/^version = \"[^\"]*\"/m,'version = \"'+v+'\"');f.writeFileSync(t,s)" "!PRODUCT_VERSION!" "!SCRIPT_DIR!Cargo.toml"

rem Sync wix-fragment-debug.wxs
bun -e "var f=require('fs'),n=process.argv[1],s=f.readFileSync(process.argv[2],'utf8').replace(/\{\{product_name\}\}/g,n);f.writeFileSync(process.argv[2],s)" "!PRODUCT_NAME!" "!SCRIPT_DIR!wix-fragment-debug.wxs"
call bun -e "var f=require('fs'),n=process.argv[1],s=f.readFileSync(process.argv[2],'utf8').replace(/\{\{product_name\}\}/g,n);f.writeFileSync(process.argv[2],s)" "!PRODUCT_NAME!" "!SCRIPT_DIR!wix-fragment-debug.wxs"
exit /b 0

rem ─── build_zip ────────────────────────────────────────────────────────────
Expand All @@ -788,6 +835,8 @@ if exist "!ZIP_STAGING!" rmdir /s /q "!ZIP_STAGING!"
mkdir "!ZIP_STAGING!"
copy /y "!BUILD_RELEASE_DIR!\!PRODUCT_NAME!.exe" "!ZIP_STAGING!\!PRODUCT_NAME!.exe" >nul
copy /y "!ROOT_DIR!\backend\build\lish-backend.exe" "!ZIP_STAGING!\lish-backend.exe" >nul
mkdir "!ZIP_STAGING!\lish"
xcopy /e /i /y "!ROOT_DIR!\backend\build\lish" "!ZIP_STAGING!\lish" >nul
rem Create Debug.bat from template
powershell -Command "(Get-Content '!SCRIPT_DIR!bundle-scripts\Debug.bat' -Raw) -replace '\{\{product_name\}\}','!PRODUCT_NAME!' | Set-Content '!ZIP_STAGING!\Debug.bat' -NoNewline"
powershell -Command "Compress-Archive -Path '!ZIP_STAGING!\*' -DestinationPath '!FINAL_DIR!\!PRODUCT_NAME!_!PRODUCT_VERSION!_windows_!_arch!.zip' -CompressionLevel !ZIP_PS_LEVEL! -Force"
Expand Down
24 changes: 24 additions & 0 deletions app/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -334,6 +334,22 @@ build_frontend() {
echo "=== Frontend done ($(elapsed_since $_t)) ==="
}

# Pre-build verification: ensure source quality across packages before producing artifacts.
# Backend unit tests run inside backend/build.sh, here we cover the rest.
# Skip with SKIP_TESTS=1 only in emergencies (e.g. broken upstream tooling); CI must never set it.
# Pre-build verification: typecheck shared/ and cli/ and run the frontend
# "check" script before any artifact is produced.  Backend unit tests run
# inside backend/build.sh, so they are not repeated here.
# SKIP_TESTS=1 is an emergency escape hatch only (see note above).
# Returns 0 on success, 1 on the first failing package.
run_pre_build_tests() {
  if [ "${SKIP_TESTS:-0}" = "1" ]; then
    echo "=== Pre-build tests skipped (SKIP_TESTS=1) ==="
    return 0
  fi
  _t=$(date +%s)
  echo "=== Running pre-build verification ==="
  # Each package runs in a subshell so the cd does not leak.  Failures are
  # propagated explicitly: without "|| return 1" a failing subshell's status
  # would be silently discarded whenever the script runs without `set -e`.
  (cd "$ROOT_DIR/shared" && bun install --frozen-lockfile && bun run typecheck) || return 1
  (cd "$ROOT_DIR/cli" && bun install --frozen-lockfile && bun run typecheck) || return 1
  (cd "$ROOT_DIR/frontend" && bun install --frozen-lockfile && bun run check) || return 1
  echo "=== Pre-build verification done ($(elapsed_since $_t)) ==="
}

build_backend() {
if [ "$BUILD_OS" = "macos" ] && [ "$BUILD_ARCH" = "universal" ]; then
_t=$(date +%s)
Expand Down Expand Up @@ -563,6 +579,7 @@ ${PRODUCT_NAME} - peer-to-peer file sharing application
%files
/usr/bin/${PRODUCT_NAME_LOWER}
/usr/bin/lish-backend
/usr/bin/lish
/usr/share/applications/${PRODUCT_NAME_LOWER}.desktop
/usr/share/applications/${PRODUCT_NAME_LOWER}-debug.desktop
/usr/share/icons/hicolor/256x256/apps/${PRODUCT_NAME_LOWER}.png
Expand Down Expand Up @@ -646,13 +663,17 @@ APPRUN_EOF
# Stage the Linux zip payload into $ZIP_STAGING: the launcher binary, the
# compiled backend, the backend's lish/ worker bundle, and the debug script.
# Assumes build_backend has already populated $ROOT_DIR/backend/build.
_stage_zip_linux() {
cp "$BUILD_RELEASE_DIR/$PRODUCT_NAME_LOWER" "$ZIP_STAGING/"
cp "$ROOT_DIR/backend/build/lish-backend" "$ZIP_STAGING/lish-backend"
# The checksum worker ships next to the backend binary under lish/
# (the backend resolves it relative to its own executable path).
mkdir -p "$ZIP_STAGING/lish"
cp -r "$ROOT_DIR/backend/build/lish/." "$ZIP_STAGING/lish/"
_copy_debug_script
# Zip archives preserve permission bits, so mark both binaries executable here.
chmod +x "$ZIP_STAGING/$PRODUCT_NAME_LOWER" "$ZIP_STAGING/lish-backend"
}

# Stage the Windows zip payload into $ZIP_STAGING: the launcher .exe, the
# compiled backend .exe, the backend's lish/ worker bundle, and Debug.bat
# rendered from its template.  No chmod needed for Windows artifacts.
_stage_zip_windows() {
cp "$BUILD_RELEASE_DIR/${PRODUCT_NAME}.exe" "$ZIP_STAGING/"
cp "$ROOT_DIR/backend/build/lish-backend.exe" "$ZIP_STAGING/lish-backend.exe"
# Worker bundle travels alongside the backend binary under lish/.
mkdir -p "$ZIP_STAGING/lish"
cp -r "$ROOT_DIR/backend/build/lish/." "$ZIP_STAGING/lish/"
# Substitute the product name into the Debug.bat template at staging time.
sed "s/{{product_name}}/$PRODUCT_NAME/g" \
"$SCRIPT_DIR/bundle-scripts/Debug.bat" >"$ZIP_STAGING/Debug.bat"
}
Expand Down Expand Up @@ -694,6 +715,8 @@ build_linux_packages() {
chmod +x "$PKG_STAGING/usr/bin/$PRODUCT_NAME_LOWER"
cp "$ROOT_DIR/backend/build/lish-backend" "$PKG_STAGING/usr/bin/"
chmod +x "$PKG_STAGING/usr/bin/lish-backend"
mkdir -p "$PKG_STAGING/usr/bin/lish"
cp -r "$ROOT_DIR/backend/build/lish/." "$PKG_STAGING/usr/bin/lish/"

generate_desktop_entry "$PKG_STAGING/usr/share/applications/${PRODUCT_NAME_LOWER}.desktop"
generate_desktop_entry "$PKG_STAGING/usr/share/applications/${PRODUCT_NAME_LOWER}-debug.desktop" --debug
Expand Down Expand Up @@ -914,6 +937,7 @@ docker_inner_build() {
_inner_fail=0

build_icons
run_pre_build_tests
build_frontend
build_backend
sync_product_info
Expand Down
3 changes: 2 additions & 1 deletion app/tauri.linux.conf.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
"mainBinaryName": "libershare",
"bundle": {
"resources": {
"../backend/build/lish-backend": "./lish-backend"
"../backend/build/lish-backend": "./lish-backend",
"../backend/build/lish/checksum-worker.js": "./lish/checksum-worker.js"
},
"linux": {
"deb": {
Expand Down
3 changes: 2 additions & 1 deletion app/tauri.macos.conf.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
{
"bundle": {
"resources": {
"../backend/build/lish-backend": "./lish-backend"
"../backend/build/lish-backend": "./lish-backend",
"../backend/build/lish/checksum-worker.js": "./lish/checksum-worker.js"
}
}
}
3 changes: 2 additions & 1 deletion app/tauri.windows.conf.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
{
"bundle": {
"resources": {
"../backend/build/lish-backend.exe": "./lish-backend.exe"
"../backend/build/lish-backend.exe": "./lish-backend.exe",
"../backend/build/lish/checksum-worker.js": "./lish/checksum-worker.js"
},
"windows": {
"wix": {
Expand Down
20 changes: 18 additions & 2 deletions backend/build.bat
Original file line number Diff line number Diff line change
@@ -1,8 +1,24 @@
@echo off
if exist build rmdir /s /q build
bun i --frozen-lockfile
call bun i --frozen-lockfile
if errorlevel 1 exit /b 1

rem Pre-build verification: typecheck + unit tests must pass before producing artifacts.
rem Skip with SKIP_TESTS=1 only in emergencies (e.g. broken upstream tooling); CI must never set it.
if not "%SKIP_TESTS%"=="1" (
call bun run typecheck
if errorlevel 1 exit /b 1
call bun run test
if errorlevel 1 exit /b 1
)

mkdir build
bun build --compile src/app.ts --outfile build\lish-backend.exe
call bun build --compile ./src/app.ts --outfile build\lish-backend.exe
if errorlevel 1 exit /b 1
mkdir build\lish
call bun build ./src/lish/checksum-worker.ts --target bun --outfile build\lish\checksum-worker.js
if errorlevel 1 exit /b 1

rem Patch PE subsystem from CONSOLE (3) to WINDOWS_GUI (2) to prevent console window
powershell -Command "$f='%~dp0build\lish-backend.exe'; $b=[IO.File]::ReadAllBytes($f); $pe=[BitConverter]::ToInt32($b,0x3C); $b[$pe+0x5C]=2; $b[$pe+0x5D]=0; [IO.File]::WriteAllBytes($f,$b)"
if errorlevel 1 exit /b 1
16 changes: 13 additions & 3 deletions backend/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,22 @@ done
mkdir -p build
bun i --frozen-lockfile

# Pre-build verification: typecheck + unit tests must pass before producing artifacts.
# Skip with SKIP_TESTS=1 only in emergencies (e.g. broken upstream tooling); CI must never set it.
if [ "${SKIP_TESTS:-0}" != "1" ]; then
bun run typecheck
bun run test
fi

mkdir -p build/lish

if [ -n "$BUN_TARGET" ]; then
echo "Building backend for target: $BUN_TARGET"
case "$BUN_TARGET" in
*windows*) bun build --compile --target "$BUN_TARGET" src/app.ts --outfile build/lish-backend.exe ;;
*) bun build --compile --target "$BUN_TARGET" src/app.ts --outfile build/lish-backend ;;
*windows*) bun build --compile --target "$BUN_TARGET" ./src/app.ts --outfile build/lish-backend.exe ;;
*) bun build --compile --target "$BUN_TARGET" ./src/app.ts --outfile build/lish-backend ;;
esac
else
bun build --compile src/app.ts --outfile build/lish-backend
bun build --compile ./src/app.ts --outfile build/lish-backend
fi
bun build ./src/lish/checksum-worker.ts --target bun --outfile build/lish/checksum-worker.js
7 changes: 2 additions & 5 deletions backend/src/app.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { dirname, join } from 'path';
import { pathToFileURL } from 'url';
import { productName, productVersion } from '@shared';
import { setupLogger, type LogLevel } from './logger.ts';
import { Networks } from './lishnet/lishnets.ts';
Expand All @@ -15,11 +16,7 @@ const args = process.argv.slice(2);
// Default dataDir: next to binary if compiled, otherwise ./data (relative to CWD)
const isCompiledBinary = process.execPath !== Bun.which('bun');
let dataDir = isCompiledBinary ? join(dirname(process.execPath), 'data') : './data';

// In compiled binaries, import.meta.url is always the binary path (/$bunfs/root/<binary>),
// so the worker is at ./lish/checksum-worker.js relative to it.
// In dev mode the default in lish.ts (./checksum-worker.ts relative to lish.ts) is correct.
if (isCompiledBinary) setWorkerUrl(new URL('./lish/checksum-worker.js', import.meta.url).href);
if (isCompiledBinary) setWorkerUrl(pathToFileURL(join(dirname(process.execPath), 'lish', 'checksum-worker.js')).href);

let logLevel: LogLevel = isCompiledBinary ? 'info' : 'debug';
let apiHost = 'localhost';
Expand Down
48 changes: 37 additions & 11 deletions backend/src/lish/lish.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,23 @@ import { calculateChecksum } from './checksum.ts';
import { Utils } from '../utils.ts';
import { type DataServer } from './data-server.ts';

// Worker URL for checksum-worker. Default works in dev mode (import.meta.url is the actual file URL).
// In compiled binaries, import.meta.url is always the binary path, so app.ts must call setWorkerUrl()
// with new URL('./lish/checksum-worker.js', import.meta.url).href before any LISH creation.
let _workerUrl: string = new URL('./checksum-worker.ts', import.meta.url).href;
// Cached at module load: Bun.which() walks PATH on every call, and this flag is consulted on
// every parallel-checksum invocation. Compiled binaries always have execPath !== Bun.which('bun').
const _isCompiledBinary = process.execPath !== Bun.which('bun');
const _canTerminateBusyWorkers = !(process.platform === 'win32' && _isCompiledBinary);

/** Override the checksum worker URL. Must be called from the main entrypoint (app.ts) in compiled mode. */
let _workerUrl: string | null = null;

/** Override the checksum worker URL for tests or external launchers. */
export function setWorkerUrl(url: string): void {
// Affects only workers created after this call (createChecksumWorker reads
// _workerUrl at construction time); existing workers are untouched.
_workerUrl = url;
}

// Construct a checksum worker: prefer a URL injected via setWorkerUrl(),
// otherwise fall back to the worker module that sits next to this file.
function createChecksumWorker(): Worker {
return _workerUrl
? new Worker(_workerUrl)
: new Worker(new URL('./checksum-worker.ts', import.meta.url));
}

// Helper to normalize paths to forward slashes
function normalizePath(p: string): string {
return p.replace(/\\/g, '/');
Expand Down Expand Up @@ -74,9 +81,15 @@ async function calculateChecksumsParallel(filePath: string, fileSize: number, ch
const totalChunks = Math.ceil(fileSize / chunkSize);
const cpuCount = maxWorkers > 0 ? maxWorkers : navigator.hardwareConcurrency || 1;
const workerCount = Math.min(cpuCount, totalChunks);
// Release every checksum worker.  Where terminate() is allowed
// (_canTerminateBusyWorkers, i.e. everything except win32 compiled binaries)
// workers are killed outright; otherwise they are unref()'d so they stop
// keeping the process alive and can wind down on their own.  NOTE(review):
// the win32-compiled exception presumably works around a terminate()-while-busy
// problem in that environment - confirm against the Bun issue that motivated it.
const releaseWorkers = (): void => {
// `workers` is declared just below; this closure only runs after it is populated.
for (const worker of workers) {
if (_canTerminateBusyWorkers) worker.terminate();
// Cast because unref() is a Bun/Node extension not on the DOM Worker type.
else (worker as Worker & { unref?: () => void }).unref?.();
}
};
// Create workers
const workers: Worker[] = [];
for (let i = 0; i < workerCount; i++) workers.push(new Worker(_workerUrl));
for (let i = 0; i < workerCount; i++) workers.push(createChecksumWorker());
let completedChunks = 0;
const results: string[] = new Array(totalChunks);
let nextChunk = 0;
Expand All @@ -87,14 +100,28 @@ async function calculateChecksumsParallel(filePath: string, fileSize: number, ch
function abortHandler(): void {
if (finished) return;
finished = true;
workers.forEach(w => w.terminate());
releaseWorkers();
rejectAll(new CodedError(ErrorCodes.LISH_CREATE_CANCELLED));
}
// Fail the whole parallel checksum run exactly once: flip the finished flag,
// detach the abort listener, release every worker, and reject with an Error
// (non-Error values are stringified and wrapped).
function failWorker(error: unknown): void {
if (finished) {
return;
}
finished = true;
signal?.removeEventListener('abort', abortHandler);
releaseWorkers();
let failure: Error;
if (error instanceof Error) {
failure = error;
} else {
failure = new Error(String(error));
}
rejectAll(failure);
}
if (signal?.aborted) {
abortHandler();
return;
}
signal?.addEventListener('abort', abortHandler, { once: true });
for (const worker of workers) {
worker.addEventListener('error', event => {
const message = event instanceof ErrorEvent ? event.message : 'checksum worker failed';
failWorker(new Error(message));
});
worker.addEventListener('messageerror', () => failWorker(new Error('checksum worker message could not be deserialized')));
}
function feedWorker(workerIndex: number): void {
if (finished) return;
if (nextChunk >= totalChunks) return;
Expand All @@ -106,8 +133,7 @@ async function calculateChecksumsParallel(filePath: string, fileSize: number, ch
worker.removeEventListener('message', handler);
if (finished) return;
if (event.data.error) {
finished = true;
rejectAll(new Error(event.data.error));
failWorker(new Error(event.data.error));
return;
}
results[chunkIndex] = event.data.checksum;
Expand All @@ -125,8 +151,8 @@ async function calculateChecksumsParallel(filePath: string, fileSize: number, ch
// Start one chunk per worker
for (let i = 0; i < workerCount; i++) feedWorker(i);
});
// Terminate workers
workers.forEach(w => w.terminate());
// Release workers via the platform-aware helper so the success path matches abort/error cleanup.
releaseWorkers();
return results;
}

Expand Down
Loading