|
| 1 | +import { ParallelHasher } from 'ts-md5'; |
| 2 | + |
| 3 | +import { toQueryString } from '../helpers'; |
| 4 | +import { Provider, providerByName } from './providers'; |
| 5 | +import { |
| 6 | + UploadDetails, |
| 7 | + UploadPartDetails, |
| 8 | + UploadResponse, |
| 9 | + UploadSignature, |
| 10 | +} from './types'; |
| 11 | +import { createUpload, Upload } from './upload'; |
| 12 | + |
// Base path for all upload endpoints on the engine API
const API_ENDPOINT = `/api/engine/v2/uploads`;

// Module-level auth state, set via setToken/setAppKey below.
// When an API key is in use, _token holds the sentinel value 'API_KEY'
// and _api_key carries the actual credential (see authHeader).
let _token: string;
let _api_key: string;
| 17 | + |
| 18 | +/** Set the authentication token for API requests */ |
| 19 | +export function setToken(token: string) { |
| 20 | + console.debug('[UPLOADS] Set a token'); |
| 21 | + _token = token; |
| 22 | +} |
| 23 | + |
| 24 | +/** Set the API key for API requests */ |
| 25 | +export function setAppKey(key: string) { |
| 26 | + console.debug('[UPLOADS] Set an API key'); |
| 27 | + _token = 'API_KEY'; |
| 28 | + _api_key = key; |
| 29 | +} |
| 30 | + |
| 31 | +function authHeader(): Record<string, string> { |
| 32 | + return _token === 'API_KEY' |
| 33 | + ? { 'x-api-key': _api_key } |
| 34 | + : { Authorization: `Bearer ${_token}` }; |
| 35 | +} |
| 36 | + |
| 37 | +function headers(): Record<string, string> { |
| 38 | + return { 'Content-Type': 'application/json', ...authHeader() }; |
| 39 | +} |
| 40 | + |
| 41 | +/** Get the provider for an upload based on file details */ |
| 42 | +export async function getProvider(details: UploadDetails): Promise<Provider> { |
| 43 | + const query = toQueryString(details); |
| 44 | + const result = await fetch(`${API_ENDPOINT}/new?${query}`, { |
| 45 | + headers: { ...headers() }, |
| 46 | + }); |
| 47 | + const data: { residence: string } = await result.json(); |
| 48 | + return providerByName(data.residence); |
| 49 | +} |
| 50 | + |
| 51 | +/** Create a new upload */ |
| 52 | +export async function createNewUpload( |
| 53 | + details: UploadDetails, |
| 54 | + file: File, |
| 55 | +): Promise<Upload> { |
| 56 | + console.debug(`[UPLOADS] Creating upload for ${file.name}...`); |
| 57 | + const result = await fetch(`${API_ENDPOINT}`, { |
| 58 | + method: 'POST', |
| 59 | + body: JSON.stringify(details), |
| 60 | + headers: { ...headers() }, |
| 61 | + }); |
| 62 | + const data: UploadResponse = await result.json(); |
| 63 | + const provider = providerByName(data.residence); |
| 64 | + |
| 65 | + // Handle direct uploads (small files) |
| 66 | + if (data.type === 'direct_upload') { |
| 67 | + console.debug(`[UPLOADS] Direct upload for ${file.name}`); |
| 68 | + return createUpload( |
| 69 | + data.upload_id, |
| 70 | + file, |
| 71 | + provider, |
| 72 | + '', |
| 73 | + true, |
| 74 | + data.signature, |
| 75 | + ); |
| 76 | + } |
| 77 | + console.debug(`[UPLOADS] Chunked upload for ${file.name}`); |
| 78 | + |
| 79 | + // Handle chunked uploads (large files) |
| 80 | + let resume_id = ''; |
| 81 | + // Initialise file in blob store if required |
| 82 | + if (data.signature.url) { |
| 83 | + const init_result = await fetch(data.signature.url, { |
| 84 | + method: data.signature.verb, |
| 85 | + headers: data.signature.headers, |
| 86 | + }); |
| 87 | + const provider_data = await init_result.text(); |
| 88 | + resume_id = provider.resume_id(provider_data); |
| 89 | + } |
| 90 | + console.debug( |
| 91 | + `[UPLOADS] Initialised upload for ${file.name} (${resume_id})`, |
| 92 | + ); |
| 93 | + return createUpload(data.upload_id, file, provider, resume_id, false); |
| 94 | +} |
| 95 | + |
| 96 | +/** Get signature for the first part of a chunked upload */ |
| 97 | +export async function preparePart( |
| 98 | + upload_id: string, |
| 99 | + resumable_id: string, |
| 100 | + part_id: number, |
| 101 | + part_hash: string, |
| 102 | +): Promise<UploadSignature> { |
| 103 | + console.debug( |
| 104 | + `[UPLOADS] Starting upload ${upload_id}, initialising part ${part_id}...`, |
| 105 | + ); |
| 106 | + const result = await fetch( |
| 107 | + `${API_ENDPOINT}/${upload_id}?part=${part_id}&file_id=${encodeURIComponent(part_hash)}`, |
| 108 | + { |
| 109 | + method: 'PATCH', |
| 110 | + body: JSON.stringify({ resumable_id }), |
| 111 | + headers: { ...headers() }, |
| 112 | + }, |
| 113 | + ); |
| 114 | + const data: UploadResponse = await result.json(); |
| 115 | + return data.signature; |
| 116 | +} |
| 117 | + |
| 118 | +/** Notify completion of a part and get signature for the next part */ |
| 119 | +export async function prepareNextPart( |
| 120 | + upload_id: string, |
| 121 | + next_part_id: number, |
| 122 | + next_part_hash: string, |
| 123 | + finished_parts: UploadPartDetails, |
| 124 | +): Promise<UploadSignature> { |
| 125 | + console.debug( |
| 126 | + `[UPLOADS] Finished parts for upload ${upload_id}(${finished_parts.part_list?.join(', ')})`, |
| 127 | + ); |
| 128 | + console.debug(`[UPLOADS] Initialising next part ${next_part_id}...`); |
| 129 | + const result = await fetch( |
| 130 | + `${API_ENDPOINT}/${upload_id}?part=${next_part_id}&file_id=${encodeURIComponent(next_part_hash)}`, |
| 131 | + { |
| 132 | + method: 'PATCH', |
| 133 | + body: JSON.stringify(finished_parts), |
| 134 | + headers: { ...headers() }, |
| 135 | + }, |
| 136 | + ); |
| 137 | + const data: UploadResponse = await result.json(); |
| 138 | + return data.signature; |
| 139 | +} |
| 140 | + |
| 141 | +/** Finalize all parts and get the commit signature */ |
| 142 | +export async function finishUpload( |
| 143 | + upload_id: string, |
| 144 | + parts: UploadPartDetails, |
| 145 | +): Promise<UploadSignature> { |
| 146 | + console.debug(`[UPLOADS] Finalising upload ${upload_id}...`); |
| 147 | + const result = await fetch(`${API_ENDPOINT}/${upload_id}?`, { |
| 148 | + method: 'PATCH', |
| 149 | + body: JSON.stringify(parts), |
| 150 | + headers: { ...headers() }, |
| 151 | + }); |
| 152 | + const data: UploadResponse = await result.json(); |
| 153 | + return { ...data.signature, body: data.body }; |
| 154 | +} |
| 155 | + |
| 156 | +/** Commit the upload in PlaceOS */ |
| 157 | +export async function commitUpload(upload_id: string): Promise<void> { |
| 158 | + console.debug(`[UPLOADS] Commiting upload ${upload_id}...`); |
| 159 | + await fetch(`${API_ENDPOINT}/${upload_id}`, { |
| 160 | + method: 'PUT', |
| 161 | + headers: { ...headers() }, |
| 162 | + }); |
| 163 | +} |
| 164 | + |
| 165 | +/////////////////////////////////////////////////////////////// |
| 166 | +///////////////////// Hashing Methods ///////////////////// |
| 167 | +/////////////////////////////////////////////////////////////// |
| 168 | + |
// Number of MD5 hashing web workers kept in the pool
const WORKER_COUNT = 3;
// Pool of hash workers; populated by setupHashWorkers
let _workers: ParallelHasher[] = [];
// Round-robin cursor used by nextHashWorker
let _index = -1;
// Indices of workers currently checked out via acquireHashWorker
let _busy_workers: Set<number> = new Set();
// Resolvers queued while every worker is busy; each is invoked with the
// index of a worker as it is released
let _worker_waiters: Array<(index: number) => void> = [];

// This allows the href of the MD5 worker to be configurable
// NOTE(review): exported as `const`, so importers cannot reassign it — the
// `url` parameter of setupHashWorkers is the actual configuration point
export const MD5_WORKER_URL: string = '/node_modules/ts-md5/dist/md5_worker.js';
| 177 | + |
| 178 | +/** Initialise hash workers */ |
| 179 | +export function setupHashWorkers( |
| 180 | + url: string = MD5_WORKER_URL, |
| 181 | + options?: WorkerOptions, |
| 182 | +) { |
| 183 | + console.debug('[UPLOADS] Setting up hash workers...'); |
| 184 | + if (_workers?.length > 0) _workers.forEach((_) => _.terminate()); |
| 185 | + _workers = []; |
| 186 | + _busy_workers.clear(); |
| 187 | + _worker_waiters = []; |
| 188 | + for (let i = 0; i < WORKER_COUNT; i += 1) { |
| 189 | + _workers.push(new ParallelHasher(url, options)); |
| 190 | + } |
| 191 | +} |
| 192 | + |
| 193 | +/** Get the next hash worker (simple round-robin, doesn't track availability) */ |
| 194 | +export function nextHashWorker() { |
| 195 | + _index += 1; |
| 196 | + _index = _index % WORKER_COUNT; |
| 197 | + return _workers[_index]; |
| 198 | +} |
| 199 | + |
| 200 | +/** Get the number of available hash workers */ |
| 201 | +export function getHashWorkerCount(): number { |
| 202 | + return _workers.length || WORKER_COUNT; |
| 203 | +} |
| 204 | + |
| 205 | +/** Acquire a free hash worker, waiting if all are busy */ |
| 206 | +export async function acquireHashWorker(): Promise<{ |
| 207 | + worker: ParallelHasher; |
| 208 | + index: number; |
| 209 | +}> { |
| 210 | + // Find a free worker |
| 211 | + for (let i = 0; i < _workers.length; i++) { |
| 212 | + if (!_busy_workers.has(i)) { |
| 213 | + _busy_workers.add(i); |
| 214 | + return { worker: _workers[i], index: i }; |
| 215 | + } |
| 216 | + } |
| 217 | + |
| 218 | + // All workers busy, wait for one to become free |
| 219 | + return new Promise((resolve) => { |
| 220 | + _worker_waiters.push((index: number) => { |
| 221 | + _busy_workers.add(index); |
| 222 | + resolve({ worker: _workers[index], index }); |
| 223 | + }); |
| 224 | + }); |
| 225 | +} |
| 226 | + |
| 227 | +/** Release a hash worker back to the pool */ |
| 228 | +export function releaseHashWorker(index: number): void { |
| 229 | + _busy_workers.delete(index); |
| 230 | + |
| 231 | + // If anyone is waiting for a worker, give them this one |
| 232 | + if (_worker_waiters.length > 0) { |
| 233 | + const waiter = _worker_waiters.shift()!; |
| 234 | + waiter(index); |
| 235 | + } |
| 236 | +} |
| 237 | + |
| 238 | +/////////////////////////////////////////////////////////////// |
| 239 | +////////////////////// Helper Methods ///////////////////// |
| 240 | +/////////////////////////////////////////////////////////////// |
| 241 | + |
| 242 | +export function hexToBinary(input: string) { |
| 243 | + let result = ''; |
| 244 | + if (input.length % 2 > 0) input = '0' + input; |
| 245 | + for (let i = 0, length = input.length; i < length; i += 2) { |
| 246 | + result += String.fromCharCode(parseInt(input.slice(i, i + 2), 16)); |
| 247 | + } |
| 248 | + return result; |
| 249 | +} |
0 commit comments