diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..e245f1a
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,44 @@
+name: build
+
+on:
+  push:
+    tags:
+      - "v*"
+
+permissions:
+  packages: write
+  contents: write
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        targets:
+          - x86_64-unknown-linux-gnu
+          - aarch64-unknown-linux-gnu
+          - x86_64-pc-windows-msvc
+          - x86_64-apple-darwin
+          - aarch64-apple-darwin
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Setup Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: v2.x
+
+      - name: Build
+        run: deno compile --target ${{matrix.targets}} --allow-write --allow-net --allow-read --include worker.ts main.ts
+
+      - name: Create Release and Upload Release Asset
+        uses: softprops/action-gh-release@v1
+        with:
+          tag_name: ${{ github.ref }}
+          name: Fast Down ${{ github.ref }}
+          body: Fixed some known issues
+          draft: false
+          prerelease: false
+          files: |
+            fast-down
+            fast-down.exe
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..17aff5b
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,28 @@
+name: test
+
+on:
+  push:
+    branches:
+      - main
+    paths-ignore:
+      - "docs/**"
+      - "README.md"
+  pull_request:
+    paths-ignore:
+      - "docs/**"
+      - "README.md"
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Setup Deno
+        uses: denoland/setup-deno@v2
+        with:
+          deno-version: v2.x
+
+      - name: Test
+        run: deno test --allow-write --allow-net --allow-read
diff --git a/deno.json b/deno.json
index b206dc5..c366228 100644
--- a/deno.json
+++ b/deno.json
@@ -1,6 +1,6 @@
 {
   "tasks": {
-    "build": "deno compile --allow-write --allow-net --allow-read --include worker.ts main.ts "
+    "build": "deno compile --allow-write --allow-net --allow-read --include worker.ts main.ts"
   },
   "compilerOptions": {
     "lib": ["deno.window", "deno.worker"]
diff --git a/main.ts b/main.ts
index 7ba9e30..a7f5846 100644
--- a/main.ts
+++ b/main.ts
@@ -1,12 +1,12 @@
 import { dirname, join, basename } from "jsr:@std/path";
 import { Mutex } from "npm:async-mutex";
-import { exec } from "./workerpool.ts";
+import { downloadChunk } from "./workerpool.ts";
 
 export async function download(
   url: string,
   filename: string,
   threads: number,
-  chunkSize = 10 * 1024 * 1024,
+  chunkSize = 5 * 1024 * 1024,
   headers: HeadersInit = {}
 ) {
   async function writeFile(buf: ArrayBuffer, pos: number) {
@@ -44,14 +44,13 @@ export async function download(
   const contentLength = await getContentLength();
   const chunkCount = Math.ceil(contentLength / chunkSize);
   const chunks: Chunk[] = Array.from({ length: chunkCount }, (_, i) => ({
-    url,
     start: i * chunkSize,
     end: Math.min((i + 1) * chunkSize, contentLength) - 1,
-    headers,
   }));
-  await exec(
+  await downloadChunk(
     threads,
-    () => new Worker(import.meta.resolve("./worker.ts"), { type: "module" }),
+    url,
+    headers,
     chunks,
     async (i) => {
       await writeFile(i.data, i.origin.start);
@@ -66,10 +65,8 @@
 }
 
 export interface Chunk {
-  url: string;
   start: number;
   end: number;
-  headers: HeadersInit;
 }
 
 if (import.meta.main) {
diff --git a/main_test.ts b/main_test.ts
index f020a89..7262909 100644
--- a/main_test.ts
+++ b/main_test.ts
@@ -1,9 +1,8 @@
 import { crypto } from "jsr:@std/crypto";
-import { download } from "./main.ts";
-import { join } from "jsr:@std/path/join";
-import { basename } from "jsr:@std/path/basename";
+import { join, basename } from "jsr:@std/path";
 import { encodeHex } from "jsr:@std/encoding/hex";
 import { assertEquals } from "jsr:@std/assert";
+import { download } from "./main.ts";
 
 Deno.test("Download ISO file test", async () => {
   const url =
diff --git a/worker.ts b/worker.ts
index 5c6feba..8b01cb8 100644
--- a/worker.ts
+++ b/worker.ts
@@ -1,13 +1,20 @@
 import type { Chunk } from "./main.ts";
 
-addEventListener("message", async ({ data }: MessageEvent<Chunk[]>) => {
-  for (const item of data) {
-    const r = await fetch(item.url, {
-      headers: { Range: `bytes=${item.start}-${item.end}` },
+interface EventData {
+  url: string;
+  headers: HeadersInit;
+  chunks: Chunk[];
+}
+
+addEventListener("message", async ({ data }: MessageEvent<EventData>) => {
+  for (const item of data.chunks) {
+    const r = await fetch(data.url, {
+      headers: {
+        ...data.headers,
+        Range: `bytes=${item.start}-${item.end}`,
+      },
     });
-    if (r.status !== 206) {
-      throw new Error(`Invalid status code ${r.status}"}`);
-    }
+    if (r.status !== 206) throw new Error(`Invalid status code ${r.status}`);
     const buf = await r.arrayBuffer();
     self.postMessage(buf, [buf]);
   }
diff --git a/workerpool.ts b/workerpool.ts
index eff15a8..738ca03 100644
--- a/workerpool.ts
+++ b/workerpool.ts
@@ -1,13 +1,16 @@
 // deno-lint-ignore-file ban-ts-comment
-export function exec<T, R>(
+export function downloadChunk<T, R>(
   threadCount: number,
-  workerFactory: () => Worker,
+  url: string,
+  headers: HeadersInit,
   data: T[],
   onProgress: (data: { origin: T; data: R; index: number }) => void,
   maxRetries: number = 3
 ) {
   if (data.length < 1) return Promise.resolve([]);
   if (threadCount < 1) throw new Error("threadCount must be greater than 0");
+  const workerFactory = () =>
+    new Worker(import.meta.resolve("./worker.ts"), { type: "module" });
   return new Promise((resolve, reject) => {
     const baseChunkCount = Math.floor(data.length / threadCount);
     const remainingChunks = data.length % threadCount;
@@ -83,9 +86,11 @@
           splitPoint;
         printWorkerData(workerData, i);
         printWorkerData(targetWorker, targetWorkerIndex);
-        workerData.worker.postMessage(
-          data.slice(splitPoint, workerData.endChunk)
-        );
+        workerData.worker.postMessage({
+          url,
+          headers,
+          chunks: data.slice(splitPoint, workerData.endChunk),
+        });
       };
       const errorHandel = (err: ErrorEvent) => {
         if (workerData.retryCount >= maxRetries) {
@@ -97,13 +102,19 @@
         workerData.retryCount++;
         workerData.stolen = false;
         printWorkerData(workerData, i, "try: ");
-        workerData.worker.postMessage(
-          data.slice(workerData.currentChunk, workerData.endChunk)
-        );
+        workerData.worker.postMessage({
+          url,
+          headers,
+          chunks: data.slice(workerData.currentChunk, workerData.endChunk),
+        });
       };
       workerData.worker.addEventListener("message", messageHandle);
       workerData.worker.addEventListener("error", errorHandel);
-      workerData.worker.postMessage(data.slice(startChunk, endChunk));
+      workerData.worker.postMessage({
+        url,
+        headers,
+        chunks: data.slice(startChunk, endChunk),
+      });
     }
   });
 }
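Note: with this change, worker.ts no longer receives a bare Chunk[]; each postMessage carries the url and headers once per message next to the chunk list, and chunks themselves shrink to { start, end }. A minimal sketch of the new message shape follows; the URL, header value, and byte ranges are made-up placeholders, only the field names (url, headers, chunks) and the ArrayBuffer reply come from the diff.

import type { Chunk } from "./main.ts";

const worker = new Worker(import.meta.resolve("./worker.ts"), { type: "module" });

// Two 5 MiB ranges, matching the new default chunkSize in main.ts.
const chunks: Chunk[] = [
  { start: 0, end: 5 * 1024 * 1024 - 1 },
  { start: 5 * 1024 * 1024, end: 10 * 1024 * 1024 - 1 },
];

// Shape mirrors the EventData interface added in worker.ts; values are placeholders.
worker.postMessage({
  url: "https://example.com/file.iso",
  headers: { "User-Agent": "fast-down" },
  chunks,
});

// The worker answers with one transferred ArrayBuffer per chunk, in order.
worker.addEventListener("message", ({ data }: MessageEvent<ArrayBuffer>) => {
  console.log(`received ${data.byteLength} bytes`);
});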