mirror of
https://github.com/philomena-dev/philomena.git
synced 2025-02-01 03:46:44 +01:00
More routes
This commit is contained in:
parent
55e4e582f1
commit
9c51433550
9 changed files with 134 additions and 46 deletions
|
@ -1,16 +1,8 @@
|
|||
/// <reference lib="WebWorker" />
|
||||
|
||||
import { wait, json, u8Array } from 'utils/async';
|
||||
import { evenlyDivide } from 'utils/array';
|
||||
import { fetchBackoff } from 'utils/requests';
|
||||
import { Zip } from 'utils/zip';
|
||||
|
||||
declare const self: ServiceWorkerGlobalScope;
|
||||
|
||||
const wait = (ms: number): Promise<void> => new Promise(resolve => setTimeout(resolve, ms));
|
||||
const buffer = (blob: Blob) => blob.arrayBuffer().then(buf => new Uint8Array(buf));
|
||||
const json = (resp: Response) => resp.json();
|
||||
const blob = (resp: Response) => resp.blob();
|
||||
|
||||
interface Image {
|
||||
id: number;
|
||||
name: string;
|
||||
|
@ -22,8 +14,8 @@ interface PageResult {
|
|||
total: number;
|
||||
}
|
||||
|
||||
function handleStream(event: FetchEvent, url: URL): void {
|
||||
const concurrency = parseInt(url.searchParams.get('concurrency') || '1', 5);
|
||||
export function handleBulk(event: FetchEvent, url: URL): void {
|
||||
const concurrency = parseInt(url.searchParams.get('concurrency') || '1', 10);
|
||||
const queryString = url.searchParams.get('q');
|
||||
const failures = [];
|
||||
const zipper = new Zip();
|
||||
|
@ -39,18 +31,20 @@ function handleStream(event: FetchEvent, url: URL): void {
|
|||
pull(controller) {
|
||||
// Path to fetch next
|
||||
const nextQuery = encodeURIComponent(`(${queryString}),id.lte:${maxId}`);
|
||||
const consumer = (buf: Uint8Array) => controller.enqueue(buf);
|
||||
|
||||
return fetchBackoff(`/search/download?q=${nextQuery}`)
|
||||
.then(json)
|
||||
.then(({ images, total }: PageResult): Promise<void> => {
|
||||
if (total === 0) {
|
||||
// Done, no results left
|
||||
// Finalize zip and close stream to prevent any further pulls
|
||||
return buffer(zipper.finalize())
|
||||
.then(buf => {
|
||||
controller.enqueue(buf);
|
||||
// Finalize zip
|
||||
zipper.finalize(consumer);
|
||||
|
||||
// Close stream
|
||||
controller.close();
|
||||
});
|
||||
|
||||
// Done
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
// Decrease maximum ID for next round below current minimum
|
||||
|
@ -74,9 +68,8 @@ function handleStream(event: FetchEvent, url: URL): void {
|
|||
// eslint-disable-next-line camelcase
|
||||
for (const { name, view_url } of images) {
|
||||
promise = promise
|
||||
.then(() => fetchBackoff(view_url)).then(blob).then(buffer)
|
||||
.then(file => zipper.storeFile(name, file.buffer)).then(buffer)
|
||||
.then(entry => controller.enqueue(entry))
|
||||
.then(() => fetchBackoff(view_url).then(u8Array))
|
||||
.then(file => zipper.storeFile(name, file.buffer, consumer))
|
||||
.catch(() => { failures.push(view_url); });
|
||||
}
|
||||
|
||||
|
@ -92,15 +85,3 @@ function handleStream(event: FetchEvent, url: URL): void {
|
|||
}
|
||||
}));
|
||||
}
|
||||
|
||||
self.addEventListener('fetch', event => {
|
||||
const url = new URL(event.request.url);
|
||||
|
||||
// Streaming path
|
||||
if (url.pathname === '/js/stream') return handleStream(event, url);
|
||||
|
||||
// Otherwise, not destined for us
|
||||
return event.respondWith(fetch(event.request));
|
||||
});
|
||||
|
||||
export default null;
|
29
assets/js/sw/download.ts
Normal file
29
assets/js/sw/download.ts
Normal file
|
@ -0,0 +1,29 @@
|
|||
import { escapeFilename, ifOk } from 'utils/requests';
|
||||
|
||||
function allowedOrigin(target: string): boolean {
|
||||
const selfUrl = new URL(self.location.toString());
|
||||
const cdnHost = selfUrl.searchParams.get('cdn');
|
||||
|
||||
return new URL(target).hostname === cdnHost;
|
||||
}
|
||||
|
||||
export function handleDownload(event: FetchEvent, url: URL): void {
|
||||
const target = url.searchParams.get('target');
|
||||
const name = url.searchParams.get('name');
|
||||
|
||||
if (!target || !name || !allowedOrigin(target)) {
|
||||
return event.respondWith(new Response('Don\'t know what to download!', { status: 400 }));
|
||||
}
|
||||
|
||||
const generateResponse = ifOk((upstream: Response) => {
|
||||
const headers = new Headers(upstream.headers);
|
||||
|
||||
headers.set('content-disposition', `attachment; filename="${escapeFilename(name)}"`);
|
||||
|
||||
return new Response(upstream.body, { headers });
|
||||
});
|
||||
|
||||
fetch(target)
|
||||
.then(generateResponse)
|
||||
.then(event.respondWith);
|
||||
}
|
7
assets/js/sw/tsconfig.json
Normal file
7
assets/js/sw/tsconfig.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"extends": "../../tsconfig",
|
||||
"compilerOptions": {
|
||||
"lib": ["WebWorker"]
|
||||
},
|
||||
"include": ["*.ts"]
|
||||
}
|
18
assets/js/sw/worker.ts
Normal file
18
assets/js/sw/worker.ts
Normal file
|
@ -0,0 +1,18 @@
|
|||
import { handleBulk } from './bulk';
|
||||
import { handleDownload } from './download';
|
||||
|
||||
// Declarations for TypeScript
|
||||
declare const self: ServiceWorkerGlobalScope;
|
||||
export default null;
|
||||
|
||||
/**
|
||||
* Performs routing under the ServiceWorker path scope.
|
||||
*/
|
||||
self.addEventListener('fetch', event => {
|
||||
const url = new URL(event.request.url);
|
||||
|
||||
if (url.pathname === '/js/stream') return handleBulk(event, url);
|
||||
if (url.pathname === '/js/download') return handleDownload(event, url);
|
||||
|
||||
return event.respondWith(fetch(event.request));
|
||||
});
|
17
assets/js/utils/async.ts
Normal file
17
assets/js/utils/async.ts
Normal file
|
@ -0,0 +1,17 @@
|
|||
/*
|
||||
* Miscellaneous utilities for asynchronous code.
|
||||
*/
|
||||
|
||||
export function wait(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export function json(resp: Response): Promise<any> {
|
||||
return resp.json();
|
||||
}
|
||||
|
||||
export function u8Array(resp: Response): Promise<Uint8Array> {
|
||||
return resp
|
||||
.arrayBuffer()
|
||||
.then(buf => new Uint8Array(buf));
|
||||
}
|
|
@ -2,6 +2,8 @@
|
|||
* Request Utils
|
||||
*/
|
||||
|
||||
import { wait } from './async';
|
||||
|
||||
function fetchJson(verb, endpoint, body) {
|
||||
const data = {
|
||||
method: verb,
|
||||
|
@ -52,13 +54,37 @@ function fetchBackoff(...fetchArgs) {
|
|||
return fetch(...fetchArgs)
|
||||
.then(handleError)
|
||||
.catch(() =>
|
||||
new Promise(resolve =>
|
||||
setTimeout(() => resolve(fetchBackoffTimeout(newTimeout)), timeout)
|
||||
)
|
||||
wait(timeout).then(fetchBackoffTimeout(newTimeout))
|
||||
);
|
||||
}
|
||||
|
||||
return fetchBackoffTimeout(5000);
|
||||
}
|
||||
|
||||
export { fetchJson, fetchHtml, fetchBackoff, handleError };
|
||||
/**
 * Escape a filename for inclusion in a Content-Disposition
 * response header.
 *
 * @param {string} name
 * @returns {string}
 */
function escapeFilename(name) {
  return name
    // The `g` flag is required: without it only the FIRST unsafe
    // character is replaced, letting quotes/separators leak into the
    // Content-Disposition header value.
    .replace(/[^-_+a-zA-Z0-9]/g, '_')
    // Keep the suggested filename to a sane length.
    .substring(0, 150);
}
|
||||
|
||||
/**
 * Run the wrapped function if the response was okay,
 * otherwise return the response unchanged.
 *
 * @param {(_: Response) => Response} responseGenerator
 * @returns {(_: Response) => Response}
 */
function ifOk(responseGenerator) {
  return resp => {
    // Only transform successful (2xx) responses; error responses pass
    // through so callers can surface the upstream failure. The generator
    // already constructs the Response — wrapping its result in another
    // `new Response(...)` would stringify the object into the body.
    if (resp.ok) return responseGenerator(resp);
    return resp;
  };
}
|
||||
|
||||
export { fetchJson, fetchHtml, fetchBackoff, handleError, escapeFilename, ifOk };
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
import { crc32, asciiEncode, serialize } from './binary';
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
type U8Consumer = (_: Uint8Array) => void;
|
||||
|
||||
interface FileInfo {
|
||||
headerOffset: number;
|
||||
byteLength: number;
|
||||
|
@ -18,7 +21,7 @@ export class Zip {
|
|||
this.offset = 0;
|
||||
}
|
||||
|
||||
storeFile(name: string, file: ArrayBuffer): Blob {
|
||||
storeFile(name: string, file: ArrayBuffer, consumer: U8Consumer): void {
|
||||
const crc = crc32(file);
|
||||
const ns = asciiEncode(name);
|
||||
|
||||
|
@ -51,11 +54,14 @@ export class Zip {
|
|||
]);
|
||||
|
||||
this.offset += header.byteLength + ns.byteLength + localField.byteLength + file.byteLength;
|
||||
return new Blob([header, ns, localField, file]);
|
||||
|
||||
consumer(new Uint8Array(header));
|
||||
consumer(new Uint8Array(ns));
|
||||
consumer(new Uint8Array(localField));
|
||||
consumer(new Uint8Array(file));
|
||||
}
|
||||
|
||||
finalize(): Blob {
|
||||
const segments = [];
|
||||
finalize(consumer: U8Consumer): void {
|
||||
const cdOff = this.offset;
|
||||
let numFiles = 0;
|
||||
|
||||
|
@ -91,7 +97,10 @@ export class Zip {
|
|||
]);
|
||||
|
||||
this.offset += cdEntry.byteLength + info.name.byteLength + cdField.byteLength;
|
||||
segments.push(cdEntry, info.name, cdField);
|
||||
|
||||
consumer(new Uint8Array(cdEntry));
|
||||
consumer(new Uint8Array(info.name));
|
||||
consumer(new Uint8Array(cdField));
|
||||
|
||||
numFiles++;
|
||||
}
|
||||
|
@ -129,8 +138,9 @@ export class Zip {
|
|||
]);
|
||||
|
||||
this.offset += endCd64.byteLength + endLoc64.byteLength + endCd.byteLength;
|
||||
segments.push(endCd64, endLoc64, endCd);
|
||||
|
||||
return new Blob(segments);
|
||||
consumer(new Uint8Array(endCd64));
|
||||
consumer(new Uint8Array(endLoc64));
|
||||
consumer(new Uint8Array(endCd));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,7 +60,7 @@ module.exports = {
|
|||
mode: isDevelopment ? 'development' : 'production',
|
||||
entry: {
|
||||
'js/app.js': './js/app.js',
|
||||
'js/worker.js': './js/worker.ts',
|
||||
'js/worker.js': './js/sw/worker.ts',
|
||||
...themes
|
||||
},
|
||||
output: {
|
||||
|
|
|
@ -53,7 +53,7 @@ defmodule PhilomenaWeb.LayoutView do
|
|||
interactions: Jason.encode!(interactions),
|
||||
ignored_tag_list: Jason.encode!(ignored_tag_list(conn.assigns[:tags])),
|
||||
hide_staff_tools: conn.cookies["hide_staff_tools"],
|
||||
worker_path: Routes.static_path(conn, "/js/worker.js")
|
||||
worker_path: Routes.static_path(conn, "/js/worker.js") <> "?cdn=" <> cdn_host()
|
||||
]
|
||||
|
||||
data = Keyword.merge(data, extra)
|
||||
|
|
Loading…
Reference in a new issue