import { createElement, createFragment, createRender } from "../../lib/skeleton/index.js";
import { animate, slideYOut } from "../../lib/animate.js";
import rxjs, { effect, onClick } from "../../lib/rx.js";
import { loadCSS } from "../../helpers/loader.js";
import { qs } from "../../lib/dom.js";
import { AjaxError } from "../../lib/error.js";
import assert from "../../lib/assert.js";

import { currentPath, isNativeFileUpload } from "./helper.js";
import { mkdir, save } from "./model_virtual_layer.js";

import t from "../../locales/index.js";

const workers$ = new rxjs.BehaviorSubject({ tasks: [], size: null });
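// This controller wires 3 components together:
// - componentFilezone: turns the file listing into a drag & drop target
// - componentUploadFAB: an "upload" button backed by an <input type="file">
// - componentUploadQueue: a single, body-level widget showing upload progress
// Producers push batches of work onto workers$ as { tasks, size }; the queue
// component drains them with up to MAX_WORKERS concurrent executors.
// Sketch of what a producer emits (values are illustrative only):
//   workers$.next({
//       tasks: [{ type: "file", path, entry, exec: workerImplFile, virtual: save(path, size) }],
//       size,
//   });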
export default function(render) {
    // minimal placeholder markup: the only structural requirement is two children,
    // children[0] for componentFilezone and children[1] for componentUploadFAB
    const $page = createFragment(`
        <div></div>
        <div></div>
    `);
    if (!document.querySelector(`[is="component_upload_queue"]`)) {
        const $queue = createElement(`<div is="component_upload_queue"></div>`);
        document.body.appendChild($queue);
        componentUploadQueue(createRender($queue), { workers$ });
    }
    componentFilezone(createRender($page.children[0]), { workers$ });
    componentUploadFAB(createRender($page.children[1]), { workers$ });
    render($page);
}

export function init() {
    return loadCSS(import.meta.url, "./ctrl_upload.css");
}
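// The FAB (floating action button) is the non drag & drop entry point: files
// picked from its <input type="file"> feed the same workers$ stream as a drop.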
function componentUploadFAB(render, { workers$ }) {
    // minimal placeholder markup: only the file input is required by the handler below
    const $page = createElement(`
        <div class="component_upload_fab">
            <input type="file" multiple />
        </div>
    `);
    effect(rxjs.fromEvent(qs($page, `input[type="file"]`), "change").pipe(
        rxjs.tap(async (e) => workers$.next(await processFiles(e.target.files))),
    ));
    render($page);
}
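// componentFilezone decorates the existing file listing (not its own markup)
// with drag & drop: entering with native files highlights the zone, dropping
// expands the payload into upload tasks, leaving the listing clears the
// highlight. A "LOADING" placeholder is only rendered when expanding the
// payload takes more than 2 seconds (eg: dropping a large folder tree).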
function componentFilezone(render, { workers$ }) {
    const selector = `[data-bind="filemanager-children"]`;
    const $target = document.body.querySelector(selector);
    $target.ondragenter = (e) => {
        if (!isNativeFileUpload(e)) return;
        $target.classList.add("dropzone");
    };
    $target.ondrop = async (e) => {
        if (!isNativeFileUpload(e)) return;
        $target.classList.remove("dropzone");
        e.preventDefault();
        // placeholder loading markup, only shown for slow payload expansion
        const loadID = setTimeout(() => render(createElement("<div>LOADING</div>")), 2000);
        if (e.dataTransfer.items instanceof window.DataTransferItemList) {
            workers$.next(await processItems(e.dataTransfer.items));
        } else if (e.dataTransfer.files instanceof window.FileList) {
            workers$.next(await processFiles(e.dataTransfer.files));
        } else {
            assert.fail("NOT_IMPLEMENTED - unknown dataTransfer type in ctrl_upload.js", e.dataTransfer);
        }
        clearTimeout(loadID);
        render(createFragment(""));
    };
    $target.ondragleave = (e) => {
        if (!isNativeFileUpload(e)) return;
        if (!(e.relatedTarget === null || // eg: drag outside the window
            !e.relatedTarget.closest(selector) // eg: drag on the breadcrumb, ...
        )) return;
        $target.classList.remove("dropzone");
    };
    $target.ondragover = (e) => e.preventDefault();
}

const MAX_WORKERS = 4;
function componentUploadQueue(render, { workers$ }) {
    // minimal placeholder markup; the DOM helpers below rely on this exact child
    // order: [0] header with the close icon, [1] title text node followed by the
    // speed element, [2] the ".stats_content" container holding one row per task
    const $page = createElement(`
        <div class="component_upload_queue hidden">
            <h2>${t("Upload")} <img class="component_icon" draggable="false" alt="close" /></h2>
            <h3>${t("Running")}... <span class="speed"></span></h3>
            <div class="stats_content"></div>
        </div>
    `);
    render($page);
    const $content = qs($page, ".stats_content");
    // template for a single task row, cloned for every queued upload:
    // [0] filename + speed, [1] progress / status, [2] cancel control
    const $file = createElement(`
        <div class="upload_row todo_color">
            <div class="file_path"><span class="filename"></span><span class="speed"></span></div>
            <div class="file_state"></div>
            <div class="file_control"></div>
        </div>
    `);

    // feature1: close the queue
    onClick(qs($page, `img[alt="close"]`)).pipe(
        // rxjs.mergeMap(() => animate($page, { time: 200, keyframes: slideYOut(50) })),
        rxjs.tap(() => $page.classList.add("hidden")),
    ).subscribe();

    // feature2: setup the task queue in the dom
    workers$.subscribe(({ tasks }) => {
        if (tasks.length === 0) return;
        $page.classList.remove("hidden");
        const $fragment = document.createDocumentFragment();
        for (let i = 0; i < tasks.length; i++) {
            // one row per task; data-path is the lookup key used by the scheduler below
            const $task = $file.cloneNode(true);
            $task.setAttribute("data-path", tasks[i].path);
            $task.firstElementChild.firstElementChild.textContent = tasks[i].path;
            $fragment.appendChild($task);
        }
        $content.appendChild($fragment);
    });

    // placeholder stop icon (no asset wired here); it only needs to be clickable
    const $icon = createElement(`<img class="component_icon" draggable="false" alt="stop" />`);
    const $close = qs($page, `img[alt="close"]`);
    const updateDOMTaskProgress = ($task, text) => $task.firstElementChild.nextElementSibling.textContent = text;
    const updateDOMTaskSpeed = ($task, text) => $task.firstElementChild.firstElementChild.nextElementSibling.textContent = formatSpeed(text);
    const updateDOMGlobalSpeed = function(workersSpeed) {
        let last = 0;
        return (nworker, currentWorkerSpeed) => {
            workersSpeed[nworker] = currentWorkerSpeed;
            if (new Date() - last <= 500) return; // throttle the DOM update
            last = new Date();
            const speed = workersSpeed.reduce((acc, el) => acc + el, 0);
            const $speed = $page.firstElementChild.nextElementSibling.firstElementChild;
            $speed.textContent = formatSpeed(speed);
        };
    }(new Array(MAX_WORKERS).fill(0));
    const updateDOMGlobalTitle = ($page, text) => $page.firstElementChild.nextElementSibling.childNodes[0].textContent = text;
    const updateDOMWithStatus = ($task, { status, exec, nworker }) => {
        const cancel = () => exec.cancel();
        switch (status) {
        case "todo":
            break;
        case "doing": {
            updateDOMTaskProgress($task, formatPercent(0));
            const $stop = $icon.cloneNode(true);
            $task.firstElementChild.nextElementSibling.nextElementSibling.appendChild($stop);
            $stop.onclick = () => {
                cancel();
                $task.firstElementChild.nextElementSibling.nextElementSibling.classList.add("hidden");
            };
            $close.addEventListener("click", cancel);
            break;
        }
        case "done":
            updateDOMGlobalSpeed(nworker, 0);
            updateDOMTaskProgress($task, t("Done"));
            updateDOMTaskSpeed($task, 0);
            $task.removeAttribute("data-path");
            $task.classList.remove("todo_color");
            $task.firstElementChild.nextElementSibling.nextElementSibling.classList.add("hidden");
            $close.removeEventListener("click", cancel);
            break;
        case "error":
            updateDOMGlobalTitle($page, t("Error")); // TODO: only apply if err is not abort type
            updateDOMGlobalSpeed(nworker, 0);
            updateDOMTaskProgress($task, t("Error"));
            updateDOMTaskSpeed($task, 0);
            $task.removeAttribute("data-path");
            $task.classList.remove("todo_color");
            $task.classList.add("error_color");
            $close.removeEventListener("click", cancel);
            break;
        default:
            assert.fail(`UNEXPECTED_STATUS status="${status}" path="${$task.getAttribute("data-path")}"`);
        }
    };
    let tasks = [];
    const reservations = new Array(MAX_WORKERS).fill(false);
    const processWorkerQueue = async (nworker) => {
        while (tasks.length > 0) {
            updateDOMGlobalTitle($page, t("Running") + "...");
            const task = tasks.shift();
            const $task = qs($page, `[data-path="${task.path}"]`);
            const exec = task.exec({
                error: (err) => updateDOMWithStatus($task, { status: "error", nworker }),
                progress: (progress) => updateDOMTaskProgress($task, formatPercent(progress)),
                speed: (speed) => {
                    updateDOMTaskSpeed($task, speed);
                    updateDOMGlobalSpeed(nworker, speed);
                },
            });
            updateDOMWithStatus($task, { exec, status: "doing", nworker });
            await exec.run(task);
            updateDOMWithStatus($task, { exec, status: "done", nworker });
            if (tasks.length === 0 // no remaining tasks
                && reservations.filter((t) => t === true).length === 1 // only for the last remaining job
            ) updateDOMGlobalTitle($page, t("Done"));
        }
    };
    const noFailureAllowed = (fn) => fn().catch(() => noFailureAllowed(fn));
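    // feature3: schedule the workers. Every emission on workers$ tops up the
    // shared task list, then fills every idle slot of the pool. noFailureAllowed
    // restarts a worker loop whenever one of its tasks rejects (abort, network
    // error, ...) so a single failure never stalls the queue.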
    workers$.subscribe(async ({ tasks: newTasks }) => {
        tasks = tasks.concat(newTasks); // add new tasks to the pool
        while (true) {
            const nworker = reservations.indexOf(false);
            if (nworker === -1) break; // the pool of workers is already at its max
            reservations[nworker] = true;
            noFailureAllowed(processWorkerQueue.bind(this, nworker)).then(() => reservations[nworker] = false);
        }
    });
}

class IExecutor {
    constructor() {}

    cancel() {
        throw new Error("NOT_IMPLEMENTED");
    }

    run() {
        throw new Error("NOT_IMPLEMENTED");
    }
}

function workerImplFile({ error, progress, speed }) {
    return new class Worker extends IExecutor {
        constructor() {
            super();
            this.xhr = null;
            this.prevProgress = [];
        }

        cancel() {
            this.xhr.abort();
        }

        async run({ entry, path, virtual }) {
            return new Promise((done, err) => {
                this.xhr = new XMLHttpRequest();
                this.xhr.open("POST", "api/files/cat?path=" + encodeURIComponent(path));
                this.xhr.withCredentials = true;
                this.xhr.setRequestHeader("X-Requested-With", "XmlHttpRequest");
                this.xhr.upload.onabort = () => {
                    err(new AjaxError("aborted", null, "ABORTED"));
                    error(new AjaxError("aborted", null, "ABORTED"));
                };
                this.xhr.upload.onprogress = (e) => {
                    if (!e.lengthComputable) return;
                    const percent = Math.floor(100 * e.loaded / e.total);
                    progress(percent);
                    if (this.prevProgress.length === 0) {
                        this.prevProgress.push(e);
                        return;
                    }
                    this.prevProgress.push(e);
                    const calculateTime = (p1, pm1) => (p1.timeStamp - pm1.timeStamp) / 1000;
                    const calculateBytes = (p1, pm1) => p1.loaded - pm1.loaded;
                    const lastIdx = this.prevProgress.length - 1;
                    // average the instantaneous speed over the recorded progress events
                    let avgSpeed = 0;
                    for (let i = 1; i <= lastIdx; i++) {
                        avgSpeed += calculateBytes(this.prevProgress[i], this.prevProgress[i - 1]) /
                            calculateTime(this.prevProgress[i], this.prevProgress[i - 1]);
                    }
                    speed(avgSpeed / lastIdx);
                    // keep only a ~5s sliding window of progress events
                    if (this.prevProgress[lastIdx].timeStamp - this.prevProgress[0].timeStamp > 5000) {
                        this.prevProgress.shift();
                    }
                };
                this.xhr.onload = () => {
                    progress(100);
                    virtual.afterSuccess();
                    done();
                };
                this.xhr.onerror = function(e) {
                    err(new AjaxError("failed", e, "FAILED"));
                    virtual.afterError();
                };
                entry.file(
                    (file) => this.xhr.send(file),
                    (err) => this.xhr.onerror(err),
                );
            });
        }
    };
}

function workerImplDirectory({ error, progress }) {
    return new class Worker extends IExecutor {
        constructor() {
            super();
            this.xhr = null;
        }

        cancel() {
            this.xhr.abort();
        }

        run({ virtual, path }) {
            return new Promise((done, err) => {
                this.xhr = new XMLHttpRequest();
                this.xhr.open("POST", "api/files/mkdir?path=" + encodeURIComponent(path));
                this.xhr.withCredentials = true;
                this.xhr.setRequestHeader("X-Requested-With", "XmlHttpRequest");
                this.xhr.onerror = function(e) {
                    err(new AjaxError("failed", e, "FAILED"));
                };
                // mkdir has no upload body so there is no real progress to report;
                // fake a smooth progress bar instead
                let percent = 0;
                const id = setInterval(() => {
                    percent += 10;
                    if (percent >= 100) {
                        clearInterval(id);
                        return;
                    }
                    progress(percent);
                }, 100);
                this.xhr.upload.onabort = () => {
                    err(new AjaxError("aborted", null, "ABORTED"));
                    error(new AjaxError("aborted", null, "ABORTED"));
                    clearInterval(id);
                    virtual.afterError();
                };
                this.xhr.onload = () => {
                    clearInterval(id);
                    progress(100);
                    virtual.afterSuccess();
                    setTimeout(() => done(), 500);
                };
                this.xhr.send(null);
            });
        }
    };
}
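// Both producers below normalize their input into the task shape consumed by
// componentUploadQueue and the executors above, roughly:
//   { type: "file" | "directory", path, entry?, exec: workerImplFile | workerImplDirectory, virtual }
// where "virtual" comes from model_virtual_layer.js (save/mkdir) and exposes the
// before() / afterSuccess() / afterError() hooks called around each transfer.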
async function processFiles(filelist) {
    // TODO
    const tasks = [];
    let size = 0;
    const basepath = currentPath();
    const detectFiletype = (file) => {
        // The 4096 heuristic is taken from https://stackoverflow.com/questions/25016442
        // but the answer there is incomplete: it doesn't consider folders with a file
        // looking name such as "test.png" (and couldn't be amended on Stackoverflow).
        // The FileReader probe below resolves the ambiguity and works as expected.
        if (file.size % 4096 !== 0) {
            return Promise.resolve("file");
        }
        return new Promise((done, err) => {
            const reader = new window.FileReader();
            const tid = setTimeout(() => reader.abort(), 1000);
            reader.onload = () => done("file");
            reader.onabort = () => done("file");
            reader.onerror = () => {
                done("directory");
                clearTimeout(tid);
            };
            reader.readAsArrayBuffer(file);
        });
    };
    for (const currentFile of filelist) {
        const type = await detectFiletype(currentFile);
        const path = basepath + currentFile.name;
        if (type === "file") {
            tasks.push({
                type: "file",
                path,
                date: currentFile.lastModified,
                exec: workerImplFile,
                // workerImplFile expects a FileSystemFileEntry like object exposing
                // file(success, error); wrap the plain File accordingly
                entry: { file: (success) => success(currentFile) },
                virtual: save(path, currentFile.size),
            });
            size += currentFile.size;
        } else if (type === "directory") {
            tasks.push({
                type: "directory",
                path: path + "/",
                exec: workerImplDirectory,
                virtual: mkdir(path),
            });
            size += 5000; // made up size, only used to estimate the remaining time
        } else assert.fail(`NOT_SUPPORTED type="${type}"`, type);
    }
    tasks.forEach((task) => task.virtual.before());
    return { tasks, size };
}

async function processItems(itemList) {
    const bfs = async (queue) => {
        const tasks = [];
        let size = 0;
        const basepath = currentPath();
        while (queue.length > 0) {
            const entry = queue.shift();
            if (entry === null) continue;
            const path = basepath + entry.fullPath.substring(1);
            let task = null;
            if (entry.isFile) {
                const entrySize = await new Promise((done) => entry.getMetadata(({ size }) => done(size)));
                task = {
                    type: "file",
                    entry,
                    path,
                    exec: workerImplFile,
                    virtual: save(path, entrySize),
                };
                size += entrySize;
            } else if (entry.isDirectory) {
                task = {
                    type: "directory",
                    path: path + "/",
                    exec: workerImplDirectory,
                    virtual: mkdir(path),
                };
                size += 5000; // made up size, only used to estimate the remaining time
                queue = queue.concat(await new Promise((done) => {
                    entry.createReader().readEntries(done);
                }));
            } else {
                assert.fail("NOT_IMPLEMENTED - unknown entry type in ctrl_upload.js", entry);
            }
            task.virtual.before();
            tasks.push(task);
        }
        return { tasks, size };
    };
    const entries = [];
    for (const item of itemList) entries.push(item.webkitGetAsEntry());
    return await bfs(entries);
}

function formatPercent(number) {
    return `${number}%`;
}

function formatSpeed(bytes, si = true) {
    const thresh = si ? 1000 : 1024;
    if (Math.abs(bytes) < thresh) {
        return bytes.toFixed(1) + "B/s";
    }
    const units = si
        ? ["kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]
        : ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"];
    let u = -1;
    do {
        bytes /= thresh;
        ++u;
    } while (Math.abs(bytes) >= thresh && u < units.length - 1);
    return bytes.toFixed(1) + units[u] + "/s";
}