Add errors to chunked upload

This commit is contained in:
SebClem 2024-07-10 22:51:20 +02:00
parent fceb773aba
commit 2ee3687270
Signed by: sebclem
GPG Key ID: 5A4308F6A359EA50

View File

@ -4,7 +4,6 @@ import got, {
HTTPError, HTTPError,
RequestError, RequestError,
type Method, type Method,
type OptionsInit,
type PlainResponse, type PlainResponse,
} from "got"; } from "got";
import { DateTime } from "luxon"; import { DateTime } from "luxon";
@ -18,8 +17,6 @@ import { templateToRegexp } from "./backupConfigService.js";
import { getChunkEndpoint, getEndpoint } from "./webdavConfigService.js"; import { getChunkEndpoint, getEndpoint } from "./webdavConfigService.js";
import { States } from "../types/status.js"; import { States } from "../types/status.js";
import { randomUUID } from "crypto"; import { randomUUID } from "crypto";
import e, { response } from "express";
import { NONAME } from "dns";
const CHUNK_SIZE = 5 * 1024 * 1024; // 5MiB Same as desktop client const CHUNK_SIZE = 5 * 1024 * 1024; // 5MiB Same as desktop client
const CHUNK_NUMBER_SIZE = 5; // To add landing "0" const CHUNK_NUMBER_SIZE = 5; // To add landing "0"
@ -332,51 +329,103 @@ export function webdavUploadFile(
}); });
} }
export function chunkedUpload( export async function chunkedUpload(
localPath: string, localPath: string,
webdavPath: string, webdavPath: string,
config: WebdavConfig config: WebdavConfig
) { ) {
return new Promise<void>(async (resolve, reject) => { const uuid = randomUUID();
const uuid = randomUUID(); const fileSize = fs.statSync(localPath).size;
const fileSize = fs.statSync(localPath).size;
const chunkEndpoint = getChunkEndpoint(config); const chunkEndpoint = getChunkEndpoint(config);
const chunkedUrl = config.url + chunkEndpoint + uuid; const chunkedUrl = config.url + chunkEndpoint + uuid;
const finalDestination = config.url + getEndpoint(config) + webdavPath; const finalDestination = config.url + getEndpoint(config) + webdavPath;
try {
await initChunkedUpload(chunkedUrl, finalDestination, config); await initChunkedUpload(chunkedUrl, finalDestination, config);
} catch (err) {
let start = 0; if (err instanceof RequestError) {
let end = fileSize > CHUNK_SIZE ? CHUNK_SIZE : fileSize; messageManager.error(
let current_size = end; "Fail to init chuncked upload.",
let uploadedBytes = 0; `Code: ${err.code} Body: ${err.response?.body}`
);
let i = 0; logger.error(`Fail to init chuncked upload`);
while (start < fileSize) { logger.error(`Code: ${err.code}`);
const chunk = fs.createReadStream(localPath, { start, end }); logger.error(`Body: ${err.response?.body}`);
try { } else {
const chunckNumber = i.toString().padStart(CHUNK_NUMBER_SIZE, "0"); messageManager.error(
await uploadChunk( "Fail to init chuncked upload.",
chunkedUrl + `/${chunckNumber}`, (err as Error).message
finalDestination, );
chunk, logger.error(`Fail to init chuncked upload`);
current_size, logger.error((err as Error).message);
fileSize,
config
);
start = end;
end = Math.min(start + CHUNK_SIZE, fileSize - 1);
current_size = end - start;
i++;
} catch (error) {
reject();
return;
}
} }
logger.debug("Chunked upload funished, assembling chunks."); fs.unlinkSync(localPath);
assembleChunkedUpload(chunkedUrl, finalDestination, fileSize, config); throw err;
resolve(); }
});
let start = 0;
let end = fileSize > CHUNK_SIZE ? CHUNK_SIZE : fileSize;
let current_size = end;
let uploadedBytes = 0;
let i = 0;
while (start < fileSize) {
const chunk = fs.createReadStream(localPath, { start, end });
try {
const chunckNumber = i.toString().padStart(CHUNK_NUMBER_SIZE, "0");
await uploadChunk(
chunkedUrl + `/${chunckNumber}`,
finalDestination,
chunk,
current_size,
fileSize,
config
);
start = end;
end = Math.min(start + CHUNK_SIZE, fileSize - 1);
current_size = end - start;
i++;
} catch (error) {
if (error instanceof Error) {
messageManager.error(
"Fail to upload file to Cloud.",
`Error: ${error.message}`
);
logger.error(`Fail to upload file to Cloud`);
} else {
messageManager.error(
"Fail to upload file to Cloud.",
`Code: ${(error as PlainResponse).statusCode} Body: ${
(error as PlainResponse).body
}`
);
logger.error(`Fail to upload file to Cloud`);
logger.error(`Code: ${(error as PlainResponse).statusCode}`);
logger.error(`Body: ${(error as PlainResponse).body}`);
}
throw error;
}
}
logger.debug("Chunked upload funished, assembling chunks.");
try {
await assembleChunkedUpload(chunkedUrl, finalDestination, fileSize, config);
} catch (err) {
if (err instanceof RequestError) {
messageManager.error(
"Fail to assembling chunks.",
`Code: ${err.code} Body: ${err.response?.body}`
);
logger.error("Fail to assemble chunks");
logger.error(`Code: ${err.code}`);
logger.error(`Body: ${err.response?.body}`);
} else {
messageManager.error("Fail to assemble chunks", (err as Error).message);
logger.error("Fail to assemble chunks");
logger.error((err as Error).message);
}
fs.unlinkSync(localPath);
throw err;
}
} }
export function uploadChunk( export function uploadChunk(
@ -449,7 +498,7 @@ export function assembleChunkedUpload(
totalLength: number, totalLength: number,
config: WebdavConfig config: WebdavConfig
) { ) {
let chunckFile = `${url}/.file`; const chunckFile = `${url}/.file`;
logger.debug(`Assemble chuncked upload.`); logger.debug(`Assemble chuncked upload.`);
logger.debug(`...URI: ${encodeURI(chunckFile)}`); logger.debug(`...URI: ${encodeURI(chunckFile)}`);
logger.debug(`...Final destination: ${encodeURI(finalDestination)}`); logger.debug(`...Final destination: ${encodeURI(finalDestination)}`);