Mirror of https://github.com/Sebclem/hassio-nextcloud-backup.git (synced 2024-11-22 09:12:58 +01:00)

Commit 2ee3687270: Add errors to chuncked upload
Parent: fceb773aba
@@ -4,7 +4,6 @@ import got, {
   HTTPError,
   RequestError,
   type Method,
-  type OptionsInit,
   type PlainResponse,
 } from "got";
 import { DateTime } from "luxon";
@@ -18,8 +17,6 @@ import { templateToRegexp } from "./backupConfigService.js";
 import { getChunkEndpoint, getEndpoint } from "./webdavConfigService.js";
 import { States } from "../types/status.js";
 import { randomUUID } from "crypto";
-import e, { response } from "express";
-import { NONAME } from "dns";
 
 const CHUNK_SIZE = 5 * 1024 * 1024; // 5MiB Same as desktop client
 const CHUNK_NUMBER_SIZE = 5; // To add landing "0"
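
Aside on the two constants kept above: CHUNK_NUMBER_SIZE is the zero-padding width used when naming chunks, so the "leading 0"s keep chunk names in lexicographic order. A minimal sketch of such a naming helper (the helper itself is hypothetical, not part of this commit):

    // Hypothetical helper: pad the chunk index to CHUNK_NUMBER_SIZE digits,
    // e.g. 1 -> "00001", so the server sees the chunks in upload order.
    function chunkName(index: number): string {
      return String(index).padStart(CHUNK_NUMBER_SIZE, "0");
    }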
@@ -332,19 +329,39 @@ export function webdavUploadFile(
   });
 }
 
-export function chunkedUpload(
+export async function chunkedUpload(
   localPath: string,
   webdavPath: string,
   config: WebdavConfig
 ) {
-  return new Promise<void>(async (resolve, reject) => {
   const uuid = randomUUID();
   const fileSize = fs.statSync(localPath).size;
 
   const chunkEndpoint = getChunkEndpoint(config);
   const chunkedUrl = config.url + chunkEndpoint + uuid;
   const finalDestination = config.url + getEndpoint(config) + webdavPath;
+  try {
     await initChunkedUpload(chunkedUrl, finalDestination, config);
+  } catch (err) {
+    if (err instanceof RequestError) {
+      messageManager.error(
+        "Fail to init chuncked upload.",
+        `Code: ${err.code} Body: ${err.response?.body}`
+      );
+      logger.error(`Fail to init chuncked upload`);
+      logger.error(`Code: ${err.code}`);
+      logger.error(`Body: ${err.response?.body}`);
+    } else {
+      messageManager.error(
+        "Fail to init chuncked upload.",
+        (err as Error).message
+      );
+      logger.error(`Fail to init chuncked upload`);
+      logger.error((err as Error).message);
+    }
+    fs.unlinkSync(localPath);
+    throw err;
+  }
 
   let start = 0;
   let end = fileSize > CHUNK_SIZE ? CHUNK_SIZE : fileSize;
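
Since chunkedUpload is now declared async and rethrows after logging (instead of silently calling reject()), a caller can await it and react to failures directly. A minimal usage sketch; the caller function and its names are illustrative only, not part of this commit:

    // Illustrative caller, not from the repository.
    async function pushBackupToNextcloud(
      localPath: string,
      webdavPath: string,
      config: WebdavConfig
    ): Promise<boolean> {
      try {
        await chunkedUpload(localPath, webdavPath, config);
        return true;
      } catch {
        // chunkedUpload has already logged the error and notified messageManager,
        // so the caller only needs to flag the backup as failed.
        return false;
      }
    }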
@@ -369,14 +386,46 @@ export function chunkedUpload(
       current_size = end - start;
       i++;
     } catch (error) {
-      reject();
-      return;
+      if (error instanceof Error) {
+        messageManager.error(
+          "Fail to upload file to Cloud.",
+          `Error: ${error.message}`
+        );
+        logger.error(`Fail to upload file to Cloud`);
+      } else {
+        messageManager.error(
+          "Fail to upload file to Cloud.",
+          `Code: ${(error as PlainResponse).statusCode} Body: ${
+            (error as PlainResponse).body
+          }`
+        );
+        logger.error(`Fail to upload file to Cloud`);
+        logger.error(`Code: ${(error as PlainResponse).statusCode}`);
+        logger.error(`Body: ${(error as PlainResponse).body}`);
+      }
+      throw error;
     }
   }
   logger.debug("Chunked upload funished, assembling chunks.");
-  assembleChunkedUpload(chunkedUrl, finalDestination, fileSize, config);
-  resolve();
-  });
+  try {
+    await assembleChunkedUpload(chunkedUrl, finalDestination, fileSize, config);
+  } catch (err) {
+    if (err instanceof RequestError) {
+      messageManager.error(
+        "Fail to assembling chunks.",
+        `Code: ${err.code} Body: ${err.response?.body}`
+      );
+      logger.error("Fail to assemble chunks");
+      logger.error(`Code: ${err.code}`);
+      logger.error(`Body: ${err.response?.body}`);
+    } else {
+      messageManager.error("Fail to assemble chunks", (err as Error).message);
+      logger.error("Fail to assemble chunks");
+      logger.error((err as Error).message);
+    }
+    fs.unlinkSync(localPath);
+    throw err;
+  }
 }
 
 export function uploadChunk(
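
For context, the loop above walks the local file in CHUNK_SIZE windows and hands each [start, end) slice to uploadChunk, which this commit leaves unchanged. A hedged sketch of what one chunk PUT can look like with got and a file stream; the chunk naming, URL layout, and basic-auth handling here are assumptions for illustration, not this repository's exact code:

    import fs from "fs";
    import got from "got";

    // Sketch only: PUT one [start, end) slice of the file into the chunk upload directory.
    function putChunk(
      localPath: string,
      chunkedUrl: string,
      index: number,
      start: number,
      end: number,
      auth: { username: string; password: string }
    ) {
      const name = String(index).padStart(5, "0"); // CHUNK_NUMBER_SIZE digits
      // createReadStream's "end" option is inclusive, hence end - 1.
      const body = fs.createReadStream(localPath, { start, end: end - 1 });
      return got.put(encodeURI(`${chunkedUrl}/${name}`), {
        body,
        headers: {
          Authorization:
            "Basic " + Buffer.from(`${auth.username}:${auth.password}`).toString("base64"),
        },
      });
    }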
@@ -449,7 +498,7 @@ export function assembleChunkedUpload(
   totalLength: number,
   config: WebdavConfig
 ) {
-  let chunckFile = `${url}/.file`;
+  const chunckFile = `${url}/.file`;
   logger.debug(`Assemble chuncked upload.`);
   logger.debug(`...URI: ${encodeURI(chunckFile)}`);
   logger.debug(`...Final destination: ${encodeURI(finalDestination)}`);
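
assembleChunkedUpload then asks the server to stitch the chunks together by MOVE-ing the .file pseudo-entry of the upload directory onto the final destination; the totalLength parameter suggests the expected size is sent along. A rough sketch of such a request, written with the built-in fetch for brevity (the add-on itself goes through got); the OC-Total-Length header and the auth handling are assumptions, not code from this commit:

    // Sketch only: assemble the uploaded chunks into the final file (Nextcloud chunked upload).
    async function moveAssembledFile(
      chunkedUrl: string,
      finalDestination: string,
      totalLength: number,
      auth: { username: string; password: string }
    ): Promise<void> {
      const res = await fetch(encodeURI(`${chunkedUrl}/.file`), {
        method: "MOVE", // WebDAV method understood by the Nextcloud DAV endpoint
        headers: {
          Authorization:
            "Basic " + Buffer.from(`${auth.username}:${auth.password}`).toString("base64"),
          Destination: encodeURI(finalDestination),
          "OC-Total-Length": String(totalLength), // assumed: lets the server check the final size
        },
      });
      if (!res.ok) {
        throw new Error(`MOVE failed: ${res.status} ${res.statusText}`);
      }
    }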