Fix chunked upload content size issue

SebClem 2024-07-12 16:41:52 +02:00
parent 73a0160e77
commit 8f0b6889f7
Signed by: sebclem
GPG Key ID: 5A4308F6A359EA50

@@ -343,7 +343,7 @@ export async function chunkedUpload(
   const finalDestination = config.url + getEndpoint(config) + webdavPath;
   const status = statusTools.getStatus();
   status.status = States.BKUP_UPLOAD_CLOUD;
-  status.progress = 0;
+  status.progress = -1;
   statusTools.setStatus(status);
   try {
     await initChunkedUpload(chunkedUrl, finalDestination, config);
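
The first hunk switches the initial progress from 0 to -1 before any bytes are sent, presumably so the frontend can treat the value as "indeterminate" instead of rendering a stale 0% bar; the exact convention is not visible in this diff. A hypothetical consumer-side reading of the sentinel (the function name and percentage scale are assumptions, not the project's API):

    // Hypothetical sketch: how a status consumer might interpret the
    // sentinel. The -1 convention and 0..100 scale are assumptions.
    function renderProgress(progress: number | undefined): string {
      if (progress === undefined) return "idle"; // no upload in flight
      if (progress < 0) return "uploading...";   // indeterminate (-1)
      return `${Math.round(progress)}%`;         // determinate 0..100
    }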
@@ -365,16 +365,21 @@ export async function chunkedUpload(
       logger.error((err as Error).message);
     }
     fs.unlinkSync(localPath);
+    const status = statusTools.getStatus();
+    status.status = States.IDLE;
+    status.progress = undefined;
+    statusTools.setStatus(status);
     throw err;
   }
   let start = 0;
-  let end = fileSize > CHUNK_SIZE ? CHUNK_SIZE : fileSize;
-  let current_size = end;
+  let end = Math.min(CHUNK_SIZE - 1, fileSize - 1);
+  let current_size = end + 1;
   // const uploadedBytes = 0;
-  let i = 0;
-  while (start < fileSize) {
+  let i = 1;
+  while (start < fileSize - 1) {
     const chunk = fs.createReadStream(localPath, { start, end });
     try {
       const chunckNumber = i.toString().padStart(CHUNK_NUMBER_SIZE, "0");
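
This hunk is the heart of the size fix. Node's fs.createReadStream treats both start and end as inclusive byte offsets, so the old end = CHUNK_SIZE read CHUNK_SIZE + 1 bytes, and the next iteration (start = end) re-read the boundary byte, yielding oversized, overlapping chunks. The new code keeps start and end inclusive throughout, which is why every size computation gains a + 1. A standalone sketch of the corrected arithmetic, with sample values for CHUNK_SIZE and fileSize and the loop guard simplified to start < fileSize:

    const CHUNK_SIZE = 4; // sample value, not the project's constant
    const fileSize = 10;  // sample value

    let start = 0;
    let end = Math.min(CHUNK_SIZE - 1, fileSize - 1);
    let total = 0;
    while (start < fileSize) {
      const currentSize = end - start + 1; // inclusive range => +1
      console.log(`chunk [${start}..${end}] -> ${currentSize} bytes`);
      total += currentSize;
      start = end + 1;
      end = Math.min(start + CHUNK_SIZE - 1, fileSize - 1);
    }
    console.log(total === fileSize); // true: every byte sent exactly once

This prints [0..3] 4 bytes, [4..7] 4 bytes, [8..9] 2 bytes: the chunks no longer overlap at the boundary byte.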
@@ -386,9 +391,9 @@ export async function chunkedUpload(
         fileSize,
         config
       );
-      start = end;
-      end = Math.min(start + CHUNK_SIZE, fileSize - 1);
-      current_size = end - start;
+      start = end + 1;
+      end = Math.min(start + CHUNK_SIZE - 1, fileSize - 1);
+      current_size = end - start + 1;
       i++;
     } catch (error) {
       if (error instanceof Error) {
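
Alongside the offset fix, the chunk counter now starts at 1 rather than 0, so the first part is named 000001 instead of 000000; Nextcloud's chunked-upload endpoint reportedly expects part names to be positive integers. A quick illustration of the names the loop produces, assuming CHUNK_NUMBER_SIZE is 6 (the actual constant is not shown in this diff):

    const CHUNK_NUMBER_SIZE = 6; // assumed sample value
    for (let i = 1; i <= 3; i++) {
      console.log(i.toString().padStart(CHUNK_NUMBER_SIZE, "0"));
    }
    // 000001
    // 000002
    // 000003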
@@ -408,6 +413,10 @@ export async function chunkedUpload(
         logger.error(`Code: ${(error as PlainResponse).statusCode}`);
         logger.error(`Body: ${(error as PlainResponse).body as string}`);
       }
+      const status = statusTools.getStatus();
+      status.status = States.IDLE;
+      status.progress = undefined;
+      statusTools.setStatus(status);
       throw error;
     }
   }
@@ -435,6 +444,10 @@ export async function chunkedUpload(
       logger.error((err as Error).message);
     }
     fs.unlinkSync(localPath);
+    const status = statusTools.getStatus();
+    status.status = States.IDLE;
+    status.progress = undefined;
+    statusTools.setStatus(status);
     throw err;
   }
 }
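
The same four-line status reset now appears in all three failure paths (init, per-chunk upload, and final assembly), so a failed upload no longer leaves the status stuck in BKUP_UPLOAD_CLOUD. A hypothetical helper that would factor out the repetition; it is not part of this commit and assumes the module's existing statusTools and States imports:

    // Hypothetical refactor sketch; not in the codebase.
    function resetStatusToIdle(): void {
      const status = statusTools.getStatus();
      status.status = States.IDLE;
      status.progress = undefined;
      statusTools.setStatus(status);
    }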
@@ -452,6 +465,7 @@ export function uploadChunk(
   logger.debug(`...URI: ${encodeURI(url)}`);
   logger.debug(`...Final destination: ${encodeURI(finalDestination)}`);
   logger.debug(`...Chunk size: ${contentLength}`);
+  logger.debug(`...Total size: ${totalLength}`);
   got.stream
     .put(url, {
       headers: {
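
The last hunk simply logs the total size next to each chunk's size in uploadChunk's debug output. For context, a minimal sketch of streaming one inclusive byte range with got; the function shape and header set are assumptions, while got.stream.put and the pipeline pattern follow got's documented stream API:

    import fs from "fs";
    import { PassThrough } from "stream";
    import { pipeline } from "stream/promises";
    import got from "got";

    // Sketch: PUT one inclusive byte range [start..end] of a local file.
    async function putChunk(
      url: string,
      localPath: string,
      start: number,
      end: number
    ): Promise<void> {
      await pipeline(
        fs.createReadStream(localPath, { start, end }), // inclusive bounds
        got.stream.put(url, {
          headers: { "content-length": String(end - start + 1) },
        }),
        new PassThrough() // drain the response
      );
    }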