const chunkSize = 2_000_000;
const chunks = Math.ceil(file.size / chunkSize);
+ const arrayBufferToHex = (buffer: ArrayBuffer): string => {
+ return Array.from(new Uint8Array(buffer))
+ .map((b) => b.toString(16).padStart(2, "0"))
+ .join("");
+ };
+
+ const hash = await window.crypto.subtle.digest("SHA-256", await file.arrayBuffer());
+ console.log("checksum for the entire file is:", arrayBufferToHex(hash));
+
+ const data: Blob[] = [];
for (let i = 0; i < chunks; i++) {
const start = i * chunkSize;
const end = start + chunkSize;
const chunk = file.slice(start, end);
+ data.push(chunk);
+
+ console.log("Uploading", start, "to", end, " (total: " + chunk.size + " of " + file.size + ")");
await prepareRequest(endpoints[i]).post(chunk).fetchAsResponse();
}
+
+ const uploadedChunks = new Blob(data);
+ const uploadedHash = await window.crypto.subtle.digest("SHA-256", await uploadedChunks.arrayBuffer());
+ console.log("checksum for the reassembled chunks is:", arrayBufferToHex(uploadedHash));
}
export function setup(): void {
element.addEventListener("upload", (event: CustomEvent<File>) => {
void upload(element, event.detail);
});
+
+ // TODO(review): debug scaffolding — remove this hard-coded 4 MB test upload before release.
+ const file = new File(["a".repeat(4_000_001)], "test.txt");
+ void upload(element, file);
});
}
const { endpoints } = response;
const chunkSize = 2000000;
const chunks = Math.ceil(file.size / chunkSize);
+ const arrayBufferToHex = (buffer) => {
+ return Array.from(new Uint8Array(buffer))
+ .map((b) => b.toString(16).padStart(2, "0"))
+ .join("");
+ };
+ const hash = await window.crypto.subtle.digest("SHA-256", await file.arrayBuffer());
+ console.log("checksum for the entire file is:", arrayBufferToHex(hash));
+ const data = [];
for (let i = 0; i < chunks; i++) {
const start = i * chunkSize;
const end = start + chunkSize;
const chunk = file.slice(start, end);
+ data.push(chunk);
+ console.log("Uploading", start, "to", end, " (total: " + chunk.size + " of " + file.size + ")");
await (0, Backend_1.prepareRequest)(endpoints[i]).post(chunk).fetchAsResponse();
}
+ const uploadedChunks = new Blob(data);
+ const uploadedHash = await window.crypto.subtle.digest("SHA-256", await uploadedChunks.arrayBuffer());
+ console.log("checksum for the reassembled chunks is:", arrayBufferToHex(uploadedHash));
}
function setup() {
(0, Selector_1.wheneverFirstSeen)("woltlab-core-file-upload", (element) => {
element.addEventListener("upload", (event) => {
void upload(element, event.detail);
});
+ const file = new File(["a".repeat(4000001)], "test.txt");
+ void upload(element, file);
});
}
exports.setup = setup;
use Psr\Http\Server\RequestHandlerInterface;
use wcf\http\Helper;
use wcf\system\exception\IllegalLinkException;
+use wcf\system\io\AtomicWriter;
use wcf\system\WCF;
final class FileUploadAction implements RequestHandlerInterface
throw new IllegalLinkException();
}
- // Check if the actual size matches the expectations.
- if ($parameters['sequenceNo'] === $chunks - 1) {
- // The last chunk is most likely smaller than our chunk size.
- $expectedSize = $row['filesize'] - $chunkSize * ($chunks - 1);
- } else {
- $expectedSize = $chunkSize;
- }
-
- $chunk = \file_get_contents('php://input');
- $actualSize = \strlen($chunk);
-
- if ($actualSize !== $expectedSize) {
- throw new IllegalLinkException();
- }
-
$folderA = \substr($row['identifier'], 0, 2);
$folderB = \substr($row['identifier'], 2, 2);
$parameters['sequenceNo'],
);
- \file_put_contents($tmpPath . $filename, $chunk);
+ // NOTE(review): the previous per-chunk size validation was removed in this
+ // change; confirm the expected chunk/total size is enforced elsewhere.
+ // Write the chunk using a buffer to avoid blowing up the memory limit.
+ // See https://stackoverflow.com/a/61997147
+ $file = new AtomicWriter($tmpPath . $filename);
+ $bufferSize = 1 * 1024 * 1024;
+
+ $fh = \fopen('php://input', 'rb');
+ while (!\feof($fh)) {
+ $file->write(\fread($fh, $bufferSize));
+ }
+ \fclose($fh);
+
+ $file->flush();
+
+ // Check if we have all chunks.
+ $data = [];
+ for ($i = 0; $i < $chunks; $i++) {
+ $filename = \sprintf(
+ '%s-%d.bin',
+ $row['identifier'],
+ $i,
+ );
+
+ if (\file_exists($tmpPath . $filename)) {
+ $data[] = $tmpPath . $filename;
+ }
+ }
+
+ if (\count($data) === $chunks) {
+ // Concatenate the files by reading only a limited buffer at a time
+ // to avoid blowing up the memory limit.
+ // See https://stackoverflow.com/a/61997147
+ $bufferSize = 1 * 1024 * 1024;
+
+ $newFilename = \sprintf('%s-final.bin', $row['identifier']);
+ $file = new AtomicWriter($tmpPath . $newFilename);
+ foreach ($data as $fileChunk) {
+ $fh = \fopen($fileChunk, 'rb');
+ while (!\feof($fh)) {
+ $file->write(\fread($fh, $bufferSize));
+ }
+ \fclose($fh);
+ }
+
+ $file->flush();
+
+ \wcfDebug(
+ \memory_get_peak_usage(true),
+ \hash_file(
+ 'sha256',
+ $tmpPath . $newFilename,
+ )
+ );
+ }
+
+ \wcfDebug(\memory_get_peak_usage(true));
// TODO: Dummy response to simulate a successful upload of a chunk.
return new EmptyResponse();