};
async function upload(element: WoltlabCoreFileUploadElement, file: File): Promise<void> {
+ const fileHash = await getSha256Hash(await file.arrayBuffer());
+
const response = (await prepareRequest(element.dataset.endpoint!)
.post({
filename: file.name,
- filesize: file.size,
+ fileSize: file.size,
+ fileHash,
})
.fetchAsJson()) as PreflightResponse;
const { endpoints } = response;
const chunkSize = 2_000_000;
const chunks = Math.ceil(file.size / chunkSize);
- const arrayBufferToHex = (buffer: ArrayBuffer): string => {
- return Array.from(new Uint8Array(buffer))
- .map((b) => b.toString(16).padStart(2, "0"))
- .join("");
- };
-
- const hash = await window.crypto.subtle.digest("SHA-256", await file.arrayBuffer());
- console.log("checksum for the entire file is:", arrayBufferToHex(hash));
-
- const data: Blob[] = [];
for (let i = 0; i < chunks; i++) {
const start = i * chunkSize;
const end = start + chunkSize;
const chunk = file.slice(start, end);
- data.push(chunk);
- console.log("Uploading", start, "to", end, " (total: " + chunk.size + " of " + file.size + ")");
+ const endpoint = new URL(endpoints[i]);
- await prepareRequest(endpoints[i]).post(chunk).fetchAsResponse();
+ const checksum = await getSha256Hash(await chunk.arrayBuffer());
+ endpoint.searchParams.append("checksum", checksum);
+
+ await prepareRequest(endpoint.toString()).post(chunk).fetchAsResponse();
}
+}
+
+async function getSha256Hash(data: BufferSource): Promise<string> {
+ const buffer = await window.crypto.subtle.digest("SHA-256", data);
- const uploadedChunks = new Blob(data);
- const uploadedHash = await window.crypto.subtle.digest("SHA-256", await uploadedChunks.arrayBuffer());
- console.log("checksum for the entire file is:", arrayBufferToHex(uploadedHash));
+ return Array.from(new Uint8Array(buffer))
+ .map((b) => b.toString(16).padStart(2, "0"))
+ .join("");
}
export function setup(): void {
Object.defineProperty(exports, "__esModule", { value: true });
exports.setup = void 0;
async function upload(element, file) {
+ const fileHash = await getSha256Hash(await file.arrayBuffer());
const response = (await (0, Backend_1.prepareRequest)(element.dataset.endpoint)
.post({
filename: file.name,
- filesize: file.size,
+ fileSize: file.size,
+ fileHash,
})
.fetchAsJson());
const { endpoints } = response;
const chunkSize = 2000000;
const chunks = Math.ceil(file.size / chunkSize);
- const arrayBufferToHex = (buffer) => {
- return Array.from(new Uint8Array(buffer))
- .map((b) => b.toString(16).padStart(2, "0"))
- .join("");
- };
- const hash = await window.crypto.subtle.digest("SHA-256", await file.arrayBuffer());
- console.log("checksum for the entire file is:", arrayBufferToHex(hash));
- const data = [];
for (let i = 0; i < chunks; i++) {
const start = i * chunkSize;
const end = start + chunkSize;
const chunk = file.slice(start, end);
- data.push(chunk);
- console.log("Uploading", start, "to", end, " (total: " + chunk.size + " of " + file.size + ")");
- await (0, Backend_1.prepareRequest)(endpoints[i]).post(chunk).fetchAsResponse();
+ const endpoint = new URL(endpoints[i]);
+ const checksum = await getSha256Hash(await chunk.arrayBuffer());
+ endpoint.searchParams.append("checksum", checksum);
+ await (0, Backend_1.prepareRequest)(endpoint.toString()).post(chunk).fetchAsResponse();
}
- const uploadedChunks = new Blob(data);
- const uploadedHash = await window.crypto.subtle.digest("SHA-256", await uploadedChunks.arrayBuffer());
- console.log("checksum for the entire file is:", arrayBufferToHex(uploadedHash));
+ }
+ async function getSha256Hash(data) {
+ const buffer = await window.crypto.subtle.digest("SHA-256", data);
+ return Array.from(new Uint8Array(buffer))
+ .map((b) => b.toString(16).padStart(2, "0"))
+ .join("");
}
function setup() {
(0, Selector_1.wheneverFirstSeen)("woltlab-core-file-upload", (element) => {
use wcf\http\Helper;
use wcf\system\exception\IllegalLinkException;
use wcf\system\io\AtomicWriter;
+use wcf\system\io\File;
use wcf\system\WCF;
final class FileUploadAction implements RequestHandlerInterface
$request->getQueryParams(),
<<<'EOT'
array {
+ checksum: non-empty-string,
identifier: non-empty-string,
sequenceNo: int,
}
$row = $statement->fetchSingleRow();
if ($row === false) {
+ // TODO: Proper error message
throw new IllegalLinkException();
}
// Check if this is a valid sequence no.
// TODO: The chunk calculation shouldn’t be based on a fixed number.
$chunkSize = 2_000_000;
- $chunks = (int)\ceil($row['filesize'] / $chunkSize);
+ $chunks = (int)\ceil($row['fileSize'] / $chunkSize);
if ($parameters['sequenceNo'] >= $chunks) {
+ // TODO: Proper error message
+ throw new IllegalLinkException();
+ }
+
+ // Check if the checksum matches the received data.
+ $ctx = \hash_init('sha256');
+ $bufferSize = 1 * 1024 * 1024;
+ $stream = $request->getBody();
+ while (!$stream->eof()) {
+ \hash_update($ctx, $stream->read($bufferSize));
+ }
+ $result = \hash_final($ctx);
+ $stream->rewind();
+
+ if (!\hash_equals($result, $parameters['checksum'])) {
+ // TODO: Proper error message
throw new IllegalLinkException();
}
// Write the chunk using a buffer to avoid blowing up the memory limit.
// See https://stackoverflow.com/a/61997147
- $file = new AtomicWriter($tmpPath . $filename);
+ $writer = new AtomicWriter($tmpPath . $filename);
 $bufferSize = 1 * 1024 * 1024;
- $fh = \fopen('php://input', 'rb');
- while (!\feof($fh)) {
- $file->write(\fread($fh, $bufferSize));
+ while (!$stream->eof()) {
+ $writer->write($stream->read($bufferSize));
 }
- \fclose($fh);
- $file->flush();
+ $writer->flush();
// Check if we have all chunks.
$data = [];
$bufferSize = 1 * 1024 * 1024;
$newFilename = \sprintf('%s-final.bin', $row['identifier']);
- $file = new AtomicWriter($tmpPath . $newFilename);
+ $writer = new AtomicWriter($tmpPath . $newFilename);
 foreach ($data as $fileChunk) {
- $fh = \fopen($fileChunk, 'rb');
- while (!\feof($fh)) {
- $file->write(\fread($fh, $bufferSize));
+ $source = new File($fileChunk, 'rb');
+ while (!$source->eof()) {
+ $writer->write($source->read($bufferSize));
 }
- \fclose($fh);
+ $source->close();
 }
- $file->flush();
+ $writer->flush();
\wcfDebug(
\memory_get_peak_usage(true),
<<<'EOT'
array {
filename: non-empty-string,
- filesize: positive-int,
+ fileSize: positive-int,
+ fileHash: non-empty-string,
}
EOT,
);
// TODO: The chunk calculation shouldn’t be based on a fixed number.
$chunkSize = 2_000_000;
- $chunks = (int)\ceil($parameters['filesize'] / $chunkSize);
+ $chunks = (int)\ceil($parameters['fileSize'] / $chunkSize);
$identifier = $this->createTemporaryFile($parameters);
$identifier = \bin2hex(\random_bytes(20));
$sql = "INSERT INTO wcf1_file_temporary
- (identifier, time, filename, filesize)
- VALUES (?, ?, ?, ?)";
+ (identifier, time, filename, fileSize, fileHash)
+ VALUES (?, ?, ?, ?, ?)";
$statement = WCF::getDB()->prepare($sql);
$statement->execute([
$identifier,
\TIME_NOW,
$parameters['filename'],
- $parameters['filesize'],
+ $parameters['fileSize'],
+ $parameters['fileHash'],
]);
return $identifier;
identifier CHAR(40) NOT NULL PRIMARY KEY,
time INT NOT NULL,
filename VARCHAR(255) NOT NULL,
- filesize BIGINT NOT NULL
+ fileSize BIGINT NOT NULL,
+ fileHash CHAR(64) NOT NULL
);
DROP TABLE IF EXISTS wcf1_file_chunk;