<?php
namespace App\Http\Controllers;
use Illuminate\Http\JsonResponse;
use Illuminate\Http\Request;
use Illuminate\Http\UploadedFile;
use Illuminate\Routing\Controller as BaseController;
use Pion\Laravel\ChunkUpload\Exceptions\UploadMissingFileException;
use Pion\Laravel\ChunkUpload\Handler\HandlerFactory;
use Pion\Laravel\ChunkUpload\Receiver\FileReceiver;
class UploadController extends BaseController
{
    /**
     * Hard cap on the size of an uploaded database backup (10 GiB).
     */
    private const MAX_BYTES = 10 * 1024 * 1024 * 1024; // 10 GiB

    /**
     * Allowlisted backup file extensions, including compound variants such as
     * "sql.gz". Matching is case-insensitive and longest-suffix-first; see
     * hasAllowedExtension().
     */
    private const ALLOWED_EXTENSIONS = [
        'sql',
        'sql.gz',
        'gz',
        'zip',
        'tar',
        'tar.gz',
        'tgz',
        'dump',
        'bak',
        'bson',
        'bson.gz',
        'archive',
        'archive.gz',
        'bz2',
        'xz',
    ];

    /**
     * Receive one chunk of a dropzone-style chunked database backup upload.
     *
     * Verifies that the authenticated team owns the target database, then
     * validates the client-declared file name and total size on every chunk
     * before handing the chunk to the chunk-upload receiver. When the final
     * chunk arrives, the assembled file is re-validated and stored by
     * saveFile().
     *
     * @throws UploadMissingFileException when the request carries no file
     */
    public function upload(Request $request): JsonResponse
    {
        // Route parameter identifies the target database:
        // ServiceDatabase -> "{name}-{service_uuid}", standalone -> "{uuid}".
        // Use the injected request rather than the request() helper so the
        // method stays consistent and testable.
        $databaseIdentifier = $request->route('databaseUuid');

        $resource = getResourceByUuid($databaseIdentifier, data_get(auth()->user()->currentTeam(), 'id'));

        if (is_null($resource)) {
            // NOTE(review): 500 is kept for backward compatibility with the
            // existing frontend; 403 would be the semantically correct status.
            return response()->json(['error' => 'You do not have permission for this database'], 500);
        }

        // Reject bad uploads early, on every chunk, using the client-declared
        // file name. The assembled file is checked again in saveFile(), so a
        // lying client gains nothing by spoofing this metadata.
        $chunk = $request->file('file');
        $originalName = $chunk instanceof UploadedFile ? $chunk->getClientOriginalName() : null;

        if (blank($originalName) || ! self::hasAllowedExtension($originalName)) {
            return response()->json([
                'error' => 'Unsupported file type. Allowed extensions: '.implode(', ', self::ALLOWED_EXTENSIONS),
            ], 422);
        }

        // Dropzone sends the total (pre-chunking) file size with each chunk.
        $declaredTotalSize = (int) $request->input('dzTotalFilesize', 0);

        if ($declaredTotalSize > self::MAX_BYTES) {
            return response()->json([
                'error' => 'File exceeds maximum allowed size of '.self::formatMaxSize().'.',
            ], 422);
        }

        $receiver = new FileReceiver('file', $request, HandlerFactory::classFromRequest($request));

        if ($receiver->isUploaded() === false) {
            throw new UploadMissingFileException;
        }

        $save = $receiver->receive();

        if ($save->isFinished()) {
            // Use the original identifier from the route to maintain path consistency
            // For ServiceDatabase: {name}-{service_uuid}
            // For standalone databases: {uuid}
            return $this->saveFile($save->getFile(), $databaseIdentifier);
        }

        // Not the last chunk yet: report progress so the client keeps sending.
        $handler = $save->handler();

        return response()->json([
            'done' => $handler->getPercentageDone(),
            'status' => true,
        ]);
    }

    /**
     * Validate and persist the fully assembled upload.
     *
     * The file is stored as storage/app/upload/{resourceIdentifier}/restore.
     * A failed re-validation deletes the temporary file and returns 422.
     */
    protected function saveFile(UploadedFile $file, string $resourceIdentifier): JsonResponse
    {
        $originalName = $file->getClientOriginalName();
        $size = $file->getSize();

        // Re-check the extension and enforce the size cap on the real,
        // assembled file ($size === false guards older getSize() failure
        // returns that reported false instead of throwing).
        if (! self::hasAllowedExtension($originalName) || $size === false || $size > self::MAX_BYTES) {
            // Best-effort cleanup of the rejected temp file; guarded instead
            // of @-suppressed so genuine unlink failures are not hidden.
            $tempPath = $file->getPathname();
            if (is_file($tempPath)) {
                unlink($tempPath);
            }

            return response()->json([
                'error' => 'Uploaded file failed validation.',
            ], 422);
        }

        // '/' is replaced so the mime type is safe to embed downstream.
        $mime = str_replace('/', '-', $file->getMimeType());
        $filePath = "upload/{$resourceIdentifier}";
        $finalPath = storage_path('app/'.$filePath);
        // The stored name is always "restore" inside the resource's folder.
        $file->move($finalPath, 'restore');

        return response()->json([
            'mime_type' => $mime,
        ]);
    }

    /**
     * Whether $name ends with one of ALLOWED_EXTENSIONS.
     *
     * The longest matching suffix is authoritative: "backup.sql.gz" is judged
     * against ".sql.gz" (never falling back to ".gz"), and the remaining stem
     * must be non-empty and must not end with a dot — so bare ".sql" or
     * "foo..gz"-style names are rejected.
     */
    private static function hasAllowedExtension(string $name): bool
    {
        $lower = strtolower($name);
        $suffixes = array_map(static fn (string $ext): string => '.'.$ext, self::ALLOWED_EXTENSIONS);

        // Longest suffix first so compound extensions win over their tails.
        usort($suffixes, static fn (string $a, string $b): int => strlen($b) <=> strlen($a));

        foreach ($suffixes as $suffix) {
            if (str_ends_with($lower, $suffix)) {
                // First (longest) match decides; no fallback to shorter
                // suffixes if the stem check fails.
                $stem = substr($lower, 0, -strlen($suffix));

                return $stem !== '' && ! str_ends_with($stem, '.');
            }
        }

        return false;
    }

    /**
     * Human-readable form of MAX_BYTES for error messages, e.g. "10 GiB".
     */
    private static function formatMaxSize(): string
    {
        // intdiv keeps the arithmetic in integers (exact for this constant).
        return intdiv(self::MAX_BYTES, 1024 ** 3).' GiB';
    }
}