diff --git a/apps/dav/composer/composer/autoload_classmap.php b/apps/dav/composer/composer/autoload_classmap.php index a100dac1d85..e7e2c34be62 100644 --- a/apps/dav/composer/composer/autoload_classmap.php +++ b/apps/dav/composer/composer/autoload_classmap.php @@ -310,8 +310,10 @@ return array( 'OCA\\DAV\\Traits\\PrincipalProxyTrait' => $baseDir . '/../lib/Traits/PrincipalProxyTrait.php', 'OCA\\DAV\\Upload\\AssemblyStream' => $baseDir . '/../lib/Upload/AssemblyStream.php', 'OCA\\DAV\\Upload\\ChunkingPlugin' => $baseDir . '/../lib/Upload/ChunkingPlugin.php', + 'OCA\\DAV\\Upload\\ChunkingV2Plugin' => $baseDir . '/../lib/Upload/ChunkingV2Plugin.php', 'OCA\\DAV\\Upload\\CleanupService' => $baseDir . '/../lib/Upload/CleanupService.php', 'OCA\\DAV\\Upload\\FutureFile' => $baseDir . '/../lib/Upload/FutureFile.php', + 'OCA\\DAV\\Upload\\PartFile' => $baseDir . '/../lib/Upload/PartFile.php', 'OCA\\DAV\\Upload\\RootCollection' => $baseDir . '/../lib/Upload/RootCollection.php', 'OCA\\DAV\\Upload\\UploadFile' => $baseDir . '/../lib/Upload/UploadFile.php', 'OCA\\DAV\\Upload\\UploadFolder' => $baseDir . '/../lib/Upload/UploadFolder.php', diff --git a/apps/dav/composer/composer/autoload_static.php b/apps/dav/composer/composer/autoload_static.php index 4187bb6c6f3..5fa87bc354a 100644 --- a/apps/dav/composer/composer/autoload_static.php +++ b/apps/dav/composer/composer/autoload_static.php @@ -325,8 +325,10 @@ class ComposerStaticInitDAV 'OCA\\DAV\\Traits\\PrincipalProxyTrait' => __DIR__ . '/..' . '/../lib/Traits/PrincipalProxyTrait.php', 'OCA\\DAV\\Upload\\AssemblyStream' => __DIR__ . '/..' . '/../lib/Upload/AssemblyStream.php', 'OCA\\DAV\\Upload\\ChunkingPlugin' => __DIR__ . '/..' . '/../lib/Upload/ChunkingPlugin.php', + 'OCA\\DAV\\Upload\\ChunkingV2Plugin' => __DIR__ . '/..' . '/../lib/Upload/ChunkingV2Plugin.php', 'OCA\\DAV\\Upload\\CleanupService' => __DIR__ . '/..' . '/../lib/Upload/CleanupService.php', 'OCA\\DAV\\Upload\\FutureFile' => __DIR__ . '/..' . '/../lib/Upload/FutureFile.php', + 'OCA\\DAV\\Upload\\PartFile' => __DIR__ . '/..' . '/../lib/Upload/PartFile.php', 'OCA\\DAV\\Upload\\RootCollection' => __DIR__ . '/..' . '/../lib/Upload/RootCollection.php', 'OCA\\DAV\\Upload\\UploadFile' => __DIR__ . '/..' . '/../lib/Upload/UploadFile.php', 'OCA\\DAV\\Upload\\UploadFolder' => __DIR__ . '/..' . 
'/../lib/Upload/UploadFolder.php', diff --git a/apps/dav/lib/Connector/Sabre/Directory.php b/apps/dav/lib/Connector/Sabre/Directory.php index 531ccff9d92..c29070fe921 100644 --- a/apps/dav/lib/Connector/Sabre/Directory.php +++ b/apps/dav/lib/Connector/Sabre/Directory.php @@ -38,6 +38,7 @@ use OC\Metadata\FileMetadata; use OCA\DAV\Connector\Sabre\Exception\FileLocked; use OCA\DAV\Connector\Sabre\Exception\Forbidden; use OCA\DAV\Connector\Sabre\Exception\InvalidPath; +use OCA\DAV\Upload\FutureFile; use OCP\Files\FileInfo; use OCP\Files\Folder; use OCP\Files\ForbiddenException; diff --git a/apps/dav/lib/Connector/Sabre/Node.php b/apps/dav/lib/Connector/Sabre/Node.php index ee159cef1d6..2c8d313eefd 100644 --- a/apps/dav/lib/Connector/Sabre/Node.php +++ b/apps/dav/lib/Connector/Sabre/Node.php @@ -261,6 +261,10 @@ abstract class Node implements \Sabre\DAV\INode { return $this->info->getId(); } + public function getInternalPath(): string { + return $this->info->getInternalPath(); + } + /** * @param string $user * @return int diff --git a/apps/dav/lib/Server.php b/apps/dav/lib/Server.php index a5833e5175f..ada279bc7b2 100644 --- a/apps/dav/lib/Server.php +++ b/apps/dav/lib/Server.php @@ -71,9 +71,11 @@ use OCA\DAV\Profiler\ProfilerPlugin; use OCA\DAV\Provisioning\Apple\AppleProvisioningPlugin; use OCA\DAV\SystemTag\SystemTagPlugin; use OCA\DAV\Upload\ChunkingPlugin; +use OCA\DAV\Upload\ChunkingV2Plugin; use OCP\AppFramework\Http\Response; use OCP\Diagnostics\IEventLogger; use OCP\EventDispatcher\IEventDispatcher; +use OCP\ICacheFactory; use OCP\IRequest; use OCP\Profiler\IProfiler; use OCP\SabrePluginEvent; @@ -218,6 +220,7 @@ class Server { $this->server->addPlugin(new CopyEtagHeaderPlugin()); $this->server->addPlugin(new RequestIdHeaderPlugin(\OC::$server->get(IRequest::class))); + $this->server->addPlugin(new ChunkingV2Plugin(\OCP\Server::get(ICacheFactory::class))); $this->server->addPlugin(new ChunkingPlugin()); // allow setup of additional plugins diff --git a/apps/dav/lib/Upload/ChunkingV2Plugin.php b/apps/dav/lib/Upload/ChunkingV2Plugin.php new file mode 100644 index 00000000000..cb7c802125c --- /dev/null +++ b/apps/dav/lib/Upload/ChunkingV2Plugin.php @@ -0,0 +1,392 @@ + + * + * @author Julius Härtl + * + * @license GNU AGPL version 3 or any later version + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ * + */ + +namespace OCA\DAV\Upload; + +use Exception; +use InvalidArgumentException; +use OC\Files\Filesystem; +use OC\Files\ObjectStore\ObjectStoreStorage; +use OC\Files\View; +use OC_Hook; +use OCA\DAV\Connector\Sabre\Directory; +use OCA\DAV\Connector\Sabre\File; +use OCP\Files\IMimeTypeDetector; +use OCP\Files\IRootFolder; +use OCP\Files\ObjectStore\IObjectStoreMultiPartUpload; +use OCP\Files\Storage\IChunkedFileWrite; +use OCP\Files\StorageInvalidException; +use OCP\ICache; +use OCP\ICacheFactory; +use OCP\Lock\ILockingProvider; +use Sabre\DAV\Exception\BadRequest; +use Sabre\DAV\Exception\InsufficientStorage; +use Sabre\DAV\Exception\NotFound; +use Sabre\DAV\Exception\PreconditionFailed; +use Sabre\DAV\ICollection; +use Sabre\DAV\INode; +use Sabre\DAV\Server; +use Sabre\DAV\ServerPlugin; +use Sabre\HTTP\RequestInterface; +use Sabre\HTTP\ResponseInterface; +use Sabre\Uri; + +class ChunkingV2Plugin extends ServerPlugin { + /** @var Server */ + private $server; + /** @var UploadFolder */ + private $uploadFolder; + /** @var ICache */ + private $cache; + + private ?string $uploadId = null; + private ?string $uploadPath = null; + + private const TEMP_TARGET = '.target'; + + public const CACHE_KEY = 'chunking-v2'; + public const UPLOAD_TARGET_PATH = 'upload-target-path'; + public const UPLOAD_TARGET_ID = 'upload-target-id'; + public const UPLOAD_ID = 'upload-id'; + + private const DESTINATION_HEADER = 'Destination'; + + public function __construct(ICacheFactory $cacheFactory) { + $this->cache = $cacheFactory->createDistributed(self::CACHE_KEY); + } + + /** + * @inheritdoc + */ + public function initialize(Server $server) { + $server->on('afterMethod:MKCOL', [$this, 'afterMkcol']); + $server->on('beforeMethod:PUT', [$this, 'beforePut']); + $server->on('beforeMethod:DELETE', [$this, 'beforeDelete']); + $server->on('beforeMove', [$this, 'beforeMove'], 90); + + $this->server = $server; + } + + /** + * @param string $path + * @param bool $createIfNotExists + * @return FutureFile|UploadFile|ICollection|INode + */ + private function getUploadFile(string $path, bool $createIfNotExists = false) { + try { + $actualFile = $this->server->tree->getNodeForPath($path); + // Only directly upload to the target file if it is on the same storage + // There may be further potential to optimize here by also uploading + // to other storages directly. 
This would also require carefully picking + // the storage/path used in getStorage() + if ($actualFile instanceof File && $this->uploadFolder->getStorage()->getId() === $actualFile->getNode()->getStorage()->getId()) { + return $actualFile; + } + } catch (NotFound $e) { + // If there is no target file we upload to the upload folder first + } + + // Use file in the upload directory that will be copied or moved afterwards + if ($createIfNotExists) { + $this->uploadFolder->createFile(self::TEMP_TARGET); + } + + /** @var UploadFile $uploadFile */ + $uploadFile = $this->uploadFolder->getChild(self::TEMP_TARGET); + return $uploadFile->getFile(); + } + + public function afterMkcol(RequestInterface $request, ResponseInterface $response): bool { + try { + $this->prepareUpload($request->getPath()); + $this->checkPrerequisites(false); + } catch (BadRequest|StorageInvalidException|NotFound $e) { + return true; + } + + $this->uploadPath = $this->server->calculateUri($this->server->httpRequest->getHeader(self::DESTINATION_HEADER)); + $targetFile = $this->getUploadFile($this->uploadPath, true); + [$storage, $storagePath] = $this->getUploadStorage($this->uploadPath); + + $this->uploadId = $storage->startChunkedWrite($storagePath); + + $this->cache->set($this->uploadFolder->getName(), [ + self::UPLOAD_ID => $this->uploadId, + self::UPLOAD_TARGET_PATH => $this->uploadPath, + self::UPLOAD_TARGET_ID => $targetFile->getId(), + ], 86400); + + $response->setStatus(201); + return true; + } + + public function beforePut(RequestInterface $request, ResponseInterface $response): bool { + try { + $this->prepareUpload(dirname($request->getPath())); + $this->checkPrerequisites(); + } catch (StorageInvalidException|BadRequest|NotFound $e) { + return true; + } + + [$storage, $storagePath] = $this->getUploadStorage($this->uploadPath); + + $chunkName = basename($request->getPath()); + $partId = is_numeric($chunkName) ? 
(int)$chunkName : -1; + if (!($partId >= 1 && $partId <= 10000)) { + throw new BadRequest('Invalid chunk name, must be numeric between 1 and 10000'); + } + + $uploadFile = $this->getUploadFile($this->uploadPath); + $tempTargetFile = null; + + $additionalSize = (int)$request->getHeader('Content-Length'); + if ($this->uploadFolder->childExists(self::TEMP_TARGET) && $this->uploadPath) { + /** @var UploadFile $tempTargetFile */ + $tempTargetFile = $this->uploadFolder->getChild(self::TEMP_TARGET); + [$destinationDir, $destinationName] = Uri\split($this->uploadPath); + /** @var Directory $destinationParent */ + $destinationParent = $this->server->tree->getNodeForPath($destinationDir); + $free = $storage->free_space($destinationParent->getInternalPath()); + $newSize = $tempTargetFile->getSize() + $additionalSize; + if ($free >= 0 && ($tempTargetFile->getSize() > $free || $newSize > $free)) { + throw new InsufficientStorage("Insufficient space in $this->uploadPath"); + } + } + + $stream = $request->getBodyAsStream(); + $storage->putChunkedWritePart($storagePath, $this->uploadId, (string)$partId, $stream, $additionalSize); + + $storage->getCache()->update($uploadFile->getId(), ['size' => $uploadFile->getSize() + $additionalSize]); + if ($tempTargetFile) { + $storage->getPropagator()->propagateChange($tempTargetFile->getInternalPath(), time(), $additionalSize); + } + + $response->setStatus(201); + return false; + } + + public function beforeMove($sourcePath, $destination): bool { + try { + $this->prepareUpload(dirname($sourcePath)); + $this->checkPrerequisites(); + } catch (StorageInvalidException|BadRequest|NotFound|PreconditionFailed $e) { + return true; + } + [$storage, $storagePath] = $this->getUploadStorage($this->uploadPath); + + $targetFile = $this->getUploadFile($this->uploadPath); + + [$destinationDir, $destinationName] = Uri\split($destination); + /** @var Directory $destinationParent */ + $destinationParent = $this->server->tree->getNodeForPath($destinationDir); + $destinationExists = $destinationParent->childExists($destinationName); + + + // allow sync clients to send the modification and creation time along in a header + $updateFileInfo = []; + if ($this->server->httpRequest->getHeader('X-OC-MTime') !== null) { + $updateFileInfo['mtime'] = $this->sanitizeMtime($this->server->httpRequest->getHeader('X-OC-MTime')); + $this->server->httpResponse->setHeader('X-OC-MTime', 'accepted'); + } + if ($this->server->httpRequest->getHeader('X-OC-CTime') !== null) { + $updateFileInfo['creation_time'] = $this->sanitizeMtime($this->server->httpRequest->getHeader('X-OC-CTime')); + $this->server->httpResponse->setHeader('X-OC-CTime', 'accepted'); + } + $updateFileInfo['mimetype'] = \OCP\Server::get(IMimeTypeDetector::class)->detectPath($destinationName); + + if ($storage->instanceOfStorage(ObjectStoreStorage::class) && $storage->getObjectStore() instanceof IObjectStoreMultiPartUpload) { + /** @var ObjectStoreStorage $storage */ + /** @var IObjectStoreMultiPartUpload $objectStore */ + $objectStore = $storage->getObjectStore(); + $parts = $objectStore->getMultipartUploads($storage->getURN($targetFile->getId()), $this->uploadId); + $size = 0; + foreach ($parts as $part) { + $size += $part['Size']; + } + $free = $storage->free_space($destinationParent->getInternalPath()); + if ($free >= 0 && ($size > $free)) { + throw new InsufficientStorage("Insufficient space in $this->uploadPath"); + } + } + + $destinationInView = $destinationParent->getFileInfo()->getPath() . '/' . 
$destinationName; + $this->completeChunkedWrite($destinationInView); + + $rootView = new View(); + $rootView->putFileInfo($destinationInView, $updateFileInfo); + + $sourceNode = $this->server->tree->getNodeForPath($sourcePath); + if ($sourceNode instanceof FutureFile) { + $this->uploadFolder->delete(); + } + + $this->server->emit('afterMove', [$sourcePath, $destination]); + $this->server->emit('afterUnbind', [$sourcePath]); + $this->server->emit('afterBind', [$destination]); + + $response = $this->server->httpResponse; + $response->setHeader('Content-Type', 'application/xml; charset=utf-8'); + $response->setHeader('Content-Length', '0'); + $response->setStatus($destinationExists ? 204 : 201); + return false; + } + + public function beforeDelete(RequestInterface $request, ResponseInterface $response) { + try { + $this->prepareUpload($request->getPath()); + if (!$this->uploadFolder instanceof UploadFolder) { + return true; + } + + [$storage, $storagePath] = $this->getUploadStorage($this->uploadPath); + $storage->cancelChunkedWrite($storagePath, $this->uploadId); + return true; + } catch (NotFound $e) { + return true; + } + } + + /** + * @throws BadRequest + * @throws PreconditionFailed + * @throws StorageInvalidException + */ + private function checkPrerequisites(bool $checkUploadMetadata = true): void { + if (!$this->uploadFolder instanceof UploadFolder || empty($this->server->httpRequest->getHeader(self::DESTINATION_HEADER))) { + throw new BadRequest('Skipping chunked file writing as the destination header was not passed'); + } + if (!$this->uploadFolder->getStorage()->instanceOfStorage(IChunkedFileWrite::class)) { + throw new StorageInvalidException('Storage does not support chunked file writing'); + } + + if ($checkUploadMetadata) { + if ($this->uploadId === null || $this->uploadPath === null) { + throw new PreconditionFailed('Missing metadata for chunked upload'); + } + } + } + + /** + * @return array [IStorage, string] + */ + private function getUploadStorage(string $targetPath): array { + $storage = $this->uploadFolder->getStorage(); + $targetFile = $this->getUploadFile($targetPath); + return [$storage, $targetFile->getInternalPath()]; + } + + protected function sanitizeMtime(string $mtimeFromRequest): int { + if (!is_numeric($mtimeFromRequest)) { + throw new InvalidArgumentException('X-OC-MTime header must be an integer (unix timestamp).'); + } + + return (int)$mtimeFromRequest; + } + + /** + * @throws NotFound + */ + public function prepareUpload($path): void { + $this->uploadFolder = $this->server->tree->getNodeForPath($path); + $uploadMetadata = $this->cache->get($this->uploadFolder->getName()); + $this->uploadId = $uploadMetadata[self::UPLOAD_ID] ?? null; + $this->uploadPath = $uploadMetadata[self::UPLOAD_TARGET_PATH] ?? 
null; + } + + private function completeChunkedWrite(string $targetAbsolutePath): void { + $uploadFile = $this->getUploadFile($this->uploadPath)->getNode(); + [$storage, $storagePath] = $this->getUploadStorage($this->uploadPath); + + $rootFolder = \OCP\Server::get(IRootFolder::class); + $exists = $rootFolder->nodeExists($targetAbsolutePath); + + $uploadFile->lock(ILockingProvider::LOCK_SHARED); + $this->emitPreHooks($targetAbsolutePath, $exists); + try { + $uploadFile->changeLock(ILockingProvider::LOCK_EXCLUSIVE); + $storage->completeChunkedWrite($storagePath, $this->uploadId); + $uploadFile->changeLock(ILockingProvider::LOCK_SHARED); + } catch (Exception $e) { + $uploadFile->unlock(ILockingProvider::LOCK_EXCLUSIVE); + throw $e; + } + + // If the file was not uploaded to the user storage directly we need to copy/move it + try { + $uploadFileAbsolutePath = Filesystem::getRoot() . $uploadFile->getPath(); + if ($uploadFileAbsolutePath !== $targetAbsolutePath) { + $uploadFile = $rootFolder->get($uploadFile->getFileInfo()->getPath()); + if ($exists) { + $uploadFile->copy($targetAbsolutePath); + } else { + $uploadFile->move($targetAbsolutePath); + } + } + $this->emitPostHooks($targetAbsolutePath, $exists); + } catch (Exception $e) { + $uploadFile->unlock(ILockingProvider::LOCK_SHARED); + throw $e; + } + } + + private function emitPreHooks(string $target, bool $exists): void { + $hookPath = $this->getHookPath($target); + if (!$exists) { + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_create, [ + Filesystem::signal_param_path => $hookPath, + ]); + } else { + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_update, [ + Filesystem::signal_param_path => $hookPath, + ]); + } + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_write, [ + Filesystem::signal_param_path => $hookPath, + ]); + } + + private function emitPostHooks(string $target, bool $exists): void { + $hookPath = $this->getHookPath($target); + if (!$exists) { + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_post_create, [ + Filesystem::signal_param_path => $hookPath, + ]); + } else { + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_post_update, [ + Filesystem::signal_param_path => $hookPath, + ]); + } + OC_Hook::emit(Filesystem::CLASSNAME, Filesystem::signal_post_write, [ + Filesystem::signal_param_path => $hookPath, + ]); + } + + private function getHookPath(string $path): ?string { + if (!Filesystem::getView()) { + return $path; + } + return Filesystem::getView()->getRelativePath($path); + } +} diff --git a/apps/dav/lib/Upload/FutureFile.php b/apps/dav/lib/Upload/FutureFile.php index eba550a62da..0b158e364cf 100644 --- a/apps/dav/lib/Upload/FutureFile.php +++ b/apps/dav/lib/Upload/FutureFile.php @@ -36,7 +36,6 @@ use Sabre\DAV\IFile; * @package OCA\DAV\Upload */ class FutureFile implements \Sabre\DAV\IFile { - /** @var Directory */ private $root; /** @var string */ @@ -66,6 +65,10 @@ class FutureFile implements \Sabre\DAV\IFile { return AssemblyStream::wrap($nodes); } + public function getPath() { + return $this->root->getFileInfo()->getInternalPath() . 
'/.file'; + } + /** * @inheritdoc */ diff --git a/apps/dav/lib/Upload/PartFile.php b/apps/dav/lib/Upload/PartFile.php new file mode 100644 index 00000000000..8bfe992a987 --- /dev/null +++ b/apps/dav/lib/Upload/PartFile.php @@ -0,0 +1,111 @@ + + * @author Lukas Reschke + * @author Thomas Müller + * + * @license AGPL-3.0 + * + * This code is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License, version 3, + * as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License, version 3, + * along with this program. If not, see + * + */ +namespace OCA\DAV\Upload; + +use OCA\DAV\Connector\Sabre\Directory; +use Sabre\DAV\Exception\Forbidden; +use Sabre\DAV\IFile; + +/** + * This class represents an Upload part which is not present on the storage itself + * but handled directly by external storage services like S3 with Multipart Upload + */ +class PartFile implements IFile { + /** @var Directory */ + private $root; + /** @var array */ + private $partInfo; + + public function __construct(Directory $root, array $partInfo) { + $this->root = $root; + $this->partInfo = $partInfo; + } + + /** + * @inheritdoc + */ + public function put($data) { + throw new Forbidden('Permission denied to put into this file'); + } + + /** + * @inheritdoc + */ + public function get() { + throw new Forbidden('Permission denied to get this file'); + } + + public function getPath() { + return $this->root->getFileInfo()->getInternalPath() . '/' . 
$this->partInfo['PartNumber']; + } + + /** + * @inheritdoc + */ + public function getContentType() { + return 'application/octet-stream'; + } + + /** + * @inheritdoc + */ + public function getETag() { + return $this->partInfo['ETag']; + } + + /** + * @inheritdoc + */ + public function getSize() { + return $this->partInfo['Size']; + } + + /** + * @inheritdoc + */ + public function delete() { + $this->root->delete(); + } + + /** + * @inheritdoc + */ + public function getName() { + return $this->partInfo['PartNumber']; + } + + /** + * @inheritdoc + */ + public function setName($name) { + throw new Forbidden('Permission denied to rename this file'); + } + + /** + * @inheritdoc + */ + public function getLastModified() { + return $this->partInfo['LastModified']; + } +} diff --git a/apps/dav/lib/Upload/UploadFile.php b/apps/dav/lib/Upload/UploadFile.php index 023d17955c1..efe1385c8ce 100644 --- a/apps/dav/lib/Upload/UploadFile.php +++ b/apps/dav/lib/Upload/UploadFile.php @@ -44,6 +44,10 @@ class UploadFile implements IFile { return $this->file->get(); } + public function getId() { + return $this->file->getId(); + } + public function getContentType() { return $this->file->getContentType(); } @@ -75,4 +79,16 @@ class UploadFile implements IFile { public function getLastModified() { return $this->file->getLastModified(); } + + public function getInternalPath(): string { + return $this->file->getInternalPath(); + } + + public function getFile(): File { + return $this->file; + } + + public function getNode() { + return $this->file->getNode(); + } } diff --git a/apps/dav/lib/Upload/UploadFolder.php b/apps/dav/lib/Upload/UploadFolder.php index bb7c494cee3..66c190d84d9 100644 --- a/apps/dav/lib/Upload/UploadFolder.php +++ b/apps/dav/lib/Upload/UploadFolder.php @@ -24,20 +24,25 @@ */ namespace OCA\DAV\Upload; +use OC\Files\ObjectStore\ObjectStoreStorage; use OCA\DAV\Connector\Sabre\Directory; +use OCP\Files\ObjectStore\IObjectStoreMultiPartUpload; +use OCP\Files\Storage\IStorage; use Sabre\DAV\Exception\Forbidden; use Sabre\DAV\ICollection; class UploadFolder implements ICollection { - /** @var Directory */ private $node; /** @var CleanupService */ private $cleanupService; + /** @var IStorage */ + private $storage; - public function __construct(Directory $node, CleanupService $cleanupService) { + public function __construct(Directory $node, CleanupService $cleanupService, IStorage $storage) { $this->node = $node; $this->cleanupService = $cleanupService; + $this->storage = $storage; } public function createFile($name, $data = null) { @@ -66,6 +71,23 @@ class UploadFolder implements ICollection { $children[] = new UploadFile($child); } + if ($this->storage->instanceOfStorage(ObjectStoreStorage::class)) { + /** @var ObjectStoreStorage $storage */ + $objectStore = $this->storage->getObjectStore(); + if ($objectStore instanceof IObjectStoreMultiPartUpload) { + $cache = \OC::$server->getMemCacheFactory()->createDistributed(ChunkingV2Plugin::CACHE_KEY); + $uploadSession = $cache->get($this->getName()); + if ($uploadSession) { + $uploadId = $uploadSession[ChunkingV2Plugin::UPLOAD_ID]; + $id = $uploadSession[ChunkingV2Plugin::UPLOAD_TARGET_ID]; + $parts = $objectStore->getMultipartUploads($this->storage->getURN($id), $uploadId); + foreach ($parts as $part) { + $children[] = new PartFile($this->node, $part); + } + } + } + } + return $children; } @@ -94,4 +116,8 @@ class UploadFolder implements ICollection { public function getLastModified() { return $this->node->getLastModified(); } + + public function getStorage() 
{ + return $this->storage; + } } diff --git a/apps/dav/lib/Upload/UploadHome.php b/apps/dav/lib/Upload/UploadHome.php index 35d47b6a82a..6664d8c85b6 100644 --- a/apps/dav/lib/Upload/UploadHome.php +++ b/apps/dav/lib/Upload/UploadHome.php @@ -32,7 +32,6 @@ use Sabre\DAV\Exception\Forbidden; use Sabre\DAV\ICollection; class UploadHome implements ICollection { - /** @var array */ private $principalInfo; /** @var CleanupService */ @@ -55,12 +54,12 @@ class UploadHome implements ICollection { } public function getChild($name): UploadFolder { - return new UploadFolder($this->impl()->getChild($name), $this->cleanupService); + return new UploadFolder($this->impl()->getChild($name), $this->cleanupService, $this->getStorage()); } public function getChildren(): array { return array_map(function ($node) { - return new UploadFolder($node, $this->cleanupService); + return new UploadFolder($node, $this->cleanupService, $this->getStorage()); }, $this->impl()->getChildren()); } @@ -89,14 +88,24 @@ class UploadHome implements ICollection { * @return Directory */ private function impl() { + $view = $this->getView(); + $rootInfo = $view->getFileInfo(''); + return new Directory($view, $rootInfo); + } + + private function getView() { $rootView = new View(); $user = \OC::$server->getUserSession()->getUser(); Filesystem::initMountPoints($user->getUID()); if (!$rootView->file_exists('/' . $user->getUID() . '/uploads')) { $rootView->mkdir('/' . $user->getUID() . '/uploads'); } - $view = new View('/' . $user->getUID() . '/uploads'); - $rootInfo = $view->getFileInfo(''); - return new Directory($view, $rootInfo); + return new View('/' . $user->getUID() . '/uploads'); + } + + private function getStorage() { + $view = $this->getView(); + $storage = $view->getFileInfo('')->getStorage(); + return $storage; } } diff --git a/apps/files/js/file-upload.js b/apps/files/js/file-upload.js index 7d6bde6e0f9..f3a39e5861a 100644 --- a/apps/files/js/file-upload.js +++ b/apps/files/js/file-upload.js @@ -269,8 +269,12 @@ OC.FileUpload.prototype = { && this.getFile().size > this.uploader.fileUploadParam.maxChunkSize ) { data.isChunked = true; + var headers = { + Destination: this.uploader.davClient._buildUrl(this.getTargetDestination()) + }; + chunkFolderPromise = this.uploader.davClient.createDirectory( - 'uploads/' + OC.getCurrentUser().uid + '/' + this.getId() + 'uploads/' + OC.getCurrentUser().uid + '/' + this.getId(), headers ); // TODO: if fails, it means same id already existed, need to retry } else { @@ -309,17 +313,22 @@ OC.FileUpload.prototype = { } if (size) { headers['OC-Total-Length'] = size; - } + headers['Destination'] = this.uploader.davClient._buildUrl(this.getTargetDestination()); return this.uploader.davClient.move( 'uploads/' + uid + '/' + this.getId() + '/.file', - 'files/' + uid + '/' + OC.joinPaths(this.getFullPath(), this.getFileName()), + this.getTargetDestination(), true, headers ); }, + getTargetDestination: function() { + var uid = OC.getCurrentUser().uid; + return 'files/' + uid + '/' + OC.joinPaths(this.getFullPath(), this.getFileName()); + }, + _deleteChunkFolder: function() { // delete transfer directory for this upload this.uploader.davClient.remove( @@ -1326,6 +1335,10 @@ OC.Uploader.prototype = _.extend({ } var range = data.contentRange.split(' ')[1]; var chunkId = range.split('/')[0].split('-')[0]; + // Use a numeric chunk id and set the Destination header on all request for ChunkingV2 + chunkId = Math.ceil((data.chunkSize+Number(chunkId)) / upload.uploader.fileUploadParam.maxChunkSize); + 
data.headers['Destination'] = self.davClient._buildUrl(upload.getTargetDestination()); + data.url = OC.getRootPath() + '/remote.php/dav/uploads' + '/' + OC.getCurrentUser().uid + diff --git a/apps/files/js/jquery.fileupload.js b/apps/files/js/jquery.fileupload.js index 9b382ccae39..da516b15e1c 100644 --- a/apps/files/js/jquery.fileupload.js +++ b/apps/files/js/jquery.fileupload.js @@ -733,6 +733,12 @@ promise = dfd.promise(), jqXHR, upload; + + // Dynamically adjust the chunk size for Chunking V2 to fit into the 10000 chunk limit + if (file.size/mcs > 10000) { + mcs = Math.ceil(file.size/10000) + } + if (!(this._isXHRUpload(options) && slice && (ub || mcs < fs)) || options.data) { return false; diff --git a/build/integration/features/bootstrap/BasicStructure.php b/build/integration/features/bootstrap/BasicStructure.php index 9060c85c756..e12a40ac6b4 100644 --- a/build/integration/features/bootstrap/BasicStructure.php +++ b/build/integration/features/bootstrap/BasicStructure.php @@ -179,7 +179,7 @@ trait BasicStructure { $options['auth'] = [$this->currentUser, $this->regularUser]; } $options['headers'] = [ - 'OCS_APIREQUEST' => 'true' + 'OCS-APIRequest' => 'true' ]; if ($body instanceof TableNode) { $fd = $body->getRowsHash(); @@ -306,7 +306,7 @@ trait BasicStructure { * @param string $user */ public function loggingInUsingWebAs($user) { - $loginUrl = substr($this->baseUrl, 0, -5) . '/login'; + $loginUrl = substr($this->baseUrl, 0, -5) . '/index.php/login'; // Request a new session and extract CSRF token $client = new Client(); $response = $client->get( diff --git a/build/integration/features/bootstrap/WebDav.php b/build/integration/features/bootstrap/WebDav.php index 680db01a260..00ba5c28862 100644 --- a/build/integration/features/bootstrap/WebDav.php +++ b/build/integration/features/bootstrap/WebDav.php @@ -54,6 +54,9 @@ trait WebDav { /** @var int */ private $storedFileID = null; + private string $s3MultipartDestination; + private string $uploadId; + /** * @Given /^using dav path "([^"]*)"$/ */ @@ -751,6 +754,7 @@ trait WebDav { * @Given user :user creates a new chunking upload with id :id */ public function userCreatesANewChunkingUploadWithId($user, $id) { + $this->parts = []; $destination = '/uploads/' . $user . '/' . $id; $this->makeDavRequest($user, 'MKCOL', $destination, [], null, "uploads"); } @@ -792,6 +796,60 @@ trait WebDav { } } + + /** + * @Given user :user creates a new chunking v2 upload with id :id and destination :targetDestination + */ + public function userCreatesANewChunkingv2UploadWithIdAndDestination($user, $id, $targetDestination) { + $this->s3MultipartDestination = $this->getTargetDestination($user, $targetDestination); + $this->newUploadId(); + $destination = '/uploads/' . $user . '/' . $this->getUploadId($id); + $this->response = $this->makeDavRequest($user, 'MKCOL', $destination, [ + 'Destination' => $this->s3MultipartDestination, + ], null, "uploads"); + } + + /** + * @Given user :user uploads new chunk v2 file :num to id :id + */ + public function userUploadsNewChunkv2FileToIdAndDestination($user, $num, $id) { + $data = \GuzzleHttp\Psr7\Utils::streamFor(fopen('/tmp/part-upload-' . $num, 'r')); + $destination = '/uploads/' . $user . '/' . $this->getUploadId($id) . '/' . 
$num; + $this->response = $this->makeDavRequest($user, 'PUT', $destination, [ + 'Destination' => $this->s3MultipartDestination + ], $data, "uploads"); + } + + /** + * @Given user :user moves new chunk v2 file with id :id + */ + public function userMovesNewChunkv2FileWithIdToMychunkedfileAndDestination($user, $id) { + $source = '/uploads/' . $user . '/' . $this->getUploadId($id) . '/.file'; + try { + $this->response = $this->makeDavRequest($user, 'MOVE', $source, [ + 'Destination' => $this->s3MultipartDestination, + ], null, "uploads"); + } catch (\GuzzleHttp\Exception\ServerException $e) { + // 5xx responses cause a server exception + $this->response = $e->getResponse(); + } catch (\GuzzleHttp\Exception\ClientException $e) { + // 4xx responses cause a client exception + $this->response = $e->getResponse(); + } + } + + private function getTargetDestination(string $user, string $destination): string { + return substr($this->baseUrl, 0, -4) . $this->getDavFilesPath($user) . $destination; + } + + private function getUploadId(string $id): string { + return $id . '-' . $this->uploadId; + } + + private function newUploadId() { + $this->uploadId = (string)time(); + } + /** * @Given /^Downloading file "([^"]*)" as "([^"]*)"$/ */ @@ -980,4 +1038,44 @@ trait WebDav { $currentFileID = $this->getFileIdForPath($user, $path); Assert::assertEquals($currentFileID, $this->storedFileID); } + + /** + * @Given /^user "([^"]*)" creates a file locally with "([^"]*)" x 5 MB chunks$/ + */ + public function userCreatesAFileLocallyWithChunks($arg1, $chunks) { + $this->parts = []; + for ($i = 1;$i <= (int)$chunks;$i++) { + $randomletter = substr(str_shuffle("abcdefghijklmnopqrstuvwxyz"), 0, 1); + file_put_contents('/tmp/part-upload-' . $i, str_repeat($randomletter, 5 * 1024 * 1024)); + $this->parts[] = '/tmp/part-upload-' . $i; + } + } + + /** + * @Given user :user creates the chunk :id with a size of :size MB + */ + public function userCreatesAChunk($user, $id, $size) { + $randomletter = substr(str_shuffle("abcdefghijklmnopqrstuvwxyz"), 0, 1); + file_put_contents('/tmp/part-upload-' . $id, str_repeat($randomletter, (int)$size * 1024 * 1024)); + $this->parts[] = '/tmp/part-upload-' . 
$id; + } + + /** + * @Then /^Downloaded content should be the created file$/ + */ + public function downloadedContentShouldBeTheCreatedFile() { + $content = ''; + sort($this->parts); + foreach ($this->parts as $part) { + $content .= file_get_contents($part); + } + Assert::assertEquals($content, (string)$this->response->getBody()); + } + + /** + * @Then /^the S3 multipart upload was successful with status "([^"]*)"$/ + */ + public function theSmultipartUploadWasSuccessful($status) { + Assert::assertEquals((int)$status, $this->response->getStatusCode()); + } } diff --git a/build/integration/features/webdav-related.feature b/build/integration/features/webdav-related.feature index 21e195af115..f63ee24527f 100644 --- a/build/integration/features/webdav-related.feature +++ b/build/integration/features/webdav-related.feature @@ -191,10 +191,10 @@ Feature: webdav-related And As an "user1" And user "user1" created a folder "/testquota" And as "user1" creating a share with - | path | testquota | - | shareType | 0 | - | permissions | 31 | - | shareWith | user0 | + | path | testquota | + | shareType | 0 | + | permissions | 31 | + | shareWith | user0 | And user "user0" accepts last share And As an "user0" When User "user0" uploads file "data/textfile.txt" to "/testquota/asdf.txt" @@ -630,3 +630,99 @@ Feature: webdav-related And As an "user1" And user "user1" created a folder "/testshare " Then the HTTP status code should be "400" + + @s3-multipart + Scenario: Upload chunked file asc with new chunking v2 + Given using new dav path + And user "user0" exists + And user "user0" creates a file locally with "3" x 5 MB chunks + And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/myChunkedFile1.txt" + And user "user0" uploads new chunk v2 file "1" to id "chunking-42" + And user "user0" uploads new chunk v2 file "2" to id "chunking-42" + And user "user0" uploads new chunk v2 file "3" to id "chunking-42" + And user "user0" moves new chunk v2 file with id "chunking-42" + Then the S3 multipart upload was successful with status "201" + When As an "user0" + And Downloading file "/myChunkedFile1.txt" + Then Downloaded content should be the created file + + @s3-multipart + Scenario: Upload chunked file desc with new chunking v2 + Given using new dav path + And user "user0" exists + And user "user0" creates a file locally with "3" x 5 MB chunks + And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/myChunkedFile.txt" + And user "user0" uploads new chunk v2 file "3" to id "chunking-42" + And user "user0" uploads new chunk v2 file "2" to id "chunking-42" + And user "user0" uploads new chunk v2 file "1" to id "chunking-42" + And user "user0" moves new chunk v2 file with id "chunking-42" + Then the S3 multipart upload was successful with status "201" + When As an "user0" + And Downloading file "/myChunkedFile.txt" + Then Downloaded content should be the created file + + @s3-multipart + Scenario: Upload chunked file with random chunk sizes + Given using new dav path + And user "user0" exists + And user "user0" creates a new chunking v2 upload with id "chunking-random" and destination "/myChunkedFile.txt" + And user user0 creates the chunk 1 with a size of 5 MB + And user user0 creates the chunk 2 with a size of 7 MB + And user user0 creates the chunk 3 with a size of 9 MB + And user user0 creates the chunk 4 with a size of 1 MB + And user "user0" uploads new chunk v2 file "1" to id "chunking-random" + And user "user0" uploads new chunk v2 file "3" to 
id "chunking-random" + And user "user0" uploads new chunk v2 file "2" to id "chunking-random" + And user "user0" uploads new chunk v2 file "4" to id "chunking-random" + And user "user0" moves new chunk v2 file with id "chunking-random" + Then the S3 multipart upload was successful with status "201" + When As an "user0" + And Downloading file "/myChunkedFile.txt" + Then Downloaded content should be the created file + + @s3-multipart + Scenario: Upload chunked file with too low chunk sizes + Given using new dav path + And user "user0" exists + And user "user0" creates a new chunking v2 upload with id "chunking-random" and destination "/myChunkedFile.txt" + And user user0 creates the chunk 1 with a size of 5 MB + And user user0 creates the chunk 2 with a size of 2 MB + And user user0 creates the chunk 3 with a size of 5 MB + And user user0 creates the chunk 4 with a size of 1 MB + And user "user0" uploads new chunk v2 file "1" to id "chunking-random" + And user "user0" uploads new chunk v2 file "3" to id "chunking-random" + And user "user0" uploads new chunk v2 file "2" to id "chunking-random" + And user "user0" uploads new chunk v2 file "4" to id "chunking-random" + And user "user0" moves new chunk v2 file with id "chunking-random" + Then the HTTP status code should be "500" + + @s3-multipart + Scenario: Upload chunked file with special characters with new chunking v2 + Given using new dav path + And user "user0" exists + And user "user0" creates a file locally with "3" x 5 MB chunks + And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/äöü.txt" + And user "user0" uploads new chunk v2 file "1" to id "chunking-42" + And user "user0" uploads new chunk v2 file "2" to id "chunking-42" + And user "user0" uploads new chunk v2 file "3" to id "chunking-42" + And user "user0" moves new chunk v2 file with id "chunking-42" + Then the S3 multipart upload was successful with status "201" + When As an "user0" + And Downloading file "/äöü.txt" + Then Downloaded content should be the created file + + @s3-multipart + Scenario: Upload chunked file with special characters in path with new chunking v2 + Given using new dav path + And user "user0" exists + And User "user0" created a folder "üäöé" + And user "user0" creates a file locally with "3" x 5 MB chunks + And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/üäöé/äöü.txt" + And user "user0" uploads new chunk v2 file "1" to id "chunking-42" + And user "user0" uploads new chunk v2 file "2" to id "chunking-42" + And user "user0" uploads new chunk v2 file "3" to id "chunking-42" + And user "user0" moves new chunk v2 file with id "chunking-42" + Then the S3 multipart upload was successful with status "201" + When As an "user0" + And Downloading file "/üäöé/äöü.txt" + Then Downloaded content should be the created file diff --git a/core/src/files/client.js b/core/src/files/client.js index 2c71fbe46e1..9d32fefdfc4 100644 --- a/core/src/files/client.js +++ b/core/src/files/client.js @@ -758,7 +758,7 @@ import escapeHTML from 'escape-html' return promise }, - _simpleCall: function(method, path) { + _simpleCall: function(method, path, headers) { if (!path) { throw 'Missing argument "path"' } @@ -769,7 +769,8 @@ import escapeHTML from 'escape-html' this._client.request( method, - this._buildUrl(path) + this._buildUrl(path), + headers ? 
headers : {} ).then( function(result) { if (self._isSuccessStatus(result.status)) { @@ -790,8 +791,8 @@ import escapeHTML from 'escape-html' * * @returns {Promise} */ - createDirectory: function(path) { - return this._simpleCall('MKCOL', path) + createDirectory: function(path, headers) { + return this._simpleCall('MKCOL', path, headers) }, /** diff --git a/lib/composer/composer/autoload_classmap.php b/lib/composer/composer/autoload_classmap.php index 57a828dbefb..ee6117f9b73 100644 --- a/lib/composer/composer/autoload_classmap.php +++ b/lib/composer/composer/autoload_classmap.php @@ -331,6 +331,7 @@ return array( 'OCP\\Files\\Notify\\INotifyHandler' => $baseDir . '/lib/public/Files/Notify/INotifyHandler.php', 'OCP\\Files\\Notify\\IRenameChange' => $baseDir . '/lib/public/Files/Notify/IRenameChange.php', 'OCP\\Files\\ObjectStore\\IObjectStore' => $baseDir . '/lib/public/Files/ObjectStore/IObjectStore.php', + 'OCP\\Files\\ObjectStore\\IObjectStoreMultiPartUpload' => $baseDir . '/lib/public/Files/ObjectStore/IObjectStoreMultiPartUpload.php', 'OCP\\Files\\ReservedWordException' => $baseDir . '/lib/public/Files/ReservedWordException.php', 'OCP\\Files\\Search\\ISearchBinaryOperator' => $baseDir . '/lib/public/Files/Search/ISearchBinaryOperator.php', 'OCP\\Files\\Search\\ISearchComparison' => $baseDir . '/lib/public/Files/Search/ISearchComparison.php', @@ -348,6 +349,7 @@ return array( 'OCP\\Files\\StorageInvalidException' => $baseDir . '/lib/public/Files/StorageInvalidException.php', 'OCP\\Files\\StorageNotAvailableException' => $baseDir . '/lib/public/Files/StorageNotAvailableException.php', 'OCP\\Files\\StorageTimeoutException' => $baseDir . '/lib/public/Files/StorageTimeoutException.php', + 'OCP\\Files\\Storage\\IChunkedFileWrite' => $baseDir . '/lib/public/Files/Storage/IChunkedFileWrite.php', 'OCP\\Files\\Storage\\IDisableEncryptionStorage' => $baseDir . '/lib/public/Files/Storage/IDisableEncryptionStorage.php', 'OCP\\Files\\Storage\\ILockingStorage' => $baseDir . '/lib/public/Files/Storage/ILockingStorage.php', 'OCP\\Files\\Storage\\INotifyStorage' => $baseDir . '/lib/public/Files/Storage/INotifyStorage.php', diff --git a/lib/composer/composer/autoload_static.php b/lib/composer/composer/autoload_static.php index e9d1ba50024..253b9ddbada 100644 --- a/lib/composer/composer/autoload_static.php +++ b/lib/composer/composer/autoload_static.php @@ -364,6 +364,7 @@ class ComposerStaticInit749170dad3f5e7f9ca158f5a9f04f6a2 'OCP\\Files\\Notify\\INotifyHandler' => __DIR__ . '/../../..' . '/lib/public/Files/Notify/INotifyHandler.php', 'OCP\\Files\\Notify\\IRenameChange' => __DIR__ . '/../../..' . '/lib/public/Files/Notify/IRenameChange.php', 'OCP\\Files\\ObjectStore\\IObjectStore' => __DIR__ . '/../../..' . '/lib/public/Files/ObjectStore/IObjectStore.php', + 'OCP\\Files\\ObjectStore\\IObjectStoreMultiPartUpload' => __DIR__ . '/../../..' . '/lib/public/Files/ObjectStore/IObjectStoreMultiPartUpload.php', 'OCP\\Files\\ReservedWordException' => __DIR__ . '/../../..' . '/lib/public/Files/ReservedWordException.php', 'OCP\\Files\\Search\\ISearchBinaryOperator' => __DIR__ . '/../../..' . '/lib/public/Files/Search/ISearchBinaryOperator.php', 'OCP\\Files\\Search\\ISearchComparison' => __DIR__ . '/../../..' . '/lib/public/Files/Search/ISearchComparison.php', @@ -381,6 +382,7 @@ class ComposerStaticInit749170dad3f5e7f9ca158f5a9f04f6a2 'OCP\\Files\\StorageInvalidException' => __DIR__ . '/../../..' . '/lib/public/Files/StorageInvalidException.php', 'OCP\\Files\\StorageNotAvailableException' => __DIR__ . '/../../..' 
. '/lib/public/Files/StorageNotAvailableException.php', 'OCP\\Files\\StorageTimeoutException' => __DIR__ . '/../../..' . '/lib/public/Files/StorageTimeoutException.php', + 'OCP\\Files\\Storage\\IChunkedFileWrite' => __DIR__ . '/../../..' . '/lib/public/Files/Storage/IChunkedFileWrite.php', 'OCP\\Files\\Storage\\IDisableEncryptionStorage' => __DIR__ . '/../../..' . '/lib/public/Files/Storage/IDisableEncryptionStorage.php', 'OCP\\Files\\Storage\\ILockingStorage' => __DIR__ . '/../../..' . '/lib/public/Files/Storage/ILockingStorage.php', 'OCP\\Files\\Storage\\INotifyStorage' => __DIR__ . '/../../..' . '/lib/public/Files/Storage/INotifyStorage.php', diff --git a/lib/private/Files/ObjectStore/ObjectStoreStorage.php b/lib/private/Files/ObjectStore/ObjectStoreStorage.php index d0c5bd14b38..4ca00cf6a16 100644 --- a/lib/private/Files/ObjectStore/ObjectStoreStorage.php +++ b/lib/private/Files/ObjectStore/ObjectStoreStorage.php @@ -29,6 +29,8 @@ */ namespace OC\Files\ObjectStore; +use Aws\S3\Exception\S3Exception; +use Aws\S3\Exception\S3MultipartUploadException; use Icewind\Streams\CallbackWrapper; use Icewind\Streams\CountWrapper; use Icewind\Streams\IteratorDirectory; @@ -37,11 +39,14 @@ use OC\Files\Cache\CacheEntry; use OC\Files\Storage\PolyFill\CopyDirectory; use OCP\Files\Cache\ICacheEntry; use OCP\Files\FileInfo; +use OCP\Files\GenericFileException; use OCP\Files\NotFoundException; use OCP\Files\ObjectStore\IObjectStore; +use OCP\Files\ObjectStore\IObjectStoreMultiPartUpload; +use OCP\Files\Storage\IChunkedFileWrite; use OCP\Files\Storage\IStorage; -class ObjectStoreStorage extends \OC\Files\Storage\Common { +class ObjectStoreStorage extends \OC\Files\Storage\Common implements IChunkedFileWrite { use CopyDirectory; /** @@ -91,7 +96,6 @@ class ObjectStoreStorage extends \OC\Files\Storage\Common { public function mkdir($path) { $path = $this->normalizePath($path); - if ($this->file_exists($path)) { return false; } @@ -627,4 +631,72 @@ class ObjectStoreStorage extends \OC\Files\Storage\Common { throw $e; } } + + public function startChunkedWrite(string $targetPath): string { + if (!$this->objectStore instanceof IObjectStoreMultiPartUpload) { + throw new GenericFileException('Object store does not support multipart upload'); + } + $cacheEntry = $this->getCache()->get($targetPath); + $urn = $this->getURN($cacheEntry->getId()); + return $this->objectStore->initiateMultipartUpload($urn); + } + + /** + * + * @throws GenericFileException + */ + public function putChunkedWritePart(string $targetPath, string $writeToken, string $chunkId, $data, $size = null): ?array { + if (!$this->objectStore instanceof IObjectStoreMultiPartUpload) { + throw new GenericFileException('Object store does not support multipart upload'); + } + $cacheEntry = $this->getCache()->get($targetPath); + $urn = $this->getURN($cacheEntry->getId()); + + $result = $this->objectStore->uploadMultipartPart($urn, $writeToken, (int)$chunkId, $data, $size); + + $parts[$chunkId] = [ + 'PartNumber' => $chunkId, + 'ETag' => trim($result->get('ETag'), '"') + ]; + return $parts[$chunkId]; + } + + public function completeChunkedWrite(string $targetPath, string $writeToken): int { + if (!$this->objectStore instanceof IObjectStoreMultiPartUpload) { + throw new GenericFileException('Object store does not support multipart upload'); + } + $cacheEntry = $this->getCache()->get($targetPath); + $urn = $this->getURN($cacheEntry->getId()); + $parts = $this->objectStore->getMultipartUploads($urn, $writeToken); + $sortedParts = array_values($parts); + 
sort($sortedParts); + try { + $size = $this->objectStore->completeMultipartUpload($urn, $writeToken, $sortedParts); + $stat = $this->stat($targetPath); + $mtime = time(); + if (is_array($stat)) { + $stat['size'] = $size; + $stat['mtime'] = $mtime; + $stat['mimetype'] = $this->getMimeType($targetPath); + $this->getCache()->update($stat['fileid'], $stat); + } + } catch (S3MultipartUploadException | S3Exception $e) { + $this->objectStore->abortMultipartUpload($urn, $writeToken); + $this->logger->logException($e, [ + 'app' => 'objectstore', + 'message' => 'Could not complete multipart upload ' . $urn . ' with uploadId ' . $writeToken + ]); + throw new GenericFileException('Could not write chunked file'); + } + return $size; + } + + public function cancelChunkedWrite(string $targetPath, string $writeToken): void { + if (!$this->objectStore instanceof IObjectStoreMultiPartUpload) { + throw new GenericFileException('Object store does not support multipart upload'); + } + $cacheEntry = $this->getCache()->get($targetPath); + $urn = $this->getURN($cacheEntry->getId()); + $this->objectStore->abortMultipartUpload($urn, $writeToken); + } } diff --git a/lib/private/Files/ObjectStore/S3.php b/lib/private/Files/ObjectStore/S3.php index 6492145fb63..ebc8886f12d 100644 --- a/lib/private/Files/ObjectStore/S3.php +++ b/lib/private/Files/ObjectStore/S3.php @@ -23,9 +23,12 @@ */ namespace OC\Files\ObjectStore; +use Aws\Result; +use Exception; use OCP\Files\ObjectStore\IObjectStore; +use OCP\Files\ObjectStore\IObjectStoreMultiPartUpload; -class S3 implements IObjectStore { +class S3 implements IObjectStore, IObjectStoreMultiPartUpload { use S3ConnectionTrait; use S3ObjectTrait; @@ -41,4 +44,59 @@ class S3 implements IObjectStore { public function getStorageId() { return $this->id; } + + public function initiateMultipartUpload(string $urn): string { + $upload = $this->getConnection()->createMultipartUpload([ + 'Bucket' => $this->bucket, + 'Key' => $urn, + ]); + $uploadId = $upload->get('UploadId'); + if ($uploadId === null) { + throw new Exception('No upload id returned'); + } + return (string)$uploadId; + } + + public function uploadMultipartPart(string $urn, string $uploadId, int $partId, $stream, $size): Result { + return $this->getConnection()->uploadPart([ + 'Body' => $stream, + 'Bucket' => $this->bucket, + 'Key' => $urn, + 'ContentLength' => $size, + 'PartNumber' => $partId, + 'UploadId' => $uploadId, + ]); + } + + public function getMultipartUploads(string $urn, string $uploadId): array { + $parts = $this->getConnection()->listParts([ + 'Bucket' => $this->bucket, + 'Key' => $urn, + 'UploadId' => $uploadId, + 'MaxParts' => 10000 + ]); + return $parts->get('Parts') ?? 
[]; + } + + public function completeMultipartUpload(string $urn, string $uploadId, array $result): int { + $this->getConnection()->completeMultipartUpload([ + 'Bucket' => $this->bucket, + 'Key' => $urn, + 'UploadId' => $uploadId, + 'MultipartUpload' => ['Parts' => $result], + ]); + $stat = $this->getConnection()->headObject([ + 'Bucket' => $this->bucket, + 'Key' => $urn, + ]); + return (int)$stat->get('ContentLength'); + } + + public function abortMultipartUpload($urn, $uploadId): void { + $this->getConnection()->abortMultipartUpload([ + 'Bucket' => $this->bucket, + 'Key' => $urn, + 'UploadId' => $uploadId + ]); + } } diff --git a/lib/public/Files/ObjectStore/IObjectStoreMultiPartUpload.php b/lib/public/Files/ObjectStore/IObjectStoreMultiPartUpload.php new file mode 100644 index 00000000000..f46982f3112 --- /dev/null +++ b/lib/public/Files/ObjectStore/IObjectStoreMultiPartUpload.php @@ -0,0 +1,59 @@ + + * + * @author Julius Härtl + * + * @license GNU AGPL version 3 or any later version + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + * + */ + +declare(strict_types=1); + + +namespace OCP\Files\ObjectStore; + +use Aws\Result; + +/** + * @since 26.0.0 + */ +interface IObjectStoreMultiPartUpload { + /** + * @since 26.0.0 + */ + public function initiateMultipartUpload(string $urn): string; + + /** + * @since 26.0.0 + */ + public function uploadMultipartPart(string $urn, string $uploadId, int $partId, $stream, $size): Result; + + /** + * @since 26.0.0 + */ + public function completeMultipartUpload(string $urn, string $uploadId, array $result): int; + + /** + * @since 26.0.0 + */ + public function abortMultipartUpload(string $urn, string $uploadId): void; + + /** + * @since 26.0.0 + */ + public function getMultipartUploads(string $urn, string $uploadId): array; +} diff --git a/lib/public/Files/Storage/IChunkedFileWrite.php b/lib/public/Files/Storage/IChunkedFileWrite.php new file mode 100644 index 00000000000..01f5cbbb20a --- /dev/null +++ b/lib/public/Files/Storage/IChunkedFileWrite.php @@ -0,0 +1,70 @@ + + * + * @author Julius Härtl + * + * @license GNU AGPL version 3 or any later version + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as + * published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ * + */ + +declare(strict_types=1); + + +namespace OCP\Files\Storage; + +use OCP\Files\GenericFileException; + +/** + * @since 26.0.0 + */ +interface IChunkedFileWrite extends IStorage { + /** + * @param string $targetPath Relative target path in the storage + * @return string writeToken to be used with the other methods to uniquely identify the file write operation + * @throws GenericFileException + * @since 26.0.0 + */ + public function startChunkedWrite(string $targetPath): string; + + /** + * @param string $targetPath + * @param string $writeToken + * @param string $chunkId + * @param resource $data + * @param int|null $size + * @throws GenericFileException + * @since 26.0.0 + */ + public function putChunkedWritePart(string $targetPath, string $writeToken, string $chunkId, $data, int $size = null): ?array; + + /** + * @param string $targetPath + * @param string $writeToken + * @return int + * @throws GenericFileException + * @since 26.0.0 + */ + public function completeChunkedWrite(string $targetPath, string $writeToken): int; + + /** + * @param string $targetPath + * @param string $writeToken + * @throws GenericFileException + * @since 26.0.0 + */ + public function cancelChunkedWrite(string $targetPath, string $writeToken): void; +}
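Reviewer note: the new IChunkedFileWrite contract introduced above is driven in three phases (start, upload parts, complete, or cancel on failure), which is how ChunkingV2Plugin uses it across afterMkcol, beforePut and beforeMove with the write token persisted in the distributed cache between requests. The snippet below is only an illustrative sketch of that call sequence against a storage implementing the interface; the function name writeChunked and the $partStreams input are hypothetical and not part of this change.

<?php

// Illustrative sketch only (not part of the patch): drive a chunked write
// against a storage implementing IChunkedFileWrite, e.g. ObjectStoreStorage
// backed by an S3 object store. $storage, $targetPath and $partStreams are
// assumed to be supplied by the caller.

use OCP\Files\GenericFileException;
use OCP\Files\Storage\IChunkedFileWrite;

function writeChunked(IChunkedFileWrite $storage, string $targetPath, array $partStreams): int {
	// Phase 1: start the chunked write; for ObjectStoreStorage this initiates
	// an S3 multipart upload and returns its UploadId as the write token.
	$writeToken = $storage->startChunkedWrite($targetPath);

	try {
		// Phase 2: upload each part; chunk ids must be numeric, between 1 and 10000.
		foreach ($partStreams as $partId => $stream) {
			$storage->putChunkedWritePart($targetPath, $writeToken, (string)$partId, $stream);
		}

		// Phase 3: assemble all parts into the target file and return its final size.
		return $storage->completeChunkedWrite($targetPath, $writeToken);
	} catch (GenericFileException $e) {
		// On failure, abort the multipart upload so no orphaned parts remain
		// on the object store, then rethrow for the caller to handle.
		$storage->cancelChunkedWrite($targetPath, $writeToken);
		throw $e;
	}
}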