Type hints for multifilesystem

parent cecb17df03
commit 698ae875ce
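This commit adds PEP 484 type hints to the multifilesystem storage backend. The *Mixin classes become *Part classes, and a new module base.py introduces CollectionBase and StorageBase so that shared, annotated attributes (_storage, _path, _encoding, _filesystem_path) are declared once and every part inherits them. A minimal sketch of the pattern, with illustrative names that are not part of the diff:

from typing import Optional


class Base:
    # Shared state is declared once, with annotations, on the base class.
    _path: str
    _cache: Optional[str]

    def __init__(self, path: str) -> None:
        self._path = path
        self._cache = None


class PartA(Base):
    # A "part" subclasses Base instead of being a bare mixin, so a type
    # checker knows that self._path exists and is a str.
    def describe(self) -> str:
        return "collection at %r" % self._path


class Final(PartA, Base):
    # The concrete class combines the parts; cooperative __init__ calls
    # chain through the MRO and end at Base.
    pass


print(Final("/calendars/user").describe())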
radicale/storage/multifilesystem/__init__.py
@@ -23,75 +23,57 @@ Uses one folder per collection and one file per collection entry.
 """
 
-import contextlib
 import os
 import time
-from itertools import chain
-from tempfile import TemporaryDirectory
+from typing import Iterator, Optional
 
-from radicale import pathutils, storage
-from radicale.storage.multifilesystem.cache import CollectionCacheMixin
+from radicale import config
+from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
 from radicale.storage.multifilesystem.create_collection import \
-    StorageCreateCollectionMixin
-from radicale.storage.multifilesystem.delete import CollectionDeleteMixin
-from radicale.storage.multifilesystem.discover import StorageDiscoverMixin
-from radicale.storage.multifilesystem.get import CollectionGetMixin
-from radicale.storage.multifilesystem.history import CollectionHistoryMixin
-from radicale.storage.multifilesystem.lock import (CollectionLockMixin,
-                                                   StorageLockMixin)
-from radicale.storage.multifilesystem.meta import CollectionMetaMixin
-from radicale.storage.multifilesystem.move import StorageMoveMixin
-from radicale.storage.multifilesystem.sync import CollectionSyncMixin
-from radicale.storage.multifilesystem.upload import CollectionUploadMixin
-from radicale.storage.multifilesystem.verify import StorageVerifyMixin
+    StoragePartCreateCollection
+from radicale.storage.multifilesystem.delete import CollectionPartDelete
+from radicale.storage.multifilesystem.discover import StoragePartDiscover
+from radicale.storage.multifilesystem.get import CollectionPartGet
+from radicale.storage.multifilesystem.history import CollectionPartHistory
+from radicale.storage.multifilesystem.lock import (CollectionPartLock,
+                                                   StoragePartLock)
+from radicale.storage.multifilesystem.meta import CollectionPartMeta
+from radicale.storage.multifilesystem.move import StoragePartMove
+from radicale.storage.multifilesystem.sync import CollectionPartSync
+from radicale.storage.multifilesystem.upload import CollectionPartUpload
+from radicale.storage.multifilesystem.verify import StoragePartVerify
 
 
 class Collection(
-        CollectionCacheMixin, CollectionDeleteMixin, CollectionGetMixin,
-        CollectionHistoryMixin, CollectionLockMixin, CollectionMetaMixin,
-        CollectionSyncMixin, CollectionUploadMixin, storage.BaseCollection):
+        CollectionPartDelete, CollectionPartMeta, CollectionPartSync,
+        CollectionPartUpload, CollectionPartGet, CollectionPartCache,
+        CollectionPartLock, CollectionPartHistory, CollectionBase):
 
-    def __init__(self, storage_, path, filesystem_path=None):
-        self._storage = storage_
-        folder = self._storage._get_collection_root_folder()
-        # Path should already be sanitized
-        self._path = pathutils.strip_path(path)
-        self._encoding = self._storage.configuration.get("encoding", "stock")
-        if filesystem_path is None:
-            filesystem_path = pathutils.path_to_filesystem(folder, self.path)
-        self._filesystem_path = filesystem_path
+    _etag_cache: Optional[str]
+
+    def __init__(self, storage_: "Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._etag_cache = None
-        super().__init__()
 
     @property
-    def path(self):
+    def path(self) -> str:
         return self._path
 
-    @contextlib.contextmanager
-    def _atomic_write(self, path, mode="w", newline=None):
-        parent_dir, name = os.path.split(path)
-        # Do not use mkstemp because it creates with permissions 0o600
-        with TemporaryDirectory(
-                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
-            with open(os.path.join(tmp_dir, name), mode, newline=newline,
-                      encoding=None if "b" in mode else self._encoding) as tmp:
-                yield tmp
-                tmp.flush()
-                self._storage._fsync(tmp)
-            os.replace(os.path.join(tmp_dir, name), path)
-            self._storage._sync_directory(parent_dir)
-
     @property
-    def last_modified(self):
-        relevant_files = chain(
-            (self._filesystem_path,),
-            (self._props_path,) if os.path.exists(self._props_path) else (),
-            (os.path.join(self._filesystem_path, h) for h in self._list()))
-        last = max(map(os.path.getmtime, relevant_files))
+    def last_modified(self) -> str:
+        def relevant_files_iter() -> Iterator[str]:
+            yield self._filesystem_path
+            if os.path.exists(self._props_path):
+                yield self._props_path
+            for href in self._list():
+                yield os.path.join(self._filesystem_path, href)
+        last = max(map(os.path.getmtime, relevant_files_iter()))
         return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
 
     @property
-    def etag(self):
+    def etag(self) -> str:
         # reuse cached value if the storage is read-only
         if self._storage._lock.locked == "w" or self._etag_cache is None:
             self._etag_cache = super().etag
@@ -99,61 +81,11 @@ class Collection(
 
 
 class Storage(
-        StorageCreateCollectionMixin, StorageDiscoverMixin, StorageLockMixin,
-        StorageMoveMixin, StorageVerifyMixin, storage.BaseStorage):
+        StoragePartCreateCollection, StoragePartLock, StoragePartMove,
+        StoragePartVerify, StoragePartDiscover, StorageBase):
 
     _collection_class = Collection
 
-    def __init__(self, configuration):
+    def __init__(self, configuration: config.Configuration) -> None:
         super().__init__(configuration)
-        folder = configuration.get("storage", "filesystem_folder")
-        self._makedirs_synced(folder)
-
-    def _get_collection_root_folder(self):
-        filesystem_folder = self.configuration.get(
-            "storage", "filesystem_folder")
-        return os.path.join(filesystem_folder, "collection-root")
-
-    def _fsync(self, f):
-        if self.configuration.get("storage", "_filesystem_fsync"):
-            try:
-                pathutils.fsync(f.fileno())
-            except OSError as e:
-                raise RuntimeError("Fsync'ing file %r failed: %s" %
-                                   (f.name, e)) from e
-
-    def _sync_directory(self, path):
-        """Sync directory to disk.
-
-        This only works on POSIX and does nothing on other systems.
-
-        """
-        if not self.configuration.get("storage", "_filesystem_fsync"):
-            return
-        if os.name == "posix":
-            try:
-                fd = os.open(path, 0)
-                try:
-                    pathutils.fsync(fd)
-                finally:
-                    os.close(fd)
-            except OSError as e:
-                raise RuntimeError("Fsync'ing directory %r failed: %s" %
-                                   (path, e)) from e
-
-    def _makedirs_synced(self, filesystem_path):
-        """Recursively create a directory and its parents in a sync'ed way.
-
-        This method acts silently when the folder already exists.
-
-        """
-        if os.path.isdir(filesystem_path):
-            return
-        parent_filesystem_path = os.path.dirname(filesystem_path)
-        # Prevent infinite loop
-        if filesystem_path != parent_filesystem_path:
-            # Create parent dirs recursively
-            self._makedirs_synced(parent_filesystem_path)
-        # Possible race!
-        os.makedirs(filesystem_path, exist_ok=True)
-        self._sync_directory(parent_filesystem_path)
+        self._makedirs_synced(self._filesystem_folder)
radicale/storage/multifilesystem/base.py
Normal file (121 lines)
@@ -0,0 +1,121 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2014 Jean-Marc Martins
+# Copyright © 2012-2017 Guillaume Ayoub
+# Copyright © 2017-2019 Unrud <unrud@outlook.com>
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+from tempfile import TemporaryDirectory
+from typing import IO, AnyStr, Iterator, Optional, Type
+
+from radicale import config, pathutils, storage, types
+from radicale.storage import multifilesystem  # noqa:F401
+
+
+class CollectionBase(storage.BaseCollection):
+
+    _storage: "multifilesystem.Storage"
+    _path: str
+    _encoding: str
+    _filesystem_path: str
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__()
+        self._storage = storage_
+        folder = storage_._get_collection_root_folder()
+        # Path should already be sanitized
+        self._path = pathutils.strip_path(path)
+        self._encoding = storage_.configuration.get("encoding", "stock")
+        if filesystem_path is None:
+            filesystem_path = pathutils.path_to_filesystem(folder, self.path)
+        self._filesystem_path = filesystem_path
+
+    @types.contextmanager
+    def _atomic_write(self, path: str, mode: str = "w",
+                      newline: Optional[str] = None) -> Iterator[IO[AnyStr]]:
+        # TODO: Overload with Literal when dropping support for Python < 3.8
+        parent_dir, name = os.path.split(path)
+        # Do not use mkstemp because it creates with permissions 0o600
+        with TemporaryDirectory(
+                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
+            with open(os.path.join(tmp_dir, name), mode, newline=newline,
+                      encoding=None if "b" in mode else self._encoding) as tmp:
+                yield tmp
+                tmp.flush()
+                self._storage._fsync(tmp)
+            os.replace(os.path.join(tmp_dir, name), path)
+            self._storage._sync_directory(parent_dir)
+
+
+class StorageBase(storage.BaseStorage):
+
+    _collection_class: Type["multifilesystem.Collection"]
+
+    _filesystem_folder: str
+    _filesystem_fsync: bool
+
+    def __init__(self, configuration: config.Configuration) -> None:
+        super().__init__(configuration)
+        self._filesystem_folder = configuration.get(
+            "storage", "filesystem_folder")
+        self._filesystem_fsync = configuration.get(
+            "storage", "_filesystem_fsync")
+
+    def _get_collection_root_folder(self) -> str:
+        return os.path.join(self._filesystem_folder, "collection-root")
+
+    def _fsync(self, f: IO[AnyStr]) -> None:
+        if self._filesystem_fsync:
+            try:
+                pathutils.fsync(f.fileno())
+            except OSError as e:
+                raise RuntimeError("Fsync'ing file %r failed: %s" %
+                                   (f.name, e)) from e
+
+    def _sync_directory(self, path: str) -> None:
+        """Sync directory to disk.
+
+        This only works on POSIX and does nothing on other systems.
+
+        """
+        if not self._filesystem_fsync:
+            return
+        if os.name == "posix":
+            try:
+                fd = os.open(path, 0)
+                try:
+                    pathutils.fsync(fd)
+                finally:
+                    os.close(fd)
+            except OSError as e:
+                raise RuntimeError("Fsync'ing directory %r failed: %s" %
+                                   (path, e)) from e
+
+    def _makedirs_synced(self, filesystem_path: str) -> None:
+        """Recursively create a directory and its parents in a sync'ed way.
+
+        This method acts silently when the folder already exists.
+
+        """
+        if os.path.isdir(filesystem_path):
+            return
+        parent_filesystem_path = os.path.dirname(filesystem_path)
+        # Prevent infinite loop
+        if filesystem_path != parent_filesystem_path:
+            # Create parent dirs recursively
+            self._makedirs_synced(parent_filesystem_path)
+        # Possible race!
+        os.makedirs(filesystem_path, exist_ok=True)
+        self._sync_directory(parent_filesystem_path)
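base.py decorates _atomic_write with types.contextmanager rather than contextlib.contextmanager. A plausible reading is that radicale.types wraps the stdlib decorator so the annotated return type is an explicit ContextManager; the following is a sketch under that assumption, not code from the diff:

import contextlib
from typing import Callable, ContextManager, Iterator, TypeVar

_T = TypeVar("_T")


def contextmanager(func: Callable[..., Iterator[_T]]
                   ) -> Callable[..., ContextManager[_T]]:
    # Same runtime behaviour as contextlib.contextmanager, but the declared
    # return type is ContextManager[_T] instead of the opaque helper class.
    return contextlib.contextmanager(func)


@contextmanager
def demo() -> Iterator[int]:
    yield 42


with demo() as value:
    print(value)  # prints 42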
radicale/storage/multifilesystem/cache.py
@@ -21,16 +21,27 @@ import os
 import pickle
 import time
 from hashlib import sha256
+from typing import BinaryIO, Iterable, NamedTuple, Optional, cast
 
+import radicale.item as radicale_item
 from radicale import pathutils, storage
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase
 
+CacheContent = NamedTuple("CacheContent", [
+    ("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
+    ("start", int), ("end", int)])
 
-class CollectionCacheMixin:
-    def _clean_cache(self, folder, names, max_age=None):
+
+class CollectionPartCache(CollectionBase):
+
+    def _clean_cache(self, folder: str, names: Iterable[str],
+                     max_age: int = 0) -> None:
         """Delete all ``names`` in ``folder`` that are older than ``max_age``.
         """
-        age_limit = time.time() - max_age if max_age is not None else None
+        age_limit: Optional[float] = None
+        if max_age is not None and max_age > 0:
+            age_limit = time.time() - max_age
         modified = False
         for name in names:
             if not pathutils.is_safe_filesystem_path_component(name):
@@ -55,47 +66,49 @@ class CollectionCacheMixin:
             self._storage._sync_directory(folder)
 
     @staticmethod
-    def _item_cache_hash(raw_text):
+    def _item_cache_hash(raw_text: bytes) -> str:
         _hash = sha256()
         _hash.update(storage.CACHE_VERSION)
         _hash.update(raw_text)
         return _hash.hexdigest()
 
-    def _item_cache_content(self, item, cache_hash=None):
-        text = item.serialize()
-        if cache_hash is None:
-            cache_hash = self._item_cache_hash(text.encode(self._encoding))
-        return (cache_hash, item.uid, item.etag, text, item.name,
-                item.component_name, *item.time_range)
+    def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
+        return CacheContent(item.uid, item.etag, item.serialize(), item.name,
+                            item.component_name, *item.time_range)
 
-    def _store_item_cache(self, href, item, cache_hash=None):
+    def _store_item_cache(self, href: str, item: radicale_item.Item,
+                          cache_hash: str = "") -> CacheContent:
+        if not cache_hash:
+            cache_hash = self._item_cache_hash(
+                item.serialize().encode(self._encoding))
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        content = self._item_cache_content(item, cache_hash)
+        content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
         with contextlib.suppress(PermissionError), self._atomic_write(
-                os.path.join(cache_folder, href), "wb") as f:
-            pickle.dump(content, f)
+                os.path.join(cache_folder, href), "wb") as fo:
+            fb = cast(BinaryIO, fo)
+            pickle.dump((cache_hash, *content), fb)
         return content
 
-    def _load_item_cache(self, href, input_hash):
+    def _load_item_cache(self, href: str, cache_hash: str
+                         ) -> Optional[CacheContent]:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        cache_hash = uid = etag = text = name = tag = start = end = None
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
-                cache_hash, *content = pickle.load(f)
-            if cache_hash == input_hash:
-                uid, etag, text, name, tag, start, end = content
+                hash_, *remainder = pickle.load(f)
+            if hash_ and hash_ == cache_hash:
+                return CacheContent(*remainder)
         except FileNotFoundError:
             pass
         except (pickle.UnpicklingError, ValueError) as e:
             logger.warning("Failed to load item cache entry %r in %r: %s",
                            href, self.path, e, exc_info=True)
-        return cache_hash, uid, etag, text, name, tag, start, end
+        return None
 
-    def _clean_item_cache(self):
+    def _clean_item_cache(self) -> None:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
         self._clean_cache(cache_folder, (
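The untyped 8-tuple that used to be pickled into the item cache becomes a CacheContent NamedTuple plus a separate leading hash, so _load_item_cache can validate the hash and return a typed record or None. A standalone sketch of the round trip, with made-up values:

import pickle
from typing import NamedTuple, Optional

CacheContent = NamedTuple("CacheContent", [
    ("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
    ("start", int), ("end", int)])


def store(cache_hash: str, content: CacheContent) -> bytes:
    # The hash is prepended so a loader can cheaply reject stale entries.
    return pickle.dumps((cache_hash, *content))


def load(blob: bytes, cache_hash: str) -> Optional[CacheContent]:
    hash_, *remainder = pickle.loads(blob)
    if hash_ and hash_ == cache_hash:
        return CacheContent(*remainder)
    return None  # stale entry


content = CacheContent("uid1", "\"etag\"", "BEGIN:VCALENDAR", "ev",
                       "VEVENT", 0, 1)
blob = store("hash123", content)
assert load(blob, "hash123") == content
assert load(blob, "other") is None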
radicale/storage/multifilesystem/create_collection.py
@@ -18,13 +18,19 @@
 
 import os
 from tempfile import TemporaryDirectory
+from typing import Iterable, Optional, cast
 
+import radicale.item as radicale_item
 from radicale import pathutils
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageCreateCollectionMixin:
+class StoragePartCreateCollection(StorageBase):
 
-    def create_collection(self, href, items=None, props=None):
+    def create_collection(self, href: str,
+                          items: Optional[Iterable[radicale_item.Item]] = None,
+                          props=None) -> "multifilesystem.Collection":
         folder = self._get_collection_root_folder()
 
         # Path should already be sanitized
@@ -34,19 +40,21 @@ class StorageCreateCollectionMixin:
         if not props:
             self._makedirs_synced(filesystem_path)
             return self._collection_class(
-                self, pathutils.unstrip_path(sane_path, True))
+                cast(multifilesystem.Storage, self),
+                pathutils.unstrip_path(sane_path, True))
 
         parent_dir = os.path.dirname(filesystem_path)
         self._makedirs_synced(parent_dir)
 
         # Create a temporary directory with an unsafe name
-        with TemporaryDirectory(
-                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
+        with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
+                                ) as tmp_dir:
             # The temporary directory itself can't be renamed
             tmp_filesystem_path = os.path.join(tmp_dir, "collection")
             os.makedirs(tmp_filesystem_path)
             col = self._collection_class(
-                self, pathutils.unstrip_path(sane_path, True),
+                cast(multifilesystem.Storage, self),
+                pathutils.unstrip_path(sane_path, True),
                 filesystem_path=tmp_filesystem_path)
             col.set_meta(props)
             if items is not None:
@@ -62,4 +70,5 @@ class StorageCreateCollectionMixin:
             self._sync_directory(parent_dir)
 
         return self._collection_class(
-            self, pathutils.unstrip_path(sane_path, True))
+            cast(multifilesystem.Storage, self),
+            pathutils.unstrip_path(sane_path, True))
@ -18,20 +18,24 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from radicale import pathutils, storage
|
from radicale import pathutils, storage
|
||||||
|
from radicale.storage.multifilesystem.base import CollectionBase
|
||||||
|
from radicale.storage.multifilesystem.history import CollectionPartHistory
|
||||||
|
|
||||||
|
|
||||||
class CollectionDeleteMixin:
|
class CollectionPartDelete(CollectionPartHistory, CollectionBase):
|
||||||
def delete(self, href=None):
|
|
||||||
|
def delete(self, href: Optional[str] = None) -> None:
|
||||||
if href is None:
|
if href is None:
|
||||||
# Delete the collection
|
# Delete the collection
|
||||||
parent_dir = os.path.dirname(self._filesystem_path)
|
parent_dir = os.path.dirname(self._filesystem_path)
|
||||||
try:
|
try:
|
||||||
os.rmdir(self._filesystem_path)
|
os.rmdir(self._filesystem_path)
|
||||||
except OSError:
|
except OSError:
|
||||||
with TemporaryDirectory(
|
with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
|
||||||
prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
|
) as tmp:
|
||||||
os.rename(self._filesystem_path, os.path.join(
|
os.rename(self._filesystem_path, os.path.join(
|
||||||
tmp, os.path.basename(self._filesystem_path)))
|
tmp, os.path.basename(self._filesystem_path)))
|
||||||
self._storage._sync_directory(parent_dir)
|
self._storage._sync_directory(parent_dir)
|
||||||
|
radicale/storage/multifilesystem/discover.py
@@ -16,18 +16,31 @@
 # You should have received a copy of the GNU General Public License
 # along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
 
-import contextlib
 import os
 import posixpath
+from typing import Callable, ContextManager, Iterator, Optional, cast
 
-from radicale import pathutils
+from radicale import pathutils, types
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageDiscoverMixin:
+@types.contextmanager
+def _null_child_context_manager(path: str,
+                                href: Optional[str]) -> Iterator[None]:
+    yield
 
-    def discover(self, path, depth="0", child_context_manager=(
-            lambda path, href=None: contextlib.ExitStack())):
+
+class StoragePartDiscover(StorageBase):
+
+    def discover(
+            self, path: str, depth: str = "0", child_context_manager: Optional[
+                Callable[[str, Optional[str]], ContextManager[None]]] = None
+            ) -> Iterator[types.CollectionOrItem]:
+        # assert isinstance(self, multifilesystem.Storage)
+        if child_context_manager is None:
+            child_context_manager = _null_child_context_manager
         # Path should already be sanitized
         sane_path = pathutils.strip_path(path)
         attributes = sane_path.split("/") if sane_path else []
@@ -44,6 +57,7 @@ class StorageDiscoverMixin:
             return
 
         # Check if the path exists and if it leads to a collection or an item
+        href: Optional[str]
         if not os.path.isdir(filesystem_path):
             if attributes and os.path.isfile(filesystem_path):
                 href = attributes.pop()
@@ -54,10 +68,13 @@ class StorageDiscoverMixin:
 
         sane_path = "/".join(attributes)
         collection = self._collection_class(
-            self, pathutils.unstrip_path(sane_path, True))
+            cast(multifilesystem.Storage, self),
+            pathutils.unstrip_path(sane_path, True))
 
         if href:
-            yield collection._get(href)
+            item = collection._get(href)
+            if item is not None:
+                yield item
             return
 
         yield collection
@@ -67,7 +84,9 @@ class StorageDiscoverMixin:
 
         for href in collection._list():
             with child_context_manager(sane_path, href):
-                yield collection._get(href)
+                item = collection._get(href)
+                if item is not None:
+                    yield item
 
         for entry in os.scandir(filesystem_path):
             if not entry.is_dir():
@@ -80,5 +99,6 @@ class StorageDiscoverMixin:
                 continue
             sane_child_path = posixpath.join(sane_path, href)
             child_path = pathutils.unstrip_path(sane_child_path, True)
-            with child_context_manager(sane_child_path):
-                yield self._collection_class(self, child_path)
+            with child_context_manager(sane_child_path, None):
+                yield self._collection_class(
+                    cast(multifilesystem.Storage, self), child_path)
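Worth noting in the discover() change: the old code used a lambda returning contextlib.ExitStack() as a default argument, which a type checker cannot express well; the new code moves the no-op context manager to module level and uses None as the default, the usual pattern for typed optional callables. A reduced sketch:

from contextlib import contextmanager
from typing import Callable, ContextManager, Iterator, Optional


@contextmanager
def _null_cm(path: str, href: Optional[str]) -> Iterator[None]:
    yield  # no-op wrapper around each yielded child


def discover(path: str,
             child_context_manager: Optional[
                 Callable[[str, Optional[str]], ContextManager[None]]] = None
             ) -> Iterator[str]:
    if child_context_manager is None:
        child_context_manager = _null_cm
    for href in ("a.ics", "b.ics"):  # stand-in for the real listing
        with child_context_manager(path, href):
            yield href


print(list(discover("/calendars")))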
radicale/storage/multifilesystem/get.py
@@ -19,20 +19,30 @@
 import os
 import sys
 import time
+from typing import Iterable, Iterator, Optional, Tuple
 
 import vobject
 
 import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.lock import CollectionPartLock
 
 
-class CollectionGetMixin:
-    def __init__(self):
-        super().__init__()
+class CollectionPartGet(CollectionPartCache, CollectionPartLock,
+                        CollectionBase):
+
+    _item_cache_cleaned: bool
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._item_cache_cleaned = False
 
-    def _list(self):
+    def _list(self) -> Iterator[str]:
         for entry in os.scandir(self._filesystem_path):
             if not entry.is_file():
                 continue
@@ -43,13 +53,14 @@ class CollectionGetMixin:
                 continue
             yield href
 
-    def _get(self, href, verify_href=True):
+    def _get(self, href: str, verify_href: bool = True
+             ) -> Optional[radicale_item.Item]:
         if verify_href:
             try:
                 if not pathutils.is_safe_filesystem_path_component(href):
                     raise pathutils.UnsafePathError(href)
-                path = pathutils.path_to_filesystem(
-                    self._filesystem_path, href)
+                path = pathutils.path_to_filesystem(self._filesystem_path,
+                                                    href)
             except ValueError as e:
                 logger.debug(
                     "Can't translate name %r safely to filesystem in %r: %s",
@@ -70,19 +81,17 @@ class CollectionGetMixin:
             raise
         # The hash of the component in the file system. This is used to check,
         # if the entry in the cache is still valid.
-        input_hash = self._item_cache_hash(raw_text)
-        cache_hash, uid, etag, text, name, tag, start, end = \
-            self._load_item_cache(href, input_hash)
-        if input_hash != cache_hash:
+        cache_hash = self._item_cache_hash(raw_text)
+        cache_content = self._load_item_cache(href, cache_hash)
+        if cache_content is None:
             with self._acquire_cache_lock("item"):
                 # Lock the item cache to prevent multpile processes from
                 # generating the same data in parallel.
                 # This improves the performance for multiple requests.
                 if self._storage._lock.locked == "r":
                     # Check if another process created the file in the meantime
-                    cache_hash, uid, etag, text, name, tag, start, end = \
-                        self._load_item_cache(href, input_hash)
-                if input_hash != cache_hash:
+                    cache_content = self._load_item_cache(href, cache_hash)
+                if cache_content is None:
                     try:
                         vobject_items = list(vobject.readComponents(
                             raw_text.decode(self._encoding)))
@@ -91,9 +100,8 @@ class CollectionGetMixin:
                         vobject_item, = vobject_items
                         temp_item = radicale_item.Item(
                             collection=self, vobject_item=vobject_item)
-                        cache_hash, uid, etag, text, name, tag, start, end = \
-                            self._store_item_cache(
-                                href, temp_item, input_hash)
+                        cache_content = self._store_item_cache(
+                            href, temp_item, cache_hash)
                     except Exception as e:
                         raise RuntimeError("Failed to load item %r in %r: %s" %
                                            (href, self.path, e)) from e
@@ -108,11 +116,14 @@ class CollectionGetMixin:
         # Don't keep reference to ``vobject_item``, because it requires a lot
        # of memory.
         return radicale_item.Item(
-            collection=self, href=href, last_modified=last_modified, etag=etag,
-            text=text, uid=uid, name=name, component_name=tag,
-            time_range=(start, end))
+            collection=self, href=href, last_modified=last_modified,
+            etag=cache_content.etag, text=cache_content.text,
+            uid=cache_content.uid, name=cache_content.name,
+            component_name=cache_content.tag,
+            time_range=(cache_content.start, cache_content.end))
 
-    def get_multi(self, hrefs):
+    def get_multi(self, hrefs: Iterable[str]
+                  ) -> Iterator[Tuple[str, Optional[radicale_item.Item]]]:
         # It's faster to check for file name collissions here, because
         # we only need to call os.listdir once.
         files = None
@@ -124,13 +135,16 @@ class CollectionGetMixin:
             path = os.path.join(self._filesystem_path, href)
             if (not pathutils.is_safe_filesystem_path_component(href) or
                     href not in files and os.path.lexists(path)):
-                logger.debug(
-                    "Can't translate name safely to filesystem: %r", href)
+                logger.debug("Can't translate name safely to filesystem: %r",
+                             href)
                 yield (href, None)
             else:
                 yield (href, self._get(href, verify_href=False))
 
-    def get_all(self):
-        # We don't need to check for collissions, because the the file names
-        # are from os.listdir.
-        return (self._get(href, verify_href=False) for href in self._list())
+    def get_all(self) -> Iterator[radicale_item.Item]:
+        for href in self._list():
+            # We don't need to check for collissions, because the file names
+            # are from os.listdir.
+            item = self._get(href, verify_href=False)
+            if item is not None:
+                yield item
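_get() is now annotated as returning Optional[Item], which forces get_all() to filter out None instead of yielding it; the generator expression becomes an explicit loop, and the same None check appears in discover() and upload(). A sketch of the narrowing idiom, using a stand-in fetch function:

from typing import Dict, Iterator, Optional

_items: Dict[str, str] = {"a.ics": "BEGIN:VCALENDAR"}


def _get(href: str) -> Optional[str]:
    return _items.get(href)  # None if the file vanished concurrently


def get_all() -> Iterator[str]:
    for href in ("a.ics", "b.ics"):
        item = _get(href)
        if item is not None:  # narrows Optional[str] to str
            yield item


print(list(get_all()))  # ['BEGIN:VCALENDAR']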
radicale/storage/multifilesystem/history.py
@@ -20,13 +20,25 @@ import binascii
 import contextlib
 import os
 import pickle
+from typing import BinaryIO, Optional, cast
 
 import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
 
 
-class CollectionHistoryMixin:
+class CollectionPartHistory(CollectionBase):
+
+    _max_sync_token_age: int
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
+        self._max_sync_token_age = storage_.configuration.get(
+            "storage", "max_sync_token_age")
+
     def _update_history_etag(self, href, item):
         """Updates and retrieves the history etag from the history cache.
 
@@ -56,8 +68,9 @@ class CollectionHistoryMixin:
                 history_etag + "/" + etag).strip("\"")
             # Race: Other processes might have created and locked the file.
             with contextlib.suppress(PermissionError), self._atomic_write(
-                    os.path.join(history_folder, href), "wb") as f:
-                pickle.dump([etag, history_etag], f)
+                    os.path.join(history_folder, href), "wb") as fo:
+                fb = cast(BinaryIO, fo)
+                pickle.dump([etag, history_etag], fb)
         return history_etag
 
     def _get_deleted_history_hrefs(self):
@@ -79,5 +92,4 @@ class CollectionHistoryMixin:
         history_folder = os.path.join(self._filesystem_path,
                                       ".Radicale.cache", "history")
         self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
-                          max_age=self._storage.configuration.get(
-                              "storage", "max_sync_token_age"))
+                          max_age=self._max_sync_token_age)
radicale/storage/multifilesystem/lock.py
@@ -23,56 +23,65 @@ import shlex
 import signal
 import subprocess
 import sys
+from typing import Iterator
 
-from radicale import pathutils
+from radicale import config, pathutils, types
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
 
 
-class CollectionLockMixin:
-    def _acquire_cache_lock(self, ns=""):
+class CollectionPartLock(CollectionBase):
+
+    @types.contextmanager
+    def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
         if self._storage._lock.locked == "w":
-            return contextlib.ExitStack()
+            yield
+            return
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache")
         self._storage._makedirs_synced(cache_folder)
         lock_path = os.path.join(cache_folder,
                                  ".Radicale.lock" + (".%s" % ns if ns else ""))
         lock = pathutils.RwLock(lock_path)
-        return lock.acquire("w")
+        with lock.acquire("w"):
+            yield
 
 
-class StorageLockMixin:
+class StoragePartLock(StorageBase):
 
-    def __init__(self, configuration):
+    _lock: pathutils.RwLock
+    _hook: str
+
+    def __init__(self, configuration: config.Configuration) -> None:
         super().__init__(configuration)
-        folder = self.configuration.get("storage", "filesystem_folder")
-        lock_path = os.path.join(folder, ".Radicale.lock")
+        lock_path = os.path.join(self._filesystem_folder, ".Radicale.lock")
         self._lock = pathutils.RwLock(lock_path)
+        self._hook = configuration.get("storage", "hook")
 
-    @contextlib.contextmanager
-    def acquire_lock(self, mode, user=""):
+    @types.contextmanager
+    def acquire_lock(self, mode: str, user: str = "") -> Iterator[None]:
         with self._lock.acquire(mode):
             yield
             # execute hook
-            hook = self.configuration.get("storage", "hook")
-            if mode == "w" and hook:
-                folder = self.configuration.get("storage", "filesystem_folder")
+            if mode == "w" and self._hook:
                 debug = logger.isEnabledFor(logging.DEBUG)
-                popen_kwargs = dict(
-                    stdin=subprocess.DEVNULL,
-                    stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
-                    stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
-                    shell=True, universal_newlines=True, cwd=folder)
                 # Use new process group for child to prevent terminals
                 # from sending SIGINT etc.
+                preexec_fn = None
+                creationflags = 0
                 if os.name == "posix":
                     # Process group is also used to identify child processes
-                    popen_kwargs["preexec_fn"] = os.setpgrp
+                    preexec_fn = os.setpgrp
                 elif sys.platform == "win32":
-                    popen_kwargs["creationflags"] = (
-                        subprocess.CREATE_NEW_PROCESS_GROUP)
-                command = hook % {"user": shlex.quote(user or "Anonymous")}
+                    creationflags |= subprocess.CREATE_NEW_PROCESS_GROUP
+                command = self._hook % {
+                    "user": shlex.quote(user or "Anonymous")}
                 logger.debug("Running storage hook")
-                p = subprocess.Popen(command, **popen_kwargs)
+                p = subprocess.Popen(
+                    command, stdin=subprocess.DEVNULL,
+                    stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
+                    stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
+                    shell=True, universal_newlines=True, preexec_fn=preexec_fn,
+                    cwd=self._filesystem_folder, creationflags=creationflags)
                 try:
                    stdout_data, stderr_data = p.communicate()
                 except BaseException:  # e.g. KeyboardInterrupt or SystemExit
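Two typing-driven rewrites stand out in lock.py: _acquire_cache_lock becomes a real generator context manager (the old version returned either contextlib.ExitStack() or lock.acquire("w"), two types a checker can't unify under one annotation), and the popen_kwargs dict is replaced by explicit keyword arguments so subprocess.Popen receives precisely typed parameters. A sketch of the first rewrite, with stand-in locking:

from contextlib import contextmanager
from typing import Iterator

already_locked = False  # stand-in for self._storage._lock.locked == "w"


@contextmanager
def acquire_cache_lock() -> Iterator[None]:
    if already_locked:
        yield          # nothing extra to do
        return
    print("lock")      # stand-in for lock.acquire("w")
    try:
        yield
    finally:
        print("unlock")


with acquire_cache_lock():
    print("critical section")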
radicale/storage/multifilesystem/meta.py
@@ -18,18 +18,33 @@
 
 import json
 import os
+from typing import Mapping, Optional, TextIO, Union, cast, overload
 
 import radicale.item as radicale_item
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
 
 
-class CollectionMetaMixin:
-    def __init__(self):
-        super().__init__()
+class CollectionPartMeta(CollectionBase):
+
+    _meta_cache: Optional[Mapping[str, str]]
+    _props_path: str
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._meta_cache = None
         self._props_path = os.path.join(
             self._filesystem_path, ".Radicale.props")
 
-    def get_meta(self, key=None):
+    @overload
+    def get_meta(self, key: None = None) -> Mapping[str, str]: ...
+
+    @overload
+    def get_meta(self, key: str) -> Optional[str]: ...
+
+    def get_meta(self, key: Optional[str] = None) -> Union[Mapping[str, str],
+                                                           Optional[str]]:
         # reuse cached value if the storage is read-only
         if self._storage._lock.locked == "w" or self._meta_cache is None:
             try:
@@ -45,6 +60,7 @@ class CollectionMetaMixin:
                     "%r: %s" % (self.path, e)) from e
         return self._meta_cache if key is None else self._meta_cache.get(key)
 
-    def set_meta(self, props):
-        with self._atomic_write(self._props_path, "w") as f:
+    def set_meta(self, props: Mapping[str, str]) -> None:
+        with self._atomic_write(self._props_path, "w") as fo:
+            f = cast(TextIO, fo)
             json.dump(props, f, sort_keys=True)
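get_meta() keeps its dual behaviour — the full mapping without a key, a single optional value with one — and expresses it with typing.overload so callers get the precise return type for each call shape. A standalone sketch of the pattern:

from typing import Mapping, Optional, Union, overload

_props: Mapping[str, str] = {"tag": "VCALENDAR"}


@overload
def get_meta(key: None = None) -> Mapping[str, str]: ...
@overload
def get_meta(key: str) -> Optional[str]: ...


def get_meta(key: Optional[str] = None) -> Union[Mapping[str, str],
                                                 Optional[str]]:
    # Single implementation behind both declared signatures.
    return _props if key is None else _props.get(key)


mapping = get_meta()     # checker sees Mapping[str, str]
value = get_meta("tag")  # checker sees Optional[str]
print(mapping, value)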
radicale/storage/multifilesystem/move.py
@@ -18,19 +18,25 @@
 
 import os
 
-from radicale import pathutils
+from radicale import item as radicale_item
+from radicale import pathutils, storage
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageMoveMixin:
+class StoragePartMove(StorageBase):
 
-    def move(self, item, to_collection, to_href):
+    def move(self, item: radicale_item.Item,
+             to_collection: storage.BaseCollection, to_href: str) -> None:
         if not pathutils.is_safe_filesystem_path_component(to_href):
             raise pathutils.UnsafePathError(to_href)
-        os.replace(
-            pathutils.path_to_filesystem(
-                item.collection._filesystem_path, item.href),
-            pathutils.path_to_filesystem(
-                to_collection._filesystem_path, to_href))
+        assert isinstance(to_collection, multifilesystem.Collection)
+        assert isinstance(item.collection, multifilesystem.Collection)
+        assert item.href
+        os.replace(pathutils.path_to_filesystem(
+            item.collection._filesystem_path, item.href),
+            pathutils.path_to_filesystem(
+                to_collection._filesystem_path, to_href))
         self._sync_directory(to_collection._filesystem_path)
         if item.collection._filesystem_path != to_collection._filesystem_path:
             self._sync_directory(item.collection._filesystem_path)
radicale/storage/multifilesystem/sync.py
@@ -21,16 +21,22 @@ import itertools
 import os
 import pickle
 from hashlib import sha256
+from typing import BinaryIO, Iterable, Tuple, cast
 
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.history import CollectionPartHistory
 
 
-class CollectionSyncMixin:
-    def sync(self, old_token=""):
+class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
+                         CollectionBase):
+
+    def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
         # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
         # where TOKEN_NAME is the sha256 hash of all history etags of present
         # and past items of the collection.
-        def check_token_name(token_name):
+        def check_token_name(token_name: str) -> bool:
             if len(token_name) != 64:
                 return False
             for c in token_name:
@@ -89,15 +95,15 @@ class CollectionSyncMixin:
             self._storage._makedirs_synced(token_folder)
             try:
                 # Race: Other processes might have created and locked the file.
-                with self._atomic_write(token_path, "wb") as f:
-                    pickle.dump(state, f)
+                with self._atomic_write(token_path, "wb") as fo:
+                    fb = cast(BinaryIO, fo)
+                    pickle.dump(state, fb)
             except PermissionError:
                 pass
             else:
                 # clean up old sync tokens and item cache
                 self._clean_cache(token_folder, os.listdir(token_folder),
-                                  max_age=self._storage.configuration.get(
-                                      "storage", "max_sync_token_age"))
+                                  max_age=self._max_sync_token_age)
                 self._clean_history()
         else:
             # Try to update the modification time
radicale/storage/multifilesystem/upload.py
@@ -19,13 +19,21 @@
 import os
 import pickle
 import sys
+from typing import Iterable, Set, TextIO, cast
 
 import radicale.item as radicale_item
 from radicale import pathutils
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.get import CollectionPartGet
+from radicale.storage.multifilesystem.history import CollectionPartHistory
 
 
-class CollectionUploadMixin:
-    def upload(self, href, item):
+class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
+                           CollectionPartHistory, CollectionBase):
+
+    def upload(self, href: str, item: radicale_item.Item
+               ) -> radicale_item.Item:
         if not pathutils.is_safe_filesystem_path_component(href):
             raise pathutils.UnsafePathError(href)
         try:
@@ -34,17 +42,22 @@ class CollectionUploadMixin:
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
         path = pathutils.path_to_filesystem(self._filesystem_path, href)
-        with self._atomic_write(path, newline="") as fd:
-            fd.write(item.serialize())
+        with self._atomic_write(path, newline="") as fo:
+            f = cast(TextIO, fo)
+            f.write(item.serialize())
         # Clean the cache after the actual item is stored, or the cache entry
         # will be removed again.
         self._clean_item_cache()
         # Track the change
         self._update_history_etag(href, item)
         self._clean_history()
-        return self._get(href, verify_href=False)
+        uploaded_item = self._get(href, verify_href=False)
+        if uploaded_item is None:
+            raise RuntimeError("Storage modified externally")
+        return uploaded_item
 
-    def _upload_all_nonatomic(self, items, suffix=""):
+    def _upload_all_nonatomic(self, items: Iterable[radicale_item.Item],
+                              suffix: str = "") -> None:
         """Upload a new set of items.
 
         This takes a list of vobject items and
@@ -54,7 +67,7 @@ class CollectionUploadMixin:
         cache_folder = os.path.join(self._filesystem_path,
                                     ".Radicale.cache", "item")
         self._storage._makedirs_synced(cache_folder)
-        hrefs = set()
+        hrefs: Set[str] = set()
         for item in items:
             uid = item.uid
             try:
@@ -92,14 +105,15 @@ class CollectionUploadMixin:
                         sys.platform == "win32" and e.errno == 123):
                     continue
                 raise
+            assert href is not None and f is not None
             with f:
                 f.write(item.serialize())
                 f.flush()
                 self._storage._fsync(f)
             hrefs.add(href)
-            with open(os.path.join(cache_folder, href), "wb") as f:
-                pickle.dump(cache_content, f)
-                f.flush()
-                self._storage._fsync(f)
+            with open(os.path.join(cache_folder, href), "wb") as fb:
+                pickle.dump(cache_content, fb)
+                fb.flush()
+                self._storage._fsync(fb)
         self._storage._sync_directory(cache_folder)
         self._storage._sync_directory(self._filesystem_path)
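_atomic_write yields IO[AnyStr], so each call site casts the handle to the concrete type it opened: TextIO for mode "w", BinaryIO for "wb" — the TODO in base.py notes that Literal overloads could remove these casts once Python < 3.8 support is dropped. A reduced sketch of the cast idiom, with a stand-in for _atomic_write:

import io
from typing import BinaryIO, TextIO, Union, cast


def open_any(mode: str) -> Union[TextIO, BinaryIO]:
    # Stand-in for _atomic_write: the static type can't depend on `mode`
    # without Literal overloads, hence the casts at the call sites.
    return io.BytesIO() if "b" in mode else io.StringIO()


f = cast(TextIO, open_any("w"))     # caller knows "w" gives a text handle
f.write("BEGIN:VCALENDAR")

fb = cast(BinaryIO, open_any("wb"))  # caller knows "wb" gives binary
fb.write(b"\x80\x04")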
radicale/storage/multifilesystem/verify.py
@@ -16,23 +16,27 @@
 # You should have received a copy of the GNU General Public License
 # along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
 
-import contextlib
+from typing import Iterator, Optional, Set
 
-from radicale import pathutils, storage
+from radicale import pathutils, storage, types
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import StorageBase
+from radicale.storage.multifilesystem.discover import StoragePartDiscover
 
 
-class StorageVerifyMixin:
-    def verify(self):
+class StoragePartVerify(StoragePartDiscover, StorageBase):
+
+    def verify(self) -> bool:
         item_errors = collection_errors = 0
 
-        @contextlib.contextmanager
-        def exception_cm(sane_path, href=None):
+        @types.contextmanager
+        def exception_cm(sane_path: str, href: Optional[str]
+                         ) -> Iterator[None]:
            nonlocal item_errors, collection_errors
            try:
                yield
            except Exception as e:
-                if href:
+                if href is not None:
                    item_errors += 1
                    name = "item %r in %r" % (href, sane_path)
                else:
@@ -45,13 +49,14 @@ class StorageVerifyMixin:
             sane_path = remaining_sane_paths.pop(0)
             path = pathutils.unstrip_path(sane_path, True)
             logger.debug("Verifying collection %r", sane_path)
-            with exception_cm(sane_path):
+            with exception_cm(sane_path, None):
                 saved_item_errors = item_errors
-                collection = None
-                uids = set()
+                collection: Optional[storage.BaseCollection] = None
+                uids: Set[str] = set()
                 has_child_collections = False
                 for item in self.discover(path, "1", exception_cm):
                     if not collection:
+                        assert isinstance(item, storage.BaseCollection)
                         collection = item
                         collection.get_meta()
                         continue
@@ -65,6 +70,7 @@ class StorageVerifyMixin:
                     uids.add(item.uid)
                     logger.debug("Verified item %r in %r",
                                  item.href, sane_path)
+                assert collection
                 if item_errors == saved_item_errors:
                     collection.sync()
                     if has_child_collections and collection.tag: