remove unused methods from storage interface

Unrud 2018-09-04 03:33:39 +02:00
parent a8c587f984
commit aec2a62931
3 changed files with 28 additions and 54 deletions

View File

@@ -151,7 +151,7 @@ def xml_report(base_prefix, path, xml_request, collection, unlock_storage_fn):
             else:
                 yield item, False
         if collection_requested:
-            yield from collection.get_all_filtered(filters)
+            yield from collection.get_filtered(filters)

     # Retrieve everything required for finishing the request.
     retrieved_items = list(retrieve_items(collection, hreferences,
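For reference, get_filtered() (defined in the storage interface below) yields (item, filters_matched) pairs, where filters_matched tells the caller whether the storage already evaluated the filters completely. A minimal sketch of how report code can use that flag; matches_xml_filters is a hypothetical placeholder for the full XML filter check, not an actual Radicale function:

def prefiltered_items(collection, filters, matches_xml_filters):
    # Items that only passed a cheap prefilter (filters_matched is False)
    # still need the full filter evaluation; fully matched items don't.
    for item, filters_matched in collection.get_filtered(filters):
        if filters_matched or matches_xml_filters(item, filters):
            yield item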

View File

@@ -170,47 +170,31 @@ class BaseCollection:
         ValueError is raised for invalid or old tokens.

         WARNING: This simple default implementation treats all sync-token as
-                 invalid. It adheres to the specification but some clients
-                 (e.g. InfCloud) don't like it. Subclasses should provide a
-                 more sophisticated implementation.
+                 invalid.

         """
         token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
         if old_token:
             raise ValueError("Sync token are not supported")
-        return token, self.list()
-
-    def list(self):
-        """List collection items."""
-        raise NotImplementedError
-
-    def get(self, href):
-        """Fetch a single item."""
-        raise NotImplementedError
+        return token, (item.href for item in self.get_all())

     def get_multi(self, hrefs):
         """Fetch multiple items.

-        Functionally similar to ``get``, but might bring performance benefits
-        on some storages when used cleverly. It's not required to return the
-        requested items in the correct order. Duplicated hrefs can be ignored.
+        It's not required to return the requested items in the correct order.
+        Duplicated hrefs can be ignored.

         Returns tuples with the href and the item or None if the item doesn't
         exist.

         """
-        return ((href, self.get(href)) for href in hrefs)
+        raise NotImplementedError

     def get_all(self):
-        """Fetch all items.
-
-        Functionally similar to ``get``, but might bring performance benefits
-        on some storages when used cleverly.
-
-        """
-        return map(self.get, self.list())
-
-    def get_all_filtered(self, filters):
+        """Fetch all items."""
+        raise NotImplementedError
+
+    def get_filtered(self, filters):
         """Fetch all items with optional filtering.

         This can largely improve performance of reports depending on
@@ -221,18 +205,10 @@ class BaseCollection:
         matched.

         This returns all events by default
         """
         return ((item, False) for item in self.get_all())

-    def has(self, href):
-        """Check if an item exists by its href.
-
-        Functionally similar to ``get``, but might bring performance benefits
-        on some storages when used cleverly.
-
-        """
-        return self.get(href) is not None
-
     def has_uid(self, uid):
         """Check if a UID exists in the collection."""
         for item in self.get_all():
@@ -351,4 +327,4 @@ class BaseCollection:
     @classmethod
     def verify(cls):
         """Check the storage for errors."""
-        return True
+        raise NotImplementedError
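To illustrate the slimmed-down interface, here is a toy in-memory backend that fills in only the methods BaseCollection now leaves abstract in this diff (get_multi, get_all, verify) and keeps the default get_filtered() and sync(), which are built on top of get_all(). This is a sketch, not part of the commit: a real backend also implements upload, delete, metadata handling and the other abstract methods, which are omitted here, and the import path is assumed from the third file's use of storage.BaseCollection:

from radicale import storage

class MemoryCollection(storage.BaseCollection):
    def __init__(self, items=None):
        self._items = dict(items or {})  # maps href -> item

    def get_multi(self, hrefs):
        # Order and duplicates don't matter; missing hrefs map to None.
        return ((href, self._items.get(href)) for href in set(hrefs))

    def get_all(self):
        return iter(self._items.values())

    @classmethod
    def verify(cls):
        return True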

View File

@@ -169,7 +169,7 @@ class Collection(storage.BaseCollection):
         collection = cls(pathutils.unstrip_path(sane_path, True))

         if href:
-            yield collection.get(href)
+            yield collection._get(href)
             return

         yield collection
@@ -177,10 +177,9 @@ class Collection(storage.BaseCollection):
         if depth == "0":
             return

-        for href in collection.list():
-            with child_context_manager(
-                    pathutils.unstrip_path(sane_path, True), href):
-                yield collection.get(href)
+        for href in collection._list():
+            with child_context_manager(sane_path, href):
+                yield collection._get(href)

         for entry in os.scandir(filesystem_path):
             if not entry.is_dir():
@@ -191,9 +190,9 @@ class Collection(storage.BaseCollection):
                     logger.debug("Skipping collection %r in %r",
                                  href, sane_path)
                 continue
-            child_path = pathutils.unstrip_path(
-                posixpath.join(sane_path, href), True)
-            with child_context_manager(child_path):
+            sane_child_path = posixpath.join(sane_path, href)
+            child_path = pathutils.unstrip_path(sane_child_path, True)
+            with child_context_manager(sane_child_path):
                 yield cls(child_path)

     @classmethod
@@ -201,9 +200,8 @@ class Collection(storage.BaseCollection):
         item_errors = collection_errors = 0

         @contextlib.contextmanager
-        def exception_cm(path, href=None):
+        def exception_cm(sane_path, href=None):
             nonlocal item_errors, collection_errors
-            sane_path = pathutils.strip_path(path)
             try:
                 yield
             except Exception as e:
@@ -220,7 +218,7 @@ class Collection(storage.BaseCollection):
             sane_path = remaining_sane_paths.pop(0)
             path = pathutils.unstrip_path(sane_path, True)
             logger.debug("Verifying collection %r", sane_path)
-            with exception_cm(path):
+            with exception_cm(sane_path):
                 saved_item_errors = item_errors
                 collection = None
                 uids = set()
@@ -565,7 +563,7 @@ class Collection(storage.BaseCollection):
             changes.append(href)
         return token, changes

-    def list(self):
+    def _list(self):
         for entry in os.scandir(self._filesystem_path):
             if not entry.is_file():
                 continue
@@ -637,7 +635,7 @@ class Collection(storage.BaseCollection):
                 e.name for e in os.scandir(cache_folder) if not
                 os.path.isfile(os.path.join(self._filesystem_path, e.name))))

-    def get(self, href, verify_href=True):
+    def _get(self, href, verify_href=True):
         if verify_href:
             try:
                 if not pathutils.is_safe_filesystem_path_component(href):
@@ -722,21 +720,21 @@ class Collection(storage.BaseCollection):
                     "Can't translate name safely to filesystem: %r", href)
                 yield (href, None)
             else:
-                yield (href, self.get(href, verify_href=False))
+                yield (href, self._get(href, verify_href=False))

     def get_all(self):
         # We don't need to check for collissions, because the the file names
         # are from os.listdir.
-        return (self.get(href, verify_href=False) for href in self.list())
+        return (self._get(href, verify_href=False) for href in self._list())

-    def get_all_filtered(self, filters):
+    def get_filtered(self, filters):
         tag, start, end, simple = radicale_filter.simplify_prefilters(
             filters, collection_tag=self.get_meta("tag"))
         if not tag:
             # no filter
             yield from ((item, simple) for item in self.get_all())
             return
-        for item in (self.get(h, verify_href=False) for h in self.list()):
+        for item in (self._get(h, verify_href=False) for h in self._list()):
             istart, iend = item.time_range
             if tag == item.component_name and istart < end and iend > start:
                 yield item, simple and (start <= istart or iend <= end)
@@ -758,7 +756,7 @@ class Collection(storage.BaseCollection):
         # Track the change
         self._update_history_etag(href, item)
         self._clean_history_cache()
-        return self.get(href, verify_href=False)
+        return self._get(href, verify_href=False)

     def delete(self, href=None):
         if href is None:
@@ -811,7 +809,7 @@ class Collection(storage.BaseCollection):
         relevant_files = chain(
             (self._filesystem_path,),
             (self._props_path,) if os.path.exists(self._props_path) else (),
-            (os.path.join(self._filesystem_path, h) for h in self.list()))
+            (os.path.join(self._filesystem_path, h) for h in self._list()))
         last = max(map(os.path.getmtime, relevant_files))
         return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
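One detail worth noting in get_filtered() above: after simplify_prefilters() reduces the XML filters to a component tag and a time window, an item passes the prefilter when its time range intersects that window, i.e. istart < end and iend > start. That is the standard interval-overlap test; a small illustration (values are made up for the example):

def overlaps(istart, iend, start, end):
    # Two ranges intersect exactly when each one starts before the other ends.
    return istart < end and iend > start

print(overlaps(5, 8, 0, 10))    # True: the item lies inside the window
print(overlaps(8, 15, 0, 10))   # True: partial overlap still matches
print(overlaps(12, 15, 0, 10))  # False: the item can be skipped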