Improve error handling
* Check the configuration file for errors (check option names and basic type checking).
* Perform basic type checking on command line arguments.
* Only print stack traces in debug mode (a short sketch of this mechanism follows the commit metadata below).
* Include much more information in error messages (e.g. include the path of invalid files).
* Send Bad Request to clients for invalid XML requests or iCalendar data.
* Change the log level of some messages.
This commit is contained in:
parent 824835bcd4
commit c9664137a5
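The "stack traces only in debug mode" bullet is realized in the diff below by logging errors with exc_info=True and attaching a logging filter that strips the traceback from each record unless debug logging is enabled. The standalone sketch below only illustrates that mechanism; the logger name and the plain debug flag are illustrative stand-ins, not Radicale's actual configuration handling.

import logging
import sys

class RemoveTracebackFilter(logging.Filter):
    """Keep the log message but drop the attached traceback."""
    def filter(self, record):
        record.exc_info = None
        return True

logger = logging.getLogger("example")  # hypothetical logger name
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.addHandler(handler)

debug = False  # stand-in for the "logging.debug" configuration option
if debug:
    logger.setLevel(logging.DEBUG)
else:
    # In non-debug mode, tracebacks are removed from all records
    logger.addFilter(RemoveTracebackFilter())

try:
    1 / 0
except ZeroDivisionError as e:
    # With debug disabled only the message is printed; with debug enabled
    # the same call would also print the full traceback.
    logger.error("An exception occurred during request: %s", e, exc_info=True)

With debug set to True the same logging call prints the full traceback, which mirrors the logging.debug option checked in log.start() in the diff below.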
@@ -42,7 +42,6 @@ import ssl
 import sys
 import threading
 import time
-import traceback
 import wsgiref.simple_server
 import zlib
 from http import client
@@ -58,6 +57,8 @@ VERSION = "2.0.0"
 NOT_ALLOWED = (
     client.FORBIDDEN, (("Content-Type", "text/plain"),),
     "Access to the requested resource forbidden.")
+BAD_REQUEST = (
+    client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
 NOT_FOUND = (
     client.NOT_FOUND, (("Content-Type", "text/plain"),),
     "The requested resource could not be found.")
@@ -79,6 +80,9 @@ REMOTE_DESTINATION = (
 DIRECTORY_LISTING = (
     client.FORBIDDEN, (("Content-Type", "text/plain"),),
     "Directory listings are not supported.")
+INTERNAL_SERVER_ERROR = (
+    client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
+    "A server error occurred. Please contact the administrator.")
 
 DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
 
@@ -125,11 +129,10 @@ class HTTPServer(wsgiref.simple_server.WSGIServer):
 
     def handle_error(self, request, client_address):
         if issubclass(sys.exc_info()[0], socket.timeout):
-            self.logger.error("connection timeout")
+            self.logger.info("connection timeout")
         else:
-            self.logger.error(
-                "An exception occurred during request:\n%s",
-                traceback.format_exc())
+            self.logger.error("An exception occurred during request: %s",
+                              sys.exc_info()[1], exc_info=True)
 
 
 class HTTPSServer(HTTPServer):
@@ -194,7 +197,7 @@ class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
         error = self.error_stream.getvalue().strip("\n")
         if error:
             self.logger.error(
-                "An exception occurred during request:\n%s" % error)
+                "An unhandled exception occurred during request:\n%s" % error)
 
 
 class Application:
@@ -266,27 +269,40 @@ class Application:
                 path = item.collection.path
             if self.authorized(user, path, "r"):
                 self.logger.debug(
-                    "%s has read access to collection %s",
-                    user or "Anonymous", path or "/")
+                    "%s has read access to collection %r",
+                    "%r" % user if user else "anonymous user", path or "/")
                 read_allowed_items.append(item)
             else:
                 self.logger.debug(
-                    "%s has NO read access to collection %s",
-                    user or "Anonymous", path or "/")
+                    "%s has NO read access to collection %r",
+                    "%r" % user if user else "anonymous user", path or "/")
             if self.authorized(user, path, "w"):
                 self.logger.debug(
                     "%s has write access to collection %s",
-                    user or "Anonymous", path or "/")
+                    "%r" % user if user else "anonymous user", path or "/")
                 write_allowed_items.append(item)
             else:
                 self.logger.debug(
                     "%s has NO write access to collection %s",
-                    user or "Anonymous", path or "/")
+                    "%r" % user if user else "anonymous user", path or "/")
         return read_allowed_items, write_allowed_items
 
     def __call__(self, environ, start_response):
-        """Manage a request."""
+        try:
+            status, headers, answers = self._handle_request(environ)
+        except Exception as e:
+            self.logger.error("An exception occurred during request: %s",
+                              e, exc_info=True)
+            status, headers, answer = INTERNAL_SERVER_ERROR
+            status = "%d %s" % (
+                status, client.responses.get(status, "Unknown"))
+            headers = [("Content-Length", str(len(answer)))] + list(headers)
+            answers = [answer.encode("ascii")]
+        start_response(status, headers)
+        return answers
 
+    def _handle_request(self, environ):
+        """Manage a request."""
         def response(status, headers=(), answer=None):
             headers = dict(headers)
             # Set content length
@@ -314,34 +330,33 @@ class Application:
 
             # Start response
             time_end = datetime.datetime.now()
-            status = "%i %s" % (
+            status = "%d %s" % (
                 status, client.responses.get(status, "Unknown"))
             self.logger.info(
-                "%s answer status for %s in %.3f seconds: %s",
-                environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo,
+                "%s answer status for %r%s in %.3f seconds: %s",
+                environ["REQUEST_METHOD"], environ["PATH_INFO"], depthinfo,
                 (time_end - time_begin).total_seconds(), status)
-            start_response(status, list(headers.items()))
             # Return response content
-            return [answer] if answer else []
+            return status, list(headers.items()), [answer] if answer else []
 
-        remote_host = "UNKNOWN"
+        remote_host = "unknown"
         if environ.get("REMOTE_HOST"):
-            remote_host = environ["REMOTE_HOST"]
+            remote_host = "%r" % environ["REMOTE_HOST"]
         elif environ.get("REMOTE_ADDR"):
             remote_host = environ["REMOTE_ADDR"]
         if environ.get("HTTP_X_FORWARDED_FOR"):
-            remote_host = "%s (forwarded by %s)" % (
+            remote_host = "%r (forwarded by %s)" % (
                 environ["HTTP_X_FORWARDED_FOR"], remote_host)
-        remote_useragent = "UNKNOWN"
+        remote_useragent = ""
         if environ.get("HTTP_USER_AGENT"):
-            remote_useragent = environ["HTTP_USER_AGENT"]
+            remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
         depthinfo = ""
         if environ.get("HTTP_DEPTH"):
-            depthinfo = " with depth " + environ["HTTP_DEPTH"]
+            depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
         time_begin = datetime.datetime.now()
         self.logger.info(
-            "%s request for %s received from %s using \"%s\"",
-            environ["REQUEST_METHOD"], environ["PATH_INFO"] + depthinfo,
+            "%s request for %r%s received from %s%s",
+            environ["REQUEST_METHOD"], environ["PATH_INFO"], depthinfo,
             remote_host, remote_useragent)
         headers = pprint.pformat(self.headers_log(environ))
         self.logger.debug("Request headers:\n%s", headers)
@@ -350,18 +365,18 @@ class Application:
         if "HTTP_X_SCRIPT_NAME" in environ:
             environ["SCRIPT_NAME"] = environ["HTTP_X_SCRIPT_NAME"]
             self.logger.debug(
-                "Script name overwritten by client: %s",
+                "Script name overwritten by client: %r",
                 environ["SCRIPT_NAME"])
         # Sanitize base prefix
         environ["SCRIPT_NAME"] = storage.sanitize_path(
             environ.get("SCRIPT_NAME", "")).rstrip("/")
-        self.logger.debug("Sanitized script name: %s", environ["SCRIPT_NAME"])
+        self.logger.debug("Sanitized script name: %r", environ["SCRIPT_NAME"])
         base_prefix = environ["SCRIPT_NAME"]
         # Sanitize request URI (a WSGI server indicates with an empty path,
         # that the URL targets the application root without a trailing slash)
         if environ["PATH_INFO"]:
             environ["PATH_INFO"] = storage.sanitize_path(environ["PATH_INFO"])
-            self.logger.debug("Sanitized path: %s", environ["PATH_INFO"])
+            self.logger.debug("Sanitized path: %r", environ["PATH_INFO"])
         path = environ["PATH_INFO"]
         if base_prefix and path.startswith(base_prefix):
             path = path[len(base_prefix):]
@@ -393,18 +408,20 @@ class Application:
             is_authenticated = True
         elif not storage.is_safe_path_component(user):
             # Prevent usernames like "user/calendar.ics"
-            self.logger.info("Refused unsafe username: %s", user)
+            self.logger.info("Refused unsafe username: %r", user)
             is_authenticated = False
         else:
             is_authenticated = self.Auth.is_authenticated(user, password)
             if not is_authenticated:
-                self.logger.info("Failed login attempt: %s", user)
+                self.logger.info("Failed login attempt: %r", user)
                 # Random delay to avoid timing oracles and bruteforce attacks
                 delay = self.configuration.getfloat("auth", "delay")
                 if delay > 0:
                     random_delay = delay * (0.5 + random.random())
                     self.logger.debug("Sleeping %.3f seconds", random_delay)
                     time.sleep(random_delay)
+            else:
+                self.logger.info("Successful login: %r", user)
 
         # Create principal collection
         if user and is_authenticated:
@@ -417,6 +434,9 @@ class Application:
                 if not principal:
                     with self.Collection.acquire_lock("w", user):
                         self.Collection.create_collection(principal_path)
+            else:
+                self.logger.warning("Access to principal path %r denied by "
+                                    "rights backend", principal_path)
 
         # Verify content length
         content_length = int(environ.get("CONTENT_LENGTH") or 0)
@@ -424,7 +444,7 @@ class Application:
             max_content_length = self.configuration.getint(
                 "server", "max_content_length")
             if max_content_length and content_length > max_content_length:
-                self.logger.debug(
+                self.logger.info(
                     "Request body too large: %d", content_length)
                 return response(*REQUEST_ENTITY_TOO_LARGE)
 
@@ -436,7 +456,7 @@ class Application:
                 return response(*REQUEST_TIMEOUT)
             if (status, headers, answer) == NOT_ALLOWED:
                 self.logger.info("Access denied for %s",
-                                 "'%s'" % user if user else "anonymous user")
+                                 "%r" % user if user else "anonymous user")
         else:
             status, headers, answer = NOT_ALLOWED
 
@@ -477,7 +497,7 @@ class Application:
             return b""
         content = environ["wsgi.input"].read(content_length)
         if len(content) < content_length:
-            raise ValueError("Request body too short: %d" % len(content))
+            raise RuntimeError("Request body too short: %d" % len(content))
         return content
 
     def _read_content(self, environ):
@@ -491,9 +511,9 @@ class Application:
             return None
         try:
             xml_content = ET.fromstring(content)
-        except ET.ParseError:
+        except ET.ParseError as e:
             self.logger.debug("Request content (Invalid XML):\n%s", content)
-            raise
+            raise RuntimeError("Failed to parse XML: %s" % e) from e
         if self.logger.isEnabledFor(logging.DEBUG):
             self.logger.debug("Request content:\n%s",
                               xmlutils.pretty_xml(xml_content))
|
|||||||
"""Manage MKCALENDAR request."""
|
"""Manage MKCALENDAR request."""
|
||||||
if not self.authorized(user, path, "w"):
|
if not self.authorized(user, path, "w"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
xml_content = self._read_xml_content(environ)
|
xml_content = self._read_xml_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("w", user):
|
with self.Collection.acquire_lock("w", user):
|
||||||
item = next(self.Collection.discover(path), None)
|
item = next(self.Collection.discover(path), None)
|
||||||
if item:
|
if item:
|
||||||
@ -585,7 +610,12 @@ class Application:
|
|||||||
"""Manage MKCOL request."""
|
"""Manage MKCOL request."""
|
||||||
if not self.authorized(user, path, "w"):
|
if not self.authorized(user, path, "w"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
xml_content = self._read_xml_content(environ)
|
xml_content = self._read_xml_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad MKCOL request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("w", user):
|
with self.Collection.acquire_lock("w", user):
|
||||||
item = next(self.Collection.discover(path), None)
|
item = next(self.Collection.discover(path), None)
|
||||||
if item:
|
if item:
|
||||||
@ -596,14 +626,18 @@ class Application:
|
|||||||
|
|
||||||
def do_MOVE(self, environ, base_prefix, path, user):
|
def do_MOVE(self, environ, base_prefix, path, user):
|
||||||
"""Manage MOVE request."""
|
"""Manage MOVE request."""
|
||||||
to_url = urlparse(environ["HTTP_DESTINATION"])
|
raw_dest = environ.get("HTTP_DESTINATION", "")
|
||||||
|
to_url = urlparse(raw_dest)
|
||||||
if to_url.netloc != environ["HTTP_HOST"]:
|
if to_url.netloc != environ["HTTP_HOST"]:
|
||||||
|
self.logger.info("Unsupported destination address: %r", raw_dest)
|
||||||
# Remote destination server, not supported
|
# Remote destination server, not supported
|
||||||
return REMOTE_DESTINATION
|
return REMOTE_DESTINATION
|
||||||
if not self._access(user, path, "w"):
|
if not self._access(user, path, "w"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
to_path = storage.sanitize_path(to_url.path)
|
to_path = storage.sanitize_path(to_url.path)
|
||||||
if not (to_path + "/").startswith(base_prefix + "/"):
|
if not (to_path + "/").startswith(base_prefix + "/"):
|
||||||
|
self.logger.warning("Destination %r from MOVE request on %r does"
|
||||||
|
"n't start with base prefix", to_path, path)
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
to_path = to_path[len(base_prefix):]
|
to_path = to_path[len(base_prefix):]
|
||||||
if not self._access(user, to_path, "w"):
|
if not self._access(user, to_path, "w"):
|
||||||
@ -646,7 +680,12 @@ class Application:
|
|||||||
"""Manage PROPFIND request."""
|
"""Manage PROPFIND request."""
|
||||||
if not self._access(user, path, "r"):
|
if not self._access(user, path, "r"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
xml_content = self._read_xml_content(environ)
|
xml_content = self._read_xml_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad PROPFIND request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("r", user):
|
with self.Collection.acquire_lock("r", user):
|
||||||
items = self.Collection.discover(
|
items = self.Collection.discover(
|
||||||
path, environ.get("HTTP_DEPTH", "0"))
|
path, environ.get("HTTP_DEPTH", "0"))
|
||||||
@ -672,7 +711,12 @@ class Application:
|
|||||||
"""Manage PROPPATCH request."""
|
"""Manage PROPPATCH request."""
|
||||||
if not self.authorized(user, path, "w"):
|
if not self.authorized(user, path, "w"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
xml_content = self._read_xml_content(environ)
|
xml_content = self._read_xml_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("w", user):
|
with self.Collection.acquire_lock("w", user):
|
||||||
item = next(self.Collection.discover(path), None)
|
item = next(self.Collection.discover(path), None)
|
||||||
if not isinstance(item, self.Collection):
|
if not isinstance(item, self.Collection):
|
||||||
@ -688,7 +732,12 @@ class Application:
|
|||||||
"""Manage PUT request."""
|
"""Manage PUT request."""
|
||||||
if not self._access(user, path, "w"):
|
if not self._access(user, path, "w"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
content = self._read_content(environ)
|
content = self._read_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("w", user):
|
with self.Collection.acquire_lock("w", user):
|
||||||
parent_path = storage.sanitize_path(
|
parent_path = storage.sanitize_path(
|
||||||
"/%s/" % posixpath.dirname(path.strip("/")))
|
"/%s/" % posixpath.dirname(path.strip("/")))
|
||||||
@ -720,7 +769,12 @@ class Application:
|
|||||||
# Creation asked but item found: item can't be replaced
|
# Creation asked but item found: item can't be replaced
|
||||||
return PRECONDITION_FAILED
|
return PRECONDITION_FAILED
|
||||||
|
|
||||||
|
try:
|
||||||
items = list(vobject.readComponents(content or ""))
|
items = list(vobject.readComponents(content or ""))
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad PUT request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
|
content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
|
||||||
tags = {value: key for key, value in xmlutils.MIMETYPES.items()}
|
tags = {value: key for key, value in xmlutils.MIMETYPES.items()}
|
||||||
tag = tags.get(content_type)
|
tag = tags.get(content_type)
|
||||||
@ -740,7 +794,12 @@ class Application:
|
|||||||
"""Manage REPORT request."""
|
"""Manage REPORT request."""
|
||||||
if not self._access(user, path, "r"):
|
if not self._access(user, path, "r"):
|
||||||
return NOT_ALLOWED
|
return NOT_ALLOWED
|
||||||
|
try:
|
||||||
xml_content = self._read_xml_content(environ)
|
xml_content = self._read_xml_content(environ)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.logger.warning(
|
||||||
|
"Bad REPORT request on %r: %s", path, e, exc_info=True)
|
||||||
|
return BAD_REQUEST
|
||||||
with self.Collection.acquire_lock("r", user):
|
with self.Collection.acquire_lock("r", user):
|
||||||
item = next(self.Collection.discover(path), None)
|
item = next(self.Collection.discover(path), None)
|
||||||
if not self._access(user, path, "r", item):
|
if not self._access(user, path, "r", item):
|
||||||
|
@@ -58,8 +58,10 @@ def run():
             args.append(long_name)
             kwargs["dest"] = "{0}_{1}".format(section, option)
             groups[group].append(kwargs["dest"])
+            del kwargs["value"]
 
-            if kwargs.pop("value") in ("True", "False"):
+            if kwargs["type"] == bool:
+                del kwargs["type"]
                 kwargs["action"] = "store_const"
                 kwargs["const"] = "True"
                 opposite_args = kwargs.pop("opposite", [])
@@ -75,19 +77,22 @@ def run():
 
     args = parser.parse_args()
     if args.config is not None:
-        configuration = config.load()
-        if args.config:
-            configuration_found = configuration.read(args.config)
-        else:
-            configuration_found = True
+        config_paths = [args.config] if args.config else []
+        ignore_missing_paths = False
     else:
-        configuration_paths = [
-            "/etc/radicale/config",
-            os.path.expanduser("~/.config/radicale/config")]
+        config_paths = ["/etc/radicale/config",
                        os.path.expanduser("~/.config/radicale/config")]
         if "RADICALE_CONFIG" in os.environ:
-            configuration_paths.append(os.environ["RADICALE_CONFIG"])
-        configuration = config.load(configuration_paths)
-        configuration_found = True
+            config_paths.append(os.environ["RADICALE_CONFIG"])
+        ignore_missing_paths = True
+    try:
+        configuration = config.load(config_paths,
+                                    ignore_missing_paths=ignore_missing_paths)
+    except Exception as e:
+        print("ERROR: Invalid configuration: %s" % e, file=sys.stderr)
+        if args.logging_debug:
+            raise
+        exit(1)
 
     # Update Radicale configuration according to arguments
     for group, actions in groups.items():
@@ -100,17 +105,19 @@ def run():
     # Start logging
     filename = os.path.expanduser(configuration.get("logging", "config"))
     debug = configuration.getboolean("logging", "debug")
-    logger = log.start("radicale", filename, debug)
-
-    # Log a warning if the configuration file of the command line is not found
-    if not configuration_found:
-        logger.error("Configuration file '%s' not found" % args.config)
+    try:
+        logger = log.start("radicale", filename, debug)
+    except Exception as e:
+        print("ERROR: Failed to start logger: %s" % e, file=sys.stderr)
+        if debug:
+            raise
         exit(1)
 
     try:
         serve(configuration, logger)
-    except Exception:
-        logger.exception("An exception occurred during server startup:")
+    except Exception as e:
+        logger.error("An exception occurred during server startup: %s", e,
                     exc_info=True)
         exit(1)
 
 
@@ -124,7 +131,7 @@ def daemonize(configuration, logger):
             pid_fd = os.open(
                 pid_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
         except OSError as e:
-            raise OSError("PID file exists: %s" %
+            raise OSError("PID file exists: %r" %
                           configuration.get("server", "pid")) from e
     pid = os.fork()
     if pid:
@@ -171,9 +178,9 @@ def serve(configuration, logger):
             filename = getattr(server_class, name)
             try:
                 open(filename, "r").close()
-            except IOError as exception:
-                logger.warning("Error while reading SSL %s %r: %s" % (
-                    name, filename, exception))
+            except OSError as e:
+                raise RuntimeError("Failed to read SSL %s %r: %s" %
                                   (name, filename, e)) from e
     else:
         server_class = ThreadedHTTPServer
     server_class.client_timeout = configuration.getint("server", "timeout")
@@ -188,16 +195,23 @@ def serve(configuration, logger):
     shutdown_program = False
 
     for host in configuration.get("server", "hosts").split(","):
-        address, port = host.strip().rsplit(":", 1)
-        address, port = address.strip("[] "), int(port)
+        try:
+            address, port = host.strip().rsplit(":", 1)
+            address, port = address.strip("[] "), int(port)
+        except ValueError as e:
+            raise RuntimeError(
+                "Failed to parse address %r: %s" % (host, e)) from e
         application = Application(configuration, logger)
-        server = make_server(
-            address, port, application, server_class, RequestHandler)
+        try:
+            server = make_server(
+                address, port, application, server_class, RequestHandler)
+        except OSError as e:
+            raise RuntimeError(
+                "Failed to start server %r: %s" % (host, e)) from e
         servers[server.socket] = server
-        logger.debug("Listening to %s port %s",
-                     server.server_name, server.server_port)
-        if configuration.getboolean("server", "ssl"):
-            logger.debug("Using SSL")
+        logger.info("Listening to %r on port %d%s",
+                    server.server_name, server.server_port, " using SSL"
+                    if configuration.getboolean("server", "ssl") else "")
 
     # Create a socket pair to notify the select syscall of program shutdown
     # This is not available in python < 3.5 on Windows
@@ -232,7 +246,7 @@ def serve(configuration, logger):
         select_timeout = 1.0
     if configuration.getboolean("server", "daemon"):
         daemonize(configuration, logger)
-    logger.debug("Radicale server ready")
+    logger.info("Radicale server ready")
     while not shutdown_program:
         try:
             rlist, _, xlist = select.select(
@@ -241,7 +255,7 @@ def serve(configuration, logger):
             # SIGINT is handled by signal handler above
             rlist, xlist = [], []
         if xlist:
-            raise RuntimeError("Unhandled socket error")
+            raise RuntimeError("unhandled socket error")
         if rlist:
             server = servers.get(rlist[0])
             if server:
@@ -64,7 +64,6 @@ from importlib import import_module
 def load(configuration, logger):
     """Load the authentication manager chosen in configuration."""
     auth_type = configuration.get("auth", "type")
-    logger.debug("Authentication type is %s", auth_type)
     if auth_type == "None":
         class_ = NoneAuth
     elif auth_type == "remote_user":
@@ -74,7 +73,12 @@ def load(configuration, logger):
     elif auth_type == "htpasswd":
         class_ = Auth
     else:
-        class_ = import_module(auth_type).Auth
+        try:
+            class_ = import_module(auth_type).Auth
+        except ImportError as e:
+            raise RuntimeError("Authentication module %r not found" %
+                               auth_type) from e
+    logger.info("Authentication type is %r", auth_type)
     return class_(configuration, logger)
 
 
@@ -127,18 +131,18 @@ class Auth(BaseAuth):
         elif self.encryption == "md5":
             try:
                 from passlib.hash import apr_md5_crypt
-            except ImportError:
+            except ImportError as e:
                 raise RuntimeError(
                     "The htpasswd encryption method 'md5' requires "
-                    "the passlib module.")
+                    "the passlib module.") from e
             self.verify = functools.partial(self._md5apr1, apr_md5_crypt)
         elif self.encryption == "bcrypt":
             try:
                 from passlib.hash import bcrypt
-            except ImportError:
+            except ImportError as e:
                 raise RuntimeError(
                     "The htpasswd encryption method 'bcrypt' requires "
-                    "the passlib module with bcrypt support.")
+                    "the passlib module with bcrypt support.") from e
             # A call to `encrypt` raises passlib.exc.MissingBackendError with a
             # good error message if bcrypt backend is not available. Trigger
             # this here.
@@ -147,14 +151,14 @@ class Auth(BaseAuth):
         elif self.encryption == "crypt":
             try:
                 import crypt
-            except ImportError:
+            except ImportError as e:
                 raise RuntimeError(
                     "The htpasswd encryption method 'crypt' requires "
-                    "the crypt() system support.")
+                    "the crypt() system support.") from e
             self.verify = functools.partial(self._crypt, crypt)
         else:
             raise RuntimeError(
-                "The htpasswd encryption method '%s' is not "
+                "The htpasswd encryption method %r is not "
                 "supported." % self.encryption)
 
     def _plain(self, hash_value, password):
@@ -201,17 +205,25 @@ class Auth(BaseAuth):
         # The content of the file is not cached because reading is generally a
         # very cheap operation, and it's useful to get live updates of the
        # htpasswd file.
-        with open(self.filename) as fd:
-            for line in fd:
-                line = line.strip()
-                if line:
-                    login, hash_value = line.split(":")
-                    # Always compare both login and password to avoid timing
-                    # attacks, see #591.
-                    login_ok = hmac.compare_digest(login, user)
-                    password_ok = self.verify(hash_value, password)
-                    if login_ok & password_ok:
-                        return True
+        try:
+            with open(self.filename) as fd:
+                for line in fd:
+                    line = line.strip()
+                    if line:
+                        try:
+                            login, hash_value = line.split(":")
+                            # Always compare both login and password to avoid
+                            # timing attacks, see #591.
+                            login_ok = hmac.compare_digest(login, user)
+                            password_ok = self.verify(hash_value, password)
+                            if login_ok & password_ok:
+                                return True
+                        except ValueError as e:
+                            raise RuntimeError("Invalid htpasswd file %r: %s" %
+                                               (self.filename, e)) from e
+        except OSError as e:
+            raise RuntimeError("Failed to load htpasswd file %r: %s" %
+                               (self.filename, e)) from e
         return False
 
 
@@ -33,111 +33,141 @@ INITIAL_CONFIG = OrderedDict([
         ("hosts", {
             "value": "127.0.0.1:5232",
             "help": "set server hostnames including ports",
-            "aliases": ["-H", "--hosts"]}),
+            "aliases": ["-H", "--hosts"],
+            "type": str}),
         ("daemon", {
             "value": "False",
             "help": "launch as daemon",
             "aliases": ["-d", "--daemon"],
-            "opposite": ["-f", "--foreground"]}),
+            "opposite": ["-f", "--foreground"],
+            "type": bool}),
         ("pid", {
             "value": "",
             "help": "set PID filename for daemon mode",
-            "aliases": ["-p", "--pid"]}),
+            "aliases": ["-p", "--pid"],
+            "type": str}),
         ("max_connections", {
             "value": "20",
-            "help": "maximum number of parallel connections"}),
+            "help": "maximum number of parallel connections",
+            "type": int}),
         ("max_content_length", {
             "value": "10000000",
-            "help": "maximum size of request body in bytes"}),
+            "help": "maximum size of request body in bytes",
+            "type": int}),
         ("timeout", {
             "value": "10",
-            "help": "socket timeout"}),
+            "help": "socket timeout",
+            "type": int}),
         ("ssl", {
             "value": "False",
             "help": "use SSL connection",
             "aliases": ["-s", "--ssl"],
-            "opposite": ["-S", "--no-ssl"]}),
+            "opposite": ["-S", "--no-ssl"],
+            "type": bool}),
         ("certificate", {
             "value": "/etc/ssl/radicale.cert.pem",
             "help": "set certificate file",
-            "aliases": ["-c", "--certificate"]}),
+            "aliases": ["-c", "--certificate"],
+            "type": str}),
         ("key", {
             "value": "/etc/ssl/radicale.key.pem",
             "help": "set private key file",
-            "aliases": ["-k", "--key"]}),
+            "aliases": ["-k", "--key"],
+            "type": str}),
         ("protocol", {
             "value": "PROTOCOL_TLSv1_2",
-            "help": "SSL protocol used"}),
+            "help": "SSL protocol used",
+            "type": str}),
         ("ciphers", {
             "value": "",
-            "help": "available ciphers"}),
+            "help": "available ciphers",
+            "type": str}),
         ("dns_lookup", {
             "value": "True",
-            "help": "use reverse DNS to resolve client address in logs"}),
+            "help": "use reverse DNS to resolve client address in logs",
+            "type": bool}),
         ("realm", {
             "value": "Radicale - Password Required",
-            "help": "message displayed when a password is needed"})])),
+            "help": "message displayed when a password is needed",
+            "type": str})])),
     ("encoding", OrderedDict([
         ("request", {
             "value": "utf-8",
-            "help": "encoding for responding requests"}),
+            "help": "encoding for responding requests",
+            "type": str}),
         ("stock", {
             "value": "utf-8",
-            "help": "encoding for storing local collections"})])),
+            "help": "encoding for storing local collections",
+            "type": str})])),
     ("auth", OrderedDict([
         ("type", {
             "value": "None",
-            "help": "authentication method"}),
+            "help": "authentication method",
+            "type": str}),
         ("htpasswd_filename", {
             "value": "/etc/radicale/users",
-            "help": "htpasswd filename"}),
+            "help": "htpasswd filename",
+            "type": str}),
         ("htpasswd_encryption", {
             "value": "bcrypt",
-            "help": "htpasswd encryption method"}),
+            "help": "htpasswd encryption method",
+            "type": str}),
         ("delay", {
             "value": "1",
-            "help": "incorrect authentication delay"})])),
+            "help": "incorrect authentication delay",
+            "type": float})])),
     ("rights", OrderedDict([
         ("type", {
             "value": "owner_only",
-            "help": "rights backend"}),
+            "help": "rights backend",
+            "type": str}),
         ("file", {
             "value": "/etc/radicale/rights",
-            "help": "file for rights management from_file"})])),
+            "help": "file for rights management from_file",
+            "type": str})])),
     ("storage", OrderedDict([
         ("type", {
             "value": "multifilesystem",
-            "help": "storage backend"}),
+            "help": "storage backend",
+            "type": str}),
         ("filesystem_folder", {
             "value": os.path.expanduser(
                 "/var/lib/radicale/collections"),
-            "help": "path where collections are stored"}),
+            "help": "path where collections are stored",
+            "type": str}),
         ("filesystem_fsync", {
             "value": "True",
-            "help": "sync all changes to filesystem during requests"}),
+            "help": "sync all changes to filesystem during requests",
+            "type": bool}),
         ("filesystem_close_lock_file", {
             "value": "False",
-            "help": "close the lock file when no more clients are waiting"}),
+            "help": "close the lock file when no more clients are waiting",
+            "type": bool}),
         ("hook", {
             "value": "",
-            "help": "command that is run after changes to storage"})])),
+            "help": "command that is run after changes to storage",
+            "type": str})])),
     ("logging", OrderedDict([
         ("config", {
             "value": "",
-            "help": "logging configuration file"}),
+            "help": "logging configuration file",
+            "type": str}),
         ("debug", {
             "value": "False",
             "help": "print debug information",
-            "aliases": ["-D", "--debug"]}),
+            "aliases": ["-D", "--debug"],
+            "type": bool}),
         ("full_environment", {
             "value": "False",
-            "help": "store all environment variables"}),
+            "help": "store all environment variables",
+            "type": bool}),
         ("mask_passwords", {
             "value": "True",
-            "help": "mask passwords in logs"})]))])
+            "help": "mask passwords in logs",
+            "type": bool})]))])
 
 
-def load(paths=(), extra_config=None):
+def load(paths=(), extra_config=None, ignore_missing_paths=True):
     config = ConfigParser()
     for section, values in INITIAL_CONFIG.items():
         config.add_section(section)
@@ -148,6 +178,31 @@ def load(paths=(), extra_config=None):
         for key, value in values.items():
             config.set(section, key, value)
     for path in paths:
-        if path:
-            config.read(path)
+        if path or not ignore_missing_paths:
+            try:
+                if not config.read(path) and not ignore_missing_paths:
+                    raise RuntimeError("No such file: %r" % path)
+            except Exception as e:
+                raise RuntimeError(
+                    "Failed to load config file %r: %s" % (path, e)) from e
+    # Check the configuration
+    for section in config.sections():
+        if section == "headers":
+            continue
+        if section not in INITIAL_CONFIG:
+            raise RuntimeError("Invalid section %r in config" % section)
+        for option in config[section]:
+            if option not in INITIAL_CONFIG[section]:
+                raise RuntimeError("Invalid option %r in section %r in "
+                                   "config" % (option, section))
+            type_ = INITIAL_CONFIG[section][option]["type"]
+            try:
+                if type_ == bool:
+                    config.getboolean(section, option)
+                else:
+                    type_(config.get(section, option))
+            except Exception as e:
+                raise RuntimeError(
+                    "Invalid value %r for option %r in section %r in config" %
+                    (config.get(section, option), option, section)) from e
     return config
@@ -38,6 +38,12 @@ def configure_from_file(logger, filename, debug):
     return logger
 
 
+class RemoveTracebackFilter(logging.Filter):
+    def filter(self, record):
+        record.exc_info = None
+        return True
+
+
 def start(name="radicale", filename=None, debug=False):
     """Start the logging according to the configuration."""
     logger = logging.getLogger(name)
@@ -53,12 +59,14 @@ def start(name="radicale", filename=None, debug=False):
         # Default configuration, standard output
         if filename:
             logger.warning(
-                "Logging configuration file '%s' not found, using stderr." %
-                filename)
+                "WARNING: Logging configuration file %r not found, using "
+                "stderr" % filename)
         handler = logging.StreamHandler(sys.stderr)
         handler.setFormatter(
             logging.Formatter("[%(thread)x] %(levelname)s: %(message)s"))
         logger.addHandler(handler)
     if debug:
         logger.setLevel(logging.DEBUG)
+    else:
+        logger.addFilter(RemoveTracebackFilter())
     return logger
@@ -37,9 +37,9 @@ Leading or ending slashes are trimmed from collection's path.
 
 """
 
+import configparser
 import os.path
 import re
-from configparser import ConfigParser
 from importlib import import_module
 
 from . import storage
@@ -50,7 +50,6 @@ def load(configuration, logger):
     rights_type = configuration.get("rights", "type")
     if configuration.get("auth", "type") == "None":
         rights_type = "None"
-    logger.info("Rights type is %r", rights_type)
     if rights_type == "None":
         rights_class = NoneRights
     elif rights_type == "authenticated":
@@ -62,7 +61,12 @@ def load(configuration, logger):
     elif rights_type == "from_file":
         rights_class = Rights
     else:
-        rights_class = import_module(rights_type).Rights
+        try:
+            rights_class = import_module(rights_type).Rights
+        except ImportError as e:
+            raise RuntimeError("Rights module %r not found" %
+                               rights_type) from e
+    logger.info("Rights type is %r", rights_type)
     return rights_class(configuration, logger).authorized
 
 
@@ -116,27 +120,36 @@ class Rights(BaseRights):
         # Prevent "regex injection"
         user_escaped = re.escape(user)
         sane_path_escaped = re.escape(sane_path)
-        regex = ConfigParser(
+        regex = configparser.ConfigParser(
             {"login": user_escaped, "path": sane_path_escaped})
-        if not regex.read(self.filename):
-            raise RuntimeError("Failed to read rights from file %r",
-                               self.filename)
-            return False
+        try:
+            if not regex.read(self.filename):
+                raise RuntimeError("No such file: %r" %
                                   self.filename)
+        except Exception as e:
+            raise RuntimeError("Failed to load rights file %r: %s" %
+                               (self.filename, e)) from e
         for section in regex.sections():
-            re_user = regex.get(section, "user")
-            re_collection = regex.get(section, "collection")
-            self.logger.debug(
-                "Test if '%s:%s' matches against '%s:%s' from section '%s'",
-                user, sane_path, re_user, re_collection, section)
-            # Emulate fullmatch
-            user_match = re.match(r"(?:%s)\Z" % re_user, user)
-            if user_match:
-                re_collection = re_collection.format(*user_match.groups())
+            try:
+                re_user_pattern = regex.get(section, "user")
+                re_collection_pattern = regex.get(section, "collection")
                 # Emulate fullmatch
-                if re.match(r"(?:%s)\Z" % re_collection, sane_path):
-                    self.logger.debug("Section '%s' matches", section)
+                user_match = re.match(r"(?:%s)\Z" % re_user_pattern, user)
+                collection_match = user_match and re.match(
+                    r"(?:%s)\Z" % re_collection_pattern.format(
+                        *map(re.escape, user_match.groups())), sane_path)
+            except Exception as e:
+                raise RuntimeError("Error in section %r of rights file %r: "
+                                   "%s" % (section, self.filename, e)) from e
+            if user_match and collection_match:
+                self.logger.debug("Rule %r:%r matches %r:%r from section %r",
+                                  user, sane_path, re_user_pattern,
+                                  re_collection_pattern, section)
                 return permission in regex.get(section, "permission")
             else:
-                self.logger.debug("Section '%s' does not match", section)
+                self.logger.debug("Rule %r:%r doesn't match %r:%r from section"
+                                  " %r", user, sane_path, re_user_pattern,
+                                  re_collection_pattern, section)
+        self.logger.info(
+            "Rights: %r:%r doesn't match any section", user, sane_path)
         return False
@@ -91,7 +91,12 @@ def load(configuration, logger):
     if storage_type == "multifilesystem":
         collection_class = Collection
     else:
-        collection_class = import_module(storage_type).Collection
+        try:
+            collection_class = import_module(storage_type).Collection
+        except ImportError as e:
+            raise RuntimeError("Storage module %r not found" %
+                               storage_type) from e
+    logger.info("Storage type is %r", storage_type)
 
     class CollectionCopy(collection_class):
         """Collection copy, avoids overriding the original class attributes."""
@@ -184,25 +189,25 @@ def path_to_filesystem(root, *paths):
 
 class UnsafePathError(ValueError):
     def __init__(self, path):
-        message = "Can't translate name safely to filesystem: %s" % path
+        message = "Can't translate name safely to filesystem: %r" % path
         super().__init__(message)
 
 
 class CollidingPathError(ValueError):
     def __init__(self, path):
-        message = "File name collision: %s" % path
+        message = "File name collision: %r" % path
         super().__init__(message)
 
 
 class ComponentExistsError(ValueError):
     def __init__(self, path):
-        message = "Component already exists: %s" % path
+        message = "Component already exists: %r" % path
         super().__init__(message)
 
 
 class ComponentNotFoundError(ValueError):
     def __init__(self, path):
-        message = "Component doesn't exist: %s" % path
+        message = "Component doesn't exist: %r" % path
         super().__init__(message)
 
 
@@ -510,7 +515,8 @@ class Collection(BaseCollection):
         for href in os.listdir(filesystem_path):
             if not is_safe_filesystem_path_component(href):
                 if not href.startswith(".Radicale"):
-                    cls.logger.debug("Skipping collection: %s", href)
+                    cls.logger.debug("Skipping collection %r in %r", href,
+                                     path)
                 continue
             child_filesystem_path = path_to_filesystem(filesystem_path, href)
             if os.path.isdir(child_filesystem_path):
@@ -621,7 +627,8 @@ class Collection(BaseCollection):
         for href in os.listdir(self._filesystem_path):
             if not is_safe_filesystem_path_component(href):
                 if not href.startswith(".Radicale"):
-                    self.logger.debug("Skipping component: %s", href)
+                    self.logger.debug(
+                        "Skipping item %r in %r", href, self.path)
                 continue
             path = os.path.join(self._filesystem_path, href)
             if os.path.isfile(path):
@@ -631,8 +638,8 @@ class Collection(BaseCollection):
         if not href:
             return None
         if not is_safe_filesystem_path_component(href):
-            self.logger.debug(
-                "Can't translate name safely to filesystem: %s", href)
+            self.logger.debug("Can't translate name %r safely to filesystem "
+                              "in %r", href, self.path)
             return None
         path = path_to_filesystem(self._filesystem_path, href)
         if not os.path.isfile(path):
@@ -644,9 +651,9 @@ class Collection(BaseCollection):
             time.gmtime(os.path.getmtime(path)))
         try:
             item = vobject.readOne(text)
-        except Exception:
-            self.logger.error("Failed to parse component: %s", href)
-            raise
+        except Exception as e:
+            raise RuntimeError("Failed to parse item %r in %r" %
+                               (href, self.path)) from e
         return Item(self, item, href, last_modified)
 
     def upload(self, href, vobject_item):
@@ -685,7 +692,11 @@ class Collection(BaseCollection):
     def get_meta(self, key=None):
         if os.path.exists(self._props_path):
             with open(self._props_path, encoding=self.encoding) as f:
-                meta = json.load(f)
+                try:
+                    meta = json.load(f)
+                except ValueError as e:
+                    raise RuntimeError("Failed to load properties of collect"
+                                       "ion %r: %s" % (self.path, e)) from e
             return meta.get(key) if key else meta
 
     def set_meta(self, props):
@@ -715,8 +726,8 @@ class Collection(BaseCollection):
             items.append(self.get(href).item)
         time_end = datetime.datetime.now()
         self.logger.info(
-            "Collection read %d items in %s sec from %s", len(items),
-            (time_end - time_begin).total_seconds(), self._filesystem_path)
+            "Read %d items in %.3f seconds from %r", len(items),
+            (time_end - time_begin).total_seconds(), self.path)
         if self.get_meta("tag") == "VCALENDAR":
             collection = vobject.iCalendar()
             for item in items:
@@ -434,11 +434,11 @@ def name_from_path(path, collection):
     path = path.strip("/") + "/"
     start = collection.path + "/"
     if not path.startswith(start):
-        raise ValueError("'%s' doesn't start with '%s'" % (path, start))
+        raise ValueError("%r doesn't start with %r" % (path, start))
     name = path[len(start):][:-1]
     if name and not storage.is_safe_path_component(name):
-        raise ValueError("'%s' is not a component in collection '%s'" %
-                         (path, collection.path))
+        raise ValueError("%r is not a component in collection %r" %
+                         (name, collection.path))
     return name
 
 
@@ -839,7 +839,8 @@ def report(base_prefix, path, xml_request, collection):
         # properties, just return an empty result.
         # InfCloud asks for expand-property reports (even if we don't announce
         # support for them) and stops working if an error code is returned.
-        collection.logger.warning("Unsupported report method: %s", root.tag)
+        collection.logger.warning("Unsupported REPORT method %r on %r "
+                                  "requested", root.tag, path)
         return multistatus
     prop_element = root.find(_tag("D", "prop"))
     props = (
@@ -857,8 +858,8 @@ def report(base_prefix, path, xml_request, collection):
             if (href_path + "/").startswith(base_prefix + "/"):
                 hreferences.add(href_path[len(base_prefix):])
             else:
-                collection.logger.info(
-                    "Skipping invalid path: %s", href_path)
+                collection.logger.warning("Skipping invalid path %r in REPORT "
+                                          "request on %r", href_path, path)
     else:
         hreferences = (path,)
     filters = (
@@ -868,8 +869,9 @@ def report(base_prefix, path, xml_request, collection):
     for hreference in hreferences:
         try:
             name = name_from_path(hreference, collection)
-        except ValueError:
-            collection.logger.info("Skipping invalid path: %s", hreference)
+        except ValueError as e:
+            collection.logger.warning("Skipping invalid path %r in REPORT "
+                                      "request on %r: %s", hreference, path, e)
             response = _item_response(base_prefix, hreference,
                                       found_item=False)
             multistatus.append(response)
@@ -891,12 +893,16 @@ def report(base_prefix, path, xml_request, collection):
             if not item:
                 continue
             if filters:
-                match = (
-                    _comp_match if collection.get_meta("tag") == "VCALENDAR"
-                    else _prop_match)
-                if not all(match(item, filter_[0]) for filter_ in filters
-                           if filter_):
-                    continue
+                try:
+                    match = (_comp_match
+                             if collection.get_meta("tag") == "VCALENDAR"
+                             else _prop_match)
+                    if not all(match(item, filter_[0]) for filter_ in filters
+                               if filter_):
+                        continue
+                except Exception as e:
+                    raise RuntimeError("Failed to filter item %r from %r: %s" %
+                                       (collection.path, item.href, e)) from e
 
             found_props = []
             not_found_props = []