2008-12-30 17:25:42 +01:00
|
|
|
# This file is part of Radicale Server - Calendar Server
|
2009-07-27 17:04:54 +02:00
|
|
|
# Copyright © 2008 Nicolas Kandel
|
|
|
|
# Copyright © 2008 Pascal Halter
|
2017-05-27 17:28:07 +02:00
|
|
|
# Copyright © 2008-2017 Guillaume Ayoub
|
2008-12-30 17:25:42 +01:00
|
|
|
#
|
|
|
|
# This library is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This library is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2010-01-19 20:31:21 +01:00
|
|
|
"""
|
2018-08-16 08:00:00 +02:00
|
|
|
Radicale WSGI application.
|
2010-01-19 20:31:21 +01:00
|
|
|
|
2018-08-16 08:00:00 +02:00
|
|
|
Can be used with an external WSGI server or the built-in server.
|
2010-01-19 20:31:21 +01:00
|
|
|
|
|
|
|
"""
|
|
|
|
|
2010-01-21 18:52:53 +01:00
|
|
|
import base64
|
2018-08-21 18:43:46 +02:00
|
|
|
import contextlib
|
2017-04-15 10:51:00 +02:00
|
|
|
import datetime
|
2016-08-25 04:33:14 +02:00
|
|
|
import io
|
2016-08-11 02:10:09 +02:00
|
|
|
import itertools
|
2017-05-07 08:17:39 +02:00
|
|
|
import logging
|
2016-08-02 14:37:39 +02:00
|
|
|
import os
|
2018-01-14 18:29:37 +01:00
|
|
|
import pkg_resources
|
2016-08-04 06:08:08 +02:00
|
|
|
import posixpath
|
2016-08-02 14:37:39 +02:00
|
|
|
import pprint
|
2017-05-23 03:11:41 +02:00
|
|
|
import random
|
2011-05-01 15:25:52 +02:00
|
|
|
import socket
|
2018-08-21 18:43:46 +02:00
|
|
|
import sys
|
2016-08-02 14:37:39 +02:00
|
|
|
import threading
|
2017-05-23 03:11:41 +02:00
|
|
|
import time
|
2016-05-21 02:26:03 +02:00
|
|
|
import zlib
|
2016-03-31 19:57:40 +02:00
|
|
|
from http import client
|
2018-08-18 16:43:19 +02:00
|
|
|
from urllib.parse import urlparse, quote
|
2017-05-07 08:17:39 +02:00
|
|
|
from xml.etree import ElementTree as ET
|
2008-12-30 17:25:42 +01:00
|
|
|
|
2018-08-14 18:45:21 +02:00
|
|
|
import vobject
|
2016-04-22 04:37:02 +02:00
|
|
|
|
2018-04-20 22:53:42 +02:00
|
|
|
from radicale import auth, config, log, rights, storage, web, xmlutils
|
2018-08-16 07:59:55 +02:00
|
|
|
from radicale.log import logger
|
2018-01-14 18:29:37 +01:00
|
|
|
|
2018-08-18 12:56:40 +02:00
|
|
|
# Version string of the installed Radicale distribution, reported to clients.
VERSION = pkg_resources.get_distribution("radicale").version

# Pre-built ``(status, headers, body)`` triples for common error responses.
# ``headers`` is a tuple of pairs so the constants stay immutable.
NOT_ALLOWED = (
    client.FORBIDDEN, (("Content-Type", "text/plain"),),
    "Access to the requested resource forbidden.")
FORBIDDEN = (
    client.FORBIDDEN, (("Content-Type", "text/plain"),),
    "Action on the requested resource refused.")
BAD_REQUEST = (
    client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
NOT_FOUND = (
    client.NOT_FOUND, (("Content-Type", "text/plain"),),
    "The requested resource could not be found.")
CONFLICT = (
    client.CONFLICT, (("Content-Type", "text/plain"),),
    "Conflict in the request.")
WEBDAV_PRECONDITION_FAILED = (
    client.CONFLICT, (("Content-Type", "text/plain"),),
    "WebDAV precondition failed.")
METHOD_NOT_ALLOWED = (
    client.METHOD_NOT_ALLOWED, (("Content-Type", "text/plain"),),
    "The method is not allowed on the requested resource.")
PRECONDITION_FAILED = (
    client.PRECONDITION_FAILED,
    (("Content-Type", "text/plain"),), "Precondition failed.")
REQUEST_TIMEOUT = (
    client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
    "Connection timed out.")
REQUEST_ENTITY_TOO_LARGE = (
    client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
    "Request body too large.")
REMOTE_DESTINATION = (
    client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
    "Remote destination not supported.")
DIRECTORY_LISTING = (
    client.FORBIDDEN, (("Content-Type", "text/plain"),),
    "Directory listings are not supported.")
INTERNAL_SERVER_ERROR = (
    client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
    "A server error occurred. Please contact the administrator.")

# Value of the ``DAV`` response header advertising the supported
# WebDAV compliance classes (CalDAV/CardDAV capabilities).
DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
|
2012-08-15 23:39:18 +02:00
|
|
|
|
2011-02-08 19:27:00 +01:00
|
|
|
|
2016-04-22 04:37:02 +02:00
|
|
|
class Application:
|
2011-12-31 13:31:22 +01:00
|
|
|
"""WSGI application managing collections."""
|
2016-08-05 02:14:49 +02:00
|
|
|
|
2018-08-18 12:56:38 +02:00
|
|
|
def __init__(self, configuration):
    """Initialize application.

    ``configuration`` is the parsed Radicale configuration; it is used
    to instantiate the pluggable authentication, storage, rights and
    web backends.
    """
    super().__init__()
    self.configuration = configuration
    # Load the backends declared in the configuration.
    self.Auth = auth.load(configuration)
    self.Collection = storage.load(configuration)
    self.Rights = rights.load(configuration)
    self.Web = web.load(configuration)
    # Charset used to decode request bodies and encode responses.
    self.encoding = configuration.get("encoding", "request")
|
2016-06-11 12:53:58 +02:00
|
|
|
|
|
|
|
def headers_log(self, environ):
|
|
|
|
"""Sanitize headers for logging."""
|
2011-05-11 17:09:44 +02:00
|
|
|
request_environ = dict(environ)
|
2016-08-05 02:14:49 +02:00
|
|
|
|
|
|
|
# Mask passwords
|
|
|
|
mask_passwords = self.configuration.getboolean(
|
|
|
|
"logging", "mask_passwords")
|
2017-08-14 18:16:45 +02:00
|
|
|
authorization = request_environ.get("HTTP_AUTHORIZATION", "")
|
|
|
|
if mask_passwords and authorization.startswith("Basic"):
|
2016-06-11 12:53:58 +02:00
|
|
|
request_environ["HTTP_AUTHORIZATION"] = "Basic **masked**"
|
2017-08-14 18:16:46 +02:00
|
|
|
if request_environ.get("HTTP_COOKIE"):
|
|
|
|
request_environ["HTTP_COOKIE"] = "**masked**"
|
2016-08-05 02:14:49 +02:00
|
|
|
|
2011-05-11 17:09:44 +02:00
|
|
|
return request_environ
|
2008-12-30 17:25:42 +01:00
|
|
|
|
2011-05-01 14:46:29 +02:00
|
|
|
def decode(self, text, environ):
|
|
|
|
"""Try to magically decode ``text`` according to given ``environ``."""
|
2010-01-21 18:52:53 +01:00
|
|
|
# List of charsets to try
|
|
|
|
charsets = []
|
|
|
|
|
|
|
|
# First append content charset given in the request
|
2011-05-01 14:46:29 +02:00
|
|
|
content_type = environ.get("CONTENT_TYPE")
|
2010-02-10 18:57:21 +01:00
|
|
|
if content_type and "charset=" in content_type:
|
2015-04-29 19:07:17 +02:00
|
|
|
charsets.append(
|
|
|
|
content_type.split("charset=")[1].split(";")[0].strip())
|
2010-01-21 18:52:53 +01:00
|
|
|
# Then append default Radicale charset
|
2011-05-01 14:46:29 +02:00
|
|
|
charsets.append(self.encoding)
|
2010-01-21 18:52:53 +01:00
|
|
|
# Then append various fallbacks
|
|
|
|
charsets.append("utf-8")
|
|
|
|
charsets.append("iso8859-1")
|
|
|
|
|
|
|
|
# Try to decode
|
|
|
|
for charset in charsets:
|
|
|
|
try:
|
|
|
|
return text.decode(charset)
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
pass
|
|
|
|
raise UnicodeDecodeError
|
|
|
|
|
2012-08-15 15:12:18 +02:00
|
|
|
def collect_allowed_items(self, items, user):
    """Get items from request that user is allowed to access.

    Yields ``(item, permission)`` pairs, where ``permission`` is
    ``"w"`` or ``"r"``; items the user may not access are dropped.
    """
    for item in items:
        if isinstance(item, storage.BaseCollection):
            path = storage.sanitize_path("/%s/" % item.path)
            if item.get_meta("tag"):
                # Tagged collection (e.g. calendar or address book):
                # checked with the lowercase permission letters.
                permissions = self.Rights.authorized(user, path, "rw")
                target = "collection with tag %r" % item.path
            else:
                # Untagged collection: checked with the uppercase letters.
                permissions = self.Rights.authorized(user, path, "RW")
                target = "collection %r" % item.path
        else:
            # Single item: access is determined by its parent collection.
            path = storage.sanitize_path("/%s/" % item.collection.path)
            permissions = self.Rights.authorized(user, path, "rw")
            target = "item %r from %r" % (item.href, item.collection.path)
        # Prefer write access when both write and read are granted.
        if rights.intersect_permissions(permissions, "Ww"):
            permission = "w"
            status = "write"
        elif rights.intersect_permissions(permissions, "Rr"):
            permission = "r"
            status = "read"
        else:
            permission = ""
            status = "NO"
        logger.debug(
            "%s has %s access to %s",
            repr(user) if user else "anonymous user", status, target)
        if permission:
            yield item, permission
|
2012-08-15 15:12:18 +02:00
|
|
|
|
2011-05-01 14:46:29 +02:00
|
|
|
def __call__(self, environ, start_response):
    """WSGI entry point.

    Delegates to ``_handle_request`` and converts any unhandled
    exception into a plain 500 response, so the WSGI server never sees
    a crashing application.
    """
    # Forward Radicale's log output to the server-provided error
    # stream for the duration of this request.
    with log.register_stream(environ["wsgi.errors"]):
        try:
            status, headers, answers = self._handle_request(environ)
        except Exception as e:
            # Gather request details defensively: the environ itself
            # may be part of what went wrong.
            try:
                method = str(environ["REQUEST_METHOD"])
            except Exception:
                method = "unknown"
            try:
                path = str(environ.get("PATH_INFO", ""))
            except Exception:
                path = ""
            logger.error("An exception occurred during %s request on %r: "
                         "%s", method, path, e, exc_info=True)
            # Build the fallback 500 response by hand, bypassing the
            # normal response machinery that just failed.
            status, headers, answer = INTERNAL_SERVER_ERROR
            answer = answer.encode("ascii")
            status = "%d %s" % (
                status, client.responses.get(status, "Unknown"))
            headers = [
                ("Content-Length", str(len(answer)))] + list(headers)
            answers = [answer]
        start_response(status, headers)
    return answers
|
2016-08-05 02:14:49 +02:00
|
|
|
|
2017-05-31 11:08:32 +02:00
|
|
|
def _handle_request(self, environ):
    """Manage a request.

    Performs logging, path sanitation, authentication and principal
    collection creation, then dispatches to the ``do_*`` method
    matching the HTTP verb. Returns ``(status, headers, answers)`` as
    expected by ``__call__``.
    """
    def response(status, headers=(), answer=None):
        # Finalize a response: encode/compress the body, add
        # configured extra headers and log the outcome.
        # NOTE: relies on ``depthinfo`` and ``time_begin`` being bound
        # in the enclosing scope before this closure is called.
        headers = dict(headers)
        # Set content length
        if answer:
            if hasattr(answer, "encode"):
                logger.debug("Response content:\n%s", answer)
                headers["Content-Type"] += "; charset=%s" % self.encoding
                answer = answer.encode(self.encoding)
            accept_encoding = [
                encoding.strip() for encoding in
                environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
                if encoding.strip()]

            if "gzip" in accept_encoding:
                # wbits=16+MAX_WBITS selects the gzip container format.
                zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
                answer = zcomp.compress(answer) + zcomp.flush()
                headers["Content-Encoding"] = "gzip"

            headers["Content-Length"] = str(len(answer))

        # Add extra headers set in configuration
        if self.configuration.has_section("headers"):
            for key in self.configuration.options("headers"):
                headers[key] = self.configuration.get("headers", key)

        # Start response
        time_end = datetime.datetime.now()
        status = "%d %s" % (
            status, client.responses.get(status, "Unknown"))
        logger.info(
            "%s response status for %r%s in %.3f seconds: %s",
            environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
            depthinfo, (time_end - time_begin).total_seconds(), status)
        # Return response content
        return status, list(headers.items()), [answer] if answer else []

    # Build a human-readable description of the remote peer for logging.
    remote_host = "unknown"
    if environ.get("REMOTE_HOST"):
        remote_host = repr(environ["REMOTE_HOST"])
    elif environ.get("REMOTE_ADDR"):
        remote_host = environ["REMOTE_ADDR"]
    if environ.get("HTTP_X_FORWARDED_FOR"):
        remote_host = "%r (forwarded by %s)" % (
            environ["HTTP_X_FORWARDED_FOR"], remote_host)
    remote_useragent = ""
    if environ.get("HTTP_USER_AGENT"):
        remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
    depthinfo = ""
    if environ.get("HTTP_DEPTH"):
        depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
    time_begin = datetime.datetime.now()
    logger.info(
        "%s request for %r%s received from %s%s",
        environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
        remote_host, remote_useragent)
    headers = pprint.pformat(self.headers_log(environ))
    logger.debug("Request headers:\n%s", headers)

    # Let reverse proxies overwrite SCRIPT_NAME
    if "HTTP_X_SCRIPT_NAME" in environ:
        # script_name must be removed from PATH_INFO by the client.
        unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
        logger.debug("Script name overwritten by client: %r",
                     unsafe_base_prefix)
    else:
        # SCRIPT_NAME is already removed from PATH_INFO, according to the
        # WSGI specification.
        unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
    # Sanitize base prefix
    base_prefix = storage.sanitize_path(unsafe_base_prefix).rstrip("/")
    logger.debug("Sanitized script name: %r", base_prefix)
    # Sanitize request URI (a WSGI server indicates with an empty path,
    # that the URL targets the application root without a trailing slash)
    path = storage.sanitize_path(environ.get("PATH_INFO", ""))
    logger.debug("Sanitized path: %r", path)

    # Get function corresponding to method
    function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())

    # If "/.well-known" is not available, clients query "/"
    if path == "/.well-known" or path.startswith("/.well-known/"):
        return response(*NOT_FOUND)

    # Ask authentication backend to check rights
    login = password = ""
    external_login = self.Auth.get_external_login(environ)
    authorization = environ.get("HTTP_AUTHORIZATION", "")
    if external_login:
        login, password = external_login
        login, password = login or "", password or ""
    elif authorization.startswith("Basic"):
        authorization = authorization[len("Basic"):].strip()
        login, password = self.decode(base64.b64decode(
            authorization.encode("ascii")), environ).split(":", 1)

    # ``user`` is the (possibly remapped) authenticated user name or "".
    user = self.Auth.login(login, password) or "" if login else ""
    if user and login == user:
        logger.info("Successful login: %r", user)
    elif user:
        logger.info("Successful login: %r -> %r", login, user)
    elif login:
        logger.info("Failed login attempt: %r", login)
        # Random delay to avoid timing oracles and bruteforce attacks
        delay = self.configuration.getfloat("auth", "delay")
        if delay > 0:
            random_delay = delay * (0.5 + random.random())
            logger.debug("Sleeping %.3f seconds", random_delay)
            time.sleep(random_delay)

    if user and not storage.is_safe_path_component(user):
        # Prevent usernames like "user/calendar.ics"
        logger.info("Refused unsafe username: %r", user)
        user = ""

    # Create principal collection
    if user:
        principal_path = "/%s/" % user
        if self.Rights.authorized(user, principal_path, "W"):
            with self.Collection.acquire_lock("r", user):
                principal = next(
                    self.Collection.discover(principal_path, depth="1"),
                    None)
            if not principal:
                # The principal collection does not exist yet; create
                # it under a write lock.
                with self.Collection.acquire_lock("w", user):
                    try:
                        self.Collection.create_collection(principal_path)
                    except ValueError as e:
                        logger.warning("Failed to create principal "
                                       "collection %r: %s", user, e)
                        user = ""
        else:
            logger.warning("Access to principal path %r denied by "
                           "rights backend", principal_path)

    if self.configuration.getboolean("internal", "internal_server"):
        # Verify content length
        content_length = int(environ.get("CONTENT_LENGTH") or 0)
        if content_length:
            max_content_length = self.configuration.getint(
                "server", "max_content_length")
            if max_content_length and content_length > max_content_length:
                logger.info("Request body too large: %d", content_length)
                return response(*REQUEST_ENTITY_TOO_LARGE)

    # Dispatch: anonymous requests are allowed through, failed logins
    # are rejected.
    if not login or user:
        status, headers, answer = function(
            environ, base_prefix, path, user)
        if (status, headers, answer) == NOT_ALLOWED:
            logger.info("Access to %r denied for %s", path,
                        repr(user) if user else "anonymous user")
    else:
        status, headers, answer = NOT_ALLOWED

    if ((status, headers, answer) == NOT_ALLOWED and not user and
            not external_login):
        # Unknown or unauthorized user
        logger.debug("Asking client for authentication")
        status = client.UNAUTHORIZED
        realm = self.configuration.get("auth", "realm")
        headers = dict(headers)
        headers.update({
            "WWW-Authenticate":
                "Basic realm=\"%s\"" % realm})

    return response(status, headers, answer)
|
2012-08-08 18:29:09 +02:00
|
|
|
|
2016-08-04 06:08:08 +02:00
|
|
|
def _access(self, user, path, permission, item=None):
|
2018-08-21 18:43:45 +02:00
|
|
|
if permission not in "rw":
|
|
|
|
raise ValueError("Invalid permission argument: %r" % permission)
|
|
|
|
if not item:
|
|
|
|
permissions = permission + permission.upper()
|
|
|
|
parent_permissions = permission
|
|
|
|
elif isinstance(item, storage.BaseCollection):
|
|
|
|
if item.get_meta("tag"):
|
|
|
|
permissions = permission
|
|
|
|
else:
|
|
|
|
permissions = permission.upper()
|
|
|
|
parent_permissions = ""
|
|
|
|
else:
|
|
|
|
permissions = ""
|
|
|
|
parent_permissions = permission
|
|
|
|
if permissions and self.Rights.authorized(user, path, permissions):
|
|
|
|
return True
|
|
|
|
if parent_permissions:
|
|
|
|
parent_path = storage.sanitize_path(
|
|
|
|
"/%s/" % posixpath.dirname(path.strip("/")))
|
|
|
|
if self.Rights.authorized(user, parent_path, parent_permissions):
|
|
|
|
return True
|
|
|
|
return False
|
2016-08-04 06:08:08 +02:00
|
|
|
|
2017-05-07 08:17:39 +02:00
|
|
|
def _read_raw_content(self, environ):
|
2016-08-08 07:00:24 +02:00
|
|
|
content_length = int(environ.get("CONTENT_LENGTH") or 0)
|
2017-05-07 08:17:39 +02:00
|
|
|
if not content_length:
|
|
|
|
return b""
|
|
|
|
content = environ["wsgi.input"].read(content_length)
|
|
|
|
if len(content) < content_length:
|
2017-05-31 11:08:32 +02:00
|
|
|
raise RuntimeError("Request body too short: %d" % len(content))
|
2017-05-07 08:17:39 +02:00
|
|
|
return content
|
|
|
|
|
|
|
|
def _read_content(self, environ):
    """Read the request body and decode it to a string."""
    raw = self._read_raw_content(environ)
    decoded = self.decode(raw, environ)
    logger.debug("Request content:\n%s", decoded)
    return decoded
|
|
|
|
|
2017-05-07 08:17:39 +02:00
|
|
|
def _read_xml_content(self, environ):
    """Read the request body and parse it as XML.

    Returns ``None`` for an empty body. Raises ``RuntimeError``
    (chained to the parse error) for invalid XML.
    """
    text = self.decode(self._read_raw_content(environ), environ)
    if not text:
        return None
    try:
        root = ET.fromstring(text)
    except ET.ParseError as e:
        logger.debug("Request content (Invalid XML):\n%s", text)
        raise RuntimeError("Failed to parse XML: %s" % e) from e
    # Pretty-printing is expensive, only do it when debug logging is on.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Request content:\n%s",
                     xmlutils.pretty_xml(root))
    return root
|
|
|
|
|
|
|
|
def _write_xml_content(self, xml_content):
    """Serialize an XML element to bytes in the configured encoding."""
    # Pretty-printing is expensive, only do it when debug logging is on.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Response content:\n%s",
                     xmlutils.pretty_xml(xml_content))
    buffer = io.BytesIO()
    tree = ET.ElementTree(xml_content)
    tree.write(buffer, encoding=self.encoding, xml_declaration=True)
    return buffer.getvalue()
|
|
|
|
|
2018-08-14 18:39:09 +02:00
|
|
|
def _webdav_error_response(self, namespace, name,
                           status=WEBDAV_PRECONDITION_FAILED[0]):
    """Generate an XML error response for a failed WebDAV precondition."""
    error_xml = xmlutils.webdav_error(namespace, name)
    body = self._write_xml_content(error_xml)
    headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
    return status, headers, body
|
|
|
|
|
2018-08-18 16:43:19 +02:00
|
|
|
def _propose_filename(self, collection):
|
|
|
|
"""Propose a filename for a collection."""
|
|
|
|
tag = collection.get_meta("tag")
|
|
|
|
if tag == "VADDRESSBOOK":
|
|
|
|
fallback_title = "Address book"
|
|
|
|
suffix = ".vcf"
|
|
|
|
elif tag == "VCALENDAR":
|
|
|
|
fallback_title = "Calendar"
|
|
|
|
suffix = ".ics"
|
|
|
|
else:
|
|
|
|
fallback_title = posixpath.basename(collection.path)
|
|
|
|
suffix = ""
|
|
|
|
title = collection.get_meta("D:displayname") or fallback_title
|
|
|
|
if title and not title.lower().endswith(suffix.lower()):
|
|
|
|
title += suffix
|
|
|
|
return title
|
|
|
|
|
|
|
|
def _content_disposition_attachement(self, filename):
|
|
|
|
value = "attachement"
|
|
|
|
try:
|
|
|
|
encoded_filename = quote(filename, encoding=self.encoding)
|
|
|
|
except UnicodeEncodeError as e:
|
|
|
|
logger.warning("Failed to encode filename: %r", filename,
|
|
|
|
exc_info=True)
|
|
|
|
encoded_filename = ""
|
|
|
|
if encoded_filename:
|
|
|
|
value += "; filename*=%s''%s" % (self.encoding, encoded_filename)
|
|
|
|
return value
|
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_DELETE(self, environ, base_prefix, path, user):
    """Manage DELETE request.

    Deletes the item or collection at ``path`` after checking write
    access and the ``If-Match`` precondition, and returns an XML
    multistatus answer.
    """
    # Cheap pre-check before taking the storage lock.
    if not self._access(user, path, "w"):
        return NOT_ALLOWED
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if not item:
            return NOT_FOUND
        # Re-check with the discovered item (collection vs. item rules).
        if not self._access(user, path, "w", item):
            return NOT_ALLOWED
        # Missing If-Match header means unconditional delete.
        if_match = environ.get("HTTP_IF_MATCH", "*")
        if if_match not in ("*", item.etag):
            # ETag precondition not verified, do not delete item
            return PRECONDITION_FAILED
        if isinstance(item, storage.BaseCollection):
            xml_answer = xmlutils.delete(base_prefix, path, item)
        else:
            xml_answer = xmlutils.delete(
                base_prefix, path, item.collection, item.href)
        headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
        return client.OK, headers, self._write_xml_content(xml_answer)
|
2011-06-29 11:04:09 +02:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_GET(self, environ, base_prefix, path, user):
    """Manage GET request.

    Serves the web module for ``/.web`` paths, redirects the root URL
    to it, and otherwise returns the serialized item or collection.
    """
    # Redirect to .web if the root URL is requested
    if not path.strip("/"):
        web_path = ".web"
        if not environ.get("PATH_INFO"):
            # Empty PATH_INFO: the URL lacks a trailing slash, so the
            # relative redirect must include the base prefix segment.
            web_path = posixpath.join(posixpath.basename(base_prefix),
                                      web_path)
        return (client.FOUND,
                {"Location": web_path, "Content-Type": "text/plain"},
                "Redirected to %s" % web_path)
    # Dispatch .web URL to web module
    if path == "/.web" or path.startswith("/.web/"):
        return self.Web.get(environ, base_prefix, path, user)
    # Cheap pre-check before taking the storage lock.
    if not self._access(user, path, "r"):
        return NOT_ALLOWED
    with self.Collection.acquire_lock("r", user):
        item = next(self.Collection.discover(path), None)
        if not item:
            return NOT_FOUND
        # Re-check with the discovered item (collection vs. item rules).
        if not self._access(user, path, "r", item):
            return NOT_ALLOWED
        if isinstance(item, storage.BaseCollection):
            tag = item.get_meta("tag")
            if not tag:
                # Only tagged collections can be serialized as a whole.
                return DIRECTORY_LISTING
            content_type = xmlutils.MIMETYPES[tag]
            # Offer the whole collection as a downloadable file.
            content_disposition = self._content_disposition_attachement(
                self._propose_filename(item))
        else:
            content_type = xmlutils.OBJECT_MIMETYPES[item.name]
            content_disposition = ""
        headers = {
            "Content-Type": content_type,
            "Last-Modified": item.last_modified,
            "ETag": item.etag}
        if content_disposition:
            headers["Content-Disposition"] = content_disposition
        answer = item.serialize()
        return client.OK, headers, answer
|
2012-08-09 16:00:31 +02:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_HEAD(self, environ, base_prefix, path, user):
|
2011-05-01 14:46:29 +02:00
|
|
|
"""Manage HEAD request."""
|
2016-09-04 20:15:08 +02:00
|
|
|
status, headers, answer = self.do_GET(
|
|
|
|
environ, base_prefix, path, user)
|
2011-05-01 14:46:29 +02:00
|
|
|
return status, headers, None
|
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_MKCALENDAR(self, environ, base_prefix, path, user):
    """Manage MKCALENDAR request.

    Creates a new calendar collection at ``path`` from the request's
    properties, after validating rights, the request body and the
    parent collection.
    """
    if not self.Rights.authorized(user, path, "w"):
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    props["tag"] = "VCALENDAR"
    # TODO: use this?
    # timezone = props.get("C:calendar-timezone")
    try:
        storage.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        # Bug fix: the original only logged the warning and fell
        # through, creating the collection with invalid properties.
        # Reject the request like do_MKCOL does.
        return BAD_REQUEST
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            # MKCALENDAR requires the target to not exist yet.
            return self._webdav_error_response(
                "D", "resource-must-be-null")
        parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(path.strip("/")))
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return CONFLICT
        # Calendars can only be created inside plain (untagged)
        # collections.
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning(
                "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
            return BAD_REQUEST
    return client.CREATED, {}, None
|
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_MKCOL(self, environ, base_prefix, path, user):
    """Manage MKCOL request.

    Creates a WebDAV collection at ``path``.  The optional XML request
    body may carry properties (including a CalDAV/CardDAV ``tag``).
    Returns a ``(status, headers, answer)`` triple.
    """
    # Ask for both kinds of create permission up front; which one is
    # actually required depends on the request body (see below).
    permissions = self.Rights.authorized(user, path, "Ww")
    if not permissions:
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad MKCOL request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    try:
        storage.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning(
            "Bad MKCOL request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    # A tagged collection needs "w", an untagged one needs "W".
    if (props.get("tag") and "w" not in permissions or
            not props.get("tag") and "W" not in permissions):
        return NOT_ALLOWED
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            # Target already exists: MKCOL must not overwrite.
            return METHOD_NOT_ALLOWED
        parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(path.strip("/")))
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            # Parent collection is missing.
            return CONFLICT
        # New collections may only be created inside an untagged
        # collection (not inside a calendar/address book or an item).
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning(
                "Bad MKCOL request on %r: %s", path, e, exc_info=True)
            return BAD_REQUEST
        return client.CREATED, {}, None
|
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_MOVE(self, environ, base_prefix, path, user):
    """Manage MOVE request.

    Moves a single item (moving whole collections is not supported) to
    the location given by the ``Destination`` header, which must point
    at this server under the same base prefix.
    Returns a ``(status, headers, answer)`` triple.
    """
    raw_dest = environ.get("HTTP_DESTINATION", "")
    to_url = urlparse(raw_dest)
    # HTTP_HOST may be absent from the WSGI environ (PEP 3333 only
    # requires it when the client sent a Host header).  Use .get() so a
    # missing header is treated like a foreign destination instead of
    # raising KeyError and producing an internal server error.
    if to_url.netloc != environ.get("HTTP_HOST", ""):
        logger.info("Unsupported destination address: %r", raw_dest)
        # Remote destination server, not supported
        return REMOTE_DESTINATION
    if not self._access(user, path, "w"):
        return NOT_ALLOWED
    to_path = storage.sanitize_path(to_url.path)
    if not (to_path + "/").startswith(base_prefix + "/"):
        logger.warning("Destination %r from MOVE request on %r doesn't "
                       "start with base prefix", to_path, path)
        return NOT_ALLOWED
    to_path = to_path[len(base_prefix):]
    if not self._access(user, to_path, "w"):
        return NOT_ALLOWED

    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if not item:
            return NOT_FOUND
        # Re-check rights now that the item itself is known.
        if (not self._access(user, path, "w", item) or
                not self._access(user, to_path, "w", item)):
            return NOT_ALLOWED
        if isinstance(item, storage.BaseCollection):
            # TODO: support moving collections
            return METHOD_NOT_ALLOWED

        to_item = next(self.Collection.discover(to_path), None)
        if isinstance(to_item, storage.BaseCollection):
            # Destination is an existing collection: can't replace it.
            return FORBIDDEN
        to_parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(to_path.strip("/")))
        to_collection = next(
            self.Collection.discover(to_parent_path), None)
        if not to_collection:
            # Destination's parent collection is missing.
            return CONFLICT
        # Source and destination collections must share the same tag
        # (both calendars or both address books).
        tag = item.collection.get_meta("tag")
        if not tag or tag != to_collection.get_meta("tag"):
            return FORBIDDEN
        if to_item and environ.get("HTTP_OVERWRITE", "F") != "T":
            # Destination exists and client forbade overwriting.
            return PRECONDITION_FAILED
        # Reject moves that would duplicate a UID in the destination
        # collection (or overwrite an item with a different UID).
        if (to_item and item.uid != to_item.uid or
                not to_item and
                to_collection.path != item.collection.path and
                to_collection.has_uid(item.uid)):
            return self._webdav_error_response(
                "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict")
        to_href = posixpath.basename(to_path.strip("/"))
        try:
            self.Collection.move(item, to_collection, to_href)
        except ValueError as e:
            logger.warning(
                "Bad MOVE request on %r: %s", path, e, exc_info=True)
            return BAD_REQUEST
        return client.NO_CONTENT if to_item else client.CREATED, {}, None
|
2016-08-04 06:08:08 +02:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_OPTIONS(self, environ, base_prefix, path, user):
    """Manage OPTIONS request.

    Advertises every implemented ``do_*`` handler as an allowed HTTP
    method plus the supported DAV compliance classes.
    """
    supported_methods = [
        attribute[3:] for attribute in dir(self)
        if attribute.startswith("do_")]
    headers = {"Allow": ", ".join(supported_methods),
               "DAV": DAV_HEADERS}
    return client.OK, headers, None
|
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_PROPFIND(self, environ, base_prefix, path, user):
    """Manage PROPFIND request.

    Discovers ``path`` (and, depending on the ``Depth`` header, its
    children), filters the results by the user's read rights and
    answers with a multi-status XML document.
    """
    if not self._access(user, path, "r"):
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad PROPFIND request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    with self.Collection.acquire_lock("r", user):
        items = self.Collection.discover(
            path, environ.get("HTTP_DEPTH", "0"))
        # take root item for rights checking
        item = next(items, None)
        if not item:
            return NOT_FOUND
        if not self._access(user, path, "r", item):
            return NOT_ALLOWED
        # put item back
        items = itertools.chain([item], items)
        allowed_items = self.collect_allowed_items(items, user)
        headers = {"DAV": DAV_HEADERS,
                   "Content-Type": "text/xml; charset=%s" % self.encoding}
        status, xml_answer = xmlutils.propfind(
            base_prefix, path, xml_content, allowed_items, user)
        if status == client.FORBIDDEN:
            # Map forbidden PROPFIND results onto the generic
            # authorization failure response.
            return NOT_ALLOWED
        return status, headers, self._write_xml_content(xml_answer)
|
2016-08-04 06:08:08 +02:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_PROPPATCH(self, environ, base_prefix, path, user):
    """Manage PROPPATCH request.

    Applies property changes from the XML request body to the
    collection at ``path`` and answers with a multi-status document.
    """
    if not self._access(user, path, "w"):
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if not item:
            return NOT_FOUND
        # Re-check rights with the concrete item.
        if not self._access(user, path, "w", item):
            return NOT_ALLOWED
        # Properties can only be patched on collections, not on
        # individual items.
        if not isinstance(item, storage.BaseCollection):
            return FORBIDDEN
        headers = {"DAV": DAV_HEADERS,
                   "Content-Type": "text/xml; charset=%s" % self.encoding}
        try:
            xml_answer = xmlutils.proppatch(base_prefix, path, xml_content,
                                            item)
        except ValueError as e:
            logger.warning(
                "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
            return BAD_REQUEST
        return (client.MULTI_STATUS, headers,
                self._write_xml_content(xml_answer))
|
2012-09-15 09:08:01 +02:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request.

    Uploads either a single item into an existing calendar/address
    book, or a whole collection at once (when ``path`` addresses a
    collection or its parent is untagged).  Parsing and sanitizing of
    the request body happen *before* the storage lock is taken; the
    work is redone under the lock only if the optimistic guess about
    the upload mode turns out to be wrong.
    Returns a ``(status, headers, answer)`` triple.
    """
    if not self._access(user, path, "w"):
        return NOT_ALLOWED
    try:
        content = self._read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    # Prepare before locking
    parent_path = storage.sanitize_path(
        "/%s/" % posixpath.dirname(path.strip("/")))
    permissions = self.Rights.authorized(user, path, "Ww")
    parent_permissions = self.Rights.authorized(user, parent_path, "w")

    def prepare(vobject_items, tag=None, write_whole_collection=None):
        # Turn parsed vobject components into storage items.  Exceptions
        # raised during preparation are captured (not raised) so the
        # caller can report them only if this prepared result is
        # actually used.
        if (write_whole_collection or
                permissions and not parent_permissions):
            # Guess: the whole collection is being replaced; derive the
            # collection tag from the content type and the items.
            write_whole_collection = True
            content_type = environ.get("CONTENT_TYPE",
                                       "").split(";")[0]
            tags = {value: key
                    for key, value in xmlutils.MIMETYPES.items()}
            tag = storage.predict_tag_of_whole_collection(
                vobject_items, tags.get(content_type))
            if not tag:
                raise ValueError("Can't determine collection tag")
            collection_path = storage.sanitize_path(path).strip("/")
        elif (write_whole_collection is not None and
                not write_whole_collection or
                not permissions and parent_permissions):
            # Guess: a single item inside the parent collection.
            write_whole_collection = False
            if tag is None:
                tag = storage.predict_tag_of_parent_collection(
                    vobject_items)
            collection_path = posixpath.dirname(
                storage.sanitize_path(path).strip("/"))
        props = None
        stored_exc_info = None
        items = []
        try:
            if tag:
                storage.check_and_sanitize_items(
                    vobject_items, is_collection=write_whole_collection,
                    tag=tag)
                if write_whole_collection and tag == "VCALENDAR":
                    # A calendar upload contains exactly one VCALENDAR;
                    # split its components into one item per UID.
                    # NOTE(review): the loop variable "content" shadows
                    # the outer request body, harmless here but easy to
                    # misread.
                    vobject_components = []
                    vobject_item, = vobject_items
                    for content in ("vevent", "vtodo", "vjournal"):
                        vobject_components.extend(
                            getattr(vobject_item, "%s_list" % content, []))
                    vobject_components_by_uid = itertools.groupby(
                        sorted(vobject_components, key=storage.get_uid),
                        storage.get_uid)
                    for uid, components in vobject_components_by_uid:
                        vobject_collection = vobject.iCalendar()
                        for component in components:
                            vobject_collection.add(component)
                        item = storage.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_collection)
                        item.prepare()
                        items.append(item)
                elif write_whole_collection and tag == "VADDRESSBOOK":
                    # An address book upload is a flat list of vCards.
                    for vobject_item in vobject_items:
                        item = storage.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_item)
                        item.prepare()
                        items.append(item)
                elif not write_whole_collection:
                    # Single-item upload: exactly one component allowed.
                    vobject_item, = vobject_items
                    item = storage.Item(collection_path=collection_path,
                                        vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)

            if write_whole_collection:
                # Derive collection properties from the uploaded data.
                props = {}
                if tag:
                    props["tag"] = tag
                if tag == "VCALENDAR" and vobject_items:
                    if hasattr(vobject_items[0], "x_wr_calname"):
                        calname = vobject_items[0].x_wr_calname.value
                        if calname:
                            props["D:displayname"] = calname
                    if hasattr(vobject_items[0], "x_wr_caldesc"):
                        caldesc = vobject_items[0].x_wr_caldesc.value
                        if caldesc:
                            props["C:calendar-description"] = caldesc
                storage.check_and_sanitize_props(props)
        except Exception:
            stored_exc_info = sys.exc_info()

        # Use generator for items and delete references to free memory
        # early
        def items_generator():
            while items:
                yield items.pop(0)
        return (items_generator(), tag, write_whole_collection, props,
                stored_exc_info)

    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning(
            "Bad PUT request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    # Optimistic preparation based on rights only (no storage access).
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(vobject_items)

    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return CONFLICT

        # The authoritative upload mode, now that storage is visible.
        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))

        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")

        if write_whole_collection:
            if not self.Rights.authorized(user, path, "w" if tag else "W"):
                return NOT_ALLOWED
        elif not self.Rights.authorized(user, parent_path, "w"):
            return NOT_ALLOWED

        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return PRECONDITION_FAILED

        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return PRECONDITION_FAILED

        # Redo the preparation if the optimistic guess was wrong.
        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                 vobject_items, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            # Report the exception captured during preparation.
            logger.warning(
                "Bad PUT request on %r: %s", path, prepared_exc_info[1],
                exc_info=prepared_exc_info)
            return BAD_REQUEST

        if write_whole_collection:
            try:
                etag = self.Collection.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return BAD_REQUEST
        else:
            prepared_item, = prepared_items
            # Reject uploads that would duplicate a UID in the parent
            # collection or change the UID of an existing item.
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                return self._webdav_error_response(
                    "C" if tag == "VCALENDAR" else "CR",
                    "no-uid-conflict")

            href = posixpath.basename(path.strip("/"))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return BAD_REQUEST

        headers = {"ETag": etag}
        return client.CREATED, headers, None
|
2008-12-30 17:25:42 +01:00
|
|
|
|
2016-09-04 20:15:08 +02:00
|
|
|
def do_REPORT(self, environ, base_prefix, path, user):
    """Manage REPORT request.

    Runs a CalDAV/CardDAV report against the collection at (or
    containing) ``path``.  The storage lock is held via an ExitStack so
    that ``xmlutils.report`` can release it early through the
    ``lock_stack.close`` callback it receives.
    """
    if not self._access(user, path, "r"):
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad REPORT request on %r: %s", path, e, exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    with contextlib.ExitStack() as lock_stack:
        lock_stack.enter_context(self.Collection.acquire_lock("r", user))
        item = next(self.Collection.discover(path), None)
        if not item:
            return NOT_FOUND
        if not self._access(user, path, "r", item):
            return NOT_ALLOWED
        # Reports always run against a collection; resolve a single
        # item to its containing collection.
        if isinstance(item, storage.BaseCollection):
            collection = item
        else:
            collection = item.collection
        headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
        try:
            status, xml_answer = xmlutils.report(
                base_prefix, path, xml_content, collection,
                lock_stack.close)
        except ValueError as e:
            logger.warning(
                "Bad REPORT request on %r: %s", path, e, exc_info=True)
            return BAD_REQUEST
        return (status, headers, self._write_xml_content(xml_answer))
|
2018-04-20 22:53:42 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Lazily created global Application instance used by application().
_application = None
# Configuration path the global instance was initialized with; requests
# with a different RADICALE_CONFIG are rejected.
_application_config_path = None
# Guards the one-time initialization of the globals above.
_application_lock = threading.Lock()
|
|
|
|
|
|
|
|
|
2018-08-16 07:59:56 +02:00
|
|
|
def _init_application(config_path, wsgi_errors):
    """Initialize the global Application instance exactly once.

    ``wsgi_errors`` is the WSGI error stream; it is registered as a log
    target while the configuration is loaded so early errors reach the
    server's log.
    """
    global _application, _application_config_path
    with _application_lock:
        # Another thread may have finished initialization while this
        # one was waiting for the lock.
        if _application is not None:
            return
        log.setup()
        with log.register_stream(wsgi_errors):
            _application_config_path = config_path
            configuration = config.load([config_path] if config_path else [],
                                        ignore_missing_paths=False)
            log.set_level(configuration.get("logging", "level"))
            _application = Application(configuration)
|
2018-04-29 21:43:10 +02:00
|
|
|
|
|
|
|
|
|
|
|
def application(environ, start_response):
    """WSGI entry point.

    Lazily builds the global Application on the first request and
    refuses to serve if RADICALE_CONFIG changes between requests.
    """
    config_path = environ.get("RADICALE_CONFIG",
                              os.environ.get("RADICALE_CONFIG"))
    if _application is None:
        _init_application(config_path, environ["wsgi.errors"])
    if config_path != _application_config_path:
        raise ValueError(
            "RADICALE_CONFIG must not change: %s != %s" %
            (repr(config_path), repr(_application_config_path)))
    return _application(environ, start_response)
|