# -*- coding: utf-8 -*-
# (c) 2009-2021 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Original PyFileServer (c) 2005 Ho Chun Wei.
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
"""
WSGI application that handles one single WebDAV request.
"""
from wsgidav import compat, util, xml_tools
from wsgidav.dav_error import (
    HTTP_BAD_GATEWAY,
    HTTP_BAD_REQUEST,
    HTTP_CONFLICT,
    HTTP_CREATED,
    HTTP_FAILED_DEPENDENCY,
    HTTP_FORBIDDEN,
    HTTP_INTERNAL_ERROR,
    HTTP_LENGTH_REQUIRED,
    HTTP_MEDIATYPE_NOT_SUPPORTED,
    HTTP_METHOD_NOT_ALLOWED,
    HTTP_NO_CONTENT,
    HTTP_NOT_FOUND,
    HTTP_NOT_IMPLEMENTED,
    HTTP_OK,
    HTTP_PRECONDITION_FAILED,
    HTTP_RANGE_NOT_SATISFIABLE,
    DAVError,
    PRECONDITION_CODE_LockTokenMismatch,
    PRECONDITION_CODE_PropfindFiniteDepth,
    as_DAVError,
    get_http_status_string,
)
from wsgidav.util import etree

__docformat__ = "reStructuredText"

_logger = util.get_module_logger(__name__)

# Default chunk size (bytes) used when streaming request bodies.
DEFAULT_BLOCK_SIZE = 8192


# ========================================================================
# RequestServer
# ========================================================================
class RequestServer(object):
    """Dispatch a single WebDAV request to the matching ``do_METHOD`` handler."""

    def __init__(self, dav_provider):
        self._davProvider = dav_provider
        self.allow_propfind_infinite = True
        self._verbose = 3
        self.block_size = DEFAULT_BLOCK_SIZE
        # _logger.debug("RequestServer: __init__")

        # Methods that are always available (read access):
        self._possible_methods = ["OPTIONS", "HEAD", "GET", "PROPFIND"]
        # if self._davProvider.prop_manager is not None:
        #     self._possible_methods.extend( [ "PROPFIND" ] )
        if not self._davProvider.is_readonly():
            # Write methods are only offered for writable providers:
            self._possible_methods.extend(
                ["PUT", "DELETE", "COPY", "MOVE", "MKCOL", "PROPPATCH", "POST"]
            )
            # if self._davProvider.prop_manager is not None:
            #     self._possible_methods.extend( [ "PROPPATCH" ] )
        if self._davProvider.lock_manager is not None:
            self._possible_methods.extend(["LOCK", "UNLOCK"])

    def __del__(self):
        # _logger.debug("RequestServer: __del__")
        pass

    def __call__(self, environ, start_response):
        """WSGI entry point: generate the response body for one request."""
        assert "wsgidav.verbose" in environ
        provider = self._davProvider
        # TODO: allow anonymous somehow: this should run, even if http_authenticator middleware
        # is not installed
        # assert "wsgidav.auth.user_name" in environ
        if "wsgidav.auth.user_name" not in environ:
            _logger.warning("Missing 'wsgidav.auth.user_name' in environ")

        environ["wsgidav.user_name"] = environ.get(
            "wsgidav.auth.user_name", "anonymous"
        )
        requestmethod = environ["REQUEST_METHOD"]

        self.block_size = environ["wsgidav.config"].get(
            "block_size", DEFAULT_BLOCK_SIZE
        )

        # Convert 'infinity' and 'T'/'F' to a common case
        if environ.get("HTTP_DEPTH") is not None:
            environ["HTTP_DEPTH"] = environ["HTTP_DEPTH"].lower()
        if environ.get("HTTP_OVERWRITE") is not None:
            environ["HTTP_OVERWRITE"] = environ["HTTP_OVERWRITE"].upper()

        if "HTTP_EXPECT" in environ:
            pass

        # Dispatch HTTP request methods to 'do_METHOD()' handlers
        method = None
        if requestmethod in self._possible_methods:
            method_name = "do_{}".format(requestmethod)
            method = getattr(self, method_name, None)
        if not method:
            _logger.error("Invalid HTTP method {!r}".format(requestmethod))
            self._fail(HTTP_METHOD_NOT_ALLOWED)

        if environ.get("wsgidav.debug_break"):
            pass  # Set a break point here

        if environ.get("wsgidav.debug_profile"):
            # Optional profiling mode: wrap the handler call in cProfile.
            from cProfile import Profile

            profile = Profile()
            res = profile.runcall(
                provider.custom_request_handler, environ, start_response, method
            )
            # sort: 0:"calls",1:"time", 2: "cumulative"
            profile.print_stats(sort=2)
            for v in res:
                yield v
            if hasattr(res, "close"):
                res.close()
            return

        # Run requesthandler (provider may override, #55)
        # _logger.warning("#1...")
        app_iter = provider.custom_request_handler(environ, start_response, method)
        # _logger.warning("#1... 2")
        try:
            # _logger.warning("#1... 3")
            for v in app_iter:
                # _logger.warning("#1... 4")
                yield v
            # _logger.warning("#1... 5")
        # except Exception:
        #     _logger.warning("#1... 6")
        #     _logger.exception("")
        #     status = "500 Oops"
        #     response_headers = [("content-type", "text/plain")]
        #     start_response(status, response_headers, sys.exc_info())
        #     return ["error body goes here"]
        finally:
            # _logger.warning("#1... 7")
            # Always close the handler's iterable (PEP 3333 requires the
            # server to call close() if present).
            if hasattr(app_iter, "close"):
                # _logger.warning("#1... 8")
                app_iter.close()
        return

    def _fail(self, value, context_info=None, src_exception=None, err_condition=None):
        """Wrapper to raise (and log) DAVError."""
        util.fail(value, context_info, src_exception, err_condition)

    def _send_response(
        self, environ, start_response, root_res, success_code, error_list
    ):
        """Send WSGI response (single or multistatus).

        - If error_list is None or [], then <success_code> is sent as response.
        - If error_list contains a single error with a URL that matches root_res,
          then this error is returned.
        - If error_list contains more than one error, then '207 Multi-Status' is
          returned.
        """
        assert success_code in (HTTP_CREATED, HTTP_NO_CONTENT, HTTP_OK)
        if not error_list:
            # Status OK
            return util.send_status_response(environ, start_response, success_code)
        if len(error_list) == 1 and error_list[0][0] == root_res.get_href():
            # Only one error that occurred on the root resource
            return util.send_status_response(environ, start_response, error_list[0][1])

        # Multiple errors, or error on one single child
        multistatusEL = xml_tools.make_multistatus_el()

        for refurl, e in error_list:
            # assert refurl.startswith("http:")
            assert refurl.startswith("/")
            assert isinstance(e, DAVError)
            responseEL = etree.SubElement(multistatusEL, "{DAV:}response")
            etree.SubElement(responseEL, "{DAV:}href").text = refurl
            etree.SubElement(responseEL, "{DAV:}status").text = "HTTP/1.1 {}".format(
                get_http_status_string(e)
            )

        return util.send_multi_status_response(environ, start_response, multistatusEL)

    def _check_write_permission(self, res, depth, environ):
        """Raise DAVError(HTTP_LOCKED), if res is locked.

        If depth=='infinity', we also raise when child resources are locked.
        """
        lock_man = self._davProvider.lock_manager
        if lock_man is None or res is None:
            # No lock manager configured, or unmapped resource: nothing to check.
            return True

        ref_url = res.get_ref_url()

        # Lazily parse the 'If' header (fills wsgidav.ifLockTokenList too).
        if "wsgidav.conditions.if" not in environ:
            util.parse_if_header_dict(environ)

        # raise HTTP_LOCKED if conflict exists
        lock_man.check_write_permission(
            ref_url,
            depth,
            environ["wsgidav.ifLockTokenList"],
            environ["wsgidav.user_name"],
        )

    def _evaluate_if_headers(self, res, environ):
        """Apply HTTP headers on <path>, raising DAVError if conditions fail.

        Add environ['wsgidav.conditions.if'] and environ['wsgidav.ifLockTokenList'].
        Handle these headers:

        - If-Match, If-Modified-Since, If-None-Match, If-Unmodified-Since:
          Raising HTTP_PRECONDITION_FAILED or HTTP_NOT_MODIFIED
        - If:
          Raising HTTP_PRECONDITION_FAILED

        @see http://www.webdav.org/specs/rfc4918.html#HEADER_If
        @see util.evaluate_http_conditionals
        """
        # Add parsed If header to environ
        if "wsgidav.conditions.if" not in environ:
            util.parse_if_header_dict(environ)

        # Bail out, if res does not exist
        if res is None:
            return

        ifDict = environ["wsgidav.conditions.if"]

        # Raise HTTP_PRECONDITION_FAILED or HTTP_NOT_MODIFIED, if standard
        # HTTP condition fails
        last_modified = -1  # nonvalid modified time
        entitytag = "[]"  # Non-valid entity tag
        if res.get_last_modified() is not None:
            last_modified = int(res.get_last_modified())
        if res.get_etag() is not None:
            entitytag = res.get_etag()

        if (
            "HTTP_IF_MODIFIED_SINCE" in environ
            or "HTTP_IF_UNMODIFIED_SINCE" in environ
            or "HTTP_IF_MATCH" in environ
            or "HTTP_IF_NONE_MATCH" in environ
        ):
            util.evaluate_http_conditionals(res, last_modified, entitytag, environ)

        if "HTTP_IF" not in environ:
            return

        # Raise HTTP_PRECONDITION_FAILED, if DAV 'If' condition fails
        # TODO: handle empty locked resources
        # TODO: handle unmapped locked resources
        # isnewfile = not provider.exists(mappedpath)

        # Collect tokens of locks that (directly or indirectly) affect this URL.
        ref_url = res.get_ref_url()
        lock_man = self._davProvider.lock_manager
        locktokenlist = []
        if lock_man:
            lockList = lock_man.get_indirect_url_lock_list(
                ref_url, environ["wsgidav.user_name"]
            )
            for lock in lockList:
                locktokenlist.append(lock["token"])

        if not util.test_if_header_dict(res, ifDict, ref_url, locktokenlist, entitytag):
            self._fail(HTTP_PRECONDITION_FAILED, "'If' header condition failed.")

        return

    def do_PROPFIND(self, environ, start_response):
        """
        TODO: does not yet support If and If HTTP Conditions
        @see http://www.webdav.org/specs/rfc4918.html#METHOD_PROPFIND
        """
        path = environ["PATH_INFO"]
        res = self._davProvider.get_resource_inst(path, environ)

        # RFC: By default, the PROPFIND method without a Depth header MUST act
        # as if a "Depth: infinity" header was included.
        environ.setdefault("HTTP_DEPTH", "infinity")
        if not environ["HTTP_DEPTH"] in ("0", "1", "infinity"):
            self._fail(
                HTTP_BAD_REQUEST,
                "Invalid Depth header: '{}'.".format(environ["HTTP_DEPTH"]),
            )

        if environ["HTTP_DEPTH"] == "infinity" and not self.allow_propfind_infinite:
            self._fail(
                HTTP_FORBIDDEN,
                "PROPFIND 'infinite' was disabled for security reasons.",
                err_condition=PRECONDITION_CODE_PropfindFiniteDepth,
            )

        if res is None:
            self._fail(HTTP_NOT_FOUND, path)

        if environ.get("wsgidav.debug_break"):
            pass  # break point

        self._evaluate_if_headers(res, environ)

        # Parse PROPFIND request
        requestEL = util.parse_xml_body(environ, allow_empty=True)
        if requestEL is None:
            # An empty PROPFIND request body MUST be treated as a request for
            # the names and values of all properties.
308 requestEL = etree.XML( 309 "<D:propfind xmlns:D='DAV:'><D:allprop/></D:propfind>" 310 ) 311 312 if requestEL.tag != "{DAV:}propfind": 313 self._fail(HTTP_BAD_REQUEST) 314 315 propNameList = [] 316 propFindMode = None 317 for pfnode in requestEL: 318 if pfnode.tag == "{DAV:}allprop": 319 if propFindMode: 320 # RFC: allprop and name are mutually exclusive 321 self._fail(HTTP_BAD_REQUEST) 322 propFindMode = "allprop" 323 # TODO: implement <include> option 324 # elif pfnode.tag == "{DAV:}include": 325 # if not propFindMode in (None, "allprop"): 326 # self._fail(HTTP_BAD_REQUEST, 327 # "<include> element is only valid with 'allprop'.") 328 # for pfpnode in pfnode: 329 # propNameList.append(pfpnode.tag) 330 elif pfnode.tag == "{DAV:}name": 331 if propFindMode: # RFC: allprop and name are mutually exclusive 332 self._fail(HTTP_BAD_REQUEST) 333 propFindMode = "name" 334 elif pfnode.tag == "{DAV:}prop": 335 # RFC: allprop and name are mutually exclusive 336 if propFindMode not in (None, "named"): 337 self._fail(HTTP_BAD_REQUEST) 338 propFindMode = "named" 339 for pfpnode in pfnode: 340 propNameList.append(pfpnode.tag) 341 342 # --- Build list of resource URIs 343 344 reslist = res.get_descendants(depth=environ["HTTP_DEPTH"], add_self=True) 345 # if environ["wsgidav.verbose"] >= 3: 346 # pprint(reslist, indent=4) 347 348 multistatusEL = xml_tools.make_multistatus_el() 349 responsedescription = [] 350 351 for child in reslist: 352 353 if propFindMode == "allprop": 354 propList = child.get_properties("allprop") 355 elif propFindMode == "name": 356 propList = child.get_properties("name") 357 else: 358 propList = child.get_properties("named", name_list=propNameList) 359 360 href = child.get_href() 361 util.add_property_response(multistatusEL, href, propList) 362 363 if responsedescription: 364 etree.SubElement( 365 multistatusEL, "{DAV:}responsedescription" 366 ).text = "\n".join(responsedescription) 367 368 return util.send_multi_status_response(environ, start_response, 
        return util.send_multi_status_response(environ, start_response, multistatusEL)

    def do_PROPPATCH(self, environ, start_response):
        """Handle PROPPATCH request to set or remove a property.

        @see http://www.webdav.org/specs/rfc4918.html#METHOD_PROPPATCH
        """
        path = environ["PATH_INFO"]
        res = self._davProvider.get_resource_inst(path, environ)

        # Only accept Depth: 0 (but assume this, if omitted)
        environ.setdefault("HTTP_DEPTH", "0")
        if environ["HTTP_DEPTH"] != "0":
            self._fail(HTTP_BAD_REQUEST, "Depth must be '0'.")

        if res is None:
            self._fail(HTTP_NOT_FOUND, path)

        self._evaluate_if_headers(res, environ)
        self._check_write_permission(res, "0", environ)

        # Parse request
        requestEL = util.parse_xml_body(environ)

        if requestEL.tag != "{DAV:}propertyupdate":
            self._fail(HTTP_BAD_REQUEST)

        # Create a list of update request tuples: (name, value)
        propupdatelist = []

        for ppnode in requestEL:
            propupdatemethod = None
            if ppnode.tag == "{DAV:}remove":
                propupdatemethod = "remove"
            elif ppnode.tag == "{DAV:}set":
                propupdatemethod = "set"
            else:
                self._fail(
                    HTTP_BAD_REQUEST, "Unknown tag (expected 'set' or 'remove')."
                )

            for propnode in ppnode:
                if propnode.tag != "{DAV:}prop":
                    self._fail(HTTP_BAD_REQUEST, "Unknown tag (expected 'prop').")

                for propertynode in propnode:
                    propvalue = None
                    if propupdatemethod == "remove":
                        propvalue = None  # Mark as 'remove'
                        if len(propertynode) > 0:
                            # 14.23: All the XML elements in a 'prop' XML
                            # element inside of a 'remove' XML element MUST be
                            # empty
                            self._fail(
                                HTTP_BAD_REQUEST,
                                "prop element must be empty for 'remove'.",
                            )
                    else:
                        propvalue = propertynode

                    propupdatelist.append((propertynode.tag, propvalue))

        # Apply updates in SIMULATION MODE and create a result list (name,
        # result)
        successflag = True
        writeresultlist = []

        for (name, propvalue) in propupdatelist:
            try:
                res.set_property_value(name, propvalue, dry_run=True)
            except Exception as e:
                writeresult = as_DAVError(e)
            else:
                writeresult = "200 OK"
            writeresultlist.append((name, writeresult))
            successflag = successflag and writeresult == "200 OK"

        # Generate response list of 2-tuples (name, value)
        # <value> is None on success, or an instance of DAVError
        propResponseList = []
        responsedescription = []

        if not successflag:
            # If dry run failed: convert all OK to FAILED_DEPENDENCY.
            for (name, result) in writeresultlist:
                if result == "200 OK":
                    result = DAVError(HTTP_FAILED_DEPENDENCY)
                elif isinstance(result, DAVError):
                    responsedescription.append(result.get_user_info())
                propResponseList.append((name, result))

        else:
            # Dry-run succeeded: set properties again, this time in 'real' mode
            # In theory, there should be no exceptions thrown here, but this is
            # real live...
            for (name, propvalue) in propupdatelist:
                try:
                    res.set_property_value(name, propvalue, dry_run=False)
                    # Set value to None, so the response xml contains empty tags
                    propResponseList.append((name, None))
                except Exception as e:
                    e = as_DAVError(e)
                    propResponseList.append((name, e))
                    responsedescription.append(e.get_user_info())

        # Generate response XML
        multistatusEL = xml_tools.make_multistatus_el()
        href = res.get_href()
        util.add_property_response(multistatusEL, href, propResponseList)
        if responsedescription:
            etree.SubElement(
                multistatusEL, "{DAV:}responsedescription"
            ).text = "\n".join(responsedescription)

        # Send response
        return util.send_multi_status_response(environ, start_response, multistatusEL)

    def do_MKCOL(self, environ, start_response):
        """Handle MKCOL request to create a new collection.

        @see http://www.webdav.org/specs/rfc4918.html#METHOD_MKCOL
        """
        path = environ["PATH_INFO"]
        provider = self._davProvider
        # res = provider.get_resource_inst(path, environ)

        # Do not understand ANY request body entities
        if util.get_content_length(environ) != 0:
            self._fail(
                HTTP_MEDIATYPE_NOT_SUPPORTED,
                "The server does not handle any body content.",
            )

        # Only accept Depth: 0 (but assume this, if omitted)
        if environ.setdefault("HTTP_DEPTH", "0") != "0":
            self._fail(HTTP_BAD_REQUEST, "Depth must be '0'.")

        if provider.exists(path, environ):
            self._fail(
                HTTP_METHOD_NOT_ALLOWED,
                "MKCOL can only be executed on an unmapped URL.",
            )

        parentRes = provider.get_resource_inst(util.get_uri_parent(path), environ)
        if not parentRes or not parentRes.is_collection:
            self._fail(HTTP_CONFLICT, "Parent must be an existing collection.")

        # TODO: should we check If headers here?
        # self._evaluate_if_headers(res, environ)
        # Check for write permissions on the PARENT
        self._check_write_permission(parentRes, "0", environ)

        parentRes.create_collection(util.get_uri_name(path))

        return util.send_status_response(environ, start_response, HTTP_CREATED)

    def do_POST(self, environ, start_response):
        """POST is not part of WebDAV: always answer 405.

        @see http://www.webdav.org/specs/rfc4918.html#METHOD_POST
        @see http://stackoverflow.com/a/22606899/19166
        """
        self._fail(HTTP_METHOD_NOT_ALLOWED)

    def do_DELETE(self, environ, start_response):
        """Handle DELETE request (recursive for collections).

        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_DELETE
        """
        path = environ["PATH_INFO"]
        provider = self._davProvider
        res = provider.get_resource_inst(path, environ)

        # --- Check request preconditions -------------------------------------

        if util.get_content_length(environ) != 0:
            self._fail(
                HTTP_MEDIATYPE_NOT_SUPPORTED,
                "The server does not handle any body content.",
            )
        if res is None:
            self._fail(HTTP_NOT_FOUND, path)

        if res.is_collection:
            # Delete over collection
            # "The DELETE method on a collection MUST act as if a
            # 'Depth: infinity' header was used on it. A client MUST NOT submit
            # a Depth header with a DELETE on a collection with any value but
            # infinity."
            if environ.setdefault("HTTP_DEPTH", "infinity") != "infinity":
                self._fail(
                    HTTP_BAD_REQUEST,
                    "Only Depth: infinity is supported for collections.",
                )
        else:
            if not environ.setdefault("HTTP_DEPTH", "0") in ("0", "infinity"):
                self._fail(
                    HTTP_BAD_REQUEST,
                    "Only Depth: 0 or infinity are supported for non-collections.",
                )

        self._evaluate_if_headers(res, environ)
        # We need write access on the parent collection. Also we check for
        # locked children
        parentRes = provider.get_resource_inst(util.get_uri_parent(path), environ)
        if parentRes:
            # self._check_write_permission(parentRes, environ["HTTP_DEPTH"], environ)
            self._check_write_permission(parentRes, "0", environ)
        else:
            # self._check_write_permission(res, environ["HTTP_DEPTH"], environ)
            self._check_write_permission(res, "0", environ)

        # --- Let provider handle the request natively ------------------------

        # Errors in deletion; [ (<ref-url>, <DAVError>), ... ]
        error_list = []

        try:
            # handle_delete() may return True/False (handled or not), or a
            # list of per-resource errors (which also means: handled).
            handled = res.handle_delete()
            assert handled in (True, False) or type(handled) is list
            if type(handled) is list:
                error_list = handled
                handled = True
        except Exception as e:
            error_list = [(res.get_href(), as_DAVError(e))]
            handled = True
        if handled:
            return self._send_response(
                environ, start_response, res, HTTP_NO_CONTENT, error_list
            )

        # --- Let provider implement own recursion ----------------------------

        # Get a list of all resources (parents after children, so we can remove
        # them in that order)
        reverseChildList = res.get_descendants(
            depth_first=True, depth=environ["HTTP_DEPTH"], add_self=True
        )

        if res.is_collection and res.support_recursive_delete():
            # Check all preconditions first, then delete in one provider call.
            has_conflicts = False
            for childRes in reverseChildList:
                try:
                    self._evaluate_if_headers(childRes, environ)
                    self._check_write_permission(childRes, "0", environ)
                except Exception:
                    has_conflicts = True
                    break

            if not has_conflicts:
                try:
                    error_list = res.delete()
                except Exception as e:
                    error_list = [(res.get_href(), as_DAVError(e))]
                return self._send_response(
                    environ, start_response, res, HTTP_NO_CONTENT, error_list
                )

        # --- Implement file-by-file processing -------------------------------

        # Hidden paths (ancestors of failed deletes) {<path>: True, ...}
        ignore_dict = {}
        for childRes in reverseChildList:
            if childRes.path in ignore_dict:
                _logger.debug(
                    "Skipping {} (contains error child)".format(childRes.path)
                )
                # Propagate the 'skip' mark up to the parent collection, too.
                ignore_dict[util.get_uri_parent(childRes.path)] = ""
                continue

            try:
                # 9.6.1.: Any headers included with delete must be applied in
                # processing every resource to be deleted
                self._evaluate_if_headers(childRes, environ)
                self._check_write_permission(childRes, "0", environ)
                childRes.delete()
                # Double-check, if deletion succeeded
                if provider.exists(childRes.path, environ):
                    raise DAVError(
                        HTTP_INTERNAL_ERROR, "Resource could not be deleted."
                    )
            except Exception as e:
                error_list.append((childRes.get_href(), as_DAVError(e)))
                ignore_dict[util.get_uri_parent(childRes.path)] = True

        # --- Send response ---------------------------------------------------

        return self._send_response(
            environ, start_response, res, HTTP_NO_CONTENT, error_list
        )

    def _stream_data_chunked(self, environ, block_size):
        """Get the data from a chunked transfer.

        Yield chunks of at most <block_size> bytes read from wsgi.input.
        """
        # Chunked Transfer Coding
        # http://www.servlets.com/rfcs/rfc2616-sec3.html#sec3.6.1

        if "Darwin" in environ.get("HTTP_USER_AGENT", "") and environ.get(
            "HTTP_X_EXPECTED_ENTITY_LENGTH"
        ):
            # Mac Finder, that does not prepend chunk-size + CRLF ,
            # like it should to comply with the spec. It sends chunk
            # size as integer in a HTTP header instead.
            WORKAROUND_CHUNK_LENGTH = True
            buf = environ.get("HTTP_X_EXPECTED_ENTITY_LENGTH", "0")
            length = int(buf)
        else:
            WORKAROUND_CHUNK_LENGTH = False
            # Chunk sizes are hex numbers on a line of their own.
            buf = environ["wsgi.input"].readline()
            environ["wsgidav.some_input_read"] = 1
            if buf == compat.b_empty:
                length = 0
            else:
                length = int(buf, 16)

        while length > 0:
            buf = environ["wsgi.input"].read(block_size)
            yield buf
            if WORKAROUND_CHUNK_LENGTH:
                environ["wsgidav.some_input_read"] = 1
                # Keep receiving until we read expected size or reach
                # EOF
                if buf == compat.b_empty:
                    length = 0
                else:
                    length -= len(buf)
            else:
                # Skip the trailing CRLF, then read the next chunk-size line.
                environ["wsgi.input"].readline()
                buf = environ["wsgi.input"].readline()
                if buf == compat.b_empty:
                    length = 0
                else:
                    length = int(buf, 16)
        environ["wsgidav.all_input_read"] = 1

    def _stream_data(self, environ, content_length, block_size):
        """Get the data from a non-chunked transfer."""
        if content_length == 0:
            # TODO: review this
            # Windows MiniRedir submit PUT with Content-Length 0,
            # before LOCK and the real PUT. So we have to accept this.
            _logger.debug("PUT: Content-Length == 0. Creating empty file...")

        # elif content_length < 0:
        #     # TODO: review this
        #     # If CONTENT_LENGTH is invalid, we may try to workaround this
        #     # by reading until the end of the stream. This may block however!
        #     # The iterator produced small chunks of varying size, but not
        #     # sure, if we always get everything before it times out.
        #     _logger.warning("PUT with invalid Content-Length (%s). "
        #         "Trying to read all (this may timeout)..."
        #         .format(environ.get("CONTENT_LENGTH")))
        #     nb = 0
        #     try:
        #         for s in environ["wsgi.input"]:
        #             environ["wsgidav.some_input_read"] = 1
        #             _logger.debug("PUT: read from wsgi.input.__iter__, len=%s" % len(s))
        #             yield s
        #             nb += len (s)
        #     except socket.timeout:
        #         _logger.warning("PUT: input timed out after writing %s bytes" % nb)
        #         hasErrors = True
        else:
            assert content_length > 0
            contentremain = content_length
            while contentremain > 0:
                n = min(contentremain, block_size)
                readbuffer = environ["wsgi.input"].read(n)
                # This happens with litmus expect-100 test:
                if not len(readbuffer) > 0:
                    _logger.error("input.read({}) returned 0 bytes".format(n))
                    break
                environ["wsgidav.some_input_read"] = 1
                yield readbuffer
                contentremain -= len(readbuffer)

            if contentremain == 0:
                environ["wsgidav.all_input_read"] = 1

    def do_PUT(self, environ, start_response):
        """Handle PUT: stream the request body into a (new or existing) resource.

        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_PUT
        """
        path = environ["PATH_INFO"]
        provider = self._davProvider
        res = provider.get_resource_inst(path, environ)
        parentRes = provider.get_resource_inst(util.get_uri_parent(path), environ)

        isnewfile = res is None

        # Test for unsupported stuff
        if "HTTP_CONTENT_ENCODING" in environ:
            util.fail(HTTP_NOT_IMPLEMENTED, "Content-encoding header is not supported.")

        # An origin server that allows PUT on a given target resource MUST send
        # a 400 (Bad Request) response to a PUT request that contains a
        # Content-Range header field
        # (http://tools.ietf.org/html/rfc7231#section-4.3.4)
        if "HTTP_CONTENT_RANGE" in environ:
            util.fail(
                HTTP_BAD_REQUEST, "Content-range header is not allowed on PUT requests."
            )

        if res and res.is_collection:
            self._fail(HTTP_METHOD_NOT_ALLOWED, "Cannot PUT to a collection")
        elif (
            parentRes is None or not parentRes.is_collection
        ):  # TODO: allow parentRes==None?
            self._fail(HTTP_CONFLICT, "PUT parent must be a collection")

        self._evaluate_if_headers(res, environ)

        if isnewfile:
            # New resource: need write access on the parent collection.
            self._check_write_permission(parentRes, "0", environ)
            res = parentRes.create_empty_resource(util.get_uri_name(path))
        else:
            self._check_write_permission(res, "0", environ)

        # Start Content Processing
        # Content-Length may be 0 or greater. (Set to -1 if missing or invalid.)
        # WORKAROUND_BAD_LENGTH = True
        try:
            content_length = max(-1, int(environ.get("CONTENT_LENGTH", -1)))
        except ValueError:
            content_length = -1

        # if content_length < 0 and not WORKAROUND_BAD_LENGTH:
        if (content_length < 0) and (
            environ.get("HTTP_TRANSFER_ENCODING", "").lower() != "chunked"
        ):
            # HOTFIX: not fully understood, but MS sends PUT without content-length,
            # when creating new files
            agent = environ.get("HTTP_USER_AGENT", "")
            if "Microsoft-WebDAV-MiniRedir" in agent or "gvfs/" in agent:  # issue #10
                _logger.warning(
                    "Setting misssing Content-Length to 0 for MS / gvfs client"
                )
                content_length = 0
            else:
                util.fail(
                    HTTP_LENGTH_REQUIRED,
                    "PUT request with invalid Content-Length: ({})".format(
                        environ.get("CONTENT_LENGTH")
                    ),
                )

        # NOTE(review): hasErrors is never set True in the visible code
        # (errors raise and are handled in the except clause below).
        hasErrors = False
        try:
            if environ.get("HTTP_TRANSFER_ENCODING", "").lower() == "chunked":
                data_stream = self._stream_data_chunked(environ, self.block_size)
            else:
                data_stream = self._stream_data(
                    environ, content_length, self.block_size
                )

            fileobj = res.begin_write(content_type=environ.get("CONTENT_TYPE"))

            # Process the data in the body.

            # If the fileobj has a writelines() method, give it the data stream.
            # If it doesn't, iterate the stream and call write() for each
            # iteration. This gives providers more flexibility in how they
            # consume the data.
            if getattr(fileobj, "writelines", None):
                fileobj.writelines(data_stream)
            else:
                for data in data_stream:
                    fileobj.write(data)

            fileobj.close()

        except Exception as e:
            res.end_write(with_errors=True)
            _logger.exception("PUT: byte copy failed")
            util.fail(e)

        res.end_write(hasErrors)

        headers = None
        if res.support_etag():
            entitytag = res.get_etag()
            if entitytag is not None:
                headers = [("ETag", '"{}"'.format(entitytag))]

        if isnewfile:
            return util.send_status_response(
                environ, start_response, HTTP_CREATED, add_headers=headers
            )
        return util.send_status_response(
            environ, start_response, HTTP_NO_CONTENT, add_headers=headers
        )

    def do_COPY(self, environ, start_response):
        return self._copy_or_move(environ, start_response, False)

    def do_MOVE(self, environ, start_response):
        return self._copy_or_move(environ, start_response, True)

    def _copy_or_move(self, environ, start_response, is_move):
        """Shared implementation for COPY and MOVE.

        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_COPY
        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_MOVE
        """
        src_path = environ["PATH_INFO"]
        provider = self._davProvider
        src_res = provider.get_resource_inst(src_path, environ)
        src_parent_res = provider.get_resource_inst(
            util.get_uri_parent(src_path), environ
        )

        def _debug_exception(e):
            """Log internal exceptions with stacktrace that otherwise would be hidden."""
            if self._verbose >= 5:
                _logger.exception("_debug_exception")
            return

        # --- Check source ----------------------------------------------------

        if src_res is None:
            self._fail(HTTP_NOT_FOUND, src_path)
        if "HTTP_DESTINATION" not in environ:
            self._fail(HTTP_BAD_REQUEST, "Missing required Destination header.")
        if not environ.setdefault("HTTP_OVERWRITE", "T") in ("T", "F"):
            # Overwrite defaults to 'T'
            self._fail(HTTP_BAD_REQUEST, "Invalid Overwrite header.")
        if util.get_content_length(environ) != 0:
            # RFC 2518 defined support for <propertybehavior>.
            # This was dropped with RFC 4918.
            # Still clients may send it (e.g. DAVExplorer 0.9.1 File-Copy) sends
            # <A:propertybehavior xmlns:A="DAV:"> <A:keepalive>*</A:keepalive>
            body = environ["wsgi.input"].read(util.get_content_length(environ))
            environ["wsgidav.all_input_read"] = 1
            _logger.info("Ignored copy/move body: '{}'...".format(body[:50]))

        if src_res.is_collection:
            # The COPY method on a collection without a Depth header MUST act as
            # if a Depth header with value "infinity" was included.
            # A client may submit a Depth header on a COPY on a collection with
            # a value of "0" or "infinity".
            environ.setdefault("HTTP_DEPTH", "infinity")
            if not environ["HTTP_DEPTH"] in ("0", "infinity"):
                self._fail(HTTP_BAD_REQUEST, "Invalid Depth header.")
            if is_move and environ["HTTP_DEPTH"] != "infinity":
                self._fail(
                    HTTP_BAD_REQUEST,
                    "Depth header for MOVE collection must be 'infinity'.",
                )
        else:
            # It's an existing non-collection: assume Depth 0
            # Note: litmus 'copymove: 3 (copy_simple)' sends 'infinity' for a
            # non-collection resource, so we accept that too
            environ.setdefault("HTTP_DEPTH", "0")
            if not environ["HTTP_DEPTH"] in ("0", "infinity"):
                self._fail(HTTP_BAD_REQUEST, "Invalid Depth header.")
            environ["HTTP_DEPTH"] = "0"

        # --- Get destination path and check for cross-realm access -----------

        # Destination header may be quoted (e.g. DAV Explorer sends unquoted,
        # Windows quoted)
        http_destination = compat.unquote(environ["HTTP_DESTINATION"])

        # Return fragments as part of <path>
        # Fixes litmus -> running `basic': 9. delete_fragment....... WARNING:
        # DELETE removed collection resource withRequest-URI including
        # fragment; unsafe
        (
            dest_scheme,
            dest_netloc,
            dest_path,
            _dest_params,
            _dest_query,
            _dest_frag,
        ) = compat.urlparse(http_destination, allow_fragments=False)

        if src_res.is_collection:
            dest_path = dest_path.rstrip("/") + "/"

        # Accept the request's own scheme or a forwarded scheme (reverse proxy).
        dest_scheme = dest_scheme.lower() if dest_scheme else ""
        url_scheme = environ["wsgi.url_scheme"].lower()
        fwd_scheme = environ.get("HTTP_X_FORWARDED_PROTO", "").lower()
        if dest_scheme and dest_scheme not in (url_scheme, fwd_scheme):
            self._fail(
                HTTP_BAD_GATEWAY,
                "Source and destination must have the same scheme.\n"
                "If you are running behind a reverse proxy, you may have to "
                "rewrite the 'Destination' haeader.\n"
                "(See https://github.com/mar10/wsgidav/issues/183)",
            )
        elif dest_netloc and dest_netloc.lower() != environ["HTTP_HOST"].lower():
            # TODO: this should consider environ["SERVER_PORT"] also
            self._fail(
                HTTP_BAD_GATEWAY, "Source and destination must have the same host name."
            )
        elif not dest_path.startswith(provider.mount_path + provider.share_path):
            # Inter-realm copying not supported, since its not possible to
            # authentication-wise
            self._fail(HTTP_BAD_GATEWAY, "Inter-realm copy/move is not supported.")

        dest_path = dest_path[len(provider.mount_path + provider.share_path) :]
        assert dest_path.startswith("/")

        # dest_path is now relative to current mount/share starting with '/'

        dest_res = provider.get_resource_inst(dest_path, environ)
        dest_exists = dest_res is not None

        dest_parent_res = provider.get_resource_inst(
            util.get_uri_parent(dest_path), environ
        )

        if not dest_parent_res or not dest_parent_res.is_collection:
            self._fail(HTTP_CONFLICT, "Destination parent must be a collection.")

        self._evaluate_if_headers(src_res, environ)
        self._evaluate_if_headers(dest_res, environ)
        # Check permissions
        # http://www.webdav.org/specs/rfc4918.html#rfc.section.7.4
        if is_move:
            self._check_write_permission(src_res, "infinity", environ)
            # Cannot remove members from locked-0 collections
            if src_parent_res:
                self._check_write_permission(src_parent_res, "0", environ)

        # Cannot create or new members in locked-0 collections
        if not dest_exists:
            self._check_write_permission(dest_parent_res, "0", environ)
        # If target exists, it must not be locked
        self._check_write_permission(dest_res, "infinity", environ)

        if src_path == dest_path:
            self._fail(HTTP_FORBIDDEN, "Cannot copy/move source onto itself")
        elif util.is_equal_or_child_uri(src_path, dest_path):
            self._fail(HTTP_FORBIDDEN, "Cannot copy/move source below itself")

        if dest_exists and environ["HTTP_OVERWRITE"] != "T":
            self._fail(
                HTTP_PRECONDITION_FAILED,
                "Destination already exists and Overwrite is set to false",
            )

        # --- Let provider handle the request natively ------------------------

        # Errors in copy/move; [
(<ref-url>, <DAVError>), ... ] 1009 error_list = [] 1010 success_code = HTTP_CREATED 1011 if dest_exists: 1012 success_code = HTTP_NO_CONTENT 1013 1014 try: 1015 if is_move: 1016 handled = src_res.handle_move(dest_path) 1017 else: 1018 isInfinity = environ["HTTP_DEPTH"] == "infinity" 1019 handled = src_res.handle_copy(dest_path, isInfinity) 1020 assert handled in (True, False) or type(handled) is list 1021 if type(handled) is list: 1022 error_list = handled 1023 handled = True 1024 except Exception as e: 1025 _debug_exception(e) 1026 error_list = [(src_res.get_href(), as_DAVError(e))] 1027 handled = True 1028 if handled: 1029 return self._send_response( 1030 environ, start_response, src_res, HTTP_NO_CONTENT, error_list 1031 ) 1032 1033 # --- Cleanup destination before copy/move ---------------------------- 1034 1035 src_list = src_res.get_descendants(add_self=True) 1036 1037 src_root_len = len(src_path) 1038 dest_root_len = len(dest_path) 1039 1040 if dest_exists: 1041 if is_move or not dest_res.is_collection or not src_res.is_collection: 1042 # MOVE: 1043 # If a resource exists at the destination and the Overwrite 1044 # header is "T", then prior to performing the move, the server 1045 # MUST perform a DELETE with "Depth: infinity" on the 1046 # destination resource. 1047 _logger.debug("Remove dest before move: '{}'".format(dest_res)) 1048 dest_res.delete() 1049 dest_res = None 1050 else: 1051 # COPY collection over collection: 1052 # Remove destination files, that are not part of source, because 1053 # source and dest collections must not be merged (9.8.4). 1054 # This is not the same as deleting the complete dest collection 1055 # before copying, because that would also discard the history of 1056 # existing resources. 
1057 reverse_dest_list = dest_res.get_descendants( 1058 depth_first=True, add_self=False 1059 ) 1060 src_path_list = [s.path for s in src_list] 1061 _logger.debug("check src_path_list: {}".format(src_path_list)) 1062 for dres in reverse_dest_list: 1063 _logger.debug("check unmatched dest before copy: {}".format(dres)) 1064 rel_url = dres.path[dest_root_len:] 1065 sp = src_path + rel_url 1066 if sp not in src_path_list: 1067 _logger.debug( 1068 "Remove unmatched dest before copy: {}".format(dres) 1069 ) 1070 dres.delete() 1071 1072 # --- Let provider implement recursive move --------------------------- 1073 # We do this only, if the provider supports it, and no conflicts exist. 1074 # A provider can implement this very efficiently, without allocating 1075 # double memory as a copy/delete approach would. 1076 1077 if is_move and src_res.support_recursive_move(dest_path): 1078 has_conflicts = False 1079 for s in src_list: 1080 try: 1081 self._evaluate_if_headers(s, environ) 1082 except Exception as e: 1083 _debug_exception(e) 1084 has_conflicts = True 1085 break 1086 1087 if not has_conflicts: 1088 try: 1089 _logger.debug( 1090 "Recursive move: {} -> '{}'".format(src_res, dest_path) 1091 ) 1092 error_list = src_res.move_recursive(dest_path) 1093 except Exception as e: 1094 _debug_exception(e) 1095 error_list = [(src_res.get_href(), as_DAVError(e))] 1096 1097 return self._send_response( 1098 environ, start_response, src_res, success_code, error_list 1099 ) 1100 1101 # --- Copy/move file-by-file using copy/delete ------------------------ 1102 1103 # We get here, if 1104 # - the provider does not support recursive moves 1105 # - this is a copy request 1106 # In this case we would probably not win too much by a native provider 1107 # implementation, since we had to handle single child errors anyway. 1108 # - the source tree is partially locked 1109 # We would have to pass this information to the native provider. 
1110 1111 # Hidden paths (paths of failed copy/moves) {<src_path>: True, ...} 1112 ignore_dict = {} 1113 1114 for sres in src_list: 1115 # Skip this resource, if there was a failure copying a parent 1116 parent_error = False 1117 for ignorePath in ignore_dict.keys(): 1118 if util.is_equal_or_child_uri(ignorePath, sres.path): 1119 parent_error = True 1120 break 1121 if parent_error: 1122 _logger.debug( 1123 "Copy: skipping '{}', because of parent error".format(sres.path) 1124 ) 1125 continue 1126 1127 try: 1128 rel_url = sres.path[src_root_len:] 1129 dpath = dest_path + rel_url 1130 1131 self._evaluate_if_headers(sres, environ) 1132 1133 # We copy resources and their properties top-down. 1134 # Collections are simply created (without members), for 1135 # non-collections bytes are copied (overwriting target) 1136 sres.copy_move_single(dpath, is_move) 1137 1138 # If copy succeeded, and it was a non-collection delete it now. 1139 # So the source tree shrinks while the destination grows and we 1140 # don't have to allocate the memory twice. 1141 # We cannot remove collections here, because we have not yet 1142 # copied all children. 1143 if is_move and not sres.is_collection: 1144 sres.delete() 1145 1146 except Exception as e: 1147 _debug_exception(e) 1148 ignore_dict[sres.path] = True 1149 # TODO: the error-href should be 'most appropriate of the source 1150 # and destination URLs'. So maybe this should be the destination 1151 # href sometimes. 1152 # http://www.webdav.org/specs/rfc4918.html#rfc.section.9.8.5 1153 error_list.append((sres.get_href(), as_DAVError(e))) 1154 1155 # MOVE: Remove source tree (bottom-up) 1156 if is_move: 1157 reverse_src_list = src_list[:] 1158 reverse_src_list.reverse() 1159 _logger.debug("Delete after move, ignore_dict={}".format(ignore_dict)) 1160 for sres in reverse_src_list: 1161 # Non-collections have already been removed in the copy loop. 
                if not sres.is_collection:
                    continue
                # Skip collections that contain errors (unmoved resources)
                child_error = False
                for ignorePath in ignore_dict.keys():
                    if util.is_equal_or_child_uri(sres.path, ignorePath):
                        child_error = True
                        break
                if child_error:
                    _logger.debug(
                        "Delete after move: skipping '{}', because of child error".format(
                            sres.path
                        )
                    )
                    continue

                try:
                    _logger.debug("Remove collection after move: {}".format(sres))
                    sres.delete()
                except Exception as e:
                    _debug_exception(e)
                    error_list.append((src_res.get_href(), as_DAVError(e)))

        _logger.debug("ErrorList: {}".format(error_list))

        # --- Return response -------------------------------------------------

        return self._send_response(
            environ, start_response, src_res, success_code, error_list
        )

    def do_LOCK(self, environ, start_response):
        """Handle a LOCK request: refresh an existing lock or acquire a new one.

        An empty request body refreshes the single lock identified by the
        submitted token; a <DAV:lockinfo> body acquires a new lock (creating
        an empty resource first, if the URL is unmapped).

        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_LOCK
        """
        path = environ["PATH_INFO"]
        provider = self._davProvider
        res = provider.get_resource_inst(path, environ)
        lock_man = provider.lock_manager

        if lock_man is None:
            # http://www.webdav.org/specs/rfc4918.html#rfc.section.6.3
            self._fail(HTTP_NOT_IMPLEMENTED, "This realm does not support locking.")
        if res and res.prevent_locking():
            self._fail(HTTP_FORBIDDEN, "This resource does not support locking.")

        # Depth defaults to 'infinity'; only '0' and 'infinity' are valid
        if environ.setdefault("HTTP_DEPTH", "infinity") not in ("0", "infinity"):
            self._fail(HTTP_BAD_REQUEST, "Expected Depth: 'infinity' or '0'.")

        self._evaluate_if_headers(res, environ)

        timeout_secs = util.read_timeout_value_header(environ.get("HTTP_TIMEOUT", ""))
        submitted_token_list = environ["wsgidav.ifLockTokenList"]

        # May be None, if the request had no body (lock refresh)
        lockinfo_el = util.parse_xml_body(environ, allow_empty=True)

        # --- Special case: empty request body --------------------------------

        if lockinfo_el is None:
            # An empty body means: refresh the lock given by the If header.
            # TODO: @see 9.10.2
            # TODO: 'URL of a resource within the scope of the lock'
            #       Other (shared) locks are unaffected and don't prevent refreshing
            # TODO: check for valid user
            # TODO: check for If with single lock token
            environ["HTTP_DEPTH"] = "0"  # MUST ignore depth header on refresh

            if res is None:
                self._fail(
                    HTTP_BAD_REQUEST, "LOCK refresh must specify an existing resource."
                )
            if len(submitted_token_list) != 1:
                self._fail(
                    HTTP_BAD_REQUEST,
                    "Expected a lock token (only one lock may be refreshed at a time).",
                )
            elif not lock_man.is_url_locked_by_token(
                res.get_ref_url(), submitted_token_list[0]
            ):
                self._fail(
                    HTTP_PRECONDITION_FAILED,
                    "Lock token does not match URL.",
                    err_condition=PRECONDITION_CODE_LockTokenMismatch,
                )
            # TODO: test, if token is owned by user

            lock = lock_man.refresh(submitted_token_list[0], timeout_secs)

            # The lock root may be <path>, or a parent of <path>.
            lock_path = provider.ref_url_to_path(lock["root"])
            lock_res = provider.get_resource_inst(lock_path, environ)

            prop_el = xml_tools.make_prop_el()
            # TODO: handle exceptions in get_property_value
            lockdiscovery_el = lock_res.get_property_value("{DAV:}lockdiscovery")
            prop_el.append(lockdiscovery_el)

            # Lock-Token header is not returned for a refresh
            xml = xml_tools.xml_to_bytes(prop_el)
            start_response(
                "200 OK",
                [
                    ("Content-Type", "application/xml"),
                    ("Content-Length", str(len(xml))),
                    ("Date", util.get_rfc1123_time()),
                ],
            )
            return [xml]

        # --- Standard case: parse xml body -----------------------------------

        if lockinfo_el.tag != "{DAV:}lockinfo":
            self._fail(HTTP_BAD_REQUEST)

        lock_type = None
        lock_scope = None
        lock_owner = compat.to_bytes("")
        lock_depth = environ.setdefault("HTTP_DEPTH", "infinity")

        # Extract locktype, lockscope and owner from the <lockinfo> element
        for linode in lockinfo_el:
            if linode.tag == "{DAV:}lockscope":
                for lsnode in linode:
                    if lsnode.tag == "{DAV:}exclusive":
                        lock_scope = "exclusive"
                    elif lsnode.tag == "{DAV:}shared":
                        lock_scope = "shared"
                    break
            elif linode.tag == "{DAV:}locktype":
                for ltnode in linode:
                    if ltnode.tag == "{DAV:}write":
                        lock_type = "write"  # only type accepted
                    break

            elif linode.tag == "{DAV:}owner":
                # Store whole <owner> tag, so we can use etree.XML() later
                lock_owner = xml_tools.xml_to_bytes(linode, pretty_print=False)

            else:
                self._fail(HTTP_BAD_REQUEST, "Invalid node '{}'.".format(linode.tag))

        if not lock_scope:
            self._fail(HTTP_BAD_REQUEST, "Missing or invalid lockscope.")
        if not lock_type:
            self._fail(HTTP_BAD_REQUEST, "Missing or invalid locktype.")

        if environ.get("wsgidav.debug_break"):
            pass  # break point

        # TODO: check for locked parents BEFORE creating an empty child

        # http://www.webdav.org/specs/rfc4918.html#rfc.section.9.10.4
        # Locking unmapped URLs: must create an empty resource
        createdNewResource = False
        if res is None:
            parentRes = provider.get_resource_inst(util.get_uri_parent(path), environ)
            if not parentRes or not parentRes.is_collection:
                self._fail(HTTP_CONFLICT, "LOCK-0 parent must be a collection")
            res = parentRes.create_empty_resource(util.get_uri_name(path))
            createdNewResource = True

        # --- Check, if path is already locked --------------------------------

        # May raise DAVError(HTTP_LOCKED):
        lock = lock_man.acquire(
            res.get_ref_url(),
            lock_type,
            lock_scope,
            lock_depth,
            lock_owner,
            timeout_secs,
            environ["wsgidav.user_name"],
            submitted_token_list,
        )

        # Lock succeeded
        prop_el = xml_tools.make_prop_el()
        # TODO: handle exceptions in get_property_value
        lockdiscovery_el = res.get_property_value("{DAV:}lockdiscovery")
        prop_el.append(lockdiscovery_el)

        respcode = "200 OK"
        if createdNewResource:
            respcode = "201 Created"

        xml = xml_tools.xml_to_bytes(prop_el)
        start_response(
            respcode,
            [
                ("Content-Type", "application/xml"),
                ("Content-Length", str(len(xml))),
                ("Lock-Token", lock["token"]),
                ("Date", util.get_rfc1123_time()),
            ],
        )
        return [xml]

        # TODO: LOCK may also fail with HTTP_FORBIDDEN.
        #       In this case we should return 207 Multi-Status.
        #       http://www.webdav.org/specs/rfc4918.html#rfc.section.9.10.9
        #       Checking this would require to call res.prevent_locking()
        #       recursively.

        # # --- Locking FAILED: return fault response
        # if len(conflictList) == 1 and conflictList[0][0]["root"] == res.get_ref_url():
        #     # If there is only one error for the root URL, send as simple error response
        #     return util.send_status_response(environ, start_response, conflictList[0][1])
        #
        # dictStatus = {}
        #
        # for lock_dict, e in conflictList:
        #     dictStatus[lock_dict["root"]] = e
        #
        # if not res.get_ref_url() in dictStatus:
        #     dictStatus[res.get_ref_url()] = DAVError(HTTP_FAILED_DEPENDENCY)
        #
        # # Return multi-status fault response
        # multistatusEL = xml_tools.make_multistatus_el()
        # for nu, e in dictStatus.items():
        #     responseEL = etree.SubElement(multistatusEL, "{DAV:}response")
        #     etree.SubElement(responseEL, "{DAV:}href").text = nu
        #     etree.SubElement(responseEL, "{DAV:}status").text = "HTTP/1.1 %s" % (
        #         get_http_status_string(e))
        #     # TODO: all responses should have this(?):
        #     if e.context_info:
        #         etree.SubElement(multistatusEL, "{DAV:}responsedescription").text = e.context_info
        #
        # if responsedescription:
        #     etree.SubElement(multistatusEL, "{DAV:}responsedescription").text = "\n".join(
        #         responsedescription)
        #
        # return util.send_multi_status_response(environ, start_response,
        #                                        multistatusEL)

    def do_UNLOCK(self, environ, start_response):
        """Handle an UNLOCK request: release the lock named by the Lock-Token header.

        @see: http://www.webdav.org/specs/rfc4918.html#METHOD_UNLOCK
        """
        path = environ["PATH_INFO"]
        provider = self._davProvider
        res = self._davProvider.get_resource_inst(path, environ)

        lock_man = provider.lock_manager
        if lock_man is None:
            self._fail(HTTP_NOT_IMPLEMENTED, "This share does not support locking.")
        elif util.get_content_length(environ) != 0:
            # UNLOCK must not have a request body
            self._fail(
                HTTP_MEDIATYPE_NOT_SUPPORTED,
                "The server does not handle any body content.",
            )
        elif res is None:
            self._fail(HTTP_NOT_FOUND, path)
1412 elif "HTTP_LOCK_TOKEN" not in environ: 1413 self._fail(HTTP_BAD_REQUEST, "Missing lock token.") 1414 1415 self._evaluate_if_headers(res, environ) 1416 1417 lock_token = environ["HTTP_LOCK_TOKEN"].strip("<>") 1418 ref_url = res.get_ref_url() 1419 1420 if not lock_man.is_url_locked_by_token(ref_url, lock_token): 1421 self._fail( 1422 HTTP_CONFLICT, 1423 "Resource is not locked by token.", 1424 err_condition=PRECONDITION_CODE_LockTokenMismatch, 1425 ) 1426 1427 if not lock_man.is_token_locked_by_user( 1428 lock_token, environ["wsgidav.user_name"] 1429 ): 1430 # TODO: there must be a way to allow this for admins. 1431 # Maybe test for "remove_locks" in environ["wsgidav.roles"] 1432 self._fail(HTTP_FORBIDDEN, "Token was created by another user.") 1433 1434 # TODO: Is this correct?: unlock(a/b/c) will remove Lock for 'a/b' 1435 lock_man.release(lock_token) 1436 1437 return util.send_status_response(environ, start_response, HTTP_NO_CONTENT) 1438 1439 def do_OPTIONS(self, environ, start_response): 1440 """ 1441 @see http://www.webdav.org/specs/rfc4918.html#HEADER_DAV 1442 """ 1443 path = environ["PATH_INFO"] 1444 provider = self._davProvider 1445 res = provider.get_resource_inst(path, environ) 1446 1447 dav_compliance_level = "1,2" 1448 if provider is None or provider.is_readonly() or provider.lock_manager is None: 1449 dav_compliance_level = "1" 1450 1451 headers = [ 1452 ("Content-Type", "text/html"), 1453 ("Content-Length", "0"), 1454 ("DAV", dav_compliance_level), 1455 ("Date", util.get_rfc1123_time()), 1456 ] 1457 1458 if path == "/": 1459 path = "*" # Hotfix for WinXP 1460 1461 if path == "*": 1462 # Answer HTTP 'OPTIONS' method on server-level. 1463 # From RFC 2616 1464 # If the Request-URI is an asterisk ("*"), the OPTIONS request is 1465 # intended to apply to the server in general rather than to a specific 1466 # resource. 
            # Since a server's communication options typically depend on
            # the resource, the "*" request is only useful as a "ping" or "no-op"
            # type of method; it does nothing beyond allowing the client to test the
            # capabilities of the server. For example, this can be used to test a
            # proxy for HTTP/1.1 compliance (or lack thereof).
            start_response("200 OK", headers)
            return [b""]

        # Determine allowed request methods
        allow = ["OPTIONS"]
        if res and res.is_collection:
            # Existing collection
            allow.extend(["HEAD", "GET", "PROPFIND"])
            # if provider.prop_manager is not None:
            #     allow.extend( [ "PROPFIND" ] )
            if not provider.is_readonly():
                allow.extend(["DELETE", "COPY", "MOVE", "PROPPATCH"])
                # if provider.prop_manager is not None:
                #     allow.extend( [ "PROPPATCH" ] )
                if provider.lock_manager is not None:
                    allow.extend(["LOCK", "UNLOCK"])
        elif res:
            # Existing resource
            allow.extend(["HEAD", "GET", "PROPFIND"])
            # if provider.prop_manager is not None:
            #     allow.extend( [ "PROPFIND" ] )
            if not provider.is_readonly():
                allow.extend(["PUT", "DELETE", "COPY", "MOVE", "PROPPATCH"])
                # if provider.prop_manager is not None:
                #     allow.extend( [ "PROPPATCH" ] )
                if provider.lock_manager is not None:
                    allow.extend(["LOCK", "UNLOCK"])
            if res.support_ranges():
                headers.append(("Accept-Ranges", "bytes"))
        elif provider.is_collection(util.get_uri_parent(path), environ):
            # A new resource below an existing collection
            # TODO: should we allow LOCK here? I think it is allowed to lock an
            # non-existing resource
            if not provider.is_readonly():
                allow.extend(["PUT", "MKCOL"])
        else:
            self._fail(HTTP_NOT_FOUND, path)

        headers.append(("Allow", ", ".join(allow)))

        if environ["wsgidav.config"].get("add_header_MS_Author_Via", False):
            headers.append(("MS-Author-Via", "DAV"))

        start_response("200 OK", headers)
        return [b""]

    def do_GET(self, environ, start_response):
        # Stream the resource body (delegates to _send_resource)
        return self._send_resource(environ, start_response, is_head_method=False)

    def do_HEAD(self, environ, start_response):
        # Same as GET, but an empty body is returned
        return self._send_resource(environ, start_response, is_head_method=True)

    def _send_resource(self, environ, start_response, is_head_method):
        """Stream a non-collection resource for GET/HEAD, honoring Range headers.

        If-Range
            If the entity is unchanged, send me the part(s) that I am missing;
            otherwise, send me the entire new entity
            If-Range: "737060cd8c284d8af7ad3082f209582d"

        @see: http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.27
        """
        path = environ["PATH_INFO"]
        res = self._davProvider.get_resource_inst(path, environ)

        if util.get_content_length(environ) != 0:
            self._fail(
                HTTP_MEDIATYPE_NOT_SUPPORTED,
                "The server does not handle any body content.",
            )
        elif environ.setdefault("HTTP_DEPTH", "0") != "0":
            self._fail(HTTP_BAD_REQUEST, "Only Depth: 0 supported.")
        elif res is None:
            self._fail(HTTP_NOT_FOUND, path)
        elif res.is_collection:
            self._fail(
                HTTP_FORBIDDEN,
                "Directory browsing is not enabled."
                "(to enable it put WsgiDavDirBrowser into middleware_stack"
                "option and set dir_browser -> enabled = True option.)",
            )

        self._evaluate_if_headers(res, environ)

        filesize = res.get_content_length()
        if filesize is None:
            filesize = -1  # flag logic to read until EOF

        last_modified = res.get_last_modified()
        if last_modified is None:
            last_modified = -1

        entitytag = res.get_etag()
        if entitytag is None:
            entitytag = "[]"

        # Ranges: range serving requires a known content length and range support
        doignoreranges = (
            not res.support_content_length()
            or not res.support_ranges()
            or filesize == 0
        )
        if (
            "HTTP_RANGE" in environ
            and "HTTP_IF_RANGE" in environ
            and not doignoreranges
        ):
            ifrange = environ["HTTP_IF_RANGE"]
            # Try as http-date first (Return None, if invalid date string)
            secstime = util.parse_time_string(ifrange)
            if secstime:
                # cast to integer, as last_modified may be a floating point number
                if int(last_modified) != secstime:
                    doignoreranges = True
            else:
                # Use as entity tag
                ifrange = ifrange.strip('" ')
                if entitytag is None or ifrange != entitytag:
                    doignoreranges = True

        ispartialranges = False
        if "HTTP_RANGE" in environ and not doignoreranges:
            ispartialranges = True
            list_ranges, _totallength = util.obtain_content_ranges(
                environ["HTTP_RANGE"], filesize
            )
            if len(list_ranges) == 0:
                # No valid ranges present
                self._fail(HTTP_RANGE_NOT_SATISFIABLE)

            # More than one range present -> take only the first range, since
            # multiple range returns require multipart, which is not supported
            # obtain_content_ranges supports more than one range in case the above
            # behaviour changes in future
            (range_start, range_end, range_length) = list_ranges[0]
        else:
            # No (usable) Range header: send the whole entity
            (range_start, range_end, range_length) = (0, filesize - 1, filesize)

        # Content Processing
        mimetype = res.get_content_type()  # provider.get_content_type(path)

        response_headers = []
        if res.support_content_length():
            # Content-length must be of type string
            response_headers.append(("Content-Length", str(range_length)))
        if res.support_modified():
            response_headers.append(
                ("Last-Modified", util.get_rfc1123_time(last_modified))
            )
        response_headers.append(("Content-Type", mimetype))
        response_headers.append(("Date", util.get_rfc1123_time()))
        if res.support_etag():
            response_headers.append(("ETag", '"{}"'.format(entitytag)))

        if res.support_ranges():
            response_headers.append(("Accept-Ranges", "bytes"))

        # Append user-configured custom headers, if any
        if "response_headers" in environ["wsgidav.config"]:
            customHeaders = environ["wsgidav.config"]["response_headers"]
            for header, value in customHeaders:
                response_headers.append((header, value))

        res.finalize_headers(environ, response_headers)

        if ispartialranges:
            # response_headers.append(("Content-Ranges", "bytes " + str(range_start) + "-" +
            #     str(range_end) + "/" + str(range_length)))
            response_headers.append(
                (
                    "Content-Range",
                    "bytes {}-{}/{}".format(range_start, range_end, filesize),
                )
            )
            start_response("206 Partial Content", response_headers)
        else:
            start_response("200 OK", response_headers)

        # Return empty body for HEAD requests
        if is_head_method:
            yield b""
            return

        fileobj = res.get_content()

        if not doignoreranges:
            fileobj.seek(range_start)

        # Stream the requested byte range in block_size chunks.
        # contentlengthremaining < 0 means: size unknown, read until EOF.
        contentlengthremaining = range_length
        try:
            while 1:
                if (
                    contentlengthremaining < 0
                    or contentlengthremaining > self.block_size
                ):
                    readbuffer = fileobj.read(self.block_size)
                else:
                    readbuffer = fileobj.read(contentlengthremaining)
                assert compat.is_bytes(readbuffer)
                yield readbuffer
                contentlengthremaining -= len(readbuffer)
                if len(readbuffer) == 0 or contentlengthremaining == 0:
                    break
        finally:
            # yield readbuffer MAY fail with a GeneratorExit error
            # we still need to close the file
            fileobj.close()
        return


# def do_TRACE(self, environ, start_response):
#     """ TODO: TRACE pending, but not essential."""
#     self._fail(HTTP_NOT_IMPLEMENTED)