import os

import fasteners

from conans.client.tools.env import no_op
from conans.errors import NotFoundException
from conans.util.files import decode_text, md5sum, path_exists, relative_dirs, rmdir


class ServerDiskAdapter(object):
    """Manage access to disk files with common methods required
    for conan operations"""

    def __init__(self, base_url, base_storage_path, updown_auth_manager):
        """
        :param base_url: Base url used to build the download/upload urls
        :param base_storage_path: Root folder of the on-disk storage; url
            paths are generated relative to it
        :param updown_auth_manager: Issues signed tokens authorizing the
            transfer of a given path
        """
        self.base_url = base_url
        # URLs are generated removing this base path
        self.updown_auth_manager = updown_auth_manager
        self._store_folder = base_storage_path

    def _signed_url(self, filepath, user, filesize=None):
        """Build a signed url for 'filepath' (an absolute path under the
        store folder). 'filesize' must be provided for upload urls, because
        the signature covers the file size; it is omitted for downloads."""
        url_path = os.path.relpath(filepath, self._store_folder)
        # Normalize Windows separators so the url path is always '/'
        url_path = url_path.replace("\\", "/")
        if filesize is None:
            signature = self.updown_auth_manager.get_token_for(url_path, user)
        else:
            signature = self.updown_auth_manager.get_token_for(url_path, user, filesize)
        return "%s/%s?signature=%s" % (self.base_url, url_path, decode_text(signature))

    # ONLY USED BY APIV1
    def get_download_urls(self, paths, user=None):
        """Get the urls for download the specified files using s3 signed request.
        returns a dict with this structure: {"filepath": "http://..."}

        paths is a list of path files """
        assert isinstance(paths, list)
        return {filepath: self._signed_url(filepath, user) for filepath in paths}

    # ONLY USED BY APIV1
    def get_upload_urls(self, paths_sizes, user=None):
        """Get the urls for upload the specified files using s3 signed request.
        returns a dict with this structure: {"filepath": "http://..."}

        paths_sizes is a dict of {path: size_in_bytes} """
        assert isinstance(paths_sizes, dict)
        # The file size is part of the signed token for uploads
        return {filepath: self._signed_url(filepath, user, filesize)
                for filepath, filesize in paths_sizes.items()}

    def _get_paths(self, absolute_path, files_subset):
        """Return the absolute paths of the files under 'absolute_path',
        optionally restricted to the relative paths in 'files_subset'.
        Raises NotFoundException if the folder is missing or outside the
        store folder."""
        if not path_exists(absolute_path, self._store_folder):
            raise NotFoundException("")
        paths = relative_dirs(absolute_path)
        if files_subset is not None:
            paths = set(paths).intersection(set(files_subset))
        abs_paths = [os.path.join(absolute_path, relpath) for relpath in paths]
        return abs_paths

    def get_snapshot(self, absolute_path="", files_subset=None):
        """returns a dict with the filepaths and md5"""
        abs_paths = self._get_paths(absolute_path, files_subset)
        return {filepath: md5sum(filepath) for filepath in abs_paths}

    def get_file_list(self, absolute_path="", files_subset=None):
        """Return the list of absolute file paths under 'absolute_path',
        optionally restricted to 'files_subset'."""
        abs_paths = self._get_paths(absolute_path, files_subset)
        return abs_paths

    def delete_folder(self, path):
        """Delete folder from disk. Path already contains base dir"""
        if not path_exists(path, self._store_folder):
            raise NotFoundException("")
        rmdir(path)

    def delete_file(self, path):
        """Delete files from bucket. Path already contains base dir"""
        if not path_exists(path, self._store_folder):
            raise NotFoundException("")
        os.remove(path)

    def path_exists(self, path):
        """True if 'path' exists on disk (no containment check here)."""
        return os.path.exists(path)

    def read_file(self, path, lock_file):
        """Read a text file, optionally holding an inter-process lock while
        doing so ('lock_file' falsy => no locking)."""
        with fasteners.InterProcessLock(lock_file) if lock_file else no_op():
            with open(path) as f:
                return f.read()

    def write_file(self, path, contents, lock_file):
        """Write 'contents' to a text file, optionally holding an
        inter-process lock while doing so ('lock_file' falsy => no locking)."""
        with fasteners.InterProcessLock(lock_file) if lock_file else no_op():
            with open(path, "w") as f:
                f.write(contents)

    def base_storage_folder(self):
        """Return the root folder of the disk storage."""
        return self._store_folder