1import os 2import time 3import traceback 4from collections import namedtuple 5 6from six.moves.urllib.parse import parse_qs, urljoin, urlparse, urlsplit 7 8from conans.client.downloaders.download import run_downloader 9from conans.client.downloaders.file_downloader import FileDownloader 10from conans.client.remote_manager import check_compressed_files 11from conans.client.rest.client_routes import ClientV1Router 12from conans.client.rest.file_uploader import FileUploader 13from conans.client.rest.rest_client_common import RestCommonMethods, handle_return_deserializer 14from conans.errors import ConanException, NotFoundException, NoRestV2Available, \ 15 PackageNotFoundException 16from conans.model.info import ConanInfo 17from conans.model.manifest import FileTreeManifest 18from conans.paths import CONANINFO, CONAN_MANIFEST, EXPORT_SOURCES_TGZ_NAME, EXPORT_TGZ_NAME, \ 19 PACKAGE_TGZ_NAME 20from conans.util.files import decode_text 21from conans.util.log import logger 22 23 24def complete_url(base_url, url): 25 """ Ensures that an url is absolute by completing relative urls with 26 the remote url. urls that are already absolute are not modified. 27 """ 28 if bool(urlparse(url).netloc): 29 return url 30 return urljoin(base_url, url) 31 32 33class RestV1Methods(RestCommonMethods): 34 35 @property 36 def router(self): 37 return ClientV1Router(self.remote_url.rstrip("/"), self._artifacts_properties, 38 self._matrix_params) 39 40 def _download_files(self, file_urls, snapshot_md5): 41 """ 42 :param: file_urls is a dict with {filename: url} 43 :param snapshot_md5: dict with {filaname: md5 checksum} of files to be downloaded 44 45 Its a generator, so it yields elements for memory performance 46 """ 47 # Take advantage of filenames ordering, so that conan_package.tgz and conan_export.tgz 48 # can be < conanfile, conaninfo, and sent always the last, so smaller files go first 49 retry = self._config.retry 50 retry_wait = self._config.retry_wait 51 download_cache = self._config.download_cache 52 for filename, resource_url in sorted(file_urls.items(), reverse=True): 53 auth, _ = self._file_server_capabilities(resource_url) 54 md5 = snapshot_md5.get(filename, None) if snapshot_md5 else None 55 assert not download_cache or snapshot_md5, \ 56 "if download_cache is set, we need the file checksums" 57 contents = run_downloader(self.requester, None, self.verify_ssl, retry=retry, 58 retry_wait=retry_wait, download_cache=download_cache, 59 url=resource_url, auth=auth, md5=md5) 60 yield os.path.normpath(filename), contents 61 62 def _file_server_capabilities(self, resource_url): 63 auth = None 64 dedup = False 65 urltokens = urlsplit(resource_url) 66 query_string = urltokens[3] 67 parsed_string_dict = parse_qs(query_string) 68 if "signature" not in parsed_string_dict and "Signature" not in parsed_string_dict: 69 # If monolithic server, we can use same auth, and server understand dedup 70 auth = self.auth 71 dedup = True 72 return auth, dedup 73 74 def get_recipe_manifest(self, ref): 75 """Gets a FileTreeManifest from conans""" 76 # Obtain the URLs 77 url = self.router.recipe_manifest(ref) 78 urls = self._get_file_to_url_dict(url) 79 80 md5s = self.get_recipe_snapshot(ref) if self._config.download_cache else None 81 # Get the digest 82 contents = self._download_files(urls, md5s) 83 # Unroll generator and decode shas (plain text) 84 contents = {key: decode_text(value) for key, value in dict(contents).items()} 85 return FileTreeManifest.loads(contents[CONAN_MANIFEST]) 86 87 def get_package_manifest(self, pref): 88 """Gets a FileTreeManifest from a package""" 89 pref = pref.copy_with_revs(None, None) 90 # Obtain the URLs 91 url = self.router.package_manifest(pref) 92 urls = self._get_file_to_url_dict(url) 93 94 # Get the digest 95 md5s = self.get_package_snapshot(pref) if self._config.download_cache else None 96 contents = self._download_files(urls, md5s) 97 try: 98 # Unroll generator and decode shas (plain text) 99 content = dict(contents)[CONAN_MANIFEST] 100 return FileTreeManifest.loads(decode_text(content)) 101 except Exception as e: 102 msg = "Error retrieving manifest file for package " \ 103 "'{}' from remote ({}): '{}'".format(repr(pref), self.remote_url, e) 104 logger.error(msg) 105 logger.error(traceback.format_exc()) 106 raise ConanException(msg) 107 108 def get_package_info(self, pref, headers): 109 """Gets a ConanInfo file from a package""" 110 pref = pref.copy_with_revs(None, None) 111 url = self.router.package_download_urls(pref) 112 urls = self._get_file_to_url_dict(url, headers=headers) 113 if not urls: 114 raise PackageNotFoundException(pref) 115 116 if CONANINFO not in urls: 117 raise NotFoundException("Package %s doesn't have the %s file!" % (pref, 118 CONANINFO)) 119 md5s = self.get_package_snapshot(pref) if self._config.download_cache else None 120 # Get the info (in memory) 121 contents = self._download_files({CONANINFO: urls[CONANINFO]}, md5s) 122 # Unroll generator and decode shas (plain text) 123 contents = {key: decode_text(value) for key, value in dict(contents).items()} 124 return ConanInfo.loads(contents[CONANINFO]) 125 126 def _get_file_to_url_dict(self, url, data=None, headers=None): 127 """Call to url and decode the json returning a dict of {filepath: url} dict 128 converting the url to a complete url when needed""" 129 urls = self.get_json(url, data=data, headers=headers) 130 return {filepath: complete_url(self.remote_url, url) for filepath, url in urls.items()} 131 132 def _upload_recipe(self, ref, files_to_upload, retry, retry_wait): 133 # Get the upload urls and then upload files 134 url = self.router.recipe_upload_urls(ref) 135 file_sizes = {filename.replace("\\", "/"): os.stat(abs_path).st_size 136 for filename, abs_path in files_to_upload.items()} 137 urls = self._get_file_to_url_dict(url, data=file_sizes) 138 if self._matrix_params: 139 urls = self.router.add_matrix_params(urls) 140 self._upload_files(urls, files_to_upload, self._output, retry, retry_wait, 141 display_name=str(ref)) 142 143 def _upload_package(self, pref, files_to_upload, retry, retry_wait): 144 # Get the upload urls and then upload files 145 url = self.router.package_upload_urls(pref) 146 file_sizes = {filename: os.stat(abs_path).st_size for filename, 147 abs_path in files_to_upload.items()} 148 logger.debug("Requesting upload urls...") 149 urls = self._get_file_to_url_dict(url, data=file_sizes) 150 if self._matrix_params: 151 urls = self.router.add_matrix_params(urls) 152 logger.debug("Requesting upload urls...Done!") 153 short_pref_name = "%s:%s" % (pref.ref, pref.id[0:4]) 154 self._upload_files(urls, files_to_upload, self._output, retry, retry_wait, 155 display_name=short_pref_name) 156 157 def _upload_files(self, file_urls, files, output, retry, retry_wait, display_name=None): 158 t1 = time.time() 159 failed = [] 160 uploader = FileUploader(self.requester, output, self.verify_ssl, self._config) 161 # conan_package.tgz and conan_export.tgz are uploaded first to avoid uploading conaninfo.txt 162 # or conanamanifest.txt with missing files due to a network failure 163 for filename, resource_url in sorted(file_urls.items()): 164 if output and not output.is_terminal: 165 msg = "Uploading: %s" % filename if not display_name else ( 166 "Uploading %s -> %s" % (filename, display_name)) 167 output.writeln(msg) 168 auth, dedup = self._file_server_capabilities(resource_url) 169 try: 170 headers = self._artifacts_properties if not self._matrix_params else {} 171 uploader.upload(resource_url, files[filename], auth=auth, dedup=dedup, 172 retry=retry, retry_wait=retry_wait, 173 headers=headers, display_name=display_name) 174 except Exception as exc: 175 output.error("\nError uploading file: %s, '%s'" % (filename, exc)) 176 failed.append(filename) 177 178 if failed: 179 raise ConanException("Execute upload again to retry upload the failed files: %s" 180 % ", ".join(failed)) 181 else: 182 logger.debug("UPLOAD: \nAll uploaded! Total time: %s\n" % str(time.time() - t1)) 183 184 def _download_files_to_folder(self, file_urls, to_folder, snapshot_md5): 185 """ 186 :param: file_urls is a dict with {filename: abs_path} 187 188 It writes downloaded files to disk (appending to file, only keeps chunks in memory) 189 """ 190 ret = {} 191 # Take advantage of filenames ordering, so that conan_package.tgz and conan_export.tgz 192 # can be < conanfile, conaninfo, and sent always the last, so smaller files go first 193 retry = self._config.retry 194 retry_wait = self._config.retry_wait 195 download_cache = self._config.download_cache 196 for filename, resource_url in sorted(file_urls.items(), reverse=True): 197 if self._output and not self._output.is_terminal: 198 self._output.writeln("Downloading %s" % filename) 199 auth, _ = self._file_server_capabilities(resource_url) 200 abs_path = os.path.join(to_folder, filename) 201 md5 = snapshot_md5.get(filename, None) if snapshot_md5 else None 202 assert not download_cache or snapshot_md5, \ 203 "if download_cache is set, we need the file checksums" 204 run_downloader(self.requester, self._output, self.verify_ssl, retry=retry, 205 retry_wait=retry_wait, download_cache=download_cache, 206 url=resource_url, file_path=abs_path, auth=auth, md5=md5) 207 ret[filename] = abs_path 208 return ret 209 210 def get_recipe(self, ref, dest_folder): 211 urls = self._get_recipe_urls(ref) 212 urls.pop(EXPORT_SOURCES_TGZ_NAME, None) 213 check_compressed_files(EXPORT_TGZ_NAME, urls) 214 md5s = self.get_recipe_snapshot(ref) if self._config.download_cache else None 215 zipped_files = self._download_files_to_folder(urls, dest_folder, md5s) 216 return zipped_files 217 218 def get_recipe_sources(self, ref, dest_folder): 219 urls = self._get_recipe_urls(ref) 220 check_compressed_files(EXPORT_SOURCES_TGZ_NAME, urls) 221 if EXPORT_SOURCES_TGZ_NAME not in urls: 222 return None 223 urls = {EXPORT_SOURCES_TGZ_NAME: urls[EXPORT_SOURCES_TGZ_NAME]} 224 md5s = self.get_recipe_snapshot(ref) if self._config.download_cache else None 225 zipped_files = self._download_files_to_folder(urls, dest_folder, md5s) 226 return zipped_files 227 228 def _get_recipe_urls(self, ref): 229 """Gets a dict of filename:contents from conans""" 230 # Get the conanfile snapshot first 231 url = self.router.recipe_download_urls(ref) 232 urls = self._get_file_to_url_dict(url) 233 return urls 234 235 def get_package(self, pref, dest_folder): 236 urls = self._get_package_urls(pref) 237 check_compressed_files(PACKAGE_TGZ_NAME, urls) 238 md5s = self.get_package_snapshot(pref) if self._config.download_cache else None 239 zipped_files = self._download_files_to_folder(urls, dest_folder, md5s) 240 return zipped_files 241 242 def _get_package_urls(self, pref): 243 """Gets a dict of filename:contents from package""" 244 url = self.router.package_download_urls(pref) 245 urls = self._get_file_to_url_dict(url) 246 if not urls: 247 raise PackageNotFoundException(pref) 248 249 return urls 250 251 def get_recipe_path(self, ref, path): 252 url = self.router.recipe_download_urls(ref) 253 return self._get_path(url, path) 254 255 def get_package_path(self, pref, path): 256 """Gets a file content or a directory list""" 257 url = self.router.package_download_urls(pref) 258 return self._get_path(url, path) 259 260 def _get_path(self, url, path): 261 urls = self._get_file_to_url_dict(url) 262 263 def is_dir(the_path): 264 if the_path == ".": 265 return True 266 for _the_file in urls: 267 if the_path == _the_file: 268 return False 269 elif _the_file.startswith(the_path): 270 return True 271 raise NotFoundException("The specified path doesn't exist") 272 273 if is_dir(path): 274 ret = [] 275 for the_file in urls: 276 if path == "." or the_file.startswith(path): 277 tmp = the_file[len(path) - 1:].split("/", 1)[0] 278 if tmp not in ret: 279 ret.append(tmp) 280 return sorted(ret) 281 else: 282 downloader = FileDownloader(self.requester, None, self.verify_ssl, self._config.retry, 283 self._config.retry_wait) 284 auth, _ = self._file_server_capabilities(urls[path]) 285 content = downloader.download(urls[path], auth=auth) 286 287 return decode_text(content) 288 289 def _get_snapshot(self, url): 290 try: 291 snapshot = self.get_json(url) 292 snapshot = {os.path.normpath(filename): the_md5 293 for filename, the_md5 in snapshot.items()} 294 except NotFoundException: 295 snapshot = [] 296 return snapshot 297 298 @handle_return_deserializer() 299 def _remove_conanfile_files(self, ref, files): 300 self.check_credentials() 301 payload = {"files": [filename.replace("\\", "/") for filename in files]} 302 url = self.router.remove_recipe_files(ref) 303 return self._post_json(url, payload) 304 305 @handle_return_deserializer() 306 def remove_packages(self, ref, package_ids): 307 """ Remove any packages specified by package_ids""" 308 self.check_credentials() 309 payload = {"package_ids": package_ids} 310 url = self.router.remove_packages(ref) 311 ret = self._post_json(url, payload) 312 if not package_ids and ret.status_code == 404: 313 # Double check if it is a 404 because there are no packages 314 try: 315 if not self.search_packages(ref, query=None): 316 return namedtuple("_", ['status_code', 'content'])(200, b'') 317 except Exception as e: 318 logger.warning("Unexpected error searching {} packages" 319 " in remote {}: {}".format(ref, self.remote_url, e)) 320 return ret 321 322 @handle_return_deserializer() 323 def remove_conanfile(self, ref): 324 """ Remove a recipe and packages """ 325 self.check_credentials() 326 url = self.router.remove_recipe(ref) 327 logger.debug("REST: remove: %s" % url) 328 response = self.requester.delete(url, auth=self.auth, headers=self.custom_headers, 329 verify=self.verify_ssl) 330 return response 331 332 def get_recipe_revisions(self, ref): 333 raise NoRestV2Available("The remote doesn't support revisions") 334 335 def get_package_revisions(self, pref): 336 raise NoRestV2Available("The remote doesn't support revisions") 337 338 def get_latest_recipe_revision(self, ref): 339 raise NoRestV2Available("The remote doesn't support revisions") 340 341 def get_latest_package_revision(self, pref, headers): 342 raise NoRestV2Available("The remote doesn't support revisions") 343 344 def _post_json(self, url, payload): 345 logger.debug("REST: post: %s" % url) 346 response = self.requester.post(url, 347 auth=self.auth, 348 headers=self.custom_headers, 349 verify=self.verify_ssl, 350 json=payload) 351 return response 352