import os, subprocess
import argparse
import asyncio
import threading
import copy
import shutil
from concurrent.futures.thread import ThreadPoolExecutor
from pathlib import Path
import typing as T

from . import mlog
from .mesonlib import quiet_git, GitException, Popen_safe, MesonException, windows_proof_rmtree
from .wrap.wrap import PackageDefinition, Resolver, WrapException, ALL_TYPES
from .wrap import wraptool

ALL_TYPES_STRING = ', '.join(ALL_TYPES)

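# Thread-safe progress logger shared by all Runner threads: it keeps a single
# "Progress: x / y (task, ...)" line updated on the terminal and replays each
# task's queued log messages once that task completes.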
class Logger:
    def __init__(self, total_tasks: int) -> None:
        self.lock = threading.Lock()
        self.total_tasks = total_tasks
        self.completed_tasks = 0
        self.running_tasks = set()
        self.should_erase_line = ''

    def flush(self) -> None:
        if self.should_erase_line:
            print(self.should_erase_line, end='\r')
            self.should_erase_line = ''

    def print_progress(self) -> None:
        line = f'Progress: {self.completed_tasks} / {self.total_tasks}'
        max_len = shutil.get_terminal_size().columns - len(line)
        running = ', '.join(self.running_tasks)
        if len(running) + 3 > max_len:
            running = running[:max_len - 6] + '...'
        line = line + f' ({running})'
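        # '\x1b[K' is the ANSI "erase to end of line" sequence; printing it at
        # the start of the next write clears any leftover characters from a
        # longer previous progress line.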
        print(self.should_erase_line, line, sep='', end='\r')
        self.should_erase_line = '\x1b[K'

    def start(self, wrap_name: str) -> None:
        with self.lock:
            self.running_tasks.add(wrap_name)
            self.print_progress()

    def done(self, wrap_name: str, log_queue: T.List[T.Tuple[mlog.TV_LoggableList, T.Any]]) -> None:
        with self.lock:
            self.flush()
            for args, kwargs in log_queue:
                mlog.log(*args, **kwargs)
            self.running_tasks.remove(wrap_name)
            self.completed_tasks += 1
            self.print_progress()


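# Runs a single subcommand (update, checkout, download, foreach, purge) for one
# subproject, queuing its log output so the Logger can print it atomically.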
class Runner:
    def __init__(self, logger: Logger, r: Resolver, wrap: PackageDefinition, repo_dir: str, options: argparse.Namespace) -> None:
        # FIXME: Make a copy because Resolver.resolve() is a stateful method
        # that cannot be called from multiple threads.
        self.wrap_resolver = copy.copy(r)
        self.wrap = wrap
        self.repo_dir = repo_dir
        self.options = options
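        # options.subprojects_func is an unbound Runner method selected by
        # argparse (see add_arguments below); bind it to this instance.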
        self.run_method = options.subprojects_func.__get__(self)
        self.log_queue = []
        self.logger = logger

    def log(self, *args, **kwargs):
        self.log_queue.append((args, kwargs))

    def run(self):
        self.logger.start(self.wrap.name)
        try:
            result = self.run_method()
        except MesonException as e:
            self.log(mlog.red('Error:'), str(e))
            result = False
        self.logger.done(self.wrap.name, self.log_queue)
        return result

    def update_wrapdb_file(self):
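        # If the wrap file points at a WrapDB patch (it has a patch_url), check
        # for a newer upstream version/revision and rewrite the wrap file if needed.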
        try:
            patch_url = self.wrap.get('patch_url')
            branch, revision = wraptool.parse_patch_url(patch_url)
        except WrapException:
            return
        new_branch, new_revision = wraptool.get_latest_version(self.wrap.name)
        if new_branch != branch or new_revision != revision:
            wraptool.update_wrap_file(self.wrap.filename, self.wrap.name, new_branch, new_revision)
            self.log('  -> New wrap file downloaded.')

    def update_file(self):
        self.update_wrapdb_file()
        if not os.path.isdir(self.repo_dir):
            # The subproject is not needed, or it is a tarball that was
            # extracted into e.g. a 'libfoo-1.0' directory and the version has
            # been bumped so the new directory is 'libfoo-2.0'. In that case
            # forcing a meson reconfigure will download and use the new tarball.
            self.log('  -> Not used.')
            return True
        elif self.options.reset:
            # Delete the existing directory and redownload. It is possible that
            # nothing changed, but we have no way to know. Hopefully the
            # tarballs are still cached.
            windows_proof_rmtree(self.repo_dir)
            try:
                self.wrap_resolver.resolve(self.wrap.name, 'meson')
                self.log('  -> New version extracted')
                return True
            except WrapException as e:
                self.log('  ->', mlog.red(str(e)))
                return False
        else:
            # The subproject has not changed, or the new source and/or patch
            # tarballs should be extracted in the same directory as the
            # previous version.
            self.log('  -> Subproject has not changed, or the new source/patch needs to be extracted in the same location.')
            self.log('     Pass the --reset option to delete the directory and redownload.')
            return False

    def git_output(self, cmd):
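        # Run a git command in the subproject repository and return its output;
        # raises GitException if the command fails (check=True).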
        return quiet_git(cmd, self.repo_dir, check=True)[1]

    def git_verbose(self, cmd):
        self.log(self.git_output(cmd))

    def git_stash(self):
        # 'git diff --quiet HEAD' exits with a non-zero status (so quiet_git
        # reports failure) when there is something to stash. We don't want to
        # stash when there is nothing to stash, because that would print a
        # spurious "No local changes to save".
        if not quiet_git(['diff', '--quiet', 'HEAD'], self.repo_dir)[0]:
            # Don't pipe stdout here, because we want the user to see that
            # their changes have been saved.
            self.git_verbose(['stash'])

    def git_show(self):
        commit_message = self.git_output(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'])
        parts = [s.strip() for s in commit_message.split('\n')]
        self.log('  ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3]))

    def git_rebase(self, revision):
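        # Rebase the current branch onto FETCH_HEAD (set by the preceding
        # fetch); rebase.autoStash temporarily stashes local changes so the
        # rebase can proceed.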
        try:
            self.git_output(['-c', 'rebase.autoStash=true', 'rebase', 'FETCH_HEAD'])
        except GitException as e:
            self.log('  -> Could not rebase', mlog.bold(self.repo_dir), 'onto', mlog.bold(revision))
            self.log(mlog.red(e.output))
            self.log(mlog.red(str(e)))
            return False
        return True

    def git_reset(self, revision):
        try:
            # Stash local changes so they are not lost by mistake; commits can
            # always be recovered from the reflog.
            self.git_stash()
            self.git_output(['reset', '--hard', 'FETCH_HEAD'])
        except GitException as e:
            self.log('  -> Could not reset', mlog.bold(self.repo_dir), 'to', mlog.bold(revision))
            self.log(mlog.red(e.output))
            self.log(mlog.red(str(e)))
            return False
        return True

    def git_checkout(self, revision, create=False):
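        # --ignore-other-worktrees allows the checkout even if the revision is
        # already checked out in another worktree of the same repository.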
        cmd = ['checkout', '--ignore-other-worktrees', revision, '--']
        if create:
            cmd.insert(1, '-b')
        try:
            # Stash local changes so they are not lost by mistake; commits can
            # always be recovered from the reflog.
            self.git_stash()
            self.git_output(cmd)
        except GitException as e:
            self.log('  -> Could not checkout', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
            self.log(mlog.red(e.output))
            self.log(mlog.red(str(e)))
            return False
        return True

    def git_checkout_and_reset(self, revision):
        # revision could be a branch that already exists but is outdated, so we
        # still have to reset after the checkout.
        success = self.git_checkout(revision)
        if success:
            success = self.git_reset(revision)
        return success

    def git_checkout_and_rebase(self, revision):
        # revision could be a branch that already exists but is outdated, so we
        # still have to rebase after the checkout.
        success = self.git_checkout(revision)
        if success:
            success = self.git_rebase(revision)
        return success

    def update_git(self):
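        # Overall flow: make sure the directory is a usable git repository
        # (or redownload it with --reset), ensure 'origin' matches the wrap
        # file, fetch the wanted revision into FETCH_HEAD, then rebase or
        # hard-reset onto it depending on the current branch and --reset.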
        if not os.path.isdir(self.repo_dir):
            self.log('  -> Not used.')
            return True
        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
            if self.options.reset:
                # Delete the existing directory and redownload.
                windows_proof_rmtree(self.repo_dir)
                try:
                    self.wrap_resolver.resolve(self.wrap.name, 'meson')
                    self.update_git_done()
                    return True
                except WrapException as e:
                    self.log('  ->', mlog.red(str(e)))
                    return False
            else:
                self.log('  -> Not a git repository.')
                self.log('Pass the --reset option to delete the directory and redownload.')
                return False
        revision = self.wrap.values.get('revision')
        url = self.wrap.values.get('url')
        push_url = self.wrap.values.get('push-url')
        if not revision or not url:
            # It could be a detached git submodule for example.
            self.log('  -> No revision or URL specified.')
            return True
        try:
            origin_url = self.git_output(['remote', 'get-url', 'origin']).strip()
        except GitException as e:
            self.log('  -> Failed to determine current origin URL in', mlog.bold(self.repo_dir))
            self.log(mlog.red(e.output))
            self.log(mlog.red(str(e)))
            return False
        if self.options.reset:
            try:
                self.git_output(['remote', 'set-url', 'origin', url])
                if push_url:
                    self.git_output(['remote', 'set-url', '--push', 'origin', push_url])
            except GitException as e:
                self.log('  -> Failed to reset origin URL in', mlog.bold(self.repo_dir))
                self.log(mlog.red(e.output))
                self.log(mlog.red(str(e)))
                return False
        elif url != origin_url:
            self.log(f'  -> URL changed from {origin_url!r} to {url!r}')
            return False
        try:
            # Same as `git branch --show-current`, but compatible with older git versions.
            branch = self.git_output(['rev-parse', '--abbrev-ref', 'HEAD']).strip()
            branch = branch if branch != 'HEAD' else ''
        except GitException as e:
            self.log('  -> Failed to determine current branch in', mlog.bold(self.repo_dir))
            self.log(mlog.red(e.output))
            self.log(mlog.red(str(e)))
            return False
        if self.wrap_resolver.is_git_full_commit_id(revision) and \
                quiet_git(['rev-parse', '--verify', revision + '^{commit}'], self.repo_dir)[0]:
            # The revision we need is both a commit id and locally available,
            # so we do not need to fetch it because it cannot change. Instead,
            # trick git into setting FETCH_HEAD from the local commit, just in case.
            self.git_output(['fetch', '.', revision])
        else:
            try:
                # Fetch only the revision we need; this avoids fetching useless branches.
                # revision can be a branch, a tag or a commit id. In all cases we want
                # FETCH_HEAD to be set to the desired commit and "git checkout <revision>"
                # to either switch to an existing/new branch, or detach to a tag/commit.
                # It is more complicated than it first appears, see the discussion here:
                # https://github.com/mesonbuild/meson/pull/7723#discussion_r488816189.
                heads_refmap = '+refs/heads/*:refs/remotes/origin/*'
                tags_refmap = '+refs/tags/*:refs/tags/*'
                self.git_output(['fetch', '--refmap', heads_refmap, '--refmap', tags_refmap, 'origin', revision])
            except GitException as e:
                self.log('  -> Could not fetch revision', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
                if quiet_git(['rev-parse', revision + '^{commit}'], self.repo_dir)[0]:
                    self.log(mlog.yellow('WARNING:'), 'Proceeding with locally available copy')
                    # Trick git into setting FETCH_HEAD from the local revision.
                    quiet_git(['fetch', '.', revision], self.repo_dir)
                else:
                    self.log(mlog.red(e.output))
                    self.log(mlog.red(str(e)))
                    return False

        if branch == '':
            # We are currently in detached HEAD mode.
            if self.options.reset:
                success = self.git_checkout_and_reset(revision)
            else:
                success = self.git_checkout_and_rebase(revision)
        elif branch == revision:
            # We are on the same branch. A reset could still be needed in case
            # a force push happened on the remote repository.
            if self.options.reset:
                success = self.git_reset(revision)
            else:
                success = self.git_rebase(revision)
        else:
            # We are on another branch: either the user created their own branch
            # and we should rebase it, or the revision changed in the wrap file
            # and we need to check out the new branch.
            if self.options.reset:
                success = self.git_checkout_and_reset(revision)
            else:
                success = self.git_rebase(revision)
        if success:
            self.update_git_done()
        return success

    def update_git_done(self):
        self.git_output(['submodule', 'update', '--checkout', '--recursive'])
        self.git_show()

    def update_hg(self):
        if not os.path.isdir(self.repo_dir):
            self.log('  -> Not used.')
            return True
        revno = self.wrap.get('revision')
        if revno.lower() == 'tip':
            # A failed pull is not a fatal error, because otherwise it would be
            # impossible to develop without a working network connection.
            subprocess.call(['hg', 'pull'], cwd=self.repo_dir)
        else:
            if subprocess.call(['hg', 'checkout', revno], cwd=self.repo_dir) != 0:
                subprocess.check_call(['hg', 'pull'], cwd=self.repo_dir)
                subprocess.check_call(['hg', 'checkout', revno], cwd=self.repo_dir)
        return True

    def update_svn(self):
        if not os.path.isdir(self.repo_dir):
            self.log('  -> Not used.')
            return True
        revno = self.wrap.get('revision')
        p, out, _ = Popen_safe(['svn', 'info', '--show-item', 'revision', self.repo_dir])
        current_revno = out.strip()
        if current_revno == revno:
            return True
        if revno.lower() == 'head':
            # A failed update is not a fatal error, because otherwise it would
            # be impossible to develop without a working network connection.
            subprocess.call(['svn', 'update'], cwd=self.repo_dir)
        else:
            subprocess.check_call(['svn', 'update', '-r', revno], cwd=self.repo_dir)
        return True

    def update(self):
        self.log(f'Updating {self.wrap.name}...')
        if self.wrap.type == 'file':
            return self.update_file()
        elif self.wrap.type == 'git':
            return self.update_git()
        elif self.wrap.type == 'hg':
            return self.update_hg()
        elif self.wrap.type == 'svn':
            return self.update_svn()
        elif self.wrap.type is None:
            self.log('  -> Cannot update subproject with no wrap file')
        else:
            self.log('  -> Cannot update', self.wrap.type, 'subproject')
        return True

    def checkout(self):
        if self.wrap.type != 'git' or not os.path.isdir(self.repo_dir):
            return True
        branch_name = self.options.branch_name if self.options.branch_name else self.wrap.get('revision')
        if not branch_name:
            # It could be a detached git submodule for example.
            return True
        self.log(f'Checkout {branch_name} in {self.wrap.name}...')
        if self.git_checkout(branch_name, create=self.options.b):
            self.git_show()
            return True
        return False

    def download(self):
        self.log(f'Download {self.wrap.name}...')
        if os.path.isdir(self.repo_dir):
            self.log('  -> Already downloaded')
            return True
        try:
            self.wrap_resolver.resolve(self.wrap.name, 'meson')
            self.log('  -> done')
        except WrapException as e:
            self.log('  ->', mlog.red(str(e)))
            return False
        return True

    def foreach(self):
        self.log(f'Executing command in {self.repo_dir}')
        if not os.path.isdir(self.repo_dir):
            self.log('  -> Not downloaded yet')
            return True
        cmd = [self.options.command] + self.options.args
        p, out, _ = Popen_safe(cmd, stderr=subprocess.STDOUT, cwd=self.repo_dir)
        if p.returncode != 0:
            err_message = "Command '{}' returned non-zero exit status {}.".format(" ".join(cmd), p.returncode)
            self.log('  -> ', mlog.red(err_message))
            self.log(out, end='')
            return False

        self.log(out, end='')
        return True

    def purge(self) -> bool:
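        # Remove everything this wrap produced: redirect files, cached
        # source/patch tarballs (with --include-cache) and the extracted
        # source directory. Nothing is actually deleted unless --confirm
        # was passed.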
        # If the subproject is not wrap-based, don't remove it.
        if not self.wrap.type:
            return True

        if self.wrap.redirected:
            redirect_file = Path(self.wrap.original_filename).resolve()
            if self.options.confirm:
                redirect_file.unlink()
            mlog.log(f'Deleting {redirect_file}')

        if self.wrap.type == 'redirect':
            redirect_file = Path(self.wrap.filename).resolve()
            if self.options.confirm:
                redirect_file.unlink()
            self.log(f'Deleting {redirect_file}')

        if self.options.include_cache:
            packagecache = Path(self.wrap_resolver.cachedir).resolve()
            try:
                subproject_cache_file = packagecache / self.wrap.get("source_filename")
                if subproject_cache_file.is_file():
                    if self.options.confirm:
                        subproject_cache_file.unlink()
                    self.log(f'Deleting {subproject_cache_file}')
            except WrapException:
                pass

            try:
                subproject_patch_file = packagecache / self.wrap.get("patch_filename")
                if subproject_patch_file.is_file():
                    if self.options.confirm:
                        subproject_patch_file.unlink()
                    self.log(f'Deleting {subproject_patch_file}')
            except WrapException:
                pass

            # Don't log that we will remove an empty directory. Since purge is
            # parallelized, another thread could have deleted it already.
            try:
                if not any(packagecache.iterdir()):
                    windows_proof_rmtree(str(packagecache))
            except FileNotFoundError:
                pass

        # NOTE: Do not use .resolve() here; the subproject directory may be a symlink.
        subproject_source_dir = Path(self.repo_dir)
        # Resolve just the parent, only to print the full path.
        subproject_source_dir = subproject_source_dir.parent.resolve() / subproject_source_dir.name

        # Don't follow symlinks. This is covered by the next if statement, but
        # why not be doubly sure.
        if subproject_source_dir.is_symlink():
            if self.options.confirm:
                subproject_source_dir.unlink()
            self.log(f'Deleting {subproject_source_dir}')
            return True
        if not subproject_source_dir.is_dir():
            return True

        try:
            if self.options.confirm:
                windows_proof_rmtree(str(subproject_source_dir))
            self.log(f'Deleting {subproject_source_dir}')
        except OSError as e:
            mlog.error(f'Unable to remove: {subproject_source_dir}: {e}')
            return False

        return True

    @staticmethod
    def post_purge(options):
        if not options.confirm:
            mlog.log('')
            mlog.log('Nothing has been deleted, run again with --confirm to apply.')

def add_common_arguments(p):
    p.add_argument('--sourcedir', default='.',
                   help='Path to source directory')
    p.add_argument('--types', default='',
                   help=f'Comma-separated list of subproject types. Supported types are: {ALL_TYPES_STRING} (default: all)')
    p.add_argument('--num-processes', default=None, type=int,
                   help='How many parallel processes to use (Since 0.59.0).')

def add_subprojects_argument(p):
    p.add_argument('subprojects', nargs='*',
                   help='List of subprojects (default: all)')

def add_arguments(parser):
    subparsers = parser.add_subparsers(title='Commands', dest='command')
    subparsers.required = True

    p = subparsers.add_parser('update', help='Update all subprojects from wrap files')
    p.add_argument('--rebase', default=True, action='store_true',
                   help='Rebase your branch on top of wrap\'s revision. ' + \
                        'Deprecated, it is now the default behaviour. (git only)')
    p.add_argument('--reset', default=False, action='store_true',
                   help='Checkout wrap\'s revision and hard reset to that commit. (git only)')
    add_common_arguments(p)
    add_subprojects_argument(p)
    p.set_defaults(subprojects_func=Runner.update)

    p = subparsers.add_parser('checkout', help='Checkout a branch (git only)')
    p.add_argument('-b', default=False, action='store_true',
                   help='Create a new branch')
    p.add_argument('branch_name', nargs='?',
                   help='Name of the branch to checkout or create (default: revision set in wrap file)')
    add_common_arguments(p)
    add_subprojects_argument(p)
    p.set_defaults(subprojects_func=Runner.checkout)

    p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' +
                                               'Already downloaded subprojects are not modified. ' +
                                               'This can be used to pre-fetch all subprojects and avoid downloads during configure.')
    add_common_arguments(p)
    add_subprojects_argument(p)
    p.set_defaults(subprojects_func=Runner.download)

    p = subparsers.add_parser('foreach', help='Execute a command in each subproject directory.')
    p.add_argument('command', metavar='command ...',
                   help='Command to execute in each subproject directory')
    p.add_argument('args', nargs=argparse.REMAINDER,
                   help=argparse.SUPPRESS)
    add_common_arguments(p)
    p.set_defaults(subprojects=[])
    p.set_defaults(subprojects_func=Runner.foreach)

    p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
    add_common_arguments(p)
    add_subprojects_argument(p)
    p.add_argument('--include-cache', action='store_true', default=False, help='Remove the package cache as well')
    p.add_argument('--confirm', action='store_true', default=False, help='Confirm the removal of subproject artifacts')
    p.set_defaults(subprojects_func=Runner.purge)
    p.set_defaults(post_func=Runner.post_purge)

def run(options):
    src_dir = os.path.relpath(os.path.realpath(options.sourcedir))
    if not os.path.isfile(os.path.join(src_dir, 'meson.build')):
        mlog.error('Directory', mlog.bold(src_dir), 'does not seem to be a Meson source directory.')
        return 1
    subprojects_dir = os.path.join(src_dir, 'subprojects')
    if not os.path.isdir(subprojects_dir):
        mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.')
        return 0
    r = Resolver(src_dir, 'subprojects')
    if options.subprojects:
        wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
    else:
        wraps = r.wraps.values()
    types = [t.strip() for t in options.types.split(',')] if options.types else []
    for t in types:
        if t not in ALL_TYPES:
            raise MesonException(f'Unknown subproject type {t!r}, supported types are: {ALL_TYPES_STRING}')
    tasks = []
    task_names = []
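    # Each Runner executes in a worker thread; asyncio is only used to schedule
    # the thread pool futures and gather their results.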
    loop = asyncio.get_event_loop()
    executor = ThreadPoolExecutor(options.num_processes)
    if types:
        wraps = [wrap for wrap in wraps if wrap.type in types]
    logger = Logger(len(wraps))
    for wrap in wraps:
        dirname = Path(subprojects_dir, wrap.directory).as_posix()
        runner = Runner(logger, r, wrap, dirname, options)
        task = loop.run_in_executor(executor, runner.run)
        tasks.append(task)
        task_names.append(wrap.name)
    results = loop.run_until_complete(asyncio.gather(*tasks))
    logger.flush()
    post_func = getattr(options, 'post_func', None)
    if post_func:
        post_func(options)
    failures = [name for name, success in zip(task_names, results) if not success]
    if failures:
        m = 'Please check the logs above, as the command failed in some subprojects; they could have been left in a conflicted state: '
        m += ', '.join(failures)
        mlog.warning(m)
    return len(failures)