1 //! The Cargo "compile" operation.
2 //!
3 //! This module contains the entry point for starting the compilation process
4 //! for commands like `build`, `test`, `doc`, `rustc`, etc.
5 //!
6 //! The `compile` function will do all the work to compile a workspace. A
7 //! rough outline is:
8 //!
9 //! - Resolve the dependency graph (see `ops::resolve`).
10 //! - Download any packages needed (see `PackageSet`).
11 //! - Generate a list of top-level "units" of work for the targets the user
12 //! requested on the command-line. Each `Unit` corresponds to a compiler
13 //! invocation. This is done in this module (`generate_targets`).
14 //! - Build the graph of `Unit` dependencies (see
15 //! `core::compiler::context::unit_dependencies`).
16 //! - Create a `Context` which will perform the following steps:
17 //! - Prepare the `target` directory (see `Layout`).
18 //! - Create a job queue (see `JobQueue`). The queue checks the
19 //! fingerprint of each `Unit` to determine if it should run or be
20 //! skipped.
21 //! - Execute the queue. Each leaf in the queue's dependency graph is
22 //! executed, and then removed from the graph when finished. This
23 //! repeats until the queue is empty.
24
25 use std::collections::{BTreeSet, HashMap, HashSet};
26 use std::hash::{Hash, Hasher};
27 use std::sync::Arc;
28
29 use crate::core::compiler::unit_dependencies::build_unit_dependencies;
30 use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
31 use crate::core::compiler::{standard_lib, TargetInfo};
32 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
33 use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
34 use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
35 use crate::core::profiles::{Profiles, UnitFor};
36 use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
37 use crate::core::resolver::{HasDevUnits, Resolve};
38 use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target};
39 use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace};
40 use crate::drop_println;
41 use crate::ops;
42 use crate::ops::resolve::WorkspaceResolve;
43 use crate::util::config::Config;
44 use crate::util::interning::InternedString;
45 use crate::util::restricted_names::is_glob_pattern;
46 use crate::util::{closest_msg, profile, CargoResult, StableHasher};
47
48 use anyhow::Context as _;
49
/// Contains information about how a package should be compiled.
///
/// Note on distinction between `CompileOptions` and `BuildConfig`:
/// `BuildConfig` contains values that need to be retained after
/// `BuildContext` is created. The other fields are no longer necessary. Think
/// of it as `CompileOptions` are high-level settings requested on the
/// command-line, and `BuildConfig` are low-level settings for actually
/// driving `rustc`.
#[derive(Debug)]
pub struct CompileOptions {
    /// Configuration information for a rustc build.
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only).
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments,
    /// note that this only accounts for the *final* invocation of rustc.
    pub target_rustc_args: Option<Vec<String>>,
    /// Extra arguments passed to all selected targets for rustdoc.
    pub local_rustdoc_args: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: bool,
}
83
84 impl<'a> CompileOptions {
new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions>85 pub fn new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions> {
86 Ok(CompileOptions {
87 build_config: BuildConfig::new(config, None, &[], mode)?,
88 cli_features: CliFeatures::new_all(false),
89 spec: ops::Packages::Packages(Vec::new()),
90 filter: CompileFilter::Default {
91 required_features_filterable: false,
92 },
93 target_rustdoc_args: None,
94 target_rustc_args: None,
95 local_rustdoc_args: None,
96 rustdoc_document_private_items: false,
97 honor_rust_version: true,
98 })
99 }
100 }
101
/// The set of packages selected on the command line.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Packages {
    /// Use the workspace's default members.
    Default,
    /// Every member of the workspace (`--workspace`).
    All,
    /// All workspace members except the listed names/patterns (`--exclude`).
    OptOut(Vec<String>),
    /// Only the listed package names/patterns (`-p`/`--package`).
    Packages(Vec<String>),
}
109
impl Packages {
    /// Builds a package selection from the raw `--workspace` (`all`),
    /// `--exclude`, and `--package` command-line values.
    pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
        Ok(match (all, exclude.len(), package.len()) {
            (false, 0, 0) => Packages::Default,
            (false, 0, _) => Packages::Packages(package),
            // `--exclude` is only meaningful relative to the whole workspace.
            (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"),
            (true, 0, _) => Packages::All,
            (true, _, _) => Packages::OptOut(exclude),
        })
    }

    /// Converts selected packages from a workspace to `PackageIdSpec`s.
    pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult<Vec<PackageIdSpec>> {
        let specs = match self {
            Packages::All => ws
                .members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
            Packages::OptOut(opt_out) => {
                // Split the opt-out list into glob patterns and plain names.
                // Matched names/patterns are consumed while filtering so any
                // leftovers can be reported afterwards.
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let specs = ws
                    .members()
                    .filter(|pkg| {
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .map(Package::package_id)
                    .map(PackageIdSpec::from_package_id)
                    .collect();
                // Excluding something that doesn't exist only warrants a
                // warning, not a hard error.
                let warn = |e| ws.config().shell().warn(e);
                emit_package_not_found(ws, names, true).or_else(warn)?;
                emit_pattern_not_found(ws, patterns, true).or_else(warn)?;
                specs
            }
            Packages::Packages(packages) if packages.is_empty() => {
                vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, packages) = opt_patterns_and_names(opt_in)?;
                let mut specs = packages
                    .iter()
                    .map(|p| PackageIdSpec::parse(p))
                    .collect::<CargoResult<Vec<_>>>()?;
                if !patterns.is_empty() {
                    let matched_pkgs = ws
                        .members()
                        .filter(|pkg| match_patterns(pkg, &mut patterns))
                        .map(Package::package_id)
                        .map(PackageIdSpec::from_package_id);
                    specs.extend(matched_pkgs);
                }
                // A `-p` pattern that matched nothing is a hard error here.
                emit_pattern_not_found(ws, patterns, false)?;
                specs
            }
            Packages::Default => ws
                .default_members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
        };
        if specs.is_empty() {
            if ws.is_virtual() {
                anyhow::bail!(
                    "manifest path `{}` contains no package: The manifest is virtual, \
                     and the workspace has no members.",
                    ws.root().display()
                )
            }
            anyhow::bail!("no packages to compile")
        }
        Ok(specs)
    }

    /// Gets a list of selected packages from a workspace.
    pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult<Vec<&'ws Package>> {
        let packages: Vec<_> = match self {
            Packages::Default => ws.default_members().collect(),
            Packages::All => ws.members().collect(),
            Packages::OptOut(opt_out) => {
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        // Keep members that are neither named nor matched by a pattern.
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, true)?;
                emit_pattern_not_found(ws, patterns, true)?;
                packages
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        // Keep members that are named or matched by a pattern.
                        names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, false)?;
                emit_pattern_not_found(ws, patterns, false)?;
                packages
            }
        };
        Ok(packages)
    }

    /// Returns whether or not the user needs to pass a `-p` flag to target a
    /// specific package in the workspace.
    pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool {
        match self {
            Packages::Default => ws.default_members().count() > 1,
            Packages::All => ws.members().count() > 1,
            Packages::Packages(_) => true,
            Packages::OptOut(_) => true,
        }
    }
}
227
/// How the library target participates in the selection.
#[derive(Debug, PartialEq, Eq)]
pub enum LibRule {
    /// Include the library, fail if not present
    True,
    /// Include the library if present
    Default,
    /// Exclude the library
    False,
}
237
/// Selects targets of one kind (bins, examples, tests, or benches).
#[derive(Debug)]
pub enum FilterRule {
    /// Select every target of this kind.
    All,
    /// Select only the targets with these names; an empty list selects none.
    Just(Vec<String>),
}
243
/// Filter to apply to the root package to select which targets will be built.
#[derive(Debug)]
pub enum CompileFilter {
    /// No explicit target selection; build the default set of targets.
    Default {
        /// Flag whether targets can be safely skipped when required-features are not satisfied.
        required_features_filterable: bool,
    },
    /// Build only the targets selected by the following rules.
    Only {
        /// Whether `--all-targets` was requested.
        all_targets: bool,
        /// Rule for the library target.
        lib: LibRule,
        /// Rule for binary targets.
        bins: FilterRule,
        /// Rule for example targets.
        examples: FilterRule,
        /// Rule for test targets.
        tests: FilterRule,
        /// Rule for bench targets.
        benches: FilterRule,
    },
}
259
compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>>260 pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
261 let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
262 compile_with_exec(ws, options, &exec)
263 }
264
265 /// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build
266 /// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through.
compile_with_exec<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc<dyn Executor>, ) -> CargoResult<Compilation<'a>>267 pub fn compile_with_exec<'a>(
268 ws: &Workspace<'a>,
269 options: &CompileOptions,
270 exec: &Arc<dyn Executor>,
271 ) -> CargoResult<Compilation<'a>> {
272 ws.emit_warnings()?;
273 compile_ws(ws, options, exec)
274 }
275
compile_ws<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc<dyn Executor>, ) -> CargoResult<Compilation<'a>>276 pub fn compile_ws<'a>(
277 ws: &Workspace<'a>,
278 options: &CompileOptions,
279 exec: &Arc<dyn Executor>,
280 ) -> CargoResult<Compilation<'a>> {
281 let interner = UnitInterner::new();
282 let bcx = create_bcx(ws, options, &interner)?;
283 if options.build_config.unit_graph {
284 unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?;
285 return Compilation::new(&bcx);
286 }
287 let _p = profile::start("compiling");
288 let cx = Context::new(&bcx)?;
289 cx.compile(exec)
290 }
291
print<'a>( ws: &Workspace<'a>, options: &CompileOptions, print_opt_value: &str, ) -> CargoResult<()>292 pub fn print<'a>(
293 ws: &Workspace<'a>,
294 options: &CompileOptions,
295 print_opt_value: &str,
296 ) -> CargoResult<()> {
297 let CompileOptions {
298 ref build_config,
299 ref target_rustc_args,
300 ..
301 } = *options;
302 let config = ws.config();
303 let rustc = config.load_global_rustc(Some(ws))?;
304 for (index, kind) in build_config.requested_kinds.iter().enumerate() {
305 if index != 0 {
306 drop_println!(config);
307 }
308 let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?;
309 let mut process = rustc.process();
310 process.args(&target_info.rustflags);
311 if let Some(args) = target_rustc_args {
312 process.args(args);
313 }
314 if let CompileKind::Target(t) = kind {
315 process.arg("--target").arg(t.short_name());
316 }
317 process.arg("--print").arg(print_opt_value);
318 process.exec()?;
319 }
320 Ok(())
321 }
322
/// Performs all the work of preparing a compile without running the compiler:
/// validates options, resolves dependencies and features, downloads the
/// selected packages, generates the root `Unit`s, and builds the full unit
/// dependency graph, returning everything bundled in a `BuildContext`.
pub fn create_bcx<'a, 'cfg>(
    ws: &'a Workspace<'cfg>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'cfg>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref local_rustdoc_args,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let config = ws.config();

    // Perform some pre-flight validation.
    // Warn about commonly mistyped environment variable names.
    match build_config.mode {
        CompileMode::Test
        | CompileMode::Build
        | CompileMode::Check { .. }
        | CompileMode::Bench
        | CompileMode::RunCustomBuild => {
            if std::env::var("RUST_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        CompileMode::Doc { .. } | CompileMode::Doctest => {
            if std::env::var("RUSTDOC_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    config.validate_term_config()?;

    let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    // Dev-dependencies only need to be resolved when the filter/mode may
    // build tests, benches, or examples.
    let has_dev_units = if filter.need_dev_deps(build_config.mode) {
        HasDevUnits::Yes
    } else {
        HasDevUnits::No
    };
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        resolved_features,
    } = resolve;

    // With `-Zbuild-std`, resolve the standard library as an additional
    // package set so it can be compiled as part of this build.
    let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
        if build_config.build_plan {
            config
                .shell()
                .warn("-Zbuild-std does not currently fully support --build-plan")?;
        }
        if build_config.requested_kinds[0].is_host() {
            // TODO: This should eventually be fixed. Unfortunately it is not
            // easy to get the host triple in BuildConfig. Consider changing
            // requested_target to an enum, or some other approach.
            anyhow::bail!("-Zbuild-std requires --target");
        }
        let (std_package_set, std_resolve, std_features) =
            standard_lib::resolve_std(ws, &target_data, &build_config.requested_kinds, crates)?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(config);

        // Testing a non-member requires its dev-dependencies, which were not
        // resolved above; reject that combination up front.
        if build_config.mode.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    // `cargo rustc` / `cargo rustdoc` style extra arguments apply to exactly
    // one invocation; remember which flag supplied them for error messages.
    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (&Some(ref args), _) => (Some(args.clone()), "rustc"),
        (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    // CLI parsing should have prevented multiple `-p` flags here, so this is
    // an internal invariant, not a user error.
    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut config.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    // Passing `build_config.requested_kinds` instead of
    // `explicit_host_kinds` here so that `generate_targets` can do
    // its own special handling of `CompileKind::Host`. It will
    // internally replace the host kind by the `explicit_host_kind`
    // before setting as a unit.
    let mut units = generate_targets(
        ws,
        &to_builds,
        filter,
        &build_config.requested_kinds,
        explicit_host_kind,
        build_config.mode,
        &resolve,
        &workspace_resolve,
        &resolved_features,
        &pkg_set,
        &profiles,
        interner,
    )?;

    let std_roots = if let Some(crates) = &config.cli_unstable().build_std {
        // Only build libtest if it looks like it is needed.
        let mut crates = crates.clone();
        if !crates.iter().any(|c| c == "test")
            && units
                .iter()
                .any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
        {
            // Only build libtest when libstd is built (libtest depends on libstd)
            if crates.iter().any(|c| c == "std") {
                crates.push("test".to_string());
            }
        }
        let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
        standard_lib::generate_std_roots(
            &crates,
            std_resolve,
            std_features,
            &explicit_host_kinds,
            &pkg_set,
            interner,
            &profiles,
        )?
    } else {
        Default::default()
    };

    let mut unit_graph = build_unit_dependencies(
        ws,
        &pkg_set,
        &resolve,
        &resolved_features,
        std_resolve_features.as_ref(),
        &units,
        &std_roots,
        build_config.mode,
        &target_data,
        &profiles,
        interner,
    )?;

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.mode == (CompileMode::Doc { deps: true }) {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    if build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host)
    {
        // Rebuild the unit graph, replacing the explicit host targets with
        // CompileKind::Host, merging any dependencies shared with build
        // dependencies.
        let new_graph = rebuild_unit_graph_shared(interner, unit_graph, &units, explicit_host_kind);
        // This would be nicer with destructuring assignment.
        units = new_graph.0;
        unit_graph = new_graph.1;
    }

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }
    for unit in &units {
        if unit.mode.is_doc() || unit.mode.is_doc_test() {
            let mut extra_args = local_rustdoc_args.clone();

            // Add `--document-private-items` rustdoc flag if requested or if
            // the target is a binary. Binary crates get their private items
            // documented by default.
            if rustdoc_document_private_items || unit.target.is_bin() {
                let mut args = extra_args.take().unwrap_or_default();
                args.push("--document-private-items".into());
                extra_args = Some(args);
            }

            if let Some(args) = extra_args {
                extra_compiler_args
                    .entry(unit.clone())
                    .or_default()
                    .extend(args);
            }
        }
    }

    if honor_rust_version {
        // Remove any pre-release identifiers for easier comparison
        let current_version = &target_data.rustc.version;
        let untagged_version = semver::Version::new(
            current_version.major,
            current_version.minor,
            current_version.patch,
        );

        for unit in unit_graph.keys() {
            let version = match unit.pkg.rust_version() {
                Some(v) => v,
                None => continue,
            };

            // NOTE(review): this `unwrap` assumes `rust-version` was already
            // validated as a parseable requirement when the manifest was
            // loaded -- confirm, otherwise a bad manifest value panics here.
            let req = semver::VersionReq::parse(version).unwrap();
            if req.matches(&untagged_version) {
                continue;
            }

            anyhow::bail!(
                "package `{}` cannot be built because it requires rustc {} or newer, \
                 while the currently active rustc version is {}",
                unit.pkg,
                version,
                current_version,
            );
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
    )?;

    Ok(bcx)
}
628
629 impl FilterRule {
new(targets: Vec<String>, all: bool) -> FilterRule630 pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
631 if all {
632 FilterRule::All
633 } else {
634 FilterRule::Just(targets)
635 }
636 }
637
none() -> FilterRule638 pub fn none() -> FilterRule {
639 FilterRule::Just(Vec::new())
640 }
641
matches(&self, target: &Target) -> bool642 fn matches(&self, target: &Target) -> bool {
643 match *self {
644 FilterRule::All => true,
645 FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
646 }
647 }
648
is_specific(&self) -> bool649 fn is_specific(&self) -> bool {
650 match *self {
651 FilterRule::All => true,
652 FilterRule::Just(ref targets) => !targets.is_empty(),
653 }
654 }
655
try_collect(&self) -> Option<Vec<String>>656 pub fn try_collect(&self) -> Option<Vec<String>> {
657 match *self {
658 FilterRule::All => None,
659 FilterRule::Just(ref targets) => Some(targets.clone()),
660 }
661 }
662
contains_glob_patterns(&self) -> bool663 pub(crate) fn contains_glob_patterns(&self) -> bool {
664 match self {
665 FilterRule::All => false,
666 FilterRule::Just(targets) => targets.iter().any(is_glob_pattern),
667 }
668 }
669 }
670
671 impl CompileFilter {
672 /// Construct a CompileFilter from raw command line arguments.
from_raw_arguments( lib_only: bool, bins: Vec<String>, all_bins: bool, tsts: Vec<String>, all_tsts: bool, exms: Vec<String>, all_exms: bool, bens: Vec<String>, all_bens: bool, all_targets: bool, ) -> CompileFilter673 pub fn from_raw_arguments(
674 lib_only: bool,
675 bins: Vec<String>,
676 all_bins: bool,
677 tsts: Vec<String>,
678 all_tsts: bool,
679 exms: Vec<String>,
680 all_exms: bool,
681 bens: Vec<String>,
682 all_bens: bool,
683 all_targets: bool,
684 ) -> CompileFilter {
685 if all_targets {
686 return CompileFilter::new_all_targets();
687 }
688 let rule_lib = if lib_only {
689 LibRule::True
690 } else {
691 LibRule::False
692 };
693 let rule_bins = FilterRule::new(bins, all_bins);
694 let rule_tsts = FilterRule::new(tsts, all_tsts);
695 let rule_exms = FilterRule::new(exms, all_exms);
696 let rule_bens = FilterRule::new(bens, all_bens);
697
698 CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
699 }
700
701 /// Construct a CompileFilter from underlying primitives.
new( rule_lib: LibRule, rule_bins: FilterRule, rule_tsts: FilterRule, rule_exms: FilterRule, rule_bens: FilterRule, ) -> CompileFilter702 pub fn new(
703 rule_lib: LibRule,
704 rule_bins: FilterRule,
705 rule_tsts: FilterRule,
706 rule_exms: FilterRule,
707 rule_bens: FilterRule,
708 ) -> CompileFilter {
709 if rule_lib == LibRule::True
710 || rule_bins.is_specific()
711 || rule_tsts.is_specific()
712 || rule_exms.is_specific()
713 || rule_bens.is_specific()
714 {
715 CompileFilter::Only {
716 all_targets: false,
717 lib: rule_lib,
718 bins: rule_bins,
719 examples: rule_exms,
720 benches: rule_bens,
721 tests: rule_tsts,
722 }
723 } else {
724 CompileFilter::Default {
725 required_features_filterable: true,
726 }
727 }
728 }
729
new_all_targets() -> CompileFilter730 pub fn new_all_targets() -> CompileFilter {
731 CompileFilter::Only {
732 all_targets: true,
733 lib: LibRule::Default,
734 bins: FilterRule::All,
735 examples: FilterRule::All,
736 benches: FilterRule::All,
737 tests: FilterRule::All,
738 }
739 }
740
need_dev_deps(&self, mode: CompileMode) -> bool741 pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
742 match mode {
743 CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
744 CompileMode::Check { test: true } => true,
745 CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Check { test: false } => {
746 match *self {
747 CompileFilter::Default { .. } => false,
748 CompileFilter::Only {
749 ref examples,
750 ref tests,
751 ref benches,
752 ..
753 } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
754 }
755 }
756 CompileMode::RunCustomBuild => panic!("Invalid mode"),
757 }
758 }
759
760 // this selects targets for "cargo run". for logic to select targets for
761 // other subcommands, see generate_targets and filter_default_targets
target_run(&self, target: &Target) -> bool762 pub fn target_run(&self, target: &Target) -> bool {
763 match *self {
764 CompileFilter::Default { .. } => true,
765 CompileFilter::Only {
766 ref lib,
767 ref bins,
768 ref examples,
769 ref tests,
770 ref benches,
771 ..
772 } => {
773 let rule = match *target.kind() {
774 TargetKind::Bin => bins,
775 TargetKind::Test => tests,
776 TargetKind::Bench => benches,
777 TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
778 TargetKind::Lib(..) => {
779 return match *lib {
780 LibRule::True => true,
781 LibRule::Default => true,
782 LibRule::False => false,
783 };
784 }
785 TargetKind::CustomBuild => return false,
786 };
787 rule.matches(target)
788 }
789 }
790 }
791
is_specific(&self) -> bool792 pub fn is_specific(&self) -> bool {
793 match *self {
794 CompileFilter::Default { .. } => false,
795 CompileFilter::Only { .. } => true,
796 }
797 }
798
is_all_targets(&self) -> bool799 pub fn is_all_targets(&self) -> bool {
800 matches!(
801 *self,
802 CompileFilter::Only {
803 all_targets: true,
804 ..
805 }
806 )
807 }
808
contains_glob_patterns(&self) -> bool809 pub(crate) fn contains_glob_patterns(&self) -> bool {
810 match self {
811 CompileFilter::Default { .. } => false,
812 CompileFilter::Only {
813 bins,
814 examples,
815 tests,
816 benches,
817 ..
818 } => {
819 bins.contains_glob_patterns()
820 || examples.contains_glob_patterns()
821 || tests.contains_glob_patterns()
822 || benches.contains_glob_patterns()
823 }
824 }
825 }
826 }
827
/// A proposed target.
///
/// Proposed targets are later filtered into actual `Unit`s based on whether or
/// not the target requires its features to be present.
#[derive(Debug)]
struct Proposal<'a> {
    /// The package the target belongs to.
    pkg: &'a Package,
    /// The proposed target (lib, bin, example, test, or bench).
    target: &'a Target,
    /// Indicates whether or not all required features *must* be present. If
    /// false, and the features are not available, then it will be silently
    /// skipped. Generally, targets specified by name (`--bin foo`) are
    /// required, all others can be silently skipped if features are missing.
    requires_features: bool,
    /// The compile mode this target would be built in.
    mode: CompileMode,
}
843
844 /// Generates all the base targets for the packages the user has requested to
845 /// compile. Dependencies for these targets are computed later in `unit_dependencies`.
generate_targets( ws: &Workspace<'_>, packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option<Resolve>, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, profiles: &Profiles, interner: &UnitInterner, ) -> CargoResult<Vec<Unit>>846 fn generate_targets(
847 ws: &Workspace<'_>,
848 packages: &[&Package],
849 filter: &CompileFilter,
850 requested_kinds: &[CompileKind],
851 explicit_host_kind: CompileKind,
852 mode: CompileMode,
853 resolve: &Resolve,
854 workspace_resolve: &Option<Resolve>,
855 resolved_features: &features::ResolvedFeatures,
856 package_set: &PackageSet<'_>,
857 profiles: &Profiles,
858 interner: &UnitInterner,
859 ) -> CargoResult<Vec<Unit>> {
860 let config = ws.config();
861 // Helper for creating a list of `Unit` structures
862 let new_unit =
863 |units: &mut HashSet<Unit>, pkg: &Package, target: &Target, target_mode: CompileMode| {
864 let unit_for = if target_mode.is_any_test() {
865 // NOTE: the `UnitFor` here is subtle. If you have a profile
866 // with `panic` set, the `panic` flag is cleared for
867 // tests/benchmarks and their dependencies. If this
868 // was `normal`, then the lib would get compiled three
869 // times (once with panic, once without, and once with
870 // `--test`).
871 //
872 // This would cause a problem for doc tests, which would fail
873 // because `rustdoc` would attempt to link with both libraries
874 // at the same time. Also, it's probably not important (or
875 // even desirable?) for rustdoc to link with a lib with
876 // `panic` set.
877 //
878 // As a consequence, Examples and Binaries get compiled
879 // without `panic` set. This probably isn't a bad deal.
880 //
881 // Forcing the lib to be compiled three times during `cargo
882 // test` is probably also not desirable.
883 UnitFor::new_test(config)
884 } else if target.for_host() {
885 // Proc macro / plugin should not have `panic` set.
886 UnitFor::new_compiler()
887 } else {
888 UnitFor::new_normal()
889 };
890 // Custom build units are added in `build_unit_dependencies`.
891 assert!(!target.is_custom_build());
892 let target_mode = match target_mode {
893 CompileMode::Test => {
894 if target.is_example() && !filter.is_specific() && !target.tested() {
895 // Examples are included as regular binaries to verify
896 // that they compile.
897 CompileMode::Build
898 } else {
899 CompileMode::Test
900 }
901 }
902 CompileMode::Build => match *target.kind() {
903 TargetKind::Test => CompileMode::Test,
904 TargetKind::Bench => CompileMode::Bench,
905 _ => CompileMode::Build,
906 },
907 // `CompileMode::Bench` is only used to inform `filter_default_targets`
908 // which command is being used (`cargo bench`). Afterwards, tests
909 // and benches are treated identically. Switching the mode allows
910 // de-duplication of units that are essentially identical. For
911 // example, `cargo build --all-targets --release` creates the units
912 // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
913 // and since these are the same, we want them to be de-duplicated in
914 // `unit_dependencies`.
915 CompileMode::Bench => CompileMode::Test,
916 _ => target_mode,
917 };
918
919 let is_local = pkg.package_id().source_id().is_path();
920
921 // No need to worry about build-dependencies, roots are never build dependencies.
922 let features_for = FeaturesFor::from_for_host(target.proc_macro());
923 let features = resolved_features.activated_features(pkg.package_id(), features_for);
924
925 // If `--target` has not been specified, then the unit
926 // graph is built almost like if `--target $HOST` was
927 // specified. See `rebuild_unit_graph_shared` for more on
928 // why this is done. However, if the package has its own
929 // `package.target` key, then this gets used instead of
930 // `$HOST`
931 let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() {
932 vec![k]
933 } else {
934 requested_kinds
935 .iter()
936 .map(|kind| match kind {
937 CompileKind::Host => {
938 pkg.manifest().default_kind().unwrap_or(explicit_host_kind)
939 }
940 CompileKind::Target(t) => CompileKind::Target(*t),
941 })
942 .collect()
943 };
944
945 for kind in explicit_kinds.iter() {
946 let profile = profiles.get_profile(
947 pkg.package_id(),
948 ws.is_member(pkg),
949 is_local,
950 unit_for,
951 target_mode,
952 *kind,
953 );
954 let unit = interner.intern(
955 pkg,
956 target,
957 profile,
958 kind.for_target(target),
959 target_mode,
960 features.clone(),
961 /*is_std*/ false,
962 /*dep_hash*/ 0,
963 );
964 units.insert(unit);
965 }
966 };
967
968 // Create a list of proposed targets.
969 let mut proposals: Vec<Proposal<'_>> = Vec::new();
970
971 match *filter {
972 CompileFilter::Default {
973 required_features_filterable,
974 } => {
975 for pkg in packages {
976 let default = filter_default_targets(pkg.targets(), mode);
977 proposals.extend(default.into_iter().map(|target| Proposal {
978 pkg,
979 target,
980 requires_features: !required_features_filterable,
981 mode,
982 }));
983 if mode == CompileMode::Test {
984 if let Some(t) = pkg
985 .targets()
986 .iter()
987 .find(|t| t.is_lib() && t.doctested() && t.doctestable())
988 {
989 proposals.push(Proposal {
990 pkg,
991 target: t,
992 requires_features: false,
993 mode: CompileMode::Doctest,
994 });
995 }
996 }
997 }
998 }
999 CompileFilter::Only {
1000 all_targets,
1001 ref lib,
1002 ref bins,
1003 ref examples,
1004 ref tests,
1005 ref benches,
1006 } => {
1007 if *lib != LibRule::False {
1008 let mut libs = Vec::new();
1009 for proposal in filter_targets(packages, Target::is_lib, false, mode) {
1010 let Proposal { target, pkg, .. } = proposal;
1011 if mode.is_doc_test() && !target.doctestable() {
1012 let types = target.rustc_crate_types();
1013 let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect();
1014 ws.config().shell().warn(format!(
1015 "doc tests are not supported for crate type(s) `{}` in package `{}`",
1016 types_str.join(", "),
1017 pkg.name()
1018 ))?;
1019 } else {
1020 libs.push(proposal)
1021 }
1022 }
1023 if !all_targets && libs.is_empty() && *lib == LibRule::True {
1024 let names = packages.iter().map(|pkg| pkg.name()).collect::<Vec<_>>();
1025 if names.len() == 1 {
1026 anyhow::bail!("no library targets found in package `{}`", names[0]);
1027 } else {
1028 anyhow::bail!("no library targets found in packages: {}", names.join(", "));
1029 }
1030 }
1031 proposals.extend(libs);
1032 }
1033
1034 // If `--tests` was specified, add all targets that would be
1035 // generated by `cargo test`.
1036 let test_filter = match tests {
1037 FilterRule::All => Target::tested,
1038 FilterRule::Just(_) => Target::is_test,
1039 };
1040 let test_mode = match mode {
1041 CompileMode::Build => CompileMode::Test,
1042 CompileMode::Check { .. } => CompileMode::Check { test: true },
1043 _ => mode,
1044 };
1045 // If `--benches` was specified, add all targets that would be
1046 // generated by `cargo bench`.
1047 let bench_filter = match benches {
1048 FilterRule::All => Target::benched,
1049 FilterRule::Just(_) => Target::is_bench,
1050 };
1051 let bench_mode = match mode {
1052 CompileMode::Build => CompileMode::Bench,
1053 CompileMode::Check { .. } => CompileMode::Check { test: true },
1054 _ => mode,
1055 };
1056
1057 proposals.extend(list_rule_targets(
1058 packages,
1059 bins,
1060 "bin",
1061 Target::is_bin,
1062 mode,
1063 )?);
1064 proposals.extend(list_rule_targets(
1065 packages,
1066 examples,
1067 "example",
1068 Target::is_example,
1069 mode,
1070 )?);
1071 proposals.extend(list_rule_targets(
1072 packages,
1073 tests,
1074 "test",
1075 test_filter,
1076 test_mode,
1077 )?);
1078 proposals.extend(list_rule_targets(
1079 packages,
1080 benches,
1081 "bench",
1082 bench_filter,
1083 bench_mode,
1084 )?);
1085 }
1086 }
1087
1088 // Only include targets that are libraries or have all required
1089 // features available.
1090 //
1091 // `features_map` is a map of &Package -> enabled_features
1092 // It is computed by the set of enabled features for the package plus
1093 // every enabled feature of every enabled dependency.
1094 let mut features_map = HashMap::new();
1095 // This needs to be a set to de-duplicate units. Due to the way the
1096 // targets are filtered, it is possible to have duplicate proposals for
1097 // the same thing.
1098 let mut units = HashSet::new();
1099 for Proposal {
1100 pkg,
1101 target,
1102 requires_features,
1103 mode,
1104 } in proposals
1105 {
1106 let unavailable_features = match target.required_features() {
1107 Some(rf) => {
1108 validate_required_features(
1109 workspace_resolve,
1110 target.name(),
1111 rf,
1112 pkg.summary(),
1113 &mut config.shell(),
1114 )?;
1115
1116 let features = features_map.entry(pkg).or_insert_with(|| {
1117 resolve_all_features(resolve, resolved_features, package_set, pkg.package_id())
1118 });
1119 rf.iter().filter(|f| !features.contains(*f)).collect()
1120 }
1121 None => Vec::new(),
1122 };
1123 if target.is_lib() || unavailable_features.is_empty() {
1124 new_unit(&mut units, pkg, target, mode);
1125 } else if requires_features {
1126 let required_features = target.required_features().unwrap();
1127 let quoted_required_features: Vec<String> = required_features
1128 .iter()
1129 .map(|s| format!("`{}`", s))
1130 .collect();
1131 anyhow::bail!(
1132 "target `{}` in package `{}` requires the features: {}\n\
1133 Consider enabling them by passing, e.g., `--features=\"{}\"`",
1134 target.name(),
1135 pkg.name(),
1136 quoted_required_features.join(", "),
1137 required_features.join(" ")
1138 );
1139 }
1140 // else, silently skip target.
1141 }
1142 let mut units: Vec<_> = units.into_iter().collect();
1143 unmatched_target_filters(&units, filter, &mut ws.config().shell())?;
1144
1145 // Keep the roots in a consistent order, which helps with checking test output.
1146 units.sort_unstable();
1147 Ok(units)
1148 }
1149
1150 /// Checks if the unit list is empty and the user has passed any combination of
1151 /// --tests, --examples, --benches or --bins, and we didn't match on any targets.
1152 /// We want to emit a warning to make sure the user knows that this run is a no-op,
1153 /// and their code remains unchecked despite cargo not returning any errors
unmatched_target_filters( units: &[Unit], filter: &CompileFilter, shell: &mut Shell, ) -> CargoResult<()>1154 fn unmatched_target_filters(
1155 units: &[Unit],
1156 filter: &CompileFilter,
1157 shell: &mut Shell,
1158 ) -> CargoResult<()> {
1159 if let CompileFilter::Only {
1160 all_targets,
1161 lib: _,
1162 ref bins,
1163 ref examples,
1164 ref tests,
1165 ref benches,
1166 } = *filter
1167 {
1168 if units.is_empty() {
1169 let mut filters = String::new();
1170 let mut miss_count = 0;
1171
1172 let mut append = |t: &FilterRule, s| {
1173 if let FilterRule::All = *t {
1174 miss_count += 1;
1175 filters.push_str(s);
1176 }
1177 };
1178
1179 if all_targets {
1180 filters.push_str(" `all-targets`");
1181 } else {
1182 append(bins, " `bins`,");
1183 append(tests, " `tests`,");
1184 append(examples, " `examples`,");
1185 append(benches, " `benches`,");
1186 filters.pop();
1187 }
1188
1189 return shell.warn(format!(
1190 "Target {}{} specified, but no targets matched. This is a no-op",
1191 if miss_count > 1 { "filters" } else { "filter" },
1192 filters,
1193 ));
1194 }
1195 }
1196
1197 Ok(())
1198 }
1199
/// Warns if a target's required-features references a feature that doesn't exist.
///
/// This is a warning because historically this was not validated, and it
/// would cause too much breakage to make it an error.
///
/// Note that some *syntactic* forms are hard errors, not warnings: `dep:`
/// prefixed values and weak `pkg?/feat` values bail out immediately.
fn validate_required_features(
    resolve: &Option<Resolve>,
    target_name: &str,
    required_features: &[String],
    summary: &Summary,
    shell: &mut Shell,
) -> CargoResult<()> {
    // Without a resolve graph there is nothing to check the features against.
    let resolve = match resolve {
        None => return Ok(()),
        Some(resolve) => resolve,
    };

    for feature in required_features {
        let fv = FeatureValue::new(feature.into());
        match &fv {
            // Plain feature name: warn if this package doesn't declare it.
            FeatureValue::Feature(f) => {
                if !summary.features().contains_key(f) {
                    shell.warn(format!(
                        "invalid feature `{}` in required-features of target `{}`: \
                        `{}` is not present in [features] section",
                        fv, target_name, fv
                    ))?;
                }
            }
            // `dep:name` syntax is never allowed in required-features.
            FeatureValue::Dep { .. } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    `dep:` prefixed feature values are not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Weak `pkg?/feat` syntax is never allowed in required-features.
            FeatureValue::DepFeature { weak: true, .. } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    optional dependency with `?` is not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Handling of dependent_crate/dependent_crate_feature syntax
            FeatureValue::DepFeature {
                dep_name,
                dep_feature,
                weak: false,
            } => {
                // Locate the dependency edge whose manifest name matches `dep_name`.
                match resolve
                    .deps(summary.package_id())
                    .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name))
                {
                    Some((dep_id, _deps)) => {
                        let dep_summary = resolve.summary(dep_id);
                        // Accept if the dependency declares the feature, or has
                        // an optional dependency with that name; otherwise warn.
                        if !dep_summary.features().contains_key(dep_feature)
                            && !dep_summary
                                .dependencies()
                                .iter()
                                .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional())
                        {
                            shell.warn(format!(
                                "invalid feature `{}` in required-features of target `{}`: \
                                feature `{}` does not exist in package `{}`",
                                fv, target_name, dep_feature, dep_id
                            ))?;
                        }
                    }
                    // No dependency with that name at all.
                    None => {
                        shell.warn(format!(
                            "invalid feature `{}` in required-features of target `{}`: \
                            dependency `{}` does not exist",
                            fv, target_name, dep_name
                        ))?;
                    }
                }
            }
        }
    }
    Ok(())
}
1282
1283 /// Gets all of the features enabled for a package, plus its dependencies'
1284 /// features.
1285 ///
1286 /// Dependencies are added as `dep_name/feat_name` because `required-features`
1287 /// wants to support that syntax.
resolve_all_features( resolve_with_overrides: &Resolve, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, package_id: PackageId, ) -> HashSet<String>1288 pub fn resolve_all_features(
1289 resolve_with_overrides: &Resolve,
1290 resolved_features: &features::ResolvedFeatures,
1291 package_set: &PackageSet<'_>,
1292 package_id: PackageId,
1293 ) -> HashSet<String> {
1294 let mut features: HashSet<String> = resolved_features
1295 .activated_features(package_id, FeaturesFor::NormalOrDev)
1296 .iter()
1297 .map(|s| s.to_string())
1298 .collect();
1299
1300 // Include features enabled for use by dependencies so targets can also use them with the
1301 // required-features field when deciding whether to be built or skipped.
1302 for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
1303 let is_proc_macro = package_set
1304 .get_one(dep_id)
1305 .expect("packages downloaded")
1306 .proc_macro();
1307 for dep in deps {
1308 let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
1309 for feature in resolved_features
1310 .activated_features_unverified(dep_id, features_for)
1311 .unwrap_or_default()
1312 {
1313 features.insert(format!("{}/{}", dep.name_in_toml(), feature));
1314 }
1315 }
1316 }
1317
1318 features
1319 }
1320
1321 /// Given a list of all targets for a package, filters out only the targets
1322 /// that are automatically included when the user doesn't specify any targets.
filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>1323 fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
1324 match mode {
1325 CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
1326 CompileMode::Test => targets
1327 .iter()
1328 .filter(|t| t.tested() || t.is_example())
1329 .collect(),
1330 CompileMode::Build | CompileMode::Check { .. } => targets
1331 .iter()
1332 .filter(|t| t.is_bin() || t.is_lib())
1333 .collect(),
1334 CompileMode::Doc { .. } => {
1335 // `doc` does lib and bins (bin with same name as lib is skipped).
1336 targets
1337 .iter()
1338 .filter(|t| {
1339 t.documented()
1340 && (!t.is_bin()
1341 || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
1342 })
1343 .collect()
1344 }
1345 CompileMode::Doctest | CompileMode::RunCustomBuild => panic!("Invalid mode {:?}", mode),
1346 }
1347 }
1348
1349 /// Returns a list of proposed targets based on command-line target selection flags.
list_rule_targets<'a>( packages: &[&'a Package], rule: &FilterRule, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1350 fn list_rule_targets<'a>(
1351 packages: &[&'a Package],
1352 rule: &FilterRule,
1353 target_desc: &'static str,
1354 is_expected_kind: fn(&Target) -> bool,
1355 mode: CompileMode,
1356 ) -> CargoResult<Vec<Proposal<'a>>> {
1357 let mut proposals = Vec::new();
1358 match rule {
1359 FilterRule::All => {
1360 proposals.extend(filter_targets(packages, is_expected_kind, false, mode))
1361 }
1362 FilterRule::Just(names) => {
1363 for name in names {
1364 proposals.extend(find_named_targets(
1365 packages,
1366 name,
1367 target_desc,
1368 is_expected_kind,
1369 mode,
1370 )?);
1371 }
1372 }
1373 }
1374 Ok(proposals)
1375 }
1376
1377 /// Finds the targets for a specifically named target.
find_named_targets<'a>( packages: &[&'a Package], target_name: &str, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1378 fn find_named_targets<'a>(
1379 packages: &[&'a Package],
1380 target_name: &str,
1381 target_desc: &'static str,
1382 is_expected_kind: fn(&Target) -> bool,
1383 mode: CompileMode,
1384 ) -> CargoResult<Vec<Proposal<'a>>> {
1385 let is_glob = is_glob_pattern(target_name);
1386 let proposals = if is_glob {
1387 let pattern = build_glob(target_name)?;
1388 let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name());
1389 filter_targets(packages, filter, true, mode)
1390 } else {
1391 let filter = |t: &Target| t.name() == target_name && is_expected_kind(t);
1392 filter_targets(packages, filter, true, mode)
1393 };
1394
1395 if proposals.is_empty() {
1396 let targets = packages.iter().flat_map(|pkg| {
1397 pkg.targets()
1398 .iter()
1399 .filter(|target| is_expected_kind(target))
1400 });
1401 let suggestion = closest_msg(target_name, targets, |t| t.name());
1402 anyhow::bail!(
1403 "no {} target {} `{}`{}",
1404 target_desc,
1405 if is_glob { "matches pattern" } else { "named" },
1406 target_name,
1407 suggestion
1408 );
1409 }
1410 Ok(proposals)
1411 }
1412
filter_targets<'a>( packages: &[&'a Package], predicate: impl Fn(&Target) -> bool, requires_features: bool, mode: CompileMode, ) -> Vec<Proposal<'a>>1413 fn filter_targets<'a>(
1414 packages: &[&'a Package],
1415 predicate: impl Fn(&Target) -> bool,
1416 requires_features: bool,
1417 mode: CompileMode,
1418 ) -> Vec<Proposal<'a>> {
1419 let mut proposals = Vec::new();
1420 for pkg in packages {
1421 for target in pkg.targets().iter().filter(|t| predicate(t)) {
1422 proposals.push(Proposal {
1423 pkg,
1424 target,
1425 requires_features,
1426 mode,
1427 });
1428 }
1429 }
1430 proposals
1431 }
1432
1433 /// This is used to rebuild the unit graph, sharing host dependencies if possible.
1434 ///
1435 /// This will translate any unit's `CompileKind::Target(host)` to
1436 /// `CompileKind::Host` if the kind is equal to `to_host`. This also handles
1437 /// generating the unit `dep_hash`, and merging shared units if possible.
1438 ///
1439 /// This is necessary because if normal dependencies used `CompileKind::Host`,
1440 /// there would be no way to distinguish those units from build-dependency
1441 /// units. This can cause a problem if a shared normal/build dependency needs
1442 /// to link to another dependency whose features differ based on whether or
1443 /// not it is a normal or build dependency. If both units used
1444 /// `CompileKind::Host`, then they would end up being identical, causing a
1445 /// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
1446 /// value or the other.
1447 ///
1448 /// The solution is to keep normal and build dependencies separate when
1449 /// building the unit graph, and then run this second pass which will try to
1450 /// combine shared dependencies safely. By adding a hash of the dependencies
1451 /// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
1452 /// without fear of an unwanted collision.
rebuild_unit_graph_shared( interner: &UnitInterner, unit_graph: UnitGraph, roots: &[Unit], to_host: CompileKind, ) -> (Vec<Unit>, UnitGraph)1453 fn rebuild_unit_graph_shared(
1454 interner: &UnitInterner,
1455 unit_graph: UnitGraph,
1456 roots: &[Unit],
1457 to_host: CompileKind,
1458 ) -> (Vec<Unit>, UnitGraph) {
1459 let mut result = UnitGraph::new();
1460 // Map of the old unit to the new unit, used to avoid recursing into units
1461 // that have already been computed to improve performance.
1462 let mut memo = HashMap::new();
1463 let new_roots = roots
1464 .iter()
1465 .map(|root| {
1466 traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host)
1467 })
1468 .collect();
1469 (new_roots, result)
1470 }
1471
/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    to_host: CompileKind,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    // Rebuild all dependencies first (depth-first), folding each rebuilt dep
    // into a hash that identifies this unit's dependency set.
    let mut dep_hash = StableHasher::new();
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit =
                traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host);
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    let new_dep_hash = dep_hash.finish();
    // Fold the kind to `Host` when this unit targets the host platform; see
    // `rebuild_unit_graph_shared` for why this is safe once `dep_hash` is set.
    let new_kind = if unit.kind == to_host {
        CompileKind::Host
    } else {
        unit.kind
    };
    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        unit.profile,
        new_kind,
        unit.mode,
        unit.features.clone(),
        unit.is_std,
        new_dep_hash,
    );
    // Each original unit must be rebuilt exactly once.
    assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    // Two distinct old units can fold to the same new unit; keep the first
    // dependency list inserted in that case.
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}
1522
1523 /// Build `glob::Pattern` with informative context.
build_glob(pat: &str) -> CargoResult<glob::Pattern>1524 fn build_glob(pat: &str) -> CargoResult<glob::Pattern> {
1525 glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat))
1526 }
1527
1528 /// Emits "package not found" error.
1529 ///
1530 /// > This function should be used only in package selection processes such like
1531 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_package_not_found( ws: &Workspace<'_>, opt_names: BTreeSet<&str>, opt_out: bool, ) -> CargoResult<()>1532 fn emit_package_not_found(
1533 ws: &Workspace<'_>,
1534 opt_names: BTreeSet<&str>,
1535 opt_out: bool,
1536 ) -> CargoResult<()> {
1537 if !opt_names.is_empty() {
1538 anyhow::bail!(
1539 "{}package(s) `{}` not found in workspace `{}`",
1540 if opt_out { "excluded " } else { "" },
1541 opt_names.into_iter().collect::<Vec<_>>().join(", "),
1542 ws.root().display(),
1543 )
1544 }
1545 Ok(())
1546 }
1547
1548 /// Emits "glob pattern not found" error.
1549 ///
1550 /// > This function should be used only in package selection processes such like
1551 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_pattern_not_found( ws: &Workspace<'_>, opt_patterns: Vec<(glob::Pattern, bool)>, opt_out: bool, ) -> CargoResult<()>1552 fn emit_pattern_not_found(
1553 ws: &Workspace<'_>,
1554 opt_patterns: Vec<(glob::Pattern, bool)>,
1555 opt_out: bool,
1556 ) -> CargoResult<()> {
1557 let not_matched = opt_patterns
1558 .iter()
1559 .filter(|(_, matched)| !*matched)
1560 .map(|(pat, _)| pat.as_str())
1561 .collect::<Vec<_>>();
1562 if !not_matched.is_empty() {
1563 anyhow::bail!(
1564 "{}package pattern(s) `{}` not found in workspace `{}`",
1565 if opt_out { "excluded " } else { "" },
1566 not_matched.join(", "),
1567 ws.root().display(),
1568 )
1569 }
1570 Ok(())
1571 }
1572
1573 /// Checks whether a package matches any of a list of glob patterns generated
1574 /// from `opt_patterns_and_names`.
1575 ///
1576 /// > This function should be used only in package selection processes such like
1577 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool1578 fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool {
1579 patterns.iter_mut().any(|(m, matched)| {
1580 let is_matched = m.matches(pkg.name().as_str());
1581 *matched |= is_matched;
1582 is_matched
1583 })
1584 }
1585
1586 /// Given a list opt-in or opt-out package selection strings, generates two
1587 /// collections that represent glob patterns and package names respectively.
1588 ///
1589 /// > This function should be used only in package selection processes such like
1590 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
opt_patterns_and_names( opt: &[String], ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)>1591 fn opt_patterns_and_names(
1592 opt: &[String],
1593 ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> {
1594 let mut opt_patterns = Vec::new();
1595 let mut opt_names = BTreeSet::new();
1596 for x in opt.iter() {
1597 if is_glob_pattern(x) {
1598 opt_patterns.push((build_glob(x)?, false));
1599 } else {
1600 opt_names.insert(String::as_str(x));
1601 }
1602 }
1603 Ok((opt_patterns, opt_names))
1604 }
1605
/// Removes duplicate CompileMode::Doc units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error, or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    // Removes every unit matching `cb` (roots are never removed) from the
    // graph, and returns the units that were kept.
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            log::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        // Group by (name, source, kind) so that only units of the same package
        // are compared by version.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                // NOTE(review): assumes versions are totally ordered, so
                // `partial_cmp` never returns `None` here — confirm.
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    // Depth-first walk marking every unit reachable from the roots.
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}
1726