1 //! The Cargo "compile" operation.
2 //!
3 //! This module contains the entry point for starting the compilation process
4 //! for commands like `build`, `test`, `doc`, `rustc`, etc.
5 //!
6 //! The `compile` function will do all the work to compile a workspace. A
7 //! rough outline is:
8 //!
9 //! - Resolve the dependency graph (see `ops::resolve`).
10 //! - Download any packages needed (see `PackageSet`).
11 //! - Generate a list of top-level "units" of work for the targets the user
12 //! requested on the command-line. Each `Unit` corresponds to a compiler
13 //! invocation. This is done in this module (`generate_targets`).
14 //! - Build the graph of `Unit` dependencies (see
15 //! `core::compiler::context::unit_dependencies`).
16 //! - Create a `Context` which will perform the following steps:
17 //! - Prepare the `target` directory (see `Layout`).
18 //! - Create a job queue (see `JobQueue`). The queue checks the
19 //! fingerprint of each `Unit` to determine if it should run or be
20 //! skipped.
21 //! - Execute the queue. Each leaf in the queue's dependency graph is
22 //! executed, and then removed from the graph when finished. This
23 //! repeats until the queue is empty.
24
25 use std::collections::{BTreeSet, HashMap, HashSet};
26 use std::hash::{Hash, Hasher};
27 use std::sync::Arc;
28
29 use crate::core::compiler::unit_dependencies::build_unit_dependencies;
30 use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
31 use crate::core::compiler::{standard_lib, TargetInfo};
32 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
33 use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
34 use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
35 use crate::core::profiles::{Profiles, UnitFor};
36 use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
37 use crate::core::resolver::{HasDevUnits, Resolve};
38 use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target};
39 use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace};
40 use crate::drop_println;
41 use crate::ops;
42 use crate::ops::resolve::WorkspaceResolve;
43 use crate::util::config::Config;
44 use crate::util::interning::InternedString;
45 use crate::util::restricted_names::is_glob_pattern;
46 use crate::util::{closest_msg, profile, CargoResult, StableHasher};
47
48 use anyhow::Context as _;
49
/// Contains information about how a package should be compiled.
///
/// Note on distinction between `CompileOptions` and `BuildConfig`:
/// `BuildConfig` contains values that need to be retained after
/// `BuildContext` is created. The other fields are no longer necessary. Think
/// of it as `CompileOptions` are high-level settings requested on the
/// command-line, and `BuildConfig` are low-level settings for actually
/// driving `rustc`.
#[derive(Debug)]
pub struct CompileOptions {
    /// Configuration information for a rustc build
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only)
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments,
    /// note that this only accounts for the *final* invocation of rustc
    pub target_rustc_args: Option<Vec<String>>,
    /// Extra arguments passed to all selected targets for rustdoc.
    pub local_rustdoc_args: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: bool,
}
83
84 impl<'a> CompileOptions {
new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions>85 pub fn new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions> {
86 Ok(CompileOptions {
87 build_config: BuildConfig::new(config, None, &[], mode)?,
88 cli_features: CliFeatures::new_all(false),
89 spec: ops::Packages::Packages(Vec::new()),
90 filter: CompileFilter::Default {
91 required_features_filterable: false,
92 },
93 target_rustdoc_args: None,
94 target_rustc_args: None,
95 local_rustdoc_args: None,
96 rustdoc_document_private_items: false,
97 honor_rust_version: true,
98 })
99 }
100 }
101
/// Describes which packages in the workspace the user selected to operate on.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Packages {
    /// The workspace's default members (no selection flags given).
    Default,
    /// Every workspace member (`--workspace`).
    All,
    /// Every workspace member except the listed ones (`--workspace --exclude`).
    OptOut(Vec<String>),
    /// Only the listed packages (`-p`/`--package`).
    Packages(Vec<String>),
}
109
impl Packages {
    /// Builds a `Packages` selection from the raw `--workspace` / `--exclude`
    /// / `-p` command-line flags.
    pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
        Ok(match (all, exclude.len(), package.len()) {
            (false, 0, 0) => Packages::Default,
            (false, 0, _) => Packages::Packages(package),
            // `--exclude` is only meaningful relative to the full workspace.
            (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"),
            (true, 0, _) => Packages::All,
            (true, _, _) => Packages::OptOut(exclude),
        })
    }

    /// Converts selected packages from a workspace to `PackageIdSpec`s.
    pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult<Vec<PackageIdSpec>> {
        let specs = match self {
            Packages::All => ws
                .members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
            Packages::OptOut(opt_out) => {
                // Split the opt-out list into glob patterns and literal names.
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let specs = ws
                    .members()
                    .filter(|pkg| {
                        // NOTE: `names.remove` and `match_patterns` mutate
                        // their collections as matches are found, so whatever
                        // remains afterwards is known to be unmatched and is
                        // reported below.
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .map(Package::package_id)
                    .map(PackageIdSpec::from_package_id)
                    .collect();
                // Excluding a nonexistent package is downgraded to a warning.
                let warn = |e| ws.config().shell().warn(e);
                emit_package_not_found(ws, names, true).or_else(warn)?;
                emit_pattern_not_found(ws, patterns, true).or_else(warn)?;
                specs
            }
            Packages::Packages(packages) if packages.is_empty() => {
                // An empty `-p` list falls back to the current package.
                vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, packages) = opt_patterns_and_names(opt_in)?;
                let mut specs = packages
                    .iter()
                    .map(|p| PackageIdSpec::parse(p))
                    .collect::<CargoResult<Vec<_>>>()?;
                if !patterns.is_empty() {
                    let matched_pkgs = ws
                        .members()
                        .filter(|pkg| match_patterns(pkg, &mut patterns))
                        .map(Package::package_id)
                        .map(PackageIdSpec::from_package_id);
                    specs.extend(matched_pkgs);
                }
                // Unlike the opt-out case, an opt-in pattern that matched
                // nothing is a hard error.
                emit_pattern_not_found(ws, patterns, false)?;
                specs
            }
            Packages::Default => ws
                .default_members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
        };
        if specs.is_empty() {
            if ws.is_virtual() {
                anyhow::bail!(
                    "manifest path `{}` contains no package: The manifest is virtual, \
                     and the workspace has no members.",
                    ws.root().display()
                )
            }
            anyhow::bail!("no packages to compile")
        }
        Ok(specs)
    }

    /// Gets a list of selected packages from a workspace.
    pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult<Vec<&'ws Package>> {
        let packages: Vec<_> = match self {
            Packages::Default => ws.default_members().collect(),
            Packages::All => ws.members().collect(),
            Packages::OptOut(opt_out) => {
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        // Same side-effecting match-and-remove pattern as in
                        // `to_package_id_specs`; leftovers are reported below.
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, true)?;
                emit_pattern_not_found(ws, patterns, true)?;
                packages
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, false)?;
                emit_pattern_not_found(ws, patterns, false)?;
                packages
            }
        };
        Ok(packages)
    }

    /// Returns whether or not the user needs to pass a `-p` flag to target a
    /// specific package in the workspace.
    pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool {
        match self {
            // Ambiguous only when more than one member would be selected.
            Packages::Default => ws.default_members().count() > 1,
            Packages::All => ws.members().count() > 1,
            Packages::Packages(_) => true,
            Packages::OptOut(_) => true,
        }
    }
}
227
/// Whether or not a package's library target should be built.
#[derive(Debug, PartialEq, Eq)]
pub enum LibRule {
    /// Include the library, fail if not present
    True,
    /// Include the library if present
    Default,
    /// Exclude the library
    False,
}
237
/// Selection rule for one class of targets (bins, examples, tests, or benches).
#[derive(Debug)]
pub enum FilterRule {
    /// Select every target in the class.
    All,
    /// Select only the listed target names (an empty list selects none).
    Just(Vec<String>),
}
243
/// Filter selecting which targets of the root package(s) get built.
#[derive(Debug)]
pub enum CompileFilter {
    /// No explicit target selection was requested; use the default targets.
    Default {
        /// Flag whether targets can be safely skipped when required-features are not satisfied.
        required_features_filterable: bool,
    },
    /// Explicit target selection (e.g. `--lib`, `--bin NAME`, `--all-targets`).
    Only {
        /// Whether every target class was requested at once.
        all_targets: bool,
        /// Rule for the library target.
        lib: LibRule,
        /// Rule for binary targets.
        bins: FilterRule,
        /// Rule for example targets.
        examples: FilterRule,
        /// Rule for test targets.
        tests: FilterRule,
        /// Rule for benchmark targets.
        benches: FilterRule,
    },
}
259
compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>>260 pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
261 let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
262 compile_with_exec(ws, options, &exec)
263 }
264
/// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build
/// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through.
pub fn compile_with_exec<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    // Surface any workspace-level warnings before the build proper starts.
    ws.emit_warnings()?;
    compile_ws(ws, options, exec)
}
275
/// Compiles the workspace: the shared implementation behind `compile` and
/// `compile_with_exec` (which additionally emits workspace warnings first).
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let bcx = create_bcx(ws, options, &interner)?;
    // If unit-graph output was requested, emit the serialized graph and skip
    // the actual compilation entirely.
    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?;
        return Compilation::new(&bcx);
    }
    let _p = profile::start("compiling");
    let cx = Context::new(&bcx)?;
    cx.compile(exec)
}
291
/// Runs `rustc --print <print_opt_value>` once per requested target kind,
/// forwarding the target-specific rustflags and any `--` rustc args so the
/// output matches what an actual build would see.
pub fn print<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    print_opt_value: &str,
) -> CargoResult<()> {
    let CompileOptions {
        ref build_config,
        ref target_rustc_args,
        ..
    } = *options;
    let config = ws.config();
    let rustc = config.load_global_rustc(Some(ws))?;
    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
        // Separate output for successive kinds with a blank line.
        if index != 0 {
            drop_println!(config);
        }
        let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?;
        let mut process = rustc.process();
        process.args(&target_info.rustflags);
        if let Some(args) = target_rustc_args {
            process.args(args);
        }
        // Explicit `--target` only when a non-host kind was requested.
        if let CompileKind::Target(t) = kind {
            process.arg("--target").arg(t.short_name());
        }
        process.arg("--print").arg(print_opt_value);
        process.exec()?;
    }
    Ok(())
}
322
/// Performs all the setup work for a build and returns the resulting
/// `BuildContext`: validates options, resolves dependencies and features,
/// generates the root units, and builds the full unit dependency graph.
pub fn create_bcx<'a, 'cfg>(
    ws: &'a Workspace<'cfg>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'cfg>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref local_rustdoc_args,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let config = ws.config();

    // Perform some pre-flight validation.
    match build_config.mode {
        CompileMode::Test
        | CompileMode::Build
        | CompileMode::Check { .. }
        | CompileMode::Bench
        | CompileMode::RunCustomBuild => {
            // Catch the common typo for RUSTFLAGS.
            if std::env::var("RUST_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        CompileMode::Doc { .. } | CompileMode::Doctest => {
            // Same typo check, for the rustdoc variant.
            if std::env::var("RUSTDOC_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    config.validate_term_config()?;

    let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    // Dev-dependencies only enter the resolve when the selected mode/filter
    // can actually need them.
    let has_dev_units = if filter.need_dev_deps(build_config.mode) {
        HasDevUnits::Yes
    } else {
        HasDevUnits::No
    };
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        resolved_features,
    } = resolve;

    // With `-Zbuild-std`, additionally resolve the standard library and merge
    // its packages into `pkg_set`.
    let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
        if build_config.build_plan {
            config
                .shell()
                .warn("-Zbuild-std does not currently fully support --build-plan")?;
        }
        if build_config.requested_kinds[0].is_host() {
            // TODO: This should eventually be fixed. Unfortunately it is not
            // easy to get the host triple in BuildConfig. Consider changing
            // requested_target to an enum, or some other approach.
            anyhow::bail!("-Zbuild-std requires --target");
        }
        let (std_package_set, std_resolve, std_features) =
            standard_lib::resolve_std(ws, &target_data, &build_config.requested_kinds, crates)?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(config);

        // Testing a non-member requires its dev-dependencies, which only
        // members have available, so reject that combination up front.
        if build_config.mode.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    // At most one of the two trailing-args options can be set; remember which
    // command it came from for error messages.
    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (&Some(ref args), _) => (Some(args.clone()), "rustc"),
        (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        // The CLI layer is expected to prevent this combination, hence a
        // panic rather than a user-facing error.
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut config.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    // Passing `build_config.requested_kinds` instead of
    // `explicit_host_kinds` here so that `generate_targets` can do
    // its own special handling of `CompileKind::Host`. It will
    // internally replace the host kind by the `explicit_host_kind`
    // before setting as a unit.
    let mut units = generate_targets(
        ws,
        &to_builds,
        filter,
        &build_config.requested_kinds,
        explicit_host_kind,
        build_config.mode,
        &resolve,
        &workspace_resolve,
        &resolved_features,
        &pkg_set,
        &profiles,
        interner,
    )?;

    let std_roots = if let Some(crates) = &config.cli_unstable().build_std {
        // Only build libtest if it looks like it is needed.
        let mut crates = crates.clone();
        if !crates.iter().any(|c| c == "test")
            && units
                .iter()
                .any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
        {
            // Only build libtest when libstd is built (libtest depends on libstd)
            if crates.iter().any(|c| c == "std") {
                crates.push("test".to_string());
            }
        }
        let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
        standard_lib::generate_std_roots(
            &crates,
            std_resolve,
            std_features,
            &explicit_host_kinds,
            &pkg_set,
            interner,
            &profiles,
        )?
    } else {
        Default::default()
    };

    let mut unit_graph = build_unit_dependencies(
        ws,
        &pkg_set,
        &resolve,
        &resolved_features,
        std_resolve_features.as_ref(),
        &units,
        &std_roots,
        build_config.mode,
        &target_data,
        &profiles,
        interner,
    )?;

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.mode == (CompileMode::Doc { deps: true }) {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    if build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host)
    {
        // Rebuild the unit graph, replacing the explicit host targets with
        // CompileKind::Host, merging any dependencies shared with build
        // dependencies.
        let new_graph = rebuild_unit_graph_shared(interner, unit_graph, &units, explicit_host_kind);
        // This would be nicer with destructuring assignment.
        units = new_graph.0;
        unit_graph = new_graph.1;
    }

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        // Trailing `--` args are only meaningful for exactly one root unit.
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }
    for unit in &units {
        if unit.mode.is_doc() || unit.mode.is_doc_test() {
            let mut extra_args = local_rustdoc_args.clone();

            // Add `--document-private-items` rustdoc flag if requested or if
            // the target is a binary. Binary crates get their private items
            // documented by default.
            if rustdoc_document_private_items || unit.target.is_bin() {
                let mut args = extra_args.take().unwrap_or_else(|| vec![]);
                args.push("--document-private-items".into());
                extra_args = Some(args);
            }

            if let Some(args) = extra_args {
                extra_compiler_args
                    .entry(unit.clone())
                    .or_default()
                    .extend(args);
            }
        }
    }

    if honor_rust_version {
        // Remove any pre-release identifiers for easier comparison
        let current_version = &target_data.rustc.version;
        let untagged_version = semver::Version::new(
            current_version.major,
            current_version.minor,
            current_version.patch,
        );

        for unit in unit_graph.keys() {
            let version = match unit.pkg.rust_version() {
                Some(v) => v,
                None => continue,
            };

            // NOTE(review): this `unwrap` assumes `rust-version` was already
            // validated as a parseable requirement when the manifest was
            // read — confirm before relying on it.
            let req = semver::VersionReq::parse(version).unwrap();
            if req.matches(&untagged_version) {
                continue;
            }

            anyhow::bail!(
                "package `{}` cannot be built because it requires rustc {} or newer, \
                 while the currently active rustc version is {}",
                unit.pkg,
                version,
                current_version,
            );
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
    )?;

    Ok(bcx)
}
628
629 impl FilterRule {
new(targets: Vec<String>, all: bool) -> FilterRule630 pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
631 if all {
632 FilterRule::All
633 } else {
634 FilterRule::Just(targets)
635 }
636 }
637
none() -> FilterRule638 pub fn none() -> FilterRule {
639 FilterRule::Just(Vec::new())
640 }
641
matches(&self, target: &Target) -> bool642 fn matches(&self, target: &Target) -> bool {
643 match *self {
644 FilterRule::All => true,
645 FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
646 }
647 }
648
is_specific(&self) -> bool649 fn is_specific(&self) -> bool {
650 match *self {
651 FilterRule::All => true,
652 FilterRule::Just(ref targets) => !targets.is_empty(),
653 }
654 }
655
try_collect(&self) -> Option<Vec<String>>656 pub fn try_collect(&self) -> Option<Vec<String>> {
657 match *self {
658 FilterRule::All => None,
659 FilterRule::Just(ref targets) => Some(targets.clone()),
660 }
661 }
662
contains_glob_patterns(&self) -> bool663 pub(crate) fn contains_glob_patterns(&self) -> bool {
664 match self {
665 FilterRule::All => false,
666 FilterRule::Just(targets) => targets.iter().any(is_glob_pattern),
667 }
668 }
669 }
670
671 impl CompileFilter {
672 /// Construct a CompileFilter from raw command line arguments.
from_raw_arguments( lib_only: bool, bins: Vec<String>, all_bins: bool, tsts: Vec<String>, all_tsts: bool, exms: Vec<String>, all_exms: bool, bens: Vec<String>, all_bens: bool, all_targets: bool, ) -> CompileFilter673 pub fn from_raw_arguments(
674 lib_only: bool,
675 bins: Vec<String>,
676 all_bins: bool,
677 tsts: Vec<String>,
678 all_tsts: bool,
679 exms: Vec<String>,
680 all_exms: bool,
681 bens: Vec<String>,
682 all_bens: bool,
683 all_targets: bool,
684 ) -> CompileFilter {
685 if all_targets {
686 return CompileFilter::new_all_targets();
687 }
688 let rule_lib = if lib_only {
689 LibRule::True
690 } else {
691 LibRule::False
692 };
693 let rule_bins = FilterRule::new(bins, all_bins);
694 let rule_tsts = FilterRule::new(tsts, all_tsts);
695 let rule_exms = FilterRule::new(exms, all_exms);
696 let rule_bens = FilterRule::new(bens, all_bens);
697
698 CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
699 }
700
701 /// Construct a CompileFilter from underlying primitives.
new( rule_lib: LibRule, rule_bins: FilterRule, rule_tsts: FilterRule, rule_exms: FilterRule, rule_bens: FilterRule, ) -> CompileFilter702 pub fn new(
703 rule_lib: LibRule,
704 rule_bins: FilterRule,
705 rule_tsts: FilterRule,
706 rule_exms: FilterRule,
707 rule_bens: FilterRule,
708 ) -> CompileFilter {
709 if rule_lib == LibRule::True
710 || rule_bins.is_specific()
711 || rule_tsts.is_specific()
712 || rule_exms.is_specific()
713 || rule_bens.is_specific()
714 {
715 CompileFilter::Only {
716 all_targets: false,
717 lib: rule_lib,
718 bins: rule_bins,
719 examples: rule_exms,
720 benches: rule_bens,
721 tests: rule_tsts,
722 }
723 } else {
724 CompileFilter::Default {
725 required_features_filterable: true,
726 }
727 }
728 }
729
new_all_targets() -> CompileFilter730 pub fn new_all_targets() -> CompileFilter {
731 CompileFilter::Only {
732 all_targets: true,
733 lib: LibRule::Default,
734 bins: FilterRule::All,
735 examples: FilterRule::All,
736 benches: FilterRule::All,
737 tests: FilterRule::All,
738 }
739 }
740
need_dev_deps(&self, mode: CompileMode) -> bool741 pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
742 match mode {
743 CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
744 CompileMode::Check { test: true } => true,
745 CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Check { test: false } => {
746 match *self {
747 CompileFilter::Default { .. } => false,
748 CompileFilter::Only {
749 ref examples,
750 ref tests,
751 ref benches,
752 ..
753 } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
754 }
755 }
756 CompileMode::RunCustomBuild => panic!("Invalid mode"),
757 }
758 }
759
760 // this selects targets for "cargo run". for logic to select targets for
761 // other subcommands, see generate_targets and filter_default_targets
target_run(&self, target: &Target) -> bool762 pub fn target_run(&self, target: &Target) -> bool {
763 match *self {
764 CompileFilter::Default { .. } => true,
765 CompileFilter::Only {
766 ref lib,
767 ref bins,
768 ref examples,
769 ref tests,
770 ref benches,
771 ..
772 } => {
773 let rule = match *target.kind() {
774 TargetKind::Bin => bins,
775 TargetKind::Test => tests,
776 TargetKind::Bench => benches,
777 TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
778 TargetKind::Lib(..) => {
779 return match *lib {
780 LibRule::True => true,
781 LibRule::Default => true,
782 LibRule::False => false,
783 };
784 }
785 TargetKind::CustomBuild => return false,
786 };
787 rule.matches(target)
788 }
789 }
790 }
791
is_specific(&self) -> bool792 pub fn is_specific(&self) -> bool {
793 match *self {
794 CompileFilter::Default { .. } => false,
795 CompileFilter::Only { .. } => true,
796 }
797 }
798
is_all_targets(&self) -> bool799 pub fn is_all_targets(&self) -> bool {
800 matches!(
801 *self,
802 CompileFilter::Only {
803 all_targets: true,
804 ..
805 }
806 )
807 }
808
contains_glob_patterns(&self) -> bool809 pub(crate) fn contains_glob_patterns(&self) -> bool {
810 match self {
811 CompileFilter::Default { .. } => false,
812 CompileFilter::Only {
813 bins,
814 examples,
815 tests,
816 benches,
817 ..
818 } => {
819 bins.contains_glob_patterns()
820 || examples.contains_glob_patterns()
821 || tests.contains_glob_patterns()
822 || benches.contains_glob_patterns()
823 }
824 }
825 }
826 }
827
/// A proposed target.
///
/// Proposed targets are later filtered into actual `Unit`s based on whether or
/// not the target requires its features to be present.
#[derive(Debug)]
struct Proposal<'a> {
    /// The package the proposed target belongs to.
    pkg: &'a Package,
    /// The target being proposed for compilation.
    target: &'a Target,
    /// Indicates whether or not all required features *must* be present. If
    /// false, and the features are not available, then it will be silently
    /// skipped. Generally, targets specified by name (`--bin foo`) are
    /// required, all others can be silently skipped if features are missing.
    requires_features: bool,
    /// The compile mode to use for this target.
    mode: CompileMode,
}
843
844 /// Generates all the base targets for the packages the user has requested to
845 /// compile. Dependencies for these targets are computed later in `unit_dependencies`.
generate_targets( ws: &Workspace<'_>, packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option<Resolve>, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, profiles: &Profiles, interner: &UnitInterner, ) -> CargoResult<Vec<Unit>>846 fn generate_targets(
847 ws: &Workspace<'_>,
848 packages: &[&Package],
849 filter: &CompileFilter,
850 requested_kinds: &[CompileKind],
851 explicit_host_kind: CompileKind,
852 mode: CompileMode,
853 resolve: &Resolve,
854 workspace_resolve: &Option<Resolve>,
855 resolved_features: &features::ResolvedFeatures,
856 package_set: &PackageSet<'_>,
857 profiles: &Profiles,
858 interner: &UnitInterner,
859 ) -> CargoResult<Vec<Unit>> {
860 let config = ws.config();
861 // Helper for creating a list of `Unit` structures
862 let new_unit =
863 |units: &mut HashSet<Unit>, pkg: &Package, target: &Target, target_mode: CompileMode| {
864 let unit_for = if target_mode.is_any_test() {
865 // NOTE: the `UnitFor` here is subtle. If you have a profile
866 // with `panic` set, the `panic` flag is cleared for
867 // tests/benchmarks and their dependencies. If this
868 // was `normal`, then the lib would get compiled three
869 // times (once with panic, once without, and once with
870 // `--test`).
871 //
872 // This would cause a problem for doc tests, which would fail
873 // because `rustdoc` would attempt to link with both libraries
874 // at the same time. Also, it's probably not important (or
875 // even desirable?) for rustdoc to link with a lib with
876 // `panic` set.
877 //
878 // As a consequence, Examples and Binaries get compiled
879 // without `panic` set. This probably isn't a bad deal.
880 //
881 // Forcing the lib to be compiled three times during `cargo
882 // test` is probably also not desirable.
883 UnitFor::new_test(config)
884 } else if target.for_host() {
885 // Proc macro / plugin should not have `panic` set.
886 UnitFor::new_compiler()
887 } else {
888 UnitFor::new_normal()
889 };
890 // Custom build units are added in `build_unit_dependencies`.
891 assert!(!target.is_custom_build());
892 let target_mode = match target_mode {
893 CompileMode::Test => {
894 if target.is_example() && !filter.is_specific() && !target.tested() {
895 // Examples are included as regular binaries to verify
896 // that they compile.
897 CompileMode::Build
898 } else {
899 CompileMode::Test
900 }
901 }
902 CompileMode::Build => match *target.kind() {
903 TargetKind::Test => CompileMode::Test,
904 TargetKind::Bench => CompileMode::Bench,
905 _ => CompileMode::Build,
906 },
907 // `CompileMode::Bench` is only used to inform `filter_default_targets`
908 // which command is being used (`cargo bench`). Afterwards, tests
909 // and benches are treated identically. Switching the mode allows
910 // de-duplication of units that are essentially identical. For
911 // example, `cargo build --all-targets --release` creates the units
912 // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
913 // and since these are the same, we want them to be de-duplicated in
914 // `unit_dependencies`.
915 CompileMode::Bench => CompileMode::Test,
916 _ => target_mode,
917 };
918
919 let is_local = pkg.package_id().source_id().is_path();
920
921 // No need to worry about build-dependencies, roots are never build dependencies.
922 let features_for = FeaturesFor::from_for_host(target.proc_macro());
923 let features = resolved_features.activated_features(pkg.package_id(), features_for);
924
925 // If `--target` has not been specified, then the unit
926 // graph is built almost like if `--target $HOST` was
927 // specified. See `rebuild_unit_graph_shared` for more on
928 // why this is done. However, if the package has its own
929 // `package.target` key, then this gets used instead of
930 // `$HOST`
931 let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() {
932 vec![k]
933 } else {
934 requested_kinds
935 .iter()
936 .map(|kind| match kind {
937 CompileKind::Host => {
938 pkg.manifest().default_kind().unwrap_or(explicit_host_kind)
939 }
940 CompileKind::Target(t) => CompileKind::Target(*t),
941 })
942 .collect()
943 };
944
945 for kind in explicit_kinds.iter() {
946 let profile = profiles.get_profile(
947 pkg.package_id(),
948 ws.is_member(pkg),
949 is_local,
950 unit_for,
951 target_mode,
952 *kind,
953 );
954 let unit = interner.intern(
955 pkg,
956 target,
957 profile,
958 kind.for_target(target),
959 target_mode,
960 features.clone(),
961 /*is_std*/ false,
962 /*dep_hash*/ 0,
963 );
964 units.insert(unit);
965 }
966 };
967
968 // Create a list of proposed targets.
969 let mut proposals: Vec<Proposal<'_>> = Vec::new();
970
971 match *filter {
972 CompileFilter::Default {
973 required_features_filterable,
974 } => {
975 for pkg in packages {
976 let default = filter_default_targets(pkg.targets(), mode);
977 proposals.extend(default.into_iter().map(|target| Proposal {
978 pkg,
979 target,
980 requires_features: !required_features_filterable,
981 mode,
982 }));
983 if mode == CompileMode::Test {
984 if let Some(t) = pkg
985 .targets()
986 .iter()
987 .find(|t| t.is_lib() && t.doctested() && t.doctestable())
988 {
989 proposals.push(Proposal {
990 pkg,
991 target: t,
992 requires_features: false,
993 mode: CompileMode::Doctest,
994 });
995 }
996 }
997 }
998 }
999 CompileFilter::Only {
1000 all_targets,
1001 ref lib,
1002 ref bins,
1003 ref examples,
1004 ref tests,
1005 ref benches,
1006 } => {
1007 if *lib != LibRule::False {
1008 let mut libs = Vec::new();
1009 for proposal in filter_targets(packages, Target::is_lib, false, mode) {
1010 let Proposal { target, pkg, .. } = proposal;
1011 if mode.is_doc_test() && !target.doctestable() {
1012 let types = target.rustc_crate_types();
1013 let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect();
1014 ws.config().shell().warn(format!(
1015 "doc tests are not supported for crate type(s) `{}` in package `{}`",
1016 types_str.join(", "),
1017 pkg.name()
1018 ))?;
1019 } else {
1020 libs.push(proposal)
1021 }
1022 }
1023 if !all_targets && libs.is_empty() && *lib == LibRule::True {
1024 let names = packages.iter().map(|pkg| pkg.name()).collect::<Vec<_>>();
1025 if names.len() == 1 {
1026 anyhow::bail!("no library targets found in package `{}`", names[0]);
1027 } else {
1028 anyhow::bail!("no library targets found in packages: {}", names.join(", "));
1029 }
1030 }
1031 proposals.extend(libs);
1032 }
1033
1034 // If `--tests` was specified, add all targets that would be
1035 // generated by `cargo test`.
1036 let test_filter = match tests {
1037 FilterRule::All => Target::tested,
1038 FilterRule::Just(_) => Target::is_test,
1039 };
1040 let test_mode = match mode {
1041 CompileMode::Build => CompileMode::Test,
1042 CompileMode::Check { .. } => CompileMode::Check { test: true },
1043 _ => mode,
1044 };
1045 // If `--benches` was specified, add all targets that would be
1046 // generated by `cargo bench`.
1047 let bench_filter = match benches {
1048 FilterRule::All => Target::benched,
1049 FilterRule::Just(_) => Target::is_bench,
1050 };
1051 let bench_mode = match mode {
1052 CompileMode::Build => CompileMode::Bench,
1053 CompileMode::Check { .. } => CompileMode::Check { test: true },
1054 _ => mode,
1055 };
1056
1057 proposals.extend(list_rule_targets(
1058 packages,
1059 bins,
1060 "bin",
1061 Target::is_bin,
1062 mode,
1063 )?);
1064 proposals.extend(list_rule_targets(
1065 packages,
1066 examples,
1067 "example",
1068 Target::is_example,
1069 mode,
1070 )?);
1071 proposals.extend(list_rule_targets(
1072 packages,
1073 tests,
1074 "test",
1075 test_filter,
1076 test_mode,
1077 )?);
1078 proposals.extend(list_rule_targets(
1079 packages,
1080 benches,
1081 "bench",
1082 bench_filter,
1083 bench_mode,
1084 )?);
1085 }
1086 }
1087
1088 // Only include targets that are libraries or have all required
1089 // features available.
1090 //
1091 // `features_map` is a map of &Package -> enabled_features
1092 // It is computed by the set of enabled features for the package plus
1093 // every enabled feature of every enabled dependency.
1094 let mut features_map = HashMap::new();
1095 // This needs to be a set to de-duplicate units. Due to the way the
1096 // targets are filtered, it is possible to have duplicate proposals for
1097 // the same thing.
1098 let mut units = HashSet::new();
1099 for Proposal {
1100 pkg,
1101 target,
1102 requires_features,
1103 mode,
1104 } in proposals
1105 {
1106 let unavailable_features = match target.required_features() {
1107 Some(rf) => {
1108 validate_required_features(
1109 workspace_resolve,
1110 target.name(),
1111 rf,
1112 pkg.summary(),
1113 &mut config.shell(),
1114 )?;
1115
1116 let features = features_map.entry(pkg).or_insert_with(|| {
1117 resolve_all_features(resolve, resolved_features, package_set, pkg.package_id())
1118 });
1119 rf.iter().filter(|f| !features.contains(*f)).collect()
1120 }
1121 None => Vec::new(),
1122 };
1123 if target.is_lib() || unavailable_features.is_empty() {
1124 new_unit(&mut units, pkg, target, mode);
1125 } else if requires_features {
1126 let required_features = target.required_features().unwrap();
1127 let quoted_required_features: Vec<String> = required_features
1128 .iter()
1129 .map(|s| format!("`{}`", s))
1130 .collect();
1131 anyhow::bail!(
1132 "target `{}` in package `{}` requires the features: {}\n\
1133 Consider enabling them by passing, e.g., `--features=\"{}\"`",
1134 target.name(),
1135 pkg.name(),
1136 quoted_required_features.join(", "),
1137 required_features.join(" ")
1138 );
1139 }
1140 // else, silently skip target.
1141 }
1142 let mut units: Vec<_> = units.into_iter().collect();
1143 // Keep the roots in a consistent order, which helps with checking test output.
1144 units.sort_unstable();
1145 Ok(units)
1146 }
1147
/// Warns if a target's required-features references a feature that doesn't exist.
///
/// This is a warning because historically this was not validated, and it
/// would cause too much breakage to make it an error.
///
/// `resolve` is the workspace resolve (`None` means there is nothing to
/// validate against); `target_name` is only used for error/warning text;
/// `required_features` are the raw strings from the target's
/// `required-features` manifest key; `summary` is the summary of the package
/// that owns the target. Warnings go through `shell`; `dep:`-prefixed and
/// weak (`?`) syntax are hard errors instead.
fn validate_required_features(
    resolve: &Option<Resolve>,
    target_name: &str,
    required_features: &[String],
    summary: &Summary,
    shell: &mut Shell,
) -> CargoResult<()> {
    // Without a resolve there is no feature information to check against.
    let resolve = match resolve {
        None => return Ok(()),
        Some(resolve) => resolve,
    };

    for feature in required_features {
        let fv = FeatureValue::new(feature.into());
        match &fv {
            // Plain feature name: warn (not error) if it isn't declared in
            // this package's `[features]` table.
            FeatureValue::Feature(f) => {
                if !summary.features().contains_key(f) {
                    shell.warn(format!(
                        "invalid feature `{}` in required-features of target `{}`: \
                        `{}` is not present in [features] section",
                        fv, target_name, fv
                    ))?;
                }
            }
            // `dep:name` and `dep:name/feat` syntax is rejected outright.
            FeatureValue::Dep { .. }
            | FeatureValue::DepFeature {
                dep_prefix: true, ..
            } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    `dep:` prefixed feature values are not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Weak dependency features (`name?/feat`) are also rejected.
            FeatureValue::DepFeature { weak: true, .. } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    optional dependency with `?` is not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Handling of dependent_crate/dependent_crate_feature syntax
            FeatureValue::DepFeature {
                dep_name,
                dep_feature,
                dep_prefix: false,
                weak: false,
            } => {
                // Find the resolved dependency whose name-in-toml matches.
                match resolve
                    .deps(summary.package_id())
                    .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name))
                {
                    Some((dep_id, _deps)) => {
                        let dep_summary = resolve.summary(dep_id);
                        // The referenced feature must either be an explicit
                        // feature of the dependency or one of its optional
                        // dependencies (which implicitly define a feature).
                        if !dep_summary.features().contains_key(dep_feature)
                            && !dep_summary
                                .dependencies()
                                .iter()
                                .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional())
                        {
                            shell.warn(format!(
                                "invalid feature `{}` in required-features of target `{}`: \
                                feature `{}` does not exist in package `{}`",
                                fv, target_name, dep_feature, dep_id
                            ))?;
                        }
                    }
                    // The named dependency isn't in the resolve graph at all.
                    None => {
                        shell.warn(format!(
                            "invalid feature `{}` in required-features of target `{}`: \
                            dependency `{}` does not exist",
                            fv, target_name, dep_name
                        ))?;
                    }
                }
            }
        }
    }
    Ok(())
}
1234
1235 /// Gets all of the features enabled for a package, plus its dependencies'
1236 /// features.
1237 ///
1238 /// Dependencies are added as `dep_name/feat_name` because `required-features`
1239 /// wants to support that syntax.
resolve_all_features( resolve_with_overrides: &Resolve, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, package_id: PackageId, ) -> HashSet<String>1240 pub fn resolve_all_features(
1241 resolve_with_overrides: &Resolve,
1242 resolved_features: &features::ResolvedFeatures,
1243 package_set: &PackageSet<'_>,
1244 package_id: PackageId,
1245 ) -> HashSet<String> {
1246 let mut features: HashSet<String> = resolved_features
1247 .activated_features(package_id, FeaturesFor::NormalOrDev)
1248 .iter()
1249 .map(|s| s.to_string())
1250 .collect();
1251
1252 // Include features enabled for use by dependencies so targets can also use them with the
1253 // required-features field when deciding whether to be built or skipped.
1254 for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
1255 let is_proc_macro = package_set
1256 .get_one(dep_id)
1257 .expect("packages downloaded")
1258 .proc_macro();
1259 for dep in deps {
1260 let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
1261 for feature in resolved_features
1262 .activated_features_unverified(dep_id, features_for)
1263 .unwrap_or_default()
1264 {
1265 features.insert(format!("{}/{}", dep.name_in_toml(), feature));
1266 }
1267 }
1268 }
1269
1270 features
1271 }
1272
1273 /// Given a list of all targets for a package, filters out only the targets
1274 /// that are automatically included when the user doesn't specify any targets.
filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>1275 fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
1276 match mode {
1277 CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
1278 CompileMode::Test => targets
1279 .iter()
1280 .filter(|t| t.tested() || t.is_example())
1281 .collect(),
1282 CompileMode::Build | CompileMode::Check { .. } => targets
1283 .iter()
1284 .filter(|t| t.is_bin() || t.is_lib())
1285 .collect(),
1286 CompileMode::Doc { .. } => {
1287 // `doc` does lib and bins (bin with same name as lib is skipped).
1288 targets
1289 .iter()
1290 .filter(|t| {
1291 t.documented()
1292 && (!t.is_bin()
1293 || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
1294 })
1295 .collect()
1296 }
1297 CompileMode::Doctest | CompileMode::RunCustomBuild => panic!("Invalid mode {:?}", mode),
1298 }
1299 }
1300
1301 /// Returns a list of proposed targets based on command-line target selection flags.
list_rule_targets<'a>( packages: &[&'a Package], rule: &FilterRule, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1302 fn list_rule_targets<'a>(
1303 packages: &[&'a Package],
1304 rule: &FilterRule,
1305 target_desc: &'static str,
1306 is_expected_kind: fn(&Target) -> bool,
1307 mode: CompileMode,
1308 ) -> CargoResult<Vec<Proposal<'a>>> {
1309 let mut proposals = Vec::new();
1310 match rule {
1311 FilterRule::All => {
1312 proposals.extend(filter_targets(packages, is_expected_kind, false, mode))
1313 }
1314 FilterRule::Just(names) => {
1315 for name in names {
1316 proposals.extend(find_named_targets(
1317 packages,
1318 name,
1319 target_desc,
1320 is_expected_kind,
1321 mode,
1322 )?);
1323 }
1324 }
1325 }
1326 Ok(proposals)
1327 }
1328
1329 /// Finds the targets for a specifically named target.
find_named_targets<'a>( packages: &[&'a Package], target_name: &str, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1330 fn find_named_targets<'a>(
1331 packages: &[&'a Package],
1332 target_name: &str,
1333 target_desc: &'static str,
1334 is_expected_kind: fn(&Target) -> bool,
1335 mode: CompileMode,
1336 ) -> CargoResult<Vec<Proposal<'a>>> {
1337 let is_glob = is_glob_pattern(target_name);
1338 let proposals = if is_glob {
1339 let pattern = build_glob(target_name)?;
1340 let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name());
1341 filter_targets(packages, filter, true, mode)
1342 } else {
1343 let filter = |t: &Target| t.name() == target_name && is_expected_kind(t);
1344 filter_targets(packages, filter, true, mode)
1345 };
1346
1347 if proposals.is_empty() {
1348 let targets = packages.iter().flat_map(|pkg| {
1349 pkg.targets()
1350 .iter()
1351 .filter(|target| is_expected_kind(target))
1352 });
1353 let suggestion = closest_msg(target_name, targets, |t| t.name());
1354 anyhow::bail!(
1355 "no {} target {} `{}`{}",
1356 target_desc,
1357 if is_glob { "matches pattern" } else { "named" },
1358 target_name,
1359 suggestion
1360 );
1361 }
1362 Ok(proposals)
1363 }
1364
filter_targets<'a>( packages: &[&'a Package], predicate: impl Fn(&Target) -> bool, requires_features: bool, mode: CompileMode, ) -> Vec<Proposal<'a>>1365 fn filter_targets<'a>(
1366 packages: &[&'a Package],
1367 predicate: impl Fn(&Target) -> bool,
1368 requires_features: bool,
1369 mode: CompileMode,
1370 ) -> Vec<Proposal<'a>> {
1371 let mut proposals = Vec::new();
1372 for pkg in packages {
1373 for target in pkg.targets().iter().filter(|t| predicate(t)) {
1374 proposals.push(Proposal {
1375 pkg,
1376 target,
1377 requires_features,
1378 mode,
1379 });
1380 }
1381 }
1382 proposals
1383 }
1384
1385 /// This is used to rebuild the unit graph, sharing host dependencies if possible.
1386 ///
1387 /// This will translate any unit's `CompileKind::Target(host)` to
1388 /// `CompileKind::Host` if the kind is equal to `to_host`. This also handles
1389 /// generating the unit `dep_hash`, and merging shared units if possible.
1390 ///
1391 /// This is necessary because if normal dependencies used `CompileKind::Host`,
1392 /// there would be no way to distinguish those units from build-dependency
1393 /// units. This can cause a problem if a shared normal/build dependency needs
1394 /// to link to another dependency whose features differ based on whether or
1395 /// not it is a normal or build dependency. If both units used
1396 /// `CompileKind::Host`, then they would end up being identical, causing a
1397 /// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
1398 /// value or the other.
1399 ///
1400 /// The solution is to keep normal and build dependencies separate when
1401 /// building the unit graph, and then run this second pass which will try to
1402 /// combine shared dependencies safely. By adding a hash of the dependencies
1403 /// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
1404 /// without fear of an unwanted collision.
rebuild_unit_graph_shared( interner: &UnitInterner, unit_graph: UnitGraph, roots: &[Unit], to_host: CompileKind, ) -> (Vec<Unit>, UnitGraph)1405 fn rebuild_unit_graph_shared(
1406 interner: &UnitInterner,
1407 unit_graph: UnitGraph,
1408 roots: &[Unit],
1409 to_host: CompileKind,
1410 ) -> (Vec<Unit>, UnitGraph) {
1411 let mut result = UnitGraph::new();
1412 // Map of the old unit to the new unit, used to avoid recursing into units
1413 // that have already been computed to improve performance.
1414 let mut memo = HashMap::new();
1415 let new_roots = roots
1416 .iter()
1417 .map(|root| {
1418 traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host)
1419 })
1420 .collect();
1421 (new_roots, result)
1422 }
1423
1424 /// Recursive function for rebuilding the graph.
1425 ///
1426 /// This walks `unit_graph`, starting at the given `unit`. It inserts the new
1427 /// units into `new_graph`, and returns a new updated version of the given
1428 /// unit (`dep_hash` is filled in, and `kind` switched if necessary).
traverse_and_share( interner: &UnitInterner, memo: &mut HashMap<Unit, Unit>, new_graph: &mut UnitGraph, unit_graph: &UnitGraph, unit: &Unit, to_host: CompileKind, ) -> Unit1429 fn traverse_and_share(
1430 interner: &UnitInterner,
1431 memo: &mut HashMap<Unit, Unit>,
1432 new_graph: &mut UnitGraph,
1433 unit_graph: &UnitGraph,
1434 unit: &Unit,
1435 to_host: CompileKind,
1436 ) -> Unit {
1437 if let Some(new_unit) = memo.get(unit) {
1438 // Already computed, no need to recompute.
1439 return new_unit.clone();
1440 }
1441 let mut dep_hash = StableHasher::new();
1442 let new_deps: Vec<_> = unit_graph[unit]
1443 .iter()
1444 .map(|dep| {
1445 let new_dep_unit =
1446 traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host);
1447 new_dep_unit.hash(&mut dep_hash);
1448 UnitDep {
1449 unit: new_dep_unit,
1450 ..dep.clone()
1451 }
1452 })
1453 .collect();
1454 let new_dep_hash = dep_hash.finish();
1455 let new_kind = if unit.kind == to_host {
1456 CompileKind::Host
1457 } else {
1458 unit.kind
1459 };
1460 let new_unit = interner.intern(
1461 &unit.pkg,
1462 &unit.target,
1463 unit.profile,
1464 new_kind,
1465 unit.mode,
1466 unit.features.clone(),
1467 unit.is_std,
1468 new_dep_hash,
1469 );
1470 assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
1471 new_graph.entry(new_unit.clone()).or_insert(new_deps);
1472 new_unit
1473 }
1474
1475 /// Build `glob::Pattern` with informative context.
build_glob(pat: &str) -> CargoResult<glob::Pattern>1476 fn build_glob(pat: &str) -> CargoResult<glob::Pattern> {
1477 glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat))
1478 }
1479
1480 /// Emits "package not found" error.
1481 ///
1482 /// > This function should be used only in package selection processes such like
1483 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_package_not_found( ws: &Workspace<'_>, opt_names: BTreeSet<&str>, opt_out: bool, ) -> CargoResult<()>1484 fn emit_package_not_found(
1485 ws: &Workspace<'_>,
1486 opt_names: BTreeSet<&str>,
1487 opt_out: bool,
1488 ) -> CargoResult<()> {
1489 if !opt_names.is_empty() {
1490 anyhow::bail!(
1491 "{}package(s) `{}` not found in workspace `{}`",
1492 if opt_out { "excluded " } else { "" },
1493 opt_names.into_iter().collect::<Vec<_>>().join(", "),
1494 ws.root().display(),
1495 )
1496 }
1497 Ok(())
1498 }
1499
1500 /// Emits "glob pattern not found" error.
1501 ///
1502 /// > This function should be used only in package selection processes such like
1503 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_pattern_not_found( ws: &Workspace<'_>, opt_patterns: Vec<(glob::Pattern, bool)>, opt_out: bool, ) -> CargoResult<()>1504 fn emit_pattern_not_found(
1505 ws: &Workspace<'_>,
1506 opt_patterns: Vec<(glob::Pattern, bool)>,
1507 opt_out: bool,
1508 ) -> CargoResult<()> {
1509 let not_matched = opt_patterns
1510 .iter()
1511 .filter(|(_, matched)| !*matched)
1512 .map(|(pat, _)| pat.as_str())
1513 .collect::<Vec<_>>();
1514 if !not_matched.is_empty() {
1515 anyhow::bail!(
1516 "{}package pattern(s) `{}` not found in workspace `{}`",
1517 if opt_out { "excluded " } else { "" },
1518 not_matched.join(", "),
1519 ws.root().display(),
1520 )
1521 }
1522 Ok(())
1523 }
1524
1525 /// Checks whether a package matches any of a list of glob patterns generated
1526 /// from `opt_patterns_and_names`.
1527 ///
1528 /// > This function should be used only in package selection processes such like
1529 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool1530 fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool {
1531 patterns.iter_mut().any(|(m, matched)| {
1532 let is_matched = m.matches(pkg.name().as_str());
1533 *matched |= is_matched;
1534 is_matched
1535 })
1536 }
1537
1538 /// Given a list opt-in or opt-out package selection strings, generates two
1539 /// collections that represent glob patterns and package names respectively.
1540 ///
1541 /// > This function should be used only in package selection processes such like
1542 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
opt_patterns_and_names( opt: &[String], ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)>1543 fn opt_patterns_and_names(
1544 opt: &[String],
1545 ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> {
1546 let mut opt_patterns = Vec::new();
1547 let mut opt_names = BTreeSet::new();
1548 for x in opt.iter() {
1549 if is_glob_pattern(x) {
1550 opt_patterns.push((build_glob(x)?, false));
1551 } else {
1552 opt_names.insert(String::as_str(x));
1553 }
1554 }
1555 Ok((opt_patterns, opt_names))
1556 }
1557
/// Removes duplicate CompileMode::Doc units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error, or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    // Removes from `unit_graph` every unit in `units` that `cb` selects,
    // except root units (the user explicitly asked for those). Returns the
    // units that survived. Captures `unit_graph` and `removed_units` mutably.
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            log::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        // Group the remaining duplicates by (name, source, kind) so that only
        // true same-package version duplicates compete with each other.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                // NOTE(review): semver versions are totally ordered, so
                // `cmp` would avoid this `partial_cmp(..).unwrap()` — confirm
                // `Version: Ord` before changing.
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    // Depth-first walk from the roots; anything not visited is unreachable.
    let mut visited = HashSet::new();
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}
1678