1 //! The Cargo "compile" operation.
2 //!
3 //! This module contains the entry point for starting the compilation process
4 //! for commands like `build`, `test`, `doc`, `rustc`, etc.
5 //!
6 //! The `compile` function will do all the work to compile a workspace. A
7 //! rough outline is:
8 //!
9 //! - Resolve the dependency graph (see `ops::resolve`).
10 //! - Download any packages needed (see `PackageSet`).
11 //! - Generate a list of top-level "units" of work for the targets the user
12 //! requested on the command-line. Each `Unit` corresponds to a compiler
13 //! invocation. This is done in this module (`generate_targets`).
14 //! - Build the graph of `Unit` dependencies (see
15 //! `core::compiler::context::unit_dependencies`).
16 //! - Create a `Context` which will perform the following steps:
17 //! - Prepare the `target` directory (see `Layout`).
18 //! - Create a job queue (see `JobQueue`). The queue checks the
19 //! fingerprint of each `Unit` to determine if it should run or be
20 //! skipped.
21 //! - Execute the queue. Each leaf in the queue's dependency graph is
22 //! executed, and then removed from the graph when finished. This
23 //! repeats until the queue is empty.
24
25 use std::collections::{BTreeSet, HashMap, HashSet};
26 use std::hash::{Hash, Hasher};
27 use std::sync::Arc;
28
29 use crate::core::compiler::unit_dependencies::build_unit_dependencies;
30 use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
31 use crate::core::compiler::{standard_lib, TargetInfo};
32 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
33 use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
34 use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
35 use crate::core::profiles::{Profiles, UnitFor};
36 use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
37 use crate::core::resolver::{HasDevUnits, Resolve};
38 use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target};
39 use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace};
40 use crate::drop_println;
41 use crate::ops;
42 use crate::ops::resolve::WorkspaceResolve;
43 use crate::util::config::Config;
44 use crate::util::interning::InternedString;
45 use crate::util::restricted_names::is_glob_pattern;
46 use crate::util::{closest_msg, profile, CargoResult, StableHasher};
47
48 use anyhow::{bail, Context as _};
49
/// Contains information about how a package should be compiled.
///
/// Note on distinction between `CompileOptions` and `BuildConfig`:
/// `BuildConfig` contains values that need to be retained after
/// `BuildContext` is created. The other fields are no longer necessary. Think
/// of it as `CompileOptions` are high-level settings requested on the
/// command-line, and `BuildConfig` are low-level settings for actually
/// driving `rustc`.
#[derive(Debug)]
pub struct CompileOptions {
    /// Configuration information for a rustc build.
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only).
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments,
    /// note that this only accounts for the *final* invocation of rustc.
    pub target_rustc_args: Option<Vec<String>>,
    /// Extra arguments passed to all selected targets for rustdoc.
    pub local_rustdoc_args: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: bool,
}
83
84 impl<'a> CompileOptions {
new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions>85 pub fn new(config: &Config, mode: CompileMode) -> CargoResult<CompileOptions> {
86 Ok(CompileOptions {
87 build_config: BuildConfig::new(config, None, &[], mode)?,
88 cli_features: CliFeatures::new_all(false),
89 spec: ops::Packages::Packages(Vec::new()),
90 filter: CompileFilter::Default {
91 required_features_filterable: false,
92 },
93 target_rustdoc_args: None,
94 target_rustc_args: None,
95 local_rustdoc_args: None,
96 rustdoc_document_private_items: false,
97 honor_rust_version: true,
98 })
99 }
100 }
101
/// Which packages the user selected on the command line.
#[derive(PartialEq, Eq, Debug)]
pub enum Packages {
    /// No explicit selection: the workspace's default members (or the
    /// current package — see `to_package_id_specs`).
    Default,
    /// All workspace members (`--workspace`).
    All,
    /// All workspace members except the listed names/patterns (`--exclude`).
    OptOut(Vec<String>),
    /// Only the listed names/patterns (`--package`).
    Packages(Vec<String>),
}
109
impl Packages {
    /// Builds a `Packages` selection from the `--workspace` (`all`),
    /// `--exclude`, and `--package` command-line flags.
    ///
    /// Errors when `--exclude` is given without `--workspace`.
    pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
        Ok(match (all, exclude.len(), package.len()) {
            (false, 0, 0) => Packages::Default,
            (false, 0, _) => Packages::Packages(package),
            (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"),
            (true, 0, _) => Packages::All,
            (true, _, _) => Packages::OptOut(exclude),
        })
    }

    /// Converts selected packages from a workspace to `PackageIdSpec`s.
    pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult<Vec<PackageIdSpec>> {
        let specs = match self {
            Packages::All => ws
                .members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
            Packages::OptOut(opt_out) => {
                // `names.remove` and `match_patterns` consume entries as they
                // match, so whatever is left afterwards was never matched and
                // can be reported below.
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let specs = ws
                    .members()
                    .filter(|pkg| {
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .map(Package::package_id)
                    .map(PackageIdSpec::from_package_id)
                    .collect();
                // Excluding something that does not exist only warns (the
                // error from emit_* is downgraded to a shell warning).
                let warn = |e| ws.config().shell().warn(e);
                emit_package_not_found(ws, names, true).or_else(warn)?;
                emit_pattern_not_found(ws, patterns, true).or_else(warn)?;
                specs
            }
            Packages::Packages(packages) if packages.is_empty() => {
                vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, packages) = opt_patterns_and_names(opt_in)?;
                // Plain names are parsed as specs directly; glob patterns are
                // matched against workspace members.
                let mut specs = packages
                    .iter()
                    .map(|p| PackageIdSpec::parse(p))
                    .collect::<CargoResult<Vec<_>>>()?;
                if !patterns.is_empty() {
                    let matched_pkgs = ws
                        .members()
                        .filter(|pkg| match_patterns(pkg, &mut patterns))
                        .map(Package::package_id)
                        .map(PackageIdSpec::from_package_id);
                    specs.extend(matched_pkgs);
                }
                // For opt-in selection, an unmatched pattern is a hard error.
                emit_pattern_not_found(ws, patterns, false)?;
                specs
            }
            Packages::Default => ws
                .default_members()
                .map(Package::package_id)
                .map(PackageIdSpec::from_package_id)
                .collect(),
        };
        if specs.is_empty() {
            if ws.is_virtual() {
                anyhow::bail!(
                    "manifest path `{}` contains no package: The manifest is virtual, \
                     and the workspace has no members.",
                    ws.root().display()
                )
            }
            anyhow::bail!("no packages to compile")
        }
        Ok(specs)
    }

    /// Gets a list of selected packages from a workspace.
    pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult<Vec<&'ws Package>> {
        let packages: Vec<_> = match self {
            Packages::Default => ws.default_members().collect(),
            Packages::All => ws.members().collect(),
            Packages::OptOut(opt_out) => {
                // Same consume-as-you-match scheme as to_package_id_specs,
                // but unmatched names/patterns are hard errors here (no
                // warning downgrade).
                let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, true)?;
                emit_pattern_not_found(ws, patterns, true)?;
                packages
            }
            Packages::Packages(opt_in) => {
                let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?;
                let packages = ws
                    .members()
                    .filter(|pkg| {
                        names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns)
                    })
                    .collect();
                emit_package_not_found(ws, names, false)?;
                emit_pattern_not_found(ws, patterns, false)?;
                packages
            }
        };
        Ok(packages)
    }

    /// Returns whether or not the user needs to pass a `-p` flag to target a
    /// specific package in the workspace.
    pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool {
        match self {
            // Only ambiguous when more than one package would be selected.
            Packages::Default => ws.default_members().count() > 1,
            Packages::All => ws.members().count() > 1,
            Packages::Packages(_) => true,
            Packages::OptOut(_) => true,
        }
    }
}
227
/// Rule for whether the library target of a package should be built.
#[derive(Debug, PartialEq, Eq)]
pub enum LibRule {
    /// Include the library, fail if not present
    True,
    /// Include the library if present
    Default,
    /// Exclude the library
    False,
}
237
/// Rule for selecting targets of one kind (bins, examples, tests, benches).
#[derive(Debug)]
pub enum FilterRule {
    /// Select every target of this kind.
    All,
    /// Select only the targets with the given names (an empty list selects
    /// nothing).
    Just(Vec<String>),
}
243
/// Filter to apply to the root package to select which targets will be built.
#[derive(Debug)]
pub enum CompileFilter {
    /// No target flags were passed; build the default set of targets.
    Default {
        /// Flag whether targets can be safely skipped when required-features are not satisfied.
        required_features_filterable: bool,
    },
    /// Build only the targets selected by the rules below.
    Only {
        /// `true` when produced by `new_all_targets` (i.e. `--all-targets`).
        all_targets: bool,
        lib: LibRule,
        bins: FilterRule,
        examples: FilterRule,
        tests: FilterRule,
        benches: FilterRule,
    },
}
259
compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>>260 pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
261 let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
262 compile_with_exec(ws, options, &exec)
263 }
264
265 /// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build
266 /// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through.
compile_with_exec<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc<dyn Executor>, ) -> CargoResult<Compilation<'a>>267 pub fn compile_with_exec<'a>(
268 ws: &Workspace<'a>,
269 options: &CompileOptions,
270 exec: &Arc<dyn Executor>,
271 ) -> CargoResult<Compilation<'a>> {
272 ws.emit_warnings()?;
273 compile_ws(ws, options, exec)
274 }
275
compile_ws<'a>( ws: &Workspace<'a>, options: &CompileOptions, exec: &Arc<dyn Executor>, ) -> CargoResult<Compilation<'a>>276 pub fn compile_ws<'a>(
277 ws: &Workspace<'a>,
278 options: &CompileOptions,
279 exec: &Arc<dyn Executor>,
280 ) -> CargoResult<Compilation<'a>> {
281 let interner = UnitInterner::new();
282 let bcx = create_bcx(ws, options, &interner)?;
283 if options.build_config.unit_graph {
284 unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?;
285 return Compilation::new(&bcx);
286 }
287 let _p = profile::start("compiling");
288 let cx = Context::new(&bcx)?;
289 cx.compile(exec)
290 }
291
print<'a>( ws: &Workspace<'a>, options: &CompileOptions, print_opt_value: &str, ) -> CargoResult<()>292 pub fn print<'a>(
293 ws: &Workspace<'a>,
294 options: &CompileOptions,
295 print_opt_value: &str,
296 ) -> CargoResult<()> {
297 let CompileOptions {
298 ref build_config,
299 ref target_rustc_args,
300 ..
301 } = *options;
302 let config = ws.config();
303 let rustc = config.load_global_rustc(Some(ws))?;
304 for (index, kind) in build_config.requested_kinds.iter().enumerate() {
305 if index != 0 {
306 drop_println!(config);
307 }
308 let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?;
309 let mut process = rustc.process();
310 process.args(&target_info.rustflags);
311 if let Some(args) = target_rustc_args {
312 process.args(args);
313 }
314 if let CompileKind::Target(t) = kind {
315 process.arg("--target").arg(t.short_name());
316 }
317 process.arg("--print").arg(print_opt_value);
318 process.exec()?;
319 }
320 Ok(())
321 }
322
/// Prepares a full `BuildContext` for the requested compilation: validates
/// options, resolves dependencies and features, downloads the selected
/// packages, generates the root `Unit`s, and builds the unit dependency
/// graph.
pub fn create_bcx<'a, 'cfg>(
    ws: &'a Workspace<'cfg>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'cfg>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref local_rustdoc_args,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let config = ws.config();

    // Perform some pre-flight validation.
    match build_config.mode {
        CompileMode::Test
        | CompileMode::Build
        | CompileMode::Check { .. }
        | CompileMode::Bench
        | CompileMode::RunCustomBuild => {
            if std::env::var("RUST_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => {
            if std::env::var("RUSTDOC_FLAGS").is_ok() {
                config.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    config.validate_term_config()?;

    let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    // `-Z rustdoc-scrape-examples` needs the whole workspace resolved (it
    // generates units for every member below), so widen the specs in that
    // case.
    let all_packages = &Packages::All;
    let rustdoc_scrape_examples = &config.cli_unstable().rustdoc_scrape_examples;
    let need_reverse_dependencies = rustdoc_scrape_examples.is_some();
    let full_specs = if need_reverse_dependencies {
        all_packages
    } else {
        spec
    };

    let resolve_specs = full_specs.to_package_id_specs(ws)?;
    let has_dev_units = if filter.need_dev_deps(build_config.mode) || need_reverse_dependencies {
        HasDevUnits::Yes
    } else {
        HasDevUnits::No
    };
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &target_data,
        &build_config.requested_kinds,
        cli_features,
        &resolve_specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        resolved_features,
    } = resolve;

    // With `-Zbuild-std`, resolve the standard library separately and merge
    // its packages into the main package set.
    let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std {
        if build_config.build_plan {
            config
                .shell()
                .warn("-Zbuild-std does not currently fully support --build-plan")?;
        }
        if build_config.requested_kinds[0].is_host() {
            // TODO: This should eventually be fixed. Unfortunately it is not
            // easy to get the host triple in BuildConfig. Consider changing
            // requested_target to an enum, or some other approach.
            anyhow::bail!("-Zbuild-std requires --target");
        }
        let (std_package_set, std_resolve, std_features) =
            standard_lib::resolve_std(ws, &target_data, &build_config.requested_kinds, crates)?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let specs = if need_reverse_dependencies {
        spec.to_package_id_specs(ws)?
    } else {
        resolve_specs.clone()
    };
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(config);

        // Testing a non-member package would require its dev-dependencies,
        // which only workspace members have available.
        if build_config.mode.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    // Extra rustc/rustdoc args target a single invocation, so they only make
    // sense with a single selected package (checked here) and a single unit
    // (checked further below).
    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (&Some(ref args), _) => (Some(args.clone()), "rustc"),
        (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        // CLI argument parsing should have prevented this; reaching it is a bug.
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut config.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    // Passing `build_config.requested_kinds` instead of
    // `explicit_host_kinds` here so that `generate_targets` can do
    // its own special handling of `CompileKind::Host`. It will
    // internally replace the host kind by the `explicit_host_kind`
    // before setting as a unit.
    let mut units = generate_targets(
        ws,
        &to_builds,
        filter,
        &build_config.requested_kinds,
        explicit_host_kind,
        build_config.mode,
        &resolve,
        &workspace_resolve,
        &resolved_features,
        &pkg_set,
        &profiles,
        interner,
    )?;

    // For `-Z rustdoc-scrape-examples`, generate a second set of root units
    // in `Docscrape` mode covering the targets examples are scraped from.
    let mut scrape_units = match rustdoc_scrape_examples {
        Some(arg) => {
            let filter = match arg.as_str() {
                "all" => CompileFilter::new_all_targets(),
                "examples" => CompileFilter::new(
                    LibRule::False,
                    FilterRule::none(),
                    FilterRule::none(),
                    FilterRule::All,
                    FilterRule::none(),
                ),
                _ => {
                    bail!(
                        r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"#
                    )
                }
            };
            let to_build_ids = resolve.specs_to_ids(&resolve_specs)?;
            let to_builds = pkg_set.get_many(to_build_ids)?;
            let mode = CompileMode::Docscrape;

            generate_targets(
                ws,
                &to_builds,
                &filter,
                &build_config.requested_kinds,
                explicit_host_kind,
                mode,
                &resolve,
                &workspace_resolve,
                &resolved_features,
                &pkg_set,
                &profiles,
                interner,
            )?
        }
        None => Vec::new(),
    };

    let std_roots = if let Some(crates) = &config.cli_unstable().build_std {
        // Only build libtest if it looks like it is needed.
        let mut crates = crates.clone();
        if !crates.iter().any(|c| c == "test")
            && units
                .iter()
                .any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
        {
            // Only build libtest when libstd is built (libtest depends on libstd)
            if crates.iter().any(|c| c == "std") {
                crates.push("test".to_string());
            }
        }
        // Safe to unwrap: `build_std` was also Some when
        // `std_resolve_features` was computed above.
        let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
        standard_lib::generate_std_roots(
            &crates,
            std_resolve,
            std_features,
            &explicit_host_kinds,
            &pkg_set,
            interner,
            &profiles,
        )?
    } else {
        Default::default()
    };

    let mut unit_graph = build_unit_dependencies(
        ws,
        &pkg_set,
        &resolve,
        &resolved_features,
        std_resolve_features.as_ref(),
        &units,
        &scrape_units,
        &std_roots,
        build_config.mode,
        &target_data,
        &profiles,
        interner,
    )?;

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.mode == (CompileMode::Doc { deps: true }) {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    if build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host)
    {
        // Rebuild the unit graph, replacing the explicit host targets with
        // CompileKind::Host, merging any dependencies shared with build
        // dependencies.
        let new_graph = rebuild_unit_graph_shared(
            interner,
            unit_graph,
            &units,
            &scrape_units,
            explicit_host_kind,
        );
        // This would be nicer with destructuring assignment.
        units = new_graph.0;
        scrape_units = new_graph.1;
        unit_graph = new_graph.2;
    }

    // Attach the single-target extra args (if any) to the one root unit.
    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }

    for unit in &units {
        if unit.mode.is_doc() || unit.mode.is_doc_test() {
            let mut extra_args = local_rustdoc_args.clone();

            // Add `--document-private-items` rustdoc flag if requested or if
            // the target is a binary. Binary crates get their private items
            // documented by default.
            if rustdoc_document_private_items || unit.target.is_bin() {
                let mut args = extra_args.take().unwrap_or_default();
                args.push("--document-private-items".into());
                extra_args = Some(args);
            }

            if let Some(args) = extra_args {
                extra_compiler_args
                    .entry(unit.clone())
                    .or_default()
                    .extend(args);
            }
        }
    }

    if honor_rust_version {
        // Remove any pre-release identifiers for easier comparison
        let current_version = &target_data.rustc.version;
        let untagged_version = semver::Version::new(
            current_version.major,
            current_version.minor,
            current_version.patch,
        );

        for unit in unit_graph.keys() {
            let version = match unit.pkg.rust_version() {
                Some(v) => v,
                None => continue,
            };

            // NOTE(review): the unwrap assumes `rust-version` was validated
            // when the manifest was parsed — TODO confirm that invariant.
            let req = semver::VersionReq::parse(version).unwrap();
            if req.matches(&untagged_version) {
                continue;
            }

            anyhow::bail!(
                "package `{}` cannot be built because it requires rustc {} or newer, \
                 while the currently active rustc version is {}",
                unit.pkg,
                version,
                current_version,
            );
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
        scrape_units,
    )?;

    Ok(bcx)
}
691
692 impl FilterRule {
new(targets: Vec<String>, all: bool) -> FilterRule693 pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
694 if all {
695 FilterRule::All
696 } else {
697 FilterRule::Just(targets)
698 }
699 }
700
none() -> FilterRule701 pub fn none() -> FilterRule {
702 FilterRule::Just(Vec::new())
703 }
704
matches(&self, target: &Target) -> bool705 fn matches(&self, target: &Target) -> bool {
706 match *self {
707 FilterRule::All => true,
708 FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
709 }
710 }
711
is_specific(&self) -> bool712 fn is_specific(&self) -> bool {
713 match *self {
714 FilterRule::All => true,
715 FilterRule::Just(ref targets) => !targets.is_empty(),
716 }
717 }
718
try_collect(&self) -> Option<Vec<String>>719 pub fn try_collect(&self) -> Option<Vec<String>> {
720 match *self {
721 FilterRule::All => None,
722 FilterRule::Just(ref targets) => Some(targets.clone()),
723 }
724 }
725
contains_glob_patterns(&self) -> bool726 pub(crate) fn contains_glob_patterns(&self) -> bool {
727 match self {
728 FilterRule::All => false,
729 FilterRule::Just(targets) => targets.iter().any(is_glob_pattern),
730 }
731 }
732 }
733
734 impl CompileFilter {
735 /// Construct a CompileFilter from raw command line arguments.
from_raw_arguments( lib_only: bool, bins: Vec<String>, all_bins: bool, tsts: Vec<String>, all_tsts: bool, exms: Vec<String>, all_exms: bool, bens: Vec<String>, all_bens: bool, all_targets: bool, ) -> CompileFilter736 pub fn from_raw_arguments(
737 lib_only: bool,
738 bins: Vec<String>,
739 all_bins: bool,
740 tsts: Vec<String>,
741 all_tsts: bool,
742 exms: Vec<String>,
743 all_exms: bool,
744 bens: Vec<String>,
745 all_bens: bool,
746 all_targets: bool,
747 ) -> CompileFilter {
748 if all_targets {
749 return CompileFilter::new_all_targets();
750 }
751 let rule_lib = if lib_only {
752 LibRule::True
753 } else {
754 LibRule::False
755 };
756 let rule_bins = FilterRule::new(bins, all_bins);
757 let rule_tsts = FilterRule::new(tsts, all_tsts);
758 let rule_exms = FilterRule::new(exms, all_exms);
759 let rule_bens = FilterRule::new(bens, all_bens);
760
761 CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
762 }
763
764 /// Construct a CompileFilter from underlying primitives.
new( rule_lib: LibRule, rule_bins: FilterRule, rule_tsts: FilterRule, rule_exms: FilterRule, rule_bens: FilterRule, ) -> CompileFilter765 pub fn new(
766 rule_lib: LibRule,
767 rule_bins: FilterRule,
768 rule_tsts: FilterRule,
769 rule_exms: FilterRule,
770 rule_bens: FilterRule,
771 ) -> CompileFilter {
772 if rule_lib == LibRule::True
773 || rule_bins.is_specific()
774 || rule_tsts.is_specific()
775 || rule_exms.is_specific()
776 || rule_bens.is_specific()
777 {
778 CompileFilter::Only {
779 all_targets: false,
780 lib: rule_lib,
781 bins: rule_bins,
782 examples: rule_exms,
783 benches: rule_bens,
784 tests: rule_tsts,
785 }
786 } else {
787 CompileFilter::Default {
788 required_features_filterable: true,
789 }
790 }
791 }
792
new_all_targets() -> CompileFilter793 pub fn new_all_targets() -> CompileFilter {
794 CompileFilter::Only {
795 all_targets: true,
796 lib: LibRule::Default,
797 bins: FilterRule::All,
798 examples: FilterRule::All,
799 benches: FilterRule::All,
800 tests: FilterRule::All,
801 }
802 }
803
need_dev_deps(&self, mode: CompileMode) -> bool804 pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
805 match mode {
806 CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
807 CompileMode::Check { test: true } => true,
808 CompileMode::Build
809 | CompileMode::Doc { .. }
810 | CompileMode::Docscrape
811 | CompileMode::Check { test: false } => match *self {
812 CompileFilter::Default { .. } => false,
813 CompileFilter::Only {
814 ref examples,
815 ref tests,
816 ref benches,
817 ..
818 } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
819 },
820 CompileMode::RunCustomBuild => panic!("Invalid mode"),
821 }
822 }
823
824 // this selects targets for "cargo run". for logic to select targets for
825 // other subcommands, see generate_targets and filter_default_targets
target_run(&self, target: &Target) -> bool826 pub fn target_run(&self, target: &Target) -> bool {
827 match *self {
828 CompileFilter::Default { .. } => true,
829 CompileFilter::Only {
830 ref lib,
831 ref bins,
832 ref examples,
833 ref tests,
834 ref benches,
835 ..
836 } => {
837 let rule = match *target.kind() {
838 TargetKind::Bin => bins,
839 TargetKind::Test => tests,
840 TargetKind::Bench => benches,
841 TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
842 TargetKind::Lib(..) => {
843 return match *lib {
844 LibRule::True => true,
845 LibRule::Default => true,
846 LibRule::False => false,
847 };
848 }
849 TargetKind::CustomBuild => return false,
850 };
851 rule.matches(target)
852 }
853 }
854 }
855
is_specific(&self) -> bool856 pub fn is_specific(&self) -> bool {
857 match *self {
858 CompileFilter::Default { .. } => false,
859 CompileFilter::Only { .. } => true,
860 }
861 }
862
is_all_targets(&self) -> bool863 pub fn is_all_targets(&self) -> bool {
864 matches!(
865 *self,
866 CompileFilter::Only {
867 all_targets: true,
868 ..
869 }
870 )
871 }
872
contains_glob_patterns(&self) -> bool873 pub(crate) fn contains_glob_patterns(&self) -> bool {
874 match self {
875 CompileFilter::Default { .. } => false,
876 CompileFilter::Only {
877 bins,
878 examples,
879 tests,
880 benches,
881 ..
882 } => {
883 bins.contains_glob_patterns()
884 || examples.contains_glob_patterns()
885 || tests.contains_glob_patterns()
886 || benches.contains_glob_patterns()
887 }
888 }
889 }
890 }
891
/// A proposed target.
///
/// Proposed targets are later filtered into actual `Unit`s based on whether or
/// not the target requires its features to be present.
#[derive(Debug)]
struct Proposal<'a> {
    /// The package the proposed target belongs to.
    pkg: &'a Package,
    /// The target being proposed for compilation.
    target: &'a Target,
    /// Indicates whether or not all required features *must* be present. If
    /// false, and the features are not available, then it will be silently
    /// skipped. Generally, targets specified by name (`--bin foo`) are
    /// required, all others can be silently skipped if features are missing.
    requires_features: bool,
    /// The compile mode the proposed target would be built with.
    mode: CompileMode,
}
907
908 /// Generates all the base targets for the packages the user has requested to
909 /// compile. Dependencies for these targets are computed later in `unit_dependencies`.
generate_targets( ws: &Workspace<'_>, packages: &[&Package], filter: &CompileFilter, requested_kinds: &[CompileKind], explicit_host_kind: CompileKind, mode: CompileMode, resolve: &Resolve, workspace_resolve: &Option<Resolve>, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, profiles: &Profiles, interner: &UnitInterner, ) -> CargoResult<Vec<Unit>>910 fn generate_targets(
911 ws: &Workspace<'_>,
912 packages: &[&Package],
913 filter: &CompileFilter,
914 requested_kinds: &[CompileKind],
915 explicit_host_kind: CompileKind,
916 mode: CompileMode,
917 resolve: &Resolve,
918 workspace_resolve: &Option<Resolve>,
919 resolved_features: &features::ResolvedFeatures,
920 package_set: &PackageSet<'_>,
921 profiles: &Profiles,
922 interner: &UnitInterner,
923 ) -> CargoResult<Vec<Unit>> {
924 let config = ws.config();
925 // Helper for creating a list of `Unit` structures
926 let new_unit =
927 |units: &mut HashSet<Unit>, pkg: &Package, target: &Target, target_mode: CompileMode| {
928 let unit_for = if target_mode.is_any_test() {
929 // NOTE: the `UnitFor` here is subtle. If you have a profile
930 // with `panic` set, the `panic` flag is cleared for
931 // tests/benchmarks and their dependencies. If this
932 // was `normal`, then the lib would get compiled three
933 // times (once with panic, once without, and once with
934 // `--test`).
935 //
936 // This would cause a problem for doc tests, which would fail
937 // because `rustdoc` would attempt to link with both libraries
938 // at the same time. Also, it's probably not important (or
939 // even desirable?) for rustdoc to link with a lib with
940 // `panic` set.
941 //
942 // As a consequence, Examples and Binaries get compiled
943 // without `panic` set. This probably isn't a bad deal.
944 //
945 // Forcing the lib to be compiled three times during `cargo
946 // test` is probably also not desirable.
947 UnitFor::new_test(config)
948 } else if target.for_host() {
949 // Proc macro / plugin should not have `panic` set.
950 UnitFor::new_compiler()
951 } else {
952 UnitFor::new_normal()
953 };
954 // Custom build units are added in `build_unit_dependencies`.
955 assert!(!target.is_custom_build());
956 let target_mode = match target_mode {
957 CompileMode::Test => {
958 if target.is_example() && !filter.is_specific() && !target.tested() {
959 // Examples are included as regular binaries to verify
960 // that they compile.
961 CompileMode::Build
962 } else {
963 CompileMode::Test
964 }
965 }
966 CompileMode::Build => match *target.kind() {
967 TargetKind::Test => CompileMode::Test,
968 TargetKind::Bench => CompileMode::Bench,
969 _ => CompileMode::Build,
970 },
971 // `CompileMode::Bench` is only used to inform `filter_default_targets`
972 // which command is being used (`cargo bench`). Afterwards, tests
973 // and benches are treated identically. Switching the mode allows
974 // de-duplication of units that are essentially identical. For
975 // example, `cargo build --all-targets --release` creates the units
976 // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
977 // and since these are the same, we want them to be de-duplicated in
978 // `unit_dependencies`.
979 CompileMode::Bench => CompileMode::Test,
980 _ => target_mode,
981 };
982
983 let is_local = pkg.package_id().source_id().is_path();
984
985 // No need to worry about build-dependencies, roots are never build dependencies.
986 let features_for = FeaturesFor::from_for_host(target.proc_macro());
987 let features = resolved_features.activated_features(pkg.package_id(), features_for);
988
989 // If `--target` has not been specified, then the unit
990 // graph is built almost like if `--target $HOST` was
991 // specified. See `rebuild_unit_graph_shared` for more on
992 // why this is done. However, if the package has its own
993 // `package.target` key, then this gets used instead of
994 // `$HOST`
995 let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() {
996 vec![k]
997 } else {
998 requested_kinds
999 .iter()
1000 .map(|kind| match kind {
1001 CompileKind::Host => {
1002 pkg.manifest().default_kind().unwrap_or(explicit_host_kind)
1003 }
1004 CompileKind::Target(t) => CompileKind::Target(*t),
1005 })
1006 .collect()
1007 };
1008
1009 for kind in explicit_kinds.iter() {
1010 let profile = profiles.get_profile(
1011 pkg.package_id(),
1012 ws.is_member(pkg),
1013 is_local,
1014 unit_for,
1015 target_mode,
1016 *kind,
1017 );
1018 let unit = interner.intern(
1019 pkg,
1020 target,
1021 profile,
1022 kind.for_target(target),
1023 target_mode,
1024 features.clone(),
1025 /*is_std*/ false,
1026 /*dep_hash*/ 0,
1027 );
1028 units.insert(unit);
1029 }
1030 };
1031
1032 // Create a list of proposed targets.
1033 let mut proposals: Vec<Proposal<'_>> = Vec::new();
1034
1035 match *filter {
1036 CompileFilter::Default {
1037 required_features_filterable,
1038 } => {
1039 for pkg in packages {
1040 let default = filter_default_targets(pkg.targets(), mode);
1041 proposals.extend(default.into_iter().map(|target| Proposal {
1042 pkg,
1043 target,
1044 requires_features: !required_features_filterable,
1045 mode,
1046 }));
1047 if mode == CompileMode::Test {
1048 if let Some(t) = pkg
1049 .targets()
1050 .iter()
1051 .find(|t| t.is_lib() && t.doctested() && t.doctestable())
1052 {
1053 proposals.push(Proposal {
1054 pkg,
1055 target: t,
1056 requires_features: false,
1057 mode: CompileMode::Doctest,
1058 });
1059 }
1060 }
1061 }
1062 }
1063 CompileFilter::Only {
1064 all_targets,
1065 ref lib,
1066 ref bins,
1067 ref examples,
1068 ref tests,
1069 ref benches,
1070 } => {
1071 if *lib != LibRule::False {
1072 let mut libs = Vec::new();
1073 for proposal in filter_targets(packages, Target::is_lib, false, mode) {
1074 let Proposal { target, pkg, .. } = proposal;
1075 if mode.is_doc_test() && !target.doctestable() {
1076 let types = target.rustc_crate_types();
1077 let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect();
1078 ws.config().shell().warn(format!(
1079 "doc tests are not supported for crate type(s) `{}` in package `{}`",
1080 types_str.join(", "),
1081 pkg.name()
1082 ))?;
1083 } else {
1084 libs.push(proposal)
1085 }
1086 }
1087 if !all_targets && libs.is_empty() && *lib == LibRule::True {
1088 let names = packages.iter().map(|pkg| pkg.name()).collect::<Vec<_>>();
1089 if names.len() == 1 {
1090 anyhow::bail!("no library targets found in package `{}`", names[0]);
1091 } else {
1092 anyhow::bail!("no library targets found in packages: {}", names.join(", "));
1093 }
1094 }
1095 proposals.extend(libs);
1096 }
1097
1098 // If `--tests` was specified, add all targets that would be
1099 // generated by `cargo test`.
1100 let test_filter = match tests {
1101 FilterRule::All => Target::tested,
1102 FilterRule::Just(_) => Target::is_test,
1103 };
1104 let test_mode = match mode {
1105 CompileMode::Build => CompileMode::Test,
1106 CompileMode::Check { .. } => CompileMode::Check { test: true },
1107 _ => mode,
1108 };
1109 // If `--benches` was specified, add all targets that would be
1110 // generated by `cargo bench`.
1111 let bench_filter = match benches {
1112 FilterRule::All => Target::benched,
1113 FilterRule::Just(_) => Target::is_bench,
1114 };
1115 let bench_mode = match mode {
1116 CompileMode::Build => CompileMode::Bench,
1117 CompileMode::Check { .. } => CompileMode::Check { test: true },
1118 _ => mode,
1119 };
1120
1121 proposals.extend(list_rule_targets(
1122 packages,
1123 bins,
1124 "bin",
1125 Target::is_bin,
1126 mode,
1127 )?);
1128 proposals.extend(list_rule_targets(
1129 packages,
1130 examples,
1131 "example",
1132 Target::is_example,
1133 mode,
1134 )?);
1135 proposals.extend(list_rule_targets(
1136 packages,
1137 tests,
1138 "test",
1139 test_filter,
1140 test_mode,
1141 )?);
1142 proposals.extend(list_rule_targets(
1143 packages,
1144 benches,
1145 "bench",
1146 bench_filter,
1147 bench_mode,
1148 )?);
1149 }
1150 }
1151
1152 // Only include targets that are libraries or have all required
1153 // features available.
1154 //
1155 // `features_map` is a map of &Package -> enabled_features
1156 // It is computed by the set of enabled features for the package plus
1157 // every enabled feature of every enabled dependency.
1158 let mut features_map = HashMap::new();
1159 // This needs to be a set to de-duplicate units. Due to the way the
1160 // targets are filtered, it is possible to have duplicate proposals for
1161 // the same thing.
1162 let mut units = HashSet::new();
1163 for Proposal {
1164 pkg,
1165 target,
1166 requires_features,
1167 mode,
1168 } in proposals
1169 {
1170 let unavailable_features = match target.required_features() {
1171 Some(rf) => {
1172 validate_required_features(
1173 workspace_resolve,
1174 target.name(),
1175 rf,
1176 pkg.summary(),
1177 &mut config.shell(),
1178 )?;
1179
1180 let features = features_map.entry(pkg).or_insert_with(|| {
1181 resolve_all_features(resolve, resolved_features, package_set, pkg.package_id())
1182 });
1183 rf.iter().filter(|f| !features.contains(*f)).collect()
1184 }
1185 None => Vec::new(),
1186 };
1187 if target.is_lib() || unavailable_features.is_empty() {
1188 new_unit(&mut units, pkg, target, mode);
1189 } else if requires_features {
1190 let required_features = target.required_features().unwrap();
1191 let quoted_required_features: Vec<String> = required_features
1192 .iter()
1193 .map(|s| format!("`{}`", s))
1194 .collect();
1195 anyhow::bail!(
1196 "target `{}` in package `{}` requires the features: {}\n\
1197 Consider enabling them by passing, e.g., `--features=\"{}\"`",
1198 target.name(),
1199 pkg.name(),
1200 quoted_required_features.join(", "),
1201 required_features.join(" ")
1202 );
1203 }
1204 // else, silently skip target.
1205 }
1206 let mut units: Vec<_> = units.into_iter().collect();
1207 unmatched_target_filters(&units, filter, &mut ws.config().shell())?;
1208
1209 // Keep the roots in a consistent order, which helps with checking test output.
1210 units.sort_unstable();
1211 Ok(units)
1212 }
1213
1214 /// Checks if the unit list is empty and the user has passed any combination of
1215 /// --tests, --examples, --benches or --bins, and we didn't match on any targets.
1216 /// We want to emit a warning to make sure the user knows that this run is a no-op,
1217 /// and their code remains unchecked despite cargo not returning any errors
unmatched_target_filters( units: &[Unit], filter: &CompileFilter, shell: &mut Shell, ) -> CargoResult<()>1218 fn unmatched_target_filters(
1219 units: &[Unit],
1220 filter: &CompileFilter,
1221 shell: &mut Shell,
1222 ) -> CargoResult<()> {
1223 if let CompileFilter::Only {
1224 all_targets,
1225 lib: _,
1226 ref bins,
1227 ref examples,
1228 ref tests,
1229 ref benches,
1230 } = *filter
1231 {
1232 if units.is_empty() {
1233 let mut filters = String::new();
1234 let mut miss_count = 0;
1235
1236 let mut append = |t: &FilterRule, s| {
1237 if let FilterRule::All = *t {
1238 miss_count += 1;
1239 filters.push_str(s);
1240 }
1241 };
1242
1243 if all_targets {
1244 filters.push_str(" `all-targets`");
1245 } else {
1246 append(bins, " `bins`,");
1247 append(tests, " `tests`,");
1248 append(examples, " `examples`,");
1249 append(benches, " `benches`,");
1250 filters.pop();
1251 }
1252
1253 return shell.warn(format!(
1254 "Target {}{} specified, but no targets matched. This is a no-op",
1255 if miss_count > 1 { "filters" } else { "filter" },
1256 filters,
1257 ));
1258 }
1259 }
1260
1261 Ok(())
1262 }
1263
1264 /// Warns if a target's required-features references a feature that doesn't exist.
1265 ///
1266 /// This is a warning because historically this was not validated, and it
1267 /// would cause too much breakage to make it an error.
fn validate_required_features(
    resolve: &Option<Resolve>,
    target_name: &str,
    required_features: &[String],
    summary: &Summary,
    shell: &mut Shell,
) -> CargoResult<()> {
    // Without a resolved workspace graph there is nothing to validate
    // against; silently succeed.
    let resolve = match resolve {
        None => return Ok(()),
        Some(resolve) => resolve,
    };

    for feature in required_features {
        let fv = FeatureValue::new(feature.into());
        match &fv {
            // Plain feature name: warn (not error — see function docs) if it
            // is not declared in this package's `[features]` table.
            FeatureValue::Feature(f) => {
                if !summary.features().contains_key(f) {
                    shell.warn(format!(
                        "invalid feature `{}` in required-features of target `{}`: \
                        `{}` is not present in [features] section",
                        fv, target_name, fv
                    ))?;
                }
            }
            // `dep:name` syntax is a hard error in required-features.
            FeatureValue::Dep { .. } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    `dep:` prefixed feature values are not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Weak dependency features (`name?/feat`) are likewise a hard error.
            FeatureValue::DepFeature { weak: true, .. } => {
                anyhow::bail!(
                    "invalid feature `{}` in required-features of target `{}`: \
                    optional dependency with `?` is not allowed in required-features",
                    fv,
                    target_name
                );
            }
            // Handling of dependent_crate/dependent_crate_feature syntax
            FeatureValue::DepFeature {
                dep_name,
                dep_feature,
                weak: false,
            } => {
                // Find the dependency in the resolve graph by its name as
                // written in the manifest.
                match resolve
                    .deps(summary.package_id())
                    .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name))
                {
                    Some((dep_id, _deps)) => {
                        let dep_summary = resolve.summary(dep_id);
                        // The referenced feature must either be declared in the
                        // dependency's `[features]` table, or be one of its
                        // optional dependencies (implicit feature).
                        if !dep_summary.features().contains_key(dep_feature)
                            && !dep_summary
                                .dependencies()
                                .iter()
                                .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional())
                        {
                            shell.warn(format!(
                                "invalid feature `{}` in required-features of target `{}`: \
                                feature `{}` does not exist in package `{}`",
                                fv, target_name, dep_feature, dep_id
                            ))?;
                        }
                    }
                    None => {
                        shell.warn(format!(
                            "invalid feature `{}` in required-features of target `{}`: \
                            dependency `{}` does not exist",
                            fv, target_name, dep_name
                        ))?;
                    }
                }
            }
        }
    }
    Ok(())
}
1346
1347 /// Gets all of the features enabled for a package, plus its dependencies'
1348 /// features.
1349 ///
1350 /// Dependencies are added as `dep_name/feat_name` because `required-features`
1351 /// wants to support that syntax.
resolve_all_features( resolve_with_overrides: &Resolve, resolved_features: &features::ResolvedFeatures, package_set: &PackageSet<'_>, package_id: PackageId, ) -> HashSet<String>1352 pub fn resolve_all_features(
1353 resolve_with_overrides: &Resolve,
1354 resolved_features: &features::ResolvedFeatures,
1355 package_set: &PackageSet<'_>,
1356 package_id: PackageId,
1357 ) -> HashSet<String> {
1358 let mut features: HashSet<String> = resolved_features
1359 .activated_features(package_id, FeaturesFor::NormalOrDev)
1360 .iter()
1361 .map(|s| s.to_string())
1362 .collect();
1363
1364 // Include features enabled for use by dependencies so targets can also use them with the
1365 // required-features field when deciding whether to be built or skipped.
1366 for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
1367 let is_proc_macro = package_set
1368 .get_one(dep_id)
1369 .expect("packages downloaded")
1370 .proc_macro();
1371 for dep in deps {
1372 let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
1373 for feature in resolved_features
1374 .activated_features_unverified(dep_id, features_for)
1375 .unwrap_or_default()
1376 {
1377 features.insert(format!("{}/{}", dep.name_in_toml(), feature));
1378 }
1379 }
1380 }
1381
1382 features
1383 }
1384
1385 /// Given a list of all targets for a package, filters out only the targets
1386 /// that are automatically included when the user doesn't specify any targets.
filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>1387 fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
1388 match mode {
1389 CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
1390 CompileMode::Test => targets
1391 .iter()
1392 .filter(|t| t.tested() || t.is_example())
1393 .collect(),
1394 CompileMode::Build | CompileMode::Check { .. } => targets
1395 .iter()
1396 .filter(|t| t.is_bin() || t.is_lib())
1397 .collect(),
1398 CompileMode::Doc { .. } => {
1399 // `doc` does lib and bins (bin with same name as lib is skipped).
1400 targets
1401 .iter()
1402 .filter(|t| {
1403 t.documented()
1404 && (!t.is_bin()
1405 || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
1406 })
1407 .collect()
1408 }
1409 CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => {
1410 panic!("Invalid mode {:?}", mode)
1411 }
1412 }
1413 }
1414
1415 /// Returns a list of proposed targets based on command-line target selection flags.
list_rule_targets<'a>( packages: &[&'a Package], rule: &FilterRule, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1416 fn list_rule_targets<'a>(
1417 packages: &[&'a Package],
1418 rule: &FilterRule,
1419 target_desc: &'static str,
1420 is_expected_kind: fn(&Target) -> bool,
1421 mode: CompileMode,
1422 ) -> CargoResult<Vec<Proposal<'a>>> {
1423 let mut proposals = Vec::new();
1424 match rule {
1425 FilterRule::All => {
1426 proposals.extend(filter_targets(packages, is_expected_kind, false, mode))
1427 }
1428 FilterRule::Just(names) => {
1429 for name in names {
1430 proposals.extend(find_named_targets(
1431 packages,
1432 name,
1433 target_desc,
1434 is_expected_kind,
1435 mode,
1436 )?);
1437 }
1438 }
1439 }
1440 Ok(proposals)
1441 }
1442
1443 /// Finds the targets for a specifically named target.
find_named_targets<'a>( packages: &[&'a Package], target_name: &str, target_desc: &'static str, is_expected_kind: fn(&Target) -> bool, mode: CompileMode, ) -> CargoResult<Vec<Proposal<'a>>>1444 fn find_named_targets<'a>(
1445 packages: &[&'a Package],
1446 target_name: &str,
1447 target_desc: &'static str,
1448 is_expected_kind: fn(&Target) -> bool,
1449 mode: CompileMode,
1450 ) -> CargoResult<Vec<Proposal<'a>>> {
1451 let is_glob = is_glob_pattern(target_name);
1452 let proposals = if is_glob {
1453 let pattern = build_glob(target_name)?;
1454 let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name());
1455 filter_targets(packages, filter, true, mode)
1456 } else {
1457 let filter = |t: &Target| t.name() == target_name && is_expected_kind(t);
1458 filter_targets(packages, filter, true, mode)
1459 };
1460
1461 if proposals.is_empty() {
1462 let targets = packages.iter().flat_map(|pkg| {
1463 pkg.targets()
1464 .iter()
1465 .filter(|target| is_expected_kind(target))
1466 });
1467 let suggestion = closest_msg(target_name, targets, |t| t.name());
1468 anyhow::bail!(
1469 "no {} target {} `{}`{}",
1470 target_desc,
1471 if is_glob { "matches pattern" } else { "named" },
1472 target_name,
1473 suggestion
1474 );
1475 }
1476 Ok(proposals)
1477 }
1478
filter_targets<'a>( packages: &[&'a Package], predicate: impl Fn(&Target) -> bool, requires_features: bool, mode: CompileMode, ) -> Vec<Proposal<'a>>1479 fn filter_targets<'a>(
1480 packages: &[&'a Package],
1481 predicate: impl Fn(&Target) -> bool,
1482 requires_features: bool,
1483 mode: CompileMode,
1484 ) -> Vec<Proposal<'a>> {
1485 let mut proposals = Vec::new();
1486 for pkg in packages {
1487 for target in pkg.targets().iter().filter(|t| predicate(t)) {
1488 proposals.push(Proposal {
1489 pkg,
1490 target,
1491 requires_features,
1492 mode,
1493 });
1494 }
1495 }
1496 proposals
1497 }
1498
1499 /// This is used to rebuild the unit graph, sharing host dependencies if possible.
1500 ///
1501 /// This will translate any unit's `CompileKind::Target(host)` to
1502 /// `CompileKind::Host` if the kind is equal to `to_host`. This also handles
1503 /// generating the unit `dep_hash`, and merging shared units if possible.
1504 ///
1505 /// This is necessary because if normal dependencies used `CompileKind::Host`,
1506 /// there would be no way to distinguish those units from build-dependency
1507 /// units. This can cause a problem if a shared normal/build dependency needs
1508 /// to link to another dependency whose features differ based on whether or
1509 /// not it is a normal or build dependency. If both units used
1510 /// `CompileKind::Host`, then they would end up being identical, causing a
1511 /// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
1512 /// value or the other.
1513 ///
1514 /// The solution is to keep normal and build dependencies separate when
1515 /// building the unit graph, and then run this second pass which will try to
1516 /// combine shared dependencies safely. By adding a hash of the dependencies
1517 /// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
1518 /// without fear of an unwanted collision.
rebuild_unit_graph_shared( interner: &UnitInterner, unit_graph: UnitGraph, roots: &[Unit], scrape_units: &[Unit], to_host: CompileKind, ) -> (Vec<Unit>, Vec<Unit>, UnitGraph)1519 fn rebuild_unit_graph_shared(
1520 interner: &UnitInterner,
1521 unit_graph: UnitGraph,
1522 roots: &[Unit],
1523 scrape_units: &[Unit],
1524 to_host: CompileKind,
1525 ) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
1526 let mut result = UnitGraph::new();
1527 // Map of the old unit to the new unit, used to avoid recursing into units
1528 // that have already been computed to improve performance.
1529 let mut memo = HashMap::new();
1530 let new_roots = roots
1531 .iter()
1532 .map(|root| {
1533 traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host)
1534 })
1535 .collect();
1536 let new_scrape_units = scrape_units
1537 .iter()
1538 .map(|unit| memo.get(unit).unwrap().clone())
1539 .collect();
1540 (new_roots, new_scrape_units, result)
1541 }
1542
1543 /// Recursive function for rebuilding the graph.
1544 ///
1545 /// This walks `unit_graph`, starting at the given `unit`. It inserts the new
1546 /// units into `new_graph`, and returns a new updated version of the given
1547 /// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    to_host: CompileKind,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    // Rebuild all dependencies first (post-order), folding each rebuilt dep
    // unit into `dep_hash`. Note the hash therefore depends on the order the
    // deps appear in `unit_graph`.
    let mut dep_hash = StableHasher::new();
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit =
                traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host);
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    let new_dep_hash = dep_hash.finish();
    // Switch the kind back to `Host` when it matches `to_host`, allowing
    // such units to be shared with build-dependency units (see
    // `rebuild_unit_graph_shared` for the full rationale).
    let new_kind = if unit.kind == to_host {
        CompileKind::Host
    } else {
        unit.kind
    };
    // Intern the rebuilt unit; `new_dep_hash` disambiguates units that would
    // otherwise collide after the kind switch above.
    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        unit.profile,
        new_kind,
        unit.mode,
        unit.features.clone(),
        unit.is_std,
        new_dep_hash,
    );
    // First visit for this unit: the memo must not already contain it.
    assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}
1593
1594 /// Build `glob::Pattern` with informative context.
build_glob(pat: &str) -> CargoResult<glob::Pattern>1595 fn build_glob(pat: &str) -> CargoResult<glob::Pattern> {
1596 glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat))
1597 }
1598
1599 /// Emits "package not found" error.
1600 ///
1601 /// > This function should be used only in package selection processes such like
1602 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_package_not_found( ws: &Workspace<'_>, opt_names: BTreeSet<&str>, opt_out: bool, ) -> CargoResult<()>1603 fn emit_package_not_found(
1604 ws: &Workspace<'_>,
1605 opt_names: BTreeSet<&str>,
1606 opt_out: bool,
1607 ) -> CargoResult<()> {
1608 if !opt_names.is_empty() {
1609 anyhow::bail!(
1610 "{}package(s) `{}` not found in workspace `{}`",
1611 if opt_out { "excluded " } else { "" },
1612 opt_names.into_iter().collect::<Vec<_>>().join(", "),
1613 ws.root().display(),
1614 )
1615 }
1616 Ok(())
1617 }
1618
1619 /// Emits "glob pattern not found" error.
1620 ///
1621 /// > This function should be used only in package selection processes such like
1622 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
emit_pattern_not_found( ws: &Workspace<'_>, opt_patterns: Vec<(glob::Pattern, bool)>, opt_out: bool, ) -> CargoResult<()>1623 fn emit_pattern_not_found(
1624 ws: &Workspace<'_>,
1625 opt_patterns: Vec<(glob::Pattern, bool)>,
1626 opt_out: bool,
1627 ) -> CargoResult<()> {
1628 let not_matched = opt_patterns
1629 .iter()
1630 .filter(|(_, matched)| !*matched)
1631 .map(|(pat, _)| pat.as_str())
1632 .collect::<Vec<_>>();
1633 if !not_matched.is_empty() {
1634 anyhow::bail!(
1635 "{}package pattern(s) `{}` not found in workspace `{}`",
1636 if opt_out { "excluded " } else { "" },
1637 not_matched.join(", "),
1638 ws.root().display(),
1639 )
1640 }
1641 Ok(())
1642 }
1643
1644 /// Checks whether a package matches any of a list of glob patterns generated
1645 /// from `opt_patterns_and_names`.
1646 ///
1647 /// > This function should be used only in package selection processes such like
1648 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool1649 fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool {
1650 patterns.iter_mut().any(|(m, matched)| {
1651 let is_matched = m.matches(pkg.name().as_str());
1652 *matched |= is_matched;
1653 is_matched
1654 })
1655 }
1656
1657 /// Given a list opt-in or opt-out package selection strings, generates two
1658 /// collections that represent glob patterns and package names respectively.
1659 ///
1660 /// > This function should be used only in package selection processes such like
1661 /// `Packages::to_package_id_specs` and `Packages::get_packages`.
opt_patterns_and_names( opt: &[String], ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)>1662 fn opt_patterns_and_names(
1663 opt: &[String],
1664 ) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> {
1665 let mut opt_patterns = Vec::new();
1666 let mut opt_names = BTreeSet::new();
1667 for x in opt.iter() {
1668 if is_glob_pattern(x) {
1669 opt_patterns.push((build_glob(x)?, false));
1670 } else {
1671 opt_names.insert(String::as_str(x));
1672 }
1673 }
1674 Ok((opt_patterns, opt_names))
1675 }
1676
1677 /// Removes duplicate CompileMode::Doc units that would cause problems with
1678 /// filename collisions.
1679 ///
1680 /// Rustdoc only separates units by crate name in the file directory
1681 /// structure. If any two units with the same crate name exist, this would
1682 /// cause a filename collision, causing different rustdoc invocations to stomp
1683 /// on one another's files.
1684 ///
1685 /// Unfortunately this does not remove all duplicates, as some of them are
1686 /// either user error, or difficult to remove. Cases that I can think of:
1687 ///
1688 /// - Same target name in different packages. See the `collision_doc` test.
1689 /// - Different sources. See `collision_doc_sources` test.
1690 ///
1691 /// Ideally this would not be necessary.
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    // Removes every unit in `units` that satisfies `cb` (root units are never
    // removed), deleting it from `unit_graph` and recording it in
    // `removed_units`; returns the surviving units.
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            log::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        // A single unit for this crate name cannot collide.
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        // Group by (name, source, kind) so only units of the *same* package
        // compete on version.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    // Depth-first walk marking every unit reachable from the roots.
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    // Drop every unit that was not reached from a root.
    unit_graph.retain(|unit, _| visited.contains(unit));
}
1797