use std::collections::{BTreeSet, HashMap};
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;

use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
use crate::core::resolver::CliFeatures;
use crate::core::{Feature, Shell, Verbosity, Workspace};
use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId};
use crate::sources::PathSource;
use crate::util::errors::CargoResult;
use crate::util::toml::TomlManifest;
use crate::util::{self, restricted_names, Config, FileLock};
use crate::{drop_println, ops};
use anyhow::Context as _;
use cargo_util::paths;
use flate2::read::GzDecoder;
use flate2::{Compression, GzBuilder};
use log::debug;
use serde::Serialize;
use tar::{Archive, Builder, EntryType, Header, HeaderMode};

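/// Options controlling how a package is assembled into a `.crate` archive.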
pub struct PackageOpts<'cfg> {
    pub config: &'cfg Config,
    pub list: bool,
    pub check_metadata: bool,
    pub allow_dirty: bool,
    pub verify: bool,
    pub jobs: Option<u32>,
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
}

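/// Name of the generated file in the archive that records the VCS state.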
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

struct ArchiveFile {
    /// The relative path in the archive (not including the top-level package
    /// name directory).
    rel_path: PathBuf,
    /// String variant of `rel_path`, for convenience.
    rel_str: String,
    /// The contents to add to the archive.
    contents: FileContents,
}

enum FileContents {
    /// Absolute path to the file on disk to add to the archive.
    OnDisk(PathBuf),
    /// Generates a file.
    Generated(GeneratedFile),
}

enum GeneratedFile {
    /// Generates `Cargo.toml` by rewriting the original.
    Manifest,
    /// Generates `Cargo.lock` in some cases (like if there is a binary).
    Lockfile,
    /// Adds a `.cargo_vcs_info.json` file if in a (clean) git repo.
    VcsInfo(VcsInfo),
}

#[derive(Serialize)]
struct VcsInfo {
    git: GitVcsInfo,
    /// Path to the package within repo (empty string if root). / not \
    path_in_vcs: String,
}

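/// Git-specific VCS information recorded in the archive.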
#[derive(Serialize)]
struct GitVcsInfo {
    sha1: String,
}

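/// Packages a single package into a `.crate` tarball, returning a lock on the
/// finished file, or `None` if `opts.list` only asked for the file listing.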
pub fn package_one(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Option<FileLock>> {
    let config = ws.config();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
    src.update()?;

    if opts.check_metadata {
        check_metadata(pkg, config)?;
    }

    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        config.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Check (git) repository state, getting the current commit hash if not
    // dirty.
    let vcs_info = if !opts.allow_dirty {
        // This will error if a dirty repo is found.
        check_repo_state(pkg, &src_files, config)?
    } else {
        None
    };

    let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?;

    if opts.list {
        for ar_file in ar_files {
            drop_println!(config, "{}", ar_file.rel_str);
        }

        return Ok(None);
    }

    // Check that the package dependencies are safe to deploy.
    for dep in pkg.dependencies() {
        super::check_dep_has_version(dep, false)?;
    }

    let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
    let dir = ws.target_dir().join("package");
    let mut dst = {
        let tmp = format!(".{}", filename);
        dir.open_rw(&tmp, config, "package scratch space")?
    };

    // Package up and test a temporary tarball and only move it to the final
    // location if it actually passes all our tests. Any previously existing
    // tarball can be assumed as corrupt or invalid, so we just blow it away if
    // it exists.
    config
        .shell()
        .status("Packaging", pkg.package_id().to_string())?;
    dst.file().set_len(0)?;
    tar(ws, pkg, ar_files, dst.file(), &filename)
        .with_context(|| "failed to prepare local package for uploading")?;
    if opts.verify {
        dst.seek(SeekFrom::Start(0))?;
        run_verify(ws, pkg, &dst, opts).with_context(|| "failed to verify package tarball")?
    }

    dst.seek(SeekFrom::Start(0))?;
    let src_path = dst.path();
    let dst_path = dst.parent().join(&filename);
    fs::rename(&src_path, &dst_path)
        .with_context(|| "failed to move temporary tarball into final location")?;

    Ok(Some(dst))
}

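/// Packages each requested workspace member, returning locks on the generated
/// `.crate` files, or `None` if `opts.list` only asked for file listings.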
pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<Vec<FileLock>>> {
    let pkgs = ws.members_with_features(
        &opts.to_package.to_package_id_specs(ws)?,
        &opts.cli_features,
    )?;

    let mut dsts = Vec::with_capacity(pkgs.len());

    if ws.root().join("Cargo.lock").exists() {
        // Make sure the Cargo.lock is up-to-date and valid.
        let _ = ops::resolve_ws(ws)?;
        // If Cargo.lock does not exist, it will be generated by `build_lock`
        // below, and will be validated during the verification step.
    }

    for (pkg, cli_features) in pkgs {
        let result = package_one(
            ws,
            pkg,
            &PackageOpts {
                config: opts.config,
                list: opts.list,
                check_metadata: opts.check_metadata,
                allow_dirty: opts.allow_dirty,
                verify: opts.verify,
                jobs: opts.jobs,
                to_package: ops::Packages::Default,
                targets: opts.targets.clone(),
                cli_features,
            },
        )?;

        if !opts.list {
            dsts.push(result.unwrap());
        }
    }

    if opts.list {
        // We're just listing, so there's no file output.
        Ok(None)
    } else {
        Ok(Some(dsts))
    }
}


/// Builds list of files to archive.
fn build_ar_list(
    ws: &Workspace<'_>,
    pkg: &Package,
    src_files: Vec<PathBuf>,
    vcs_info: Option<VcsInfo>,
) -> CargoResult<Vec<ArchiveFile>> {
    let mut result = Vec::new();
    let root = pkg.root();
    for src_file in src_files {
        let rel_path = src_file.strip_prefix(&root)?.to_path_buf();
        check_filename(&rel_path, &mut ws.config().shell())?;
        let rel_str = rel_path
            .to_str()
            .ok_or_else(|| {
                anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
            })?
            .to_string();
        match rel_str.as_ref() {
            "Cargo.toml" => {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from("Cargo.toml.orig"),
                    rel_str: "Cargo.toml.orig".to_string(),
                    contents: FileContents::OnDisk(src_file),
                });
                result.push(ArchiveFile {
                    rel_path,
                    rel_str,
                    contents: FileContents::Generated(GeneratedFile::Manifest),
                });
            }
            "Cargo.lock" => continue,
            VCS_INFO_FILE => anyhow::bail!(
                "invalid inclusion of reserved file name \
                 {} in package source",
                VCS_INFO_FILE
            ),
            _ => {
                result.push(ArchiveFile {
                    rel_path,
                    rel_str,
                    contents: FileContents::OnDisk(src_file),
                });
            }
        }
    }
    if pkg.include_lockfile() {
        result.push(ArchiveFile {
            rel_path: PathBuf::from("Cargo.lock"),
            rel_str: "Cargo.lock".to_string(),
            contents: FileContents::Generated(GeneratedFile::Lockfile),
        });
    }
    if let Some(vcs_info) = vcs_info {
        result.push(ArchiveFile {
            rel_path: PathBuf::from(VCS_INFO_FILE),
            rel_str: VCS_INFO_FILE.to_string(),
            contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
        });
    }
    if let Some(license_file) = &pkg.manifest().metadata().license_file {
        let license_path = Path::new(license_file);
        let abs_license_path = paths::normalize_path(&pkg.root().join(license_path));
        if abs_license_path.exists() {
            match abs_license_path.strip_prefix(&pkg.root()) {
                Ok(rel_license_path) => {
                    if !result.iter().any(|ar| ar.rel_path == rel_license_path) {
                        result.push(ArchiveFile {
                            rel_path: rel_license_path.to_path_buf(),
                            rel_str: rel_license_path
                                .to_str()
                                .expect("everything was utf8")
                                .to_string(),
                            contents: FileContents::OnDisk(abs_license_path),
                        });
                    }
                }
                Err(_) => {
                    // The license exists somewhere outside of the package.
                    let license_name = license_path.file_name().unwrap();
                    if result
                        .iter()
                        .any(|ar| ar.rel_path.file_name().unwrap() == license_name)
                    {
                        ws.config().shell().warn(&format!(
                            "license-file `{}` appears to be a path outside of the package, \
                            but there is already a file named `{}` in the root of the package. \
                            The archived crate will contain the copy in the root of the package. \
                            Update the license-file to point to the path relative \
                            to the root of the package to remove this warning.",
                            license_file,
                            license_name.to_str().unwrap()
                        ))?;
                    } else {
                        result.push(ArchiveFile {
                            rel_path: PathBuf::from(license_name),
                            rel_str: license_name.to_str().unwrap().to_string(),
                            contents: FileContents::OnDisk(abs_license_path),
                        });
                    }
                }
            }
        } else {
            let rel_msg = if license_path.is_absolute() {
                "".to_string()
            } else {
                format!(" (relative to `{}`)", pkg.root().display())
            };
            ws.config().shell().warn(&format!(
                "license-file `{}` does not appear to exist{}.\n\
                Please update the license-file setting in the manifest at `{}`\n\
                This may become a hard error in the future.",
                license_path.display(),
                rel_msg,
                pkg.manifest_path().display()
            ))?;
        }
    }
    result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));

    Ok(result)
}


/// Construct `Cargo.lock` for the package to be published.
fn build_lock(ws: &Workspace<'_>, orig_pkg: &Package) -> CargoResult<String> {
    let config = ws.config();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    // Convert Package -> TomlManifest -> Manifest -> Package
    let toml_manifest = Rc::new(
        orig_pkg
            .manifest()
            .original()
            .prepare_for_publish(ws, orig_pkg.root())?,
    );
    let package_root = orig_pkg.root();
    let source_id = orig_pkg.package_id().source_id();
    let (manifest, _nested_paths) =
        TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?;
    let new_pkg = Package::new(manifest, orig_pkg.manifest_path());

    // Regenerate Cargo.lock using the old one as a guide.
    let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
    let (pkg_set, mut new_resolve) = ops::resolve_ws(&tmp_ws)?;

    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(config, &pkg_set, &new_resolve)?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}


/// Checks that the package has some piece of metadata that a human can
/// use to tell what the package is about.
fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        let mut things = missing[..missing.len() - 1].join(", ");
        // `things` will be empty if and only if its length is 1 (i.e., the only case
        // to have no `or`).
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        config.shell().warn(&format!(
            "manifest has no {things}.\n\
             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}


/// Checks if the package source is in a *git* DVCS repository. If *git*, and
/// the source is *dirty* (e.g., has uncommitted changes) then `bail!` with an
/// informative message. Otherwise return the sha1 hash of the current *HEAD*
/// commit, or `None` if no repo is found.
fn check_repo_state(
    p: &Package,
    src_files: &[PathBuf],
    config: &Config,
) -> CargoResult<Option<VcsInfo>> {
    if let Ok(repo) = git2::Repository::discover(p.root()) {
        if let Some(workdir) = repo.workdir() {
            debug!("found a git repo at {:?}", workdir);
            let path = p.manifest_path();
            let path = path.strip_prefix(workdir).unwrap_or(path);
            if let Ok(status) = repo.status_file(path) {
                if (status & git2::Status::IGNORED).is_empty() {
                    debug!(
                        "found (git) Cargo.toml at {:?} in workdir {:?}",
                        path, workdir
                    );
                    let path_in_vcs = path
                        .parent()
                        .and_then(|p| p.to_str())
                        .unwrap_or("")
                        .replace("\\", "/");
                    return Ok(Some(VcsInfo {
                        git: git(p, src_files, &repo)?,
                        path_in_vcs,
                    }));
                }
            }
            config.shell().verbose(|shell| {
                shell.warn(format!(
                    "No (git) Cargo.toml found at `{}` in workdir `{}`",
                    path.display(),
                    workdir.display()
                ))
            })?;
        }
    } else {
        config.shell().verbose(|shell| {
            shell.warn(format!("No (git) VCS found for `{}`", p.root().display()))
        })?;
    }

    // No VCS with a checked in `Cargo.toml` found, so we don't know if the
    // directory is dirty or not, thus we have to assume that it's clean.
    return Ok(None);

    fn git(p: &Package, src_files: &[PathBuf], repo: &git2::Repository) -> CargoResult<GitVcsInfo> {
        // This is a collection of any dirty or untracked files. This covers:
        // - new/modified/deleted/renamed/type change (index or worktree)
        // - untracked files (which are "new" worktree files)
        // - ignored (in case the user has an `include` directive that
        //   conflicts with .gitignore).
        let mut dirty_files = Vec::new();
        collect_statuses(repo, &mut dirty_files)?;
        // Include each submodule so that the error message can provide
        // specifically *which* files in a submodule are modified.
        status_submodules(repo, &mut dirty_files)?;

        // Find the intersection of dirty in git, and the src_files that would
        // be packaged. This is a lazy n^2 check, but seems fine with
        // thousands of files.
        let dirty_src_files: Vec<String> = src_files
            .iter()
            .filter(|src_file| dirty_files.iter().any(|path| src_file.starts_with(path)))
            .map(|path| {
                path.strip_prefix(p.root())
                    .unwrap_or(path)
                    .display()
                    .to_string()
            })
            .collect();
        if dirty_src_files.is_empty() {
            let rev_obj = repo.revparse_single("HEAD")?;
            Ok(GitVcsInfo {
                sha1: rev_obj.id().to_string(),
            })
        } else {
            anyhow::bail!(
                "{} files in the working directory contain changes that were \
                 not yet committed into git:\n\n{}\n\n\
                 to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag",
                dirty_src_files.len(),
                dirty_src_files.join("\n")
            )
        }
    }

    // Helper to collect dirty statuses for a single repo.
    fn collect_statuses(
        repo: &git2::Repository,
        dirty_files: &mut Vec<PathBuf>,
    ) -> CargoResult<()> {
        let mut status_opts = git2::StatusOptions::new();
        // Exclude submodules, as they are being handled manually by recursing
        // into each one so that details about specific files can be
        // retrieved.
        status_opts
            .exclude_submodules(true)
            .include_ignored(true)
            .include_untracked(true);
        let repo_statuses = repo.statuses(Some(&mut status_opts)).with_context(|| {
            format!(
                "failed to retrieve git status from repo {}",
                repo.path().display()
            )
        })?;
        let workdir = repo.workdir().unwrap();
        let this_dirty = repo_statuses.iter().filter_map(|entry| {
            let path = entry.path().expect("valid utf-8 path");
            if path.ends_with("Cargo.lock") && entry.status() == git2::Status::IGNORED {
                // It is OK to include Cargo.lock even if it is ignored.
                return None;
            }
            // Use an absolute path, so that comparing paths is easier
            // (particularly with submodules).
            Some(workdir.join(path))
        });
        dirty_files.extend(this_dirty);
        Ok(())
    }

    // Helper to collect dirty statuses while recursing into submodules.
    fn status_submodules(
        repo: &git2::Repository,
        dirty_files: &mut Vec<PathBuf>,
    ) -> CargoResult<()> {
        for submodule in repo.submodules()? {
            // Ignore submodules that don't open, they are probably not initialized.
            // If its files are required, then the verification step should fail.
            if let Ok(sub_repo) = submodule.open() {
                status_submodules(&sub_repo, dirty_files)?;
                collect_statuses(&sub_repo, dirty_files)?;
            }
        }
        Ok(())
    }
}

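/// Compresses and archives the given files into the `.crate` tarball written
/// to `dst`, placing everything under a top-level `<name>-<version>` directory.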
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<()> {
    // Prepare the encoder and its header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    // Put all package files into a compressed archive.
    let mut ar = Builder::new(encoder);
    let config = ws.config();

    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        config
            .shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest => pkg.to_registry_toml(ws)?,
                    GeneratedFile::Lockfile => build_lock(ws, pkg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Use something nonzero to avoid rust-lang/cargo#9512.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(())
}

/// Generates warnings when the resolve in the packaged `Cargo.lock` differs
/// from the original workspace's `Cargo.lock`.
fn compare_resolve(
    config: &Config,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if config.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    // Removed entries are ignored; this is used to quickly find hints for why
    // an entry changed.
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            // Skip the package that is being created, since its SourceId
            // (directory) changes.
            continue;
        }
        // Check for candidates where the source has changed (such as [patch]
        // or a dependency with multiple sources like path/version).
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // This can happen if the original was out of date.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                // This can happen for multi-sourced dependencies like
                // `{path="...", version="..."}` or `[patch]` replacement.
                // `[replace]` is not captured in Cargo.lock.
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // I don't know if there is a way to actually trigger this,
                // but handle it just in case.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        config.shell().note(msg)?;
    }
    Ok(())
}

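/// Warns if any package in the resolve has been yanked from its registry.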
fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> {
    // Checking the yanked status involves taking a look at the registry and
    // maybe updating files, so be sure to lock it here.
    let _lock = config.acquire_package_cache_lock()?;

    let mut sources = pkg_set.sources_mut();
    for pkg_id in resolve.iter() {
        if let Some(source) = sources.get_mut(pkg_id.source_id()) {
            if source.is_yanked(pkg_id)? {
                config.shell().warn(format!(
                    "package `{}` in Cargo.lock is yanked in registry `{}`, \
                     consider updating to a version that is not yanked",
                    pkg_id,
                    pkg_id.source_id().display_registry_name()
                ))?;
            }
        }
    }
    Ok(())
}

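/// Verifies the generated tarball by unpacking it and building it from scratch
/// in an ephemeral workspace, checking that the build does not modify the sources.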
fn run_verify(
    ws: &Workspace<'_>,
    pkg: &Package,
    tar: &FileLock,
    opts: &PackageOpts<'_>,
) -> CargoResult<()> {
    let config = ws.config();

    config.shell().status("Verifying", pkg)?;

    let f = GzDecoder::new(tar.file());
    let dst = tar
        .parent()
        .join(&format!("{}-{}", pkg.name(), pkg.version()));
    if dst.exists() {
        paths::remove_dir_all(&dst)?;
    }
    let mut archive = Archive::new(f);
    // We don't need to set the Modified Time, as it's not relevant to verification
    // and it errors on filesystems that don't support setting a modified timestamp.
    archive.set_preserve_mtime(false);
    archive.unpack(dst.parent().unwrap())?;

    // Manufacture an ephemeral workspace to ensure that even if the top-level
    // package has a workspace we can still build our new crate.
    let id = SourceId::for_path(&dst)?;
    let mut src = PathSource::new(&dst, id, ws.config());
    let new_pkg = src.root_package()?;
    let pkg_fingerprint = hash_all(&dst)?;
    let ws = Workspace::ephemeral(new_pkg, config, None, true)?;

    let rustc_args = if pkg
        .manifest()
        .unstable_features()
        .require(Feature::public_dependency())
        .is_ok()
    {
        // FIXME: Turn this on at some point in the future
        //Some(vec!["-D exported_private_dependencies".to_string()])
        Some(vec![])
    } else {
        None
    };

    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    ops::compile_with_exec(
        &ws,
        &ops::CompileOptions {
            build_config: BuildConfig::new(config, opts.jobs, &opts.targets, CompileMode::Build)?,
            cli_features: opts.cli_features.clone(),
            spec: ops::Packages::Packages(Vec::new()),
            filter: ops::CompileFilter::Default {
                required_features_filterable: true,
            },
            target_rustdoc_args: None,
            target_rustc_args: rustc_args,
            local_rustdoc_args: None,
            rustdoc_document_private_items: false,
            honor_rust_version: true,
        },
        &exec,
    )?;

    // Check that `build.rs` didn't modify any files in the `src` directory.
    let ws_fingerprint = hash_all(&dst)?;
    if pkg_fingerprint != ws_fingerprint {
        let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint);
        anyhow::bail!(
            "Source directory was modified by build.rs during cargo publish. \
             Build scripts should not modify anything outside of OUT_DIR.\n\
             {}\n\n\
             To proceed despite this, pass the `--no-verify` flag.",
            changes
        )
    }

    Ok(())
}

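/// Hashes every entry under `path` (skipping the top-level `target` directory)
/// so the source tree can be compared before and after the verification build.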
fn hash_all(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
    fn wrap(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
        let mut result = HashMap::new();
        let walker = walkdir::WalkDir::new(path).into_iter();
        for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) {
            let entry = entry?;
            let file_type = entry.file_type();
            if file_type.is_file() {
                let file = File::open(entry.path())?;
                let hash = util::hex::hash_u64_file(&file)?;
                result.insert(entry.path().to_path_buf(), hash);
            } else if file_type.is_symlink() {
                let hash = util::hex::hash_u64(&fs::read_link(entry.path())?);
                result.insert(entry.path().to_path_buf(), hash);
            } else if file_type.is_dir() {
                let hash = util::hex::hash_u64(&());
                result.insert(entry.path().to_path_buf(), hash);
            }
        }
        Ok(result)
    }
    let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?;
    Ok(result)
}

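/// Summarizes which files changed, were added, or were removed between two
/// `hash_all` snapshots.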
fn report_hash_difference(orig: &HashMap<PathBuf, u64>, after: &HashMap<PathBuf, u64>) -> String {
    let mut changed = Vec::new();
    let mut removed = Vec::new();
    for (key, value) in orig {
        match after.get(key) {
            Some(after_value) => {
                if value != after_value {
                    changed.push(key.to_string_lossy());
                }
            }
            None => removed.push(key.to_string_lossy()),
        }
    }
    let mut added: Vec<_> = after
        .keys()
        .filter(|key| !orig.contains_key(*key))
        .map(|key| key.to_string_lossy())
        .collect();
    let mut result = Vec::new();
    if !changed.is_empty() {
        changed.sort_unstable();
        result.push(format!("Changed: {}", changed.join("\n\t")));
    }
    if !added.is_empty() {
        added.sort_unstable();
        result.push(format!("Added: {}", added.join("\n\t")));
    }
    if !removed.is_empty() {
        removed.sort_unstable();
        result.push(format!("Removed: {}", removed.join("\n\t")));
    }
    assert!(!result.is_empty(), "unexpected empty change detection");
    result.join("\n")
}

// It can often be the case that files of a particular name on one platform
// can't actually be created on another platform. For example, files with colons
// in the name are allowed on Unix but not on Windows.
//
// To help out in situations like this, issue a warning about weird filenames
// when packaging, as a "heads up" that something may not work on other platforms.
fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
    let name = match file.file_name() {
        Some(name) => name,
        None => return Ok(()),
    };
    let name = match name.to_str() {
        Some(name) => name,
        None => anyhow::bail!(
            "path does not have a unicode filename which may not unpack \
             on all platforms: {}",
            file.display()
        ),
    };
    let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
    if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
        anyhow::bail!(
            "cannot package a filename with a special character `{}`: {}",
            c,
            file.display()
        )
    }
    if restricted_names::is_windows_reserved_path(file) {
        shell.warn(format!(
            "file {} is a reserved Windows filename, \
                it will not work on Windows platforms",
            file.display()
        ))?;
    }
    Ok(())
}