1 // Copyright 2016 Mozilla Foundation
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 use crate::cache::{Cache, CacheWrite, DecompressionFailure, Storage};
16 use crate::compiler::c::{CCompiler, CCompilerKind};
17 use crate::compiler::clang::Clang;
18 use crate::compiler::diab::Diab;
19 use crate::compiler::gcc::GCC;
20 use crate::compiler::msvc;
21 use crate::compiler::msvc::MSVC;
22 use crate::compiler::nvcc::NVCC;
23 use crate::compiler::rust::{Rust, RustupProxy};
24 use crate::dist;
25 #[cfg(feature = "dist-client")]
26 use crate::dist::pkg;
27 use crate::lru_disk_cache;
28 use crate::mock_command::{exit_status, CommandChild, CommandCreatorSync, RunCommand};
29 use crate::util::{fmt_duration_as_secs, ref_env, run_input_output, SpawnExt};
30 use filetime::FileTime;
31 use futures::Future;
32 use futures_03::executor::ThreadPool;
33 use std::borrow::Cow;
34 use std::collections::HashMap;
35 use std::ffi::OsString;
36 use std::fmt;
37 #[cfg(feature = "dist-client")]
38 use std::fs;
39 use std::fs::File;
40 use std::io::prelude::*;
41 use std::path::{Path, PathBuf};
42 use std::process::{self, Stdio};
43 use std::str;
44 use std::sync::Arc;
45 use std::time::{Duration, Instant};
46 use tempfile::TempDir;
47 use tokio_timer::Timeout;
48
49 use crate::errors::*;
50
/// Can dylibs (shared libraries or proc macros) be distributed on this platform?
///
/// Only x86_64 Linux dist clients can ship dylibs; every other target gets
/// the `false` variant below. Exactly one of these two consts is compiled in.
#[cfg(all(feature = "dist-client", target_os = "linux", target_arch = "x86_64"))]
pub const CAN_DIST_DYLIBS: bool = true;
#[cfg(all(
    feature = "dist-client",
    not(all(target_os = "linux", target_arch = "x86_64"))
))]
pub const CAN_DIST_DYLIBS: bool = false;
59
/// A fully-specified local compiler invocation: the binary to run, its
/// arguments, the environment to run it with, and the working directory.
#[derive(Clone, Debug)]
pub struct CompileCommand {
    /// Path to the compiler binary.
    pub executable: PathBuf,
    /// Command-line arguments passed to the compiler.
    pub arguments: Vec<OsString>,
    /// Environment variables the process is launched with (the environment
    /// is cleared first; see `CompileCommand::execute`).
    pub env_vars: Vec<(OsString, OsString)>,
    /// Working directory for the compiler process.
    pub cwd: PathBuf,
}
67
68 impl CompileCommand {
execute<T>(self, creator: &T) -> SFuture<process::Output> where T: CommandCreatorSync,69 pub fn execute<T>(self, creator: &T) -> SFuture<process::Output>
70 where
71 T: CommandCreatorSync,
72 {
73 let mut cmd = creator.clone().new_command_sync(self.executable);
74 cmd.args(&self.arguments)
75 .env_clear()
76 .envs(self.env_vars)
77 .current_dir(self.cwd);
78 Box::new(run_input_output(cmd, None))
79 }
80 }
81
/// Supported compilers.
#[derive(Debug, PartialEq, Clone)]
pub enum CompilerKind {
    /// A C compiler, further discriminated by `CCompilerKind`
    /// (gcc/clang/msvc/nvcc/diab).
    C(CCompilerKind),
    /// A Rust compiler (rustc, possibly behind a rustup proxy).
    Rust,
}
90
91 impl CompilerKind {
lang_kind(&self) -> String92 pub fn lang_kind(&self) -> String {
93 match self {
94 CompilerKind::C(CCompilerKind::NVCC) => "CUDA",
95 CompilerKind::C(_) => "C/C++",
96 CompilerKind::Rust => "Rust",
97 }
98 .to_string()
99 }
100 }
101
/// The three packagers needed for one distributed compile: the inputs
/// packager (source + headers), the toolchain packager (the compiler
/// itself), and a rewriter to fix up the outputs once they come back.
#[cfg(feature = "dist-client")]
pub type DistPackagers = (
    Box<dyn pkg::InputsPackager>,
    Box<dyn pkg::ToolchainPackager>,
    Box<dyn OutputsRewriter>,
);
108
/// Internal result of the cache-lookup phase of `get_cached_or_compile`.
enum CacheLookupResult {
    /// A usable cache entry was found and extracted.
    Success(CompileResult, process::Output),
    /// No usable entry; carries why so the subsequent compile can record it.
    Miss(MissType),
}
113
/// An interface to a compiler for argument parsing.
pub trait Compiler<T>: Send + 'static
where
    T: CommandCreatorSync,
{
    /// Return the kind of compiler.
    fn kind(&self) -> CompilerKind;
    /// Retrieve a packager that can bundle this compiler's toolchain for
    /// shipping to a distributed build server.
    #[cfg(feature = "dist-client")]
    fn get_toolchain_packager(&self) -> Box<dyn pkg::ToolchainPackager>;
    /// Determine whether `arguments` are supported by this compiler.
    ///
    /// On success, returns a `CompilerHasher` that captures the parsed
    /// arguments and can produce a cache key for this compilation.
    fn parse_arguments(
        &self,
        arguments: &[OsString],
        cwd: &Path,
    ) -> CompilerArguments<Box<dyn CompilerHasher<T> + 'static>>;
    /// Create a boxed clone of `self`. Needed because object-safe traits
    /// cannot require `Clone`; see the `Clone` impl for `Box<dyn Compiler<T>>`.
    fn box_clone(&self) -> Box<dyn Compiler<T>>;
}
132
// `Clone` for the trait object, delegating to the object-safe `box_clone`.
impl<T: CommandCreatorSync> Clone for Box<dyn Compiler<T>> {
    fn clone(&self) -> Box<dyn Compiler<T>> {
        self.box_clone()
    }
}
138
/// A proxy (e.g. rustup shims) that resolves a wrapper executable to the
/// real compiler behind it.
pub trait CompilerProxy<T>: Send + 'static
where
    T: CommandCreatorSync + Sized,
{
    /// Maps the executable to be used in `cwd` to the true, proxied compiler.
    ///
    /// Returns the absolute path to the true compiler and the modification
    /// timestamp of the true compiler. Iff the resolution fails,
    /// the returned future resolves to an error with more information.
    fn resolve_proxied_executable(
        &self,
        creator: T,
        cwd: PathBuf,
        env_vars: &[(OsString, OsString)],
    ) -> SFuture<(PathBuf, FileTime)>;

    /// Create a clone of `Self` and puts it in a `Box`
    fn box_clone(&self) -> Box<dyn CompilerProxy<T>>;
}
158
/// An interface to a compiler for hash key generation, the result of
/// argument parsing.
pub trait CompilerHasher<T>: fmt::Debug + Send + 'static
where
    T: CommandCreatorSync,
{
    /// Given information about a compiler command, generate a hash key
    /// that can be used for cache lookups, as well as any additional
    /// information that can be reused for compilation if necessary.
    fn generate_hash_key(
        self: Box<Self>,
        creator: &T,
        cwd: PathBuf,
        env_vars: Vec<(OsString, OsString)>,
        may_dist: bool,
        pool: &ThreadPool,
        rewrite_includes_only: bool,
    ) -> SFuture<HashResult>;

    /// Return the state of any `--color` option passed to the compiler.
    fn color_mode(&self) -> ColorMode;

    /// Look up a cached compile result in `storage`. If not found, run the
    /// compile and store the result.
    ///
    /// Pipeline: hash the inputs -> look the key up in `storage` (bounded by
    /// a 60s timeout) -> on a usable hit, extract the cached objects; on any
    /// kind of miss, compile (distributed or local) and, when the result is
    /// cacheable, store it asynchronously.
    #[allow(clippy::too_many_arguments)]
    fn get_cached_or_compile(
        self: Box<Self>,
        dist_client: Result<Option<Arc<dyn dist::Client>>>,
        creator: T,
        storage: Arc<dyn Storage>,
        arguments: Vec<OsString>,
        cwd: PathBuf,
        env_vars: Vec<(OsString, OsString)>,
        cache_control: CacheControl,
        pool: ThreadPool,
    ) -> SFuture<(CompileResult, process::Output)> {
        let out_pretty = self.output_pretty().into_owned();
        debug!("[{}]: get_cached_or_compile: {:?}", out_pretty, arguments);
        let start = Instant::now();
        let may_dist = matches!(dist_client, Ok(Some(_)));
        let rewrite_includes_only = match dist_client {
            Ok(Some(ref client)) => client.rewrite_includes_only(),
            _ => false,
        };
        // Stage 1: compute the cache key from the preprocessed inputs.
        let result = self.generate_hash_key(
            &creator,
            cwd.clone(),
            env_vars,
            may_dist,
            &pool,
            rewrite_includes_only,
        );
        Box::new(result.then(move |res| -> SFuture<_> {
            debug!(
                "[{}]: generate_hash_key took {}",
                out_pretty,
                fmt_duration_as_secs(&start.elapsed())
            );
            let (key, compilation, weak_toolchain_key) = match res {
                Err(e) => {
                    // A `ProcessError` means the compiler itself failed
                    // (e.g. during preprocessing); surface its output to the
                    // user rather than treating it as an internal error.
                    return match e.downcast::<ProcessError>() {
                        Ok(ProcessError(output)) => f_ok((CompileResult::Error, output)),
                        Err(e) => f_err(e),
                    };
                }
                Ok(HashResult {
                    key,
                    compilation,
                    weak_toolchain_key,
                }) => (key, compilation, weak_toolchain_key),
            };
            trace!("[{}]: Hash key: {}", out_pretty, key);
            // If `ForceRecache` is enabled, we won't check the cache.
            let start = Instant::now();
            let cache_status = if cache_control == CacheControl::ForceRecache {
                f_ok(Cache::Recache)
            } else {
                storage.get(&key)
            };

            // Set a maximum time limit for the cache to respond before we forge
            // ahead ourselves with a compilation.
            let timeout = Duration::new(60, 0);
            let cache_status = Timeout::new(cache_status, timeout);

            // Check the result of the cache lookup.
            Box::new(cache_status.then(move |result| {
                let out_pretty2 = out_pretty.clone();
                let duration = start.elapsed();
                // Map of output name -> absolute local path, used both to
                // extract a cache hit and to package a fresh result.
                let outputs = compilation
                    .outputs()
                    .map(|(key, path)| (key.to_string(), cwd.join(path)))
                    .collect::<HashMap<_, _>>();

                // Stage 2: classify the lookup into Success or Miss(kind).
                let miss_type = Box::new(match result {
                    Ok(Cache::Hit(mut entry)) => {
                        debug!(
                            "[{}]: Cache hit in {}",
                            out_pretty,
                            fmt_duration_as_secs(&duration)
                        );
                        let stdout = entry.get_stdout();
                        let stderr = entry.get_stderr();
                        let write = entry.extract_objects(outputs.clone(), &pool);
                        let output = process::Output {
                            status: exit_status(0),
                            stdout,
                            stderr,
                        };
                        let hit = CompileResult::CacheHit(duration);
                        Box::new(write.then(move |result| match result {
                            Ok(()) => f_ok(CacheLookupResult::Success(hit, output)),
                            Err(e) => {
                                // A corrupt entry is treated as a read-error
                                // miss so we recompile instead of failing.
                                if e.downcast_ref::<DecompressionFailure>().is_some() {
                                    debug!("[{}]: Failed to decompress object", out_pretty);
                                    f_ok(CacheLookupResult::Miss(MissType::CacheReadError))
                                } else {
                                    f_err(e)
                                }
                            }
                        }))
                    }
                    Ok(Cache::Miss) => {
                        debug!(
                            "[{}]: Cache miss in {}",
                            out_pretty,
                            fmt_duration_as_secs(&duration)
                        );
                        f_ok(CacheLookupResult::Miss(MissType::Normal))
                    }
                    Ok(Cache::Recache) => {
                        debug!(
                            "[{}]: Cache recache in {}",
                            out_pretty,
                            fmt_duration_as_secs(&duration)
                        );
                        f_ok(CacheLookupResult::Miss(MissType::ForcedRecache))
                    }
                    Err(err) => {
                        if err.is_elapsed() {
                            // The 60s timeout fired before storage answered.
                            debug!(
                                "[{}]: Cache timed out {}",
                                out_pretty,
                                fmt_duration_as_secs(&duration)
                            );
                            f_ok(CacheLookupResult::Miss(MissType::TimedOut))
                        } else {
                            error!("[{}]: Cache read error: {}", out_pretty, err);
                            if err.is_inner() {
                                let err = err.into_inner().unwrap();
                                for e in err.chain().skip(1) {
                                    error!("[{}] \t{}", out_pretty, e);
                                }
                            }
                            f_ok(CacheLookupResult::Miss(MissType::CacheReadError))
                        }
                    }
                });

                // Stage 3: on a miss, compile and (maybe) populate the cache.
                Box::new(miss_type.and_then(move |result| {
                    match result {
                        CacheLookupResult::Success(compile_result, output) => {
                            f_ok((compile_result, output))
                        }
                        CacheLookupResult::Miss(miss_type) => {
                            // Cache miss, so compile it.
                            let start = Instant::now();
                            let compile = dist_or_local_compile(
                                dist_client,
                                creator,
                                cwd,
                                compilation,
                                weak_toolchain_key,
                                out_pretty2.clone(),
                            );

                            Box::new(compile.and_then(
                                move |(cacheable, dist_type, compiler_result)| {
                                    let duration = start.elapsed();
                                    if !compiler_result.status.success() {
                                        debug!(
                                            "[{}]: Compiled but failed, not storing in cache",
                                            out_pretty2
                                        );
                                        return f_ok((CompileResult::CompileFailed, compiler_result))
                                            as SFuture<_>;
                                    }
                                    if cacheable != Cacheable::Yes {
                                        // Not cacheable
                                        debug!("[{}]: Compiled but not cacheable", out_pretty2);
                                        return f_ok((
                                            CompileResult::NotCacheable,
                                            compiler_result,
                                        ));
                                    }
                                    debug!(
                                        "[{}]: Compiled in {}, storing in cache",
                                        out_pretty2,
                                        fmt_duration_as_secs(&duration)
                                    );
                                    let write = CacheWrite::from_objects(outputs, &pool);
                                    let write = write.fcontext("failed to zip up compiler outputs");
                                    let o = out_pretty2.clone();
                                    Box::new(
                                        write
                                            .and_then(move |mut entry| {
                                                entry.put_stdout(&compiler_result.stdout)?;
                                                entry.put_stderr(&compiler_result.stderr)?;

                                                // Try to finish storing the newly-written cache
                                                // entry. We'll get the result back elsewhere.
                                                let future =
                                                    storage.put(&key, entry).then(move |res| {
                                                        match res {
                                                            Ok(_) => debug!(
                                                                "[{}]: Stored in cache successfully!",
                                                                out_pretty2
                                                            ),
                                                            Err(ref e) => debug!(
                                                                "[{}]: Cache write error: {:?}",
                                                                out_pretty2, e
                                                            ),
                                                        }
                                                        res.map(|duration| CacheWriteInfo {
                                                            object_file_pretty: out_pretty2,
                                                            duration,
                                                        })
                                                    });
                                                let future = Box::new(future);
                                                // The write future is handed back inside
                                                // `CacheMiss`; the caller drives it.
                                                Ok((
                                                    CompileResult::CacheMiss(
                                                        miss_type, dist_type, duration, future,
                                                    ),
                                                    compiler_result,
                                                ))
                                            })
                                            .fwith_context(move || {
                                                format!("failed to store `{}` to cache", o)
                                            }),
                                    )
                                },
                            ))
                        }
                    }
                }))
            }))
        }))
    }

    /// A descriptive string about the file that we're going to be producing.
    ///
    /// This is primarily intended for debug logging and such, not for actual
    /// artifact generation.
    fn output_pretty(&self) -> Cow<'_, str>;

    /// Create a boxed clone of `self`; see the `Clone` impl for
    /// `Box<dyn CompilerHasher<T>>`.
    fn box_clone(&self) -> Box<dyn CompilerHasher<T>>;
}
416
417 #[cfg(not(feature = "dist-client"))]
dist_or_local_compile<T>( _dist_client: Result<Option<Arc<dyn dist::Client>>>, creator: T, _cwd: PathBuf, compilation: Box<dyn Compilation>, _weak_toolchain_key: String, out_pretty: String, ) -> SFuture<(Cacheable, DistType, process::Output)> where T: CommandCreatorSync,418 fn dist_or_local_compile<T>(
419 _dist_client: Result<Option<Arc<dyn dist::Client>>>,
420 creator: T,
421 _cwd: PathBuf,
422 compilation: Box<dyn Compilation>,
423 _weak_toolchain_key: String,
424 out_pretty: String,
425 ) -> SFuture<(Cacheable, DistType, process::Output)>
426 where
427 T: CommandCreatorSync,
428 {
429 let mut path_transformer = dist::PathTransformer::default();
430 let compile_commands = compilation
431 .generate_compile_commands(&mut path_transformer, true)
432 .context("Failed to generate compile commands");
433 let (compile_cmd, _dist_compile_cmd, cacheable) = match compile_commands {
434 Ok(cmds) => cmds,
435 Err(e) => return f_err(e),
436 };
437
438 debug!("[{}]: Compiling locally", out_pretty);
439 Box::new(
440 compile_cmd
441 .execute(&creator)
442 .map(move |o| (cacheable, DistType::NoDist, o)),
443 )
444 }
445
/// Attempt a distributed compile via `dist_client`, falling back to a local
/// compile if distribution is disabled or (for most error kinds) fails.
///
/// On success resolves to `(cacheable, DistType, output)`; the `DistType`
/// records whether the result came from a dist server, a local fallback
/// after a dist error, or plain local compilation.
#[cfg(feature = "dist-client")]
fn dist_or_local_compile<T>(
    dist_client: Result<Option<Arc<dyn dist::Client>>>,
    creator: T,
    cwd: PathBuf,
    compilation: Box<dyn Compilation>,
    weak_toolchain_key: String,
    out_pretty: String,
) -> SFuture<(Cacheable, DistType, process::Output)>
where
    T: CommandCreatorSync,
{
    use futures::future;
    use std::io;

    let rewrite_includes_only = match dist_client {
        Ok(Some(ref client)) => client.rewrite_includes_only(),
        _ => false,
    };
    let mut path_transformer = dist::PathTransformer::default();
    let compile_commands = compilation
        .generate_compile_commands(&mut path_transformer, rewrite_includes_only)
        .context("Failed to generate compile commands");
    let (compile_cmd, dist_compile_cmd, cacheable) = match compile_commands {
        Ok(cmds) => cmds,
        Err(e) => return f_err(e),
    };

    // No dist client configured: compile locally and return immediately.
    let dist_client = match dist_client {
        Ok(Some(dc)) => dc,
        Ok(None) => {
            debug!("[{}]: Compiling locally", out_pretty);
            return Box::new(
                compile_cmd
                    .execute(&creator)
                    .map(move |o| (cacheable, DistType::NoDist, o)),
            );
        }
        Err(e) => {
            return f_err(e);
        }
    };

    debug!("[{}]: Attempting distributed compilation", out_pretty);
    // Each closure in the chain below needs its own owned copy of these.
    let compile_out_pretty = out_pretty.clone();
    let compile_out_pretty2 = out_pretty.clone();
    let compile_out_pretty3 = out_pretty.clone();
    let compile_out_pretty4 = out_pretty;
    let local_executable = compile_cmd.executable.clone();
    let local_executable2 = local_executable.clone();
    // TODO: the number of map_errs is subideal, but there's no futures-based carrier trait AFAIK
    Box::new(future::result(dist_compile_cmd.context("Could not create distributed compile command"))
        // Stage 1: build the dist request (output paths + input/toolchain packagers).
        .and_then(move |dist_compile_cmd| {
            debug!("[{}]: Creating distributed compile request", compile_out_pretty);
            let dist_output_paths = compilation.outputs()
                .map(|(_key, path)| path_transformer.as_dist_abs(&cwd.join(path)))
                .collect::<Option<_>>()
                .context("Failed to adapt an output path for distributed compile")?;
            compilation.into_dist_packagers(path_transformer)
                .map(|packagers| (dist_compile_cmd, packagers, dist_output_paths))
        })
        // Stage 2: upload (or reuse) the toolchain for this compiler.
        .and_then(move |(mut dist_compile_cmd, (inputs_packager, toolchain_packager, outputs_rewriter), dist_output_paths)| {
            debug!("[{}]: Identifying dist toolchain for {:?}", compile_out_pretty2, local_executable);
            dist_client.put_toolchain(&local_executable, &weak_toolchain_key, toolchain_packager)
                .and_then(|(dist_toolchain, maybe_dist_compile_executable)| {
                    let mut tc_archive = None;
                    if let Some((dist_compile_executable, archive_path)) = maybe_dist_compile_executable {
                        // The server runs its own copy of the compiler.
                        dist_compile_cmd.executable = dist_compile_executable;
                        tc_archive = Some(archive_path);
                    }
                    Ok((dist_client, dist_compile_cmd, dist_toolchain, inputs_packager, outputs_rewriter, dist_output_paths, tc_archive))
                })
        })
        // Stage 3: allocate a job, submit the toolchain if the server needs
        // it, run the job, then write the returned outputs to disk.
        .and_then(move |(dist_client, dist_compile_cmd, dist_toolchain, inputs_packager, outputs_rewriter, dist_output_paths, tc_archive)| {
            debug!("[{}]: Requesting allocation", compile_out_pretty3);
            dist_client.do_alloc_job(dist_toolchain.clone())
                .and_then(move |jares| {
                    let alloc = match jares {
                        dist::AllocJobResult::Success { job_alloc, need_toolchain: true } => {
                            debug!("[{}]: Sending toolchain {} for job {}",
                                compile_out_pretty3, dist_toolchain.archive_id, job_alloc.job_id);
                            Box::new(dist_client.do_submit_toolchain(job_alloc.clone(), dist_toolchain)
                                .and_then(move |res| {
                                    match res {
                                        dist::SubmitToolchainResult::Success => Ok(job_alloc),
                                        dist::SubmitToolchainResult::JobNotFound =>
                                            bail!("Job {} not found on server", job_alloc.job_id),
                                        dist::SubmitToolchainResult::CannotCache =>
                                            bail!("Toolchain for job {} could not be cached by server", job_alloc.job_id),
                                    }
                                })
                                .fcontext("Could not submit toolchain"))
                        },
                        dist::AllocJobResult::Success { job_alloc, need_toolchain: false } =>
                            f_ok(job_alloc),
                        dist::AllocJobResult::Fail { msg } =>
                            f_err(anyhow!("Failed to allocate job").context(msg)),
                    };
                    alloc
                        .and_then(move |job_alloc| {
                            let job_id = job_alloc.job_id;
                            let server_id = job_alloc.server_id;
                            debug!("[{}]: Running job", compile_out_pretty3);
                            dist_client.do_run_job(job_alloc, dist_compile_cmd, dist_output_paths, inputs_packager)
                                .map(move |res| ((job_id, server_id), res))
                                .fwith_context(move || format!("could not run distributed compilation job on {:?}", server_id))
                        })
                })
                .and_then(move |((job_id, server_id), (jres, path_transformer))| {
                    let jc = match jres {
                        dist::RunJobResult::Complete(jc) => jc,
                        dist::RunJobResult::JobNotFound => bail!("Job {} not found on server", job_id),
                    };
                    info!("fetched {:?}", jc.outputs.iter().map(|&(ref p, ref bs)| (p, bs.lens().to_string())).collect::<Vec<_>>());
                    let mut output_paths: Vec<PathBuf> = vec![];
                    // On any failure below, delete the output files written
                    // so far before propagating the error.
                    macro_rules! try_or_cleanup {
                        ($v:expr) => {{
                            match $v {
                                Ok(v) => v,
                                Err(e) => {
                                    // Do our best to clear up. We may end up deleting a file that we just wrote over
                                    // the top of, but it's better to clear up too much than too little
                                    for local_path in output_paths.iter() {
                                        if let Err(e) = fs::remove_file(local_path) {
                                            if e.kind() != io::ErrorKind::NotFound {
                                                warn!("{} while attempting to clear up {}", e, local_path.display())
                                            }
                                        }
                                    }
                                    return Err(e)
                                },
                            }
                        }};
                    }

                    for (path, output_data) in jc.outputs {
                        let len = output_data.lens().actual;
                        let local_path = try_or_cleanup!(path_transformer.to_local(&path)
                            .with_context(|| format!("unable to transform output path {}", path)));
                        output_paths.push(local_path);
                        // Do this first so cleanup works correctly
                        let local_path = output_paths.last().expect("nothing in vec after push");

                        let mut file = try_or_cleanup!(File::create(&local_path)
                            .with_context(|| format!("Failed to create output file {}", local_path.display())));
                        let count = try_or_cleanup!(io::copy(&mut output_data.into_reader(), &mut file)
                            .with_context(|| format!("Failed to write output to {}", local_path.display())));

                        assert!(count == len);
                    }
                    let extra_inputs = match tc_archive {
                        Some(p) => vec![p],
                        None => vec![],
                    };
                    try_or_cleanup!(outputs_rewriter.handle_outputs(&path_transformer, &output_paths, &extra_inputs)
                        .with_context(|| "failed to rewrite outputs from compile"));
                    Ok((DistType::Ok(server_id), jc.output.into()))
                })
        })
        // Fallback: most dist errors degrade to a local compile; HTTP client
        // errors and an over-large toolchain archive are surfaced instead.
        .or_else(move |e| {
            if let Some(HttpClientError(_)) = e.downcast_ref::<HttpClientError>() {
                f_err(e)
            } else if let Some(lru_disk_cache::Error::FileTooLarge) = e.downcast_ref::<lru_disk_cache::Error>() {
                f_err(anyhow!(
                    "Could not cache dist toolchain for {:?} locally.
 Increase `toolchain_cache_size` or decrease the toolchain archive size.",
                    local_executable2))
            } else {
                // `{:#}` prints the error and the causes in a single line.
                let errmsg = format!("{:#}", e);
                warn!("[{}]: Could not perform distributed compile, falling back to local: {}", compile_out_pretty4, errmsg);
                Box::new(compile_cmd.execute(&creator).map(|o| (DistType::Error, o)))
            }
        })
        .map(move |(dt, o)| (cacheable, dt, o))
    )
}
623
// `Clone` for the trait object, delegating to the object-safe `box_clone`.
impl<T: CommandCreatorSync> Clone for Box<dyn CompilerHasher<T>> {
    fn clone(&self) -> Box<dyn CompilerHasher<T>> {
        self.box_clone()
    }
}
629
/// An interface to a compiler for actually invoking compilation.
pub trait Compilation {
    /// Given information about a compiler command, generate a command that can
    /// execute the compiler.
    ///
    /// Returns the local command, an optional equivalent command usable on a
    /// dist server (`None` when this compilation can't be distributed), and
    /// whether the result may be cached.
    fn generate_compile_commands(
        &self,
        path_transformer: &mut dist::PathTransformer,
        rewrite_includes_only: bool,
    ) -> Result<(CompileCommand, Option<dist::CompileCommand>, Cacheable)>;

    /// Create a function that will create the inputs used to perform a distributed compilation
    #[cfg(feature = "dist-client")]
    fn into_dist_packagers(
        self: Box<Self>,
        _path_transformer: dist::PathTransformer,
    ) -> Result<DistPackagers>;

    /// Returns an iterator over the results of this compilation.
    ///
    /// Each item is a descriptive (and unique) name of the output paired with
    /// the path where it'll show up.
    fn outputs<'a>(&'a self) -> Box<dyn Iterator<Item = (&'a str, &'a Path)> + 'a>;
}
653
#[cfg(feature = "dist-client")]
pub trait OutputsRewriter {
    /// Perform any post-compilation handling of outputs, given a Vec of the dist_path and local_path
    ///
    /// `extra_inputs` carries any toolchain archive path involved in the
    /// compile (see the `tc_archive` handling in `dist_or_local_compile`).
    fn handle_outputs(
        self: Box<Self>,
        path_transformer: &dist::PathTransformer,
        output_paths: &[PathBuf],
        extra_inputs: &[PathBuf],
    ) -> Result<()>;
}
664
/// An `OutputsRewriter` that performs no rewriting at all, for compilers
/// whose outputs need no post-processing after a distributed compile.
#[cfg(feature = "dist-client")]
pub struct NoopOutputsRewriter;
#[cfg(feature = "dist-client")]
impl OutputsRewriter for NoopOutputsRewriter {
    fn handle_outputs(
        self: Box<Self>,
        _path_transformer: &dist::PathTransformer,
        _output_paths: &[PathBuf],
        _extra_inputs: &[PathBuf],
    ) -> Result<()> {
        // Nothing to do: outputs are used exactly as written.
        Ok(())
    }
}
678
/// Result of generating a hash from a compiler command.
pub struct HashResult {
    /// The hash key of the inputs.
    pub key: String,
    /// An object to use for the actual compilation, if necessary.
    pub compilation: Box<dyn Compilation + 'static>,
    /// A weak key that may be used to identify the toolchain
    pub weak_toolchain_key: String,
}
688
/// Possible results of parsing compiler arguments.
#[derive(Debug, PartialEq)]
pub enum CompilerArguments<T> {
    /// Commandline can be handled.
    Ok(T),
    /// Cannot cache this compilation: a static reason plus optional
    /// dynamic detail (e.g. the offending argument).
    CannotCache(&'static str, Option<String>),
    /// This commandline is not a compile.
    NotCompilation,
}
699
/// Return early from an argument-parsing function with
/// `CompilerArguments::CannotCache`. `$why` is the static reason;
/// the optional `$extra_info` carries dynamic detail.
macro_rules! cannot_cache {
    ($why:expr) => {
        return CompilerArguments::CannotCache($why, None);
    };
    ($why:expr, $extra_info:expr) => {
        return CompilerArguments::CannotCache($why, Some($extra_info));
    };
}
708
/// Unwrap a `Result` inside argument parsing; on `Err`, return early with
/// `CompilerArguments::CannotCache($why, <error text>)` via `cannot_cache!`.
macro_rules! try_or_cannot_cache {
    ($arg:expr, $why:expr) => {{
        match $arg {
            Ok(arg) => arg,
            Err(e) => cannot_cache!($why, e.to_string()),
        }
    }};
}
717
/// Specifics about distributed compilation.
#[derive(Debug, PartialEq)]
pub enum DistType {
    /// Distribution was not enabled.
    NoDist,
    /// Distributed compile success, on the given server.
    Ok(dist::ServerId),
    /// Distributed compile failed (the result came from the local fallback).
    Error,
}
728
/// Specifics about cache misses.
#[derive(Debug, PartialEq)]
pub enum MissType {
    /// The compilation was not found in the cache, nothing more.
    Normal,
    /// Cache lookup was overridden, recompilation was forced.
    ForcedRecache,
    /// Cache took too long to respond.
    TimedOut,
    /// Error reading from cache
    CacheReadError,
}
741
/// Information about a successful cache write.
pub struct CacheWriteInfo {
    /// Pretty-printed name of the object file that was cached.
    pub object_file_pretty: String,
    /// How long the cache write took.
    pub duration: Duration,
}
747
/// The result of a compilation or cache retrieval.
pub enum CompileResult {
    /// An error made the compilation not possible.
    Error,
    /// Result was found in cache; carries the lookup duration.
    CacheHit(Duration),
    /// Result was not found in cache.
    ///
    /// Fields: why the lookup missed, how the compile ran (dist/local),
    /// the compile duration, and a future that resolves when the result is
    /// finished being stored in the cache.
    CacheMiss(MissType, DistType, Duration, SFuture<CacheWriteInfo>),
    /// Not in cache, but the compilation result was determined to be not cacheable.
    NotCacheable,
    /// Not in cache, but compilation failed.
    CompileFailed,
}
764
/// The state of `--color` options passed to a compiler.
#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
pub enum ColorMode {
    /// Color output explicitly disabled.
    Off,
    /// Color output explicitly enabled.
    On,
    /// No explicit setting; the compiler decides (the default).
    Auto,
}
772
773 impl Default for ColorMode {
default() -> ColorMode774 fn default() -> ColorMode {
775 ColorMode::Auto
776 }
777 }
778
779 /// Can't derive(Debug) because of `CacheWriteFuture`.
780 impl fmt::Debug for CompileResult {
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result781 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
782 match *self {
783 CompileResult::Error => write!(f, "CompileResult::Error"),
784 CompileResult::CacheHit(ref d) => write!(f, "CompileResult::CacheHit({:?})", d),
785 CompileResult::CacheMiss(ref m, ref dt, ref d, _) => {
786 write!(f, "CompileResult::CacheMiss({:?}, {:?}, {:?}, _)", d, m, dt)
787 }
788 CompileResult::NotCacheable => write!(f, "CompileResult::NotCacheable"),
789 CompileResult::CompileFailed => write!(f, "CompileResult::CompileFailed"),
790 }
791 }
792 }
793
794 /// Can't use derive(PartialEq) because of the `CacheWriteFuture`.
795 impl PartialEq<CompileResult> for CompileResult {
eq(&self, other: &CompileResult) -> bool796 fn eq(&self, other: &CompileResult) -> bool {
797 match (self, other) {
798 (&CompileResult::Error, &CompileResult::Error) => true,
799 (&CompileResult::CacheHit(_), &CompileResult::CacheHit(_)) => true,
800 (
801 &CompileResult::CacheMiss(ref m, ref dt, _, _),
802 &CompileResult::CacheMiss(ref n, ref dt2, _, _),
803 ) => m == n && dt == dt2,
804 (&CompileResult::NotCacheable, &CompileResult::NotCacheable) => true,
805 (&CompileResult::CompileFailed, &CompileResult::CompileFailed) => true,
806 _ => false,
807 }
808 }
809 }
810
/// Can this result be stored in cache?
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Cacheable {
    Yes,
    No,
}
817
/// Control of caching behavior.
#[derive(Debug, PartialEq)]
pub enum CacheControl {
    /// Default caching behavior.
    Default,
    /// Ignore existing cache entries, force recompilation.
    ForceRecache,
}
826
827 /// Creates a future that will write `contents` to `path` inside of a temporary
828 /// directory.
829 ///
830 /// The future will resolve to the temporary directory and an absolute path
831 /// inside that temporary directory with a file that has the same filename as
832 /// `path` contains the `contents` specified.
833 ///
834 /// Note that when the `TempDir` is dropped it will delete all of its contents
835 /// including the path returned.
write_temp_file( pool: &ThreadPool, path: &Path, contents: Vec<u8>, ) -> SFuture<(TempDir, PathBuf)>836 pub fn write_temp_file(
837 pool: &ThreadPool,
838 path: &Path,
839 contents: Vec<u8>,
840 ) -> SFuture<(TempDir, PathBuf)> {
841 let path = path.to_owned();
842 pool.spawn_fn(move || -> Result<_> {
843 let dir = tempfile::Builder::new().prefix("sccache").tempdir()?;
844 let src = dir.path().join(path);
845 let mut file = File::create(&src)?;
846 file.write_all(&contents)?;
847 Ok((dir, src))
848 })
849 .fcontext("failed to write temporary file")
850 }
851
852 /// If `executable` is a known compiler, return `Some(Box<Compiler>)`.
fn detect_compiler<T>(
    creator: T,
    executable: &Path,
    cwd: &Path,
    env: &[(OsString, OsString)],
    pool: &ThreadPool,
    dist_archive: Option<PathBuf>,
) -> SFuture<(Box<dyn Compiler<T>>, Option<Box<dyn CompilerProxy<T>>>)>
where
    T: CommandCreatorSync,
{
    trace!("detect_compiler: {}", executable.display());

    // First, see if this looks like rustc.
    // `file_stem` strips any extension (e.g. `rustc.exe` on Windows).
    let filename = match executable.file_stem() {
        None => return f_err(anyhow!("could not determine compiler kind")),
        Some(f) => f,
    };
    let filename = filename.to_string_lossy().to_lowercase();

    // Future resolving to:
    //   Some(Ok(version)) - confirmed rustc; `version` is the `-vV` output
    //   Some(Err(_))      - named like rustc but `-vV` output didn't match
    //   None              - not named like rustc at all
    let rustc_vv = if filename == "rustc" || filename == "clippy-driver" {
        // Sanity check that it's really rustc.
        let executable = executable.to_path_buf();
        let mut child = creator.clone().new_command_sync(executable);
        child.env_clear().envs(ref_env(env)).args(&["-vV"]);

        Box::new(run_input_output(child, None).map(|output| {
            if let Ok(stdout) = String::from_utf8(output.stdout.clone()) {
                if stdout.starts_with("rustc ") {
                    return Some(Ok(stdout));
                }
            }
            Some(Err(ProcessError(output)))
        }))
    } else {
        f_ok(None)
    };

    // Clones for the various `move` closures below (futures 0.1 style:
    // each closure takes ownership of its own copy).
    let creator1 = creator.clone();
    let creator2 = creator.clone();
    let executable = executable.to_owned();
    let executable2 = executable.clone();
    let env1 = env.to_owned();
    let env2 = env.to_owned();
    let env3 = env.to_owned();
    let pool = pool.clone();
    let cwd = cwd.to_owned();
    Box::new(
        rustc_vv
            .and_then(move |rustc_vv| match rustc_vv {
                Some(Ok(rustc_verbose_version)) => {
                    debug!("Found rustc");

                    // `rustc` may actually be a rustup shim; try to resolve the
                    // real toolchain binary through the proxy so compilations
                    // hash against the actual compiler.
                    Box::new(
                        RustupProxy::find_proxy_executable::<T>(&executable2,"rustup", creator, &env1)
                        .and_then(move |proxy : Result<Option<RustupProxy>>| -> SFuture<(Option<RustupProxy>, PathBuf)> {
                            match proxy {
                                Ok(Some(proxy)) => {
                                    trace!("Found rustup proxy executable");
                                    let fut =
                                        proxy
                                        .resolve_proxied_executable(creator1, cwd, &env2)
                                        .then(move |res| {
                                            // take the pathbuf for rustc as resolved by the proxy
                                            match res {
                                                Ok((resolved_path, _time)) => {
                                                    trace!("Resolved path with rustup proxy {:?}", &resolved_path);
                                                    f_ok((Some(proxy), resolved_path))
                                                },
                                                Err(e) => {
                                                    // Resolution failure is non-fatal: fall back
                                                    // to the original executable, no proxy.
                                                    trace!("Could not resolve compiler with rustup proxy: {}", e);
                                                    f_ok((None, executable))
                                                },
                                            }
                                        });
                                    Box::new(fut)
                                },
                                Ok(None) => {
                                    trace!("Did not find rustup");
                                    f_ok((None, executable))
                                },
                                Err(e) => {
                                    // Also non-fatal; proceed without a proxy.
                                    trace!("Did not find rustup due to {}", e);
                                    f_ok((None, executable))
                                },
                            }
                        })
                        .then(move |res: Result<(Option<RustupProxy>, PathBuf)>| {
                            // Box the proxy up as a trait object, or fall back to
                            // the unproxied executable on any error.
                            let (proxy, resolved_rustc) : (_, PathBuf)
                                = res
                                .map(|(proxy,resolved_compiler_executable)| {
                                    (
                                        proxy.map(Box::new).map(|x : Box<RustupProxy>| {
                                            x as Box<dyn CompilerProxy<T>>
                                        }),
                                        resolved_compiler_executable
                                    )
                                })
                                .unwrap_or_else(|_e| {
                                    trace!("Compiling rust without proxy");
                                    (None, executable2)
                                });

                            Rust::new(
                                creator2,
                                resolved_rustc,
                                &env3,
                                &rustc_verbose_version,
                                dist_archive,
                                pool,
                            )
                            .map(|c| {
                                (
                                    Box::new(c) as Box<dyn Compiler<T> >,
                                    proxy as Option<Box<dyn CompilerProxy<T>>>
                                )
                            })
                        }
                    )
                    )
                }
                Some(Err(e)) => f_err(e),
                None => {
                    // Not rustc: probe for a known C-family compiler instead.
                    // C compilers never get a proxy.
                    let cc = detect_c_compiler(creator, executable, env1.to_vec(), pool);
                    Box::new(cc.map(|c : Box<dyn Compiler<T>>| { (c, None ) }))
                },
            })
    )
}
982
/// Probe `executable` to determine which C-family compiler it is.
///
/// Writes a small test file full of preprocessor conditionals to a temporary
/// directory, runs `executable -E` on it, and inspects the preprocessed
/// output: the first surviving non-`#` line names the compiler kind, the
/// second (the expansion of `__VERSION__`, if any) gives its version.
/// Returns an error if the output matches no known compiler.
fn detect_c_compiler<T>(
    creator: T,
    executable: PathBuf,
    env: Vec<(OsString, OsString)>,
    pool: ThreadPool,
) -> SFuture<Box<dyn Compiler<T>>>
where
    T: CommandCreatorSync,
{
    trace!("detect_c_compiler");

    // NVCC needs to be first as msvc, clang, or gcc could
    // be the underlying host compiler for nvcc
    // Both clang and clang-cl define _MSC_VER on Windows, so we first
    // check for MSVC, then check whether _MT is defined, which is the
    // difference between clang and clang-cl.
    let test = b"#if defined(__NVCC__)
nvcc
#elif defined(_MSC_VER) && !defined(__clang__)
msvc
#elif defined(_MSC_VER) && defined(_MT)
msvc-clang
#elif defined(__clang__) && defined(__cplusplus)
clang++
#elif defined(__clang__)
clang
#elif defined(__GNUC__) && defined(__cplusplus)
g++
#elif defined(__GNUC__)
gcc
#elif defined(__DCC__)
diab
#else
unknown
#endif
__VERSION__
"
    .to_vec();
    let write = write_temp_file(&pool, "testfile.c".as_ref(), test);

    let mut cmd = creator.clone().new_command_sync(&executable);
    cmd.stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .envs(env.iter().map(|s| (&s.0, &s.1)));
    let output = write.and_then(move |(tempdir, src)| {
        // `-E` = preprocess only; all compilers probed here accept it.
        cmd.arg("-E").arg(src);
        trace!("compiler {:?}", cmd);
        cmd.spawn()
            .and_then(|child| {
                child
                    .wait_with_output()
                    .fcontext("failed to read child output")
            })
            .map(|e| {
                // Keep the tempdir alive until the compiler has finished
                // reading the test file, then clean it up.
                drop(tempdir);
                e
            })
    });

    Box::new(output.and_then(move |output| -> SFuture<_> {
        let stdout = match str::from_utf8(&output.stdout) {
            Ok(s) => s,
            Err(_) => return f_err(anyhow!("Failed to parse output")),
        };
        // Skip blank lines and preprocessor line markers (`# ...`) so only
        // the expanded kind/version tokens remain.
        let mut lines = stdout.lines().filter_map(|line| {
            let line = line.trim();
            if line.is_empty() || line.starts_with('#') {
                None
            } else {
                Some(line)
            }
        });
        if let Some(kind) = lines.next() {
            let version = lines
                .next()
                // In case the compiler didn't expand the macro.
                .filter(|&line| line != "__VERSION__")
                .map(str::to_owned);
            match kind {
                "clang" | "clang++" => {
                    debug!("Found {}", kind);
                    return Box::new(
                        CCompiler::new(
                            Clang {
                                clangplusplus: kind == "clang++",
                            },
                            executable,
                            version,
                            &pool,
                        )
                        .map(|c| Box::new(c) as Box<dyn Compiler<T>>),
                    );
                }
                "diab" => {
                    debug!("Found diab");
                    return Box::new(
                        CCompiler::new(Diab, executable, version, &pool)
                            .map(|c| Box::new(c) as Box<dyn Compiler<T>>),
                    );
                }
                "gcc" | "g++" => {
                    debug!("Found {}", kind);
                    return Box::new(
                        CCompiler::new(
                            GCC {
                                gplusplus: kind == "g++",
                            },
                            executable,
                            version,
                            &pool,
                        )
                        .map(|c| Box::new(c) as Box<dyn Compiler<T>>),
                    );
                }
                "msvc" | "msvc-clang" => {
                    let is_clang = kind == "msvc-clang";
                    debug!("Found MSVC (is clang: {})", is_clang);
                    // MSVC needs the localized `/showIncludes` prefix detected
                    // up front so dependency output can be parsed later.
                    let prefix = msvc::detect_showincludes_prefix(
                        &creator,
                        executable.as_ref(),
                        is_clang,
                        env,
                        &pool,
                    );
                    return Box::new(prefix.and_then(move |prefix| {
                        trace!("showIncludes prefix: '{}'", prefix);
                        CCompiler::new(
                            MSVC {
                                includes_prefix: prefix,
                                is_clang,
                            },
                            executable,
                            version,
                            &pool,
                        )
                        .map(|c| Box::new(c) as Box<dyn Compiler<T>>)
                    }));
                }
                "nvcc" => {
                    debug!("Found NVCC");
                    return Box::new(
                        CCompiler::new(NVCC, executable, version, &pool)
                            .map(|c| Box::new(c) as Box<dyn Compiler<T>>),
                    );
                }
                _ => (),
            }
        }

        // Unrecognized output: log everything we saw and surface the
        // compiler's stderr as the error message.
        let stderr = String::from_utf8_lossy(&output.stderr);
        debug!("nothing useful in detection output {:?}", stdout);
        debug!("compiler status: {}", output.status);
        debug!("compiler stderr:\n{}", stderr);

        f_err(anyhow!(stderr.into_owned()))
    }))
}
1140
1141 /// If `executable` is a known compiler, return a `Box<Compiler>` containing information about it.
get_compiler_info<T>( creator: T, executable: &Path, cwd: &Path, env: &[(OsString, OsString)], pool: &ThreadPool, dist_archive: Option<PathBuf>, ) -> SFuture<(Box<dyn Compiler<T>>, Option<Box<dyn CompilerProxy<T>>>)> where T: CommandCreatorSync,1142 pub fn get_compiler_info<T>(
1143 creator: T,
1144 executable: &Path,
1145 cwd: &Path,
1146 env: &[(OsString, OsString)],
1147 pool: &ThreadPool,
1148 dist_archive: Option<PathBuf>,
1149 ) -> SFuture<(Box<dyn Compiler<T>>, Option<Box<dyn CompilerProxy<T>>>)>
1150 where
1151 T: CommandCreatorSync,
1152 {
1153 let pool = pool.clone();
1154 detect_compiler(creator, executable, cwd, env, &pool, dist_archive)
1155 }
1156
#[cfg(test)]
mod test {
    //! Unit tests for compiler detection and the cached-or-compile flow.
    //!
    //! These tests drive the code through a mock command creator: each call
    //! to `next_command`/`next_command_calls` queues the output of the *next*
    //! process spawn, so the queueing order below must match the order in
    //! which the code under test spawns processes.
    use super::*;
    use crate::cache::disk::DiskCache;
    use crate::cache::Storage;
    use crate::mock_command::*;
    use crate::test::mock_storage::MockStorage;
    use crate::test::utils::*;
    use futures::{future, Future};
    use futures_03::executor::ThreadPool;
    use std::fs::{self, File};
    use std::io::Write;
    use std::sync::Arc;
    use std::time::Duration;
    use std::u64;
    use tokio_compat::runtime::current_thread::Runtime;

    #[test]
    fn test_detect_compiler_kind_gcc() {
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        // Detection runs `-E` on a probe file; mock its stdout as "gcc".
        next_command(&creator, Ok(MockChild::new(exit_status(0), "\n\ngcc", "")));
        let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::C(CCompilerKind::GCC), c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_clang() {
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        next_command(&creator, Ok(MockChild::new(exit_status(0), "clang\n", "")));
        let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::C(CCompilerKind::Clang), c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_msvc() {
        drop(env_logger::try_init());
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        let f = TestFixture::new();
        let srcfile = f.touch("test.h").unwrap();
        let mut s = srcfile.to_str().unwrap();
        // Strip Windows' verbatim path prefix so the mocked showIncludes
        // output matches what MSVC would print.
        if s.starts_with("\\\\?\\") {
            s = &s[4..];
        }
        let prefix = String::from("blah: ");
        let stdout = format!("{}{}\r\n", prefix, s);
        // Compiler detection output
        next_command(&creator, Ok(MockChild::new(exit_status(0), "\nmsvc\n", "")));
        // showincludes prefix detection output
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), &stdout, &String::new())),
        );
        let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::C(CCompilerKind::MSVC), c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_nvcc() {
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        next_command(&creator, Ok(MockChild::new(exit_status(0), "nvcc\n", "")));
        let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::C(CCompilerKind::NVCC), c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_rustc() {
        let f = TestFixture::new();
        // Windows uses bin, everything else uses lib. Just create both.
        fs::create_dir(f.tempdir.path().join("lib")).unwrap();
        fs::create_dir(f.tempdir.path().join("bin")).unwrap();
        let rustc = f.mk_bin("rustc").unwrap();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        // rustc --vV
        next_command(
            &creator,
            Ok(MockChild::new(
                exit_status(0),
                "\
rustc 1.27.0 (3eda71b00 2018-06-19)
binary: rustc
commit-hash: 3eda71b00ad48d7bf4eef4c443e7f611fd061418
commit-date: 2018-06-19
host: x86_64-unknown-linux-gnu
release: 1.27.0
LLVM version: 6.0",
                "",
            )),
        );
        // rustc --print=sysroot
        let sysroot = f.tempdir.path().to_str().unwrap();
        next_command(&creator, Ok(MockChild::new(exit_status(0), &sysroot, "")));
        next_command(&creator, Ok(MockChild::new(exit_status(0), &sysroot, "")));
        next_command(&creator, Ok(MockChild::new(exit_status(0), &sysroot, "")));
        let c = detect_compiler(creator, &rustc, f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::Rust, c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_diab() {
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        next_command(&creator, Ok(MockChild::new(exit_status(0), "\ndiab\n", "")));
        let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        assert_eq!(CompilerKind::C(CCompilerKind::Diab), c.kind());
    }

    #[test]
    fn test_detect_compiler_kind_unknown() {
        // Output that matches no known compiler should be an error.
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "something", "")),
        );
        assert!(detect_compiler(
            creator,
            "/foo/bar".as_ref(),
            f.tempdir.path(),
            &[],
            &pool,
            None
        )
        .wait()
        .is_err());
    }

    #[test]
    fn test_detect_compiler_kind_process_fail() {
        // A failing probe process should also yield a detection error.
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        next_command(&creator, Ok(MockChild::new(exit_status(1), "", "")));
        assert!(detect_compiler(
            creator,
            "/foo/bar".as_ref(),
            f.tempdir.path(),
            &[],
            &pool,
            None
        )
        .wait()
        .is_err());
    }

    #[test]
    fn test_compiler_version_affects_hash() {
        let f = TestFixture::new();
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let cwd = f.tempdir.path();

        // Generate a hash key with two different reported compiler versions
        // and check that the keys differ.
        let results: Vec<_> = [11, 12]
            .iter()
            .map(|version| {
                let output = format!("clang\n\"{}.0.0\"", version);
                next_command(&creator, Ok(MockChild::new(exit_status(0), &output, "")));
                let c = detect_compiler(
                    creator.clone(),
                    &f.bins[0],
                    f.tempdir.path(),
                    &[],
                    &pool,
                    None,
                )
                .wait()
                .unwrap()
                .0;
                next_command(
                    &creator,
                    Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
                );
                let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
                    CompilerArguments::Ok(h) => h,
                    o => panic!("Bad result from parse_arguments: {:?}", o),
                };
                hasher
                    .generate_hash_key(&creator, cwd.to_path_buf(), vec![], false, &pool, false)
                    .wait()
                    .unwrap()
            })
            .collect();
        assert_eq!(results.len(), 2);
        assert_ne!(results[0].key, results[1].key);
    }

    #[test]
    fn test_get_compiler_info() {
        let creator = new_creator();
        let pool = ThreadPool::sized(1);
        let f = TestFixture::new();
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None)
            .wait()
            .unwrap()
            .0;
        // digest of an empty file.
        assert_eq!(CompilerKind::C(CCompilerKind::GCC), c.kind());
    }

    #[test]
    fn test_compiler_get_cached_or_compile() {
        // End-to-end: first compile is a cache miss that populates the cache,
        // second identical compile is a cache hit with no compiler spawn.
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let mut runtime = Runtime::new().unwrap();
        let storage = DiskCache::new(&f.tempdir.path().join("cache"), u64::MAX, &pool);
        let storage: Arc<dyn Storage> = Arc::new(storage);
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        // The preprocessor invocation.
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
        );
        // The compiler invocation.
        const COMPILER_STDOUT: &[u8] = b"compiler stdout";
        const COMPILER_STDERR: &[u8] = b"compiler stderr";
        let obj = f.tempdir.path().join("foo.o");
        let o = obj.clone();
        next_command_calls(&creator, move |_| {
            // Pretend to compile something.
            let mut f = File::create(&o)?;
            f.write_all(b"file contents")?;
            Ok(MockChild::new(
                exit_status(0),
                COMPILER_STDOUT,
                COMPILER_STDERR,
            ))
        });
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        let hasher2 = hasher.clone();
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher.get_cached_or_compile(
                    Ok(None),
                    creator.clone(),
                    storage.clone(),
                    arguments.clone(),
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool.clone(),
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        match cached {
            CompileResult::CacheMiss(MissType::Normal, DistType::NoDist, _, f) => {
                // wait on cache write future so we don't race with it!
                f.wait().unwrap();
            }
            _ => assert!(false, "Unexpected compile result: {:?}", cached),
        }
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
        // Now compile again, which should be a cache hit.
        fs::remove_file(&obj).unwrap();
        // The preprocessor invocation.
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
        );
        // There should be no actual compiler invocation.
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher2.get_cached_or_compile(
                    Ok(None),
                    creator,
                    storage,
                    arguments,
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool,
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        assert_eq!(CompileResult::CacheHit(Duration::new(0, 0)), cached);
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
    }

    #[test]
    #[cfg(feature = "dist-client")]
    fn test_compiler_get_cached_or_compile_dist() {
        // Same as above, but the first (miss) compile is performed by a
        // mock distributed client rather than a local compiler spawn.
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let mut runtime = Runtime::new().unwrap();
        let storage = DiskCache::new(&f.tempdir.path().join("cache"), u64::MAX, &pool);
        let storage: Arc<dyn Storage> = Arc::new(storage);
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        // The preprocessor invocation.
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
        );
        // The compiler invocation.
        const COMPILER_STDOUT: &[u8] = b"compiler stdout";
        const COMPILER_STDERR: &[u8] = b"compiler stderr";
        let obj = f.tempdir.path().join("foo.o");
        // Dist client will do the compilation
        let dist_client = Some(test_dist::OneshotClient::new(
            0,
            COMPILER_STDOUT.to_owned(),
            COMPILER_STDERR.to_owned(),
        ));
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        let hasher2 = hasher.clone();
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher.get_cached_or_compile(
                    Ok(dist_client.clone()),
                    creator.clone(),
                    storage.clone(),
                    arguments.clone(),
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool.clone(),
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        match cached {
            CompileResult::CacheMiss(MissType::Normal, DistType::Ok(_), _, f) => {
                // wait on cache write future so we don't race with it!
                f.wait().unwrap();
            }
            _ => assert!(false, "Unexpected compile result: {:?}", cached),
        }
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
        // Now compile again, which should be a cache hit.
        fs::remove_file(&obj).unwrap();
        // The preprocessor invocation.
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
        );
        // There should be no actual compiler invocation.
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher2.get_cached_or_compile(
                    Ok(dist_client.clone()),
                    creator,
                    storage,
                    arguments,
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool,
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        assert_eq!(CompileResult::CacheHit(Duration::new(0, 0)), cached);
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
    }

    #[test]
    /// Test that a cache read that results in an error is treated as a cache
    /// miss.
    fn test_compiler_get_cached_or_compile_cache_error() {
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let mut runtime = Runtime::new().unwrap();
        let storage = MockStorage::new();
        let storage: Arc<MockStorage> = Arc::new(storage);
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        // The preprocessor invocation.
        next_command(
            &creator,
            Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
        );
        // The compiler invocation.
        const COMPILER_STDOUT: &[u8] = b"compiler stdout";
        const COMPILER_STDERR: &[u8] = b"compiler stderr";
        let obj = f.tempdir.path().join("foo.o");
        let o = obj.clone();
        next_command_calls(&creator, move |_| {
            // Pretend to compile something.
            let mut f = File::create(&o)?;
            f.write_all(b"file contents")?;
            Ok(MockChild::new(
                exit_status(0),
                COMPILER_STDOUT,
                COMPILER_STDERR,
            ))
        });
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        // The cache will return an error.
        storage.next_get(f_err(anyhow!("Some Error")));
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher.get_cached_or_compile(
                    Ok(None),
                    creator,
                    storage,
                    arguments.clone(),
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool,
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        match cached {
            CompileResult::CacheMiss(MissType::CacheReadError, DistType::NoDist, _, f) => {
                // wait on cache write future so we don't race with it!
                f.wait().unwrap();
            }
            _ => assert!(false, "Unexpected compile result: {:?}", cached),
        }

        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
    }

    #[test]
    fn test_compiler_get_cached_or_compile_force_recache() {
        // `CacheControl::ForceRecache` must bypass the cache lookup and
        // re-run the compiler even when an entry exists.
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let mut runtime = Runtime::new().unwrap();
        let storage = DiskCache::new(&f.tempdir.path().join("cache"), u64::MAX, &pool);
        let storage: Arc<dyn Storage> = Arc::new(storage);
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        const COMPILER_STDOUT: &[u8] = b"compiler stdout";
        const COMPILER_STDERR: &[u8] = b"compiler stderr";
        // The compiler should be invoked twice, since we're forcing
        // recaching.
        let obj = f.tempdir.path().join("foo.o");
        for _ in 0..2 {
            // The preprocessor invocation.
            next_command(
                &creator,
                Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
            );
            // The compiler invocation.
            let o = obj.clone();
            next_command_calls(&creator, move |_| {
                // Pretend to compile something.
                let mut f = File::create(&o)?;
                f.write_all(b"file contents")?;
                Ok(MockChild::new(
                    exit_status(0),
                    COMPILER_STDOUT,
                    COMPILER_STDERR,
                ))
            });
        }
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        let hasher2 = hasher.clone();
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher.get_cached_or_compile(
                    Ok(None),
                    creator.clone(),
                    storage.clone(),
                    arguments.clone(),
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool.clone(),
                )
            }))
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        match cached {
            CompileResult::CacheMiss(MissType::Normal, DistType::NoDist, _, f) => {
                // wait on cache write future so we don't race with it!
                f.wait().unwrap();
            }
            _ => assert!(false, "Unexpected compile result: {:?}", cached),
        }
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
        // Now compile again, but force recaching.
        fs::remove_file(&obj).unwrap();
        let (cached, res) = hasher2
            .get_cached_or_compile(
                Ok(None),
                creator,
                storage,
                arguments,
                cwd.to_path_buf(),
                vec![],
                CacheControl::ForceRecache,
                pool,
            )
            .wait()
            .unwrap();
        // Ensure that the object file was created.
        assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
        match cached {
            CompileResult::CacheMiss(MissType::ForcedRecache, DistType::NoDist, _, f) => {
                // wait on cache write future so we don't race with it!
                f.wait().unwrap();
            }
            _ => assert!(false, "Unexpected compile result: {:?}", cached),
        }
        assert_eq!(exit_status(0), res.status);
        assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
        assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
    }

    #[test]
    fn test_compiler_get_cached_or_compile_preprocessor_error() {
        // A failing preprocessor means `CompileResult::Error`, preprocessor
        // stderr surfaced to the caller, and any stale object file removed.
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let mut runtime = Runtime::new().unwrap();
        let storage = DiskCache::new(&f.tempdir.path().join("cache"), u64::MAX, &pool);
        let storage: Arc<dyn Storage> = Arc::new(storage);
        // Pretend to be GCC. Also inject a fake object file that the subsequent
        // preprocessor failure should remove.
        let obj = f.tempdir.path().join("foo.o");
        let o = obj.clone();
        next_command_calls(&creator, move |_| {
            let mut f = File::create(&o)?;
            f.write_all(b"file contents")?;
            Ok(MockChild::new(exit_status(0), "gcc", ""))
        });
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        // We should now have a fake object file.
        assert!(fs::metadata(&obj).is_ok());
        // The preprocessor invocation.
        const PREPROCESSOR_STDERR: &[u8] = b"something went wrong";
        next_command(
            &creator,
            Ok(MockChild::new(
                exit_status(1),
                b"preprocessor output",
                PREPROCESSOR_STDERR,
            )),
        );
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        let (cached, res) = runtime
            .block_on(future::lazy(|| {
                hasher.get_cached_or_compile(
                    Ok(None),
                    creator,
                    storage,
                    arguments,
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::Default,
                    pool,
                )
            }))
            .unwrap();
        assert_eq!(cached, CompileResult::Error);
        assert_eq!(exit_status(1), res.status);
        // Shouldn't get anything on stdout, since that would just be preprocessor spew!
        assert_eq!(b"", res.stdout.as_slice());
        assert_eq!(PREPROCESSOR_STDERR, res.stderr.as_slice());
        // Errors in preprocessing should remove the object file.
        assert_eq!(fs::metadata(&obj).is_ok(), false);
    }

    #[test]
    #[cfg(feature = "dist-client")]
    fn test_compiler_get_cached_or_compile_dist_error() {
        // Each mock dist client fails at a different stage; every failure
        // must fall back to a successful local compile (DistType::Error).
        drop(env_logger::try_init());
        let creator = new_creator();
        let f = TestFixture::new();
        let pool = ThreadPool::sized(1);
        let dist_clients = vec![
            test_dist::ErrorPutToolchainClient::new(),
            test_dist::ErrorAllocJobClient::new(),
            test_dist::ErrorSubmitToolchainClient::new(),
            test_dist::ErrorRunJobClient::new(),
        ];
        let storage = DiskCache::new(&f.tempdir.path().join("cache"), u64::MAX, &pool);
        let storage: Arc<dyn Storage> = Arc::new(storage);
        // Pretend to be GCC.
        next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", "")));
        let c = get_compiler_info(
            creator.clone(),
            &f.bins[0],
            f.tempdir.path(),
            &[],
            &pool,
            None,
        )
        .wait()
        .unwrap()
        .0;
        const COMPILER_STDOUT: &[u8] = b"compiler stdout";
        const COMPILER_STDERR: &[u8] = b"compiler stderr";
        // The compiler should be invoked twice, since we're forcing
        // recaching.
        let obj = f.tempdir.path().join("foo.o");
        for _ in dist_clients.iter() {
            // The preprocessor invocation.
            next_command(
                &creator,
                Ok(MockChild::new(exit_status(0), "preprocessor output", "")),
            );
            // The compiler invocation.
            let o = obj.clone();
            next_command_calls(&creator, move |_| {
                // Pretend to compile something.
                let mut f = File::create(&o)?;
                f.write_all(b"file contents")?;
                Ok(MockChild::new(
                    exit_status(0),
                    COMPILER_STDOUT,
                    COMPILER_STDERR,
                ))
            });
        }
        let cwd = f.tempdir.path();
        let arguments = ovec!["-c", "foo.c", "-o", "foo.o"];
        let hasher = match c.parse_arguments(&arguments, ".".as_ref()) {
            CompilerArguments::Ok(h) => h,
            o => panic!("Bad result from parse_arguments: {:?}", o),
        };
        // All these dist clients will fail, but should still result in successful compiles
        for dist_client in dist_clients {
            if obj.is_file() {
                fs::remove_file(&obj).unwrap();
            }
            let hasher = hasher.clone();
            let (cached, res) = hasher
                .get_cached_or_compile(
                    Ok(Some(dist_client.clone())),
                    creator.clone(),
                    storage.clone(),
                    arguments.clone(),
                    cwd.to_path_buf(),
                    vec![],
                    CacheControl::ForceRecache,
                    pool.clone(),
                )
                .wait()
                .unwrap();
            // Ensure that the object file was created.
            assert!(fs::metadata(&obj).map(|m| m.len() > 0).unwrap());
            match cached {
                CompileResult::CacheMiss(MissType::ForcedRecache, DistType::Error, _, f) => {
                    // wait on cache write future so we don't race with it!
                    f.wait().unwrap();
                }
                _ => assert!(false, "Unexpected compile result: {:?}", cached),
            }
            assert_eq!(exit_status(0), res.status);
            assert_eq!(COMPILER_STDOUT, res.stdout.as_slice());
            assert_eq!(COMPILER_STDERR, res.stderr.as_slice());
        }
    }
}
1943
1944 #[cfg(test)]
1945 #[cfg(feature = "dist-client")]
1946 mod test_dist {
1947 use crate::dist::pkg;
1948 use crate::dist::{
1949 self, AllocJobResult, CompileCommand, JobAlloc, JobComplete, JobId, OutputData,
1950 PathTransformer, ProcessOutput, RunJobResult, SchedulerStatusResult, ServerId,
1951 SubmitToolchainResult, Toolchain,
1952 };
1953 use std::cell::Cell;
1954 use std::path::{Path, PathBuf};
1955 use std::sync::Arc;
1956
1957 use crate::errors::*;
1958
1959 pub struct ErrorPutToolchainClient;
1960 impl ErrorPutToolchainClient {
1961 #[allow(clippy::new_ret_no_self)]
new() -> Arc<dyn dist::Client>1962 pub fn new() -> Arc<dyn dist::Client> {
1963 Arc::new(ErrorPutToolchainClient)
1964 }
1965 }
1966 impl dist::Client for ErrorPutToolchainClient {
do_alloc_job(&self, _: Toolchain) -> SFuture<AllocJobResult>1967 fn do_alloc_job(&self, _: Toolchain) -> SFuture<AllocJobResult> {
1968 unreachable!()
1969 }
do_get_status(&self) -> SFuture<SchedulerStatusResult>1970 fn do_get_status(&self) -> SFuture<SchedulerStatusResult> {
1971 unreachable!()
1972 }
do_submit_toolchain(&self, _: JobAlloc, _: Toolchain) -> SFuture<SubmitToolchainResult>1973 fn do_submit_toolchain(&self, _: JobAlloc, _: Toolchain) -> SFuture<SubmitToolchainResult> {
1974 unreachable!()
1975 }
do_run_job( &self, _: JobAlloc, _: CompileCommand, _: Vec<String>, _: Box<dyn pkg::InputsPackager>, ) -> SFuture<(RunJobResult, PathTransformer)>1976 fn do_run_job(
1977 &self,
1978 _: JobAlloc,
1979 _: CompileCommand,
1980 _: Vec<String>,
1981 _: Box<dyn pkg::InputsPackager>,
1982 ) -> SFuture<(RunJobResult, PathTransformer)> {
1983 unreachable!()
1984 }
put_toolchain( &self, _: &Path, _: &str, _: Box<dyn pkg::ToolchainPackager>, ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)>1985 fn put_toolchain(
1986 &self,
1987 _: &Path,
1988 _: &str,
1989 _: Box<dyn pkg::ToolchainPackager>,
1990 ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)> {
1991 f_err(anyhow!("put toolchain failure"))
1992 }
rewrite_includes_only(&self) -> bool1993 fn rewrite_includes_only(&self) -> bool {
1994 false
1995 }
get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf>1996 fn get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf> {
1997 None
1998 }
1999 }
2000
2001 pub struct ErrorAllocJobClient {
2002 tc: Toolchain,
2003 }
2004 impl ErrorAllocJobClient {
2005 #[allow(clippy::new_ret_no_self)]
new() -> Arc<dyn dist::Client>2006 pub fn new() -> Arc<dyn dist::Client> {
2007 Arc::new(Self {
2008 tc: Toolchain {
2009 archive_id: "somearchiveid".to_owned(),
2010 },
2011 })
2012 }
2013 }
2014 impl dist::Client for ErrorAllocJobClient {
do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult>2015 fn do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult> {
2016 assert_eq!(self.tc, tc);
2017 f_err(anyhow!("alloc job failure"))
2018 }
do_get_status(&self) -> SFuture<SchedulerStatusResult>2019 fn do_get_status(&self) -> SFuture<SchedulerStatusResult> {
2020 unreachable!()
2021 }
do_submit_toolchain(&self, _: JobAlloc, _: Toolchain) -> SFuture<SubmitToolchainResult>2022 fn do_submit_toolchain(&self, _: JobAlloc, _: Toolchain) -> SFuture<SubmitToolchainResult> {
2023 unreachable!()
2024 }
do_run_job( &self, _: JobAlloc, _: CompileCommand, _: Vec<String>, _: Box<dyn pkg::InputsPackager>, ) -> SFuture<(RunJobResult, PathTransformer)>2025 fn do_run_job(
2026 &self,
2027 _: JobAlloc,
2028 _: CompileCommand,
2029 _: Vec<String>,
2030 _: Box<dyn pkg::InputsPackager>,
2031 ) -> SFuture<(RunJobResult, PathTransformer)> {
2032 unreachable!()
2033 }
put_toolchain( &self, _: &Path, _: &str, _: Box<dyn pkg::ToolchainPackager>, ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)>2034 fn put_toolchain(
2035 &self,
2036 _: &Path,
2037 _: &str,
2038 _: Box<dyn pkg::ToolchainPackager>,
2039 ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)> {
2040 f_ok((self.tc.clone(), None))
2041 }
rewrite_includes_only(&self) -> bool2042 fn rewrite_includes_only(&self) -> bool {
2043 false
2044 }
get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf>2045 fn get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf> {
2046 None
2047 }
2048 }
2049
2050 pub struct ErrorSubmitToolchainClient {
2051 has_started: Cell<bool>,
2052 tc: Toolchain,
2053 }
2054 impl ErrorSubmitToolchainClient {
2055 #[allow(clippy::new_ret_no_self)]
new() -> Arc<dyn dist::Client>2056 pub fn new() -> Arc<dyn dist::Client> {
2057 Arc::new(Self {
2058 has_started: Cell::new(false),
2059 tc: Toolchain {
2060 archive_id: "somearchiveid".to_owned(),
2061 },
2062 })
2063 }
2064 }
2065 impl dist::Client for ErrorSubmitToolchainClient {
do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult>2066 fn do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult> {
2067 assert!(!self.has_started.replace(true));
2068 assert_eq!(self.tc, tc);
2069 f_ok(AllocJobResult::Success {
2070 job_alloc: JobAlloc {
2071 auth: "abcd".to_owned(),
2072 job_id: JobId(0),
2073 server_id: ServerId::new(([0, 0, 0, 0], 1).into()),
2074 },
2075 need_toolchain: true,
2076 })
2077 }
do_get_status(&self) -> SFuture<SchedulerStatusResult>2078 fn do_get_status(&self) -> SFuture<SchedulerStatusResult> {
2079 unreachable!()
2080 }
do_submit_toolchain( &self, job_alloc: JobAlloc, tc: Toolchain, ) -> SFuture<SubmitToolchainResult>2081 fn do_submit_toolchain(
2082 &self,
2083 job_alloc: JobAlloc,
2084 tc: Toolchain,
2085 ) -> SFuture<SubmitToolchainResult> {
2086 assert_eq!(job_alloc.job_id, JobId(0));
2087 assert_eq!(self.tc, tc);
2088 f_err(anyhow!("submit toolchain failure"))
2089 }
do_run_job( &self, _: JobAlloc, _: CompileCommand, _: Vec<String>, _: Box<dyn pkg::InputsPackager>, ) -> SFuture<(RunJobResult, PathTransformer)>2090 fn do_run_job(
2091 &self,
2092 _: JobAlloc,
2093 _: CompileCommand,
2094 _: Vec<String>,
2095 _: Box<dyn pkg::InputsPackager>,
2096 ) -> SFuture<(RunJobResult, PathTransformer)> {
2097 unreachable!()
2098 }
put_toolchain( &self, _: &Path, _: &str, _: Box<dyn pkg::ToolchainPackager>, ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)>2099 fn put_toolchain(
2100 &self,
2101 _: &Path,
2102 _: &str,
2103 _: Box<dyn pkg::ToolchainPackager>,
2104 ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)> {
2105 f_ok((self.tc.clone(), None))
2106 }
rewrite_includes_only(&self) -> bool2107 fn rewrite_includes_only(&self) -> bool {
2108 false
2109 }
get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf>2110 fn get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf> {
2111 None
2112 }
2113 }
2114
2115 pub struct ErrorRunJobClient {
2116 has_started: Cell<bool>,
2117 tc: Toolchain,
2118 }
2119 impl ErrorRunJobClient {
2120 #[allow(clippy::new_ret_no_self)]
new() -> Arc<dyn dist::Client>2121 pub fn new() -> Arc<dyn dist::Client> {
2122 Arc::new(Self {
2123 has_started: Cell::new(false),
2124 tc: Toolchain {
2125 archive_id: "somearchiveid".to_owned(),
2126 },
2127 })
2128 }
2129 }
2130 impl dist::Client for ErrorRunJobClient {
do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult>2131 fn do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult> {
2132 assert!(!self.has_started.replace(true));
2133 assert_eq!(self.tc, tc);
2134 f_ok(AllocJobResult::Success {
2135 job_alloc: JobAlloc {
2136 auth: "abcd".to_owned(),
2137 job_id: JobId(0),
2138 server_id: ServerId::new(([0, 0, 0, 0], 1).into()),
2139 },
2140 need_toolchain: true,
2141 })
2142 }
do_get_status(&self) -> SFuture<SchedulerStatusResult>2143 fn do_get_status(&self) -> SFuture<SchedulerStatusResult> {
2144 unreachable!()
2145 }
do_submit_toolchain( &self, job_alloc: JobAlloc, tc: Toolchain, ) -> SFuture<SubmitToolchainResult>2146 fn do_submit_toolchain(
2147 &self,
2148 job_alloc: JobAlloc,
2149 tc: Toolchain,
2150 ) -> SFuture<SubmitToolchainResult> {
2151 assert_eq!(job_alloc.job_id, JobId(0));
2152 assert_eq!(self.tc, tc);
2153 f_ok(SubmitToolchainResult::Success)
2154 }
do_run_job( &self, job_alloc: JobAlloc, command: CompileCommand, _: Vec<String>, _: Box<dyn pkg::InputsPackager>, ) -> SFuture<(RunJobResult, PathTransformer)>2155 fn do_run_job(
2156 &self,
2157 job_alloc: JobAlloc,
2158 command: CompileCommand,
2159 _: Vec<String>,
2160 _: Box<dyn pkg::InputsPackager>,
2161 ) -> SFuture<(RunJobResult, PathTransformer)> {
2162 assert_eq!(job_alloc.job_id, JobId(0));
2163 assert_eq!(command.executable, "/overridden/compiler");
2164 f_err(anyhow!("run job failure"))
2165 }
put_toolchain( &self, _: &Path, _: &str, _: Box<dyn pkg::ToolchainPackager>, ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)>2166 fn put_toolchain(
2167 &self,
2168 _: &Path,
2169 _: &str,
2170 _: Box<dyn pkg::ToolchainPackager>,
2171 ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)> {
2172 f_ok((
2173 self.tc.clone(),
2174 Some((
2175 "/overridden/compiler".to_owned(),
2176 PathBuf::from("somearchiveid"),
2177 )),
2178 ))
2179 }
rewrite_includes_only(&self) -> bool2180 fn rewrite_includes_only(&self) -> bool {
2181 false
2182 }
get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf>2183 fn get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf> {
2184 None
2185 }
2186 }
2187
2188 pub struct OneshotClient {
2189 has_started: Cell<bool>,
2190 tc: Toolchain,
2191 output: ProcessOutput,
2192 }
2193
2194 impl OneshotClient {
2195 #[allow(clippy::new_ret_no_self)]
new(code: i32, stdout: Vec<u8>, stderr: Vec<u8>) -> Arc<dyn dist::Client>2196 pub fn new(code: i32, stdout: Vec<u8>, stderr: Vec<u8>) -> Arc<dyn dist::Client> {
2197 Arc::new(Self {
2198 has_started: Cell::new(false),
2199 tc: Toolchain {
2200 archive_id: "somearchiveid".to_owned(),
2201 },
2202 output: ProcessOutput::fake_output(code, stdout, stderr),
2203 })
2204 }
2205 }
2206
2207 impl dist::Client for OneshotClient {
do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult>2208 fn do_alloc_job(&self, tc: Toolchain) -> SFuture<AllocJobResult> {
2209 assert!(!self.has_started.replace(true));
2210 assert_eq!(self.tc, tc);
2211
2212 f_ok(AllocJobResult::Success {
2213 job_alloc: JobAlloc {
2214 auth: "abcd".to_owned(),
2215 job_id: JobId(0),
2216 server_id: ServerId::new(([0, 0, 0, 0], 1).into()),
2217 },
2218 need_toolchain: true,
2219 })
2220 }
do_get_status(&self) -> SFuture<SchedulerStatusResult>2221 fn do_get_status(&self) -> SFuture<SchedulerStatusResult> {
2222 unreachable!()
2223 }
do_submit_toolchain( &self, job_alloc: JobAlloc, tc: Toolchain, ) -> SFuture<SubmitToolchainResult>2224 fn do_submit_toolchain(
2225 &self,
2226 job_alloc: JobAlloc,
2227 tc: Toolchain,
2228 ) -> SFuture<SubmitToolchainResult> {
2229 assert_eq!(job_alloc.job_id, JobId(0));
2230 assert_eq!(self.tc, tc);
2231
2232 f_ok(SubmitToolchainResult::Success)
2233 }
do_run_job( &self, job_alloc: JobAlloc, command: CompileCommand, outputs: Vec<String>, inputs_packager: Box<dyn pkg::InputsPackager>, ) -> SFuture<(RunJobResult, PathTransformer)>2234 fn do_run_job(
2235 &self,
2236 job_alloc: JobAlloc,
2237 command: CompileCommand,
2238 outputs: Vec<String>,
2239 inputs_packager: Box<dyn pkg::InputsPackager>,
2240 ) -> SFuture<(RunJobResult, PathTransformer)> {
2241 assert_eq!(job_alloc.job_id, JobId(0));
2242 assert_eq!(command.executable, "/overridden/compiler");
2243
2244 let mut inputs = vec![];
2245 let path_transformer = inputs_packager.write_inputs(&mut inputs).unwrap();
2246 let outputs = outputs
2247 .into_iter()
2248 .map(|name| {
2249 let data = format!("some data in {}", name);
2250 let data = OutputData::try_from_reader(data.as_bytes()).unwrap();
2251 (name, data)
2252 })
2253 .collect();
2254 let result = RunJobResult::Complete(JobComplete {
2255 output: self.output.clone(),
2256 outputs,
2257 });
2258 f_ok((result, path_transformer))
2259 }
put_toolchain( &self, _: &Path, _: &str, _: Box<dyn pkg::ToolchainPackager>, ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)>2260 fn put_toolchain(
2261 &self,
2262 _: &Path,
2263 _: &str,
2264 _: Box<dyn pkg::ToolchainPackager>,
2265 ) -> SFuture<(Toolchain, Option<(String, PathBuf)>)> {
2266 f_ok((
2267 self.tc.clone(),
2268 Some((
2269 "/overridden/compiler".to_owned(),
2270 PathBuf::from("somearchiveid"),
2271 )),
2272 ))
2273 }
rewrite_includes_only(&self) -> bool2274 fn rewrite_includes_only(&self) -> bool {
2275 false
2276 }
get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf>2277 fn get_custom_toolchain(&self, _exe: &PathBuf) -> Option<PathBuf> {
2278 None
2279 }
2280 }
2281 }
2282