use crate::QueryCtxt;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, RwLock};
use rustc_data_structures::unhash::UnhashMap;
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, StableCrateId, LOCAL_CRATE};
use rustc_hir::definitions::DefPathHash;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::dep_graph::{DepNodeIndex, SerializedDepNodeIndex};
use rustc_middle::mir::interpret::{AllocDecodingSession, AllocDecodingState};
use rustc_middle::mir::{self, interpret};
use rustc_middle::thir;
use rustc_middle::ty::codec::{RefDecodable, TyDecoder, TyEncoder};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_query_system::dep_graph::DepContext;
use rustc_query_system::query::{QueryContext, QuerySideEffects};
use rustc_serialize::{
    opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize},
    Decodable, Decoder, Encodable, Encoder,
};
use rustc_session::Session;
use rustc_span::hygiene::{
    ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData,
};
use rustc_span::source_map::{SourceMap, StableSourceFileId};
use rustc_span::CachingSourceMapView;
use rustc_span::{BytePos, ExpnData, ExpnHash, Pos, SourceFile, Span};
use std::mem;

const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;

// A normal span encoded with both location information and a `SyntaxContext`
const TAG_FULL_SPAN: u8 = 0;
// A partial span with no location information, encoded only with a `SyntaxContext`
const TAG_PARTIAL_SPAN: u8 = 1;
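// A span encoded relative to its enclosing item: only the offsets of `lo` and `hi`
// from the start of the parent definition's span are stored, plus the `SyntaxContext`
// (see the `Span` encode/decode impls below).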
const TAG_RELATIVE_SPAN: u8 = 2;

const TAG_SYNTAX_CONTEXT: u8 = 0;
const TAG_EXPN_DATA: u8 = 1;

/// Provides an interface to incremental compilation data cached from the
/// previous compilation session. This data will eventually include the results
/// of a few selected queries (like `typeck` and `mir_optimized`) and
/// any side effects that have been emitted during a query.
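///
/// Roughly, the serialized cache consists of the encoded query results and
/// side effects, followed by the interpreter allocations and hygiene data, a
/// footer containing the lookup tables that map back into this data, and
/// finally the absolute position of that footer as the last eight bytes of
/// the file.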
pub struct OnDiskCache<'sess> {
    // The complete cache data in serialized form.
    serialized_data: RwLock<Option<Mmap>>,

    // Collects all `QuerySideEffects` created during the current compilation
    // session.
    current_side_effects: Lock<FxHashMap<DepNodeIndex, QuerySideEffects>>,

    source_map: &'sess SourceMap,
    file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,

    // Caches that are populated lazily during decoding.
    file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,

    // A map from dep-node to the position of the cached query result in
    // `serialized_data`.
    query_result_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,

    // A map from dep-node to the position of any associated `QuerySideEffects` in
    // `serialized_data`.
    prev_side_effects_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,

    alloc_decoding_state: AllocDecodingState,

    // A map from syntax context ids to the position of their associated
    // `SyntaxContextData`. We use a `u32` instead of a `SyntaxContext`
    // to represent the fact that we are storing *encoded* ids. When we decode
    // a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
    // which will almost certainly be different from the serialized id.
    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
    // A map from the `ExpnHash` of an `ExpnId` to the position
    // of its associated `ExpnData`. Ideally, we would store a `DefId`,
    // but we need to decode this before we've constructed a `TyCtxt` (which
    // makes it difficult to decode a `DefId`).

    // Note that these hashes correspond to both local and foreign
    // `ExpnData` (e.g. `ExpnData.krate` may not be `LOCAL_CRATE`). Alternatively,
    // we could look up the `ExpnData` from the metadata of foreign crates,
    // but it seemed easier to have `OnDiskCache` be independent of the `CStore`.
    expn_data: UnhashMap<ExpnHash, AbsoluteBytePos>,
    // Additional information used when decoding hygiene data.
    hygiene_context: HygieneDecodeContext,
    // Maps `ExpnHash`es to their raw value from the *previous*
    // compilation session. This is used as an initial 'guess' when
    // we try to map an `ExpnHash` to its value in the current
    // compilation session.
    foreign_expn_data: UnhashMap<ExpnHash, u32>,
}

// This type is used only for serialization and deserialization.
#[derive(Encodable, Decodable)]
struct Footer {
    file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,
    query_result_index: EncodedDepNodeIndex,
    side_effects_index: EncodedDepNodeIndex,
    // The location of all allocations.
    interpret_alloc_index: Vec<u32>,
    // See `OnDiskCache.syntax_contexts`
    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
    // See `OnDiskCache.expn_data`
    expn_data: UnhashMap<ExpnHash, AbsoluteBytePos>,
    foreign_expn_data: UnhashMap<ExpnHash, u32>,
}

pub type EncodedDepNodeIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
struct SourceFileIndex(u32);

#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Encodable, Decodable)]
pub struct AbsoluteBytePos(u32);

impl AbsoluteBytePos {
    fn new(pos: usize) -> AbsoluteBytePos {
        debug_assert!(pos <= u32::MAX as usize);
        AbsoluteBytePos(pos as u32)
    }

    fn to_usize(self) -> usize {
        self.0 as usize
    }
}

/// An `EncodedSourceFileId` is the same as a `StableSourceFileId` except that
/// the source crate is represented as a [StableCrateId] instead of as a
/// `CrateNum`. This way `EncodedSourceFileId` can be encoded and decoded
/// without any additional context, i.e. with a simple `opaque::Decoder` (which
/// is the only thing available when decoding the cache's [Footer]).
#[derive(Encodable, Decodable, Clone, Debug)]
struct EncodedSourceFileId {
    file_name_hash: u64,
    stable_crate_id: StableCrateId,
}

impl EncodedSourceFileId {
    fn translate(&self, tcx: TyCtxt<'_>) -> StableSourceFileId {
        let cnum = tcx.stable_crate_id_to_crate_num(self.stable_crate_id);
        StableSourceFileId { file_name_hash: self.file_name_hash, cnum }
    }

    fn new(tcx: TyCtxt<'_>, file: &SourceFile) -> EncodedSourceFileId {
        let source_file_id = StableSourceFileId::new(file);
        EncodedSourceFileId {
            file_name_hash: source_file_id.file_name_hash,
            stable_crate_id: tcx.stable_crate_id(source_file_id.cnum),
        }
    }
}

impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
    /// Creates a new `OnDiskCache` instance from the serialized data in `data`.
    fn new(sess: &'sess Session, data: Mmap, start_pos: usize) -> Self {
        debug_assert!(sess.opts.incremental.is_some());

        // Wrap in a scope so we can borrow `data`.
        let footer: Footer = {
            let mut decoder = opaque::Decoder::new(&data[..], start_pos);

            // Decode the *position* of the footer, which can be found in the
            // last 8 bytes of the file.
            decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE);
            let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder)
                .expect("error while trying to decode footer position")
                .0 as usize;

            // Decode the file footer, which contains all the lookup tables, etc.
            decoder.set_position(footer_pos);

            decode_tagged(&mut decoder, TAG_FILE_FOOTER)
                .expect("error while trying to decode footer position")
        };

        Self {
            serialized_data: RwLock::new(Some(data)),
            file_index_to_stable_id: footer.file_index_to_stable_id,
            file_index_to_file: Default::default(),
            source_map: sess.source_map(),
            current_side_effects: Default::default(),
            query_result_index: footer.query_result_index.into_iter().collect(),
            prev_side_effects_index: footer.side_effects_index.into_iter().collect(),
            alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
            syntax_contexts: footer.syntax_contexts,
            expn_data: footer.expn_data,
            foreign_expn_data: footer.foreign_expn_data,
            hygiene_context: Default::default(),
        }
    }

    fn new_empty(source_map: &'sess SourceMap) -> Self {
        Self {
            serialized_data: RwLock::new(None),
            file_index_to_stable_id: Default::default(),
            file_index_to_file: Default::default(),
            source_map,
            current_side_effects: Default::default(),
            query_result_index: Default::default(),
            prev_side_effects_index: Default::default(),
            alloc_decoding_state: AllocDecodingState::new(Vec::new()),
            syntax_contexts: FxHashMap::default(),
            expn_data: UnhashMap::default(),
            foreign_expn_data: UnhashMap::default(),
            hygiene_context: Default::default(),
        }
    }

    /// Execute all cache promotions and release the serialized backing Mmap.
    ///
    /// Cache promotions require invoking queries, which needs to read the serialized data.
    /// In order to serialize the new on-disk cache, the former on-disk cache file needs to be
    /// deleted, hence we won't be able to refer to its memmapped data.
    fn drop_serialized_data(&self, tcx: TyCtxt<'tcx>) {
        // Load everything into memory so we can write it out to the on-disk
        // cache. The vast majority of cacheable query results should already
        // be in memory, so this should be a cheap operation.
        // Do this *before* we clone 'latest_foreign_def_path_hashes', since
        // loading existing queries may cause us to create new DepNodes, which
        // may in turn end up invoking `store_foreign_def_id_hash`
        tcx.dep_graph.exec_cache_promotions(tcx);

        *self.serialized_data.write() = None;
    }

    fn serialize<'tcx>(&self, tcx: TyCtxt<'tcx>, encoder: &mut FileEncoder) -> FileEncodeResult {
        // Serializing the `DepGraph` should not modify it.
        tcx.dep_graph.with_ignore(|| {
            // Allocate `SourceFileIndex`es.
            let (file_to_file_index, file_index_to_stable_id) = {
                let files = tcx.sess.source_map().files();
                let mut file_to_file_index =
                    FxHashMap::with_capacity_and_hasher(files.len(), Default::default());
                let mut file_index_to_stable_id =
                    FxHashMap::with_capacity_and_hasher(files.len(), Default::default());

                for (index, file) in files.iter().enumerate() {
                    let index = SourceFileIndex(index as u32);
                    let file_ptr: *const SourceFile = &**file as *const _;
                    file_to_file_index.insert(file_ptr, index);
                    let source_file_id = EncodedSourceFileId::new(tcx, &file);
                    file_index_to_stable_id.insert(index, source_file_id);
                }

                (file_to_file_index, file_index_to_stable_id)
            };

            let hygiene_encode_context = HygieneEncodeContext::default();

            let mut encoder = CacheEncoder {
                tcx,
                encoder,
                type_shorthands: Default::default(),
                predicate_shorthands: Default::default(),
                interpret_allocs: Default::default(),
                source_map: CachingSourceMapView::new(tcx.sess.source_map()),
                file_to_file_index,
                hygiene_context: &hygiene_encode_context,
            };

            // Encode query results.
            let mut query_result_index = EncodedDepNodeIndex::new();

            tcx.sess.time("encode_query_results", || -> FileEncodeResult {
                let enc = &mut encoder;
                let qri = &mut query_result_index;
                QueryCtxt::from_tcx(tcx).encode_query_results(enc, qri)
            })?;

            // Encode side effects.
            let side_effects_index: EncodedDepNodeIndex = self
                .current_side_effects
                .borrow()
                .iter()
                .map(
                    |(dep_node_index, side_effects)| -> Result<_, <FileEncoder as Encoder>::Error> {
                        let pos = AbsoluteBytePos::new(encoder.position());
                        let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index());
                        encoder.encode_tagged(dep_node_index, side_effects)?;

                        Ok((dep_node_index, pos))
                    },
                )
                .collect::<Result<_, _>>()?;

            let interpret_alloc_index = {
                let mut interpret_alloc_index = Vec::new();
                let mut n = 0;
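                // Encoding an allocation may itself register further `AllocId`s in
                // `interpret_allocs` (e.g. for pointers the allocation contains), so
                // keep looping until no new IDs are added.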
                loop {
                    let new_n = encoder.interpret_allocs.len();
                    // If we have found new IDs, serialize those too.
                    if n == new_n {
                        // Otherwise, abort.
                        break;
                    }
                    interpret_alloc_index.reserve(new_n - n);
                    for idx in n..new_n {
                        let id = encoder.interpret_allocs[idx];
                        let pos = encoder.position() as u32;
                        interpret_alloc_index.push(pos);
                        interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?;
                    }
                    n = new_n;
                }
                interpret_alloc_index
            };

            let mut syntax_contexts = FxHashMap::default();
            let mut expn_data = UnhashMap::default();
            let mut foreign_expn_data = UnhashMap::default();

            // Encode all hygiene data (`SyntaxContextData` and `ExpnData`) from the current
            // session.

            hygiene_encode_context.encode(
                &mut encoder,
                |encoder, index, ctxt_data| -> FileEncodeResult {
                    let pos = AbsoluteBytePos::new(encoder.position());
                    encoder.encode_tagged(TAG_SYNTAX_CONTEXT, ctxt_data)?;
                    syntax_contexts.insert(index, pos);
                    Ok(())
                },
                |encoder, expn_id, data, hash| -> FileEncodeResult {
                    if expn_id.krate == LOCAL_CRATE {
                        let pos = AbsoluteBytePos::new(encoder.position());
                        encoder.encode_tagged(TAG_EXPN_DATA, data)?;
                        expn_data.insert(hash, pos);
                    } else {
                        foreign_expn_data.insert(hash, expn_id.local_id.as_u32());
                    }
                    Ok(())
                },
            )?;

            // Encode the file footer.
            let footer_pos = encoder.position() as u64;
            encoder.encode_tagged(
                TAG_FILE_FOOTER,
                &Footer {
                    file_index_to_stable_id,
                    query_result_index,
                    side_effects_index,
                    interpret_alloc_index,
                    syntax_contexts,
                    expn_data,
                    foreign_expn_data,
                },
            )?;

            // Encode the position of the footer as the last 8 bytes of the
            // file so we know where to look for it.
            IntEncodedWithFixedSize(footer_pos).encode(encoder.encoder)?;

            // DO NOT WRITE ANYTHING TO THE ENCODER AFTER THIS POINT! The address
            // of the footer must be the last thing in the data stream.

            Ok(())
        })
    }
}

impl<'sess> OnDiskCache<'sess> {
    pub fn as_dyn(&self) -> &dyn rustc_middle::ty::OnDiskCache<'sess> {
        self as _
    }

    /// Loads a `QuerySideEffects` created during the previous compilation session.
    pub fn load_side_effects(
        &self,
        tcx: TyCtxt<'_>,
        dep_node_index: SerializedDepNodeIndex,
    ) -> QuerySideEffects {
        let side_effects: Option<QuerySideEffects> =
            self.load_indexed(tcx, dep_node_index, &self.prev_side_effects_index, "side_effects");

        side_effects.unwrap_or_default()
    }

    /// Stores a `QuerySideEffects` emitted during the current compilation session.
    /// Anything stored like this will be available via `load_side_effects` in
    /// the next compilation session.
    #[inline(never)]
    #[cold]
    pub fn store_side_effects(&self, dep_node_index: DepNodeIndex, side_effects: QuerySideEffects) {
        let mut current_side_effects = self.current_side_effects.borrow_mut();
        let prev = current_side_effects.insert(dep_node_index, side_effects);
        debug_assert!(prev.is_none());
    }

    /// Returns the cached query result if there is something in the cache for
    /// the given `SerializedDepNodeIndex`; otherwise returns `None`.
    pub fn try_load_query_result<'tcx, T>(
        &self,
        tcx: TyCtxt<'tcx>,
        dep_node_index: SerializedDepNodeIndex,
    ) -> Option<T>
    where
        T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
    {
        self.load_indexed(tcx, dep_node_index, &self.query_result_index, "query result")
    }

    /// Stores side effects emitted during computation of an anonymous query.
    /// Since many anonymous queries can share the same `DepNode`, we aggregate
    /// them -- as opposed to regular queries, where we assume that there is a
    /// 1:1 relationship between query-key and `DepNode`.
    #[inline(never)]
    #[cold]
    pub fn store_side_effects_for_anon_node(
        &self,
        dep_node_index: DepNodeIndex,
        side_effects: QuerySideEffects,
    ) {
        let mut current_side_effects = self.current_side_effects.borrow_mut();

        let x = current_side_effects.entry(dep_node_index).or_default();
        x.append(side_effects);
    }

    fn load_indexed<'tcx, T>(
        &self,
        tcx: TyCtxt<'tcx>,
        dep_node_index: SerializedDepNodeIndex,
        index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
        debug_tag: &'static str,
    ) -> Option<T>
    where
        T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
    {
        let pos = index.get(&dep_node_index).cloned()?;

        self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) {
            Ok(v) => Some(v),
            Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
        })
    }

    fn with_decoder<'a, 'tcx, T, F: for<'s> FnOnce(&mut CacheDecoder<'s, 'tcx>) -> T>(
        &'sess self,
        tcx: TyCtxt<'tcx>,
        pos: AbsoluteBytePos,
        f: F,
    ) -> T
    where
        T: Decodable<CacheDecoder<'a, 'tcx>>,
    {
        let serialized_data = self.serialized_data.read();
        let mut decoder = CacheDecoder {
            tcx,
            opaque: opaque::Decoder::new(serialized_data.as_deref().unwrap_or(&[]), pos.to_usize()),
            source_map: self.source_map,
            file_index_to_file: &self.file_index_to_file,
            file_index_to_stable_id: &self.file_index_to_stable_id,
            alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
            syntax_contexts: &self.syntax_contexts,
            expn_data: &self.expn_data,
            foreign_expn_data: &self.foreign_expn_data,
            hygiene_context: &self.hygiene_context,
        };
        f(&mut decoder)
    }
}

//- DECODING -------------------------------------------------------------------

/// A decoder that can read from the incremental compilation cache. It is similar to the one
/// we use for crate metadata decoding in that it can rebase spans and eventually
/// will also handle things that contain `Ty` instances.
pub struct CacheDecoder<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    opaque: opaque::Decoder<'a>,
    source_map: &'a SourceMap,
    file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
    file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, EncodedSourceFileId>,
    alloc_decoding_session: AllocDecodingSession<'a>,
    syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
    expn_data: &'a UnhashMap<ExpnHash, AbsoluteBytePos>,
    foreign_expn_data: &'a UnhashMap<ExpnHash, u32>,
    hygiene_context: &'a HygieneDecodeContext,
}

impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
    fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
        let CacheDecoder {
            tcx,
            ref file_index_to_file,
            ref file_index_to_stable_id,
            ref source_map,
            ..
        } = *self;

        file_index_to_file
            .borrow_mut()
            .entry(index)
            .or_insert_with(|| {
                let stable_id = file_index_to_stable_id[&index].translate(tcx);
                source_map
                    .source_file_by_stable_id(stable_id)
                    .expect("failed to lookup `SourceFile` in new context")
            })
            .clone()
    }
}

trait DecoderWithPosition: Decoder {
    fn position(&self) -> usize;
}

impl<'a> DecoderWithPosition for opaque::Decoder<'a> {
    fn position(&self) -> usize {
        self.position()
    }
}

impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
    fn position(&self) -> usize {
        self.opaque.position()
    }
}

// Decodes something that was encoded with `encode_tagged()` and verifies that the
// tag matches and the correct number of bytes was read.
fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> Result<V, D::Error>
where
    T: Decodable<D> + Eq + std::fmt::Debug,
    V: Decodable<D>,
    D: DecoderWithPosition,
{
    let start_pos = decoder.position();

    let actual_tag = T::decode(decoder)?;
    assert_eq!(actual_tag, expected_tag);
    let value = V::decode(decoder)?;
    let end_pos = decoder.position();

    let expected_len: u64 = Decodable::decode(decoder)?;
    assert_eq!((end_pos - start_pos) as u64, expected_len);

    Ok(value)
}

impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
    const CLEAR_CROSS_CRATE: bool = false;

    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    #[inline]
    fn position(&self) -> usize {
        self.opaque.position()
    }

    #[inline]
    fn peek_byte(&self) -> u8 {
        self.opaque.data[self.opaque.position()]
    }

    fn cached_ty_for_shorthand<F>(
        &mut self,
        shorthand: usize,
        or_insert_with: F,
    ) -> Result<Ty<'tcx>, Self::Error>
    where
        F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>,
    {
        let tcx = self.tcx();

        let cache_key = ty::CReaderCacheKey { cnum: None, pos: shorthand };

        if let Some(&ty) = tcx.ty_rcache.borrow().get(&cache_key) {
            return Ok(ty);
        }

        let ty = or_insert_with(self)?;
        // This may overwrite the entry, but it should overwrite with the same value.
        tcx.ty_rcache.borrow_mut().insert_same(cache_key, ty);
        Ok(ty)
    }

    fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
    where
        F: FnOnce(&mut Self) -> R,
    {
        debug_assert!(pos < self.opaque.data.len());

        let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
        let old_opaque = mem::replace(&mut self.opaque, new_opaque);
        let r = f(self);
        self.opaque = old_opaque;
        r
    }

    fn decode_alloc_id(&mut self) -> Result<interpret::AllocId, Self::Error> {
        let alloc_decoding_session = self.alloc_decoding_session;
        alloc_decoding_session.decode_alloc_id(self)
    }
}

rustc_middle::implement_ty_decoder!(CacheDecoder<'a, 'tcx>);

// This ensures that the `Decodable<opaque::Decoder>::decode` specialization for `Vec<u8>` is used
// when a `CacheDecoder` is passed to `Decodable::decode`. Unfortunately, we have to manually opt
// into specializations this way, given how `CacheDecoder` and the decoding traits currently work.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Vec<u8> {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        Decodable::decode(&mut d.opaque)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for SyntaxContext {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        let syntax_contexts = decoder.syntax_contexts;
        rustc_span::hygiene::decode_syntax_context(decoder, decoder.hygiene_context, |this, id| {
            // This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
            // We look up the position of the associated `SyntaxContextData` and decode it.
            let pos = syntax_contexts.get(&id).unwrap();
            this.with_position(pos.to_usize(), |decoder| {
                let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?;
                Ok(data)
            })
        })
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        let hash = ExpnHash::decode(decoder)?;
        if hash.is_root() {
            return Ok(ExpnId::root());
        }

        if let Some(expn_id) = ExpnId::from_hash(hash) {
            return Ok(expn_id);
        }

        let krate = decoder.tcx.stable_crate_id_to_crate_num(hash.stable_crate_id());

        let expn_id = if krate == LOCAL_CRATE {
            // We look up the position of the associated `ExpnData` and decode it.
            let pos = decoder
                .expn_data
                .get(&hash)
                .unwrap_or_else(|| panic!("Bad hash {:?} (map {:?})", hash, decoder.expn_data));

            let data: ExpnData = decoder
                .with_position(pos.to_usize(), |decoder| decode_tagged(decoder, TAG_EXPN_DATA))?;
            let expn_id = rustc_span::hygiene::register_local_expn_id(data, hash);

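            // In debug builds, verify that re-hashing the decoded `ExpnData`
            // reproduces the `ExpnHash` we used to look it up.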
            #[cfg(debug_assertions)]
            {
                use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
                let mut hcx = decoder.tcx.create_stable_hashing_context();
                let mut hasher = StableHasher::new();
                hcx.while_hashing_spans(true, |hcx| {
                    expn_id.expn_data().hash_stable(hcx, &mut hasher)
                });
                let local_hash: u64 = hasher.finish();
                debug_assert_eq!(hash.local_hash(), local_hash);
            }

            expn_id
        } else {
            let index_guess = decoder.foreign_expn_data[&hash];
            decoder.tcx.cstore_untracked().expn_hash_to_expn_id(
                decoder.tcx.sess,
                krate,
                index_guess,
                hash,
            )
        };

        debug_assert_eq!(expn_id.krate, krate);
        Ok(expn_id)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        let ctxt = SyntaxContext::decode(decoder)?;
        let parent = Option::<LocalDefId>::decode(decoder)?;
        let tag: u8 = Decodable::decode(decoder)?;

        if tag == TAG_PARTIAL_SPAN {
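            // No location information was encoded, so produce a dummy span that
            // still carries the decoded hygiene and parent information.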
            return Ok(Span::new(BytePos(0), BytePos(0), ctxt, parent));
        } else if tag == TAG_RELATIVE_SPAN {
            let dlo = u32::decode(decoder)?;
            let dto = u32::decode(decoder)?;

            let enclosing =
                decoder.tcx.definitions_untracked().def_span(parent.unwrap()).data_untracked();
            let span = Span::new(
                enclosing.lo + BytePos::from_u32(dlo),
                enclosing.lo + BytePos::from_u32(dto),
                ctxt,
                parent,
            );

            return Ok(span);
        } else {
            debug_assert_eq!(tag, TAG_FULL_SPAN);
        }

        let file_lo_index = SourceFileIndex::decode(decoder)?;
        let line_lo = usize::decode(decoder)?;
        let col_lo = BytePos::decode(decoder)?;
        let len = BytePos::decode(decoder)?;

        let file_lo = decoder.file_index_to_file(file_lo_index);
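        // `line_lo` is 1-based (as produced by `byte_pos_to_line_and_col` on the
        // encoding side), while `lines` is indexed from zero.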
        let lo = file_lo.lines[line_lo - 1] + col_lo;
        let hi = lo + len;

        Ok(Span::new(lo, hi, ctxt, parent))
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        let stable_id = StableCrateId::decode(d)?;
        let cnum = d.tcx.stable_crate_id_to_crate_num(stable_id);
        Ok(cnum)
    }
}

// This impl makes sure that we get a runtime error when we try to decode a
// `DefIndex` that is not contained in a `DefId`. Such a case would be problematic
// because we would not know how to transform the `DefIndex` to the current
// context.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<DefIndex, String> {
        Err(d.error("trying to decode `DefIndex` outside the context of a `DefId`"))
    }
}

// Both the `CrateNum` and the `DefIndex` of a `DefId` can change in between two
// compilation sessions. We use the `DefPathHash`, which is stable across
// sessions, to map the old `DefId` to the new one.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Load the `DefPathHash`, which is what we encoded the `DefId` as.
        let def_path_hash = DefPathHash::decode(d)?;

        // Using the `DefPathHash`, we can lookup the new `DefId`.
        // Subtle: We only encode a `DefId` as part of a query result.
        // If we get to this point, then all of the query inputs were green,
        // which means that the definition with this hash is guaranteed to
        // still exist in the current compilation session.
        Ok(d.tcx().def_path_hash_to_def_id(def_path_hash))
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx FxHashSet<LocalDefId> {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
    for &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>>
{
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [thir::abstract_const::Node<'tcx>] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [Span] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

//- ENCODING -------------------------------------------------------------------

pub trait OpaqueEncoder: Encoder {
    fn position(&self) -> usize;
}

impl OpaqueEncoder for FileEncoder {
    #[inline]
    fn position(&self) -> usize {
        FileEncoder::position(self)
    }
}

/// An encoder that can write to the incremental compilation cache.
pub struct CacheEncoder<'a, 'tcx, E: OpaqueEncoder> {
    tcx: TyCtxt<'tcx>,
    encoder: &'a mut E,
    type_shorthands: FxHashMap<Ty<'tcx>, usize>,
    predicate_shorthands: FxHashMap<ty::PredicateKind<'tcx>, usize>,
    interpret_allocs: FxIndexSet<interpret::AllocId>,
    source_map: CachingSourceMapView<'tcx>,
    file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
    hygiene_context: &'a HygieneEncodeContext,
}

impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
        self.file_to_file_index[&(&*source_file as *const SourceFile)]
    }

    /// Encode something with additional information that allows us to do some
    /// sanity checks when decoding the data again. This method will first
    /// encode the specified tag, then the given value, then the number of
    /// bytes taken up by tag and value. On decoding, we can then verify that
    /// we get the expected tag and read the expected number of bytes.
    fn encode_tagged<T: Encodable<Self>, V: Encodable<Self>>(
        &mut self,
        tag: T,
        value: &V,
    ) -> Result<(), E::Error> {
        let start_pos = self.position();

        tag.encode(self)?;
        value.encode(self)?;

        let end_pos = self.position();
        ((end_pos - start_pos) as u64).encode(self)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for SyntaxContext
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        rustc_span::hygiene::raw_encode_syntax_context(*self, s.hygiene_context, s)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for ExpnId
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        s.hygiene_context.schedule_expn_data_for_encoding(*self);
        self.expn_hash().encode(s)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for Span
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        let span_data = self.data_untracked();
        span_data.ctxt.encode(s)?;
        span_data.parent.encode(s)?;

        if span_data.is_dummy() {
            return TAG_PARTIAL_SPAN.encode(s);
        }

        if let Some(parent) = span_data.parent {
            let enclosing = s.tcx.definitions_untracked().def_span(parent).data_untracked();
            if enclosing.contains(span_data) {
                TAG_RELATIVE_SPAN.encode(s)?;
                (span_data.lo - enclosing.lo).to_u32().encode(s)?;
                (span_data.hi - enclosing.lo).to_u32().encode(s)?;
                return Ok(());
            }
        }

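        // If `lo` cannot be resolved to a source file, or `hi` does not fall
        // into the same file, fall back to a partial span without location
        // information.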
        let pos = s.source_map.byte_pos_to_line_and_col(span_data.lo);
        let partial_span = match &pos {
            Some((file_lo, _, _)) => !file_lo.contains(span_data.hi),
            None => true,
        };

        if partial_span {
            return TAG_PARTIAL_SPAN.encode(s);
        }

        let (file_lo, line_lo, col_lo) = pos.unwrap();

        let len = span_data.hi - span_data.lo;

        let source_file_index = s.source_file_index(file_lo);

        TAG_FULL_SPAN.encode(s)?;
        source_file_index.encode(s)?;
        line_lo.encode(s)?;
        col_lo.encode(s)?;
        len.encode(s)
    }
}

impl<'a, 'tcx, E> TyEncoder<'tcx> for CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    const CLEAR_CROSS_CRATE: bool = false;

    fn position(&self) -> usize {
        self.encoder.position()
    }
    fn type_shorthands(&mut self) -> &mut FxHashMap<Ty<'tcx>, usize> {
        &mut self.type_shorthands
    }
    fn predicate_shorthands(&mut self) -> &mut FxHashMap<ty::PredicateKind<'tcx>, usize> {
        &mut self.predicate_shorthands
    }
    fn encode_alloc_id(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);

        index.encode(self)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for CrateNum
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        s.tcx.stable_crate_id(*self).encode(s)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for DefId
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        s.tcx.def_path_hash(*self).encode(s)
    }
}

impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for DefIndex
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, _: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        bug!("encoding `DefIndex` without context");
    }
}

macro_rules! encoder_methods {
    ($($name:ident($ty:ty);)*) => {
        #[inline]
        $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
            self.encoder.$name(value)
        })*
    }
}

impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    type Error = E::Error;

    #[inline]
    fn emit_unit(&mut self) -> Result<(), Self::Error> {
        Ok(())
    }

    encoder_methods! {
        emit_usize(usize);
        emit_u128(u128);
        emit_u64(u64);
        emit_u32(u32);
        emit_u16(u16);
        emit_u8(u8);

        emit_isize(isize);
        emit_i128(i128);
        emit_i64(i64);
        emit_i32(i32);
        emit_i16(i16);
        emit_i8(i8);

        emit_bool(bool);
        emit_f64(f64);
        emit_f32(f32);
        emit_char(char);
        emit_str(&str);
        emit_raw_bytes(&[u8]);
    }
}

// This ensures that the `Encodable<opaque::FileEncoder>::encode` specialization for byte slices
// is used when a `CacheEncoder` having an `opaque::FileEncoder` is passed to `Encodable::encode`.
// Unfortunately, we have to manually opt into specializations this way, given how `CacheEncoder`
// and the encoding traits currently work.
impl<'a, 'tcx> Encodable<CacheEncoder<'a, 'tcx, FileEncoder>> for [u8] {
    fn encode(&self, e: &mut CacheEncoder<'a, 'tcx, FileEncoder>) -> FileEncodeResult {
        self.encode(e.encoder)
    }
}

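/// Encodes all in-memory results of the query `Q` that are marked
/// cache-on-disk, tagging each entry with its `SerializedDepNodeIndex` and
/// recording its byte position in `query_result_index`.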
pub fn encode_query_results<'a, 'tcx, CTX, Q>(
    tcx: CTX,
    encoder: &mut CacheEncoder<'a, 'tcx, FileEncoder>,
    query_result_index: &mut EncodedDepNodeIndex,
) -> FileEncodeResult
where
    CTX: QueryContext + 'tcx,
    Q: super::QueryDescription<CTX>,
    Q::Value: Encodable<CacheEncoder<'a, 'tcx, FileEncoder>>,
{
    let _timer = tcx
        .dep_context()
        .profiler()
        .extra_verbose_generic_activity("encode_query_results_for", std::any::type_name::<Q>());

    assert!(Q::query_state(tcx).all_inactive());
    let cache = Q::query_cache(tcx);
    let mut res = Ok(());
    cache.iter_results(&mut |key, value, dep_node| {
        if res.is_err() {
            return;
        }
        if Q::cache_on_disk(*tcx.dep_context(), &key) {
            let dep_node = SerializedDepNodeIndex::new(dep_node.index());

            // Record position of the cache entry.
            query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.encoder.position())));

            // Encode the query result with the `SerializedDepNodeIndex`
            // as tag.
            match encoder.encode_tagged(dep_node, value) {
                Ok(()) => {}
                Err(e) => {
                    res = Err(e);
                }
            }
        }
    });

    res
}