/*
 * Copyright (c) 2013 The Native Client Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_
#define NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_

#include "native_client/src/trusted/validator_arm/inst_classes.h"
#include "native_client/src/trusted/validator_arm/validator.h"

// The following static inline methods are defined here so that methods in
// class ClassDecoder (and classes derived from it) can call them inline.
// Keeping all of the code for each type of violation in a single file makes
// it easier to maintain than spreading parts of it across the Python code
// generator files.

namespace nacl_arm_dec {

// Reports unsafe loads/stores of a base address by the given instruction
// pair. If the instruction pair defines a safe load/store of a base address,
// it updates the critical set with the address of the second instruction,
// so that later code can check that the instruction pair is atomic.
//
// See the comment associated with Violation::LOADSTORE_VIOLATION for details
// on when a base address is considered safe.
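//
// For illustration only (the mask value below is an assumption about the
// sandbox configuration, not something defined in this file), a typical
// safe pair looks like:
//    bic r1, r1, #0xc0000000  ; first: clears the data-address mask bits
//    str r0, [r1]             ; second: load/store through the masked base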
static inline ViolationSet get_loadstore_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {
  Register base = second.base_address_register();

  if (base.Equals(Register::None())  // not a load/store
      || sfi.is_data_address_register(base)) {
    return kNoViolations;
  }

  // PC + immediate addressing is always safe.
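  // (A literal load such as "ldr r0, [pc, #offset]" computes its address from
  // pc plus a fixed immediate, so untrusted register values cannot redirect
  // it.)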
  if (second.is_literal_load()) return kNoViolations;

  // The following checks if this represents a thread address pointer access,
  // which means the instruction must be one of the following forms:
  //    ldr Rn, [r9]     ; load user thread pointer.
  //    ldr Rn, [r9, #4] ; load IRT thread pointer.
  if (second.is_load_thread_address_pointer()) return kNoViolations;

  if (first.defines(base)
      && first.clears_bits(sfi.data_address_mask())
      && first.always_dominates(second)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical, LOADSTORE_CROSSES_BUNDLE_VIOLATION);
  }

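  // The remaining safe form is a conditional access guarded by a TST on the
  // base register. As an illustrative sketch (register choice and mask value
  // are assumptions, not taken from this file):
  //    tst   r1, #0xc0000000   ; sets Z iff the mask bits are already clear
  //    ldreq r0, [r1]          ; executes only when the base is in range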
  if (sfi.conditional_memory_access_allowed_for_sfi() &&
      first.sets_Z_if_bits_clear(base, sfi.data_address_mask()) &&
      second.is_eq_conditional_on(first)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical,
        LOADSTORE_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(LOADSTORE_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_loadstore_violations (above).
static inline void generate_loadstore_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations, LOADSTORE_CROSSES_BUNDLE_VIOLATION)) {
    out->ReportProblemDiagnostic(
        LOADSTORE_CROSSES_BUNDLE_VIOLATION,
        second.addr(),
        "Load/store base %s is not properly masked, "
        "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32
        "] crosses bundle boundary.",
        second.base_address_register().ToString(), first.addr(), second.addr());
  }
  if (ContainsViolation(violations, LOADSTORE_VIOLATION)) {
    Register base = second.base_address_register();

    if (first.defines(base)) {
      if (first.clears_bits(sfi.data_address_mask())) {
        if (first.defines(Register::Conditions())) {
          out->ReportProblemDiagnostic(
              LOADSTORE_VIOLATION,
              second.addr(),
              "Load/store base %s is not properly masked, "
              "because instruction %08" NACL_PRIx32
              " sets APSR condition flags.",
              base.ToString(),
              first.addr());
        } else {
          out->ReportProblemDiagnostic(
              LOADSTORE_VIOLATION,
              second.addr(),
              "Load/store base %s is not properly masked, "
              "because the conditions (%s, %s) on "
              "[%08" NACL_PRIx32 ", %08" NACL_PRIx32
              "] don't guarantee atomicity",
              base.ToString(),
              Instruction::ToString(first.inst().GetCondition()),
              Instruction::ToString(second.inst().GetCondition()),
              first.addr(),
              second.addr());
        }
      } else {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked.",
            base.ToString());
      }
    } else if (first.sets_Z_if_bits_clear(base, sfi.data_address_mask())) {
      if (sfi.conditional_memory_access_allowed_for_sfi()) {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked, because "
            "%08" NACL_PRIx32 " is not conditional on EQ",
            base.ToString(),
            second.addr());
      } else {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked, "
            "because [%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] instruction "
            "pair is disallowed on this CPU",
            base.ToString(),
            first.addr(),
            second.addr());
      }
    } else if (base.Equals(Register::Pc())) {
      const char* pc = Register::Pc().ToString();
      out->ReportProblemDiagnostic(
          LOADSTORE_VIOLATION,
          second.addr(),
          "Native Client only allows updates on %s of "
          "the form '%s + immediate'.",
          pc,
          pc);
    } else {
      out->ReportProblemDiagnostic(
          LOADSTORE_VIOLATION,
          second.addr(),
          "Load/store base %s is not properly masked.",
          base.ToString());
    }
  }
}

// Reports any unsafe indirect branches. If the instruction pair defines
// a safe indirect branch, it updates the critical set with the address
// of the branch, so that later code can check that the instruction pair
// is atomic.
//
// A destination address is safe if it has specific bits masked off by its
// immediate predecessor.
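//
// As an illustrative sketch (register choice and mask value are assumptions,
// not taken from this file), a safe indirect branch pair looks like:
//    bic lr, lr, #0xc000000f  ; first: clears mask bits, aligns to a bundle
//    bx  lr                   ; second: branch through the masked register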
static inline ViolationSet get_branch_mask_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {
  Register target(second.branch_target_register());
  if (target.Equals(Register::None())) return kNoViolations;

  if (first.defines(target) &&
      first.clears_bits(sfi.code_address_mask()) &&
      first.always_dominates(second)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical, BRANCH_MASK_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(BRANCH_MASK_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_branch_mask_violations (above).
static inline void generate_branch_mask_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations, BRANCH_MASK_CROSSES_BUNDLE_VIOLATION)) {
    out->ReportProblemDiagnostic(
        BRANCH_MASK_CROSSES_BUNDLE_VIOLATION,
        second.addr(),
        "Destination branch on %s is not properly masked, "
        "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] "
        "crosses bundle boundary",
        second.branch_target_register().ToString(),
        first.addr(),
        second.addr());
  }
  if (ContainsViolation(violations, BRANCH_MASK_VIOLATION)) {
    Register target(second.branch_target_register());
    if (first.defines(target)) {
      if (first.clears_bits(sfi.code_address_mask())) {
        if (first.defines(Register::Conditions())) {
          out->ReportProblemDiagnostic(
              BRANCH_MASK_VIOLATION,
              second.addr(),
              "Destination branch on %s is not properly masked, "
              "because instruction %08" NACL_PRIx32
              " sets APSR condition flags",
              target.ToString(),
              first.addr());
        } else {
          out->ReportProblemDiagnostic(
              BRANCH_MASK_VIOLATION,
              second.addr(),
              "Destination branch on %s is not properly masked, "
              "because the conditions (%s, %s) on "
              "[%08" NACL_PRIx32 ", %08" NACL_PRIx32
              "] don't guarantee atomicity",
              target.ToString(),
              Instruction::ToString(first.inst().GetCondition()),
              Instruction::ToString(second.inst().GetCondition()),
              first.addr(),
              second.addr());
        }
        return;
      }
    }
    out->ReportProblemDiagnostic(
        BRANCH_MASK_VIOLATION,
        second.addr(),
        "Destination branch on %s is not properly masked.",
        target.ToString());
  }
}

// Reports any instructions that update a data-address register without
// a valid mask. If the instruction pair safely updates the data-address
// register, it updates the critical set with the address of the second
// instruction, so that later code can check that the instruction pair
// is atomic.
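//
// As an illustrative sketch (register choice and mask value are assumptions,
// not taken from this file), a safe data-address register update looks like:
//    add sp, sp, r0           ; first: modifies a data-address register
//    bic sp, sp, #0xc0000000  ; second: immediately re-masks it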
static inline ViolationSet get_data_register_update_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {

  RegisterList data_registers(sfi.data_address_registers());

  // No check is needed if the first instruction doesn't update any
  // data address registers.
  if (!first.defines_any(data_registers)) return kNoViolations;

  // A single safe data register update doesn't affect control flow.
  if (first.clears_bits(sfi.data_address_mask())) return kNoViolations;

  // Small immediate base register writeback to data address registers
  // (e.g. SP) doesn't need to be an instruction pair.
  if (first.base_address_register_writeback_small_immediate() &&
      sfi.data_address_registers().Contains(first.base_address_register())) {
    return kNoViolations;
  }

  // Data address register modification followed by bit clear.
  RegisterList data_addr_defs(first.defs().Intersect(data_registers));
  if (second.defines_all(data_addr_defs)
      && second.clears_bits(sfi.data_address_mask())
      && second.always_postdominates(first)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical,
        DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(DATA_REGISTER_UPDATE_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_data_register_update_violations (above).
static inline void generate_data_register_update_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations,
                        DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION)) {
    RegisterList data_registers(sfi.data_address_registers());
    RegisterList data_addr_defs(first.defs().Intersect(data_registers));
    for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
      Register reg(r);
      if (data_addr_defs.Contains(reg)) {
        out->ReportProblemDiagnostic(
            DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION,
            first.addr(),
            "Updating %s without masking in following instruction, "
            "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32
            "] crosses bundle boundary.",
            reg.ToString(),
            first.addr(),
            second.addr());
      }
    }
  }
  if (ContainsViolation(violations, DATA_REGISTER_UPDATE_VIOLATION)) {
    RegisterList data_registers(sfi.data_address_registers());
    RegisterList data_addr_defs(first.defs().Intersect(data_registers));
    if (second.defines_all(data_addr_defs) &&
        second.clears_bits(sfi.data_address_mask())) {
      for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
        Register reg(r);
        if (data_addr_defs.Contains(reg)) {
          if (first.defines(Register::Conditions())) {
            out->ReportProblemDiagnostic(
                DATA_REGISTER_UPDATE_VIOLATION,
                first.addr(),
                "Updating %s without masking in following instruction, "
                "because instruction %08" NACL_PRIx32 " sets APSR "
                "condition flags.",
                reg.ToString(),
                first.addr());
          } else {
            out->ReportProblemDiagnostic(
                DATA_REGISTER_UPDATE_VIOLATION,
                first.addr(),
                "Updating %s without masking in following instruction, "
                "because the conditions (%s, %s) on "
                "[%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] don't "
                "guarantee atomicity",
                reg.ToString(),
                Instruction::ToString(first.inst().GetCondition()),
                Instruction::ToString(second.inst().GetCondition()),
                first.addr(),
                second.addr());
          }
        }
      }
    } else {
      for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
        Register reg(r);
        if (data_addr_defs.Contains(reg)) {
          out->ReportProblemDiagnostic(
              DATA_REGISTER_UPDATE_VIOLATION,
              first.addr(),
              "Updating %s without masking in following instruction.",
              reg.ToString());
        }
      }
    }
  }
}

// Reports a violation if a call instruction is not the last instruction
// in its bundle.
//
// This is not a security check per se. Rather, it prevents unbalancing the
// CPU's return stack, which would decrease performance.
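//
// For illustration (assuming the usual 16-byte NaCl ARM bundle; the actual
// size comes from the SfiValidator, not this file), a well-placed call
// looks like:
//    bundle+0x0: ...
//    bundle+0x4: ...
//    bundle+0x8: ...
//    bundle+0xc: blx r0   ; last slot, so the return address is bundle-aligned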
static inline ViolationSet get_call_position_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi) {
  // Identify linking branches through their definitions:
  if (inst.defines_all(RegisterList(Register::Pc()).Add(Register::Lr()))) {
    uint32_t last_slot = sfi.bundle_for_address(inst.addr()).end_addr() - 4;
    if (inst.addr() != last_slot) {
      return ViolationBit(CALL_POSITION_VIOLATION);
    }
  }
  return kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_call_position_violations (above).
static inline void generate_call_position_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, CALL_POSITION_VIOLATION)) {
    out->ReportProblemDiagnostic(
        CALL_POSITION_VIOLATION,
        inst.addr(),
        "Call not last instruction in instruction bundle.");
  }
}

// Checks that the instruction doesn't set a read-only register.
static inline ViolationSet get_read_only_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi) {
  return inst.defines_any(sfi.read_only_registers())
      ? ViolationBit(READ_ONLY_VIOLATION)
      : kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_read_only_violations (above).
static inline void generate_read_only_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations, READ_ONLY_VIOLATION)) {
    // Note: construct a value, not a non-const reference; Intersect()
    // returns a temporary RegisterList.
    RegisterList read_only(inst.defs().Intersect(sfi.read_only_registers()));
    for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
      Register reg(r);
      if (read_only.Contains(reg)) {
        out->ReportProblemDiagnostic(
            READ_ONLY_VIOLATION,
            inst.addr(),
            "Updates read-only register: %s.",
            reg.ToString());
      }
    }
  }
}

// Checks that the instruction doesn't read the thread local pointer.
static inline ViolationSet get_read_thread_local_pointer_violations(
    const nacl_arm_val::DecodedInstruction& inst) {
  return (inst.uses(Register::Tp()) && !inst.is_load_thread_address_pointer())
      ? ViolationBit(READ_THREAD_LOCAL_POINTER_VIOLATION)
      : kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_read_thread_local_pointer_violations (above).
static inline void generate_read_thread_local_pointer_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, READ_THREAD_LOCAL_POINTER_VIOLATION)) {
    out->ReportProblemDiagnostic(
        READ_THREAD_LOCAL_POINTER_VIOLATION,
        inst.addr(),
        "Use of thread pointer %s not legal outside of load thread pointer "
        "instruction(s)",
        Register::Tp().ToString());
  }
}

// Checks that writes to the program counter come only from branches. Adds
// the address of each safe (relative) branch to the branches set, so that
// later code can check its target.
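//
// For illustration (the mask value is an assumption, not taken from this
// file), a non-branch write to pc is still accepted when the instruction
// itself clears the code-address mask bits, e.g.:
//    bic pc, lr, #0xc000000f  ; masks and writes pc in a single instruction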
static inline ViolationSet get_pc_writes_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* branches) {

  // Safe if a relative branch.
  if (inst.is_relative_branch()) {
    branches->add(inst.addr());
    return kNoViolations;
  }

  // If the branch is through a register, it is checked by
  // get_branch_mask_violations.
  if (!inst.branch_target_register().Equals(Register::None()))
    return kNoViolations;

  if (!inst.defines(nacl_arm_dec::Register::Pc())) return kNoViolations;

  if (inst.clears_bits(sfi.code_address_mask())) return kNoViolations;

  return ViolationBit(PC_WRITES_VIOLATION);
}


// The following generates the diagnostics that correspond to the violations
// collected by get_pc_writes_violations (above).
static inline void generate_pc_writes_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, PC_WRITES_VIOLATION)) {
    out->ReportProblemDiagnostic(
        PC_WRITES_VIOLATION,
        inst.addr(),
        "Destination branch on %s is not properly masked.",
        Register::Pc().ToString());
  }
}

// If the instruction is a literal pool head, marks the addresses of its
// bundle appropriately and then skips over the constant bundle.
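//
// For illustration (the pool-head encoding and bundle size are defined
// elsewhere in the validator, not in this file), the resulting layout is:
//    bundle+0x0: <literal pool head>  ; bundle head, may be a branch target
//    bundle+0x4: .word <constant>     ; added to the critical set
//    bundle+0x8: .word <constant>     ; added to the critical set
//    bundle+0xc: .word <constant>     ; added to the critical set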
static inline void validate_literal_pool_head(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical,
    uint32_t* next_inst_addr) {
  if (inst.is_literal_pool_head() && sfi.is_bundle_head(inst.addr())) {
    // Add each instruction in this bundle to the critical set.
    // Skip over the literal pool head (which is also the bundle head):
    // indirect branches to it are legal, direct branches should therefore
    // also be legal.
    uint32_t last_data_addr = sfi.bundle_for_address(inst.addr()).end_addr();
    for (; *next_inst_addr < last_data_addr; *next_inst_addr += 4) {
      critical->add(*next_inst_addr);
    }
  }
}

}  // namespace nacl_arm_dec

#endif  // NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_