1 /*
2 * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "aot/aotLoader.hpp"
27 #include "classfile/classLoaderDataGraph.hpp"
28 #include "classfile/classFileStream.hpp"
29 #include "classfile/javaClasses.inline.hpp"
30 #include "classfile/metadataOnStackMark.hpp"
31 #include "classfile/symbolTable.hpp"
32 #include "classfile/systemDictionary.hpp"
33 #include "classfile/verifier.hpp"
34 #include "code/codeCache.hpp"
35 #include "compiler/compileBroker.hpp"
36 #include "interpreter/oopMapCache.hpp"
37 #include "interpreter/rewriter.hpp"
38 #include "jfr/jfrEvents.hpp"
39 #include "logging/logStream.hpp"
40 #include "memory/metadataFactory.hpp"
41 #include "memory/metaspaceShared.hpp"
42 #include "memory/resourceArea.hpp"
43 #include "memory/universe.hpp"
44 #include "oops/annotations.hpp"
45 #include "oops/constantPool.hpp"
46 #include "oops/fieldStreams.inline.hpp"
47 #include "oops/klassVtable.hpp"
48 #include "oops/oop.inline.hpp"
49 #include "oops/recordComponent.hpp"
50 #include "prims/jvmtiImpl.hpp"
51 #include "prims/jvmtiRedefineClasses.hpp"
52 #include "prims/jvmtiThreadState.inline.hpp"
53 #include "prims/resolvedMethodTable.hpp"
54 #include "prims/methodComparator.hpp"
55 #include "runtime/atomic.hpp"
56 #include "runtime/deoptimization.hpp"
57 #include "runtime/handles.inline.hpp"
58 #include "runtime/jniHandles.inline.hpp"
59 #include "runtime/relocator.hpp"
60 #include "runtime/safepointVerifiers.hpp"
61 #include "utilities/bitMap.inline.hpp"
62 #include "utilities/events.hpp"
63
// Scratch state for the redefinition currently being processed. These are
// static, which presumably relies on redefinition operations being
// serialized (see lock_classes()) -- TODO confirm against VM op semantics.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method** VM_RedefineClasses::_matching_old_methods = NULL;
Method** VM_RedefineClasses::_matching_new_methods = NULL;
Method** VM_RedefineClasses::_deleted_methods = NULL;
Method** VM_RedefineClasses::_added_methods = NULL;
int VM_RedefineClasses::_matching_methods_length = 0;
int VM_RedefineClasses::_deleted_methods_length = 0;
int VM_RedefineClasses::_added_methods_length = 0;

// This flag is global as the constructor does not reset it:
bool VM_RedefineClasses::_has_redefined_Object = false;
// Monotonic counter; presumably the source of values handed out by next_id().
u8 VM_RedefineClasses::_id_counter = 0;
77
VM_RedefineClasses(jint class_count,const jvmtiClassDefinition * class_defs,JvmtiClassLoadKind class_load_kind)78 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
79 const jvmtiClassDefinition *class_defs,
80 JvmtiClassLoadKind class_load_kind) {
81 _class_count = class_count;
82 _class_defs = class_defs;
83 _class_load_kind = class_load_kind;
84 _any_class_has_resolved_methods = false;
85 _res = JVMTI_ERROR_NONE;
86 _the_class = NULL;
87 _id = next_id();
88 }
89
get_ik(jclass def)90 static inline InstanceKlass* get_ik(jclass def) {
91 oop mirror = JNIHandles::resolve_non_null(def);
92 return InstanceKlass::cast(java_lang_Class::as_Klass(mirror));
93 }
94
// If any of the classes are being redefined, wait
// Parallel constant pool merging leads to indeterminate constant pools.
void VM_RedefineClasses::lock_classes() {
  MonitorLocker ml(RedefineClasses_lock);
  bool has_redefined;
  do {
    has_redefined = false;
    // Go through classes each time until none are being redefined.
    for (int i = 0; i < _class_count; i++) {
      if (get_ik(_class_defs[i].klass)->is_being_redefined()) {
        // Another thread holds one of our classes: block until it
        // notifies, then re-scan the whole list from the beginning.
        ml.wait();
        has_redefined = true;
        break; // for loop
      }
    }
  } while (has_redefined);
  // No contention on any of our classes now -- claim them all while
  // still holding the monitor.
  for (int i = 0; i < _class_count; i++) {
    get_ik(_class_defs[i].klass)->set_is_being_redefined(true);
  }
  // Wake other waiters so they can re-check their own class lists.
  ml.notify_all();
}
116
unlock_classes()117 void VM_RedefineClasses::unlock_classes() {
118 MonitorLocker ml(RedefineClasses_lock);
119 for (int i = 0; i < _class_count; i++) {
120 assert(get_ik(_class_defs[i].klass)->is_being_redefined(),
121 "should be being redefined to get here");
122 get_ik(_class_defs[i].klass)->set_is_being_redefined(false);
123 }
124 ml.notify_all();
125 }
126
// Runs in the calling (Java) thread before the safepoint. Validates the
// request, claims the classes, and parses/loads the scratch (new) class
// versions. Returns false (with _res set) to abort the VM operation.
bool VM_RedefineClasses::doit_prologue() {
  if (_class_count == 0) {
    // An empty request trivially succeeds.
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == NULL) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }

  // Cheap per-definition sanity checks before any real work.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == NULL) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == NULL) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives, arrays, hidden and vm unsafe anonymous classes
    // cannot be redefined.
    if (!is_modifiable_class(mirror)) {
      _res = JVMTI_ERROR_UNMODIFIABLE_CLASS;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_vm_op_prologue.start();
  }

  lock_classes();
  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions(Thread::current());
  if (_res != JVMTI_ERROR_NONE) {
    // free any successfully created classes, since none are redefined
    for (int i = 0; i < _class_count; i++) {
      if (_scratch_classes[i] != NULL) {
        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
        // Free the memory for this class at class unloading time. Not before
        // because CMS might think this is still live.
        InstanceKlass* ik = get_ik(_class_defs[i].klass);
        if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) {
          // Don't double-free cached_class_file copied from the original class if error.
          _scratch_classes[i]->set_cached_class_file(NULL);
        }
        cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
      }
    }
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    // NOTE: timer is stopped unconditionally here even though it is only
    // started when the timer log is enabled (matches the success path below).
    _timer_vm_op_prologue.stop();
    unlock_classes();
    return false;
  }

  _timer_vm_op_prologue.stop();
  return true;
}
195
// Body of the VM operation: installs the scratch classes, then fixes up
// everything that refers to the old versions. The phase ordering below
// (redefine, flush code, adjust metadata, clean up) is deliberate.
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

#if INCLUDE_CDS
  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      log_info(redefine, class, load)("failed to remap shared readonly space to readwrite, private");
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }
#endif

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack(/*walk_all_metadata*/true, /*redefinition_walk*/true);
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  // Swap each original class for its scratch (new) version.
  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
  }

  // Flush all compiled code that depends on the classes redefined.
  flush_dependent_code();

  // Adjust constantpool caches and vtables for all classes
  // that reference methods of the evolved classes.
  // Have to do this after all classes are redefined and all methods that
  // are redefined are marked as old.
  AdjustAndCleanMetadata adjust_and_clean_metadata(thread);
  ClassLoaderDataGraph::classes_do(&adjust_and_clean_metadata);

  // JSR-292 support
  if (_any_class_has_resolved_methods) {
    bool trace_name_printed = false;
    ResolvedMethodTable::adjust_method_entries(&trace_name_printed);
  }

  // Increment flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::increment_redefinition_count();

  // check_class() is optionally called for product bits, but is
  // always called for non-product bits.
#ifdef PRODUCT
  if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
#endif
    log_trace(redefine, class, obsolete, metadata)("calling check_class");
    CheckClass check_class(thread);
    ClassLoaderDataGraph::classes_do(&check_class);
#ifdef PRODUCT
  }
#endif

  // Clean up any metadata now unreferenced while MetadataOnStackMark is set.
  ClassLoaderDataGraph::clean_deallocate_lists(false);
}
258
// Runs after the safepoint: releases the class claims, frees per-operation
// allocations and emits the optional timing summary.
void VM_RedefineClasses::doit_epilogue() {
  unlock_classes();

  // Free os::malloc allocated memory.
  os::free(_scratch_classes);

  // Reset the_class to null for error printing.
  _the_class = NULL;

  if (log_is_enabled(Info, redefine, class, timer)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    julong doit_time = _timer_rsc_phase1.milliseconds() +
                       _timer_rsc_phase2.milliseconds();
    julong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    log_info(redefine, class, timer)
      ("vm_op: all=" JULONG_FORMAT " prologue=" JULONG_FORMAT " doit=" JULONG_FORMAT,
       all_time, (julong)_timer_vm_op_prologue.milliseconds(), doit_time);
    log_info(redefine, class, timer)
      ("redefine_single_class: phase1=" JULONG_FORMAT " phase2=" JULONG_FORMAT,
       (julong)_timer_rsc_phase1.milliseconds(), (julong)_timer_rsc_phase2.milliseconds());
  }
}
283
is_modifiable_class(oop klass_mirror)284 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
285 // classes for primitives cannot be redefined
286 if (java_lang_Class::is_primitive(klass_mirror)) {
287 return false;
288 }
289 Klass* k = java_lang_Class::as_Klass(klass_mirror);
290 // classes for arrays cannot be redefined
291 if (k == NULL || !k->is_instance_klass()) {
292 return false;
293 }
294
295 // Cannot redefine or retransform a hidden or an unsafe anonymous class.
296 if (InstanceKlass::cast(k)->is_hidden() ||
297 InstanceKlass::cast(k)->is_unsafe_anonymous()) {
298 return false;
299 }
300 return true;
301 }
302
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    //
    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Class:
    case JVM_CONSTANT_UnresolvedClass:
    {
      // First make sure the referenced name Utf8 is in *merge_cp_p.
      int name_i = scratch_cp->klass_name_index_at(scratch_i);
      int new_name_i = find_or_append_indirect_entry(scratch_cp, name_i, merge_cp_p,
                                                     merge_cp_length_p, THREAD);

      if (new_name_i != name_i) {
        log_trace(redefine, class, constantpool)
          ("Class entry@%d name_index change: %d to %d",
           *merge_cp_length_p, name_i, new_name_i);
      }

      (*merge_cp_p)->temp_unresolved_klass_at_put(*merge_cp_length_p, new_name_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // Double/Long occupy two CP slots.
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // Merge both referenced Utf8 entries before the NameAndType itself.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
                                                         merge_cp_length_p, THREAD);

      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
                                                              merge_cp_p, merge_cp_length_p,
                                                              THREAD);

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i != name_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d name_ref_index change: %d to %d",
           *merge_cp_length_p, name_ref_i, new_name_ref_i);
      }
      if (new_signature_ref_i != signature_ref_i) {
        log_trace(redefine, class, constantpool)
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
           *merge_cp_length_p, signature_ref_i, new_signature_ref_i);
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Merge the referenced Class and NameAndType entries first.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
                                                          merge_cp_p, merge_cp_length_p, THREAD);

      const char *entry_name = NULL;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d class_index changed: %d to %d", entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i);
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        log_trace(redefine, class, constantpool)
          ("%s entry@%d name_and_type_index changed: %d to %d",
           entry_name, *merge_cp_length_p, name_and_type_ref_i, new_name_and_type_ref_i);
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodType:
    {
      int ref_i = scratch_cp->method_type_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodType entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_MethodHandle:
    {
      // ref_kind is a plain value (no CP reference) and is copied as-is.
      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_ref_i != ref_i) {
        log_trace(redefine, class, constantpool)
          ("MethodHandle entry@%d ref_index change: %d to %d", *merge_cp_length_p, ref_i, new_ref_i);
      }
      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_Dynamic:  // fall through
    case JVM_CONSTANT_InvokeDynamic:
    {
      // Index of the bootstrap specifier in the operands array
      int old_bs_i = scratch_cp->bootstrap_methods_attribute_index(scratch_i);
      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
                                            merge_cp_length_p, THREAD);
      // The bootstrap method NameAndType_info index
      int old_ref_i = scratch_cp->bootstrap_name_and_type_ref_index_at(scratch_i);
      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                    merge_cp_length_p, THREAD);
      if (new_bs_i != old_bs_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d bootstrap_method_attr_index change: %d to %d",
           *merge_cp_length_p, old_bs_i, new_bs_i);
      }
      if (new_ref_i != old_ref_i) {
        log_trace(redefine, class, constantpool)
          ("Dynamic entry@%d name_and_type_index change: %d to %d", *merge_cp_length_p, old_ref_i, new_ref_i);
      }

      // Dynamic and InvokeDynamic share the layout but not the tag.
      if (scratch_cp->tag_at(scratch_i).is_dynamic_constant())
        (*merge_cp_p)->dynamic_constant_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      else
        (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be in scratch_cp, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
577
578
find_or_append_indirect_entry(const constantPoolHandle & scratch_cp,int ref_i,constantPoolHandle * merge_cp_p,int * merge_cp_length_p,TRAPS)579 int VM_RedefineClasses::find_or_append_indirect_entry(const constantPoolHandle& scratch_cp,
580 int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
581
582 int new_ref_i = ref_i;
583 bool match = (ref_i < *merge_cp_length_p) &&
584 scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD);
585
586 if (!match) {
587 // forward reference in *merge_cp_p or not a direct match
588 int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
589 if (found_i != 0) {
590 guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
591 // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
592 new_ref_i = found_i;
593 map_index(scratch_cp, ref_i, found_i);
594 } else {
595 // no match found so we have to append this entry to *merge_cp_p
596 append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
597 // The above call to append_entry() can only append one entry
598 // so the post call query of *merge_cp_length_p is only for
599 // the sake of consistency.
600 new_ref_i = *merge_cp_length_p - 1;
601 }
602 }
603
604 return new_ref_i;
605 } // end find_or_append_indirect_entry()
606
607
// Append a bootstrap specifier into the merge_cp operands that is semantically equal
// to the scratch_cp operands bootstrap specifier passed by the old_bs_i index.
// Recursively append new merge_cp entries referenced by the new bootstrap specifier.
void VM_RedefineClasses::append_operand(const constantPoolHandle& scratch_cp, int old_bs_i,
       constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {

  // Make sure the referenced bootstrap method CP entry is merged first.
  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
                                                merge_cp_length_p, THREAD);
  if (new_ref_i != old_ref_i) {
    log_trace(redefine, class, constantpool)
      ("operands entry@%d bootstrap method ref_index change: %d to %d", _operands_cur_length, old_ref_i, new_ref_i);
  }

  Array<u2>* merge_ops = (*merge_cp_p)->operands();
  int new_bs_i = _operands_cur_length;
  // We have _operands_cur_length == 0 when the merge_cp operands is empty yet.
  // However, the operand_offset_at(0) was set in the extend_operands() call.
  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
  int argc = scratch_cp->operand_argument_count_at(old_bs_i);

  // Write the specifier: [bootstrap method ref_index, argc, arg_0 .. arg_argc-1],
  // as laid down by the at_put() calls below.
  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
  merge_ops->at_put(new_base++, new_ref_i);
  merge_ops->at_put(new_base++, argc);

  for (int i = 0; i < argc; i++) {
    // Each static argument is itself a CP entry that must be merged first.
    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
                                                      merge_cp_length_p, THREAD);
    merge_ops->at_put(new_base++, new_arg_ref_i);
    if (new_arg_ref_i != old_arg_ref_i) {
      log_trace(redefine, class, constantpool)
        ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
         _operands_cur_length, old_arg_ref_i, new_arg_ref_i);
    }
  }
  if (old_bs_i != _operands_cur_length) {
    // The bootstrap specifier in *merge_cp_p is at a different index than
    // that in scratch_cp so we need to map the index values.
    map_operand_index(old_bs_i, new_bs_i);
  }
  _operands_cur_length++;
} // end append_operand()
652
653
find_or_append_operand(const constantPoolHandle & scratch_cp,int old_bs_i,constantPoolHandle * merge_cp_p,int * merge_cp_length_p,TRAPS)654 int VM_RedefineClasses::find_or_append_operand(const constantPoolHandle& scratch_cp,
655 int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
656
657 int new_bs_i = old_bs_i; // bootstrap specifier index
658 bool match = (old_bs_i < _operands_cur_length) &&
659 scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i, THREAD);
660
661 if (!match) {
662 // forward reference in *merge_cp_p or not a direct match
663 int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
664 _operands_cur_length, THREAD);
665 if (found_i != -1) {
666 guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
667 // found a matching operand somewhere else in *merge_cp_p so just need a mapping
668 new_bs_i = found_i;
669 map_operand_index(old_bs_i, found_i);
670 } else {
671 // no match found so we have to append this bootstrap specifier to *merge_cp_p
672 append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p, THREAD);
673 new_bs_i = _operands_cur_length - 1;
674 }
675 }
676 return new_bs_i;
677 } // end find_or_append_operand()
678
679
finalize_operands_merge(const constantPoolHandle & merge_cp,TRAPS)680 void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge_cp, TRAPS) {
681 if (merge_cp->operands() == NULL) {
682 return;
683 }
684 // Shrink the merge_cp operands
685 merge_cp->shrink_operands(_operands_cur_length, CHECK);
686
687 if (log_is_enabled(Trace, redefine, class, constantpool)) {
688 // don't want to loop unless we are tracing
689 int count = 0;
690 for (int i = 1; i < _operands_index_map_p->length(); i++) {
691 int value = _operands_index_map_p->at(i);
692 if (value != -1) {
693 log_trace(redefine, class, constantpool)("operands_index_map[%d]: old=%d new=%d", count, i, value);
694 count++;
695 }
696 }
697 }
698 // Clean-up
699 _operands_index_map_p = NULL;
700 _operands_cur_length = 0;
701 _operands_index_map_count = 0;
702 } // end finalize_operands_merge()
703
704 // Symbol* comparator for qsort
705 // The caller must have an active ResourceMark.
symcmp(const void * a,const void * b)706 static int symcmp(const void* a, const void* b) {
707 char* astr = (*(Symbol**)a)->as_C_string();
708 char* bstr = (*(Symbol**)b)->as_C_string();
709 return strcmp(astr, bstr);
710 }
711
// Compare two CP-index attribute arrays (e.g. NestMembers) of the original
// and scratch class; returns JVMTI_ERROR_NONE when they reference the same
// set of class names, otherwise the attribute-changed error.
// Universe::the_empty_short_array() appears to be the shared sentinel for
// an absent attribute (both comparisons below rely on it).
// The caller must have an active ResourceMark.
static jvmtiError check_attribute_arrays(const char* attr_name,
       InstanceKlass* the_class, InstanceKlass* scratch_class,
       Array<u2>* the_array, Array<u2>* scr_array) {
  bool the_array_exists = the_array != Universe::the_empty_short_array();
  bool scr_array_exists = scr_array != Universe::the_empty_short_array();

  int array_len = the_array->length();
  if (the_array_exists && scr_array_exists) {
    if (array_len != scr_array->length()) {
      // Same attribute present in both but with different entry counts.
      log_trace(redefine, class)
        ("redefined class %s attribute change error: %s len=%d changed to len=%d",
         the_class->external_name(), attr_name, array_len, scr_array->length());
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
    }

    // The order of entries in the attribute array is not specified so we
    // have to explicitly check for the same contents. We do this by copying
    // the referenced symbols into their own arrays, sorting them and then
    // comparing each element pair.

    Symbol** the_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);
    Symbol** scr_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);

    if (the_syms == NULL || scr_syms == NULL) {
      return JVMTI_ERROR_OUT_OF_MEMORY;
    }

    // Resolve each CP index to the class-name Symbol it references.
    for (int i = 0; i < array_len; i++) {
      int the_cp_index = the_array->at(i);
      int scr_cp_index = scr_array->at(i);
      the_syms[i] = the_class->constants()->klass_name_at(the_cp_index);
      scr_syms[i] = scratch_class->constants()->klass_name_at(scr_cp_index);
    }

    qsort(the_syms, array_len, sizeof(Symbol*), symcmp);
    qsort(scr_syms, array_len, sizeof(Symbol*), symcmp);

    // Pointer comparison suffices after both sides were resolved and sorted
    // the same way -- any position mismatch means the sets differ.
    for (int i = 0; i < array_len; i++) {
      if (the_syms[i] != scr_syms[i]) {
        log_trace(redefine, class)
          ("redefined class %s attribute change error: %s[%d]: %s changed to %s",
           the_class->external_name(), attr_name, i,
           the_syms[i]->as_C_string(), scr_syms[i]->as_C_string());
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
      }
    }
  } else if (the_array_exists ^ scr_array_exists) {
    // Attribute present on exactly one side: it was added or removed.
    const char* action_str = (the_array_exists) ? "removed" : "added";
    log_trace(redefine, class)
      ("redefined class %s attribute change error: %s attribute %s",
       the_class->external_name(), attr_name, action_str);
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
  }
  return JVMTI_ERROR_NONE;
}
768
check_nest_attributes(InstanceKlass * the_class,InstanceKlass * scratch_class)769 static jvmtiError check_nest_attributes(InstanceKlass* the_class,
770 InstanceKlass* scratch_class) {
771 // Check whether the class NestHost attribute has been changed.
772 Thread* thread = Thread::current();
773 ResourceMark rm(thread);
774 u2 the_nest_host_idx = the_class->nest_host_index();
775 u2 scr_nest_host_idx = scratch_class->nest_host_index();
776
777 if (the_nest_host_idx != 0 && scr_nest_host_idx != 0) {
778 Symbol* the_sym = the_class->constants()->klass_name_at(the_nest_host_idx);
779 Symbol* scr_sym = scratch_class->constants()->klass_name_at(scr_nest_host_idx);
780 if (the_sym != scr_sym) {
781 log_trace(redefine, class, nestmates)
782 ("redefined class %s attribute change error: NestHost class: %s replaced with: %s",
783 the_class->external_name(), the_sym->as_C_string(), scr_sym->as_C_string());
784 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
785 }
786 } else if ((the_nest_host_idx == 0) ^ (scr_nest_host_idx == 0)) {
787 const char* action_str = (the_nest_host_idx != 0) ? "removed" : "added";
788 log_trace(redefine, class, nestmates)
789 ("redefined class %s attribute change error: NestHost attribute %s",
790 the_class->external_name(), action_str);
791 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
792 }
793
794 // Check whether the class NestMembers attribute has been changed.
795 return check_attribute_arrays("NestMembers",
796 the_class, scratch_class,
797 the_class->nest_members(),
798 scratch_class->nest_members());
799 }
800
801 // Return an error status if the class Record attribute was changed.
check_record_attribute(InstanceKlass * the_class,InstanceKlass * scratch_class)802 static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass* scratch_class) {
803 // Get lists of record components.
804 Array<RecordComponent*>* the_record = the_class->record_components();
805 Array<RecordComponent*>* scr_record = scratch_class->record_components();
806 bool the_record_exists = the_record != NULL;
807 bool scr_record_exists = scr_record != NULL;
808
809 if (the_record_exists && scr_record_exists) {
810 int the_num_components = the_record->length();
811 int scr_num_components = scr_record->length();
812 if (the_num_components != scr_num_components) {
813 log_trace(redefine, class, record)
814 ("redefined class %s attribute change error: Record num_components=%d changed to num_components=%d",
815 the_class->external_name(), the_num_components, scr_num_components);
816 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
817 }
818
819 // Compare each field in each record component.
820 ConstantPool* the_cp = the_class->constants();
821 ConstantPool* scr_cp = scratch_class->constants();
822 for (int x = 0; x < the_num_components; x++) {
823 RecordComponent* the_component = the_record->at(x);
824 RecordComponent* scr_component = scr_record->at(x);
825 const Symbol* const the_name = the_cp->symbol_at(the_component->name_index());
826 const Symbol* const scr_name = scr_cp->symbol_at(scr_component->name_index());
827 const Symbol* const the_descr = the_cp->symbol_at(the_component->descriptor_index());
828 const Symbol* const scr_descr = scr_cp->symbol_at(scr_component->descriptor_index());
829 if (the_name != scr_name || the_descr != scr_descr) {
830 log_trace(redefine, class, record)
831 ("redefined class %s attribute change error: Record name_index, descriptor_index, and/or attributes_count changed",
832 the_class->external_name());
833 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
834 }
835
836 int the_gen_sig = the_component->generic_signature_index();
837 int scr_gen_sig = scr_component->generic_signature_index();
838 const Symbol* const the_gen_sig_sym = (the_gen_sig == 0 ? NULL :
839 the_cp->symbol_at(the_component->generic_signature_index()));
840 const Symbol* const scr_gen_sig_sym = (scr_gen_sig == 0 ? NULL :
841 scr_cp->symbol_at(scr_component->generic_signature_index()));
842 if (the_gen_sig_sym != scr_gen_sig_sym) {
843 log_trace(redefine, class, record)
844 ("redefined class %s attribute change error: Record generic_signature attribute changed",
845 the_class->external_name());
846 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
847 }
848
849 // It's okay if a record component's annotations were changed.
850 }
851
852 } else if (the_record_exists ^ scr_record_exists) {
853 const char* action_str = (the_record_exists) ? "removed" : "added";
854 log_trace(redefine, class, record)
855 ("redefined class %s attribute change error: Record attribute %s",
856 the_class->external_name(), action_str);
857 return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_ATTRIBUTE_CHANGED;
858 }
859
860 return JVMTI_ERROR_NONE;
861 }
862
863
check_permitted_subclasses_attribute(InstanceKlass * the_class,InstanceKlass * scratch_class)864 static jvmtiError check_permitted_subclasses_attribute(InstanceKlass* the_class,
865 InstanceKlass* scratch_class) {
866 Thread* thread = Thread::current();
867 ResourceMark rm(thread);
868
869 // Check whether the class PermittedSubclasses attribute has been changed.
870 return check_attribute_arrays("PermittedSubclasses",
871 the_class, scratch_class,
872 the_class->permitted_subclasses(),
873 scratch_class->permitted_subclasses());
874 }
875
can_add_or_delete(Method * m)876 static bool can_add_or_delete(Method* m) {
877 // Compatibility mode
878 return (AllowRedefinitionToAddDeleteMethods &&
879 (m->is_private() && (m->is_static() || m->is_final())));
880 }
881
// Compare the_class (the currently loaded version) against scratch_class
// (the proposed replacement) and verify the redefinition stays within what
// JVMTI supports: unchanged hierarchy, interfaces, class/field/method
// modifiers, field schema, and (unless can_add_or_delete() permits it) the
// same method set. As a side effect, overloaded methods in scratch_class
// are reordered to match the_class, and method idnums are normalized so a
// matched method keeps its old idnum. Returns JVMTI_ERROR_NONE on success,
// otherwise the specific JVMTI_ERROR_UNSUPPORTED_REDEFINITION_* code.
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
             InstanceKlass* the_class,
             InstanceKlass* scratch_class) {
  int i;

  // Check superclasses, or rather their names, since superclasses themselves can be
  // requested to replace.
  // Check for NULL superclass first since this might be java.lang.Object
  if (the_class->super() != scratch_class->super() &&
      (the_class->super() == NULL || scratch_class->super() == NULL ||
       the_class->super()->name() !=
       scratch_class->super()->name())) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }

  // Check if the number, names and order of directly implemented interfaces are the same.
  // I think in principle we should just check if the sets of names of directly implemented
  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
  // .java file, also changes in .class file) should not matter. However, comparing sets is
  // technically a bit more difficult, and, more importantly, I am not sure at present that the
  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
  // rely on it somewhere.
  Array<InstanceKlass*>* k_interfaces = the_class->local_interfaces();
  Array<InstanceKlass*>* k_new_interfaces = scratch_class->local_interfaces();
  int n_intfs = k_interfaces->length();
  if (n_intfs != k_new_interfaces->length()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }
  for (i = 0; i < n_intfs; i++) {
    if (k_interfaces->at(i)->name() !=
        k_new_interfaces->at(i)->name()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
    }
  }

  // Check whether class is in the error init state.
  if (the_class->is_in_error_state()) {
    // TBD #5057930: special error code is needed in 1.6
    return JVMTI_ERROR_INVALID_CLASS;
  }

  // Check whether the nest-related attributes have been changed.
  jvmtiError err = check_nest_attributes(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the Record attribute has been changed.
  err = check_record_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether the PermittedSubclasses attribute has been changed.
  err = check_permitted_subclasses_attribute(the_class, scratch_class);
  if (err != JVMTI_ERROR_NONE) {
    return err;
  }

  // Check whether class modifiers are the same.
  jushort old_flags = (jushort) the_class->access_flags().get_flags();
  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
  if (old_flags != new_flags) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
  }

  // Check if the number, names, types and order of fields declared in these classes
  // are the same.
  JavaFieldStream old_fs(the_class);
  JavaFieldStream new_fs(scratch_class);
  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
    // access
    old_flags = old_fs.access_flags().as_short();
    new_flags = new_fs.access_flags().as_short();
    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // offset
    if (old_fs.offset() != new_fs.offset()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // name and signature
    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
  }

  // If both streams aren't done then we have a differing number of
  // fields.
  if (!old_fs.done() || !new_fs.done()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
  }

  // Do a parallel walk through the old and new methods. Detect
  // cases where they match (exist in both), have been added in
  // the new methods, or have been deleted (exist only in the
  // old methods). The class file parser places methods in order
  // by method name, but does not order overloaded methods by
  // signature. In order to determine what fate befell the methods,
  // this code places the overloaded new methods that have matching
  // old methods in the same order as the old methods and places
  // new overloaded methods at the end of overloaded methods of
  // that name. The code for this order normalization is adapted
  // from the algorithm used in InstanceKlass::find_method().
  // Since we are swapping out of order entries as we find them,
  // we only have to search forward through the overloaded methods.
  // Methods which are added and have the same name as an existing
  // method (but different signature) will be put at the end of
  // the methods with that name, and the name mismatch code will
  // handle them.
  Array<Method*>* k_old_methods(the_class->methods());
  Array<Method*>* k_new_methods(scratch_class->methods());
  int n_old_methods = k_old_methods->length();
  int n_new_methods = k_new_methods->length();
  Thread* thread = Thread::current();

  int ni = 0;  // index into the new (scratch) methods
  int oi = 0;  // index into the old methods
  while (true) {
    Method* k_old_method;
    Method* k_new_method;
    enum { matched, added, deleted, undetermined } method_was = undetermined;

    if (oi >= n_old_methods) {
      if (ni >= n_new_methods) {
        break; // we've looked at everything, done
      }
      // New method at the end
      k_new_method = k_new_methods->at(ni);
      method_was = added;
    } else if (ni >= n_new_methods) {
      // Old method, at the end, is deleted
      k_old_method = k_old_methods->at(oi);
      method_was = deleted;
    } else {
      // There are more methods in both the old and new lists
      k_old_method = k_old_methods->at(oi);
      k_new_method = k_new_methods->at(ni);
      if (k_old_method->name() != k_new_method->name()) {
        // Methods are sorted by method name, so a mismatch means added
        // or deleted
        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
          method_was = added;
        } else {
          method_was = deleted;
        }
      } else if (k_old_method->signature() == k_new_method->signature()) {
        // Both the name and signature match
        method_was = matched;
      } else {
        // The name matches, but the signature doesn't, which means we have to
        // search forward through the new overloaded methods.
        int nj;  // outside the loop for post-loop check
        for (nj = ni + 1; nj < n_new_methods; nj++) {
          Method* m = k_new_methods->at(nj);
          if (k_old_method->name() != m->name()) {
            // reached another method name so no more overloaded methods
            method_was = deleted;
            break;
          }
          if (k_old_method->signature() == m->signature()) {
            // found a match so swap the methods
            k_new_methods->at_put(ni, m);
            k_new_methods->at_put(nj, k_new_method);
            k_new_method = m;
            method_was = matched;
            break;
          }
        }

        if (nj >= n_new_methods) {
          // reached the end without a match; so method was deleted
          method_was = deleted;
        }
      }
    }

    switch (method_was) {
    case matched:
      // methods match, be sure modifiers do too
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
      }
      {
        // Normalize idnums: the matched new method takes over the old
        // method's idnum so existing jmethodIDs keep working; any method
        // currently holding that idnum is given the new method's idnum.
        u2 new_num = k_new_method->method_idnum();
        u2 old_num = k_old_method->method_idnum();
        if (new_num != old_num) {
          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
          if (idnum_owner != NULL) {
            // There is already a method assigned this idnum -- switch them
            // Take current and original idnum from the new_method
            idnum_owner->set_method_idnum(new_num);
            idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
          }
          // Take current and original idnum from the old_method
          k_new_method->set_method_idnum(old_num);
          k_new_method->set_orig_method_idnum(k_old_method->orig_method_idnum());
          // NOTE(review): presumably the idnum update can allocate and an
          // allocation failure surfaces as a pending exception -- verify.
          if (thread->has_pending_exception()) {
            return JVMTI_ERROR_OUT_OF_MEMORY;
          }
        }
      }
      log_trace(redefine, class, normalize)
        ("Method matched: new: %s [%d] == old: %s [%d]",
         k_new_method->name_and_sig_as_C_string(), ni, k_old_method->name_and_sig_as_C_string(), oi);
      // advance to next pair of methods
      ++oi;
      ++ni;
      break;
    case added:
      // method added, see if it is OK
      if (!can_add_or_delete(k_new_method)) {
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
      }
      {
        // Assign a fresh idnum to the added method; swap with any method
        // that already owns it, mirroring the matched case above.
        u2 num = the_class->next_method_idnum();
        if (num == ConstMethod::UNSET_IDNUM) {
          // cannot add any more methods
          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
        }
        u2 new_num = k_new_method->method_idnum();
        Method* idnum_owner = scratch_class->method_with_idnum(num);
        if (idnum_owner != NULL) {
          // There is already a method assigned this idnum -- switch them
          // Take current and original idnum from the new_method
          idnum_owner->set_method_idnum(new_num);
          idnum_owner->set_orig_method_idnum(k_new_method->orig_method_idnum());
        }
        k_new_method->set_method_idnum(num);
        k_new_method->set_orig_method_idnum(num);
        if (thread->has_pending_exception()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        }
      }
      log_trace(redefine, class, normalize)
        ("Method added: new: %s [%d]", k_new_method->name_and_sig_as_C_string(), ni);
      ++ni; // advance to next new method
      break;
    case deleted:
      // method deleted, see if it is OK
      if (!can_add_or_delete(k_old_method)) {
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
      }
      log_trace(redefine, class, normalize)
        ("Method deleted: old: %s [%d]", k_old_method->name_and_sig_as_C_string(), oi);
      ++oi; // advance to next old method
      break;
    default:
      ShouldNotReachHere();
    }
  }

  return JVMTI_ERROR_NONE;
}
1142
1143
1144 // Find new constant pool index value for old constant pool index value
1145 // by seaching the index map. Returns zero (0) if there is no mapped
1146 // value for the old constant pool index.
find_new_index(int old_index)1147 int VM_RedefineClasses::find_new_index(int old_index) {
1148 if (_index_map_count == 0) {
1149 // map is empty so nothing can be found
1150 return 0;
1151 }
1152
1153 if (old_index < 1 || old_index >= _index_map_p->length()) {
1154 // The old_index is out of range so it is not mapped. This should
1155 // not happen in regular constant pool merging use, but it can
1156 // happen if a corrupt annotation is processed.
1157 return 0;
1158 }
1159
1160 int value = _index_map_p->at(old_index);
1161 if (value == -1) {
1162 // the old_index is not mapped
1163 return 0;
1164 }
1165
1166 return value;
1167 } // end find_new_index()
1168
1169
1170 // Find new bootstrap specifier index value for old bootstrap specifier index
1171 // value by seaching the index map. Returns unused index (-1) if there is
1172 // no mapped value for the old bootstrap specifier index.
find_new_operand_index(int old_index)1173 int VM_RedefineClasses::find_new_operand_index(int old_index) {
1174 if (_operands_index_map_count == 0) {
1175 // map is empty so nothing can be found
1176 return -1;
1177 }
1178
1179 if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
1180 // The old_index is out of range so it is not mapped.
1181 // This should not happen in regular constant pool merging use.
1182 return -1;
1183 }
1184
1185 int value = _operands_index_map_p->at(old_index);
1186 if (value == -1) {
1187 // the old_index is not mapped
1188 return -1;
1189 }
1190
1191 return value;
1192 } // end find_new_operand_index()
1193
1194
1195 // Returns true if the current mismatch is due to a resolved/unresolved
1196 // class pair. Otherwise, returns false.
is_unresolved_class_mismatch(const constantPoolHandle & cp1,int index1,const constantPoolHandle & cp2,int index2)1197 bool VM_RedefineClasses::is_unresolved_class_mismatch(const constantPoolHandle& cp1,
1198 int index1, const constantPoolHandle& cp2, int index2) {
1199
1200 jbyte t1 = cp1->tag_at(index1).value();
1201 if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
1202 return false; // wrong entry type; not our special case
1203 }
1204
1205 jbyte t2 = cp2->tag_at(index2).value();
1206 if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
1207 return false; // wrong entry type; not our special case
1208 }
1209
1210 if (t1 == t2) {
1211 return false; // not a mismatch; not our special case
1212 }
1213
1214 char *s1 = cp1->klass_name_at(index1)->as_C_string();
1215 char *s2 = cp2->klass_name_at(index2)->as_C_string();
1216 if (strcmp(s1, s2) != 0) {
1217 return false; // strings don't match; not our special case
1218 }
1219
1220 return true; // made it through the gauntlet; this is our special case
1221 } // end is_unresolved_class_mismatch()
1222
1223
// Parse, check, verify and prepare the scratch (new) class version for
// every class being redefined. For each class: parse the new class bytes,
// link the old class if needed, run compare_and_normalize_class_versions(),
// verify the bytecodes, merge constant pools, and rewrite the scratch
// class. On any failure the matching JVMTI error code is returned; the
// partially filled _scratch_classes array is left for the caller to free.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (InstanceKlass**)
    os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);
    InstanceKlass* the_class = get_ik(_class_defs[i].klass);
    Symbol*  the_class_sym = the_class->name();

    log_debug(redefine, class, load)
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
       the_class->external_name(), _class_load_kind, os::available_memory() >> 10);

    // Wrap the caller-supplied new class bytes in a stream the parser accepts.
    ClassFileStream st((u1*)_class_defs[i].class_bytes,
                       _class_defs[i].class_byte_count,
                       "__VM_RedefineClasses__",
                       ClassFileStream::verify);

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(the_class, _class_load_kind);

    ClassLoadInfo cl_info(protection_domain);
    InstanceKlass* scratch_class = SystemDictionary::parse_stream(
                                                      the_class_sym,
                                                      the_class_loader,
                                                      &st,
                                                      cl_info,
                                                      THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = scratch_class;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("parse_stream exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;

      // Translate the parser's exception into the matching JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)("link_class exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(the_class, scratch_class, state);
      Verifier::verify(scratch_class, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("verify_byte_codes exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(the_class, scratch_class, state);
        Verifier::verify(scratch_class, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        log_info(redefine, class, load, exceptions)
          ("verify_byte_codes post merge-CP exception: '%s'", ex_name->as_C_string());
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite bytecodes and link the methods of the freshly merged class.
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)
        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string());
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    log_debug(redefine, class, load)
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)", the_class->external_name(), os::available_memory() >> 10);
  }

  return JVMTI_ERROR_NONE;
}
1411
1412
1413 // Map old_index to new_index as needed. scratch_cp is only needed
1414 // for log calls.
map_index(const constantPoolHandle & scratch_cp,int old_index,int new_index)1415 void VM_RedefineClasses::map_index(const constantPoolHandle& scratch_cp,
1416 int old_index, int new_index) {
1417 if (find_new_index(old_index) != 0) {
1418 // old_index is already mapped
1419 return;
1420 }
1421
1422 if (old_index == new_index) {
1423 // no mapping is needed
1424 return;
1425 }
1426
1427 _index_map_p->at_put(old_index, new_index);
1428 _index_map_count++;
1429
1430 log_trace(redefine, class, constantpool)
1431 ("mapped tag %d at index %d to %d", scratch_cp->tag_at(old_index).value(), old_index, new_index);
1432 } // end map_index()
1433
1434
1435 // Map old_index to new_index as needed.
map_operand_index(int old_index,int new_index)1436 void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
1437 if (find_new_operand_index(old_index) != -1) {
1438 // old_index is already mapped
1439 return;
1440 }
1441
1442 if (old_index == new_index) {
1443 // no mapping is needed
1444 return;
1445 }
1446
1447 _operands_index_map_p->at_put(old_index, new_index);
1448 _operands_index_map_count++;
1449
1450 log_trace(redefine, class, constantpool)("mapped bootstrap specifier at index %d to %d", old_index, new_index);
1451 } // end map_index()
1452
1453
1454 // Merge old_cp and scratch_cp and return the results of the merge via
1455 // merge_cp_p. The number of entries in *merge_cp_p is returned via
1456 // merge_cp_length_p. The entries in old_cp occupy the same locations
1457 // in *merge_cp_p. Also creates a map of indices from entries in
1458 // scratch_cp to the corresponding entry in *merge_cp_p. Index map
1459 // entries are only created for entries in scratch_cp that occupy a
1460 // different location in *merged_cp_p.
merge_constant_pools(const constantPoolHandle & old_cp,const constantPoolHandle & scratch_cp,constantPoolHandle * merge_cp_p,int * merge_cp_length_p,TRAPS)1461 bool VM_RedefineClasses::merge_constant_pools(const constantPoolHandle& old_cp,
1462 const constantPoolHandle& scratch_cp, constantPoolHandle *merge_cp_p,
1463 int *merge_cp_length_p, TRAPS) {
1464
1465 if (merge_cp_p == NULL) {
1466 assert(false, "caller must provide scratch constantPool");
1467 return false; // robustness
1468 }
1469 if (merge_cp_length_p == NULL) {
1470 assert(false, "caller must provide scratch CP length");
1471 return false; // robustness
1472 }
1473 // Worst case we need old_cp->length() + scratch_cp()->length(),
1474 // but the caller might be smart so make sure we have at least
1475 // the minimum.
1476 if ((*merge_cp_p)->length() < old_cp->length()) {
1477 assert(false, "merge area too small");
1478 return false; // robustness
1479 }
1480
1481 log_info(redefine, class, constantpool)("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(), scratch_cp->length());
1482
1483 {
1484 // Pass 0:
1485 // The old_cp is copied to *merge_cp_p; this means that any code
1486 // using old_cp does not have to change. This work looks like a
1487 // perfect fit for ConstantPool*::copy_cp_to(), but we need to
1488 // handle one special case:
1489 // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
1490 // This will make verification happy.
1491
1492 int old_i; // index into old_cp
1493
1494 // index zero (0) is not used in constantPools
1495 for (old_i = 1; old_i < old_cp->length(); old_i++) {
1496 // leave debugging crumb
1497 jbyte old_tag = old_cp->tag_at(old_i).value();
1498 switch (old_tag) {
1499 case JVM_CONSTANT_Class:
1500 case JVM_CONSTANT_UnresolvedClass:
1501 // revert the copy to JVM_CONSTANT_UnresolvedClass
1502 // May be resolving while calling this so do the same for
1503 // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
1504 (*merge_cp_p)->temp_unresolved_klass_at_put(old_i,
1505 old_cp->klass_name_index_at(old_i));
1506 break;
1507
1508 case JVM_CONSTANT_Double:
1509 case JVM_CONSTANT_Long:
1510 // just copy the entry to *merge_cp_p, but double and long take
1511 // two constant pool entries
1512 ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_false);
1513 old_i++;
1514 break;
1515
1516 default:
1517 // just copy the entry to *merge_cp_p
1518 ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_false);
1519 break;
1520 }
1521 } // end for each old_cp entry
1522
1523 ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_false);
1524 (*merge_cp_p)->extend_operands(scratch_cp, CHECK_false);
1525
1526 // We don't need to sanity check that *merge_cp_length_p is within
1527 // *merge_cp_p bounds since we have the minimum on-entry check above.
1528 (*merge_cp_length_p) = old_i;
1529 }
1530
1531 // merge_cp_len should be the same as old_cp->length() at this point
1532 // so this trace message is really a "warm-and-breathing" message.
1533 log_debug(redefine, class, constantpool)("after pass 0: merge_cp_len=%d", *merge_cp_length_p);
1534
1535 int scratch_i; // index into scratch_cp
1536 {
1537 // Pass 1a:
1538 // Compare scratch_cp entries to the old_cp entries that we have
1539 // already copied to *merge_cp_p. In this pass, we are eliminating
1540 // exact duplicates (matching entry at same index) so we only
1541 // compare entries in the common indice range.
1542 int increment = 1;
1543 int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
1544 for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
1545 switch (scratch_cp->tag_at(scratch_i).value()) {
1546 case JVM_CONSTANT_Double:
1547 case JVM_CONSTANT_Long:
1548 // double and long take two constant pool entries
1549 increment = 2;
1550 break;
1551
1552 default:
1553 increment = 1;
1554 break;
1555 }
1556
1557 bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
1558 scratch_i, CHECK_false);
1559 if (match) {
1560 // found a match at the same index so nothing more to do
1561 continue;
1562 } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
1563 *merge_cp_p, scratch_i)) {
1564 // The mismatch in compare_entry_to() above is because of a
1565 // resolved versus unresolved class entry at the same index
1566 // with the same string value. Since Pass 0 reverted any
1567 // class entries to unresolved class entries in *merge_cp_p,
1568 // we go with the unresolved class entry.
1569 continue;
1570 }
1571
1572 int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
1573 CHECK_false);
1574 if (found_i != 0) {
1575 guarantee(found_i != scratch_i,
1576 "compare_entry_to() and find_matching_entry() do not agree");
1577
1578 // Found a matching entry somewhere else in *merge_cp_p so
1579 // just need a mapping entry.
1580 map_index(scratch_cp, scratch_i, found_i);
1581 continue;
1582 }
1583
1584 // The find_matching_entry() call above could fail to find a match
1585 // due to a resolved versus unresolved class or string entry situation
1586 // like we solved above with the is_unresolved_*_mismatch() calls.
1587 // However, we would have to call is_unresolved_*_mismatch() over
1588 // all of *merge_cp_p (potentially) and that doesn't seem to be
1589 // worth the time.
1590
1591 // No match found so we have to append this entry and any unique
1592 // referenced entries to *merge_cp_p.
1593 append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
1594 CHECK_false);
1595 }
1596 }
1597
1598 log_debug(redefine, class, constantpool)
1599 ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
1600 *merge_cp_length_p, scratch_i, _index_map_count);
1601
1602 if (scratch_i < scratch_cp->length()) {
1603 // Pass 1b:
1604 // old_cp is smaller than scratch_cp so there are entries in
1605 // scratch_cp that we have not yet processed. We take care of
1606 // those now.
1607 int increment = 1;
1608 for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
1609 switch (scratch_cp->tag_at(scratch_i).value()) {
1610 case JVM_CONSTANT_Double:
1611 case JVM_CONSTANT_Long:
1612 // double and long take two constant pool entries
1613 increment = 2;
1614 break;
1615
1616 default:
1617 increment = 1;
1618 break;
1619 }
1620
1621 int found_i =
1622 scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_false);
1623 if (found_i != 0) {
1624 // Found a matching entry somewhere else in *merge_cp_p so
1625 // just need a mapping entry.
1626 map_index(scratch_cp, scratch_i, found_i);
1627 continue;
1628 }
1629
1630 // No match found so we have to append this entry and any unique
1631 // referenced entries to *merge_cp_p.
1632 append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
1633 CHECK_false);
1634 }
1635
1636 log_debug(redefine, class, constantpool)
1637 ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
1638 *merge_cp_length_p, scratch_i, _index_map_count);
1639 }
1640 finalize_operands_merge(*merge_cp_p, THREAD);
1641
1642 return true;
1643 } // end merge_constant_pools()
1644
1645
1646 // Scoped object to clean up the constant pool(s) created for merging
1647 class MergeCPCleaner {
1648 ClassLoaderData* _loader_data;
1649 ConstantPool* _cp;
1650 ConstantPool* _scratch_cp;
1651 public:
MergeCPCleaner(ClassLoaderData * loader_data,ConstantPool * merge_cp)1652 MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1653 _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
~MergeCPCleaner()1654 ~MergeCPCleaner() {
1655 _loader_data->add_to_deallocate_list(_cp);
1656 if (_scratch_cp != NULL) {
1657 _loader_data->add_to_deallocate_list(_scratch_cp);
1658 }
1659 }
add_scratch_cp(ConstantPool * scratch_cp)1660 void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1661 };
1662
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
//
// Returns JVMTI_ERROR_NONE on success, JVMTI_ERROR_OUT_OF_MEMORY if
// allocation of the merged pool (or its shrunken copy) fails, and
// JVMTI_ERROR_INTERNAL if the merge itself or the bytecode rewrite
// fails. On every return path the MergeCPCleaner below deallocates
// whatever pools are no longer needed.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             InstanceKlass* the_class, InstanceKlass* scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing. It
  // should be marked safe before discarding it. Even though
  // garbage, if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pools (may keep scratch_cp)
  merge_cp->increment_and_save_version(old_cp->version());
  scratch_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  _index_map_count = 0;
  // Map from scratch_cp index -> merged cp index; -1 marks "not mapped"
  // (i.e. the entry kept its original index).
  _index_map_p = new intArray(scratch_cp->length(), scratch_cp->length(), -1);

  // Same bookkeeping for BootstrapMethods operand arrays.
  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  _operands_index_map_count = 0;
  int operands_index_map_len = ConstantPool::operand_array_length(scratch_cp->operands());
  _operands_index_map_p = new intArray(operands_index_map_len, operands_index_map_len, -1);

  // reference to the cp holder is needed for copy_operands()
  merge_cp->set_pool_holder(scratch_class);
  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  // The temporary holder was only needed during the merge.
  merge_cp->set_pool_holder(NULL);

  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  // Save fields from the old_cp.
  merge_cp->copy_fields(old_cp());
  scratch_cp->copy_fields(old_cp());

  log_info(redefine, class, constantpool)("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count);

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (log_is_enabled(Trace, redefine, class, constantpool)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          log_trace(redefine, class, constantpool)("index_map[%d]: old=%d new=%d", count, i, value);
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1802
1803
1804 // Rewrite constant pool references in klass scratch_class.
rewrite_cp_refs(InstanceKlass * scratch_class,TRAPS)1805 bool VM_RedefineClasses::rewrite_cp_refs(InstanceKlass* scratch_class,
1806 TRAPS) {
1807
1808 // rewrite constant pool references in the nest attributes:
1809 if (!rewrite_cp_refs_in_nest_attributes(scratch_class)) {
1810 // propagate failure back to caller
1811 return false;
1812 }
1813
1814 // rewrite constant pool references in the Record attribute:
1815 if (!rewrite_cp_refs_in_record_attribute(scratch_class, THREAD)) {
1816 // propagate failure back to caller
1817 return false;
1818 }
1819
1820 // rewrite constant pool references in the PermittedSubclasses attribute:
1821 if (!rewrite_cp_refs_in_permitted_subclasses_attribute(scratch_class)) {
1822 // propagate failure back to caller
1823 return false;
1824 }
1825
1826 // rewrite constant pool references in the methods:
1827 if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
1828 // propagate failure back to caller
1829 return false;
1830 }
1831
1832 // rewrite constant pool references in the class_annotations:
1833 if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
1834 // propagate failure back to caller
1835 return false;
1836 }
1837
1838 // rewrite constant pool references in the fields_annotations:
1839 if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
1840 // propagate failure back to caller
1841 return false;
1842 }
1843
1844 // rewrite constant pool references in the methods_annotations:
1845 if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
1846 // propagate failure back to caller
1847 return false;
1848 }
1849
1850 // rewrite constant pool references in the methods_parameter_annotations:
1851 if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
1852 THREAD)) {
1853 // propagate failure back to caller
1854 return false;
1855 }
1856
1857 // rewrite constant pool references in the methods_default_annotations:
1858 if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
1859 THREAD)) {
1860 // propagate failure back to caller
1861 return false;
1862 }
1863
1864 // rewrite constant pool references in the class_type_annotations:
1865 if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) {
1866 // propagate failure back to caller
1867 return false;
1868 }
1869
1870 // rewrite constant pool references in the fields_type_annotations:
1871 if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) {
1872 // propagate failure back to caller
1873 return false;
1874 }
1875
1876 // rewrite constant pool references in the methods_type_annotations:
1877 if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) {
1878 // propagate failure back to caller
1879 return false;
1880 }
1881
1882 // There can be type annotations in the Code part of a method_info attribute.
1883 // These annotations are not accessible, even by reflection.
1884 // Currently they are not even parsed by the ClassFileParser.
1885 // If runtime access is added they will also need to be rewritten.
1886
1887 // rewrite source file name index:
1888 u2 source_file_name_idx = scratch_class->source_file_name_index();
1889 if (source_file_name_idx != 0) {
1890 u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
1891 if (new_source_file_name_idx != 0) {
1892 scratch_class->set_source_file_name_index(new_source_file_name_idx);
1893 }
1894 }
1895
1896 // rewrite class generic signature index:
1897 u2 generic_signature_index = scratch_class->generic_signature_index();
1898 if (generic_signature_index != 0) {
1899 u2 new_generic_signature_index = find_new_index(generic_signature_index);
1900 if (new_generic_signature_index != 0) {
1901 scratch_class->set_generic_signature_index(new_generic_signature_index);
1902 }
1903 }
1904
1905 return true;
1906 } // end rewrite_cp_refs()
1907
1908 // Rewrite constant pool references in the NestHost and NestMembers attributes.
rewrite_cp_refs_in_nest_attributes(InstanceKlass * scratch_class)1909 bool VM_RedefineClasses::rewrite_cp_refs_in_nest_attributes(
1910 InstanceKlass* scratch_class) {
1911
1912 u2 cp_index = scratch_class->nest_host_index();
1913 if (cp_index != 0) {
1914 scratch_class->set_nest_host_index(find_new_index(cp_index));
1915 }
1916 Array<u2>* nest_members = scratch_class->nest_members();
1917 for (int i = 0; i < nest_members->length(); i++) {
1918 u2 cp_index = nest_members->at(i);
1919 nest_members->at_put(i, find_new_index(cp_index));
1920 }
1921 return true;
1922 }
1923
1924 // Rewrite constant pool references in the Record attribute.
rewrite_cp_refs_in_record_attribute(InstanceKlass * scratch_class,TRAPS)1925 bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(
1926 InstanceKlass* scratch_class, TRAPS) {
1927 Array<RecordComponent*>* components = scratch_class->record_components();
1928 if (components != NULL) {
1929 for (int i = 0; i < components->length(); i++) {
1930 RecordComponent* component = components->at(i);
1931 u2 cp_index = component->name_index();
1932 component->set_name_index(find_new_index(cp_index));
1933 cp_index = component->descriptor_index();
1934 component->set_descriptor_index(find_new_index(cp_index));
1935 cp_index = component->generic_signature_index();
1936 if (cp_index != 0) {
1937 component->set_generic_signature_index(find_new_index(cp_index));
1938 }
1939
1940 AnnotationArray* annotations = component->annotations();
1941 if (annotations != NULL && annotations->length() != 0) {
1942 int byte_i = 0; // byte index into annotations
1943 if (!rewrite_cp_refs_in_annotations_typeArray(annotations, byte_i, THREAD)) {
1944 log_debug(redefine, class, annotation)("bad record_component_annotations at %d", i);
1945 // propagate failure back to caller
1946 return false;
1947 }
1948 }
1949
1950 AnnotationArray* type_annotations = component->type_annotations();
1951 if (type_annotations != NULL && type_annotations->length() != 0) {
1952 int byte_i = 0; // byte index into annotations
1953 if (!rewrite_cp_refs_in_annotations_typeArray(type_annotations, byte_i, THREAD)) {
1954 log_debug(redefine, class, annotation)("bad record_component_type_annotations at %d", i);
1955 // propagate failure back to caller
1956 return false;
1957 }
1958 }
1959 }
1960 }
1961 return true;
1962 }
1963
1964 // Rewrite constant pool references in the PermittedSubclasses attribute.
rewrite_cp_refs_in_permitted_subclasses_attribute(InstanceKlass * scratch_class)1965 bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
1966 InstanceKlass* scratch_class) {
1967
1968 Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
1969 assert(permitted_subclasses != NULL, "unexpected null permitted_subclasses");
1970 for (int i = 0; i < permitted_subclasses->length(); i++) {
1971 u2 cp_index = permitted_subclasses->at(i);
1972 permitted_subclasses->at_put(i, find_new_index(cp_index));
1973 }
1974 return true;
1975 }
1976
// Rewrite constant pool references in the methods.
//
// Walks every method of scratch_class and patches cp indexes that were
// remapped by the constant pool merge. A method may be replaced with a
// new, longer version when an ldc must grow into ldc_w; the replacement
// Method* is stored back into the methods array even if the rewrite
// also raised an exception, because the original method is already on
// the deallocation list. Returns false (with any pending exception
// cleared) so the caller can report JVMTI_ERROR_INTERNAL.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
       InstanceKlass* scratch_class, TRAPS) {

  Array<Method*>* methods = scratch_class->methods();

  if (methods == NULL || methods->length() == 0) {
    // no methods so nothing to do
    return true;
  }

  // rewrite constant pool references in the methods:
  for (int i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    methodHandle new_method;
    rewrite_cp_refs_in_method(method, &new_method, THREAD);
    if (!new_method.is_null()) {
      // the method has been replaced so save the new method version
      // even in the case of an exception. original method is on the
      // deallocation list.
      methods->at_put(i, new_method());
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      log_info(redefine, class, load, exceptions)("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string());
      // Need to clear pending exception here as the super caller sets
      // the JVMTI_ERROR_INTERNAL if the returned value is false.
      CLEAR_PENDING_EXCEPTION;
      return false;
    }
  }

  return true;
}
2011
2012
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// On return, *new_method_p is non-null only when the method had to be
// replaced because an ldc grew into the longer ldc_w form (the bytecode
// stream is expanded via the Relocator); otherwise it is a null handle
// and the method was patched in place. May leave a pending exception
// (e.g. from Relocator::insert_space_at); the caller checks for it.
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a NoSafepointVerifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  NoSafepointVerifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte cp index; if the remapped index no
        // longer fits in a byte, the instruction must grow to ldc_w.
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            log_trace(redefine, class, constantpool)
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            *(bcp + 1) = new_index;
          } else {
            log_trace(redefine, class, constantpool)
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c), p2i(bcp), cp_index, new_index);
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // insert_space_at() can allocate and hence safepoint, so
              // the NoSafepointVerifier must be paused around it.
              PauseNoSafepointVerifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokedynamic  : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          log_trace(redefine, class, constantpool)
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),p2i(bcp), cp_index, new_index);
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the ConstantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and ConstantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
      default:
        break;
    }
  } // end for each bytecode

  // We also need to rewrite the parameter name indexes, if there is
  // method parameter data present
  if(method->has_method_parameters()) {
    const int len = method->method_parameters_length();
    MethodParametersElement* elem = method->method_parameters_start();

    for (int i = 0; i < len; i++) {
      const u2 cp_index = elem[i].name_cp_index;
      const u2 new_cp_index = find_new_index(cp_index);
      if (new_cp_index != 0) {
        elem[i].name_cp_index = new_cp_index;
      }
    }
  }
} // end rewrite_cp_refs_in_method()
2156
2157
2158 // Rewrite constant pool references in the class_annotations field.
rewrite_cp_refs_in_class_annotations(InstanceKlass * scratch_class,TRAPS)2159 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
2160 InstanceKlass* scratch_class, TRAPS) {
2161
2162 AnnotationArray* class_annotations = scratch_class->class_annotations();
2163 if (class_annotations == NULL || class_annotations->length() == 0) {
2164 // no class_annotations so nothing to do
2165 return true;
2166 }
2167
2168 log_debug(redefine, class, annotation)("class_annotations length=%d", class_annotations->length());
2169
2170 int byte_i = 0; // byte index into class_annotations
2171 return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
2172 THREAD);
2173 }
2174
2175
2176 // Rewrite constant pool references in an annotations typeArray. This
2177 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
2178 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
2179 //
2180 // annotations_typeArray {
2181 // u2 num_annotations;
2182 // annotation annotations[num_annotations];
2183 // }
2184 //
rewrite_cp_refs_in_annotations_typeArray(AnnotationArray * annotations_typeArray,int & byte_i_ref,TRAPS)2185 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
2186 AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
2187
2188 if ((byte_i_ref + 2) > annotations_typeArray->length()) {
2189 // not enough room for num_annotations field
2190 log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2191 return false;
2192 }
2193
2194 u2 num_annotations = Bytes::get_Java_u2((address)
2195 annotations_typeArray->adr_at(byte_i_ref));
2196 byte_i_ref += 2;
2197
2198 log_debug(redefine, class, annotation)("num_annotations=%d", num_annotations);
2199
2200 int calc_num_annotations = 0;
2201 for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2202 if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
2203 byte_i_ref, THREAD)) {
2204 log_debug(redefine, class, annotation)("bad annotation_struct at %d", calc_num_annotations);
2205 // propagate failure back to caller
2206 return false;
2207 }
2208 }
2209 assert(num_annotations == calc_num_annotations, "sanity check");
2210
2211 return true;
2212 } // end rewrite_cp_refs_in_annotations_typeArray()
2213
2214
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is advanced past the annotation on success; on malformed
// data it is left at the point of failure and false is returned.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    log_debug(redefine, class, annotation)("length() is too small for annotation_struct");
    return false;
  }

  // rewrite_cp_ref_in_annotation_data() also advances byte_i_ref by 2
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "type_index", THREAD);

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  log_debug(redefine, class, annotation)
    ("type_index=%d num_element_value_pairs=%d", type_index, num_element_value_pairs);

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      log_debug(redefine, class, annotation)("length() is too small for element_name_index");
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "element_name_index", THREAD);

    log_debug(redefine, class, annotation)("element_name_index=%d", element_name_index);

    // the value half of the pair may itself contain nested annotations
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      log_debug(redefine, class, annotation)("bad element_value at %d", calc_num_element_value_pairs);
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
2274
2275
2276 // Rewrite a constant pool reference at the current position in
2277 // annotations_typeArray if needed. Returns the original constant
2278 // pool reference if a rewrite was not needed or the new constant
2279 // pool reference if a rewrite was needed.
rewrite_cp_ref_in_annotation_data(AnnotationArray * annotations_typeArray,int & byte_i_ref,const char * trace_mesg,TRAPS)2280 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
2281 AnnotationArray* annotations_typeArray, int &byte_i_ref,
2282 const char * trace_mesg, TRAPS) {
2283
2284 address cp_index_addr = (address)
2285 annotations_typeArray->adr_at(byte_i_ref);
2286 u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
2287 u2 new_cp_index = find_new_index(old_cp_index);
2288 if (new_cp_index != 0) {
2289 log_debug(redefine, class, annotation)("mapped old %s=%d", trace_mesg, old_cp_index);
2290 Bytes::put_Java_u2(cp_index_addr, new_cp_index);
2291 old_cp_index = new_cp_index;
2292 }
2293 byte_i_ref += 2;
2294 return old_cp_index;
2295 }
2296
2297
2298 // Rewrite constant pool references in the element_value portion of an
2299 // annotations_typeArray. This "structure" is from section 4.8.15.1 of
2300 // the 2nd-edition of the VM spec:
2301 //
2302 // struct element_value {
2303 // u1 tag;
2304 // union {
2305 // u2 const_value_index;
2306 // {
2307 // u2 type_name_index;
2308 // u2 const_name_index;
2309 // } enum_const_value;
2310 // u2 class_info_index;
2311 // annotation annotation_value;
2312 // struct {
2313 // u2 num_values;
2314 // element_value values[num_values];
2315 // } array_value;
2316 // } value;
2317 // }
2318 //
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  // Parses one element_value starting at byte_i_ref, rewriting any
  // constant pool indices found inside it. On success byte_i_ref has
  // been advanced just past the element_value; on failure (truncated
  // or malformed data) false is returned and byte_i_ref is left where
  // parsing stopped.
  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    log_debug(redefine, class, annotation)("length() is too small for a tag");
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  log_debug(redefine, class, annotation)("tag='%c'", tag);

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case JVM_SIGNATURE_BYTE:
    case JVM_SIGNATURE_CHAR:
    case JVM_SIGNATURE_DOUBLE:
    case JVM_SIGNATURE_FLOAT:
    case JVM_SIGNATURE_INT:
    case JVM_SIGNATURE_LONG:
    case JVM_SIGNATURE_SHORT:
    case JVM_SIGNATURE_BOOLEAN:

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        log_debug(redefine, class, annotation)("length() is too small for a const_value_index");
        return false;
      }

      // rewrite_cp_ref_in_annotation_data advances byte_i_ref by 2
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "const_value_index", THREAD);

      log_debug(redefine, class, annotation)("const_value_index=%d", const_value_index);
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        log_debug(redefine, class, annotation)("length() is too small for a enum_const_value");
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "type_name_index", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "const_name_index", THREAD);

      log_debug(redefine, class, annotation)
        ("type_name_index=%d const_name_index=%d", type_name_index, const_name_index);
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        log_debug(redefine, class, annotation)("length() is too small for a class_info_index");
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "class_info_index", THREAD);

      log_debug(redefine, class, annotation)("class_info_index=%d", class_info_index);
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case JVM_SIGNATURE_ARRAY:
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        log_debug(redefine, class, annotation)("length() is too small for a num_values field");
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      log_debug(redefine, class, annotation)("num_values=%d", num_values);

      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        // recursive call: each array element is itself an element_value
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          log_debug(redefine, class, annotation)("bad nested element_value at %d", calc_num_values);
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      log_debug(redefine, class, annotation)("bad tag=0x%x", tag);
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
2446
2447
2448 // Rewrite constant pool references in a fields_annotations field.
rewrite_cp_refs_in_fields_annotations(InstanceKlass * scratch_class,TRAPS)2449 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
2450 InstanceKlass* scratch_class, TRAPS) {
2451
2452 Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
2453
2454 if (fields_annotations == NULL || fields_annotations->length() == 0) {
2455 // no fields_annotations so nothing to do
2456 return true;
2457 }
2458
2459 log_debug(redefine, class, annotation)("fields_annotations length=%d", fields_annotations->length());
2460
2461 for (int i = 0; i < fields_annotations->length(); i++) {
2462 AnnotationArray* field_annotations = fields_annotations->at(i);
2463 if (field_annotations == NULL || field_annotations->length() == 0) {
2464 // this field does not have any annotations so skip it
2465 continue;
2466 }
2467
2468 int byte_i = 0; // byte index into field_annotations
2469 if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
2470 THREAD)) {
2471 log_debug(redefine, class, annotation)("bad field_annotations at %d", i);
2472 // propagate failure back to caller
2473 return false;
2474 }
2475 }
2476
2477 return true;
2478 } // end rewrite_cp_refs_in_fields_annotations()
2479
2480
2481 // Rewrite constant pool references in a methods_annotations field.
rewrite_cp_refs_in_methods_annotations(InstanceKlass * scratch_class,TRAPS)2482 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
2483 InstanceKlass* scratch_class, TRAPS) {
2484
2485 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2486 Method* m = scratch_class->methods()->at(i);
2487 AnnotationArray* method_annotations = m->constMethod()->method_annotations();
2488
2489 if (method_annotations == NULL || method_annotations->length() == 0) {
2490 // this method does not have any annotations so skip it
2491 continue;
2492 }
2493
2494 int byte_i = 0; // byte index into method_annotations
2495 if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
2496 THREAD)) {
2497 log_debug(redefine, class, annotation)("bad method_annotations at %d", i);
2498 // propagate failure back to caller
2499 return false;
2500 }
2501 }
2502
2503 return true;
2504 } // end rewrite_cp_refs_in_methods_annotations()
2505
2506
2507 // Rewrite constant pool references in a methods_parameter_annotations
2508 // field. This "structure" is adapted from the
2509 // RuntimeVisibleParameterAnnotations_attribute described in section
2510 // 4.8.17 of the 2nd-edition of the VM spec:
2511 //
2512 // methods_parameter_annotations_typeArray {
2513 // u1 num_parameters;
2514 // {
2515 // u2 num_annotations;
2516 // annotation annotations[num_annotations];
2517 // } parameter_annotations[num_parameters];
2518 // }
2519 //
rewrite_cp_refs_in_methods_parameter_annotations(InstanceKlass * scratch_class,TRAPS)2520 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
2521 InstanceKlass* scratch_class, TRAPS) {
2522
2523 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2524 Method* m = scratch_class->methods()->at(i);
2525 AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
2526 if (method_parameter_annotations == NULL
2527 || method_parameter_annotations->length() == 0) {
2528 // this method does not have any parameter annotations so skip it
2529 continue;
2530 }
2531
2532 if (method_parameter_annotations->length() < 1) {
2533 // not enough room for a num_parameters field
2534 log_debug(redefine, class, annotation)("length() is too small for a num_parameters field at %d", i);
2535 return false;
2536 }
2537
2538 int byte_i = 0; // byte index into method_parameter_annotations
2539
2540 u1 num_parameters = method_parameter_annotations->at(byte_i);
2541 byte_i++;
2542
2543 log_debug(redefine, class, annotation)("num_parameters=%d", num_parameters);
2544
2545 int calc_num_parameters = 0;
2546 for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
2547 if (!rewrite_cp_refs_in_annotations_typeArray(
2548 method_parameter_annotations, byte_i, THREAD)) {
2549 log_debug(redefine, class, annotation)("bad method_parameter_annotations at %d", calc_num_parameters);
2550 // propagate failure back to caller
2551 return false;
2552 }
2553 }
2554 assert(num_parameters == calc_num_parameters, "sanity check");
2555 }
2556
2557 return true;
2558 } // end rewrite_cp_refs_in_methods_parameter_annotations()
2559
2560
2561 // Rewrite constant pool references in a methods_default_annotations
2562 // field. This "structure" is adapted from the AnnotationDefault_attribute
2563 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2564 //
2565 // methods_default_annotations_typeArray {
2566 // element_value default_value;
2567 // }
2568 //
rewrite_cp_refs_in_methods_default_annotations(InstanceKlass * scratch_class,TRAPS)2569 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2570 InstanceKlass* scratch_class, TRAPS) {
2571
2572 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2573 Method* m = scratch_class->methods()->at(i);
2574 AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
2575 if (method_default_annotations == NULL
2576 || method_default_annotations->length() == 0) {
2577 // this method does not have any default annotations so skip it
2578 continue;
2579 }
2580
2581 int byte_i = 0; // byte index into method_default_annotations
2582
2583 if (!rewrite_cp_refs_in_element_value(
2584 method_default_annotations, byte_i, THREAD)) {
2585 log_debug(redefine, class, annotation)("bad default element_value at %d", i);
2586 // propagate failure back to caller
2587 return false;
2588 }
2589 }
2590
2591 return true;
2592 } // end rewrite_cp_refs_in_methods_default_annotations()
2593
2594
2595 // Rewrite constant pool references in a class_type_annotations field.
rewrite_cp_refs_in_class_type_annotations(InstanceKlass * scratch_class,TRAPS)2596 bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
2597 InstanceKlass* scratch_class, TRAPS) {
2598
2599 AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
2600 if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
2601 // no class_type_annotations so nothing to do
2602 return true;
2603 }
2604
2605 log_debug(redefine, class, annotation)("class_type_annotations length=%d", class_type_annotations->length());
2606
2607 int byte_i = 0; // byte index into class_type_annotations
2608 return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
2609 byte_i, "ClassFile", THREAD);
2610 } // end rewrite_cp_refs_in_class_type_annotations()
2611
2612
2613 // Rewrite constant pool references in a fields_type_annotations field.
rewrite_cp_refs_in_fields_type_annotations(InstanceKlass * scratch_class,TRAPS)2614 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(
2615 InstanceKlass* scratch_class, TRAPS) {
2616
2617 Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
2618 if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
2619 // no fields_type_annotations so nothing to do
2620 return true;
2621 }
2622
2623 log_debug(redefine, class, annotation)("fields_type_annotations length=%d", fields_type_annotations->length());
2624
2625 for (int i = 0; i < fields_type_annotations->length(); i++) {
2626 AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
2627 if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
2628 // this field does not have any annotations so skip it
2629 continue;
2630 }
2631
2632 int byte_i = 0; // byte index into field_type_annotations
2633 if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
2634 byte_i, "field_info", THREAD)) {
2635 log_debug(redefine, class, annotation)("bad field_type_annotations at %d", i);
2636 // propagate failure back to caller
2637 return false;
2638 }
2639 }
2640
2641 return true;
2642 } // end rewrite_cp_refs_in_fields_type_annotations()
2643
2644
2645 // Rewrite constant pool references in a methods_type_annotations field.
rewrite_cp_refs_in_methods_type_annotations(InstanceKlass * scratch_class,TRAPS)2646 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
2647 InstanceKlass* scratch_class, TRAPS) {
2648
2649 for (int i = 0; i < scratch_class->methods()->length(); i++) {
2650 Method* m = scratch_class->methods()->at(i);
2651 AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
2652
2653 if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
2654 // this method does not have any annotations so skip it
2655 continue;
2656 }
2657
2658 log_debug(redefine, class, annotation)("methods type_annotations length=%d", method_type_annotations->length());
2659
2660 int byte_i = 0; // byte index into method_type_annotations
2661 if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
2662 byte_i, "method_info", THREAD)) {
2663 log_debug(redefine, class, annotation)("bad method_type_annotations at %d", i);
2664 // propagate failure back to caller
2665 return false;
2666 }
2667 }
2668
2669 return true;
2670 } // end rewrite_cp_refs_in_methods_type_annotations()
2671
2672
2673 // Rewrite constant pool references in a type_annotations
2674 // field. This "structure" is adapted from the
2675 // RuntimeVisibleTypeAnnotations_attribute described in
2676 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2677 //
2678 // type_annotations_typeArray {
2679 // u2 num_annotations;
2680 // type_annotation annotations[num_annotations];
2681 // }
2682 //
rewrite_cp_refs_in_type_annotations_typeArray(AnnotationArray * type_annotations_typeArray,int & byte_i_ref,const char * location_mesg,TRAPS)2683 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
2684 AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2685 const char * location_mesg, TRAPS) {
2686
2687 if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
2688 // not enough room for num_annotations field
2689 log_debug(redefine, class, annotation)("length() is too small for num_annotations field");
2690 return false;
2691 }
2692
2693 u2 num_annotations = Bytes::get_Java_u2((address)
2694 type_annotations_typeArray->adr_at(byte_i_ref));
2695 byte_i_ref += 2;
2696
2697 log_debug(redefine, class, annotation)("num_type_annotations=%d", num_annotations);
2698
2699 int calc_num_annotations = 0;
2700 for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
2701 if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
2702 byte_i_ref, location_mesg, THREAD)) {
2703 log_debug(redefine, class, annotation)("bad type_annotation_struct at %d", calc_num_annotations);
2704 // propagate failure back to caller
2705 return false;
2706 }
2707 }
2708 assert(num_annotations == calc_num_annotations, "sanity check");
2709
2710 if (byte_i_ref != type_annotations_typeArray->length()) {
2711 log_debug(redefine, class, annotation)
2712 ("read wrong amount of bytes at end of processing type_annotations_typeArray (%d of %d bytes were read)",
2713 byte_i_ref, type_annotations_typeArray->length());
2714 return false;
2715 }
2716
2717 return true;
2718 } // end rewrite_cp_refs_in_type_annotations_typeArray()
2719
2720
2721 // Rewrite constant pool references in a type_annotation
2722 // field. This "structure" is adapted from the
2723 // RuntimeVisibleTypeAnnotations_attribute described in
2724 // section 4.7.20 of the Java SE 8 Edition of the VM spec:
2725 //
2726 // type_annotation {
2727 // u1 target_type;
2728 // union {
2729 // type_parameter_target;
2730 // supertype_target;
2731 // type_parameter_bound_target;
2732 // empty_target;
2733 // method_formal_parameter_target;
2734 // throws_target;
2735 // localvar_target;
2736 // catch_target;
2737 // offset_target;
2738 // type_argument_target;
2739 // } target_info;
2740 // type_path target_path;
2741 // annotation anno;
2742 // }
2743 //
rewrite_cp_refs_in_type_annotation_struct(AnnotationArray * type_annotations_typeArray,int & byte_i_ref,const char * location_mesg,TRAPS)2744 bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
2745 AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
2746 const char * location_mesg, TRAPS) {
2747
2748 if (!skip_type_annotation_target(type_annotations_typeArray,
2749 byte_i_ref, location_mesg, THREAD)) {
2750 return false;
2751 }
2752
2753 if (!skip_type_annotation_type_path(type_annotations_typeArray,
2754 byte_i_ref, THREAD)) {
2755 return false;
2756 }
2757
2758 if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray,
2759 byte_i_ref, THREAD)) {
2760 return false;
2761 }
2762
2763 return true;
2764 } // end rewrite_cp_refs_in_type_annotation_struct()
2765
2766
2767 // Read, verify and skip over the target_type and target_info part
2768 // so that rewriting can continue in the later parts of the struct.
2769 //
2770 // u1 target_type;
2771 // union {
2772 // type_parameter_target;
2773 // supertype_target;
2774 // type_parameter_bound_target;
2775 // empty_target;
2776 // method_formal_parameter_target;
2777 // throws_target;
2778 // localvar_target;
2779 // catch_target;
2780 // offset_target;
2781 // type_argument_target;
2782 // } target_info;
2783 //
bool VM_RedefineClasses::skip_type_annotation_target(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
       const char * location_mesg, TRAPS) {

  // Advances byte_i_ref past the target_type byte and the
  // variable-sized target_info union that follows it, validating that
  // enough bytes remain for each field. No constant pool indices live
  // in this part of a type_annotation, so nothing is rewritten here.
  // Returns false on truncated data or an unknown target_type.
  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a target_type let alone the rest of a type_annotation
    log_debug(redefine, class, annotation)("length() is too small for a target_type");
    return false;
  }

  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;
  log_debug(redefine, class, annotation)("target_type=0x%.2x", target_type);
  log_debug(redefine, class, annotation)("location=%s", location_mesg);

  // Skip over target_info
  switch (target_type) {
    case 0x00:
    // kind: type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x01:
    // kind: type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_target {
      //   u1 type_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)("type_parameter_target: type_parameter_index=%d", type_parameter_index);
    } break;

    case 0x10:
    // kind: type in extends clause of class or interface declaration
    //       (including the direct superclass of an unsafe anonymous class declaration),
    //       or in implements clause of interface declaration
    // location: ClassFile

    {
      // struct:
      // supertype_target {
      //   u2 supertype_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a supertype_target");
        return false;
      }

      u2 supertype_index = Bytes::get_Java_u2((address)
                             type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("supertype_target: supertype_index=%d", supertype_index);
    } break;

    case 0x11:
    // kind: type in bound of type parameter declaration of generic class or interface
    // location: ClassFile
    case 0x12:
    // kind: type in bound of type parameter declaration of generic method or constructor
    // location: method_info

    {
      // struct:
      // type_parameter_bound_target {
      //   u1 type_parameter_index;
      //   u1 bound_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_parameter_bound_target");
        return false;
      }

      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;
      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d", type_parameter_index, bound_index);
    } break;

    case 0x13:
    // kind: type in field declaration
    // location: field_info
    case 0x14:
    // kind: return type of method, or type of newly constructed object
    // location: method_info
    case 0x15:
    // kind: receiver type of method or constructor
    // location: method_info

    {
      // struct:
      // empty_target {
      // }
      //
      // nothing to skip: the target_info union is empty for these kinds
      log_debug(redefine, class, annotation)("empty_target");
    } break;

    case 0x16:
    // kind: type in formal parameter declaration of method, constructor, or lambda expression
    // location: method_info

    {
      // struct:
      // formal_parameter_target {
      //   u1 formal_parameter_index;
      // }
      //
      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a formal_parameter_target");
        return false;
      }

      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("formal_parameter_target: formal_parameter_index=%d", formal_parameter_index);
    } break;

    case 0x17:
    // kind: type in throws clause of method or constructor
    // location: method_info

    {
      // struct:
      // throws_target {
      //   u2 throws_type_index
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a throws_target");
        return false;
      }

      u2 throws_type_index = Bytes::get_Java_u2((address)
                               type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("throws_target: throws_type_index=%d", throws_type_index);
    } break;

    case 0x40:
    // kind: type in local variable declaration
    // location: Code
    case 0x41:
    // kind: type in resource variable declaration
    // location: Code

    {
      // struct:
      // localvar_target {
      //   u2 table_length;
      //   struct {
      //     u2 start_pc;
      //     u2 length;
      //     u2 index;
      //   } table[table_length];
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        // not enough room for a table_length let alone the rest of a localvar_target
        log_debug(redefine, class, annotation)("length() is too small for a localvar_target table_length");
        return false;
      }

      u2 table_length = Bytes::get_Java_u2((address)
                          type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("localvar_target: table_length=%d", table_length);

      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
      int table_size = table_length * table_struct_size;

      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
        // not enough room for a table
        log_debug(redefine, class, annotation)("length() is too small for a table array of length %d", table_length);
        return false;
      }

      // Skip over table
      byte_i_ref += table_size;
    } break;

    case 0x42:
    // kind: type in exception parameter declaration
    // location: Code

    {
      // struct:
      // catch_target {
      //   u2 exception_table_index;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a catch_target");
        return false;
      }

      u2 exception_table_index = Bytes::get_Java_u2((address)
                                   type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("catch_target: exception_table_index=%d", exception_table_index);
    } break;

    case 0x43:
    // kind: type in instanceof expression
    // location: Code
    case 0x44:
    // kind: type in new expression
    // location: Code
    case 0x45:
    // kind: type in method reference expression using ::new
    // location: Code
    case 0x46:
    // kind: type in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // offset_target {
      //   u2 offset;
      // }
      //
      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a offset_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;

      log_debug(redefine, class, annotation)("offset_target: offset=%d", offset);
    } break;

    case 0x47:
    // kind: type in cast expression
    // location: Code
    case 0x48:
    // kind: type argument for generic constructor in new expression or
    //       explicit constructor invocation statement
    // location: Code
    case 0x49:
    // kind: type argument for generic method in method invocation expression
    // location: Code
    case 0x4A:
    // kind: type argument for generic constructor in method reference expression using ::new
    // location: Code
    case 0x4B:
    // kind: type argument for generic method in method reference expression using ::Identifier
    // location: Code

    {
      // struct:
      // type_argument_target {
      //   u2 offset;
      //   u1 type_argument_index;
      // }
      //
      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
        log_debug(redefine, class, annotation)("length() is too small for a type_argument_target");
        return false;
      }

      u2 offset = Bytes::get_Java_u2((address)
                    type_annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
      byte_i_ref += 1;

      log_debug(redefine, class, annotation)
        ("type_argument_target: offset=%d, type_argument_index=%d", offset, type_argument_index);
    } break;

    default:
      // an unknown target_type means we cannot determine the size of
      // target_info, so parsing cannot continue past this point
      log_debug(redefine, class, annotation)("unknown target_type");
#ifdef ASSERT
      ShouldNotReachHere();
#endif
      return false;
  }

  return true;
} // end skip_type_annotation_target()
3084
3085
3086 // Read, verify and skip over the type_path part so that rewriting
3087 // can continue in the later parts of the struct.
3088 //
3089 // type_path {
3090 // u1 path_length;
3091 // {
3092 // u1 type_path_kind;
3093 // u1 type_argument_index;
3094 // } path[path_length];
3095 // }
3096 //
bool VM_RedefineClasses::skip_type_annotation_type_path(
       AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) {

  // Advances byte_i_ref past the type_path structure, validating the
  // path_length field and each {type_path_kind, type_argument_index}
  // pair. No constant pool indices live in a type_path, so nothing is
  // rewritten. Returns false on truncated or inconsistent data.
  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
    // not enough room for a path_length let alone the rest of the type_path
    log_debug(redefine, class, annotation)("length() is too small for a type_path");
    return false;
  }

  u1 path_length = type_annotations_typeArray->at(byte_i_ref);
  byte_i_ref += 1;

  log_debug(redefine, class, annotation)("type_path: path_length=%d", path_length);

  int calc_path_length = 0;
  for (; calc_path_length < path_length; calc_path_length++) {
    if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
      // not enough room for a path
      log_debug(redefine, class, annotation)
        ("length() is too small for path entry %d of %d", calc_path_length, path_length);
      return false;
    }

    u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;
    u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
    byte_i_ref += 1;

    log_debug(redefine, class, annotation)
      ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
       calc_path_length, type_path_kind, type_argument_index);

    // JVMS 4.7.20.2: type_path_kind must be 0-3, and type_argument_index
    // must be 0 unless type_path_kind is 3 (type argument of a
    // parameterized type)
    if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
      log_debug(redefine, class, annotation)("inconsistent type_path values");
      return false;
    }
  }
  assert(path_length == calc_path_length, "sanity check");

  return true;
} // end skip_type_annotation_type_path()
3139
3140
3141 // Rewrite constant pool references in the method's stackmap table.
3142 // These "structures" are adapted from the StackMapTable_attribute that
3143 // is described in section 4.8.4 of the 6.0 version of the VM spec
3144 // (dated 2005.10.26):
3145 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
3146 //
3147 // stack_map {
3148 // u2 number_of_entries;
3149 // stack_map_frame entries[number_of_entries];
3150 // }
3151 //
// Rewrite constant pool references in the method's StackMapTable
// attribute (see JVMS section 4.7.4). The table is patched in place:
// only u2 cpool indices inside Object_variable_info entries can change,
// so the attribute's overall size is unaffected.
void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
       const methodHandle& method, TRAPS) {

  // nothing to rewrite if the method has no stackmap table
  if (!method->has_stackmap_table()) {
    return;
  }

  AnnotationArray* stackmap_data = method->stackmap_data();
  address stackmap_p = (address)stackmap_data->adr_at(0);
  address stackmap_end = stackmap_p + stackmap_data->length();

  // The attribute begins with a u2 count of stack_map_frame entries.
  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
  stackmap_p += 2;

  log_debug(redefine, class, stackmap)("number_of_entries=%u", number_of_entries);

  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    u1 frame_type = *stackmap_p;
    stackmap_p++;

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type <= 63) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= 64 && frame_type <= 127) {
      // helper advances stackmap_p past the verification_type_info entry
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // reserved for future use
    else if (frame_type >= 128 && frame_type <= 246) {
      // nothing more to do for reserved frame_types
    }

    // same_locals_1_stack_item_frame_extended {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
    //   u2 offset_delta;
    //   verification_type_info stack[1];
    // }
    else if (frame_type == 247) {
      stackmap_p += 2;  // skip offset_delta; it needs no rewriting
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // chop_frame {
    //   u1 frame_type = CHOP; /* 248-250 */
    //   u2 offset_delta;
    // }
    else if (frame_type >= 248 && frame_type <= 250) {
      stackmap_p += 2;  // skip offset_delta
    }

    // same_frame_extended {
    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
    //   u2 offset_delta;
    // }
    else if (frame_type == 251) {
      stackmap_p += 2;  // skip offset_delta
    }

    // append_frame {
    //   u1 frame_type = APPEND; /* 252-254 */
    //   u2 offset_delta;
    //   verification_type_info locals[frame_type - 251];
    // }
    else if (frame_type >= 252 && frame_type <= 254) {
      assert(stackmap_p + 2 <= stackmap_end,
        "no room for offset_delta");
      stackmap_p += 2;
      u1 len = frame_type - 251;  // 1-3 locals entries by definition of APPEND
      for (u1 i = 0; i < len; i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }

    // full_frame {
    //   u1 frame_type = FULL_FRAME; /* 255 */
    //   u2 offset_delta;
    //   u2 number_of_locals;
    //   verification_type_info locals[number_of_locals];
    //   u2 number_of_stack_items;
    //   verification_type_info stack[number_of_stack_items];
    // }
    else if (frame_type == 255) {
      assert(stackmap_p + 2 + 2 <= stackmap_end,
        "no room for smallest full_frame");
      stackmap_p += 2;  // skip offset_delta

      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }

      // Use the largest size for the number_of_stack_items, but only get
      // the right number of bytes.
      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }
  } // end for each stack_map_frame
  assert(number_of_entries == calc_number_of_entries, "sanity check");
} // end rewrite_cp_refs_in_stack_map_table()
3287
3288
3289 // Rewrite constant pool references in the verification type info
3290 // portion of the method's stackmap table. These "structures" are
3291 // adapted from the StackMapTable_attribute that is described in
// section 4.7.4 (StackMapTable attribute) of the Java Virtual Machine
// Specification (originally section 4.8.4 of the Java 6 draft spec):
3294 //
3295 // The verification_type_info structure is a u1 tag followed by 0 or
3296 // more bytes of data:
3297 //
3298 // union verification_type_info {
3299 // Top_variable_info;
3300 // Integer_variable_info;
3301 // Float_variable_info;
3302 // Long_variable_info;
3303 // Double_variable_info;
3304 // Null_variable_info;
3305 // UninitializedThis_variable_info;
3306 // Object_variable_info;
3307 // Uninitialized_variable_info;
3308 // }
3309 //
// stackmap_p_ref is an in/out cursor: on return it has been advanced past
// the bytes consumed by this verification_type_info entry. frame_i and
// frame_type are used for logging only.
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type, TRAPS) {

  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  u1 tag = *stackmap_p_ref;
  stackmap_p_ref++;

  switch (tag) {
  // Top_variable_info {
  //   u1 tag = ITEM_Top; /* 0 */
  // }
  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  case 0:  // fall through

  // Integer_variable_info {
  //   u1 tag = ITEM_Integer; /* 1 */
  // }
  case ITEM_Integer:  // fall through

  // Float_variable_info {
  //   u1 tag = ITEM_Float; /* 2 */
  // }
  case ITEM_Float:  // fall through

  // Double_variable_info {
  //   u1 tag = ITEM_Double; /* 3 */
  // }
  case ITEM_Double:  // fall through

  // Long_variable_info {
  //   u1 tag = ITEM_Long; /* 4 */
  // }
  case ITEM_Long:  // fall through

  // Null_variable_info {
  //   u1 tag = ITEM_Null; /* 5 */
  // }
  case ITEM_Null:  // fall through

  // UninitializedThis_variable_info {
  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  // }
  case ITEM_UninitializedThis:
    // nothing more to do for the above tag types; they carry no payload
    break;

  // Object_variable_info {
  //   u1 tag = ITEM_Object; /* 7 */
  //   u2 cpool_index;
  // }
  case ITEM_Object:
  {
    // The only case that actually references the constant pool: remap
    // the cpool_index in place if constant pool merging moved it.
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(cpool_index);
    if (new_cp_index != 0) {  // find_new_index() returns 0 when unchanged
      log_debug(redefine, class, stackmap)("mapped old cpool_index=%d", cpool_index);
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      cpool_index = new_cp_index;
    }
    stackmap_p_ref += 2;

    log_debug(redefine, class, stackmap)
      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i, frame_type, cpool_index);
  } break;

  // Uninitialized_variable_info {
  //   u1 tag = ITEM_Uninitialized; /* 8 */
  //   u2 offset;
  // }
  case ITEM_Uninitialized:
    // offset is a bytecode offset, not a cpool index; just skip it
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
    stackmap_p_ref += 2;
    break;

  default:
    log_debug(redefine, class, stackmap)("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag);
    ShouldNotReachHere();
    break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
3392
3393
3394 // Change the constant pool associated with klass scratch_class to
3395 // scratch_cp. If shrink is true, then scratch_cp_length elements
3396 // are copied from scratch_cp to a smaller constant pool and the
3397 // smaller constant pool is associated with scratch_class.
// Shrink the merged constant pool to scratch_cp_length entries, attach it
// to scratch_class, and then rewrite every constant pool index embedded in
// the class's metadata (fields, inner classes, methods and their various
// tables) to the post-merge indices reported by find_new_index().
// On failure the pending exception is left for the caller to handle.
void VM_RedefineClasses::set_new_constant_pool(
       ClassLoaderData* loader_data,
       InstanceKlass* scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, TRAPS) {
  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");

  // scratch_cp is a merged constant pool and has enough space for a
  // worst case merge situation. We want to associate the minimum
  // sized constant pool with the klass to save space.
  ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
  constantPoolHandle smaller_cp(THREAD, cp);

  // preserve version() value in the smaller copy
  int version = scratch_cp->version();
  assert(version != 0, "sanity check");
  smaller_cp->set_version(version);

  // attach klass to new constant pool
  // reference to the cp holder is needed for copy_operands()
  smaller_cp->set_pool_holder(scratch_class);

  smaller_cp->copy_fields(scratch_cp());

  // copy entries 1..scratch_cp_length-1 (entry 0 is unused by definition)
  scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
  if (HAS_PENDING_EXCEPTION) {
    // Exception is handled in the caller
    loader_data->add_to_deallocate_list(smaller_cp());
    return;
  }
  scratch_cp = smaller_cp;

  // attach new constant pool to klass
  scratch_class->set_constants(scratch_cp());
  scratch_cp->initialize_unresolved_klasses(loader_data, CHECK);

  int i;  // for portability

  // update each field in klass to use new constant pool indices as needed
  // (find_new_index() returns 0 when an index did not move)
  for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
    jshort cur_index = fs.name_index();
    jshort new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("field-name_index change: %d to %d", cur_index, new_index);
      fs.set_name_index(new_index);
    }
    cur_index = fs.signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("field-signature_index change: %d to %d", cur_index, new_index);
      fs.set_signature_index(new_index);
    }
    cur_index = fs.initval_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("field-initval_index change: %d to %d", cur_index, new_index);
      fs.set_initval_index(new_index);
    }
    cur_index = fs.generic_signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("field-generic_signature change: %d to %d", cur_index, new_index);
      fs.set_generic_signature_index(new_index);
    }
  } // end for each field

  // Update constant pool indices in the inner classes info to use
  // new constant indices as needed. The inner classes info is a
  // quadruple:
  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
  InnerClassesIterator iter(scratch_class);
  for (; !iter.done(); iter.next()) {
    int cur_index = iter.inner_class_info_index();
    if (cur_index == 0) {
      continue;  // JVM spec. allows null inner class refs so skip it
    }
    int new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("inner_class_info change: %d to %d", cur_index, new_index);
      iter.set_inner_class_info_index(new_index);
    }
    cur_index = iter.outer_class_info_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("outer_class_info change: %d to %d", cur_index, new_index);
      iter.set_outer_class_info_index(new_index);
    }
    cur_index = iter.inner_name_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)("inner_name change: %d to %d", cur_index, new_index);
      iter.set_inner_name_index(new_index);
    }
  } // end for each inner class

  // Attach each method in klass to the new constant pool and update
  // to use new constant pool indices as needed:
  Array<Method*>* methods = scratch_class->methods();
  for (i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    method->set_constants(scratch_cp());

    int new_index = find_new_index(method->name_index());
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)
        ("method-name_index change: %d to %d", method->name_index(), new_index);
      method->set_name_index(new_index);
    }
    new_index = find_new_index(method->signature_index());
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)
        ("method-signature_index change: %d to %d", method->signature_index(), new_index);
      method->set_signature_index(new_index);
    }
    new_index = find_new_index(method->generic_signature_index());
    if (new_index != 0) {
      log_trace(redefine, class, constantpool)
        ("method-generic_signature_index change: %d to %d", method->generic_signature_index(), new_index);
      method->set_generic_signature_index(new_index);
    }

    // Update constant pool indices in the method's checked exception
    // table to use new constant indices as needed.
    int cext_length = method->checked_exceptions_length();
    if (cext_length > 0) {
      CheckedExceptionElement * cext_table =
        method->checked_exceptions_start();
      for (int j = 0; j < cext_length; j++) {
        int cur_index = cext_table[j].class_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          log_trace(redefine, class, constantpool)("cext-class_cp_index change: %d to %d", cur_index, new_index);
          cext_table[j].class_cp_index = (u2)new_index;
        }
      } // end for each checked exception table entry
    } // end if there are checked exception table entries

    // Update each catch type index in the method's exception table
    // to use new constant pool indices as needed. The exception table
    // holds quadruple entries of the form:
    //   (beg_bci, end_bci, handler_bci, klass_index)

    ExceptionTable ex_table(method());
    int ext_length = ex_table.length();

    for (int j = 0; j < ext_length; j ++) {
      int cur_index = ex_table.catch_type_index(j);
      int new_index = find_new_index(cur_index);
      if (new_index != 0) {
        log_trace(redefine, class, constantpool)("ext-klass_index change: %d to %d", cur_index, new_index);
        ex_table.set_catch_type_index(j, new_index);
      }
    } // end for each exception table entry

    // Update constant pool indices in the method's local variable
    // table to use new constant indices as needed. The local variable
    // table hold sextuple entries of the form:
    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
    int lvt_length = method->localvariable_table_length();
    if (lvt_length > 0) {
      LocalVariableTableElement * lv_table =
        method->localvariable_table_start();
      for (int j = 0; j < lvt_length; j++) {
        int cur_index = lv_table[j].name_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          log_trace(redefine, class, constantpool)("lvt-name_cp_index change: %d to %d", cur_index, new_index);
          lv_table[j].name_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].descriptor_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          log_trace(redefine, class, constantpool)("lvt-descriptor_cp_index change: %d to %d", cur_index, new_index);
          lv_table[j].descriptor_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].signature_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          log_trace(redefine, class, constantpool)("lvt-signature_cp_index change: %d to %d", cur_index, new_index);
          lv_table[j].signature_cp_index = (u2)new_index;
        }
      } // end for each local variable table entry
    } // end if there are local variable table entries

    // and finally the cpool indices embedded in the StackMapTable attribute
    rewrite_cp_refs_in_stack_map_table(method, THREAD);
  } // end for each method
} // end set_new_constant_pool()
3584
3585
3586 // Unevolving classes may point to methods of the_class directly
3587 // from their constant pool caches, itables, and/or vtables. We
3588 // use the ClassLoaderDataGraph::classes_do() facility and this helper
3589 // to fix up these pointers. MethodData also points to old methods and
3590 // must be cleaned.
3591
3592 // Adjust cpools and vtables closure
void VM_RedefineClasses::AdjustAndCleanMetadata::do_klass(Klass* k) {

  // This is a very busy routine. We don't want too much tracing
  // printed out. The flag is shared by all the adjust calls below so
  // the class name is traced at most once per klass.
  bool trace_name_printed = false;

  // If the class being redefined is java.lang.Object, we need to fix all
  // array class vtables also. The _has_redefined_Object flag is global.
  // Once the java.lang.Object has been redefined (by the current or one
  // of the previous VM_RedefineClasses operations) we have to always
  // adjust method entries for array classes.
  if (k->is_array_klass() && _has_redefined_Object) {
    k->vtable().adjust_method_entries(&trace_name_printed);

  } else if (k->is_instance_klass()) {
    HandleMark hm(_thread);
    InstanceKlass *ik = InstanceKlass::cast(k);

    // Clean MethodData of this class's methods so they don't refer to
    // old methods that are no longer running.
    Array<Method*>* methods = ik->methods();
    int num_methods = methods->length();
    for (int index = 0; index < num_methods; ++index) {
      if (methods->at(index)->method_data() != NULL) {
        methods->at(index)->method_data()->clean_weak_method_links();
      }
    }

    // Adjust all vtables, default methods and itables, to clean out old methods.
    ResourceMark rm(_thread);
    if (ik->vtable_length() > 0) {
      ik->vtable().adjust_method_entries(&trace_name_printed);
      ik->adjust_default_methods(&trace_name_printed);
    }

    if (ik->itable_length() > 0) {
      ik->itable().adjust_method_entries(&trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // old methods. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is being redefined by the current
    // VM_RedefineClasses operation, then new_cp has already been attached
    // to the_class and old_cp has already been added as a previous version.
    // The new_cp doesn't have any cached references to old methods so it
    // doesn't need to be updated and we could optimize by skipping it.
    // However, the current class can be marked as being redefined by another
    // VM_RedefineClasses operation which has already executed its doit_prologue
    // and needs cpcache method entries adjusted. For simplicity, the cpcache
    // update is done unconditionally. It should result in doing nothing for
    // classes being redefined by the current VM_RedefineClasses operation.
    // Method entries in the previous version(s) are adjusted as well.
    ConstantPoolCache* cp_cache;

    // this klass' constant pool cache may need adjustment
    ConstantPool* other_cp = ik->constants();
    cp_cache = other_cp->cache();
    if (cp_cache != NULL) {  // interfaces and some classes have no cpcache
      cp_cache->adjust_method_entries(&trace_name_printed);
    }

    // the previous versions' constant pool caches may need adjustment
    for (InstanceKlass* pv_node = ik->previous_versions();
         pv_node != NULL;
         pv_node = pv_node->previous_versions()) {
      cp_cache = pv_node->constants()->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(&trace_name_printed);
      }
    }
  }
}
3670
update_jmethod_ids(Thread * thread)3671 void VM_RedefineClasses::update_jmethod_ids(Thread* thread) {
3672 for (int j = 0; j < _matching_methods_length; ++j) {
3673 Method* old_method = _matching_old_methods[j];
3674 jmethodID jmid = old_method->find_jmethod_id_or_null();
3675 if (jmid != NULL) {
3676 // There is a jmethodID, change it to point to the new method
3677 methodHandle new_method_h(thread, _matching_new_methods[j]);
3678 Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
3679 assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
3680 "should be replaced");
3681 }
3682 }
3683 }
3684
check_methods_and_mark_as_obsolete()3685 int VM_RedefineClasses::check_methods_and_mark_as_obsolete() {
3686 int emcp_method_count = 0;
3687 int obsolete_count = 0;
3688 int old_index = 0;
3689 for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
3690 Method* old_method = _matching_old_methods[j];
3691 Method* new_method = _matching_new_methods[j];
3692 Method* old_array_method;
3693
3694 // Maintain an old_index into the _old_methods array by skipping
3695 // deleted methods
3696 while ((old_array_method = _old_methods->at(old_index)) != old_method) {
3697 ++old_index;
3698 }
3699
3700 if (MethodComparator::methods_EMCP(old_method, new_method)) {
3701 // The EMCP definition from JSR-163 requires the bytecodes to be
3702 // the same with the exception of constant pool indices which may
3703 // differ. However, the constants referred to by those indices
3704 // must be the same.
3705 //
3706 // We use methods_EMCP() for comparison since constant pool
3707 // merging can remove duplicate constant pool entries that were
3708 // present in the old method and removed from the rewritten new
3709 // method. A faster binary comparison function would consider the
3710 // old and new methods to be different when they are actually
3711 // EMCP.
3712 //
3713 // The old and new methods are EMCP and you would think that we
3714 // could get rid of one of them here and now and save some space.
3715 // However, the concept of EMCP only considers the bytecodes and
3716 // the constant pool entries in the comparison. Other things,
3717 // e.g., the line number table (LNT) or the local variable table
3718 // (LVT) don't count in the comparison. So the new (and EMCP)
3719 // method can have a new LNT that we need so we can't just
3720 // overwrite the new method with the old method.
3721 //
3722 // When this routine is called, we have already attached the new
3723 // methods to the_class so the old methods are effectively
3724 // overwritten. However, if an old method is still executing,
3725 // then the old method cannot be collected until sometime after
3726 // the old method call has returned. So the overwriting of old
3727 // methods by new methods will save us space except for those
3728 // (hopefully few) old methods that are still executing.
3729 //
3730 // A method refers to a ConstMethod* and this presents another
3731 // possible avenue to space savings. The ConstMethod* in the
3732 // new method contains possibly new attributes (LNT, LVT, etc).
3733 // At first glance, it seems possible to save space by replacing
3734 // the ConstMethod* in the old method with the ConstMethod*
3735 // from the new method. The old and new methods would share the
3736 // same ConstMethod* and we would save the space occupied by
3737 // the old ConstMethod*. However, the ConstMethod* contains
3738 // a back reference to the containing method. Sharing the
3739 // ConstMethod* between two methods could lead to confusion in
3740 // the code that uses the back reference. This would lead to
3741 // brittle code that could be broken in non-obvious ways now or
3742 // in the future.
3743 //
3744 // Another possibility is to copy the ConstMethod* from the new
3745 // method to the old method and then overwrite the new method with
3746 // the old method. Since the ConstMethod* contains the bytecodes
3747 // for the method embedded in the oop, this option would change
3748 // the bytecodes out from under any threads executing the old
3749 // method and make the thread's bcp invalid. Since EMCP requires
3750 // that the bytecodes be the same modulo constant pool indices, it
3751 // is straight forward to compute the correct new bcp in the new
3752 // ConstMethod* from the old bcp in the old ConstMethod*. The
3753 // time consuming part would be searching all the frames in all
3754 // of the threads to find all of the calls to the old method.
3755 //
3756 // It looks like we will have to live with the limited savings
3757 // that we get from effectively overwriting the old methods
3758 // when the new methods are attached to the_class.
3759
3760 // Count number of methods that are EMCP. The method will be marked
3761 // old but not obsolete if it is EMCP.
3762 emcp_method_count++;
3763
3764 // An EMCP method is _not_ obsolete. An obsolete method has a
3765 // different jmethodID than the current method. An EMCP method
3766 // has the same jmethodID as the current method. Having the
3767 // same jmethodID for all EMCP versions of a method allows for
3768 // a consistent view of the EMCP methods regardless of which
3769 // EMCP method you happen to have in hand. For example, a
3770 // breakpoint set in one EMCP method will work for all EMCP
3771 // versions of the method including the current one.
3772 } else {
3773 // mark obsolete methods as such
3774 old_method->set_is_obsolete();
3775 obsolete_count++;
3776
3777 // obsolete methods need a unique idnum so they become new entries in
3778 // the jmethodID cache in InstanceKlass
3779 assert(old_method->method_idnum() == new_method->method_idnum(), "must match");
3780 u2 num = InstanceKlass::cast(_the_class)->next_method_idnum();
3781 if (num != ConstMethod::UNSET_IDNUM) {
3782 old_method->set_method_idnum(num);
3783 }
3784
3785 // With tracing we try not to "yack" too much. The position of
3786 // this trace assumes there are fewer obsolete methods than
3787 // EMCP methods.
3788 if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3789 ResourceMark rm;
3790 log_trace(redefine, class, obsolete, mark)
3791 ("mark %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3792 }
3793 }
3794 old_method->set_is_old();
3795 }
3796 for (int i = 0; i < _deleted_methods_length; ++i) {
3797 Method* old_method = _deleted_methods[i];
3798
3799 assert(!old_method->has_vtable_index(),
3800 "cannot delete methods with vtable entries");;
3801
3802 // Mark all deleted methods as old, obsolete and deleted
3803 old_method->set_is_deleted();
3804 old_method->set_is_old();
3805 old_method->set_is_obsolete();
3806 ++obsolete_count;
3807 // With tracing we try not to "yack" too much. The position of
3808 // this trace assumes there are fewer obsolete methods than
3809 // EMCP methods.
3810 if (log_is_enabled(Trace, redefine, class, obsolete, mark)) {
3811 ResourceMark rm;
3812 log_trace(redefine, class, obsolete, mark)
3813 ("mark deleted %s(%s) as obsolete", old_method->name()->as_C_string(), old_method->signature()->as_C_string());
3814 }
3815 }
3816 assert((emcp_method_count + obsolete_count) == _old_methods->length(),
3817 "sanity check");
3818 log_trace(redefine, class, obsolete, mark)("EMCP_cnt=%d, obsolete_cnt=%d", emcp_method_count, obsolete_count);
3819 return emcp_method_count;
3820 }
3821
3822 // This internal class transfers the native function registration from old methods
3823 // to new methods. It is designed to handle both the simple case of unchanged
3824 // native methods and the complex cases of native method prefixes being added and/or
3825 // removed.
3826 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
3827 //
3828 // This class is used after the new methods have been installed in "the_class".
3829 //
3830 // So, for example, the following must be handled. Where 'm' is a method and
3831 // a number followed by an underscore is a prefix.
3832 //
3833 // Old Name New Name
3834 // Simple transfer to new method m -> m
3835 // Add prefix m -> 1_m
3836 // Remove prefix 1_m -> m
3837 // Simultaneous add of prefixes m -> 3_2_1_m
3838 // Simultaneous removal of prefixes 3_2_1_m -> m
3839 // Simultaneous add and remove 1_m -> 2_m
3840 // Same, caused by prefix removal only 3_2_1_m -> 3_2_m
3841 //
class TransferNativeFunctionRegistration {
 private:
  InstanceKlass* the_class;   // class whose methods are being transferred to
  int prefix_count;           // number of agent-registered native method prefixes
  char** prefixes;            // the prefixes, ordered by agent registration

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilient to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //   (1) without the prefix.
  //   (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the matching native method in the_class, or NULL if none found.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                   Symbol* signature) {
    // probe only: do not create a symbol for a name that may not exist
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // resource-allocated scratch buffer: prefix + name + NUL
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // The returned pointer aliases the method's name symbol text.
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    // Prefixes are applied in order, so strip from the outermost inward.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;  // advance past the matched prefix
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  TransferNativeFunctionRegistration(InstanceKlass* _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      // only methods with a registered native function need transferring
      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
            !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
3947
3948 // Don't lose the association between a native method and its JNI function.
transfer_old_native_function_registrations(InstanceKlass * the_class)3949 void VM_RedefineClasses::transfer_old_native_function_registrations(InstanceKlass* the_class) {
3950 TransferNativeFunctionRegistration transfer(the_class);
3951 transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
3952 transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
3953 }
3954
3955 // Deoptimize all compiled code that depends on this class.
3956 //
3957 // If the can_redefine_classes capability is obtained in the onload
3958 // phase then the compiler has recorded all dependencies from startup.
3959 // In that case we need only deoptimize and throw away all compiled code
3960 // that depends on the class.
3961 //
3962 // If can_redefine_classes is obtained sometime after the onload
3963 // phase then the dependency information may be incomplete. In that case
3964 // the first call to RedefineClasses causes all compiled code to be
3965 // thrown away. As can_redefine_classes has been obtained then
3966 // all future compilations will record dependencies so second and
3967 // subsequent calls to RedefineClasses need only throw away code
3968 // that depends on the class.
3969 //
3970
3971 // First step is to walk the code cache for each class redefined and mark
3972 // dependent methods. Wait until all classes are processed to deoptimize everything.
mark_dependent_code(InstanceKlass * ik)3973 void VM_RedefineClasses::mark_dependent_code(InstanceKlass* ik) {
3974 assert_locked_or_safepoint(Compile_lock);
3975
3976 // All dependencies have been recorded from startup or this is a second or
3977 // subsequent use of RedefineClasses
3978 if (JvmtiExport::all_dependencies_are_recorded()) {
3979 CodeCache::mark_for_evol_deoptimization(ik);
3980 }
3981 }
3982
flush_dependent_code()3983 void VM_RedefineClasses::flush_dependent_code() {
3984 assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
3985
3986 bool deopt_needed;
3987
3988 // This is the first redefinition, mark all the nmethods for deoptimization
3989 if (!JvmtiExport::all_dependencies_are_recorded()) {
3990 log_debug(redefine, class, nmethod)("Marked all nmethods for deopt");
3991 CodeCache::mark_all_nmethods_for_evol_deoptimization();
3992 deopt_needed = true;
3993 } else {
3994 int deopt = CodeCache::mark_dependents_for_evol_deoptimization();
3995 log_debug(redefine, class, nmethod)("Marked %d dependent nmethods for deopt", deopt);
3996 deopt_needed = (deopt != 0);
3997 }
3998
3999 if (deopt_needed) {
4000 CodeCache::flush_evol_dependents();
4001 }
4002
4003 // From now on we know that the dependency information is complete
4004 JvmtiExport::set_all_dependencies_are_recorded(true);
4005 }
4006
// Partition _old_methods and _new_methods into three categories:
//   - _matching_old/new_methods: same name and signature in both versions
//   - _deleted_methods: present only in the old version
//   - _added_methods:   present only in the new version
// Both input arrays are walked with a merge-style two-cursor scan ordered
// by Symbol::fast_compare on the method names; overloads added by the
// redefinition have already been moved to the end (see comment below).
// The output arrays are resource-allocated, so callers need a ResourceMark.
void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  Method* old_method;
  Method* new_method;

  // Worst-case sizing: every old method could match or be deleted,
  // every new method could be added.
  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  _added_methods = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  _deleted_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());

  _matching_methods_length = 0;
  _deleted_methods_length = 0;
  _added_methods_length = 0;

  int nj = 0;  // cursor into _new_methods
  int oj = 0;  // cursor into _old_methods
  while (true) {
    if (oj >= _old_methods->length()) {
      if (nj >= _new_methods->length()) {
        break; // we've looked at everything, done
      }
      // New method at the end
      new_method = _new_methods->at(nj);
      _added_methods[_added_methods_length++] = new_method;
      ++nj;
    } else if (nj >= _new_methods->length()) {
      // Old method, at the end, is deleted
      old_method = _old_methods->at(oj);
      _deleted_methods[_deleted_methods_length++] = old_method;
      ++oj;
    } else {
      old_method = _old_methods->at(oj);
      new_method = _new_methods->at(nj);
      if (old_method->name() == new_method->name()) {
        if (old_method->signature() == new_method->signature()) {
          _matching_old_methods[_matching_methods_length ] = old_method;
          _matching_new_methods[_matching_methods_length++] = new_method;
          ++nj;
          ++oj;
        } else {
          // added overloaded have already been moved to the end,
          // so this is a deleted overloaded method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      } else { // names don't match
        if (old_method->name()->fast_compare(new_method->name()) > 0) {
          // new method
          _added_methods[_added_methods_length++] = new_method;
          ++nj;
        } else {
          // deleted method
          _deleted_methods[_deleted_methods_length++] = old_method;
          ++oj;
        }
      }
    }
  }
  // Every old method is either matching or deleted; every new method is
  // either matching or added.
  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
}
4067
4068
swap_annotations(InstanceKlass * the_class,InstanceKlass * scratch_class)4069 void VM_RedefineClasses::swap_annotations(InstanceKlass* the_class,
4070 InstanceKlass* scratch_class) {
4071 // Swap annotation fields values
4072 Annotations* old_annotations = the_class->annotations();
4073 the_class->set_annotations(scratch_class->annotations());
4074 scratch_class->set_annotations(old_annotations);
4075 }
4076
4077
// Install the redefinition of a class:
//    - house keeping (flushing breakpoints and caches, deoptimizing
//      dependent compiled code)
//    - replacing parts in the_class with parts from scratch_class
//    - adding a weak reference to track the obsolete but interesting
//      parts of the_class
//    - adjusting constant pool caches and vtables in other classes
//      that refer to methods in the_class. These adjustments use the
//      ClassLoaderDataGraph::classes_do() facility which only allows
//      a helper method to be specified. The interesting parameters
//      that we would like to pass to the helper method are saved in
//      static global fields in the VM operation.
// NOTE(review): callees such as clearall_in_class_at_safepoint() expect
// this to run inside a safepoint VM operation.
void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
                                               InstanceKlass* scratch_class, TRAPS) {

  HandleMark hm(THREAD);   // make sure handles from this call are freed

  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase1.start();
  }

  InstanceKlass* the_class = get_ik(the_jclass);

  // Set a flag to control and optimize adjusting method entries
  _has_redefined_Object |= the_class == SystemDictionary::Object_klass();

  // Remove all breakpoints in methods of this class
  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class);

  // Mark all compiled code that depends on this class
  mark_dependent_code(the_class);

  // Partition methods into matching/deleted/added sets; the results drive
  // jmethodID updates and the obsolete-method marking below.
  _old_methods = the_class->methods();
  _new_methods = scratch_class->methods();
  _the_class = the_class;
  compute_added_deleted_matching_methods();
  update_jmethod_ids(THREAD);

  _any_class_has_resolved_methods = the_class->has_resolved_methods() || _any_class_has_resolved_methods;

  // Attach new constant pool to the original klass. The original
  // klass still refers to the old constant pool (for now).
  scratch_class->constants()->set_pool_holder(the_class);

#if 0
  // In theory, with constant pool merging in place we should be able
  // to save space by using the new, merged constant pool in place of
  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  // the klass version we are replacing now and any constant pool(s) in
  // previous versions of klass. Nice theory, doesn't work in practice.
  // When this code is enabled, even simple programs throw NullPointer
  // exceptions. I'm guessing that this is caused by some constant pool
  // cache difference between the new, merged constant pool and the
  // constant pool that was just being used by the klass. I'm keeping
  // this code around to archive the idea, but the code has to remain
  // disabled for now.

  // Attach each old method to the new constant pool. This can be
  // done here since we are past the bytecode verification and
  // constant pool optimization phases.
  for (int i = _old_methods->length() - 1; i >= 0; i--) {
    Method* method = _old_methods->at(i);
    method->set_constants(scratch_class->constants());
  }

  // NOTE: this doesn't work because you can redefine the same class in two
  // threads, each getting their own constant pool data appended to the
  // original constant pool. In order for the new methods to work when they
  // become old methods, they need to keep their updated copy of the constant pool.

  {
    // walk all previous versions of the klass
    InstanceKlass *ik = the_class;
    PreviousVersionWalker pvw(ik);
    do {
      ik = pvw.next_previous_version();
      if (ik != NULL) {

        // attach previous version of klass to the new constant pool
        ik->set_constants(scratch_class->constants());

        // Attach each method in the previous version of klass to the
        // new constant pool
        Array<Method*>* prev_methods = ik->methods();
        for (int i = prev_methods->length() - 1; i >= 0; i--) {
          Method* method = prev_methods->at(i);
          method->set_constants(scratch_class->constants());
        }
      }
    } while (ik != NULL);
  }
#endif

  // Replace methods and constantpool
  the_class->set_methods(_new_methods);
  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
                                                // and to be able to undo operation easily.

  Array<int>* old_ordering = the_class->method_ordering();
  the_class->set_method_ordering(scratch_class->method_ordering());
  scratch_class->set_method_ordering(old_ordering);

  ConstantPool* old_constants = the_class->constants();
  the_class->set_constants(scratch_class->constants());
  scratch_class->set_constants(old_constants);  // See the previous comment.
#if 0
  // We are swapping the guts of "the new class" with the guts of "the
  // class". Since the old constant pool has just been attached to "the
  // new class", it seems logical to set the pool holder in the old
  // constant pool also. However, doing this will change the observable
  // class hierarchy for any old methods that are still executing. A
  // method can query the identity of its "holder" and this query uses
  // the method's constant pool link to find the holder. The change in
  // holding class from "the class" to "the new class" can confuse
  // things.
  //
  // Setting the old constant pool's holder will also cause
  // verification done during vtable initialization below to fail.
  // During vtable initialization, the vtable's class is verified to be
  // a subtype of the method's holder. The vtable's class is "the
  // class" and the method's holder is gotten from the constant pool
  // link in the method itself. For "the class"'s directly implemented
  // methods, the method holder is "the class" itself (as gotten from
  // the new constant pool). The check works fine in this case. The
  // check also works fine for methods inherited from super classes.
  //
  // Miranda methods are a little more complicated. A miranda method is
  // provided by an interface when the class implementing the interface
  // does not provide its own method. These interfaces are implemented
  // internally as an InstanceKlass. These special instanceKlasses
  // share the constant pool of the class that "implements" the
  // interface. By sharing the constant pool, the method holder of a
  // miranda method is the class that "implements" the interface. In a
  // non-redefine situation, the subtype check works fine. However, if
  // the old constant pool's pool holder is modified, then the check
  // fails because there is no class hierarchy relationship between the
  // vtable's class and "the new class".

  old_constants->set_pool_holder(scratch_class());
#endif

  // track number of methods that are EMCP for add_previous_version() call below
  int emcp_method_count = check_methods_and_mark_as_obsolete();
  transfer_old_native_function_registrations(the_class);

  // The class file bytes from before any retransformable agents mucked
  // with them was cached on the scratch class, move to the_class.
  // Note: we still want to do this if nothing needed caching since it
  // should get cleared in the_class too.
  if (the_class->get_cached_class_file() == 0) {
    // the_class doesn't have a cache yet so copy it
    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  }
  else if (scratch_class->get_cached_class_file() !=
           the_class->get_cached_class_file()) {
    // The same class can be present twice in the scratch classes list or there
    // are multiple concurrent RetransformClasses calls on different threads.
    // In such cases we have to deallocate scratch_class cached_class_file.
    os::free(scratch_class->get_cached_class_file());
  }

  // NULL out in scratch class to not delete twice. The class to be redefined
  // always owns these bytes.
  scratch_class->set_cached_class_file(NULL);

  // Replace inner_classes
  Array<u2>* old_inner_classes = the_class->inner_classes();
  the_class->set_inner_classes(scratch_class->inner_classes());
  scratch_class->set_inner_classes(old_inner_classes);

  // Initialize the vtable and interface table after
  // methods have been rewritten
  // no exception should happen here since we explicitly
  // do not check loader constraints.
  // compare_and_normalize_class_versions has already checked:
  //  - classloaders unchanged, signatures unchanged
  //  - all instanceKlasses for redefined classes reused & contents updated
  the_class->vtable().initialize_vtable(false, THREAD);
  the_class->itable().initialize_itable(false, THREAD);
  // The only exception tolerated here is ThreadDeath from a stopped thread.
  assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");

  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name_index(
    scratch_class->source_file_name_index());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(
    scratch_class->source_debug_extension(),
    scratch_class->source_debug_extension() == NULL ? 0 :
    (int)strlen(scratch_class->source_debug_extension()));

  // Use of javac -g could be different in the old and the new
  if (scratch_class->access_flags().has_localvariable_table() !=
      the_class->access_flags().has_localvariable_table()) {

    AccessFlags flags = the_class->access_flags();
    if (scratch_class->access_flags().has_localvariable_table()) {
      flags.set_has_localvariable_table();
    } else {
      flags.clear_has_localvariable_table();
    }
    the_class->set_access_flags(flags);
  }

  swap_annotations(the_class, scratch_class);

  // Replace CP indexes for class and name+type of enclosing method
  u2 old_class_idx = the_class->enclosing_method_class_index();
  u2 old_method_idx = the_class->enclosing_method_method_index();
  the_class->set_enclosing_method_indices(
    scratch_class->enclosing_method_class_index(),
    scratch_class->enclosing_method_method_index());
  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);

  // Replace fingerprint data
  the_class->set_has_passed_fingerprint_check(scratch_class->has_passed_fingerprint_check());
  the_class->store_fingerprint(scratch_class->get_stored_fingerprint());

  the_class->set_has_been_redefined();

  if (!the_class->should_be_initialized()) {
    // Class was already initialized, so AOT has only seen the original version.
    // We need to let AOT look at it again.
    AOTLoader::load_for_klass(the_class, THREAD);
  }

  // keep track of previous versions of this class
  the_class->add_previous_version(scratch_class, emcp_method_count);

  _timer_rsc_phase1.stop();
  if (log_is_enabled(Info, redefine, class, timer)) {
    _timer_rsc_phase2.start();
  }

  if (the_class->oop_map_cache() != NULL) {
    // Flush references to any obsolete methods from the oop map cache
    // so that obsolete methods are not pinned.
    the_class->oop_map_cache()->flush_obsolete_entries();
  }

  increment_class_counter((InstanceKlass *)the_class, THREAD);

  // Post a JFR event recording this redefinition, if the event is enabled.
  if (EventClassRedefinition::is_enabled()) {
    EventClassRedefinition event;
    event.set_classModificationCount(java_lang_Class::classRedefinedCount(the_class->java_mirror()));
    event.set_redefinedClass(the_class);
    event.set_redefinitionId(_id);
    event.commit();
  }

  {
    ResourceMark rm(THREAD);
    // increment the classRedefinedCount field in the_class and in any
    // direct and indirect subclasses of the_class
    log_info(redefine, class, load)
      ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
       the_class->external_name(), java_lang_Class::classRedefinedCount(the_class->java_mirror()), os::available_memory() >> 10);
    Events::log_redefinition(THREAD, "redefined class name=%s, count=%d",
                             the_class->external_name(),
                             java_lang_Class::classRedefinedCount(the_class->java_mirror()));

  }
  _timer_rsc_phase2.stop();

} // end redefine_single_class()
4346
4347
4348 // Increment the classRedefinedCount field in the specific InstanceKlass
4349 // and in all direct and indirect subclasses.
increment_class_counter(InstanceKlass * ik,TRAPS)4350 void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
4351 oop class_mirror = ik->java_mirror();
4352 Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
4353 int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
4354 java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
4355
4356 if (class_oop != _the_class) {
4357 // _the_class count is printed at end of redefine_single_class()
4358 log_debug(redefine, class, subclass)("updated count in subclass=%s to %d", ik->external_name(), new_count);
4359 }
4360
4361 for (Klass *subk = ik->subklass(); subk != NULL;
4362 subk = subk->next_sibling()) {
4363 if (subk->is_instance_klass()) {
4364 // Only update instanceKlasses
4365 InstanceKlass *subik = InstanceKlass::cast(subk);
4366 // recursively do subclasses of the current subclass
4367 increment_class_counter(subik, THREAD);
4368 }
4369 }
4370 }
4371
do_klass(Klass * k)4372 void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
4373 bool no_old_methods = true; // be optimistic
4374
4375 // Both array and instance classes have vtables.
4376 // a vtable should never contain old or obsolete methods
4377 ResourceMark rm(_thread);
4378 if (k->vtable_length() > 0 &&
4379 !k->vtable().check_no_old_or_obsolete_entries()) {
4380 if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4381 log_trace(redefine, class, obsolete, metadata)
4382 ("klassVtable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4383 k->signature_name());
4384 k->vtable().dump_vtable();
4385 }
4386 no_old_methods = false;
4387 }
4388
4389 if (k->is_instance_klass()) {
4390 HandleMark hm(_thread);
4391 InstanceKlass *ik = InstanceKlass::cast(k);
4392
4393 // an itable should never contain old or obsolete methods
4394 if (ik->itable_length() > 0 &&
4395 !ik->itable().check_no_old_or_obsolete_entries()) {
4396 if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4397 log_trace(redefine, class, obsolete, metadata)
4398 ("klassItable::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4399 ik->signature_name());
4400 ik->itable().dump_itable();
4401 }
4402 no_old_methods = false;
4403 }
4404
4405 // the constant pool cache should never contain non-deleted old or obsolete methods
4406 if (ik->constants() != NULL &&
4407 ik->constants()->cache() != NULL &&
4408 !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
4409 if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4410 log_trace(redefine, class, obsolete, metadata)
4411 ("cp-cache::check_no_old_or_obsolete_entries failure -- OLD or OBSOLETE method found -- class: %s",
4412 ik->signature_name());
4413 ik->constants()->cache()->dump_cache();
4414 }
4415 no_old_methods = false;
4416 }
4417 }
4418
4419 // print and fail guarantee if old methods are found.
4420 if (!no_old_methods) {
4421 if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
4422 dump_methods();
4423 } else {
4424 log_trace(redefine, class)("Use the '-Xlog:redefine+class*:' option "
4425 "to see more info about the following guarantee() failure.");
4426 }
4427 guarantee(false, "OLD and/or OBSOLETE method(s) found");
4428 }
4429 }
4430
next_id()4431 u8 VM_RedefineClasses::next_id() {
4432 while (true) {
4433 u8 id = _id_counter;
4434 u8 next_id = id + 1;
4435 u8 result = Atomic::cmpxchg(&_id_counter, id, next_id);
4436 if (result == id) {
4437 return next_id;
4438 }
4439 }
4440 }
4441
dump_methods()4442 void VM_RedefineClasses::dump_methods() {
4443 int j;
4444 log_trace(redefine, class, dump)("_old_methods --");
4445 for (j = 0; j < _old_methods->length(); ++j) {
4446 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4447 Method* m = _old_methods->at(j);
4448 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4449 m->access_flags().print_on(&log_stream);
4450 log_stream.print(" -- ");
4451 m->print_name(&log_stream);
4452 log_stream.cr();
4453 }
4454 log_trace(redefine, class, dump)("_new_methods --");
4455 for (j = 0; j < _new_methods->length(); ++j) {
4456 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4457 Method* m = _new_methods->at(j);
4458 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4459 m->access_flags().print_on(&log_stream);
4460 log_stream.print(" -- ");
4461 m->print_name(&log_stream);
4462 log_stream.cr();
4463 }
4464 log_trace(redefine, class, dump)("_matching_methods --");
4465 for (j = 0; j < _matching_methods_length; ++j) {
4466 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4467 Method* m = _matching_old_methods[j];
4468 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4469 m->access_flags().print_on(&log_stream);
4470 log_stream.print(" -- ");
4471 m->print_name();
4472 log_stream.cr();
4473
4474 m = _matching_new_methods[j];
4475 log_stream.print(" (%5d) ", m->vtable_index());
4476 m->access_flags().print_on(&log_stream);
4477 log_stream.cr();
4478 }
4479 log_trace(redefine, class, dump)("_deleted_methods --");
4480 for (j = 0; j < _deleted_methods_length; ++j) {
4481 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4482 Method* m = _deleted_methods[j];
4483 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4484 m->access_flags().print_on(&log_stream);
4485 log_stream.print(" -- ");
4486 m->print_name(&log_stream);
4487 log_stream.cr();
4488 }
4489 log_trace(redefine, class, dump)("_added_methods --");
4490 for (j = 0; j < _added_methods_length; ++j) {
4491 LogStreamHandle(Trace, redefine, class, dump) log_stream;
4492 Method* m = _added_methods[j];
4493 log_stream.print("%4d (%5d) ", j, m->vtable_index());
4494 m->access_flags().print_on(&log_stream);
4495 log_stream.print(" -- ");
4496 m->print_name(&log_stream);
4497 log_stream.cr();
4498 }
4499 }
4500
print_on_error(outputStream * st) const4501 void VM_RedefineClasses::print_on_error(outputStream* st) const {
4502 VM_Operation::print_on_error(st);
4503 if (_the_class != NULL) {
4504 ResourceMark rm;
4505 st->print_cr(", redefining class %s", _the_class->external_name());
4506 }
4507 }
4508