1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <functional>
20 #include <optional>
21 #include <sstream>
22
23 #include <android-base/logging.h>
24
25 #include "arch/context.h"
26 #include "art_field-inl.h"
27 #include "art_method-inl.h"
28 #include "base/atomic.h"
29 #include "base/callee_save_type.h"
30 #include "class_linker.h"
31 #include "debugger.h"
32 #include "dex/dex_file-inl.h"
33 #include "dex/dex_file_types.h"
34 #include "dex/dex_instruction-inl.h"
35 #include "entrypoints/quick/quick_alloc_entrypoints.h"
36 #include "entrypoints/quick/quick_entrypoints.h"
37 #include "entrypoints/runtime_asm_entrypoints.h"
38 #include "gc_root-inl.h"
39 #include "interpreter/interpreter.h"
40 #include "interpreter/interpreter_common.h"
41 #include "jit/jit.h"
42 #include "jit/jit_code_cache.h"
43 #include "jvalue-inl.h"
44 #include "jvalue.h"
45 #include "mirror/class-inl.h"
46 #include "mirror/dex_cache.h"
47 #include "mirror/object-inl.h"
48 #include "mirror/object_array-inl.h"
49 #include "nth_caller_visitor.h"
50 #include "oat_quick_method_header.h"
51 #include "runtime-inl.h"
52 #include "thread.h"
53 #include "thread_list.h"
54
55 namespace art {
56 namespace instrumentation {
57
// When true, the install/restore/pop paths below log each frame they touch.
constexpr bool kVerboseInstrumentation = false;
59
MethodExited(Thread * thread,Handle<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,OptionalFrame frame,MutableHandle<mirror::Object> & return_value)60 void InstrumentationListener::MethodExited(
61 Thread* thread,
62 Handle<mirror::Object> this_object,
63 ArtMethod* method,
64 uint32_t dex_pc,
65 OptionalFrame frame,
66 MutableHandle<mirror::Object>& return_value) {
67 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
68 Primitive::kPrimNot);
69 const void* original_ret = return_value.Get();
70 JValue v;
71 v.SetL(return_value.Get());
72 MethodExited(thread, this_object, method, dex_pc, frame, v);
73 DCHECK(original_ret == v.GetL()) << "Return value changed";
74 }
75
FieldWritten(Thread * thread,Handle<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field,Handle<mirror::Object> field_value)76 void InstrumentationListener::FieldWritten(Thread* thread,
77 Handle<mirror::Object> this_object,
78 ArtMethod* method,
79 uint32_t dex_pc,
80 ArtField* field,
81 Handle<mirror::Object> field_value) {
82 DCHECK(!field->IsPrimitiveType());
83 JValue v;
84 v.SetL(field_value.Get());
85 FieldWritten(thread, this_object, method, dex_pc, field, v);
86 }
87
// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames. Inlined frames are therefore skipped when walking
// the stack (see InstallStackVisitor / RestoreStackVisitor below).
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;
92
// ClassVisitor that (re)installs the appropriate entrypoint stubs for every
// method of each class it is handed; used when (un)installing instrumentation
// across all loaded classes.
class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};
106
// Scoped helper that pops instrumentation frames: PopFramesTo() records how
// far to pop (pop_until_), and the destructor erases those frames from the
// thread's instrumentation stack.
InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
    : self_(self),
      instrumentation_(Runtime::Current()->GetInstrumentation()),
      pop_until_(0u) {}
111
~InstrumentationStackPopper()112 InstrumentationStackPopper::~InstrumentationStackPopper() {
113 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
114 self_->GetInstrumentationStack();
115 for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
116 i = stack->erase(i);
117 }
118 }
119
// Marks instrumentation frames up to `stack_pointer` for popping, reporting a
// method-unwind event for each. Returns true if all frames up to
// `stack_pointer` were processed; returns false if an unwind listener threw a
// new exception, in which case `exception` is updated to the new one and only
// frames up to (and including) the one whose listener threw are popped.
bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  DCHECK(!self_->IsExceptionPending());
  if (!instrumentation_->HasMethodUnwindListeners()) {
    // Nobody to notify: just record how far the destructor should pop.
    pop_until_ = stack_pointer;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  // Resume after whatever was already marked for popping by an earlier call.
  auto i = stack->upper_bound(pop_until_);

  // Now pop all frames until reaching stack_pointer, or a new exception is
  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
  // (in fact the exception handling code passes the start of the frame where
  // the catch handler is).
  for (; i != stack->end() && i->first <= stack_pointer; i++) {
    const InstrumentationStackFrame& frame = i->second;
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      // A listener may have thrown a new exception; detect that by comparing
      // the thread's pending exception against the one we installed above.
      new_exception_thrown = self_->GetException() != exception.Get();
      if (new_exception_thrown) {
        // Stop here: only frames up to (and including) this one get popped.
        pop_until_ = i->first;
        break;
      }
    }
  }
  if (!new_exception_thrown) {
    pop_until_ = stack_pointer;
  }
  // Hand the (possibly new) exception back to the caller and clear it from the
  // thread; it was only set for the benefit of the listeners.
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}
169
// Initializes instrumentation in its fully-disabled default state: no stubs
// installed, no listeners registered, deoptimization off, and allocation
// entrypoints uninstrumented. (Member order must match the declaration order
// in instrumentation.h.)
Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false),
      can_use_instrumentation_trampolines_(true) {
}
195
InstallStubsForClass(ObjPtr<mirror::Class> klass)196 void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
197 if (!klass->IsResolved()) {
198 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
199 // could not be initialized or linked with regards to class inheritance.
200 } else if (klass->IsErroneousResolved()) {
201 // We can't execute code in a erroneous class: do nothing.
202 } else {
203 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
204 InstallStubsForMethod(&method);
205 }
206 }
207 }
208
UpdateEntrypoints(ArtMethod * method,const void * quick_code)209 static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
210 REQUIRES_SHARED(Locks::mutator_lock_) {
211 if (kIsDebugBuild) {
212 jit::Jit* jit = Runtime::Current()->GetJit();
213 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
214 // Ensure we always have the thumb entrypoint for JIT on arm32.
215 if (kRuntimeISA == InstructionSet::kArm) {
216 CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
217 }
218 }
219 }
220 method->SetEntryPointFromQuickCompiledCode(quick_code);
221 }
222
NeedDebugVersionFor(ArtMethod * method) const223 bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
224 REQUIRES_SHARED(Locks::mutator_lock_) {
225 art::Runtime* runtime = Runtime::Current();
226 // If anything says we need the debug version or we are debuggable we will need the debug version
227 // of the method.
228 return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
229 runtime->IsJavaDebuggable()) &&
230 !method->IsNative() &&
231 !method->IsProxyMethod();
232 }
233
// Chooses and installs the correct quick-code entrypoint for `method`, based
// on the current instrumentation flags (entry/exit stubs, interpreter stubs,
// forced interpretation, per-method deoptimization).
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO We should remove the need for this since it means we cannot always correctly detect calls
  // to Proxy.<init>
  // Annoyingly this can be called before we have actually initialized WellKnownClasses so therefore
  // we also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
      UNLIKELY(method->IsConstructor() &&
               method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
    return;
  }
  const void* new_quick_code;
  // "Uninstall" means no instrumentation stubs of any kind are wanted.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = GetCodeForInvoke(method);
    } else {
      // Static method of an uninitialized class: keep the resolution stub so
      // class initialization still happens on first call.
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          // This needs to be checked first since the instrumentation entrypoint will be able to
          // find the actual JIT compiled code that corresponds to this method.
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else if (NeedDebugVersionFor(method)) {
          // It would be great to search the JIT for its implementation here but we cannot due to
          // the locks we hold. Instead just set to the interpreter bridge and that code will search
          // the JIT when it gets called and replace the entrypoint then.
          new_quick_code = GetQuickToInterpreterBridge();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
293
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  // Walks `thread`'s stack, hijacking each quick frame's return PC with the
  // instrumentation exit stub and recording the original PC in the thread's
  // instrumentation stack, keyed by the address of the return-PC slot.
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          last_return_pc_(0),
          force_deopt_id_(force_deopt_id) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        // Shadow (interpreter) frame: no return PC to hijack. Record it in
        // shadow_stack_ so method-enter events can still be generated below.
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject().Ptr(),
                                                        m,
                                                        /*return_pc=*/ 0,
                                                        GetFrameId(),
                                                        interpreter_frame,
                                                        force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // This frame was already hijacked by a previous install pass; its
        // original return PC lives in the existing instrumentation frame.
        auto it = instrumentation_stack_->find(GetReturnPcAddr());
        CHECK(it != instrumentation_stack_->end());
        const InstrumentationStackFrame& frame = it->second;
        if (m->IsRuntimeMethod()) {
          if (frame.interpreter_entry_) {
            // This instrumentation frame is for an interpreter bridge and is
            // pushed when executing the instrumented interpreter bridge. So method
            // enter event must have been reported. However we need to push a DEX pc
            // into the dex_pcs_ list to match size of instrumentation stack.
            uint32_t dex_pc = dex::kDexNoIndex;
            dex_pcs_.push_back(dex_pc);
            last_return_pc_ = frame.return_pc_;
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or be interpreter on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
            << "Expected " << ArtMethod::PrettyMethod(m)
            << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          uint32_t dex_pc = dex::kDexNoIndex;
          if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
            dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(
                GetCurrentQuickFrame(), last_return_pc_);
          }
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc
                     << " dex pc: " << dex_pc;
          UNREACHABLE();
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            GetFrameId(),    // A runtime method still gets a frame id.
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Record the original return PC, then hijack the frame.
        instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record the caller's dex pc (derived from the previous frame's return
      // PC) so method-enter events below can report a plausible location.
      uint32_t dex_pc = dex::kDexNoIndex;
      if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
        dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(GetCurrentQuickFrame(), last_return_pc_);
      }
      dex_pcs_.push_back(dex_pc);
      last_return_pc_ = return_pc;
      return true;  // Continue.
    }
    std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    uintptr_t last_return_pc_;
    uint64_t force_deopt_id_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(
      thread, context.get(), instrumentation_exit_pc, instrumentation->current_force_deopt_id_);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // TODO: This is the only place we make use of frame_id_. We should create a
    // std::vector instead and populate it as we walk the stack.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      // Interleave shadow-frame enter events in frame-id order with the quick
      // frames recorded in the instrumentation stack.
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < isi->second.frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->second.interpreter_entry_ && !isi->second.method_->IsRuntimeMethod()) {
        instrumentation->MethodEnterEvent(
            thread, isi->second.this_object_, isi->second.method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
453
// Installs instrumentation exit stubs on every quick frame of `thread`'s
// stack. Sets the global "stubs installed" flag first so the walk observes a
// consistent state.
void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}
458
459 // Removes the instrumentation exit pc as the return PC for every quick frame.
InstrumentationRestoreStack(Thread * thread,void * arg)460 static void InstrumentationRestoreStack(Thread* thread, void* arg)
461 REQUIRES(Locks::mutator_lock_) {
462 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
463
464 struct RestoreStackVisitor final : public StackVisitor {
465 RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
466 Instrumentation* instrumentation)
467 : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
468 thread_(thread_in),
469 instrumentation_exit_pc_(instrumentation_exit_pc),
470 instrumentation_(instrumentation),
471 instrumentation_stack_(thread_in->GetInstrumentationStack()),
472 frames_removed_(0) {}
473
474 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
475 if (instrumentation_stack_->size() == 0) {
476 return false; // Stop.
477 }
478 ArtMethod* m = GetMethod();
479 if (GetCurrentQuickFrame() == nullptr) {
480 if (kVerboseInstrumentation) {
481 LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
482 << " Method=" << ArtMethod::PrettyMethod(m);
483 }
484 return true; // Ignore shadow frames.
485 }
486 if (m == nullptr) {
487 if (kVerboseInstrumentation) {
488 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
489 }
490 return true; // Ignore upcalls.
491 }
492 auto it = instrumentation_stack_->find(GetReturnPcAddr());
493 if (it != instrumentation_stack_->end()) {
494 const InstrumentationStackFrame& instrumentation_frame = it->second;
495 if (kVerboseInstrumentation) {
496 LOG(INFO) << " Removing exit stub in " << DescribeLocation();
497 }
498 if (instrumentation_frame.interpreter_entry_) {
499 CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
500 } else {
501 CHECK_EQ(m->GetNonObsoleteMethod(),
502 instrumentation_frame.method_->GetNonObsoleteMethod())
503 << ArtMethod::PrettyMethod(m)
504 << " and " << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
505 }
506 SetReturnPc(instrumentation_frame.return_pc_);
507 if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
508 !m->IsRuntimeMethod()) {
509 // Create the method exit events. As the methods didn't really exit the result is 0.
510 // We only do this if no debugger is attached to prevent from posting events twice.
511 JValue val;
512 instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
513 GetDexPc(), OptionalFrame{}, val);
514 }
515 frames_removed_++;
516 } else {
517 if (kVerboseInstrumentation) {
518 LOG(INFO) << " No exit stub in " << DescribeLocation();
519 }
520 }
521 return true; // Continue.
522 }
523 Thread* const thread_;
524 const uintptr_t instrumentation_exit_pc_;
525 Instrumentation* const instrumentation_;
526 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
527 size_t frames_removed_;
528 };
529 if (kVerboseInstrumentation) {
530 std::string thread_name;
531 thread->GetThreadName(thread_name);
532 LOG(INFO) << "Removing exit stubs in " << thread_name;
533 }
534 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
535 thread->GetInstrumentationStack();
536 if (stack->size() > 0) {
537 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
538 uintptr_t instrumentation_exit_pc =
539 reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
540 RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
541 visitor.WalkStack(true);
542 CHECK_EQ(visitor.frames_removed_, stack->size());
543 stack->clear();
544 }
545 }
546
DeoptimizeAllThreadFrames()547 void Instrumentation::DeoptimizeAllThreadFrames() {
548 Thread* self = Thread::Current();
549 MutexLock mu(self, *Locks::thread_list_lock_);
550 ThreadList* tl = Runtime::Current()->GetThreadList();
551 tl->ForEach([&](Thread* t) {
552 Locks::mutator_lock_->AssertExclusiveHeld(self);
553 InstrumentThreadStack(t);
554 });
555 current_force_deopt_id_++;
556 }
557
HasEvent(Instrumentation::InstrumentationEvent expected,uint32_t events)558 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
559 return (events & expected) != 0;
560 }
561
PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)562 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
563 uint32_t events,
564 std::list<InstrumentationListener*>& list,
565 InstrumentationListener* listener,
566 bool* has_listener)
567 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
568 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
569 if (!HasEvent(event, events)) {
570 return;
571 }
572 // If there is a free slot in the list, we insert the listener in that slot.
573 // Otherwise we add it to the end of the list.
574 auto it = std::find(list.begin(), list.end(), nullptr);
575 if (it != list.end()) {
576 *it = listener;
577 } else {
578 list.push_back(listener);
579 }
580 Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
581 }
582
AddListener(InstrumentationListener * listener,uint32_t events)583 void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
584 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
585 PotentiallyAddListenerTo(kMethodEntered,
586 events,
587 method_entry_listeners_,
588 listener,
589 &have_method_entry_listeners_);
590 PotentiallyAddListenerTo(kMethodExited,
591 events,
592 method_exit_listeners_,
593 listener,
594 &have_method_exit_listeners_);
595 PotentiallyAddListenerTo(kMethodUnwind,
596 events,
597 method_unwind_listeners_,
598 listener,
599 &have_method_unwind_listeners_);
600 PotentiallyAddListenerTo(kBranch,
601 events,
602 branch_listeners_,
603 listener,
604 &have_branch_listeners_);
605 PotentiallyAddListenerTo(kDexPcMoved,
606 events,
607 dex_pc_listeners_,
608 listener,
609 &have_dex_pc_listeners_);
610 PotentiallyAddListenerTo(kFieldRead,
611 events,
612 field_read_listeners_,
613 listener,
614 &have_field_read_listeners_);
615 PotentiallyAddListenerTo(kFieldWritten,
616 events,
617 field_write_listeners_,
618 listener,
619 &have_field_write_listeners_);
620 PotentiallyAddListenerTo(kExceptionThrown,
621 events,
622 exception_thrown_listeners_,
623 listener,
624 &have_exception_thrown_listeners_);
625 PotentiallyAddListenerTo(kWatchedFramePop,
626 events,
627 watched_frame_pop_listeners_,
628 listener,
629 &have_watched_frame_pop_listeners_);
630 PotentiallyAddListenerTo(kExceptionHandled,
631 events,
632 exception_handled_listeners_,
633 listener,
634 &have_exception_handled_listeners_);
635 UpdateInterpreterHandlerTable();
636 }
637
PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)638 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
639 uint32_t events,
640 std::list<InstrumentationListener*>& list,
641 InstrumentationListener* listener,
642 bool* has_listener)
643 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
644 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
645 if (!HasEvent(event, events)) {
646 return;
647 }
648 auto it = std::find(list.begin(), list.end(), listener);
649 if (it != list.end()) {
650 // Just update the entry, do not remove from the list. Removing entries in the list
651 // is unsafe when mutators are iterating over it.
652 *it = nullptr;
653 }
654
655 // Check if the list contains any non-null listener, and update 'has_listener'.
656 for (InstrumentationListener* l : list) {
657 if (l != nullptr) {
658 Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
659 return;
660 }
661 }
662 Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = false; });
663 }
664
RemoveListener(InstrumentationListener * listener,uint32_t events)665 void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
666 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
667 PotentiallyRemoveListenerFrom(kMethodEntered,
668 events,
669 method_entry_listeners_,
670 listener,
671 &have_method_entry_listeners_);
672 PotentiallyRemoveListenerFrom(kMethodExited,
673 events,
674 method_exit_listeners_,
675 listener,
676 &have_method_exit_listeners_);
677 PotentiallyRemoveListenerFrom(kMethodUnwind,
678 events,
679 method_unwind_listeners_,
680 listener,
681 &have_method_unwind_listeners_);
682 PotentiallyRemoveListenerFrom(kBranch,
683 events,
684 branch_listeners_,
685 listener,
686 &have_branch_listeners_);
687 PotentiallyRemoveListenerFrom(kDexPcMoved,
688 events,
689 dex_pc_listeners_,
690 listener,
691 &have_dex_pc_listeners_);
692 PotentiallyRemoveListenerFrom(kFieldRead,
693 events,
694 field_read_listeners_,
695 listener,
696 &have_field_read_listeners_);
697 PotentiallyRemoveListenerFrom(kFieldWritten,
698 events,
699 field_write_listeners_,
700 listener,
701 &have_field_write_listeners_);
702 PotentiallyRemoveListenerFrom(kExceptionThrown,
703 events,
704 exception_thrown_listeners_,
705 listener,
706 &have_exception_thrown_listeners_);
707 PotentiallyRemoveListenerFrom(kWatchedFramePop,
708 events,
709 watched_frame_pop_listeners_,
710 listener,
711 &have_watched_frame_pop_listeners_);
712 PotentiallyRemoveListenerFrom(kExceptionHandled,
713 events,
714 exception_handled_listeners_,
715 listener,
716 &have_exception_handled_listeners_);
717 UpdateInterpreterHandlerTable();
718 }
719
GetCurrentInstrumentationLevel() const720 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
721 if (interpreter_stubs_installed_) {
722 return InstrumentationLevel::kInstrumentWithInterpreter;
723 } else if (entry_exit_stubs_installed_) {
724 return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
725 } else {
726 return InstrumentationLevel::kInstrumentNothing;
727 }
728 }
729
RequiresInstrumentationInstallation(InstrumentationLevel new_level) const730 bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
731 // We need to reinstall instrumentation if we go to a different level.
732 return GetCurrentInstrumentationLevel() != new_level;
733 }
734
UpdateInstrumentationLevels(InstrumentationLevel level)735 void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
736 if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
737 can_use_instrumentation_trampolines_ = false;
738 }
739 if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
740 for (auto& p : requested_instrumentation_levels_) {
741 if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
742 p.second = InstrumentationLevel::kInstrumentWithInterpreter;
743 }
744 }
745 }
746 }
747
ConfigureStubs(const char * key,InstrumentationLevel desired_level)748 void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
749 // Store the instrumentation level for this key or remove it.
750 if (desired_level == InstrumentationLevel::kInstrumentNothing) {
751 // The client no longer needs instrumentation.
752 requested_instrumentation_levels_.erase(key);
753 } else {
754 // The client needs instrumentation.
755 requested_instrumentation_levels_.Overwrite(key, desired_level);
756 }
757
758 UpdateInstrumentationLevels(desired_level);
759 UpdateStubs();
760 }
761
EnableSingleThreadDeopt()762 void Instrumentation::EnableSingleThreadDeopt() {
763 // Single-thread deopt only uses interpreter.
764 can_use_instrumentation_trampolines_ = false;
765 UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
766 UpdateStubs();
767 }
768
// Recomputes the aggregate instrumentation level from all client requests and
// installs or removes the corresponding stubs on classes and thread stacks.
// Requires the mutator lock to be held exclusively (asserted below).
void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  DCHECK(can_use_instrumentation_trampolines_ ||
         requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
      << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
      << requested_level;

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    // Installing instrumentation: pick which stub flavors are needed.
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    // Rewrite the entrypoints of every loaded method, then instrument the
    // stacks of all existing threads.
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    // Removing instrumentation: restore original entrypoints first.
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      bool no_remaining_deopts = true;
      // Check that there are no other forced deoptimizations. Do it here so we only need to lock
      // thread_list_lock once.
      // The compiler gets confused on the thread annotations, so use
      // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
      // exclusively at this point.
      Locks::mutator_lock_->AssertExclusiveHeld(self);
      runtime->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
        // A thread blocks stack restoration if it is force-interpreted or
        // still has instrumentation frames from an older force-deopt epoch.
        no_remaining_deopts =
            no_remaining_deopts && !t->IsForceInterpreter() &&
            std::all_of(t->GetInstrumentationStack()->cbegin(),
                        t->GetInstrumentationStack()->cend(),
                        [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                          return frame.second.force_deopt_id_ == current_force_deopt_id_;
                        });
      });
      if (no_remaining_deopts) {
        Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
        // Only do this after restoring, as walking the stack when restoring will see
        // the instrumentation exit pc.
        instrumentation_stubs_installed_ = false;
      }
    }
  }
}
844
// Thread-list callback: resets the given thread's cached quick allocation
// entrypoints. The `arg` parameter is unused (required by the ForEach API).
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}
848
// Switches the quick allocation entrypoints between the instrumented and
// uninstrumented variants. Caller must hold instrument_entrypoints_lock_ and
// must NOT hold the mutator lock (we may suspend all threads below).
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    // Threads may be executing allocation entrypoints, so switch them only
    // while everything is suspended.
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    // update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    // constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}
875
// Public entry point: acquires instrument_entrypoints_lock_ and delegates to
// the *Locked variant, which maintains the instrumentation refcount.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}
880
// Public entry point: acquires instrument_entrypoints_lock_ and delegates to
// the *Locked variant, which maintains the instrumentation refcount.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}
885
InstrumentQuickAllocEntryPointsLocked()886 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
887 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
888 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
889 SetEntrypointsInstrumented(true);
890 }
891 ++quick_alloc_entry_points_instrumentation_counter_;
892 }
893
UninstrumentQuickAllocEntryPointsLocked()894 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
895 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
896 CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
897 --quick_alloc_entry_points_instrumentation_counter_;
898 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
899 SetEntrypointsInstrumented(false);
900 }
901 }
902
// Resets the quick allocation entrypoints on every live thread. A no-op
// before the runtime is started (the not-started path is handled directly in
// SetEntrypointsInstrumented).
void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}
910
// Updates `method`'s quick entrypoint to `quick_code`, substituting the
// interpreter bridge or the instrumentation entrypoint when the current
// instrumentation state requires it.
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation active, install the code as given.
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Interpreter required (globally, or this method is deoptimized).
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        // Resolution stub / interpreter bridge already route through the
        // runtime, so they are kept as-is.
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_ &&
                 // We need to make sure not to replace anything that InstallStubsForMethod
                 // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
                 // implementation directly and this will confuse the instrumentation trampolines.
                 // TODO We should remove the need for this since it makes it impossible to profile
                 // Proxy.<init> correctly in all cases.
                 method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init)) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        if (!method->IsNative() && Runtime::Current()->GetJit() != nullptr) {
          // Native methods use trampoline entrypoints during interpreter tracing.
          DCHECK(!Runtime::Current()->GetJit()->GetCodeCache()->GetGarbageCollectCodeUnsafe());
          ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
          // Tracing will look at the saved entry point in the profiling info to know the actual
          // entrypoint, so we store it here.
          if (profiling_info != nullptr) {
            profiling_info->SetSavedEntryPoint(quick_code);
          }
        }
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
948
UpdateNativeMethodsCodeToJitCode(ArtMethod * method,const void * quick_code)949 void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
950 // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
951 // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
952 // the ArtMethod is still in memory.
953 const void* new_quick_code = quick_code;
954 if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
955 new_quick_code = GetQuickInstrumentationEntryPoint();
956 }
957 UpdateEntrypoints(method, new_quick_code);
958 }
959
// Standard entrypoint-update path; requires the declaring class to already be
// resolved (see UpdateMethodsCodeForJavaDebuggable for the exception).
void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}
964
// Convenience wrapper: forces `method` onto the quick-to-interpreter bridge.
void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}
968
void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}
977
AddDeoptimizedMethod(ArtMethod * method)978 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
979 if (IsDeoptimizedMethod(method)) {
980 // Already in the map. Return.
981 return false;
982 }
983 // Not found. Add it.
984 deoptimized_methods_.insert(method);
985 return true;
986 }
987
IsDeoptimizedMethod(ArtMethod * method)988 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
989 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
990 }
991
BeginDeoptimizedMethod()992 ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
993 if (deoptimized_methods_.empty()) {
994 // Empty.
995 return nullptr;
996 }
997 return *deoptimized_methods_.begin();
998 }
999
RemoveDeoptimizedMethod(ArtMethod * method)1000 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1001 auto it = deoptimized_methods_.find(method);
1002 if (it == deoptimized_methods_.end()) {
1003 return false;
1004 }
1005 deoptimized_methods_.erase(it);
1006 return true;
1007 }
1008
// Returns true when no method is currently registered as deoptimized.
// Caller must hold the deoptimized-methods lock (reader or writer).
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
1012
// Forces `method` to be executed under instrumentation: registers it as
// deoptimized and, unless interpreter stubs already cover everything, points
// its entrypoint at the instrumentation entry and instruments all stacks.
void Instrumentation::Deoptimize(ArtMethod* method) {
  // Native, proxy and non-invokable methods cannot be selectively deoptimized.
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
1035
// Reverses Deoptimize() for `method`: unregisters it and restores an
// appropriate entrypoint; if it was the last deoptimized method, thread
// stacks are restored as well.
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
                            << " is not deoptimized";
    // Capture emptiness under the lock; used below to decide on stack restore.
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Uninitialized class: the resolution stub will trigger initialization
      // on first call before dispatching to the real code.
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty && !entry_exit_stubs_installed_) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
1073
// Thread-safe query: is `method` currently registered as deoptimized?
bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return IsDeoptimizedMethod(method);
}
1079
// Enables deoptimization support. Must be called before any Deoptimize()
// request; checks that no method is already registered and that support was
// previously disabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
1086
// Disables deoptimization support for `key`: undoes full deoptimization if it
// was active, then undeoptimizes every individually-registered method.
void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods, one at a time. The lock is released
  // between iterations because Undeoptimize() takes it (as writer) itself.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
1109
1110 // Indicates if instrumentation should notify method enter/exit events to the listeners.
ShouldNotifyMethodEnterExitEvents() const1111 bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
1112 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
1113 return false;
1114 }
1115 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
1116 }
1117
// Switches everything to interpreter execution for client `key`.
// Deoptimization support must already be enabled.
void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}
1122
// Drops client `key`'s full-interpreter request. Interpreter stubs must
// currently be installed and deoptimization support enabled.
void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
1128
EnableMethodTracing(const char * key,bool needs_interpreter)1129 void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
1130 InstrumentationLevel level;
1131 if (needs_interpreter) {
1132 level = InstrumentationLevel::kInstrumentWithInterpreter;
1133 } else {
1134 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
1135 }
1136 ConfigureStubs(key, level);
1137 }
1138
// Removes client `key`'s tracing instrumentation request.
void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
1142
// Returns the code the instrumentation entrypoint should invoke for `method`,
// resolving stubs/bridges to real code from the oat file or JIT when possible.
const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
  // This is called by instrumentation entry only and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_ && !interpreter_stubs_installed_)) {
    // In general we just return whatever the method thinks its entrypoint is here. The only
    // exception is if it still has the instrumentation entrypoint. That means we are racing another
    // thread getting rid of instrumentation which is unexpected but possible. In that case we want
    // to wait and try to get it from the oat file or jit.
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
    DCHECK(code != nullptr);
    if (code != GetQuickInstrumentationEntryPoint()) {
      return code;
    } else if (method->IsNative()) {
      return class_linker->GetQuickOatCodeFor(method);
    }
    // We don't know what it is. Fallthough to try to find the code from the JIT or Oat file.
  } else if (method->IsNative()) {
    // TODO We could have JIT compiled native entrypoints. It might be worth it to find these.
    return class_linker->GetQuickOatCodeFor(method);
  } else if (UNLIKELY(interpreter_stubs_installed_)) {
    return GetQuickToInterpreterBridge();
  }
  // Since the method cannot be native due to ifs above we can always fall back to interpreter
  // bridge.
  const void* result = GetQuickToInterpreterBridge();
  if (!NeedDebugVersionFor(method)) {
    // If we don't need a debug version we should see what the oat file/class linker has to say.
    result = class_linker->GetQuickOatCodeFor(method);
  }
  // If both those fail try the jit.
  if (result == GetQuickToInterpreterBridge()) {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      const void* res = jit->GetCodeCache()->FindCompiledCodeForInstrumentation(method);
      if (res != nullptr) {
        result = res;
      }
    }
  }
  return result;
}
1185
GetQuickCodeFor(ArtMethod * method,PointerSize pointer_size) const1186 const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
1187 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1188 if (LIKELY(!instrumentation_stubs_installed_)) {
1189 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
1190 DCHECK(code != nullptr);
1191 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
1192 !class_linker->IsQuickToInterpreterBridge(code)) &&
1193 !class_linker->IsQuickResolutionStub(code) &&
1194 !class_linker->IsQuickToInterpreterBridge(code)) {
1195 return code;
1196 }
1197 }
1198 return class_linker->GetQuickOatCodeFor(method);
1199 }
1200
MethodEnterEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1201 void Instrumentation::MethodEnterEventImpl(Thread* thread,
1202 ObjPtr<mirror::Object> this_object,
1203 ArtMethod* method,
1204 uint32_t dex_pc) const {
1205 DCHECK(!method->IsRuntimeMethod());
1206 if (HasMethodEntryListeners()) {
1207 Thread* self = Thread::Current();
1208 StackHandleScope<1> hs(self);
1209 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1210 for (InstrumentationListener* listener : method_entry_listeners_) {
1211 if (listener != nullptr) {
1212 listener->MethodEntered(thread, thiz, method, dex_pc);
1213 }
1214 }
1215 }
1216 }
1217
// Specialization for reference-typed return values: the result is already
// wrapped in a MutableHandle, so listeners are notified directly.
template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    // Handle keeps the receiver valid across listener calls, which may suspend.
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_exit_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
      }
    }
  }
}
1236
// Specialization for JValue return values. Primitive returns are passed
// through directly; reference returns are rewrapped in a handle (listeners
// may suspend) and written back into the JValue afterwards, since a listener
// may replace the returned object.
template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
                                                     ObjPtr<mirror::Object> this_object,
                                                     ArtMethod* method,
                                                     uint32_t dex_pc,
                                                     OptionalFrame frame,
                                                     JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<2> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      // Primitive return: no GC-visible state in the JValue, notify directly.
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
        }
      }
    } else {
      // Reference return: delegate to the handle-based specialization, then
      // propagate any replacement back into the JValue.
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, thiz.Get(), method, dex_pc, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}
1261
MethodUnwindEvent(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1262 void Instrumentation::MethodUnwindEvent(Thread* thread,
1263 ObjPtr<mirror::Object> this_object,
1264 ArtMethod* method,
1265 uint32_t dex_pc) const {
1266 if (HasMethodUnwindListeners()) {
1267 Thread* self = Thread::Current();
1268 StackHandleScope<1> hs(self);
1269 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1270 for (InstrumentationListener* listener : method_unwind_listeners_) {
1271 if (listener != nullptr) {
1272 listener->MethodUnwind(thread, thiz, method, dex_pc);
1273 }
1274 }
1275 }
1276 }
1277
// Notifies all dex-pc listeners that execution moved to `dex_pc` in `method`.
// Callers are expected to have checked HasDexPcListeners() already.
void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  // Handle keeps the receiver valid across listener calls, which may suspend.
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}
1291
BranchImpl(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset) const1292 void Instrumentation::BranchImpl(Thread* thread,
1293 ArtMethod* method,
1294 uint32_t dex_pc,
1295 int32_t offset) const {
1296 for (InstrumentationListener* listener : branch_listeners_) {
1297 if (listener != nullptr) {
1298 listener->Branch(thread, method, dex_pc, offset);
1299 }
1300 }
1301 }
1302
// Notifies all watched-frame-pop listeners that `frame` is being popped.
void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
  for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
    if (listener != nullptr) {
      listener->WatchedFramePop(thread, frame);
    }
  }
}
1310
// Notifies all field-read listeners that `field` of `this_object` is being
// read at `dex_pc` in `method`.
void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  // Handle keeps the receiver valid across listener calls, which may suspend.
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}
1325
// Notifies all field-write listeners. Primitive values are forwarded as the
// raw JValue; reference values are rewrapped in a handle since listener calls
// may suspend and move the object.
void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    // Reference-typed value: hand listeners a handle instead of the raw JValue.
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}
1350
// Notifies exception-thrown listeners. The pending exception is cleared for
// the duration of the callbacks and restored afterwards, so listeners run
// with a clean exception state (see b/65049545).
void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}
1369
// Notifies exception-handled listeners that `exception_object` has been
// caught. Unlike ExceptionThrownEvent, the exception is expected to have
// already been cleared by the caller.
void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}
1385
// Records an instrumentation frame for `method` keyed by `stack_ptr`, after
// delivering the method-enter event (unless entering via the interpreter).
// `lr` is the original return pc that the exit stub must eventually restore.
void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                    ObjPtr<mirror::Object> this_object,
                                                    ArtMethod* method,
                                                    uintptr_t stack_ptr,
                                                    uintptr_t lr,
                                                    bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
  // event causes an exception we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, h_this.Get(), method, 0);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);

  // The frame carries the current force-deopt id so UpdateStubs can tell
  // whether it belongs to the current deopt epoch.
  instrumentation::InstrumentationStackFrame instrumentation_frame(
      h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
  stack->insert({stack_ptr, instrumentation_frame});
}
1420
GetDeoptimizationMethodType(ArtMethod * method)1421 DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1422 if (method->IsRuntimeMethod()) {
1423 // Certain methods have strict requirement on whether the dex instruction
1424 // should be re-executed upon deoptimization.
1425 if (method == Runtime::Current()->GetCalleeSaveMethod(
1426 CalleeSaveType::kSaveEverythingForClinit)) {
1427 return DeoptimizationMethodType::kKeepDexPc;
1428 }
1429 if (method == Runtime::Current()->GetCalleeSaveMethod(
1430 CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1431 return DeoptimizationMethodType::kKeepDexPc;
1432 }
1433 }
1434 return DeoptimizationMethodType::kDefault;
1435 }
1436
// Try to get the shorty of a runtime method if it's an invocation stub.
// Walks down to the first Java frame and derives the return-type shorty
// character from how that frame reached the runtime; defaults to 'V' when no
// return value is expected.
static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
  char shorty = 'V';
  StackVisitor::WalkStack(
      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = stack_visitor->GetMethod();
        if (m == nullptr || m->IsRuntimeMethod()) {
          // Keep walking until we find a Java frame.
          return true;
        }
        // The first Java method.
        if (m->IsNative()) {
          // Use JNI method's shorty for the jni stub.
          shorty = m->GetShorty()[0];
        } else if (m->IsProxyMethod()) {
          // Proxy method just invokes its proxied method via
          // art_quick_proxy_invoke_handler.
          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        } else {
          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
          if (instr.IsInvoke()) {
            // Quickened invokes store the method index in the quickening
            // data rather than in the instruction's VRegB.
            auto get_method_index_fn = [](ArtMethod* caller,
                                          const Instruction& inst,
                                          uint32_t dex_pc)
                REQUIRES_SHARED(Locks::mutator_lock_) {
              switch (inst.Opcode()) {
                case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
                case Instruction::INVOKE_VIRTUAL_QUICK: {
                  uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
                  CHECK_NE(method_idx, DexFile::kDexNoIndex16);
                  return method_idx;
                }
                default: {
                  return static_cast<uint16_t>(inst.VRegB());
                }
              }
            };

            uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
            const DexFile* dex_file = m->GetDexFile();
            if (interpreter::IsStringInit(dex_file, method_index)) {
              // Invoking string init constructor is turned into invoking
              // StringFactory.newStringFromChars() which returns a string.
              shorty = 'L';
            } else {
              shorty = dex_file->GetMethodShorty(method_index)[0];
            }

          } else {
            // It could be that a non-invoke opcode invokes a stub, which in turn
            // invokes Java code. In such cases, we should never expect a return
            // value from the stub.
          }
        }
        // Stop stack walking since we've seen a Java frame.
        return false;
      },
      thread,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
  return shorty;
}
1498
// Pops the instrumentation stack frame keyed by `return_pc_addr` when an
// instrumented method returns. Restores the original return pc, fires the
// MethodExited event (for non-runtime, non-interpreter-entry frames), and
// decides whether the caller must continue in the interpreter (deoptimize).
// `gpr_result`/`fpr_result` point at the raw return-value registers; if the
// return type is a reference, the (possibly GC-moved) object is written back
// into `*gpr_result` before returning.
// Returns a (pc, entrypoint) pair: either (return pc, deopt entrypoint) when
// deoptimizing, or (0, return pc) to resume the caller normally.
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc_addr,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  // The instrumentation stack is keyed by the address of the stack slot that
  // holds the return pc; the frame pushed for this return must be present.
  auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
  CHECK(it != stack->end());
  // Copy the frame out before erasing the map entry.
  InstrumentationStackFrame instrumentation_frame = it->second;
  stack->erase(it);

  // Set return PC and check the consistency of the stack.
  // We don't cache the return pc value in a local as it may change after
  // sending a method exit event.
  *return_pc_addr = instrumentation_frame.return_pc_;
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // Runtime method does not call into MethodExitEvent() so there should not be
  // suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    if (method != Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      return_shorty = GetRuntimeMethodShorty(self);
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return value. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either since the dex instruction will be re-executed by the
      // interpreter, except these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    // Regular Java method: first shorty character is the return type.
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  bool is_ref = return_shorty == '[' || return_shorty == 'L';
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  JValue return_value;
  // Pick the return value from the FP or GP register file based on the shorty.
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    res.Assign(return_value.GetL());
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = dex::kDexNoIndex;
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    ObjPtr<mirror::Object> this_object = instrumentation_frame.this_object_;
    // Note that sending the event may change the contents of *return_pc_addr.
    MethodExitEvent(
        self, this_object, instrumentation_frame.method_, dex_pc, OptionalFrame{}, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                     self->IsForceInterpreter() ||
                     // NB Since structurally obsolete compiled methods might have the offsets of
                     // methods/fields compiled in we need to go back to interpreter whenever we hit
                     // them.
                     visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
                     // Check if we forced all threads to deoptimize in the time between this frame
                     // being created and now.
                     instrumentation_frame.force_deopt_id_ != current_force_deopt_id_ ||
                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (is_ref) {
    // Restore the return value if it's a reference since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    // Stash the return value (and whether it is a reference) so the
    // interpreter can pick it up after the deoptimization transition.
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L' || return_shorty == '[',
                                    /* exception= */ nullptr ,
                                    /* from_code= */ false,
                                    deopt_method_type);
    return GetTwoWordSuccessValue(*return_pc_addr,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    return GetTwoWordSuccessValue(0, *return_pc_addr);
  }
}
1626
PopFramesForDeoptimization(Thread * self,uintptr_t pop_until) const1627 uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
1628 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
1629 self->GetInstrumentationStack();
1630 // Pop all instrumentation frames below `pop_until`.
1631 uintptr_t return_pc = 0u;
1632 for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
1633 auto e = i;
1634 ++i;
1635 if (kVerboseInstrumentation) {
1636 LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
1637 }
1638 return_pc = e->second.return_pc_;
1639 stack->erase(e);
1640 }
1641 return return_pc;
1642 }
1643
Dump() const1644 std::string InstrumentationStackFrame::Dump() const {
1645 std::ostringstream os;
1646 os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
1647 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
1648 << " force_deopt_id=" << force_deopt_id_;
1649 return os.str();
1650 }
1651
1652 } // namespace instrumentation
1653 } // namespace art
1654