/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>

#include <functional>
#include <list>
#include <memory>
#include <optional>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/enums.h"
#include "base/locks.h"
#include "base/macros.h"
#include "base/safe_map.h"
#include "gc_root.h"

namespace art {
namespace mirror {
class Class;
class Object;
class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
template <typename T> class Handle;
template <typename T> class MutableHandle;
union JValue;
class SHARED_LOCKABLE ReaderWriterMutex;
class ShadowFrame;
class Thread;
enum class DeoptimizationMethodType;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
// An OptionalFrame is either Some(const ShadowFrame& current_frame) or None, depending on
// whether the method being exited has a shadow-frame associated with the current stack frame.
// When there is no associated shadow-frame, it is None.
using OptionalFrame = std::optional<std::reference_wrapper<const ShadowFrame>>;
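
// Illustrative sketch only (not part of the runtime): how a MethodExited override might unpack
// an OptionalFrame. Only std::optional / std::reference_wrapper APIs are assumed; the function
// name is hypothetical.
//
//   void InspectFrame(OptionalFrame frame) {
//     if (frame.has_value()) {
//       const ShadowFrame& shadow_frame = frame->get();  // Some(current_frame).
//       // ... inspect the interpreter frame ...
//     } else {
//       // None: the exiting method has no shadow-frame for this stack frame.
//     }
//   }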

// Instrumentation event listener API. Registered listeners receive the appropriate callbacks for
// the events they are listening for. The callbacks supply the thread, method and dex_pc at which
// the event occurred. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread,
                             Handle<mirror::Object> this_object,
                             ArtMethod* method,
                             uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual void MethodExited(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            OptionalFrame frame,
                            MutableHandle<mirror::Object>& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when a method is exited. The implementor should either handler-ize the return
  // value (if appropriate) or use the alternate MethodExited callback instead if they need to
  // go through a suspend point.
  virtual void MethodExited(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            OptionalFrame frame,
                            JValue& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread,
                          Handle<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t new_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread,
                         Handle<mirror::Object> this_object,
                         ArtMethod* method,
                         uint32_t dex_pc,
                         ArtField* field) = 0;

  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            Handle<mirror::Object> field_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            const JValue& field_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is thrown.
  virtual void ExceptionThrown(Thread* thread,
                               Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is caught/handled by Java code.
  virtual void ExceptionHandled(Thread* thread, Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we execute a branch.
  virtual void Branch(Thread* thread,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      int32_t dex_pc_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack
  // by either a return or an exception. Normally instrumentation listeners should ensure that
  // there are shadow-frames by deoptimizing stacks.
  virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
                               const ShadowFrame& frame ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
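
// A minimal sketch (hypothetical, not part of the runtime) of a concrete listener: all
// pure-virtual callbacks must be overridden, but only the events the listener registers for will
// ever fire. ArtMethod::PrettyMethod() and the LOG macro are existing ART facilities.
//
//   class MethodEntryLogger final : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread,
//                        Handle<mirror::Object> this_object,
//                        ArtMethod* method,
//                        uint32_t dex_pc) override
//         REQUIRES_SHARED(Locks::mutator_lock_) {
//       LOG(INFO) << "Entered " << method->PrettyMethod() << " at dex pc " << dex_pc;
//     }
//     // ... remaining pure-virtual callbacks overridden with empty bodies ...
//   };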

class Instrumentation;
// A helper to send instrumentation events while popping the stack in a safe way.
class InstrumentationStackPopper {
 public:
  explicit InstrumentationStackPopper(Thread* self);
  ~InstrumentationStackPopper() REQUIRES_SHARED(Locks::mutator_lock_);

  // Increase the number of frames being popped up to `stack_pointer`. Return true if the
  // frames were popped without any exceptions, false otherwise. The exception that caused
  // the pop is `exception`.
  bool PopFramesTo(uintptr_t stack_pointer, /*in-out*/MutableHandle<mirror::Throwable>& exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  Thread* self_;
  Instrumentation* instrumentation_;
  // The stack pointer limit for frames to pop.
  uintptr_t pop_until_;
};
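
// Hypothetical usage sketch: pop instrumentation frames up to a target stack pointer while an
// exception propagates. `self`, `target_sp` and `exception` are assumed to be supplied by the
// surrounding unwinding code.
//
//   InstrumentationStackPopper popper(self);
//   if (!popper.PopFramesTo(target_sp, exception)) {
//     // A listener threw while frames were being popped; `exception` now
//     // references the replacement throwable.
//   }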

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionThrown = 0x40,
    kBranch = 0x80,
    kWatchedFramePop = 0x200,
    kExceptionHandled = 0x400,
  };

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
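
  // Hypothetical registration sketch: `events` is a bitmask of InstrumentationEvent values OR'd
  // together, so one listener can subscribe to several events in a single call. The listener is
  // assumed to outlive its registration.
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   instr->AddListener(listener,
  //                      Instrumentation::kMethodEntered |
  //                      Instrumentation::kMethodExited |
  //                      Instrumentation::kMethodUnwind);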

  // Deoptimization.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!GetDeoptimizedMethodsLock());
  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock());

  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);

  bool CanDeoptimize() {
    return deoptimization_enabled_;
  }

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Deoptimize a method by forcing its execution with the interpreter. Note that a static method
  // (except a class initializer) set to the resolution trampoline will be deoptimized only once
  // its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());

  // Undeoptimize the method by restoring its entrypoints. Note that a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());
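
  // Hypothetical sketch of selective deoptimization in the debugger style: force one method onto
  // the interpreter, then restore its entrypoints later. Locking and thread-suspension
  // requirements are elided here.
  //
  //   instr->Deoptimize(method);    // method now runs under the interpreter.
  //   ...
  //   instr->Undeoptimize(method);  // entrypoints restored (for a static method, only once its
  //                                 // declaring class is initialized).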

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());
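
  // Hypothetical tracing sketch: the key identifies this client so that several instrumentation
  // users can coexist; the same key must be passed to DisableMethodTracing. The key literal is
  // illustrative.
  //
  //   static constexpr const char* kTracerKey = "MyTracer";
  //   instr->EnableMethodTracing(kTracerKey, /*needs_interpreter=*/false);
  //   ...
  //   instr->DisableMethodTracing(kTracerKey);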

  InterpreterHandlerTable GetInterpreterHandlerTable() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Update the code of a native method to a JITed stub.
  void UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Update the code of a method to the interpreter respecting any installed stubs from debugger.
  void UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Update the code of a method respecting any installed stubs from debugger.
  void UpdateMethodsCodeForJavaDebuggable(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Return the code that we can execute for an invoke including from the JIT.
  const void* GetCodeForInvoke(ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  // Returns true if the method's code is in a boot image oat file that wasn't compiled as
  // debuggable; in that case we need the debug version (interpreter or JITed).
  bool NeedDebugVersionFor(ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionThrownListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_thrown_listeners_;
  }

  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasWatchedFramePopListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_watched_frame_pop_listeners_;
  }

  bool HasExceptionHandledListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_handled_listeners_;
  }

  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_thrown_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_watched_frame_pop_listeners_ ||
        have_exception_handled_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread,
                        ObjPtr<mirror::Object> this_object,
                        ArtMethod* method,
                        uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  template<typename T>
  void MethodExitEvent(Thread* thread,
                       ObjPtr<mirror::Object> this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       OptionalFrame frame,
                       T& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, frame, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread,
                         ObjPtr<mirror::Object> this_object,
                         ArtMethod* method,
                         uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread,
                       ObjPtr<mirror::Object> this_object,
                       ArtMethod* method,
                       uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread,
                      ObjPtr<mirror::Object> this_object,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread,
                       ObjPtr<mirror::Object> this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       ArtField* field,
                       const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners that a watched frame is being popped (only supported by the interpreter).
  void WatchedFramePopped(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasWatchedFramePopListeners())) {
      WatchedFramePopImpl(thread, frame);
    }
  }

  // Inform listeners that an exception was thrown.
  void ExceptionThrownEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that an exception has been handled. This is not sent for native code or for
  // exceptions which reach the end of the thread's stack.
  void ExceptionHandledEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self,
                                     ObjPtr<mirror::Object> this_object,
                                     ArtMethod* method,
                                     uintptr_t stack_pointer,
                                     uintptr_t lr,
                                     bool interpreter_entry)
      REQUIRES_SHARED(Locks::mutator_lock_);

  DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame
  // returning the intended link register. Generates method exit events. The gpr_result and
  // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
  // result values of the function are stored. Both pointers must always be valid but the values
  // held there will only be meaningful if interpreted as the appropriate type given the function
  // being returned from.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self,
                                             uintptr_t* return_pc_addr,
                                             uint64_t* gpr_result,
                                             uint64_t* fpr_result)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Pops instrumentation frames from the current thread until the given stack pointer is
  // reached. Returns the return pc for the last instrumentation frame popped.
  uintptr_t PopFramesForDeoptimization(Thread* self, uintptr_t stack_pointer) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call back for configure stubs.
  void InstallStubsForClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!GetDeoptimizedMethodsLock());

  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Sets up instrumentation to allow single thread deoptimization using ForceInterpreterCount.
  void EnableSingleThreadDeopt()
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Install instrumentation exit stub on every method of the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      REQUIRES(Locks::mutator_lock_);

  // Force all currently running frames to be deoptimized back to interpreter. This should only be
  // used in cases where basically all compiled code has been invalidated.
  void DeoptimizeAllThreadFrames() REQUIRES(art::Locks::mutator_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Does not hold the lock; used to check whether someone changed from not-instrumented to
  // instrumented during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  InstrumentationLevel GetCurrentInstrumentationLevel() const;

 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by any client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
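
  // Illustrative example (keys hypothetical) of how per-client levels combine: the effective
  // level is the highest one requested by any live client.
  //
  //   ConfigureStubs("A", InstrumentationLevel::kInstrumentWithInterpreter);
  //   ConfigureStubs("B", InstrumentationLevel::kInstrumentWithInstrumentationStubs);
  //   // Effective level: kInstrumentWithInterpreter.
  //   ConfigureStubs("A", InstrumentationLevel::kInstrumentNothing);
  //   // Effective level: kInstrumentWithInstrumentationStubs.
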
  void UpdateStubs() REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
  void UpdateInstrumentationLevels(InstrumentationLevel level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread,
                            ObjPtr<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename T>
  void MethodExitEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           OptionalFrame frame,
                           T& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread,
                          ObjPtr<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           ArtField* field,
                           const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethodsEmpty() const
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  ReaderWriterMutex* GetDeoptimizedMethodsLock() const {
    return deoptimized_methods_lock_.get();
  }


  // A counter that's incremented every time DeoptimizeAllThreadFrames is called. We check each
  // InstrumentationStackFrame's creation id against this number and if they differ we deopt even
  // if we could otherwise continue running.
  uint64_t current_force_deopt_id_ GUARDED_BY(Locks::mutator_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception thrown listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any frame pop listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception handled listeners? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not for example std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable std::unique_ptr<ReaderWriterMutex> deoptimized_methods_lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(GetDeoptimizedMethodsLock());
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended; this is done
  // to prevent races with the GC, which relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;

  // If we can use instrumentation trampolines. After the first time we instrument something with
  // the interpreter we can no longer use trampolines because it can lead to stack corruption.
  // TODO Figure out a way to remove the need for this.
  bool can_use_instrumentation_trampolines_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.
  friend class InstrumentationStackPopper;  // For popping instrumentation frames.
  friend void InstrumentationInstallStack(Thread*, void*);

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationEvent rhs);
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationLevel rhs);

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            size_t frame_id,
                            bool interpreter_entry,
                            uint64_t force_deopt_id)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        frame_id_(frame_id),
        interpreter_entry_(interpreter_entry),
        force_deopt_id_(force_deopt_id) {
  }

  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
  uint64_t force_deopt_id_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_