/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_

#include "jni.h"

#include "base/locks.h"
#include "base/macros.h"
#include "base/value_object.h"
#include "thread_state.h"

namespace art {

class JavaVMExt;
class JNIEnvExt;
template<class MirrorType> class ObjPtr;
class Thread;

namespace mirror {
class Object;
}  // namespace mirror

// Scoped change into and out of a particular thread state. Handles Runnable transitions, which
// require more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
// ScopedObjectAccess are used to handle the change into Runnable and get direct access to
// objects; the unchecked variant doesn't aid annotalysis.
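//
// A minimal usage sketch (illustrative only; the function name is hypothetical and any
// non-Runnable ThreadState value could be used in place of kNative):
//
//   void DoBlockingWork(Thread* self) {
//     ScopedThreadStateChange tsc(self, kNative);
//     // The thread is no longer Runnable in this scope, so it may be suspended and the GC
//     // may run; mirror::Objects must not be touched here.
//   }  // ~ScopedThreadStateChange() restores the previous thread state.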
class ScopedThreadStateChange : public ValueObject {
 public:
  ALWAYS_INLINE ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
      REQUIRES(!Locks::thread_suspend_count_lock_);

  ALWAYS_INLINE ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_);

  ALWAYS_INLINE Thread* Self() const {
    return self_;
  }

 protected:
  // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
  ScopedThreadStateChange() {}

  Thread* const self_ = nullptr;
  const ThreadState thread_state_ = kTerminated;

 private:
  void ScopedThreadChangeDestructorCheck();

  ThreadState old_thread_state_ = kTerminated;
  const bool expected_has_no_thread_ = true;

  friend class ScopedObjectAccessUnchecked;
  DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};

// Assumes we are already runnable.
class ScopedObjectAccessAlreadyRunnable : public ValueObject {
 public:
  Thread* Self() const {
    return self_;
  }

  JNIEnvExt* Env() const {
    return env_;
  }

  JavaVMExt* Vm() const {
    return vm_;
  }

  bool ForceCopy() const;

  /*
   * Add a local reference for an object to the indirect reference table associated with the
   * current stack frame.  When the native function returns, the reference will be discarded.
   *
   * We need to allow the same reference to be added multiple times, and cope with nullptr.
   *
   * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
   * it's best if we don't grab a mutex.
   */
  template<typename T>
  T AddLocalReference(ObjPtr<mirror::Object> obj) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ObjPtr<T> Decode(jobject obj) const REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool IsRunnable() const;

 protected:
  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
      REQUIRES(!Locks::thread_suspend_count_lock_);

  ALWAYS_INLINE explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_);

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  // Note: The reinterpret_cast is backed by a static_assert in the cc file. Avoid a down_cast,
  //       as it prevents forward declaration of JavaVMExt.
  explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
      : self_(nullptr), env_(nullptr), vm_(reinterpret_cast<JavaVMExt*>(vm)) {}

  // Here purely to force inlining.
  ALWAYS_INLINE ~ScopedObjectAccessAlreadyRunnable() {}

  static void DCheckObjIsNotClearedJniWeakGlobal(ObjPtr<mirror::Object> obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Self thread, can be null.
  Thread* const self_;
  // The full JNIEnv.
  JNIEnvExt* const env_;
  // The full JavaVM.
  JavaVMExt* const vm_;
};

// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
//
// This class performs the necessary thread state switching to and from Runnable and lets us
// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
// apps that are using a JNIEnv on the wrong thread. The class also provides the methods that
// decode jobjects into Objects and encode Objects into jobjects. Performing this here enforces
// the Runnable thread state for use of Object, thereby preventing the Object from being modified
// by the GC while native or VM code is also manipulating it.
//
// The destructor transitions back to the previous thread state, typically Native. In that state
// GC and thread suspension may occur.
//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a share of
// the mutator_lock_ will be acquired on construction.
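//
// A sketch of the common pattern in a JNI entry point (illustrative only; the native method,
// its arguments, and the use of mirror::Object are hypothetical):
//
//   static jobject MyNativeMethod(JNIEnv* env, jobject java_obj) {
//     ScopedObjectAccess soa(env);  // Native -> Runnable; shares the mutator_lock_.
//     ObjPtr<mirror::Object> obj = soa.Decode<mirror::Object>(java_obj);
//     // ... use obj while the thread is Runnable ...
//     return soa.AddLocalReference<jobject>(obj);
//   }  // ~ScopedObjectAccess() transitions back to Native; GC and suspension may occur again.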
class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 public:
  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      REQUIRES(!Locks::thread_suspend_count_lock_);

  ALWAYS_INLINE explicit ScopedObjectAccessUnchecked(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_);

  ALWAYS_INLINE ~ScopedObjectAccessUnchecked() REQUIRES(!Locks::thread_suspend_count_lock_) {}

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}

 private:
  // The scoped thread state change makes sure that we are runnable and restores the thread state
  // in the destructor.
  const ScopedThreadStateChange tsc_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};

// Annotalysis helping variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  ALWAYS_INLINE explicit ScopedObjectAccess(JNIEnv* env)
      REQUIRES(!Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_);

  ALWAYS_INLINE explicit ScopedObjectAccess(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_);

  // Base class will release share of lock. Invoked after this destructor.
  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE;

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  //       routines operating with just a VM are sound. They are not, but when you have just a VM
  //       you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm) SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};

// Annotalysis helper for going to a suspended state from runnable.
class ScopedThreadSuspension : public ValueObject {
 public:
  ALWAYS_INLINE explicit ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
      REQUIRES(!Locks::thread_suspend_count_lock_, !Roles::uninterruptible_)
      UNLOCK_FUNCTION(Locks::mutator_lock_);

  ALWAYS_INLINE ~ScopedThreadSuspension() SHARED_LOCK_FUNCTION(Locks::mutator_lock_);

 private:
  Thread* const self_;
  const ThreadState suspended_state_;
  DISALLOW_COPY_AND_ASSIGN(ScopedThreadSuspension);
};
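
// A minimal usage sketch (illustrative only; the function name and the particular suspended
// state chosen are hypothetical caller code):
//
//   void WaitForSomething(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
//     ScopedThreadSuspension sts(self, kWaitingForGcToComplete);
//     // Not Runnable here: the share of the mutator_lock_ has been released, so it is safe
//     // to block without holding up GC or thread suspension.
//   }  // ~ScopedThreadSuspension() transitions back to Runnable.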


}  // namespace art

#endif  // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_