1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "interpreter_common.h"
18
19 #include <cmath>
20
21 #include "base/casts.h"
22 #include "base/enums.h"
23 #include "class_root-inl.h"
24 #include "debugger.h"
25 #include "dex/dex_file_types.h"
26 #include "entrypoints/runtime_asm_entrypoints.h"
27 #include "handle.h"
28 #include "intrinsics_enum.h"
29 #include "jit/jit.h"
30 #include "jvalue-inl.h"
31 #include "method_handles-inl.h"
32 #include "method_handles.h"
33 #include "mirror/array-alloc-inl.h"
34 #include "mirror/array-inl.h"
35 #include "mirror/call_site-inl.h"
36 #include "mirror/class.h"
37 #include "mirror/emulated_stack_frame.h"
38 #include "mirror/method_handle_impl-inl.h"
39 #include "mirror/method_type-inl.h"
40 #include "mirror/object_array-alloc-inl.h"
41 #include "mirror/object_array-inl.h"
42 #include "mirror/var_handle.h"
43 #include "reflection-inl.h"
44 #include "reflection.h"
45 #include "shadow_frame-inl.h"
46 #include "stack.h"
47 #include "thread-inl.h"
48 #include "transaction.h"
49 #include "var_handles.h"
50 #include "well_known_classes.h"
51
52 namespace art {
53 namespace interpreter {
54
55 void ThrowNullPointerExceptionFromInterpreter() {
56 ThrowNullPointerExceptionFromDexPC();
57 }
58
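// Returns true if an interpreter frame of |frame_size| bytes fits on the current thread's
// stack; otherwise throws a StackOverflowError on |self| and returns false.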
59 bool CheckStackOverflow(Thread* self, size_t frame_size)
60 REQUIRES_SHARED(Locks::mutator_lock_) {
61 bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
62 uint8_t* stack_end = self->GetStackEndForInterpreter(implicit_check);
63 if (UNLIKELY(__builtin_frame_address(0) < stack_end + frame_size)) {
64 ThrowStackOverflowError(self);
65 return false;
66 }
67 return true;
68 }
69
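// Returns whether an interpreter-to-interpreter call to |method| may skip the quick
// entrypoint trampoline. This is only done when the method still points at the
// quick-to-interpreter bridge and none of the special cases checked below (access checks,
// native/proxy/intrinsic methods, String.<init>, a not yet visibly initialized declaring
// class for static methods, or a saved JIT entrypoint) apply.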
70 bool UseFastInterpreterToInterpreterInvoke(ArtMethod* method) {
71 Runtime* runtime = Runtime::Current();
72 const void* quick_code = method->GetEntryPointFromQuickCompiledCode();
73 if (!runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
74 return false;
75 }
76 if (!method->SkipAccessChecks() || method->IsNative() || method->IsProxyMethod()) {
77 return false;
78 }
79 if (method->IsIntrinsic()) {
80 return false;
81 }
82 if (method->GetDeclaringClass()->IsStringClass() && method->IsConstructor()) {
83 return false;
84 }
85 if (method->IsStatic() && !method->GetDeclaringClass()->IsVisiblyInitialized()) {
86 return false;
87 }
88 ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
89 if ((profiling_info != nullptr) && (profiling_info->GetSavedEntryPoint() != nullptr)) {
90 return false;
91 }
92 return true;
93 }
94
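// Reports method-exit and watched-frame-pop instrumentation events for |frame|, repeating
// while listeners request another forced pop. Returns false if a listener threw an exception.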
95 template <typename T>
96 bool SendMethodExitEvents(Thread* self,
97 const instrumentation::Instrumentation* instrumentation,
98 ShadowFrame& frame,
99 ObjPtr<mirror::Object> thiz,
100 ArtMethod* method,
101 uint32_t dex_pc,
102 T& result) {
103 bool had_event = false;
104 // We can get additional ForcePopFrame requests during handling of these events. We should
105 // respect these and send additional instrumentation events.
106 StackHandleScope<1> hs(self);
107 Handle<mirror::Object> h_thiz(hs.NewHandle(thiz));
108 do {
109 frame.SetForcePopFrame(false);
110 if (UNLIKELY(instrumentation->HasMethodExitListeners() && !frame.GetSkipMethodExitEvents())) {
111 had_event = true;
112 instrumentation->MethodExitEvent(
113 self, h_thiz.Get(), method, dex_pc, instrumentation::OptionalFrame{ frame }, result);
114 }
115 // We don't send method-exit if it's a pop-frame. We still send frame_popped though.
116 if (UNLIKELY(frame.NeedsNotifyPop() && instrumentation->HasWatchedFramePopListeners())) {
117 had_event = true;
118 instrumentation->WatchedFramePopped(self, frame);
119 }
120 } while (UNLIKELY(frame.GetForcePopFrame()));
121 if (UNLIKELY(had_event)) {
122 return !self->IsExceptionPending();
123 } else {
124 return true;
125 }
126 }
127
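// Explicit instantiations of SendMethodExitEvents for the two result representations used by
// the interpreter: a MutableHandle<mirror::Object> and a raw JValue.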
128 template
129 bool SendMethodExitEvents(Thread* self,
130 const instrumentation::Instrumentation* instrumentation,
131 ShadowFrame& frame,
132 ObjPtr<mirror::Object> thiz,
133 ArtMethod* method,
134 uint32_t dex_pc,
135 MutableHandle<mirror::Object>& result);
136
137 template
138 bool SendMethodExitEvents(Thread* self,
139 const instrumentation::Instrumentation* instrumentation,
140 ShadowFrame& frame,
141 ObjPtr<mirror::Object> thiz,
142 ArtMethod* method,
143 uint32_t dex_pc,
144 JValue& result);
145
146 // We execute any instrumentation events that are triggered by this exception and change the
147 // shadow_frame's dex_pc to that of the exception handler if there is one in the current method.
148 // Return true if we should continue executing in the current method and false if we need to go up
149 // the stack to find an exception handler.
150 // We accept a null Instrumentation* meaning we must not report anything to the instrumentation.
151 // TODO We should have a better way to skip instrumentation reporting or possibly rethink that
152 // behavior.
153 bool MoveToExceptionHandler(Thread* self,
154 ShadowFrame& shadow_frame,
155 const instrumentation::Instrumentation* instrumentation) {
156 self->VerifyStack();
157 StackHandleScope<2> hs(self);
158 Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
159 if (instrumentation != nullptr &&
160 instrumentation->HasExceptionThrownListeners() &&
161 self->IsExceptionThrownByCurrentMethod(exception.Get())) {
162 // See b/65049545 for why we don't need to check to see if the exception has changed.
163 instrumentation->ExceptionThrownEvent(self, exception.Get());
164 if (shadow_frame.GetForcePopFrame()) {
165 // We will check in the caller for GetForcePopFrame again. We need to bail out early to
166 // prevent an ExceptionHandledEvent from also being sent before popping.
167 return true;
168 }
169 }
170 bool clear_exception = false;
171 uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
172 hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
173 if (found_dex_pc == dex::kDexNoIndex) {
174 if (instrumentation != nullptr) {
175 if (shadow_frame.NeedsNotifyPop()) {
176 instrumentation->WatchedFramePopped(self, shadow_frame);
177 if (shadow_frame.GetForcePopFrame()) {
178 // We will check in the caller for GetForcePopFrame again. We need to bail out early to
179 // prevent an ExceptionHandledEvent from also being sent before popping and to ensure we
180 // handle other types of non-standard-exits.
181 return true;
182 }
183 }
184 // Exception is not caught by the current method. We will unwind to the
185 // caller. Notify any instrumentation listener.
186 instrumentation->MethodUnwindEvent(self,
187 shadow_frame.GetThisObject(),
188 shadow_frame.GetMethod(),
189 shadow_frame.GetDexPC());
190 }
191 return shadow_frame.GetForcePopFrame();
192 } else {
193 shadow_frame.SetDexPC(found_dex_pc);
194 if (instrumentation != nullptr && instrumentation->HasExceptionHandledListeners()) {
195 self->ClearException();
196 instrumentation->ExceptionHandledEvent(self, exception.Get());
197 if (UNLIKELY(self->IsExceptionPending())) {
198 // Exception handled event threw an exception. Try to find the handler for this one.
199 return MoveToExceptionHandler(self, shadow_frame, instrumentation);
200 } else if (!clear_exception) {
201 self->SetException(exception.Get());
202 }
203 } else if (clear_exception) {
204 self->ClearException();
205 }
206 return true;
207 }
208 }
209
210 void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame) {
211 LOG(FATAL) << "Unexpected instruction: "
212 << inst->DumpString(shadow_frame.GetMethod()->GetDexFile());
213 UNREACHABLE();
214 }
215
216 void AbortTransactionF(Thread* self, const char* fmt, ...) {
217 va_list args;
218 va_start(args, fmt);
219 AbortTransactionV(self, fmt, args);
220 va_end(args);
221 }
222
223 void AbortTransactionV(Thread* self, const char* fmt, va_list args) {
224 CHECK(Runtime::Current()->IsActiveTransaction());
225 // Constructs abort message.
226 std::string abort_msg;
227 android::base::StringAppendV(&abort_msg, fmt, args);
228 // Throws an exception so we can abort the transaction and rollback every change.
229 Runtime::Current()->AbortTransactionAndThrowAbortError(self, abort_msg);
230 }
231
232 // START DECLARATIONS :
233 //
234 // These additional declarations are required because clang complains
235 // about ALWAYS_INLINE (-Werror, -Wgcc-compat) in definitions.
236 //
237
238 template <bool is_range, bool do_assignability_check>
239 static ALWAYS_INLINE bool DoCallCommon(ArtMethod* called_method,
240 Thread* self,
241 ShadowFrame& shadow_frame,
242 JValue* result,
243 uint16_t number_of_inputs,
244 uint32_t (&arg)[Instruction::kMaxVarArgRegs],
245 uint32_t vregC) REQUIRES_SHARED(Locks::mutator_lock_);
246
247 template <bool is_range>
248 ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
249 ShadowFrame* callee_frame,
250 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
251 const size_t first_src_reg,
252 const size_t first_dest_reg,
253 const size_t num_regs) REQUIRES_SHARED(Locks::mutator_lock_);
254
255 // END DECLARATIONS.
256
257 void ArtInterpreterToCompiledCodeBridge(Thread* self,
258 ArtMethod* caller,
259 ShadowFrame* shadow_frame,
260 uint16_t arg_offset,
261 JValue* result)
262 REQUIRES_SHARED(Locks::mutator_lock_) {
263 ArtMethod* method = shadow_frame->GetMethod();
264 // Ensure static methods are initialized.
265 if (method->IsStatic()) {
266 ObjPtr<mirror::Class> declaringClass = method->GetDeclaringClass();
267 if (UNLIKELY(!declaringClass->IsVisiblyInitialized())) {
268 self->PushShadowFrame(shadow_frame);
269 StackHandleScope<1> hs(self);
270 Handle<mirror::Class> h_class(hs.NewHandle(declaringClass));
271 if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
272 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
273 self->PopShadowFrame();
274 DCHECK(self->IsExceptionPending());
275 return;
276 }
277 self->PopShadowFrame();
278 DCHECK(h_class->IsInitializing());
279 // Reload from shadow frame in case the method moved, this is faster than adding a handle.
280 method = shadow_frame->GetMethod();
281 }
282 }
283 // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
284 // check that the arg_offset isn't greater than the number of registers. A stronger check is
285 // difficult since the frame may contain space for all the registers in the method, or only enough
286 // space for the arguments.
287 if (kIsDebugBuild) {
288 if (method->GetCodeItem() == nullptr) {
289 DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
290 } else {
291 DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
292 }
293 }
294 jit::Jit* jit = Runtime::Current()->GetJit();
295 if (jit != nullptr && caller != nullptr) {
296 jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
297 }
298 method->Invoke(self, shadow_frame->GetVRegArgs(arg_offset),
299 (shadow_frame->NumberOfVRegs() - arg_offset) * sizeof(uint32_t),
300 result, method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty());
301 }
302
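// After a String.<init> call has been rewritten into a StringFactory call, store the resulting
// String into every vreg of |shadow_frame| that still aliases the original "this" reference.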
303 void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
304 uint16_t this_obj_vreg,
305 JValue result)
306 REQUIRES_SHARED(Locks::mutator_lock_) {
307 ObjPtr<mirror::Object> existing = shadow_frame->GetVRegReference(this_obj_vreg);
308 if (existing == nullptr) {
309 // If it's null, we come from compiled code that was deoptimized. Nothing to do,
310 // as the compiler verified there was no alias.
311 // Set the new string result of the StringFactory.
312 shadow_frame->SetVRegReference(this_obj_vreg, result.GetL());
313 return;
314 }
315 // Set the string init result into all aliases.
316 for (uint32_t i = 0, e = shadow_frame->NumberOfVRegs(); i < e; ++i) {
317 if (shadow_frame->GetVRegReference(i) == existing) {
318 DCHECK_EQ(shadow_frame->GetVRegReference(i),
319 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
320 shadow_frame->SetVRegReference(i, result.GetL());
321 DCHECK_EQ(shadow_frame->GetVRegReference(i),
322 reinterpret_cast32<mirror::Object*>(shadow_frame->GetVReg(i)));
323 }
324 }
325 }
326
327 template<bool is_range>
328 static bool DoMethodHandleInvokeCommon(Thread* self,
329 ShadowFrame& shadow_frame,
330 bool invoke_exact,
331 const Instruction* inst,
332 uint16_t inst_data,
333 JValue* result)
334 REQUIRES_SHARED(Locks::mutator_lock_) {
335 // Make sure to check for async exceptions
336 if (UNLIKELY(self->ObserveAsyncException())) {
337 return false;
338 }
339   // Invoke-polymorphic instructions always take a receiver, i.e. they are never static.
340 const uint32_t vRegC = (is_range) ? inst->VRegC_4rcc() : inst->VRegC_45cc();
341 const int invoke_method_idx = (is_range) ? inst->VRegB_4rcc() : inst->VRegB_45cc();
342
343 // Initialize |result| to 0 as this is the default return value for
344 // polymorphic invocations of method handle types with void return
345 // and provides a sensible return result in error cases.
346 result->SetJ(0);
347
348 // The invoke_method_idx here is the name of the signature polymorphic method that
349 // was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
350 // and not the method that we'll dispatch to in the end.
351 StackHandleScope<2> hs(self);
352 Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
353 ObjPtr<mirror::MethodHandle>::DownCast(shadow_frame.GetVRegReference(vRegC))));
354 if (UNLIKELY(method_handle == nullptr)) {
355 // Note that the invoke type is kVirtual here because a call to a signature
356 // polymorphic method is shaped like a virtual call at the bytecode level.
357 ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
358 return false;
359 }
360
361 // The vRegH value gives the index of the proto_id associated with this
362 // signature polymorphic call site.
363 const uint16_t vRegH = (is_range) ? inst->VRegH_4rcc() : inst->VRegH_45cc();
364 const dex::ProtoIndex callsite_proto_id(vRegH);
365
366 // Call through to the classlinker and ask it to resolve the static type associated
367 // with the callsite. This information is stored in the dex cache so it's
368 // guaranteed to be fast after the first resolution.
369 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
370 Handle<mirror::MethodType> callsite_type(hs.NewHandle(
371 class_linker->ResolveMethodType(self, callsite_proto_id, shadow_frame.GetMethod())));
372
373 // This implies we couldn't resolve one or more types in this method handle.
374 if (UNLIKELY(callsite_type == nullptr)) {
375 CHECK(self->IsExceptionPending());
376 return false;
377 }
378
379 // There is a common dispatch method for method handles that takes
380 // arguments either from a range or an array of arguments depending
381 // on whether the DEX instruction is invoke-polymorphic/range or
382 // invoke-polymorphic. The array here is for the latter.
383 if (UNLIKELY(is_range)) {
384 // VRegC is the register holding the method handle. Arguments passed
385 // to the method handle's target do not include the method handle.
386 RangeInstructionOperands operands(inst->VRegC_4rcc() + 1, inst->VRegA_4rcc() - 1);
387 if (invoke_exact) {
388 return MethodHandleInvokeExact(self,
389 shadow_frame,
390 method_handle,
391 callsite_type,
392 &operands,
393 result);
394 } else {
395 return MethodHandleInvoke(self,
396 shadow_frame,
397 method_handle,
398 callsite_type,
399 &operands,
400 result);
401 }
402 } else {
403 // Get the register arguments for the invoke.
404 uint32_t args[Instruction::kMaxVarArgRegs] = {};
405 inst->GetVarArgs(args, inst_data);
406 // Drop the first register which is the method handle performing the invoke.
407 memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
408 args[Instruction::kMaxVarArgRegs - 1] = 0;
409 VarArgsInstructionOperands operands(args, inst->VRegA_45cc() - 1);
410 if (invoke_exact) {
411 return MethodHandleInvokeExact(self,
412 shadow_frame,
413 method_handle,
414 callsite_type,
415 &operands,
416 result);
417 } else {
418 return MethodHandleInvoke(self,
419 shadow_frame,
420 method_handle,
421 callsite_type,
422 &operands,
423 result);
424 }
425 }
426 }
427
428 bool DoMethodHandleInvokeExact(Thread* self,
429 ShadowFrame& shadow_frame,
430 const Instruction* inst,
431 uint16_t inst_data,
432 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
433 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
434 static const bool kIsRange = false;
435 return DoMethodHandleInvokeCommon<kIsRange>(
436 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
437 } else {
438 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
439 static const bool kIsRange = true;
440 return DoMethodHandleInvokeCommon<kIsRange>(
441 self, shadow_frame, /* invoke_exact= */ true, inst, inst_data, result);
442 }
443 }
444
445 bool DoMethodHandleInvoke(Thread* self,
446 ShadowFrame& shadow_frame,
447 const Instruction* inst,
448 uint16_t inst_data,
449 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
450 if (inst->Opcode() == Instruction::INVOKE_POLYMORPHIC) {
451 static const bool kIsRange = false;
452 return DoMethodHandleInvokeCommon<kIsRange>(
453 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
454 } else {
455 DCHECK_EQ(inst->Opcode(), Instruction::INVOKE_POLYMORPHIC_RANGE);
456 static const bool kIsRange = true;
457 return DoMethodHandleInvokeCommon<kIsRange>(
458 self, shadow_frame, /* invoke_exact= */ false, inst, inst_data, result);
459 }
460 }
461
462 static bool DoVarHandleInvokeCommon(Thread* self,
463 ShadowFrame& shadow_frame,
464 const Instruction* inst,
465 uint16_t inst_data,
466 JValue* result,
467 mirror::VarHandle::AccessMode access_mode)
468 REQUIRES_SHARED(Locks::mutator_lock_) {
469 // Make sure to check for async exceptions
470 if (UNLIKELY(self->ObserveAsyncException())) {
471 return false;
472 }
473
474 StackHandleScope<2> hs(self);
475 bool is_var_args = inst->HasVarArgs();
476 const uint16_t vRegH = is_var_args ? inst->VRegH_45cc() : inst->VRegH_4rcc();
477 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
478 Handle<mirror::MethodType> callsite_type(hs.NewHandle(
479 class_linker->ResolveMethodType(self, dex::ProtoIndex(vRegH), shadow_frame.GetMethod())));
480 // This implies we couldn't resolve one or more types in this VarHandle.
481 if (UNLIKELY(callsite_type == nullptr)) {
482 CHECK(self->IsExceptionPending());
483 return false;
484 }
485
486 const uint32_t vRegC = is_var_args ? inst->VRegC_45cc() : inst->VRegC_4rcc();
487 ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(vRegC));
488 Handle<mirror::VarHandle> var_handle(hs.NewHandle(ObjPtr<mirror::VarHandle>::DownCast(receiver)));
489 if (is_var_args) {
490 uint32_t args[Instruction::kMaxVarArgRegs];
491 inst->GetVarArgs(args, inst_data);
492 VarArgsInstructionOperands all_operands(args, inst->VRegA_45cc());
493 NoReceiverInstructionOperands operands(&all_operands);
494 return VarHandleInvokeAccessor(self,
495 shadow_frame,
496 var_handle,
497 callsite_type,
498 access_mode,
499 &operands,
500 result);
501 } else {
502 RangeInstructionOperands all_operands(inst->VRegC_4rcc(), inst->VRegA_4rcc());
503 NoReceiverInstructionOperands operands(&all_operands);
504 return VarHandleInvokeAccessor(self,
505 shadow_frame,
506 var_handle,
507 callsite_type,
508 access_mode,
509 &operands,
510 result);
511 }
512 }
513
514 #define DO_VAR_HANDLE_ACCESSOR(_access_mode) \
515 bool DoVarHandle ## _access_mode(Thread* self, \
516 ShadowFrame& shadow_frame, \
517 const Instruction* inst, \
518 uint16_t inst_data, \
519 JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) { \
520 const auto access_mode = mirror::VarHandle::AccessMode::k ## _access_mode; \
521 return DoVarHandleInvokeCommon(self, shadow_frame, inst, inst_data, result, access_mode); \
522 }
523
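// Each use below expands to a DoVarHandle<AccessMode> entry point, e.g.
// DO_VAR_HANDLE_ACCESSOR(Get) defines DoVarHandleGet(), which forwards to
// DoVarHandleInvokeCommon with mirror::VarHandle::AccessMode::kGet.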
524 DO_VAR_HANDLE_ACCESSOR(CompareAndExchange)
525 DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeAcquire)
526 DO_VAR_HANDLE_ACCESSOR(CompareAndExchangeRelease)
527 DO_VAR_HANDLE_ACCESSOR(CompareAndSet)
528 DO_VAR_HANDLE_ACCESSOR(Get)
529 DO_VAR_HANDLE_ACCESSOR(GetAcquire)
530 DO_VAR_HANDLE_ACCESSOR(GetAndAdd)
531 DO_VAR_HANDLE_ACCESSOR(GetAndAddAcquire)
532 DO_VAR_HANDLE_ACCESSOR(GetAndAddRelease)
533 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAnd)
534 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndAcquire)
535 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseAndRelease)
536 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOr)
537 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrAcquire)
538 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseOrRelease)
539 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXor)
540 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorAcquire)
541 DO_VAR_HANDLE_ACCESSOR(GetAndBitwiseXorRelease)
542 DO_VAR_HANDLE_ACCESSOR(GetAndSet)
543 DO_VAR_HANDLE_ACCESSOR(GetAndSetAcquire)
544 DO_VAR_HANDLE_ACCESSOR(GetAndSetRelease)
545 DO_VAR_HANDLE_ACCESSOR(GetOpaque)
546 DO_VAR_HANDLE_ACCESSOR(GetVolatile)
547 DO_VAR_HANDLE_ACCESSOR(Set)
548 DO_VAR_HANDLE_ACCESSOR(SetOpaque)
549 DO_VAR_HANDLE_ACCESSOR(SetRelease)
550 DO_VAR_HANDLE_ACCESSOR(SetVolatile)
551 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSet)
552 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetAcquire)
553 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetPlain)
554 DO_VAR_HANDLE_ACCESSOR(WeakCompareAndSetRelease)
555
556 #undef DO_VAR_HANDLE_ACCESSOR
557
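// Dispatches an invoke-polymorphic instruction to the signature-polymorphic intrinsic
// (MethodHandle.invoke/invokeExact or one of the VarHandle accessors) matching the resolved
// method's intrinsic id.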
558 template<bool is_range>
559 bool DoInvokePolymorphic(Thread* self,
560 ShadowFrame& shadow_frame,
561 const Instruction* inst,
562 uint16_t inst_data,
563 JValue* result) {
564 const int invoke_method_idx = inst->VRegB();
565 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
566 ArtMethod* invoke_method =
567 class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
568 self, invoke_method_idx, shadow_frame.GetMethod(), kPolymorphic);
569
570 // Ensure intrinsic identifiers are initialized.
571 DCHECK(invoke_method->IsIntrinsic());
572
573 // Dispatch based on intrinsic identifier associated with method.
574 switch (static_cast<art::Intrinsics>(invoke_method->GetIntrinsic())) {
575 #define CASE_SIGNATURE_POLYMORPHIC_INTRINSIC(Name, ...) \
576 case Intrinsics::k##Name: \
577 return Do ## Name(self, shadow_frame, inst, inst_data, result);
578 #include "intrinsics_list.h"
579 SIGNATURE_POLYMORPHIC_INTRINSICS_LIST(CASE_SIGNATURE_POLYMORPHIC_INTRINSIC)
580 #undef INTRINSICS_LIST
581 #undef SIGNATURE_POLYMORPHIC_INTRINSICS_LIST
582 #undef CASE_SIGNATURE_POLYMORPHIC_INTRINSIC
583 default:
584 LOG(FATAL) << "Unreachable: " << invoke_method->GetIntrinsic();
585 UNREACHABLE();
586 return false;
587 }
588 }
589
590 static JValue ConvertScalarBootstrapArgument(jvalue value) {
591 // value either contains a primitive scalar value if it corresponds
592 // to a primitive type, or it contains an integer value if it
593 // corresponds to an object instance reference id (e.g. a string id).
594 return JValue::FromPrimitive(value.j);
595 }
596
597 static ObjPtr<mirror::Class> GetClassForBootstrapArgument(EncodedArrayValueIterator::ValueType type)
598 REQUIRES_SHARED(Locks::mutator_lock_) {
599 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
600 ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
601 switch (type) {
602 case EncodedArrayValueIterator::ValueType::kBoolean:
603 case EncodedArrayValueIterator::ValueType::kByte:
604 case EncodedArrayValueIterator::ValueType::kChar:
605 case EncodedArrayValueIterator::ValueType::kShort:
606 // These types are disallowed by JVMS. Treat as integers. This
607 // will result in CCE's being raised if the BSM has one of these
608 // types.
609 case EncodedArrayValueIterator::ValueType::kInt:
610 return GetClassRoot(ClassRoot::kPrimitiveInt, class_roots);
611 case EncodedArrayValueIterator::ValueType::kLong:
612 return GetClassRoot(ClassRoot::kPrimitiveLong, class_roots);
613 case EncodedArrayValueIterator::ValueType::kFloat:
614 return GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots);
615 case EncodedArrayValueIterator::ValueType::kDouble:
616 return GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots);
617 case EncodedArrayValueIterator::ValueType::kMethodType:
618 return GetClassRoot<mirror::MethodType>(class_roots);
619 case EncodedArrayValueIterator::ValueType::kMethodHandle:
620 return GetClassRoot<mirror::MethodHandle>(class_roots);
621 case EncodedArrayValueIterator::ValueType::kString:
622 return GetClassRoot<mirror::String>();
623 case EncodedArrayValueIterator::ValueType::kType:
624 return GetClassRoot<mirror::Class>();
625 case EncodedArrayValueIterator::ValueType::kField:
626 case EncodedArrayValueIterator::ValueType::kMethod:
627 case EncodedArrayValueIterator::ValueType::kEnum:
628 case EncodedArrayValueIterator::ValueType::kArray:
629 case EncodedArrayValueIterator::ValueType::kAnnotation:
630 case EncodedArrayValueIterator::ValueType::kNull:
631 return nullptr;
632 }
633 }
634
635 static bool GetArgumentForBootstrapMethod(Thread* self,
636 ArtMethod* referrer,
637 EncodedArrayValueIterator::ValueType type,
638 const JValue* encoded_value,
639 JValue* decoded_value)
640 REQUIRES_SHARED(Locks::mutator_lock_) {
641 // The encoded_value contains either a scalar value (IJDF) or a
642 // scalar DEX file index to a reference type to be materialized.
643 switch (type) {
644 case EncodedArrayValueIterator::ValueType::kInt:
645 case EncodedArrayValueIterator::ValueType::kFloat:
646 decoded_value->SetI(encoded_value->GetI());
647 return true;
648 case EncodedArrayValueIterator::ValueType::kLong:
649 case EncodedArrayValueIterator::ValueType::kDouble:
650 decoded_value->SetJ(encoded_value->GetJ());
651 return true;
652 case EncodedArrayValueIterator::ValueType::kMethodType: {
653 StackHandleScope<2> hs(self);
654 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
655 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
656 dex::ProtoIndex proto_idx(encoded_value->GetC());
657 ClassLinker* cl = Runtime::Current()->GetClassLinker();
658 ObjPtr<mirror::MethodType> o =
659 cl->ResolveMethodType(self, proto_idx, dex_cache, class_loader);
660 if (UNLIKELY(o.IsNull())) {
661 DCHECK(self->IsExceptionPending());
662 return false;
663 }
664 decoded_value->SetL(o);
665 return true;
666 }
667 case EncodedArrayValueIterator::ValueType::kMethodHandle: {
668 uint32_t index = static_cast<uint32_t>(encoded_value->GetI());
669 ClassLinker* cl = Runtime::Current()->GetClassLinker();
670 ObjPtr<mirror::MethodHandle> o = cl->ResolveMethodHandle(self, index, referrer);
671 if (UNLIKELY(o.IsNull())) {
672 DCHECK(self->IsExceptionPending());
673 return false;
674 }
675 decoded_value->SetL(o);
676 return true;
677 }
678 case EncodedArrayValueIterator::ValueType::kString: {
679 dex::StringIndex index(static_cast<uint32_t>(encoded_value->GetI()));
680 ClassLinker* cl = Runtime::Current()->GetClassLinker();
681 ObjPtr<mirror::String> o = cl->ResolveString(index, referrer);
682 if (UNLIKELY(o.IsNull())) {
683 DCHECK(self->IsExceptionPending());
684 return false;
685 }
686 decoded_value->SetL(o);
687 return true;
688 }
689 case EncodedArrayValueIterator::ValueType::kType: {
690 dex::TypeIndex index(static_cast<uint32_t>(encoded_value->GetI()));
691 ClassLinker* cl = Runtime::Current()->GetClassLinker();
692 ObjPtr<mirror::Class> o = cl->ResolveType(index, referrer);
693 if (UNLIKELY(o.IsNull())) {
694 DCHECK(self->IsExceptionPending());
695 return false;
696 }
697 decoded_value->SetL(o);
698 return true;
699 }
700 case EncodedArrayValueIterator::ValueType::kBoolean:
701 case EncodedArrayValueIterator::ValueType::kByte:
702 case EncodedArrayValueIterator::ValueType::kChar:
703 case EncodedArrayValueIterator::ValueType::kShort:
704 case EncodedArrayValueIterator::ValueType::kField:
705 case EncodedArrayValueIterator::ValueType::kMethod:
706 case EncodedArrayValueIterator::ValueType::kEnum:
707 case EncodedArrayValueIterator::ValueType::kArray:
708 case EncodedArrayValueIterator::ValueType::kAnnotation:
709 case EncodedArrayValueIterator::ValueType::kNull:
710 // Unreachable - unsupported types that have been checked when
711       // determining the effective call site type based on the bootstrap
712 // argument types.
713 UNREACHABLE();
714 }
715 }
716
717 static bool PackArgumentForBootstrapMethod(Thread* self,
718 ArtMethod* referrer,
719 CallSiteArrayValueIterator* it,
720 ShadowFrameSetter* setter)
721 REQUIRES_SHARED(Locks::mutator_lock_) {
722 auto type = it->GetValueType();
723 const JValue encoded_value = ConvertScalarBootstrapArgument(it->GetJavaValue());
724 JValue decoded_value;
725 if (!GetArgumentForBootstrapMethod(self, referrer, type, &encoded_value, &decoded_value)) {
726 return false;
727 }
728 switch (it->GetValueType()) {
729 case EncodedArrayValueIterator::ValueType::kInt:
730 case EncodedArrayValueIterator::ValueType::kFloat:
731 setter->Set(static_cast<uint32_t>(decoded_value.GetI()));
732 return true;
733 case EncodedArrayValueIterator::ValueType::kLong:
734 case EncodedArrayValueIterator::ValueType::kDouble:
735 setter->SetLong(decoded_value.GetJ());
736 return true;
737 case EncodedArrayValueIterator::ValueType::kMethodType:
738 case EncodedArrayValueIterator::ValueType::kMethodHandle:
739 case EncodedArrayValueIterator::ValueType::kString:
740 case EncodedArrayValueIterator::ValueType::kType:
741 setter->SetReference(decoded_value.GetL());
742 return true;
743 case EncodedArrayValueIterator::ValueType::kBoolean:
744 case EncodedArrayValueIterator::ValueType::kByte:
745 case EncodedArrayValueIterator::ValueType::kChar:
746 case EncodedArrayValueIterator::ValueType::kShort:
747 case EncodedArrayValueIterator::ValueType::kField:
748 case EncodedArrayValueIterator::ValueType::kMethod:
749 case EncodedArrayValueIterator::ValueType::kEnum:
750 case EncodedArrayValueIterator::ValueType::kArray:
751 case EncodedArrayValueIterator::ValueType::kAnnotation:
752 case EncodedArrayValueIterator::ValueType::kNull:
753 // Unreachable - unsupported types that have been checked when
754       // determining the effective call site type based on the bootstrap
755 // argument types.
756 UNREACHABLE();
757 }
758 }
759
760 static bool PackCollectorArrayForBootstrapMethod(Thread* self,
761 ArtMethod* referrer,
762 ObjPtr<mirror::Class> array_type,
763 int32_t array_length,
764 CallSiteArrayValueIterator* it,
765 ShadowFrameSetter* setter)
766 REQUIRES_SHARED(Locks::mutator_lock_) {
767 StackHandleScope<1> hs(self);
768 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
769 JValue decoded_value;
770
771 #define COLLECT_PRIMITIVE_ARRAY(Descriptor, Type) \
772 Handle<mirror::Type ## Array> array = \
773 hs.NewHandle(mirror::Type ## Array::Alloc(self, array_length)); \
774 if (array.IsNull()) { \
775 return false; \
776 } \
777 for (int32_t i = 0; it->HasNext(); it->Next(), ++i) { \
778 auto type = it->GetValueType(); \
779 DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
780 const JValue encoded_value = \
781 ConvertScalarBootstrapArgument(it->GetJavaValue()); \
782 GetArgumentForBootstrapMethod(self, \
783 referrer, \
784 type, \
785 &encoded_value, \
786 &decoded_value); \
787 array->Set(i, decoded_value.Get ## Descriptor()); \
788 } \
789 setter->SetReference(array.Get()); \
790 return true;
791
792 #define COLLECT_REFERENCE_ARRAY(T, Type) \
793 Handle<mirror::ObjectArray<T>> array = /* NOLINT */ \
794 hs.NewHandle(mirror::ObjectArray<T>::Alloc(self, \
795 array_type, \
796 array_length)); \
797 if (array.IsNull()) { \
798 return false; \
799 } \
800 for (int32_t i = 0; it->HasNext(); it->Next(), ++i) { \
801 auto type = it->GetValueType(); \
802 DCHECK_EQ(type, EncodedArrayValueIterator::ValueType::k ## Type); \
803 const JValue encoded_value = \
804 ConvertScalarBootstrapArgument(it->GetJavaValue()); \
805 if (!GetArgumentForBootstrapMethod(self, \
806 referrer, \
807 type, \
808 &encoded_value, \
809 &decoded_value)) { \
810 return false; \
811 } \
812 ObjPtr<mirror::Object> o = decoded_value.GetL(); \
813 if (Runtime::Current()->IsActiveTransaction()) { \
814 array->Set<true>(i, ObjPtr<T>::DownCast(o)); \
815 } else { \
816 array->Set<false>(i, ObjPtr<T>::DownCast(o)); \
817 } \
818 } \
819 setter->SetReference(array.Get()); \
820 return true;
821
822 ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots = class_linker->GetClassRoots();
823 ObjPtr<mirror::Class> component_type = array_type->GetComponentType();
824 if (component_type == GetClassRoot(ClassRoot::kPrimitiveInt, class_roots)) {
825 COLLECT_PRIMITIVE_ARRAY(I, Int);
826 } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveLong, class_roots)) {
827 COLLECT_PRIMITIVE_ARRAY(J, Long);
828 } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveFloat, class_roots)) {
829 COLLECT_PRIMITIVE_ARRAY(F, Float);
830 } else if (component_type == GetClassRoot(ClassRoot::kPrimitiveDouble, class_roots)) {
831 COLLECT_PRIMITIVE_ARRAY(D, Double);
832 } else if (component_type == GetClassRoot<mirror::MethodType>()) {
833 COLLECT_REFERENCE_ARRAY(mirror::MethodType, MethodType);
834 } else if (component_type == GetClassRoot<mirror::MethodHandle>()) {
835 COLLECT_REFERENCE_ARRAY(mirror::MethodHandle, MethodHandle);
836 } else if (component_type == GetClassRoot<mirror::String>(class_roots)) {
837 COLLECT_REFERENCE_ARRAY(mirror::String, String);
838 } else if (component_type == GetClassRoot<mirror::Class>()) {
839 COLLECT_REFERENCE_ARRAY(mirror::Class, Type);
840 } else {
841 UNREACHABLE();
842 }
843 #undef COLLECT_PRIMITIVE_ARRAY
844 #undef COLLECT_REFERENCE_ARRAY
845 }
846
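// Builds the MethodType describing the bootstrap method call site: the return type is
// j.l.i.CallSite and the parameter types are a MethodHandles.Lookup followed by the types
// of the static bootstrap arguments encoded in the DEX file.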
847 static ObjPtr<mirror::MethodType> BuildCallSiteForBootstrapMethod(Thread* self,
848 const DexFile* dex_file,
849 uint32_t call_site_idx)
850 REQUIRES_SHARED(Locks::mutator_lock_) {
851 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
852 CallSiteArrayValueIterator it(*dex_file, csi);
853 DCHECK_GE(it.Size(), 1u);
854
855 StackHandleScope<2> hs(self);
856 // Create array for parameter types.
857 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
858 ObjPtr<mirror::Class> class_array_type =
859 GetClassRoot<mirror::ObjectArray<mirror::Class>>(class_linker);
860 Handle<mirror::ObjectArray<mirror::Class>> ptypes = hs.NewHandle(
861 mirror::ObjectArray<mirror::Class>::Alloc(self,
862 class_array_type,
863 static_cast<int>(it.Size())));
864 if (ptypes.IsNull()) {
865 DCHECK(self->IsExceptionPending());
866 return nullptr;
867 }
868
869 // Populate the first argument with an instance of j.l.i.MethodHandles.Lookup
870 // that the runtime will construct.
871 ptypes->Set(0, GetClassRoot<mirror::MethodHandlesLookup>(class_linker));
872 it.Next();
873
874 // The remaining parameter types are derived from the types of
875 // arguments present in the DEX file.
876 int index = 1;
877 while (it.HasNext()) {
878 ObjPtr<mirror::Class> ptype = GetClassForBootstrapArgument(it.GetValueType());
879 if (ptype.IsNull()) {
880 ThrowClassCastException("Unsupported bootstrap argument type");
881 return nullptr;
882 }
883 ptypes->Set(index, ptype);
884 index++;
885 it.Next();
886 }
887 DCHECK_EQ(static_cast<size_t>(index), it.Size());
888
889 // By definition, the return type is always a j.l.i.CallSite.
890 Handle<mirror::Class> rtype = hs.NewHandle(GetClassRoot<mirror::CallSite>());
891 return mirror::MethodType::Create(self, rtype, ptypes);
892 }
893
894 static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
895 ShadowFrame& shadow_frame,
896 uint32_t call_site_idx)
897 REQUIRES_SHARED(Locks::mutator_lock_) {
898 StackHandleScope<5> hs(self);
899 // There are three mandatory arguments expected from the call site
900 // value array in the DEX file: the bootstrap method handle, the
901 // method name to pass to the bootstrap method, and the method type
902 // to pass to the bootstrap method.
903 static constexpr size_t kMandatoryArgumentsCount = 3;
904 ArtMethod* referrer = shadow_frame.GetMethod();
905 const DexFile* dex_file = referrer->GetDexFile();
906 const dex::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
907 CallSiteArrayValueIterator it(*dex_file, csi);
908 if (it.Size() < kMandatoryArgumentsCount) {
909 ThrowBootstrapMethodError("Truncated bootstrap arguments (%zu < %zu)",
910 it.Size(), kMandatoryArgumentsCount);
911 return nullptr;
912 }
913
914 if (it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
915 ThrowBootstrapMethodError("First bootstrap argument is not a method handle");
916 return nullptr;
917 }
918
919 uint32_t bsm_index = static_cast<uint32_t>(it.GetJavaValue().i);
920 it.Next();
921
922 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
923 Handle<mirror::MethodHandle> bsm =
924 hs.NewHandle(class_linker->ResolveMethodHandle(self, bsm_index, referrer));
925 if (bsm.IsNull()) {
926 DCHECK(self->IsExceptionPending());
927 return nullptr;
928 }
929
930 if (bsm->GetHandleKind() != mirror::MethodHandle::Kind::kInvokeStatic) {
931 // JLS suggests also accepting constructors. This is currently
932 // hard as constructor invocations happen via transformers in ART
933 // today. The constructor would need to be a class derived from java.lang.invoke.CallSite.
934 ThrowBootstrapMethodError("Unsupported bootstrap method invocation kind");
935 return nullptr;
936 }
937
938 // Construct the local call site type information based on the 3
939 // mandatory arguments provided by the runtime and the static arguments
940 // in the DEX file. We will use these arguments to build a shadow frame.
941 MutableHandle<mirror::MethodType> call_site_type =
942 hs.NewHandle(BuildCallSiteForBootstrapMethod(self, dex_file, call_site_idx));
943 if (call_site_type.IsNull()) {
944 DCHECK(self->IsExceptionPending());
945 return nullptr;
946 }
947
948 // Check if this BSM is targeting a variable arity method. If so,
949 // we'll need to collect the trailing arguments into an array.
950 Handle<mirror::Array> collector_arguments;
951 int32_t collector_arguments_length;
952 if (bsm->GetTargetMethod()->IsVarargs()) {
953 int number_of_bsm_parameters = bsm->GetMethodType()->GetNumberOfPTypes();
954 if (number_of_bsm_parameters == 0) {
955 ThrowBootstrapMethodError("Variable arity BSM does not have any arguments");
956 return nullptr;
957 }
958 Handle<mirror::Class> collector_array_class =
959 hs.NewHandle(bsm->GetMethodType()->GetPTypes()->Get(number_of_bsm_parameters - 1));
960 if (!collector_array_class->IsArrayClass()) {
961 ThrowBootstrapMethodError("Variable arity BSM does not have array as final argument");
962 return nullptr;
963 }
964 // The call site may include no arguments to be collected. In this
965 // case the number of arguments must be at least the number of BSM
966 // parameters less the collector array.
967 if (call_site_type->GetNumberOfPTypes() < number_of_bsm_parameters - 1) {
968 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
969 return nullptr;
970 }
971 // Check all the arguments to be collected match the collector array component type.
972 for (int i = number_of_bsm_parameters - 1; i < call_site_type->GetNumberOfPTypes(); ++i) {
973 if (call_site_type->GetPTypes()->Get(i) != collector_array_class->GetComponentType()) {
974 ThrowClassCastException(collector_array_class->GetComponentType(),
975 call_site_type->GetPTypes()->Get(i));
976 return nullptr;
977 }
978 }
979 // Update the call site method type so it now includes the collector array.
980 int32_t collector_arguments_start = number_of_bsm_parameters - 1;
981 collector_arguments_length = call_site_type->GetNumberOfPTypes() - number_of_bsm_parameters + 1;
982 call_site_type.Assign(
983 mirror::MethodType::CollectTrailingArguments(self,
984 call_site_type.Get(),
985 collector_array_class.Get(),
986 collector_arguments_start));
987 if (call_site_type.IsNull()) {
988 DCHECK(self->IsExceptionPending());
989 return nullptr;
990 }
991 } else {
992 collector_arguments_length = 0;
993 }
994
995 if (call_site_type->GetNumberOfPTypes() != bsm->GetMethodType()->GetNumberOfPTypes()) {
996 ThrowWrongMethodTypeException(bsm->GetMethodType(), call_site_type.Get());
997 return nullptr;
998 }
999
1000   // BSM invocation has a different set of exceptions than
1001 // j.l.i.MethodHandle.invoke(). Scan arguments looking for CCE
1002 // "opportunities". Unfortunately we cannot just leave this to the
1003 // method handle invocation as this might generate a WMTE.
1004 for (int32_t i = 0; i < call_site_type->GetNumberOfPTypes(); ++i) {
1005 ObjPtr<mirror::Class> from = call_site_type->GetPTypes()->Get(i);
1006 ObjPtr<mirror::Class> to = bsm->GetMethodType()->GetPTypes()->Get(i);
1007 if (!IsParameterTypeConvertible(from, to)) {
1008 ThrowClassCastException(from, to);
1009 return nullptr;
1010 }
1011 }
1012 if (!IsReturnTypeConvertible(call_site_type->GetRType(), bsm->GetMethodType()->GetRType())) {
1013 ThrowClassCastException(bsm->GetMethodType()->GetRType(), call_site_type->GetRType());
1014 return nullptr;
1015 }
1016
1017 // Set-up a shadow frame for invoking the bootstrap method handle.
1018 ShadowFrameAllocaUniquePtr bootstrap_frame =
1019 CREATE_SHADOW_FRAME(call_site_type->NumberOfVRegs(),
1020 nullptr,
1021 referrer,
1022 shadow_frame.GetDexPC());
1023 ScopedStackedShadowFramePusher pusher(
1024 self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
1025 ShadowFrameSetter setter(bootstrap_frame.get(), 0u);
1026
1027 // The first parameter is a MethodHandles lookup instance.
1028 Handle<mirror::Class> lookup_class =
1029 hs.NewHandle(shadow_frame.GetMethod()->GetDeclaringClass());
1030 ObjPtr<mirror::MethodHandlesLookup> lookup =
1031 mirror::MethodHandlesLookup::Create(self, lookup_class);
1032 if (lookup.IsNull()) {
1033 DCHECK(self->IsExceptionPending());
1034 return nullptr;
1035 }
1036 setter.SetReference(lookup);
1037
1038 // Pack the remaining arguments into the frame.
1039 int number_of_arguments = call_site_type->GetNumberOfPTypes();
1040 int argument_index;
1041 for (argument_index = 1; argument_index < number_of_arguments; ++argument_index) {
1042 if (argument_index == number_of_arguments - 1 &&
1043 call_site_type->GetPTypes()->Get(argument_index)->IsArrayClass()) {
1044 ObjPtr<mirror::Class> array_type = call_site_type->GetPTypes()->Get(argument_index);
1045 if (!PackCollectorArrayForBootstrapMethod(self,
1046 referrer,
1047 array_type,
1048 collector_arguments_length,
1049 &it,
1050 &setter)) {
1051 DCHECK(self->IsExceptionPending());
1052 return nullptr;
1053 }
1054 } else if (!PackArgumentForBootstrapMethod(self, referrer, &it, &setter)) {
1055 DCHECK(self->IsExceptionPending());
1056 return nullptr;
1057 }
1058 it.Next();
1059 }
1060 DCHECK(!it.HasNext());
1061 DCHECK(setter.Done());
1062
1063 // Invoke the bootstrap method handle.
1064 JValue result;
1065 RangeInstructionOperands operands(0, bootstrap_frame->NumberOfVRegs());
1066 bool invoke_success = MethodHandleInvoke(self,
1067 *bootstrap_frame,
1068 bsm,
1069 call_site_type,
1070 &operands,
1071 &result);
1072 if (!invoke_success) {
1073 DCHECK(self->IsExceptionPending());
1074 return nullptr;
1075 }
1076
1077 Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
1078 if (UNLIKELY(object.IsNull())) {
1079 // This will typically be for LambdaMetafactory which is not supported.
1080 ThrowClassCastException("Bootstrap method returned null");
1081 return nullptr;
1082 }
1083
1084 // Check the result type is a subclass of j.l.i.CallSite.
1085 ObjPtr<mirror::Class> call_site_class = GetClassRoot<mirror::CallSite>(class_linker);
1086 if (UNLIKELY(!object->InstanceOf(call_site_class))) {
1087 ThrowClassCastException(object->GetClass(), call_site_class);
1088 return nullptr;
1089 }
1090
1091 // Check the call site target is not null as we're going to invoke it.
1092 ObjPtr<mirror::CallSite> call_site = ObjPtr<mirror::CallSite>::DownCast(result.GetL());
1093 ObjPtr<mirror::MethodHandle> target = call_site->GetTarget();
1094 if (UNLIKELY(target == nullptr)) {
1095 ThrowClassCastException("Bootstrap method returned a CallSite with a null target");
1096 return nullptr;
1097 }
1098 return call_site;
1099 }
1100
1101 namespace {
1102
1103 ObjPtr<mirror::CallSite> DoResolveCallSite(Thread* self,
1104 ShadowFrame& shadow_frame,
1105 uint32_t call_site_idx)
1106 REQUIRES_SHARED(Locks::mutator_lock_) {
1107 StackHandleScope<1> hs(self);
1108 Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
1109
1110 // Get the call site from the DexCache if present.
1111 ObjPtr<mirror::CallSite> call_site = dex_cache->GetResolvedCallSite(call_site_idx);
1112 if (LIKELY(call_site != nullptr)) {
1113 return call_site;
1114 }
1115
1116 // Invoke the bootstrap method to get a candidate call site.
1117 call_site = InvokeBootstrapMethod(self, shadow_frame, call_site_idx);
1118 if (UNLIKELY(call_site == nullptr)) {
1119 if (!self->GetException()->IsError()) {
1120 // Use a BootstrapMethodError if the exception is not an instance of java.lang.Error.
1121 ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
1122 call_site_idx);
1123 }
1124 return nullptr;
1125 }
1126
1127 // Attempt to place the candidate call site into the DexCache, return the winning call site.
1128 return dex_cache->SetResolvedCallSite(call_site_idx, call_site);
1129 }
1130
1131 } // namespace
1132
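// Implements invoke-custom: resolves (or reuses the cached) CallSite for |call_site_idx| and
// then performs an exact method handle invocation of the call site's target.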
1133 bool DoInvokeCustom(Thread* self,
1134 ShadowFrame& shadow_frame,
1135 uint32_t call_site_idx,
1136 const InstructionOperands* operands,
1137 JValue* result) {
1138 // Make sure to check for async exceptions
1139 if (UNLIKELY(self->ObserveAsyncException())) {
1140 return false;
1141 }
1142
1143 // invoke-custom is not supported in transactions. In transactions
1144 // there is a limited set of types supported. invoke-custom allows
1145 // running arbitrary code and instantiating arbitrary types.
1146 CHECK(!Runtime::Current()->IsActiveTransaction());
1147
1148 ObjPtr<mirror::CallSite> call_site = DoResolveCallSite(self, shadow_frame, call_site_idx);
1149 if (call_site.IsNull()) {
1150 DCHECK(self->IsExceptionPending());
1151 return false;
1152 }
1153
1154 StackHandleScope<2> hs(self);
1155 Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
1156 Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
1157 DCHECK_EQ(operands->GetNumberOfOperands(), target_method_type->NumberOfVRegs())
1158 << " call_site_idx" << call_site_idx;
1159 return MethodHandleInvokeExact(self,
1160 shadow_frame,
1161 target,
1162 target_method_type,
1163 operands,
1164 result);
1165 }
1166
1167 // Assign register 'src_reg' from shadow_frame to register 'dest_reg' into new_shadow_frame.
1168 static inline void AssignRegister(ShadowFrame* new_shadow_frame, const ShadowFrame& shadow_frame,
1169 size_t dest_reg, size_t src_reg)
1170 REQUIRES_SHARED(Locks::mutator_lock_) {
1171 // Uint required, so that sign extension does not make this wrong on 64b systems
1172 uint32_t src_value = shadow_frame.GetVReg(src_reg);
1173 ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference<kVerifyNone>(src_reg);
1174
1175   // If both register locations contain the same value, the register probably holds a reference.
1176 // Note: As an optimization, non-moving collectors leave a stale reference value
1177 // in the references array even after the original vreg was overwritten to a non-reference.
1178 if (src_value == reinterpret_cast32<uint32_t>(o.Ptr())) {
1179 new_shadow_frame->SetVRegReference(dest_reg, o);
1180 } else {
1181 new_shadow_frame->SetVReg(dest_reg, src_value);
1182 }
1183 }
1184
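// Copies the invoke arguments from |caller_frame| into |callee_frame| starting at
// |first_dest_reg|. For range invokes the sources are consecutive registers beginning at
// |first_src_reg|; otherwise they are the var-arg registers listed in |arg|.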
1185 template <bool is_range>
1186 inline void CopyRegisters(ShadowFrame& caller_frame,
1187 ShadowFrame* callee_frame,
1188 const uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1189 const size_t first_src_reg,
1190 const size_t first_dest_reg,
1191 const size_t num_regs) {
1192 if (is_range) {
1193 const size_t dest_reg_bound = first_dest_reg + num_regs;
1194 for (size_t src_reg = first_src_reg, dest_reg = first_dest_reg; dest_reg < dest_reg_bound;
1195 ++dest_reg, ++src_reg) {
1196 AssignRegister(callee_frame, caller_frame, dest_reg, src_reg);
1197 }
1198 } else {
1199 DCHECK_LE(num_regs, arraysize(arg));
1200
1201 for (size_t arg_index = 0; arg_index < num_regs; ++arg_index) {
1202 AssignRegister(callee_frame, caller_frame, first_dest_reg + arg_index, arg[arg_index]);
1203 }
1204 }
1205 }
1206
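// Shared implementation of the invoke-* handlers: rewrites String.<init> calls to the
// equivalent StringFactory call, builds the callee shadow frame, and copies the arguments
// into it (optionally performing assignability checks) before performing the call.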
1207 template <bool is_range,
1208 bool do_assignability_check>
1209 static inline bool DoCallCommon(ArtMethod* called_method,
1210 Thread* self,
1211 ShadowFrame& shadow_frame,
1212 JValue* result,
1213 uint16_t number_of_inputs,
1214 uint32_t (&arg)[Instruction::kMaxVarArgRegs],
1215 uint32_t vregC) {
1216 bool string_init = false;
1217 // Replace calls to String.<init> with equivalent StringFactory call.
1218 if (UNLIKELY(called_method->GetDeclaringClass()->IsStringClass()
1219 && called_method->IsConstructor())) {
1220 called_method = WellKnownClasses::StringInitToStringFactory(called_method);
1221 string_init = true;
1222 }
1223
1224 // Compute method information.
1225 CodeItemDataAccessor accessor(called_method->DexInstructionData());
1226 // Number of registers for the callee's call frame.
1227 uint16_t num_regs;
1228 // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
1229 // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
1230 // entrypoint to use once we start building the shadow frame.
1231
1232 // For unstarted runtimes, always use the interpreter entrypoint. This fixes the case where we are
1233 // doing cross compilation. Note that GetEntryPointFromQuickCompiledCode doesn't use the image
1234   // pointer size here and this may cause an overflow if it is called from the compiler. b/62402160
1235 const bool use_interpreter_entrypoint = !Runtime::Current()->IsStarted() ||
1236 ClassLinker::ShouldUseInterpreterEntrypoint(
1237 called_method,
1238 called_method->GetEntryPointFromQuickCompiledCode());
1239 if (LIKELY(accessor.HasCodeItem())) {
1240 // When transitioning to compiled code, space only needs to be reserved for the input registers.
1241 // The rest of the frame gets discarded. This also prevents accessing the called method's code
1242 // item, saving memory by keeping code items of compiled code untouched.
1243 if (!use_interpreter_entrypoint) {
1244 DCHECK(!Runtime::Current()->IsAotCompiler()) << "Compiler should use interpreter entrypoint";
1245 num_regs = number_of_inputs;
1246 } else {
1247 num_regs = accessor.RegistersSize();
1248 DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, accessor.InsSize());
1249 }
1250 } else {
1251 DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1252 num_regs = number_of_inputs;
1253 }
1254
1255 // Hack for String init:
1256 //
1257 // Rewrite invoke-x java.lang.String.<init>(this, a, b, c, ...) into:
1258 // invoke-x StringFactory(a, b, c, ...)
1259 // by effectively dropping the first virtual register from the invoke.
1260 //
1261 // (at this point the ArtMethod has already been replaced,
1262 // so we just need to fix-up the arguments)
1263 //
1264 // Note that FindMethodFromCode in entrypoint_utils-inl.h was also special-cased
1265 // to handle the compiler optimization of replacing `this` with null without
1266 // throwing NullPointerException.
1267 uint32_t string_init_vreg_this = is_range ? vregC : arg[0];
1268 if (UNLIKELY(string_init)) {
1269 DCHECK_GT(num_regs, 0u); // As the method is an instance method, there should be at least 1.
1270
1271 // The new StringFactory call is static and has one fewer argument.
1272 if (!accessor.HasCodeItem()) {
1273 DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
1274 num_regs--;
1275 } // else ... don't need to change num_regs since it comes up from the string_init's code item
1276 number_of_inputs--;
1277
1278 // Rewrite the var-args, dropping the 0th argument ("this")
1279 for (uint32_t i = 1; i < arraysize(arg); ++i) {
1280 arg[i - 1] = arg[i];
1281 }
1282 arg[arraysize(arg) - 1] = 0;
1283
1284 // Rewrite the non-var-arg case
1285 vregC++; // Skips the 0th vreg in the range ("this").
1286 }
1287
1288 // Parameter registers go at the end of the shadow frame.
1289 DCHECK_GE(num_regs, number_of_inputs);
1290 size_t first_dest_reg = num_regs - number_of_inputs;
1291 DCHECK_NE(first_dest_reg, (size_t)-1);
1292
1293 // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Initialize the new shadow frame by copying the registers from the caller shadow frame.
  if (do_assignability_check) {
    // Slow path.
    // We might need to do class loading, which incurs a thread state change to kNative. So
    // register the shadow frame as under construction and allow suspension again.
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    self->EndAssertNoThreadSuspension(old_cause);

    // The ArtMethod is needed to check the call site's type information against the callee.
    // Type information is retrieved from the DexFile/DexCache of the declaring method.
    //
    // As a special case, proxy methods are not dex-backed, so we retrieve type information
    // from the interface method the proxy method implements instead (that method is dex-backed,
    // since proxies themselves are never interfaces).
    ArtMethod* method =
        new_shadow_frame->GetMethod()->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    // We need to do a runtime check on reference assignments, so we load the shorty
    // to get the exact type of each reference argument.
    const dex::TypeList* params = method->GetParameterTypeList();
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
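    // The shorty encodes the return type followed by one character per parameter, e.g. a method
    // `Object f(int, long)` has shorty "LIJ"; parameter i therefore maps to shorty[i + 1] below.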

    // Handle the receiver separately since it's not part of the shorty.
    size_t dest_reg = first_dest_reg;
    size_t arg_offset = 0;

    if (!method->IsStatic()) {
      size_t receiver_reg = is_range ? vregC : arg[0];
      new_shadow_frame->SetVRegReference(dest_reg, shadow_frame.GetVRegReference(receiver_reg));
      ++dest_reg;
      ++arg_offset;
      DCHECK(!string_init);  // All StringFactory methods are static.
    }

    // Copy the caller's invoke-* arguments into the callee's parameter registers.
    for (uint32_t shorty_pos = 0; dest_reg < num_regs; ++shorty_pos, ++dest_reg, ++arg_offset) {
      // Skip the 0th 'shorty' type since it represents the return type.
      DCHECK_LT(shorty_pos + 1, shorty_len) << "for shorty '" << shorty << "'";
      const size_t src_reg = (is_range) ? vregC + arg_offset : arg[arg_offset];
      switch (shorty[shorty_pos + 1]) {
        // Handle Object references. 1 virtual register slot.
        case 'L': {
          ObjPtr<mirror::Object> o = shadow_frame.GetVRegReference(src_reg);
          if (do_assignability_check && o != nullptr) {
            const dex::TypeIndex type_idx = params->GetTypeItem(shorty_pos).type_idx_;
            ObjPtr<mirror::Class> arg_type = method->GetDexCache()->GetResolvedType(type_idx);
            if (arg_type == nullptr) {
              StackHandleScope<1> hs(self);
              // Preserve o since it is used below and ResolveClassFromTypeIndex may cause thread
              // suspension.
              HandleWrapperObjPtr<mirror::Object> h = hs.NewHandleWrapper(&o);
              arg_type = method->ResolveClassFromTypeIndex(type_idx);
              if (arg_type == nullptr) {
                CHECK(self->IsExceptionPending());
                return false;
              }
            }
            if (!o->VerifierInstanceOf(arg_type)) {
              // This should never happen.
              std::string temp1, temp2;
              self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                                       "Invoking %s with bad arg %d, type '%s' not instance of '%s'",
                                       new_shadow_frame->GetMethod()->GetName(), shorty_pos,
                                       o->GetClass()->GetDescriptor(&temp1),
                                       arg_type->GetDescriptor(&temp2));
              return false;
            }
          }
          new_shadow_frame->SetVRegReference(dest_reg, o);
          break;
        }
        // Handle doubles and longs. 2 consecutive virtual register slots.
        case 'J': case 'D': {
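          // Wide values occupy a register pair: the low 32 bits are in src_reg and the high
          // 32 bits in src_reg + 1, so reassemble the 64-bit value before storing it.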
          uint64_t wide_value =
              (static_cast<uint64_t>(shadow_frame.GetVReg(src_reg + 1)) << BitSizeOf<uint32_t>()) |
              static_cast<uint32_t>(shadow_frame.GetVReg(src_reg));
          new_shadow_frame->SetVRegLong(dest_reg, wide_value);
          // Skip the next virtual register slot since we already used it.
          ++dest_reg;
          ++arg_offset;
          break;
        }
        // Handle all other primitives that are always 1 virtual register slot.
        default:
          new_shadow_frame->SetVReg(dest_reg, shadow_frame.GetVReg(src_reg));
          break;
      }
    }
  } else {
    if (is_range) {
      DCHECK_EQ(num_regs, first_dest_reg + number_of_inputs);
    }

    CopyRegisters<is_range>(shadow_frame,
                            new_shadow_frame,
                            arg,
                            vregC,
                            first_dest_reg,
                            number_of_inputs);
    self->EndAssertNoThreadSuspension(old_cause);
  }

  PerformCall(self,
              accessor,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);

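  // The StringFactory call produced a brand-new String, but the caller's registers may still
  // hold aliases of the original uninitialized "this" reference; propagate the result to every
  // such alias so later uses observe the initialized string.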
  if (string_init && !self->IsExceptionPending()) {
    SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);
  }

  return !self->IsExceptionPending();
}

template<bool is_range, bool do_assignability_check>
bool DoCall(ArtMethod* called_method, Thread* self, ShadowFrame& shadow_frame,
            const Instruction* inst, uint16_t inst_data, JValue* result) {
  // Argument word count.
  const uint16_t number_of_inputs =
      (is_range) ? inst->VRegA_3rc(inst_data) : inst->VRegA_35c(inst_data);

  // TODO: find a cleaner way to separate non-range and range information without duplicating
  // code.
  uint32_t arg[Instruction::kMaxVarArgRegs] = {};  // only used in invoke-XXX.
  uint32_t vregC = 0;
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    vregC = inst->VRegC_35c();
    inst->GetVarArgs(arg, inst_data);
  }
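  // At this point the arguments are normalized: for the non-range (35c) encoding up to
  // Instruction::kMaxVarArgRegs registers were listed individually in `arg`; for the range (3rc)
  // encoding they occupy the contiguous registers [vregC, vregC + number_of_inputs).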

  return DoCallCommon<is_range, do_assignability_check>(
      called_method, self, shadow_frame,
      result, number_of_inputs, arg, vregC);
}

template <bool is_range, bool do_access_check, bool transaction_active>
bool DoFilledNewArray(const Instruction* inst,
                      const ShadowFrame& shadow_frame,
                      Thread* self,
                      JValue* result) {
  DCHECK(inst->Opcode() == Instruction::FILLED_NEW_ARRAY ||
         inst->Opcode() == Instruction::FILLED_NEW_ARRAY_RANGE);
  const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
  if (!is_range) {
    // Check that FILLED_NEW_ARRAY's length does not exceed 5 arguments.
    CHECK_LE(length, 5);
  }
  if (UNLIKELY(length < 0)) {
    ThrowNegativeArraySizeException(length);
    return false;
  }
  uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
  ObjPtr<mirror::Class> array_class = ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
                                                             shadow_frame.GetMethod(),
                                                             self,
                                                             false,
                                                             do_access_check);
  if (UNLIKELY(array_class == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  CHECK(array_class->IsArrayClass());
  ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
  const bool is_primitive_int_component = component_class->IsPrimitiveInt();
  if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
    if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
      ThrowRuntimeException("Bad filled array request for type %s",
                            component_class->PrettyDescriptor().c_str());
    } else {
      self->ThrowNewExceptionF("Ljava/lang/InternalError;",
                               "Found type %s; filled-new-array not implemented for anything but 'int'",
                               component_class->PrettyDescriptor().c_str());
    }
    return false;
  }
  ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
      self,
      array_class,
      length,
      array_class->GetComponentSizeShift(),
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(new_array == nullptr)) {
    self->AssertPendingOOMException();
    return false;
  }
  uint32_t arg[Instruction::kMaxVarArgRegs];  // only used in filled-new-array.
  uint32_t vregC = 0;  // only used in filled-new-array-range.
  if (is_range) {
    vregC = inst->VRegC_3rc();
  } else {
    inst->GetVarArgs(arg);
  }
  for (int32_t i = 0; i < length; ++i) {
    size_t src_reg = is_range ? vregC + i : arg[i];
    if (is_primitive_int_component) {
      new_array->AsIntArray()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVReg(src_reg));
    } else {
      new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks<transaction_active>(
          i, shadow_frame.GetVRegReference(src_reg));
    }
  }

  result->SetL(new_array);
  return true;
}

// TODO: Use ObjPtr here.
template<typename T>
static void RecordArrayElementsInTransactionImpl(ObjPtr<mirror::PrimitiveArray<T>> array,
                                                 int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  for (int32_t i = 0; i < count; ++i) {
    runtime->RecordWriteArray(array.Ptr(), i, array->GetWithoutChecks(i));
  }
}

void RecordArrayElementsInTransaction(ObjPtr<mirror::Array> array, int32_t count)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(Runtime::Current()->IsActiveTransaction());
  DCHECK(array != nullptr);
  DCHECK_LE(count, array->GetLength());
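  // Record the current value of every element that is about to be overwritten so that, if the
  // active transaction is aborted and rolled back, the original array contents can be restored.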
  Primitive::Type primitive_component_type =
      array->GetClass()->GetComponentType()->GetPrimitiveType();
  switch (primitive_component_type) {
    case Primitive::kPrimBoolean:
      RecordArrayElementsInTransactionImpl(array->AsBooleanArray(), count);
      break;
    case Primitive::kPrimByte:
      RecordArrayElementsInTransactionImpl(array->AsByteArray(), count);
      break;
    case Primitive::kPrimChar:
      RecordArrayElementsInTransactionImpl(array->AsCharArray(), count);
      break;
    case Primitive::kPrimShort:
      RecordArrayElementsInTransactionImpl(array->AsShortArray(), count);
      break;
    case Primitive::kPrimInt:
      RecordArrayElementsInTransactionImpl(array->AsIntArray(), count);
      break;
    case Primitive::kPrimFloat:
      RecordArrayElementsInTransactionImpl(array->AsFloatArray(), count);
      break;
    case Primitive::kPrimLong:
      RecordArrayElementsInTransactionImpl(array->AsLongArray(), count);
      break;
    case Primitive::kPrimDouble:
      RecordArrayElementsInTransactionImpl(array->AsDoubleArray(), count);
      break;
    default:
      LOG(FATAL) << "Unsupported primitive type " << primitive_component_type
                 << " in fill-array-data";
      UNREACHABLE();
  }
}

// Explicit DoCall template function declarations.
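// DoCall and the other templates below are defined in this translation unit rather than in a
// header, so every template-argument combination used by the interpreter must be instantiated
// explicitly here to avoid unresolved references at link time.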
#define EXPLICIT_DO_CALL_TEMPLATE_DECL(_is_range, _do_assignability_check)                     \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                               \
  bool DoCall<_is_range, _do_assignability_check>(ArtMethod* method, Thread* self,             \
                                                  ShadowFrame& shadow_frame,                   \
                                                  const Instruction* inst, uint16_t inst_data, \
                                                  JValue* result)
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(false, true);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL

// Explicit DoInvokePolymorphic template function declarations.
#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range)          \
  template REQUIRES_SHARED(Locks::mutator_lock_)                         \
  bool DoInvokePolymorphic<_is_range>(                                   \
      Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \
      uint16_t inst_data, JValue* result)
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL

// Explicit DoFilledNewArray template function declarations.
#define EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(_is_range_, _check, _transaction_active)       \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                  \
  bool DoFilledNewArray<_is_range_, _check, _transaction_active>(const Instruction* inst,         \
                                                                 const ShadowFrame& shadow_frame, \
                                                                 Thread* self, JValue* result)
#define EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(_transaction_active)      \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, false, _transaction_active); \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(false, true, _transaction_active);  \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, false, _transaction_active);  \
  EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL(true, true, _transaction_active)
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(false);
EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_ALL_TEMPLATE_DECL
#undef EXPLICIT_DO_FILLED_NEW_ARRAY_TEMPLATE_DECL

}  // namespace interpreter
}  // namespace art