/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 *
 * Copyright 2017 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "wasm/WasmBuiltins.h"

#include "mozilla/Atomics.h"
#include "mozilla/BinarySearch.h"

#include "fdlibm.h"
#include "jslibmath.h"

#include "jit/InlinableNatives.h"
#include "jit/MacroAssembler.h"
#include "threading/Mutex.h"
#include "wasm/WasmInstance.h"
#include "wasm/WasmStubs.h"

#include "vm/Debugger-inl.h"
#include "vm/Stack-inl.h"

using namespace js;
using namespace jit;
using namespace wasm;

using mozilla::Atomic;
using mozilla::BinarySearchIf;
using mozilla::HashGeneric;
using mozilla::IsNaN;
using mozilla::MakeEnumeratedRange;

static const unsigned BUILTIN_THUNK_LIFO_SIZE = 64 * 1024;

// ============================================================================
// WebAssembly builtin C++ functions called from wasm code to implement internal
// wasm operations.

#if defined(JS_CODEGEN_ARM)
extern "C" {

extern MOZ_EXPORT int64_t
__aeabi_idivmod(int, int);

extern MOZ_EXPORT int64_t
__aeabi_uidivmod(int, int);

}
#endif

// This utility function can only be called for builtins that are called
// directly from wasm code. Note that WasmCall pushes both an outer
// WasmActivation and an inner JitActivation that becomes active when calling
// JIT code.
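// (Illustrative note, not in the original source.) The activation stack this
// function assumes looks like:
//
//   TlsContext.get()->activation() --> JitActivation (pushed by WasmCall,
//        |                              inactive)
//        | prev()
//        v
//      WasmActivation   <-- returned by CallingActivation()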
static WasmActivation*
CallingActivation()
{
    Activation* act = TlsContext.get()->activation();
    MOZ_ASSERT(!act->asJit()->isActive(), "WasmCall pushes an inactive JitActivation");
    return act->prev()->asWasm();
}

static void*
WasmHandleExecutionInterrupt()
{
    WasmActivation* activation = CallingActivation();
    MOZ_ASSERT(activation->interrupted());

    if (!CheckForInterrupt(activation->cx())) {
        // If CheckForInterrupt failed, it is time to interrupt execution.
        // Returning nullptr to the caller will jump to the throw stub which
        // will call WasmHandleThrow. The WasmActivation must stay in the
        // interrupted state until then so that stack unwinding works in
        // WasmHandleThrow.
        return nullptr;
    }

    // If CheckForInterrupt succeeded, then execution can proceed and the
    // interrupt is over.
    void* resumePC = activation->resumePC();
    activation->finishInterrupt();
    return resumePC;
}

static bool
WasmHandleDebugTrap()
{
    WasmActivation* activation = CallingActivation();
    MOZ_ASSERT(activation);
    JSContext* cx = activation->cx();

    FrameIterator iter(activation);
    MOZ_ASSERT(iter.debugEnabled());
    const CallSite* site = iter.debugTrapCallsite();
    MOZ_ASSERT(site);
    if (site->kind() == CallSite::EnterFrame) {
        if (!iter.instance()->enterFrameTrapsEnabled())
            return true;
        DebugFrame* frame = iter.debugFrame();
        frame->setIsDebuggee();
        frame->observe(cx);
        JSTrapStatus status = Debugger::onEnterFrame(cx, frame);
        if (status == JSTRAP_RETURN) {
            // Ignoring forced return (JSTRAP_RETURN) -- changing code execution
            // order is not yet implemented in the wasm baseline.
            // TODO properly handle JSTRAP_RETURN and resume wasm execution.
            JS_ReportErrorASCII(cx, "Unexpected resumption value from onEnterFrame");
            return false;
        }
        return status == JSTRAP_CONTINUE;
    }
    if (site->kind() == CallSite::LeaveFrame) {
        DebugFrame* frame = iter.debugFrame();
        frame->updateReturnJSValue();
        bool ok = Debugger::onLeaveFrame(cx, frame, nullptr, true);
        frame->leave(cx);
        return ok;
    }

    DebugFrame* frame = iter.debugFrame();
    DebugState& debug = iter.instance()->debug();
    MOZ_ASSERT(debug.hasBreakpointTrapAtOffset(site->lineOrBytecode()));
    if (debug.stepModeEnabled(frame->funcIndex())) {
        RootedValue result(cx, UndefinedValue());
        JSTrapStatus status = Debugger::onSingleStep(cx, &result);
        if (status == JSTRAP_RETURN) {
            // TODO properly handle JSTRAP_RETURN.
            JS_ReportErrorASCII(cx, "Unexpected resumption value from onSingleStep");
            return false;
        }
        if (status != JSTRAP_CONTINUE)
            return false;
    }
    if (debug.hasBreakpointSite(site->lineOrBytecode())) {
        RootedValue result(cx, UndefinedValue());
        JSTrapStatus status = Debugger::onTrap(cx, &result);
        if (status == JSTRAP_RETURN) {
            // TODO properly handle JSTRAP_RETURN.
            JS_ReportErrorASCII(cx, "Unexpected resumption value from breakpoint handler");
            return false;
        }
        if (status != JSTRAP_CONTINUE)
            return false;
    }
    return true;
}

// Unwind the entire activation in response to a thrown exception. This
// function is responsible for notifying the debugger of each unwound frame.
// The return value is the new stack address which the calling stub will set
// to the sp register before executing a return instruction.
static void*
WasmHandleThrow()
{
    WasmActivation* activation = CallingActivation();
    JSContext* cx = activation->cx();

    // FrameIterator iterates down wasm frames in the activation starting at
    // WasmActivation::exitFP. Pass Unwind::True to pop WasmActivation::exitFP
    // once each time FrameIterator is incremented, ultimately leaving exitFP
    // null when the FrameIterator is done(). This is necessary to prevent a
    // DebugFrame from being observed again after we just called onLeaveFrame
    // (which would lead to the frame being re-added to the map of live frames,
    // right as it becomes trash).
    FrameIterator iter(activation, FrameIterator::Unwind::True);
    MOZ_ASSERT(!iter.done());

    // Live wasm code on the stack is kept alive (in wasm::TraceActivations) by
    // marking the instance of every wasm::Frame found by FrameIterator.
    // However, as explained above, we're popping frames while iterating which
    // means that a GC during this loop could collect the code of frames whose
    // code is still on the stack. This is actually mostly fine: as soon as we
    // return to the throw stub, the entire stack will be popped as a whole,
    // returning to the C++ caller. However, we must keep the throw stub itself
    // alive; it is owned by the innermost instance.
    RootedWasmInstanceObject keepAlive(cx, iter.instance()->object());

    for (; !iter.done(); ++iter) {
        if (!iter.debugEnabled())
            continue;

        DebugFrame* frame = iter.debugFrame();
        frame->clearReturnJSValue();

        // Assume a JSTRAP_ERROR status if no exception is pending -- in that
        // case no onExceptionUnwind handlers need to be fired.
        if (cx->isExceptionPending()) {
            JSTrapStatus status = Debugger::onExceptionUnwind(cx, frame);
            if (status == JSTRAP_RETURN) {
                // Unexpected trap return -- raising an error since throw
                // recovery is not yet implemented in the wasm baseline.
                // TODO properly handle JSTRAP_RETURN and resume wasm execution.
                JS_ReportErrorASCII(cx, "Unexpected resumption value from onExceptionUnwind");
            }
        }

        bool ok = Debugger::onLeaveFrame(cx, frame, nullptr, false);
        if (ok) {
            // Unexpected success from the handler onLeaveFrame -- raising an
            // error since throw recovery is not yet implemented in the wasm
            // baseline.
            // TODO properly handle success and resume wasm execution.
            JS_ReportErrorASCII(cx, "Unexpected success from onLeaveFrame");
        }
        frame->leave(cx);
    }

    MOZ_ASSERT(!activation->interrupted(), "unwinding clears the interrupt");
    return iter.unwoundAddressOfReturnAddress();
}

static void
WasmReportTrap(int32_t trapIndex)
{
    JSContext* cx = TlsContext.get();

    MOZ_ASSERT(trapIndex < int32_t(Trap::Limit) && trapIndex >= 0);
    Trap trap = Trap(trapIndex);

    unsigned errorNumber;
    switch (trap) {
      case Trap::Unreachable:
        errorNumber = JSMSG_WASM_UNREACHABLE;
        break;
      case Trap::IntegerOverflow:
        errorNumber = JSMSG_WASM_INTEGER_OVERFLOW;
        break;
      case Trap::InvalidConversionToInteger:
        errorNumber = JSMSG_WASM_INVALID_CONVERSION;
        break;
      case Trap::IntegerDivideByZero:
        errorNumber = JSMSG_WASM_INT_DIVIDE_BY_ZERO;
        break;
      case Trap::IndirectCallToNull:
        errorNumber = JSMSG_WASM_IND_CALL_TO_NULL;
        break;
      case Trap::IndirectCallBadSig:
        errorNumber = JSMSG_WASM_IND_CALL_BAD_SIG;
        break;
      case Trap::ImpreciseSimdConversion:
        errorNumber = JSMSG_SIMD_FAILED_CONVERSION;
        break;
      case Trap::OutOfBounds:
        errorNumber = JSMSG_WASM_OUT_OF_BOUNDS;
        break;
      case Trap::StackOverflow:
        errorNumber = JSMSG_OVER_RECURSED;
        break;
      default:
        MOZ_CRASH("unexpected trap");
    }

    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, errorNumber);
}

static void
WasmReportOutOfBounds()
{
    JSContext* cx = TlsContext.get();
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS);
}

static void
WasmReportUnalignedAccess()
{
    JSContext* cx = TlsContext.get();
    JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_UNALIGNED_ACCESS);
}

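// Note (added for clarity): the CoerceInPlace_* helpers below return int32_t
// rather than bool because they are called directly from jitted wasm code
// with the native ABI signature Args_General1 (see AddressOf below); a
// nonzero return signals success.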
static int32_t
CoerceInPlace_ToInt32(MutableHandleValue val)
{
    JSContext* cx = TlsContext.get();

    int32_t i32;
    if (!ToInt32(cx, val, &i32))
        return false;
    val.set(Int32Value(i32));

    return true;
}

static int32_t
CoerceInPlace_ToNumber(MutableHandleValue val)
{
    JSContext* cx = TlsContext.get();

    double dbl;
    if (!ToNumber(cx, val, &dbl))
        return false;
    val.set(DoubleValue(dbl));

    return true;
}

static int64_t
DivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi, uint32_t y_lo)
{
    int64_t x = ((uint64_t)x_hi << 32) + x_lo;
    int64_t y = ((uint64_t)y_hi << 32) + y_lo;
    MOZ_ASSERT(x != INT64_MIN || y != -1);
    MOZ_ASSERT(y != 0);
    return x / y;
}

static int64_t
UDivI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi, uint32_t y_lo)
{
    uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
    uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
    MOZ_ASSERT(y != 0);
    return x / y;
}

static int64_t
ModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi, uint32_t y_lo)
{
    int64_t x = ((uint64_t)x_hi << 32) + x_lo;
    int64_t y = ((uint64_t)y_hi << 32) + y_lo;
    MOZ_ASSERT(x != INT64_MIN || y != -1);
    MOZ_ASSERT(y != 0);
    return x % y;
}

static int64_t
UModI64(uint32_t x_hi, uint32_t x_lo, uint32_t y_hi, uint32_t y_lo)
{
    uint64_t x = ((uint64_t)x_hi << 32) + x_lo;
    uint64_t y = ((uint64_t)y_hi << 32) + y_lo;
    MOZ_ASSERT(y != 0);
    return x % y;
}

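// Illustrative sketch (hypothetical helper, not in the original source): the
// asserts above encode preconditions that jitted wasm code must establish
// before calling these callouts. Wasm i64 division traps on a zero divisor
// and on INT64_MIN / -1, whose quotient (2^63) overflows int64_t and would
// be undefined behavior in C++:
static inline bool
I64DivTraps(int64_t x, int64_t y)
{
    return y == 0 || (x == INT64_MIN && y == -1);
}
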
static int64_t
TruncateDoubleToInt64(double input)
{
    // Note: INT64_MAX is not representable as a double; double(INT64_MAX)
    // rounds up to INT64_MAX + 1 (2^63). Using >= therefore also sends that
    // boundary value to the failure case.
    if (input >= double(INT64_MAX) || input < double(INT64_MIN) || IsNaN(input))
        return 0x8000000000000000;
    return int64_t(input);
}

static uint64_t
TruncateDoubleToUint64(double input)
{
    // Note: UINT64_MAX is not representable as a double; double(UINT64_MAX)
    // rounds up to UINT64_MAX + 1 (2^64). Using >= therefore also sends that
    // boundary value to the failure case.
    if (input >= double(UINT64_MAX) || input <= -1.0 || IsNaN(input))
        return 0x8000000000000000;
    return uint64_t(input);
}

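// Worked check of the rounding claims above (illustrative addition): the
// nearest double to INT64_MAX (2^63 - 1) is exactly 2^63, and the nearest
// double to UINT64_MAX (2^64 - 1) is exactly 2^64, so the >= comparisons
// reject the rounded bounds as well.
static_assert(double(INT64_MAX) == 9223372036854775808.0,
              "double(INT64_MAX) rounds up to 2^63");
static_assert(double(UINT64_MAX) == 18446744073709551616.0,
              "double(UINT64_MAX) rounds up to 2^64");
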
static double
Int64ToDouble(int32_t x_hi, uint32_t x_lo)
{
    int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
    return double(x);
}

static float
Int64ToFloat32(int32_t x_hi, uint32_t x_lo)
{
    int64_t x = int64_t((uint64_t(x_hi) << 32)) + int64_t(x_lo);
    return float(x);
}

static double
Uint64ToDouble(int32_t x_hi, uint32_t x_lo)
{
    uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
    return double(x);
}

static float
Uint64ToFloat32(int32_t x_hi, uint32_t x_lo)
{
    uint64_t x = (uint64_t(x_hi) << 32) + uint64_t(x_lo);
    return float(x);
}

template <class F>
static inline void*
FuncCast(F* funcPtr, ABIFunctionType abiType)
{
    void* pf = JS_FUNC_TO_DATA_PTR(void*, funcPtr);
#ifdef JS_SIMULATOR
    pf = Simulator::RedirectNativeFunction(pf, abiType);
#endif
    return pf;
}

static void*
AddressOf(SymbolicAddress imm, ABIFunctionType* abiType)
{
    switch (imm) {
      case SymbolicAddress::HandleExecutionInterrupt:
        *abiType = Args_General0;
        return FuncCast(WasmHandleExecutionInterrupt, *abiType);
      case SymbolicAddress::HandleDebugTrap:
        *abiType = Args_General0;
        return FuncCast(WasmHandleDebugTrap, *abiType);
      case SymbolicAddress::HandleThrow:
        *abiType = Args_General0;
        return FuncCast(WasmHandleThrow, *abiType);
      case SymbolicAddress::ReportTrap:
        *abiType = Args_General1;
        return FuncCast(WasmReportTrap, *abiType);
      case SymbolicAddress::ReportOutOfBounds:
        *abiType = Args_General0;
        return FuncCast(WasmReportOutOfBounds, *abiType);
      case SymbolicAddress::ReportUnalignedAccess:
        *abiType = Args_General0;
        return FuncCast(WasmReportUnalignedAccess, *abiType);
      case SymbolicAddress::CallImport_Void:
        *abiType = Args_General4;
        return FuncCast(Instance::callImport_void, *abiType);
      case SymbolicAddress::CallImport_I32:
        *abiType = Args_General4;
        return FuncCast(Instance::callImport_i32, *abiType);
      case SymbolicAddress::CallImport_I64:
        *abiType = Args_General4;
        return FuncCast(Instance::callImport_i64, *abiType);
      case SymbolicAddress::CallImport_F64:
        *abiType = Args_General4;
        return FuncCast(Instance::callImport_f64, *abiType);
      case SymbolicAddress::CoerceInPlace_ToInt32:
        *abiType = Args_General1;
        return FuncCast(CoerceInPlace_ToInt32, *abiType);
      case SymbolicAddress::CoerceInPlace_ToNumber:
        *abiType = Args_General1;
        return FuncCast(CoerceInPlace_ToNumber, *abiType);
      case SymbolicAddress::ToInt32:
        *abiType = Args_Int_Double;
        return FuncCast<int32_t (double)>(JS::ToInt32, *abiType);
      case SymbolicAddress::DivI64:
        *abiType = Args_General4;
        return FuncCast(DivI64, *abiType);
      case SymbolicAddress::UDivI64:
        *abiType = Args_General4;
        return FuncCast(UDivI64, *abiType);
      case SymbolicAddress::ModI64:
        *abiType = Args_General4;
        return FuncCast(ModI64, *abiType);
      case SymbolicAddress::UModI64:
        *abiType = Args_General4;
        return FuncCast(UModI64, *abiType);
      case SymbolicAddress::TruncateDoubleToUint64:
        *abiType = Args_Int64_Double;
        return FuncCast(TruncateDoubleToUint64, *abiType);
      case SymbolicAddress::TruncateDoubleToInt64:
        *abiType = Args_Int64_Double;
        return FuncCast(TruncateDoubleToInt64, *abiType);
      case SymbolicAddress::Uint64ToDouble:
        *abiType = Args_Double_IntInt;
        return FuncCast(Uint64ToDouble, *abiType);
      case SymbolicAddress::Uint64ToFloat32:
        *abiType = Args_Float32_IntInt;
        return FuncCast(Uint64ToFloat32, *abiType);
      case SymbolicAddress::Int64ToDouble:
        *abiType = Args_Double_IntInt;
        return FuncCast(Int64ToDouble, *abiType);
      case SymbolicAddress::Int64ToFloat32:
        *abiType = Args_Float32_IntInt;
        return FuncCast(Int64ToFloat32, *abiType);
#if defined(JS_CODEGEN_ARM)
      case SymbolicAddress::aeabi_idivmod:
        *abiType = Args_General2;
        return FuncCast(__aeabi_idivmod, *abiType);
      case SymbolicAddress::aeabi_uidivmod:
        *abiType = Args_General2;
        return FuncCast(__aeabi_uidivmod, *abiType);
      case SymbolicAddress::AtomicCmpXchg:
        *abiType = Args_General5;
        return FuncCast(atomics_cmpxchg_asm_callout, *abiType);
      case SymbolicAddress::AtomicXchg:
        *abiType = Args_General4;
        return FuncCast(atomics_xchg_asm_callout, *abiType);
      case SymbolicAddress::AtomicFetchAdd:
        *abiType = Args_General4;
        return FuncCast(atomics_add_asm_callout, *abiType);
      case SymbolicAddress::AtomicFetchSub:
        *abiType = Args_General4;
        return FuncCast(atomics_sub_asm_callout, *abiType);
      case SymbolicAddress::AtomicFetchAnd:
        *abiType = Args_General4;
        return FuncCast(atomics_and_asm_callout, *abiType);
      case SymbolicAddress::AtomicFetchOr:
        *abiType = Args_General4;
        return FuncCast(atomics_or_asm_callout, *abiType);
      case SymbolicAddress::AtomicFetchXor:
        *abiType = Args_General4;
        return FuncCast(atomics_xor_asm_callout, *abiType);
#endif
      case SymbolicAddress::ModD:
        *abiType = Args_Double_DoubleDouble;
        return FuncCast(NumberMod, *abiType);
      case SymbolicAddress::SinD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(sin, *abiType);
      case SymbolicAddress::CosD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(cos, *abiType);
      case SymbolicAddress::TanD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(tan, *abiType);
      case SymbolicAddress::ASinD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::asin, *abiType);
      case SymbolicAddress::ACosD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::acos, *abiType);
      case SymbolicAddress::ATanD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::atan, *abiType);
      case SymbolicAddress::CeilD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::ceil, *abiType);
      case SymbolicAddress::CeilF:
        *abiType = Args_Float32_Float32;
        return FuncCast<float (float)>(fdlibm::ceilf, *abiType);
      case SymbolicAddress::FloorD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::floor, *abiType);
      case SymbolicAddress::FloorF:
        *abiType = Args_Float32_Float32;
        return FuncCast<float (float)>(fdlibm::floorf, *abiType);
      case SymbolicAddress::TruncD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::trunc, *abiType);
      case SymbolicAddress::TruncF:
        *abiType = Args_Float32_Float32;
        return FuncCast<float (float)>(fdlibm::truncf, *abiType);
      case SymbolicAddress::NearbyIntD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::nearbyint, *abiType);
      case SymbolicAddress::NearbyIntF:
        *abiType = Args_Float32_Float32;
        return FuncCast<float (float)>(fdlibm::nearbyintf, *abiType);
      case SymbolicAddress::ExpD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::exp, *abiType);
      case SymbolicAddress::LogD:
        *abiType = Args_Double_Double;
        return FuncCast<double (double)>(fdlibm::log, *abiType);
      case SymbolicAddress::PowD:
        *abiType = Args_Double_DoubleDouble;
        return FuncCast(ecmaPow, *abiType);
      case SymbolicAddress::ATan2D:
        *abiType = Args_Double_DoubleDouble;
        return FuncCast(ecmaAtan2, *abiType);
      case SymbolicAddress::GrowMemory:
        *abiType = Args_General2;
        return FuncCast(Instance::growMemory_i32, *abiType);
      case SymbolicAddress::CurrentMemory:
        *abiType = Args_General1;
        return FuncCast(Instance::currentMemory_i32, *abiType);
      case SymbolicAddress::Limit:
        break;
    }

    MOZ_CRASH("Bad SymbolicAddress");
}

bool
wasm::NeedsBuiltinThunk(SymbolicAddress sym)
{
    // Some functions don't need a thunk, because they already have one or
    // they don't have frame info.
    switch (sym) {
      case SymbolicAddress::HandleExecutionInterrupt: // GenerateInterruptExit
      case SymbolicAddress::HandleDebugTrap:          // GenerateDebugTrapStub
      case SymbolicAddress::HandleThrow:              // GenerateThrowStub
      case SymbolicAddress::ReportTrap:               // GenerateTrapExit
      case SymbolicAddress::ReportOutOfBounds:        // GenerateOutOfBoundsExit
      case SymbolicAddress::ReportUnalignedAccess:    // GenerateUnalignedExit
      case SymbolicAddress::CallImport_Void:          // GenerateImportInterpExit
      case SymbolicAddress::CallImport_I32:
      case SymbolicAddress::CallImport_I64:
      case SymbolicAddress::CallImport_F64:
      case SymbolicAddress::CoerceInPlace_ToInt32:    // GenerateImportJitExit
      case SymbolicAddress::CoerceInPlace_ToNumber:
        return false;
      case SymbolicAddress::ToInt32:
      case SymbolicAddress::DivI64:
      case SymbolicAddress::UDivI64:
      case SymbolicAddress::ModI64:
      case SymbolicAddress::UModI64:
      case SymbolicAddress::TruncateDoubleToUint64:
      case SymbolicAddress::TruncateDoubleToInt64:
      case SymbolicAddress::Uint64ToDouble:
      case SymbolicAddress::Uint64ToFloat32:
      case SymbolicAddress::Int64ToDouble:
      case SymbolicAddress::Int64ToFloat32:
#if defined(JS_CODEGEN_ARM)
      case SymbolicAddress::aeabi_idivmod:
      case SymbolicAddress::aeabi_uidivmod:
      case SymbolicAddress::AtomicCmpXchg:
      case SymbolicAddress::AtomicXchg:
      case SymbolicAddress::AtomicFetchAdd:
      case SymbolicAddress::AtomicFetchSub:
      case SymbolicAddress::AtomicFetchAnd:
      case SymbolicAddress::AtomicFetchOr:
      case SymbolicAddress::AtomicFetchXor:
#endif
      case SymbolicAddress::ModD:
      case SymbolicAddress::SinD:
      case SymbolicAddress::CosD:
      case SymbolicAddress::TanD:
      case SymbolicAddress::ASinD:
      case SymbolicAddress::ACosD:
      case SymbolicAddress::ATanD:
      case SymbolicAddress::CeilD:
      case SymbolicAddress::CeilF:
      case SymbolicAddress::FloorD:
      case SymbolicAddress::FloorF:
      case SymbolicAddress::TruncD:
      case SymbolicAddress::TruncF:
      case SymbolicAddress::NearbyIntD:
      case SymbolicAddress::NearbyIntF:
      case SymbolicAddress::ExpD:
      case SymbolicAddress::LogD:
      case SymbolicAddress::PowD:
      case SymbolicAddress::ATan2D:
      case SymbolicAddress::GrowMemory:
      case SymbolicAddress::CurrentMemory:
        return true;
      case SymbolicAddress::Limit:
        break;
    }

    MOZ_CRASH("unexpected symbolic address");
}

// ============================================================================
// JS builtins that can be imported by wasm modules and called efficiently
// through thunks. These thunks conform to the internal wasm ABI and thus can
// be patched in for import calls. Calling a JS builtin through a thunk is much
// faster than calling out through the generic import call trampoline which
// will end up in the slowest C++ Instance::callImport path.
//
// Each JS builtin can have several overloads. These must all be enumerated in
// PopulateTypedNatives() so they can be included in the process-wide thunk
// set.

#define FOR_EACH_UNARY_NATIVE(_)   \
    _(math_sin, MathSin)           \
    _(math_tan, MathTan)           \
    _(math_cos, MathCos)           \
    _(math_exp, MathExp)           \
    _(math_log, MathLog)           \
    _(math_asin, MathASin)         \
    _(math_atan, MathATan)         \
    _(math_acos, MathACos)         \
    _(math_log10, MathLog10)       \
    _(math_log2, MathLog2)         \
    _(math_log1p, MathLog1P)       \
    _(math_expm1, MathExpM1)       \
    _(math_sinh, MathSinH)         \
    _(math_tanh, MathTanH)         \
    _(math_cosh, MathCosH)         \
    _(math_asinh, MathASinH)       \
    _(math_atanh, MathATanH)       \
    _(math_acosh, MathACosH)       \
    _(math_sign, MathSign)         \
    _(math_trunc, MathTrunc)       \
    _(math_cbrt, MathCbrt)

#define FOR_EACH_BINARY_NATIVE(_)  \
    _(ecmaAtan2, MathATan2)        \
    _(ecmaHypot, MathHypot)        \
    _(ecmaPow, MathPow)

#define DEFINE_UNARY_FLOAT_WRAPPER(func, _)        \
    static float func##_uncached_f32(float x) {    \
        return float(func##_uncached(double(x)));  \
    }

#define DEFINE_BINARY_FLOAT_WRAPPER(func, _)       \
    static float func##_f32(float x, float y) {    \
        return float(func(double(x), double(y)));  \
    }

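// For example, DEFINE_UNARY_FLOAT_WRAPPER expands (for math_sin) to roughly:
//
//   static float math_sin_uncached_f32(float x) {
//       return float(math_sin_uncached(double(x)));
//   }
//
// so every double-precision native gains a float32 overload that thunks can
// call directly.
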
FOR_EACH_UNARY_NATIVE(DEFINE_UNARY_FLOAT_WRAPPER)
FOR_EACH_BINARY_NATIVE(DEFINE_BINARY_FLOAT_WRAPPER)

#undef DEFINE_UNARY_FLOAT_WRAPPER
#undef DEFINE_BINARY_FLOAT_WRAPPER

struct TypedNative
{
    InlinableNative native;
    ABIFunctionType abiType;

    TypedNative(InlinableNative native, ABIFunctionType abiType)
      : native(native),
        abiType(abiType)
    {}

    typedef TypedNative Lookup;
    static HashNumber hash(const Lookup& l) {
        return HashGeneric(uint32_t(l.native), uint32_t(l.abiType));
    }
    static bool match(const TypedNative& lhs, const Lookup& rhs) {
        return lhs.native == rhs.native && lhs.abiType == rhs.abiType;
    }
};

using TypedNativeToFuncPtrMap =
    HashMap<TypedNative, void*, TypedNative, SystemAllocPolicy>;

static bool
PopulateTypedNatives(TypedNativeToFuncPtrMap* typedNatives)
{
    if (!typedNatives->init())
        return false;

#define ADD_OVERLOAD(funcName, native, abiType)                              \
    if (!typedNatives->putNew(TypedNative(InlinableNative::native, abiType), \
                              FuncCast(funcName, abiType)))                  \
        return false;

#define ADD_UNARY_OVERLOADS(funcName, native)                     \
    ADD_OVERLOAD(funcName##_uncached, native, Args_Double_Double) \
    ADD_OVERLOAD(funcName##_uncached_f32, native, Args_Float32_Float32)

#define ADD_BINARY_OVERLOADS(funcName, native)               \
    ADD_OVERLOAD(funcName, native, Args_Double_DoubleDouble) \
    ADD_OVERLOAD(funcName##_f32, native, Args_Float32_Float32Float32)

    FOR_EACH_UNARY_NATIVE(ADD_UNARY_OVERLOADS)
    FOR_EACH_BINARY_NATIVE(ADD_BINARY_OVERLOADS)

#undef ADD_UNARY_OVERLOADS
#undef ADD_BINARY_OVERLOADS

    return true;
}
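// For instance, ADD_UNARY_OVERLOADS(math_sin, MathSin) registers two entries
// under InlinableNative::MathSin (expansion shown for illustration):
//
//   TypedNative(InlinableNative::MathSin, Args_Double_Double)
//       -> FuncCast(math_sin_uncached, Args_Double_Double)
//   TypedNative(InlinableNative::MathSin, Args_Float32_Float32)
//       -> FuncCast(math_sin_uncached_f32, Args_Float32_Float32)
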

#undef FOR_EACH_UNARY_NATIVE
#undef FOR_EACH_BINARY_NATIVE

// ============================================================================
// Process-wide builtin thunk set
//
// Thunks are inserted between wasm calls and the C++ callee and achieve two
// things:
// - bridging the few differences between the internal wasm ABI and the
//   external native ABI (viz. float returns on x86 and soft-fp ARM)
// - executing an exit prologue/epilogue which in turn allows any asynchronous
//   interrupt to see the full stack up to the wasm operation that called out
//
// Thunks are created for two kinds of C++ callees, enumerated above:
// - SymbolicAddress: for statically compiled calls in the wasm module
// - Imported JS builtins: optimized calls to imports
//
// All thunks are created in a single batch, lazily, when the first wasm
// module is compiled in the process. Thunks are kept alive until the JS
// engine shuts down in the process. No thunks are created at runtime after
// initialization. This simple scheme allows several simplifications:
// - no reference counting to keep thunks alive
// - no problems toggling W^X permissions which, because of multiple executing
//   threads, would require each thunk allocation to be on its own page
// The cost for creating all thunks at once is relatively low since all thunks
// fit within the smallest executable quantum (64k).

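// Sketch of the intended usage (illustrative; the real callers are the module
// compilation and import-patching paths): ensure the process-wide thunk set
// exists once, then resolve entry points through it.
//
//   if (!wasm::EnsureBuiltinThunksInitialized())
//       return false;
//   void* entry = wasm::SymbolicAddressTarget(SymbolicAddress::ModD);
//   // 'entry' points at the thunk, which calls the C++ callee with the
//   // native ABI and a proper exit frame.
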
using TypedNativeToCodeRangeMap =
    HashMap<TypedNative, uint32_t, TypedNative, SystemAllocPolicy>;

using SymbolicAddressToCodeRangeArray =
    EnumeratedArray<SymbolicAddress, SymbolicAddress::Limit, uint32_t>;

struct BuiltinThunks
{
    uint8_t* codeBase;
    size_t codeSize;
    CodeRangeVector codeRanges;
    TypedNativeToCodeRangeMap typedNativeToCodeRange;
    SymbolicAddressToCodeRangeArray symbolicAddressToCodeRange;

    BuiltinThunks()
      : codeBase(nullptr), codeSize(0)
    {}

    ~BuiltinThunks() {
        if (codeBase)
            DeallocateExecutableMemory(codeBase, codeSize);
    }
};

Mutex initBuiltinThunks(mutexid::WasmInitBuiltinThunks);
Atomic<const BuiltinThunks*> builtinThunks;

bool
wasm::EnsureBuiltinThunksInitialized()
{
    LockGuard<Mutex> guard(initBuiltinThunks);
    if (builtinThunks)
        return true;

    auto thunks = MakeUnique<BuiltinThunks>();
    if (!thunks)
        return false;

    LifoAlloc lifo(BUILTIN_THUNK_LIFO_SIZE);
    TempAllocator tempAlloc(&lifo);
    MacroAssembler masm(MacroAssembler::WasmToken(), tempAlloc);

    for (auto sym : MakeEnumeratedRange(SymbolicAddress::Limit)) {
        if (!NeedsBuiltinThunk(sym)) {
            thunks->symbolicAddressToCodeRange[sym] = UINT32_MAX;
            continue;
        }

        uint32_t codeRangeIndex = thunks->codeRanges.length();
        thunks->symbolicAddressToCodeRange[sym] = codeRangeIndex;

        ABIFunctionType abiType;
        void* funcPtr = AddressOf(sym, &abiType);
        ExitReason exitReason(sym);
        CallableOffsets offset = GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr);
        if (masm.oom() || !thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offset))
            return false;
    }

    TypedNativeToFuncPtrMap typedNatives;
    if (!PopulateTypedNatives(&typedNatives))
        return false;

    if (!thunks->typedNativeToCodeRange.init())
        return false;

    for (TypedNativeToFuncPtrMap::Range r = typedNatives.all(); !r.empty(); r.popFront()) {
        TypedNative typedNative = r.front().key();

        uint32_t codeRangeIndex = thunks->codeRanges.length();
        if (!thunks->typedNativeToCodeRange.putNew(typedNative, codeRangeIndex))
            return false;

        ABIFunctionType abiType = typedNative.abiType;
        void* funcPtr = r.front().value();
        ExitReason exitReason = ExitReason::Fixed::BuiltinNative;
        CallableOffsets offset = GenerateBuiltinThunk(masm, abiType, exitReason, funcPtr);
        if (masm.oom() || !thunks->codeRanges.emplaceBack(CodeRange::BuiltinThunk, offset))
            return false;
    }

    masm.finish();
    if (masm.oom())
        return false;

    size_t allocSize = AlignBytes(masm.bytesNeeded(), ExecutableCodePageSize);

    thunks->codeSize = allocSize;
    thunks->codeBase = (uint8_t*)AllocateExecutableMemory(allocSize, ProtectionSetting::Writable);
    if (!thunks->codeBase)
        return false;

    masm.executableCopy(thunks->codeBase, /* flushICache = */ false);
    memset(thunks->codeBase + masm.bytesNeeded(), 0, allocSize - masm.bytesNeeded());

    masm.processCodeLabels(thunks->codeBase);
#ifdef DEBUG
    MOZ_ASSERT(masm.callSites().empty());
    MOZ_ASSERT(masm.callFarJumps().empty());
    MOZ_ASSERT(masm.trapSites().empty());
    MOZ_ASSERT(masm.trapFarJumps().empty());
    MOZ_ASSERT(masm.extractMemoryAccesses().empty());
    MOZ_ASSERT(!masm.numSymbolicAccesses());
#endif

    ExecutableAllocator::cacheFlush(thunks->codeBase, thunks->codeSize);
    if (!ExecutableAllocator::makeExecutable(thunks->codeBase, thunks->codeSize))
        return false;

    builtinThunks = thunks.release();
    return true;
}

void
wasm::ReleaseBuiltinThunks()
{
    if (builtinThunks) {
        const BuiltinThunks* ptr = builtinThunks;
        js_delete(const_cast<BuiltinThunks*>(ptr));
        builtinThunks = nullptr;
    }
}

void*
wasm::SymbolicAddressTarget(SymbolicAddress sym)
{
    MOZ_ASSERT(builtinThunks);

    ABIFunctionType abiType;
    void* funcPtr = AddressOf(sym, &abiType);

    if (!NeedsBuiltinThunk(sym))
        return funcPtr;

    const BuiltinThunks& thunks = *builtinThunks;
    uint32_t codeRangeIndex = thunks.symbolicAddressToCodeRange[sym];
    return thunks.codeBase + thunks.codeRanges[codeRangeIndex].begin();
}

static Maybe<ABIFunctionType>
ToBuiltinABIFunctionType(const Sig& sig)
{
    const ValTypeVector& args = sig.args();
    ExprType ret = sig.ret();

    uint32_t abiType;
    switch (ret) {
      case ExprType::F32: abiType = ArgType_Float32 << RetType_Shift; break;
      case ExprType::F64: abiType = ArgType_Double << RetType_Shift; break;
      default: return Nothing();
    }

    if ((args.length() + 1) > (sizeof(uint32_t) * 8 / ArgType_Shift))
        return Nothing();

    for (size_t i = 0; i < args.length(); i++) {
        switch (args[i]) {
          case ValType::F32: abiType |= (ArgType_Float32 << (ArgType_Shift * (i + 1))); break;
          case ValType::F64: abiType |= (ArgType_Double << (ArgType_Shift * (i + 1))); break;
          default: return Nothing();
        }
    }

    return Some(ABIFunctionType(abiType));
}

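// Worked example of the packing above (illustrative; it assumes the
// ABIFunctionType enumerators are composed from these shifts, as the code
// implies): a signature (f64, f64) -> f64 yields
//
//   abiType = (ArgType_Double << RetType_Shift)          // return value
//           | (ArgType_Double << (ArgType_Shift * 1))    // first argument
//           | (ArgType_Double << (ArgType_Shift * 2));   // second argument
//
// i.e. Args_Double_DoubleDouble, the type AddressOf() assigns to ModD, PowD,
// and ATan2D.
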
void*
wasm::MaybeGetBuiltinThunk(HandleFunction f, const Sig& sig, JSContext* cx)
{
    MOZ_ASSERT(builtinThunks);

    if (!f->isNative() || !f->jitInfo() || f->jitInfo()->type() != JSJitInfo::InlinableNative)
        return nullptr;

    Maybe<ABIFunctionType> abiType = ToBuiltinABIFunctionType(sig);
    if (!abiType)
        return nullptr;

    TypedNative typedNative(f->jitInfo()->inlinableNative, *abiType);

    const BuiltinThunks& thunks = *builtinThunks;
    auto p = thunks.typedNativeToCodeRange.readonlyThreadsafeLookup(typedNative);
    if (!p)
        return nullptr;

    return thunks.codeBase + thunks.codeRanges[p->value()].begin();
}

bool
wasm::LookupBuiltinThunk(void* pc, const CodeRange** codeRange, uint8_t** codeBase)
{
    if (!builtinThunks)
        return false;

    const BuiltinThunks& thunks = *builtinThunks;
    if (pc < thunks.codeBase || pc >= thunks.codeBase + thunks.codeSize)
        return false;

    *codeBase = thunks.codeBase;

    CodeRange::OffsetInCode target((uint8_t*)pc - thunks.codeBase);
    *codeRange = LookupInSorted(thunks.codeRanges, target);

    return !!*codeRange;
}
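// Sketch of the lookup pattern during stack walking (illustrative; the real
// callers live in the frame-iteration and profiling code):
//
//   const CodeRange* codeRange;
//   uint8_t* codeBase;
//   if (wasm::LookupBuiltinThunk(pc, &codeRange, &codeBase)) {
//       // pc is inside the builtin thunk whose code starts at
//       // codeBase + codeRange->begin().
//   }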