Line data Source code
1 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 : * vim: set ts=8 sts=4 et sw=4 tw=99:
3 : * This Source Code Form is subject to the terms of the Mozilla Public
4 : * License, v. 2.0. If a copy of the MPL was not distributed with this
5 : * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 :
7 : #include "jit/OptimizationTracking.h"
8 :
9 : #include "mozilla/SizePrintfMacros.h"
10 :
11 : #include "jsprf.h"
12 :
13 : #include "ds/Sort.h"
14 : #include "jit/IonBuilder.h"
15 : #include "jit/JitcodeMap.h"
16 : #include "jit/JitSpewer.h"
17 : #include "js/TrackedOptimizationInfo.h"
18 :
19 : #include "vm/ObjectGroup-inl.h"
20 : #include "vm/TypeInference-inl.h"
21 :
22 : using namespace js;
23 : using namespace js::jit;
24 :
25 : using mozilla::Maybe;
26 : using mozilla::Some;
27 : using mozilla::Nothing;
28 :
29 : using JS::TrackedStrategy;
30 : using JS::TrackedOutcome;
31 : using JS::TrackedTypeSite;
32 : using JS::ForEachTrackedOptimizationAttemptOp;
33 : using JS::ForEachTrackedOptimizationTypeInfoOp;
34 :
// Record one optimization type-info entry; takes ownership of |ty|.
// Returns false on OOM (vector append failure).
bool
TrackedOptimizations::trackTypeInfo(OptimizationTypeInfo&& ty)
{
    return types_.append(mozilla::Move(ty));
}
40 :
41 : bool
42 0 : TrackedOptimizations::trackAttempt(TrackedStrategy strategy)
43 : {
44 0 : OptimizationAttempt attempt(strategy, TrackedOutcome::GenericFailure);
45 0 : currentAttempt_ = attempts_.length();
46 0 : return attempts_.append(attempt);
47 : }
48 :
// Redirect subsequent outcome reports to a previously tracked attempt at
// |index| instead of the most recently tracked one.
void
TrackedOptimizations::amendAttempt(uint32_t index)
{
    currentAttempt_ = index;
}
54 :
// Record the outcome of the current attempt (set by trackAttempt or
// amendAttempt).
void
TrackedOptimizations::trackOutcome(TrackedOutcome outcome)
{
    attempts_[currentAttempt_].setOutcome(outcome);
}
60 :
// Mark the current attempt as having succeeded.
void
TrackedOptimizations::trackSuccess()
{
    attempts_[currentAttempt_].setOutcome(TrackedOutcome::GenericSuccess);
}
66 :
// Element-wise equality of two vectors: equal lengths and pairwise-equal
// contents (using the element's operator!=).
template <class Vec>
static bool
VectorContentsMatch(const Vec* xs, const Vec* ys)
{
    if (xs->length() != ys->length())
        return false;
    auto y = ys->begin();
    for (const auto& x : *xs) {
        MOZ_ASSERT(y != ys->end());
        if (x != *y)
            return false;
        ++y;
    }
    return true;
}
80 :
// Compare this tracking's type-info entries against |other| for equality.
bool
TrackedOptimizations::matchTypes(const TempOptimizationTypeInfoVector& other) const
{
    return VectorContentsMatch(&types_, &other);
}
86 :
// Compare this tracking's attempt entries against |other| for equality.
bool
TrackedOptimizations::matchAttempts(const TempOptimizationAttemptsVector& other) const
{
    return VectorContentsMatch(&attempts_, &other);
}
92 :
// Map a TrackedStrategy enum value to its identifier string, generated by
// X-macro expansion over TRACKED_STRATEGY_LIST.
JS_PUBLIC_API(const char*)
JS::TrackedStrategyString(TrackedStrategy strategy)
{
    switch (strategy) {
#define STRATEGY_CASE(name)                       \
      case TrackedStrategy::name:                 \
        return #name;
    TRACKED_STRATEGY_LIST(STRATEGY_CASE)
#undef STRATEGY_CASE

      default:
        MOZ_CRASH("bad strategy");
    }
}
107 :
// Map a TrackedOutcome enum value to its identifier string, generated by
// X-macro expansion over TRACKED_OUTCOME_LIST.
JS_PUBLIC_API(const char*)
JS::TrackedOutcomeString(TrackedOutcome outcome)
{
    switch (outcome) {
#define OUTCOME_CASE(name)                        \
      case TrackedOutcome::name:                  \
        return #name;
    TRACKED_OUTCOME_LIST(OUTCOME_CASE)
#undef OUTCOME_CASE

      default:
        MOZ_CRASH("bad outcome");
    }
}
122 :
// Map a TrackedTypeSite enum value to its identifier string, generated by
// X-macro expansion over TRACKED_TYPESITE_LIST.
JS_PUBLIC_API(const char*)
JS::TrackedTypeSiteString(TrackedTypeSite site)
{
    switch (site) {
#define TYPESITE_CASE(name)                       \
      case TrackedTypeSite::name:                 \
        return #name;
    TRACKED_TYPESITE_LIST(TYPESITE_CASE)
#undef TYPESITE_CASE

      default:
        MOZ_CRASH("bad type site");
    }
}
137 :
// Spew each type-info entry (site, MIR type, and observed type set) to
// |channel|, one line per entry. No-op unless built with JS_JITSPEW.
void
SpewTempOptimizationTypeInfoVector(JitSpewChannel channel,
                                   const TempOptimizationTypeInfoVector* types,
                                   const char* indent = nullptr)
{
#ifdef JS_JITSPEW
    for (const OptimizationTypeInfo* t = types->begin(); t != types->end(); t++) {
        // Start the line, then append each observed type before finishing it.
        JitSpewStart(channel, "   %s%s of type %s, type set",
                     indent ? indent : "",
                     TrackedTypeSiteString(t->site()), StringFromMIRType(t->mirType()));
        for (uint32_t i = 0; i < t->types().length(); i++)
            JitSpewCont(channel, " %s", TypeSet::TypeString(t->types()[i]));
        JitSpewFin(channel);
    }
#endif
}
154 :
// Spew each attempt's strategy and outcome to |channel|, one line per
// attempt. No-op unless built with JS_JITSPEW.
void
SpewTempOptimizationAttemptsVector(JitSpewChannel channel,
                                   const TempOptimizationAttemptsVector* attempts,
                                   const char* indent = nullptr)
{
#ifdef JS_JITSPEW
    for (const OptimizationAttempt* a = attempts->begin(); a != attempts->end(); a++) {
        JitSpew(channel, "   %s%s: %s", indent ? indent : "",
                TrackedStrategyString(a->strategy()), TrackedOutcomeString(a->outcome()));
    }
#endif
}
167 :
// Spew all tracked type info followed by all tracked attempts. No-op unless
// built with JS_JITSPEW.
void
TrackedOptimizations::spew(JitSpewChannel channel) const
{
#ifdef JS_JITSPEW
    SpewTempOptimizationTypeInfoVector(channel, &types_);
    SpewTempOptimizationAttemptsVector(channel, &attempts_);
#endif
}
176 :
177 : bool
178 0 : OptimizationTypeInfo::trackTypeSet(TemporaryTypeSet* typeSet)
179 : {
180 0 : if (!typeSet)
181 0 : return true;
182 0 : return typeSet->enumerateTypes(&types_);
183 : }
184 :
// Append a single observed type. Returns false on OOM.
bool
OptimizationTypeInfo::trackType(TypeSet::Type type)
{
    return types_.append(type);
}
190 :
// Two entries are equal when their site, MIR type, and ordered type lists
// all match.
bool
OptimizationTypeInfo::operator ==(const OptimizationTypeInfo& other) const
{
    return site_ == other.site_ && mirType_ == other.mirType_ &&
           VectorContentsMatch(&types_, &other.types_);
}
197 :
// Negation of operator==.
bool
OptimizationTypeInfo::operator !=(const OptimizationTypeInfo& other) const
{
    return !(*this == other);
}
203 :
204 : static inline HashNumber
205 0 : CombineHash(HashNumber h, HashNumber n)
206 : {
207 0 : h += n;
208 0 : h += (h << 10);
209 0 : h ^= (h >> 6);
210 0 : return h;
211 : }
212 :
// Hash a TypeSet::Type. Object types hash by their key pointer (with
// low-bit shifting to account for alignment); primitive types hash by
// their raw tag value.
static inline HashNumber
HashType(TypeSet::Type ty)
{
    if (ty.isObjectUnchecked())
        return PointerHasher<TypeSet::ObjectKey*, 3>::hash(ty.objectKey());
    return HashNumber(ty.raw());
}
220 :
// Order-dependent hash of every type in |types|.
static HashNumber
HashTypeList(const TempTypeList& types)
{
    HashNumber acc = 0;
    uint32_t i = 0;
    while (i < types.length()) {
        acc = CombineHash(acc, HashType(types[i]));
        i++;
    }
    return acc;
}
229 :
// Hash combining the site and MIR type (packed into high bits) with the
// hash of the type list. Consistent with operator==.
HashNumber
OptimizationTypeInfo::hash() const
{
    return ((HashNumber(site_) << 24) + (HashNumber(mirType_) << 16)) ^ HashTypeList(types_);
}
235 :
236 : template <class Vec>
237 : static HashNumber
238 0 : HashVectorContents(const Vec* xs, HashNumber h)
239 : {
240 0 : for (auto x = xs->begin(); x != xs->end(); x++)
241 0 : h = CombineHash(h, x->hash());
242 0 : return h;
243 : }
244 :
// Hash a lookup key: combine the hashes of both vectors, then apply the
// one-at-a-time finalization (avalanche) steps.
/* static */ HashNumber
UniqueTrackedOptimizations::Key::hash(const Lookup& lookup)
{
    HashNumber h = HashVectorContents(lookup.types, 0);
    h = HashVectorContents(lookup.attempts, h);
    h += (h << 3);
    h ^= (h >> 11);
    h += (h << 15);
    return h;
}
255 :
// Keys match when both the attempts and types vectors compare equal
// element-wise.
/* static */ bool
UniqueTrackedOptimizations::Key::match(const Key& key, const Lookup& lookup)
{
    return VectorContentsMatch(key.attempts, lookup.attempts) &&
           VectorContentsMatch(key.types, lookup.types);
}
262 :
// Register one script location's tracked optimizations, deduplicating by
// (types, attempts). Bumps the frequency for an existing entry; otherwise
// inserts a new entry. Returns false on OOM. Must be called before
// sortByFrequency().
bool
UniqueTrackedOptimizations::add(const TrackedOptimizations* optimizations)
{
    MOZ_ASSERT(!sorted());
    Key key;
    key.types = &optimizations->types_;
    key.attempts = &optimizations->attempts_;
    AttemptsMap::AddPtr p = map_.lookupForAdd(key);
    if (p) {
        p->value().frequency++;
        return true;
    }
    Entry entry;
    // UINT8_MAX is a sentinel meaning "index not yet assigned"; real indices
    // are assigned by sortByFrequency().
    entry.index = UINT8_MAX;
    entry.frequency = 1;
    return map_.add(p, key, entry);
}
280 :
// MergeSort comparator ordering entries by *descending* frequency (note the
// reversed operands: a precedes b when a.frequency >= b.frequency). The
// bool return indicates comparison success and is always true here.
struct FrequencyComparator
{
    bool operator()(const UniqueTrackedOptimizations::SortEntry& a,
                    const UniqueTrackedOptimizations::SortEntry& b,
                    bool* lessOrEqualp)
    {
        *lessOrEqualp = b.frequency <= a.frequency;
        return true;
    }
};
291 :
// Sort the unique optimization entries by descending frequency and assign
// each entry its final compact-table index. After this call sorted() is
// true and indexOf() may be used. Returns false on OOM or if there are too
// many unique entries to index with a uint8_t.
bool
UniqueTrackedOptimizations::sortByFrequency(JSContext* cx)
{
    MOZ_ASSERT(!sorted());

    JitSpew(JitSpew_OptimizationTrackingExtended, "=> Sorting unique optimizations by frequency");

    // Sort by frequency.
    Vector<SortEntry> entries(cx);
    for (AttemptsMap::Range r = map_.all(); !r.empty(); r.popFront()) {
        SortEntry entry;
        entry.types = r.front().key().types;
        entry.attempts = r.front().key().attempts;
        entry.frequency = r.front().value().frequency;
        if (!entries.append(entry))
            return false;
    }

    // The compact table stores indices as a max of uint8_t. In practice each
    // script has fewer unique optimization attempts than UINT8_MAX.
    if (entries.length() >= UINT8_MAX - 1)
        return false;

    Vector<SortEntry> scratch(cx);
    if (!scratch.resize(entries.length()))
        return false;

    FrequencyComparator comparator;
    MOZ_ALWAYS_TRUE(MergeSort(entries.begin(), entries.length(), scratch.begin(), comparator));

    // Update map entries' indices: position in the sorted vector becomes the
    // entry's compact index.
    for (size_t i = 0; i < entries.length(); i++) {
        Key key;
        key.types = entries[i].types;
        key.attempts = entries[i].attempts;
        AttemptsMap::Ptr p = map_.lookup(key);
        MOZ_ASSERT(p);
        p->value().index = sorted_.length();

        JitSpew(JitSpew_OptimizationTrackingExtended, "   Entry %" PRIuSIZE " has frequency %" PRIu32,
                sorted_.length(), p->value().frequency);

        if (!sorted_.append(entries[i]))
            return false;
    }

    return true;
}
340 :
// Look up the compact-table index previously assigned by sortByFrequency()
// for this (types, attempts) combination. The entry must exist and must
// have been assigned a real index (not the UINT8_MAX sentinel).
uint8_t
UniqueTrackedOptimizations::indexOf(const TrackedOptimizations* optimizations) const
{
    MOZ_ASSERT(sorted());
    Key key;
    key.types = &optimizations->types_;
    key.attempts = &optimizations->attempts_;
    AttemptsMap::Ptr p = map_.lookup(key);
    MOZ_ASSERT(p);
    MOZ_ASSERT(p->value().index != UINT8_MAX);
    return p->value().index;
}
353 :
354 : // Assigns each unique tracked type an index; outputs a compact list.
355 0 : class jit::UniqueTrackedTypes
356 : {
357 : public:
358 : struct TypeHasher
359 : {
360 : typedef TypeSet::Type Lookup;
361 :
362 0 : static HashNumber hash(const Lookup& ty) { return HashType(ty); }
363 0 : static bool match(const TypeSet::Type& ty1, const TypeSet::Type& ty2) { return ty1 == ty2; }
364 : };
365 :
366 : private:
367 : // Map of unique TypeSet::Types to indices.
368 : typedef HashMap<TypeSet::Type, uint8_t, TypeHasher> TypesMap;
369 : TypesMap map_;
370 :
371 : Vector<TypeSet::Type, 1> list_;
372 :
373 : public:
374 0 : explicit UniqueTrackedTypes(JSContext* cx)
375 0 : : map_(cx),
376 0 : list_(cx)
377 0 : { }
378 :
379 0 : bool init() { return map_.init(); }
380 : bool getIndexOf(JSContext* cx, TypeSet::Type ty, uint8_t* indexp);
381 :
382 0 : uint32_t count() const { MOZ_ASSERT(map_.count() == list_.length()); return list_.length(); }
383 : bool enumerate(TypeSet::TypeList* types) const;
384 : };
385 :
// Get (or assign) the compact index for |ty|, writing it to |*indexp|.
// Returns false on OOM or when more than UINT8_MAX unique types have been
// seen (indices must fit a uint8_t in the compact encoding).
bool
UniqueTrackedTypes::getIndexOf(JSContext* cx, TypeSet::Type ty, uint8_t* indexp)
{
    TypesMap::AddPtr p = map_.lookupForAdd(ty);
    if (p) {
        *indexp = p->value();
        return true;
    }

    // Store indices as max of uint8_t. In practice each script has fewer than
    // UINT8_MAX of unique observed types.
    if (count() >= UINT8_MAX)
        return false;

    uint8_t index = (uint8_t) count();
    if (!map_.add(p, ty, index))
        return false;
    if (!list_.append(ty))
        return false;
    *indexp = index;
    return true;
}
408 :
// Copy the unique types, in index order, into |types|. Returns false on OOM.
bool
UniqueTrackedTypes::enumerate(TypeSet::TypeList* types) const
{
    return types->append(list_.begin(), list_.end());
}
414 :
// Decode the region header (overall start and end native offsets) from the
// front of the compact buffer and remember where the delta-encoded ranges
// begin.
void
IonTrackedOptimizationsRegion::unpackHeader()
{
    CompactBufferReader reader(start_, end_);
    startOffset_ = reader.readUnsigned();
    endOffset_ = reader.readUnsigned();
    rangesStart_ = reader.currentPosition();
    MOZ_ASSERT(startOffset_ < endOffset_);
}
424 :
// Decode the next (startOffset, endOffset, index) range in the run and
// advance the iterator. The first entry is stored verbatim; subsequent
// entries are delta-encoded against the previous entry's end offset.
void
IonTrackedOptimizationsRegion::RangeIterator::readNext(uint32_t* startOffset, uint32_t* endOffset,
                                                       uint8_t* index)
{
    MOZ_ASSERT(more());

    CompactBufferReader reader(cur_, end_);

    // The very first entry isn't delta-encoded.
    if (cur_ == start_) {
        *startOffset = firstStartOffset_;
        *endOffset = prevEndOffset_ = reader.readUnsigned();
        *index = reader.readByte();
        cur_ = reader.currentPosition();
        MOZ_ASSERT(cur_ <= end_);
        return;
    }

    // Otherwise, read a delta.
    uint32_t startDelta, length;
    ReadDelta(reader, &startDelta, &length, index);
    *startOffset = prevEndOffset_ + startDelta;
    *endOffset = prevEndOffset_ = *startOffset + length;
    cur_ = reader.currentPosition();
    MOZ_ASSERT(cur_ <= end_);
}
451 :
// Find the tracked-optimizations index covering native code address |ptr|,
// or Nothing() when no region covers it. On success, |*entryOffsetOut| is
// the end offset of the matching range.
Maybe<uint8_t>
JitcodeGlobalEntry::IonEntry::trackedOptimizationIndexAtAddr(JSRuntime *rt, void* ptr,
                                                             uint32_t* entryOffsetOut)
{
    MOZ_ASSERT(hasTrackedOptimizations());
    MOZ_ASSERT(containsPointer(ptr));
    uint32_t ptrOffset = ((uint8_t*) ptr) - ((uint8_t*) nativeStartAddr());
    Maybe<IonTrackedOptimizationsRegion> region = optsRegionTable_->findRegion(ptrOffset);
    if (region.isNothing())
        return Nothing();
    return region->findIndex(ptrOffset, entryOffsetOut);
}
464 :
// Invoke |op| for each attempt recorded in the compact attempts table at
// |index|.
void
JitcodeGlobalEntry::IonEntry::forEachOptimizationAttempt(JSRuntime *rt, uint8_t index,
                                                         ForEachTrackedOptimizationAttemptOp& op)
{
    trackedOptimizationAttempts(index).forEach(op);
}
471 :
// Invoke |op| for each type-info entry recorded in the compact type-info
// table at |index|, resolving type indices against allTrackedTypes().
void
JitcodeGlobalEntry::IonEntry::forEachOptimizationTypeInfo(JSRuntime *rt, uint8_t index,
                                                          IonTrackedOptimizationsTypeInfo::ForEachOpAdapter& op)
{
    trackedOptimizationTypeInfo(index).forEach(op, allTrackedTypes());
}
478 :
// Decode the compact attempts payload (pairs of unsigned strategy/outcome
// values, written by OptimizationAttempt::writeCompact) and invoke |op| for
// each pair.
void
IonTrackedOptimizationsAttempts::forEach(ForEachTrackedOptimizationAttemptOp& op)
{
    CompactBufferReader reader(start_, end_);
    const uint8_t* cur = start_;
    while (cur != end_) {
        TrackedStrategy strategy = TrackedStrategy(reader.readUnsigned());
        TrackedOutcome outcome = TrackedOutcome(reader.readUnsigned());
        MOZ_ASSERT(strategy < TrackedStrategy::Count);
        MOZ_ASSERT(outcome < TrackedOutcome::Count);
        op(strategy, outcome);
        cur = reader.currentPosition();
        MOZ_ASSERT(cur <= end_);
    }
}
494 :
// Decode the compact type-info payload (written by
// OptimizationTypeInfo::writeCompact): for each entry, feed each referenced
// type to op.readType(), then invoke op(site, mirType). Type bytes index
// into |allTypes|.
void
IonTrackedOptimizationsTypeInfo::forEach(ForEachOp& op, const IonTrackedTypeVector* allTypes)
{
    CompactBufferReader reader(start_, end_);
    const uint8_t* cur = start_;
    while (cur != end_) {
        TrackedTypeSite site = JS::TrackedTypeSite(reader.readUnsigned());
        MOZ_ASSERT(site < JS::TrackedTypeSite::Count);
        MIRType mirType = MIRType(reader.readUnsigned());
        uint32_t length = reader.readUnsigned();
        for (uint32_t i = 0; i < length; i++)
            op.readType((*allTypes)[reader.readByte()]);
        op(site, mirType);
        cur = reader.currentPosition();
        MOZ_ASSERT(cur <= end_);
    }
}
512 :
// Find the optimizations-table index for the range containing |offset|
// within this region, or Nothing(). Ranges are half-open on the left:
// (startOffset, endOffset]. On success |*entryOffsetOut| is the matching
// range's end offset.
Maybe<uint8_t>
IonTrackedOptimizationsRegion::findIndex(uint32_t offset, uint32_t* entryOffsetOut) const
{
    if (offset <= startOffset_ || offset > endOffset_)
        return Nothing();

    // Linear search through the run.
    RangeIterator iter = ranges();
    while (iter.more()) {
        uint32_t startOffset, endOffset;
        uint8_t index;
        iter.readNext(&startOffset, &endOffset, &index);
        if (startOffset < offset && offset <= endOffset) {
            *entryOffsetOut = endOffset;
            return Some(index);
        }
    }
    return Nothing();
}
532 :
// Find the region whose (startOffset, endOffset] range contains |offset|,
// or Nothing() when no region does.
//
// Fixes two bugs in the binary-search path: (1) the hit branch returned
// entry(i) even though the region that was tested to contain |offset| is
// entry(mid); (2) the loop exited once one candidate remained without ever
// testing it, silently dropping valid matches.
Maybe<IonTrackedOptimizationsRegion>
IonTrackedOptimizationsRegionTable::findRegion(uint32_t offset) const
{
    // For two contiguous regions, e.g., [i, j] and [j, k], an offset exactly
    // at j will be associated with [i, j] instead of [j, k]. An offset
    // exactly at j is often a return address from a younger frame, which case
    // the next region, despite starting at j, has not yet logically started
    // execution.

    static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
    uint32_t regions = numEntries();
    MOZ_ASSERT(regions > 0);

    // For small numbers of regions, do linear search.
    if (regions <= LINEAR_SEARCH_THRESHOLD) {
        for (uint32_t i = 0; i < regions; i++) {
            IonTrackedOptimizationsRegion region = entry(i);
            if (region.startOffset() < offset && offset <= region.endOffset()) {
                return Some(region);
            }
        }
        return Nothing();
    }

    // Otherwise, do binary search over the half-open candidate window
    // [i, i + regions).
    uint32_t i = 0;
    while (regions > 1) {
        uint32_t step = regions / 2;
        uint32_t mid = i + step;
        IonTrackedOptimizationsRegion region = entry(mid);

        if (offset <= region.startOffset()) {
            // Entry, if any, is below mid.
            regions = step;
        } else if (offset > region.endOffset()) {
            // Entry, if any, is above mid.
            i = mid;
            regions -= step;
        } else {
            // Offset is contained in the mid region; return the region that
            // was actually tested.
            return Some(region);
        }
    }

    // One candidate remains; it was never tested inside the loop.
    IonTrackedOptimizationsRegion last = entry(i);
    if (last.startOffset() < offset && offset <= last.endOffset())
        return Some(last);
    return Nothing();
}
578 :
// Compute how many consecutive entries starting at |start| can be encoded
// in one run: the run ends at the first entry whose deltas don't fit the
// compact encodings, or at MAX_RUN_LENGTH entries.
/* static */ uint32_t
IonTrackedOptimizationsRegion::ExpectedRunLength(const NativeToTrackedOptimizations* start,
                                                 const NativeToTrackedOptimizations* end)
{
    MOZ_ASSERT(start < end);

    // A run always has at least 1 entry, which is not delta encoded.
    uint32_t runLength = 1;
    uint32_t prevEndOffset = start->endOffset.offset();

    for (const NativeToTrackedOptimizations* entry = start + 1; entry != end; entry++) {
        uint32_t startOffset = entry->startOffset.offset();
        uint32_t endOffset = entry->endOffset.offset();
        uint32_t startDelta = startOffset - prevEndOffset;
        uint32_t length = endOffset - startOffset;

        if (!IsDeltaEncodeable(startDelta, length))
            break;

        runLength++;
        if (runLength == MAX_RUN_LENGTH)
            break;

        prevEndOffset = endOffset;
    }

    return runLength;
}
607 :
// Serialize this attempt as two unsigned values (strategy, outcome);
// decoded by IonTrackedOptimizationsAttempts::forEach.
void
OptimizationAttempt::writeCompact(CompactBufferWriter& writer) const
{
    writer.writeUnsigned((uint32_t) strategy_);
    writer.writeUnsigned((uint32_t) outcome_);
}
614 :
// Serialize this type-info entry: site, MIR type, type count, then one
// index byte per type (indices assigned by |uniqueTypes|). Decoded by
// IonTrackedOptimizationsTypeInfo::forEach. Returns false on OOM or too
// many unique types.
bool
OptimizationTypeInfo::writeCompact(JSContext* cx, CompactBufferWriter& writer,
                                   UniqueTrackedTypes& uniqueTypes) const
{
    writer.writeUnsigned((uint32_t) site_);
    writer.writeUnsigned((uint32_t) mirType_);
    writer.writeUnsigned(types_.length());
    for (uint32_t i = 0; i < types_.length(); i++) {
        uint8_t index;
        if (!uniqueTypes.getIndexOf(cx, types_[i], &index))
            return false;
        writer.writeByte(index);
    }
    return true;
}
630 :
// Decode one delta-encoded (startDelta, length, index) triple, mirroring
// WriteDelta. The low bits of the first byte select one of four
// variable-width encodings (2-5 bytes).
//
// Fix: the sanity asserts previously read MOZ_ASSERT(length != 0), which
// tested the out-parameter *pointer* (always non-null) rather than the
// decoded value; they now assert *length != 0.
/* static */ void
IonTrackedOptimizationsRegion::ReadDelta(CompactBufferReader& reader,
                                         uint32_t* startDelta, uint32_t* length,
                                         uint8_t* index)
{
    // 2 bytes
    // SSSS-SSSL LLLL-LII0
    const uint32_t firstByte = reader.readByte();
    const uint32_t secondByte = reader.readByte();
    if ((firstByte & ENC1_MASK) == ENC1_MASK_VAL) {
        uint32_t encVal = firstByte | secondByte << 8;
        *startDelta = encVal >> ENC1_START_DELTA_SHIFT;
        *length = (encVal >> ENC1_LENGTH_SHIFT) & ENC1_LENGTH_MAX;
        *index = (encVal >> ENC1_INDEX_SHIFT) & ENC1_INDEX_MAX;
        MOZ_ASSERT(*length != 0);
        return;
    }

    // 3 bytes
    // SSSS-SSSS SSSS-LLLL LLII-II01
    const uint32_t thirdByte = reader.readByte();
    if ((firstByte & ENC2_MASK) == ENC2_MASK_VAL) {
        uint32_t encVal = firstByte | secondByte << 8 | thirdByte << 16;
        *startDelta = encVal >> ENC2_START_DELTA_SHIFT;
        *length = (encVal >> ENC2_LENGTH_SHIFT) & ENC2_LENGTH_MAX;
        *index = (encVal >> ENC2_INDEX_SHIFT) & ENC2_INDEX_MAX;
        MOZ_ASSERT(*length != 0);
        return;
    }

    // 4 bytes
    // SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
    const uint32_t fourthByte = reader.readByte();
    if ((firstByte & ENC3_MASK) == ENC3_MASK_VAL) {
        uint32_t encVal = firstByte | secondByte << 8 | thirdByte << 16 | fourthByte << 24;
        *startDelta = encVal >> ENC3_START_DELTA_SHIFT;
        *length = (encVal >> ENC3_LENGTH_SHIFT) & ENC3_LENGTH_MAX;
        *index = (encVal >> ENC3_INDEX_SHIFT) & ENC3_INDEX_MAX;
        MOZ_ASSERT(*length != 0);
        return;
    }

    // 5 bytes
    // SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
    MOZ_ASSERT((firstByte & ENC4_MASK) == ENC4_MASK_VAL);
    uint64_t fifthByte = reader.readByte();
    uint64_t encVal = firstByte | secondByte << 8 | thirdByte << 16 | fourthByte << 24 |
                      fifthByte << 32;
    *startDelta = encVal >> ENC4_START_DELTA_SHIFT;
    *length = (encVal >> ENC4_LENGTH_SHIFT) & ENC4_LENGTH_MAX;
    *index = (encVal >> ENC4_INDEX_SHIFT) & ENC4_INDEX_MAX;
    MOZ_ASSERT(*length != 0);
}
684 :
// Encode one (startDelta, length, index) triple using the smallest of four
// variable-width encodings that fits; mirrored by ReadDelta. Crashes if the
// triple does not fit even the widest (5-byte) encoding.
/* static */ void
IonTrackedOptimizationsRegion::WriteDelta(CompactBufferWriter& writer,
                                          uint32_t startDelta, uint32_t length,
                                          uint8_t index)
{
    // 2 bytes
    // SSSS-SSSL LLLL-LII0
    if (startDelta <= ENC1_START_DELTA_MAX &&
        length <= ENC1_LENGTH_MAX &&
        index <= ENC1_INDEX_MAX)
    {
        uint16_t val = ENC1_MASK_VAL |
                       (startDelta << ENC1_START_DELTA_SHIFT) |
                       (length << ENC1_LENGTH_SHIFT) |
                       (index << ENC1_INDEX_SHIFT);
        writer.writeByte(val & 0xff);
        writer.writeByte((val >> 8) & 0xff);
        return;
    }

    // 3 bytes
    // SSSS-SSSS SSSS-LLLL LLII-II01
    if (startDelta <= ENC2_START_DELTA_MAX &&
        length <= ENC2_LENGTH_MAX &&
        index <= ENC2_INDEX_MAX)
    {
        uint32_t val = ENC2_MASK_VAL |
                       (startDelta << ENC2_START_DELTA_SHIFT) |
                       (length << ENC2_LENGTH_SHIFT) |
                       (index << ENC2_INDEX_SHIFT);
        writer.writeByte(val & 0xff);
        writer.writeByte((val >> 8) & 0xff);
        writer.writeByte((val >> 16) & 0xff);
        return;
    }

    // 4 bytes
    // SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
    if (startDelta <= ENC3_START_DELTA_MAX &&
        length <= ENC3_LENGTH_MAX)
    {
        // index always fits because it's an uint8_t; change this if
        // ENC3_INDEX_MAX changes.
        MOZ_ASSERT(ENC3_INDEX_MAX == UINT8_MAX);
        uint32_t val = ENC3_MASK_VAL |
                       (startDelta << ENC3_START_DELTA_SHIFT) |
                       (length << ENC3_LENGTH_SHIFT) |
                       (index << ENC3_INDEX_SHIFT);
        writer.writeByte(val & 0xff);
        writer.writeByte((val >> 8) & 0xff);
        writer.writeByte((val >> 16) & 0xff);
        writer.writeByte((val >> 24) & 0xff);
        return;
    }

    // 5 bytes
    // SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
    if (startDelta <= ENC4_START_DELTA_MAX &&
        length <= ENC4_LENGTH_MAX)
    {
        // index always fits because it's an uint8_t; change this if
        // ENC4_INDEX_MAX changes.
        MOZ_ASSERT(ENC4_INDEX_MAX == UINT8_MAX);
        uint64_t val = ENC4_MASK_VAL |
                       (((uint64_t) startDelta) << ENC4_START_DELTA_SHIFT) |
                       (((uint64_t) length) << ENC4_LENGTH_SHIFT) |
                       (((uint64_t) index) << ENC4_INDEX_SHIFT);
        writer.writeByte(val & 0xff);
        writer.writeByte((val >> 8) & 0xff);
        writer.writeByte((val >> 16) & 0xff);
        writer.writeByte((val >> 24) & 0xff);
        writer.writeByte((val >> 32) & 0xff);
        return;
    }

    MOZ_CRASH("startDelta,length,index triple too large to encode.");
}
762 :
// Serialize one run of entries [start, end): a header with the run's
// overall native-offset range, a verbatim first entry, then delta-encoded
// entries. Returns false on writer OOM.
/* static */ bool
IonTrackedOptimizationsRegion::WriteRun(CompactBufferWriter& writer,
                                        const NativeToTrackedOptimizations* start,
                                        const NativeToTrackedOptimizations* end,
                                        const UniqueTrackedOptimizations& unique)
{
    // Write the header, which is the range that this whole run encompasses.
    JitSpew(JitSpew_OptimizationTrackingExtended, "     Header: [%" PRIuSIZE ", %" PRIuSIZE "]",
            start->startOffset.offset(), (end - 1)->endOffset.offset());
    writer.writeUnsigned(start->startOffset.offset());
    writer.writeUnsigned((end - 1)->endOffset.offset());

    // Write the first entry of the run, which is not delta-encoded.
    JitSpew(JitSpew_OptimizationTrackingExtended,
            "     [%6" PRIuSIZE ", %6" PRIuSIZE "] vector %3u, offset %4" PRIuSIZE,
            start->startOffset.offset(), start->endOffset.offset(),
            unique.indexOf(start->optimizations), writer.length());
    uint32_t prevEndOffset = start->endOffset.offset();
    writer.writeUnsigned(prevEndOffset);
    writer.writeByte(unique.indexOf(start->optimizations));

    // Delta encode the run.
    for (const NativeToTrackedOptimizations* entry = start + 1; entry != end; entry++) {
        uint32_t startOffset = entry->startOffset.offset();
        uint32_t endOffset = entry->endOffset.offset();

        uint32_t startDelta = startOffset - prevEndOffset;
        uint32_t length = endOffset - startOffset;
        uint8_t index = unique.indexOf(entry->optimizations);

        JitSpew(JitSpew_OptimizationTrackingExtended,
                "     [%6u, %6u] delta [+%5u, +%5u] vector %3u, offset %4" PRIuSIZE,
                startOffset, endOffset, startDelta, length, index, writer.length());

        WriteDelta(writer, startDelta, length, index);

        prevEndOffset = endOffset;
    }

    if (writer.oom())
        return false;

    return true;
}
807 :
// Write a 4-byte-aligned index table: padding count, entry count, then one
// reverse offset per entry (distance back from the table to the entry's
// payload). |*tableOffsetp| receives the table's position in the buffer.
// Returns false on writer OOM.
static bool
WriteOffsetsTable(CompactBufferWriter& writer, const Vector<uint32_t, 16>& offsets,
                  uint32_t* tableOffsetp)
{
    // 4-byte align for the uint32s.
    uint32_t padding = sizeof(uint32_t) - (writer.length() % sizeof(uint32_t));
    if (padding == sizeof(uint32_t))
        padding = 0;
    JitSpew(JitSpew_OptimizationTrackingExtended, "   Padding %u byte%s",
            padding, padding == 1 ? "" : "s");
    for (uint32_t i = 0; i < padding; i++)
        writer.writeByte(0);

    // Record the start of the table to compute reverse offsets for entries.
    uint32_t tableOffset = writer.length();

    // Write how many bytes were padded and numEntries.
    writer.writeNativeEndianUint32_t(padding);
    writer.writeNativeEndianUint32_t(offsets.length());

    // Write entry offset table.
    for (size_t i = 0; i < offsets.length(); i++) {
        JitSpew(JitSpew_OptimizationTrackingExtended, "   Entry %" PRIuSIZE " reverse offset %u",
                i, tableOffset - padding - offsets[i]);
        writer.writeNativeEndianUint32_t(tableOffset - padding - offsets[i]);
    }

    if (writer.oom())
        return false;

    *tableOffsetp = tableOffset;
    return true;
}
841 :
// Return the 'new' script constructor function for an object-group type,
// or nullptr when the type is unknown/any-object/non-group or has no
// TypeNewScript (checking the unboxed layout as a fallback).
static JSFunction*
MaybeConstructorFromType(TypeSet::Type ty)
{
    if (ty.isUnknown() || ty.isAnyObject() || !ty.isGroup())
        return nullptr;
    ObjectGroup* obj = ty.group();
    TypeNewScript* newScript = obj->newScript();
    if (!newScript && obj->maybeUnboxedLayout())
        newScript = obj->unboxedLayout().newScript();
    return newScript ? newScript->function() : nullptr;
}
853 :
// Extract the source filename and line number for an interpreted function,
// preferring its non-lazy script, then its lazy script. Functions with
// neither are reported as self-hosted builtins with no line number.
static void
InterpretedFunctionFilenameAndLineNumber(JSFunction* fun, const char** filename,
                                         Maybe<unsigned>* lineno)
{
    if (fun->hasScript()) {
        *filename = fun->nonLazyScript()->maybeForwardedScriptSource()->filename();
        *lineno = Some((unsigned) fun->nonLazyScript()->lineno());
    } else if (fun->lazyScriptOrNull()) {
        *filename = fun->lazyScript()->maybeForwardedScriptSource()->filename();
        *lineno = Some((unsigned) fun->lazyScript()->lineno());
    } else {
        *filename = "(self-hosted builtin)";
        *lineno = Nothing();
    }
}
869 :
// Spew a unique tracked type's constructor: its display name (escaped) plus
// filename and line for interpreted constructors, or a short note for
// native ones. No-op unless built with JS_JITSPEW.
//
// Fix: pass the buffer size via mozilla::ArrayLength(buf) to
// PutEscapedString instead of repeating the literal 512, matching the
// sibling snprintf call and keeping the size in one place.
static void
SpewConstructor(TypeSet::Type ty, JSFunction* constructor)
{
#ifdef JS_JITSPEW
    if (!constructor->isInterpreted()) {
        JitSpew(JitSpew_OptimizationTrackingExtended, "   Unique type %s has native constructor",
                TypeSet::TypeString(ty));
        return;
    }

    char buf[512];
    if (constructor->displayAtom())
        PutEscapedString(buf, mozilla::ArrayLength(buf), constructor->displayAtom(), 0);
    else
        snprintf(buf, mozilla::ArrayLength(buf), "??");

    const char* filename;
    Maybe<unsigned> lineno;
    InterpretedFunctionFilenameAndLineNumber(constructor, &filename, &lineno);

    JitSpew(JitSpew_OptimizationTrackingExtended, "   Unique type %s has constructor %s (%s:%u)",
            TypeSet::TypeString(ty), buf, filename, lineno.isSome() ? *lineno : 0);
#endif
}
894 :
// Spew a unique tracked type's allocation site (script filename and the
// line of the allocating bytecode). No-op unless built with JS_JITSPEW and
// the channel is enabled (PCToLineNumber is not free, so bail early).
static void
SpewAllocationSite(TypeSet::Type ty, JSScript* script, uint32_t offset)
{
#ifdef JS_JITSPEW
    if (!JitSpewEnabled(JitSpew_OptimizationTrackingExtended))
        return;

    JitSpew(JitSpew_OptimizationTrackingExtended, "   Unique type %s has alloc site %s:%u",
            TypeSet::TypeString(ty), script->filename(),
            PCToLineNumber(script, script->offsetToPC(offset)));
#endif
}
907 :
// Serialize all tracked-optimization data for a compiled script into
// |writer|'s compact buffer. Three tables are emitted in order: (1) native
// code regions mapping code ranges to optimization-info indices, (2) type
// info payloads, and (3) attempt payloads. Each table is followed by an
// offsets index written via WriteOffsetsTable. On success the out-params
// receive the region count and the buffer offsets of the three index
// tables; |allTypes| receives side-table addenda (constructors/alloc sites)
// for every unique type seen. Returns false on OOM.
bool
jit::WriteIonTrackedOptimizationsTable(JSContext* cx, CompactBufferWriter& writer,
                                       const NativeToTrackedOptimizations* start,
                                       const NativeToTrackedOptimizations* end,
                                       const UniqueTrackedOptimizations& unique,
                                       uint32_t* numRegions,
                                       uint32_t* regionTableOffsetp,
                                       uint32_t* typesTableOffsetp,
                                       uint32_t* optimizationTableOffsetp,
                                       IonTrackedTypeVector* allTypes)
{
    // The encoding below indexes into unique's sorted vector; sorting must
    // already have happened.
    MOZ_ASSERT(unique.sorted());

#ifdef JS_JITSPEW
    // Spew training data, which may be fed into a script to determine a good
    // encoding strategy.
    if (JitSpewEnabled(JitSpew_OptimizationTrackingExtended)) {
        JitSpewStart(JitSpew_OptimizationTrackingExtended, "=> Training data: ");
        for (const NativeToTrackedOptimizations* entry = start; entry != end; entry++) {
            JitSpewCont(JitSpew_OptimizationTrackingExtended, "%" PRIuSIZE ",%" PRIuSIZE ",%u ",
                        entry->startOffset.offset(), entry->endOffset.offset(),
                        unique.indexOf(entry->optimizations));
        }
        JitSpewFin(JitSpew_OptimizationTrackingExtended);
    }
#endif

    // Reused for each of the three offset index tables below.
    Vector<uint32_t, 16> offsets(cx);
    const NativeToTrackedOptimizations* entry = start;

    // Write out region offloads, partitioned into runs.
    JitSpew(JitSpew_Profiling, "=> Writing regions");
    while (entry != end) {
        uint32_t runLength = IonTrackedOptimizationsRegion::ExpectedRunLength(entry, end);
        JitSpew(JitSpew_OptimizationTrackingExtended,
                " Run at entry %" PRIuSIZE ", length %" PRIu32 ", offset %" PRIuSIZE,
                size_t(entry - start), runLength, writer.length());

        // Record where this run starts in the buffer before writing it.
        if (!offsets.append(writer.length()))
            return false;

        if (!IonTrackedOptimizationsRegion::WriteRun(writer, entry, entry + runLength, unique))
            return false;

        entry += runLength;
    }

    // Write out the table indexing into the payloads. 4-byte align for the uint32s.
    if (!WriteOffsetsTable(writer, offsets, regionTableOffsetp))
        return false;

    *numRegions = offsets.length();

    // Clear offsets so that it may be reused below for the unique
    // optimizations table.
    offsets.clear();

    const UniqueTrackedOptimizations::SortedVector& vec = unique.sortedVector();
    JitSpew(JitSpew_OptimizationTrackingExtended, "=> Writing unique optimizations table with %" PRIuSIZE " entr%s",
            vec.length(), vec.length() == 1 ? "y" : "ies");

    // Write out type info payloads.
    UniqueTrackedTypes uniqueTypes(cx);
    if (!uniqueTypes.init())
        return false;

    for (const UniqueTrackedOptimizations::SortEntry* p = vec.begin(); p != vec.end(); p++) {
        const TempOptimizationTypeInfoVector* v = p->types;
        JitSpew(JitSpew_OptimizationTrackingExtended,
                " Type info entry %" PRIuSIZE " of length %" PRIuSIZE ", offset %" PRIuSIZE,
                size_t(p - vec.begin()), v->length(), writer.length());
        SpewTempOptimizationTypeInfoVector(JitSpew_OptimizationTrackingExtended, v, " ");

        if (!offsets.append(writer.length()))
            return false;

        // writeCompact interns each type into uniqueTypes as a side effect.
        for (const OptimizationTypeInfo* t = v->begin(); t != v->end(); t++) {
            if (!t->writeCompact(cx, writer, uniqueTypes))
                return false;
        }
    }

    // Enumerate the unique types, and pull out any 'new' script constructor
    // functions and allocation site information. We do this during linking
    // instead of during profiling to avoid touching compartment tables during
    // profiling. Additionally, TypeNewScript is subject to GC in the
    // meantime.
    TypeSet::TypeList uniqueTypeList;
    if (!uniqueTypes.enumerate(&uniqueTypeList))
        return false;
    for (uint32_t i = 0; i < uniqueTypeList.length(); i++) {
        TypeSet::Type ty = uniqueTypeList[i];
        if (JSFunction* constructor = MaybeConstructorFromType(ty)) {
            if (!allTypes->append(IonTrackedTypeWithAddendum(ty, constructor)))
                return false;
            SpewConstructor(ty, constructor);
        } else {
            JSScript* script;
            uint32_t offset;
            // Only group types can carry an allocation-site addendum.
            if (!ty.isUnknown() && !ty.isAnyObject() && ty.isGroup() &&
                ObjectGroup::findAllocationSite(cx, ty.group(), &script, &offset))
            {
                if (!allTypes->append(IonTrackedTypeWithAddendum(ty, script, offset)))
                    return false;
                SpewAllocationSite(ty, script, offset);
            } else {
                // No addendum: record the bare type.
                if (!allTypes->append(IonTrackedTypeWithAddendum(ty)))
                    return false;
            }
        }
    }

    if (!WriteOffsetsTable(writer, offsets, typesTableOffsetp))
        return false;
    offsets.clear();

    // Write out attempts payloads.
    for (const UniqueTrackedOptimizations::SortEntry* p = vec.begin(); p != vec.end(); p++) {
        const TempOptimizationAttemptsVector* v = p->attempts;
        if (JitSpewEnabled(JitSpew_OptimizationTrackingExtended)) {
            JitSpew(JitSpew_OptimizationTrackingExtended,
                    " Attempts entry %" PRIuSIZE " of length %" PRIuSIZE ", offset %" PRIuSIZE,
                    size_t(p - vec.begin()), v->length(), writer.length());
            SpewTempOptimizationAttemptsVector(JitSpew_OptimizationTrackingExtended, v, " ");
        }

        if (!offsets.append(writer.length()))
            return false;

        for (const OptimizationAttempt* a = v->begin(); a != v->end(); a++)
            a->writeCompact(writer);
    }

    return WriteOffsetsTable(writer, offsets, optimizationTableOffsetp);
}
1043 :
1044 :
1045 : BytecodeSite*
1046 0 : IonBuilder::maybeTrackedOptimizationSite(jsbytecode* pc)
1047 : {
1048 : // BytecodeSites that track optimizations need to be 1-1 with the pc
1049 : // when optimization tracking is enabled, so that all MIR generated by
1050 : // a single pc are tracked at one place, even across basic blocks.
1051 : //
1052 : // Alternatively, we could make all BytecodeSites 1-1 with the pc, but
1053 : // there is no real need as optimization tracking is a toggled
1054 : // feature.
1055 : //
1056 : // Since sites that track optimizations should be sparse, just do a
1057 : // reverse linear search, as we're most likely advancing in pc.
1058 0 : MOZ_ASSERT(isOptimizationTrackingEnabled());
1059 0 : for (size_t i = trackedOptimizationSites_.length(); i != 0; i--) {
1060 0 : BytecodeSite* site = trackedOptimizationSites_[i - 1];
1061 0 : if (site->pc() == pc) {
1062 0 : MOZ_ASSERT(site->tree() == info().inlineScriptTree());
1063 0 : return site;
1064 : }
1065 : }
1066 0 : return nullptr;
1067 : }
1068 :
1069 : void
1070 3444 : IonBuilder::startTrackingOptimizations()
1071 : {
1072 3444 : if (isOptimizationTrackingEnabled()) {
1073 0 : BytecodeSite* site = maybeTrackedOptimizationSite(current->trackedSite()->pc());
1074 :
1075 0 : if (!site) {
1076 0 : site = current->trackedSite();
1077 0 : site->setOptimizations(new(alloc()) TrackedOptimizations(alloc()));
1078 : // OOMs are handled as if optimization tracking were turned off.
1079 0 : if (!trackedOptimizationSites_.append(site))
1080 0 : site = nullptr;
1081 0 : } else if (site->hasOptimizations()) {
1082 : // The same bytecode may be visited multiple times (see
1083 : // restartLoop). Only the last time matters, so clear any previous
1084 : // tracked optimizations.
1085 0 : site->optimizations()->clear();
1086 : }
1087 :
1088 : // The case of !site->hasOptimizations() means we had an OOM when
1089 : // previously attempting to track optimizations. Leave
1090 : // site->optimizations_ nullptr to leave optimization tracking off.
1091 :
1092 0 : if (site)
1093 0 : current->updateTrackedSite(site);
1094 : }
1095 3444 : }
1096 :
1097 : void
1098 0 : IonBuilder::trackTypeInfoUnchecked(TrackedTypeSite kind, MIRType mirType,
1099 : TemporaryTypeSet* typeSet)
1100 : {
1101 0 : BytecodeSite* site = current->trackedSite();
1102 : // OOMs are handled as if optimization tracking were turned off.
1103 0 : OptimizationTypeInfo typeInfo(alloc(), kind, mirType);
1104 0 : if (!typeInfo.trackTypeSet(typeSet)) {
1105 0 : site->setOptimizations(nullptr);
1106 0 : return;
1107 : }
1108 0 : if (!site->optimizations()->trackTypeInfo(mozilla::Move(typeInfo)))
1109 0 : site->setOptimizations(nullptr);
1110 : }
1111 :
1112 : void
1113 0 : IonBuilder::trackTypeInfoUnchecked(TrackedTypeSite kind, JSObject* obj)
1114 : {
1115 0 : BytecodeSite* site = current->trackedSite();
1116 : // OOMs are handled as if optimization tracking were turned off.
1117 0 : OptimizationTypeInfo typeInfo(alloc(), kind, MIRType::Object);
1118 0 : if (!typeInfo.trackType(TypeSet::ObjectType(obj)))
1119 0 : return;
1120 0 : if (!site->optimizations()->trackTypeInfo(mozilla::Move(typeInfo)))
1121 0 : site->setOptimizations(nullptr);
1122 : }
1123 :
1124 : void
1125 0 : IonBuilder::trackTypeInfoUnchecked(CallInfo& callInfo)
1126 : {
1127 0 : MDefinition* thisArg = callInfo.thisArg();
1128 0 : trackTypeInfoUnchecked(TrackedTypeSite::Call_This, thisArg->type(), thisArg->resultTypeSet());
1129 :
1130 0 : for (uint32_t i = 0; i < callInfo.argc(); i++) {
1131 0 : MDefinition* arg = callInfo.getArg(i);
1132 0 : trackTypeInfoUnchecked(TrackedTypeSite::Call_Arg, arg->type(), arg->resultTypeSet());
1133 : }
1134 :
1135 0 : TemporaryTypeSet* returnTypes = getInlineReturnTypeSet();
1136 0 : trackTypeInfoUnchecked(TrackedTypeSite::Call_Return, returnTypes->getKnownMIRType(),
1137 0 : returnTypes);
1138 0 : }
1139 :
1140 : void
1141 0 : IonBuilder::trackOptimizationAttemptUnchecked(TrackedStrategy strategy)
1142 : {
1143 0 : BytecodeSite* site = current->trackedSite();
1144 : // OOMs are handled as if optimization tracking were turned off.
1145 0 : if (!site->optimizations()->trackAttempt(strategy))
1146 0 : site->setOptimizations(nullptr);
1147 0 : }
1148 :
1149 : void
1150 0 : IonBuilder::amendOptimizationAttemptUnchecked(uint32_t index)
1151 : {
1152 0 : const BytecodeSite* site = current->trackedSite();
1153 0 : site->optimizations()->amendAttempt(index);
1154 0 : }
1155 :
1156 : void
1157 0 : IonBuilder::trackOptimizationOutcomeUnchecked(TrackedOutcome outcome)
1158 : {
1159 0 : const BytecodeSite* site = current->trackedSite();
1160 0 : site->optimizations()->trackOutcome(outcome);
1161 0 : }
1162 :
1163 : void
1164 0 : IonBuilder::trackOptimizationSuccessUnchecked()
1165 : {
1166 0 : const BytecodeSite* site = current->trackedSite();
1167 0 : site->optimizations()->trackSuccess();
1168 0 : }
1169 :
1170 : void
1171 0 : IonBuilder::trackInlineSuccessUnchecked(InliningStatus status)
1172 : {
1173 0 : if (status == InliningStatus_Inlined)
1174 0 : trackOptimizationOutcome(TrackedOutcome::Inlined);
1175 0 : }
1176 :
1177 : static JSFunction*
1178 0 : FunctionFromTrackedType(const IonTrackedTypeWithAddendum& tracked)
1179 : {
1180 0 : if (tracked.hasConstructor())
1181 0 : return tracked.constructor;
1182 :
1183 0 : TypeSet::Type ty = tracked.type;
1184 :
1185 0 : if (ty.isSingleton()) {
1186 0 : JSObject* obj = ty.singleton();
1187 0 : return obj->is<JSFunction>() ? &obj->as<JSFunction>() : nullptr;
1188 : }
1189 :
1190 0 : return ty.group()->maybeInterpretedFunction();
1191 : }
1192 :
// Decode one tracked type into human-readable strings and forward it to the
// wrapped op. Emits one of: "primitive", "native", "constructor"/"function",
// "alloc site", "prototype", or "singleton", with a best-effort name and
// location. |buf| is stack storage whose lifetime spans only the op_ call.
void
IonTrackedOptimizationsTypeInfo::ForEachOpAdapter::readType(const IonTrackedTypeWithAddendum& tracked)
{
    TypeSet::Type ty = tracked.type;

    // Non-object types have a canonical string form and no location.
    if (ty.isPrimitive() || ty.isUnknown() || ty.isAnyObject()) {
        op_.readType("primitive", TypeSet::NonObjectTypeString(ty), nullptr, Nothing());
        return;
    }

    char buf[512];
    const uint32_t bufsize = mozilla::ArrayLength(buf);

    if (JSFunction* fun = FunctionFromTrackedType(tracked)) {
        // The displayAtom is useful for identifying both native and
        // interpreted functions.
        char* name = nullptr;
        if (fun->displayAtom()) {
            PutEscapedString(buf, bufsize, fun->displayAtom(), 0);
            name = buf;
        }

        if (fun->isNative()) {
            //
            // Try printing out the displayAtom of the native function and the
            // absolute address of the native function pointer.
            //
            // Note that this address is not usable without knowing the
            // starting address at which our shared library is loaded. Shared
            // library information is exposed by the profiler. If this address
            // needs to be symbolicated manually (e.g., when it is gotten via
            // debug spewing of all optimization information), it needs to be
            // converted to an offset from the beginning of the shared library
            // for use with utilities like `addr2line` on Linux and `atos` on
            // OS X. Converting to an offset may be done via dladdr():
            //
            //   void* addr = JS_FUNC_TO_DATA_PTR(void*, fun->native());
            //   uintptr_t offset;
            //   Dl_info info;
            //   if (dladdr(addr, &info) != 0)
            //       offset = uintptr_t(addr) - uintptr_t(info.dli_fbase);
            //
            // locationBuf is only filled in when there is no name; the
            // ternary below only reads it in that case.
            char locationBuf[20];
            if (!name) {
                uintptr_t addr = JS_FUNC_TO_DATA_PTR(uintptr_t, fun->native());
                snprintf(locationBuf, mozilla::ArrayLength(locationBuf), "%" PRIxPTR, addr);
            }
            op_.readType("native", name, name ? nullptr : locationBuf, Nothing());
            return;
        }

        // Interpreted function: report its source location.
        const char* filename;
        Maybe<unsigned> lineno;
        InterpretedFunctionFilenameAndLineNumber(fun, &filename, &lineno);
        op_.readType(tracked.hasConstructor() ? "constructor" : "function",
                     name, filename, lineno);
        return;
    }

    // Non-function object: describe it by its class name.
    const char* className = ty.objectKey()->clasp()->name;
    snprintf(buf, bufsize, "[object %s]", className);

    if (tracked.hasAllocationSite()) {
        JSScript* script = tracked.script;
        op_.readType("alloc site", buf,
                     script->maybeForwardedScriptSource()->filename(),
                     Some(PCToLineNumber(script, script->offsetToPC(tracked.offset))));
        return;
    }

    if (ty.isGroup()) {
        op_.readType("prototype", buf, nullptr, Nothing());
        return;
    }

    op_.readType("singleton", buf, nullptr, Nothing());
}
1270 :
1271 : void
1272 0 : IonTrackedOptimizationsTypeInfo::ForEachOpAdapter::operator()(JS::TrackedTypeSite site,
1273 : MIRType mirType)
1274 : {
1275 0 : op_(site, StringFromMIRType(mirType));
1276 0 : }
1277 :
1278 : typedef JS::ForEachProfiledFrameOp::FrameHandle FrameHandle;
1279 :
1280 : void
1281 0 : FrameHandle::updateHasTrackedOptimizations()
1282 : {
1283 : // All inlined frames will have the same optimization information by
1284 : // virtue of sharing the JitcodeGlobalEntry, but such information is
1285 : // only interpretable on the youngest frame.
1286 0 : if (depth() != 0)
1287 0 : return;
1288 0 : if (!entry_.hasTrackedOptimizations())
1289 0 : return;
1290 :
1291 : uint32_t entryOffset;
1292 0 : optsIndex_ = entry_.trackedOptimizationIndexAtAddr(rt_, addr_, &entryOffset);
1293 0 : if (optsIndex_.isSome())
1294 0 : canonicalAddr_ = (void*)(((uint8_t*) entry_.nativeStartAddr()) + entryOffset);
1295 : }
1296 :
// Invoke |op| for each optimization attempt recorded at this frame's
// address, and report the frame's youngest script/pc through the out-params.
// Callers must have checked hasTrackedOptimizations() (optsIndex_ set by
// updateHasTrackedOptimizations).
JS_PUBLIC_API(void)
FrameHandle::forEachOptimizationAttempt(ForEachTrackedOptimizationAttemptOp& op,
                                        JSScript** scriptOut, jsbytecode** pcOut) const
{
    MOZ_ASSERT(optsIndex_.isSome());
    entry_.forEachOptimizationAttempt(rt_, *optsIndex_, op);
    entry_.youngestFrameLocationAtAddr(rt_, addr_, scriptOut, pcOut);
}
1305 :
// Invoke |op| for each piece of tracked type information at this frame's
// address. The adapter converts the internal representation into the
// string-based callbacks of the public API. Requires optsIndex_ to be set.
JS_PUBLIC_API(void)
FrameHandle::forEachOptimizationTypeInfo(ForEachTrackedOptimizationTypeInfoOp& op) const
{
    MOZ_ASSERT(optsIndex_.isSome());
    IonTrackedOptimizationsTypeInfo::ForEachOpAdapter adapter(op);
    entry_.forEachOptimizationTypeInfo(rt_, *optsIndex_, adapter);
}
|