58
62
static const int kSizeLimit = 1500;
64
// Constants for counter based profiler.
66
// Number of times a function has to be seen on the stack before it is
68
static const int kProfilerTicksBeforeOptimization = 2;
69
// If the function optimization was disabled due to high deoptimization count,
70
// but the function is hot and has been seen on the stack this number of times,
71
// then we try to reenable optimization for this function.
72
static const int kProfilerTicksBeforeReenablingOptimization = 250;
73
// If a function does not have enough type info (according to
74
// FLAG_type_info_threshold), but has seen a huge number of ticks,
75
// optimize it as it is.
76
static const int kTicksWhenNotEnoughTypeInfo = 100;
77
// We only have one byte to store the number of ticks.
78
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
79
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
80
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);
83
// Maximum size in bytes of generated code for a function to be optimized
84
// the very first time it is seen on the stack.
85
static const int kMaxSizeEarlyOpt =
86
5 * FullCodeGenerator::kBackEdgeDistanceUnit;
61
89
// Cross-isolate profiler activity counter shared by all isolates.
Atomic32 RuntimeProfiler::state_ = 0;
62
// TODO(isolates): Create the semaphore lazily and clean it up when no
64
// NOTE(review): two conflicting semaphore definitions appear here — this
// eagerly created static member and, below, a lazily initialized file-local
// static. This looks like an unresolved merge of two revisions; exactly one
// of the two should be kept.
Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
91
// TODO(isolates): Clean up the semaphore when it is no longer required.
92
static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER;
67
// NOTE(review): the "globally set up" flag is defined twice under two
// spellings ("setup_" vs. "set_up_"), and later code in this file references
// both spellings — resolve to a single name together with GlobalSetUp/SetUp.
bool RuntimeProfiler::has_been_globally_setup_ = false;
95
bool RuntimeProfiler::has_been_globally_set_up_ = false;
69
97
// True when optimization is available (Crankshaft enabled and FLAG_opt set);
// assigned in GlobalSetUp.
bool RuntimeProfiler::enabled_ = false;
75
103
sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
76
104
sampler_ticks_until_threshold_adjustment_(
77
105
kSamplerTicksBetweenThresholdAdjustment),
78
sampler_window_position_(0) {
106
sampler_window_position_(0),
107
any_ic_changed_(false),
108
code_generated_(false) {
79
109
ClearSampleBuffer();
83
void RuntimeProfiler::GlobalSetup() {
84
ASSERT(!has_been_globally_setup_);
113
void RuntimeProfiler::GlobalSetUp() {
114
ASSERT(!has_been_globally_set_up_);
85
115
enabled_ = V8::UseCrankshaft() && FLAG_opt;
87
has_been_globally_setup_ = true;
117
has_been_globally_set_up_ = true;
92
void RuntimeProfiler::Optimize(JSFunction* function) {
122
static void GetICCounts(JSFunction* function,
123
int* ic_with_type_info_count,
127
*ic_with_type_info_count = 0;
129
function->shared()->code()->type_feedback_info();
130
if (raw_info->IsTypeFeedbackInfo()) {
131
TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
132
*ic_with_type_info_count = info->ic_with_type_info_count();
133
*ic_total_count = info->ic_total_count();
135
*percentage = *ic_total_count > 0
136
? 100 * *ic_with_type_info_count / *ic_total_count
141
void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
93
142
ASSERT(function->IsOptimizable());
94
143
if (FLAG_trace_opt) {
95
144
PrintF("[marking ");
96
145
function->PrintName();
97
146
PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
98
PrintF(" for recompilation");
147
PrintF(" for recompilation, reason: %s", reason);
148
if (FLAG_type_info_threshold > 0) {
149
int typeinfo, total, percentage;
150
GetICCounts(function, &typeinfo, &total, &percentage);
151
PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
102
// The next call to the function will trigger optimization.
103
function->MarkForLazyRecompilation();
156
if (FLAG_parallel_recompilation) {
157
function->MarkForParallelRecompilation();
159
// The next call to the function will trigger optimization.
160
function->MarkForLazyRecompilation();
107
165
void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
108
166
// See AlwaysFullCompiler (in compiler.cc) comment on why we need
109
167
// Debug::has_break_points().
110
ASSERT(function->IsMarkedForLazyRecompilation());
168
ASSERT(function->IsMarkedForLazyRecompilation() ||
169
function->IsMarkedForParallelRecompilation());
111
170
if (!FLAG_use_osr ||
112
171
isolate_->DebuggerHasBreakPoints() ||
113
172
function->IsBuiltin()) {
186
255
JSFunction* samples[kSamplerFrameCount];
187
256
int sample_count = 0;
188
257
int frame_count = 0;
258
int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
259
: kSamplerFrameCount;
189
260
for (JavaScriptFrameIterator it(isolate_);
190
frame_count++ < kSamplerFrameCount && !it.done();
261
frame_count++ < frame_count_limit && !it.done();
192
263
JavaScriptFrame* frame = it.frame();
193
264
JSFunction* function = JSFunction::cast(frame->function());
195
// Adjust threshold each time we have processed
196
// a certain number of ticks.
197
if (sampler_ticks_until_threshold_adjustment_ > 0) {
198
sampler_ticks_until_threshold_adjustment_--;
199
if (sampler_ticks_until_threshold_adjustment_ <= 0) {
200
// If the threshold is not already at the minimum
201
// modify and reset the ticks until next adjustment.
202
if (sampler_threshold_ > kSamplerThresholdMin) {
203
sampler_threshold_ -= kSamplerThresholdDelta;
204
sampler_ticks_until_threshold_adjustment_ =
205
kSamplerTicksBetweenThresholdAdjustment;
266
if (!FLAG_watch_ic_patching) {
267
// Adjust threshold each time we have processed
268
// a certain number of ticks.
269
if (sampler_ticks_until_threshold_adjustment_ > 0) {
270
sampler_ticks_until_threshold_adjustment_--;
271
if (sampler_ticks_until_threshold_adjustment_ <= 0) {
272
// If the threshold is not already at the minimum
273
// modify and reset the ticks until next adjustment.
274
if (sampler_threshold_ > kSamplerThresholdMin) {
275
sampler_threshold_ -= kSamplerThresholdDelta;
276
sampler_ticks_until_threshold_adjustment_ =
277
kSamplerTicksBetweenThresholdAdjustment;
210
if (function->IsMarkedForLazyRecompilation()) {
211
Code* unoptimized = function->shared()->code();
212
int nesting = unoptimized->allow_osr_at_loop_nesting_level();
283
SharedFunctionInfo* shared = function->shared();
284
Code* shared_code = shared->code();
286
if (shared_code->kind() != Code::FUNCTION) continue;
288
if (function->IsMarkedForLazyRecompilation() ||
289
function->IsMarkedForParallelRecompilation()) {
290
int nesting = shared_code->allow_osr_at_loop_nesting_level();
213
291
if (nesting == 0) AttemptOnStackReplacement(function);
214
292
int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
215
unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting);
293
shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
296
// Only record top-level code on top of the execution stack and
297
// avoid optimizing excessively large scripts since top-level code
298
// will be executed only once.
299
const int kMaxToplevelSourceSize = 10 * 1024;
300
if (shared->is_toplevel() &&
301
(frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
218
305
// Do not record non-optimizable functions.
306
if (shared->optimization_disabled()) {
307
if (shared->deopt_count() >= FLAG_max_opt_count) {
308
// If optimization was disabled due to many deoptimizations,
309
// then check if the function is hot and try to reenable optimization.
310
int ticks = shared_code->profiler_ticks();
311
if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
312
shared_code->set_profiler_ticks(0);
313
shared->TryReenableOptimization();
315
shared_code->set_profiler_ticks(ticks + 1);
219
320
if (!function->IsOptimizable()) continue;
220
samples[sample_count++] = function;
222
int function_size = function->shared()->SourceSize();
223
int threshold_size_factor = (function_size > kSizeLimit)
224
? sampler_threshold_size_factor_
227
int threshold = sampler_threshold_ * threshold_size_factor;
229
if (LookupSample(function) >= threshold) {
322
if (FLAG_watch_ic_patching) {
323
int ticks = shared_code->profiler_ticks();
325
if (ticks >= kProfilerTicksBeforeOptimization) {
326
int typeinfo, total, percentage;
327
GetICCounts(function, &typeinfo, &total, &percentage);
328
if (percentage >= FLAG_type_info_threshold) {
329
// If this particular function hasn't had any ICs patched for enough
330
// ticks, optimize it now.
331
Optimize(function, "hot and stable");
332
} else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
333
Optimize(function, "not much type info but very hot");
335
shared_code->set_profiler_ticks(ticks + 1);
336
if (FLAG_trace_opt_verbose) {
337
PrintF("[not yet optimizing ");
338
function->PrintName();
339
PrintF(", not enough type info: %d/%d (%d%%)]\n",
340
typeinfo, total, percentage);
343
} else if (!any_ic_changed_ &&
344
shared_code->instruction_size() < kMaxSizeEarlyOpt) {
345
// If no IC was patched since the last tick and this function is very
346
// small, optimistically optimize it now.
347
Optimize(function, "small function");
349
shared_code->set_profiler_ticks(ticks + 1);
351
} else { // !FLAG_watch_ic_patching
352
samples[sample_count++] = function;
354
int function_size = function->shared()->SourceSize();
355
int threshold_size_factor = (function_size > kSizeLimit)
356
? sampler_threshold_size_factor_
359
int threshold = sampler_threshold_ * threshold_size_factor;
361
if (LookupSample(function) >= threshold) {
362
Optimize(function, "sampler window lookup");
234
// Add the collected functions as samples. It's important not to do
235
// this as part of collecting them because this will interfere with
236
// the sample lookup in case of recursive functions.
237
for (int i = 0; i < sample_count; i++) {
238
AddSample(samples[i], kSamplerFrameWeight[i]);
366
if (FLAG_watch_ic_patching) {
367
any_ic_changed_ = false;
368
} else { // !FLAG_watch_ic_patching
369
// Add the collected functions as samples. It's important not to do
370
// this as part of collecting them because this will interfere with
371
// the sample lookup in case of recursive functions.
372
for (int i = 0; i < sample_count; i++) {
373
AddSample(samples[i], kSamplerFrameWeight[i]);
243
379
void RuntimeProfiler::NotifyTick() {
380
if (FLAG_count_based_interrupts) return;
244
381
isolate_->stack_guard()->RequestRuntimeProfilerTick();
248
void RuntimeProfiler::Setup() {
249
ASSERT(has_been_globally_setup_);
385
void RuntimeProfiler::SetUp() {
386
ASSERT(has_been_globally_set_up_);
387
if (!FLAG_watch_ic_patching) {
251
390
// If the ticker hasn't already started, make sure to do so to get
252
391
// the ticks for the runtime profiler.
253
392
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();