1
%% ``The contents of this file are subject to the Erlang Public License,
2
%% Version 1.1, (the "License"); you may not use this file except in
3
%% compliance with the License. You should have received a copy of the
4
%% Erlang Public License along with this software. If not, it can be
5
%% retrieved via the world wide web at http://www.erlang.org/.
7
%% Software distributed under the License is distributed on an "AS IS"
8
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
9
%% the License for the specific language governing rights and limitations
12
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
13
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
14
%% AB. All Rights Reserved.''
%% NOTE(review): the -module/-export forms were missing from this copy of
%% the file (it cannot compile without them); reconstructed here — confirm
%% the export list against the original source.
-module(bench).

-export([run/0, run/1, report/0]).
%% Exported to be used in spawn
-export([measure/4]).

%% Upper bound used when searching for the minimum benchmark time.
-define(MAX, 999999999999999).
%% Range for erlang:phash/2 when fingerprinting the benchmark setup.
-define(RANGE_MAX, 16#7ffffff).
31
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
33
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
35
%%---------------------------------------------------------------------------
38
%% Compiles and runs all benchmarks in the current directory,
39
%% and creates a report
40
%%---------------------------------------------------------------------------
%% Entry point: compile and run every benchmark in the current directory
%% using the default compiler options (see compiler_options/0 and run/1).
%% NOTE(review): the head was missing in this copy; reconstructed from the
%% "Help function to run/0" doc below.
run() ->
    run(compiler_options()).
45
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
46
%%% Generic Benchmark functions
47
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
49
%%---------------------------------------------------------------------------
50
%% compiler_options() -> OptionsList
51
%% OptionsList = list() - See Erlang/OTP module compile
52
%%---------------------------------------------------------------------------
%% Default options passed to the compiler (see c:c/2).
compiler_options() ->
    [report_errors, report_warnings].
56
%%---------------------------------------------------------------------------
57
%% run(OptionsList) ->
58
%% OptionsList = list() - See Erlang/OTP module compile
60
%% Help function to run/0.
61
%%---------------------------------------------------------------------------
%% Help function to run/0: compile everything, then execute the benchmarks.
run(OptionsList) ->
    Bms = compile_benchmarks(OptionsList),
    run_benchmarks(Bms).
67
%%---------------------------------------------------------------------------
68
%% compile_benchmarks(OptionsList) -> [BmInfo| _]
69
%% OptionsList = list() - See Erlang/OTP module compile
70
%% BmInfo = {Module, Iterations, [BmFunctionName| _]}
72
%% Iterations = integer()
73
%% BmFunctionName = atom()
75
%% Compiles all benchmark modules in the current directory and
76
%% returns info about the benchmarks.
77
%%---------------------------------------------------------------------------
78
compile_benchmarks(OptionsList) ->
    {ok, FilesInCurrentDir} = file:list_dir("."),
    %% Only .erl files, in a stable (sorted) order.
    ErlFiles = [ErlFile || ErlFile <- lists:sort(FilesInCurrentDir),
                           lists:suffix(".erl", ErlFile)],
    lists:foldr(fun(File, BmInfoAcc) ->
                        case lists:suffix("_bm.erl", File) of
                            %% Benchmark module: compile it and collect
                            %% its {Module, Iterations, Tests} info.
                            true ->
                                BmInfo = bm_compile(File, OptionsList),
                                [BmInfo | BmInfoAcc];
                            %% Support module: just compile it.
                            false ->
                                just_compile(File, OptionsList),
                                BmInfoAcc
                        end
                end, [], ErlFiles).
93
%%---------------------------------------------------------------------------
94
%% just_compile(FileName, OptionsList) -> ok
95
%% FileName = string()
96
%% OptionsList = list() - See Erlang/OTP module compile
98
%% Compiles a support module.
99
%%---------------------------------------------------------------------------
100
just_compile(FileName, OptionsList) ->
    io:format("Compiling ~s...\n", [FileName]), % Progress info to user
    case c:c(FileName, OptionsList) of
        {ok, _Mod} ->
            ok;
        %% If compilation fails there is no point in trying to continue
        error ->
            exit(self(),
                 io_lib:format("Could not compile file ~s", [FileName]))
    end.
112
%%---------------------------------------------------------------------------
113
%% bm_compile(FileName, OptionsList) -> BmInfo
114
%% FileName = string()
115
%% OptionsList = list() - See Erlang/OTP module compile
116
%% BmInfo = {Module, Iterations, [BmFunctionName| _]}
117
%% Iterations = integer()
119
%% BmFunctionName = atom()
121
%% Compiles the benchmark module implemented in <FileName> and returns
122
%% information about the benchmark tests.
123
%%---------------------------------------------------------------------------
124
bm_compile(FileName, OptionsList) ->
    io:format("Compiling ~s...\n", [FileName]), % Progress info to user
    case c:c(FileName, OptionsList) of
        {ok, Mod} ->
            %% Ask the freshly compiled module about its benchmark cases.
            bm_cases(Mod);
        %% If compilation fails there is no point in trying to continue
        error ->
            exit(self(),
                 io_lib:format("Could not compile file ~s", [FileName]))
    end.
136
%%---------------------------------------------------------------------------
137
%% bm_cases(Module) -> {Module, Iter, [BmFunctionName |_]}
140
%% BmFunctionName = atom()
142
%% Fetches the number of iterations and the names of the benchmark
143
%% functions for the module <Module>.
144
%%---------------------------------------------------------------------------
bm_cases(Module) ->
    %% catch: Module:benchmarks/0 may not be exported or may crash.
    case catch Module:benchmarks() of
        %% is_integer/is_list replace the old-style integer/1, list/1
        %% guard tests, which are no longer supported.
        {Iter, BmList} when is_integer(Iter), is_list(BmList) ->
            {Module, Iter, BmList};
        %% The benchmark is incorrectly implemented; there is no point
        %% in trying to continue
        Other ->
            exit(self(),
                 io_lib:format("Incorrect return value: ~p "
                               "from ~p:benchmarks()",
                               [Other, Module]))
    end.
159
%%---------------------------------------------------------------------------
160
%% run_benchmarks(Bms) ->
161
%% Bms = [{Module, Iter, [BmFunctionName |_]} | _]
164
%% BmFunctionName = atom()
166
%% Runs all the benchmark tests described in <Bms>.
167
%%---------------------------------------------------------------------------
168
run_benchmarks(Bms) ->
    Ver = erlang:system_info(version),
    Machine = erlang:system_info(machine),
    SysInfo = {Ver, Machine},
    Res = [bms_run(Mod, Tests, Iter, SysInfo) || {Mod, Iter, Tests} <- Bms],

    %% Create an intermediate file that is later used to generate a bench
    %% mark report; named <EmulatorVersion>.<Machine>.bmres.
    Name = Ver ++ [$. | Machine] ++ ".bmres",
    {ok, IntermediateFile} = file:open(Name, [write]),

    %% Create mark that identifies the version of the benchmark modules,
    %% so that results from different benchmark setups are never compared.
    io:format(IntermediateFile, "~p.\n", [erlang:phash(Bms, ?RANGE_MAX)]),

    io:format(IntermediateFile, "~p.\n", [Res]),
    file:close(IntermediateFile).
186
%%---------------------------------------------------------------------------
187
%% bms_run(Module, BmTests, Iter, Info) ->
189
%% BmTests = [BmFunctionName|_],
190
%% BmFunctionName = atom()
192
%% SysInfo = {Ver, Machine}
194
%% Machine = string()
196
%% Runs all benchmark tests in module <Module>.
197
%%---------------------------------------------------------------------------
198
%% NOTE(review): tail of this function was truncated in this copy;
%% reconstructed (bind result, terminate progress line, return) — confirm.
bms_run(Module, BmTests, Iter, SysInfo) ->
    io:format("Running ~s:", [Module]), % Progress info to user
    Res =
        {Module, {SysInfo, [{Bm, bm_run(Module, Bm, Iter)} || Bm <- BmTests]}},
    io:nl(),
    Res.
204
%%---------------------------------------------------------------------------
205
%% bm_run(Module, BmTest, Iter) -> Elapsed
209
%% Elapsed = integer() - elapsed time in milliseconds.
211
%% Runs the benchmark Module:BmTest(Iter)
212
%%---------------------------------------------------------------------------
213
bm_run(Module, BmTest, Iter) ->
    io:format(" ~s", [BmTest]), % Progress info to user
    %% Run the measurement in its own process so statistics(runtime)
    %% and crashes stay isolated from this process.
    spawn_link(?MODULE, measure, [self(), Module, BmTest, Iter]),
    receive
        %% NOTE(review): receive block reconstructed — presumably
        %% benchmark functions return ok on success; confirm.
        {Elapsed, ok} ->
            Elapsed;
        {_, Fault} ->
            io:nl(),
            exit(self(),
                 io_lib:format("~w", [Fault]))
    end.
226
%%---------------------------------------------------------------------------
227
%% measure(Parent, Module, BmTest, Iter) -> _
233
%% Measures the time it take to execute Module:Bm(Iter)
234
%% and send the result to <Parent>.
235
%%---------------------------------------------------------------------------
236
%% Measures the runtime of Module:BmTest(Iter) and sends
%% {TimeSinceLastCall, Result} to <Parent>.
measure(Parent, Module, BmTest, Iter) ->
    %% Reset the runtime statistics counter so that TimeSinceLastCall
    %% below covers only the benchmark execution (without this reset it
    %% would include everything since the last statistics/1 call).
    statistics(runtime),
    %% catch: a crashing benchmark is reported, not propagated.
    Res = (catch apply(Module, BmTest, [Iter])),
    {_TotalRunTime, TimeSinceLastCall} = statistics(runtime),
    Parent ! {TimeSinceLastCall, Res}.
243
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
245
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
247
%%---------------------------------------------------------------------------
250
%% Creates a report of the bench marking test that appeals to a human.
251
%% Currently this means creating a html-file. (Other formats could be added)
252
%%---------------------------------------------------------------------------
%% Collects all *.bmres intermediate files in the current directory and
%% turns them into a human-readable (html) report.
%% NOTE(review): head was missing in this copy; `report/0` inferred from
%% the surrounding docs — confirm the original name.
report() ->
    {ok, AllFiles} = file:list_dir("."),
    BmResultFiles = [File || File <- AllFiles, lists:suffix(".bmres", File)],
    Results = fetch_bmres_data(BmResultFiles),
    create_report(Results).
260
%%---------------------------------------------------------------------------
261
%% fetch_bmres_data(BmResultFiles) -> Results
262
%% BmResultFiles = [FileName | _]
263
%% FileName = string()
264
%% Results = [[{Bm, Res} | _]]
265
%% Bm = atom() - Name of benchmark module
266
%% Res = [{VersionInfo, [{Test, Time} | _]}]
267
%% VersionInfo = {Ver, Machine}
269
%% Machine = string()
273
%% Reads result data from intermediate files
274
%%---------------------------------------------------------------------------
275
%% Start the accumulation with no results and no setup-fingerprint yet
%% (undefined is accepted as "first file" by read_bmres_file/2).
fetch_bmres_data(BmResultFiles) ->
    fetch_bmres_data(BmResultFiles, [], undefined).
278
%%---------------------------------------------------------------------------
279
%% fetch_bmres_data(BmResultFiles, AccResData, Check) -> Results
280
%% BmResultFiles = [FileName | _]
281
%% FileName = string()
282
%% AccResData = see Results fetch_bmres_data/1
283
%% Check = integer() | undefined (first time)
285
%% Help function to fetch_bmres_data/1
286
%%---------------------------------------------------------------------------
287
%% All files consumed: return the accumulated per-file result lists.
fetch_bmres_data([], AccResData, _Check) ->
    AccResData;
fetch_bmres_data([Name | BmResultFiles], AccResData, Check) ->
    {DataList, NewCheck} = read_bmres_file(Name, Check),
    fetch_bmres_data(BmResultFiles, [DataList | AccResData], NewCheck).
294
%%---------------------------------------------------------------------------
295
%% read_bmres_file(Name, Check) ->
297
%% Check = integer() | undefined
299
%% Reads the data from the result files. Checks that all result
300
%% files were created with the same set of tests.
301
%%---------------------------------------------------------------------------
302
%% Reads one .bmres file: term 1 is the setup fingerprint, term 2 the
%% result list. Returns {ResultList, Fingerprint}; aborts if the
%% fingerprint differs from <Check>.
read_bmres_file(Name, Check) ->
    case file:consult(Name) of
        %% First file read: accept its fingerprint as the reference.
        {ok, [Check1, List]} when Check =:= undefined, is_integer(Check1) ->
            {List, Check1};
        %% Fingerprint matches the reference (Check is already bound).
        {ok, [Check, List]} when is_integer(Check) ->
            {List, Check};
        %% Fingerprint differs: results come from another test setup.
        {ok, [Check1, _List]} when is_integer(Check1) ->
            Reason =
                io_lib:format("Different test setup, remove old setup "
                              "result by removing *.bmres files and "
                              "try again", []),
            exit(self(), Reason);
        {error, Reason} when is_atom(Reason) ->
            exit(self(), Reason);
        {error, Reason} ->
            %% file:format/1 (as in the original copy) does not exist;
            %% file:format_error/1 renders the reason as a string.
            exit(self(), file:format_error(Reason))
    end.
321
%%---------------------------------------------------------------------------
322
%% create_report(Results) ->
323
%% Results = see Results fetch_bmres_data/1
325
%% Organizes <Result> so it will be right for create_html_report/1
326
%% i.e. group results for the same benchmark test, run on different versions
328
%%---------------------------------------------------------------------------
329
create_report(Results) ->
    %% Group all per-file results by benchmark module name.
    Dictionary =
        lists:foldl(fun(BmResultList, Dict0) ->
                            lists:foldl(fun({Bm, VerResult}, Dict1) ->
                                                dict:append(Bm, VerResult,
                                                            Dict1)
                                        end, Dict0, BmResultList)
                    end,
                    dict:new(), Results),
    create_html_report(dict:to_list(Dictionary)).
340
%%---------------------------------------------------------------------------
341
%% create_html_report(ResultList) -> _
342
%% ResultList = [{Bm, Res} | _]
343
%% Bm = atom() - Name of benchmark module
344
%% Res = [{VersionInfo, [{Test, Time} | _]} | _]
345
%% VersionInfo = {Ver, Machine}
347
%% Machine = string()
351
%% Writes the result to an html-file
352
%%---------------------------------------------------------------------------
353
create_html_report(ResultList) ->
    {ok, OutputFile} = file:open("index.html", [write]),

    %% Create the beginning of the result html-file.
    Head = Title = "Benchmark Results",
    io:put_chars(OutputFile, "<html>\n"),
    io:put_chars(OutputFile, "<head>\n"),
    io:format(OutputFile, "<title>~s</title>\n", [Title]),
    io:put_chars(OutputFile, "</head>\n"),
    io:put_chars(OutputFile, "<body bgcolor=\"#FFFFFF\" text=\"#000000\"" ++
                 " link=\"#0000FF\" vlink=\"#800080\" alink=\"#FF0000\">\n"),
    io:format(OutputFile, "<h1>~s</h1>\n", [Head]),

    %% Add one result table per benchmark module.
    lists:foreach(fun(Element) ->
                          create_html_table(OutputFile, Element) end,
                  ResultList),

    %% Put in the end-html tags
    io:put_chars(OutputFile, "</body>\n"),
    io:put_chars(OutputFile, "</html>\n"),

    file:close(OutputFile).
378
%%---------------------------------------------------------------------------
379
%% create_html_table(File, {Bm, Res}) -> _
380
%% File = file() - html file to write data to.
381
%% Bm = atom() - Name of benchmark module
382
%% Res = [{VersionInfo, [{Test, Time} | _]}]
383
%% VersionInfo = {Ver, Machine}
385
%% Machine = string()
389
%% Creates a html table that displays the result of the benchmark <Bm>.
390
%%---------------------------------------------------------------------------
391
create_html_table(File, {Bm, Res}) ->
    {MinTime, Order} = min_time_and_sort(Res),

    io:format(File, "<h2>~s</h2>\n", [Bm]),

    %% Fun that calculates relative measure values (Time/MinTime) and
    %% puts them in a dictionary keyed on test name.
    RelativeMesureFun = fun({TestName, Time}, Dict1) ->
                                dict:append(TestName, Time/MinTime, Dict1)
                        end,

    %% For all erlang versions that the benchmark tests has been run,
    %% calculate the relative measure values and put them in a dictionary.
    ResultDict =
        lists:foldl(fun({_VerInfo, Bms}, Dict0) ->
                            lists:foldl(RelativeMesureFun, Dict0, Bms) end,
                    dict:new(), Res),

    %% Create the table and its headings
    io:put_chars(File, "<table border=0 cellpadding=1><tr>"
                 "<td bgcolor=\"#000000\">\n"),
    io:put_chars(File, "<table cellpadding=3 border=0 cellspacing=1>\n"),
    io:put_chars(File, "<tr bgcolor=white>"),
    io:put_chars(File, "<td>Test</td>"),
    Heads = table_headers(Res),
    lists:foreach(fun({Ver, Machine}) -> io:format(File, "<td>~s<br>~s</td>",
                                                   [Ver, Machine]) end, Heads),
    io:put_chars(File, "</tr>\n"),

    %% One row per test, in order of increasing execution time.
    lists:foreach(fun(Name) ->
                          create_html_row(File, Name, ResultDict)
                  end, Order),

    %% Table end-tags
    io:put_chars(File, "</table></td></tr></table>\n"),

    %% Create link to benchmark source code
    io:format(File, "<p><a href=\"~s.erl\">Source for ~s.erl</a>\n",
              [Bm, Bm]).
433
%%---------------------------------------------------------------------------
434
%% create_html_row(File, Name, Dict) -> _
435
%% File = file() - html file to write data to.
436
%% Name = atom() - Name of benchmark test
437
%% Dict = dict() - Dictionary where the relative time measures for
438
%% the test can be found.
440
%% Creates an actual html table-row.
441
%%---------------------------------------------------------------------------
442
create_html_row(File, Name, Dict) ->
    RelativeTimes = dict:fetch(Name, Dict),
    io:put_chars(File, "<tr bgcolor=white>\n"),
    io:format(File, "<td>~s</td>", [Name]),
    %% One cell per version, formatted as a fixed-width float.
    lists:foreach(fun(Time) ->
                          io:format(File, "<td>~-8.2f</td>", [Time]) end,
                  RelativeTimes),
    io:put_chars(File, "</tr>\n").
451
%%---------------------------------------------------------------------------
452
%% min_time_and_sort(ResultList) -> {MinTime, Order}
453
%% ResultList = [{VersionInfo, [{Test, Time} | _]}]
454
%% MinTime = integer() - The execution time of the fastest test
455
%% Order = [BmFunctionName|_] - the order of the testcases in
456
%% increasing execution time.
457
%% BmFunctionName = atom()
458
%%---------------------------------------------------------------------------
459
%% NOTE(review): fold bodies were truncated in this copy; reconstructed —
%% confirm against the original.
min_time_and_sort(ResultList) ->
    %% Use the results from the run on the highest version
    %% of Erlang as norm.
    {_, TestRes} =
        lists:foldl(fun ({{Ver, _}, ResList},
                         {CurrentVer, _}) when Ver > CurrentVer ->
                            {Ver, ResList};
                        (_, Acc) ->
                            Acc
                    end, {"0", []}, ResultList),

    %% Minimum execution time, and test names sorted by increasing time.
    {lists:foldl(fun ({_, Time0}, Min1) when Time0 < Min1 ->
                         Time0;
                     (_, Min1) ->
                         Min1
                 end, ?MAX, TestRes),
     [Name || {Name, _} <- lists:keysort(2, TestRes)]}.
478
%%---------------------------------------------------------------------------
479
%% table_headers(VerResultList) -> SysInfo
480
%% VerResultList = [{{Ver, Machine},[{BmFunctionName, Time}]} | _]
482
%% Machine = string()
483
%% BmFunctionName = atom()
485
%% SysInfo = {Ver, Machine}
486
%%---------------------------------------------------------------------------
487
%% Extract the {Ver, Machine} system-info tag from every result entry,
%% preserving order.
table_headers(VerResultList) ->
    lists:map(fun({SysInfo, _Result}) -> SysInfo end, VerResultList).