// Copyright (c) 2005-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32test\bench\t_asmbm.cpp
//
//

#include "t_asmbm.h"
#include <hal.h>
#include <e32test.h>

extern RTest test;

const TReal KDefaultRunLength = 1.0;
const TInt KInitIterations = 3000;
|
// Length of time to run each benchmark for, in seconds
TReal RunLength = KDefaultRunLength;

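// Fast counter attributes used to convert tick deltas to seconds; these are
// read from the HAL in RunBenchmarkTestsL()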
TInt FastCounterFrequency;
TBool FastCounterCountsUp;

/**
 * Calculate the time in seconds corresponding to a fast counter delta value.
 */
TReal TimeDelta(TInt aDelta)
    {
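    // The fast counter counts down rather than up on some platforms, so
    // normalise the delta to a positive number of ticks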
    if (!FastCounterCountsUp)
        aDelta = -aDelta;
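    // One tick is added to the delta, presumably to allow for quantisation
    // error when sampling the counter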
    return ((TReal) aDelta + 1) / FastCounterFrequency;
    }
|
/**
 * Run a benchmark for the specified number of iterations and return the total
 * time taken in seconds.
 */
TReal TimeBenchmarkL(MBenchmarkList& aBenchmarks, TInt aIndex, const TBmParams& aParams)
    {
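    // Run the benchmark; the fast counter delta measured over the run is
    // returned in delta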
    TInt delta = 0;
    User::LeaveIfError(aBenchmarks.Run(aIndex, aParams, delta));
    User::After(20 * 1000);  // hack: wait for kernel thread to exit
    return TimeDelta(delta);
    }

void RunGeneralBenchmarkL(MBenchmarkList& aBenchmarks, TInt aIndex, const TBmInfo& aInfo)
    {
    // Run the benchmark with a small number of iterations and from this result
    // work out how many iterations we need to run it for RunLength seconds.
    // Loop till we get it right.

    TBmParams params;
    params.iSourceAlign = 0;
    params.iDestAlign = 0;

    TInt iterations = KInitIterations;
    TReal time;
    for (;;)
        {
        params.iIts = iterations / 10;
        time = TimeBenchmarkL(aBenchmarks, aIndex, params);
        if (time >= RunLength)
            break;
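        // Scale the iteration count by RunLength / time, with 20% headroom,
        // and try again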
        iterations = (TInt) ((RunLength * 1.2) / (time/(TReal)iterations));
        }

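    // Report the index, the mean time per iteration in microseconds and the
    // benchmark name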
    TBuf<64> nameBuf;
    nameBuf.Copy(aInfo.iName);
    test.Printf(_L("%i\t%e\t%S\n"), aIndex, time/iterations * 1000000.0, &nameBuf);
    }

void RunMemoryBenchmarkL(MBenchmarkList& aBenchmarks, TInt aIndex, const TBmInfo& aInfo)
    {
    // Run the benchmark with a small number of iterations and from this result
    // work out how many iterations we need to run it for RunLength seconds.
    // Loop till we get it right.

    TBmParams params;
    params.iSourceAlign = 0;
    params.iDestAlign = 0;

    TInt iterations = KInitIterations;
    TReal time;
    for (;;)
        {
        params.iIts = iterations / 10;
        time = TimeBenchmarkL(aBenchmarks, aIndex, params);
        if (time >= RunLength)
            break;
        iterations = (TInt) ((RunLength * 1.2) / (time/(TReal)iterations));
        }

    TBuf<64> nameBuf;
    nameBuf.Copy(aInfo.iName);
    test.Printf(_L("%i\t%S\talignment step == %d\n"), aIndex, &nameBuf, aInfo.iAlignStep);

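    // Time every combination of source and destination alignment in the range
    // 0..31, stepping by the benchmark's alignment step, and print a table of
    // the mean times per iteration in microseconds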
    for (TInt sourceAlign = 0 ; sourceAlign < 32 ; sourceAlign += aInfo.iAlignStep)
        {
        for (TInt destAlign = 0 ; destAlign < 32 ; destAlign += aInfo.iAlignStep)
            {
            params.iSourceAlign = sourceAlign;
            params.iDestAlign = destAlign;
            time = TimeBenchmarkL(aBenchmarks, aIndex, params);
            test.Printf(_L("%e\t"), time/iterations * 1000000.0);
            }
        test.Printf(_L("\n"));
        }
    }

void RunBenchmarkL(MBenchmarkList& aBenchmarks, TInt aIndex, TUint aCategories)
    {
    TBmInfo info;
    User::LeaveIfError(aBenchmarks.Info(aIndex, info));

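    // Skip benchmarks that don't fall into any of the requested categories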
    if (!(info.iCategories & aCategories))
        return;

    if (info.iCategories & aCategories & KCategoryMemory)
        RunMemoryBenchmarkL(aBenchmarks, aIndex, info);
    else
        RunGeneralBenchmarkL(aBenchmarks, aIndex, info);
    }

void BadUsage()
    {
    test.Printf(_L("usage: [ OPTIONS ] [ INDEX... ]\n"));
    test.Printf(_L("Options are:\n"));
    test.Printf(_L("  -r TIME  Set the length of time in seconds to run each benchmark for\n"));
    test.Printf(_L("  -m       Run memory alignment benchmarks only\n"));
    test.Printf(_L("  -x       Run extra benchmarks as well as normal ones\n"));
    }

void RunBenchmarkTestsL(MBenchmarkList& aBenchmarks)
    {
    InitDataL();

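    // Cache the fast counter attributes used by TimeDelta()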
    User::LeaveIfError(HAL::Get(HALData::EFastCounterFrequency, FastCounterFrequency));
    User::LeaveIfError(HAL::Get(HALData::EFastCounterCountsUp, FastCounterCountsUp));

    TInt count = aBenchmarks.Count();
    TBool ok = ETrue;
    TUint categories = KCategoryGeneral;

    RArray<TInt> testsToRun;
    CleanupClosePushL(testsToRun);

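    // Parse the command line: options select the run length and categories,
    // and any bare numbers are taken as indices of benchmarks to run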
    HBufC* buf = HBufC::NewLC(User::CommandLineLength());
    TPtr ptr = buf->Des();
    User::CommandLine(ptr);

    if (ptr != KNullDesC)
        {
        TLex lex(ptr);
        TPtrC16 token;

        while (ok && (token.Set(lex.NextToken()), token != KNullDesC))
            {
            if (token == _L("-r"))
                {
                token.Set(lex.NextToken());
                if (token == KNullDesC ||
                    TLex(token).Val(RunLength) != KErrNone ||
                    RunLength < 0.0)
                    {
                    BadUsage();
                    ok = EFalse;
                    }
                }
            else if (token == _L("-m"))
                {
                categories = KCategoryMemory;
                }
            else if (token == _L("-x"))
                {
                categories = KCategoryGeneral | KCategoryExtra;
                }
            else
                {
                TInt index;
                if (TLex(token).Val(index) != KErrNone)
                    {
                    BadUsage();
                    ok = EFalse;
                    }
                else if (index < 0 || index >= count)
                    {
                    test.Printf(_L("Index out of range: %d\n"), index);
                    ok = EFalse;
                    }
                else
                    {
                    testsToRun.AppendL(index);
                    }
                }
            }
        }

    CleanupStack::PopAndDestroy(buf);

    if (ok)
        {
        test.Printf(_L("Note that these benchmarks are intended to guide optimisation, and not to\n"));
        test.Printf(_L("provide a meaningful indication of the speed of specific functions\n"));
        test.Printf(_L("\n"));
        if (testsToRun.Count() == 0)
            {
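            // No benchmark indices were given on the command line, so run
            // every benchmark in the selected categories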
            for (TInt i = 0 ; i < count ; ++i)
                {
                RunBenchmarkL(aBenchmarks, i, categories);
                }
            }
        else
            {
            for (TInt i = 0 ; i < testsToRun.Count() ; ++i)
                {
                RunBenchmarkL(aBenchmarks, testsToRun[i], categories);
                }
            }
        }

    CleanupStack::PopAndDestroy(&testsToRun);
    }