|
| 1 | +# Performance testing extension for Unity Test Runner |
| 2 | + |
| 3 | +Extension provides a set of calls to make it easier to take measurements and record profiler markers. It also collects data about build and player settings which is useful when comparing data for separating different hardware and configurations. |
| 4 | + |
| 5 | +## Installing |
| 6 | +To install this package, follow the instructions in the [Package Manager documentation](https://docs.unity3d.com/Packages/com.unity.package-manager-ui@latest/index.html). |
| 7 | + |
| 8 | +And add `com.unity.test-framework.performance` to your project's package manifest.
| 9 | +YourProject/Packages/manifest.json |
| 10 | + |
| 11 | +``` json |
| 12 | +{ |
| 13 | + "dependencies": { |
| 14 | + "com.unity.test-framework.performance": "0.1.37-preview", |
| 15 | + "com.unity.modules.jsonserialize": "1.0.0", |
| 16 | + "com.unity.modules.unitywebrequest": "1.0.0", |
| 17 | + "com.unity.modules.unityanalytics": "1.0.0", |
| 18 | + "com.unity.modules.vr": "1.0.0" |
| 19 | + }, |
| 20 | + "testables": [ |
| 21 | + "com.unity.test-framework.performance" |
| 22 | + ], |
| 23 | + "registry": "https://staging-packages.unity.com" |
| 24 | +} |
| 25 | +``` |
| 26 | + |
| 27 | +If you are using 2018.1 or 2018.2 the module dependencies are unnecessary. |
| 28 | + |
| 29 | +Assembly definitions should reference `Unity.PerformanceTesting` in order to use it. Create a new folder for storing tests, then create a new asset from the context menu (`right click/Create/Assembly definition`). In the Inspector for the assembly file, check "Test Assemblies" and apply. Then open the file in a text editor and add `Unity.PerformanceTesting` to the references.
| 30 | + |
| 31 | +``` json |
| 32 | +{ |
| 33 | + "name": "Tests.Editor", |
| 34 | + "references": [ |
| 35 | + "Unity.PerformanceTesting" |
| 36 | + ], |
| 37 | + "optionalUnityReferences": [ |
| 38 | + "TestAssemblies" |
| 39 | + ], |
| 40 | + "includePlatforms": [ |
| 41 | + "Editor" |
| 42 | + ], |
| 43 | + "excludePlatforms": [], |
| 44 | + "allowUnsafeCode": false |
| 45 | +} |
| 46 | +``` |
| 47 | + |
| 48 | +How to test internals can be found in the following link: |
| 49 | +https://q.unity3d.com/questions/992/how-to-test-internal-variables-in-the-editor-tests.html |
| 50 | + |
| 51 | +For more information on how to create and run tests, please refer to the [Unity Test Runner docs](https://docs.unity3d.com/Manual/testing-editortestsrunner.html).
| 52 | + |
| 53 | + |
| 54 | +## Test Attributes |
| 55 | +**[PerformanceTest]** - Non-yielding performance test.
| 56 | + |
| 57 | +**[PerformanceUnityTest]** - Yielding performance test.
| 58 | + |
| 59 | +**[Version(string version)]** - Performance tests should be versioned with every change. If not specified, the version is assumed to be 1.
| 60 | + |
| 61 | + |
| 62 | +## SampleGroupDefinition |
| 63 | + |
| 64 | +**struct SampleGroupDefinition** |
| 65 | +SampleGroupDefinition is used to define how a measurement is used in reporting and in regression detection. |
| 66 | + |
| 67 | +Required parameters |
| 68 | +- **name** : Name of the measurement. Should be kept short and simple. |
| 69 | + |
| 70 | +Optional parameters |
| 71 | +- **sampleUnit** : Unit of the measurement. |
| 72 | + - Nanosecond, Microsecond, Millisecond, Second, Byte, Kilobyte, Megabyte, Gigabyte |
| 73 | +- **aggregationType** : Preferred aggregation (default is median) |
| 74 | +- **percentile** : If aggregationType is Percentile, the percentile value used for the aggregation. i.e 0.95. |
| 75 | +- **threshold** : Threshold used for regression detection. If the current sample value differs from the baseline by more than the threshold, the result is considered a regression or a progression. Default value is 0.15f.
| 76 | +- **increaseIsBetter** : Defines whether an increase in the measurement value is considered a progression (better) or a regression. Default is false.
| 77 | + |
| 78 | +If unspecified a default SampleGroupDefinition will be used with the name of "Measure.Scope", it is recommended to specify a name that is descriptive of what it is measuring. |
| 79 | + |
| 80 | +## Taking measurements |
| 81 | + |
| 82 | +Preferred way is to use `Measure.Method` or `Measure.Frames`. They both do a couple of warmup iterations which are then used to decide how many iterations per measurement should be used. |
| 83 | + |
| 84 | +**MethodMeasurement Measure.Method()**
| 85 | + |
| 86 | +It will execute provided method at least 3 times for warmup and 7 for measurements. |
| 87 | + |
| 88 | +``` csharp |
| 89 | +[PerformanceTest] |
| 90 | +public void Test() |
| 91 | +{ |
| 92 | + Measure.Method(() => { ... }).Run(); |
| 93 | +} |
| 94 | +``` |
| 95 | + |
| 96 | +In cases where you feel the default values are not ideal you can specify custom iterations. |
| 97 | + |
| 98 | +WarmupCount - how many iterations to run without measuring for warmup |
| 99 | +MeasurementCount - how many measurements to take |
| 100 | +IterationsPerMeasurement - how many iterations per measurement to take |
| 101 | +GC - measures the amount of GC allocations |
| 102 | + |
| 103 | +``` |
| 104 | +[PerformanceTest] |
| 105 | +public void Test() |
| 106 | +{ |
| 107 | + Measure.Method(() => { ... }) |
| 108 | + .WarmupCount(10) |
| 109 | + .MeasurementCount(10) |
| 110 | + .IterationsPerMeasurement(5) |
| 111 | + .GC() |
| 112 | + .Run(); |
| 113 | +} |
| 114 | +``` |
| 115 | + |
| 116 | +**FramesMeasurement Measure.Frames()** |
| 117 | + |
| 118 | +Used to yield for frames. It will automatically select the number of warmup and runtime frames. |
| 119 | + |
| 120 | +``` csharp |
| 121 | +[PerformanceUnityTest] |
| 122 | +public IEnumerator Test() |
| 123 | +{ |
| 124 | + ... |
| 125 | + |
| 126 | + yield return Measure.Frames().Run(); |
| 127 | +} |
| 128 | +``` |
| 129 | + |
| 130 | +In cases where you are measuring a system over frametime it is advised to disable frametime measurements and instead measure profiler markers for your system. |
| 131 | +``` csharp |
| 132 | +[PerformanceUnityTest] |
| 133 | +public IEnumerator Test() |
| 134 | +{ |
| 135 | + ... |
| 136 | + |
| 137 | + yield return Measure.Frames() |
| 138 | + .ProfilerMarkers(...) |
| 139 | + .DontRecordFrametime() |
| 140 | + .Run(); |
| 141 | +} |
| 142 | +``` |
| 143 | + |
| 144 | +If you want more control, you can specify how many frames you want to measure. |
| 145 | + |
| 146 | +``` csharp |
| 147 | +[PerformanceUnityTest] |
| 148 | +public IEnumerator Test() |
| 149 | +{ |
| 150 | + ... |
| 151 | + |
| 152 | + yield return Measure.Frames() |
| 153 | + .WarmupCount(5) |
| 154 | + .MeasurementCount(10) |
| 155 | + .Run(); |
| 156 | +} |
| 157 | +``` |
| 158 | + |
| 159 | +When method or frame measurements are not enough, you can use the following to take measurements of a Scope, Frames, Markers or Custom values.
| 160 | + |
| 161 | +**IDisposable Measure.Scope(SampleGroupDefinition sampleGroupDefinition)**
| 162 | + |
| 163 | +Used to measure a scope. |
| 164 | + |
| 165 | +``` csharp |
| 166 | +[PerformanceTest] |
| 167 | +public void Test() |
| 168 | +{ |
| 169 | + using(Measure.Scope()) |
| 170 | + { |
| 171 | + ... |
| 172 | + } |
| 173 | +} |
| 174 | +``` |
| 175 | + |
| 176 | +**IDisposable Measure.FrameTimes(SampleGroupDefinition sampleGroupDefinition)**
| 177 | + |
| 178 | +Records frame times for a scope. |
| 179 | + |
| 180 | +``` csharp |
| 181 | +[PerformanceUnityTest] |
| 182 | +public IEnumerator Test() |
| 183 | +{ |
| 184 | + using (Measure.Frames().Scope()) |
| 185 | + { |
| 186 | + yield return ...; |
| 187 | + } |
| 188 | +} |
| 189 | +``` |
| 190 | + |
| 191 | + |
| 192 | +**IDisposable Measure.ProfilerMarkers(SampleGroupDefinition[] sampleGroupDefinitions)** |
| 193 | + |
| 194 | +Records profiler samples for a scope. The name of each sample group definition has to match a profiler sample name.
| 195 | + |
| 196 | +``` csharp |
| 197 | +[PerformanceTest] |
| 198 | +public void Test() |
| 199 | +{ |
| 200 | + SampleGroupDefinition[] m_definitions = |
| 201 | + { |
| 202 | + new SampleGroupDefinition("Instantiate"), |
| 203 | + new SampleGroupDefinition("Instantiate.Copy"), |
| 204 | + new SampleGroupDefinition("Instantiate.Produce"), |
| 205 | + new SampleGroupDefinition("Instantiate.Awake") |
| 206 | + }; |
| 207 | + |
| 208 | + using(Measure.ProfilerMarkers(m_definitions)) |
| 209 | + { |
| 210 | + ... |
| 211 | + } |
| 212 | +} |
| 213 | +``` |
| 214 | + |
| 215 | + |
| 216 | +**void Custom(SampleGroupDefinition sampleGroupDefinition, double value)** |
| 217 | + |
| 218 | +Records a custom sample. It can be any double value. A sample group definition is required. |
| 219 | + |
| 220 | +``` csharp |
| 221 | +[PerformanceTest] |
| 222 | +public void Test() |
| 223 | +{ |
| 224 | + var definition = new SampleGroupDefinition("TotalAllocatedMemory", SampleUnit.Megabyte); |
| 225 | + Measure.Custom(definition, Profiler.GetTotalAllocatedMemoryLong() / 1048576f); |
| 226 | +} |
| 227 | +``` |
| 228 | + |
| 229 | +## Output |
| 230 | + |
| 231 | +Each performance test will have a performance test summary. Every sample group will have multiple aggregated samples such as median, min, max, average, standard deviation, sample count, count of zero samples and sum of all samples. |
| 232 | + |
| 233 | +`Time Millisecond Median:53.59 Min:53.36 Max:62.10 Avg:54.07 Std:1.90 Zeroes:0 SampleCount: 19 Sum: 1027.34` |
| 234 | + |
| 235 | +## Examples |
| 236 | + |
| 237 | +``` csharp |
| 238 | + [PerformanceTest, Version("2")] |
| 239 | + public void Serialize_SimpleObject() |
| 240 | + { |
| 241 | + var obj = new SimpleObject(); |
| 242 | + obj.Init(); |
| 243 | + |
| 244 | + Measure.Method(() => JsonUtility.ToJson(obj)) |
| 245 | + .Definition(sampleUnit: SampleUnit.Microsecond) |
| 246 | + .Run(); |
| 247 | + } |
| 248 | + |
| 249 | + [Serializable] |
| 250 | + public class SimpleObject |
| 251 | + { |
| 252 | + public int IntField; |
| 253 | + public string StringField; |
| 254 | + public float FloatField; |
| 255 | + public bool BoolField; |
| 256 | + |
| 257 | + [Serializable] |
| 258 | + public struct NestedStruct |
| 259 | + { |
| 260 | + public int A, B; |
| 261 | + } |
| 262 | + |
| 263 | + public NestedStruct Str; |
| 264 | + |
| 265 | + public Vector3 Position; |
| 266 | + |
| 267 | + public void Init() |
| 268 | + { |
| 269 | + IntField = 1; |
| 270 | + StringField = "Test"; |
| 271 | + FloatField = 2.0f; |
| 272 | + BoolField = false; |
| 273 | + Str.A = 15; |
| 274 | + Str.B = 20; |
| 275 | + } |
| 276 | + } |
| 277 | +``` |
| 278 | + |
| 279 | + |
| 280 | + |
| 281 | +``` csharp |
| 282 | + SampleGroupDefinition[] m_definitions = |
| 283 | + { |
| 284 | + new SampleGroupDefinition("Instantiate"), |
| 285 | + new SampleGroupDefinition("Instantiate.Copy"), |
| 286 | + new SampleGroupDefinition("Instantiate.Produce"), |
| 287 | + new SampleGroupDefinition("Instantiate.Awake") |
| 288 | + }; |
| 289 | + |
| 290 | + [PerformanceTest] |
| 291 | + public void Instantiate_CreateCubes() |
| 292 | + { |
| 293 | + using (Measure.ProfilerMarkers(m_definitions)) |
| 294 | + { |
| 295 | + using(Measure.Scope()) |
| 296 | + { |
| 297 | + var cube = GameObject.CreatePrimitive(PrimitiveType.Cube); |
| 298 | + for (var i = 0; i < 5000; i++) |
| 299 | + { |
| 300 | + UnityEngine.Object.Instantiate(cube); |
| 301 | + } |
| 302 | + } |
| 303 | + } |
| 304 | + } |
| 305 | +``` |
| 306 | + |
| 307 | + |
| 308 | +``` csharp |
| 309 | + [PerformanceUnityTest] |
| 310 | + public IEnumerator Rendering_SampleScene() |
| 311 | + { |
| 312 | + using(Measure.Scope(new SampleGroupDefinition("Setup.LoadScene"))) |
| 313 | + { |
| 314 | + SceneManager.LoadScene("SampleScene"); |
| 315 | + } |
| 316 | + yield return null; |
| 317 | + |
| 318 | + yield return Measure.Frames().Run(); |
| 319 | + } |
| 320 | +``` |
| 321 | + |
| 322 | + |
| 323 | +``` csharp |
| 324 | + // Records allocated and reserved memory, specifies that the sample unit is in Megabytes. |
| 325 | +
|
| 326 | + [PerformanceTest, Version("1")] |
| 327 | + public void Measure_Empty() |
| 328 | + { |
| 329 | + var allocated = new SampleGroupDefinition("TotalAllocatedMemory", SampleUnit.Megabyte); |
| 330 | + var reserved = new SampleGroupDefinition("TotalReservedMemory", SampleUnit.Megabyte); |
| 331 | + Measure.Custom(allocated, Profiler.GetTotalAllocatedMemoryLong() / 1048576f); |
| 332 | + Measure.Custom(reserved, Profiler.GetTotalReservedMemoryLong() / 1048576f); |
| 333 | + } |
| 334 | +``` |
0 commit comments