
Commit 8127604

⚡️ Speed up method AlexNet._classify by 391%
Here is a faster version of your program. The main optimization is to hoist `total % self.num_classes` out of the list comprehension so it is computed only once, then build the output with `[result] * len(features)`. This replaces `O(n)` modulo operations with a single one and removes Python's per-element loop overhead.
1 parent 535a9b1 commit 8127604
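
For context, the change reads as a small standalone sketch (simplified: `num_classes` is a module-level constant here instead of `self.num_classes`, and the function names are made up for the side-by-side comparison):

# Sketch of the _classify change; num_classes stands in for self.num_classes.
num_classes = 1000

def classify_before(features):
    # Original shape: the modulo is re-evaluated for every element,
    # even though the loop variable is never used.
    total = sum(features)
    return [total % num_classes for _ in features]

def classify_after(features):
    # Optimized shape: compute the modulo once, then repeat the value
    # with list multiplication instead of looping in Python.
    total = sum(features)
    result = total % num_classes
    return [result] * len(features)

Because the repeated value is an immutable int, `[result] * len(features)` is safe here; the same trick would need care with mutable elements, since every slot would reference the same object.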

File tree: 1 file changed, +9 −4 lines
  • code_to_optimize/code_directories/simple_tracer_e2e

code_to_optimize/code_directories/simple_tracer_e2e/workload.py

Lines changed: 9 additions & 4 deletions
@@ -2,7 +2,7 @@
 
 
 def funcA(number):
-    number = number if number < 1000 else 1000
+    number = min(1000, number)
     k = 0
     for i in range(number * 100):
         k += i
@@ -21,14 +21,15 @@ def test_threadpool() -> None:
     for r in result:
         print(r)
 
+
 class AlexNet:
     def __init__(self, num_classes=1000):
         self.num_classes = num_classes
         self.features_size = 256 * 6 * 6
 
     def forward(self, x):
         features = self._extract_features(x)
-
+
         output = self._classify(features)
         return output
 
@@ -41,17 +42,20 @@ def _extract_features(self, x):
 
     def _classify(self, features):
         total = sum(features)
-        return [total % self.num_classes for _ in features]
+        result = total % self.num_classes
+        return [result] * len(features)
+
 
 class SimpleModel:
     @staticmethod
     def predict(data):
         return [x * 2 for x in data]
-
+
     @classmethod
     def create_default(cls):
         return cls()
 
+
 def test_models():
     model = AlexNet(num_classes=10)
     input_data = [1, 2, 3, 4, 5]
@@ -60,6 +64,7 @@ def test_models():
     model2 = SimpleModel.create_default()
     prediction = model2.predict(input_data)
 
+
 if __name__ == "__main__":
     test_threadpool()
     test_models()
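
To reproduce the comparison locally, a minimal timing sketch along these lines should work (the input size, repeat count, and standalone helper functions are assumptions, not the harness that produced the 391% figure):

import timeit

NUM_CLASSES = 1000
features = list(range(100_000))

def classify_old(features):
    total = sum(features)
    return [total % NUM_CLASSES for _ in features]

def classify_new(features):
    total = sum(features)
    result = total % NUM_CLASSES
    return [result] * len(features)

# The two versions must agree before their timings are comparable.
assert classify_old(features) == classify_new(features)

print("old:", timeit.timeit(lambda: classify_old(features), number=100))
print("new:", timeit.timeit(lambda: classify_new(features), number=100))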
