Commit 5ccd8bc

remove inference speed from hash
1 parent 1049072 commit 5ccd8bc

File tree

3 files changed: +3 -6 lines changed

torchbench/image_classification/utils.py

Lines changed: 1 addition & 2 deletions
@@ -42,8 +42,7 @@ def evaluate_classification(
 
         if i == 5: # for sotabench.com caching of evaluation
             memory_allocated = torch.cuda.memory_allocated(device=device)
-            tasks_per_second = test_loader.batch_size*inference_time.avg
-            run_hash = calculate_run_hash([prec1, prec5, np.round(tasks_per_second, 1)], output)
+            run_hash = calculate_run_hash([prec1, prec5], output)
             # if we are in check model we don't need to go beyond the first
             # batch
             if in_check_mode():
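
The removed tasks_per_second value is derived from inference_time.avg, which changes from run to run, so including it presumably made the run hash unstable and defeated the sotabench.com evaluation caching the comment refers to. A minimal sketch of that effect, using a hypothetical run_hash helper and made-up metric values (neither is torchbench code):

    import hashlib

    # Hypothetical illustration, not torchbench code: if a timing measurement is
    # mixed into the hash, two otherwise identical evaluations produce different
    # keys and the cache never hits.
    def run_hash(values):
        return hashlib.sha256(repr([round(float(v), 4) for v in values]).encode()).hexdigest()

    prec1, prec5 = 76.13, 92.86              # deterministic accuracy metrics (made-up values)
    speed_run_a, speed_run_b = 412.7, 408.3  # tasks/second, varies with hardware and load

    assert run_hash([prec1, prec5]) == run_hash([prec1, prec5])
    assert run_hash([prec1, prec5, speed_run_a]) != run_hash([prec1, prec5, speed_run_b])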

torchbench/object_detection/utils.py

Lines changed: 1 addition & 2 deletions
@@ -221,8 +221,7 @@ def evaluate_detection_coco(
 
         if i == 5: # for sotabench.com caching of evaluation
             memory_allocated = torch.cuda.memory_allocated(device=device)
-            tasks_per_second = test_loader.batch_size*inference_time.avg
-            run_hash = calculate_run_hash([np.round(tasks_per_second, 1)], original_output)
+            run_hash = calculate_run_hash([], original_output)
             # if we are in check model we don't need to go beyond the first
             # batch
             if in_check_mode():

torchbench/semantic_segmentation/utils.py

Lines changed: 1 addition & 2 deletions
@@ -160,8 +160,7 @@ def evaluate_segmentation(
 
         if i == 5: # for sotabench.com caching of evaluation
             memory_allocated = torch.cuda.memory_allocated(device=device)
-            tasks_per_second = test_loader.batch_size*inference_time.avg
-            run_hash = calculate_run_hash([np.round(tasks_per_second, 1)], output)
+            run_hash = calculate_run_hash([], output)
             # if we are in check model we don't need to go beyond the first
             # batch
             if in_check_mode():
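
The same change is applied at all three call sites: timing-derived values no longer feed the hash, leaving only deterministic accuracy metrics (or an empty list) plus the model output. The body of calculate_run_hash is not part of this commit, so the sketch below is only an assumed illustration of how such a run hash could be built from those remaining inputs, not the actual torchbench implementation:

    import hashlib
    import json

    import torch


    def calculate_run_hash(metrics, output):
        # Assumed sketch (the real torchbench body is not shown in this diff):
        # build a stable cache key from deterministic evaluation values only.
        if isinstance(output, torch.Tensor):
            # A small fixed slice keeps the key cheap to compute while still
            # changing whenever the model's predictions change.
            sample = output.detach().cpu().flatten()[:100].tolist()
        else:
            sample = str(output)[:1000]
        payload = json.dumps(
            {"metrics": [float(m) for m in metrics], "output": sample},
            sort_keys=True,
        )
        return hashlib.sha256(payload.encode("utf-8")).hexdigest()

After this commit the classification call is calculate_run_hash([prec1, prec5], output), while detection and segmentation pass an empty metric list, so any hash like this would key only on the model output there.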
