Add performance tests (#31)
* Add performance tests for Explainer wrt WB vs BB methods
* Support raw perf data & summary output in csv / xlsx format
goodsong81 authored Jun 27, 2024
1 parent 8372a85 commit 0717b1a
Showing 6 changed files with 435 additions and 14 deletions.
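Note: judging from the options and fixtures added in this commit, the new suite can presumably be run as a regular pytest target, along the lines of pytest tests/perf/ --num-repeat 5 --num-masks 5000 --data-root .data --output-root .data (the perf-specific options come from tests/perf/conftest.py below, --data-root and --output-root from the shared tests/conftest.py; the concrete perf test modules are not shown in this diff).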
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -43,6 +43,9 @@ dev = [
val = [
"timm==0.9.5",
"onnx==1.14.1",
"pandas",
"py-cpuinfo",
"openpyxl",
]

[project.urls]
18 changes: 12 additions & 6 deletions tests/conftest.py
@@ -4,6 +4,7 @@

import logging
import os
from datetime import datetime, timedelta, timezone
from pathlib import Path

import pytest
@@ -17,13 +18,12 @@ def pytest_addoption(parser: pytest.Parser):
"--data-root",
action="store",
default=".data",
help="Data root directory.",
help="Data root directory. Defaults to '.data'",
)
parser.addoption(
"--output-root",
action="store",
default=".data",
help="Output root directory.",
help="Output root directory. Defaults to temp dir.",
)
parser.addoption(
"--clear-cache",
@@ -44,9 +44,15 @@ def fxt_data_root(request: pytest.FixtureRequest) -> Path:


@pytest.fixture(scope="session")
def fxt_output_root(request: pytest.FixtureRequest) -> Path:
"""Output root directory path."""
output_root = Path(request.config.getoption("--output-root"))
def fxt_output_root(
request: pytest.FixtureRequest,
tmp_path_factory: pytest.TempPathFactory,
) -> Path:
"""Output root."""
output_root = request.config.getoption("--output-root")
if output_root is None:
output_root = tmp_path_factory.mktemp("openvino_xai")
output_root = Path(output_root)
output_root.mkdir(parents=True, exist_ok=True)
msg = f"{output_root = }"
log.info(msg)
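For illustration, a minimal sketch of how a test might consume the session-scoped fxt_output_root fixture above (hypothetical test, not part of this commit); when --output-root is not given, the fixture now falls back to a pytest-managed temp directory:

from pathlib import Path

def test_writes_artifact(fxt_output_root: Path):
    # Resolves to --output-root if provided, otherwise to a tmp_path_factory dir.
    out_file = fxt_output_root / "example.txt"
    out_file.write_text("placeholder artifact")
    assert out_file.exists()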
9 changes: 1 addition & 8 deletions tests/intg/test_classification_timm.py
@@ -155,7 +155,7 @@ def test_classification_white_box(self, model_id, dump_maps=False):
# self.check_for_saved_map(model_id, "timm_models/maps_wb/")

if model_id in NON_SUPPORTED_BY_WB_MODELS:
pytest.xfail(reason="Not supported yet")
pytest.skip(reason="Not supported yet")

timm_model, model_cfg = self.get_timm_model(model_id)
self.update_report("report_wb.csv", model_id)
@@ -251,13 +251,6 @@ def test_classification_white_box(self, model_id, dump_maps=False):
self.update_report("report_wb.csv", model_id, "True", "True", "True", shape_str, str(map_saved))
self.clear_cache()

# sudo ln -s /usr/local/cuda-11.8/ cuda
# pip uninstall torch torchvision
# pip3 install --pre torch torchvision --index-url https://download.pytorch.org/whl/nightly/cu118
#
# ulimit -a
# ulimit -Sn 10000
# ulimit -a
@pytest.mark.parametrize("model_id", TEST_MODELS)
def test_classification_black_box(self, model_id, dump_maps=False):
# self.check_for_saved_map(model_id, "timm_models/maps_bb/")
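The xfail-to-skip change above affects only how unsupported models are reported. A minimal standalone sketch (not from this repository) of the difference:

import pytest

def test_reported_as_skipped():
    # pytest.skip() stops execution and reports the test as SKIPPED.
    pytest.skip(reason="Not supported yet")

def test_reported_as_xfailed():
    # pytest.xfail() stops execution and reports the test as XFAIL (expected failure).
    pytest.xfail(reason="Not supported yet")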
Empty file added tests/perf/__init__.py
Empty file.
175 changes: 175 additions & 0 deletions tests/perf/conftest.py
@@ -0,0 +1,175 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import logging
import os
import platform
import subprocess
from datetime import datetime, timedelta, timezone
from pathlib import Path

import pandas as pd
import pytest
from cpuinfo import get_cpu_info

log = logging.getLogger(__name__)


def pytest_addoption(parser: pytest.Parser):
"""Add custom options for OpenVINO XAI perf tests."""
parser.addoption(
"--num-repeat",
action="store",
default=5,
help="Number of trials for each model explain. "
"Random seeds are set to 0 ~ num_repeat-1 for the trials. "
"Defaults to 10.",
)
parser.addoption(
"--num-masks",
action="store",
default=5000,
help="Number of masks for black box methods." "Defaults to 5000.",
)


@pytest.fixture(scope="session")
def fxt_num_repeat(request: pytest.FixtureRequest) -> int:
"""Number of repeated trials."""
num_repeat = int(request.config.getoption("--num-repeat"))
msg = f"{num_repeat = }"
log.info(msg)
print(msg)
return num_repeat


@pytest.fixture(scope="session")
def fxt_num_masks(request: pytest.FixtureRequest) -> int:
"""Number of masks for black box methods."""
num_masks = int(request.config.getoption("--num-masks"))
msg = f"{num_masks = }"
log.info(msg)
print(msg)
return num_masks


@pytest.fixture(scope="session")
def fxt_current_date() -> str:
tz = timezone(offset=timedelta(hours=9), name="Seoul")
return datetime.now(tz=tz).strftime("%Y%m%d-%H%M%S")


@pytest.fixture(scope="session")
def fxt_output_root(
request: pytest.FixtureRequest,
tmp_path_factory: pytest.TempPathFactory,
fxt_current_date: str,
) -> Path:
"""Output root + dateh."""
output_root = request.config.getoption("--output-root")
if output_root is None:
output_root = tmp_path_factory.mktemp("openvino_xai")
output_root = Path(output_root) / "perf" / fxt_current_date
output_root.mkdir(parents=True, exist_ok=True)
msg = f"{output_root = }"
log.info(msg)
print(msg)
return output_root


@pytest.fixture(scope="session")
def fxt_tags(fxt_current_date: str) -> dict[str, str]:
"""Tag fields to record various metadata."""
try:
from importlib.metadata import version

version_str = version("openvino_xai")
except Exception:
version_str = "unknown"
try:
branch_str = (
subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).decode("ascii").strip()
) # noqa: S603, S607
except Exception:
branch_str = os.environ.get("GH_CTX_REF_NAME", "unknown")
try:
commit_str = (
subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).decode("ascii").strip()
) # noqa: S603, S607
except Exception:
commit_str = os.environ.get("GH_CTX_SHA", "unknown")
tags = {
"version": version_str,
"branch": branch_str,
"commit": commit_str,
"date": fxt_current_date,
"machine_name": platform.node(),
"cpu_info": get_cpu_info()["brand_raw"],
}
msg = f"{tags = }"
log.info(msg)
return tags


@pytest.fixture(scope="session", autouse=True)
def fxt_perf_summary(
fxt_output_root: Path,
fxt_tags: dict[str, str],
):
"""Summarize all results at the end of test session."""
yield

# Merge all raw data
raw_data = []
csv_files = fxt_output_root.rglob("perf-raw-*-*.csv")
for csv_file in csv_files:
data = pd.read_csv(csv_file)
raw_data.append(data)
if len(raw_data) == 0:
print("No raw data to summarize")
return
raw_data = pd.concat(raw_data, ignore_index=True)
raw_data = raw_data.drop(["Unnamed: 0"], axis=1)
raw_data = raw_data.replace(
{
"Method.RECIPROCAM": "RECIPROCAM",
"Method.VITRECIPROCAM": "RECIPROCAM",
"Method.RISE": "RISE",
}
)
raw_data.to_csv(fxt_output_root / "perf-raw-all.csv", index=False)

# Summarize
data = raw_data.pivot_table(
index=["model", "version"],
columns=["method"],
values=["time"],
aggfunc=["mean", "std"],
)
data.columns = data.columns.rename(["stat", "metric", "method"])
data = data.reorder_levels(["method", "metric", "stat"], axis=1)
data0 = data

data = raw_data.pivot_table(
index=["version"],
columns=["method"],
values=["time"],
aggfunc=["mean", "std"],
)
indices = data.index.to_frame()
indices["model"] = "all"
data.index = pd.MultiIndex.from_frame(indices)
data = data.reorder_levels(["model", "version"], axis=0)
data.columns = data.columns.rename(["stat", "metric", "method"])
data = data.reorder_levels(["method", "metric", "stat"], axis=1)
data1 = data

data = pd.concat([data0, data1], axis=0)
data = data.sort_index(axis=0).sort_index(axis=1)

print("=" * 20, "[Perf summary]")
print(data)
data.to_csv(fxt_output_root / "perf-summary.csv")
data.to_excel(fxt_output_root / "perf-summary.xlsx")
print(f" -> Saved to {fxt_output_root}")
