Commit

choice to omit large case in test
milhidaka committed Feb 15, 2022
1 parent 38a60fd commit 40bf68e
Showing 4 changed files with 67 additions and 51 deletions.
12 changes: 6 additions & 6 deletions test/model_test/make_models.py
@@ -468,8 +468,8 @@ def array_to_tensor_value_info(array, name):
     dtype = _data_type_from_numpy(array.dtype)
     return helper.make_tensor_value_info(name, dtype, list(array.shape))
 
-def dump_direct_onnx(name, op_type, input_arrays, output_arrays, attributes={}, opset_version=10):
-    name_all.append(name)
+def dump_direct_onnx(name, op_type, input_arrays, output_arrays, attributes={}, opset_version=10, large=False):
+    name_all.append({"name": name, "large": large})
     output_dir = f"{OUTPUT_DIR}/{name}"
     os.makedirs(output_dir, exist_ok=True)
     input_tvs = []
@@ -513,8 +513,8 @@ def dump_direct_onnx(name, op_type, input_arrays, output_arrays, attributes={},
     subprocess.check_call(["python", "-m", "webdnn.optimize_model", onnx_path, os.path.join(output_dir, "optimized")])
 
 
-def dump(name, model, input_shapes, opset_version=10):
-    name_all.append(name)
+def dump(name, model, input_shapes, opset_version=10, large=False):
+    name_all.append({"name": name, "large": large})
     output_dir = f"{OUTPUT_DIR}/{name}"
     os.makedirs(output_dir, exist_ok=True)
     inputs = []
@@ -568,7 +568,7 @@ def main():
     dump_direct_onnx("mean", "Mean", [np.random.rand(3, 4).astype(np.float32), np.random.rand(1, 4).astype(np.float32)], [np.random.rand(3, 4).astype(np.float32)])
     dump_direct_onnx("tile", "Tile", [np.random.rand(3, 4, 5, 6).astype(np.float32), np.array([2, 3, 4, 5], dtype=np.int64)], [np.zeros((3*2, 4*3, 5*4, 6*5), dtype=np.float32)])
     dump("relu", ReLU(), [(3, 4)])
-    dump("relu2", ReLU(), [(100, 20, 30, 400)])
+    dump("relu2", ReLU(), [(100, 20, 30, 400)], large=True)
     dump("reluexp", ReLUExp(), [(3, 4)])
     dump("sqrt", Sqrt(), [torch.rand(3, 4)])
     dump("sqrtscalar", Sqrt(), [rand_scalar()])
@@ -626,7 +626,7 @@ def main():
     # cinkhkw % 4 != 0, group * batch * outh * outw > 16384
     dump("conv10", nn.Conv2d(512, 512, 3, 1, 1, groups=512, bias=False), [(1, 512, 7, 7)])
     # very large im2col (IM2COL_NUMEL_LIMIT)
-    dump("conv11", nn.Conv2d(1, 1, 4, 1, 1, bias=True), [(32, 1, 1027, 1027)])
+    dump("conv11", nn.Conv2d(1, 1, 4, 1, 1, bias=True), [(32, 1, 1027, 1027)], large=True)
     # in_channels, out_channels, kernel_size
     dump("convtranspose1", nn.ConvTranspose2d(16, 32, 3, stride=1, padding=0, output_padding=0, groups=1, dilation=1, bias=False), [(1, 16, 7, 7)])
     dump("convtranspose2", nn.ConvTranspose2d(16, 32, 3, stride=2, padding=0, output_padding=0, groups=1, dilation=1, bias=True), [(2, 16, 7, 9)])
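Note on the generated case list: with the large flag above, each entry appended to name_all — and therefore, assuming the script still serializes name_all as-is, each entry in the model/cases.json file that the test runner fetches — is now an object with name and large fields instead of a bare name string. A minimal JavaScript sketch of what the runner receives (the entries shown are illustrative, not the full generated list):

// Shape of the data returned by fetch("model/cases.json") after this change (sketch).
const listJSON = [
  { name: "relu", large: false },   // dumped with the default large=False
  { name: "relu2", large: true },   // dumped with large=True above
  { name: "conv11", large: true },  // dumped with large=True above
];
// Entries with large === true are skipped unless the "Large Case" checkbox is checked.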
1 change: 1 addition & 0 deletions test/model_test/runner/optimized.html
@@ -32,6 +32,7 @@ <h1>WebDNN Operator Test (Optimized Model)</h1>
         ><input type="checkbox" name="backend" value="webgpu" />WebGPU</label
       >
       <label><input type="checkbox" name="backend" value="webgl" />WebGL</label>
+      <label><input type="checkbox" name="large" value="1" />Large Case</label>
     </div>
     <div id="result"></div>
   </body>
1 change: 1 addition & 0 deletions test/model_test/runner/standard.html
@@ -32,6 +32,7 @@ <h1>WebDNN Operator Test (Standard ONNX Model)</h1>
         ><input type="checkbox" name="backend" value="webgpu" />WebGPU</label
       >
       <label><input type="checkbox" name="backend" value="webgl" />WebGL</label>
+      <label><input type="checkbox" name="large" value="1" />Large Case</label>
     </div>
     <div id="result"></div>
   </body>
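Both runner pages gain the same "Large Case" checkbox; its name attribute ("large") is what test.js reads to decide whether large cases are included, alongside the existing backend checkboxes. A minimal sketch of how these controls are consumed (restated from the runner change below; assumes the markup above):

// Read the page controls (sketch).
const runLarge = document.getElementsByName("large")[0].checked;   // new "Large Case" toggle
const backends = [...document.getElementsByName("backend")]        // existing backend toggles
  .filter((checkbox) => checkbox.checked)
  .map((checkbox) => checkbox.value);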
104 changes: 59 additions & 45 deletions test/model_test/runner/test.js
@@ -5,59 +5,73 @@ function wait() {
 }
 
 async function runTest(optimized) {
-  let caseDirs = [];
-  const resultDom = document.getElementById("result");
-  const previousResultList = document.getElementById("resultList");
-  if (previousResultList) {
-    resultDom.removeChild(previousResultList);
-  }
-  const resultList = document.createElement("ol");
-  resultDom.appendChild(resultList);
+  try {
+    let caseDirs = [];
+    const resultDom = document.getElementById("result");
+    const previousResultList = document.getElementById("resultList");
+    if (previousResultList) {
+      resultDom.removeChild(previousResultList);
+    }
+    const resultList = document.createElement("ol");
+    resultDom.appendChild(resultList);
 
-  // URLに?case=xxx があればケースxxxだけを実行
-  const usp = new URLSearchParams(location.search);
-  const selectedCase = usp.get("case");
-  if (selectedCase) {
-    caseDirs.push(`model/${selectedCase}/`);
-  } else {
-    const listJSON = await (await fetch("model/cases.json")).json();
-    for (const name of listJSON) {
-      caseDirs.push(`model/${name}/`);
+    // URLに?case=xxx があればケースxxxだけを実行
+    const runLarge = document.getElementsByName("large")[0].checked;
+    const usp = new URLSearchParams(location.search);
+    const selectedCase = usp.get("case");
+    if (selectedCase) {
+      caseDirs.push(`model/${selectedCase}/`);
+    } else {
+      const listJSON = await (await fetch("model/cases.json")).json();
+      for (const {name, large} of listJSON) {
+        if (!large || (large && runLarge)) {
+          caseDirs.push(`model/${name}/`);
+        }
+      }
     }
-  }
-  const checkboxes = document.getElementsByName("backend");
-  const backendOrders = [["cpu"]];
-  for (const checkbox of checkboxes) {
-    if (checkbox.checked) {
-      backendOrders.push([checkbox.value, "cpu"]);
+    const checkboxes = document.getElementsByName("backend");
+    const backendOrders = [["cpu"]];
+    for (const checkbox of checkboxes) {
+      if (checkbox.checked) {
+        backendOrders.push([checkbox.value, "cpu"]);
+      }
     }
-  }
-  let allOk = true;
-  const allResults = {};
-  for (const caseDir of caseDirs) {
-    for (const backendOrder of backendOrders) {
-      console.log("test", caseDir, backendOrder);
-      const msg = await runTestOne(caseDir, backendOrder, optimized);
-      const ok = !msg;
-      allOk &= ok;
-      allResults[caseDir] = ok;
-      resultList.innerHTML += `<li><span class="${
-        ok ? "result-ok" : "result-fail"
-      }">${ok ? "OK" : "Fail"}, ${caseDir}, ${backendOrder[0]}</span> <span>${msg ? msg : ''}</span></li>`;
-      await wait();
+    let allOk = true;
+    const allResults = {};
+    for (const caseDir of caseDirs) {
+      for (const backendOrder of backendOrders) {
+        console.log("test", caseDir, backendOrder);
+        const msg = await runTestOne(caseDir, backendOrder, optimized);
+        const ok = !msg;
+        allOk &= ok;
+        allResults[caseDir] = ok;
+        resultList.innerHTML += `<li><span class="${
+          ok ? "result-ok" : "result-fail"
+        }">${ok ? "OK" : "Fail"}, ${caseDir}, ${backendOrder[0]}</span> <span>${
+          msg ? msg : ""
+        }</span></li>`;
+        await wait();
+      }
     }
-  }
-  console.log("done all test");
-  if (allOk) {
-    console.log("all ok");
-    resultList.innerHTML += `<li><span class="result-ok">Done. All cases OK.</span></li>`;
-  } else {
-    console.error("failed", allResults);resultList.innerHTML += `<li><span class="result-fail">Some cases failed.</span></li>`;
+    console.log("done all test");
+    if (allOk) {
+      console.log("all ok");
+      resultList.innerHTML += `<li><span class="result-ok">Done. All cases OK.</span></li>`;
+    } else {
+      console.error("failed", allResults);
+      resultList.innerHTML += `<li><span class="result-fail">Some cases failed.</span></li>`;
+    }
+  } catch (error) {
+    console.error(error);
+    alert(error.message);
   }
 }
 
 async function runTestOne(directory, backendOrder, optimized) {
-  const runner = await WebDNN.load(optimized ? `${directory}optimized/` : directory, { backendOrder, optimized });
+  const runner = await WebDNN.load(
+    optimized ? `${directory}optimized/` : directory,
+    { backendOrder, optimized }
+  );
   const expectedTensors = await runner
     .getTensorLoader(directory + "expected.bin")
     .loadAll();
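The case-selection condition added above, !large || (large && runLarge), is equivalent to !large || runLarge: a case runs when it is not marked large, or when the "Large Case" checkbox is checked. A standalone sketch of that predicate (the helper name is illustrative, not part of the commit):

// Equivalent filtering predicate (illustrative helper, not in the commit).
function shouldRunCase(entry, runLarge) {
  // Same truth table as !entry.large || (entry.large && runLarge).
  return !entry.large || runLarge;
}

// Usage sketch against the cases.json entries:
// caseDirs = listJSON.filter((e) => shouldRunCase(e, runLarge)).map((e) => `model/${e.name}/`);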
