Showing 6 changed files with 205 additions and 19 deletions.
45 changes: 45 additions & 0 deletions
src/descriptor_runner/operators/base/pad11.ts
@@ -0,0 +1,45 @@
import { OperatorImpl } from "../operatorImpl";
import { Tensor } from "../../interface/core/tensor";
import { onnx } from "onnx-proto";
import { getAttrString } from "../operatorUtil";
import { Backend } from "../../interface/core/constants";
import { CPUTensor } from "../../interface/backend/cpu/cpuTensor";

type PadMode = "constant" | "reflect" | "edge";

/*
 * Opset 11
 * Not compatible with opset 2
 */
export abstract class Pad11 extends OperatorImpl {
  mode!: PadMode;

  initialize(attribute: onnx.IAttributeProto[]): void {
    super.initialize(attribute);
    this.mode = getAttrString(attribute, "mode", "constant") as PadMode;
  }

  getTensorBackendRequirement(
    nInputs: number,
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    nOutputs: number
  ): (Backend | null)[] {
    if (nInputs === 2) {
      return [this.backend, "cpu"];
    } else {
      return [this.backend, "cpu", "cpu"];
    }
  }

  protected calcShape(
    input: Tensor,
    padTensor: CPUTensor
  ): { outputShape: number[]; pads: number[] } {
    const outputShape: number[] = [];
    // ONNX pads layout: [begin_0, ..., begin_{ndim-1}, end_0, ..., end_{ndim-1}]
    const pads: number[] = Array.from(padTensor.data);
    for (let i = 0; i < input.ndim; i++) {
      outputShape.push(input.dims[i] + pads[i] + pads[i + input.ndim]);
    }
    return { outputShape, pads };
  }
}
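For reference, a minimal standalone sketch of the same shape arithmetic; the helper name `computePaddedShape` and the concrete numbers below are illustrative only and are not part of this commit.

// Illustrative helper (not in the commit): mirrors Pad11.calcShape for plain arrays.
function computePaddedShape(dims: number[], pads: number[]): number[] {
  const ndim = dims.length;
  // pads = [begin_0, ..., begin_{ndim-1}, end_0, ..., end_{ndim-1}]
  return dims.map((d, i) => d + pads[i] + pads[i + ndim]);
}

// Example: a 2x3 tensor padded by 1 before / 1 after on axis 0 and 0 before / 2 after on axis 1.
console.log(computePaddedShape([2, 3], [1, 0, 1, 2])); // [4, 5]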
150 changes: 150 additions & 0 deletions
src/descriptor_runner/operators/webgl/operators/standard/pad11.ts
@@ -0,0 +1,150 @@
import { Tensor } from "../../../../interface/core/tensor";
import { OperatorEntry } from "../../../../interface/core/operator";
import { Pad11 } from "../../../base/pad11";
import {
  WebDNNWebGLContext,
  WebGLUniformItem,
} from "../../../../interface/backend/webgl/webglContext";
import {
  shaderGenHeader,
  shaderGenOutput,
  shaderGenTensorNDGet,
  shaderGenTensorNDGetUniformItem,
  shaderGenTensorOutputCoordsWithReturn,
  shaderGenTensorOutputUniform,
  shaderGenTensorOutputUniformItem,
} from "../../shaderHelper";
import { arange } from "../../../../util";

/*
 * Opset 11
 * Not compatible with opset 2
 */
class WebGLPad11 extends Pad11 {
  constructor() {
    super("webgl");
  }

  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {
    const [input, shapeTensor, constantValueTensor] = inputs;
    context.assertsWebGLTensor(input);
    context.cpuContext.assertsCPUTensor(shapeTensor);
    const { outputShape: outShape, pads } = this.calcShape(input, shapeTensor);
    let constantValue = 0;
    if (constantValueTensor) {
      context.cpuContext.assertsCPUTensor(constantValueTensor);
      constantValue = constantValueTensor.data[0];
    }
    const output = context.emptyTensor(outShape, "float32");
    const kernelName = `pad_${outShape.length}_${this.mode}`;
    const padUniforms = arange(outShape.length)
      .map((dim) => `uniform int pad${dim};`)
      .join("");
    const inShapeUniforms = arange(outShape.length)
      .map((dim) => `uniform int inShape${dim};`)
      .join("");
    const constantUniform =
      this.mode === "constant" ? "uniform float padConstant;" : "";
    const tex_input_idxs = arange(outShape.length)
      .map((dim) => `ti${dim}`)
      .join(",");
    const minusPad = arange(outShape.length)
      .map((dim) => `int ti${dim} = tex_output_${dim} - pad${dim};`)
      .join("");
    const outOfBoundCond = arange(outShape.length)
      .map((dim) => `ti${dim} < 0 || ti${dim} >= inShape${dim}`)
      .join("||");
    let indexAdjuster: string;
    let valueGetter: string;
    switch (this.mode) {
      case "constant":
        // Out-of-range reads return the constant value.
        indexAdjuster = "";
        valueGetter = `if (${outOfBoundCond}) {s = padConstant;} else {s = get_tex_input(${tex_input_idxs});}`;
        break;
      case "edge":
        // Clamp indices into [0, inShape - 1].
        indexAdjuster = arange(outShape.length)
          .map(
            (dim) =>
              `if (ti${dim} < 0) {ti${dim} = 0;} else if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} - 1;}`
          )
          .join("");
        valueGetter = `s = get_tex_input(${tex_input_idxs});`;
        break;
      case "reflect":
        // Mirror indices about the borders without repeating the edge element.
        indexAdjuster = arange(outShape.length)
          .map(
            (dim) =>
              `if (ti${dim} < 0) {ti${dim} = pad_mod(-ti${dim}, inShape${dim} * 2 - 2); if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} * 2 - ti${dim} - 2;}} else if (ti${dim} >= inShape${dim}) {ti${dim} = pad_mod(ti${dim}, inShape${dim} * 2 - 2); if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} * 2 - ti${dim} - 2;}}`
          )
          .join("");
        valueGetter = `s = get_tex_input(${tex_input_idxs});`;
        break;
    }
    const kernelSource = `${shaderGenHeader(context.webgl2)}
int pad_mod(int x, int y) {
int z = x / y;
return x - z * y;
}
${padUniforms}
${constantUniform}
${inShapeUniforms}
${shaderGenTensorOutputUniform(outShape.length)}
${shaderGenTensorNDGet("tex_input", input.ndim, context.webgl2)}
void main() {
${shaderGenTensorOutputCoordsWithReturn(outShape.length)}
${minusPad}
${indexAdjuster}
float s;
${valueGetter}
${shaderGenOutput("s", context.webgl2)}
return;
}
`;
    context.addKernel(kernelName, kernelSource);

    const uniforms: WebGLUniformItem[] = [
      ...shaderGenTensorNDGetUniformItem(
        "tex_input",
        input.strides,
        input,
        context.webgl2
      ),
      ...shaderGenTensorOutputUniformItem(outShape, output, context.webgl2),
    ];
    for (let dim = 0; dim < outShape.length; dim++) {
      uniforms.push({ name: `pad${dim}`, value: pads[dim], type: "int" });
      uniforms.push({
        name: `inShape${dim}`,
        value: input.dims[dim],
        type: "int",
      });
    }
    if (this.mode === "constant") {
      uniforms.push({
        name: "padConstant",
        value: constantValue,
        type: "float",
      });
    }
    await context.runKernel(
      kernelName,
      [{ tensor: input, name: "tex_input" }],
      output,
      uniforms
    );
    return [output];
  }
}

export function getOpEntries(): OperatorEntry[] {
  return [
    {
      opType: "Pad",
      backend: "webgl",
      opsetMin: 11,
      factory: () => new WebGLPad11(),
    },
  ];
}
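As a reading aid, the sketch below mirrors the index adjustment that the generated GLSL performs in "reflect" mode; the function name `reflectIndex` and the sample values are illustrative and are not part of this commit.

// Illustrative only (not in the commit): TypeScript equivalent of the GLSL
// reflect-mode index adjustment built above. Indices past either border are
// mirrored back into [0, inShape - 1] without repeating the edge element.
function reflectIndex(ti: number, inShape: number): number {
  const period = inShape * 2 - 2; // pad_mod(x, y) in the shader computes x - (x / y) * y
  if (ti < 0) {
    ti = -ti % period;
    if (ti >= inShape) ti = inShape * 2 - ti - 2;
  } else if (ti >= inShape) {
    ti = ti % period;
    if (ti >= inShape) ti = inShape * 2 - ti - 2;
  }
  return ti;
}

// For a length-4 axis [a, b, c, d]: index -1 -> 1 (b), -2 -> 2 (c), 4 -> 2 (c), 5 -> 1 (b).
console.log([-2, -1, 0, 3, 4, 5].map((ti) => reflectIndex(ti, 4))); // [2, 1, 0, 3, 2, 1]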