
Revert D14842057: Compiler uses first-class modules
Differential Revision: D14842057

Original commit changeset: ca6e7b5a4380

fbshipit-source-id: e8f1862a59bf20d5f78648b2fdc53a8b3750ead3
zdevito authored and facebook-github-bot committed Apr 11, 2019
1 parent 5e1f0b2 commit f5165ad
Showing 32 changed files with 784 additions and 1,239 deletions.
54 changes: 0 additions & 54 deletions aten/src/ATen/core/function_schema.h
@@ -164,18 +164,6 @@ struct FunctionSchema {
     }
     return c10::nullopt;
   }
-  FunctionSchema cloneWithArguments(std::vector<Argument> new_arguments) const {
-    return FunctionSchema(
-        name(),
-        overload_name(),
-        std::move(new_arguments),
-        returns(),
-        is_vararg(),
-        is_varret());
-  }
-  // Check that inputs have the correct types and appends any missing default
-  // values.
-  void checkAndNormalizeInputs(std::vector<IValue>& inputs) const;
 };
 
 inline bool operator==(const FunctionSchema& lhs, const FunctionSchema& rhs) {
@@ -239,46 +227,4 @@ inline std::string toString(const FunctionSchema& schema) {
   return str.str();
 }
 
-inline void FunctionSchema::checkAndNormalizeInputs(std::vector<IValue>& inputs) const {
-  // Do we have more inputs than the schema accepts?
-  AT_CHECK(
-      inputs.size() <= arguments().size(),
-      "Expected at most ",
-      arguments().size(),
-      " argument(s) for operator '",
-      name(),
-      "', but received ",
-      inputs.size(),
-      " argument(s). Declaration: ",
-      *this);
-
-  for (size_t pos = 0; pos < arguments().size(); ++pos) {
-    const auto& argument = arguments()[pos];
-    if (pos < inputs.size()) {
-      if (!isSubvalueOf(inputs[pos], argument.type())) {
-        AT_ERROR(
-            "Expected value of type ",
-            *argument.type(),
-            " for argument '",
-            argument.name(),
-            "' in position ",
-            pos,
-            ", but instead got value of type ",
-            attemptToRecoverType(inputs[pos])->str(),
-            ". Declaration: ",
-            *this);
-      }
-    } else if (argument.default_value()) {
-      inputs.push_back(*argument.default_value());
-    } else {
-      AT_ERROR(
-          name(),
-          "() is missing value for argument '",
-          argument.name(),
-          "'. Declaration: ",
-          *this);
-    }
-  }
-}
-
 } // namespace c10
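
For reference, a minimal sketch of how the checkAndNormalizeInputs helper removed above was called; the wrapper function is an illustrative assumption, not code from this commit:

#include <ATen/core/function_schema.h>
#include <ATen/core/ivalue.h>
#include <vector>

// Validates a caller-supplied argument stack against a schema: throws if
// there are too many inputs or an input's type mismatches, and appends
// schema-declared defaults for any omitted trailing arguments.
void normalizeArgs(const c10::FunctionSchema& schema,
                   std::vector<c10::IValue>& inputs) {
  schema.checkAndNormalizeInputs(inputs); // may grow `inputs` with defaults
}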
25 changes: 11 additions & 14 deletions aten/src/ATen/core/jit_type.h
@@ -17,8 +17,8 @@
 namespace torch {
 namespace jit {
 namespace script {
-struct CompilationUnit;
-struct Function;
+struct Module;
+struct Method;
 }
 } // namespace jit
 } // namespace torch
@@ -1100,19 +1100,19 @@ CAFFE2_API TypePtr evalTypeVariables(TypePtr type, TypeEnv & type_env);
 
 struct ClassType;
 using ClassTypePtr = std::shared_ptr<ClassType>;
-using ::torch::jit::script::CompilationUnit;
-using ::torch::jit::script::Function;
+using ::torch::jit::script::Module;
+using ::torch::jit::script::Method;
 
 // This represents a class in TorchScript.
 struct CAFFE2_API ClassType : public Type {
   // Create a user type and register it globally.
   static ClassTypePtr create(
       const std::string& name,
-      std::shared_ptr<CompilationUnit> module);
+      std::shared_ptr<Module> module);
 
   // Create a type representing a Module,
   // These do not have methods, and are not globally registered
-  static ClassTypePtr createModuleType(std::shared_ptr<CompilationUnit> module);
+  static ClassTypePtr createModuleType();
 
   // returns nullptr if there is no type with that name
   static ClassTypePtr get(const std::string& name);
@@ -1168,11 +1168,8 @@ struct CAFFE2_API ClassType : public Type {
     return attributeNames_[slot];
   }
 
-  Function* getMethod(const std::string& name) const;
-  CompilationUnit& compilation_unit();
-  const CompilationUnit& compilation_unit() const;
-  std::vector<Function*> methods() const;
-
+  Method* getMethod(const std::string& name) const;
+  std::vector<Method*> methods() const;
 
   const std::string& name() const {
     return typename_;
@@ -1229,10 +1226,10 @@ struct CAFFE2_API ClassType : public Type {
   static const TypeKind Kind = TypeKind::ClassType;
 
  private:
-  ClassType(std::string name, std::shared_ptr<CompilationUnit> cu)
+  ClassType(std::string name, std::shared_ptr<Module> module)
       : Type(TypeKind::ClassType),
         typename_(std::move(name)),
-        compilation_unit_(std::move(cu)) {}
+        module_(std::move(module)) {}
 
   // Name of type (note that this has to be globally unique).
   std::string typename_;
@@ -1246,7 +1243,7 @@ struct CAFFE2_API ClassType : public Type {
   std::vector<std::string> attributeNames_;
   std::vector<TypePtr> attributeTypes_;
   // Holds method attributes
-  std::shared_ptr<CompilationUnit> compilation_unit_;
+  std::shared_ptr<Module> module_;
 
 };
 } // namespace c10
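
A hedged sketch of the restored ClassType API shape (illustrative, not code from this commit): a named class type registers globally against the script::Module that owns its methods, while plain module types carry no backing module at all:

#include <ATen/core/jit_type.h>
#include <torch/csrc/jit/script/module.h>

void classTypeSketch() {
  // User-defined class: globally registered; methods resolve via the module.
  auto owner = std::make_shared<torch::jit::script::Module>();
  c10::ClassTypePtr cls = c10::ClassType::create("MyClass", owner);

  // Module type: no methods, not globally registered (module_ is nullptr).
  c10::ClassTypePtr mod = c10::ClassType::createModuleType();
}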
10 changes: 5 additions & 5 deletions aten/src/ATen/core/type.cpp
@@ -472,18 +472,18 @@ ClassTypeRegistry& getRegistry() {
 
 ClassTypePtr ClassType::create(
     const std::string& name,
-    std::shared_ptr<CompilationUnit> cu) {
-  auto ptr = ClassTypePtr(new ClassType(name, std::move(cu)));
+    std::shared_ptr<Module> module) {
+  auto ptr = ClassTypePtr(new ClassType(name, std::move(module)));
   getRegistry().registerType(name, ptr);
   return ptr;
 }
 
-ClassTypePtr ClassType::createModuleType(std::shared_ptr<CompilationUnit> cu) {
-  return ClassTypePtr(new ClassType("Module", std::move(cu)));
+ClassTypePtr ClassType::createModuleType() {
+  return ClassTypePtr(new ClassType("Module", nullptr));
 }
 
 ClassTypePtr ClassType::refine(at::ArrayRef<TypePtr> refined_slots) const {
-  auto ptr = ClassTypePtr(new ClassType(typename_, compilation_unit_));
+  auto ptr = ClassTypePtr(new ClassType(typename_, module_));
   AT_ASSERT(numAttributes() == refined_slots.size());
   for(size_t i = 0; i < attributeNames_.size(); ++i) {
     AT_ASSERT(refined_slots[i]->isSubtypeOf(attributeTypes_[i]));
3 changes: 1 addition & 2 deletions test/cpp/jit/test.cpp
@@ -65,8 +65,7 @@ namespace jit {
   _(NoneSchemaMatch) \
   _(ClassParser) \
   _(PeepholeOptimize) \
-  _(RecordFunction) \
-  _(ModuleDefine)
+  _(RecordFunction)
 
 #define TH_FORALL_TESTS_CUDA(_) \
   _(ArgumentSpec) \
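
The test list above is an X-macro: each `_(Name)` entry is expanded once per use of the list, so removing `_(ModuleDefine)` unregisters the test everywhere the list is consumed. A minimal illustration with hypothetical names (not from this commit):

// X-macro sketch: one list, many expansions.
#define TH_FORALL_TESTS_EXAMPLE(_) \
  _(Foo)                           \
  _(Bar)

#define DECLARE_TEST(name) void test##name();
TH_FORALL_TESTS_EXAMPLE(DECLARE_TEST) // declares testFoo(); testBar();
#undef DECLARE_TEST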
60 changes: 24 additions & 36 deletions test/cpp/jit/test_misc.h
@@ -40,7 +40,6 @@
 #include "ATen/core/ivalue.h"
 #include "torch/csrc/jit/script/compiler.h"
 #include "torch/csrc/jit/script/module.h"
-#include "torch/jit.h"
 
 #include "onnx/onnx_pb.h"
 
@@ -370,10 +369,11 @@ static const auto cf_examples = R"JIT(
       return a
 )JIT";
 void testControlFlow() {
-  auto cu = compile(cf_examples);
-
+  auto cu = std::make_shared<script::Module>();
+  script::defineMethodsInModule(
+      cu, cf_examples, script::nativeResolver, c10::nullopt);
   auto run = [&](const std::string& name, std::vector<IValue> stack) {
-    auto graph = cu->get_function(name).graph();
+    auto graph = cu->get_method(name).graph();
     Code code(graph);
     InterpreterState interp(code);
     interp.run(stack);
@@ -576,11 +576,12 @@ void testTopologicalIndex() {
 }
 
 void invokeTestRecordFunction(at::Tensor& t) {
-  autograd::profiler::GetPackedInputsCallback inputs_cb = [t]() {
-    Stack st;
-    pack(st, t);
-    return st;
-  };
+  autograd::profiler::GetPackedInputsCallback inputs_cb =
+      [t]() {
+        Stack st;
+        pack(st, t);
+        return st;
+      };
   autograd::profiler::RecordFunction guard("test", inputs_cb);
   t.add_(torch::ones_like(t));
 }
@@ -604,15 +605,15 @@
 
 void testRecordFunction() {
   std::vector<std::vector<int64_t>> input_sizes;
-  autograd::profiler::pushCallback(
-      [&input_sizes](const autograd::profiler::RecordFunction& fn) {
-        for (const auto& input : fn.inputs()) {
-          if (input.isTensor()) {
-            std::vector<int64_t> t = input.toTensor().sizes().vec();
-            input_sizes.push_back(t);
-          }
-        }
-      });
+  autograd::profiler::pushCallback([&input_sizes](
+      const autograd::profiler::RecordFunction& fn) {
+    for (const auto& input : fn.inputs()) {
+      if (input.isTensor()) {
+        std::vector<int64_t> t = input.toTensor().sizes().vec();
+        input_sizes.push_back(t);
+      }
+    }
+  });
 
   auto t = torch::randn({1, 2, 3}, at::kCPU);
   invokeTestRecordFunction(t);
@@ -624,15 +625,14 @@
 
   // test nested RecordFunctions
   std::vector<std::string> nested_names;
-  autograd::profiler::pushCallback(
-      [&nested_names](const autograd::profiler::RecordFunction& fn) {
-        nested_names.push_back(getFullName(&fn));
-      });
+  autograd::profiler::pushCallback([&nested_names](
+      const autograd::profiler::RecordFunction& fn) {
+    nested_names.push_back(getFullName(&fn));
+  });
 
   {
     autograd::profiler::RecordFunction guard("outer");
-    invokeTestRecordFunctionNested();
-    ;
+    invokeTestRecordFunctionNested();;
   }
 
   autograd::profiler::popCallback();
@@ -709,18 +709,6 @@ void testNoneSchemaMatch() {
   // checking that constant propagation ran wo/failure
   AT_ASSERT(std::distance(nodes.begin(), nodes.end()) == 1);
 }
-
-void testModuleDefine() {
-  auto m = std::make_shared<script::Module>();
-  m->register_parameter("foo", torch::ones({}), false);
-  m->define(R"(
-    def add_it(self, x, b : int = 4):
-      return self.foo + x + b
-  )");
-  auto result = m->run_method("add_it", torch::ones({}));
-  AT_ASSERT(result.toTensor().item<float>() == 6);
-}
-
 } // namespace test
 } // namespace jit
 } // namespace torch
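
The push/pop pairing exercised by testRecordFunction above is the whole profiler-observer protocol. A hedged usage sketch built only from calls that appear in the test (the include path and function body are assumptions):

#include <torch/csrc/autograd/profiler.h>

void observeRegion() {
  // Install an observer that fires for every RecordFunction in scope.
  torch::autograd::profiler::pushCallback(
      [](const torch::autograd::profiler::RecordFunction& fn) {
        for (const auto& input : fn.inputs()) {
          (void)input; // e.g. record input.toTensor().sizes() for tensors
        }
      });
  {
    torch::autograd::profiler::RecordFunction guard("observed");
    // ... work to profile runs here, observed by the callback ...
  }
  // Remove the observer once the region of interest has finished.
  torch::autograd::profiler::popCallback();
}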
@@ -5,14 +5,14 @@ ModelProto {
   graph:
     GraphProto {
       name: "torch-jit-export"
-      inputs: [{name: "x", type:Tensor dims: 2 3},{name: "1", type:Tensor dims: 3 4},{name: "2", type:Tensor dims: 3 3}]
+      inputs: [{name: "x", type:Tensor dims: 2 3},{name: "1", type:Tensor dims: 3 3},{name: "2", type:Tensor dims: 3 4}]
       outputs: [{name: "6", type:Tensor dims: 2 4}]
-      initializers: [TensorProto shape: [3 4],TensorProto shape: [3 3]]
+      initializers: [TensorProto shape: [3 3],TensorProto shape: [3 4]]
       nodes: [
         Node {type: "Constant", inputs: [], outputs: [3], attributes: [{ name: 'value', type: tensor, value:TensorProto shape: [1]}]},
-        Node {type: "Gemm", inputs: [x,2,3], outputs: [4], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 0}]},
+        Node {type: "Gemm", inputs: [x,1,3], outputs: [4], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 0}]},
         Node {type: "Constant", inputs: [], outputs: [5], attributes: [{ name: 'value', type: tensor, value:TensorProto shape: [1]}]},
-        Node {type: "Gemm", inputs: [4,1,5], outputs: [6], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 0}]}
+        Node {type: "Gemm", inputs: [4,2,5], outputs: [6], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 0}]}
       ]
     }
   opset_import: [OperatorSetIdProto { domain: }],
10 changes: 5 additions & 5 deletions test/expect/TestScript.test_onnx_export_speculate-f2.expect
@@ -5,9 +5,9 @@ ModelProto {
   graph:
     GraphProto {
       name: "torch-jit-export"
-      inputs: [{name: "x.1", type:Tensor dims: 1 10},{name: "1", type:Tensor dims: 20},{name: "2", type:Tensor dims: 20 10}]
+      inputs: [{name: "x.1", type:Tensor dims: 1 10},{name: "1", type:Tensor dims: 20 10},{name: "2", type:Tensor dims: 20}]
       outputs: [{name: "8", type:Tensor dims: 1 20}]
-      initializers: [TensorProto shape: [20],TensorProto shape: [20 10]]
+      initializers: [TensorProto shape: [20 10],TensorProto shape: [20]]
       nodes: [
         Node {type: "Add", inputs: [x.1,x.1], outputs: [3], attributes: []},
         Node {type: "ReduceSum", inputs: [3], outputs: [4], attributes: [{ name: 'keepdims', type: int, value: 0}]},
@@ -28,7 +28,7 @@ ModelProto {
       outputs: [{name: "10", type:Tensor dims: 1 20}]
       initializers: []
       nodes: [
-        Node {type: "Gemm", inputs: [3,2,1], outputs: [10], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
+        Node {type: "Gemm", inputs: [3,1,2], outputs: [10], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
       ]
     }
 
@@ -39,7 +39,7 @@ ModelProto {
       outputs: [{name: "11", type:Tensor dims: 1 20}]
       initializers: []
       nodes: [
-        Node {type: "Gemm", inputs: [3,2,1], outputs: [11], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
+        Node {type: "Gemm", inputs: [3,1,2], outputs: [11], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
       ]
     }
 
@@ -54,7 +54,7 @@ ModelProto {
      outputs: [{name: "12", type:Tensor dims: 1 20}]
       initializers: []
       nodes: [
-        Node {type: "Gemm", inputs: [3,2,1], outputs: [12], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
+        Node {type: "Gemm", inputs: [3,1,2], outputs: [12], attributes: [{ name: 'alpha', type: float, value: 1},{ name: 'beta', type: float, value: 1},{ name: 'transB', type: int, value: 1}]}
       ]
     }
 
2 changes: 1 addition & 1 deletion test/test_jit.py
@@ -7478,7 +7478,7 @@ def call_foo(self, input):
             def foo(self, input):
                 self.call_foo(input)
 
-        with self.assertRaisesRegex(RuntimeError, 'called recursively'):
+        with self.assertRaisesRegex(RuntimeError, 'called recursively involving'):
             M()
 
     def test_script_kwargs_fn_call(self):
1 change: 0 additions & 1 deletion tools/build_variables.py
@@ -95,7 +95,6 @@
     "torch/csrc/jit/register_quantized_ops.cpp",
     "torch/csrc/jit/scope.cpp",
     "torch/csrc/jit/script/compiler.cpp",
-    "torch/csrc/api/src/jit.cpp",
     "torch/csrc/jit/script/edit_distance.cpp",
     "torch/csrc/jit/script/logging.cpp",
     "torch/csrc/jit/script/final_returns.cpp",
3 changes: 2 additions & 1 deletion torch/CMakeLists.txt
@@ -175,7 +175,6 @@ set(TORCH_SRCS
   ${TORCH_SRC_DIR}/csrc/jit/register_quantized_ops.cpp
   ${TORCH_SRC_DIR}/csrc/jit/scope.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/compiler.cpp
-  ${TORCH_SRC_DIR}/csrc/api/src/jit.cpp
   ${TORCH_SRC_DIR}/csrc/jit/testing/file_check.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/final_returns.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/schema_matching.cpp
@@ -237,6 +236,7 @@ if (NOT NO_API)
   ${TORCH_SRC_DIR}/csrc/api/src/data/samplers/random.cpp
   ${TORCH_SRC_DIR}/csrc/api/src/data/samplers/sequential.cpp
   ${TORCH_SRC_DIR}/csrc/api/src/data/samplers/stream.cpp
+  ${TORCH_SRC_DIR}/csrc/api/src/jit.cpp
   ${TORCH_SRC_DIR}/csrc/api/src/nn/init.cpp
   ${TORCH_SRC_DIR}/csrc/api/src/nn/module.cpp
   ${TORCH_SRC_DIR}/csrc/api/src/nn/modules/batchnorm.cpp
@@ -528,6 +528,7 @@ if (BUILD_PYTHON)
   ${TORCH_SRC_DIR}/csrc/jit/python_tracer.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/init.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/lexer.cpp
+  ${TORCH_SRC_DIR}/csrc/jit/script/module.cpp
   ${TORCH_SRC_DIR}/csrc/jit/script/python_tree_views.cpp
   ${TORCH_SRC_DIR}/csrc/multiprocessing/init.cpp
   ${TORCH_SRC_DIR}/csrc/nn/THNN.cpp
2 changes: 1 addition & 1 deletion torch/csrc/api/include/torch/jit.h
@@ -32,7 +32,7 @@ namespace jit {
 ///   )JIT");
 ///   IValue output = module->run_method("relu_script", a, b);
 /// \endrst
-TORCH_API std::shared_ptr<script::CompilationUnit> compile(const std::string& source);
+TORCH_API std::shared_ptr<script::Module> compile(const std::string& source);
 
 } // namespace jit
 } // namespace torch
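
Combining the restored signature with the doc comment above, a hedged end-to-end sketch (the tensor setup is illustrative, not part of this commit):

#include <torch/jit.h>
#include <torch/types.h>

void compileAndRun() {
  // compile() once again returns a script::Module whose methods are
  // looked up and invoked by name.
  auto module = torch::jit::compile(R"JIT(
    def relu_script(a, b):
      return torch.relu(a + b)
  )JIT");
  auto a = torch::ones({2, 2});
  auto b = torch::zeros({2, 2});
  auto output = module->run_method("relu_script", a, b);
}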
7 changes: 4 additions & 3 deletions torch/csrc/api/src/jit.cpp
@@ -9,9 +9,10 @@
 namespace torch {
 namespace jit {
 
-std::shared_ptr<script::CompilationUnit> compile(const std::string& source) {
-  auto module = std::make_shared<script::CompilationUnit>();
-  module->define(source, script::nativeResolver, nullptr);
+std::shared_ptr<script::Module> compile(const std::string& source) {
+  auto module = std::make_shared<script::Module>();
+  defineMethodsInModule(
+      module, source, script::nativeResolver, /*self=*/c10::nullopt);
   return module;
 }
 