Skip to content

Commit 286cd04

Browse files
authored
JIT cleanup (#7631)
Cleans up dead code in the JIT:
- Remove `interpreter_autograd_function`
- Remove `Handle`s
- Remove `HandleBuilder`
- Remove the `creates_handles` and `tracing_autograd_python_function` flags
- Remove unused `var_args`
- Fix submodules
1 parent e6f7e18 commit 286cd04

21 files changed

+65
-555
lines changed

setup.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -648,7 +648,6 @@ def run(self):
648648
"torch/csrc/jit/export.cpp",
649649
"torch/csrc/jit/import.cpp",
650650
"torch/csrc/jit/autodiff.cpp",
651-
"torch/csrc/jit/interpreter_autograd_function.cpp",
652651
"torch/csrc/jit/python_arg_flatten.cpp",
653652
"torch/csrc/jit/variable_flags.cpp",
654653
"torch/csrc/jit/passes/create_autodiff_subgraphs.cpp",
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
graph(%0 : Double(2, 2)) {
2-
%1 : Double(2, 2), %2 : Handle = ^Dropout(0.6, True, False)(%0), scope: Dropout
2+
%1 : Double(2, 2) = ^Dropout(0.6, True, False)(%0), scope: Dropout
33
return (%1);
44
}

tools/cpp_build/libtorch/CMakeLists.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -206,7 +206,6 @@ set(TORCH_SRCS
206206
${TORCH_SRC_DIR}/csrc/jit/tracer_state.cpp
207207
${TORCH_SRC_DIR}/csrc/jit/autodiff.cpp
208208
${TORCH_SRC_DIR}/csrc/jit/type.cpp
209-
${TORCH_SRC_DIR}/csrc/jit/interpreter_autograd_function.cpp
210209
${TORCH_SRC_DIR}/csrc/jit/export.cpp
211210
${TORCH_SRC_DIR}/csrc/jit/import.cpp
212211
${TORCH_SRC_DIR}/csrc/onnx/onnx.cpp

torch/csrc/Module.cpp

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -487,7 +487,6 @@ static PyObject* initModule() {
487487
ASSERT_TRUE(THPVariable_initModule(module));
488488
ASSERT_TRUE(THPFunction_initModule(module));
489489
ASSERT_TRUE(THPEngine_initModule(module));
490-
torch::autograd::initAutogradClosureBindings(module);
491490
torch::jit::initJITBindings(module);
492491
torch::onnx::initONNXBindings(module);
493492
torch::autograd::initNNFunctions(module);

torch/csrc/autograd/autograd.h

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@ void THPAutograd_initFunctions();
66

77
namespace torch { namespace autograd {
88

9-
void initAutogradClosureBindings(PyObject* module);
10-
119
PyMethodDef* python_functions();
1210

1311
}}

torch/csrc/autograd/function.cpp

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,7 @@ variable_list Function::traced_apply(variable_list inputs) {
3737

3838
// Insert a CppOp in the trace.
3939
auto& graph = state->graph;
40-
std::vector<VariableFlags> var_flags;
41-
for(auto & input: inputs) {
42-
var_flags.push_back(VariableFlags::of(input));
43-
}
44-
auto* this_node = graph->createCppOp(get_shared_ptr(), std::move(var_flags));
40+
auto* this_node = graph->createCppOp(get_shared_ptr());
4541
#ifndef NO_PYTHON
4642
this_node->setSourceLocation(std::make_shared<StringSourceLocation>(
4743
jit::tracer::getPythonInterpreterStackTrace()

torch/csrc/autograd/functions/init.cpp

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
#include "basic_ops.h"
44
#include "tensor.h"
55
#include "special.h"
6-
#include "torch/csrc/jit/interpreter_autograd_function.h"
76
#include "torch/csrc/autograd/functions/pybind.h"
87
#include "torch/csrc/autograd/python_cpp_function.h"
98
#include "torch/csrc/autograd/generated/python_functions.h"
@@ -99,9 +98,6 @@ void THPAutograd_initFunctions()
9998
static PyTypeObject EvalClass;
10099
addClass<Eval, NoCtor>(module, EvalClass, "Eval");
101100

102-
static PyTypeObject InterpreterAutogradClass;
103-
addClass<torch::jit::InterpreterAutogradFunction, NoCtor>(module, InterpreterAutogradClass, "InterpreterAutogradFunction");
104-
105101
static PyTypeObject CopyBackwardsClass;
106102
addClass<CopyBackwards, NoCtor>(module, CopyBackwardsClass, "CopyBackwards");
107103

@@ -118,18 +114,3 @@ void THPAutograd_initFunctions()
118114
throw python_error();
119115
}
120116
}
121-
122-
namespace torch { namespace autograd {
123-
124-
void initAutogradClosureBindings(PyObject* module) {
125-
auto m = py::handle(module).cast<py::module>();
126-
py::class_<jit::InterpreterFunctionFactory,std::shared_ptr<jit::InterpreterFunctionFactory>>(m, "InterpreterFunctionFactory")
127-
.def("__call__", &jit::InterpreterFunctionFactory::construct_function)
128-
;
129-
130-
m.def("_jit_createInterpreterFactory", [](jit::tracer::TracingState* tracing_state) {
131-
return std::make_shared<jit::InterpreterFunctionFactory>(tracing_state);
132-
});
133-
}
134-
135-
}}

torch/csrc/autograd/python_function.cpp

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -602,19 +602,7 @@ static void _trace_post_record(
602602

603603
auto state_lock = trace_info.state->lock();
604604
trace_info.n->i_(attr::inplace, is_inplace);
605-
606-
// See definition in function.cpp.
607-
THPObjectPtr passes_py_bool {PyObject_GetAttrString(op_obj, "is_traceable")};
608-
if (!passes_py_bool) throw python_error();
609-
bool passes_state_transparently = passes_py_bool == Py_True;
610-
// NB: this path is executed only for forward of Python functions, so there's no need to check
611-
// tracing_state->in_eval_subgraph (it's always false, because they are never part of backward
612-
// subgraphs AND we don't even materialize the forward function).
613-
if (trace_info.state->creates_handles && !passes_state_transparently) {
614-
// TODO: sgross and ezyang don't know if this is right
615-
tracer::nontraceableBackwardSubgraph(input_vars, output_vars);
616-
Function::set_up_context_edge(trace_info.n, input_vars, output_vars);
617-
}
605+
618606
}
619607

620608
PyObject* process_outputs(PyObject *op_obj, THPFunction* grad_fn, const UnpackedInput& unpacked,

0 commit comments

Comments (0)