Closed

25 commits
d724701  [ao][fx] fixing public v private graph_module.py  (HDCharles, Nov 3, 2022)
a4f60f9  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 4, 2022)
bca13e7  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 4, 2022)
8931b94  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 5, 2022)
6c404a9  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 7, 2022)
7c7f6f9  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 8, 2022)
c14584f  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 11, 2022)
4137ee6  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 11, 2022)
e7600f5  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 11, 2022)
6b8b39f  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 11, 2022)
d02725d  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 15, 2022)
f531d1b  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 15, 2022)
e501835  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 15, 2022)
0d717d6  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 16, 2022)
7665a3f  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Nov 17, 2022)
8fcdb54  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 7, 2022)
18b8708  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 7, 2022)
fa74b0f  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 7, 2022)
e7c8954  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 7, 2022)
148f7fb  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 8, 2022)
ce41f89  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 9, 2022)
1551474  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 9, 2022)
452c561  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 12, 2022)
cd3bd5d  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 13, 2022)
b0ad11a  Update on "[ao][fx] fixing public v private graph_module.py"  (HDCharles, Dec 14, 2022)
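
Taken together, these commits privatize two FX graph-module helpers: is_observed_module and is_observed_standalone_module are renamed to _is_observed_module and _is_observed_standalone_module, dropped from __all__, and every in-tree call site plus the ao_migration test is updated, as the file diffs below show. For downstream code the rename looks like this (a minimal sketch; only the import path that appears in this diff is assumed):

    # Before this PR (public name, exported via __all__):
    #   from torch.ao.quantization.fx.graph_module import is_observed_module
    # After this PR (private helper, underscore-prefixed, removed from __all__):
    from torch.ao.quantization.fx.graph_module import _is_observed_module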
4 changes: 2 additions & 2 deletions test/quantization/ao_migration/test_quantization_fx.py
@@ -44,9 +44,9 @@ def test_function_import_fx_graph_module(self):
         function_list = [
             'FusedGraphModule',
             'ObservedGraphModule',
-            'is_observed_module',
+            '_is_observed_module',
             'ObservedStandaloneGraphModule',
-            'is_observed_standalone_module',
+            '_is_observed_standalone_module',
             'QuantizedGraphModule'
         ]
         self._test_function_import('fx.graph_module', function_list)
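
For context, the ao_migration tests check that names are importable from both the new torch.ao.quantization namespace and the legacy torch.quantization shim. The _test_function_import helper itself is not shown in this diff, so the following is only an assumption-labeled sketch of the idea, not PyTorch's implementation:

    # Hypothetical sketch of an import-parity check between the torch.ao
    # namespace and the legacy torch.quantization shim. The real
    # _test_function_import helper is not shown in this diff.
    import importlib

    def check_import_parity(relative_module, names):
        new_mod = importlib.import_module("torch.ao.quantization." + relative_module)
        old_mod = importlib.import_module("torch.quantization." + relative_module)
        for name in names:
            # the legacy module should re-export the very same object
            assert getattr(new_mod, name) is getattr(old_mod, name), name

    check_import_parity("fx.graph_module",
                        ["_is_observed_module", "_is_observed_standalone_module"])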
8 changes: 4 additions & 4 deletions torch/ao/quantization/fx/convert.py
@@ -42,8 +42,8 @@
 )
 from .graph_module import (
     QuantizedGraphModule,
-    is_observed_module,
-    is_observed_standalone_module,
+    _is_observed_module,
+    _is_observed_standalone_module,
 )
 from ._equalize import update_obs_for_equalization, convert_eq_obs
 from torch.nn.utils.parametrize import type_before_parametrizations
@@ -450,7 +450,7 @@ def _restore_state(
 ) -> Tuple[Dict[str, Tuple[str, type]],
            PrepareCustomConfig,
            Set[str]]:
-    assert is_observed_module(observed), \
+    assert _is_observed_module(observed), \
         'incoming model must be produced by prepare_fx'
     prepare_custom_config: PrepareCustomConfig = observed._prepare_custom_config  # type: ignore[assignment]
     node_name_to_scope: Dict[str, Tuple[str, type]] = observed._node_name_to_scope  # type: ignore[assignment]
@@ -1017,7 +1017,7 @@ def convert(
                 node_name_to_qconfig)
         elif isinstance(mod, DeQuantStub):
             _replace_observer_or_dequant_stub_with_dequantize_node(node, model.graph)
-        elif is_observed_standalone_module(mod):
+        elif _is_observed_standalone_module(mod):
             convert_standalone_module(
                 node, modules, model, is_reference, backend_config)
         # below this point `type_before_parametrizations` is used
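
The assert in _restore_state is the gate that makes convert_fx reject models that did not come from prepare_fx. A minimal end-to-end sketch, assuming the 1.13-era FX graph mode quantization API:

    # Minimal sketch: prepare_fx produces an observed module, which passes
    # the _is_observed_module assert above, and convert_fx then lowers it.
    import torch
    from torch.ao.quantization import get_default_qconfig_mapping
    from torch.ao.quantization.quantize_fx import prepare_fx, convert_fx

    model = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.ReLU())
    example_inputs = (torch.randn(2, 4),)
    prepared = prepare_fx(model, get_default_qconfig_mapping(), example_inputs)
    prepared(*example_inputs)         # calibrate with representative data
    quantized = convert_fx(prepared)  # passes the 'produced by prepare_fx' check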
6 changes: 2 additions & 4 deletions torch/ao/quantization/fx/graph_module.py
@@ -7,9 +7,7 @@
 __all__ = [
     "FusedGraphModule",
     "ObservedGraphModule",
-    "is_observed_module",
     "ObservedStandaloneGraphModule",
-    "is_observed_standalone_module",
     "QuantizedGraphModule",
 ]

@@ -56,7 +54,7 @@ def __deepcopy__(self, memo):
         fake_mod.__dict__ = copy.deepcopy(self.__dict__)
         return ObservedGraphModule(fake_mod, copy.deepcopy(self.graph), copy.deepcopy(self.preserved_attr_names))

-def is_observed_module(module: Any) -> bool:
+def _is_observed_module(module: Any) -> bool:
     return isinstance(module, ObservedGraphModule)

 class ObservedStandaloneGraphModule(ObservedGraphModule):
@@ -71,7 +69,7 @@ def __deepcopy__(self, memo):
         fake_mod.__dict__ = copy.deepcopy(self.__dict__)
         return ObservedStandaloneGraphModule(fake_mod, copy.deepcopy(self.graph), copy.deepcopy(self.preserved_attr_names))

-def is_observed_standalone_module(module: Any) -> bool:
+def _is_observed_standalone_module(module: Any) -> bool:
     return isinstance(module, ObservedStandaloneGraphModule)

 def _save_packed_weight(self, destination, prefix, keep_vars):
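
The __deepcopy__ overrides above exist so that copying an observed model preserves its concrete type, which keeps these private checks passing. A short sketch under the same API assumptions as the previous example:

    # The predicates are thin isinstance wrappers, and deepcopy preserves
    # the ObservedGraphModule type, so the check survives copying.
    import copy
    import torch
    from torch.ao.quantization import get_default_qconfig_mapping
    from torch.ao.quantization.quantize_fx import prepare_fx
    from torch.ao.quantization.fx.graph_module import (
        _is_observed_module,
        _is_observed_standalone_module,
    )

    prepared = prepare_fx(torch.nn.Sequential(torch.nn.Linear(4, 4)),
                          get_default_qconfig_mapping(), (torch.randn(1, 4),))
    assert _is_observed_module(prepared)
    assert _is_observed_module(copy.deepcopy(prepared))  # type survives deepcopy
    assert not _is_observed_standalone_module(prepared)  # plain observed module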
4 changes: 2 additions & 2 deletions torch/ao/quantization/fx/match_utils.py
@@ -15,7 +15,7 @@
     MatchAllNode
 )
 from .graph_module import (
-    is_observed_standalone_module,
+    _is_observed_standalone_module,
 )
 from torch.nn.utils.parametrize import type_before_parametrizations
 from typing import Any, Dict, List, Callable, Optional, Tuple, Type, Set, Iterable
@@ -232,7 +232,7 @@ def is_standalone_module(node_target: str, modules: Dict[str, torch.nn.Module]):
     for node in graph.nodes:
         if node.op == 'call_module' and \
                 (is_standalone_module(node.target, modules) or
-                 is_observed_standalone_module(modules[node.target])):
+                 _is_observed_standalone_module(modules[node.target])):
             # add node to matched nodes
             match_map[node.name] = (
                 node, node, None,
4 changes: 2 additions & 2 deletions torch/quantization/fx/graph_module.py
@@ -10,8 +10,8 @@
     GraphModule,
     FusedGraphModule,
     ObservedGraphModule,
-    is_observed_module,
+    _is_observed_module,
     ObservedStandaloneGraphModule,
-    is_observed_standalone_module,
+    _is_observed_standalone_module,
     QuantizedGraphModule
 )
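
torch/quantization/fx/graph_module.py belongs to the legacy namespace kept for backward compatibility: it only re-exports names from their new torch.ao location, so the rename has to be mirrored here. After this change, both import paths hand back the same private object (an illustrative check, assuming the re-export shim shown above):

    # Both namespaces should resolve to the identical function object,
    # since the legacy module is a re-export shim.
    from torch.quantization.fx.graph_module import _is_observed_module as old_path
    from torch.ao.quantization.fx.graph_module import _is_observed_module as new_path
    assert old_path is new_path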