Commit 3c559ab

q10 authored and facebook-github-bot committed
Add configuration knob for ENSEMBLE_ROWWISE_ADAGRAD, backend (pytorch#2954)
Summary:
X-link: facebookresearch/FBGEMM#54
Pull Request resolved: pytorch#2954

- Add configuration knob for ENSEMBLE_ROWWISE_ADAGRAD

Reviewed By: spcyppt

Differential Revision: D60977713

fbshipit-source-id: 7278897b2a238440e99d875d42b5bfadc58e5beb
1 parent 6c997de commit 3c559ab

5 files changed: +64, -20 lines

fbgemm_gpu/codegen/training/python/__init__.template

Lines changed: 28 additions & 16 deletions
@@ -6,22 +6,34 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
-# All optimizers
-import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args as lookup_args  # noqa: F401
-{%- for optim in all_optimizers %}
-import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }} as lookup_{{optim}}  # noqa: F401
-{%- endfor %}
+import warnings
 
-# SSD optimizers (putting them under try-except for BC as they are
-# experimental ops which can be removed/updated in the future)
+{%- macro force_import(name) %}
+import fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }} as {{ name }}  # noqa: F401
+{%- endmacro %}
+
+{%- macro try_import(name) %}
 try:
-    import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args_ssd as lookup_args_ssd
-    {%- for optim in ssd_optimizers %}
-    import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }}_ssd as lookup_{{ optim }}_ssd
-    {%- endfor %}
+    # Import is placed under a try-except bc the op is experimental and can be
+    # removed/updated in the future
+    import fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }} as {{ name }}  # noqa: F401
 except:
-    import logging
-    logging.warn("fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args_ssd import failed")
-    {%- for optim in ssd_optims %}
-    logging.warn("fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_{{ optim }}_ssd import failed")
-    {%- endfor %}
+    warnings.warn(
+        f"""\033[93m
+        Failed to import: fbgemm_gpu.split_embedding_codegen_lookup_invokers.{{ name }}
+        \033[0m""",
+        DeprecationWarning,
+    )
+{%- endmacro %}
+
+# TBE optimizers
+{{- force_import("lookup_args") }}
+{%- for optim in all_optimizers %}
+{{ try_import("lookup_" + optim) }}
+{%- endfor %}
+
+# SSD TBE optimizers
+{{- try_import("lookup_args_ssd") }}
+{%- for optim in ssd_optimizers %}
+{{ try_import("lookup_" + optim + "_ssd") }}
+{%- endfor %}
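
To make the macro refactor concrete, here is a sketch of what the rendered __init__.py would look like for a single hypothetical optimizer name, adam (the optimizer name and exact blank-line placement are assumptions; the import paths and warning body come from the template above):

    import warnings

    # Rendered from force_import("lookup_args"): a hard import that fails
    # loudly if the module is missing.
    import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_args as lookup_args  # noqa: F401

    # Rendered from try_import("lookup_adam"): experimental op, so an import
    # failure only emits a warning instead of breaking the package import.
    try:
        import fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_adam as lookup_adam  # noqa: F401
    except:
        warnings.warn(
            f"""\033[93m
            Failed to import: fbgemm_gpu.split_embedding_codegen_lookup_invokers.lookup_adam
            \033[0m""",
            DeprecationWarning,
        )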

fbgemm_gpu/fbgemm_gpu/config/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -6,4 +6,4 @@
 
 # pyre-strict
 
-from .feature_list import FeatureGateName  # noqa F401
+from .feature_list import FeatureGate, FeatureGateName  # noqa F401

fbgemm_gpu/fbgemm_gpu/config/feature_list.py

Lines changed: 26 additions & 1 deletion
@@ -45,7 +45,32 @@ def foo():
 
     """
 
+    # Enable TBE V2 APIs
     TBE_V2 = auto()
 
+    # Enable Ensemble Rowwise Adagrad (D60189486 stack)
+    TBE_ENSEMBLE_ROWWISE_ADAGRAD = auto()
+
     def is_enabled(self) -> bool:
-        return torch.ops.fbgemm.check_feature_gate_key(self.name)
+        return FeatureGate.is_enabled(self)
+
+
+class FeatureGate:
+    """
+    FBGEMM_GPU feature gate.
+
+    This class exists because methods defined on enums cannot be invoked when
+    the enum is packaged into a model (the mechanism is unclear).
+
+    **Code Example:**
+
+    .. code-block:: python
+
+        from deeplearning.fbgemm.fbgemm_gpu.config import FeatureGate, FeatureGateName
+
+        FeatureGate.is_enabled(FeatureGateName.TBE_V2)
+    """
+
+    @classmethod
+    def is_enabled(cls, feature: FeatureGateName) -> bool:
+        return torch.ops.fbgemm.check_feature_gate_key(feature.name)
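Mirroring the docstring's own code example, a caller could gate on the new knob like this (the if/else structure is illustrative, not taken from the commit; the OSS import path matches the test below):

    from fbgemm_gpu.config import FeatureGate, FeatureGateName

    # Select the ensemble rowwise Adagrad path only when the gate is on.
    if FeatureGate.is_enabled(FeatureGateName.TBE_ENSEMBLE_ROWWISE_ADAGRAD):
        ...  # ensemble rowwise Adagrad path
    else:
        ...  # existing default path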

fbgemm_gpu/include/fbgemm_gpu/config/feature_gates.h

Lines changed: 3 additions & 1 deletion
@@ -55,7 +55,9 @@ namespace fbgemm_gpu::config {
 /// UI.
 ///
 /// For OSS: The environment variable will be evaluated as f"FBGEMM_{ENUM}"
-#define ENUMERATE_ALL_FEATURE_FLAGS X(TBE_V2)
+#define ENUMERATE_ALL_FEATURE_FLAGS \
+  X(TBE_V2)                         \
+  X(TBE_ENSEMBLE_ROWWISE_ADAGRAD)
 // X(EXAMPLE_FEATURE_FLAG)
 
 /// @ingroup fbgemm-gpu-config
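Per the header comment above, OSS builds evaluate each gate from an environment variable named f"FBGEMM_{ENUM}", so the new flag would map to FBGEMM_TBE_ENSEMBLE_ROWWISE_ADAGRAD. A minimal sketch, assuming the variable is read when the gate is checked and that "1" counts as enabled:

    import os

    # Assumption: setting the documented FBGEMM_{ENUM} variable to "1"
    # enables the gate in an OSS build.
    os.environ["FBGEMM_TBE_ENSEMBLE_ROWWISE_ADAGRAD"] = "1"

    import fbgemm_gpu  # noqa: E402
    from fbgemm_gpu.config import FeatureGate, FeatureGateName  # noqa: E402

    print(FeatureGate.is_enabled(FeatureGateName.TBE_ENSEMBLE_ROWWISE_ADAGRAD))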

fbgemm_gpu/test/config/feature_gate_test.py

Lines changed: 6 additions & 1 deletion
@@ -12,7 +12,7 @@
 
 # pyre-fixme[21]
 import fbgemm_gpu
-from fbgemm_gpu.config import FeatureGateName
+from fbgemm_gpu.config import FeatureGate, FeatureGateName
 
 # pyre-fixme[16]: Module `fbgemm_gpu` has no attribute `open_source`.
 open_source: bool = getattr(fbgemm_gpu, "open_source", False)
@@ -38,6 +38,11 @@ def test_feature_gates(self) -> None:
         with self.assertNotRaised(Exception):
             print(f"\n[OSS] Feature {feature.name} enabled: {feature.is_enabled()}")
 
+        with self.assertNotRaised(Exception):
+            print(
+                f"\n[OSS] Feature {feature.name} enabled: {FeatureGate.is_enabled(feature)}"
+            )
+
     @unittest.skipIf(open_source, "Not supported in open source")
     def test_feature_gates_fb(self) -> None:
         # pyre-fixme[16]
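
Note that assertNotRaised is not a standard unittest.TestCase method; FBGEMM's test utilities define their own. A minimal context-manager stand-in (an assumption, not the project's actual helper) would be:

    import contextlib
    import unittest


    class TestCaseWithAssertNotRaised(unittest.TestCase):
        # Hypothetical stand-in for the helper used above: fail the test if
        # the guarded block raises the given exception type.
        @contextlib.contextmanager
        def assertNotRaised(self, exc_type):
            try:
                yield
            except exc_type as e:
                self.fail(f"Raised unexpectedly: {e}")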
