
Commit b98c294

m3rlin45 authored and facebook-github-bot committed
Introduce Class Usage Logging (#1243)
Summary:
Pull Request resolved: #1243

This builds on https://pytorch.org/docs/stable/notes/large_scale_deployments.html#api-usage-logging to log when certain classes are initialized. In this diff, I only covered bert/roberta models (and not even necessarily all of them) to prove that the system works.

This will not affect OSS users, as for them, the logger is a no-op.

Reviewed By: snisarg

Differential Revision: D19744167

fbshipit-source-id: f07845d1c1bd1f493c0e084661ab138b48b56ad2
1 parent 14d942e commit b98c294
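
To make the mechanism concrete, here is a minimal sketch of the pattern this commit introduces: a model constructor calls the new log_class_usage helper (added in pytext/utils/usage.py below), which forwards an identifier to PyTorch's API-usage hook. The class name ExampleModel is hypothetical and used only for illustration; in OSS builds no logger is registered, so the call is a no-op.

from pytext.utils.usage import log_class_usage


class ExampleModel:  # hypothetical class, not part of this commit
    def __init__(self):
        # Emits the identifier "PyText.ExampleModel" through
        # torch._C._log_api_usage_once; a no-op unless a logger is registered.
        log_class_usage(__class__)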

File tree: 4 files changed (+23 −0 lines changed)

pytext/models/bert_classification_models.py

Lines changed: 3 additions & 0 deletions
@@ -32,6 +32,7 @@
     TransformerSentenceEncoderBase,
 )
 from pytext.utils.label import get_label_weights
+from pytext.utils.usage import log_class_usage


 class NewBertModel(BaseModel):
@@ -120,6 +121,7 @@ def __init__(self, encoder, decoder, output_layer, stage=Stage.TRAIN) -> None:
         self.output_layer = output_layer
         self.stage = stage
         self.module_list = [encoder, decoder]
+        log_class_usage(__class__)


 class BertPairwiseModel(BasePairwiseModel):
@@ -162,6 +164,7 @@ def __init__(
         self.encoder1 = encoder1
         self.encoder2 = encoder2
         self.encoders = [encoder1, encoder2]
+        log_class_usage(__class__)

     @classmethod
     def _create_encoder(

pytext/models/bert_regression_model.py

Lines changed: 5 additions & 0 deletions
@@ -9,6 +9,7 @@
 from pytext.models.bert_classification_models import NewBertModel
 from pytext.models.module import create_module
 from pytext.models.output_layers import RegressionOutputLayer
+from pytext.utils.usage import log_class_usage


 class NewBertRegressionModel(NewBertModel):
@@ -37,3 +38,7 @@ def from_config(cls, config: Config, tensorizers: Dict[str, Tensorizer]):
         )
         output_layer = RegressionOutputLayer.from_config(config.output_layer)
         return cls(encoder, decoder, output_layer)
+
+    def __init__(self, encoder, decoder, output_layer) -> None:
+        super().__init__(encoder, decoder, output_layer)
+        log_class_usage(__class__)

pytext/models/roberta.py

Lines changed: 4 additions & 0 deletions
@@ -27,6 +27,7 @@
 )
 from pytext.torchscript.module import get_script_module_cls
 from pytext.utils.file_io import PathManager
+from pytext.utils.usage import log_class_usage
 from torch.serialization import default_restore_location


@@ -72,6 +73,7 @@ def __init__(self, config: Config, output_encoded_layers: bool, **kwarg) -> None
         assert config.pretrained_encoder.load_path, "Load path cannot be empty."
         self.encoder = create_module(config.pretrained_encoder)
         self.representation_dim = self.encoder.encoder.token_embedding.weight.size(-1)
+        log_class_usage(__class__)

     def _embedding(self):
         # used to tie weights in MaskedLM model
@@ -128,6 +130,7 @@ def __init__(self, config: Config, output_encoded_layers: bool, **kwarg) -> None
         self.load_state_dict(roberta_state)

         self.representation_dim = self._embedding().weight.size(-1)
+        log_class_usage(__class__)

     def _embedding(self):
         # used to tie weights in MaskedLM model
@@ -200,6 +203,7 @@ def __init__(self, encoder, decoder, output_layer, stage=Stage.TRAIN) -> None:
         self.module_list = [encoder, decoder]
         self.output_layer = output_layer
         self.stage = stage
+        log_class_usage(__class__)

     def arrange_model_inputs(self, tensor_dict):
         tokens, pad_mask, segment_labels, positions, _ = tensor_dict["tokens"]

pytext/utils/usage.py

Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+
+import torch
+
+
+def log_class_usage(klass):
+    identifier = "PyText"
+    if klass and hasattr(klass, "__name__"):
+        identifier += f".{klass.__name__}"
+    torch._C._log_api_usage_once(identifier)
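
As a usage note, the helper builds the identifier from the class's __name__ and guards against a missing class. A short sketch of both paths (the Example class is hypothetical, for illustration only):

from pytext.utils.usage import log_class_usage


class Example:  # hypothetical class for illustration only
    pass


log_class_usage(Example)  # logs "PyText.Example" via torch._C._log_api_usage_once
log_class_usage(None)     # falls back to logging just "PyText"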

0 commit comments
