from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging

logger = logging.get_logger(__name__)


class ConnectorConfig(PretrainedConfig):
    """Configuration for the Connector module.

    Stores the hyperparameters of a Transformer encoder stack: 768-dim hidden
    states, a 3072-dim MLP, 12 layers with 12 attention heads each, and a
    tanh-approximated GELU activation by default. Inherits serialization and
    loading behavior from PretrainedConfig.
    """

    def __init__(
        self,
        hidden_size=768,
        intermediate_size=3072,
        num_hidden_layers=12,
        num_attention_heads=12,
        hidden_act="gelu_pytorch_tanh",
        layer_norm_eps=1e-6,
        attention_dropout=0.0,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.attention_dropout = attention_dropout
        self.layer_norm_eps = layer_norm_eps
        self.hidden_act = hidden_act
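

# Minimal usage sketch (illustrative, not part of the original module): since
# ConnectorConfig subclasses PretrainedConfig, it inherits the standard
# to_dict()/from_dict() helpers, so a config with overridden hyperparameters
# can be serialized and restored without any extra code here.
if __name__ == "__main__":
    config = ConnectorConfig(hidden_size=1024, num_attention_heads=16)
    restored = ConnectorConfig.from_dict(config.to_dict())
    assert restored.hidden_size == 1024
    assert restored.num_attention_heads == 16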