Commit

resolve TE dependency
suiyoubi committed Dec 2, 2024
1 parent 5acf9aa commit d363a11
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions nemo/collections/llm/bert/model/base.py
@@ -20,9 +20,7 @@
 import torch.distributed
 from megatron.core import InferenceParams, parallel_state, tensor_parallel
 from megatron.core.fusions.fused_layer_norm import FusedLayerNorm
-from megatron.core.models.bert import bert_layer_specs
 from megatron.core.models.bert.bert_lm_head import BertLMHead as MCoreBertLMHead
-from megatron.core.models.bert.bert_model import BertModel as MCoreBert
 from megatron.core.models.bert.pooler import Pooler
 from megatron.core.optimizer import OptimizerConfig
 from megatron.core.packed_seq_params import PackedSeqParams
@@ -46,8 +44,11 @@
 HAVE_TE = True
 try:
     import transformer_engine  # pylint: disable=W0611
+    from megatron.core.models.bert import bert_layer_specs
+    from megatron.core.models.bert.bert_model import BertModel as MCoreBert
 except (ImportError, ModuleNotFoundError) as e:
     HAVE_TE = False
+    MCoreBert = TransformerLayer  # Placeholder for import checking. BERT requires TE installed.

 if TYPE_CHECKING:
     from nemo.collections.common.tokenizers.tokenizer_spec import TokenizerSpec
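The change follows a common optional-dependency pattern: imports that only resolve when Transformer Engine (TE) is installed are moved inside the try block that probes for TE, and a placeholder is bound on failure so later references to the name do not raise NameError. Below is a minimal, self-contained sketch of that pattern; the names SomeFallbackLayer and require_te are hypothetical illustrations, not NeMo APIs.

HAVE_TE = True
try:
    # Probe for the optional dependency; the import itself is the check.
    import transformer_engine  # noqa: F401

    # Imports that transitively require TE live inside the same guard.
    from megatron.core.models.bert.bert_model import BertModel as MCoreBert
except (ImportError, ModuleNotFoundError):
    HAVE_TE = False

    class SomeFallbackLayer:  # hypothetical placeholder so the name always exists
        """Stands in for MCoreBert when TE is unavailable."""

    MCoreBert = SomeFallbackLayer


def require_te() -> None:
    # Hypothetical helper: fail at use time with a clear message,
    # rather than at import time with a bare ModuleNotFoundError.
    if not HAVE_TE:
        raise ImportError("BERT support requires transformer_engine to be installed.")

With this shape, importing the module succeeds on machines without TE, and only the code paths that actually construct a BERT model hit the explicit error.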
