Commit
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Nov 4, 2024
1 parent 71b4c80 commit 845d887
Showing 1 changed file with 32 additions and 30 deletions.

thunder/tests/test_networks.py
@@ -401,36 +401,38 @@ def test_thunderfx_mistral_nemo_small():


 LLAMA_3_2_1B_CFG = {
-    'architectures': ['LlamaForCausalLM'],
-    'attention_bias': False,
-    'attention_dropout': 0.0,
-    'bos_token_id': 128000,
-    'eos_token_id': 128001,
-    'head_dim': 64,
-    'hidden_act': 'silu',
-    'hidden_size': 2048,
-    'initializer_range': 0.02,
-    'intermediate_size': 8192,
-    'max_position_embeddings': 131072,
-    'mlp_bias': False,
-    'model_type': 'llama',
-    'num_attention_heads': 32,
-    'num_hidden_layers': 16,
-    'num_key_value_heads': 8,
-    'pretraining_tp': 1,
-    'rms_norm_eps': 1e-05,
-    'rope_scaling': {'factor': 32.0,
-                     'high_freq_factor': 4.0,
-                     'low_freq_factor': 1.0,
-                     'original_max_position_embeddings': 8192,
-                     'rope_type': 'llama3'},
-    'rope_theta': 500000.0,
-    'tie_word_embeddings': True,
-    'torch_dtype': 'bfloat16',
-    'transformers_version': '4.45.0.dev0',
-    'use_cache': True,
-    'vocab_size': 128256,
-    '_commit_hash': '4e20de362430cd3b72f300e6b0f18e50e7166e08'
+    "architectures": ["LlamaForCausalLM"],
+    "attention_bias": False,
+    "attention_dropout": 0.0,
+    "bos_token_id": 128000,
+    "eos_token_id": 128001,
+    "head_dim": 64,
+    "hidden_act": "silu",
+    "hidden_size": 2048,
+    "initializer_range": 0.02,
+    "intermediate_size": 8192,
+    "max_position_embeddings": 131072,
+    "mlp_bias": False,
+    "model_type": "llama",
+    "num_attention_heads": 32,
+    "num_hidden_layers": 16,
+    "num_key_value_heads": 8,
+    "pretraining_tp": 1,
+    "rms_norm_eps": 1e-05,
+    "rope_scaling": {
+        "factor": 32.0,
+        "high_freq_factor": 4.0,
+        "low_freq_factor": 1.0,
+        "original_max_position_embeddings": 8192,
+        "rope_type": "llama3",
+    },
+    "rope_theta": 500000.0,
+    "tie_word_embeddings": True,
+    "torch_dtype": "bfloat16",
+    "transformers_version": "4.45.0.dev0",
+    "use_cache": True,
+    "vocab_size": 128256,
+    "_commit_hash": "4e20de362430cd3b72f300e6b0f18e50e7166e08",
 }
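
For context, here is a minimal sketch of how a test might consume LLAMA_3_2_1B_CFG with the Hugging Face transformers API. This is an assumption for illustration only; the actual usage inside test_networks.py is not shown in this diff.

import torch
from transformers import LlamaConfig, LlamaForCausalLM

# Build a transformers config from the dict. Extra keys such as
# "_commit_hash" are tolerated as kwargs by PretrainedConfig.
config = LlamaConfig(**LLAMA_3_2_1B_CFG)

# Instantiate on the meta device so no real weights are allocated,
# which keeps a structural test cheap (assumed pattern, not from this diff).
with torch.device("meta"):
    model = LlamaForCausalLM(config)

assert model.config.num_hidden_layers == 16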


