Upload vandijklab_C2S-Scale-Gemma-2-27B_1.py with huggingface_hub
vandijklab_C2S-Scale-Gemma-2-27B_1.py (CHANGED)
@@ -29,14 +29,14 @@ except Exception as e:
 
     with open('vandijklab_C2S-Scale-Gemma-2-27B_1.txt', 'a', encoding='utf-8') as f:
         import traceback
-        f.write('''
-```CODE:
+        f.write('''```CODE:
 # Load model directly
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
 model = AutoModelForCausalLM.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
 ```
+
 ERROR:
 ''')
         traceback.print_exc(file=f)
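The edit folds the opening triple quote and the ```CODE: marker onto one line, so the logged block no longer begins with a stray blank line, and it adds a blank line before ERROR: so the snippet and the traceback are visually separated in the log file.

For context, a minimal self-contained sketch of the pattern these lines implement: attempt the model load, and on failure append the attempted snippet plus the full traceback to the run log. The try block and the surrounding structure are assumptions, since the hunk only shows the except branch; indentation here is illustrative.

# Hedged sketch (assumed context): the try block is not shown in the diff,
# only the `except Exception as e:` branch and the logging code are.
import traceback

try:
    from transformers import AutoTokenizer, AutoModelForCausalLM

    # Load model directly
    tokenizer = AutoTokenizer.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
    model = AutoModelForCausalLM.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
except Exception as e:
    # Append the attempted snippet and the full traceback to the run log,
    # mirroring the post-change version of the file.
    with open('vandijklab_C2S-Scale-Gemma-2-27B_1.txt', 'a', encoding='utf-8') as f:
        f.write('''```CODE:
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
model = AutoModelForCausalLM.from_pretrained("vandijklab/C2S-Scale-Gemma-2-27B")
```

ERROR:
''')
        traceback.print_exc(file=f)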