Upload inclusionAI_Ring-flash-linear-2.0-128k_1.py with huggingface_hub
inclusionAI_Ring-flash-linear-2.0-128k_1.py
CHANGED
@@ -27,13 +27,14 @@ except Exception as e:
 
     with open('inclusionAI_Ring-flash-linear-2.0-128k_1.txt', 'a', encoding='utf-8') as f:
         import traceback
-        f.write('
+        f.write('''
+```CODE:
 # Load model directly
 from transformers import AutoModelForCausalLM
 model = AutoModelForCausalLM.from_pretrained("inclusionAI/Ring-flash-linear-2.0-128k", trust_remote_code=True, torch_dtype="auto")
 ```
 ERROR:
-')
+''')
         traceback.print_exc(file=f)
 
 finally:
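For context, the hunk implies the script wraps the model load in a try/except/finally block and appends any failure to a log file; the change replaces a single-quoted f.write that spanned multiple lines (a syntax error) with a triple-quoted string. Below is a minimal sketch of that pattern; the surrounding try/except/finally scaffolding is an assumption based on the hunk header, since only lines 27-40 of the script are shown above.

# Minimal sketch of the error-logging pattern, assuming the try/except/finally
# structure suggested by the hunk header (the full script is not shown here).
import traceback

try:
    # Load model directly
    from transformers import AutoModelForCausalLM
    model = AutoModelForCausalLM.from_pretrained(
        "inclusionAI/Ring-flash-linear-2.0-128k",
        trust_remote_code=True,
        torch_dtype="auto",
    )
except Exception as e:
    with open('inclusionAI_Ring-flash-linear-2.0-128k_1.txt', 'a', encoding='utf-8') as f:
        # The triple-quoted literal introduced by this change can span multiple
        # lines, so the attempted snippet is written to the log verbatim; the
        # earlier single-quoted version could not span lines and failed to parse.
        f.write('''
```CODE:
# Load model directly
from transformers import AutoModelForCausalLM
model = AutoModelForCausalLM.from_pretrained("inclusionAI/Ring-flash-linear-2.0-128k", trust_remote_code=True, torch_dtype="auto")
```
ERROR:
''')
        # Append the full traceback of the failure after the ERROR: header.
        traceback.print_exc(file=f)
finally:
    pass  # the body of the finally block lies outside the shown hunk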