ariG23498 HF Staff committed on
Commit
baee13d
·
verified ·
1 Parent(s): e6d1184

Upload aquif-ai_aquif-3.5-Max-42B-A3B_1.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. aquif-ai_aquif-3.5-Max-42B-A3B_1.py +84 -0
aquif-ai_aquif-3.5-Max-42B-A3B_1.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # /// script
2
+ # requires-python = ">=3.12"
3
+ # dependencies = [
4
+ # "numpy",
5
+ # "einops",
6
+ # "pandas",
7
+ # "protobuf",
8
+ # "torch",
9
+ # "torchvision",
10
+ # "transformers",
11
+ # "timm",
12
+ # "diffusers",
13
+ # "sentence-transformers",
14
+ # "accelerate",
15
+ # "peft",
16
+ # "slack-sdk",
17
+ # ]
18
+ # ///
19
+
20
+ try:
21
+ # Load model directly
22
+ from transformers import AutoTokenizer, AutoModelForCausalLM
23
+
24
+ tokenizer = AutoTokenizer.from_pretrained("aquif-ai/aquif-3.5-Max-42B-A3B")
25
+ model = AutoModelForCausalLM.from_pretrained("aquif-ai/aquif-3.5-Max-42B-A3B")
26
+ messages = [
27
+ {"role": "user", "content": "Who are you?"},
28
+ ]
29
+ inputs = tokenizer.apply_chat_template(
30
+ messages,
31
+ add_generation_prompt=True,
32
+ tokenize=True,
33
+ return_dict=True,
34
+ return_tensors="pt",
35
+ ).to(model.device)
36
+
37
+ outputs = model.generate(**inputs, max_new_tokens=40)
38
+ print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
39
+ with open('aquif-ai_aquif-3.5-Max-42B-A3B_1.txt', 'w', encoding='utf-8') as f:
40
+ f.write('Everything was good in aquif-ai_aquif-3.5-Max-42B-A3B_1.txt')
41
+ except Exception as e:
42
+ import os
43
+ from slack_sdk import WebClient
44
+ client = WebClient(token=os.environ['SLACK_TOKEN'])
45
+ client.chat_postMessage(
46
+ channel='#hub-model-metadata-snippets-sprint',
47
+ text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/aquif-ai_aquif-3.5-Max-42B-A3B_1.txt|aquif-ai_aquif-3.5-Max-42B-A3B_1.txt>',
48
+ )
49
+
50
+ with open('aquif-ai_aquif-3.5-Max-42B-A3B_1.txt', 'a', encoding='utf-8') as f:
51
+ import traceback
52
+ f.write('''```CODE:
53
+ # Load model directly
54
+ from transformers import AutoTokenizer, AutoModelForCausalLM
55
+
56
+ tokenizer = AutoTokenizer.from_pretrained("aquif-ai/aquif-3.5-Max-42B-A3B")
57
+ model = AutoModelForCausalLM.from_pretrained("aquif-ai/aquif-3.5-Max-42B-A3B")
58
+ messages = [
59
+ {"role": "user", "content": "Who are you?"},
60
+ ]
61
+ inputs = tokenizer.apply_chat_template(
62
+ messages,
63
+ add_generation_prompt=True,
64
+ tokenize=True,
65
+ return_dict=True,
66
+ return_tensors="pt",
67
+ ).to(model.device)
68
+
69
+ outputs = model.generate(**inputs, max_new_tokens=40)
70
+ print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
71
+ ```
72
+
73
+ ERROR:
74
+ ''')
75
+ traceback.print_exc(file=f)
76
+
77
+ finally:
78
+ from huggingface_hub import upload_file
79
+ upload_file(
80
+ path_or_fileobj='aquif-ai_aquif-3.5-Max-42B-A3B_1.txt',
81
+ repo_id='model-metadata/code_execution_files',
82
+ path_in_repo='aquif-ai_aquif-3.5-Max-42B-A3B_1.txt',
83
+ repo_type='dataset',
84
+ )