ariG23498 HF Staff commited on
Commit
d5c9ccb
·
verified ·
1 Parent(s): 77e7fcd

Upload HuggingFaceTB_SmolLM3-3B_5.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. HuggingFaceTB_SmolLM3-3B_5.py +83 -0
HuggingFaceTB_SmolLM3-3B_5.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "torch",
#     "torchvision",
#     "transformers",
#     "diffusers",
#     "sentence-transformers",
#     "accelerate",
#     "peft",
#     "slack-sdk",
# ]
# ///

# Smoke-test script: run a short chat generation with SmolLM3-3B, record
# success/failure to HuggingFaceTB_SmolLM3-3B_5.txt, post a Slack alert on
# failure, and always upload the result file to the Hub dataset repo.

try:
    # BUGFIX: the original script used `tokenizer` and `model` without ever
    # loading them, so it always raised NameError and took the error path.
    # Load them inside the try-block so download/OOM failures are reported
    # through the same Slack + error-file path as generation failures.
    # NOTE(review): repo id inferred from the output filename
    # "HuggingFaceTB_SmolLM3-3B_5" — confirm against the intended model.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_name = "HuggingFaceTB/SmolLM3-3B"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype="auto",   # keep native (bf16) weights instead of fp32
        device_map="auto",    # place on GPU when available (accelerate)
    )

    prompt = "Give me a brief explanation of gravity in simple terms."
    # "/no_think" system message disables SmolLM3's extended thinking mode.
    messages = [
        {"role": "system", "content": "/no_think"},
        {"role": "user", "content": prompt}
    ]

    # Render the chat template to a plain prompt string with the
    # generation prompt appended, then tokenize it.
    text = tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )

    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

    # Generate the output
    generated_ids = model.generate(**model_inputs, max_new_tokens=32768)

    # Get and decode the output (strip the echoed prompt tokens first)
    output_ids = generated_ids[0][len(model_inputs.input_ids[0]) :]
    print(tokenizer.decode(output_ids, skip_special_tokens=True))

    # Success marker consumed by the sprint dashboard.
    with open('HuggingFaceTB_SmolLM3-3B_5.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_5.txt')

except Exception:
    # Best-effort error reporting: ping Slack, then append the failing
    # snippet and traceback to the result file ('a' mode creates the file
    # if the success write above never happened).
    import os
    from slack_sdk import WebClient

    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#hub-model-metadata-snippets-sprint',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/HuggingFaceTB_SmolLM3-3B_5.txt|HuggingFaceTB_SmolLM3-3B_5.txt>',
    )

    with open('HuggingFaceTB_SmolLM3-3B_5.txt', 'a', encoding='utf-8') as f:
        import traceback
        f.write('''```CODE:
prompt = "Give me a brief explanation of gravity in simple terms."
messages = [
    {"role": "system", "content": "/no_think"},
    {"role": "user", "content": prompt}
]

text = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)

model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

# Generate the output
generated_ids = model.generate(**model_inputs, max_new_tokens=32768)

# Get and decode the output
output_ids = generated_ids[0][len(model_inputs.input_ids[0]) :]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
```

ERROR:
''')
        # print_exc() reports the exception currently being handled.
        traceback.print_exc(file=f)

finally:
    # Always upload the result file (success marker or error report).
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='HuggingFaceTB_SmolLM3-3B_5.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='HuggingFaceTB_SmolLM3-3B_5.txt',
        repo_type='dataset',
    )