ariG23498 HF Staff commited on
Commit
fcb97fb
·
verified ·
1 Parent(s): d5c9ccb

Upload HuggingFaceTB_SmolLM3-3B_6.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. HuggingFaceTB_SmolLM3-3B_6.py +93 -0
HuggingFaceTB_SmolLM3-3B_6.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# /// script
# requires-python = ">=3.12"
# dependencies = [
# "torch",
# "torchvision",
# "transformers",
# "diffusers",
# "sentence-transformers",
# "accelerate",
# "peft",
# "slack-sdk",
# ]
# ///

# Smoke-test script: run a tool-calling chat prompt through SmolLM3-3B,
# record success/failure to a text file, alert Slack on failure, and
# always upload the result file to the Hub dataset repo.

try:
    # FIX: the original script used `tokenizer` and `model` without ever
    # defining them, so every run raised NameError and took the failure
    # path. Load them explicitly; the model id comes from the filename.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_id = "HuggingFaceTB/SmolLM3-3B"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    # device_map="auto" places the model on GPU when available (accelerate
    # is declared in the script dependencies above).
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

    # Single tool definition exercised by the prompt below.
    tools = [
        {
            "name": "get_weather",
            "description": "Get the weather in a city",
            "parameters": {"type": "object", "properties": {"city": {"type": "string", "description": "The city to get the weather for"}}}}
    ]

    messages = [
        {
            "role": "user",
            "content": "Hello! How is the weather today in Copenhagen?"
        }
    ]

    inputs = tokenizer.apply_chat_template(
        messages,
        enable_thinking=False,  # True works as well, your choice!
        xml_tools=tools,
        add_generation_prompt=True,
        tokenize=True,
        return_tensors="pt"
    ).to(model.device)

    outputs = model.generate(inputs)
    print(tokenizer.decode(outputs[0]))
    # Success marker consumed by downstream tooling — keep the exact text.
    with open('HuggingFaceTB_SmolLM3-3B_6.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_6.txt')
except Exception as e:
    # Failure boundary: notify Slack, then append the failing snippet and
    # full traceback to the result file so the uploaded artifact explains
    # what went wrong.
    import os
    from slack_sdk import WebClient
    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#hub-model-metadata-snippets-sprint',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/HuggingFaceTB_SmolLM3-3B_6.txt|HuggingFaceTB_SmolLM3-3B_6.txt>',
    )

    # 'a' mode creates the file if the failure happened before the
    # success write above.
    with open('HuggingFaceTB_SmolLM3-3B_6.txt', 'a', encoding='utf-8') as f:
        import traceback
        f.write('''```CODE:
tools = [
    {
        "name": "get_weather",
        "description": "Get the weather in a city",
        "parameters": {"type": "object", "properties": {"city": {"type": "string", "description": "The city to get the weather for"}}}}
]

messages = [
    {
        "role": "user",
        "content": "Hello! How is the weather today in Copenhagen?"
    }
]

inputs = tokenizer.apply_chat_template(
    messages,
    enable_thinking=False, # True works as well, your choice!
    xml_tools=tools,
    add_generation_prompt=True,
    tokenize=True,
    return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs)
print(tokenizer.decode(outputs[0]))
```

ERROR:
''')
        traceback.print_exc(file=f)

finally:
    # Always publish the result file, whether the run succeeded or failed.
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='HuggingFaceTB_SmolLM3-3B_6.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='HuggingFaceTB_SmolLM3-3B_6.txt',
        repo_type='dataset',
    )