# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "torch",
#     "torchvision",
#     "transformers",
#     "diffusers",
#     "sentence-transformers",
#     "accelerate",
#     "peft",
#     "slack-sdk",
# ]
# ///
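
# Run the SmolLM3-3B chat-generation snippet, log the outcome to a text file,
# report any failure to Slack, and upload the log to the Hub in all cases.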

from transformers import AutoModelForCausalLM, AutoTokenizer

try:
    # Load the tokenizer and model; the checkpoint name is inferred from
    # the output filename (HuggingFaceTB_SmolLM3-3B_4.txt)
    model_name = "HuggingFaceTB/SmolLM3-3B"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name, torch_dtype="auto", device_map="auto"
    )

    # prepare the model input
    prompt = "Give me a brief explanation of gravity in simple terms."
    messages_think = [
        {"role": "user", "content": prompt}
    ]
    
    text = tokenizer.apply_chat_template(
        messages_think,
        tokenize=False,
        add_generation_prompt=True,
    )
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
    
    # Generate the output
    generated_ids = model.generate(**model_inputs, max_new_tokens=32768)
    
    # Get and decode the output
    output_ids = generated_ids[0][len(model_inputs.input_ids[0]) :]
    print(tokenizer.decode(output_ids, skip_special_tokens=True))
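    # Record a success marker so the uploaded log shows the snippet ran cleanly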
    with open('HuggingFaceTB_SmolLM3-3B_4.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_4.txt')
except Exception as e:
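    # On any failure, notify the Slack channel and append the snippet plus traceback to the log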
    import os
    from slack_sdk import WebClient
    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#hub-model-metadata-snippets-sprint',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/HuggingFaceTB_SmolLM3-3B_4.txt|HuggingFaceTB_SmolLM3-3B_4.txt>',
    )

    import traceback
    with open('HuggingFaceTB_SmolLM3-3B_4.txt', 'a', encoding='utf-8') as f:
        f.write('''```CODE: 
# prepare the model input
prompt = "Give me a brief explanation of gravity in simple terms."
messages_think = [
    {"role": "user", "content": prompt}
]

text = tokenizer.apply_chat_template(
    messages_think,
    tokenize=False,
    add_generation_prompt=True,
)
model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

# Generate the output
generated_ids = model.generate(**model_inputs, max_new_tokens=32768)

# Get and decode the output
output_ids = generated_ids[0][len(model_inputs.input_ids[0]) :]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
```

ERROR: 
''')
        traceback.print_exc(file=f)
    
finally:
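    # Always upload the log file to the dataset repo, pass or fail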
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='HuggingFaceTB_SmolLM3-3B_4.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='HuggingFaceTB_SmolLM3-3B_4.txt',
        repo_type='dataset',
    )