ariG23498 (HF Staff) committed
Commit 3c1d1e3 · verified · 1 Parent(s): 59de194

Upload PokeeAI_pokee_research_7b_1.py with huggingface_hub

Files changed (1)
  1. PokeeAI_pokee_research_7b_1.py +8 -28
PokeeAI_pokee_research_7b_1.py CHANGED
@@ -11,24 +11,14 @@
 # ///
 
 try:
-    # Load model directly
-    from transformers import AutoTokenizer, AutoModelForCausalLM
+    # Use a pipeline as a high-level helper
+    from transformers import pipeline
 
-    tokenizer = AutoTokenizer.from_pretrained("PokeeAI/pokee_research_7b")
-    model = AutoModelForCausalLM.from_pretrained("PokeeAI/pokee_research_7b")
+    pipe = pipeline("text-generation", model="PokeeAI/pokee_research_7b")
     messages = [
         {"role": "user", "content": "Who are you?"},
     ]
-    inputs = tokenizer.apply_chat_template(
-        messages,
-        add_generation_prompt=True,
-        tokenize=True,
-        return_dict=True,
-        return_tensors="pt",
-    ).to(model.device)
-
-    outputs = model.generate(**inputs, max_new_tokens=40)
-    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+    pipe(messages)
     with open('PokeeAI_pokee_research_7b_1.txt', 'w', encoding='utf-8') as f:
         f.write('Everything was good in PokeeAI_pokee_research_7b_1.txt')
 except Exception as e:
@@ -43,24 +33,14 @@ except Exception as e:
     with open('PokeeAI_pokee_research_7b_1.txt', 'a', encoding='utf-8') as f:
         import traceback
         f.write('''```CODE:
-# Load model directly
-from transformers import AutoTokenizer, AutoModelForCausalLM
+# Use a pipeline as a high-level helper
+from transformers import pipeline
 
-tokenizer = AutoTokenizer.from_pretrained("PokeeAI/pokee_research_7b")
-model = AutoModelForCausalLM.from_pretrained("PokeeAI/pokee_research_7b")
+pipe = pipeline("text-generation", model="PokeeAI/pokee_research_7b")
 messages = [
     {"role": "user", "content": "Who are you?"},
 ]
-inputs = tokenizer.apply_chat_template(
-    messages,
-    add_generation_prompt=True,
-    tokenize=True,
-    return_dict=True,
-    return_tensors="pt",
-).to(model.device)
-
-outputs = model.generate(**inputs, max_new_tokens=40)
-print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+pipe(messages)
 ```
 
 ERROR:
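
For reference, a minimal sketch of how the new pipeline-based snippet's output could be inspected rather than discarded. It assumes a transformers version whose text-generation pipeline accepts chat-style message lists; the `result` variable and the `max_new_tokens` setting below are illustrative and not part of the committed file.

```python
# Minimal sketch (not the committed script): capture and print the pipeline
# output instead of ignoring the return value of pipe(messages).
from transformers import pipeline

pipe = pipeline("text-generation", model="PokeeAI/pokee_research_7b")

messages = [
    {"role": "user", "content": "Who are you?"},
]

# The pipeline returns a list with one dict per input; for chat-style input,
# "generated_text" holds the conversation including the model's reply.
result = pipe(messages, max_new_tokens=40)
print(result[0]["generated_text"])
```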