noequal committed on
Commit 80e06e8 · 1 Parent(s): b791513

Update app.py

Files changed (1)
  1. app.py +20 -10
app.py CHANGED
@@ -1,18 +1,28 @@
+ from transformers import GPT2LMHeadModel, GPT2Tokenizer
  import streamlit as st
- from transformers import pipeline

- # Load the zero-shot classification pipeline
- theme_detection = pipeline('zero-shot-classification')
+ # Load pre-trained GPT-2 model and tokenizer
+ model_name = "gpt2"  # You can try other GPT-2 variants as well
+ tokenizer = GPT2Tokenizer.from_pretrained(model_name)
+ model = GPT2LMHeadModel.from_pretrained(model_name)

- st.title("Theme Detection App")
+ st.title("Theme Generation App")

  # Create a textarea for user input
  user_text = st.text_area("Enter Text:", "Type here...")

- if st.button("Detect Themes"):
-     # Perform theme detection
-     themes = theme_detection(user_text, ['Theme1', 'Theme2', 'Theme3'])
-
-     # Display the result
-     st.success(f"Detected Themes: {', '.join(themes['labels'])}")
+ if st.button("Generate Themes"):
+     # Construct a prompt for thematic generation
+     prompt = f"Generate themes based on the following text: {user_text}\nThemes:"
+
+     # Tokenize the prompt
+     input_ids = tokenizer.encode(prompt, return_tensors="pt")

+     # Generate themes using GPT-2
+     output = model.generate(input_ids, max_length=100, num_beams=5, no_repeat_ngram_size=2, top_k=50, top_p=0.95, temperature=0.7)
+
+     # Decode the generated themes
+     generated_themes = tokenizer.decode(output[0], skip_special_tokens=True)
+
+     # Display the generated themes
+     st.success(f"Generated Themes: {generated_themes}")
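
A note on the new generate() call: in transformers, top_k, top_p and temperature only take effect when sampling is enabled, so with num_beams=5 and the default do_sample=False the call above runs plain beam search and those three settings are ignored. Also, tokenizer.decode(output[0]) returns the prompt plus the continuation, so the app will display the prompt text as part of the "themes". A minimal standalone sketch of a sampling-based variant (not part of this commit; the eos_token_id padding and the prompt-stripping slice are suggestions, not existing app behavior):

from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")

prompt = "Generate themes based on the following text: <your text>\nThemes:"
input_ids = tokenizer.encode(prompt, return_tensors="pt")

# do_sample=True is what makes top_k/top_p/temperature apply;
# without it generate() falls back to (beam) search and ignores them.
output = model.generate(
    input_ids,
    max_length=100,
    do_sample=True,
    top_k=50,
    top_p=0.95,
    temperature=0.7,
    no_repeat_ngram_size=2,
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token; avoids a generate() warning
)

# Slice off the prompt tokens so only the continuation (the "themes") remains.
generated_themes = tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)
print(generated_themes)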
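
Separately, Streamlit reruns the whole script on every interaction, so the GPT-2 weights above are reloaded each time the button is clicked. A small sketch of one way to load them once (assumes Streamlit 1.18+ where st.cache_resource is available; load_model is just an illustrative helper name, not something in this repo):

import streamlit as st
from transformers import GPT2LMHeadModel, GPT2Tokenizer

@st.cache_resource  # cache the loaded objects across script reruns
def load_model(model_name: str = "gpt2"):
    tokenizer = GPT2Tokenizer.from_pretrained(model_name)
    model = GPT2LMHeadModel.from_pretrained(model_name)
    return tokenizer, model

tokenizer, model = load_model()

The rest of the app (text area, button, generate call) can stay as committed; only the loading step would change.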