import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from huggingface_hub import login
import random
import os
import json
# Get Hugging Face token from environment variables
HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN", "")
if HUGGINGFACE_TOKEN:
try:
login(token=HUGGINGFACE_TOKEN)
print("βœ… Successfully connected to Hugging Face")
except Exception as e:
print(f"❌ Hugging Face login failed: {e}")
# Personal and Company Information
PERSONAL_INFO = {
"name": "Adedoyin Ifeoluwa James",
"role": "Entrepreneur, Founder & CEO of YAH Tech",
"goals": [
"Build profitable systems that matter",
"Reshaping the economical world",
"Use software development to create flexibility and contribute to economic growth"
]
}
COMPANY_INFO = {
"name": "YAH Tech",
"type": "Venture studio / app development company focused on solving problems with futuristic solutions",
"purpose": "Build and launch technology-driven ventures that generate profit and societal value",
"primary_activity": "App development and creation of scalable business systems",
"philosophy": "Learn, understand, create, and evaluate",
"stage": "Growth and development, focused on building sustainable processes and systems"
}
# Knowledge base for the AI - Expanded with more detailed information
KNOWLEDGE_BASE = {
"personal": {
"who are you": f"🀡 I'm **{PERSONAL_INFO['name']}**, {PERSONAL_INFO['role']}. I'm passionate about using technology to create meaningful impact and drive economic growth.",
"what are your goals": f"🎯 My goals include:\nβ€’ {PERSONAL_INFO['goals'][0]}\nβ€’ {PERSONAL_INFO['goals'][1]}\nβ€’ {PERSONAL_INFO['goals'][2]}",
"what do you do": f"πŸ’Ό I'm the CEO of YAH Tech, focused on **{PERSONAL_INFO['goals'][2]}**. I lead our venture studio in creating innovative technology solutions.",
"tell me about yourself": f"🌟 I'm **{PERSONAL_INFO['name']}**, {PERSONAL_INFO['role']}. My mission is to **{PERSONAL_INFO['goals'][0].lower()}** and **{PERSONAL_INFO['goals'][1].lower()}** through innovative technology solutions. I believe in the power of software development to transform economies.",
"background": f"πŸŽ“ I'm **{PERSONAL_INFO['name']}**, a technology entrepreneur dedicated to building systems that create both economic value and social impact through YAH Tech.",
"experience": f"πŸ’Ό As {PERSONAL_INFO['role']}, I've focused on building YAH Tech into a venture studio that develops scalable technology solutions and profitable business systems.",
},
"company": {
"what is yah tech": f"🏒 **{COMPANY_INFO['name']}** is a {COMPANY_INFO['type']}. Our purpose is to **{COMPANY_INFO['purpose'].lower()}**.",
"what does yah tech do": f"πŸš€ We specialize in **{COMPANY_INFO['primary_activity'].lower()}**. {COMPANY_INFO['purpose']}. We build complete technology ventures from the ground up.",
"company philosophy": f"πŸ’‘ Our philosophy is: **'{COMPANY_INFO['philosophy']}'** - we believe in thoroughly understanding problems before creating innovative solutions.",
"company stage": f"πŸ“ˆ We're currently in the **{COMPANY_INFO['stage']}** phase, building sustainable processes and systems for long-term success.",
"venture studio": "🎯 As a venture studio, we don't just build apps - we build complete businesses with scalable systems and futuristic solutions that drive economic growth. We identify opportunities, develop solutions, and launch ventures.",
"services": f"πŸ› οΈ {COMPANY_INFO['name']} offers: β€’ App development β€’ Venture building β€’ Business system design β€’ Technology solutions β€’ Scalable platform development",
"mission": f"🎯 Our mission at {COMPANY_INFO['name']} is to {COMPANY_INFO['purpose'].lower()} through innovative technology and systematic business development.",
},
"general": {
"hello": "πŸ‘‹ Hello! I'm YAH Bot, your assistant for everything about Adedoyin James and YAH Tech. How can I help you today?",
"hi": "πŸ‘‹ Hi there! I'm here to tell you about Adedoyin James and YAH Tech. What would you like to know?",
"help": "πŸ’‘ I can answer questions about:\nβ€’ Adedoyin James (background, goals, experience)\nβ€’ YAH Tech (company info, services, philosophy)\nβ€’ Our venture studio model and approach\nβ€’ Technology and business insights",
"thanks": "πŸ™ You're welcome! Is there anything else you'd like to know about Adedoyin or YAH Tech?",
"thank you": "πŸ™ You're welcome! Feel free to ask more about our work at YAH Tech.",
}
}
class YAHBot:
def __init__(self):
self.model_name = "google/flan-t5-base"
self.tokenizer = None
self.model = None
self._load_model()
def _load_model(self):
"""Load the Hugging Face model"""
try:
print("πŸ”„ Loading YAH Tech AI model...")
# Try to load custom model first
self.tokenizer = AutoTokenizer.from_pretrained(
"Adedoyinjames/YAH-Tech-Chat-Bot",
token=HUGGINGFACE_TOKEN if HUGGINGFACE_TOKEN else None
)
self.model = AutoModelForSeq2SeqLM.from_pretrained(
"Adedoyinjames/YAH-Tech-Chat-Bot",
token=HUGGINGFACE_TOKEN if HUGGINGFACE_TOKEN else None,
torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32
)
print("βœ… YAH Tech AI model loaded successfully!")
except Exception as e:
print(f"❌ Failed to load custom model: {e}")
print("πŸ”„ Falling back to standard FLAN-T5-base model...")
try:
self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
self.model = AutoModelForSeq2SeqLM.from_pretrained(self.model_name)
print("βœ… Standard AI model loaded successfully!")
except Exception as e2:
print(f"❌ Failed to load AI model: {e2}")
self.model = None
self.tokenizer = None
def _get_knowledge_response(self, user_input):
"""Check if the input matches any knowledge base entries"""
user_input_lower = user_input.lower()
# Check all knowledge bases
for category in KNOWLEDGE_BASE.values():
for key, response in category.items():
if key in user_input_lower:
return response
# Specific patterns for personal name
if any(word in user_input_lower for word in ["adedoyin", "ifeoluwa", "james"]):
return KNOWLEDGE_BASE["personal"]["who are you"]
# Specific patterns for company
if any(word in user_input_lower for word in ["yah tech", "your company", "venture studio"]):
return KNOWLEDGE_BASE["company"]["what is yah tech"]
# Greeting patterns
if any(word in user_input_lower for word in ["hello", "hi ", "hey"]):
return KNOWLEDGE_BASE["general"]["hello"]
# Thank you patterns
if any(word in user_input_lower for word in ["thank", "thanks"]):
return KNOWLEDGE_BASE["general"]["thanks"]
return None
def _create_smart_prompt(self, user_input, conversation_history):
"""Create a smart prompt that combines brand identity with general knowledge"""
brand_context = f"""You are YAH Bot, an AI assistant representing {PERSONAL_INFO['name']} and {COMPANY_INFO['name']}.
About {PERSONAL_INFO['name']}:
- {PERSONAL_INFO['role']}
- Goals: {', '.join(PERSONAL_INFO['goals'])}
About {COMPANY_INFO['name']}:
- {COMPANY_INFO['type']}
- Philosophy: "{COMPANY_INFO['philosophy']}"
- Purpose: {COMPANY_INFO['purpose']}
When answering questions:
1. First provide accurate, helpful information
2. Naturally mention YAH Tech or Adedoyin James when relevant
3. Be professional but conversational
4. Connect general topics to technology, business, or innovation when appropriate
Current conversation:"""
# Build conversation history
conv_history = ""
        for msg in conversation_history[-4:]:  # last 4 messages (two exchanges)
role = "Human" if msg["role"] == "user" else "Assistant"
conv_history += f"{role}: {msg['content']}\n"
prompt = f"{brand_context}\n{conv_history}Human: {user_input}\nAssistant:"
return prompt
def generate_response(self, user_input, conversation_history):
"""Generate response using knowledge base or AI model"""
# First check knowledge base for exact matches
knowledge_response = self._get_knowledge_response(user_input)
if knowledge_response:
return knowledge_response
# Use AI model for other queries
if self.model and self.tokenizer:
try:
# Create smart prompt with brand context
prompt = self._create_smart_prompt(user_input, conversation_history)
# Tokenize with proper handling
inputs = self.tokenizer(
prompt,
return_tensors="pt",
max_length=512,
truncation=True,
padding=True
)
# Generate response
with torch.no_grad():
outputs = self.model.generate(
inputs.input_ids,
max_length=256,
num_return_sequences=1,
temperature=0.8,
do_sample=True,
pad_token_id=self.tokenizer.pad_token_id or self.tokenizer.eos_token_id,
repetition_penalty=1.1
)
response = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
# Clean up the response and ensure brand mention when appropriate
cleaned_response = self._enhance_response(response, user_input)
return cleaned_response
except Exception as e:
print(f"Model error: {str(e)}")
return self._get_fallback_response(user_input)
# Fallback if model fails to load
return self._get_fallback_response(user_input)
def _enhance_response(self, response, user_input):
"""Enhance the response to include brand mentions when appropriate"""
user_input_lower = user_input.lower()
# Topics where brand mention would be natural
tech_topics = ["technology", "software", "app", "development", "coding", "programming"]
business_topics = ["business", "startup", "company", "venture", "entrepreneur"]
innovation_topics = ["innovation", "future", "solution", "system", "growth"]
# Check if the topic is relevant to our brand
is_relevant_topic = any(topic in user_input_lower for topic in
tech_topics + business_topics + innovation_topics)
# If response doesn't mention brand but topic is relevant, add a natural mention
if (is_relevant_topic and
"yah tech" not in response.lower() and
"adedoyin" not in response.lower()):
brand_connectors = [
f" {response} This aligns with the approach we take at YAH Tech under Adedoyin James' leadership.",
f" {response} At YAH Tech, we often work with similar concepts in our venture studio.",
f" {response} Speaking of which, Adedoyin James focuses on applying such principles at YAH Tech.",
f" {response} This is similar to the philosophy we embrace at YAH Tech - '{COMPANY_INFO['philosophy']}'."
]
return random.choice(brand_connectors)
return response
def _get_fallback_response(self, user_input):
"""Get a fallback response that still promotes the brand"""
fallback_responses = [
f"🌟 That's an interesting question about {user_input.split()[0] if user_input.split() else 'that'}! At {COMPANY_INFO['name']}, we believe in **{COMPANY_INFO['philosophy']}** to create solutions that matter.",
f"πŸš€ I'd be happy to discuss that. As {PERSONAL_INFO['name']}, I focus on building technology-driven ventures that create both **profit and societal value** through YAH Tech.",
f"πŸ’‘ Great question! Our approach at {COMPANY_INFO['name']} is to develop futuristic solutions through careful evaluation and systematic creation, which applies to many topics including this one.",
f"🎯 From my perspective as {PERSONAL_INFO['name']}, I focus on building systems that reshape the economic landscape through innovative technology at YAH Tech. How can I help you with this specifically?"
]
return random.choice(fallback_responses)
# Initialize the bot globally
yah_bot = YAHBot()
def chat_function(message, history):
"""Chat function for Gradio interface"""
# Convert Gradio history format to our format
conversation_history = []
for human_msg, assistant_msg in history:
conversation_history.append({"role": "user", "content": human_msg})
conversation_history.append({"role": "assistant", "content": assistant_msg})
# Generate response
response = yah_bot.generate_response(message, conversation_history)
return response
def api_chat_endpoint(message: str, history: list = None):
"""API endpoint function"""
if history is None:
history = []
conversation_history = []
for human_msg, assistant_msg in history:
conversation_history.append({"role": "user", "content": human_msg})
conversation_history.append({"role": "assistant", "content": assistant_msg})
response = yah_bot.generate_response(message, conversation_history)
return {
"response": response,
"status": "success",
"message": message
}
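# Example of the payload returned by api_chat_endpoint (hypothetical direct call):
#   api_chat_endpoint("What is YAH Tech?")
#   -> {"response": "...", "status": "success", "message": "What is YAH Tech?"}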
# Custom CSS for styling
custom_css = """
.gradio-container {
background: linear-gradient(135deg, #f8f9fa 0%, #e3f2fd 100%);
font-family: 'Inter', sans-serif;
}
.chatbot {
border-radius: 16px;
box-shadow: 0 4px 20px rgba(0,0,0,0.1);
}
.user {
background: linear-gradient(135deg, #e3f2fd 0%, #bbdefb 100%) !important;
border-left: 6px solid #2196f3 !important;
border-radius: 16px !important;
margin: 12px 0 !important;
}
.bot {
background: linear-gradient(135deg, #ffffff 0%, #f5f5f5 100%) !important;
border-left: 6px solid #78909c !important;
border: 1px solid #e0e0e0 !important;
border-radius: 16px !important;
margin: 12px 0 !important;
}
.textbox {
border: 2px solid #90caf9 !important;
border-radius: 25px !important;
padding: 15px 20px !important;
font-size: 16px !important;
background: white !important;
box-shadow: 0 2px 10px rgba(33, 150, 243, 0.1) !important;
}
.button {
background: linear-gradient(135deg, #2196f3 0%, #1976d2 100%) !important;
color: white !important;
border: none !important;
border-radius: 25px !important;
padding: 12px 28px !important;
font-weight: 600 !important;
font-size: 16px !important;
box-shadow: 0 4px 12px rgba(33, 150, 243, 0.3) !important;
}
.button:hover {
transform: translateY(-2px);
box-shadow: 0 6px 16px rgba(33, 150, 243, 0.4) !important;
}
.header-gradient {
background: linear-gradient(135deg, #2196f3 0%, #1976d2 100%);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
background-clip: text;
}
"""
# Create the Gradio interface
with gr.Blocks(css=custom_css, theme=gr.themes.Soft()) as demo:
gr.Markdown(
f"""
# <div class="header-gradient">YAH Tech Assistant</div>
🚀 Powered by AI • 💼 Venture Studio • 🌍 Economic Innovation
**👤 Founder**: {PERSONAL_INFO['name']}
**💼 Role**: {PERSONAL_INFO['role']}
**🏢 Company**: {COMPANY_INFO['name']} - {COMPANY_INFO['type']}
*Ask me anything - general knowledge or about YAH Tech!*
"""
)
gr.Markdown("---")
with gr.Row():
with gr.Column(scale=3):
chatbot = gr.Chatbot(
label="YAH Tech Chat",
bubble_full_width=False,
show_copy_button=True,
avatar_images=(
"https://via.placeholder.com/40/2196f3/ffffff?text=U",
"https://via.placeholder.com/40/78909c/ffffff?text=Y"
)
)
with gr.Row():
msg = gr.Textbox(
placeholder="πŸ’¬ Ask me anything - general knowledge or about YAH Tech...",
container=False,
scale=4
)
                clear_btn = gr.Button("🗑️ Clear", scale=1)
gr.Markdown("""
**💡 Example questions:**
- *"What's the capital of France?"* 🇫🇷
- *"Tell me about AI technology"* 🤖
- *"How to start a business?"* 💼
- *"What is YAH Tech?"* 🏢
- *"Who is Adedoyin James?"* 👤
""")
with gr.Column(scale=1):
gr.Markdown("### 🏒 About YAH Tech")
gr.Markdown(f"""
**🎯 Focus**: {COMPANY_INFO['type']}
**📈 Stage**: {COMPANY_INFO['stage']}
**💡 Philosophy**: *"{COMPANY_INFO['philosophy']}"*
**🎯 Purpose**: {COMPANY_INFO['purpose']}
**🛠️ Services**:
• App development
• Venture building
• Business system design
• Technology solutions
• Scalable platform development
""")
gr.Markdown("---")
gr.Markdown("### πŸ”— API Usage")
gr.Markdown("""
```python
import requests
response = requests.post(
"YOUR_SPACE_URL/api/predict",
json={
"data": [
"Your message here",
[]
]
}
)
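# Note: the exact endpoint name and response shape depend on the Gradio
# version running in the Space; inspect the returned JSON to locate the reply.
print(response.json())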
```
""")
# Event handlers
def respond(message, chat_history):
bot_message = chat_function(message, chat_history)
chat_history.append((message, bot_message))
return "", chat_history
msg.submit(respond, [msg, chatbot], [msg, chatbot])
clear_btn.click(lambda: None, None, chatbot, queue=False)
# Initial welcome message
def add_welcome_message(chat_history):
welcome_msg = f"""
👋 **Hello! I'm YAH Bot, your AI assistant for everything related to {PERSONAL_INFO['name']} and {COMPANY_INFO['name']}!**
I can help you with:
• 🌍 **General knowledge** (science, history, geography, etc.)
• 💡 **Technology & business insights**
• 🏢 **YAH Tech company information**
• 👤 **Adedoyin James' background & goals**
• 🚀 **Venture studio model & services**
**Try asking me anything - I'll provide helpful answers while naturally sharing insights about our work at YAH Tech!**
**How can I assist you today?** 🚀
"""
chat_history.append(("Hi", welcome_msg))
return chat_history
demo.load(add_welcome_message, [chatbot], [chatbot])
# For API usage, Gradio automatically creates endpoints at:
# - /api/predict for the main chat function
# - /info for space information
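# A hypothetical client-side sketch using the gradio_client package (the Space
# id and api_name below are assumptions; check the Space's "Use via API" page):
#   from gradio_client import Client
#   client = Client("Adedoyinjames/YAH-Tech-Chat-Bot")
#   result = client.predict("What is YAH Tech?", [], api_name="/respond")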
if __name__ == "__main__":
demo.launch(
share=True,
show_api=True,
server_name="0.0.0.
)