Update app.py
app.py CHANGED

@@ -1,15 +1,15 @@
 # Cultural Bias Explorer in Language Models
 # ----------------------------------------
-# This Python project uses LangChain +
+# This Python project uses LangChain + HuggingFaceHub to explore cultural bias
 # by retrieving answers to the same prompts using region-specific document bases.
 
 # Install necessary packages before running:
-# pip install langchain huggingface_hub faiss-cpu sentence-transformers unstructured wikipedia
+# pip install langchain langchain-community huggingface_hub faiss-cpu sentence-transformers unstructured wikipedia
 
 from langchain_community.document_loaders import WikipediaLoader
-from
-from
-from
+from langchain_community.embeddings import HuggingFaceEmbeddings
+from langchain_community.vectorstores import FAISS
+from langchain.llms import HuggingFaceHub
 from langchain.chains import RetrievalQA
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 import os
@@ -37,13 +37,13 @@ def create_vector_store(region_topic):
     return vectorstore
 
 # ------------------ MAIN LOGIC ------------------
-#
-#
+# If you're using Hugging Face Spaces, the token is already managed securely.
+# Otherwise, you can set it like this:
+# os.environ["HUGGINGFACEHUB_API_TOKEN"] = "your_token_here"
 
-llm =
-    repo_id="HuggingFaceH4/zephyr-7b-beta", # free
-    temperature
-    max_new_tokens=512
+llm = HuggingFaceHub(
+    repo_id="HuggingFaceH4/zephyr-7b-beta", # free and public model
+    model_kwargs={"temperature": 0.7, "max_new_tokens": 512}
 )
 
 for region in REGIONS:
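The diff shows only the return statement of create_vector_store as context, not its body. For orientation, a function consistent with the imports in this commit might look roughly like the sketch below; the Wikipedia document count, chunk sizes, and embedding model name are assumptions, not taken from app.py.

# Sketch only: a create_vector_store consistent with the imports in this diff.
# Document count, chunk sizes, and the embedding model are assumptions.
from langchain_community.document_loaders import WikipediaLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter

def create_vector_store(region_topic):
    # Pull a few Wikipedia articles for the region-specific topic
    docs = WikipediaLoader(query=region_topic, load_max_docs=3).load()
    # Split them into retrievable chunks
    splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(docs)
    # Embed locally and index with FAISS
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    vectorstore = FAISS.from_documents(chunks, embeddings)
    return vectorstore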
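The loop over REGIONS is likewise cut off at the end of both hunks, and REGIONS itself is defined outside this diff. Assuming it maps region names to Wikipedia topics (an assumption, shown with hypothetical values), the updated llm would typically be wired into a RetrievalQA chain per region along these lines:

# Sketch only: REGIONS values are hypothetical; llm and create_vector_store
# are the objects configured in app.py above.
from langchain.chains import RetrievalQA

REGIONS = {"Japan": "Culture of Japan", "Nigeria": "Culture of Nigeria"}  # hypothetical example

for region in REGIONS:
    vectorstore = create_vector_store(REGIONS[region])
    qa = RetrievalQA.from_chain_type(
        llm=llm,  # the HuggingFaceHub instance configured above
        chain_type="stuff",
        retriever=vectorstore.as_retriever(),
    )
    # Same prompt for every region, so differences in answers reflect the document base
    answer = qa.run("Describe a typical family celebration.")
    print(f"[{region}] {answer}")

Keeping the prompt fixed while swapping only the region-specific FAISS index is what lets the script compare answers across document bases, as the header comment describes.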