kemo2003 committed on
Commit
8092193
·
verified ·
1 Parent(s): 3092ac6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -2
app.py CHANGED
@@ -1,17 +1,33 @@
1
  import gradio as gr
2
  import torch
3
- from transformers import AutoModelForCausalLM, AutoTokenizer, Blip2Processor, Blip2ForConditionalGeneration, WhisperProcessor, WhisperForConditionalGeneration
4
  from TTS.api import TTS
5
  import numpy as np
6
  from PIL import Image
7
  import fitz # PyMuPDF
8
  import pandas as pd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  # Initialize device
11
  device = "cuda" if torch.cuda.is_available() else "cpu"
12
 
13
  # Text model: Mistral 7B
14
- mistral_model_name = "mistralai/Mistral-7B-Instruct-v0.1"
15
  mistral_tokenizer = AutoTokenizer.from_pretrained(mistral_model_name)
16
  mistral_model = AutoModelForCausalLM.from_pretrained(mistral_model_name, torch_dtype=torch.float16 if device == "cuda" else torch.float32).to(device)
17
 
 
1
  import gradio as gr
2
  import torch
 
3
  from TTS.api import TTS
4
  import numpy as np
5
  from PIL import Image
6
  import fitz # PyMuPDF
7
  import pandas as pd
8
+ from huggingface_hub import login
9
+ from transformers import AutoTokenizer, AutoModelForCausalLM
10
+ import os
11
+
12
+ # تسجيل الدخول باستخدام المفتاح من المتغير البيئي
13
+ token = os.getenv("HF_API_TOKEN")
14
+ if token:
15
+ login(token=token)
16
+ else:
17
+ raise ValueError("لم يتم تعيين المفتاح البيئي HF_API_TOKEN.")
18
+
19
+ # اسم النموذج
20
+ mistral_model_name = "mistralai/Mistral-7B-Instruct-v0.1"
21
+
22
+ # تحميل التوكنيزر والموديل
23
+ mistral_tokenizer = AutoTokenizer.from_pretrained(mistral_model_name)
24
+ mistral_model = AutoModelForCausalLM.from_pretrained(mistral_model_name)
25
 
26
  # Initialize device
27
  device = "cuda" if torch.cuda.is_available() else "cpu"
28
 
29
  # Text model: Mistral 7B
30
+ mistral_model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
31
  mistral_tokenizer = AutoTokenizer.from_pretrained(mistral_model_name)
32
  mistral_model = AutoModelForCausalLM.from_pretrained(mistral_model_name, torch_dtype=torch.float16 if device == "cuda" else torch.float32).to(device)
33