import gradio
import prompts
import json
from together import Together
import base64
import numpy
from PIL import Image
from io import BytesIO
import uuid
import datetime
import os
from huggingface_hub import HfApi


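# Credentials are read from environment variables. PASSWORDS is a colon-separated
# list of shared passwords that may be entered in place of a Together AI API key.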
HF_KEY = os.environ.get("HF_KEY", "")
TOGETHER_KEY = os.environ.get("TOGETHER_KEY", "")
PASSWORDS = os.environ.get("PASSWORDS", "")

hf_client = HfApi(
    token = HF_KEY
)

together_client = Together(
    api_key = TOGETHER_KEY
)

def process_token(secret_token):
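    """Rebuild the global Together client from a user-supplied token.

    A token matching one of the shared passwords in PASSWORDS is swapped for the
    server's own TOGETHER_KEY before the client is created.
    """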
    global together_client
    try:
        passwords = PASSWORDS.split(":")

        # A shared password may be entered instead of a real key; swap in the
        # server's own Together AI key in that case.
        if secret_token in passwords:
            secret_token = TOGETHER_KEY

        together_client = Together(
            api_key = secret_token
        )
        gradio.Info("API token has been set successfully.", duration = 2)
        return secret_token
    except Exception:
        # If the client cannot be created, keep the previous client and return
        # the token unchanged.
        return secret_token
    
    

def assisted_prompt_generation(prompt):
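    """Expand a rough user prompt into a detailed image prompt.

    Uses the free Llama 3.3 70B model with the assisted_prompt_generator system
    prompt and expects a JSON object containing "return_code" and "prompt".
    """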
    gradio.Info("Assisting prompt generation...", duration = 2)
    try:
        response = together_client.chat.completions.create(
            model = "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages = [
                {"role": "system", "content": prompts.assisted_prompt_generator.system_prompt},
                {"role": "user", "content": f"{prompt}"},
                {"role": "assistant", "content": ""}
                ],
            response_format = {"type": "json_object"}
        )
        output = json.loads(response.choices[0].message.content)
        
        # A return_code of 400 means the model reported a failure; show a warning
        # toast (gradio.Error only displays when raised) and pass its text through.
        if output["return_code"] == 400:
            gradio.Warning("Prompt generation failed.", duration = 5)
            return output["prompt"]
        else:
            gradio.Info("Prompt generated successfully.", duration = 2)
            return output["prompt"]
    except Exception:
        gradio.Warning("Prompt generation failed.", duration = 5)
        return "Failed"

def verify_prompt(prompt):
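    """Check a prompt with the prompt_verification_agent before image generation.

    Returns the prompt unchanged when it passes, or the sentinel string "Failed"
    when it is rejected or an error occurs.
    """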
    gradio.Info("Verifying prompt...", duration = 2)
    try:
        response = together_client.chat.completions.create(
            model = "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages = [
                {"role": "system", "content": prompts.prompt_verification_agent.system_prompt},
                {"role": "user", "content": f"{prompt}"},
                {"role": "assistant", "content": ""}
                ],
            response_format = {"type": "json_object"}
        )
        output = json.loads(response.choices[0].message.content)
        
        # A return_code of 400 means the prompt was rejected; return the sentinel
        # "Failed" so generate_image can skip the API call.
        if output["return_code"] == 400:
            gradio.Warning("Prompt verification failed.", duration = 5)
            return "Failed"
        else:
            gradio.Info("Prompt verified successfully.", duration = 2)
            return prompt
    except Exception:
        gradio.Warning("Prompt verification failed.", duration = 5)
        return "Failed"


def generate_image(prompt):
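    """Generate a 1024x1024 image with FLUX.1-schnell and return it as a numpy array."""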
    # Upstream failures arrive as the sentinel string "Failed"; return a black
    # placeholder image instead of calling the API.
    if prompt == "Failed":
        gradio.Warning("Prompt generation failed.", duration = 5)
        return numpy.zeros((1024, 1024, 3), dtype = numpy.uint8)
    
    response = together_client.images.generate(
        prompt = prompt,
        model = "black-forest-labs/FLUX.1-schnell-Free",
        width = 1024,
        height = 1024,
        steps = 4,
        n = 1,
        response_format = "b64_json"
    )
    b_64_image = response.data[0].b64_json
    image_data = base64.b64decode(b_64_image)
    image = Image.open(BytesIO(image_data))
    image_np = numpy.array(image)
    return image_np


def save_image(prompt, image):
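    """Upload the generated image and its prompt to the xqt/fashion_model_generator dataset.

    Files are written locally under a random UUID, uploaded into an
    images/{year}/{month}/{day}/{hour}/{minute}/ folder, and then deleted locally.
    """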
    temp_id = uuid.uuid4()
    datetime_now = datetime.datetime.now()
    year = datetime_now.year
    month = datetime_now.month
    day = datetime_now.day
    hour = datetime_now.hour
    minute = datetime_now.minute
    
    image_PIL = Image.fromarray(image)
    image_PIL.save(f"{temp_id}.png")
    # Store the prompt alongside the image so the dataset keeps both.
    prompt_record = {
        "prompt": prompt,
    }
    with open(f"{temp_id}.json", "w") as prompt_file:
        json.dump(prompt_record, prompt_file)
    hf_client.upload_file(
        path_or_fileobj = f"{temp_id}.png",
        path_in_repo = f"images/{year}/{month}/{day}/{hour}/{minute}/{temp_id}.png",
        repo_type = "dataset",
        repo_id = "xqt/fashion_model_generator",
        commit_message = f"ADD: image {temp_id}.png",
    )
    hf_client.upload_file(
        path_or_fileobj = f"{temp_id}.json",
        path_in_repo = f"images/{year}/{month}/{day}/{hour}/{minute}/{temp_id}.json",
        repo_type = "dataset",
        repo_id = "xqt/fashion_model_generator",
        commit_message = f"ADD: prompt {temp_id}.json",
    )
    gradio.Info(f"Image and prompt saved successfully <a href = \"https://huggingface.co/datasets/xqt/fashion_model_generator/blob/main/images/{year}/{month}/{day}/{hour}/{minute}/{temp_id}.png\"> here </a>", duration = 5)
    os.remove(f"{temp_id}.png")
    os.remove(f"{temp_id}.json")
    return
    

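# UI: an API-key box, a prompt box with an assist button, and the output image.
# Submitting the prompt chains verification -> generation -> dataset upload.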
with gradio.Blocks(fill_width = False) as app:
    gradio.Markdown("""
    # Fashion Model Generator
    ## This app generates images of fashion models.
    Synthetic Dataset: [xqt/fashion_model_generator](https://huggingface.co/datasets/xqt/fashion_model_generator)
    """)
    
    api_token_input = gradio.Textbox(label = "Together AI API Key (the key is never stored; only free models are used)", placeholder = "Enter your Together AI API Key here.", type = "password")
    
    with gradio.Row(equal_height = True):
        with gradio.Column(scale = 4):
            prompt_input = gradio.Textbox(label = "Prompt", placeholder = "Enter your prompt here.")
        with gradio.Column(scale = 1):
            prompt_assist = gradio.Button(value = "Prompt Assist", icon = "assets/wand-magic-sparkles-solid.svg")
    
    image_output = gradio.Image(label="Generated Image")
    
    api_token_input.submit(process_token, inputs = [api_token_input], outputs = [api_token_input])
    prompt_assist.click(assisted_prompt_generation, inputs = [prompt_input], outputs = [prompt_input])
    prompt_input.submit(verify_prompt, inputs = [prompt_input], outputs = [prompt_input]).then(
        generate_image, inputs = [prompt_input], outputs = [image_output]
    ).then(
        save_image, inputs = [prompt_input, image_output], outputs = []
    )

if __name__ == "__main__":
    app.launch()