MekkCyber committed · Commit 57cdcf8 · Parent(s): d6df0f3

fix
app.py CHANGED
@@ -124,14 +124,17 @@ def quantize_and_save(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToke
     return "group_size must be a number"

     group_size = int(group_size)
-
+
     quantized_model = quantize_model(model_name, quantization_type, group_size, oauth_token, profile.username)
     return save_model(quantized_model, model_name, quantization_type, group_size, profile.username, oauth_token, quantized_model_name)
     # except Exception as e :
     # return e


-with gr.Blocks(theme=gr.themes.Ocean()) as app:
+css="""/* Custom CSS to allow scrolling */
+.gradio-container {overflow-y: auto;}
+"""
+with gr.Blocks(theme=gr.themes.Ocean(), css=css) as app:
     gr.Markdown(
         """
         # 🚀 LLM Model Quantization App
@@ -141,7 +144,7 @@ with gr.Blocks(theme=gr.themes.Ocean()) as app:
     )


-    gr.LoginButton(elem_id="login-button", elem_classes="center-button")
+    gr.LoginButton(elem_id="login-button", elem_classes="center-button", min_width=250)

     m1 = gr.Markdown()
     app.load(hello, inputs=None, outputs=m1)
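For context, a minimal, self-contained sketch of how the pieces touched by this commit fit together, assuming Gradio 5.x with Hugging Face OAuth enabled on the Space. The body of the hello callback, the demo and greeting names, and the greeting text are illustrative stand-ins and not part of the actual app.py; the quantization logic is omitted.

# Sketch only: custom CSS passed to gr.Blocks plus a LoginButton with min_width,
# mirroring the two changes in this commit. The hello body, demo, and greeting
# names below are assumptions for illustration, not the app's real implementation.
import gradio as gr

css = """/* Custom CSS to allow scrolling */
.gradio-container {overflow-y: auto;}
"""

def hello(profile: gr.OAuthProfile | None) -> str:
    # Gradio injects the OAuth profile for parameters typed gr.OAuthProfile;
    # it is None when no user is signed in.
    if profile is None:
        return "Please log in with your Hugging Face account."
    return f"Hello, {profile.username}!"

with gr.Blocks(theme=gr.themes.Ocean(), css=css) as demo:
    # min_width=250 asks the layout to keep the button at least 250 px wide.
    gr.LoginButton(elem_id="login-button", elem_classes="center-button", min_width=250)
    greeting = gr.Markdown()
    # Runs on page load; OAuth-typed parameters are not counted as regular inputs.
    demo.load(hello, inputs=None, outputs=greeting)

if __name__ == "__main__":
    demo.launch()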