Commit ee7fb1c · "Time first" · kwabs22 committed · 1 parent: d6aaec5

app.py CHANGED
@@ -783,8 +783,71 @@ with gr.Blocks() as demo:
with gr.Accordion("Config and Asset Assistance - Click to open", open=False):
gr.HTML("Jonas Tyroller - This problem changes your perspective on game dev - minimise the cost of exploration so you can explore more (17:00) | dont make the same game again but worse (:) <br>https://youtu.be/o5K0uqhxgsE")
with gr.Accordion("Leaveraging Huggingface chat"):
- gr.
-
+ with gr.Tab("Hugging Chat"):
+ gr.HTML("https://huggingface.co/chat - Huggingface chat supports - State Management (Threads), Image Generation and editing, Websearch, Document parsing (PDF?), Assistants and larger models than zero gpu can support in July 2024 (Unquantised 30B and above)")
+ gr.HTML("Existing Assistants to use and planning custom assistants placeholder")
+ with gr.Tab("ZeroGPU"):
+ gr.HTML("Copy paste any old config to llm and ask to remix is the easiest <br>To bake 'Moral of the story' in you have to be very deliberate")
+ gr.HTML("UI can be media and all items can have media")
+ with gr.Tab("Any Request to Qwen2-0.5B"):
+ gr.HTML("Placeholder for https://huggingface.co/h2oai/h2o-danube3-500m-chat-GGUF and https://huggingface.co/OuteAI/Lite-Mistral-150M-v2-Instruct as alternative")
+ gr.HTML("https://huggingface.co/spaces/HuggingFaceTB/SmolLM-135M-Instruct-WebGPU 125 mdeol to be tested as alternative (and all up to 1.5b - how to delte a model in your code?) - Need to go over the dataset to see how to prompt it - https://huggingface.co/datasets/HuggingFaceTB/smollm-corpus ")
+ gr.HTML("Placeholder for qwen 2 72b as alternative use checkbox and gradio client api call")
+ gr.Markdown("# Qwen-0.5B-Instruct Language Model")
+ gr.Markdown("This demo uses the Qwen-0.5B-Instruct model to generate responses based on your input.")
+ gr.HTML("Example prompts: <br>I am writing a story about a chef. please write dishes to appear on the menu. <br>What are the most common decisions that a chef story would include? <br>What are the kinds problems that a chef story would include? <br>What are the kinds of out of reach goals that a chef story would include? <br>Continue this config - Paste any complete block of the config")
+
+ with gr.Row():
+ with gr.Column():
+ llmguide_prompt = gr.Textbox(lines=2, placeholder="Enter your prompt here...")
+ llmguide_stream_checkbox = gr.Checkbox(label="Enable streaming")
+ llmguide_submit_button = gr.Button("Generate")
+
+ with gr.Column():
+ llmguide_output = gr.Textbox(lines=10, label="Generated Response")
+ llmguide_tokens_per_second = gr.Textbox(label="Tokens per Second")
+
+ llmguide_submit_button.click(
+ llmguide_generate_response,
+ inputs=[llmguide_prompt, llmguide_stream_checkbox],
+ outputs=[llmguide_output, llmguide_tokens_per_second],
+ )
+ with gr.Tab("General RAG (Pathfinder?) Attempt"):
+ gr.HTML("https://huggingface.co/spaces/mteb/leaderboard - Source for SOTA - current using all-MiniLM-L6-v2")
+ gr.HTML("Placeholder for weak RAG Type Charcter interaction test aka input for JSON 'Knowledge Base' Input")
+ gr.Interface(
+ fn=process_query,
+ inputs=[
+ gr.Textbox(lines=2, placeholder="Enter your question here..."),
+ gr.Checkbox(label="Use RAG"),
+ gr.Checkbox(label="Stream output")
+ ],
+ outputs=[
+ gr.Textbox(label="Generated Response"),
+ gr.Textbox(label="Tokens per second"),
+ gr.Textbox(label="RAM Usage"),
+ gr.Textbox(label="Referenced Documents")
+ ],
+ title="RAG/Non-RAG Q&A System",
+ description="Ask a question with or without using RAG. The response is generated using a GPU-accelerated model. RAM usage and referenced document IDs (for RAG) are logged."
+ )
+ with gr.Tab("General FAQ Attempt"):
+ with gr.Tab("Front end as FAQ"):
+ FAQMainOutput = gr.TextArea(placeholder='Output will show here', value='')
+ FAQCustomButtonInput = gr.TextArea(lines=1, placeholder='Prompt goes here')
+
+ for category_name, category_prompts in FAQAllprompts.items():
+ with gr.Accordion(f"General {category_name} Pattern based", open=False):
+ with gr.Group():
+ for index, (prompt, _) in enumerate(category_prompts):
+ button = gr.Button(prompt)
+ button.click(llmguide_generate_response, inputs=[FAQCustomButtonInput, gr.State(index), gr.State(category_name)], outputs=FAQMainOutput)
+ with gr.Tab("Function Call as FAQ"):
+ gr.HTML("Placeholder for media task query routing as dual purpose in workflow and for user queries as psuedo RAG engine")
+ gr.HTML("https://llama.meta.com/docs/model-cards-and-prompt-formats/llama3_1/#built-in-tooling - The three built-in tools (brave_search, wolfram_alpha, and code interpreter) can be turned on using the system prompt")
+ with gr.Tab("Embedded Spaces"):
+ gr.HTML("In Asset Generation Tab under Text")
+ gr.Markdown("# Current Workflow = Mermaid Diagram to Story to JSON (through LLM and fix JSON by hand) to Media prompts to Asset Gneration")
with gr.Tab("Main problem to solve - Concept combination / integration and non-linear progression planning"):
gr.HTML("The story and the gameplay dont have to occur at the same time - eg. ")
gr.Markdown("## Prompts / Mermaid diagrams to be made from the ideas for workflow")
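The "Front end as FAQ" block added in this hunk creates one button per prompt inside a nested loop and passes gr.State(index) and gr.State(category_name) into each click handler, so every button carries its own loop values instead of all sharing the last iteration's. Below is a minimal, self-contained sketch of that pattern; the FAQAllprompts contents and the echo-style handler are illustrative stand-ins, not the Space's actual data or its llmguide_generate_response.

import gradio as gr

# Illustrative stand-in for the Space's FAQAllprompts dict
FAQAllprompts = {
    "Character": [("Write a character bio", None), ("List character goals", None)],
    "World": [("Describe the setting", None), ("List three key locations", None)],
}

def generate_response(user_text, prompt_index, category_name):
    # Echo-style stand-in for llmguide_generate_response: shows which button fired
    prompt, _ = FAQAllprompts[category_name][prompt_index]
    return f"[{category_name} #{prompt_index}] {prompt}\nUser input: {user_text}"

with gr.Blocks() as demo:
    FAQMainOutput = gr.TextArea(placeholder="Output will show here", value="")
    FAQCustomButtonInput = gr.TextArea(lines=1, placeholder="Prompt goes here")
    for category_name, category_prompts in FAQAllprompts.items():
        with gr.Accordion(f"General {category_name} Pattern based", open=False):
            with gr.Group():
                for index, (prompt, _) in enumerate(category_prompts):
                    button = gr.Button(prompt)
                    # gr.State snapshots the loop values for this button, sidestepping
                    # Python's late binding of index/category_name in closures
                    button.click(
                        generate_response,
                        inputs=[FAQCustomButtonInput, gr.State(index), gr.State(category_name)],
                        outputs=FAQMainOutput,
                    )

if __name__ == "__main__":
    demo.launch()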
@@ -809,85 +872,12 @@ with gr.Blocks() as demo:
for key, item in examplemermaidconceptblendingstrutures.items():
gr.Code(item, label=key)
with gr.Tab("Guidance / Themes to consider before you attempt config (Currently Qwen 0.5B)"):
-
- gr.HTML("UI can be media and all items can have media")
- with gr.Accordion("Old Ideas to merge", open=False):
- gr.HTML("Random Scenario / Song to 'full game' manual or auto is end goal ")
- gr.HTML("Componets (outside Code Support for Config): Decisions (and context explanation), Nested Sections, Media (Especially to affect decisions), Replayability (GTA and Tekken type mechanics in text form), Theme integration (Modified Varibles that affect UI or config order)")
- gr.HTML("Existing Games eg. GTA Heists - Same Map with overlapping branching narratives, Battlefront - Elites amongst Commoners, Tekken Casino (one mistake = 1/2 or 1/3 of your Resources) and Turn based: 'Tactics' type nintendo games, Chess (and any other tile based game) ")
- gr.HTML("Existing Game Rules for text - Cyberpunk RED, ")
- gr.HTML("Community playthrough = Tally of players choices, Random item placed in a random location - first person to get it wins, Survival by location or characters met")
- gr.HTML("Some Kinds of game skeletons ideas - Timelines, Graph as State machine paths, Economy ecosystem")
- gr.HTML("One prompt to be used to test models - <br>Please make 10 python lists for the types of media files and their purposes in a game and then use those lists to random generate a timeline of 20 items when the function is called <br>Great next suggest ways to improve this function to create better timelines")
- with gr.Tab("Main areas of considerations"):
- gr.HTML("")
- with gr.Tab("Structural Inspirations"):
- gr.HTML("GTA Heists - Replayability and stakes, Tekken - 2/3 mistakes = lost round ")
- gr.HTML("Sports Scores, ")
- with gr.Tab("Themes"):
- gr.HTML("")
- with gr.Tab("General FAQ Attempt"):
- with gr.Tab("Front end as FAQ"):
- FAQMainOutput = gr.TextArea(placeholder='Output will show here', value='')
- FAQCustomButtonInput = gr.TextArea(lines=1, placeholder='Prompt goes here')
-
- for category_name, category_prompts in FAQAllprompts.items():
- with gr.Accordion(f"General {category_name} Pattern based", open=False):
- with gr.Group():
- for index, (prompt, _) in enumerate(category_prompts):
- button = gr.Button(prompt)
- button.click(llmguide_generate_response, inputs=[FAQCustomButtonInput, gr.State(index), gr.State(category_name)], outputs=FAQMainOutput)
- with gr.Tab("Function Call as FAQ"):
- gr.HTML("Placeholder for media task query routing as dual purpose in workflow and for user queries as psuedo RAG engine")
- gr.HTML("https://llama.meta.com/docs/model-cards-and-prompt-formats/llama3_1/#built-in-tooling - The three built-in tools (brave_search, wolfram_alpha, and code interpreter) can be turned on using the system prompt")
-
- with gr.Tab("General RAG (Pathfinder?) Attempt"):
- gr.HTML("https://huggingface.co/spaces/mteb/leaderboard - Source for SOTA - current using all-MiniLM-L6-v2")
- gr.HTML("Placeholder for weak RAG Type Charcter interaction test aka input for JSON 'Knowledge Base' Input")
- gr.Interface(
- fn=process_query,
- inputs=[
- gr.Textbox(lines=2, placeholder="Enter your question here..."),
- gr.Checkbox(label="Use RAG"),
- gr.Checkbox(label="Stream output")
- ],
- outputs=[
- gr.Textbox(label="Generated Response"),
- gr.Textbox(label="Tokens per second"),
- gr.Textbox(label="RAM Usage"),
- gr.Textbox(label="Referenced Documents")
- ],
- title="RAG/Non-RAG Q&A System",
- description="Ask a question with or without using RAG. The response is generated using a GPU-accelerated model. RAM usage and referenced document IDs (for RAG) are logged."
- )
-
- with gr.Tab("Any Request to Qwen2-0.5B"):
- gr.HTML("Placeholder for https://huggingface.co/h2oai/h2o-danube3-500m-chat-GGUF and https://huggingface.co/OuteAI/Lite-Mistral-150M-v2-Instruct as alternative")
- gr.HTML("https://huggingface.co/spaces/HuggingFaceTB/SmolLM-135M-Instruct-WebGPU 125 mdeol to be tested as alternative (and all up to 1.5b - how to delte a model in your code?) - Need to go over the dataset to see how to prompt it - https://huggingface.co/datasets/HuggingFaceTB/smollm-corpus ")
- gr.HTML("Placeholder for qwen 2 72b as alternative use checkbox and gradio client api call")
- gr.Markdown("# Qwen-0.5B-Instruct Language Model")
- gr.Markdown("This demo uses the Qwen-0.5B-Instruct model to generate responses based on your input.")
- gr.HTML("Example prompts: <br>I am writing a story about a chef. please write dishes to appear on the menu. <br>What are the most common decisions that a chef story would include? <br>What are the kinds problems that a chef story would include? <br>What are the kinds of out of reach goals that a chef story would include? <br>Continue this config - Paste any complete block of the config")
-
- with gr.Row():
- with gr.Column():
- llmguide_prompt = gr.Textbox(lines=2, placeholder="Enter your prompt here...")
- llmguide_stream_checkbox = gr.Checkbox(label="Enable streaming")
- llmguide_submit_button = gr.Button("Generate")
-
- with gr.Column():
- llmguide_output = gr.Textbox(lines=10, label="Generated Response")
- llmguide_tokens_per_second = gr.Textbox(label="Tokens per Second")
-
- llmguide_submit_button.click(
- llmguide_generate_response,
- inputs=[llmguide_prompt, llmguide_stream_checkbox],
- outputs=[llmguide_output, llmguide_tokens_per_second],
- )
+
with gr.Tab("Themes and Topics"):
gr.HTML("https://en.wikipedia.org/wiki/History#Periods")

- with gr.Tab("New Config Proto Assist
+ with gr.Tab("New Config Proto Assist"):
+ gr.HTML("Trying to abstract the process into one worflow is beyond me so multiple paths to goal (config) is the aim now")
with gr.Tab("Linear - Player List to Empty Config with Edit support"):
with gr.Accordion("Can copy in the Test Example State Machine tab - only linear path for now", open=False):
gr.Markdown("# Story and Timeline Generator")
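Both the removed copy and the re-added copy of the Qwen2-0.5B block wire llmguide_submit_button.click to llmguide_generate_response with a streaming checkbox and two outputs: the response text and a tokens-per-second readout. That function is defined elsewhere in app.py and is not part of this diff; the sketch below only illustrates the generator shape such a two-output streaming handler can take in Gradio, with a dummy word-by-word source standing in for the model.

import time
import gradio as gr

def llmguide_generate_response_sketch(prompt, stream):
    # Dummy word-by-word source standing in for the Qwen2-0.5B generation loop
    tokens = f"(echoing) {prompt}".split()
    start = time.time()
    text = ""
    for count, token in enumerate(tokens, start=1):
        text += token + " "
        time.sleep(0.05)  # simulate generation latency
        tokens_per_second = count / (time.time() - start)
        if stream:
            # Yielding partial results updates both output boxes while "generation" runs
            yield text, f"{tokens_per_second:.2f}"
    if not stream:
        yield text, f"{len(tokens) / (time.time() - start):.2f}"

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            llmguide_prompt = gr.Textbox(lines=2, placeholder="Enter your prompt here...")
            llmguide_stream_checkbox = gr.Checkbox(label="Enable streaming")
            llmguide_submit_button = gr.Button("Generate")
        with gr.Column():
            llmguide_output = gr.Textbox(lines=10, label="Generated Response")
            llmguide_tokens_per_second = gr.Textbox(label="Tokens per Second")
    llmguide_submit_button.click(
        llmguide_generate_response_sketch,
        inputs=[llmguide_prompt, llmguide_stream_checkbox],
        outputs=[llmguide_output, llmguide_tokens_per_second],
    )

if __name__ == "__main__":
    demo.launch()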
@@ -974,13 +964,16 @@ with gr.Blocks() as demo:
mermaideditoriframebtn.click(fn=lambda x: "<iframe src='https://mermaid.live/' width='100%' height='1000px'></iframe>", outputs=mermaideditoriframe)
with gr.Accordion("Mermaid Structures - click to open", open=False):
for key, item in mermaidstorystructures.items():
- gr.
+ with gr.Accordion(key, open=False):
+ gr.Code(item, label=key)

with gr.Tab("Branching - Network analysis to Game config"):
gr.HTML("Placeholder for analysing multiple stories for their network structures and creating general rules for a strucutre generator based of named entity recognition and bias to locations or people - The extreme long way")

+ with gr.Tab("Linear - Chess PNG to Game config"):
+ gr.HTML("Any Chess match can serve as end of game final battle")

- with gr.Tab("Existing Config Crafting Progression
+ with gr.Tab("Existing Config Crafting Progression"):
with gr.Accordion("Test for config to gradio components order - ignore for now", open=False ):
gr.HTML("Placeholder for changing the render below to the one above for new config but with the ability to upload files aka the media field should be file uploader / dropdowns for all files that have been uploaded")
gr.Markdown("Asset Generation")
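This hunk completes the unfinished gr. call so that each entry of mermaidstorystructures renders as a gr.Code block inside its own collapsed accordion. A self-contained sketch of that rendering loop, with a placeholder dict standing in for mermaidstorystructures, which is defined elsewhere in app.py:

import gradio as gr

# Placeholder standing in for the mermaidstorystructures dict defined elsewhere in app.py
mermaidstorystructures = {
    "Linear story": "flowchart TD\n    Start --> Middle --> End",
    "Branching story": "flowchart TD\n    Start --> PathA\n    Start --> PathB\n    PathA --> End\n    PathB --> End",
}

with gr.Blocks() as demo:
    with gr.Accordion("Mermaid Structures - click to open", open=False):
        for key, item in mermaidstorystructures.items():
            # One collapsed accordion per diagram keeps long Mermaid sources tidy
            with gr.Accordion(key, open=False):
                gr.Code(item, label=key)

if __name__ == "__main__":
    demo.launch()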
@@ -1029,6 +1022,21 @@ Creating more diverse paths through the game""")
""" + display_tipsupdatedconfigatbeinningofthisspace + """
</div>
</div>""")
+ with gr.Tab("Old Ideas to merge"):
+ gr.HTML("Random Scenario / Song to 'full game' manual or auto is end goal ")
+ gr.HTML("Componets (outside Code Support for Config): Decisions (and context explanation), Nested Sections, Media (Especially to affect decisions), Replayability (GTA and Tekken type mechanics in text form), Theme integration (Modified Varibles that affect UI or config order)")
+ gr.HTML("Existing Games eg. GTA Heists - Same Map with overlapping branching narratives, Battlefront - Elites amongst Commoners, Tekken Casino (one mistake = 1/2 or 1/3 of your Resources) and Turn based: 'Tactics' type nintendo games, Chess (and any other tile based game) ")
+ gr.HTML("Existing Game Rules for text - Cyberpunk RED, ")
+ gr.HTML("Community playthrough = Tally of players choices, Random item placed in a random location - first person to get it wins, Survival by location or characters met")
+ gr.HTML("Some Kinds of game skeletons ideas - Timelines, Graph as State machine paths, Economy ecosystem")
+ gr.HTML("One prompt to be used to test models - <br>Please make 10 python lists for the types of media files and their purposes in a game and then use those lists to random generate a timeline of 20 items when the function is called <br>Great next suggest ways to improve this function to create better timelines")
+ with gr.Tab("Main areas of considerations"):
+ gr.HTML("")
+ with gr.Tab("Structural Inspirations"):
+ gr.HTML("GTA Heists - Replayability and stakes, Tekken - 2/3 mistakes = lost round ")
+ gr.HTML("Sports Scores, ")
+ with gr.Tab("Themes"):
+ gr.HTML("")

with gr.Row():
with gr.Column(scale=1):
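The "Old Ideas to merge" tab added in this hunk bakes in a model test prompt: ten Python lists of media types and their purposes, then a function that randomly generates a 20-item timeline from them. Below is a trimmed sketch of the kind of function that prompt asks for; the categories and purposes are invented examples, not content from the Space.

import random

# Invented example lists; the prompt in the tab above asks a model for ten of these
media_types = {
    "image": ["character portrait", "location art", "item icon"],
    "audio": ["background music", "sound effect", "voice line"],
    "video": ["cutscene", "tutorial clip"],
    "text": ["dialogue", "lore entry", "quest description"],
}

def generate_timeline(num_items=20, seed=None):
    """Randomly draw (media type, purpose) pairs to sketch a game timeline."""
    rng = random.Random(seed)
    timeline = []
    for step in range(1, num_items + 1):
        media = rng.choice(list(media_types))
        purpose = rng.choice(media_types[media])
        timeline.append((step, media, purpose))
    return timeline

if __name__ == "__main__":
    for step, media, purpose in generate_timeline(seed=0):
        print(f"{step:02d}. {media}: {purpose}")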