Update app.py
app.py
@@ -13,7 +13,6 @@ from llama_index.llms.llama_cpp.llama_utils import (
     messages_to_prompt,
     completion_to_prompt,
 )
-from llama_index.storage.chat_store.redis import RedisChatStore
 from llama_index.core.memory import ChatMemoryBuffer
 
 subprocess.run('pip install llama-cpp-python==0.2.75 --no-build-isolation --no-cache-dir --upgrade --only-binary=:all: --extra-index-url=https://abetlen.github.io/llama-cpp-python/whl/cu124', env={'CMAKE_ARGS': "-DLLAMA_CUDA=on"}, shell=True)
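For context, the removed import comes from llama-index's Redis chat-store integration, which ChatMemoryBuffer can use to persist chat history outside the process. The sketch below shows the library's documented wiring pattern; it is illustrative only and not code taken from this app, and the Redis URL, TTL, token limit, and chat_store_key are placeholder values. With the import gone, a ChatMemoryBuffer built without a chat_store would presumably fall back to the default in-memory store.

# Sketch: pairing RedisChatStore with ChatMemoryBuffer in llama-index
# (illustrative only; URL, TTL, token limit, and key are placeholders)
from llama_index.storage.chat_store.redis import RedisChatStore
from llama_index.core.memory import ChatMemoryBuffer

# Chat store backed by a Redis instance; ttl expires stored messages (seconds)
chat_store = RedisChatStore(redis_url="redis://localhost:6379", ttl=300)

# Memory buffer that reads/writes its history through the Redis chat store,
# keyed per conversation/session
memory = ChatMemoryBuffer.from_defaults(
    token_limit=3000,
    chat_store=chat_store,
    chat_store_key="user1",
)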