cd@bziiit.com
committed on
Commit
·
63e33f8
1
Parent(s):
59ba192
Add llama API
Browse files- model/ModelIntegrations.py +9 -4
- model/selector.py +4 -2
- requirements.txt +1 -1
model/ModelIntegrations.py
CHANGED
|
@@ -1,9 +1,13 @@
|
|
|
|
|
|
|
|
| 1 |
from .ModelStrategy import ModelStrategy
|
| 2 |
|
| 3 |
from langchain_community.chat_models import ChatOpenAI
|
| 4 |
from langchain_mistralai.chat_models import ChatMistralAI
|
| 5 |
from langchain_anthropic import ChatAnthropic
|
| 6 |
-
|
|
|
|
|
|
|
| 7 |
|
| 8 |
class MistralModel(ModelStrategy):
|
| 9 |
def get_model(self, model_name):
|
|
@@ -20,9 +24,10 @@ class AnthropicModel(ModelStrategy):
|
|
| 20 |
return ChatAnthropic(model=model_name)
|
| 21 |
|
| 22 |
|
| 23 |
-
class
|
| 24 |
def get_model(self, model_name):
|
| 25 |
-
|
|
|
|
| 26 |
|
| 27 |
class ModelManager():
|
| 28 |
def __init__(self):
|
|
@@ -30,7 +35,7 @@ class ModelManager():
|
|
| 30 |
"mistral": MistralModel(),
|
| 31 |
"openai": OpenAIModel(),
|
| 32 |
"anthropic": AnthropicModel(),
|
| 33 |
-
"
|
| 34 |
}
|
| 35 |
|
| 36 |
def get_model(self, provider, model_name):
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
from .ModelStrategy import ModelStrategy
|
| 4 |
|
| 5 |
from langchain_community.chat_models import ChatOpenAI
|
| 6 |
from langchain_mistralai.chat_models import ChatMistralAI
|
| 7 |
from langchain_anthropic import ChatAnthropic
|
| 8 |
+
|
| 9 |
+
from llamaapi import LlamaAPI
|
| 10 |
+
from langchain_experimental.llms import ChatLlamaAPI
|
| 11 |
|
| 12 |
class MistralModel(ModelStrategy):
|
| 13 |
def get_model(self, model_name):
|
|
|
|
| 24 |
return ChatAnthropic(model=model_name)
|
| 25 |
|
| 26 |
|
| 27 |
+
class LlamaAPIModel(ModelStrategy):
    """Model strategy that wraps the Llama API behind LangChain's chat interface.

    Mirrors the sibling strategies (MistralModel, OpenAIModel, AnthropicModel)
    so ModelManager can dispatch on the "llama" provider key.
    """

    def get_model(self, model_name):
        """Build a ChatLlamaAPI chat model for *model_name*.

        Reads the API token from the LLAMA_API_KEY environment variable.

        Raises:
            RuntimeError: if LLAMA_API_KEY is unset or empty — fail fast with
                a clear message instead of silently passing ``None`` as the
                token and getting an opaque downstream auth error.
        """
        api_key = os.environ.get("LLAMA_API_KEY")
        if not api_key:
            raise RuntimeError(
                "LLAMA_API_KEY environment variable is not set; "
                "it is required to use the Llama API provider"
            )
        llama = LlamaAPI(api_key)
        return ChatLlamaAPI(client=llama, model=model_name)
|
| 31 |
|
| 32 |
class ModelManager():
|
| 33 |
def __init__(self):
|
|
|
|
| 35 |
"mistral": MistralModel(),
|
| 36 |
"openai": OpenAIModel(),
|
| 37 |
"anthropic": AnthropicModel(),
|
| 38 |
+
"llama": LlamaAPIModel()
|
| 39 |
}
|
| 40 |
|
| 41 |
def get_model(self, provider, model_name):
|
model/selector.py
CHANGED
|
@@ -16,8 +16,10 @@ def ModelSelector():
|
|
| 16 |
"claude-3-opus-20240229": "anthropic.claude-3-opus-20240229",
|
| 17 |
"claude-3-sonnet-20240229": "anthropic.claude-3-sonnet-20240229",
|
| 18 |
},
|
| 19 |
-
# "
|
| 20 |
-
# "llama3": "
|
|
|
|
|
|
|
| 21 |
# }
|
| 22 |
}
|
| 23 |
|
|
|
|
| 16 |
"claude-3-opus-20240229": "anthropic.claude-3-opus-20240229",
|
| 17 |
"claude-3-sonnet-20240229": "anthropic.claude-3-sonnet-20240229",
|
| 18 |
},
|
| 19 |
+
# "llama": {
|
| 20 |
+
# "llama3.2-11b-vision": "llama.llama3.2-11b-vision",
|
| 21 |
+
# "llama3.2-1b": "llama.llama3.2-1b",
|
| 22 |
+
# "llama3.2-3b": "llama.llama3.2-3b"
|
| 23 |
# }
|
| 24 |
}
|
| 25 |
|
requirements.txt
CHANGED
|
@@ -15,5 +15,5 @@ langchain-community
|
|
| 15 |
langchain-pinecone
|
| 16 |
langchain_mistralai
|
| 17 |
langchain_anthropic
|
| 18 |
-
|
| 19 |
pyyaml
|
|
|
|
| 15 |
langchain-pinecone
|
| 16 |
langchain_mistralai
|
| 17 |
langchain_anthropic
|
| 18 |
+
llamaapi
|
| 19 |
pyyaml
|