File size: 6,050 Bytes
18bc133 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/4 01:25
@Author : alexanderwu
@File : config2.py
"""
import os
from pathlib import Path
from typing import Dict, Iterable, List, Literal, Optional
from pydantic import BaseModel, Field, model_validator
from metagpt.configs.browser_config import BrowserConfig
from metagpt.configs.embedding_config import EmbeddingConfig
from metagpt.configs.exp_pool_config import ExperiencePoolConfig
from metagpt.configs.llm_config import LLMConfig, LLMType
from metagpt.configs.mermaid_config import MermaidConfig
from metagpt.configs.omniparse_config import OmniParseConfig
from metagpt.configs.redis_config import RedisConfig
from metagpt.configs.role_custom_config import RoleCustomConfig
from metagpt.configs.role_zero_config import RoleZeroConfig
from metagpt.configs.s3_config import S3Config
from metagpt.configs.search_config import SearchConfig
from metagpt.configs.workspace_config import WorkspaceConfig
from metagpt.const import CONFIG_ROOT, METAGPT_ROOT
from metagpt.utils.yaml_model import YamlModel
class CLIParams(BaseModel):
    """Parameters passed in from the command line.

    When ``project_path`` is provided, incremental mode is switched on and the
    project name is derived from the path's final component unless explicitly set.
    """

    project_path: str = ""
    project_name: str = ""
    inc: bool = False
    reqa_file: str = ""
    max_auto_summarize_code: int = 0
    git_reinit: bool = False

    @model_validator(mode="after")
    def check_project_path(self):
        """Derive `inc` and `project_name` from a non-empty `project_path`."""
        if not self.project_path:
            return self
        # An existing project path implies incremental (continue-work) mode.
        self.inc = True
        if not self.project_name:
            self.project_name = Path(self.project_path).name
        return self
class Config(CLIParams, YamlModel):
    """Configurations for MetaGPT.

    Built by merging, from lowest to highest priority: environment variables,
    the default YAML config files, and explicit keyword overrides.
    """

    # Key Parameters
    llm: LLMConfig
    # RAG Embedding
    embedding: EmbeddingConfig = EmbeddingConfig()
    # omniparse
    omniparse: OmniParseConfig = OmniParseConfig()
    # Global Proxy. Will be used if llm.proxy is not set
    proxy: str = ""
    # Tool Parameters
    search: SearchConfig = SearchConfig()
    enable_search: bool = False
    browser: BrowserConfig = BrowserConfig()
    mermaid: MermaidConfig = MermaidConfig()
    # Storage Parameters
    s3: Optional[S3Config] = None
    redis: Optional[RedisConfig] = None
    # Misc Parameters
    repair_llm_output: bool = False
    prompt_schema: Literal["json", "markdown", "raw"] = "json"
    workspace: WorkspaceConfig = Field(default_factory=WorkspaceConfig)
    enable_longterm_memory: bool = False
    code_validate_k_times: int = 2
    # Experience Pool Parameters
    exp_pool: ExperiencePoolConfig = Field(default_factory=ExperiencePoolConfig)
    # Will be removed in the future
    metagpt_tti_url: str = ""
    language: str = "English"
    redis_key: str = "placeholder"
    iflytek_app_id: str = ""
    iflytek_api_secret: str = ""
    iflytek_api_key: str = ""
    azure_tts_subscription_key: str = ""
    azure_tts_region: str = ""
    _extra: dict = dict()  # extra config dict (pydantic private attribute)
    # Role's custom configuration
    roles: Optional[List[RoleCustomConfig]] = None
    # RoleZero's configuration
    role_zero: RoleZeroConfig = Field(default_factory=RoleZeroConfig)

    @classmethod
    def from_home(cls, path):
        """Load config from ~/.metagpt/<path>; return None if the file does not exist."""
        pathname = CONFIG_ROOT / path
        if not pathname.exists():
            return None
        # Use cls rather than Config so subclasses deserialize as their own type.
        return cls.from_yaml_file(pathname)

    @classmethod
    def default(cls, reload: bool = False, **kwargs) -> "Config":
        """Load default config
        - Priority: env < default_config_paths < kwargs
        - Inside default_config_paths, the latter one overwrites the former one

        Args:
            reload: If True, rebuild the config even if a cached instance exists.
            **kwargs: Highest-priority overrides applied on top of env/YAML values.
        """
        default_config_paths = (
            METAGPT_ROOT / "config/config2.yaml",
            CONFIG_ROOT / "config2.yaml",
        )
        if reload or default_config_paths not in _CONFIG_CACHE:
            # Later entries in `dicts` win: env < YAML files (in order) < kwargs.
            dicts = [dict(os.environ), *(cls.read_yaml(path) for path in default_config_paths), kwargs]
            final = merge_dict(dicts)
            _CONFIG_CACHE[default_config_paths] = cls(**final)
        return _CONFIG_CACHE[default_config_paths]

    @classmethod
    def from_llm_config(cls, llm_config: dict):
        """Build a Config from a bare LLM config dict (plus environment variables).

        example:
        llm_config = {"api_type": "xxx", "api_key": "xxx", "model": "xxx"}
        gpt4 = Config.from_llm_config(llm_config)
        A = Role(name="A", profile="Democratic candidate", goal="Win the election", actions=[a1], watch=[a2], config=gpt4)
        """
        llm_config = LLMConfig.model_validate(llm_config)
        # The llm entry is appended last so it overrides any env-provided value.
        dicts = [dict(os.environ), {"llm": llm_config}]
        final = merge_dict(dicts)
        return cls(**final)

    def update_via_cli(self, project_path, project_name, inc, reqa_file, max_auto_summarize_code):
        """Update config in place from CLI arguments (mirrors CLIParams.check_project_path)."""
        # Use in the PrepareDocuments action according to Section 2.2.3.5.1 of RFC 135.
        if project_path:
            inc = True
            project_name = project_name or Path(project_path).name
        self.project_path = project_path
        self.project_name = project_name
        self.inc = inc
        self.reqa_file = reqa_file
        self.max_auto_summarize_code = max_auto_summarize_code

    @property
    def extra(self):
        """Free-form extra config values not covered by declared fields."""
        return self._extra

    @extra.setter
    def extra(self, value: dict):
        self._extra = value

    def get_openai_llm(self) -> Optional[LLMConfig]:
        """Return the LLM config if its api_type is OpenAI, otherwise None."""
        if self.llm.api_type == LLMType.OPENAI:
            return self.llm
        return None

    def get_azure_llm(self) -> Optional[LLMConfig]:
        """Return the LLM config if its api_type is Azure, otherwise None."""
        if self.llm.api_type == LLMType.AZURE:
            return self.llm
        return None
def merge_dict(dicts: Iterable[Dict]) -> Dict:
    """Fold an iterable of dicts into one; later dicts take precedence over earlier ones."""
    merged: Dict = {}
    for mapping in dicts:
        # Unpacking order makes entries from `mapping` overwrite existing keys.
        merged = {**merged, **mapping}
    return merged
# Cache used by Config.default(): maps the tuple of default config file paths
# to the Config instance built from them, so repeated calls reuse one object.
_CONFIG_CACHE = {}
# Module-level singleton config, built once at import time from env + YAML defaults.
config = Config.default()
|