Update default vision supported & anthropic chat models on first run

- Update initialization to use the latest Claude models: new Claude 3.5 Sonnet and Claude 3.5 Haiku
- Enable vision by default for Google and Anthropic models.
  Previously vision wasn't supported for these models, but it has been
  supported for a month or two now
This commit is contained in:
Debanjum 2024-11-16 22:07:11 -08:00
parent 23ab258d78
commit 2366fa08b9
2 changed files with 6 additions and 4 deletions

View file

@ -16,7 +16,7 @@ default_offline_chat_models = [
] ]
default_openai_chat_models = ["gpt-4o-mini", "gpt-4o"] default_openai_chat_models = ["gpt-4o-mini", "gpt-4o"]
default_gemini_chat_models = ["gemini-1.5-flash", "gemini-1.5-pro"] default_gemini_chat_models = ["gemini-1.5-flash", "gemini-1.5-pro"]
default_anthropic_chat_models = ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229"] default_anthropic_chat_models = ["claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022"]
empty_config = { empty_config = {
"search-type": { "search-type": {

View file

@ -87,7 +87,7 @@ def initialization(interactive: bool = True):
ChatModelOptions.ModelType.GOOGLE, ChatModelOptions.ModelType.GOOGLE,
default_gemini_chat_models, default_gemini_chat_models,
default_api_key=os.getenv("GEMINI_API_KEY"), default_api_key=os.getenv("GEMINI_API_KEY"),
vision_enabled=False, vision_enabled=True,
is_offline=False, is_offline=False,
interactive=interactive, interactive=interactive,
provider_name="Google Gemini", provider_name="Google Gemini",
@ -98,7 +98,7 @@ def initialization(interactive: bool = True):
ChatModelOptions.ModelType.ANTHROPIC, ChatModelOptions.ModelType.ANTHROPIC,
default_anthropic_chat_models, default_anthropic_chat_models,
default_api_key=os.getenv("ANTHROPIC_API_KEY"), default_api_key=os.getenv("ANTHROPIC_API_KEY"),
vision_enabled=False, vision_enabled=True,
is_offline=False, is_offline=False,
interactive=interactive, interactive=interactive,
) )
@ -158,7 +158,9 @@ def initialization(interactive: bool = True):
is_offline: bool = False, is_offline: bool = False,
provider_name: str = None, provider_name: str = None,
) -> Tuple[bool, OpenAIProcessorConversationConfig]: ) -> Tuple[bool, OpenAIProcessorConversationConfig]:
supported_vision_models = ["gpt-4o-mini", "gpt-4o"] supported_vision_models = (
default_openai_chat_models + default_anthropic_chat_models + default_gemini_chat_models
)
provider_name = provider_name or model_type.name.capitalize() provider_name = provider_name or model_type.name.capitalize()
default_use_model = {True: "y", False: "n"}[default_api_key is not None or is_offline] default_use_model = {True: "y", False: "n"}[default_api_key is not None or is_offline]
use_model_provider = ( use_model_provider = (