File size: 1,599 Bytes
88435ed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
from neollm.llm.abstract_llm import AbstractLLM
from neollm.types import ClientSettings

from .platform import Platform

# Claude model identifiers accepted on GCP (Vertex AI).
# Both bare aliases and date-pinned ("@YYYYMMDD") variants are listed;
# membership here routes a GCP request to the Claude factory in get_llm().
SUPPORTED_CLAUDE_MODELS = [
    "claude-3-opus",
    "claude-3-sonnet",
    "claude-3-haiku",
    "claude-3-opus@20240229",
    "claude-3-sonnet@20240229",
    "claude-3-haiku@20240307",
]

# Gemini model identifiers accepted on GCP (Vertex AI).
# Membership here routes a GCP request to the Gemini factory in get_llm().
SUPPORTED_GEMINI_MODELS = [
    "gemini-1.5-pro-preview-0409",
    "gemini-1.0-pro",
    "gemini-1.0-pro-vision",
]


def get_llm(model_name: str, platform: str, client_settings: ClientSettings) -> AbstractLLM:
    """Return a concrete LLM client for the given model and platform.

    Args:
        model_name: Model identifier (e.g. "claude-3-opus", "gemini-1.0-pro").
        platform: Platform name accepted by the ``Platform`` enum
            (e.g. "azure", "openai", "anthropic", "gcp").
        client_settings: Settings forwarded to the platform-specific factory.

    Returns:
        An ``AbstractLLM`` implementation for the requested platform/model.

    Raises:
        ValueError: If ``platform`` is not a supported platform, or if
            ``model_name`` is not supported on GCP.
    """
    # Keep the original str parameter intact; bind the enum to a new name.
    platform_enum = Platform(platform)
    # Factory imports are deferred so each platform's SDK is only loaded
    # (and only required to be installed) when that platform is requested.
    if platform_enum == Platform.AZURE:
        from neollm.llm.gpt.azure_llm import get_azure_llm

        return get_azure_llm(model_name, client_settings)
    if platform_enum == Platform.OPENAI:
        from neollm.llm.gpt.openai_llm import get_openai_llm

        return get_openai_llm(model_name, client_settings)
    if platform_enum == Platform.ANTHROPIC:
        # NOTE: "anthoropic" is the (misspelled) name exported by the module;
        # it must match the upstream definition.
        from neollm.llm.claude.anthropic_llm import get_anthoropic_llm

        return get_anthoropic_llm(model_name, client_settings)
    if platform_enum == Platform.GCP:
        # GCP (Vertex AI) hosts both Claude and Gemini; route by model family.
        if model_name in SUPPORTED_CLAUDE_MODELS:
            from neollm.llm.claude.gcp_llm import get_gcp_llm as get_gcp_llm_for_claude

            return get_gcp_llm_for_claude(model_name, client_settings)
        if model_name in SUPPORTED_GEMINI_MODELS:
            from neollm.llm.gemini.gcp_llm import get_gcp_llm as get_gcp_llm_for_gemini

            return get_gcp_llm_for_gemini(model_name, client_settings)
        raise ValueError(f"{model_name} is not supported in GCP.")
    # BUG FIX: the original fell off the end here and implicitly returned
    # None for any Platform member without a branch, violating the declared
    # AbstractLLM return type. Fail loudly instead.
    raise ValueError(f"{platform} is not a supported platform.")