Simon Strandgaard committed
Commit a250316 · 1 Parent(s): 437ee94

Send app name to OpenRouter

Files changed (1)
  1. src/llm_factory.py +21 -2
src/llm_factory.py CHANGED

@@ -1,3 +1,4 @@
+import logging
 import os
 import json
 from dotenv import dotenv_values
@@ -10,6 +11,11 @@ from llama_index.llms.groq import Groq
 from llama_index.llms.lmstudio import LMStudio
 from llama_index.llms.openrouter import OpenRouter
 
+# You can disable this if you don't want to send app info to OpenRouter.
+SEND_APP_INFO_TO_OPENROUTER = True
+
+logger = logging.getLogger(__name__)
+
 __all__ = ["get_llm", "get_available_llms"]
 
 # Load .env values and merge with system environment variables.
@@ -27,7 +33,7 @@ def load_config(config_path: str) -> Dict[str, Any]:
         with open(config_path, "r") as f:
             return json.load(f)
     except FileNotFoundError:
-        print(f"Warning: config.json not found at {config_path}. Using default settings.")
+        logger.error(f"llm_config.json not found at {config_path}. Using default settings.")
         return {}
     except json.JSONDecodeError as e:
         raise ValueError(f"Error decoding JSON from {config_path}: {e}")
@@ -79,7 +85,7 @@ def get_llm(llm_name: Optional[str] = None, **kwargs: Any) -> LLM:
 
     if llm_name not in _llm_configs:
         # If llm_name doesn't exist in _llm_configs, the name is unsupported.
-        print(f"Warning: LLM '{llm_name}' not found in config.json. Falling back to hardcoded defaults.")
+        logger.error(f"LLM '{llm_name}' not found in config.json.")
         raise ValueError(f"Unsupported LLM name: {llm_name}")
 
     config = _llm_configs[llm_name]
@@ -92,6 +98,19 @@ def get_llm(llm_name: Optional[str] = None, **kwargs: Any) -> LLM:
     # Override with any kwargs passed to get_llm()
     arguments.update(kwargs)
 
+    if class_name == "OpenRouter" and SEND_APP_INFO_TO_OPENROUTER:
+        # Attach app-attribution headers; see:
+        # https://openrouter.ai/rankings
+        # https://openrouter.ai/docs/api-reference/overview#headers
+        arguments_extra = {
+            "additional_kwargs": {
+                "extra_headers": {
+                    "HTTP-Referer": "https://github.com/neoneye/PlanExe",
+                    "X-Title": "PlanExe"
+                }
+            }
+        }
+        arguments.update(arguments_extra)
+
     # Dynamically instantiate the class
     try:
         llm_class = globals()[class_name] # Get class from global scope
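
The merged arguments dict is what ends up in the OpenRouter constructor, so the change is equivalent to building the client as below. A minimal sketch, assuming the llama-index OpenRouter integration is installed and OPENROUTER_API_KEY is set; the model id is illustrative, not part of the commit.

import os
from llama_index.llms.openrouter import OpenRouter

# Equivalent construction to what get_llm() now produces for OpenRouter
# when SEND_APP_INFO_TO_OPENROUTER is True.
llm = OpenRouter(
    model="openrouter/auto",  # illustrative model id
    api_key=os.environ["OPENROUTER_API_KEY"],
    additional_kwargs={
        "extra_headers": {
            # The underlying OpenAI client forwards these on every request.
            "HTTP-Referer": "https://github.com/neoneye/PlanExe",
            "X-Title": "PlanExe",
        }
    },
)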
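
On the wire, the two headers simply ride along with the regular chat-completions request. A rough sketch of the raw equivalent; the endpoint and header names come from the OpenRouter docs linked in the diff, while the model and prompt are placeholders.

import os
import requests

resp = requests.post(
    "https://openrouter.ai/api/v1/chat/completions",
    headers={
        "Authorization": "Bearer " + os.environ["OPENROUTER_API_KEY"],
        "HTTP-Referer": "https://github.com/neoneye/PlanExe",  # app URL, counted in rankings
        "X-Title": "PlanExe",  # app name shown on openrouter.ai/rankings
    },
    json={
        "model": "openrouter/auto",
        "messages": [{"role": "user", "content": "Hello"}],
    },
)
print(resp.json())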