alienet committed on
Commit
61481b2
·
1 Parent(s): 7fb5e91

5/17: API & script mode

Browse files
BookWorld.py CHANGED
@@ -135,6 +135,19 @@ class Server():
135
  if self.language == "zh" else f"{self.role_agents[role_code].nickname} is now located at {self.world_agent.find_location_name(init_locations_code[i])}"
136
  self.log(info_text)
137
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
  # Simulation
139
  def simulate_generator(self,
140
  rounds: int = 10,
@@ -218,12 +231,11 @@ class Server():
218
  # Characters in next scene
219
  if scene_mode:
220
  group = self._name2code(
221
- self.world_agent.decide_screen_actors(
222
  self._get_locations_info(False),
223
  self.history_manager.get_recent_history(5),
224
  self.event,
225
  list(set(selected_role_codes + list(self.moving_roles_info.keys())))))
226
-
227
  selected_role_codes += group
228
  if len(selected_role_codes) > len(self.role_codes):
229
  selected_role_codes = []
@@ -232,7 +244,6 @@ class Server():
232
  self.current_status['group'] = group
233
  self.current_status['location_code'] = self.role_agents[group[0]].location_code
234
  self.scene_characters[str(current_round)] = group
235
-
236
  # Prologue
237
  # if current_round == 0 and len(group) > 0
238
  # prologue = self.world_agent.generate_location_prologue(location_code=self.role_agents[group[0]].location_code, history_text=self._get_history_text(group),event=self.event,location_info_text=self._find_roles_at_location(self.role_agents[group[0]].location_code,name=True))
@@ -601,7 +612,7 @@ class Server():
601
  Dict[str, Any]: Instruction for each role.
602
  """
603
  roles_info_text = self._get_group_members_info_text(self.role_codes,status=True)
604
- history_text = "\n".join([self.role_agents[role_code].history_manager.get_recent_history(1)[0] for role_code in self.role_codes])
605
 
606
  instruction = self.world_agent.get_script_instruction(
607
  roles_info_text=roles_info_text,
@@ -990,6 +1001,7 @@ class BookWorld():
990
  else:
991
  location_name,location_description = self.server.world_agent.find_location_name(location_code),self.server.world_agent.locations_info[location_code]["description"]
992
  status['location'] = {'name': location_name, 'description': location_description}
 
993
  return status
994
 
995
  def handle_message_edit(self,record_id,new_text):
 
135
  if self.language == "zh" else f"{self.role_agents[role_code].nickname} is now located at {self.world_agent.find_location_name(init_locations_code[i])}"
136
  self.log(info_text)
137
 
138
+ def reset_llm(self, role_llm_name, world_llm_name):
139
+ self.role_llm = get_models(role_llm_name)
140
+ for role_code in self.role_codes:
141
+ self.role_agents[role_code].llm = self.role_llm
142
+ self.role_agents[role_code].llm_name = role_llm_name
143
+ if world_llm_name == role_llm_name:
144
+ self.world_llm = self.role_llm
145
+ else:
146
+ self.world_llm = get_models(world_llm_name)
147
+ self.world_agent.llm = self.world_llm
148
+ self.role_llm_name = role_llm_name
149
+ self.world_llm_name = world_llm_name
150
+
151
  # Simulation
152
  def simulate_generator(self,
153
  rounds: int = 10,
 
231
  # Characters in next scene
232
  if scene_mode:
233
  group = self._name2code(
234
+ self.world_agent.decide_scene_actors(
235
  self._get_locations_info(False),
236
  self.history_manager.get_recent_history(5),
237
  self.event,
238
  list(set(selected_role_codes + list(self.moving_roles_info.keys())))))
 
239
  selected_role_codes += group
240
  if len(selected_role_codes) > len(self.role_codes):
241
  selected_role_codes = []
 
244
  self.current_status['group'] = group
245
  self.current_status['location_code'] = self.role_agents[group[0]].location_code
246
  self.scene_characters[str(current_round)] = group
 
247
  # Prologue
248
  # if current_round == 0 and len(group) > 0
249
  # prologue = self.world_agent.generate_location_prologue(location_code=self.role_agents[group[0]].location_code, history_text=self._get_history_text(group),event=self.event,location_info_text=self._find_roles_at_location(self.role_agents[group[0]].location_code,name=True))
 
612
  Dict[str, Any]: Instruction for each role.
613
  """
614
  roles_info_text = self._get_group_members_info_text(self.role_codes,status=True)
615
+ history_text = self.history_manager.get_recent_history(top_k)
616
 
617
  instruction = self.world_agent.get_script_instruction(
618
  roles_info_text=roles_info_text,
 
1001
  else:
1002
  location_name,location_description = self.server.world_agent.find_location_name(location_code),self.server.world_agent.locations_info[location_code]["description"]
1003
  status['location'] = {'name': location_name, 'description': location_description}
1004
+ status['characters'] = self.get_characters_info()
1005
  return status
1006
 
1007
  def handle_message_edit(self,record_id,new_text):
app.py CHANGED
@@ -205,29 +205,6 @@ async def websocket_endpoint(websocket: WebSocket, client_id: str):
205
  }
206
  })
207
 
208
- elif message['type'] == 'api_settings':
209
- # 处理API设置
210
- settings = message['data']
211
- # 设置环境变量
212
- os.environ[settings['envKey']] = settings['apiKey']
213
-
214
- # 更新BookWorld的设置
215
- manager.bw.update_api_settings(
216
- provider=settings['provider'],
217
- model=settings['model']
218
- )
219
-
220
- # 发送确认消息
221
- await websocket.send_json({
222
- 'type': 'message',
223
- 'data': {
224
- 'username': 'System',
225
- 'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
226
- 'text': f'已更新 {settings["provider"]} API设置',
227
- 'icon': default_icon_path,
228
- 'type': 'system'
229
- }
230
- })
231
  except Exception as e:
232
  print(f"WebSocket error: {e}")
233
  finally:
@@ -257,22 +234,9 @@ async def save_config(request: Request):
257
  elif 'openrouter' in llm_provider.lower():
258
  os.environ['OPENROUTER_API_KEY'] = api_key
259
 
260
- if "preset_path" in config and config["preset_path"] and os.path.exists(config["preset_path"]):
261
- preset_path = config["preset_path"]
262
- elif "genre" in config and config["genre"]:
263
- genre = config["genre"]
264
- preset_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),f"./config/experiment_{genre}.json")
265
- else:
266
- raise ValueError("Please set the preset_path in `config.json`.")
267
- manager.bw = BookWorld(preset_path = preset_path,
268
- world_llm_name = config["world_llm_name"],
269
- role_llm_name = config["world_llm_name"])
270
- manager.bw.set_generator(rounds = config["rounds"],
271
- save_dir = config["save_dir"],
272
- if_save = config["if_save"],
273
- mode = config["mode"],
274
- scene_mode = config["scene_mode"],)
275
  return {"status": "success", "message": llm_provider + " 配置已保存"}
 
276
  except Exception as e:
277
  print(f"保存配置失败: {e}")
278
  raise HTTPException(status_code=500, detail="保存配置失败")
 
205
  }
206
  })
207
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
208
  except Exception as e:
209
  print(f"WebSocket error: {e}")
210
  finally:
 
234
  elif 'openrouter' in llm_provider.lower():
235
  os.environ['OPENROUTER_API_KEY'] = api_key
236
 
237
+ manager.bw.server.reset_llm(model,model)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
238
  return {"status": "success", "message": llm_provider + " 配置已保存"}
239
+
240
  except Exception as e:
241
  print(f"保存配置失败: {e}")
242
  raise HTTPException(status_code=500, detail="保存配置失败")
bw_utils.py CHANGED
@@ -9,21 +9,23 @@ import random
9
  import base64
10
 
11
  MODEL_NAME_DICT = {
12
- "gpt3":"openai/gpt-3.5-turbo",
13
  "gpt-4":"openai/gpt-4",
14
  "gpt-4o":"openai/gpt-4o",
15
  "gpt-4o-mini":"openai/gpt-4o-mini",
16
  "gpt-3.5-turbo":"openai/gpt-3.5-turbo",
17
  "deepseek-r1":"deepseek/deepseek-r1",
18
  "deepseek-v3":"deepseek/deepseek-chat",
19
- "gemini-2":"google/gemini-2.0-flash-001",
20
- "gemini-1.5":"google/gemini-flash-1.5",
21
  "llama3-70b": "meta-llama/llama-3.3-70b-instruct",
22
  "qwen-turbo":"qwen/qwen-turbo",
23
  "qwen-plus":"qwen/qwen-plus",
24
  "qwen-max":"qwen/qwen-max",
25
  "qwen-2.5-72b":"qwen/qwen-2.5-72b-instruct",
 
26
  "claude-3.5-sonnet":"anthropic/claude-3.5-sonnet",
 
27
  "phi-4":"microsoft/phi-4",
28
  }
29
 
@@ -31,38 +33,47 @@ def get_models(model_name):
31
  if os.getenv("OPENROUTER_API_KEY", default="") and model_name in MODEL_NAME_DICT:
32
  from modules.llm.OpenRouter import OpenRouter
33
  return OpenRouter(model=MODEL_NAME_DICT[model_name])
34
- elif model_name.startswith('gpt-3.5'):
35
  from modules.llm.LangChainGPT import LangChainGPT
36
- return LangChainGPT(model="gpt-3.5-turbo")
37
- elif model_name == 'gpt-4':
38
- from modules.llm.LangChainGPT import LangChainGPT
39
- return LangChainGPT(model="gpt-4")
40
- elif model_name == 'gpt-4-turbo':
41
- from modules.llm.LangChainGPT import LangChainGPT
42
- return LangChainGPT(model="gpt-4")
43
- elif model_name == 'gpt-4o':
44
- from modules.llm.LangChainGPT import LangChainGPT
45
- return LangChainGPT(model="gpt-4o")
46
- elif model_name == "gpt-4o-mini":
47
- from modules.llm.LangChainGPT import LangChainGPT
48
- return LangChainGPT(model="gpt-4o-mini")
49
  elif model_name.startswith("claude"):
50
- from modules.llm.LangChainGPT import LangChainGPT
51
- return LangChainGPT(model="claude-3-5-sonnet-20241022")
 
 
 
 
 
 
52
  elif model_name.startswith('qwen'):
53
  from modules.llm.Qwen import Qwen
54
  return Qwen(model = model_name)
55
  elif model_name.startswith('deepseek'):
56
  from modules.llm.DeepSeek import DeepSeek
57
- return DeepSeek()
58
  elif model_name.startswith('doubao'):
59
  from modules.llm.Doubao import Doubao
60
  return Doubao()
61
  elif model_name.startswith('gemini'):
62
  from modules.llm.Gemini import Gemini
 
 
 
 
 
 
 
 
63
  return Gemini()
64
  else:
65
- print(f'Warning! undefined model {model_name}, use gpt-3.5-turbo instead.')
66
  from modules.llm.LangChainGPT import LangChainGPT
67
  return LangChainGPT()
68
 
 
9
  import base64
10
 
11
  MODEL_NAME_DICT = {
12
+ "gpt-3.5":"openai/gpt-3.5-turbo",
13
  "gpt-4":"openai/gpt-4",
14
  "gpt-4o":"openai/gpt-4o",
15
  "gpt-4o-mini":"openai/gpt-4o-mini",
16
  "gpt-3.5-turbo":"openai/gpt-3.5-turbo",
17
  "deepseek-r1":"deepseek/deepseek-r1",
18
  "deepseek-v3":"deepseek/deepseek-chat",
19
+ "gemini-2.0-flash":"google/gemini-2.0-flash-001",
20
+ "gemini-1.5-flash":"google/gemini-flash-1.5",
21
  "llama3-70b": "meta-llama/llama-3.3-70b-instruct",
22
  "qwen-turbo":"qwen/qwen-turbo",
23
  "qwen-plus":"qwen/qwen-plus",
24
  "qwen-max":"qwen/qwen-max",
25
  "qwen-2.5-72b":"qwen/qwen-2.5-72b-instruct",
26
+ "claude-3.5-haiku": "anthropic/claude-3.5-haiku",
27
  "claude-3.5-sonnet":"anthropic/claude-3.5-sonnet",
28
+ "claude-3.7-sonnet":"anthropic/claude-3.7-sonnet",
29
  "phi-4":"microsoft/phi-4",
30
  }
31
 
 
33
  if os.getenv("OPENROUTER_API_KEY", default="") and model_name in MODEL_NAME_DICT:
34
  from modules.llm.OpenRouter import OpenRouter
35
  return OpenRouter(model=MODEL_NAME_DICT[model_name])
36
+ elif model_name.startswith('gpt'):
37
  from modules.llm.LangChainGPT import LangChainGPT
38
+ if model_name.startswith('gpt-3.5'):
39
+ return LangChainGPT(model="gpt-3.5-turbo")
40
+ elif model_name == 'gpt-4' or model_name == 'gpt-4-turbo':
41
+ return LangChainGPT(model="gpt-4")
42
+ elif model_name == 'gpt-4o':
43
+ return LangChainGPT(model="gpt-4o")
44
+ elif model_name == "gpt-4o-mini":
45
+ return LangChainGPT(model="gpt-4o-mini")
 
 
 
 
 
46
  elif model_name.startswith("claude"):
47
+ from modules.llm.Claude import Claude
48
+ if model_name.startswith("claude-3.5-sonnet"):
49
+ return Claude(model="claude-3-5-sonnet-latest")
50
+ elif model_name.startswith("claude-3.7-sonnet"):
51
+ return Claude(model="claude-3-7-sonnet-latest")
52
+ elif model_name.startswith("claude-3.5-haiku"):
53
+ return Claude(model="claude-3-5-haiku-latest")
54
+ return Claude()
55
  elif model_name.startswith('qwen'):
56
  from modules.llm.Qwen import Qwen
57
  return Qwen(model = model_name)
58
  elif model_name.startswith('deepseek'):
59
  from modules.llm.DeepSeek import DeepSeek
60
+ return DeepSeek(model = model_name)
61
  elif model_name.startswith('doubao'):
62
  from modules.llm.Doubao import Doubao
63
  return Doubao()
64
  elif model_name.startswith('gemini'):
65
  from modules.llm.Gemini import Gemini
66
+ if model_name.startswith('gemini-2.0'):
67
+ return Gemini(model="gemini-2.0-flash")
68
+ elif model_name.startswith('gemini-1.5'):
69
+ return Gemini(model="gemini-1.5-flash")
70
+ elif model_name.startswith('gemini-2.5-flash'):
71
+ return Gemini(model="gemini-2.5-flash-preview-04-17")
72
+ elif model_name.startswith('gemini-2.5-pro'):
73
+ return Gemini(model="gemini-2.5-pro-preview-05-06")
74
  return Gemini()
75
  else:
76
+ print(f'Warning! undefined model {model_name}, use gpt-4o-mini instead.')
77
  from modules.llm.LangChainGPT import LangChainGPT
78
  return LangChainGPT()
79
 
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
- "role_llm_name": "gemini-2",
3
- "world_llm_name": "gemini-2",
4
  "embedding_model_name":"bge-m3",
5
  "preset_path":"./experiment_presets/experiment_icefire.json",
6
  "if_save": 0,
 
1
  {
2
+ "role_llm_name": "gemini-2.0-flash",
3
+ "world_llm_name": "gemini-2.0-flash",
4
  "embedding_model_name":"bge-m3",
5
  "preset_path":"./experiment_presets/experiment_icefire.json",
6
  "if_save": 0,
experiment_presets/example_script.json CHANGED
@@ -1,12 +1,11 @@
1
  {
2
  "experiment_subname": "script",
3
- "world_file_path":"./data/worlds/example_world.json",
4
  "map_file_path":"./data/maps/example_map.csv",
5
  "loc_file_path":"./data/locations/example_locations.json",
6
  "role_file_dir":"./data/roles/",
7
  "role_agent_codes":["Lacia-en","Trek-en"],
8
- "intervention":"",
9
- "script":"",
10
  "source":"example_world",
11
  "language":"en"
12
 
 
1
  {
2
  "experiment_subname": "script",
3
+ "world_file_path":"./data/worlds/example_world/general.json",
4
  "map_file_path":"./data/maps/example_map.csv",
5
  "loc_file_path":"./data/locations/example_locations.json",
6
  "role_file_dir":"./data/roles/",
7
  "role_agent_codes":["Lacia-en","Trek-en"],
8
+ "script":"One day, an enigmatic signal from an unknown source reached Lacia and Trek. The pattern resembled Trek’s early consciousness digitization code—but far more evolved. Tracing its origin to lunar orbit, the two found themselves divided: Lacia urged caution, fearing the dangers of an unknown intelligence, while Trek saw it as proof that human evolution had already begun elsewhere. As they followed the signal, ideological tension grew—one seeking to contain it, the other longing to embrace it.",
 
9
  "source":"example_world",
10
  "language":"en"
11
 
frontend/js/right-section/api-panel.js CHANGED
@@ -73,10 +73,10 @@ class APIPanel {
73
 
74
  const provider = this.providerSelect.value;
75
  const models = {
76
- openai: ['gpt-3.5-turbo', 'gpt-4'],
77
- anthropic: ['claude-3-opus', 'claude-3-sonnet'],
78
- alibaba: ['qwen-turbo', 'qwen-max'],
79
- openrouter: ['gpt-4o-mini']
80
  };
81
 
82
  const currentModelValue = this.modelSelect.value;
 
73
 
74
  const provider = this.providerSelect.value;
75
  const models = {
76
+ openai: ['gpt-4o-mini', 'gpt-4o', 'gpt-4'],
77
+ anthropic: ['claude-3.5-sonnet', 'claude-3.7-sonnet', 'claude-3.5-haiku'],
78
+ alibaba: ['qwen-turbo', 'qwen-max','qwen-plus'],
79
+ openrouter: ['gpt-4o-mini','gpt-4o','gemini-2.0-flash','claude-3.5-sonnet','deepseek-r1']
80
  };
81
 
82
  const currentModelValue = this.modelSelect.value;
modules/llm/Claude.py CHANGED
@@ -8,7 +8,7 @@ from .BaseLLM import BaseLLM
8
 
9
  class Claude(BaseLLM):
10
 
11
- def __init__(self, model="claude-3-5-sonnet-20240620"):
12
  super(Claude, self).__init__()
13
  self.model_name = model
14
  self.client = anthropic.Anthropic(
 
8
 
9
  class Claude(BaseLLM):
10
 
11
+ def __init__(self, model="claude-3-5-sonnet-latest"):
12
  super(Claude, self).__init__()
13
  self.model_name = model
14
  self.client = anthropic.Anthropic(
modules/llm/Gemini.py CHANGED
@@ -1,14 +1,12 @@
1
  from .BaseLLM import BaseLLM
2
- import google.generativeai as genai
3
  import os
4
  import time
5
 
6
  class Gemini(BaseLLM):
7
- def __init__(self, model="gemini-1.5-flash"):
8
  super(Gemini, self).__init__()
9
- genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
10
  self.model_name = model
11
- self.model = genai.GenerativeModel(model)
12
  self.messages = []
13
 
14
 
@@ -16,29 +14,26 @@ class Gemini(BaseLLM):
16
  self.messages = []
17
 
18
  def ai_message(self, payload):
19
- self.messages.append({"role": "model", "parts": payload})
20
 
21
  def system_message(self, payload):
22
- self.messages.append({"role": "system", "parts": payload})
23
 
24
  def user_message(self, payload):
25
- self.messages.append({"role": "user", "parts": payload})
26
 
27
  def get_response(self,temperature = 0.8):
28
- time.sleep(3)
29
- chat = self.model.start_chat(
30
- history = self.messages
31
  )
32
- response = chat.send_message(generation_config=genai.GenerationConfig(
33
- temperature=temperature,
34
- ))
35
-
36
  return response.text
37
 
38
  def chat(self,text):
39
- chat = self.model.start_chat()
40
- response = chat.send_message(text)
41
- return response.text
 
 
42
 
43
  def print_prompt(self):
44
  for message in self.messages:
 
1
  from .BaseLLM import BaseLLM
2
+ from google import genai
3
  import os
4
  import time
5
 
6
  class Gemini(BaseLLM):
7
+ def __init__(self, model="gemini-2.0-flash"):
8
  super(Gemini, self).__init__()
 
9
  self.model_name = model
 
10
  self.messages = []
11
 
12
 
 
14
  self.messages = []
15
 
16
  def ai_message(self, payload):
17
+ self.messages.append(payload)
18
 
19
  def system_message(self, payload):
20
+ self.messages.append(payload)
21
 
22
  def user_message(self, payload):
23
+ self.messages.append(payload)
24
 
25
  def get_response(self,temperature = 0.8):
26
+ response = genai.Client(api_key=os.getenv("GEMINI_API_KEY")).models.generate_content(
27
+ model=self.model_name, contents="".join(self.messages), temperature = temperature
 
28
  )
 
 
 
 
29
  return response.text
30
 
31
  def chat(self,text):
32
+ self.initialize_message()
33
+ self.user_message(text)
34
+ response = self.get_response()
35
+
36
+ return response
37
 
38
  def print_prompt(self):
39
  for message in self.messages:
modules/world_agent.py CHANGED
@@ -153,7 +153,7 @@ class WorldAgent:
153
 
154
  return response["if_end"],response["detail"]
155
 
156
- def decide_screen_actors(self,roles_info_text, history_text, event, previous_role_codes):
157
  prompt = self._SELECT_SCREEN_ACTORS_PROMPT.format(**{
158
  "roles_info":roles_info_text,
159
  "history_text":history_text,
 
153
 
154
  return response["if_end"],response["detail"]
155
 
156
+ def decide_scene_actors(self,roles_info_text, history_text, event, previous_role_codes):
157
  prompt = self._SELECT_SCREEN_ACTORS_PROMPT.format(**{
158
  "roles_info":roles_info_text,
159
  "history_text":history_text,
requirements.txt CHANGED
@@ -13,5 +13,5 @@ transformers
13
  uvicorn
14
  pillow
15
  faiss-cpu
16
- google-generativeai
17
  huggingface_hub[cli]
 
13
  uvicorn
14
  pillow
15
  faiss-cpu
16
+ google-genai
17
  huggingface_hub[cli]