seanpedrickcase committed
Commit a10d388 · 1 Parent(s): 1f0d087

Refactored the call_llama_cpp_model function in chatfuncs.py to take a model parameter, and updated the import statements in llm_api_call.py to reflect this change.

Files changed (2)
  1. tools/chatfuncs.py +1 -1
  2. tools/llm_api_call.py +1 -1
tools/chatfuncs.py CHANGED
@@ -217,7 +217,7 @@ def llama_cpp_streaming(history, full_prompt, temperature=temperature):
     print(f'Time per token: {(time_generate/NUM_TOKENS)*1000}ms')
 
 @spaces.GPU
-def call_llama_cpp_model(formatted_string:str, gen_config:str):
+def call_llama_cpp_model(formatted_string:str, gen_config:str, model=model):
     """
     Calls your generation model with parameters from the LlamaCPPGenerationConfig object.
 
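The hunk only captures the signature change. A minimal sketch of what the refactored function could look like, assuming the module-level model is a llama_cpp.Llama instance and that LlamaCPPGenerationConfig stores its generation settings as plain instance attributes; the body below is illustrative, not the repository's actual implementation:

# Illustrative sketch only (not the repository's actual body). Assumes the module-level
# `model` is a llama_cpp.Llama instance and that LlamaCPPGenerationConfig exposes its
# generation settings (temperature, top_p, max_tokens, ...) as instance attributes.
def call_llama_cpp_model(formatted_string: str, gen_config, model=model):
    """
    Calls your generation model with parameters from the LlamaCPPGenerationConfig object.
    """
    gen_kwargs = vars(gen_config)                   # config attributes -> generation kwargs
    output = model(formatted_string, **gen_kwargs)  # Llama.__call__ wraps create_completion
    return output

Binding the model as a default argument keeps the existing call sites working while letting callers substitute a different loaded model explicitly if needed.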
tools/llm_api_call.py CHANGED
@@ -19,7 +19,7 @@ GradioFileData = gr.FileData
 
 from tools.prompts import initial_table_prompt, prompt2, prompt3, system_prompt, summarise_topic_descriptions_prompt, summarise_topic_descriptions_system_prompt, add_existing_topics_system_prompt, add_existing_topics_prompt
 from tools.helper_functions import output_folder, detect_file_type, get_file_path_end, read_file, get_or_create_env_var, model_name_map, put_columns_in_df
-from tools.chatfuncs import model, LlamaCPPGenerationConfig, temperature, context_length, call_llama_cpp_model
+from tools.chatfuncs import LlamaCPPGenerationConfig, call_llama_cpp_model
 
 # ResponseObject class for AWS Bedrock calls
 class ResponseObject:
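With the model now bound inside tools.chatfuncs, llm_api_call.py no longer imports the model object or sampling settings directly. A hedged sketch of the resulting call site; the prompt text and the assumption that LlamaCPPGenerationConfig has a no-argument constructor are illustrative, not taken from the diff:

from tools.chatfuncs import LlamaCPPGenerationConfig, call_llama_cpp_model

# Illustrative call site: the loaded model travels as a default argument inside
# tools.chatfuncs, so the caller passes only the prompt and a generation config.
formatted_prompt = "Summarise the following topic descriptions: ..."  # placeholder prompt
gen_config = LlamaCPPGenerationConfig()  # assumed to default its sampling settings
response = call_llama_cpp_model(formatted_prompt, gen_config)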