Update app.py
app.py CHANGED
@@ -12,11 +12,6 @@ from i_search import google, i_search as i_s
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import random
 import prompts
-client = InferenceClient(
-    "mistralai/Mixtral-8x7B-Instruct-v0.1"
-)
-
-from langchain_community.embeddings import HuggingFaceEmbeddings
 
 # --- Configuration ---
 VERBOSE = True
@@ -25,7 +20,7 @@ MAX_TOKENS = 2048
 TEMPERATURE = 0.7
 TOP_P = 0.8
 REPETITION_PENALTY = 1.5
-MODEL_NAME = "codellama/CodeLlama-13b-Python-hf"
+MODEL_NAME = "codellama/CodeLlama-13b-Python-hf"  # Use CodeLlama for code-related tasks
 API_KEY = os.getenv("HUGGINGFACE_API_KEY")
 
 # --- Logging Setup ---
@@ -161,8 +156,8 @@ def mixtral_generate(
     repetition_penalty: float = REPETITION_PENALTY,
 ) -> str:
     """Generates a response using the Mixtral model."""
-    tokenizer = AutoTokenizer.from_pretrained(
-    model = AutoModelForCausalLM.from_pretrained(
+    tokenizer = AutoTokenizer.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")  # Use Mixtral model
+    model = AutoModelForCausalLM.from_pretrained("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
     content = PREFIX.format(
         date_time_str=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
@@ -362,6 +357,7 @@ def main():
 def create_file(project_path: str, file_name: str) -> str:
     """Creates a new file in the project directory."""
     try:
+        os.makedirs(os.path.dirname(os.path.join(project_path, file_name)), exist_ok=True)  # Create directory if needed
         open(os.path.join(project_path, file_name), "a").close()
         return f"File {file_name} created successfully."
     except Exception as e: