Update app.py
app.py CHANGED
@@ -4,6 +4,18 @@ from sentence_transformers import SentenceTransformer, util
 import os
 import requests
 from transformers import AutoModelForCausalLM, AutoTokenizer
+import torch
+
+
+# Load the model and tokenizer
+model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
+
+# If you have a GPU, move the model to it
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+model.to(device)
+
 
 # Constants for enhanced organization
 system_message = "You are GitBot, the Github project guardian angel. You resolve issues and propose implementation of feature requests"
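
Note on the block added above: once the model is placed on `device`, the encoded prompt has to be moved to the same device before calling `model.generate`, otherwise PyTorch raises a device-mismatch error. A minimal sketch of how the later `generated_code` / `generated_code_str` lines could be produced with this setup; the `max_new_tokens` value is an assumption for illustration, not taken from app.py:

# Sketch only: encode the prompt, keep the tensors on the model's device,
# then generate and decode, mirroring the generate/batch_decode lines later in app.py.
input_prompt = "(input value = highest-level-quality code content invocation ; True)"
inputs = tokenizer(input_prompt, return_tensors="pt").to(device)
generated_code = model.generate(**inputs, max_new_tokens=256)  # assumed token budget
generated_code_str = tokenizer.batch_decode(generated_code, skip_special_tokens=True)
print(generated_code_str)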
@@ -41,12 +53,6 @@ from huggingface_hub import InferenceClient,HfApi
 
 
 client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
-
-# Load the pre-trained model and tokenizer
-model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
-model = AutoModelForCausalLM.from_pretrained(model_name)
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-
 # Define input prompt
 input_prompt = "(input value = highest-level-quality code content invocation ; True)"
 
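
The hunk above drops the duplicated local `from_pretrained` calls and keeps the hosted `InferenceClient`. For reference, a hedged sketch of how the same prompt could be served remotely through `huggingface_hub`; the instruction-style prompt wrapping and the generation parameters are assumptions, since app.py's actual client call is not shown in this diff:

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# Values copied from app.py's constants.
system_message = "You are GitBot, the Github project guardian angel. You resolve issues and propose implementation of feature requests"
input_prompt = "(input value = highest-level-quality code content invocation ; True)"

# Assumed Mixtral-Instruct style prompt template; adjust to whatever app.py really uses.
prompt = f"<s>[INST] {system_message}\n\n{input_prompt} [/INST]"
response = client.text_generation(prompt, max_new_tokens=512, temperature=0.7)
print(response)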
@@ -62,8 +68,8 @@ generated_code_str = tokenizer.batch_decode(generated_code, skip_special_tokens=
 # Print the generated code
 print(generated_code_str)
 
-DEFAULT_MODEL = "
-MAX_RELATED_ISSUES =
+DEFAULT_MODEL = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+MAX_RELATED_ISSUES = 5
 
 # Load a pre-trained model for sentence similarity
 similarity_model = SentenceTransformer('all-mpnet-base-v2')
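
The final hunk fills in the previously truncated constants and keeps the sentence-similarity model. How `MAX_RELATED_ISSUES` and `similarity_model` are consumed is not visible in this diff; a plausible sketch, assuming the intent is to rank existing issue texts against a new issue (the function and variable names below are illustrative, not from app.py):

from sentence_transformers import SentenceTransformer, util

similarity_model = SentenceTransformer('all-mpnet-base-v2')
MAX_RELATED_ISSUES = 5

def find_related_issues(new_issue_text, existing_issue_texts):
    # Embed the new issue and the existing issues.
    query_emb = similarity_model.encode(new_issue_text, convert_to_tensor=True)
    corpus_emb = similarity_model.encode(existing_issue_texts, convert_to_tensor=True)
    # Cosine similarity between the new issue and every existing issue.
    scores = util.cos_sim(query_emb, corpus_emb)[0]
    # Keep at most MAX_RELATED_ISSUES of the highest-scoring matches.
    top = scores.topk(min(MAX_RELATED_ISSUES, len(existing_issue_texts)))
    return [(int(idx), float(score)) for score, idx in zip(top.values, top.indices)]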
|