research14 committed
Commit 7c616cb · 1 Parent(s): a9a458f

removed unnecessary code

Files changed (1)
  1. app.py +0 -24
app.py CHANGED
@@ -14,30 +14,6 @@ vicuna_model = AutoModelForCausalLM.from_pretrained("lmsys/vicuna-7b-v1.3")
  llama_tokenizer = AutoTokenizer.from_pretrained("daryl149/llama-2-7b-chat-hf")
  llama_model = AutoModelForCausalLM.from_pretrained("daryl149/llama-2-7b-chat-hf")
 
- template_single = '''Please output any <{}> in the following sentence one per line without any additional text: "{}"'''
-
- # def linguistic_features(message):
- #     # Load a trained spaCy pipeline
- #     nlp = spacy.load("en_core_web_sm")
-
- #     # Create a spaCy doc object
- #     doc = nlp(message)
-
- #     # Initiate LFTK extractor by passing in the doc
- #     LFTK_extractor = lftk.Extractor(docs=doc)
-
- #     # Customize LFTK extractor (optional)
- #     LFTK_extractor.customize(stop_words=True, punctuations=False, round_decimal=3)
-
- #     # Use LFTK to dynamically extract handcrafted linguistic features
- #     features_to_extract = lftk.search_features(family="wordsent", language="general", return_format="list_key")
- #     extracted_features = LFTK_extractor.extract(features=features_to_extract)
-
- #     print('Linguistic Features:', extracted_features)
-
- #     return extracted_features
-
-
  def update_api_key(new_key):
      global api_key
      os.environ['OPENAI_API_TOKEN'] = new_key
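
For reference, the removed template_single prompt takes a linguistic category tag and a sentence. A hypothetical fill-in is shown below; the example arguments are illustrative only, the live app supplies its own values:

    template_single = '''Please output any <{}> in the following sentence one per line without any additional text: "{}"'''

    # Hypothetical example values, not taken from the app itself.
    prompt = template_single.format("noun", "The cat sat on the mat.")
    print(prompt)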
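
A minimal, runnable sketch of the commented-out linguistic_features helper that this commit deletes, assuming the spacy and lftk packages and the en_core_web_sm model are installed; the Extractor, customize, search_features, and extract calls are taken from the removed comments, while the function body itself is reconstructed for illustration:

    import spacy
    import lftk

    def linguistic_features(message):
        # Load a trained spaCy pipeline and parse the message into a doc
        nlp = spacy.load("en_core_web_sm")
        doc = nlp(message)

        # Initiate the LFTK extractor by passing in the doc
        extractor = lftk.Extractor(docs=doc)

        # Customize the extractor (optional): keep stop words, skip punctuation, round to 3 decimals
        extractor.customize(stop_words=True, punctuations=False, round_decimal=3)

        # Dynamically extract the handcrafted word/sentence ("wordsent") feature family
        features_to_extract = lftk.search_features(family="wordsent", language="general", return_format="list_key")
        extracted_features = extractor.extract(features=features_to_extract)

        print('Linguistic Features:', extracted_features)
        return extracted_features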