nadiamaqbool81 committed
Commit 94dac17
Parent(s): 3c2f6e1

Update app.py

Files changed (1)
  1. app.py +12 -0
app.py CHANGED
@@ -30,9 +30,11 @@ model_box=[
  ]
  current_model=model_box[0]
  pythonFlag = "false"
+ javaFlag = "false"

  def the_process(input_text, model_choice):
      global pythonFlag
+     global javaFlag
      print("Inside the_process for python 0", pythonFlag)
      global output
      print("Inside the_process for python 1", model_choice)
@@ -46,6 +48,16 @@ def the_process(input_text, model_choice):
          pythonFlag = "true"
      elif(pythonFlag == "true"):
          print("pythonFlag", pythonFlag)
+         if(model_choice==0):
+             if(javaFlag == "false"):
+                 print("Inside llama for python")
+                 tokenizer = AutoTokenizer.from_pretrained("nadiamaqbool81/starcoderbase-1b-hf")
+                 model = AutoModelForCausalLM.from_pretrained("nadiamaqbool81/starcoderbase-1b-hf")
+                 output = run_predict(input_text, model, tokenizer)
+                 print("output" , output)
+                 javaFlag = "true"
+             elif(javaFlag == "true"):
+                 print("javaFlag", javaFlag)
      else:
          a_variable = model_box[model_choice]
          output = a_variable(input_text)
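
Note: the added branch calls a run_predict helper that is not shown in this diff. As a point of reference, a minimal sketch of what such a helper could look like with the standard transformers generate/decode round trip is given below; the body and the max_new_tokens value are assumptions for illustration, not the repository's actual implementation.

import torch

# Hypothetical sketch only: the real run_predict in app.py may differ.
def run_predict(input_text, model, tokenizer):
    # Encode the prompt for the causal language model.
    inputs = tokenizer(input_text, return_tensors="pt")
    # Generate a completion; max_new_tokens=128 is an illustrative choice.
    with torch.no_grad():
        generated = model.generate(**inputs, max_new_tokens=128)
    # Decode the generated token ids back into text.
    return tokenizer.decode(generated[0], skip_special_tokens=True)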