Rockramsri committed on
Commit d88cce3
1 Parent(s): 9b3b808
Files changed (1)
  1. app.py +4 -0
app.py CHANGED
@@ -1,6 +1,7 @@
 import streamlit as st
 import subprocess
 import sys
+import time
 
 def install(package):
     subprocess.check_call([sys.executable, "-m", "pip", "install", package])
@@ -10,7 +11,10 @@ from llama_cpp import Llama
 prompt = st.chat_input("Say something")
 if prompt:
     llm = Llama(model_path="Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf")
+    start = time.time()
     r=llm(prompt, max_tokens=1000)
+    end = time.time()
+    print(f"The Generation time for 1000 tokens is : {end - start}")
     l="Nothing"
     try:
         l=r["choices"][0]["text"]