import flask
from flask import Flask, request, jsonify
#import streamlit as st
#from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import WebBaseLoader
from langchain.chains.summarize import load_summarize_chain
from bs4 import BeautifulSoup
import os
from dotenv import load_dotenv
load_dotenv()
from langchain import HuggingFaceHub
from huggingface_hub import InferenceClient
import requests
import sys
#OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')
hf_token = os.environ.get('HUGGINGFACEHUB_API_TOKEN')
#starchat_repo_id = os.environ.get('starchat_repo_id')
repo_id=os.environ.get('repo_id')
#port = os.getenv('port')
# Instantiate the hosted LLM; the commented-out alternatives switch between Llama2 and StarChat settings
llm = HuggingFaceHub(
    repo_id=repo_id,                                 #for Llama2
    #repo_id=starchat_repo_id,                       #for StarChat
    huggingfacehub_api_token=hf_token,
    model_kwargs={
        #"min_length": 512,                          #for StarChat
        "min_length": 1024,                          #for Llama2
        "max_new_tokens": 3072, "do_sample": True,   #for StarChat
        #"max_new_tokens": 5632, "do_sample": True,  #for Llama2
        "temperature": 0.1,
        "top_k": 50,
        "top_p": 0.95,
        "eos_token_id": 49155,
    },
)
#llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo-16k")
chain = load_summarize_chain(llm, chain_type="stuff")
app = Flask(__name__)
@app.route('/', methods=['POST'])
def home_api():
    # Health-check style endpoint: echoes the incoming question and confirms deployment
    data = request.get_json()
    user_query = data['user_question']
    print(user_query)
    return {"Message": "Flask Home API Deploy Success on HF"}
@app.route('/api/chat', methods=['POST'])
def chat():
    #async def chat(): #Failed - Flask API doesn't support async???
    data = request.get_json()
    user_query = data['user_question']
    print(user_query)
    # Proceed only if the submitted URL is non-empty and not just whitespace
    if user_query != "" and not user_query.isspace():
        try:
            loader = WebBaseLoader(user_query)
            #loader = WebBaseLoader("https://zhuanlan.zhihu.com/p/627439522")
            #print(loader)
            print(user_query)
            #with st.spinner("AI Thinking...Please wait a while to Cheers!"):
            docs = loader.load()
            result = chain.run(docs)
            print("AI Summarization: " + result)
            #st.write("AI Summarization:")
            #st.write(result)
            return jsonify({'response': result})
        except Exception as e:
            #st.write("Wrong URL or URL not parsable.")
            err_msg = "Wrong URL or URL not parsable."
            print(err_msg)
            return jsonify({'response': err_msg})
#initial_response = llm_chain.run(user_query)
#return jsonify({'response': initial_response})
#Found the issue: jsonify isn't supported on Huggingface; deploying from Github to Render works! ---NO No No, it's supported
#return {'response': initial_response}
#return jsonify({'response': initial_response}) #tried and OKed!
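
# Example client call (a sketch, not part of the original app): the Space URL below is a
# placeholder assumption; replace it with the actual deployment URL. It illustrates the
# request/response shape expected by the /api/chat endpoint defined above.
#   import requests
#   resp = requests.post("https://<your-space>.hf.space/api/chat",
#                        json={"user_question": "https://www.usinoip.com"})
#   print(resp.json().get("response"))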
#url=st.text_input("Enter website URL to summarize (format: https://www.usinoip.com):")
#loader = WebBaseLoader("https://www.usinoip.com/")
#if url !="" and not url.strip().isspace() and not url == "" and not url.strip() == "" and not url.isspace():
# try:
# loader = WebBaseLoader(url)
# with st.spinner("AI Thinking...Please wait a while to Cheers!"):
# docs = loader.load()
# result=chain.run(docs)
# print(url)
# print("AI Summarization: "+result)
# st.write("AI Summarization:")
# st.write(result)
# except Exception as e:
# st.write("Wrong URL or URL not parsable.")
#**************************************************************#
#try:
# loader = WebBaseLoader(url)
# with st.spinner("AI Thinking...Please wait a while to Cheers!"):
# docs = loader.load()
# result=chain.run(docs)
# print(result)
# st.write("AI Summarization:")
# st.write(result)
#except Exception as e:
#    st.write("Wrong URL")
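
# Minimal entry point sketch (an assumption; not present in the original file). Flask Spaces
# on Hugging Face are commonly served on port 7860; adjust host/port to your deployment.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)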