randydev committed on
Commit
61b9726
1 Parent(s): 8f2d0ff

Upload fluxai.py

Browse files
Files changed (1) hide show
  1. fluxai.py +32 -0
fluxai.py CHANGED
@@ -9,12 +9,16 @@ from dotenv import load_dotenv
9
  from pydantic import BaseModel
10
  from pymongo import MongoClient
11
  from models import *
 
12
 
13
class FluxAI(BaseModel):
    """Request body for the /akeno/fluxai image-generation endpoint.

    Attributes:
        user_id: Telegram/app user id, used for token accounting.
        args: The image prompt text.
        auto_enhancer: When True, the prompt is enhanced before generation.
    """

    user_id: int
    args: str
    auto_enhancer: bool = False
17
 
 
 
 
18
  router = APIRouter()
19
 
20
  load_dotenv()
@@ -35,6 +39,20 @@ async def schellwithflux(args):
35
  return None
36
  return response.content
37
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  def get_user_tokens_gpt(user_id):
39
  user = collection.find_one({"user_id": user_id})
40
  if not user:
@@ -52,6 +70,20 @@ def deduct_tokens_gpt(user_id, amount):
52
  else:
53
  return False
54
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
  @router.post("/akeno/fluxai", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
56
  async def fluxai_image(payload: FluxAI):
57
  if deduct_tokens_gpt(payload.user_id, amount=20):
 
9
  from pydantic import BaseModel
10
  from pymongo import MongoClient
11
  from models import *
12
+ from huggingface_hub import InferenceClient
13
 
14
class FluxAI(BaseModel):
    """Request body for the /akeno/fluxai image-generation endpoint.

    Attributes:
        user_id: Telegram/app user id, used for token accounting.
        args: The image prompt text.
        auto_enhancer: When True, the prompt is enhanced before generation.
    """

    user_id: int
    args: str
    auto_enhancer: bool = False
18
 
19
class MistralAI(BaseModel):
    """Request body for the /akeno/mistralai chat endpoint.

    Attributes:
        args: The user's prompt text forwarded to the model.
    """

    args: str
21
+
22
  router = APIRouter()
23
 
24
  load_dotenv()
 
39
  return None
40
  return response.content
41
 
42
async def mistralai_post_message(message_str):
    """Send one user message to Mixtral-8x7B-Instruct and return the reply.

    Streams the chat completion from the Hugging Face Inference API and
    concatenates the chunks into a single string.

    Args:
        message_str: The user's prompt text.

    Returns:
        The full generated reply as one string (may be empty).
    """
    # NOTE(review): InferenceClient is synchronous, so this blocking call
    # runs inside an async def and will stall the event loop while
    # streaming; consider huggingface_hub.AsyncInferenceClient.
    client = InferenceClient(
        "mistralai/Mixtral-8x7B-Instruct-v0.1",
        token=HUGGING_TOKEN,
    )
    output = ""
    for chunk in client.chat_completion(
        messages=[{"role": "user", "content": message_str}],
        max_tokens=500,
        stream=True,
    ):
        # Streamed chunks (notably the final one) may carry
        # delta.content == None; concatenating None raises TypeError.
        piece = chunk.choices[0].delta.content
        if piece:
            output += piece
    return output
55
+
56
  def get_user_tokens_gpt(user_id):
57
  user = collection.find_one({"user_id": user_id})
58
  if not user:
 
70
  else:
71
  return False
72
 
73
@router.post("/akeno/mistralai", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
async def mistralai_(payload: MistralAI):
    """Chat endpoint backed by Mixtral-8x7B-Instruct.

    Forwards ``payload.args`` to the model and wraps either the reply or
    the failure message in a SuccessResponse.
    """
    try:
        reply = await mistralai_post_message(payload.args)
        return SuccessResponse(status="True", randydev={"message": reply})
    except Exception as e:
        # Boundary handler: report the failure in-band instead of a 500.
        return SuccessResponse(status="False", randydev={"error": f"An error occurred: {str(e)}"})
86
+
87
  @router.post("/akeno/fluxai", response_model=SuccessResponse, responses={422: {"model": SuccessResponse}})
88
  async def fluxai_image(payload: FluxAI):
89
  if deduct_tokens_gpt(payload.user_id, amount=20):