from_dict_generate
App/Generate/generatorRoutes.py  +31 -16
App/Generate/generatorRoutes.py
CHANGED
@@ -1,6 +1,6 @@
 from fastapi import APIRouter, HTTPException, status, BackgroundTasks, UploadFile, Query
 from .Schema import GeneratorRequest, GeneratorBulkRequest
-from .utils.GroqInstruct import chatbot, VideoOutput
+from .utils.GroqInstruct import chatbot, VideoOutput, Scene
 from .utils.Cohere import chatbot as cohere_chat
 from .utils.HuggingChat import Hugging
 from .Story.Story import Story
@@ -18,23 +18,12 @@ async def update_scene(model_scene):
     await model_scene.update(**model_scene.__dict__)
 
 
-async def main(request: GeneratorRequest):
-
-
-    renderr = RenderVideo()
-    huggChat = Hugging()
-    if request.grok:
-        message = cohere_chat(Prompt.format(topic=topic), model=request.model)
+async def from_dict_generate(data):
+    generated_story = Story.from_dict(data)
+    await generate_assets(generated_story=generated_story)
 
-    else:
-        temp = await huggChat.chat(
-            Prompt.format(topic=topic)
-            + f"Match your response to the following schema: {VideoOutput.model_json_schema()} Make sure to return an instance of the JSON, not the schema itself, and nothing else."
-        )
-        message = temp
-    generated_story = Story.from_dict(message["scenes"])
 
-
+async def generate_assets(generated_story: Story, batch_size: int = 4):
     x = await Project.objects.create(name=str(uuid.uuid4()))
 
     # Assuming generated_story.scenes is a list of scenes
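The chat prompt in main() (removed above and re-added further down) is built around VideoOutput.model_json_schema(), and the import line now also pulls in Scene. A minimal sketch of what those Pydantic models might look like, assuming Pydantic v2; only the class names and the "scenes" key come from this diff, the per-scene fields are illustrative guesses:

    # Hypothetical sketch of the models assumed to live in .utils.GroqInstruct.
    # Only the names VideoOutput / Scene and the "scenes" key appear in the diff;
    # the per-scene fields below are guesses, not the project's real schema.
    from pydantic import BaseModel

    class Scene(BaseModel):
        narration: str      # assumed: text spoken over the scene
        image_prompt: str   # assumed: prompt used to render the visual

    class VideoOutput(BaseModel):
        scenes: list[Scene]  # main() reads message["scenes"] from the model reply

    # This is the schema string embedded in the chat prompt:
    schema_text = str(VideoOutput.model_json_schema())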
@@ -68,6 +57,26 @@ async def main(request: GeneratorRequest):
     await celery_task(video_task=request)
 
 
+async def main(request: GeneratorRequest):
+    topic = request.prompt
+    batch_size = request.batch_size
+    renderr = RenderVideo()
+    huggChat = Hugging()
+    if request.grok:
+        message = cohere_chat(Prompt.format(topic=topic), model=request.model)
+
+    else:
+        temp = await huggChat.chat(
+            Prompt.format(topic=topic)
+            + f"Match your response to the following schema: {VideoOutput.model_json_schema()} Make sure to return an instance of the JSON, not the schema itself, and nothing else."
+        )
+        message = temp
+    generated_story = Story.from_dict(message["scenes"])
+
+    print("Generated Story ✅")
+    await generate_assets(generated_story=generated_story, batch_size=batch_size)
+
+
 async def bulkGenerate(bulkRequest: GeneratorBulkRequest):
     tasks = []
     for request in bulkRequest.stories:
@@ -87,6 +96,12 @@ async def generate_video(
     return {"task_id": "started"}
 
 
+@generator_router.post("/generate_video_from_json")
+async def generate_video_from_json(jsonReq: dict, background_task: BackgroundTasks):
+    background_task.add_task(from_dict_generate, jsonReq)
+    return {"task_id": "started"}
+
+
 @generator_router.post("/generate_video_bulk")
 async def generate_video_bulk(
     BulkvideoRequest: GeneratorBulkRequest, background_task: BackgroundTasks
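For context, a hedged client-side sketch of the new /generate_video_from_json route. The URL assumes the router is mounted without a prefix on a local dev server, and the payload keys are only a guess: the handler passes the request body straight to Story.from_dict, whereas main() passes that method the "scenes" list from the model output. The response value is grounded in the handler, which schedules from_dict_generate as a background task and returns immediately.

    # Hypothetical call; host, port, and payload keys are assumptions.
    import httpx

    payload = {
        "scenes": [
            {"narration": "Opening line", "image_prompt": "a sunrise over a city"}
        ]
    }

    resp = httpx.post("http://localhost:8000/generate_video_from_json", json=payload)
    print(resp.json())  # {"task_id": "started"} -- work continues in the background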