pwilczewski committed
Commit · 2f1a9ee
1 Parent(s): 762baf0
trying out streaming
app.py
CHANGED
@@ -145,9 +145,26 @@ workflow.add_edge("ARIMA", END)
 
 graph = workflow.compile()
 
-
-
-
-
-
-
+from langgraph_sdk import get_client
+
+# Initialize the LangGraph client
+client = get_client(url="https://huggingface.co/spaces/pwilczewski/gradiobox")
+assistant_id = "graph"
+
+async def stream_response(input_data):
+    thread = await client.threads.create()
+    async for chunk in client.runs.stream(
+        thread["thread_id"],
+        assistant_id,
+        input=input_data,
+        stream_mode="values"
+    ):
+        yield chunk.data  # Yield the data as it is received
+
+def gradio_interface(input_text):
+    # resp = graph.invoke({"messages": [HumanMessage(content="Run the analysis")]}) # debug=True
+    input_data = {"messages": [HumanMessage(content="Run the analysis")]}
+    return stream_response(input_data)
+
+demo = gr.Interface(fn=gradio_interface, inputs="text", outputs="text", live=True)
+demo.launch()
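Note on how the streamed output reaches the UI: as committed, gradio_interface is a plain function that returns the async generator produced by stream_response, so Gradio may render the generator object itself instead of streaming values. A minimal sketch of the more idiomatic wiring, assuming the same deployment URL, assistant_id, and chunk layout as in the commit, makes the Gradio handler an async generator function so each yielded chunk updates the output box:

import gradio as gr
from langchain_core.messages import HumanMessage
from langgraph_sdk import get_client

# Assumed to point at the same LangGraph deployment as the committed code.
client = get_client(url="https://huggingface.co/spaces/pwilczewski/gradiobox")
assistant_id = "graph"

async def gradio_interface(input_text):
    # An async generator function: Gradio detects this and streams each yield.
    thread = await client.threads.create()
    async for chunk in client.runs.stream(
        thread["thread_id"],
        assistant_id,
        input={"messages": [HumanMessage(content="Run the analysis")]},
        stream_mode="values",
    ):
        # In "values" mode, chunk.data is assumed to carry the latest graph state.
        yield str(chunk.data)

demo = gr.Interface(fn=gradio_interface, inputs="text", outputs="text", live=True)
demo.launch()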