pwilczewski committed
Commit 819a20e · 1 Parent(s): fb4a2eb
no inputs?
app.py CHANGED
@@ -145,26 +145,11 @@ workflow.add_edge("ARIMA", END)
 
 graph = workflow.compile()
 
-
-
-# Initialize the LangGraph client
-client = get_client(url="https://huggingface.co/spaces/pwilczewski/gradiobox")
-assistant_id = "graph"
-
-async def stream_response(input_data):
-    thread = await client.threads.create()
-    async for chunk in client.runs.stream(
-        thread["thread_id"],
-        assistant_id,
-        input=input_data,
-        stream_mode="updates"
-    ):
-        yield chunk.data  # Yield the data as it is received
-
+# can I parse this output?
 def gradio_interface(input_text):
-    #
-
-    return
+    # input_data = {"messages": [HumanMessage(content="Run the analysis")]}
+    resp = graph.invoke({"messages": [HumanMessage(content="Run the analysis")]})  # debug=True
+    return resp
 
-demo = gr.Interface(fn=gradio_interface, inputs=
+demo = gr.Interface(fn=gradio_interface, inputs=[], outputs="text")
 demo.launch()
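The new gradio_interface returns the raw graph.invoke result, which for a LangGraph state graph is a dict of channel values rather than a string, so the "text" output will just display its repr. A minimal sketch of how the final message text could be pulled out for display, assuming (as the invoke call above implies) that the state keeps LangChain messages under a "messages" key; this helper is illustrative and not part of the commit:

# Sketch only (not part of this commit): extract a displayable string from the
# state dict returned by graph.invoke(), assuming messages live under "messages".
from langchain_core.messages import HumanMessage

def gradio_interface(input_text):
    resp = graph.invoke({"messages": [HumanMessage(content="Run the analysis")]})
    messages = resp.get("messages", [])
    # Return the last message's text so the "text" output gets a plain string,
    # falling back to the raw repr if the key is absent.
    return messages[-1].content if messages else str(resp)

With something like this in place, gr.Interface(fn=gradio_interface, inputs=[], outputs="text") would render the final model message instead of the whole state dictionary.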