runs black in repo
- responses.py +15 -6
- server.py +1 -1
- utils.py +9 -9
- validators/sn1_validator_wrapper.py +30 -30
responses.py CHANGED

@@ -1,12 +1,21 @@
 from pydantic import BaseModel, Field
 from typing import List, Dict, Any
 
+
 class TextStreamResponse(BaseModel):
-    streamed_chunks: List[str] = Field(default_factory=list, description="List of streamed chunks.")
-    streamed_chunks_timings: List[float] = Field(default_factory=list, description="List of streamed chunks timings, in seconds.")
+    streamed_chunks: List[str] = Field(
+        default_factory=list, description="List of streamed chunks."
+    )
+    streamed_chunks_timings: List[float] = Field(
+        default_factory=list, description="List of streamed chunks timings, in seconds."
+    )
     uid: int = Field(0, description="UID of queried miner")
-    completion: str = Field("", description="The final completed string from the stream.")
-    timing: float = Field(0, description="Timing information of all request, in seconds.")
+    completion: str = Field(
+        "", description="The final completed string from the stream."
+    )
+    timing: float = Field(
+        0, description="Timing information of all request, in seconds."
+    )
 
     def to_dict(self):
         return {
@@ -14,5 +23,5 @@ class TextStreamResponse(BaseModel):
             "streamed_chunks_timings": self.streamed_chunks_timings,
             "uid": self.uid,
             "completion": self.completion,
-            "timing": self.timing
-        }
+            "timing": self.timing,
+        }
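For context, the reformatted model can be used as in the minimal sketch below. The field names and defaults come from the hunk above; the import path is an assumption (it presumes responses.py is importable as a top-level module).

from responses import TextStreamResponse  # assumed import path

resp = TextStreamResponse(
    streamed_chunks=["Hello ", "world "],
    streamed_chunks_timings=[0.12, 0.25],
    uid=42,
    completion="Hello world",
    timing=0.25,
)
payload = resp.to_dict()  # plain dict, ready for json.dumps(payload)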
server.py CHANGED

@@ -54,7 +54,7 @@ async def chat(request: web.Request) -> web.StreamResponse:
     return response
 
 
-async def echo_stream(request: web.Request) -> web.StreamResponse:
+async def echo_stream(request: web.Request) -> web.StreamResponse:
     return await utils.echo_stream(request)
 
 
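The handler itself only delegates to utils.echo_stream. Its web.Request / web.StreamResponse annotations point at aiohttp; a hypothetical sketch of how such a handler might be registered (the app setup and the "/echo" path are assumptions, not taken from server.py):

from aiohttp import web

import utils

async def echo_stream(request: web.Request) -> web.StreamResponse:
    return await utils.echo_stream(request)

app = web.Application()
app.add_routes([web.post("/echo", echo_stream)])  # hypothetical route; the real path lives elsewhere in server.py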
utils.py CHANGED

@@ -138,7 +138,7 @@ def guess_task_name(challenge: str):
 
 async def echo_stream(request: web.Request) -> web.StreamResponse:
     request_data = request["data"]
-    k = request_data.get("k", 1)
+    k = request_data.get("k", 1)
     message = "\n\n".join(request_data["messages"])
 
     # Create a StreamResponse
@@ -154,24 +154,24 @@ async def echo_stream(request: web.Request) -> web.StreamResponse:
     # Echo the message k times with a timeout between each chunk
     for _ in range(k):
         for word in message.split():
-            chunk = f"{word} "
+            chunk = f"{word} "
             await response.write(chunk.encode("utf-8"))
             completion += chunk
-            await asyncio.sleep(.3)
+            await asyncio.sleep(0.3)
             bt.logging.info(f"Echoed: {chunk}")
-
+
         chunks.append(chunk)
         chunks_timings.append(time.time() - start_time)
-
+
     completion = completion.strip()
 
-    # Prepare final JSON chunk
+    # Prepare final JSON chunk
     response_data = TextStreamResponse(
-        streamed_chunks=chunks,
+        streamed_chunks=chunks,
         streamed_chunks_timings=chunks_timings,
         completion=completion,
-        timing=time.time() - start_time
-    ).to_dict()
+        timing=time.time() - start_time,
+    ).to_dict()
 
     # Send the final JSON as part of the stream
     await response.write(json.dumps(response_data).encode("utf-8"))
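As the second hunk shows, echo_stream writes the echoed words as raw UTF-8 chunks and then appends one JSON object built from TextStreamResponse.to_dict(). A rough client-side sketch follows, assuming the handler is reachable over HTTP and that request["data"] is filled from the JSON body by middleware; splitting on the last "{" to recover the JSON tail is only a heuristic.

import json

import aiohttp

async def read_echo(url: str) -> dict:
    async with aiohttp.ClientSession() as session:
        # "k" and "messages" mirror the keys echo_stream reads from request["data"].
        async with session.post(url, json={"k": 2, "messages": ["hello world"]}) as resp:
            raw = (await resp.read()).decode("utf-8")
    # The body looks like "<echoed words...>{...TextStreamResponse dict...}".
    _, _, tail = raw.rpartition("{")
    return json.loads("{" + tail)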
validators/sn1_validator_wrapper.py CHANGED

@@ -90,38 +90,36 @@ class S1ValidatorAPI(ValidatorAPI):
     ) -> ProcessedStreamResponse:
         """Process a single response asynchronously."""
         # Initialize chunk with a default value
-        chunk = None
+        chunk = None
         # Initialize chunk array to accumulate streamed chunks
         chunks = []
         chunks_timings = []
-
+
         start_time = time.time()
         last_sent_index = 0
-        async for chunk in async_generator:
+        async for chunk in async_generator:
             if isinstance(chunk, list):
-                # Chunks are currently returned in string arrays, so we need to concatenate them
-                concatenated_chunks = "".join(chunk)
+                # Chunks are currently returned in string arrays, so we need to concatenate them
+                concatenated_chunks = "".join(chunk)
                 new_data = concatenated_chunks[last_sent_index:]
-
+
                 if new_data:
-                    await response.write(new_data.encode(
-                    bt.logging.info(f"Received new chunk from miner: {chunk}")
+                    await response.write(new_data.encode("utf-8"))
+                    bt.logging.info(f"Received new chunk from miner: {chunk}")
                     last_sent_index += len(new_data)
                     chunks.extend(chunk)
-                    chunks_timings.append(time.time() - start_time)
-
+                    chunks_timings.append(time.time() - start_time)
+
         if chunk is not None and isinstance(chunk, StreamPromptingSynapse):
-            # Assuming the last chunk holds the last value yielded which should be a synapse with the completion filled
+            # Assuming the last chunk holds the last value yielded which should be a synapse with the completion filled
             return ProcessedStreamResponse(
                 synapse=chunk,
                 streamed_chunks=chunks,
-                streamed_chunks_timings=chunks_timings
-            )
+                streamed_chunks_timings=chunks_timings,
+            )
         else:
             raise ValueError("The last chunkis not a StreamPrompting synapse")
 
-
-
     async def get_stream_response(self, params: QueryValidatorParams) -> StreamResponse:
         response = StreamResponse(status=200, reason="OK")
         response.headers["Content-Type"] = "application/json"
@@ -141,7 +139,7 @@ class S1ValidatorAPI(ValidatorAPI):
             # Make calls to the network with the prompt.
             bt.logging.info(f"Calling dendrite")
             start_time = time.time()
-
+
             streams_responses = await self.validator.dendrite(
                 axons=axons,
                 synapse=StreamPromptingSynapse(
@@ -151,21 +149,23 @@ class S1ValidatorAPI(ValidatorAPI):
                 deserialize=False,
                 streaming=True,
             )
-
-            uid_stream_dict = dict(zip(uids, streams_responses))
-
-            random_uid, random_stream = random.choice(list(uid_stream_dict.items()))
+
+            uid_stream_dict = dict(zip(uids, streams_responses))
+
+            random_uid, random_stream = random.choice(list(uid_stream_dict.items()))
             processed_response = await self.process_response(response, random_stream)
-
+
             # Prepare final JSON chunk
-            response_data = json.dumps(
+            response_data = json.dumps(
+                TextStreamResponse(
+                    streamed_chunks=processed_response.streamed_chunks,
+                    streamed_chunks_timings=processed_response.streamed_chunks_timings,
+                    uid=random_uid,
+                    completion=processed_response.synapse.completion,
+                    timing=time.time() - start_time,
+                ).to_dict()
+            )
+
             # Send the final JSON as part of the stream
             await response.write(json.dumps(response_data).encode("utf-8"))
         except Exception as e:
@@ -179,5 +179,5 @@ class S1ValidatorAPI(ValidatorAPI):
 
         return response
 
-    async def query_validator(self, params: QueryValidatorParams) -> Response:
+    async def query_validator(self, params: QueryValidatorParams) -> Response:
         return await self.get_stream_response(params)
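One detail worth noting in the last hunk: response_data is already a JSON string (json.dumps(TextStreamResponse(...).to_dict())), and it is passed through json.dumps a second time before being written to the stream. Under that reading of the code, a consumer of the final chunk would have to decode twice, as in this minimal sketch:

import json

def decode_final_chunk(raw_tail: bytes) -> dict:
    inner = json.loads(raw_tail.decode("utf-8"))  # first pass: JSON string literal -> str
    return json.loads(inner)                      # second pass: str -> dict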