pedroferreira committed
Commit 9d1a999 · 1 Parent(s): 89b6d9e

sets random stream as default response

Files changed (2)
  1. server.py +2 -0
  2. validators/sn1_validator_wrapper.py +13 -28
server.py CHANGED
@@ -13,6 +13,8 @@ curl -X POST http://0.0.0.0:10000/chat/ -H "api_key: hello" -d '{"k": 5, "timeou
 
 curl -X POST http://0.0.0.0:10000/chat/ -H "api_key: hey-michal" -d '{"k": 5, "timeout": 3, "roles": ["user"], "messages": ["on what exact date did the 21st century begin?"]}'
 
+curl -X POST http://0.0.0.0:10000/chat/ -H "api_key: hey-michal" -d '{"k": 5, "timeout": 15, "roles": ["user"], "messages": ["who you really are?"]}'
+
 # stream
 curl --no-buffer -X POST http://129.146.127.82:10000/echo/ -H "api_key: hey-michal" -d '{"k": 3, "timeout": 0.2, "roles": ["user"], "messages": ["i need to tell you something important but first"]}'
 ```
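For reference, the same call can be made from Python. This is a minimal sketch, assuming only the /chat/ endpoint, the api_key header, and the JSON body shown in the curl examples above; aiohttp is used purely for illustration and is not part of this commit.

```python
import asyncio
import json

import aiohttp


async def main() -> None:
    # Same request body as the curl examples above.
    payload = {"k": 5, "timeout": 15, "roles": ["user"], "messages": ["who you really are?"]}

    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://0.0.0.0:10000/chat/",
            headers={"api_key": "hey-michal"},
            data=json.dumps(payload),  # raw JSON body, mirroring curl -d
        ) as resp:
            # With streaming as the default response, the body arrives
            # incrementally; read it chunk by chunk instead of waiting
            # for the whole reply.
            async for chunk in resp.content.iter_any():
                print(chunk.decode("utf-8"), end="", flush=True)


asyncio.run(main())
```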
validators/sn1_validator_wrapper.py CHANGED
@@ -3,6 +3,7 @@ import utils
 import torch
 import traceback
 import asyncio
+import random
 import bittensor as bt
 from typing import Awaitable
 from prompting.validator import Validator
@@ -75,27 +76,15 @@ class S1ValidatorAPI(ValidatorAPI):
            return Response(status=500, reason="Internal error")
 
    async def process_response(
-        self, response: StreamResponse, uid: int, async_generator: Awaitable
+        self, response: StreamResponse, async_generator: Awaitable
    ):
        """Process a single response asynchronously."""
-        try:
-            chunk = None  # Initialize chunk with a default value
-            async for chunk in async_generator:  # most important loop, as this is where we acquire the final synapse.
-                bt.logging.debug(f"\nchunk for uid {uid}: {chunk}")
-
-            # TODO: SET PROPER IMPLEMENTATION TO RETURN CHUNK
-            if chunk is not None:
-                json_data = json.dumps(chunk)
-                await response.write(json_data.encode("utf-8"))
+        chunk = None  # Initialize chunk with a default value
+        async for chunk in async_generator:
+            if chunk is not None and hasattr(chunk, 'completion'):
+                # Directly write the string encoded as UTF-8 bytes
+                await response.write(chunk.completion.encode('utf-8'))
 
-        except Exception as e:
-            bt.logging.error(
-                f"Encountered an error in {self.__class__.__name__}:get_stream_response:\n{traceback.format_exc()}"
-            )
-            response.set_status(500, reason="Internal error")
-            await response.write(json.dumps({"error": str(e)}).encode("utf-8"))
-        finally:
-            await response.write_eof()  # Ensure to close the response properly
 
    async def get_stream_response(self, params: QueryValidatorParams) -> StreamResponse:
        response = StreamResponse(status=200, reason="OK")
@@ -114,7 +103,8 @@ class S1ValidatorAPI(ValidatorAPI):
            axons = [self.validator.metagraph.axons[uid] for uid in uids]
 
            # Make calls to the network with the prompt.
-            bt.logging.info(f"Calling dendrite")
+            bt.logging.info(f"Calling dendrite")
+
            streams_responses = await self.validator.dendrite(
                axons=axons,
                synapse=StreamPromptingSynapse(
@@ -124,14 +114,9 @@ class S1ValidatorAPI(ValidatorAPI):
                deserialize=False,
                streaming=True,
            )
-
-            tasks = [
-                self.process_response(uid, res)
-                for uid, res in dict(zip(uids, streams_responses))
-            ]
-            results = await asyncio.gather(*tasks, return_exceptions=True)
-
-            # TODO: Continue implementation, business decision needs to be made on how to handle the results
+
+            random_stream = random.choice(streams_responses)
+            await self.process_response(response, random_stream)
        except Exception as e:
            bt.logging.error(
                f"Encountered an error in {self.__class__.__name__}:get_stream_response:\n{traceback.format_exc()}"
@@ -145,7 +130,7 @@ class S1ValidatorAPI(ValidatorAPI):
 
    async def query_validator(self, params: QueryValidatorParams) -> Response:
        # TODO: SET STREAM AS DEFAULT
-        stream = params.request.get("stream", False)
+        stream = params.request.get("stream", True)
 
        if stream:
            return await self.get_stream_response(params)
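Pulled out of the hunks above, the new streaming path reduces to the pattern below: instead of gathering every miner stream with asyncio.gather, one stream is chosen at random and its completion chunks are relayed to the client as they arrive. This is a condensed sketch using the names from the diff; the dendrite call arguments and the surrounding error handling are elided.

```python
import random

from aiohttp.web import StreamResponse


async def relay_random_stream(response: StreamResponse, streams_responses: list) -> None:
    # New behaviour introduced by this commit: pick a single miner stream at
    # random rather than awaiting all of them.
    random_stream = random.choice(streams_responses)

    chunk = None  # default in case the generator yields nothing
    async for chunk in random_stream:
        # Each yielded chunk is expected to carry a `completion` string
        # (see process_response above); anything else is skipped.
        if chunk is not None and hasattr(chunk, "completion"):
            await response.write(chunk.completion.encode("utf-8"))
```

Combined with the `stream` default flipping to True in query_validator, a plain POST to /chat/ now returns this relayed stream unless the request explicitly passes "stream": false.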