Commit df1e6f4 · Parent(s): 1862b79
Author: pedroferreira

runs black
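This commit is a pure formatting pass: Black rewrites the files below without changing behavior — wrapping long calls at 88 columns, normalizing string quotes to double quotes, adding trailing commas to multi-line literals, and fixing blank-line counts. For a feel of the transformation, here is a small sketch using Black's Python API (assuming a recent Black release where black.Mode is available; the commit itself would have used the CLI):

    import black

    # Messy input in the style Black fixes below: odd spacing, single quotes.
    src = "def f( a ):\n    return { 'x':a }\n"

    # black.format_str applies the same rules as the `black` CLI:
    # double quotes, normalized whitespace, 88-column wrapping.
    print(black.format_str(src, mode=black.Mode()))
    # def f(a):
    #     return {"x": a}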
common/utils.py CHANGED
@@ -153,7 +153,6 @@ async def echo_stream(request: web.Request) -> web.StreamResponse:
     k = request_data.get("k", 1)
     message = "\n\n".join(request_data["messages"])
 
-
     echo_iterator = EchoAsyncIterator(message, k, delay=0.3)
     streamer = AsyncResponseDataStreamer(echo_iterator, selected_uid=0, delay=0.3)
 
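The hunk above only drops a blank line, but for orientation: echo_stream feeds an EchoAsyncIterator into an AsyncResponseDataStreamer. The iterator's implementation is not part of this diff; a plausible minimal sketch (names and behavior assumed, not taken from the repo) is:

    import asyncio


    class EchoAsyncIterator:
        # Hypothetical sketch: yield `message` `k` times, pausing `delay`
        # seconds between yields to imitate a token-by-token stream.
        def __init__(self, message: str, k: int, delay: float = 0.3):
            self.message, self.k, self.delay = message, k, delay

        def __aiter__(self):
            return self._stream()

        async def _stream(self):
            for _ in range(self.k):
                await asyncio.sleep(self.delay)
                yield self.message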
server.py CHANGED
@@ -2,7 +2,13 @@ import asyncio
 
 import bittensor as bt
 from aiohttp import web
-from aiohttp_apispec import docs, request_schema, response_schema, setup_aiohttp_apispec, validation_middleware
+from aiohttp_apispec import (
+    docs,
+    request_schema,
+    response_schema,
+    setup_aiohttp_apispec,
+    validation_middleware,
+)
 
 from common import utils
 from common.middlewares import api_key_middleware, json_parsing_middleware
@@ -10,11 +16,7 @@ from common.schemas import QueryChatSchema, StreamChunkSchema, StreamErrorSchema
 from validators import QueryValidatorParams, S1ValidatorAPI, ValidatorAPI
 
 
-@docs(
-    tags=["Prompting API"],
-    summary="Chat",
-    description="Chat endpoint."
-)
+@docs(tags=["Prompting API"], summary="Chat", description="Chat endpoint.")
 @request_schema(QueryChatSchema)
 @response_schema(StreamChunkSchema, 200)
 @response_schema(StreamErrorSchema, 400)
@@ -32,7 +34,7 @@ async def chat(request: web.Request) -> web.StreamResponse:
 @docs(
     tags=["Prompting API"],
     summary="Echo test",
-    description="Echo endpoint for testing purposes."
+    description="Echo endpoint for testing purposes.",
 )
 @request_schema(QueryChatSchema)
 @response_schema(StreamChunkSchema, 200)
@@ -45,7 +47,9 @@ class ValidatorApplication(web.Application):
     def __init__(self, validator_instance=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
-        self["validator"] = validator_instance if validator_instance else S1ValidatorAPI()
+        self["validator"] = (
+            validator_instance if validator_instance else S1ValidatorAPI()
+        )
 
         # Add middlewares to application
         self.add_routes(
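server.py leans on aiohttp_apispec for documentation and request validation; the import reflow above is the only change here, but the moving parts fit together roughly like this (a minimal, self-contained sketch with an illustrative endpoint, not this repo's routes):

    from aiohttp import web
    from aiohttp_apispec import docs, setup_aiohttp_apispec, validation_middleware


    @docs(tags=["Example"], summary="Ping", description="Health-check endpoint.")
    async def ping(request: web.Request) -> web.Response:
        return web.json_response({"status": "ok"})


    app = web.Application()
    app.add_routes([web.get("/ping", ping)])
    # Generate the OpenAPI document and serve an interactive UI at /docs.
    setup_aiohttp_apispec(app=app, title="Example API", version="v1", swagger_path="/docs")
    # Validate incoming requests against the schemas declared on each handler.
    app.middlewares.append(validation_middleware)

    web.run_app(app)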
validators/__init__.py CHANGED
@@ -1,4 +1,4 @@
 from .base import QueryValidatorParams, ValidatorAPI, MockValidator
 from .sn1_validator_wrapper import S1ValidatorAPI
 from .streamer import AsyncResponseDataStreamer
-from .stream_manager import StreamManager
\ No newline at end of file
+from .stream_manager import StreamManager
validators/database.py CHANGED
@@ -8,30 +8,32 @@ from .streamer import ProcessedStreamResponse
 class LogDatabase:
     def __init__(self, log_database_path: str):
         self.log_database_path = log_database_path
         self.ensure_db_exists(log_database_path)
-
 
     def ensure_db_exists(self, file_path):
         if not os.path.exists(file_path):
             # Create an empty JSONL file
-            with open(file_path, 'w') as file:
+            with open(file_path, "w") as file:
                 pass
             # TODO: change log to debug
             bt.logging.info(f"File '{file_path}' created.")
         else:
             bt.logging.info(f"File '{file_path}' already exists.")
 
-
     async def add_streams_to_db(self, stream_responses: ProcessedStreamResponse):
         bt.logging.info(f"Writing streams to the database...")
         try:
-            stream_responses_dict = [dict(stream_response) for stream_response in stream_responses]
-            await self.append_dicts_to_file(self.log_database_path, stream_responses_dict)
+            stream_responses_dict = [
+                dict(stream_response) for stream_response in stream_responses
+            ]
+            await self.append_dicts_to_file(
+                self.log_database_path, stream_responses_dict
+            )
         except Exception as e:
             bt.logging.error(f"Error while adding streams to the database: {e}")
             raise e
 
     async def append_dicts_to_file(self, file_path, dictionaries):
-        async with aiofiles.open(file_path, mode='a') as file:
+        async with aiofiles.open(file_path, mode="a") as file:
             for dictionary in dictionaries:
-                await file.write(json.dumps(dictionary) + '\n')
+                await file.write(json.dumps(dictionary) + "\n")
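Since LogDatabase appends one JSON object per line (JSONL), records can be recovered by streaming the file back line by line. A hypothetical companion reader to append_dicts_to_file (not part of this commit):

    import json

    import aiofiles


    async def read_dicts_from_file(file_path: str) -> list:
        # Mirror image of append_dicts_to_file: parse each non-empty line as JSON.
        records = []
        async with aiofiles.open(file_path, mode="r") as file:
            async for line in file:
                if line.strip():
                    records.append(json.loads(line))
        return records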
validators/sn1_validator_wrapper.py CHANGED
@@ -10,11 +10,12 @@ from .streamer import AsyncResponseDataStreamer
 from .validator_utils import get_top_incentive_uids
 from .stream_manager import StreamManager
 
+
 class S1ValidatorAPI(ValidatorAPI):
     def __init__(self):
         self.validator = Validator()
 
     def sample_uids(self, params: QueryValidatorParams):
         if params.sampling_mode == "random":
             uids = get_random_uids(
                 self.validator, k=params.k_miners, exclude=params.exclude or []
@@ -23,9 +24,11 @@ class S1ValidatorAPI(ValidatorAPI):
         if params.sampling_mode == "top_incentive":
             metagraph = self.validator.metagraph
             vpermit_tao_limit = self.validator.config.neuron.vpermit_tao_limit
-
-            top_uids = get_top_incentive_uids(metagraph, k=params.k_miners, vpermit_tao_limit=vpermit_tao_limit)
-
+
+            top_uids = get_top_incentive_uids(
+                metagraph, k=params.k_miners, vpermit_tao_limit=vpermit_tao_limit
+            )
+
         return top_uids
 
     async def get_stream_response(self, params: QueryValidatorParams) -> StreamResponse:
@@ -33,11 +36,13 @@ class S1ValidatorAPI(ValidatorAPI):
         # task_name = utils.guess_task_name(params.messages[-1])
 
         # Get the list of uids to query for this step.
         uids = self.sample_uids(params)
         axons = [self.validator.metagraph.axons[uid] for uid in uids]
 
         # Make calls to the network with the prompt.
-        bt.logging.info(f"Sampling dendrite by {params.sampling_mode} with roles {params.roles} and messages {params.messages}")
+        bt.logging.info(
+            f"Sampling dendrite by {params.sampling_mode} with roles {params.roles} and messages {params.messages}"
+        )
 
         streams_responses = await self.validator.dendrite(
             axons=axons,
@@ -48,13 +53,14 @@ class S1ValidatorAPI(ValidatorAPI):
             deserialize=False,
             streaming=True,
         )
-
+
         # Creates a streamer from the selected stream
         stream_manager = StreamManager()
-        selected_stream = await stream_manager.process_streams(params.request, streams_responses, uids)
-
-        return selected_stream
+        selected_stream = await stream_manager.process_streams(
+            params.request, streams_responses, uids
+        )
 
+        return selected_stream
 
     async def query_validator(self, params: QueryValidatorParams) -> Response:
         return await self.get_stream_response(params)
validators/stream_manager.py CHANGED
@@ -9,20 +9,37 @@ from aiohttp.web import Request
 class StreamManager:
     def __init__(self, log_database_path: str = "requests_db.jsonl"):
         self.log_database = LogDatabase(log_database_path)
-
-    async def process_streams(self, request:Request, streams_responses: List[AsyncIterator], stream_uids: List[int]):
+
+    async def process_streams(
+        self,
+        request: Request,
+        streams_responses: List[AsyncIterator],
+        stream_uids: List[int],
+    ):
         lock = asyncio.Lock()
-
-        streamers = [AsyncResponseDataStreamer(async_iterator=stream, selected_uid=stream_uid, lock=lock) for stream, stream_uid in zip(streams_responses, stream_uids)]
-        completed_streams = await asyncio.gather(*[streamer.stream(request) for streamer in streamers])
-
+
+        streamers = [
+            AsyncResponseDataStreamer(
+                async_iterator=stream, selected_uid=stream_uid, lock=lock
+            )
+            for stream, stream_uid in zip(streams_responses, stream_uids)
+        ]
+        completed_streams = await asyncio.gather(
+            *[streamer.stream(request) for streamer in streamers]
+        )
+
         lock.release()
         bt.logging.info(f"Streams from uids: {stream_uids} processing completed.")
-
 
         await self.log_database.add_streams_to_db(completed_streams)
         # Gets the first stream that acquired the lock, meaning the first stream that was able to return a non-empty chunk
-        _, selected_stream = next(((streamer, completed_stream) for streamer, completed_stream in zip(streamers, completed_streams) if streamer.lock_acquired), None)
-
-        return selected_stream
-
+        _, selected_stream = next(
+            (
+                (streamer, completed_stream)
+                for streamer, completed_stream in zip(streamers, completed_streams)
+                if streamer.lock_acquired
+            ),
+            None,
+        )
+
+        return selected_stream
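process_streams races one streamer per miner and keeps only the first that manages to acquire the shared lock, i.e. the first to produce a chunk; the rest run to completion but never write to the client. The pattern in isolation (a self-contained sketch, not the repo's code):

    import asyncio


    async def racer(name: str, delay: float, lock: asyncio.Lock, log: list):
        await asyncio.sleep(delay)  # stand-in for waiting on a first chunk
        if not lock.locked():  # only the first finisher gets to "write"
            await lock.acquire()
            log.append(name)


    async def main():
        lock, log = asyncio.Lock(), []
        await asyncio.gather(
            *(racer(f"uid-{i}", 0.1 * (i + 1), lock, log) for i in range(3))
        )
        lock.release()
        print(log)  # ['uid-0'] — the slower racers saw a held lock and skipped


    asyncio.run(main())

In CPython's asyncio, Lock.acquire returns without suspending when the lock is free, so the locked()-then-acquire sequence does not yield to other tasks in between; that is what makes this check-then-act pattern workable on a single event loop.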
validators/streamer.py CHANGED
@@ -18,7 +18,7 @@ class StreamChunk(BaseModel):
     timestamp: str
     sequence_number: int
     selected_uid: int
-
+
     def encode(self, encoding: str) -> bytes:
         data = json.dumps(self.dict(), indent=4)
         return data.encode(encoding)
@@ -28,17 +28,25 @@ class StreamError(BaseModel):
     error: str
     timestamp: str
     sequence_number: int
-    finish_reason: str = 'error'
+    finish_reason: str = "error"
 
     def encode(self, encoding: str) -> bytes:
         data = json.dumps(self.dict(), indent=4)
         return data.encode(encoding)
 
+
 ProcessedStreamResponse = Union[StreamChunk, StreamError]
 
+
 class AsyncResponseDataStreamer:
-    def __init__(self, async_iterator: AsyncIterator, selected_uid:int, lock: asyncio.Lock, delay: float = 0.1):
-        self.async_iterator = async_iterator
+    def __init__(
+        self,
+        async_iterator: AsyncIterator,
+        selected_uid: int,
+        lock: asyncio.Lock,
+        delay: float = 0.1,
+    ):
+        self.async_iterator = async_iterator
         self.delay = delay
         self.selected_uid = selected_uid
         self.accumulated_chunks: List[str] = []
@@ -47,36 +55,43 @@ class AsyncResponseDataStreamer:
         self.sequence_number: int = 0
         self.lock = lock
         self.lock_acquired = False
-
-
-    def ensure_response_is_created(self, initiated_response: web.StreamResponse) -> web.StreamResponse:
+
+    def ensure_response_is_created(
+        self, initiated_response: web.StreamResponse
+    ) -> web.StreamResponse:
         # Creates response if it was not created
         if initiated_response == None:
             initiated_response = web_response.StreamResponse(status=200, reason="OK")
             initiated_response.headers["Content-Type"] = "application/json"
             return initiated_response
-
+
         return initiated_response
-
-
-    async def write_to_stream(self, request: web.Request, initiated_response: web.StreamResponse, stream_chunk: StreamChunk, lock: asyncio.Lock) -> web.StreamResponse:
-        # Try to acquire the lock and sets the lock_acquired flag. Only the stream that acquires the lock should write to the response
-        if lock.locked() == False:
+
+    async def write_to_stream(
+        self,
+        request: web.Request,
+        initiated_response: web.StreamResponse,
+        stream_chunk: StreamChunk,
+        lock: asyncio.Lock,
+    ) -> web.StreamResponse:
+        # Try to acquire the lock and sets the lock_acquired flag. Only the stream that acquires the lock should write to the response
+        if lock.locked() == False:
            self.lock_acquired = await lock.acquire()
-
+
        if initiated_response == None and self.lock_acquired:
            initiated_response = self.ensure_response_is_created(initiated_response)
            # Prepare and send the headers
            await initiated_response.prepare(request)
-
+
        if self.lock_acquired:
-            await initiated_response.write(stream_chunk.encode('utf-8'))
+            await initiated_response.write(stream_chunk.encode("utf-8"))
        else:
-            bt.logging.info(f"Stream of uid {stream_chunk.selected_uid} was not the first to return, skipping...")
-
+            bt.logging.info(
+                f"Stream of uid {stream_chunk.selected_uid} was not the first to return, skipping..."
+            )
+
        return initiated_response
-
-
+
     async def stream(self, request: web.Request) -> ProcessedStreamResponse:
        # response = web_response.StreamResponse(status=200, reason="OK")
        # response.headers["Content-Type"] = "application/json"
@@ -86,55 +101,61 @@ class AsyncResponseDataStreamer:
             start_time = time.time()
             client_response: web.Response = None
             final_response: ProcessedStreamResponse
 
             async for chunk in self.async_iterator:
                 if isinstance(chunk, str):
                     # Chunks are currently returned in string arrays, so we need to concatenate them
                     concatenated_chunks = "".join(chunk)
                     self.accumulated_chunks.append(concatenated_chunks)
                     self.accumulated_chunks_timings.append(time.time() - start_time)
                     # Gets new response state
                     self.sequence_number += 1
-                    new_response_state = self._create_chunk_response(concatenated_chunks)
+                    new_response_state = self._create_chunk_response(
+                        concatenated_chunks
+                    )
                     # Writes the new response state to the response
-                    client_response = await self.write_to_stream(request, client_response, new_response_state, self.lock)
-                    #await response.write(new_response_state.encode('utf-8'))
+                    client_response = await self.write_to_stream(
+                        request, client_response, new_response_state, self.lock
+                    )
+                    # await response.write(new_response_state.encode('utf-8'))
 
             if chunk is not None and isinstance(chunk, StreamPromptingSynapse):
                 if len(self.accumulated_chunks) == 0:
                     self.accumulated_chunks.append(chunk.completion)
                     self.accumulated_chunks_timings.append(time.time() - start_time)
 
                 self.finish_reason = "completed"
                 self.sequence_number += 1
                 # Assuming the last chunk holds the last value yielded which should be a synapse with the completion filled
                 synapse = chunk
                 final_response = self._create_chunk_response(synapse.completion)
 
                 if synapse.completion:
-                    client_response = await self.write_to_stream(request, client_response, final_response, self.lock)
+                    client_response = await self.write_to_stream(
+                        request, client_response, final_response, self.lock
+                    )
             else:
                 raise ValueError("Stream did not return a valid synapse.")
 
         except Exception as e:
             bt.logging.error(
                 f"Encountered an error while processing stream for uid {self.selected_uid} get_stream_response:\n{traceback.format_exc()}"
             )
             error_response = self._create_error_response(str(e))
             final_response = error_response
 
             # Only the stream that acquires the lock should write the error response
             if self.lock_acquired:
                 self.ensure_response_is_created(client_response)
                 client_response.set_status(500, reason="Internal error")
-                client_response.write(error_response.encode('utf-8'))
+                client_response.write(error_response.encode("utf-8"))
         finally:
             # Only the stream that acquires the lock should close the response
             if self.lock_acquired:
                 self.ensure_response_is_created(client_response)
                 # Ensure to close the response properly
                 await client_response.write_eof()
-
+
         return final_response
 
     def _create_chunk_response(self, chunk: str) -> StreamChunk:
@@ -151,7 +172,7 @@ class AsyncResponseDataStreamer:
             accumulated_chunks_timings=self.accumulated_chunks_timings,
             timestamp=self._current_timestamp(),
             sequence_number=self.sequence_number,
-            selected_uid=self.selected_uid
+            selected_uid=self.selected_uid,
         )
 
     def _create_error_response(self, error_message: str) -> StreamError:
@@ -164,7 +185,7 @@ class AsyncResponseDataStreamer:
         return StreamError(
             error=error_message,
             timestamp=self._current_timestamp(),
-            sequence_number=self.sequence_number
+            sequence_number=self.sequence_number,
         )
 
     def _current_timestamp(self) -> str:
@@ -173,4 +194,4 @@ class AsyncResponseDataStreamer:
 
         :return: Current timestamp as a string.
         """
-        return datetime.utcnow().isoformat()
\ No newline at end of file
+        return datetime.utcnow().isoformat()
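AsyncResponseDataStreamer follows aiohttp's streaming lifecycle: build a StreamResponse, prepare() it to flush headers, write() each chunk, and write_eof() to finish. Stripped of the locking and bookkeeping above, the bare lifecycle looks like this (a generic sketch, not the repo's handler):

    from aiohttp import web


    async def stream_handler(request: web.Request) -> web.StreamResponse:
        response = web.StreamResponse(status=200, reason="OK")
        response.headers["Content-Type"] = "application/json"
        await response.prepare(request)  # send status line and headers
        for chunk in (b'{"n": 1}\n', b'{"n": 2}\n'):
            await response.write(chunk)  # flush one chunk to the client
        await response.write_eof()  # terminate the body
        return response

Note that StreamResponse.write is a coroutine; the error path in the diff above calls client_response.write(error_response.encode("utf-8")) without await, producing a coroutine that never runs — Black leaves it untouched since it only reformats.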
validators/validator_utils.py CHANGED
@@ -3,21 +3,33 @@ from prompting.utils.uids import check_uid_availability
 
 
 def get_top_incentive_uids(metagraph, k: int, vpermit_tao_limit: int) -> List[int]:
-    miners_uids = list(map(int, filter(lambda uid: check_uid_availability(metagraph, uid, vpermit_tao_limit), metagraph.uids)))
-
+    miners_uids = list(
+        map(
+            int,
+            filter(
+                lambda uid: check_uid_availability(metagraph, uid, vpermit_tao_limit),
+                metagraph.uids,
+            ),
+        )
+    )
+
     # Builds a dictionary of uids and their corresponding incentives
     all_miners_incentives = {
         "miners_uids": miners_uids,
-        "incentives": list(map(lambda uid: metagraph.I[uid], miners_uids))
+        "incentives": list(map(lambda uid: metagraph.I[uid], miners_uids)),
     }
-
+
     # Zip the uids and their corresponding incentives into a list of tuples
-    uid_incentive_pairs = list(zip(all_miners_incentives['miners_uids'], all_miners_incentives['incentives']))
+    uid_incentive_pairs = list(
+        zip(all_miners_incentives["miners_uids"], all_miners_incentives["incentives"])
+    )
 
     # Sort the list of tuples by the incentive value in descending order
-    uid_incentive_pairs_sorted = sorted(uid_incentive_pairs, key=lambda x: x[1], reverse=True)
+    uid_incentive_pairs_sorted = sorted(
+        uid_incentive_pairs, key=lambda x: x[1], reverse=True
+    )
 
     # Extract the top 10 uids
     top_k_uids = [uid for uid, incentive in uid_incentive_pairs_sorted[:k]]
-
-    return top_k_uids
+
+    return top_k_uids
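Black wraps the nested map/filter chain in get_top_incentive_uids but cannot simplify it (note the "top 10" comment, which actually describes a top-k slice). For comparison, an equivalent comprehension-based formulation (a readability sketch, not what this commit does):

    from typing import List

    from prompting.utils.uids import check_uid_availability


    def get_top_incentive_uids(metagraph, k: int, vpermit_tao_limit: int) -> List[int]:
        # Same selection as above, written with comprehensions.
        miners_uids = [
            int(uid)
            for uid in metagraph.uids
            if check_uid_availability(metagraph, uid, vpermit_tao_limit)
        ]
        # Pair each uid with its incentive, sort descending, keep the top k.
        uid_incentive_pairs = [(uid, metagraph.I[uid]) for uid in miners_uids]
        uid_incentive_pairs.sort(key=lambda pair: pair[1], reverse=True)
        return [uid for uid, _ in uid_incentive_pairs[:k]]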