body_hash | body | docstring | path | name | repository_name | repository_stars | lang | body_without_docstring | unified
---|---|---|---|---|---|---|---|---|---|
f80b7d14c22b08bbbd388ce161a47d7be622d888cf08cd0f0d2fc2c2d7b956a9
|
def delete_contact_window(self):
'Метод создающий окно удаления контакта.'
global remove_dialog
remove_dialog = DelContactDialog(self.database)
remove_dialog.btn_ok.clicked.connect((lambda : self.delete_contact(remove_dialog)))
remove_dialog.show()
|
Метод создающий окно удаления контакта.
|
Lib/site-packages/client/main_window.py
|
delete_contact_window
|
fochoao/cpython
| 0 |
python
|
def delete_contact_window(self):
global remove_dialog
remove_dialog = DelContactDialog(self.database)
remove_dialog.btn_ok.clicked.connect((lambda : self.delete_contact(remove_dialog)))
remove_dialog.show()
|
def delete_contact_window(self):
global remove_dialog
remove_dialog = DelContactDialog(self.database)
remove_dialog.btn_ok.clicked.connect((lambda : self.delete_contact(remove_dialog)))
remove_dialog.show()<|docstring|>Метод создающий окно удаления контакта.<|endoftext|>
|
cd3a80a9805843ff67b7d6aa14322f3b6cbb95555eb9fd1bff516995f90da909
|
def delete_contact(self, item):
'\n Метод удаляющий контакт из серверной и клиентсткой BD.\n После обновления баз данных обновляет и содержимое окна.\n '
selected = item.selector.currentText()
try:
self.transport.remove_contact(selected)
except ServerError as err:
self.messages.critical(self, 'Ошибка сервера', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
else:
self.database.del_contact(selected)
self.clients_list_update()
logger.info(f'Успешно удалён контакт {selected}')
self.messages.information(self, 'Успех', 'Контакт успешно удалён.')
item.close()
if (selected == self.current_chat):
self.current_chat = None
self.set_disabled_input()
|
Метод удаляющий контакт из серверной и клиентсткой BD.
После обновления баз данных обновляет и содержимое окна.
|
Lib/site-packages/client/main_window.py
|
delete_contact
|
fochoao/cpython
| 0 |
python
|
def delete_contact(self, item):
'\n Метод удаляющий контакт из серверной и клиентсткой BD.\n После обновления баз данных обновляет и содержимое окна.\n '
selected = item.selector.currentText()
try:
self.transport.remove_contact(selected)
except ServerError as err:
self.messages.critical(self, 'Ошибка сервера', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
else:
self.database.del_contact(selected)
self.clients_list_update()
logger.info(f'Успешно удалён контакт {selected}')
self.messages.information(self, 'Успех', 'Контакт успешно удалён.')
item.close()
if (selected == self.current_chat):
self.current_chat = None
self.set_disabled_input()
|
def delete_contact(self, item):
'\n Метод удаляющий контакт из серверной и клиентсткой BD.\n После обновления баз данных обновляет и содержимое окна.\n '
selected = item.selector.currentText()
try:
self.transport.remove_contact(selected)
except ServerError as err:
self.messages.critical(self, 'Ошибка сервера', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
else:
self.database.del_contact(selected)
self.clients_list_update()
logger.info(f'Успешно удалён контакт {selected}')
self.messages.information(self, 'Успех', 'Контакт успешно удалён.')
item.close()
if (selected == self.current_chat):
self.current_chat = None
self.set_disabled_input()<|docstring|>Метод удаляющий контакт из серверной и клиентсткой BD.
После обновления баз данных обновляет и содержимое окна.<|endoftext|>
|
6a61fa28c69b0de770f32a26e5fa413b9e21593bfbdede6a2302e72f527a3abd
|
def send_message(self):
'\n Функция отправки сообщения текущему собеседнику.\n Реализует шифрование сообщения и его отправку.\n '
message_text = self.ui.text_message.toPlainText()
self.ui.text_message.clear()
if (not message_text):
return
message_text_encrypted = self.encryptor.encrypt(message_text.encode('utf8'))
message_text_encrypted_base64 = base64.b64encode(message_text_encrypted)
try:
self.transport.send_message(self.current_chat, message_text_encrypted_base64.decode('ascii'))
pass
except ServerError as err:
self.messages.critical(self, 'Ошибка', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
except (ConnectionResetError, ConnectionAbortedError):
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
else:
self.database.save_message(self.current_chat, 'out', message_text)
logger.debug(f'Отправлено сообщение для {self.current_chat}: {message_text}')
self.history_list_update()
|
Функция отправки сообщения текущему собеседнику.
Реализует шифрование сообщения и его отправку.
|
Lib/site-packages/client/main_window.py
|
send_message
|
fochoao/cpython
| 0 |
python
|
def send_message(self):
'\n Функция отправки сообщения текущему собеседнику.\n Реализует шифрование сообщения и его отправку.\n '
message_text = self.ui.text_message.toPlainText()
self.ui.text_message.clear()
if (not message_text):
return
message_text_encrypted = self.encryptor.encrypt(message_text.encode('utf8'))
message_text_encrypted_base64 = base64.b64encode(message_text_encrypted)
try:
self.transport.send_message(self.current_chat, message_text_encrypted_base64.decode('ascii'))
pass
except ServerError as err:
self.messages.critical(self, 'Ошибка', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
except (ConnectionResetError, ConnectionAbortedError):
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
else:
self.database.save_message(self.current_chat, 'out', message_text)
logger.debug(f'Отправлено сообщение для {self.current_chat}: {message_text}')
self.history_list_update()
|
def send_message(self):
'\n Функция отправки сообщения текущему собеседнику.\n Реализует шифрование сообщения и его отправку.\n '
message_text = self.ui.text_message.toPlainText()
self.ui.text_message.clear()
if (not message_text):
return
message_text_encrypted = self.encryptor.encrypt(message_text.encode('utf8'))
message_text_encrypted_base64 = base64.b64encode(message_text_encrypted)
try:
self.transport.send_message(self.current_chat, message_text_encrypted_base64.decode('ascii'))
pass
except ServerError as err:
self.messages.critical(self, 'Ошибка', err.text)
except OSError as err:
if err.errno:
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
self.messages.critical(self, 'Ошибка', 'Таймаут соединения!')
except (ConnectionResetError, ConnectionAbortedError):
self.messages.critical(self, 'Ошибка', 'Потеряно соединение с сервером!')
self.close()
else:
self.database.save_message(self.current_chat, 'out', message_text)
logger.debug(f'Отправлено сообщение для {self.current_chat}: {message_text}')
self.history_list_update()<|docstring|>Функция отправки сообщения текущему собеседнику.
Реализует шифрование сообщения и его отправку.<|endoftext|>
|
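The send_message row above encrypts the text, Base64-encodes the ciphertext, and transmits it as ASCII; the message() slot later reverses the process. Below is a minimal sketch of that encode/decode round trip. The cipher here is a hypothetical XOR stand-in used only to keep the example self-contained; the actual self.encryptor / self.decrypter objects are not shown in these rows.

```python
import base64

# Stand-in cipher (hypothetical): XOR with a fixed byte, just so the example runs end to end.
class FakeCipher:
    def encrypt(self, data: bytes) -> bytes:
        return bytes(b ^ 0x42 for b in data)
    decrypt = encrypt  # XOR with the same key is its own inverse

cipher = FakeCipher()
wire = base64.b64encode(cipher.encrypt('hello'.encode('utf8'))).decode('ascii')  # what send_message transmits
plain = cipher.decrypt(base64.b64decode(wire)).decode('utf8')                    # what the message() slot recovers
print(plain)  # hello
```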
8c0ce8b7f367f86e2b8b0b0eea2e7a93eb0e505c3ccab1f185e26e5842fe82dc
|
@pyqtSlot(dict)
def message(self, message):
'\n Слот обработчик поступаемых сообщений, выполняет дешифровку\n поступаемых сообщений и их сохранение в истории сообщений.\n Запрашивает пользователя если пришло сообщение не от текущего\n собеседника. При необходимости меняет собеседника.\n '
encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
try:
decrypted_message = self.decrypter.decrypt(encrypted_message)
except (ValueError, TypeError):
self.messages.warning(self, 'Ошибка', 'Не удалось декодировать сообщение.')
return
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
sender = message[SENDER]
if (sender == self.current_chat):
self.history_list_update()
elif self.database.check_contact(sender):
if (self.messages.question(self, 'Новое сообщение', f'Получено новое сообщение от {sender}, открыть чат с ним?', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.current_chat = sender
self.set_active_user()
else:
print('NO')
if (self.messages.question(self, 'Новое сообщение', f'''Получено новое сообщение от {sender}.
Данного пользователя нет в вашем контакт-листе.
Добавить в контакты и открыть чат с ним?''', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.add_contact(sender)
self.current_chat = sender
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
self.set_active_user()
|
Слот обработчик поступаемых сообщений, выполняет дешифровку
поступаемых сообщений и их сохранение в истории сообщений.
Запрашивает пользователя если пришло сообщение не от текущего
собеседника. При необходимости меняет собеседника.
|
Lib/site-packages/client/main_window.py
|
message
|
fochoao/cpython
| 0 |
python
|
@pyqtSlot(dict)
def message(self, message):
'\n Слот обработчик поступаемых сообщений, выполняет дешифровку\n поступаемых сообщений и их сохранение в истории сообщений.\n Запрашивает пользователя если пришло сообщение не от текущего\n собеседника. При необходимости меняет собеседника.\n '
encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
try:
decrypted_message = self.decrypter.decrypt(encrypted_message)
except (ValueError, TypeError):
self.messages.warning(self, 'Ошибка', 'Не удалось декодировать сообщение.')
return
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
sender = message[SENDER]
if (sender == self.current_chat):
self.history_list_update()
elif self.database.check_contact(sender):
if (self.messages.question(self, 'Новое сообщение', f'Получено новое сообщение от {sender}, открыть чат с ним?', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.current_chat = sender
self.set_active_user()
else:
print('NO')
if (self.messages.question(self, 'Новое сообщение', f'''Получено новое сообщение от {sender}.
Данного пользователя нет в вашем контакт-листе.
Добавить в контакты и открыть чат с ним?''', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.add_contact(sender)
self.current_chat = sender
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
self.set_active_user()
|
@pyqtSlot(dict)
def message(self, message):
'\n Слот обработчик поступаемых сообщений, выполняет дешифровку\n поступаемых сообщений и их сохранение в истории сообщений.\n Запрашивает пользователя если пришло сообщение не от текущего\n собеседника. При необходимости меняет собеседника.\n '
encrypted_message = base64.b64decode(message[MESSAGE_TEXT])
try:
decrypted_message = self.decrypter.decrypt(encrypted_message)
except (ValueError, TypeError):
self.messages.warning(self, 'Ошибка', 'Не удалось декодировать сообщение.')
return
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
sender = message[SENDER]
if (sender == self.current_chat):
self.history_list_update()
elif self.database.check_contact(sender):
if (self.messages.question(self, 'Новое сообщение', f'Получено новое сообщение от {sender}, открыть чат с ним?', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.current_chat = sender
self.set_active_user()
else:
print('NO')
if (self.messages.question(self, 'Новое сообщение', f'''Получено новое сообщение от {sender}.
Данного пользователя нет в вашем контакт-листе.
Добавить в контакты и открыть чат с ним?''', QMessageBox.Yes, QMessageBox.No) == QMessageBox.Yes):
self.add_contact(sender)
self.current_chat = sender
self.database.save_message(self.current_chat, 'in', decrypted_message.decode('utf8'))
self.set_active_user()<|docstring|>Слот обработчик поступаемых сообщений, выполняет дешифровку
поступаемых сообщений и их сохранение в истории сообщений.
Запрашивает пользователя если пришло сообщение не от текущего
собеседника. При необходимости меняет собеседника.<|endoftext|>
|
0c58e9b72cf8e6e8f7f103abb8f3bbdeca297da438e25e99c859986df119372e
|
@pyqtSlot()
def connection_lost(self):
'\n Слот обработчик потери соеднинения с сервером.\n Выдаёт окно предупреждение и завершает работу приложения.\n '
self.messages.warning(self, 'Сбой соединения', 'Потеряно соединение с сервером. ')
self.close()
|
Слот обработчик потери соеднинения с сервером.
Выдаёт окно предупреждение и завершает работу приложения.
|
Lib/site-packages/client/main_window.py
|
connection_lost
|
fochoao/cpython
| 0 |
python
|
@pyqtSlot()
def connection_lost(self):
'\n Слот обработчик потери соеднинения с сервером.\n Выдаёт окно предупреждение и завершает работу приложения.\n '
self.messages.warning(self, 'Сбой соединения', 'Потеряно соединение с сервером. ')
self.close()
|
@pyqtSlot()
def connection_lost(self):
'\n Слот обработчик потери соеднинения с сервером.\n Выдаёт окно предупреждение и завершает работу приложения.\n '
self.messages.warning(self, 'Сбой соединения', 'Потеряно соединение с сервером. ')
self.close()<|docstring|>Слот обработчик потери соеднинения с сервером.
Выдаёт окно предупреждение и завершает работу приложения.<|endoftext|>
|
55bc0b3572321bb9d9239c666d4b0231861c7805d9199eb8d1b412822823837e
|
@pyqtSlot()
def sig_205(self):
'\n Слот выполняющий обновление баз данных по команде сервера.\n '
if (self.current_chat and (not self.database.check_user(self.current_chat))):
self.messages.warning(self, 'Сочувствую', 'К сожалению собеседник был удалён с сервера.')
self.set_disabled_input()
self.current_chat = None
self.clients_list_update()
|
Слот выполняющий обновление баз данных по команде сервера.
|
Lib/site-packages/client/main_window.py
|
sig_205
|
fochoao/cpython
| 0 |
python
|
@pyqtSlot()
def sig_205(self):
'\n \n '
if (self.current_chat and (not self.database.check_user(self.current_chat))):
self.messages.warning(self, 'Сочувствую', 'К сожалению собеседник был удалён с сервера.')
self.set_disabled_input()
self.current_chat = None
self.clients_list_update()
|
@pyqtSlot()
def sig_205(self):
'\n \n '
if (self.current_chat and (not self.database.check_user(self.current_chat))):
self.messages.warning(self, 'Сочувствую', 'К сожалению собеседник был удалён с сервера.')
self.set_disabled_input()
self.current_chat = None
self.clients_list_update()<|docstring|>Слот выполняющий обновление баз данных по команде сервера.<|endoftext|>
|
b89738c6e7dfdcfdbfe1aa5a74b84ec8f94c6464597aa3ce95280218ca8a3d6e
|
def make_connection(self, trans_obj):
'Метод обеспечивающий соединение сигналов и слотов.'
trans_obj.new_message.connect(self.message)
trans_obj.connection_lost.connect(self.connection_lost)
trans_obj.message_205.connect(self.sig_205)
|
Метод обеспечивающий соединение сигналов и слотов.
|
Lib/site-packages/client/main_window.py
|
make_connection
|
fochoao/cpython
| 0 |
python
|
def make_connection(self, trans_obj):
trans_obj.new_message.connect(self.message)
trans_obj.connection_lost.connect(self.connection_lost)
trans_obj.message_205.connect(self.sig_205)
|
def make_connection(self, trans_obj):
trans_obj.new_message.connect(self.message)
trans_obj.connection_lost.connect(self.connection_lost)
trans_obj.message_205.connect(self.sig_205)<|docstring|>Метод обеспечивающий соединение сигналов и слотов.<|endoftext|>
|
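make_connection above wires three transport signals to window slots. The following is a minimal PyQt5 sketch of that same signal/slot pattern; the Transport and Window classes are hypothetical stand-ins and the example assumes PyQt5 is installed.

```python
from PyQt5.QtCore import QCoreApplication, QObject, pyqtSignal, pyqtSlot

class Transport(QObject):
    new_message = pyqtSignal(dict)   # stand-in for the client's transport signals

class Window(QObject):
    @pyqtSlot(dict)
    def message(self, msg):
        print('got', msg)

app = QCoreApplication([])           # create a Qt core application (safe even without a GUI event loop)
transport, window = Transport(), Window()
transport.new_message.connect(window.message)   # same shape as trans_obj.new_message.connect(self.message)
transport.new_message.emit({'text': 'hi'})      # direct connection: the slot runs immediately
```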
17f629df0d32e754f57d402795c9709433ab53b6d610fd5424fdbd31ad5dce34
|
def _wait_threads(self):
"\n Waits for any places that aren't finished with their current transition to finish the transition.\n "
for place in self._places:
while place.working:
pass
|
Waits for any places that aren't finished with their current transition to finish the transition.
|
sim_assets/map.py
|
_wait_threads
|
AvanaPY/SimSims
| 0 |
python
|
def _wait_threads(self):
"\n \n "
for place in self._places:
while place.working:
pass
|
def _wait_threads(self):
"\n \n "
for place in self._places:
while place.working:
pass<|docstring|>Waits for any places that aren't finished with their current transition to finish the transition.<|endoftext|>
|
a0c93024f6c74f82ccfda290eaba21875e750e710e9e03e016db0a48f1aac38d
|
def select_build_type(self, t):
'\n Selects a Place type.\n '
self._selected_build_type = t
self._selected_resource_type = None
self._selected_place = None
|
Selects a Place type.
|
sim_assets/map.py
|
select_build_type
|
AvanaPY/SimSims
| 0 |
python
|
def select_build_type(self, t):
'\n \n '
self._selected_build_type = t
self._selected_resource_type = None
self._selected_place = None
|
def select_build_type(self, t):
'\n \n '
self._selected_build_type = t
self._selected_resource_type = None
self._selected_place = None<|docstring|>Selects a Place type.<|endoftext|>
|
bd982d38d5b8261c670ced20c841522aca2ce63a769c3b624690c776f1273c07
|
def select_resource_type(self, r):
'\n Selects a resource type.\n '
self._selected_resource_type = r
self._selected_build_type = None
self._selected_place = None
|
Selects a resource type.
|
sim_assets/map.py
|
select_resource_type
|
AvanaPY/SimSims
| 0 |
python
|
def select_resource_type(self, r):
'\n \n '
self._selected_resource_type = r
self._selected_build_type = None
self._selected_place = None
|
def select_resource_type(self, r):
'\n \n '
self._selected_resource_type = r
self._selected_build_type = None
self._selected_place = None<|docstring|>Selects a resource type.<|endoftext|>
|
dd4b570d512c3ee55d5de3dd0e1a8528989aea4a360a179d6c42920842cb4d49
|
def select_building_at(self, x, y):
'\n Selects a building that contains the point (x, y)\n '
place_at = self.get_place_at(x, y)
if place_at:
self._selected_place = place_at
|
Selects a building that contains the point (x, y)
|
sim_assets/map.py
|
select_building_at
|
AvanaPY/SimSims
| 0 |
python
|
def select_building_at(self, x, y):
'\n \n '
place_at = self.get_place_at(x, y)
if place_at:
self._selected_place = place_at
|
def select_building_at(self, x, y):
'\n \n '
place_at = self.get_place_at(x, y)
if place_at:
self._selected_place = place_at<|docstring|>Selects a building that contains the point (x, y)<|endoftext|>
|
6de680c4e7e8f8e0640dde456e05ad0144ff9a9842bdd940a5b829dc1935d982
|
def deselect_selections(self):
'\n Deselects all selections.\n '
self._selected_resource_type = None
self._selected_build_type = None
self._selected_place = None
|
Deselects all selections.
|
sim_assets/map.py
|
deselect_selections
|
AvanaPY/SimSims
| 0 |
python
|
def deselect_selections(self):
'\n \n '
self._selected_resource_type = None
self._selected_build_type = None
self._selected_place = None
|
def deselect_selections(self):
'\n \n '
self._selected_resource_type = None
self._selected_build_type = None
self._selected_place = None<|docstring|>Deselects all selections.<|endoftext|>
|
8c7c6da9eb9fa0f6dec2ede8ea5eea96fd9e31b660ac83577b1e0432c01bc671
|
def get_place_at(self, x, y):
'\n Returns the place which contains the point (x, y), None if there is no such place.\n '
place = None
for p in self._places:
if p.point_in_place(x, y):
place = p
return place
|
Returns the place which contains the point (x, y), None if there is no such place.
|
sim_assets/map.py
|
get_place_at
|
AvanaPY/SimSims
| 0 |
python
|
def get_place_at(self, x, y):
'\n \n '
place = None
for p in self._places:
if p.point_in_place(x, y):
place = p
return place
|
def get_place_at(self, x, y):
'\n \n '
place = None
for p in self._places:
if p.point_in_place(x, y):
place = p
return place<|docstring|>Returns the place which contains the point (x, y), None if there is no such place.<|endoftext|>
|
4b5d771612d8531740486be5135ccd8857d0b75c240ca4a56509ebd329123328
|
def disconnect_from_selection(self, x, y):
'\n Disconnects the currently selected place from whichever place contains the point (x, y)\n '
if self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.disconnect_place(place)
|
Disconnects the currently selected place from whichever place contains the point (x, y)
|
sim_assets/map.py
|
disconnect_from_selection
|
AvanaPY/SimSims
| 0 |
python
|
def disconnect_from_selection(self, x, y):
'\n \n '
if self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.disconnect_place(place)
|
def disconnect_from_selection(self, x, y):
'\n \n '
if self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.disconnect_place(place)<|docstring|>Disconnects the currently selected place from whichever place contains the point (x, y)<|endoftext|>
|
54b8fab32510599c7cd984e9797b3e85723d6b20759b56f56f9925abc96e7ac5
|
def delete_place_at(self, x, y):
'\n Fully deletes the place that contains the point (x, y), disconnects any connections to that place\n '
place = self.get_place_at(x, y)
if place:
place.disconnect_all_connections()
self._places.remove(place)
|
Fully deletes the place that contains the point (x, y), disconnects any connections to that place
|
sim_assets/map.py
|
delete_place_at
|
AvanaPY/SimSims
| 0 |
python
|
def delete_place_at(self, x, y):
'\n \n '
place = self.get_place_at(x, y)
if place:
place.disconnect_all_connections()
self._places.remove(place)
|
def delete_place_at(self, x, y):
'\n \n '
place = self.get_place_at(x, y)
if place:
place.disconnect_all_connections()
self._places.remove(place)<|docstring|>Fully deletes the place that contains the point (x, y), disconnects any connections to that place<|endoftext|>
|
6ba6cfd91b7a23cd08afbf13b518cc849ed887bde7d4c6487911e52acfe43a3d
|
def can_build(self):
'\n Returns a boolean if any buildable type or place is selected.\n '
return (self._selected_build_type or self._selected_place or self._selected_resource_type)
|
Returns a boolean if any buildable type or place is selected.
|
sim_assets/map.py
|
can_build
|
AvanaPY/SimSims
| 0 |
python
|
def can_build(self):
'\n \n '
return (self._selected_build_type or self._selected_place or self._selected_resource_type)
|
def can_build(self):
'\n \n '
return (self._selected_build_type or self._selected_place or self._selected_resource_type)<|docstring|>Returns a boolean if any buildable type or place is selected.<|endoftext|>
|
d35e60860729b48e1b23b3e2c600bef2208ee772bc4122707548e337b31e899c
|
def build(self, x, y):
'\n Builds a selected building in a location (x, y).\n '
if self._selected_build_type:
t = self._selected_build_type()
(w, h) = t.dims()
t.set_position((x, y))
self._places.append(t)
elif self._selected_resource_type:
place = self.get_place_at(x, y)
if place:
r = self._selected_resource_type()
place.insert(r)
elif self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.connect_place(place)
|
Builds a selected building in a location (x, y).
|
sim_assets/map.py
|
build
|
AvanaPY/SimSims
| 0 |
python
|
def build(self, x, y):
'\n \n '
if self._selected_build_type:
t = self._selected_build_type()
(w, h) = t.dims()
t.set_position((x, y))
self._places.append(t)
elif self._selected_resource_type:
place = self.get_place_at(x, y)
if place:
r = self._selected_resource_type()
place.insert(r)
elif self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.connect_place(place)
|
def build(self, x, y):
'\n \n '
if self._selected_build_type:
t = self._selected_build_type()
(w, h) = t.dims()
t.set_position((x, y))
self._places.append(t)
elif self._selected_resource_type:
place = self.get_place_at(x, y)
if place:
r = self._selected_resource_type()
place.insert(r)
elif self._selected_place:
place = self.get_place_at(x, y)
if place:
self._selected_place.connect_place(place)<|docstring|>Builds a selected building in a location (x, y).<|endoftext|>
|
4d66607b240716cd161343639c9df75a894492142cc98bc42a1f6d01ab1937f0
|
def selected_build_preview(self):
'\n Returns a (blit, name) preview pair of the selected object. Returns (None, "") if no building is selected.\n '
if self._selected_build_type:
(blit, name) = self._selected_previews[self._selected_build_type]
return (blit, name)
return (None, '')
|
Returns a (blit, name) preview pair of the selected object. Returns (None, "") if no building is selected.
|
sim_assets/map.py
|
selected_build_preview
|
AvanaPY/SimSims
| 0 |
python
|
def selected_build_preview(self):
'\n \n '
if self._selected_build_type:
(blit, name) = self._selected_previews[self._selected_build_type]
return (blit, name)
return (None, '')
|
def selected_build_preview(self):
'\n \n '
if self._selected_build_type:
(blit, name) = self._selected_previews[self._selected_build_type]
return (blit, name)
return (None, '')
|
8080c76e44eda5214f5a14852d5f75ac9f59c05f64f94e4bdf3748d422c96ddc
|
def blit(self, dims, text_font: pygame.font.Font):
'\n Returns the blit of the map.\n '
surface = pygame.Surface(dims, pygame.SRCALPHA, 32).convert_alpha()
for place in self._places:
pairs = place.connection_points()
for (a, b) in pairs:
self._draw_bezier(surface, a, b)
for place in self._places:
blit = place.blit()
if blit:
if (place == self._selected_place):
blit.fill((0, 120, 240, 20), special_flags=pygame.BLEND_RGB_MULT)
txt_blit = text_font.render(place.name, True, (0, 0, 0))
(x, y) = blit.get_size()
x = ((x / 2) - (txt_blit.get_width() / 2))
y = ((y / 2) - (txt_blit.get_height() / 2))
blit.blit(txt_blit, (x, y))
surface.blit(blit, place.position)
return surface
|
Returns the blit of the map.
|
sim_assets/map.py
|
blit
|
AvanaPY/SimSims
| 0 |
python
|
def blit(self, dims, text_font: pygame.font.Font):
'\n \n '
surface = pygame.Surface(dims, pygame.SRCALPHA, 32).convert_alpha()
for place in self._places:
pairs = place.connection_points()
for (a, b) in pairs:
self._draw_bezier(surface, a, b)
for place in self._places:
blit = place.blit()
if blit:
if (place == self._selected_place):
blit.fill((0, 120, 240, 20), special_flags=pygame.BLEND_RGB_MULT)
txt_blit = text_font.render(place.name, True, (0, 0, 0))
(x, y) = blit.get_size()
x = ((x / 2) - (txt_blit.get_width() / 2))
y = ((y / 2) - (txt_blit.get_height() / 2))
blit.blit(txt_blit, (x, y))
surface.blit(blit, place.position)
return surface
|
def blit(self, dims, text_font: pygame.font.Font):
'\n \n '
surface = pygame.Surface(dims, pygame.SRCALPHA, 32).convert_alpha()
for place in self._places:
pairs = place.connection_points()
for (a, b) in pairs:
self._draw_bezier(surface, a, b)
for place in self._places:
blit = place.blit()
if blit:
if (place == self._selected_place):
blit.fill((0, 120, 240, 20), special_flags=pygame.BLEND_RGB_MULT)
txt_blit = text_font.render(place.name, True, (0, 0, 0))
(x, y) = blit.get_size()
x = ((x / 2) - (txt_blit.get_width() / 2))
y = ((y / 2) - (txt_blit.get_height() / 2))
blit.blit(txt_blit, (x, y))
surface.blit(blit, place.position)
return surface<|docstring|>Returns the blit of the map.<|endoftext|>
|
fca0ca68db49337ba558a15e125440df68a849b358fa7f59e463d083931e04bb
|
def _draw_bezier(self, surface, pos1, pos2, bend_factor=0.2):
'\n Returns a pygame.Surface object containing a curved line.\n '
direction = ((pos2[0] - pos1[0]), (pos2[1] - pos1[1]))
orth = (((- direction[1]) * bend_factor), (direction[0] * bend_factor))
ctrl_point = (((pos1[0] + (direction[0] * 0.5)) + orth[0]), ((pos1[1] + (direction[1] * 0.5)) + orth[1]))
b_points = compute_bezier_points((pos1, ctrl_point, pos2))
length = sum([math.dist(b_points[i], b_points[(i + 1)]) for i in range((len(b_points) - 1))])
walked = 0
start_col = (255, 0, 0)
end_col = (0, 120, 255)
for i in range((len(b_points) - 1)):
walked += math.dist(b_points[i], b_points[(i + 1)])
col = colour_linear_interpolation(start_col, end_col, (walked / length))
pygame.draw.line(surface, col, b_points[i], b_points[(i + 1)], 3)
|
Returns a pygame.Surface object containing a curved line.
|
sim_assets/map.py
|
_draw_bezier
|
AvanaPY/SimSims
| 0 |
python
|
def _draw_bezier(self, surface, pos1, pos2, bend_factor=0.2):
'\n \n '
direction = ((pos2[0] - pos1[0]), (pos2[1] - pos1[1]))
orth = (((- direction[1]) * bend_factor), (direction[0] * bend_factor))
ctrl_point = (((pos1[0] + (direction[0] * 0.5)) + orth[0]), ((pos1[1] + (direction[1] * 0.5)) + orth[1]))
b_points = compute_bezier_points((pos1, ctrl_point, pos2))
length = sum([math.dist(b_points[i], b_points[(i + 1)]) for i in range((len(b_points) - 1))])
walked = 0
start_col = (255, 0, 0)
end_col = (0, 120, 255)
for i in range((len(b_points) - 1)):
walked += math.dist(b_points[i], b_points[(i + 1)])
col = colour_linear_interpolation(start_col, end_col, (walked / length))
pygame.draw.line(surface, col, b_points[i], b_points[(i + 1)], 3)
|
def _draw_bezier(self, surface, pos1, pos2, bend_factor=0.2):
'\n \n '
direction = ((pos2[0] - pos1[0]), (pos2[1] - pos1[1]))
orth = (((- direction[1]) * bend_factor), (direction[0] * bend_factor))
ctrl_point = (((pos1[0] + (direction[0] * 0.5)) + orth[0]), ((pos1[1] + (direction[1] * 0.5)) + orth[1]))
b_points = compute_bezier_points((pos1, ctrl_point, pos2))
length = sum([math.dist(b_points[i], b_points[(i + 1)]) for i in range((len(b_points) - 1))])
walked = 0
start_col = (255, 0, 0)
end_col = (0, 120, 255)
for i in range((len(b_points) - 1)):
walked += math.dist(b_points[i], b_points[(i + 1)])
col = colour_linear_interpolation(start_col, end_col, (walked / length))
pygame.draw.line(surface, col, b_points[i], b_points[(i + 1)], 3)<|docstring|>Returns a pygame.Surface object containing a curved line.<|endoftext|>
|
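_draw_bezier above samples a quadratic Bezier curve through one control point and draws it as short colour-interpolated line segments, but the compute_bezier_points helper it relies on is not shown in these rows. A minimal sampler with the assumed signature (three control points in, list of (x, y) points out) could look like this; the step count and return format are assumptions, since _draw_bezier only needs an ordered list of points it can connect with pygame.draw.line.

```python
def compute_bezier_points(points, steps=30):
    # Quadratic Bezier: B(t) = (1-t)^2 * P0 + 2(1-t)t * P1 + t^2 * P2, sampled at steps+1 values of t.
    (p0, p1, p2) = points
    samples = []
    for i in range(steps + 1):
        t = i / steps
        x = (1 - t) ** 2 * p0[0] + 2 * (1 - t) * t * p1[0] + t ** 2 * p2[0]
        y = (1 - t) ** 2 * p0[1] + 2 * (1 - t) * t * p1[1] + t ** 2 * p2[1]
        samples.append((x, y))
    return samples
```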
d7e6feba1dbc1ed995f70d41a7dad849939e17de693b7c0c2bae1ad14947883a
|
def clear(self):
'\n Clears the map.\n '
self._places.clear()
|
Clears the map.
|
sim_assets/map.py
|
clear
|
AvanaPY/SimSims
| 0 |
python
|
def clear(self):
'\n \n '
self._places.clear()
|
def clear(self):
'\n \n '
self._places.clear()<|docstring|>Clears the map.<|endoftext|>
|
3dd07843208b8fd25fffc8a162156b155427391c30614d1a2ae3dc06c02e1661
|
def json(self):
'\n Returns a json object representing the map.\n '
for (i, p) in enumerate(self._places):
p.set_index(i)
places = []
for place in self._places:
p_json = place.json()
p_json = {k.replace('_', ''): v for (k, v) in p_json.items()}
places.append(p_json)
return places
|
Returns a json object representing the map.
|
sim_assets/map.py
|
json
|
AvanaPY/SimSims
| 0 |
python
|
def json(self):
'\n \n '
for (i, p) in enumerate(self._places):
p.set_index(i)
places = []
for place in self._places:
p_json = place.json()
p_json = {k.replace('_', ''): v for (k, v) in p_json.items()}
places.append(p_json)
return places
|
def json(self):
'\n \n '
for (i, p) in enumerate(self._places):
p.set_index(i)
places = []
for place in self._places:
p_json = place.json()
p_json = {k.replace('_', ''): v for (k, v) in p_json.items()}
places.append(p_json)
return places<|docstring|>Returns a json object representing the map.<|endoftext|>
|
4abcbd274d05f7401c97358cb70649854d47a5100af335bf926f78764e1f9329
|
def load_json(self, json):
'\n Loads a .json object and replaces the map content.\n '
self._places.clear()
index_map = {}
for place_json in json:
place = Place.from_json(place_json)
self._places.append(place)
index_map[place_json['index']] = place
for place_json in json:
place = index_map[place_json['index']]
for index in place_json['out']:
p = index_map[index]
place.connect_place(p)
|
Loads a .json object and replaces the map content.
|
sim_assets/map.py
|
load_json
|
AvanaPY/SimSims
| 0 |
python
|
def load_json(self, json):
'\n \n '
self._places.clear()
index_map = {}
for place_json in json:
place = Place.from_json(place_json)
self._places.append(place)
index_map[place_json['index']] = place
for place_json in json:
place = index_map[place_json['index']]
for index in place_json['out']:
p = index_map[index]
place.connect_place(p)
|
def load_json(self, json):
'\n \n '
self._places.clear()
index_map = {}
for place_json in json:
place = Place.from_json(place_json)
self._places.append(place)
index_map[place_json['index']] = place
for place_json in json:
place = index_map[place_json['index']]
for index in place_json['out']:
p = index_map[index]
place.connect_place(p)<|docstring|>Loads a .json object and replaces the map content.<|endoftext|>
|
4321c2691ae12978029380e39d72142f0eb4f35c925cfe3f0c8d68e690713872
|
def showImage(img, window='Image'):
' Shows the image in a resizeable window'
cv2.namedWindow(window, cv2.WINDOW_NORMAL)
cv2.imshow(window, img)
|
Shows the image in a resizeable window
|
common/utils_opencv.py
|
showImage
|
MacherLabs/common
| 0 |
python
|
def showImage(img, window='Image'):
' '
cv2.namedWindow(window, cv2.WINDOW_NORMAL)
cv2.imshow(window, img)
|
def showImage(img, window='Image'):
' '
cv2.namedWindow(window, cv2.WINDOW_NORMAL)
cv2.imshow(window, img)<|docstring|>Shows the image in a resizeable window<|endoftext|>
|
bec3121847c2a0c7ce1065dbe7c90606aad7a8f1eb1ad57e13c5787c30e49a40
|
def resizeImg(img, size, keepAspect=False, padding=False):
' Resize the image to given size.\n img -- input source image\n size -- (w,h) of desired resized image\n keepAspect -- to preserve aspect ratio during resize \n padding -- to add black padding when target aspect is different \n '
dtype = img.dtype
(outW, outH) = size
if (len(img.shape) > 2):
(h, w, d) = img.shape[:3]
if padding:
outimg = np.zeros((outH, outW, d), dtype=dtype)
else:
(h, w) = img.shape[:2]
if padding:
outimg = np.zeros((outH, outW), dtype=dtype)
if keepAspect:
aspect = (float(w) / h)
if (int((outH * aspect)) < outW):
out = cv2.resize(img, (int((outH * aspect)), outH))
if padding:
outimg[:, ((outW - int((outH * aspect))) // 2):((outW + int((outH * aspect))) // 2)] = out
out = outimg
else:
out = cv2.resize(img, (outW, int((outW / aspect))))
if padding:
outimg[((outH - int((outW / aspect))) // 2):((outH + int((outW / aspect))) // 2), :] = out
out = outimg
else:
out = cv2.resize(img, size)
return out
|
Resize the image to given size.
img -- input source image
size -- (w,h) of desired resized image
keepAspect -- to preserve aspect ratio during resize
padding -- to add black padding when target aspect is different
|
common/utils_opencv.py
|
resizeImg
|
MacherLabs/common
| 0 |
python
|
def resizeImg(img, size, keepAspect=False, padding=False):
' Resize the image to given size.\n img -- input source image\n size -- (w,h) of desired resized image\n keepAspect -- to preserve aspect ratio during resize \n padding -- to add black padding when target aspect is different \n '
dtype = img.dtype
(outW, outH) = size
if (len(img.shape) > 2):
(h, w, d) = img.shape[:3]
if padding:
outimg = np.zeros((outH, outW, d), dtype=dtype)
else:
(h, w) = img.shape[:2]
if padding:
outimg = np.zeros((outH, outW), dtype=dtype)
if keepAspect:
aspect = (float(w) / h)
if (int((outH * aspect)) < outW):
out = cv2.resize(img, (int((outH * aspect)), outH))
if padding:
outimg[:, ((outW - int((outH * aspect))) // 2):((outW + int((outH * aspect))) // 2)] = out
out = outimg
else:
out = cv2.resize(img, (outW, int((outW / aspect))))
if padding:
outimg[((outH - int((outW / aspect))) // 2):((outH + int((outW / aspect))) // 2), :] = out
out = outimg
else:
out = cv2.resize(img, size)
return out
|
def resizeImg(img, size, keepAspect=False, padding=False):
' Resize the image to given size.\n img -- input source image\n size -- (w,h) of desired resized image\n keepAspect -- to preserve aspect ratio during resize \n padding -- to add black padding when target aspect is different \n '
dtype = img.dtype
(outW, outH) = size
if (len(img.shape) > 2):
(h, w, d) = img.shape[:3]
if padding:
outimg = np.zeros((outH, outW, d), dtype=dtype)
else:
(h, w) = img.shape[:2]
if padding:
outimg = np.zeros((outH, outW), dtype=dtype)
if keepAspect:
aspect = (float(w) / h)
if (int((outH * aspect)) < outW):
out = cv2.resize(img, (int((outH * aspect)), outH))
if padding:
outimg[:, ((outW - int((outH * aspect))) // 2):((outW + int((outH * aspect))) // 2)] = out
out = outimg
else:
out = cv2.resize(img, (outW, int((outW / aspect))))
if padding:
outimg[((outH - int((outW / aspect))) // 2):((outH + int((outW / aspect))) // 2), :] = out
out = outimg
else:
out = cv2.resize(img, size)
return out<|docstring|>Resize the image to given size.
img -- input source image
size -- (w,h) of desired resized image
keepAspect -- to preserve aspect ratio during resize
padding -- to add black padding when target aspect is different<|endoftext|>
|
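With the padding slices repaired to integer division as above, resizeImg letterboxes the image when keepAspect and padding are both set. A hypothetical usage check (assumes numpy and OpenCV are installed and resizeImg from the row above is in scope):

```python
import numpy as np

# A 640x480 colour image resized into a 300x300 canvas with aspect preserved and black padding.
img = np.zeros((480, 640, 3), dtype=np.uint8)
out = resizeImg(img, (300, 300), keepAspect=True, padding=True)
print(out.shape)  # (300, 300, 3): content scaled to 300x225, padded top and bottom
```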
ebae8e94499a3a9aa29cda5892b572a5cc6d08ca3246be5d22d727a40580b5dc
|
def rotateImg(img, angle, crop=False):
' Rotate an image counter-clockwise by given angle with or without cropping.\n img -- input source image\n angle -- angle in degrees to ratate the img to\n crop -- to change/preserve the size while rotating\n '
(h, w) = img.shape[:2]
centre = ((img.shape[1] / 2), (img.shape[0] / 2))
M = cv2.getRotationMatrix2D(centre, angle, 1.0)
if crop:
out = cv2.warpAffine(img, M, (w, h), flags=cv2.INTER_LINEAR)
else:
rangle = np.deg2rad(angle)
H = abs(((h * np.cos(rangle)) + (w * np.sin(rangle))))
W = abs(((w * np.cos(rangle)) + (h * np.sin(rangle))))
M[(0, 2)] += ((W - w) / 2)
M[(1, 2)] += ((H - h) / 2)
out = cv2.warpAffine(img, M, (int(W), int(H)))
return out
|
Rotate an image counter-clockwise by given angle with or without cropping.
img -- input source image
angle -- angle in degrees to ratate the img to
crop -- to change/preserve the size while rotating
|
common/utils_opencv.py
|
rotateImg
|
MacherLabs/common
| 0 |
python
|
def rotateImg(img, angle, crop=False):
' Rotate an image counter-clockwise by given angle with or without cropping.\n img -- input source image\n angle -- angle in degrees to ratate the img to\n crop -- to change/preserve the size while rotating\n '
(h, w) = img.shape[:2]
centre = ((img.shape[1] / 2), (img.shape[0] / 2))
M = cv2.getRotationMatrix2D(centre, angle, 1.0)
if crop:
out = cv2.warpAffine(img, M, (w, h), flags=cv2.INTER_LINEAR)
else:
rangle = np.deg2rad(angle)
H = abs(((h * np.cos(rangle)) + (w * np.sin(rangle))))
W = abs(((w * np.cos(rangle)) + (h * np.sin(rangle))))
M[(0, 2)] += ((W - w) / 2)
M[(1, 2)] += ((H - h) / 2)
out = cv2.warpAffine(img, M, (int(W), int(H)))
return out
|
def rotateImg(img, angle, crop=False):
' Rotate an image counter-clockwise by given angle with or without cropping.\n img -- input source image\n angle -- angle in degrees to ratate the img to\n crop -- to change/preserve the size while rotating\n '
(h, w) = img.shape[:2]
centre = ((img.shape[1] / 2), (img.shape[0] / 2))
M = cv2.getRotationMatrix2D(centre, angle, 1.0)
if crop:
out = cv2.warpAffine(img, M, (w, h), flags=cv2.INTER_LINEAR)
else:
rangle = np.deg2rad(angle)
H = abs(((h * np.cos(rangle)) + (w * np.sin(rangle))))
W = abs(((w * np.cos(rangle)) + (h * np.sin(rangle))))
M[(0, 2)] += ((W - w) / 2)
M[(1, 2)] += ((H - h) / 2)
out = cv2.warpAffine(img, M, (int(W), int(H)))
return out<|docstring|>Rotate an image counter-clockwise by given angle with or without cropping.
img -- input source image
angle -- angle in degrees to ratate the img to
crop -- to change/preserve the size while rotating<|endoftext|>
|
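A hypothetical usage of rotateImg from the row above, showing how the output canvas grows when crop=False and stays fixed when crop=True (assumes numpy and OpenCV are installed):

```python
import numpy as np

img = np.zeros((480, 640, 3), dtype=np.uint8)
rotated = rotateImg(img, 90, crop=False)        # counter-clockwise, canvas enlarged to fit
print(rotated.shape[:2])                        # approximately (640, 480)
print(rotateImg(img, 90, crop=True).shape[:2])  # (480, 640): original canvas, corners cut off
```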
1a5f95d2d5c9a5a20bc7486196c58e1140c672002fa5c956d38d85237afecac7
|
def showImagesInDirectory(directory):
' Shows all the images in a directory and its sub-directories. '
from os import walk, path
for (root, dirnames, filenames) in walk(directory):
for name in filenames:
try:
file_path = path.join(root, name)
frame = cv2.imread(file_path, (- 1))
print('Original Image Size:', frame.shape, name)
showImage(frame)
except Exception as e:
print('Exception: ', e)
key = (255 & cv2.waitKey(0))
if (key == 27):
break
if (key == 27):
break
cv2.destroyAllWindows()
|
Shows all the images in a directory and its sub-directories.
|
common/utils_opencv.py
|
showImagesInDirectory
|
MacherLabs/common
| 0 |
python
|
def showImagesInDirectory(directory):
' '
from os import walk, path
for (root, dirnames, filenames) in walk(directory):
for name in filenames:
try:
file_path = path.join(root, name)
frame = cv2.imread(file_path, (- 1))
print('Original Image Size:', frame.shape, name)
showImage(frame)
except Exception as e:
print('Exception: ', e)
key = (255 & cv2.waitKey(0))
if (key == 27):
break
if (key == 27):
break
cv2.destroyAllWindows()
|
def showImagesInDirectory(directory):
' '
from os import walk, path
for (root, dirnames, filenames) in walk(directory):
for name in filenames:
try:
file_path = path.join(root, name)
frame = cv2.imread(file_path, (- 1))
print('Original Image Size:', frame.shape, name)
showImage(frame)
except Exception as e:
print('Exception: ', e)
key = (255 & cv2.waitKey(0))
if (key == 27):
break
if (key == 27):
break
cv2.destroyAllWindows()<|docstring|>Shows all the images in a directory and its sub-directories.<|endoftext|>
|
a4d6e6484c759a40fe830f7e5dbeea87337fc7bc17d0e0c65052af626e6b99e4
|
def validate_form_number(number: str) -> bool:
'\n Validate check digit used in IRP, VI and UL forms\n :param number:\n :return:\n '
number_list = list(number)
check_digit = int(number_list.pop())
n = list()
for element in number_list:
n.append(int(element))
number_sum = (((((n[2] + _times_2(n[3])) + n[4]) + _times_2(n[5])) + n[6]) + _times_2(n[7]))
return ((number_sum % 10) == check_digit)
|
Validate check digit used in IRP, VI and UL forms
:param number:
:return:
|
python/common/helper.py
|
validate_form_number
|
jonathan-longe/RSBC-DataHub-API
| 3 |
python
|
def validate_form_number(number: str) -> bool:
'\n Validate check digit used in IRP, VI and UL forms\n :param number:\n :return:\n '
number_list = list(number)
check_digit = int(number_list.pop())
n = list()
for element in number_list:
n.append(int(element))
number_sum = (((((n[2] + _times_2(n[3])) + n[4]) + _times_2(n[5])) + n[6]) + _times_2(n[7]))
return ((number_sum % 10) == check_digit)
|
def validate_form_number(number: str) -> bool:
'\n Validate check digit used in IRP, VI and UL forms\n :param number:\n :return:\n '
number_list = list(number)
check_digit = int(number_list.pop())
n = list()
for element in number_list:
n.append(int(element))
number_sum = (((((n[2] + _times_2(n[3])) + n[4]) + _times_2(n[5])) + n[6]) + _times_2(n[7]))
return ((number_sum % 10) == check_digit)<|docstring|>Validate check digit used in IRP, VI and UL forms
:param number:
:return:<|endoftext|>
|
b9aa35476bbda9f6e5505d0c8f00be6381f061aaff5c345d9be04ad871d29695
|
def _times_2(number: int) -> int:
'\n If number * 2 is greater than 9, return 1\n otherwise return the number * 2\n :param number:\n :return:\n '
return int(list(str((number * 2)))[0])
|
If number * 2 is greater than 9, return 1
otherwise return the number * 2
:param number:
:return:
|
python/common/helper.py
|
_times_2
|
jonathan-longe/RSBC-DataHub-API
| 3 |
python
|
def _times_2(number: int) -> int:
'\n If number * 2 is greater than 9, return 1\n otherwise return the number * 2\n :param number:\n :return:\n '
return int(list(str((number * 2)))[0])
|
def _times_2(number: int) -> int:
'\n If number * 2 is greater than 9, return 1\n otherwise return the number * 2\n :param number:\n :return:\n '
return int(list(str((number * 2)))[0])<|docstring|>If number * 2 is greater than 9, return 1
otherwise return the number * 2
:param number:
:return:<|endoftext|>
|
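The two rows above implement a simple check-digit rule: take digits 3 through 8 of the form number, double every second one of them (keeping only the first digit of the doubled value), sum the six terms, and compare the sum mod 10 with the final digit. A self-contained worked example with a hypothetical 9-digit form number:

```python
def times_2(d: int) -> int:
    # First digit of d * 2 (e.g. 6 -> 12 -> 1), mirroring _times_2 in the row above.
    return int(str(d * 2)[0])

def check_digit_ok(number: str) -> bool:
    digits = [int(c) for c in number]
    check = digits.pop()
    total = digits[2] + times_2(digits[3]) + digits[4] + times_2(digits[5]) + digits[6] + times_2(digits[7])
    return total % 10 == check

# Hypothetical form number '001234562': 1 + 4 + 3 + 8 + 5 + 1 = 22, and 22 % 10 == 2 (the last digit).
print(check_digit_ok('001234562'))  # True
```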
914ec2fc1aabe87fe74d5e27c768dbc283a252b4ad7d71ac117fd9260365bcc8
|
def middle_logic(functions: list, **args):
'\n Recursive function that calls each node in the list.\n Each node has a "try" function that is executed first. If the try\n function returns True, the next node in the list is returned. If the\n try function returns False, the node\'s "fail" list is executed in the\n same way.\n\n example = dict({\n "rules": [\n {\n "pass": success1,\n "fail": [\n {\n "pass": failure1,\n "fail": []\n }\n ],\n },\n ]\n })\n\n The middleware is called like this: middle_logic(example[\'rules\'])\n '
if functions:
try_fail_node = functions.pop(0)
logging.debug(('calling try function: ' + try_fail_node['try'].__name__))
(flag, args) = try_fail_node['try'](**args)
logging.info('result from {} is {}'.format(try_fail_node['try'].__name__, flag))
if flag:
logging.debug('calling middleware logic recursively')
args = middle_logic(functions, **args)
else:
logging.debug('calling failure functions recursively')
args = middle_logic(try_fail_node['fail'], **args)
return args
|
Recursive function that calls each node in the list.
Each node has a "try" function that is executed first. If the try
function returns True, the next node in the list is returned. If the
try function returns False, the node's "fail" list is executed in the
same way.
example = dict({
"rules": [
{
"pass": success1,
"fail": [
{
"pass": failure1,
"fail": []
}
],
},
]
})
The middleware is called like this: middle_logic(example['rules'])
|
python/common/helper.py
|
middle_logic
|
jonathan-longe/RSBC-DataHub-API
| 3 |
python
|
def middle_logic(functions: list, **args):
'\n Recursive function that calls each node in the list.\n Each node has a "try" function that is executed first. If the try\n function returns True, the next node in the list is returned. If the\n try function returns False, the node\'s "fail" list is executed in the\n same way.\n\n example = dict({\n "rules": [\n {\n "pass": success1,\n "fail": [\n {\n "pass": failure1,\n "fail": []\n }\n ],\n },\n ]\n })\n\n The middleware is called like this: middle_logic(example[\'rules\'])\n '
if functions:
try_fail_node = functions.pop(0)
logging.debug(('calling try function: ' + try_fail_node['try'].__name__))
(flag, args) = try_fail_node['try'](**args)
logging.info('result from {} is {}'.format(try_fail_node['try'].__name__, flag))
if flag:
logging.debug('calling middleware logic recursively')
args = middle_logic(functions, **args)
else:
logging.debug('calling failure functions recursively')
args = middle_logic(try_fail_node['fail'], **args)
return args
|
def middle_logic(functions: list, **args):
'\n Recursive function that calls each node in the list.\n Each node has a "try" function that is executed first. If the try\n function returns True, the next node in the list is returned. If the\n try function returns False, the node\'s "fail" list is executed in the\n same way.\n\n example = dict({\n "rules": [\n {\n "pass": success1,\n "fail": [\n {\n "pass": failure1,\n "fail": []\n }\n ],\n },\n ]\n })\n\n The middleware is called like this: middle_logic(example[\'rules\'])\n '
if functions:
try_fail_node = functions.pop(0)
logging.debug(('calling try function: ' + try_fail_node['try'].__name__))
(flag, args) = try_fail_node['try'](**args)
logging.info('result from {} is {}'.format(try_fail_node['try'].__name__, flag))
if flag:
logging.debug('calling middleware logic recursively')
args = middle_logic(functions, **args)
else:
logging.debug('calling failure functions recursively')
args = middle_logic(try_fail_node['fail'], **args)
return args<|docstring|>Recursive function that calls each node in the list.
Each node has a "try" function that is executed first. If the try
function returns True, the next node in the list is returned. If the
try function returns False, the node's "fail" list is executed in the
same way.
example = dict({
"rules": [
{
"pass": success1,
"fail": [
{
"pass": failure1,
"fail": []
}
],
},
]
})
The middleware is called like this: middle_logic(example['rules'])<|endoftext|>
|
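A hedged usage sketch of middle_logic from the row above: the rule list holds nodes with 'try' and 'fail' keys (the docstring's example shows a 'pass' key, but the implementation reads 'try'), each node function returns a (flag, kwargs) pair, and the kwargs dict threads through the whole chain. All names below are hypothetical stand-ins and assume middle_logic itself is in scope.

```python
import logging
logging.basicConfig(level=logging.INFO)

def has_name(**kwargs):                      # a "try" node: succeed only if 'name' was supplied
    return 'name' in kwargs, kwargs

def greet(**kwargs):
    kwargs['greeting'] = f"hello {kwargs['name']}"
    return True, kwargs

def greet_stranger(**kwargs):                # runs from the "fail" branch of has_name
    kwargs['greeting'] = 'hello stranger'
    return True, kwargs

rules = [
    {'try': has_name, 'fail': [{'try': greet_stranger, 'fail': []}]},
    {'try': greet, 'fail': []},
]
result = middle_logic(rules, name='Ada')     # note: middle_logic pops nodes, so the list is consumed
print(result['greeting'])                    # hello Ada
```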
2bdc9c1541e7cfb43af5e4c8b99c2f1d8c299f933bc47631b2fd5db7539a7b52
|
def get_listeners(listeners: dict, key: str) -> list:
'\n Get the list of nested list of functions to invoke\n for a particular form type\n '
if (key in listeners):
return listeners[key]
else:
return listeners['unknown_event']
|
Get the list of nested list of functions to invoke
for a particular form type
|
python/common/helper.py
|
get_listeners
|
jonathan-longe/RSBC-DataHub-API
| 3 |
python
|
def get_listeners(listeners: dict, key: str) -> list:
'\n Get the list of nested list of functions to invoke\n for a particular form type\n '
if (key in listeners):
return listeners[key]
else:
return listeners['unknown_event']
|
def get_listeners(listeners: dict, key: str) -> list:
'\n Get the list of nested list of functions to invoke\n for a particular form type\n '
if (key in listeners):
return listeners[key]
else:
return listeners['unknown_event']<|docstring|>Get the list of nested list of functions to invoke
for a particular form type<|endoftext|>
|
9178b55348e4ee63dc7ae24e70aa26b57fe14099d70d8457d94992e4960ed3f9
|
def darknet_base(inputs):
'Darknet-53 base model.\n '
x = conv2d_unit(inputs, 32, (3, 3))
x = conv2d_unit(x, 64, (3, 3), strides=2)
x = stack_residual_block(x, 32, n=1)
x = conv2d_unit(x, 128, (3, 3), strides=2)
x = stack_residual_block(x, 64, n=2)
x = conv2d_unit(x, 256, (3, 3), strides=2)
x = stack_residual_block(x, 128, n=8)
x = conv2d_unit(x, 512, (3, 3), strides=2)
x = stack_residual_block(x, 256, n=8)
x = conv2d_unit(x, 1024, (3, 3), strides=2)
x = stack_residual_block(x, 512, n=4)
return x
|
Darknet-53 base model.
|
src/object_detection/yolo/darknet53/darknet53.py
|
darknet_base
|
Grula/vehicle-detection
| 0 |
python
|
def darknet_base(inputs):
'\n '
x = conv2d_unit(inputs, 32, (3, 3))
x = conv2d_unit(x, 64, (3, 3), strides=2)
x = stack_residual_block(x, 32, n=1)
x = conv2d_unit(x, 128, (3, 3), strides=2)
x = stack_residual_block(x, 64, n=2)
x = conv2d_unit(x, 256, (3, 3), strides=2)
x = stack_residual_block(x, 128, n=8)
x = conv2d_unit(x, 512, (3, 3), strides=2)
x = stack_residual_block(x, 256, n=8)
x = conv2d_unit(x, 1024, (3, 3), strides=2)
x = stack_residual_block(x, 512, n=4)
return x
|
def darknet_base(inputs):
'\n '
x = conv2d_unit(inputs, 32, (3, 3))
x = conv2d_unit(x, 64, (3, 3), strides=2)
x = stack_residual_block(x, 32, n=1)
x = conv2d_unit(x, 128, (3, 3), strides=2)
x = stack_residual_block(x, 64, n=2)
x = conv2d_unit(x, 256, (3, 3), strides=2)
x = stack_residual_block(x, 128, n=8)
x = conv2d_unit(x, 512, (3, 3), strides=2)
x = stack_residual_block(x, 256, n=8)
x = conv2d_unit(x, 1024, (3, 3), strides=2)
x = stack_residual_block(x, 512, n=4)
return x<|docstring|>Darknet-53 base model.<|endoftext|>
|
49fb44274a11c035904fa7c66529f438b728a7d26cdbf1b0123c8c9b556883c7
|
def darknet():
'Darknet-53 classifier.\n '
inputs = Input(shape=(416, 416, 3))
x = darknet_base(inputs)
x = GlobalAveragePooling2D()(x)
x = Dense(1000, activation='softmax')(x)
model = Model(inputs, x)
return model
|
Darknet-53 classifier.
|
src/object_detection/yolo/darknet53/darknet53.py
|
darknet
|
Grula/vehicle-detection
| 0 |
python
|
def darknet():
'\n '
inputs = Input(shape=(416, 416, 3))
x = darknet_base(inputs)
x = GlobalAveragePooling2D()(x)
x = Dense(1000, activation='softmax')(x)
model = Model(inputs, x)
return model
|
def darknet():
'\n '
inputs = Input(shape=(416, 416, 3))
x = darknet_base(inputs)
x = GlobalAveragePooling2D()(x)
x = Dense(1000, activation='softmax')(x)
model = Model(inputs, x)
return model<|docstring|>Darknet-53 classifier.<|endoftext|>
|
f126248229873e896871abcc3e40371089b7b7660c05957a14d84a88c3b77d85
|
def flatatt(attrs):
'\n Convert a dictionary of attributes to a single string.\n The returned string will contain a leading space followed by key="value",\n XML-style pairs. It is assumed that the keys do not need to be XML-escaped.\n If the passed dictionary is empty, then return an empty string.\n '
return u''.join([(u' %s="%s"' % (k, conditional_escape(v))) for (k, v) in attrs.items()])
|
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. It is assumed that the keys do not need to be XML-escaped.
If the passed dictionary is empty, then return an empty string.
|
desktop/core/ext-py/Django/django/forms/util.py
|
flatatt
|
wwjiang007/hue
| 91 |
python
|
def flatatt(attrs):
'\n Convert a dictionary of attributes to a single string.\n The returned string will contain a leading space followed by key="value",\n XML-style pairs. It is assumed that the keys do not need to be XML-escaped.\n If the passed dictionary is empty, then return an empty string.\n '
return u''.join([(u' %s="%s"' % (k, conditional_escape(v))) for (k, v) in attrs.items()])
|
def flatatt(attrs):
'\n Convert a dictionary of attributes to a single string.\n The returned string will contain a leading space followed by key="value",\n XML-style pairs. It is assumed that the keys do not need to be XML-escaped.\n If the passed dictionary is empty, then return an empty string.\n '
return u''.join([(u' %s="%s"' % (k, conditional_escape(v))) for (k, v) in attrs.items()])
The returned string will contain a leading space followed by key="value",
XML-style pairs. It is assumed that the keys do not need to be XML-escaped.
If the passed dictionary is empty, then return an empty string.<|endoftext|>
|
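flatatt above is a small Django forms helper; a usage sketch showing the leading space and key="value" formatting (the attrs dict is hypothetical, and attribute order follows dict iteration order):

```python
# Values pass through conditional_escape, so markup characters in them are escaped.
attrs = {'class': 'required', 'id': 'id_name'}
print(flatatt(attrs))   # ' class="required" id="id_name"'
print(flatatt({}))      # '' (an empty dict gives an empty string)
```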
86244c924c6975035a57a423e85674010875062dc57b2ab71c943fa1fb17d046
|
def __init__(self, message):
'\n ValidationError can be passed any object that can be printed (usually\n a string) or a list of objects.\n '
if isinstance(message, list):
self.messages = ErrorList([smart_unicode(msg) for msg in message])
else:
message = smart_unicode(message)
self.messages = ErrorList([message])
|
ValidationError can be passed any object that can be printed (usually
a string) or a list of objects.
|
desktop/core/ext-py/Django/django/forms/util.py
|
__init__
|
wwjiang007/hue
| 91 |
python
|
def __init__(self, message):
'\n ValidationError can be passed any object that can be printed (usually\n a string) or a list of objects.\n '
if isinstance(message, list):
self.messages = ErrorList([smart_unicode(msg) for msg in message])
else:
message = smart_unicode(message)
self.messages = ErrorList([message])
|
def __init__(self, message):
'\n ValidationError can be passed any object that can be printed (usually\n a string) or a list of objects.\n '
if isinstance(message, list):
self.messages = ErrorList([smart_unicode(msg) for msg in message])
else:
message = smart_unicode(message)
self.messages = ErrorList([message])<|docstring|>ValidationError can be passed any object that can be printed (usually
a string) or a list of objects.<|endoftext|>
|
cf536b12ed9faa56122afc4a9378b4b9f0fe49e5c414c41ec0dec244ac6cd18b
|
def test_png_path_basic():
'basic lib.figure.Figure.html_data_table() test'
assert (Figure(DATA).png_path() == RELATIVE_PATH)
|
basic lib.figure.Figure.html_data_table() test
|
tools/perf/tests/lib/figure/test_png_path.py
|
test_png_path_basic
|
pmem/rpma
| 83 |
python
|
def test_png_path_basic():
assert (Figure(DATA).png_path() == RELATIVE_PATH)
|
def test_png_path_basic():
assert (Figure(DATA).png_path() == RELATIVE_PATH)<|docstring|>basic lib.figure.Figure.html_data_table() test<|endoftext|>
|
0c6f52246f1ac1ad014e550d4a2ce1be0bebc05547142dd5b55a31d1cdd88784
|
def read_nims(fn):
'\n\n :param fn: DESCRIPTION\n :type fn: TYPE\n :return: DESCRIPTION\n :rtype: TYPE\n\n '
nims_obj = NIMS(fn)
nims_obj.read_nims()
return nims_obj.to_runts()
|
:param fn: DESCRIPTION
:type fn: TYPE
:return: DESCRIPTION
:rtype: TYPE
|
mth5/io/nims.py
|
read_nims
|
kujaku11/mth5
| 5 |
python
|
def read_nims(fn):
'\n\n :param fn: DESCRIPTION\n :type fn: TYPE\n :return: DESCRIPTION\n :rtype: TYPE\n\n '
nims_obj = NIMS(fn)
nims_obj.read_nims()
return nims_obj.to_runts()
|
def read_nims(fn):
'\n\n :param fn: DESCRIPTION\n :type fn: TYPE\n :return: DESCRIPTION\n :rtype: TYPE\n\n '
nims_obj = NIMS(fn)
nims_obj.read_nims()
return nims_obj.to_runts()<|docstring|>:param fn: DESCRIPTION
:type fn: TYPE
:return: DESCRIPTION
:rtype: TYPE<|endoftext|>
|
7332a0dd98ab99c66e1917c0b81ea33d69c357cb4970531fc2dc0b211e97f970
|
def __str__(self):
'string representation'
msg = [f'type = {self.gps_type}', f'index = {self.index}', f'fix = {self.fix}', f'time_stamp = {self.time_stamp}', f'latitude = {self.latitude}', f'longitude = {self.longitude}', f'elevation = {self.elevation}', f'declination = {self.declination}']
return '\n'.join(msg)
|
string representation
|
mth5/io/nims.py
|
__str__
|
kujaku11/mth5
| 5 |
python
|
def __str__(self):
msg = [f'type = {self.gps_type}', f'index = {self.index}', f'fix = {self.fix}', f'time_stamp = {self.time_stamp}', f'latitude = {self.latitude}', f'longitude = {self.longitude}', f'elevation = {self.elevation}', f'declination = {self.declination}']
return '\n'.join(msg)
|
def __str__(self):
msg = [f'type = {self.gps_type}', f'index = {self.index}', f'fix = {self.fix}', f'time_stamp = {self.time_stamp}', f'latitude = {self.latitude}', f'longitude = {self.longitude}', f'elevation = {self.elevation}', f'declination = {self.declination}']
return '\n'.join(msg)<|docstring|>string representation<|endoftext|>
|
44c91ff5dee00c21b150a52410adac667333083f616f5101afaccda4a739ad15
|
def validate_gps_string(self, gps_string):
"\n make sure the string is valid, remove any binary numbers and find\n the end of the string as '*'\n\n :param string gps_string: raw GPS string to be validated\n\n :returns: validated string or None if there is something wrong\n "
for replace_str in [b'\xd9', b'\xc7', b'\xcc']:
gps_string = gps_string.replace(replace_str, b'')
gps_string = gps_string.replace(b'\x00', b'*')
if (gps_string.find(b'*') < 0):
logging.debug('GPSError: No end to stamp {0}'.format(gps_string))
else:
try:
gps_string = gps_string[0:gps_string.find(b'*')].decode()
return gps_string
except UnicodeDecodeError:
logging.debug('GPSError: stamp not correct format, {0}'.format(gps_string))
return None
|
make sure the string is valid, remove any binary numbers and find
the end of the string as '*'
:param string gps_string: raw GPS string to be validated
:returns: validated string or None if there is something wrong
|
mth5/io/nims.py
|
validate_gps_string
|
kujaku11/mth5
| 5 |
python
|
def validate_gps_string(self, gps_string):
"\n make sure the string is valid, remove any binary numbers and find\n the end of the string as '*'\n\n :param string gps_string: raw GPS string to be validated\n\n :returns: validated string or None if there is something wrong\n "
for replace_str in [b'\xd9', b'\xc7', b'\xcc']:
        gps_string = gps_string.replace(replace_str, b'')
gps_string = gps_string.replace(b'\x00', b'*')
if (gps_string.find(b'*') < 0):
logging.debug('GPSError: No end to stamp {0}'.format(gps_string))
else:
try:
gps_string = gps_string[0:gps_string.find(b'*')].decode()
return gps_string
except UnicodeDecodeError:
logging.debug('GPSError: stamp not correct format, {0}'.format(gps_string))
return None
|
def validate_gps_string(self, gps_string):
"\n make sure the string is valid, remove any binary numbers and find\n the end of the string as '*'\n\n :param string gps_string: raw GPS string to be validated\n\n :returns: validated string or None if there is something wrong\n "
for replace_str in [b'\xd9', b'\xc7', b'\xcc']:
        gps_string = gps_string.replace(replace_str, b'')
gps_string = gps_string.replace(b'\x00', b'*')
if (gps_string.find(b'*') < 0):
logging.debug('GPSError: No end to stamp {0}'.format(gps_string))
else:
try:
gps_string = gps_string[0:gps_string.find(b'*')].decode()
return gps_string
except UnicodeDecodeError:
logging.debug('GPSError: stamp not correct format, {0}'.format(gps_string))
return None<|docstring|>make sure the string is valid, remove any binary numbers and find
the end of the string as '*'
:param string gps_string: raw GPS string to be validated
:returns: validated string or None if there is something wrong<|endoftext|>
|
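The cleaning step above can be illustrated standalone; this sketch mirrors its body (strip a few stray binary bytes, treat a NUL byte as the '*' terminator, decode everything before it). The raw stamp content is made up for illustration.
raw = b"GPGGA,183511,4730.1234,N,11815.5678,W,1,05,1.2,1050.0,M\x00\xd9junk"
for junk in (b"\xd9", b"\xc7", b"\xcc"):
    raw = raw.replace(junk, b"")
raw = raw.replace(b"\x00", b"*")
stamp = raw[0:raw.find(b"*")].decode() if raw.find(b"*") >= 0 else None
print(stamp)  # GPGGA,183511,4730.1234,N,11815.5678,W,1,05,1.2,1050.0,M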
1acf6e6a2ca9dcc7e03b62be029a9acceed89c79657e7b57ad49eee2b49ba1a0
|
def parse_gps_string(self, gps_string):
'\n Parse a raw gps string from the NIMS and set appropriate attributes.\n GPS string will first be validated, then parsed.\n\n :param string gps_string: raw GPS string to be parsed\n '
gps_string = self.validate_gps_string(gps_string)
if (gps_string is None):
self.valid = False
return
if isinstance(gps_string, bytes):
gps_list = gps_string.strip().split(b',')
gps_list = [value.decode() for value in gps_list]
else:
gps_list = gps_string.strip().split(',')
if (len(gps_list) > 1):
if (len(gps_list[1]) > 6):
self.logger.debug('GPS time and lat missing a comma adding one, check time')
gps_list = ((gps_list[0:1] + [gps_list[1][0:6], gps_list[1][6:]]) + gps_list[2:])
(gps_list, error_list) = self.validate_gps_list(gps_list)
if (len(error_list) > 0):
for error in error_list:
logging.debug(('GPSError: ' + error))
if (gps_list is None):
return
attr_dict = self.type_dict[gps_list[0].lower()]
for (index, value) in enumerate(gps_list):
setattr(self, ('_' + attr_dict[index]), value)
if (None not in gps_list):
self.valid = True
self.gps_string = gps_string
|
Parse a raw gps string from the NIMS and set appropriate attributes.
GPS string will first be validated, then parsed.
:param string gps_string: raw GPS string to be parsed
|
mth5/io/nims.py
|
parse_gps_string
|
kujaku11/mth5
| 5 |
python
|
def parse_gps_string(self, gps_string):
'\n Parse a raw gps string from the NIMS and set appropriate attributes.\n GPS string will first be validated, then parsed.\n\n :param string gps_string: raw GPS string to be parsed\n '
gps_string = self.validate_gps_string(gps_string)
if (gps_string is None):
self.valid = False
return
if isinstance(gps_string, bytes):
gps_list = gps_string.strip().split(b',')
gps_list = [value.decode() for value in gps_list]
else:
gps_list = gps_string.strip().split(',')
if (len(gps_list) > 1):
if (len(gps_list[1]) > 6):
self.logger.debug('GPS time and lat missing a comma adding one, check time')
gps_list = ((gps_list[0:1] + [gps_list[1][0:6], gps_list[1][6:]]) + gps_list[2:])
(gps_list, error_list) = self.validate_gps_list(gps_list)
if (len(error_list) > 0):
for error in error_list:
logging.debug(('GPSError: ' + error))
if (gps_list is None):
return
attr_dict = self.type_dict[gps_list[0].lower()]
for (index, value) in enumerate(gps_list):
setattr(self, ('_' + attr_dict[index]), value)
if (None not in gps_list):
self.valid = True
self.gps_string = gps_string
|
def parse_gps_string(self, gps_string):
'\n Parse a raw gps string from the NIMS and set appropriate attributes.\n GPS string will first be validated, then parsed.\n\n :param string gps_string: raw GPS string to be parsed\n '
gps_string = self.validate_gps_string(gps_string)
if (gps_string is None):
self.valid = False
return
if isinstance(gps_string, bytes):
gps_list = gps_string.strip().split(b',')
gps_list = [value.decode() for value in gps_list]
else:
gps_list = gps_string.strip().split(',')
if (len(gps_list) > 1):
if (len(gps_list[1]) > 6):
self.logger.debug('GPS time and lat missing a comma adding one, check time')
gps_list = ((gps_list[0:1] + [gps_list[1][0:6], gps_list[1][6:]]) + gps_list[2:])
(gps_list, error_list) = self.validate_gps_list(gps_list)
if (len(error_list) > 0):
for error in error_list:
logging.debug(('GPSError: ' + error))
if (gps_list is None):
return
attr_dict = self.type_dict[gps_list[0].lower()]
for (index, value) in enumerate(gps_list):
setattr(self, ('_' + attr_dict[index]), value)
if (None not in gps_list):
self.valid = True
self.gps_string = gps_string<|docstring|>Parse a raw gps string from the NIMS and set appropriate attributes.
GPS string will first be validated, then parsed.
:param string gps_string: raw GPS string to be parsed<|endoftext|>
|
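One detail worth a standalone illustration is the repair step above for stamps whose time and latitude fields were fused by a missing comma; the sentence text below is invented for illustration.
gps_string = "GPRMC,1835114730.1234,N,11815.5678,W"  # time and latitude fused
gps_list = gps_string.strip().split(",")
if len(gps_list) > 1 and len(gps_list[1]) > 6:
    # Re-insert the missing comma: the first 6 characters are the HHMMSS time field.
    gps_list = gps_list[0:1] + [gps_list[1][0:6], gps_list[1][6:]] + gps_list[2:]
print(gps_list)  # ['GPRMC', '183511', '4730.1234', 'N', '11815.5678', 'W']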
8610b4ab9021d899a4dcd9ed78312299c1df75cb80953b1ec7a13b2209ef855b
|
def validate_gps_list(self, gps_list):
'\n check to make sure the gps stamp is the correct format, checks each element\n for the proper format\n\n :param gps_list: a parsed gps string from a NIMS\n :type gps_list: list\n :raises: :class:`mth5.io.nims.GPSError` if anything is wrong.\n '
error_list = []
try:
gps_list = self._validate_gps_type(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
g_type = gps_list[0].lower()
try:
self._validate_list_length(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
try:
gps_list[self.type_dict[g_type]['time']] = self._validate_time(gps_list[self.type_dict[g_type]['time']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['time']] = None
try:
gps_list[self.type_dict[g_type]['latitude']] = self._validate_latitude(gps_list[self.type_dict[g_type]['latitude']], gps_list[self.type_dict[g_type]['latitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['latitude']] = None
try:
gps_list[self.type_dict[g_type]['longitude']] = self._validate_longitude(gps_list[self.type_dict[g_type]['longitude']], gps_list[self.type_dict[g_type]['longitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['longitude']] = None
if (g_type == 'gprmc'):
try:
gps_list[self.type_dict['gprmc']['date']] = self._validate_date(gps_list[self.type_dict['gprmc']['date']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['date']] = None
elif (g_type == 'gpgga'):
try:
gps_list[self.type_dict['gpgga']['elevation']] = self._validate_elevation(gps_list[self.type_dict['gpgga']['elevation']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict['gpgga']['elevation']] = None
return (gps_list, error_list)
|
check to make sure the gps stamp is the correct format, checks each element
for the proper format
:param gps_list: a parsed gps string from a NIMS
:type gps_list: list
:raises: :class:`mth5.io.nims.GPSError` if anything is wrong.
|
mth5/io/nims.py
|
validate_gps_list
|
kujaku11/mth5
| 5 |
python
|
def validate_gps_list(self, gps_list):
'\n check to make sure the gps stamp is the correct format, checks each element\n for the proper format\n\n :param gps_list: a parsed gps string from a NIMS\n :type gps_list: list\n :raises: :class:`mth5.io.nims.GPSError` if anything is wrong.\n '
error_list = []
try:
gps_list = self._validate_gps_type(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
g_type = gps_list[0].lower()
try:
self._validate_list_length(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
try:
gps_list[self.type_dict[g_type]['time']] = self._validate_time(gps_list[self.type_dict[g_type]['time']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['time']] = None
try:
gps_list[self.type_dict[g_type]['latitude']] = self._validate_latitude(gps_list[self.type_dict[g_type]['latitude']], gps_list[self.type_dict[g_type]['latitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['latitude']] = None
try:
gps_list[self.type_dict[g_type]['longitude']] = self._validate_longitude(gps_list[self.type_dict[g_type]['longitude']], gps_list[self.type_dict[g_type]['longitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['longitude']] = None
if (g_type == 'gprmc'):
try:
gps_list[self.type_dict['gprmc']['date']] = self._validate_date(gps_list[self.type_dict['gprmc']['date']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['date']] = None
elif (g_type == 'gpgga'):
try:
gps_list[self.type_dict['gpgga']['elevation']] = self._validate_elevation(gps_list[self.type_dict['gpgga']['elevation']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict['gpgga']['elevation']] = None
return (gps_list, error_list)
|
def validate_gps_list(self, gps_list):
'\n check to make sure the gps stamp is the correct format, checks each element\n for the proper format\n\n :param gps_list: a parsed gps string from a NIMS\n :type gps_list: list\n :raises: :class:`mth5.io.nims.GPSError` if anything is wrong.\n '
error_list = []
try:
gps_list = self._validate_gps_type(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
g_type = gps_list[0].lower()
try:
self._validate_list_length(gps_list)
except GPSError as error:
error_list.append(error.args[0])
return (None, error_list)
try:
gps_list[self.type_dict[g_type]['time']] = self._validate_time(gps_list[self.type_dict[g_type]['time']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['time']] = None
try:
gps_list[self.type_dict[g_type]['latitude']] = self._validate_latitude(gps_list[self.type_dict[g_type]['latitude']], gps_list[self.type_dict[g_type]['latitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['latitude']] = None
try:
gps_list[self.type_dict[g_type]['longitude']] = self._validate_longitude(gps_list[self.type_dict[g_type]['longitude']], gps_list[self.type_dict[g_type]['longitude_hemisphere']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['longitude']] = None
if (g_type == 'gprmc'):
try:
gps_list[self.type_dict['gprmc']['date']] = self._validate_date(gps_list[self.type_dict['gprmc']['date']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict[g_type]['date']] = None
elif (g_type == 'gpgga'):
try:
gps_list[self.type_dict['gpgga']['elevation']] = self._validate_elevation(gps_list[self.type_dict['gpgga']['elevation']])
except GPSError as error:
error_list.append(error.args[0])
gps_list[self.type_dict['gpgga']['elevation']] = None
return (gps_list, error_list)<|docstring|>check to make sure the gps stamp is the correct format, checks each element
for the proper format
:param gps_list: a parsed gps string from a NIMS
:type gps_list: list
:raises: :class:`mth5.io.nims.GPSError` if anything is wrong.<|endoftext|>
|
cb2ca816b5cd11509e9abda6776fca12fb77156304b818135633ee685cdd4ee1
|
def _validate_gps_type(self, gps_list):
'Validate gps type should be gpgga or gprmc'
gps_type = gps_list[0].lower()
if ('gpg' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPGGA', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPGGA'
elif ('gpr' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPRMC', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPRMC'
gps_type = gps_list[0].lower()
if (gps_type not in ['gpgga', 'gprmc']):
raise GPSError(('GPS String type not correct. ' + 'Expect GPGGA or GPRMC, got {0}'.format(gps_type.upper())))
return gps_list
|
Validate gps type should be gpgga or gprmc
|
mth5/io/nims.py
|
_validate_gps_type
|
kujaku11/mth5
| 5 |
python
|
def _validate_gps_type(self, gps_list):
gps_type = gps_list[0].lower()
if ('gpg' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPGGA', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPGGA'
elif ('gpr' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPRMC', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPRMC'
gps_type = gps_list[0].lower()
if (gps_type not in ['gpgga', 'gprmc']):
raise GPSError(('GPS String type not correct. ' + 'Expect GPGGA or GPRMC, got {0}'.format(gps_type.upper())))
return gps_list
|
def _validate_gps_type(self, gps_list):
gps_type = gps_list[0].lower()
if ('gpg' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPGGA', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPGGA'
elif ('gpr' in gps_type):
if (len(gps_type) > 5):
gps_list = (['GPRMC', gps_type[(- 6):]] + gps_list[1:])
elif (len(gps_type) < 5):
gps_list[0] = 'GPRMC'
gps_type = gps_list[0].lower()
if (gps_type not in ['gpgga', 'gprmc']):
raise GPSError(('GPS String type not correct. ' + 'Expect GPGGA or GPRMC, got {0}'.format(gps_type.upper())))
return gps_list<|docstring|>Validate gps type should be gpgga or gprmc<|endoftext|>
|
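A standalone sketch of the normalization above for the case where the sentence id and the time field were fused; the input list is illustrative.
gps_list = ["gpgga183511", "4730.1234", "N"]
gps_type = gps_list[0].lower()
if "gpg" in gps_type and len(gps_type) > 5:
    # Keep the trailing 6 characters (HHMMSS) as their own field and restore the id.
    gps_list = ["GPGGA", gps_type[-6:]] + gps_list[1:]
print(gps_list)  # ['GPGGA', '183511', '4730.1234', 'N']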
301fbd06f1dd8b7083a8dce5875db82c9e883b9f8460d7f079a0a6e7a7b54ae8
|
def _validate_list_length(self, gps_list):
'validate gps list length based on type of string'
gps_list_type = gps_list[0].lower()
expected_len = self.type_dict[gps_list_type]['length']
if (len(gps_list) not in expected_len):
raise GPSError(('GPS string not correct length for {0}. '.format(gps_list_type.upper()) + 'Expected {0}, got {1} \n{2}'.format(expected_len, len(gps_list), ','.join(gps_list))))
|
validate gps list length based on type of string
|
mth5/io/nims.py
|
_validate_list_length
|
kujaku11/mth5
| 5 |
python
|
def _validate_list_length(self, gps_list):
gps_list_type = gps_list[0].lower()
expected_len = self.type_dict[gps_list_type]['length']
if (len(gps_list) not in expected_len):
raise GPSError(('GPS string not correct length for {0}. '.format(gps_list_type.upper()) + 'Expected {0}, got {1} \n{2}'.format(expected_len, len(gps_list), ','.join(gps_list))))
|
def _validate_list_length(self, gps_list):
gps_list_type = gps_list[0].lower()
expected_len = self.type_dict[gps_list_type]['length']
if (len(gps_list) not in expected_len):
raise GPSError(('GPS string not correct length for {0}. '.format(gps_list_type.upper()) + 'Expected {0}, got {1} \n{2}'.format(expected_len, len(gps_list), ','.join(gps_list))))<|docstring|>validate gps list length based on type of string<|endoftext|>
|
2801821ba662d841f8f75c046299feddd7433740d4f2a649867b654e6af2a5e1
|
def _validate_time(self, time_str):
'validate time string, should be 6 characters long and an int'
if (len(time_str) != 6):
raise GPSError(('Length of time string {0} not correct. '.format(time_str) + 'Expected 6 got {0}'.format(len(time_str))))
try:
int(time_str)
except ValueError:
raise GPSError('Could not convert time string {0}'.format(time_str))
return time_str
|
validate time string, should be 6 characters long and an int
|
mth5/io/nims.py
|
_validate_time
|
kujaku11/mth5
| 5 |
python
|
def _validate_time(self, time_str):
if (len(time_str) != 6):
raise GPSError(('Length of time string {0} not correct. '.format(time_str) + 'Expected 6 got {0}'.format(len(time_str))))
try:
int(time_str)
except ValueError:
raise GPSError('Could not convert time string {0}'.format(time_str))
return time_str
|
def _validate_time(self, time_str):
if (len(time_str) != 6):
raise GPSError(('Length of time string {0} not correct. '.format(time_str) + 'Expected 6 got {0}'.format(len(time_str))))
try:
int(time_str)
except ValueError:
raise GPSError('Could not convert time string {0}'.format(time_str))
return time_str<|docstring|>validate time string, should be 6 characters long and an int<|endoftext|>
|
e4e14a92f31ed11f64bf6f1e0f6334312f3de3194821d9440887dc9ce0645d8f
|
def _validate_date(self, date_str):
'validate date string, should be 6 characters long and an int'
if (len(date_str) != 6):
raise GPSError(('Length of date string not correct {0}. '.format(date_str) + 'Expected 6 got {0}'.format(len(date_str))))
try:
int(date_str)
except ValueError:
raise GPSError('Could not convert date string {0}'.format(date_str))
return date_str
|
validate date string, should be 6 characters long and an int
|
mth5/io/nims.py
|
_validate_date
|
kujaku11/mth5
| 5 |
python
|
def _validate_date(self, date_str):
if (len(date_str) != 6):
raise GPSError(('Length of date string not correct {0}. '.format(date_str) + 'Expected 6 got {0}'.format(len(date_str))))
try:
int(date_str)
except ValueError:
raise GPSError('Could not convert date string {0}'.format(date_str))
return date_str
|
def _validate_date(self, date_str):
if (len(date_str) != 6):
raise GPSError(('Length of date string not correct {0}. '.format(date_str) + 'Expected 6 got {0}'.format(len(date_str))))
try:
int(date_str)
except ValueError:
raise GPSError('Could not convert date string {0}'.format(date_str))
return date_str<|docstring|>validate date string, should be 6 characters long and an int<|endoftext|>
|
6ec85580f871a7e67d44500c3c559acb1fe2a8bb0348534122e5de68782739fa
|
def _validate_latitude(self, latitude_str, hemisphere_str):
'validate latitude, should have hemisphere string with it'
if (len(latitude_str) < 8):
raise GPSError(('Latitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(latitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Latitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['n', 's']):
raise GPSError('Latitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(latitude_str)
except ValueError:
raise GPSError('Could not convert latitude string {0}'.format(latitude_str))
return latitude_str
|
validate latitude, should have hemisphere string with it
|
mth5/io/nims.py
|
_validate_latitude
|
kujaku11/mth5
| 5 |
python
|
def _validate_latitude(self, latitude_str, hemisphere_str):
if (len(latitude_str) < 8):
raise GPSError(('Latitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(latitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Latitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['n', 's']):
raise GPSError('Latitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(latitude_str)
except ValueError:
raise GPSError('Could not convert latitude string {0}'.format(latitude_str))
return latitude_str
|
def _validate_latitude(self, latitude_str, hemisphere_str):
if (len(latitude_str) < 8):
raise GPSError(('Latitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(latitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Latitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['n', 's']):
raise GPSError('Latitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(latitude_str)
except ValueError:
raise GPSError('Could not convert latitude string {0}'.format(latitude_str))
return latitude_str<|docstring|>validate latitude, should have hemisphere string with it<|endoftext|>
|
849bfe938aa2eb696fc3a0ad971cab70f911602ba95cfaca7edbff67d9346343
|
def _validate_longitude(self, longitude_str, hemisphere_str):
'validate longitude, should have hemisphere string with it'
if (len(longitude_str) < 8):
raise GPSError(('Longitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(longitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Longitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['e', 'w']):
raise GPSError('Longitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(longitude_str)
except ValueError:
raise GPSError('Could not convert longitude string {0}'.format(longitude_str))
return longitude_str
|
validate longitude, should have hemisphere string with it
|
mth5/io/nims.py
|
_validate_longitude
|
kujaku11/mth5
| 5 |
python
|
def _validate_longitude(self, longitude_str, hemisphere_str):
if (len(longitude_str) < 8):
raise GPSError(('Longitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(longitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Longitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['e', 'w']):
raise GPSError('Longitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(longitude_str)
except ValueError:
raise GPSError('Could not convert longitude string {0}'.format(longitude_str))
return longitude_str
|
def _validate_longitude(self, longitude_str, hemisphere_str):
if (len(longitude_str) < 8):
raise GPSError(('Longitude string should be larger than 7 characters. ' + 'Got {0}'.format(len(longitude_str))))
if (len(hemisphere_str) != 1):
raise GPSError(('Longitude hemisphere should be 1 character. ' + 'Got {0}'.format(len(hemisphere_str))))
if (hemisphere_str.lower() not in ['e', 'w']):
raise GPSError('Longitude hemisphere {0} not understood'.format(hemisphere_str.upper()))
try:
float(longitude_str)
except ValueError:
raise GPSError('Could not convert longitude string {0}'.format(longitude_str))
return longitude_str<|docstring|>validate longitude, should have hemisphere string with it<|endoftext|>
|
09ff4e1ed275abe52b9be0dd130ae6e0919c0c1afc6a3be4d7936236f6a97589
|
def _validate_elevation(self, elevation_str):
    'validate elevation, check for conversion to float'
elevation_str = elevation_str.lower().replace('m', '')
try:
elevation_str = f'{float(elevation_str):0.2f}'
except ValueError:
raise GPSError(f'Elevation could not be converted {elevation_str}')
return elevation_str
|
validate elevation, check for conversion to float
|
mth5/io/nims.py
|
_validate_elevation
|
kujaku11/mth5
| 5 |
python
|
def _validate_elevation(self, elevation_str):
    elevation_str = elevation_str.lower().replace('m', '')
try:
elevation_str = f'{float(elevation_str):0.2f}'
except ValueError:
raise GPSError(f'Elevation could not be converted {elevation_str}')
return elevation_str
|
def _validate_elevation(self, elevation_str):
    elevation_str = elevation_str.lower().replace('m', '')
try:
elevation_str = f'{float(elevation_str):0.2f}'
except ValueError:
raise GPSError(f'Elevation could not be converted {elevation_str}')
    return elevation_str<|docstring|>validate elevation, check for conversion to float<|endoftext|>
|
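A worked example of the normalization above (strip a trailing unit letter, then format to two decimals); the input value is illustrative.
elevation_str = "1050.0M"
elevation_str = elevation_str.lower().replace("m", "")
elevation_str = f"{float(elevation_str):0.2f}"
print(elevation_str)  # 1050.00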
a003d4d10239901c84fd870b56dd62e9f31b9f58a1577006faf5f1a010175571
|
@property
def latitude(self):
'\n Latitude in decimal degrees, WGS84\n '
if ((self._latitude is not None) and (self._latitude_hemisphere is not None)):
index = (len(self._latitude) - 7)
lat = (float(self._latitude[0:index]) + (float(self._latitude[index:]) / 60))
if ('s' in self._latitude_hemisphere.lower()):
lat *= (- 1)
return lat
return 0.0
|
Latitude in decimal degrees, WGS84
|
mth5/io/nims.py
|
latitude
|
kujaku11/mth5
| 5 |
python
|
@property
def latitude(self):
'\n \n '
if ((self._latitude is not None) and (self._latitude_hemisphere is not None)):
index = (len(self._latitude) - 7)
lat = (float(self._latitude[0:index]) + (float(self._latitude[index:]) / 60))
if ('s' in self._latitude_hemisphere.lower()):
lat *= (- 1)
return lat
return 0.0
|
@property
def latitude(self):
'\n \n '
if ((self._latitude is not None) and (self._latitude_hemisphere is not None)):
index = (len(self._latitude) - 7)
lat = (float(self._latitude[0:index]) + (float(self._latitude[index:]) / 60))
if ('s' in self._latitude_hemisphere.lower()):
lat *= (- 1)
return lat
return 0.0<|docstring|>Latitude in decimal degrees, WGS84<|endoftext|>
|
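A worked example of the NMEA ddmm.mmmm-to-decimal-degrees conversion used by the property above; the coordinate is illustrative.
lat_str, hemisphere = "4730.1234", "N"
index = len(lat_str) - 7                      # split degrees from mm.mmmm
lat = float(lat_str[0:index]) + float(lat_str[index:]) / 60
if "s" in hemisphere.lower():
    lat *= -1
print(round(lat, 6))  # 47.502057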
15eef7c6292ab6fe456db20add7c867253237d82ef87102f6cd3cf38095a6e63
|
@property
def longitude(self):
'\n Latitude in decimal degrees, WGS84\n '
if ((self._longitude is not None) and (self._longitude_hemisphere is not None)):
index = (len(self._longitude) - 7)
lon = (float(self._longitude[0:index]) + (float(self._longitude[index:]) / 60))
if ('w' in self._longitude_hemisphere.lower()):
lon *= (- 1)
return lon
return 0.0
|
Latitude in decimal degrees, WGS84
|
mth5/io/nims.py
|
longitude
|
kujaku11/mth5
| 5 |
python
|
@property
def longitude(self):
'\n \n '
if ((self._longitude is not None) and (self._longitude_hemisphere is not None)):
index = (len(self._longitude) - 7)
lon = (float(self._longitude[0:index]) + (float(self._longitude[index:]) / 60))
if ('w' in self._longitude_hemisphere.lower()):
lon *= (- 1)
return lon
return 0.0
|
@property
def longitude(self):
'\n \n '
if ((self._longitude is not None) and (self._longitude_hemisphere is not None)):
index = (len(self._longitude) - 7)
lon = (float(self._longitude[0:index]) + (float(self._longitude[index:]) / 60))
if ('w' in self._longitude_hemisphere.lower()):
lon *= (- 1)
return lon
return 0.0<|docstring|>Latitude in decimal degrees, WGS84<|endoftext|>
|
aeadd605773dd9b983338e85b3d73fc87eb4090ae80dcdf4c782e1e8dac88d6b
|
@property
def elevation(self):
'\n elevation in meters\n '
if (self._elevation is not None):
try:
return float(self._elevation)
except ValueError:
self.logger.error(('GPSError: Could not get elevation GPS string' + f'not complete {self.gps_string}'))
return 0.0
|
elevation in meters
|
mth5/io/nims.py
|
elevation
|
kujaku11/mth5
| 5 |
python
|
@property
def elevation(self):
'\n \n '
if (self._elevation is not None):
try:
return float(self._elevation)
except ValueError:
self.logger.error(('GPSError: Could not get elevation GPS string' + f'not complete {self.gps_string}'))
return 0.0
|
@property
def elevation(self):
'\n \n '
if (self._elevation is not None):
try:
return float(self._elevation)
except ValueError:
self.logger.error(('GPSError: Could not get elevation GPS string' + f'not complete {self.gps_string}'))
return 0.0<|docstring|>elevation in meters<|endoftext|>
|
13018a884caa8851dab06940707c68c555872cd8b4b82fa281625662b827c63a
|
@property
def time_stamp(self):
'\n return a datetime object of the time stamp\n '
if (self._time is None):
return None
if (self._date is None):
self._date = '010180'
try:
return dateutil.parser.parse('{0} {1}'.format(self._date, self._time), dayfirst=True)
except ValueError:
self.logger.error(f'GPSError: bad date string {self.gps_string}')
return None
|
return a datetime object of the time stamp
|
mth5/io/nims.py
|
time_stamp
|
kujaku11/mth5
| 5 |
python
|
@property
def time_stamp(self):
'\n \n '
if (self._time is None):
return None
if (self._date is None):
self._date = '010180'
try:
return dateutil.parser.parse('{0} {1}'.format(self._date, self._time), dayfirst=True)
except ValueError:
self.logger.error(f'GPSError: bad date string {self.gps_string}')
return None
|
@property
def time_stamp(self):
'\n \n '
if (self._time is None):
return None
if (self._date is None):
self._date = '010180'
try:
return dateutil.parser.parse('{0} {1}'.format(self._date, self._time), dayfirst=True)
except ValueError:
self.logger.error(f'GPSError: bad date string {self.gps_string}')
return None<|docstring|>return a datetime object of the time stamp<|endoftext|>
|
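A small example of the date/time parsing the property above relies on; '010180' is the fallback date the property substitutes when only a time is present.
import dateutil.parser

stamp = dateutil.parser.parse("010180 183511", dayfirst=True)
print(stamp)  # 1980-01-01 18:35:11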
43c58692b2b9c76759c3799a1dc87a798d761016885a8c64042b3bb507ec029c
|
@property
def declination(self):
'\n geomagnetic declination in degrees from north\n '
if ((self._declination is None) or (self._declination_hemisphere is None)):
return None
dec = float(self._declination)
if ('w' in self._declination_hemisphere.lower()):
dec *= (- 1)
return dec
|
geomagnetic declination in degrees from north
|
mth5/io/nims.py
|
declination
|
kujaku11/mth5
| 5 |
python
|
@property
def declination(self):
'\n \n '
if ((self._declination is None) or (self._declination_hemisphere is None)):
return None
dec = float(self._declination)
if ('w' in self._declination_hemisphere.lower()):
dec *= (- 1)
return dec
|
@property
def declination(self):
'\n \n '
if ((self._declination is None) or (self._declination_hemisphere is None)):
return None
dec = float(self._declination)
if ('w' in self._declination_hemisphere.lower()):
dec *= (- 1)
return dec<|docstring|>geomagnetic declination in degrees from north<|endoftext|>
|
71d8050c7d4ba33d6e736c5d3f4adaa15e65943375bd7738b17041353d70f89a
|
@property
def gps_type(self):
'GPRMC or GPGGA'
return self._type
|
GPRMC or GPGGA
|
mth5/io/nims.py
|
gps_type
|
kujaku11/mth5
| 5 |
python
|
@property
def gps_type(self):
return self._type
|
@property
def gps_type(self):
return self._type<|docstring|>GPRMC or GPGGA<|endoftext|>
|
6bf857c5d8c7d8845b29b6021c2e062379ad9381e5556e7b4a1d5937756261ce
|
@property
def fix(self):
'\n GPS fixed\n '
if hasattr(self, '_fix'):
return self._fix
return None
|
GPS fixed
|
mth5/io/nims.py
|
fix
|
kujaku11/mth5
| 5 |
python
|
@property
def fix(self):
'\n \n '
if hasattr(self, '_fix'):
return self._fix
return None
|
@property
def fix(self):
'\n \n '
if hasattr(self, '_fix'):
return self._fix
return None<|docstring|>GPS fixed<|endoftext|>
|
882c08674868a2952af0940fc9caab655a5db9d045ae044719a11238a97f5894
|
def read_header(self, fn=None):
'\n read header information\n\n :param fn: full path to file to read\n :type fn: string or :class:`pathlib.Path`\n :raises: :class:`mth5.io.nims.NIMSError` if something is not right.\n\n '
if (fn is not None):
self.fn = fn
if (not os.path.exists(self.fn)):
msg = f'Could not find nims file {self.fn}'
self.logger.error(msg)
raise NIMSError(msg)
self.logger.info(f'Reading NIMS file {self.fn}')
with open(self.fn, 'rb') as fid:
header_str = fid.read(self._max_header_length)
header_list = header_str.split(b'\r')
self.header_dict = {}
last_index = len(header_list)
last_line = header_list[(- 1)]
for (ii, line) in enumerate(header_list[0:(- 1)]):
if (ii == last_index):
break
if (b'comments' in line.lower()):
last_line = header_list[(ii + 1)]
last_index = (ii + 1)
line = line.decode()
if (line.find('>') == 0):
continue
elif (line.find(':') > 0):
(key, value) = line.split(':', 1)
self.header_dict[key.strip().lower()] = value.strip()
elif (line.find('<--') > 0):
(value, key) = line.split('<--')
self.header_dict[key.strip().lower()] = value.strip()
if (last_line.count(b' ') > 0):
if (last_line[0:1] == b' '):
last_line = last_line.strip()
else:
last_line = last_line.split()[1].strip()
data_start_byte = last_line[0:1]
if (data_start_byte in [b'$', b'g']):
data_start_byte = last_line[1:2]
self.data_start_seek = header_str.find(data_start_byte)
self.parse_header_dict()
|
read header information
:param fn: full path to file to read
:type fn: string or :class:`pathlib.Path`
:raises: :class:`mth5.io.nims.NIMSError` if something is not right.
|
mth5/io/nims.py
|
read_header
|
kujaku11/mth5
| 5 |
python
|
def read_header(self, fn=None):
'\n read header information\n\n :param fn: full path to file to read\n :type fn: string or :class:`pathlib.Path`\n :raises: :class:`mth5.io.nims.NIMSError` if something is not right.\n\n '
if (fn is not None):
self.fn = fn
if (not os.path.exists(self.fn)):
msg = f'Could not find nims file {self.fn}'
self.logger.error(msg)
raise NIMSError(msg)
self.logger.info(f'Reading NIMS file {self.fn}')
with open(self.fn, 'rb') as fid:
header_str = fid.read(self._max_header_length)
header_list = header_str.split(b'\r')
self.header_dict = {}
last_index = len(header_list)
last_line = header_list[(- 1)]
for (ii, line) in enumerate(header_list[0:(- 1)]):
if (ii == last_index):
break
if (b'comments' in line.lower()):
last_line = header_list[(ii + 1)]
last_index = (ii + 1)
line = line.decode()
if (line.find('>') == 0):
continue
elif (line.find(':') > 0):
(key, value) = line.split(':', 1)
self.header_dict[key.strip().lower()] = value.strip()
elif (line.find('<--') > 0):
(value, key) = line.split('<--')
self.header_dict[key.strip().lower()] = value.strip()
if (last_line.count(b' ') > 0):
if (last_line[0:1] == b' '):
last_line = last_line.strip()
else:
last_line = last_line.split()[1].strip()
data_start_byte = last_line[0:1]
if (data_start_byte in [b'$', b'g']):
data_start_byte = last_line[1:2]
self.data_start_seek = header_str.find(data_start_byte)
self.parse_header_dict()
|
def read_header(self, fn=None):
'\n read header information\n\n :param fn: full path to file to read\n :type fn: string or :class:`pathlib.Path`\n :raises: :class:`mth5.io.nims.NIMSError` if something is not right.\n\n '
if (fn is not None):
self.fn = fn
if (not os.path.exists(self.fn)):
msg = f'Could not find nims file {self.fn}'
self.logger.error(msg)
raise NIMSError(msg)
self.logger.info(f'Reading NIMS file {self.fn}')
with open(self.fn, 'rb') as fid:
header_str = fid.read(self._max_header_length)
header_list = header_str.split(b'\r')
self.header_dict = {}
last_index = len(header_list)
last_line = header_list[(- 1)]
for (ii, line) in enumerate(header_list[0:(- 1)]):
if (ii == last_index):
break
if (b'comments' in line.lower()):
last_line = header_list[(ii + 1)]
last_index = (ii + 1)
line = line.decode()
if (line.find('>') == 0):
continue
elif (line.find(':') > 0):
(key, value) = line.split(':', 1)
self.header_dict[key.strip().lower()] = value.strip()
elif (line.find('<--') > 0):
(value, key) = line.split('<--')
self.header_dict[key.strip().lower()] = value.strip()
if (last_line.count(b' ') > 0):
if (last_line[0:1] == b' '):
last_line = last_line.strip()
else:
last_line = last_line.split()[1].strip()
data_start_byte = last_line[0:1]
if (data_start_byte in [b'$', b'g']):
data_start_byte = last_line[1:2]
self.data_start_seek = header_str.find(data_start_byte)
self.parse_header_dict()<|docstring|>read header information
:param fn: full path to file to read
:type fn: string or :class:`pathlib.Path`
:raises: :class:`mth5.io.nims.NIMSError` if something is not right.<|endoftext|>
|
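A standalone sketch of the key/value splitting used above; the header lines themselves are invented for illustration (NIMS headers mix 'key: value' and 'value <-- key' styles, as the two branches show).
header_lines = ["SITE NAME: Example Site", "100 0 <-- N/S Wire"]
header_dict = {}
for line in header_lines:
    if line.find(">") == 0:
        continue
    elif line.find(":") > 0:
        key, value = line.split(":", 1)
        header_dict[key.strip().lower()] = value.strip()
    elif line.find("<--") > 0:
        value, key = line.split("<--")
        header_dict[key.strip().lower()] = value.strip()
print(header_dict)  # {'site name': 'Example Site', 'n/s wire': '100 0'}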
8a1db0de3fa71eab9baa47840706b7d3fcfa10dd9c8a595ab23d7cd9a16c4e31
|
def parse_header_dict(self, header_dict=None):
'\n parse the header dictionary into something useful\n '
if (header_dict is not None):
self.header_dict = header_dict
assert isinstance(self.header_dict, dict)
for (key, value) in self.header_dict.items():
if ('wire' in key):
if (key.find('n') == 0):
self.ex_length = float(value.split()[0])
self.ex_azimuth = float(value.split()[1])
elif (key.find('e') == 0):
self.ey_length = float(value.split()[0])
self.ey_azimuth = float(value.split()[1])
elif ('system' in key):
self.box_id = value.split(';')[0].strip()
self.mag_id = value.split(';')[1].strip()
elif ('gps' in key):
gps_list = value.split()
self.header_gps_stamp = dateutil.parser.parse(' '.join(gps_list[0:2]), dayfirst=True)
self.header_gps_latitude = self._get_latitude(gps_list[2], gps_list[3])
self.header_gps_longitude = self._get_longitude(gps_list[4], gps_list[5])
self.header_gps_elevation = float(gps_list[6])
elif ('run' in key):
self.run_id = value.replace('"', '')
else:
setattr(self, key.replace(' ', '_').replace('/', '_'), value)
|
parse the header dictionary into something useful
|
mth5/io/nims.py
|
parse_header_dict
|
kujaku11/mth5
| 5 |
python
|
def parse_header_dict(self, header_dict=None):
'\n \n '
if (header_dict is not None):
self.header_dict = header_dict
assert isinstance(self.header_dict, dict)
for (key, value) in self.header_dict.items():
if ('wire' in key):
if (key.find('n') == 0):
self.ex_length = float(value.split()[0])
self.ex_azimuth = float(value.split()[1])
elif (key.find('e') == 0):
self.ey_length = float(value.split()[0])
self.ey_azimuth = float(value.split()[1])
elif ('system' in key):
self.box_id = value.split(';')[0].strip()
self.mag_id = value.split(';')[1].strip()
elif ('gps' in key):
gps_list = value.split()
self.header_gps_stamp = dateutil.parser.parse(' '.join(gps_list[0:2]), dayfirst=True)
self.header_gps_latitude = self._get_latitude(gps_list[2], gps_list[3])
self.header_gps_longitude = self._get_longitude(gps_list[4], gps_list[5])
self.header_gps_elevation = float(gps_list[6])
elif ('run' in key):
            self.run_id = value.replace('"', '')
else:
setattr(self, key.replace(' ', '_').replace('/', '_'), value)
|
def parse_header_dict(self, header_dict=None):
'\n \n '
if (header_dict is not None):
self.header_dict = header_dict
assert isinstance(self.header_dict, dict)
for (key, value) in self.header_dict.items():
if ('wire' in key):
if (key.find('n') == 0):
self.ex_length = float(value.split()[0])
self.ex_azimuth = float(value.split()[1])
elif (key.find('e') == 0):
self.ey_length = float(value.split()[0])
self.ey_azimuth = float(value.split()[1])
elif ('system' in key):
self.box_id = value.split(';')[0].strip()
self.mag_id = value.split(';')[1].strip()
elif ('gps' in key):
gps_list = value.split()
self.header_gps_stamp = dateutil.parser.parse(' '.join(gps_list[0:2]), dayfirst=True)
self.header_gps_latitude = self._get_latitude(gps_list[2], gps_list[3])
self.header_gps_longitude = self._get_longitude(gps_list[4], gps_list[5])
self.header_gps_elevation = float(gps_list[6])
elif ('run' in key):
            self.run_id = value.replace('"', '')
else:
setattr(self, key.replace(' ', '_').replace('/', '_'), value)<|docstring|>parse the header dictionary into something useful<|endoftext|>
|
29db8f7271e4a1c54bf9d517bc12fffd7bb34f204ce692b5dfda2f2742385288
|
@property
def latitude(self):
'\n median latitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the GPRMC stamp as they should be duplicates\n '
if (self.stamps is not None):
latitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
latitude[ii] = stamp[1][0].latitude
return np.median(latitude[np.nonzero(latitude)])
return self.header_gps_latitude
|
median latitude value from all the GPS stamps in decimal degrees
WGS84
Only get from the GPRMC stamp as they should be duplicates
|
mth5/io/nims.py
|
latitude
|
kujaku11/mth5
| 5 |
python
|
@property
def latitude(self):
'\n median latitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the GPRMC stamp as they should be duplicates\n '
if (self.stamps is not None):
latitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
latitude[ii] = stamp[1][0].latitude
return np.median(latitude[np.nonzero(latitude)])
return self.header_gps_latitude
|
@property
def latitude(self):
'\n median latitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the GPRMC stamp as they should be duplicates\n '
if (self.stamps is not None):
latitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
latitude[ii] = stamp[1][0].latitude
return np.median(latitude[np.nonzero(latitude)])
return self.header_gps_latitude<|docstring|>median latitude value from all the GPS stamps in decimal degrees
WGS84
Only get from the GPRMC stamp as they should be duplicates<|endoftext|>
|
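The median-over-nonzero pattern used by the property above, shown with made-up stamp values (zeros mark stamps that failed to parse):
import numpy as np

latitude = np.array([47.5020, 0.0, 47.5021, 47.5019])
print(np.median(latitude[np.nonzero(latitude)]))  # 47.502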
fcb09021e4f66dc467d4ba6a85777d1020303c4ec6667d124fb32b38dcd7f728
|
@property
def longitude(self):
'\n median longitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
longitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
longitude[ii] = stamp[1][0].longitude
return np.median(longitude[np.nonzero(longitude)])
return self.header_gps_longitude
|
median longitude value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets
|
mth5/io/nims.py
|
longitude
|
kujaku11/mth5
| 5 |
python
|
@property
def longitude(self):
'\n median longitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
longitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
longitude[ii] = stamp[1][0].longitude
return np.median(longitude[np.nonzero(longitude)])
return self.header_gps_longitude
|
@property
def longitude(self):
'\n median longitude value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
longitude = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
longitude[ii] = stamp[1][0].longitude
return np.median(longitude[np.nonzero(longitude)])
return self.header_gps_longitude<|docstring|>median longitude value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets<|endoftext|>
|
34f86457f909ebf10804cd7ea91969fddd009e47d948a391401cbade05667925
|
@property
def elevation(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
elevation = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (len(stamp[1]) == 1):
elev = stamp[1][0].elevation
if (len(stamp[1]) == 2):
elev = stamp[1][1].elevation
if (elev is None):
continue
elevation[ii] = elev
return np.median(elevation[np.nonzero(elevation)])
return self.header_gps_elevation
|
median elevation value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets
|
mth5/io/nims.py
|
elevation
|
kujaku11/mth5
| 5 |
python
|
@property
def elevation(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
elevation = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (len(stamp[1]) == 1):
elev = stamp[1][0].elevation
if (len(stamp[1]) == 2):
elev = stamp[1][1].elevation
if (elev is None):
continue
elevation[ii] = elev
return np.median(elevation[np.nonzero(elevation)])
return self.header_gps_elevation
|
@property
def elevation(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
elevation = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (len(stamp[1]) == 1):
elev = stamp[1][0].elevation
if (len(stamp[1]) == 2):
elev = stamp[1][1].elevation
if (elev is None):
continue
elevation[ii] = elev
return np.median(elevation[np.nonzero(elevation)])
return self.header_gps_elevation<|docstring|>median elevation value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets<|endoftext|>
|
c1881770f3d38046468e65db6e57006498a63c7591b92adbaf634bd2cfed5b81
|
@property
def declination(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
declination = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (stamp[1][0].gps_type == 'GPRMC'):
dec = stamp[1][0].declination
if (dec is None):
continue
declination[ii] = dec
return np.median(declination[np.nonzero(declination)])
return None
|
median elevation value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets
|
mth5/io/nims.py
|
declination
|
kujaku11/mth5
| 5 |
python
|
@property
def declination(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
declination = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (stamp[1][0].gps_type == 'GPRMC'):
dec = stamp[1][0].declination
if (dec is None):
continue
declination[ii] = dec
return np.median(declination[np.nonzero(declination)])
return None
|
@property
def declination(self):
'\n median elevation value from all the GPS stamps in decimal degrees\n WGS84\n\n Only get from the first stamp within the sets\n '
if (self.stamps is not None):
declination = np.zeros(len(self.stamps))
for (ii, stamp) in enumerate(self.stamps):
if (stamp[1][0].gps_type == 'GPRMC'):
dec = stamp[1][0].declination
if (dec is None):
continue
declination[ii] = dec
return np.median(declination[np.nonzero(declination)])
return None<|docstring|>median elevation value from all the GPS stamps in decimal degrees
WGS84
Only get from the first stamp within the sets<|endoftext|>
|
4731ce347fc85f9bbdfb7deaba0d63364e9d1322c240f5d39b48c4cf00390819
|
@property
def start_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[0]
return None
|
start time is the first good GPS time stamp minus the seconds to the
beginning of the time series.
|
mth5/io/nims.py
|
start_time
|
kujaku11/mth5
| 5 |
python
|
@property
def start_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[0]
return None
|
@property
def start_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[0]
return None<|docstring|>start time is the first good GPS time stamp minus the seconds to the
beginning of the time series.<|endoftext|>
|
4f0f953808c38e85b940275f048739a33307463be8efadb72183f3ae1e5c07fc
|
@property
def end_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[(- 1)]
return None
|
start time is the first good GPS time stamp minus the seconds to the
beginning of the time series.
|
mth5/io/nims.py
|
end_time
|
kujaku11/mth5
| 5 |
python
|
@property
def end_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[(- 1)]
return None
|
@property
def end_time(self):
'\n start time is the first good GPS time stamp minus the seconds to the\n beginning of the time series.\n '
if (self.stamps is not None):
return self.ts_data.index[(- 1)]
return None<|docstring|>start time is the first good GPS time stamp minus the seconds to the
beginning of the time series.<|endoftext|>
|
523504d7e04af19f5034c3ffb90b890498775bd25cc13ce483a88336f4379b3b
|
@property
def box_temperature(self):
'data logger temperature, sampled at 1 second'
if (self.ts_data is not None):
meta_dict = {'channel_number': 6, 'component': 'temperature', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': 1, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'auxiliary', 'units': 'celsius'}
temp = timeseries.ChannelTS('auxiliary', data=self.info_array['box_temp'], channel_metadata={'auxiliary': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
temp._ts = temp._ts.interp_like(self.hx._ts)
temp.channel_metadata.sample_rate = self.sample_rate
temp.channel_metadata.time_period.end = self.end_time.isoformat()
return temp
return None
|
data logger temperature, sampled at 1 second
|
mth5/io/nims.py
|
box_temperature
|
kujaku11/mth5
| 5 |
python
|
@property
def box_temperature(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 6, 'component': 'temperature', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': 1, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'auxiliary', 'units': 'celsius'}
temp = timeseries.ChannelTS('auxiliary', data=self.info_array['box_temp'], channel_metadata={'auxiliary': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
temp._ts = temp._ts.interp_like(self.hx._ts)
temp.channel_metadata.sample_rate = self.sample_rate
temp.channel_metadata.time_period.end = self.end_time.isoformat()
return temp
return None
|
@property
def box_temperature(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 6, 'component': 'temperature', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': 1, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'auxiliary', 'units': 'celsius'}
temp = timeseries.ChannelTS('auxiliary', data=self.info_array['box_temp'], channel_metadata={'auxiliary': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
temp._ts = temp._ts.interp_like(self.hx._ts)
temp.channel_metadata.sample_rate = self.sample_rate
temp.channel_metadata.time_period.end = self.end_time.isoformat()
return temp
return None<|docstring|>data logger temperature, sampled at 1 second<|endoftext|>
|
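The resampling step above (temp._ts.interp_like(self.hx._ts)) uses xarray's interp_like; a generic sketch with made-up numeric time axes, assuming xarray and scipy are available:
import numpy as np
import xarray as xr

temp_1hz = xr.DataArray(np.array([20.0, 20.5, 21.0]),
                        coords={"time": np.array([0.0, 1.0, 2.0])}, dims="time")
target_8hz = xr.DataArray(np.zeros(17),
                          coords={"time": np.linspace(0.0, 2.0, 17)}, dims="time")
print(temp_1hz.interp_like(target_8hz).values)  # 17 linearly interpolated samples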
2a463b5ca341d6767b881005f71f2f4c0864ca9b46e800f9442b594a7e13912b
|
@property
def hx(self):
'HX'
if (self.ts_data is not None):
meta_dict = {'channel_number': 1, 'component': 'hx', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hx.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
HX
|
mth5/io/nims.py
|
hx
|
kujaku11/mth5
| 5 |
python
|
@property
def hx(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 1, 'component': 'hx', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hx.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
@property
def hx(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 1, 'component': 'hx', 'measurement_azimuth': 0, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hx.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>HX<|endoftext|>
|
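A hedged sketch of constructing a magnetic ChannelTS the way the property above does, with synthetic data and only a few of the metadata keys; the exact required keys and their defaults are assumptions here.
import numpy as np
from mth5 import timeseries

meta_dict = {"channel_number": 1, "component": "hx", "sample_rate": 8,
             "type": "magnetic", "units": "counts"}
hx = timeseries.ChannelTS("magnetic",
                          data=np.random.randn(4096),
                          channel_metadata={"magnetic": meta_dict})
print(hx)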
0815008f6c016c991b0852e7300c9fc065ad02144c0aa3ff856780c771c5816a
|
@property
def hy(self):
'HY'
if (self.ts_data is not None):
meta_dict = {'channel_number': 2, 'component': 'hy', 'measurement_azimuth': 90, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hy.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
HY
|
mth5/io/nims.py
|
hy
|
kujaku11/mth5
| 5 |
python
|
@property
def hy(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 2, 'component': 'hy', 'measurement_azimuth': 90, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hy.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
@property
def hy(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 2, 'component': 'hy', 'measurement_azimuth': 90, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hy.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>HY<|endoftext|>
|
4831180f90883254e65aada23a74e550e902f793136733a1156c9a976aed3abb
|
@property
def hz(self):
'HZ'
if (self.ts_data is not None):
meta_dict = {'channel_number': 3, 'component': 'hz', 'measurement_azimuth': 0, 'measurement_tilt': 90, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hz.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
HZ
|
mth5/io/nims.py
|
hz
|
kujaku11/mth5
| 5 |
python
|
@property
def hz(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 3, 'component': 'hz', 'measurement_azimuth': 0, 'measurement_tilt': 90, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hz.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
@property
def hz(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 3, 'component': 'hz', 'measurement_azimuth': 0, 'measurement_tilt': 90, 'sample_rate': self.sample_rate, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'magnetic', 'units': 'counts', 'sensor.id': self.mag_id, 'sensor.manufacturer': 'Barry Narod', 'sensor.type': 'fluxgate triaxial magnetometer'}
return timeseries.ChannelTS('magnetic', data=self.ts_data.hz.to_numpy(), channel_metadata={'magnetic': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>HZ<|endoftext|>
|
2bc6a1e19dbac1272769be9ed0f4b3a99bfc97fe84b8ccba39fa811585c3aa10
|
@property
def ex(self):
'EX'
if (self.ts_data is not None):
meta_dict = {'channel_number': 4, 'component': 'ex', 'measurement_azimuth': self.ex_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ex_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.s_electrode_id, 'positive.id': self.n_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ex.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
EX
|
mth5/io/nims.py
|
ex
|
kujaku11/mth5
| 5 |
python
|
@property
def ex(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 4, 'component': 'ex', 'measurement_azimuth': self.ex_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ex_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.s_electrode_id, 'positive.id': self.n_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ex.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
@property
def ex(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 4, 'component': 'ex', 'measurement_azimuth': self.ex_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ex_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.s_electrode_id, 'positive.id': self.n_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ex.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>EX<|endoftext|>
|
a9ee6d05e1b7c060bce547434619bd941b6aea971444df034e3b08ee0aa716aa
|
@property
def ey(self):
'EY'
if (self.ts_data is not None):
meta_dict = {'channel_number': 5, 'component': 'ey', 'measurement_azimuth': self.ey_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ey_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.w_electrode_id, 'positive.id': self.e_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ey.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
EY
|
mth5/io/nims.py
|
ey
|
kujaku11/mth5
| 5 |
python
|
@property
def ey(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 5, 'component': 'ey', 'measurement_azimuth': self.ey_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ey_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.w_electrode_id, 'positive.id': self.e_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ey.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
@property
def ey(self):
if (self.ts_data is not None):
meta_dict = {'channel_number': 5, 'component': 'ey', 'measurement_azimuth': self.ey_azimuth, 'measurement_tilt': 0, 'sample_rate': self.sample_rate, 'dipole_length': self.ey_length, 'time_period.start': self.start_time.isoformat(), 'time_period.end': self.end_time.isoformat(), 'type': 'electric', 'units': 'counts', 'negative.id': self.w_electrode_id, 'positive.id': self.e_electrode_id}
return timeseries.ChannelTS('electric', data=self.ts_data.ey.to_numpy(), channel_metadata={'electric': meta_dict}, run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>EY<|endoftext|>
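The hx, hy, hz, ex and ey properties above all build the same flat metadata dictionary before wrapping the samples in timeseries.ChannelTS. Below is a minimal standalone sketch of that dictionary for the ey channel; the key names are copied from the property bodies, while every concrete value (azimuth, dipole length, electrode ids, times, sample rate) is a placeholder invented for illustration.
# Standalone sketch: build an electric-channel metadata dict in the same shape
# the ex/ey properties above use.  All concrete values are placeholders.
from datetime import datetime, timezone
start = datetime(2020, 1, 1, tzinfo=timezone.utc)
end = datetime(2020, 1, 2, tzinfo=timezone.utc)
ey_meta = {
    'channel_number': 5,
    'component': 'ey',
    'measurement_azimuth': 90,        # placeholder for self.ey_azimuth
    'measurement_tilt': 0,
    'sample_rate': 8,                 # long-period NIMS rate assumed here
    'dipole_length': 100.0,           # placeholder for self.ey_length
    'time_period.start': start.isoformat(),
    'time_period.end': end.isoformat(),
    'type': 'electric',
    'units': 'counts',
    'negative.id': 'W-electrode',     # placeholder for self.w_electrode_id
    'positive.id': 'E-electrode',     # placeholder for self.e_electrode_id
}
# The property then passes this as channel_metadata={'electric': ey_meta}
# to timeseries.ChannelTS, together with the run and station metadata.
print(ey_meta['component'], ey_meta['sample_rate'])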
|
904238899e89db54dc1589f124c9b903176b3142c01c6bb1478bf883573803f9
|
@property
def run_metadata(self):
'Run metadata'
if (self.ts_data is not None):
meta_dict = {'Run': {'channels_recorded_electric': 'ex, ey', 'channels_recorded_magnetic': 'hx, hy, hz', 'channels_recorded_auxiliary': 'temperature', 'comments': self.comments, 'data_logger.firmware.author': 'B. Narod', 'data_logger.firmware.name': 'nims', 'data_logger.firmware.version': '1.0', 'data_logger.manufacturer': 'Narod', 'data_logger.model': self.box_id, 'data_logger.id': self.box_id, 'data_logger.type': 'long period', 'id': self.run_id, 'data_type': 'MTLP', 'sample_rate': self.sample_rate, 'time_period.end': self.end_time.isoformat(), 'time_period.start': self.start_time.isoformat()}}
return meta_dict
return None
|
Run metadata
|
mth5/io/nims.py
|
run_metadata
|
kujaku11/mth5
| 5 |
python
|
@property
def run_metadata(self):
if (self.ts_data is not None):
meta_dict = {'Run': {'channels_recorded_electric': 'ex, ey', 'channels_recorded_magnetic': 'hx, hy, hz', 'channels_recorded_auxiliary': 'temperature', 'comments': self.comments, 'data_logger.firmware.author': 'B. Narod', 'data_logger.firmware.name': 'nims', 'data_logger.firmware.version': '1.0', 'data_logger.manufacturer': 'Narod', 'data_logger.model': self.box_id, 'data_logger.id': self.box_id, 'data_logger.type': 'long period', 'id': self.run_id, 'data_type': 'MTLP', 'sample_rate': self.sample_rate, 'time_period.end': self.end_time.isoformat(), 'time_period.start': self.start_time.isoformat()}}
return meta_dict
return None
|
@property
def run_metadata(self):
if (self.ts_data is not None):
meta_dict = {'Run': {'channels_recorded_electric': 'ex, ey', 'channels_recorded_magnetic': 'hx, hy, hz', 'channels_recorded_auxiliary': 'temperature', 'comments': self.comments, 'data_logger.firmware.author': 'B. Narod', 'data_logger.firmware.name': 'nims', 'data_logger.firmware.version': '1.0', 'data_logger.manufacturer': 'Narod', 'data_logger.model': self.box_id, 'data_logger.id': self.box_id, 'data_logger.type': 'long period', 'id': self.run_id, 'data_type': 'MTLP', 'sample_rate': self.sample_rate, 'time_period.end': self.end_time.isoformat(), 'time_period.start': self.start_time.isoformat()}}
return meta_dict
return None<|docstring|>Run metadata<|endoftext|>
|
eb2892ffce99c7d5dfd91998c0c6e17b27b34c41c64d3c8a8397d9318cd4db62
|
@property
def station_metadata(self):
'Station metadata from nims file'
if (self.ts_data is not None):
return {'Station': {'geographic_name': f'{self.site_name}, {self.state_province}, {self.country}', 'location.declination.value': self.declination, 'location.elevation': self.elevation, 'location.latitude': self.latitude, 'location.longitude': self.longitude, 'id': self.run_id[0:(- 1)], 'orientation.reference_frame': 'geomagnetic'}}
return None
|
Station metadata from nims file
|
mth5/io/nims.py
|
station_metadata
|
kujaku11/mth5
| 5 |
python
|
@property
def station_metadata(self):
if (self.ts_data is not None):
return {'Station': {'geographic_name': f'{self.site_name}, {self.state_province}, {self.country}', 'location.declination.value': self.declination, 'location.elevation': self.elevation, 'location.latitude': self.latitude, 'location.longitude': self.longitude, 'id': self.run_id[0:(- 1)], 'orientation.reference_frame': 'geomagnetic'}}
return None
|
@property
def station_metadata(self):
if (self.ts_data is not None):
return {'Station': {'geographic_name': f'{self.site_name}, {self.state_province}, {self.country}', 'location.declination.value': self.declination, 'location.elevation': self.elevation, 'location.latitude': self.latitude, 'location.longitude': self.longitude, 'id': self.run_id[0:(- 1)], 'orientation.reference_frame': 'geomagnetic'}}
return None<|docstring|>Station metadata from nims file<|endoftext|>
|
071d7b438271ea9da6e5617e5543d2e4a737c0c66d47879e40f0c0946bc49339
|
def to_runts(self):
'Get xarray for run'
if (self.ts_data is not None):
return timeseries.RunTS(array_list=[self.hx, self.hy, self.hz, self.ex, self.ey, self.box_temperature], run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
Get xarray for run
|
mth5/io/nims.py
|
to_runts
|
kujaku11/mth5
| 5 |
python
|
def to_runts(self):
if (self.ts_data is not None):
return timeseries.RunTS(array_list=[self.hx, self.hy, self.hz, self.ex, self.ey, self.box_temperature], run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None
|
def to_runts(self):
if (self.ts_data is not None):
return timeseries.RunTS(array_list=[self.hx, self.hy, self.hz, self.ex, self.ey, self.box_temperature], run_metadata=self.run_metadata, station_metadata=self.station_metadata)
return None<|docstring|>Get xarray for run<|endoftext|>
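to_runts bundles the five field channels plus box_temperature into a timeseries.RunTS. A usage sketch, assuming mth5 is installed; the file path is a placeholder taken from the read_nims example further down, and whether the constructor already reads the file is an assumption, so the explicit read call is kept.
# Usage sketch (placeholder path); requires mth5 and a real NIMS DATA.BIN file.
from mth5.io import nims

n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")  # path as in the read_nims docstring example
n.read_nims()                                   # explicit read; redundant if the constructor already read the file
run = n.to_runts()                              # RunTS built from hx, hy, hz, ex, ey and box_temperature
print(run)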
|
d397011fc5df8be6843d7e876a54e809ca7f949e5a5bc70e5c0b3f2201ab5fbc
|
def _make_index_values(self):
'\n Index values for the channels recorded\n '
indices = np.zeros((8, 5), dtype=np.int)
for kk in range(8):
for ii in range(3):
indices[(kk, ii)] = ((9 + (kk * 9)) + (ii * 3))
for ii in range(2):
indices[(kk, (3 + ii))] = ((82 + (kk * 6)) + (ii * 3))
return indices
|
Index values for the channels recorded
|
mth5/io/nims.py
|
_make_index_values
|
kujaku11/mth5
| 5 |
python
|
def _make_index_values(self):
'\n \n '
indices = np.zeros((8, 5), dtype=np.int)
for kk in range(8):
for ii in range(3):
indices[(kk, ii)] = ((9 + (kk * 9)) + (ii * 3))
for ii in range(2):
indices[(kk, (3 + ii))] = ((82 + (kk * 6)) + (ii * 3))
return indices
|
def _make_index_values(self):
'\n \n '
indices = np.zeros((8, 5), dtype=np.int)
for kk in range(8):
for ii in range(3):
indices[(kk, ii)] = ((9 + (kk * 9)) + (ii * 3))
for ii in range(2):
indices[(kk, (3 + ii))] = ((82 + (kk * 6)) + (ii * 3))
return indices<|docstring|>Index values for the channels recorded<|endoftext|>
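The table encodes where, within one data block, each of the 8 samples of the 5 channels starts: magnetic samples every 9 bytes from offset 9 (3 bytes per component), electric samples every 6 bytes from offset 82. A standalone reproduction; plain int replaces the np.int alias used above, since that alias was removed in NumPy 1.24.
import numpy as np

# Reproduce the (8 samples x 5 channels) byte-offset table from _make_index_values.
indices = np.zeros((8, 5), dtype=int)    # int instead of the removed np.int alias
for kk in range(8):                       # 8 samples per one-second block
    for ii in range(3):                   # hx, hy, hz: 9-byte stride per sample, 3 bytes per channel
        indices[kk, ii] = 9 + kk * 9 + ii * 3
    for ii in range(2):                   # ex, ey: 6-byte stride per sample, 3 bytes per channel
        indices[kk, 3 + ii] = 82 + kk * 6 + ii * 3
print(indices[0])    # first sample of each channel  -> [  9  12  15  82  85]
print(indices[-1])   # last sample of each channel   -> [ 72  75  78 124 127]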
|
9ed5dbb78ebb54d9243b12843d7534eb15d8a24f8419669a22193f62daa02771
|
def _get_gps_string_list(self, nims_string):
"\n get the gps strings from the raw string output by the NIMS. This will\n take the 3rd value in each block, concatenate into a long string and\n then make a list by splitting by '$'. The index values of where the\n '$' are found are also calculated.\n\n :param str nims_string: raw binary string output by NIMS\n\n :returns: list of index values associated with the location of the '$'\n\n :returns: list of possible raw GPS strings\n\n .. note:: This assumes that there are an even amount of data blocks.\n Might be a bad assumption\n "
index_values = []
gps_str_list = []
for ii in range(int((len(nims_string) / self.block_size))):
index = ((ii * self.block_size) + 3)
g_char = struct.unpack('c', nims_string[index:(index + 1)])[0]
if (g_char == b'$'):
index_values.append(((index - 3) / self.block_size))
gps_str_list.append(g_char)
gps_raw_stamp_list = b''.join(gps_str_list).split(b'$')
return (index_values, gps_raw_stamp_list)
|
get the gps strings from the raw string output by the NIMS. This will
take the 3rd value in each block, concatenate into a long string and
then make a list by splitting by '$'. The index values of where the
'$' are found are also calculated.
:param str nims_string: raw binary string output by NIMS
:returns: list of index values associated with the location of the '$'
:returns: list of possible raw GPS strings
.. note:: This assumes that there are an even amount of data blocks.
Might be a bad assumption
|
mth5/io/nims.py
|
_get_gps_string_list
|
kujaku11/mth5
| 5 |
python
|
def _get_gps_string_list(self, nims_string):
"\n get the gps strings from the raw string output by the NIMS. This will\n take the 3rd value in each block, concatenate into a long string and\n then make a list by splitting by '$'. The index values of where the\n '$' are found are also calculated.\n\n :param str nims_string: raw binary string output by NIMS\n\n :returns: list of index values associated with the location of the '$'\n\n :returns: list of possible raw GPS strings\n\n .. note:: This assumes that there are an even amount of data blocks.\n Might be a bad assumption\n "
index_values = []
gps_str_list = []
for ii in range(int((len(nims_string) / self.block_size))):
index = ((ii * self.block_size) + 3)
g_char = struct.unpack('c', nims_string[index:(index + 1)])[0]
if (g_char == b'$'):
index_values.append(((index - 3) / self.block_size))
gps_str_list.append(g_char)
gps_raw_stamp_list = b''.join(gps_str_list).split(b'$')
return (index_values, gps_raw_stamp_list)
|
def _get_gps_string_list(self, nims_string):
"\n get the gps strings from the raw string output by the NIMS. This will\n take the 3rd value in each block, concatenate into a long string and\n then make a list by splitting by '$'. The index values of where the\n '$' are found are also calculated.\n\n :param str nims_string: raw binary string output by NIMS\n\n :returns: list of index values associated with the location of the '$'\n\n :returns: list of possible raw GPS strings\n\n .. note:: This assumes that there are an even amount of data blocks.\n Might be a bad assumption\n "
index_values = []
gps_str_list = []
for ii in range(int((len(nims_string) / self.block_size))):
index = ((ii * self.block_size) + 3)
g_char = struct.unpack('c', nims_string[index:(index + 1)])[0]
if (g_char == b'$'):
index_values.append(((index - 3) / self.block_size))
gps_str_list.append(g_char)
gps_raw_stamp_list = b''.join(gps_str_list).split(b'$')
return (index_values, gps_raw_stamp_list)<|docstring|>get the gps strings from the raw string output by the NIMS. This will
take the 3rd value in each block, concatenate into a long string and
then make a list by splitting by '$'. The index values of where the
'$' are found are also calculated.
:param str nims_string: raw binary string output by NIMS
:returns: list of index values associated with the location of the '$'
:returns: list of possible raw GPS strings
.. note:: This assumes that there are an even amount of data blocks.
Might be a bad assumption<|endoftext|>
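The GPS text is multiplexed one character per data block at byte offset 3; concatenating those characters and splitting on '$' recovers the NMEA sentences. A self-contained sketch on synthetic bytes; the 16-byte block size and the message content are invented for the demo (the real NIMS block is larger).
import struct

BLOCK = 16   # toy block size; each block carries one GPS character at offset 3
message = b'$GPRMC,012345$GPGGA,012345'
stream = b''.join(b'\x01\x83' + b'\x00' + bytes([c]) + b'\x00' * (BLOCK - 4) for c in message)

index_values = []
chars = []
for ii in range(len(stream) // BLOCK):
    index = ii * BLOCK + 3
    (ch,) = struct.unpack('c', stream[index:index + 1])   # the GPS character slot
    if ch == b'$':
        index_values.append((index - 3) // BLOCK)         # block number where a sentence starts
    chars.append(ch)

stamps = b''.join(chars).split(b'$')
print(index_values)   # -> [0, 13]
print(stamps)         # -> [b'', b'GPRMC,012345', b'GPGGA,012345']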
|
3b7c9a9bd60aedde34392f8893e8d1885a860117803c943ae59492517b175296
|
def get_stamps(self, nims_string):
'\n get a list of valid GPS strings and match synchronous GPRMC with GPGGA\n stamps if possible.\n\n :param str nims_string: raw GPS string output by NIMS\n '
(index_list, gps_raw_stamp_list) = self._get_gps_string_list(nims_string)
gprmc_list = []
gpgga_list = []
for (ii, index, raw_stamp) in zip(range(len(index_list)), index_list, gps_raw_stamp_list[1:]):
gps_obj = GPS(raw_stamp, index)
if gps_obj.valid:
if (gps_obj.gps_type == 'GPRMC'):
gprmc_list.append(gps_obj)
elif (gps_obj.gps_type == 'GPGGA'):
gpgga_list.append(gps_obj)
else:
self.logger.debug(f'GPS Error: file index {index}, stamp number {ii}')
max_len = min([len(raw_stamp), 15])
self.logger.debug(f'GPS Raw Stamp: {raw_stamp[0:max_len]}')
return self._gps_match_gprmc_gpgga_strings(gprmc_list, gpgga_list)
|
get a list of valid GPS strings and match synchronous GPRMC with GPGGA
stamps if possible.
:param str nims_string: raw GPS string output by NIMS
|
mth5/io/nims.py
|
get_stamps
|
kujaku11/mth5
| 5 |
python
|
def get_stamps(self, nims_string):
'\n get a list of valid GPS strings and match synchronous GPRMC with GPGGA\n stamps if possible.\n\n :param str nims_string: raw GPS string output by NIMS\n '
(index_list, gps_raw_stamp_list) = self._get_gps_string_list(nims_string)
gprmc_list = []
gpgga_list = []
for (ii, index, raw_stamp) in zip(range(len(index_list)), index_list, gps_raw_stamp_list[1:]):
gps_obj = GPS(raw_stamp, index)
if gps_obj.valid:
if (gps_obj.gps_type == 'GPRMC'):
gprmc_list.append(gps_obj)
elif (gps_obj.gps_type == 'GPGGA'):
gpgga_list.append(gps_obj)
else:
self.logger.debug(f'GPS Error: file index {index}, stamp number {ii}')
max_len = min([len(raw_stamp), 15])
self.logger.debug(f'GPS Raw Stamp: {raw_stamp[0:max_len]}')
return self._gps_match_gprmc_gpgga_strings(gprmc_list, gpgga_list)
|
def get_stamps(self, nims_string):
'\n get a list of valid GPS strings and match synchronous GPRMC with GPGGA\n stamps if possible.\n\n :param str nims_string: raw GPS string output by NIMS\n '
(index_list, gps_raw_stamp_list) = self._get_gps_string_list(nims_string)
gprmc_list = []
gpgga_list = []
for (ii, index, raw_stamp) in zip(range(len(index_list)), index_list, gps_raw_stamp_list[1:]):
gps_obj = GPS(raw_stamp, index)
if gps_obj.valid:
if (gps_obj.gps_type == 'GPRMC'):
gprmc_list.append(gps_obj)
elif (gps_obj.gps_type == 'GPGGA'):
gpgga_list.append(gps_obj)
else:
self.logger.debug(f'GPS Error: file index {index}, stamp number {ii}')
max_len = min([len(raw_stamp), 15])
self.logger.debug(f'GPS Raw Stamp: {raw_stamp[0:max_len]}')
return self._gps_match_gprmc_gpgga_strings(gprmc_list, gpgga_list)<|docstring|>get a list of valid GPS strings and match synchronous GPRMC with GPGGA
stamps if possible.
:param str nims_string: raw GPS string output by NIMS<|endoftext|>
|
0dd4491e9e3568b44d95eeb4e32eaa713908ed7fb11d4b134c7737779d3a5626
|
def _gps_match_gprmc_gpgga_strings(self, gprmc_list, gpgga_list):
'\n match GPRMC and GPGGA strings together into a list\n\n [[GPRMC, GPGGA], ...]\n\n :param list gprmc_list: list of GPS objects for the GPRMC stamps\n :param list gpgga_list: list of GPS objects for the GPGGA stamps\n\n :returns: list of matched GPRMC and GPGGA stamps\n\n '
gps_match_list = []
for gprmc in gprmc_list:
find = False
for (ii, gpgga) in enumerate(gpgga_list):
if (gprmc.time_stamp.time() == gpgga.time_stamp.time()):
gps_match_list.append([gprmc, gpgga])
find = True
del gpgga_list[ii]
break
if (not find):
gps_match_list.append([gprmc])
return gps_match_list
|
match GPRMC and GPGGA strings together into a list
[[GPRMC, GPGGA], ...]
:param list gprmc_list: list of GPS objects for the GPRMC stamps
:param list gpgga_list: list of GPS objects for the GPGGA stamps
:returns: list of matched GPRMC and GPGGA stamps
|
mth5/io/nims.py
|
_gps_match_gprmc_gpgga_strings
|
kujaku11/mth5
| 5 |
python
|
def _gps_match_gprmc_gpgga_strings(self, gprmc_list, gpgga_list):
'\n match GPRMC and GPGGA strings together into a list\n\n [[GPRMC, GPGGA], ...]\n\n :param list gprmc_list: list of GPS objects for the GPRMC stamps\n :param list gpgga_list: list of GPS objects for the GPGGA stamps\n\n :returns: list of matched GPRMC and GPGGA stamps\n\n '
gps_match_list = []
for gprmc in gprmc_list:
find = False
for (ii, gpgga) in enumerate(gpgga_list):
if (gprmc.time_stamp.time() == gpgga.time_stamp.time()):
gps_match_list.append([gprmc, gpgga])
find = True
del gpgga_list[ii]
break
if (not find):
gps_match_list.append([gprmc])
return gps_match_list
|
def _gps_match_gprmc_gpgga_strings(self, gprmc_list, gpgga_list):
'\n match GPRMC and GPGGA strings together into a list\n\n [[GPRMC, GPGGA], ...]\n\n :param list gprmc_list: list of GPS objects for the GPRMC stamps\n :param list gpgga_list: list of GPS objects for the GPGGA stamps\n\n :returns: list of matched GPRMC and GPGGA stamps\n\n '
gps_match_list = []
for gprmc in gprmc_list:
find = False
for (ii, gpgga) in enumerate(gpgga_list):
if (gprmc.time_stamp.time() == gpgga.time_stamp.time()):
gps_match_list.append([gprmc, gpgga])
find = True
del gpgga_list[ii]
break
if (not find):
gps_match_list.append([gprmc])
return gps_match_list<|docstring|>match GPRMC and GPGGA strings together into a list
[[GPRMC, GPGGA], ...]
:param list gprmc_list: list of GPS objects for the GPRMC stamps
:param list gpgga_list: list of GPS objects for the GPGGA stamps
:returns: list of matched GPRMC and GPGGA stamps<|endoftext|>
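The matcher pairs each GPRMC sentence with the GPGGA sentence carrying the same time of day and falls back to a single-element [GPRMC] entry when no partner exists. A toy reproduction with namedtuple stand-ins instead of the GPS class.
from collections import namedtuple
from datetime import datetime

Stamp = namedtuple('Stamp', ['gps_type', 'time_stamp'])

def match(gprmc_list, gpgga_list):
    # Same pairing logic as _gps_match_gprmc_gpgga_strings, on stand-in objects.
    matched = []
    gpgga_list = list(gpgga_list)        # copy, because matched entries get deleted
    for gprmc in gprmc_list:
        found = False
        for ii, gpgga in enumerate(gpgga_list):
            if gprmc.time_stamp.time() == gpgga.time_stamp.time():
                matched.append([gprmc, gpgga])
                found = True
                del gpgga_list[ii]
                break
        if not found:
            matched.append([gprmc])
    return matched

rmc = [Stamp('GPRMC', datetime(2020, 1, 1, 0, 0, s)) for s in (0, 5, 10)]
gga = [Stamp('GPGGA', datetime(2020, 1, 1, 0, 0, s)) for s in (0, 10)]
for pair in match(rmc, gga):
    print([s.gps_type for s in pair])
# -> ['GPRMC', 'GPGGA'], ['GPRMC'], ['GPRMC', 'GPGGA']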
|
3f2316005acd71c3cdc9b94f65ca24170490c3acb027aea795758ed64f7649f6
|
def _get_gps_stamp_indices_from_status(self, status_array):
'\n get the index location of the stamps from the status array assuming\n that 0 indicates GPS lock.\n\n :param :class:`np.ndarray` status_array: an array of status values from data blocks\n\n :returns: array of index values where GPS lock was acquired ignoring\n sequential locks.\n '
index_values = np.where((status_array == 0))[0]
status_index = np.zeros_like(index_values)
for ii in range(index_values.size):
if ((index_values[ii] - index_values[(ii - 1)]) == 1):
continue
else:
status_index[ii] = index_values[ii]
status_index = status_index[np.nonzero(status_index)]
return status_index
|
get the index location of the stamps from the status array assuming
that 0 indicates GPS lock.
:param :class:`np.ndarray` status_array: an array of status values from data blocks
:returns: array of index values where GPS lock was acquired ignoring
sequential locks.
|
mth5/io/nims.py
|
_get_gps_stamp_indices_from_status
|
kujaku11/mth5
| 5 |
python
|
def _get_gps_stamp_indices_from_status(self, status_array):
'\n get the index location of the stamps from the status array assuming\n that 0 indicates GPS lock.\n\n :param :class:`np.ndarray` status_array: an array of status values from data blocks\n\n :returns: array of index values where GPS lock was acquired ignoring\n sequential locks.\n '
index_values = np.where((status_array == 0))[0]
status_index = np.zeros_like(index_values)
for ii in range(index_values.size):
if ((index_values[ii] - index_values[(ii - 1)]) == 1):
continue
else:
status_index[ii] = index_values[ii]
status_index = status_index[np.nonzero(status_index)]
return status_index
|
def _get_gps_stamp_indices_from_status(self, status_array):
'\n get the index location of the stamps from the status array assuming\n that 0 indicates GPS lock.\n\n :param :class:`np.ndarray` status_array: an array of status values from data blocks\n\n :returns: array of index values where GPS lock was acquired ignoring\n sequential locks.\n '
index_values = np.where((status_array == 0))[0]
status_index = np.zeros_like(index_values)
for ii in range(index_values.size):
if ((index_values[ii] - index_values[(ii - 1)]) == 1):
continue
else:
status_index[ii] = index_values[ii]
status_index = status_index[np.nonzero(status_index)]
return status_index<|docstring|>get the index location of the stamps from the status array assuming
that 0 indicates GPS lock.
:param :class:`np.ndarray` status_array: an array of status values from data blocks
:returns: array of index values where GPS lock was acquired ignoring
sequential locks.<|endoftext|>
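A GPS lock appears as a run of consecutive zeros in the status values, and only the first index of each run is kept. A small numpy demo of that selection, reproducing the original loop unchanged (including its quirk of discarding a run that starts at index 0).
import numpy as np

def first_index_of_each_zero_run(status_array):
    # Keep the first index of every run of consecutive zeros (0 == GPS lock),
    # exactly as _get_gps_stamp_indices_from_status does.
    index_values = np.where(status_array == 0)[0]
    status_index = np.zeros_like(index_values)
    for ii in range(index_values.size):
        if index_values[ii] - index_values[ii - 1] == 1:
            continue
        status_index[ii] = index_values[ii]
    # np.nonzero also drops a run starting at index 0, a quirk kept here unchanged.
    return status_index[np.nonzero(status_index)]

status = np.array([1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0])
print(first_index_of_each_zero_run(status))   # -> [ 2  7 11]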
|
47f118d3c5b0f09fb1f3ae4ef824920ed717ae7db19921d796c58cd06c6dba41
|
def match_status_with_gps_stamps(self, status_array, gps_list):
'\n Match the index values from the status array with the index values of\n the GPS stamps. There appears to be a bit of wiggle room between when the\n lock is recorded and the stamp was actually recorded. This is typically 1\n second and sometimes 2.\n\n :param array status_array: array of status values from each data block\n :param list gps_list: list of valid GPS stamps [[GPRMC, GPGGA], ...]\n\n .. note:: I think there is a 2 second gap between the lock and the\n first stamp character.\n '
stamp_indices = self._get_gps_stamp_indices_from_status(status_array)
gps_stamps = []
for index in stamp_indices:
stamp_find = False
for (ii, stamps) in enumerate(gps_list):
index_diff = (stamps[0].index - index)
if ((index_diff == 1) or (index_diff == 73)):
index += 1
stamps[0].index += 1
elif ((index_diff == 2) or (index_diff == 74)):
index = index
elif ((index_diff == 3) or (index_diff == 75)):
index -= 1
stamps[0].index -= 1
elif ((index_diff == 4) or (index_diff == 76)):
index -= 2
stamps[0].index -= 2
if (stamps[0].gps_type in ['GPRMC', 'gprmc']):
if (index_diff in [1, 2, 3, 4]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
elif (stamps[0].gps_type in ['GPGGA', 'gpgga']):
if (index_diff in [73, 74, 75, 76]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
if (not stamp_find):
self.logger.debug(f'GPS Error: No good GPS stamp at {index} seconds')
return gps_stamps
|
Match the index values from the status array with the index values of
the GPS stamps. There appears to be a bit of wiggle room between when the
lock is recorded and the stamp was actually recorded. This is typically 1
second and sometimes 2.
:param array status_array: array of status values from each data block
:param list gps_list: list of valid GPS stamps [[GPRMC, GPGGA], ...]
.. note:: I think there is a 2 second gap between the lock and the
first stamp character.
|
mth5/io/nims.py
|
match_status_with_gps_stamps
|
kujaku11/mth5
| 5 |
python
|
def match_status_with_gps_stamps(self, status_array, gps_list):
'\n Match the index values from the status array with the index values of\n the GPS stamps. There appears to be a bit of wiggle room between when the\n lock is recorded and the stamp was actually recorded. This is typically 1\n second and sometimes 2.\n\n :param array status_array: array of status values from each data block\n :param list gps_list: list of valid GPS stamps [[GPRMC, GPGGA], ...]\n\n .. note:: I think there is a 2 second gap between the lock and the\n first stamp character.\n '
stamp_indices = self._get_gps_stamp_indices_from_status(status_array)
gps_stamps = []
for index in stamp_indices:
stamp_find = False
for (ii, stamps) in enumerate(gps_list):
index_diff = (stamps[0].index - index)
if ((index_diff == 1) or (index_diff == 73)):
index += 1
stamps[0].index += 1
elif ((index_diff == 2) or (index_diff == 74)):
index = index
elif ((index_diff == 3) or (index_diff == 75)):
index -= 1
stamps[0].index -= 1
elif ((index_diff == 4) or (index_diff == 76)):
index -= 2
stamps[0].index -= 2
if (stamps[0].gps_type in ['GPRMC', 'gprmc']):
if (index_diff in [1, 2, 3, 4]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
elif (stamps[0].gps_type in ['GPGGA', 'gpgga']):
if (index_diff in [73, 74, 75, 76]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
if (not stamp_find):
self.logger.debug(f'GPS Error: No good GPS stamp at {index} seconds')
return gps_stamps
|
def match_status_with_gps_stamps(self, status_array, gps_list):
'\n Match the index values from the status array with the index values of\n the GPS stamps. There appears to be a bit of wiggle room between when the\n lock is recorded and the stamp was actually recorded. This is typically 1\n second and sometimes 2.\n\n :param array status_array: array of status values from each data block\n :param list gps_list: list of valid GPS stamps [[GPRMC, GPGGA], ...]\n\n .. note:: I think there is a 2 second gap between the lock and the\n first stamp character.\n '
stamp_indices = self._get_gps_stamp_indices_from_status(status_array)
gps_stamps = []
for index in stamp_indices:
stamp_find = False
for (ii, stamps) in enumerate(gps_list):
index_diff = (stamps[0].index - index)
if ((index_diff == 1) or (index_diff == 73)):
index += 1
stamps[0].index += 1
elif ((index_diff == 2) or (index_diff == 74)):
index = index
elif ((index_diff == 3) or (index_diff == 75)):
index -= 1
stamps[0].index -= 1
elif ((index_diff == 4) or (index_diff == 76)):
index -= 2
stamps[0].index -= 2
if (stamps[0].gps_type in ['GPRMC', 'gprmc']):
if (index_diff in [1, 2, 3, 4]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
elif (stamps[0].gps_type in ['GPGGA', 'gpgga']):
if (index_diff in [73, 74, 75, 76]):
gps_stamps.append((index, stamps))
stamp_find = True
del gps_list[ii]
break
if (not stamp_find):
self.logger.debug(f'GPS Error: No good GPS stamp at {index} seconds')
return gps_stamps<|docstring|>Match the index values from the status array with the index values of
the GPS stamps. There appears to be a bit of wiggle room between when the
lock is recorded and the stamp was actually recorded. This is typically 1
second and sometimes 2.
:param array status_array: array of status values from each data block
:param list gps_list: list of valid GPS stamps [[GPRMC, GPGGA], ...]
.. note:: I think there is a 2 second gap between the lock and the
first stamp character.<|endoftext|>
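The pairing above accepts fixed offset windows between the block where the status bytes report a lock and the block where the stamp's '$' was found: 1 to 4 blocks for a GPRMC stamp and 73 to 76 blocks for a GPGGA stamp, with the lock index nudged toward an effective offset of 2 (or 74). A toy illustration of those acceptance windows only; the numbers fed in are invented.
# Illustrative only: which offsets the matcher above accepts for each stamp type.
lock_index = 100                                                 # invented lock position (block number)
candidates = [('GPRMC', 102), ('GPGGA', 174), ('GPRMC', 120)]    # (type, block index of the '$')
for stamp_type, stamp_block in candidates:
    diff = stamp_block - lock_index
    if stamp_type == 'GPRMC' and diff in (1, 2, 3, 4):
        print(f'{stamp_type} at {stamp_block}: matched (offset {diff})')
    elif stamp_type == 'GPGGA' and diff in (73, 74, 75, 76):
        print(f'{stamp_type} at {stamp_block}: matched (offset {diff})')
    else:
        print(f'{stamp_type} at {stamp_block}: offset {diff} outside the accepted windows')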
|
3940ebb94d8bf555b4641fc39d77c2788c3cf05c5ad287d356cd5cb824daa087
|
def find_sequence(self, data_array, block_sequence=None):
'\n find a sequence in a given array\n\n :param array data_array: array of the data with shape [n, m]\n where n is the number of seconds recorded\n m is the block length for a given sampling\n rate.\n :param list block_sequence: sequence pattern to locate\n *default* is [1, 131] the start of a\n data block.\n\n :returns: array of index locations where the sequence is found.\n '
if (block_sequence is not None):
self.block_sequence = block_sequence
t = np.vstack([np.roll(data_array, shift) for shift in (- np.arange(len(self.block_sequence)))]).T
return np.where(np.all((t == self.block_sequence), axis=1))[0]
|
find a sequence in a given array
:param array data_array: array of the data with shape [n, m]
where n is the number of seconds recorded
m is the block length for a given sampling
rate.
:param list block_sequence: sequence pattern to locate
*default* is [1, 131] the start of a
data block.
:returns: array of index locations where the sequence is found.
|
mth5/io/nims.py
|
find_sequence
|
kujaku11/mth5
| 5 |
python
|
def find_sequence(self, data_array, block_sequence=None):
'\n find a sequence in a given array\n\n :param array data_array: array of the data with shape [n, m]\n where n is the number of seconds recorded\n m is the block length for a given sampling\n rate.\n :param list block_sequence: sequence pattern to locate\n *default* is [1, 131] the start of a\n data block.\n\n :returns: array of index locations where the sequence is found.\n '
if (block_sequence is not None):
self.block_sequence = block_sequence
t = np.vstack([np.roll(data_array, shift) for shift in (- np.arange(len(self.block_sequence)))]).T
return np.where(np.all((t == self.block_sequence), axis=1))[0]
|
def find_sequence(self, data_array, block_sequence=None):
'\n find a sequence in a given array\n\n :param array data_array: array of the data with shape [n, m]\n where n is the number of seconds recorded\n m is the block length for a given sampling\n rate.\n :param list block_sequence: sequence pattern to locate\n *default* is [1, 131] the start of a\n data block.\n\n :returns: array of index locations where the sequence is found.\n '
if (block_sequence is not None):
self.block_sequence = block_sequence
t = np.vstack([np.roll(data_array, shift) for shift in (- np.arange(len(self.block_sequence)))]).T
return np.where(np.all((t == self.block_sequence), axis=1))[0]<|docstring|>find a sequence in a given array
:param array data_array: array of the data with shape [n, m]
where n is the number of seconds recorded
m is the block length for a given sampling
rate.
:param list block_sequence: sequence pattern to locate
*default* is [1, 131] the start of a
data block.
:returns: array of index locations where the sequence is found.<|endoftext|>
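find_sequence stacks shifted copies of the data (np.roll) so that every run of len(block_sequence) consecutive values becomes one row, then keeps the rows that equal the pattern. A standalone demo with the [1, 131] block-start pattern mentioned in the docstring.
import numpy as np

def find_sequence(data_array, block_sequence=(1, 131)):
    # Rows of t are the windows data_array[i:i+len(block_sequence)] (with wrap-around
    # at the end), so rows equal to the pattern give its start indices.
    block_sequence = np.array(block_sequence)
    t = np.vstack([np.roll(data_array, shift) for shift in -np.arange(len(block_sequence))]).T
    return np.where(np.all(t == block_sequence, axis=1))[0]

data = np.array([7, 9, 1, 131, 5, 5, 1, 131, 0])
print(find_sequence(data))   # -> [2 6]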
|
8ebedf26dbdcdab142a1d8099044664481d0aaa52a3c3d9464c6ed3c86739a39
|
def unwrap_sequence(self, sequence):
'\n unwrap the sequence to be sequential numbers instead of modulated by\n 256. sets the first number to 0\n\n :param list sequence: sequence of bytes numbers\n :return: unwrapped number of counts\n\n '
count = 0
unwrapped = np.zeros_like(sequence)
for (ii, seq) in enumerate(sequence):
unwrapped[ii] = (seq + (count * 256))
if (seq == 255):
count += 1
unwrapped -= unwrapped[0]
return unwrapped
|
unwrap the sequence to be sequential numbers instead of modulated by
256. sets the first number to 0
:param list sequence: sequence of bytes numbers
:return: unwrapped number of counts
|
mth5/io/nims.py
|
unwrap_sequence
|
kujaku11/mth5
| 5 |
python
|
def unwrap_sequence(self, sequence):
'\n unwrap the sequence to be sequential numbers instead of modulated by\n 256. sets the first number to 0\n\n :param list sequence: sequence of bytes numbers\n :return: unwrapped number of counts\n\n '
count = 0
unwrapped = np.zeros_like(sequence)
for (ii, seq) in enumerate(sequence):
unwrapped[ii] = (seq + (count * 256))
if (seq == 255):
count += 1
unwrapped -= unwrapped[0]
return unwrapped
|
def unwrap_sequence(self, sequence):
'\n unwrap the sequence to be sequential numbers instead of modulated by\n 256. sets the first number to 0\n\n :param list sequence: sequence of bytes numbers\n :return: unwrapped number of counts\n\n '
count = 0
unwrapped = np.zeros_like(sequence)
for (ii, seq) in enumerate(sequence):
unwrapped[ii] = (seq + (count * 256))
if (seq == 255):
count += 1
unwrapped -= unwrapped[0]
return unwrapped<|docstring|>unwrap the sequence to be sequential numbers instead of modulated by
256. sets the first number to 0
:param list sequence: sequence of bytes numbers
:return: unwrapped number of counts<|endoftext|>
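The block counter is a single byte that wraps from 255 back to 0; unwrap_sequence rebuilds a monotonically increasing count and shifts it so the first block is numbered 0. A short worked example.
import numpy as np

def unwrap_sequence(sequence):
    # Rebuild a monotonic counter from a byte counter that wraps 255 -> 0,
    # then rebase it so the first value is 0 (same logic as the method above).
    count = 0
    unwrapped = np.zeros_like(sequence)
    for ii, seq in enumerate(sequence):
        unwrapped[ii] = seq + count * 256
        if seq == 255:
            count += 1
    return unwrapped - unwrapped[0]

seq = np.array([253, 254, 255, 0, 1, 2])
print(unwrap_sequence(seq))   # -> [0 1 2 3 4 5]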
|
dcd04eb2c205cfd2c7db6091300f8f7bb46e651b6d77927d9567b33022dbb8a5
|
def _locate_duplicate_blocks(self, sequence):
'\n locate the sequence number where the duplicates exist\n\n :param list sequence: sequence to match duplicate numbers.\n :returns: list of duplicate index values.\n '
duplicates = np.where((np.abs(np.diff(sequence)) == 0))[0]
if (len(duplicates) == 0):
return None
duplicate_list = []
for dup in duplicates:
dup_dict = {}
dup_dict['sequence_index'] = dup
dup_dict['ts_index_0'] = (dup * self.sample_rate)
dup_dict['ts_index_1'] = ((dup * self.sample_rate) + self.sample_rate)
dup_dict['ts_index_2'] = ((dup + 1) * self.sample_rate)
dup_dict['ts_index_3'] = (((dup + 1) * self.sample_rate) + self.sample_rate)
duplicate_list.append(dup_dict)
return duplicate_list
|
locate the sequence number where the duplicates exist
:param list sequence: sequence to match duplicate numbers.
:returns: list of duplicate index values.
|
mth5/io/nims.py
|
_locate_duplicate_blocks
|
kujaku11/mth5
| 5 |
python
|
def _locate_duplicate_blocks(self, sequence):
'\n locate the sequence number where the duplicates exist\n\n :param list sequence: sequence to match duplicate numbers.\n :returns: list of duplicate index values.\n '
duplicates = np.where((np.abs(np.diff(sequence)) == 0))[0]
if (len(duplicates) == 0):
return None
duplicate_list = []
for dup in duplicates:
dup_dict = {}
dup_dict['sequence_index'] = dup
dup_dict['ts_index_0'] = (dup * self.sample_rate)
dup_dict['ts_index_1'] = ((dup * self.sample_rate) + self.sample_rate)
dup_dict['ts_index_2'] = ((dup + 1) * self.sample_rate)
dup_dict['ts_index_3'] = (((dup + 1) * self.sample_rate) + self.sample_rate)
duplicate_list.append(dup_dict)
return duplicate_list
|
def _locate_duplicate_blocks(self, sequence):
'\n locate the sequence number where the duplicates exist\n\n :param list sequence: sequence to match duplicate numbers.\n :returns: list of duplicate index values.\n '
duplicates = np.where((np.abs(np.diff(sequence)) == 0))[0]
if (len(duplicates) == 0):
return None
duplicate_list = []
for dup in duplicates:
dup_dict = {}
dup_dict['sequence_index'] = dup
dup_dict['ts_index_0'] = (dup * self.sample_rate)
dup_dict['ts_index_1'] = ((dup * self.sample_rate) + self.sample_rate)
dup_dict['ts_index_2'] = ((dup + 1) * self.sample_rate)
dup_dict['ts_index_3'] = (((dup + 1) * self.sample_rate) + self.sample_rate)
duplicate_list.append(dup_dict)
return duplicate_list<|docstring|>locate the sequence number where the duplicates exist
:param list sequence: sequence to match duplicate numbers.
:returns: list of duplicate index values.<|endoftext|>
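A duplicated one-second block shows up as two identical consecutive sequence numbers, which np.diff exposes as a zero step; each hit is then expanded into the sample-index ranges of the two candidate blocks. A standalone sketch of that bookkeeping, with the sample rate assumed to be 8 samples per block.
import numpy as np

SAMPLE_RATE = 8   # assumed samples per one-second block

def locate_duplicate_blocks(sequence, sample_rate=SAMPLE_RATE):
    # Each entry describes a pair of consecutive blocks with the same sequence
    # number: ts_index_0..1 is the first block, ts_index_2..3 the second.
    duplicates = np.where(np.abs(np.diff(sequence)) == 0)[0]
    if len(duplicates) == 0:
        return None
    out = []
    for dup in duplicates:
        out.append({
            'sequence_index': int(dup),
            'ts_index_0': int(dup * sample_rate),
            'ts_index_1': int(dup * sample_rate + sample_rate),
            'ts_index_2': int((dup + 1) * sample_rate),
            'ts_index_3': int((dup + 1) * sample_rate + sample_rate),
        })
    return out

seq = np.array([0, 1, 2, 2, 3, 4])
print(locate_duplicate_blocks(seq))
# -> one entry: sequence_index 2, first block samples 16..24, second block 24..32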
|
438557efa014bf1950b382fb67c273851d837ed09258f05ab92ffd84da7831fb
|
def _check_duplicate_blocks(self, block_01, block_02, info_01, info_02):
'\n make sure the blocks are truly duplicates\n\n :param np.array block_01: block of data to compare\n :param np.array block_02: block of data to compare\n :param np.array info_01: information array from info_array[sequence_index]\n :param np.array info_02: information array from info_array[sequence_index]\n\n :returns: boolean if the blocks and information match\n\n '
if np.array_equal(block_01, block_02):
if np.array_equal(info_01, info_02):
return True
else:
return False
else:
return False
|
make sure the blocks are truly duplicates
:param np.array block_01: block of data to compare
:param np.array block_02: block of data to compare
:param np.array info_01: information array from info_array[sequence_index]
:param np.array info_02: information array from info_array[sequence_index]
:returns: boolean if the blocks and information match
|
mth5/io/nims.py
|
_check_duplicate_blocks
|
kujaku11/mth5
| 5 |
python
|
def _check_duplicate_blocks(self, block_01, block_02, info_01, info_02):
'\n make sure the blocks are truly duplicates\n\n :param np.array block_01: block of data to compare\n :param np.array block_02: block of data to compare\n :param np.array info_01: information array from info_array[sequence_index]\n :param np.array info_02: information array from info_array[sequence_index]\n\n :returns: boolean if the blocks and information match\n\n '
if np.array_equal(block_01, block_02):
if np.array_equal(info_01, info_02):
return True
else:
return False
else:
return False
|
def _check_duplicate_blocks(self, block_01, block_02, info_01, info_02):
'\n make sure the blocks are truly duplicates\n\n :param np.array block_01: block of data to compare\n :param np.array block_02: block of data to compare\n :param np.array info_01: information array from info_array[sequence_index]\n :param np.array info_02: information array from info_array[sequence_index]\n\n :returns: boolean if the blocks and information match\n\n '
if np.array_equal(block_01, block_02):
if np.array_equal(info_01, info_02):
return True
else:
return False
else:
return False<|docstring|>make sure the blocks are truly duplicates
:param np.array block_01: block of data to compare
:param np.array block_02: block of data to compare
:param np.array info_01: information array from info_array[sequence_index]
:param np.array info_02: information array from info_array[sequence_index]
:returns: boolean if the blocks and information match<|endoftext|>
|
4dde3b54361a09738581f7f2d489694cec3cf7631e5d3da3dceb348a2f3df27d
|
def remove_duplicates(self, info_array, data_array):
'\n remove duplicate blocks, removing the first duplicate as suggested by\n Paul and Anna. Checks to make sure that the mag data are identical for\n the duplicate blocks. Removes the blocks from the information and\n data arrays and returns the reduced arrays. This should sync up the\n timing of GPS stamps and index values.\n\n :param np.array info_array: structured array of block information\n :param np.array data_array: structured array of the data\n\n :returns: reduced information array\n :returns: reduced data array\n :returns: index of duplicates in raw data\n\n '
duplicate_test_list = self._locate_duplicate_blocks(self.info_array['sequence'])
if (duplicate_test_list is None):
return (info_array, data_array, None)
duplicate_list = []
for d in duplicate_test_list:
if self._check_duplicate_blocks(data_array[d['ts_index_0']:d['ts_index_1']], data_array[d['ts_index_2']:d['ts_index_3']], info_array[d['sequence_index']], info_array[(d['sequence_index'] + 1)]):
duplicate_list.append(d)
self.logger.debug(f'Deleting {len(duplicate_list)} duplicate blocks')
remove_sequence_index = [d['sequence_index'] for d in duplicate_list]
remove_data_index = np.array([np.arange(d['ts_index_0'], d['ts_index_1'], 1) for d in duplicate_list]).flatten()
return_info_array = np.delete(info_array, remove_sequence_index)
return_data_array = np.delete(data_array, remove_data_index)
return_info_array['sequence'][:] = np.arange(return_info_array.shape[0])
return (return_info_array, return_data_array, duplicate_list)
|
remove duplicate blocks, removing the first duplicate as suggested by
Paul and Anna. Checks to make sure that the mag data are identical for
the duplicate blocks. Removes the blocks from the information and
data arrays and returns the reduced arrays. This should sync up the
timing of GPS stamps and index values.
:param np.array info_array: structured array of block information
:param np.array data_array: structured array of the data
:returns: reduced information array
:returns: reduced data array
:returns: index of duplicates in raw data
|
mth5/io/nims.py
|
remove_duplicates
|
kujaku11/mth5
| 5 |
python
|
def remove_duplicates(self, info_array, data_array):
'\n remove duplicate blocks, removing the first duplicate as suggested by\n Paul and Anna. Checks to make sure that the mag data are identical for\n the duplicate blocks. Removes the blocks from the information and\n data arrays and returns the reduced arrays. This should sync up the\n timing of GPS stamps and index values.\n\n :param np.array info_array: structured array of block information\n :param np.array data_array: structured array of the data\n\n :returns: reduced information array\n :returns: reduced data array\n :returns: index of duplicates in raw data\n\n '
duplicate_test_list = self._locate_duplicate_blocks(self.info_array['sequence'])
if (duplicate_test_list is None):
return (info_array, data_array, None)
duplicate_list = []
for d in duplicate_test_list:
if self._check_duplicate_blocks(data_array[d['ts_index_0']:d['ts_index_1']], data_array[d['ts_index_2']:d['ts_index_3']], info_array[d['sequence_index']], info_array[(d['sequence_index'] + 1)]):
duplicate_list.append(d)
self.logger.debug(f'Deleting {len(duplicate_list)} duplicate blocks')
remove_sequence_index = [d['sequence_index'] for d in duplicate_list]
remove_data_index = np.array([np.arange(d['ts_index_0'], d['ts_index_1'], 1) for d in duplicate_list]).flatten()
return_info_array = np.delete(info_array, remove_sequence_index)
return_data_array = np.delete(data_array, remove_data_index)
return_info_array['sequence'][:] = np.arange(return_info_array.shape[0])
return (return_info_array, return_data_array, duplicate_list)
|
def remove_duplicates(self, info_array, data_array):
'\n remove duplicate blocks, removing the first duplicate as suggested by\n Paul and Anna. Checks to make sure that the mag data are identical for\n the duplicate blocks. Removes the blocks from the information and\n data arrays and returns the reduced arrays. This should sync up the\n timing of GPS stamps and index values.\n\n :param np.array info_array: structured array of block information\n :param np.array data_array: structured array of the data\n\n :returns: reduced information array\n :returns: reduced data array\n :returns: index of duplicates in raw data\n\n '
duplicate_test_list = self._locate_duplicate_blocks(self.info_array['sequence'])
if (duplicate_test_list is None):
return (info_array, data_array, None)
duplicate_list = []
for d in duplicate_test_list:
if self._check_duplicate_blocks(data_array[d['ts_index_0']:d['ts_index_1']], data_array[d['ts_index_2']:d['ts_index_3']], info_array[d['sequence_index']], info_array[(d['sequence_index'] + 1)]):
duplicate_list.append(d)
self.logger.debug(f'Deleting {len(duplicate_list)} duplicate blocks')
remove_sequence_index = [d['sequence_index'] for d in duplicate_list]
remove_data_index = np.array([np.arange(d['ts_index_0'], d['ts_index_1'], 1) for d in duplicate_list]).flatten()
return_info_array = np.delete(info_array, remove_sequence_index)
return_data_array = np.delete(data_array, remove_data_index)
return_info_array['sequence'][:] = np.arange(return_info_array.shape[0])
return (return_info_array, return_data_array, duplicate_list)<|docstring|>remove duplicate blocks, removing the first duplicate as suggested by
Paul and Anna. Checks to make sure that the mag data are identical for
the duplicate blocks. Removes the blocks from the information and
data arrays and returns the reduced arrays. This should sync up the
timing of GPS stamps and index values.
:param np.array info_array: structured array of block information
:param np.array data_array: structured array of the data
:returns: reduced information array
:returns: reduced data array
:returns: index of duplicates in raw data<|endoftext|>
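When a duplicate pair is confirmed, remove_duplicates drops the first block of the pair from the info array and the matching sample_rate-wide slice from the flattened data, then renumbers the sequence. The core operation is np.delete; a toy demo with invented arrays and an assumed sample rate of 8.
import numpy as np

SAMPLE_RATE = 8
n_blocks = 5
info = np.arange(n_blocks)                     # stand-in for the per-block info array
data = np.arange(n_blocks * SAMPLE_RATE)       # stand-in for the flattened samples

# Suppose block 2 was confirmed as a duplicate of block 3: drop block 2.
drop_block = 2
drop_samples = np.arange(drop_block * SAMPLE_RATE, (drop_block + 1) * SAMPLE_RATE)

info_out = np.delete(info, [drop_block])
data_out = np.delete(data, drop_samples)
print(info_out.size, data_out.size)            # -> 4 32
print(np.arange(info_out.size))                # renumbered sequence, as remove_duplicates does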
|
5b4c70c67a531ebaef5e409fc829929e36e5529afa364841c43c929e59389876
|
def read_nims(self, fn=None):
'\n Read NIMS DATA.BIN file.\n\n 1. Read in the header information and stores those as attributes\n with the same names as in the header file.\n\n 2. Locate the beginning of the data blocks by looking for the\n first [1, 131, ...] combo. Anything before that is cut out.\n\n 3. Make sure the data is a multiple of the block length, if the\n data is longer the extra bits are cut off.\n\n 4. Read in the GPS data (3rd byte of each block) as characters.\n Parses those into valid GPS stamps with appropriate index locations\n of where the \'$\' was found.\n\n 5. Read in the data as unsigned 8-bit integers and reshape the array\n into [N, data_block_length]. Parse this array into the status\n information and the data.\n\n 6. Remove duplicate blocks, by removing the first of the duplicates\n as suggested by Anna and Paul.\n\n 7. Match the GPS locks from the status with valid GPS stamps.\n\n 8. Check to make sure that there is the correct number of seconds\n between the first and last GPS stamp. The extra seconds are cut\n off from the end of the time series. Not sure if this is the\n best way to accommodate gaps in the data.\n\n .. note:: The data and information array returned have the duplicates\n removed and the sequence reset to be monotonic.\n\n :param str fn: full path to DATA.BIN file\n\n :Example:\n\n >>> from mth5.io import nims\n >>> n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")\n\n\n '
if (fn is not None):
self.fn = fn
st = datetime.datetime.now()
self.read_header(self.fn)
with open(self.fn, 'rb') as fid:
fid.seek(self.data_start_seek)
self._raw_string = fid.read()
data = np.frombuffer(self._raw_string, dtype=np.uint8)
find_first = self.find_sequence(data[0:(self.block_size * 5)])[0]
data = data[find_first:]
self.gps_list = self.get_stamps(self._raw_string[find_first:])
if ((data.size % self.block_size) != 0):
self.logger.warning((f'odd number of bytes {data.size}, not even blocks ' + 'cutting down the data by {0} bytes'.format((data.size % self.block_size))))
end_data = (data.size - (data.size % self.block_size))
data = data[0:end_data]
data = data.reshape((int((data.size / self.block_size)), self.block_size))
self.info_array = np.zeros(data.shape[0], dtype=[('soh', np.int), ('block_len', np.int), ('status', np.int), ('gps', np.int), ('sequence', np.int), ('box_temp', np.float), ('head_temp', np.float), ('logic', np.int), ('end', np.int)])
for (key, index) in self._block_dict.items():
if ('temp' in key):
t_value = ((data[:, index[0]] * 256) + data[:, index[1]])
t_value[np.where((t_value > 32768))] -= 65536
value = ((t_value - self.t_offset) / self.t_conversion_factor)
else:
value = data[:, index]
self.info_array[key][:] = value
self.info_array['sequence'] = self.unwrap_sequence(self.info_array['sequence'])
data_array = np.zeros((data.shape[0] * self.sample_rate), dtype=[('hx', np.float), ('hy', np.float), ('hz', np.float), ('ex', np.float), ('ey', np.float)])
for (cc, comp) in enumerate(['hx', 'hy', 'hz', 'ex', 'ey']):
channel_arr = np.zeros((data.shape[0], 8), dtype=np.float)
for kk in range(self.sample_rate):
index = self.indices[(kk, cc)]
value = ((((data[:, index] * 256) + data[:, (index + 1)]) * np.array([256])) + data[:, (index + 2)])
value[np.where((value > self._int_max))] -= self._int_factor
channel_arr[:, kk] = value
data_array[comp][:] = channel_arr.flatten()
for comp in ['ex', 'ey']:
data_array[comp] *= (- 1)
(self.info_array, data_array, self.duplicate_list) = self.remove_duplicates(self.info_array, data_array)
self.stamps = self.match_status_with_gps_stamps(self.info_array['status'], self.gps_list)
self.ts_data = self.align_data(data_array, self.stamps)
et = datetime.datetime.now()
read_time = (et - st).total_seconds()
self.logger.info(f'Reading took {read_time:.2f} seconds')
|
Read NIMS DATA.BIN file.
1. Read in the header information and stores those as attributes
with the same names as in the header file.
2. Locate the beginning of the data blocks by looking for the
first [1, 131, ...] combo. Anything before that is cut out.
3. Make sure the data is a multiple of the block length, if the
data is longer the extra bits are cut off.
4. Read in the GPS data (3rd byte of each block) as characters.
Parses those into valid GPS stamps with appropriate index locations
of where the '$' was found.
5. Read in the data as unsigned 8-bit integers and reshape the array
into [N, data_block_length]. Parse this array into the status
information and the data.
6. Remove duplicate blocks, by removing the first of the duplicates
as suggested by Anna and Paul.
7. Match the GPS locks from the status with valid GPS stamps.
8. Check to make sure that there is the correct number of seconds
between the first and last GPS stamp. The extra seconds are cut
off from the end of the time series. Not sure if this is the
best way to accommodate gaps in the data.
.. note:: The data and information array returned have the duplicates
removed and the sequence reset to be monotonic.
:param str fn: full path to DATA.BIN file
:Example:
>>> from mth5.io import nims
>>> n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")
|
mth5/io/nims.py
|
read_nims
|
kujaku11/mth5
| 5 |
python
|
def read_nims(self, fn=None):
'\n Read NIMS DATA.BIN file.\n\n 1. Read in the header information and stores those as attributes\n with the same names as in the header file.\n\n 2. Locate the beginning of the data blocks by looking for the\n first [1, 131, ...] combo. Anything before that is cut out.\n\n 3. Make sure the data is a multiple of the block length, if the\n data is longer the extra bits are cut off.\n\n 4. Read in the GPS data (3rd byte of each block) as characters.\n Parses those into valid GPS stamps with appropriate index locations\n of where the \'$\' was found.\n\n 5. Read in the data as unsigned 8-bit integers and reshape the array\n into [N, data_block_length]. Parse this array into the status\n information and the data.\n\n 6. Remove duplicate blocks, by removing the first of the duplicates\n as suggested by Anna and Paul.\n\n 7. Match the GPS locks from the status with valid GPS stamps.\n\n 8. Check to make sure that there is the correct number of seconds\n between the first and last GPS stamp. The extra seconds are cut\n off from the end of the time series. Not sure if this is the\n best way to accommodate gaps in the data.\n\n .. note:: The data and information array returned have the duplicates\n removed and the sequence reset to be monotonic.\n\n :param str fn: full path to DATA.BIN file\n\n :Example:\n\n >>> from mth5.io import nims\n >>> n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")\n\n\n '
if (fn is not None):
self.fn = fn
st = datetime.datetime.now()
self.read_header(self.fn)
with open(self.fn, 'rb') as fid:
fid.seek(self.data_start_seek)
self._raw_string = fid.read()
data = np.frombuffer(self._raw_string, dtype=np.uint8)
find_first = self.find_sequence(data[0:(self.block_size * 5)])[0]
data = data[find_first:]
self.gps_list = self.get_stamps(self._raw_string[find_first:])
if ((data.size % self.block_size) != 0):
self.logger.warning((f'odd number of bytes {data.size}, not even blocks ' + 'cutting down the data by {0} bytes'.format((data.size % self.block_size))))
end_data = (data.size - (data.size % self.block_size))
data = data[0:end_data]
data = data.reshape((int((data.size / self.block_size)), self.block_size))
self.info_array = np.zeros(data.shape[0], dtype=[('soh', np.int), ('block_len', np.int), ('status', np.int), ('gps', np.int), ('sequence', np.int), ('box_temp', np.float), ('head_temp', np.float), ('logic', np.int), ('end', np.int)])
for (key, index) in self._block_dict.items():
if ('temp' in key):
t_value = ((data[:, index[0]] * 256) + data[:, index[1]])
t_value[np.where((t_value > 32768))] -= 65536
value = ((t_value - self.t_offset) / self.t_conversion_factor)
else:
value = data[:, index]
self.info_array[key][:] = value
self.info_array['sequence'] = self.unwrap_sequence(self.info_array['sequence'])
data_array = np.zeros((data.shape[0] * self.sample_rate), dtype=[('hx', np.float), ('hy', np.float), ('hz', np.float), ('ex', np.float), ('ey', np.float)])
for (cc, comp) in enumerate(['hx', 'hy', 'hz', 'ex', 'ey']):
channel_arr = np.zeros((data.shape[0], 8), dtype=np.float)
for kk in range(self.sample_rate):
index = self.indices[(kk, cc)]
value = ((((data[:, index] * 256) + data[:, (index + 1)]) * np.array([256])) + data[:, (index + 2)])
value[np.where((value > self._int_max))] -= self._int_factor
channel_arr[:, kk] = value
data_array[comp][:] = channel_arr.flatten()
for comp in ['ex', 'ey']:
data_array[comp] *= (- 1)
(self.info_array, data_array, self.duplicate_list) = self.remove_duplicates(self.info_array, data_array)
self.stamps = self.match_status_with_gps_stamps(self.info_array['status'], self.gps_list)
self.ts_data = self.align_data(data_array, self.stamps)
et = datetime.datetime.now()
read_time = (et - st).total_seconds()
self.logger.info(f'Reading took {read_time:.2f} seconds')
|
def read_nims(self, fn=None):
'\n Read NIMS DATA.BIN file.\n\n 1. Read in the header information and stores those as attributes\n with the same names as in the header file.\n\n 2. Locate the beginning of the data blocks by looking for the\n first [1, 131, ...] combo. Anything before that is cut out.\n\n 3. Make sure the data is a multiple of the block length, if the\n data is longer the extra bits are cut off.\n\n 4. Read in the GPS data (3rd byte of each block) as characters.\n Parses those into valid GPS stamps with appropriate index locations\n of where the \'$\' was found.\n\n 5. Read in the data as unsigned 8-bit integers and reshape the array\n into [N, data_block_length]. Parse this array into the status\n information and the data.\n\n 6. Remove duplicate blocks, by removing the first of the duplicates\n as suggested by Anna and Paul.\n\n 7. Match the GPS locks from the status with valid GPS stamps.\n\n 8. Check to make sure that there is the correct number of seconds\n between the first and last GPS stamp. The extra seconds are cut\n off from the end of the time series. Not sure if this is the\n best way to accommodate gaps in the data.\n\n .. note:: The data and information array returned have the duplicates\n removed and the sequence reset to be monotonic.\n\n :param str fn: full path to DATA.BIN file\n\n :Example:\n\n >>> from mth5.io import nims\n >>> n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")\n\n\n '
if (fn is not None):
self.fn = fn
st = datetime.datetime.now()
self.read_header(self.fn)
with open(self.fn, 'rb') as fid:
fid.seek(self.data_start_seek)
self._raw_string = fid.read()
data = np.frombuffer(self._raw_string, dtype=np.uint8)
find_first = self.find_sequence(data[0:(self.block_size * 5)])[0]
data = data[find_first:]
self.gps_list = self.get_stamps(self._raw_string[find_first:])
if ((data.size % self.block_size) != 0):
self.logger.warning((f'odd number of bytes {data.size}, not even blocks ' + 'cutting down the data by {0} bytes'.format((data.size % self.block_size))))
end_data = (data.size - (data.size % self.block_size))
data = data[0:end_data]
data = data.reshape((int((data.size / self.block_size)), self.block_size))
self.info_array = np.zeros(data.shape[0], dtype=[('soh', np.int), ('block_len', np.int), ('status', np.int), ('gps', np.int), ('sequence', np.int), ('box_temp', np.float), ('head_temp', np.float), ('logic', np.int), ('end', np.int)])
for (key, index) in self._block_dict.items():
if ('temp' in key):
t_value = ((data[:, index[0]] * 256) + data[:, index[1]])
t_value[np.where((t_value > 32768))] -= 65536
value = ((t_value - self.t_offset) / self.t_conversion_factor)
else:
value = data[:, index]
self.info_array[key][:] = value
self.info_array['sequence'] = self.unwrap_sequence(self.info_array['sequence'])
data_array = np.zeros((data.shape[0] * self.sample_rate), dtype=[('hx', np.float), ('hy', np.float), ('hz', np.float), ('ex', np.float), ('ey', np.float)])
for (cc, comp) in enumerate(['hx', 'hy', 'hz', 'ex', 'ey']):
channel_arr = np.zeros((data.shape[0], 8), dtype=np.float)
for kk in range(self.sample_rate):
index = self.indices[(kk, cc)]
value = ((((data[:, index] * 256) + data[:, (index + 1)]) * np.array([256])) + data[:, (index + 2)])
value[np.where((value > self._int_max))] -= self._int_factor
channel_arr[:, kk] = value
data_array[comp][:] = channel_arr.flatten()
for comp in ['ex', 'ey']:
data_array[comp] *= (- 1)
(self.info_array, data_array, self.duplicate_list) = self.remove_duplicates(self.info_array, data_array)
self.stamps = self.match_status_with_gps_stamps(self.info_array['status'], self.gps_list)
self.ts_data = self.align_data(data_array, self.stamps)
et = datetime.datetime.now()
read_time = (et - st).total_seconds()
self.logger.info(f'Reading took {read_time:.2f} seconds')<|docstring|>Read NIMS DATA.BIN file.
1. Read in the header information and stores those as attributes
with the same names as in the header file.
2. Locate the beginning of the data blocks by looking for the
first [1, 131, ...] combo. Anything before that is cut out.
3. Make sure the data is a multiple of the block length, if the
data is longer the extra bits are cut off.
4. Read in the GPS data (3rd byte of each block) as characters.
Parses those into valid GPS stamps with appropriate index locations
of where the '$' was found.
5. Read in the data as unsigned 8-bit integers and reshape the array
into [N, data_block_length]. Parse this array into the status
information and the data.
6. Remove duplicate blocks, by removing the first of the duplicates
as suggested by Anna and Paul.
7. Match the GPS locks from the status with valid GPS stamps.
8. Check to make sure that there is the correct number of seconds
between the first and last GPS stamp. The extra seconds are cut
off from the end of the time series. Not sure if this is the
best way to accommodate gaps in the data.
.. note:: The data and information array returned have the duplicates
removed and the sequence reset to be monotonic.
:param str fn: full path to DATA.BIN file
:Example:
>>> from mth5.io import nims
>>> n = nims.NIMS(r"/home/mt_data/nims/mt001.bin")<|endoftext|>
|
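As an aside on step 5 of the procedure above (parsing each block's three data bytes per sample into signed counts), the following is a minimal sketch of the same 24-bit decoding, assuming the usual two's-complement constants 2**23 - 1 and 2**24 for the wrap test; the helper name decode_sample is illustrative and not part of mth5.

def decode_sample(b0, b1, b2, int_max=(2 ** 23) - 1, int_factor=2 ** 24):
    # Combine three unsigned bytes into a big-endian 24-bit integer,
    # then fold values above the signed maximum into negative counts,
    # mirroring the value > _int_max branch in the reader above.
    value = (int(b0) * 256 + int(b1)) * 256 + int(b2)
    if value > int_max:
        value -= int_factor
    return value

print(decode_sample(0xFF, 0xFF, 0xFE))  # -2 under this convention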
7b0f7953082863e7271b8db2b22892940c295f1282c0cabd5a02cae8faea6ca4
|
def _get_first_gps_stamp(self, stamps):
'\n get the first GPRMC stamp\n '
for stamp in stamps:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None
|
get the first GPRMC stamp
|
mth5/io/nims.py
|
_get_first_gps_stamp
|
kujaku11/mth5
| 5 |
python
|
def _get_first_gps_stamp(self, stamps):
'\n \n '
for stamp in stamps:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None
|
def _get_first_gps_stamp(self, stamps):
'\n \n '
for stamp in stamps:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None<|docstring|>get the first GPRMC stamp<|endoftext|>
|
7eb3fa316542bb74a18ed14f44f9a089bc1f95efffbeede0a5531cf1b1f81f18
|
def _get_last_gps_stamp(self, stamps):
'\n get the last gprmc stamp\n '
for stamp in stamps[::(- 1)]:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None
|
get the last gprmc stamp
|
mth5/io/nims.py
|
_get_last_gps_stamp
|
kujaku11/mth5
| 5 |
python
|
def _get_last_gps_stamp(self, stamps):
'\n \n '
for stamp in stamps[::(- 1)]:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None
|
def _get_last_gps_stamp(self, stamps):
'\n \n '
for stamp in stamps[::(- 1)]:
if (stamp[1][0].gps_type in ['gprmc', 'GPRMC']):
return stamp
return None<|docstring|>get the last gprmc stamp<|endoftext|>
|
2d6be74f25f37f746a34a95242c4a6806607e12eccaa412f5a310af695157dc9
|
def _locate_timing_gaps(self, stamps):
'\n locate timing gaps in the data by comparing the stamp index with the\n GPS time stamp. The number of points and seconds should be the same\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: list of gap index values\n '
stamp_01 = self._get_first_gps_stamp(stamps)[1][0]
current_stamp = stamp_01
gap_beginning = []
total_gap = 0
for (ii, stamp) in enumerate(stamps[1:], 1):
stamp = stamp[1][0]
if (stamp.gps_type == 'GPGGA'):
continue
time_diff = (stamp.time_stamp - current_stamp.time_stamp).total_seconds()
index_diff = (stamp.index - current_stamp.index)
time_gap = (index_diff - time_diff)
if (time_gap == 0):
continue
elif (time_gap > 0):
total_gap += time_gap
current_stamp = stamp
gap_beginning.append(stamp.index)
            self.logger.debug('GPS stamp at {0} is off from previous time by {1} seconds'.format(stamp.time_stamp.isoformat(), time_gap))
self.logger.warning(f'Timing is off by {total_gap} seconds')
return gap_beginning
|
locate timing gaps in the data by comparing the stamp index with the
GPS time stamp. The number of points and seconds should be the same
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: list of gap index values
|
mth5/io/nims.py
|
_locate_timing_gaps
|
kujaku11/mth5
| 5 |
python
|
def _locate_timing_gaps(self, stamps):
'\n locate timing gaps in the data by comparing the stamp index with the\n GPS time stamp. The number of points and seconds should be the same\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: list of gap index values\n '
stamp_01 = self._get_first_gps_stamp(stamps)[1][0]
current_stamp = stamp_01
gap_beginning = []
total_gap = 0
for (ii, stamp) in enumerate(stamps[1:], 1):
stamp = stamp[1][0]
if (stamp.gps_type == 'GPGGA'):
continue
time_diff = (stamp.time_stamp - current_stamp.time_stamp).total_seconds()
index_diff = (stamp.index - current_stamp.index)
time_gap = (index_diff - time_diff)
if (time_gap == 0):
continue
elif (time_gap > 0):
total_gap += time_gap
current_stamp = stamp
gap_beginning.append(stamp.index)
            self.logger.debug('GPS stamp at {0} is off from previous time by {1} seconds'.format(stamp.time_stamp.isoformat(), time_gap))
self.logger.warning(f'Timing is off by {total_gap} seconds')
return gap_beginning
|
def _locate_timing_gaps(self, stamps):
'\n locate timing gaps in the data by comparing the stamp index with the\n GPS time stamp. The number of points and seconds should be the same\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: list of gap index values\n '
stamp_01 = self._get_first_gps_stamp(stamps)[1][0]
current_stamp = stamp_01
gap_beginning = []
total_gap = 0
for (ii, stamp) in enumerate(stamps[1:], 1):
stamp = stamp[1][0]
if (stamp.gps_type == 'GPGGA'):
continue
time_diff = (stamp.time_stamp - current_stamp.time_stamp).total_seconds()
index_diff = (stamp.index - current_stamp.index)
time_gap = (index_diff - time_diff)
if (time_gap == 0):
continue
elif (time_gap > 0):
total_gap += time_gap
current_stamp = stamp
gap_beginning.append(stamp.index)
            self.logger.debug('GPS stamp at {0} is off from previous time by {1} seconds'.format(stamp.time_stamp.isoformat(), time_gap))
self.logger.warning(f'Timing is off by {total_gap} seconds')
return gap_beginning<|docstring|>locate timing gaps in the data by comparing the stamp index with the
GPS time stamp. The number of points and seconds should be the same
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: list of gap index values<|endoftext|>
|
c853a3cd4ea914ba1828741ffa1347458c3598b44240a05d7fd3e145c1beb8ec
|
def check_timing(self, stamps):
'\n make sure that there are the correct number of seconds in between\n the first and last GPS GPRMC stamps\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: [ True | False ] if data is valid or not.\n :returns: gap index locations\n\n .. note:: currently it is assumed that if a data gap occurs the data can be\n squeezed to remove them. Probably a more elegant way of doing it.\n '
gaps = None
first_stamp = self._get_first_gps_stamp(stamps)[1][0]
last_stamp = self._get_last_gps_stamp(stamps)[1][0]
time_diff = (last_stamp.time_stamp - first_stamp.time_stamp)
index_diff = (last_stamp.index - first_stamp.index)
difference = (index_diff - time_diff.total_seconds())
if (difference != 0):
gaps = self._locate_timing_gaps(stamps)
return (False, gaps, difference)
return (True, gaps, difference)
|
make sure that there are the correct number of seconds in between
the first and last GPS GPRMC stamps
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: [ True | False ] if data is valid or not.
:returns: gap index locations
.. note:: currently it is assumed that if a data gap occurs the data can be
squeezed to remove them. Probably a more elegant way of doing it.
|
mth5/io/nims.py
|
check_timing
|
kujaku11/mth5
| 5 |
python
|
def check_timing(self, stamps):
'\n make sure that there are the correct number of seconds in between\n the first and last GPS GPRMC stamps\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: [ True | False ] if data is valid or not.\n :returns: gap index locations\n\n .. note:: currently it is assumed that if a data gap occurs the data can be\n squeezed to remove them. Probably a more elegant way of doing it.\n '
gaps = None
first_stamp = self._get_first_gps_stamp(stamps)[1][0]
last_stamp = self._get_last_gps_stamp(stamps)[1][0]
time_diff = (last_stamp.time_stamp - first_stamp.time_stamp)
index_diff = (last_stamp.index - first_stamp.index)
difference = (index_diff - time_diff.total_seconds())
if (difference != 0):
gaps = self._locate_timing_gaps(stamps)
return (False, gaps, difference)
return (True, gaps, difference)
|
def check_timing(self, stamps):
'\n make sure that there are the correct number of seconds in between\n the first and last GPS GPRMC stamps\n\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: [ True | False ] if data is valid or not.\n :returns: gap index locations\n\n .. note:: currently it is assumed that if a data gap occurs the data can be\n squeezed to remove them. Probably a more elegant way of doing it.\n '
gaps = None
first_stamp = self._get_first_gps_stamp(stamps)[1][0]
last_stamp = self._get_last_gps_stamp(stamps)[1][0]
time_diff = (last_stamp.time_stamp - first_stamp.time_stamp)
index_diff = (last_stamp.index - first_stamp.index)
difference = (index_diff - time_diff.total_seconds())
if (difference != 0):
gaps = self._locate_timing_gaps(stamps)
return (False, gaps, difference)
return (True, gaps, difference)<|docstring|>make sure that there are the correct number of seconds in between
the first and last GPS GPRMC stamps
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: [ True | False ] if data is valid or not.
:returns: gap index locations
.. note:: currently it is assumed that if a data gap occurs the data can be
squeezed to remove them. Probably a more elegant way of doing it.<|endoftext|>
|
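To make the bookkeeping in check_timing concrete, here is a hedged sketch of the same comparison using plain (block_index, datetime) pairs in place of the GPRMC stamp objects; the names are illustrative only. One data block spans one second, so the index difference between two stamps should equal the elapsed GPS seconds, and a nonzero difference is what triggers _locate_timing_gaps and the end-of-series trim.

import datetime

def timing_difference(first, last):
    # first and last are (block_index, timestamp) pairs taken from GPRMC stamps.
    index_diff = last[0] - first[0]                    # blocks, i.e. seconds of data
    time_diff = (last[1] - first[1]).total_seconds()   # elapsed GPS time
    return index_diff - time_diff                      # nonzero -> timing problem

first = (0, datetime.datetime(2021, 6, 1, 0, 0, 0))
last = (3600, datetime.datetime(2021, 6, 1, 0, 59, 58))
print(timing_difference(first, last))  # 2.0 -> two extra blocks relative to GPS time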
19d167f7a48ebeab34e35cf281251bd384dd1a5c76025af8226778e6f8ab59a6
|
def align_data(self, data_array, stamps):
'\n Need to match up the first good GPS stamp with the data\n\n Do this by using the first GPS stamp and assuming that the time from\n the first time stamp to the start is the index value.\n\n put the data into a pandas data frame that is indexed by time\n\n :param array data_array: structure array with columns for each\n component [hx, hy, hz, ex, ey]\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: pandas DataFrame with colums of components and indexed by\n time initialized by the start time.\n\n .. note:: Data gaps are squeezed cause not sure what a gap actually means.\n '
(timing_valid, self.gaps, time_difference) = self.check_timing(stamps)
if (time_difference > 0):
remove_points = int((time_difference * self.sample_rate))
data_array = data_array[0:(- remove_points)]
self.logger.info(f'Trimmed {remove_points} points off the end of the time series because of timing gaps')
first_stamp = self._get_first_gps_stamp(stamps)
first_index = first_stamp[0]
start_time = (first_stamp[1][0].time_stamp - datetime.timedelta(seconds=int(first_index)))
dt_index = self.make_dt_index(start_time.isoformat(), self.sample_rate, n_samples=data_array.shape[0])
return pd.DataFrame(data_array, index=dt_index)
|
Need to match up the first good GPS stamp with the data
Do this by using the first GPS stamp and assuming that the time from
the first time stamp to the start is the index value.
put the data into a pandas data frame that is indexed by time
:param array data_array: structure array with columns for each
component [hx, hy, hz, ex, ey]
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: pandas DataFrame with colums of components and indexed by
time initialized by the start time.
.. note:: Data gaps are squeezed cause not sure what a gap actually means.
|
mth5/io/nims.py
|
align_data
|
kujaku11/mth5
| 5 |
python
|
def align_data(self, data_array, stamps):
'\n Need to match up the first good GPS stamp with the data\n\n Do this by using the first GPS stamp and assuming that the time from\n the first time stamp to the start is the index value.\n\n put the data into a pandas data frame that is indexed by time\n\n :param array data_array: structure array with columns for each\n component [hx, hy, hz, ex, ey]\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: pandas DataFrame with colums of components and indexed by\n time initialized by the start time.\n\n .. note:: Data gaps are squeezed cause not sure what a gap actually means.\n '
(timing_valid, self.gaps, time_difference) = self.check_timing(stamps)
if (time_difference > 0):
remove_points = int((time_difference * self.sample_rate))
data_array = data_array[0:(- remove_points)]
self.logger.info(f'Trimmed {remove_points} points off the end of the time series because of timing gaps')
first_stamp = self._get_first_gps_stamp(stamps)
first_index = first_stamp[0]
start_time = (first_stamp[1][0].time_stamp - datetime.timedelta(seconds=int(first_index)))
dt_index = self.make_dt_index(start_time.isoformat(), self.sample_rate, n_samples=data_array.shape[0])
return pd.DataFrame(data_array, index=dt_index)
|
def align_data(self, data_array, stamps):
'\n Need to match up the first good GPS stamp with the data\n\n Do this by using the first GPS stamp and assuming that the time from\n the first time stamp to the start is the index value.\n\n put the data into a pandas data frame that is indexed by time\n\n :param array data_array: structure array with columns for each\n component [hx, hy, hz, ex, ey]\n :param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]\n\n :returns: pandas DataFrame with colums of components and indexed by\n time initialized by the start time.\n\n .. note:: Data gaps are squeezed cause not sure what a gap actually means.\n '
(timing_valid, self.gaps, time_difference) = self.check_timing(stamps)
if (time_difference > 0):
remove_points = int((time_difference * self.sample_rate))
data_array = data_array[0:(- remove_points)]
self.logger.info(f'Trimmed {remove_points} points off the end of the time series because of timing gaps')
first_stamp = self._get_first_gps_stamp(stamps)
first_index = first_stamp[0]
start_time = (first_stamp[1][0].time_stamp - datetime.timedelta(seconds=int(first_index)))
dt_index = self.make_dt_index(start_time.isoformat(), self.sample_rate, n_samples=data_array.shape[0])
return pd.DataFrame(data_array, index=dt_index)<|docstring|>Need to match up the first good GPS stamp with the data
Do this by using the first GPS stamp and assuming that the time from
the first time stamp to the start is the index value.
put the data into a pandas data frame that is indexed by time
:param array data_array: structure array with columns for each
component [hx, hy, hz, ex, ey]
:param list stamps: list of GPS stamps [[status_index, [GPRMC, GPGGA]]]
:returns: pandas DataFrame with colums of components and indexed by
time initialized by the start time.
.. note:: Data gaps are squeezed cause not sure what a gap actually means.<|endoftext|>
|
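A small sketch of the start-time arithmetic align_data relies on: the first GPRMC stamp sits first_index seconds into the record, so the series start is that stamp's time minus first_index seconds (the values here are made up for illustration).

import datetime

first_index = 42                                        # seconds of data before the first GPRMC fix
stamp_time = datetime.datetime(2021, 6, 1, 0, 10, 42)   # time reported by that stamp
start_time = stamp_time - datetime.timedelta(seconds=int(first_index))
print(start_time.isoformat())                           # 2021-06-01T00:10:00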
f1ce98a05a927fc2122f129a8cca4ebb65225fec608c4cd26e69ef36a1bd4df0
|
def calibrate_data(self, ts):
'\n Apply calibrations to data\n\n .. note:: this needs work, would not use this now.\n '
ts[['hx', 'hy', 'hz']] *= self.h_conversion_factor
ts[['ex', 'ey']] *= self.e_conversion_factor
ts['ex'] /= (self.ex_length / 1000.0)
ts['ey'] /= (self.ey_length / 1000.0)
return ts
|
Apply calibrations to data
.. note:: this needs work, would not use this now.
|
mth5/io/nims.py
|
calibrate_data
|
kujaku11/mth5
| 5 |
python
|
def calibrate_data(self, ts):
'\n Apply calibrations to data\n\n .. note:: this needs work, would not use this now.\n '
ts[['hx', 'hy', 'hz']] *= self.h_conversion_factor
ts[['ex', 'ey']] *= self.e_conversion_factor
ts['ex'] /= (self.ex_length / 1000.0)
ts['ey'] /= (self.ey_length / 1000.0)
return ts
|
def calibrate_data(self, ts):
'\n Apply calibrations to data\n\n .. note:: this needs work, would not use this now.\n '
ts[['hx', 'hy', 'hz']] *= self.h_conversion_factor
ts[['ex', 'ey']] *= self.e_conversion_factor
ts['ex'] /= (self.ex_length / 1000.0)
ts['ey'] /= (self.ey_length / 1000.0)
return ts<|docstring|>Apply calibrations to data
.. note:: this needs work, would not use this now.<|endoftext|>
|
c6faf4970f95945a1bf191ad342522bad00d7687b934d4b9b1a10d97480bb425
|
def make_dt_index(self, start_time, sample_rate, stop_time=None, n_samples=None):
'\n make time index array\n\n .. note:: date-time format should be YYYY-M-DDThh:mm:ss.ms UTC\n\n :param start_time: start time\n :type start_time: string\n\n :param end_time: end time\n :type end_time: string\n\n :param sample_rate: sample_rate in samples/second\n :type sample_rate: float\n '
dt_freq = '{0:.0f}N'.format(((1.0 / sample_rate) * 1000000000.0))
if (stop_time is not None):
dt_index = pd.date_range(start=start_time, end=stop_time, freq=dt_freq, closed='left', tz='UTC')
elif (n_samples is not None):
dt_index = pd.date_range(start=start_time, periods=n_samples, freq=dt_freq, tz='UTC')
else:
raise ValueError('Need to input either stop_time or n_samples')
return dt_index
|
make time index array
.. note:: date-time format should be YYYY-M-DDThh:mm:ss.ms UTC
:param start_time: start time
:type start_time: string
:param end_time: end time
:type end_time: string
:param sample_rate: sample_rate in samples/second
:type sample_rate: float
|
mth5/io/nims.py
|
make_dt_index
|
kujaku11/mth5
| 5 |
python
|
def make_dt_index(self, start_time, sample_rate, stop_time=None, n_samples=None):
'\n make time index array\n\n .. note:: date-time format should be YYYY-M-DDThh:mm:ss.ms UTC\n\n :param start_time: start time\n :type start_time: string\n\n :param end_time: end time\n :type end_time: string\n\n :param sample_rate: sample_rate in samples/second\n :type sample_rate: float\n '
dt_freq = '{0:.0f}N'.format(((1.0 / sample_rate) * 1000000000.0))
if (stop_time is not None):
dt_index = pd.date_range(start=start_time, end=stop_time, freq=dt_freq, closed='left', tz='UTC')
elif (n_samples is not None):
dt_index = pd.date_range(start=start_time, periods=n_samples, freq=dt_freq, tz='UTC')
else:
raise ValueError('Need to input either stop_time or n_samples')
return dt_index
|
def make_dt_index(self, start_time, sample_rate, stop_time=None, n_samples=None):
'\n make time index array\n\n .. note:: date-time format should be YYYY-M-DDThh:mm:ss.ms UTC\n\n :param start_time: start time\n :type start_time: string\n\n :param end_time: end time\n :type end_time: string\n\n :param sample_rate: sample_rate in samples/second\n :type sample_rate: float\n '
dt_freq = '{0:.0f}N'.format(((1.0 / sample_rate) * 1000000000.0))
if (stop_time is not None):
dt_index = pd.date_range(start=start_time, end=stop_time, freq=dt_freq, closed='left', tz='UTC')
elif (n_samples is not None):
dt_index = pd.date_range(start=start_time, periods=n_samples, freq=dt_freq, tz='UTC')
else:
raise ValueError('Need to input either stop_time or n_samples')
return dt_index<|docstring|>make time index array
.. note:: date-time format should be YYYY-M-DDThh:mm:ss.ms UTC
:param start_time: start time
:type start_time: string
:param end_time: end time
:type end_time: string
:param sample_rate: sample_rate in samples/second
:type sample_rate: float<|endoftext|>
|
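A hedged usage sketch of the pandas call make_dt_index builds, shown for an 8 samples/second series; note the 'N' (nanosecond) frequency suffix and the closed='left' argument follow older pandas conventions and may warn or behave differently on current releases.

import pandas as pd

sample_rate = 8.0
dt_freq = '{0:.0f}N'.format((1.0 / sample_rate) * 1e9)  # '125000000N', one sample every 125 ms
dt_index = pd.date_range(start='2021-06-01T00:00:00', periods=16, freq=dt_freq, tz='UTC')
print(dt_index[0], dt_index[-1])  # spans 15 * 0.125 = 1.875 seconds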
f903c45a082ca4a5acf834d703272fe61d2200d8659a5b6fb63907a5545221e7
|
def get_electric_high_pass(self, hardware='pc'):
'\n get the electric high pass filter based on the hardware\n '
self.hardware = hardware
if ('pc' in hardware.lower()):
return self.electric_high_pass_pc
elif ('hp' in hardware.lower()):
return self.electric_high_pass_hp
else:
raise ResponseError('Hardware value {0} not understood'.format(self.hardware))
|
get the electric high pass filter based on the hardware
|
mth5/io/nims.py
|
get_electric_high_pass
|
kujaku11/mth5
| 5 |
python
|
def get_electric_high_pass(self, hardware='pc'):
'\n \n '
self.hardware = hardware
if ('pc' in hardware.lower()):
return self.electric_high_pass_pc
elif ('hp' in hardware.lower()):
return self.electric_high_pass_hp
else:
raise ResponseError('Hardware value {0} not understood'.format(self.hardware))
|
def get_electric_high_pass(self, hardware='pc'):
'\n \n '
self.hardware = hardware
if ('pc' in hardware.lower()):
return self.electric_high_pass_pc
elif ('hp' in hardware.lower()):
return self.electric_high_pass_hp
else:
raise ResponseError('Hardware value {0} not understood'.format(self.hardware))<|docstring|>get the electric high pass filter based on the hardware<|endoftext|>
|
1885b552c5e901f90033b016d3524c06ed697546c5b4031b5994659963452f86
|
def _get_dt_filter(self, channel, sample_rate):
    '\n get the DT filter based on channel and sampling rate\n '
dt_filter = {'type': 'dt', 'name': 'time_offset', 'parameters': {'offset': self.time_delays_dict[sample_rate][channel]}}
return dt_filter
|
get the DT filter based on channel and sampling rate
|
mth5/io/nims.py
|
_get_dt_filter
|
kujaku11/mth5
| 5 |
python
|
def _get_dt_filter(self, channel, sample_rate):
'\n \n '
dt_filter = {'type': 'dt', 'name': 'time_offset', 'parameters': {'offset': self.time_delays_dict[sample_rate][channel]}}
return dt_filter
|
def _get_dt_filter(self, channel, sample_rate):
'\n \n '
dt_filter = {'type': 'dt', 'name': 'time_offset', 'parameters': {'offset': self.time_delays_dict[sample_rate][channel]}}
    return dt_filter<|docstring|>get the DT filter based on channel and sampling rate<|endoftext|>
|
ad357ef06f5694a6d8405be39269c71a4e97807fa24cb114e2be3c7073d73453
|
def _get_mag_filter(self, channel):
'\n get mag filter, seems to be the same no matter what\n '
filter_list = [self.mag_low_pass]
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.h_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict
|
get mag filter, seems to be the same no matter what
|
mth5/io/nims.py
|
_get_mag_filter
|
kujaku11/mth5
| 5 |
python
|
def _get_mag_filter(self, channel):
'\n \n '
filter_list = [self.mag_low_pass]
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.h_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict
|
def _get_mag_filter(self, channel):
'\n \n '
filter_list = [self.mag_low_pass]
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.h_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict<|docstring|>get mag filter, seems to be the same no matter what<|endoftext|>
|
20e8baf30a06d6fe000baf0d5685ee7bc7d3b6783b18f19ca7bfea0804b803f8
|
def _get_electric_filter(self, channel):
'\n Get electric filter\n '
filter_list = []
if (self.instrument_type in ['backbone']):
filter_list.append(self.get_electric_high_pass(self.hardware))
filter_list.append(self.electric_low_pass)
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.e_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict
|
Get electric filter
|
mth5/io/nims.py
|
_get_electric_filter
|
kujaku11/mth5
| 5 |
python
|
def _get_electric_filter(self, channel):
'\n \n '
filter_list = []
if (self.instrument_type in ['backbone']):
filter_list.append(self.get_electric_high_pass(self.hardware))
filter_list.append(self.electric_low_pass)
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.e_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict
|
def _get_electric_filter(self, channel):
'\n \n '
filter_list = []
if (self.instrument_type in ['backbone']):
filter_list.append(self.get_electric_high_pass(self.hardware))
filter_list.append(self.electric_low_pass)
filter_list.append(self._get_dt_filter(channel, self.sample_rate))
return_dict = {'channel_id': channel, 'gain': 1, 'conversion_factor': self.e_conversion_factor, 'units': 'nT', 'filters': filter_list}
return return_dict<|docstring|>Get electric filter<|endoftext|>
|
b55bb0de81d895f514d955b0f05fddeae960bff05eb9f5503c6486906854c7f4
|
@property
def hx_filter(self):
'HX filter'
return self._get_mag_filter('hx')
|
HX filter
|
mth5/io/nims.py
|
hx_filter
|
kujaku11/mth5
| 5 |
python
|
@property
def hx_filter(self):
return self._get_mag_filter('hx')
|
@property
def hx_filter(self):
return self._get_mag_filter('hx')<|docstring|>HX filter<|endoftext|>
|
b69742585bb3e4400af2b2e7e544c9c85efc5d7763f30473a780af8ebe23cdff
|
@property
def hy_filter(self):
'HY Filter'
return self._get_mag_filter('hy')
|
HY Filter
|
mth5/io/nims.py
|
hy_filter
|
kujaku11/mth5
| 5 |
python
|
@property
def hy_filter(self):
return self._get_mag_filter('hy')
|
@property
def hy_filter(self):
return self._get_mag_filter('hy')<|docstring|>HY Filter<|endoftext|>
|
46b953c373ee05b14d31a65db01d00fc04ec5485871cab265a638371920c50e2
|
def getUSGSnwis(stationids, starttime, endtime, ncfile):
'\n Main function for grabbing the data and saving it to a netcdf file\n '
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
time = []
discharge = []
for sid in stationids:
meta = readUSGSmeta(sid, meta=meta)
(tt, dd) = readUSGStxt(sid, starttime, endtime)
time.append(tt)
discharge.append(dd)
USGS2netcdf(ncfile, meta, time, discharge)
|
Main function for grabbing the data and saving it to a netcdf file
|
sfoda/dataio/datadownload/getUSGSnwis.py
|
getUSGSnwis
|
mrayson/sfoda
| 1 |
python
|
def getUSGSnwis(stationids, starttime, endtime, ncfile):
'\n \n '
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
time = []
discharge = []
for sid in stationids:
meta = readUSGSmeta(sid, meta=meta)
(tt, dd) = readUSGStxt(sid, starttime, endtime)
time.append(tt)
discharge.append(dd)
USGS2netcdf(ncfile, meta, time, discharge)
|
def getUSGSnwis(stationids, starttime, endtime, ncfile):
'\n \n '
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
time = []
discharge = []
for sid in stationids:
meta = readUSGSmeta(sid, meta=meta)
(tt, dd) = readUSGStxt(sid, starttime, endtime)
time.append(tt)
discharge.append(dd)
USGS2netcdf(ncfile, meta, time, discharge)<|docstring|>Main function for grabbing the data and saving it to a netcdf file<|endoftext|>
|
cdd1a3232a1c4da724adb56c6d2e6f934f2304f6e428fe3111835b6da7ae23fc
|
def readUSGStxt(stationid, starttime, endtime):
'\n Read the daily station data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
scale_fac = 0.0283168
    target_url = ('http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=%s&startDT=%s&endDT=%s&parameterCd=00060' % (stationid, starttime, endtime))
try:
print(('Opening: %s' % target_url))
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
StationID = []
time = []
discharge = []
for s in f:
line = s.split()
if (line[0] == 'USGS'):
StationID.append(line[1])
time.append(datetime.strptime(line[2], '%Y-%m-%d'))
discharge.append((float(line[3]) * scale_fac))
f.close()
return (np.asarray(time), np.asarray(discharge))
|
Read the daily station data from a web-service
See here:
http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples
|
sfoda/dataio/datadownload/getUSGSnwis.py
|
readUSGStxt
|
mrayson/sfoda
| 1 |
python
|
def readUSGStxt(stationid, starttime, endtime):
'\n Read the daily station data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
scale_fac = 0.0283168
    target_url = ('http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=%s&startDT=%s&endDT=%s&parameterCd=00060' % (stationid, starttime, endtime))
try:
print(('Opening: %s' % target_url))
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
StationID = []
time = []
discharge = []
for s in f:
line = s.split()
if (line[0] == 'USGS'):
StationID.append(line[1])
time.append(datetime.strptime(line[2], '%Y-%m-%d'))
discharge.append((float(line[3]) * scale_fac))
f.close()
return (np.asarray(time), np.asarray(discharge))
|
def readUSGStxt(stationid, starttime, endtime):
'\n Read the daily station data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
scale_fac = 0.0283168
    target_url = ('http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=%s&startDT=%s&endDT=%s&parameterCd=00060' % (stationid, starttime, endtime))
try:
print(('Opening: %s' % target_url))
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
StationID = []
time = []
discharge = []
for s in f:
line = s.split()
if (line[0] == 'USGS'):
StationID.append(line[1])
time.append(datetime.strptime(line[2], '%Y-%m-%d'))
discharge.append((float(line[3]) * scale_fac))
f.close()
return (np.asarray(time), np.asarray(discharge))<|docstring|>Read the daily station data from a web-service
See here:
http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples<|endoftext|>
|
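For reference, a hedged sketch of the daily-values request the function assembles and of the unit conversion it applies; the station id and dates are placeholders, and 0.0283168 is the cubic-feet-per-second to cubic-metres-per-second factor used above.

station, start, end = '01646500', '2020-01-01', '2020-01-31'  # placeholder station id and dates
url = ('http://waterservices.usgs.gov/nwis/dv/'
       '?format=rdb&sites=%s&startDT=%s&endDT=%s&parameterCd=00060' % (station, start, end))
cfs_to_cms = 0.0283168  # cubic feet per second -> cubic metres per second
print(url)
print(100.0 * cfs_to_cms)  # a 100 cfs reading is roughly 2.83 m**3/s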
579a5eb333678b99fc4552b9e5f90b718e662cf414dc789d3292a05114180201
|
def readUSGSmeta(stationid, meta=None):
'\n Read the station meta data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
target_url = ('http://waterservices.usgs.gov/nwis/site/?format=rdb&sites=%s' % stationid)
try:
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
if (meta == None):
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
for s in f:
line = s.split('\t')
if (line[0] == 'USGS'):
meta['Station ID'].append(line[1])
meta['StationName'].append(line[2])
meta['Latitude'].append(float(line[4]))
meta['Longitude'].append(float(line[5]))
return meta
|
Read the station meta data from a web-service
See here:
http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples
|
sfoda/dataio/datadownload/getUSGSnwis.py
|
readUSGSmeta
|
mrayson/sfoda
| 1 |
python
|
def readUSGSmeta(stationid, meta=None):
'\n Read the station meta data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
target_url = ('http://waterservices.usgs.gov/nwis/site/?format=rdb&sites=%s' % stationid)
try:
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
if (meta == None):
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
for s in f:
line = s.split('\t')
if (line[0] == 'USGS'):
meta['Station ID'].append(line[1])
meta['StationName'].append(line[2])
meta['Latitude'].append(float(line[4]))
meta['Longitude'].append(float(line[5]))
return meta
|
def readUSGSmeta(stationid, meta=None):
'\n Read the station meta data from a web-service\n \n See here:\n http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples\n '
target_url = ('http://waterservices.usgs.gov/nwis/site/?format=rdb&sites=%s' % stationid)
try:
f = urllib.request.urlopen(target_url)
except:
raise Exception(('cannot open url:\n%s' % target_url))
if (meta == None):
meta = {}
meta.update({'Station ID': []})
meta.update({'StationName': []})
meta.update({'Latitude': []})
meta.update({'Longitude': []})
for s in f:
line = s.split('\t')
if (line[0] == 'USGS'):
meta['Station ID'].append(line[1])
meta['StationName'].append(line[2])
meta['Latitude'].append(float(line[4]))
meta['Longitude'].append(float(line[5]))
return meta<|docstring|>Read the station meta data from a web-service
See here:
http://waterdata.usgs.gov/nwis/news/?automated_retrieval_info#Examples<|endoftext|>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.