{
"source": "jordan-gleeson/slam-visualiser",
"score": 3
}
#### File: jordan-gleeson/slam-visualiser/gui.py
```python
import numpy as np
import pygame
import pygame_gui as pygui
import utils
class GUI():
"""Contains all aspects of the GUI.
Handles setup of GUI elements and the handling of input events.
Attributes:
_p_screen: The main pygame screen surface.
_p_world: The world map object.
_p_robot: The robot object.
_p_slam: The slam algorithm object.
"""
def __init__(self, _p_screen, _p_world, _p_robot, _p_slam):
self.screen = _p_screen
self.world = _p_world
self.robot = _p_robot
self.slam = _p_slam
self.manager = pygui.UIManager(self.screen.get_size(), 'theme.json')
self.manager.set_visual_debug_mode(False)
self.settings_window = None
# Main Menu Setup
self.main_menu_state = True
self.main_menu()
# Settings Button Setup
self.toggle_lidar_btn = None
self.toggle_occupancy_grid_btn = None
self.toggle_positions_btn = None
self.done_btn = None
self.settings_button = None
self.reset_btn = None
self.settings_tlt_pnl = None
self.settings_lbl_pnl = None
# Position Visualisation Setup
self.draw_positions = True
# World Editor Setup
self.last_mouse_pos = None
self.we_done_btn = None
self.we_clear_btn = None
self.we_mode_btn = None
self.we_draw_mode = True
self.we_raise_click = False
def main_menu(self):
"""Setup the main menu."""
_button_width = 110
_button_height = 40
_vert_out_padding = 60
_hor_out_padding = 60
_vert_in_padding = 30
_hor_in_padding = 30
_vert_inner_padding = 20
_hor_inner_padding = 20
_start_button_height = 80
_main_menu_pnl_rect = pygame.Rect((0, 0),
(self.screen.get_width(), self.screen.get_height()))
self.main_menu_pnl = pygui.elements.UIPanel(_main_menu_pnl_rect, 0,
self.manager,
object_id="background_panel")
_title_panel_pos = (_hor_out_padding, _vert_out_padding)
_title_panel_size = (self.screen.get_width() -
_hor_out_padding * 2, 100)
_title_panel_rect = pygame.Rect(_title_panel_pos, _title_panel_size)
self.title_pnl = pygui.elements.UIPanel(_title_panel_rect, 0,
manager=self.manager,
object_id="title_panel")
_title_size = (354, 45)
_title_pos = (_title_panel_pos[0] + _hor_inner_padding,
_title_panel_pos[1] + _title_panel_size[1] / 2 - _title_size[1] / 2 + 5)
_title_rect = pygame.Rect(_title_pos, _title_size)
self.title = pygui.elements.UILabel(_title_rect,
"SLAM Visualiser",
self.manager,
object_id="title_label")
_setup_panel_pos = (_hor_out_padding,
_title_panel_pos[1] + _title_panel_size[1] + _vert_in_padding)
_setup_panel_size = ((self.screen.get_width() - _hor_out_padding * 2) / 3 - _hor_in_padding / 2,
self.screen.get_height() - _hor_out_padding * 2 - _hor_in_padding * 2 - _title_panel_size[1] - _start_button_height)
_setup_panel_rect = pygame.Rect(_setup_panel_pos, _setup_panel_size)
self.setup_pnl = pygui.elements.UIPanel(_setup_panel_rect, 0,
manager=self.manager,
object_id="menu_panel")
_setup_label_panel_pos = (_setup_panel_pos[0] + _hor_inner_padding,
_setup_panel_pos[1] + _vert_inner_padding)
_setup_label_panel_size = (_setup_panel_size[0] - _hor_inner_padding * 2,
70)
_setup_label_panel_rect = pygame.Rect(_setup_label_panel_pos,
_setup_label_panel_size)
self.setup_lbl_pnl = pygui.elements.UIPanel(_setup_label_panel_rect, 0,
manager=self.manager,
object_id="title_panel")
_setup_title_size = (98, 35)
_setup_title_pos = (_setup_label_panel_pos[0] + _hor_inner_padding,
_setup_label_panel_pos[1] + _setup_label_panel_size[1] / 2 - _setup_title_size[1] / 2 + 3)
_setup_title_rect = pygame.Rect(_setup_title_pos, _setup_title_size)
self.setup_ttl = pygui.elements.UILabel(_setup_title_rect,
"Setup",
self.manager,
object_id="panel_title_label")
_world_edit_size = (_button_width, _button_height)
_world_edit_pos = (_setup_label_panel_pos[0],
_setup_label_panel_pos[1] + _setup_label_panel_size[1] + _vert_inner_padding)
_world_edit_rect = pygame.Rect(_world_edit_pos, _world_edit_size)
self.world_edit_btn = pygui.elements.UIButton(relative_rect=_world_edit_rect,
text="Edit World",
manager=self.manager,
object_id="setup_button")
_slam_type_size = (_button_width + 60, _button_height)
_slam_type_pos = (_setup_label_panel_pos[0],
_world_edit_pos[1] + _world_edit_size[1] + _vert_inner_padding)
_slam_type_rect = pygame.Rect(_slam_type_pos, _slam_type_size)
_slam_list = ["Occupancy Grid", "Landmarks"]
self.slam_type_drop = pygui.elements.UIDropDownMenu(relative_rect=_slam_type_rect,
options_list=_slam_list,
starting_option="Occupancy Grid",
manager=self.manager,
object_id="setup_dropdown",
expansion_height_limit=len(_slam_list)*50)
_start_button_pos = (_hor_out_padding,
_setup_panel_pos[1] + _setup_panel_size[1] + _vert_in_padding)
_start_button_size = (_setup_panel_size[0], _start_button_height)
_start_button_rect = pygame.Rect(_start_button_pos, _start_button_size)
self.start_btn = pygui.elements.UIButton(relative_rect=_start_button_rect,
text="Start",
manager=self.manager,
object_id="start_button")
_preview_panel_pos = (_setup_panel_pos[0] + _setup_panel_size[0] + _hor_in_padding,
_setup_panel_pos[1])
_preview_panel_size = (_setup_panel_size[0] * 2 + _hor_in_padding / 2,
(_start_button_pos[1] + _start_button_height - _setup_panel_pos[1]) / 2 - _hor_in_padding / 2)
_preview_panel_rect = pygame.Rect(_preview_panel_pos,
_preview_panel_size)
self.preview_pnl = pygui.elements.UIPanel(_preview_panel_rect, 0,
manager=self.manager,
object_id="menu_panel")
_instructions_panel_pos = (_preview_panel_pos[0],
_preview_panel_pos[1] + _preview_panel_size[1] + _hor_in_padding)
_instructions_panel_size = _preview_panel_size
_instructions_panel_rect = pygame.Rect(_instructions_panel_pos,
_instructions_panel_size)
self.instructions_pnl = pygui.elements.UIPanel(_instructions_panel_rect, 0,
manager=self.manager,
object_id="menu_panel")
_preview_label_panel_pos = (_preview_panel_pos[0] + _hor_inner_padding,
_preview_panel_pos[1] + _vert_inner_padding)
_preview_label_panel_size = (_preview_panel_size[0] - _hor_inner_padding * 2,
50)
_preview_label_panel_rect = pygame.Rect(_preview_label_panel_pos,
_preview_label_panel_size)
self.preview_lbl_pnl = pygui.elements.UIPanel(_preview_label_panel_rect, 0,
manager=self.manager,
object_id="title_panel")
_preview_title_size = (138, 35)
_preview_title_pos = (_preview_label_panel_pos[0] + _hor_inner_padding,
_preview_label_panel_pos[1] + _preview_label_panel_size[1] / 2 - _preview_title_size[1] / 2 + 3)
_preview_title_rect = pygame.Rect(_preview_title_pos,
_preview_title_size)
self.preview_ttl = pygui.elements.UILabel(_preview_title_rect,
"Preview",
self.manager,
object_id="panel_title_label")
_instructions_label_panel_pos = (_instructions_panel_pos[0] + _hor_inner_padding,
_instructions_panel_pos[1] + _vert_inner_padding)
_instructions_label_panel_size = (_instructions_panel_size[0] - _hor_inner_padding * 2,
50)
_instructions_label_panel_rect = pygame.Rect(_instructions_label_panel_pos,
_instructions_label_panel_size)
self.instructions_lbl_pnl = pygui.elements.UIPanel(_instructions_label_panel_rect, 0,
manager=self.manager,
object_id="title_panel")
_instructions_title_size = (202, 35)
_instructions_title_pos = (_instructions_label_panel_pos[0] + _hor_inner_padding,
_instructions_label_panel_pos[1] + _instructions_label_panel_size[1] / 2 - _instructions_title_size[1] / 2 + 3)
_instructions_title_rect = pygame.Rect(_instructions_title_pos,
_instructions_title_size)
self.instructions_ttl = pygui.elements.UILabel(_instructions_title_rect,
"Instructions",
self.manager,
object_id="panel_title_label")
def setup_game(self, _world_edited):
"""Add game buttons. Write the world map to sprites."""
_settings_rect_size = (80, 30)
_settings_rect = pygame.Rect((self.screen.get_size()[0] - 10 - _settings_rect_size[0], 10),
_settings_rect_size)
self.settings_button = pygui.elements.UIButton(relative_rect=_settings_rect,
text="Settings",
manager=self.manager,
container=self.settings_window,
object_id="setup_button")
self.kill_main_menu()
if not _world_edited:
self.world.write_map(self.robot.robot.robot_size)
self.world.create_sprites()
def kill_main_menu(self):
"""Removes main menu buttons."""
try:
self.main_menu_pnl.kill()
self.world_edit_btn.kill()
self.start_btn.kill()
self.title.kill()
self.title_pnl.kill()
self.setup_pnl.kill()
self.setup_lbl_pnl.kill()
self.setup_ttl.kill()
self.preview_pnl.kill()
self.instructions_pnl.kill()
self.preview_ttl.kill()
self.preview_lbl_pnl.kill()
self.instructions_lbl_pnl.kill()
self.instructions_ttl.kill()
self.slam_type_drop.kill()
        except AttributeError:
            pass
def kill_world_editor(self):
"""Removes world editor buttons."""
try:
self.we_done_btn.kill()
self.we_clear_btn.kill()
self.we_mode_btn.kill()
        except AttributeError:
            pass
def update(self, _time_delta):
"""Draws the GUI."""
self.position_draw()
self.manager.update(_time_delta)
self.manager.draw_ui(self.screen)
def input(self, _event):
"""Handles pygame_gui input events."""
if _event.user_type == pygui.UI_BUTTON_PRESSED:
if _event.ui_element == self.toggle_lidar_btn:
self.robot.robot.toggle_lidar()
if _event.ui_element == self.toggle_occupancy_grid_btn:
self.slam.toggle_occupancy_grid()
if _event.ui_element == self.settings_button:
self.settings()
if _event.ui_element == self.toggle_positions_btn:
self.toggle_positions()
if _event.ui_element == self.done_btn:
self.settings_window.kill()
if _event.ui_element == self.start_btn:
self.main_menu_state = 0
if _event.ui_element == self.world_edit_btn:
self.main_menu_state = 2
if _event.ui_element == self.we_done_btn:
self.main_menu_state = 1
if _event.ui_element == self.we_clear_btn:
self.world.clear_map()
if _event.ui_element == self.we_mode_btn:
self.world_editor_mode_button()
if _event.ui_element == self.reset_btn:
self.reset()
def settings(self):
"""Settings window setup."""
_button_width = 110
_button_height = 40
_vert_padding = 15
_hor_padding = 30
_button_count = 6
_border = 4 * 1.5
_setting_window_size = (_button_width + _hor_padding * 2,
_button_height * _button_count + _vert_padding * (_button_count + 1))
_settings_window_pos = (self.screen.get_size()[0] - _setting_window_size[0] - 10,
10)
_settings_window_rect = pygame.Rect(_settings_window_pos,
_setting_window_size)
self.settings_window = pygui.elements.UIWindow(rect=_settings_window_rect,
manager=self.manager,
object_id="settings_window")
_settings_label_panel_pos = (_hor_padding - _border, _vert_padding)
_settings_label_panel_size = (_button_width, _button_height)
_settings_label_panel_rect = pygame.Rect(_settings_label_panel_pos,
_settings_label_panel_size)
self.settings_lbl_pnl = pygui.elements.UIPanel(_settings_label_panel_rect, 0,
manager=self.manager,
container=self.settings_window,
object_id="title_panel")
_settings_title_size = (80, 25)
_settings_title_pos = (_hor_padding + 10 - _border,
_settings_label_panel_pos[1] + (_settings_label_panel_size[1] - _settings_title_size[1]) / 2 + 3)
_settings_title_rect = pygame.Rect(_settings_title_pos,
_settings_title_size)
self.settings_tlt_pnl = pygui.elements.UILabel(_settings_title_rect,
"Settings",
self.manager,
container=self.settings_window,
object_id="settings_title_label")
# Button Setup
_lidar_button_pos = (_hor_padding - _border,
_settings_label_panel_pos[1] + _button_height + _vert_padding)
_lidar_button_rect = pygame.Rect(
_lidar_button_pos, (_button_width, _button_height))
self.toggle_lidar_btn = pygui.elements.UIButton(relative_rect=_lidar_button_rect,
text="Toggle Lidar",
manager=self.manager,
container=self.settings_window,
object_id="setup_button")
_occupancy_button_pos = (_hor_padding - _border,
_lidar_button_pos[1] + _vert_padding + _button_height)
_occupancy_button_rect = pygame.Rect(_occupancy_button_pos,
(_button_width, _button_height))
self.toggle_occupancy_grid_btn = pygui.elements.UIButton(relative_rect=_occupancy_button_rect,
text="Toggle Grid",
manager=self.manager,
container=self.settings_window,
object_id="setup_button")
_positions_button_pos = (_hor_padding - _border,
_occupancy_button_pos[1] + _vert_padding + _button_height)
_positions_button_rect = pygame.Rect(_positions_button_pos,
(_button_width, _button_height))
self.toggle_positions_btn = pygui.elements.UIButton(relative_rect=_positions_button_rect,
text="Toggle Pos",
manager=self.manager,
container=self.settings_window,
object_id="setup_button")
_reset_button_pos = (_hor_padding - _border,
_positions_button_pos[1] + _vert_padding + _button_height)
_reset_button_rect = pygame.Rect(_reset_button_pos,
(_button_width, _button_height))
self.reset_btn = pygui.elements.UIButton(relative_rect=_reset_button_rect,
text="Reset",
manager=self.manager,
container=self.settings_window,
object_id="setup_button")
_done_button_pos = (_hor_padding - _border,
_reset_button_pos[1] + _vert_padding + _button_height)
_done_button_rect = pygame.Rect(_done_button_pos,
(_button_width, _button_height))
self.done_btn = pygui.elements.UIButton(relative_rect=_done_button_rect,
text="Done",
manager=self.manager,
container=self.settings_window,
object_id="done_button")
def position_draw(self):
"""Draw the lines that depict the robot's path historically."""
if self.draw_positions:
try:
pygame.draw.lines(self.screen, (255, 0, 0),
False, self.robot.truth_pos)
pygame.draw.lines(self.screen, (0, 0, 255),
False, self.slam.odo_pos)
except ValueError:
pass
def toggle_positions(self):
"""Toggle whether or not the robot's historical path is visualised."""
        self.draw_positions = not self.draw_positions
def world_editor_setup(self):
"""Setup the world editor screen."""
_button_width = 110
_button_height = 40
_vert_padding = 20
_hor_padding = 20
_done_rect = pygame.Rect((self.screen.get_width() - _button_width - _hor_padding,
self.screen.get_height() - _button_height - _vert_padding),
(_button_width, _button_height))
self.we_done_btn = pygui.elements.UIButton(relative_rect=_done_rect,
text="Done",
manager=self.manager,
object_id="done_button")
_clear_rect = pygame.Rect((self.screen.get_width() - _button_width - _hor_padding,
_vert_padding),
(_button_width, _button_height))
self.we_clear_btn = pygui.elements.UIButton(relative_rect=_clear_rect,
text="Clear",
manager=self.manager,
object_id="setup_button")
_mode_rect = pygame.Rect((self.screen.get_width() - _button_width - _hor_padding,
_vert_padding * 2 + _button_height),
(_button_width, _button_height))
self.we_mode_btn = pygui.elements.UIButton(relative_rect=_mode_rect,
text="Erase",
manager=self.manager,
object_id="setup_button")
def world_editor_mode_button(self):
"""Toggle between draw/erase modes of the world editor."""
if self.we_draw_mode:
self.we_mode_btn.set_text("Draw")
self.we_draw_mode = False
else:
self.we_mode_btn.set_text("Erase")
self.we_draw_mode = True
def world_editor(self, _mouse_click, _pos):
"""Draw onto the world grid if mouse is down and draw the current world grid."""
def world_editor_button_hover(_bh_pos):
"""Return true if the position is within any of the world editor buttons."""
_return = np.array([self.we_clear_btn.hover_point(_bh_pos[0],
_bh_pos[1]),
self.we_done_btn.hover_point(_bh_pos[0],
_bh_pos[1]),
self.we_mode_btn.hover_point(_bh_pos[0],
_bh_pos[1])])
return _return.any()
def world_editor_centre_hover(_ch_pos):
"""Return true if the position is within where the robot will spawn."""
_hor_cen = self.screen.get_width() / 2
_vert_cen = self.screen.get_height() / 2
_robot_size = self.robot.robot.robot_size
_return = np.array([_ch_pos[0] > _hor_cen - _robot_size,
_ch_pos[0] < _hor_cen + _robot_size,
_ch_pos[1] < _vert_cen + _robot_size,
_ch_pos[1] > _vert_cen - _robot_size])
return _return.all()
def pos_to_grid(_pos):
"""Converts game space coordinates to world map grid coordinates."""
return int(_pos / self.world.size)
if _mouse_click:
if self.world.world_type == "Occupancy Grid" or not self.we_draw_mode:
                # In Occupancy Grid mode (or when erasing), find the distance between
                # the last known mouse position and the current one so the points on
                # the line between them can be filled in
                if self.last_mouse_pos is not None:
_last_point_dis = utils.point_distance(self.last_mouse_pos[0], _pos[0],
_pos[1], self.last_mouse_pos[1])
else:
_last_point_dis = 0
# If clicking on a button don't draw anything
if (_last_point_dis < 8 and world_editor_button_hover(_pos)) or _last_point_dis == 0:
_line = []
else:
_line = utils.line_between(self.last_mouse_pos[0],
self.last_mouse_pos[1],
_pos[0], _pos[1])
# Write to the grid map all the points on the line if not in the spawn space
for _point in _line:
if not world_editor_centre_hover(_point):
self.world.write_to_map(self.we_draw_mode,
pos_to_grid(_point[0]),
pos_to_grid(_point[1]))
self.last_mouse_pos = _pos
elif self.world.world_type == "Landmarks":
# If in landmark mode, only place one wall per click
if self.we_raise_click:
if not world_editor_centre_hover(_pos):
self.world.write_to_map(self.we_draw_mode,
pos_to_grid(_pos[0]),
pos_to_grid(_pos[1]))
self.we_raise_click = False
for i in range(len(self.world.grid)):
for j in range(len(self.world.grid[0])):
if self.world.grid[i][j]:
pygame.draw.rect(self.screen,
(0, 0, 0),
pygame.Rect((j * self.world.size, i * self.world.size),
(self.world.size, self.world.size)))
def reset(self):
"""Reset the game state."""
self.robot.reset()
self.slam.reset()
```
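Both files in this repository call into a `utils` module that isn't included in this dump. From the call sites above (`point_distance(x1, x2, y1, y2)` and `line_between(x1, y1, x2, y2)`), a minimal sketch of what it likely provides is shown below; the repository's actual implementation may differ.
```python
# Hypothetical reconstruction of utils.py, inferred from its call sites.
import numpy as np


def point_distance(_x_1, _x_2, _y_1, _y_2):
    """Euclidean distance between (_x_1, _y_1) and (_x_2, _y_2)."""
    return np.sqrt(np.square(_x_2 - _x_1) + np.square(_y_2 - _y_1))


def line_between(_x_1, _y_1, _x_2, _y_2):
    """Return points on the segment (_x_1, _y_1)-(_x_2, _y_2), roughly one per pixel.

    Plain linear interpolation stands in for whatever the repository actually
    uses (e.g. Bresenham); the callers only need a dense list of (x, y) points.
    """
    _steps = max(int(point_distance(_x_1, _x_2, _y_1, _y_2)), 1)
    return [(_x_1 + (_x_2 - _x_1) * _i / _steps,
             _y_1 + (_y_2 - _y_1) * _i / _steps)
            for _i in range(_steps + 1)]
```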
#### File: jordan-gleeson/slam-visualiser/slam_visualiser.py
```python
import time
import operator
import random
import numpy as np
import pygame
import pygame_gui as pygui
import utils
import gui
import copy
class Game():
"""Main game class.
Creates the game screen. Contains the main game loop which handles the order of execution of
robot and SLAM functionality.
"""
def __init__(self):
# pygame setup
pygame.init()
pygame.key.set_repeat(300, 30)
self.screen = pygame.display.set_mode((1280, 720), pygame.SRCALPHA)
self.screen.fill((255, 255, 255))
self.clock = pygame.time.Clock()
# Create a white background
self.background = pygame.Surface(self.screen.get_size(),
pygame.SRCALPHA)
self.background = self.background.convert()
self.background.fill((255, 255, 255))
self.menu_background = copy.copy(self.background)
self.menu_background.fill((57, 65, 101))
pygame.display.flip()
# Setup classes
self.world = World(self.screen)
self.robot = RobotControl(self.screen, self.world)
self.slam = SLAM(self.screen, self.robot)
self.gui = gui.GUI(self.screen, self.world, self.robot, self.slam)
self.font = pygame.font.Font(None, 30)
self.state = 0
self.main()
def main(self):
"""Main game loop."""
_playing_game = True
_world_edited = False
while _playing_game:
_time_delta = self.clock.tick(30) / 1000.0
self.screen.blit(self.background, (0, 0))
for _event in pygame.event.get():
if _event.type == pygame.QUIT:
_playing_game = False
break
if _event.type == pygame.USEREVENT:
self.gui.input(_event)
if _event.type == pygame.MOUSEBUTTONUP:
self.gui.last_mouse_pos = None
self.gui.we_raise_click = True
if _event.type == pygame.KEYDOWN:
if _event.key == pygame.K_r:
self.gui.reset()
self.gui.manager.process_events(_event)
# Main Menu
if self.state == 0:
if self.gui.main_menu_state == 0:
self.state += 1
self.gui.setup_game(_world_edited)
self.init_game()
elif self.gui.main_menu_state == 2:
self.state = 2
_world_edited = True
self.gui.kill_main_menu()
self.gui.world_editor_setup()
else:
self.world.world_type = self.gui.slam_type_drop.selected_option
# Simulation
elif self.state == 1:
self.robot.change_velocity(pygame.key.get_pressed())
self.world.draw()
self.slam.update()
self.robot.update()
self.slam.odometry(self.robot.odo_velocity)
if self.robot.robot.new_sample:
self.slam.occupancy_grid()
self.robot.robot.new_sample = False
# World Editor
elif self.state == 2:
if self.gui.main_menu_state == 1:
self.state = 0
self.gui.main_menu()
self.gui.kill_world_editor()
self.gui.world_editor(pygame.mouse.get_pressed()[0],
pygame.mouse.get_pos())
_fps = self.font.render(str(int(self.clock.get_fps())),
True,
pygame.Color('green'))
self.screen.blit(_fps, (3, 3))
self.gui.update(_time_delta)
pygame.display.update()
pygame.quit()
def init_game(self):
self.robot.robot.setup_lasers()
self.robot.update()
class Robot(pygame.sprite.Sprite):
"""Sprite the robot player object.
Handles the attributes of the robot, including its collision mask. Also handles robot state
updates including translational and rotational changes. This class also contains the lidar
sensor calculations.
Attributes:
_p_screen: The main pygame screen surface.
_p_world: The world map as drawn by the World class.
"""
def __init__(self, _p_screen, _p_world):
pygame.sprite.Sprite.__init__(self)
self.screen = _p_screen
self.world = _p_world
self.image = pygame.image.load("robot.png")
self.robot_size = 50
self.image = pygame.transform.smoothscale(self.image,
(self.robot_size, self.robot_size))
self.image = pygame.transform.rotate(self.image, 90)
self.image_size = self.image.get_size()
self.og_image = self.image.copy()
self.rect = self.image.get_rect()
self.x_pos = float(self.screen.get_size()[0] / 2)
self.y_pos = float(self.screen.get_size()[1] / 2)
self.angle = 0
self.rect.center = (self.x_pos, self.y_pos)
self.hitbox = pygame.Rect(self.x_pos - (self.image_size[0] / 2),
self.y_pos - (self.image_size[1] / 2),
self.image_size[0] + 2,
self.image_size[1] + 2)
self.mask = pygame.mask.from_surface(self.image)
self.draw_lidar = True
# Lidar setup
self.sample_rate = 5 # Hz
self.lidar_state = 0
self.sample_count = 32
self.angle_ref = []
self.new_sample = True
self.initial_laser_length = int(utils.point_distance(self.screen.get_width(), 0,
self.screen.get_height(), 0))
def setup_lasers(self):
"""Setup the lasers coming from the robot depending on observation type."""
if self.world.world_type == "Occupancy Grid":
self.point_cloud = [[0, 0] for _ in range(self.sample_count)]
self.lasers = pygame.sprite.Group()
_lidar = pygame.math.Vector2()
_lidar.xy = (self.x_pos, self.y_pos)
for i in range(self.sample_count):
_degree_multiplier = 360 / self.sample_count
_cur_angle = int(i * _degree_multiplier)
self.angle_ref.append(_cur_angle)
_laser = pygame.math.Vector2()
_laser.from_polar((self.initial_laser_length, _cur_angle))
_laser_sprite = OG_Laser(self.screen, _lidar, _laser)
self.lasers.add(_laser_sprite)
self.lasers_draw = pygame.sprite.Group()
elif self.world.world_type == "Landmarks":
self.point_cloud = [[0, 0]
for _ in range(self.world.landmark_count)]
_landmark_list = self.world.wall_list.sprites()
self.lasers = []
for _landmark in _landmark_list:
_new_laser = LM_Laser(self.screen,
(self.x_pos, self.y_pos),
_landmark.rect.center)
self.angle_ref.append(_landmark.rect.center)
self.lasers.append(_new_laser)
def reset(self):
"""Reset the robots position and sensor data."""
self.x_pos = float(self.screen.get_size()[0] / 2)
self.y_pos = float(self.screen.get_size()[1] / 2)
self.angle = 0
self.rect.center = (self.x_pos, self.y_pos)
self.hitbox = pygame.Rect(self.x_pos - (self.image_size[0] / 2),
self.y_pos - (self.image_size[1] / 2),
self.image_size[0] + 2,
self.image_size[1] + 2)
if self.world.world_type == "Occupancy Grid":
self.point_cloud = [[0, 0]
for _ in range(self.sample_count)]
elif self.world.world_type == "Landmarks":
self.point_cloud = [[0, 0]
for _ in range(self.world.landmark_count)]
def update(self):
"""Updates the position of the robot's rect, hitbox and mask."""
self.rect.center = (self.x_pos, self.y_pos)
self.hitbox.center = (self.x_pos, self.y_pos)
self.mask = pygame.mask.from_surface(self.image)
if self.world.world_type == "Occupancy Grid":
self.lidar()
elif self.world.world_type == "Landmarks":
self.landmark_sensor()
if self.draw_lidar:
for _point in self.point_cloud:
_coords = [int(_point[0] * np.cos(_point[1]) + self.x_pos),
int(_point[0] * np.sin(_point[1]) + self.y_pos)]
pygame.draw.aaline(self.screen,
(255, 0, 0, 255),
(self.x_pos, self.y_pos),
_coords)
pygame.draw.circle(self.screen,
(0, 0, 255, 255),
_coords, 3)
def toggle_lidar(self):
"""Toggle whether or not the lidar sensor is visualised."""
        self.draw_lidar = not self.draw_lidar
def rotate(self, _direction):
"""Rotates the robot around it's centre."""
self.image = pygame.transform.rotate(self.og_image, _direction)
self.rect = self.image.get_rect()
self.rect.center = (self.x_pos, self.y_pos)
def lidar(self):
"""Performs all calculations for laser range finding and handles the drawing of lasers.
This function uses sprites to determine all of the objects each laser around the robot is
colliding with, then finds the closest wall. It then finds the closest point on that wall
to the robot.
"""
# TODO: Fix flickering on some diagonal lasers
# TODO: Make lasers that don't find a result return max length instead of previous result
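        # Spread the full laser sweep across several frames: e.g. with
        # sample_count = 32 and sample_rate = 5 Hz at 30 FPS, roughly 5 lasers
        # are updated per frame over 6 frames, completing one scan per period.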
_iterations_per_frame = int(
self.sample_count / (30 / self.sample_rate))
_slice_from = self.lidar_state * _iterations_per_frame
if self.lidar_state == (30 // self.sample_rate) - 2:
# Ensure final slice gets the remainder
_slice_to = self.sample_count
else:
_slice_to = _slice_from + _iterations_per_frame
# Update the position of each of the laser sprites in self.lasers
_lidar = pygame.math.Vector2()
_lidar.xy = (self.x_pos, self.y_pos)
for _sprite in self.lasers.sprites()[_slice_from:_slice_to]:
_sprite.origin = _lidar
_sprite.update()
# Check wall collisions in quadrants
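        # Each _quad_list entry is [angle range in degrees, x comparator, y comparator]:
        # lasers are bucketed by heading quadrant and walls by position relative to the
        # robot, so the expensive mask collisions are only tested within a quadrant.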
_quad_list = [[[0, 90], operator.ge, operator.ge],
[[90, 181], operator.lt, operator.ge],
[[-90, 0], operator.ge, operator.lt],
[[-181, -90], operator.lt, operator.lt]]
_collision_list = {}
_pixel_buffer = self.world.size * 2
for _quad in _quad_list:
_quad_lasers = pygame.sprite.Group()
_quad_walls = pygame.sprite.Group()
for _laser in self.lasers.sprites()[_slice_from:_slice_to]:
_cur_angle = int(_laser.angle.as_polar()[1])
if _cur_angle >= _quad[0][0] and _cur_angle < _quad[0][1]:
_quad_lasers.add(_laser)
for _wall in self.world.wall_list:
_cur_pos = _wall.rect.center
if _quad[1] == operator.ge:
_x_buf = self.x_pos - _pixel_buffer
else:
_x_buf = self.x_pos + _pixel_buffer
if _quad[2] == operator.ge:
_y_buf = self.y_pos - _pixel_buffer
else:
_y_buf = self.y_pos + _pixel_buffer
if _quad[1](_cur_pos[0], _x_buf):
if _quad[2](_cur_pos[1], _y_buf):
_quad_walls.add(_wall)
_collision_list.update(pygame.sprite.groupcollide(_quad_lasers,
_quad_walls,
False,
False,
pygame.sprite.collide_mask))
if _collision_list:
for _laser in _collision_list:
# For each laser, find the closest wall to the robot it is colliding with
_closest_wall = None
_closest_distance = self.initial_laser_length
for _wall in _collision_list[_laser]:
cur_distance = utils.point_distance(self.x_pos,
_wall.rect.center[0],
self.y_pos,
_wall.rect.center[1])
if cur_distance < _closest_distance:
_closest_wall = _wall
_closest_distance = cur_distance
# Find the closest point on the closest wall to the robot
_current_pos = pygame.math.Vector2()
_current_pos.update(self.x_pos, self.y_pos)
_heading = _laser.angle
_direction = _heading.normalize()
_closest_point = [self.initial_laser_length,
self.initial_laser_length]
for _ in range(self.initial_laser_length):
_current_pos += _direction
if _closest_wall.rect.collidepoint(_current_pos):
_r = np.sqrt(np.square(self.x_pos - _current_pos.x)
+ np.square(self.y_pos - _current_pos.y))
_theta = np.arctan2(-(self.y_pos - _current_pos.y), -
(self.x_pos - _current_pos.x))
_closest_point = [_r, _theta]
break
# Write resulting point to the point cloud
if not _closest_point == [self.initial_laser_length, self.initial_laser_length]:
_cur_angle = (round(_heading.as_polar()[1]) + 450) % 360
try:
self.point_cloud[self.angle_ref.index(
_cur_angle)] = _closest_point
except ValueError:
pass
if self.lidar_state == (30 // self.sample_rate) - 1:
self.new_sample = True
self.lidar_state = 0
else:
self.lidar_state += 1
def landmark_sensor(self):
for _laser in self.lasers:
_laser.update((self.x_pos, self.y_pos))
self.point_cloud[self.angle_ref.index(
_laser.destination)] = _laser.polar
class RobotControl():
"""Controls the robot.
Handles the robot's translation and rotation based on user input, including collisions,
acceleration and deceleration.
Attributes:
_p_screen: The main pygame screen surface.
_p_world: The world map as drawn by the World class.
"""
def __init__(self, _p_screen, _p_world):
self.screen = _p_screen
self.robot = Robot(self.screen, _p_world)
self.world = _p_world
# (+x velocity, +y velocity, velocity magnitude) pixels/tick
self.velocity = [0, 0, 0]
self.odo_velocity = self.velocity
self.max_velocity = 4
self.acceleration = 0.5
self.cur_keys = []
self.angular_velocity = 6
self.dummy_screen = pygame.Surface(self.screen.get_size())
self.collision_list = []
self.recursion_depth = 0
self.truth_pos = []
def reset(self):
"""Reset the robot's attributes, including position and velocities."""
self.robot.x_pos = self.screen.get_size()[0] / 2
self.robot.y_pos = self.screen.get_size()[1] / 2
self.robot.rect.center = (self.robot.x_pos, self.robot.y_pos)
self.velocity = [0, 0, 0]
self.odo_velocity = self.velocity
self.robot.angle = 0
self.truth_pos = []
self.robot.reset()
self.update()
def update(self):
"""Update all aspects of the robot, including velocities, position and lidar sensor."""
self.move_velocity()
self.robot.rotate(self.robot.angle)
self.robot.update()
self.screen.blit(self.robot.image, self.robot.rect)
def move_velocity(self):
"""Controls the robot's position.
        This function takes in the Robot.velocity vector. The collision method returns
        which side, if any, of the robot is colliding. The velocity in that direction is
        then set to zero, so the robot maintains its velocity along the perpendicular
        axis but stops moving towards the collision. The robot's position is then
        updated. If the robot isn't receiving input to move forward, the velocities
        are decelerated.
"""
# Check if a collision has occurred, and zero the velocity axis associated with it.
_collision_side = self.collision_detector()
self.collision_list.append(_collision_side)
if len(self.collision_list) > 3:
self.collision_list.pop(0)
if not _collision_side:
self.collision_list = []
if "TOP" in self.collision_list:
if self.velocity[1] < 0:
self.velocity[1] = 0
if "BOTTOM" in self.collision_list:
if self.velocity[1] > 0:
self.velocity[1] = 0
if "RIGHT" in self.collision_list:
if self.velocity[0] > 0:
self.velocity[0] = 0
if "LEFT" in self.collision_list:
if self.velocity[0] < 0:
self.velocity[0] = 0
# Update robot position according to the velocity vector.
self.robot.x_pos += self.velocity[0]
self.robot.y_pos += self.velocity[1]
self.robot.rect.center = (self.robot.x_pos, self.robot.y_pos)
self.odo_velocity = self.velocity
if len(self.truth_pos) > 1000:
self.truth_pos.pop(0)
self.truth_pos.append([self.robot.x_pos, self.robot.y_pos])
# Decelerate the velocity vector if no forward input is received.
_deceleration = self.acceleration / 2
if "UP" not in self.cur_keys:
if self.velocity[0] > 0:
self.velocity[0] -= _deceleration
if self.velocity[0] < 0:
self.velocity[0] += _deceleration
if self.velocity[1] > 0:
self.velocity[1] -= _deceleration
if self.velocity[1] < 0:
self.velocity[1] += _deceleration
        if abs(self.velocity[0]) < _deceleration:
            self.velocity[0] = 0
        if abs(self.velocity[1]) < _deceleration:
            self.velocity[1] = 0
def change_velocity(self, _keys):
"""Controls the robot's velocity.
This function receives input from the user and updates the Robot.angular_velocity and
Robot.velocity vectors accordingly.
Attributes:
_keys: An array containing the current state of all keys.
"""
        # Get input and set the rotation according to the angular velocity.
_pressed_keys = self.convert_key(_keys)
if "RIGHT" in _pressed_keys:
self.robot.angle -= self.angular_velocity
if "LEFT" in _pressed_keys:
self.robot.angle += self.angular_velocity
# Bind the robot.angle to remain < 180 and > -180.
if self.robot.angle > 180:
self.robot.angle = -180 + (self.robot.angle - 180)
elif self.robot.angle < -180:
self.robot.angle = 180 + (self.robot.angle + 180)
# Calculate the current magnitude of the velocity vector.
_speed = self.acceleration * 2
self.velocity[2] = np.sqrt(
np.square(self.velocity[0]) + np.square(self.velocity[1]))
# Calculate the axis velocity components according to the current direction and desired
# speed.
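        # The +90 offset and the negation convert the sprite's heading (0 degrees =
        # facing up, counter-clockwise positive) into a screen-space unit vector in
        # which y grows downwards.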
_x_vec = np.cos(-1 * np.deg2rad(self.robot.angle + 90)) * _speed
_y_vec = np.sin(-1 * np.deg2rad(self.robot.angle + 90)) * _speed
if "UP" in _pressed_keys:
self.velocity[0] += self.acceleration * _x_vec
self.velocity[1] += self.acceleration * _y_vec
self.velocity[2] = np.sqrt(
np.square(self.velocity[0]) + np.square(self.velocity[1]))
# Normalise the velocity vectors if the velocity's magnitude is greater than the
# desired maximum velocity.
if self.velocity[2] > self.max_velocity:
_divider = self.max_velocity / \
np.sqrt(
np.square(self.velocity[0]) + np.square(self.velocity[1]))
self.velocity[0] = _divider * self.velocity[0]
self.velocity[1] = _divider * self.velocity[1]
def convert_key(self, _keys):
"""Converts the pressed key information into a string array.
This function takes the passed array of pygame keys and converts it to a list of the
currently pressed keys.
Attributes:
            _keys: An array containing the current state of all keys.
"""
_action = False
_keys_to_check = [[pygame.K_LEFT, "LEFT"],
[pygame.K_RIGHT, "RIGHT"],
[pygame.K_UP, "UP"],
[pygame.K_DOWN, "DOWN"],
[pygame.K_r, "R"]]
for _key in _keys_to_check:
if _keys[_key[0]]:
if _key[1] not in self.cur_keys:
self.cur_keys.append(_key[1])
_action = True
else:
try:
self.cur_keys.remove(_key[1])
except ValueError:
pass
# When a key is added, remove the first keys so that only the last two remain
if _action:
self.cur_keys = self.cur_keys[-2:]
else:
self.cur_keys = []
return self.cur_keys
def collision_detector(self):
"""Finds if the robot is colliding and the associated side.
This function uses sprites to determine all of the objects the robot is colliding with,
then finds the closest wall to determine which side of the robot is colliding. To solve for
cases where the robot is colliding with two walls simultaneously, the function utilises
recursion to find the second closest wall.
"""
_collision_list = pygame.sprite.spritecollide(self.robot,
self.world.wall_list,
False,
pygame.sprite.collide_mask)
if len(_collision_list) > 0:
# Find the closest colliding wall
_closest_distance = self.robot.initial_laser_length
_closest_wall = None
for _wall in _collision_list:
cur_distance = utils.point_distance(self.robot.x_pos,
_wall.rect.center[0],
self.robot.y_pos,
_wall.rect.center[1])
if cur_distance < _closest_distance:
s_closest_wall = _closest_wall
_closest_wall = _wall
_closest_distance = cur_distance
# If performing recursion, find the second closest wall
            if self.recursion_depth > 0 and s_closest_wall is not None:
_closest_wall = s_closest_wall
_wall = _closest_wall
# Find which side of the robot is closest to the closest wall
_sides = [self.robot.hitbox.midtop, self.robot.hitbox.midright,
self.robot.hitbox.midbottom, self.robot.hitbox.midleft]
_closest_side = -1
_closest_side_distance = self.robot.initial_laser_length
for _i, _side in enumerate(_sides):
distance = utils.point_distance(_side[0],
_wall.rect.center[0],
_side[1],
_wall.rect.center[1])
if distance < _closest_side_distance:
_closest_side_distance = distance
_closest_side = _i
_to_return = None
if _closest_side == 0:
_to_return = "TOP"
if _closest_side == 1:
_to_return = "RIGHT"
if _closest_side == 2:
_to_return = "BOTTOM"
if _closest_side == 3:
_to_return = "LEFT"
# If the robot is already colliding with a wall, collide the second closest wall
if len(self.collision_list) > 0:
if _to_return == self.collision_list[len(self.collision_list) - 1]:
if self.recursion_depth <= 1:
self.recursion_depth += 1
return self.collision_detector()
self.recursion_depth = 0
return _to_return
return None
class OG_Laser(pygame.sprite.Sprite):
"""Sprite for the lidar sensor's laser beams.
Handles the attributes of each laser. Uses invisible surfaces to calculate positional offsets
for each laser depending on its given rotation. Also contains the laser's collision mask. It
also handles the positional updates sent from RobotControl.
Attributes:
_p_screen: The main pygame screen surface.
_origin: A pygame.math.Vector2() object that is the robot's base position.
_angle: A pygame.math.Vector2() object that contains polar coordinates stating the laser's
length and direction _angle.
"""
def __init__(self, _p_screen, _origin, _angle):
pygame.sprite.Sprite.__init__(self)
# Use a "dummy" surface to determine the width and height of the rotated laser rect
_dummy_screen = pygame.Surface(
(_p_screen.get_height() * 2, _p_screen.get_width() * 2),
pygame.SRCALPHA)
_dummy_rect = pygame.draw.line(_dummy_screen,
(0, 255, 0, 255),
_origin + _origin,
_origin + _origin + _angle)
self.origin = _origin
self.angle = _angle
_int_angle = int(_angle.as_polar()[1])
# Find an offset for the laser's draw position depending on its angle
if 0 <= _int_angle <= 90:
self.x_offset = 0
self.y_offset = 0
elif _int_angle > 90:
self.x_offset = -_dummy_rect.width
self.y_offset = 0
elif _int_angle < -90:
self.x_offset = -_dummy_rect.width
self.y_offset = -_dummy_rect.height
elif -90 <= _int_angle < 0:
self.x_offset = 0
self.y_offset = -_dummy_rect.height
self.screen = _p_screen
self.image = pygame.Surface((_dummy_rect.width, _dummy_rect.height),
pygame.SRCALPHA)
self.new_start = (self.origin.x + self.x_offset,
self.origin.y + self.y_offset)
self.rect = pygame.draw.aaline(self.image,
(255, 0, 0, 255),
(-self.x_offset, - self.y_offset),
(int(_angle.x - self.x_offset),
int(_angle.y - self.y_offset)))
self.mask = pygame.mask.from_surface(self.image, 50)
def update(self):
"""Update the laser's position."""
self.new_start = (self.origin.x + self.x_offset,
self.origin.y + self.y_offset)
self.rect.topleft = self.new_start
class LM_Laser():
"""Laser object containing the attributes of each landmark sensor laser.
Attributes:
_p_screen: The main pygame screen surface.
_origin: A set of coordinates containing the robot's position.
_destination: A set of coordinates containing the location of the detected landmark.
"""
def __init__(self, _p_screen, _origin, _destination):
self.screen = _p_screen
self.destination = _destination
self.update(_origin)
def update(self, _origin):
"""Update the laser's position."""
self.origin = _origin
self.angle = self.find_angle(_origin, self.destination)
self.length = utils.point_distance(_origin[0], self.destination[0],
_origin[1], self.destination[1])
self.polar = (self.length, self.angle)
def find_angle(self, _origin, _destination):
return np.arctan2(_destination[1] - _origin[1],
_destination[0] - _origin[0])
class Wall(pygame.sprite.Sprite):
"""Sprite for the lidar sensor's laser beams.
Handles the attributes of each laser. Uses invisible surfaces to calculate positional offsets
for each laser depending on its given rotation. Also contains the laser's collision mask.
Attributes:
_top: The desired pixel for the top of the wall.
_left: The desired pixel for the left of the wall.
_width: The desired width of the wall.
_height: The desired height of the wall.
"""
def __init__(self, _left, _top, _width, _height):
pygame.sprite.Sprite.__init__(self)
self.rect = pygame.Rect(_left, _top, _width, _height)
self.color = (0, 0, 0, 255)
self.image = pygame.Surface((_width, _height), pygame.SRCALPHA)
self.image.fill(self.color)
self.mask = pygame.mask.from_threshold(self.image,
pygame.Color('black'),
(1, 1, 1, 255))
def update(self, _color):
"""Update the wall's colour.
Used for debugging purposes only at this stage.
"""
self.image.fill(_color)
class World():
"""Writes and draws the world map.
    Handles the attributes of the world map and draws it to the screen.
Attributes:
_p_screen: The main pygame screen surface.
"""
def __init__(self, _p_screen):
self.screen = _p_screen
self.size = 20
self.grid = [[0 for _ in range(self.screen.get_size()[0] // self.size)]
for __ in range(self.screen.get_size()[1] // self.size)]
self.wall_list = pygame.sprite.Group()
self.world_type = "Occupancy Grid"
self.landmark_count = 10
def write_map(self, _robot_size):
"""Draws the world map into an array of 1s and 0s."""
if self.world_type == "Occupancy Grid":
for i, _ in enumerate(self.grid):
for j, __ in enumerate(self.grid[0]):
if i == 0 or i == len(self.grid) - 1 or j == 0 or j == len(self.grid[0]) - 1:
self.grid[i][j] = 1
else:
self.grid[i][j] = 0
if 20 < i < 30:
if 20 < j < 30:
self.grid[i][j] = 1
elif self.world_type == "Landmarks":
_landmark_list = []
for i in range(self.landmark_count):
_r_point = [random.randrange(0, len(self.grid)),
random.randrange(0, len(self.grid[0]))]
_hor_cen = self.screen.get_width() / 2
_vert_cen = self.screen.get_height() / 2
_return = np.array([_r_point[1] * self.size > _hor_cen - _robot_size / 2,
_r_point[1] * self.size < _hor_cen +
_robot_size / 2,
_r_point[0] * self.size < _vert_cen +
_robot_size / 2,
_r_point[0] * self.size > _vert_cen - _robot_size / 2])
if not _return.all():
_landmark_list.append(_r_point)
for _point in _landmark_list:
self.grid[_point[0]][_point[1]] = 1
def create_sprites(self):
"""Add sprites in the positions indicated by the self.grid array to a sprite group."""
self.wall_list.empty()
for i in range(len(self.grid)):
for j in range(len(self.grid[0])):
if self.grid[i][j]:
wall_rect = Wall(j * self.size,
i * self.size,
self.size,
self.size)
self.wall_list.add(wall_rect)
def clear_map(self):
self.grid = [[0 for _ in range(self.screen.get_size()[0] // self.size)]
for __ in range(self.screen.get_size()[1] // self.size)]
def write_to_map(self, _mode, _x, _y):
if _mode:
self.grid[_y][_x] = 1
else:
self.grid[_y][_x] = 0
def draw(self):
"""Draw the world map."""
self.wall_list.draw(self.screen)
class SLAM():
"""Contains all aspects of the SLAM algorithm (WIP).
Handles calculations and drawing of the occupancy grid map. Creates fake odometry positioning.
Attributes:
_p_screen: The main pygame screen surface.
_p_robot: The robot object.
"""
def __init__(self, _p_screen, _p_robot):
self.screen = _p_screen
self.robot = _p_robot
# Occupancy Grid Setup
self.grid_size = 11
self.grid = [[0.5 for _ in range(self.screen.get_size()[0] // self.grid_size)]
for __ in range(self.screen.get_size()[1] // self.grid_size)]
self.show_occupancy_grid = False
# Odometry Setup
self.odo_x = self.robot.robot.x_pos
self.odo_y = self.robot.robot.y_pos
self.odo_error = 0.2
self.odo_pos = []
def reset(self):
"""Reset the SLAM state."""
self.grid = [[0.5 for _ in range(self.screen.get_size()[0] // self.grid_size)]
for __ in range(self.screen.get_size()[1] // self.grid_size)]
self.odo_x = self.robot.robot.x_pos
self.odo_y = self.robot.robot.y_pos
self.odo_pos = []
def update(self):
"""Update SLAM visuals."""
if self.show_occupancy_grid:
self.draw_grid()
def odometry(self, _vel_vector):
"""Adds a random error to the positional data within a percentage tolerance."""
try:
self.odo_x += np.random.normal(_vel_vector[0], np.abs(_vel_vector[0]) * self.odo_error)
self.odo_y += np.random.normal(_vel_vector[1], np.abs(_vel_vector[1]) * self.odo_error)
if len(self.odo_pos) > 1000:
self.odo_pos.pop(0)
self.odo_pos.append([self.odo_x, self.odo_y])
except ValueError:
pass
def occupancy_grid(self):
"""Occupance grid algorithm.
Loops through all points in the point cloud and lowers the probability of a space in the
grid being occupied if it is found on a line between the robot and a point, and increases
the probability if it is found at the end-point of the laser.
"""
_rate_of_change = 0.05 # The rate at which the probability of a point is changed
_pc = self.robot.robot.point_cloud
for _point in _pc:
try: # Catch instances where the end-point may be out of the game screen
_coords = [int(_point[0] * np.cos(_point[1]) + self.odo_x), # Convert to cartesian
int(_point[0] * np.sin(_point[1]) + self.odo_y)]
# Loop through the points in between the robot and the end-point of a laser
for _clear in utils.line_between(self.robot.robot.x_pos // self.grid_size,
self.robot.robot.y_pos // self.grid_size,
_coords[0] // self.grid_size,
_coords[1] // self.grid_size)[:-1]:
# Decrease occupancy probability
self.grid[int(_clear[1])][int(
_clear[0])] -= _rate_of_change
if self.grid[int(_clear[1])][int(_clear[0])] < 0:
self.grid[int(_clear[1])][int(_clear[0])] = 0
_grid_y = int(_coords[1] // self.grid_size)
_grid_x = int(_coords[0] // self.grid_size)
# Increase occupancy probability of the end-point
self.grid[_grid_y][_grid_x] += _rate_of_change
if self.grid[_grid_y][_grid_x] > 1:
self.grid[_grid_y][_grid_x] = 1
except IndexError:
pass
def toggle_occupancy_grid(self):
"""Toggle whether or not the occupancy grid is visualised."""
        self.show_occupancy_grid = not self.show_occupancy_grid
def draw_grid(self):
"""Draw the occupancy grid as a function of its probability as its alpha."""
for i in range(len(self.grid)):
for j in range(len(self.grid[0])):
_alpha = 1 - self.grid[i][j]
_rect = pygame.Rect(j * self.grid_size,
i * self.grid_size,
self.grid_size,
self.grid_size)
pygame.draw.rect(self.screen,
(255 * _alpha, 255 * _alpha, 255 * _alpha),
_rect)
if __name__ == '__main__':
Game()
```
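As a standalone illustration of the noise model in `SLAM.odometry` above: each tick the estimated position advances by a sample drawn from a normal distribution centred on the true velocity, with a standard deviation of 20% of its magnitude, so the odometry error accumulates as a random walk. A minimal sketch, independent of the game loop (the helper name is illustrative):
```python
import numpy as np


def drift_demo(_steps=300, _vel=(2.0, 0.0), _error=0.2, _seed=0):
    """Accumulate SLAM.odometry-style noise over a straight-line run."""
    _rng = np.random.default_rng(_seed)
    _truth = np.zeros(2)
    _odo = np.zeros(2)
    for _ in range(_steps):
        _truth += np.asarray(_vel)
        # Mean = true velocity, std = 20% of its magnitude, as in odometry().
        _odo += _rng.normal(_vel, np.abs(_vel) * _error)
    return float(np.linalg.norm(_truth - _odo))


print(f"drift after 300 ticks: {drift_demo():.1f} px")
```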
{
"source": "jordan-hamilton/petnet-feeder-service",
"score": 2
}
#### File: alembic/versions/317ddf3509cb_make_most_gateway_device_fields_optional.py
```python
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "317ddf3509cb"
down_revision = "390373fdfa3c"
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table("kronos_device") as batch_op:
batch_op.alter_column("name", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("softwareName", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("softwareVersion", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("type", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("uid", existing_type=sa.TEXT(), nullable=True)
with op.batch_alter_table("kronos_gateway") as batch_op:
batch_op.alter_column("name", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("osName", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("sdkVersion", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("softwareName", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("softwareVersion", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("type", existing_type=sa.TEXT(), nullable=True)
batch_op.alter_column("uid", existing_type=sa.TEXT(), nullable=True)
def downgrade():
connection = op.get_bind()
connection.execute("pragma foreign_keys=OFF")
for field_name in [
"uid",
"type",
"softwareVersion",
"softwareName",
"osName",
"sdkVersion",
"name",
]:
connection.execute(
f"UPDATE kronos_gateway SET {field_name} = 'unknown' WHERE {field_name} IS NULL"
)
for field_name in ["uid", "type", "softwareVersion", "softwareName", "name"]:
connection.execute(
f"UPDATE kronos_device SET {field_name} = 'unknown' WHERE {field_name} IS NULL"
)
with op.batch_alter_table("kronos_gateway") as batch_op:
batch_op.alter_column("uid", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("type", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column(
"softwareVersion", existing_type=sa.TEXT(), nullable=False
)
batch_op.alter_column("softwareName", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("sdkVersion", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("osName", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("name", existing_type=sa.TEXT(), nullable=False)
with op.batch_alter_table("kronos_device") as batch_op:
batch_op.alter_column("uid", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("type", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column(
"softwareVersion", existing_type=sa.TEXT(), nullable=False
)
batch_op.alter_column("softwareName", existing_type=sa.TEXT(), nullable=False)
batch_op.alter_column("name", existing_type=sa.TEXT(), nullable=False)
connection.execute("pragma foreign_keys=ON")
```
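For reference, this migration would typically be applied or rolled back through Alembic's Python API (or the equivalent `alembic upgrade`/`downgrade` CLI); the `alembic.ini` path here is an assumption:
```python
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed to point at the project's database
command.upgrade(cfg, "317ddf3509cb")    # make the listed columns nullable
command.downgrade(cfg, "390373fdfa3c")  # backfill NULLs with 'unknown', restore NOT NULL
```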
#### File: tests/test_routes/test_feeder.py
```python
import pytz
import datetime
from fastapi.testclient import TestClient
def test_feeder_list_no_devices(client: TestClient):
response = client.get("/api/v1/feeder")
assert response.status_code == 200
assert response.json() == []
def test_feeder_list_devices(client: TestClient, with_registered_device: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get("/api/v1/feeder")
assert response.status_code == 200
devices = response.json()
assert len(devices) == 1
assert devices[0]["hid"] == SAMPLE_DEVICE_HID
def test_feeder_list_feed_history(client: TestClient, with_sample_feed: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get("/api/v1/feeder/history")
assert response.status_code == 200
results = response.json()
assert len(results["data"]) == 1
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/history")
assert response.status_code == 200
results = response.json()
assert len(results["data"]) == 1
def test_feeder_get_device(client: TestClient, with_registered_device: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}")
assert response.status_code == 200
device = response.json()
assert device["hid"] == SAMPLE_DEVICE_HID
def test_feeder_delete_device(client: TestClient, with_registered_device: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.delete(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}")
assert response.status_code == 200
response = client.get("/api/v1/feeder")
assert response.status_code == 200
assert response.json() == []
def test_feeder_telem(client: TestClient, with_registered_device: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/telemetry")
assert response.status_code == 400
assert response.json()["detail"] == "Unknown device or device has not yet reported!"
def test_feeder_update_device(client: TestClient, with_stored_recipe: None, mocker):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
timezone = mocker.patch(
"feeder.api.routers.feeder.router.client.send_cmd_utc_offset"
)
front_button = mocker.patch(
"feeder.api.routers.feeder.router.client.send_cmd_button"
)
recipe = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd_budget")
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}",
json={"timezone": "America/Chicago", "frontButton": False, "currentRecipe": 1},
)
assert response.status_code == 200
device = response.json()
assert device["hid"] == SAMPLE_DEVICE_HID
assert device["timezone"] == "America/Chicago"
assert not device["frontButton"]
assert device["currentRecipe"] == 1
timezone.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID,
device_id=SAMPLE_DEVICE_HID,
utc_offset=int(
datetime.datetime.now(pytz.timezone("America/Chicago"))
.utcoffset()
.total_seconds()
),
)
front_button.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID, device_id=SAMPLE_DEVICE_HID, enable=False
)
recipe.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID,
device_id=SAMPLE_DEVICE_HID,
recipe_id=1,
tbsp_per_feeding=1,
g_per_tbsp=8,
budget_tbsp=3,
)
def test_feeder_update_device_seperate_calls(
client: TestClient, with_stored_recipe: None, mocker
):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
timezone = mocker.patch(
"feeder.api.routers.feeder.router.client.send_cmd_utc_offset"
)
front_button = mocker.patch(
"feeder.api.routers.feeder.router.client.send_cmd_button"
)
recipe = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd_budget")
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}",
json={
"timezone": "Not A Real Timezone",
},
)
assert response.status_code == 500
assert response.json()["detail"] == "Unknown timezone!"
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}",
json={
"timezone": "America/Chicago",
},
)
assert response.status_code == 200
device = response.json()
assert device["hid"] == SAMPLE_DEVICE_HID
assert device["timezone"] == "America/Chicago"
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}",
json={
"frontButton": False,
},
)
assert response.status_code == 200
device = response.json()
assert device["hid"] == SAMPLE_DEVICE_HID
assert not device["frontButton"]
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}", json={"currentRecipe": 1}
)
assert response.status_code == 200
device = response.json()
assert device["hid"] == SAMPLE_DEVICE_HID
assert device["currentRecipe"] == 1
    # Test that sending an unknown recipe doesn't return an error,
    # but also doesn't send anything to the feeder.
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}", json={"currentRecipe": 999}
)
assert response.status_code == 200
timezone.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID,
device_id=SAMPLE_DEVICE_HID,
utc_offset=int(
datetime.datetime.now(pytz.timezone("America/Chicago"))
.utcoffset()
.total_seconds()
),
)
front_button.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID, device_id=SAMPLE_DEVICE_HID, enable=False
)
recipe.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID,
device_id=SAMPLE_DEVICE_HID,
recipe_id=1,
tbsp_per_feeding=1,
g_per_tbsp=8,
budget_tbsp=3,
)
def test_feeder_set_hopper(client: TestClient, with_stored_recipe: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.post(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/hopper",
json={
"level": 100,
},
)
assert response.status_code == 200
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/hopper")
assert response.status_code == 200
assert response.json()["level"] == 100
def test_feeder_reboot(client: TestClient, with_registered_device: None, mocker):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
cmd = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd_reboot")
response = client.post(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/restart")
assert response.status_code == 200
cmd.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID, device_id=SAMPLE_DEVICE_HID
)
def test_feeder_feed(client: TestClient, with_registered_device: None, mocker):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
cmd = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd_feed")
response = client.post(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/feed", json={"portion": 0.0625}
)
assert response.status_code == 200
cmd.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID, device_id=SAMPLE_DEVICE_HID, portion=0.0625
)
def test_feeder_raw(client: TestClient, with_registered_device: None, mocker):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
response = client.post(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/raw", json={"command": "test", "args": {}}
)
assert response.status_code == 403
assert (
response.json()["detail"] == "Raw communication only available in DEBUG mode!"
)
mocker.patch("feeder.api.routers.feeder.settings.debug", return_value=True)
cmd = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd")
response = client.post(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/raw", json={"command": "test", "args": {}}
)
assert response.status_code == 200
cmd.assert_called_once_with(SAMPLE_GATEWAY_HID, SAMPLE_DEVICE_HID, "test", {})
def test_feeder_get_recipe_none(client: TestClient, with_registered_device: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/recipe")
assert response.status_code == 400
assert response.json()["detail"] == "No recipe set for this device!"
def test_feeder_get_recipe(client: TestClient, with_stored_recipe: None):
from tests.test_database_models import SAMPLE_DEVICE_HID
response = client.get(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/recipe")
assert response.status_code == 200
assert response.json()["tbsp_per_feeding"] == 1
def test_feeder_new_recipe(client: TestClient, with_registered_device: None, mocker):
from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID
cmd = mocker.patch("feeder.api.routers.feeder.router.client.send_cmd_budget")
recipe = {
"g_per_tbsp": 7,
"tbsp_per_feeding": 1,
"name": "Recipe Name One",
"budget_tbsp": 3,
}
response = client.put(f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/recipe", json=recipe)
assert response.status_code == 200
assert response.json() == {"id": 1, **recipe}
cmd.assert_called_once_with(
gateway_id=SAMPLE_GATEWAY_HID,
device_id=SAMPLE_DEVICE_HID,
recipe_id=1,
tbsp_per_feeding=recipe["tbsp_per_feeding"],
g_per_tbsp=recipe["g_per_tbsp"],
budget_tbsp=recipe["budget_tbsp"],
)
response = client.put(
f"/api/v1/feeder/{SAMPLE_DEVICE_HID}/recipe", json={**recipe, "g_per_tbsp": 8}
)
assert response.status_code == 200
assert response.json() == {"id": 1, **recipe, "g_per_tbsp": 8}
```
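These tests lean on fixtures (`client`, `with_registered_device`, `with_stored_recipe`) and constants (`SAMPLE_DEVICE_HID`, `SAMPLE_GATEWAY_HID`) defined in the project's `conftest.py` and `tests/test_database_models.py`, which aren't part of this dump. A hypothetical minimal shape for the `client` fixture, assuming the FastAPI app is importable as `feeder.main:app`:
```python
# Hypothetical sketch; the project's real conftest.py may differ.
import pytest
from fastapi.testclient import TestClient


@pytest.fixture
def client():
    from feeder.main import app  # assumed application entry point
    with TestClient(app) as test_client:
        yield test_client
```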
{
"source": "jordanhatcher/Automation",
"score": 3
}
#### File: Automation/src/condition_initializer.py
```python
import logging
LOGGER = logging.getLogger(__name__)
def init_conditions(condition_config, scheduler, loaded_modules):
"""
Loads conditions based on configuration in the config file
"""
conditions = {}
for condition_name, config in condition_config.items():
LOGGER.debug(f'Loading condition {condition_name}')
condition_module = loaded_modules[condition_name]
condition_class = getattr(condition_module,
condition_module.CONDITION_CLASS_NAME)
if config is not None:
schedule = config.get('schedule')
inputs = config.get('inputs', [])
outputs = config.get('outputs', [])
new_condition = condition_class(scheduler,
schedule=schedule,
inputs=inputs,
outputs=outputs)
else:
new_condition = condition_class(scheduler)
conditions[condition_name] = new_condition
return conditions
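# A usage sketch with a hypothetical config shape, inferred from the lookups
# above (condition names key into `loaded_modules`; a None value falls back to
# condition_class(scheduler)):
#
# condition_config = {
#     'sunset_lights': {
#         'schedule': '0 18 * * *',
#         'inputs': ['light_sensor'],
#         'outputs': ['porch_light'],
#     },
#     'motion_monitor': None,
# }
# conditions = init_conditions(condition_config, scheduler, loaded_modules)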
``` |
{
"source": "jordan-heemskerk/sarpy",
"score": 2
} |
#### File: general/nitf_elements/nitf_head.py
```python
import logging
from .base import NITFElement, UserHeaderType, _IntegerDescriptor,\
_StringDescriptor, _StringEnumDescriptor, _NITFElementDescriptor, _RawDescriptor, \
_ItemArrayHeaders
from .security import NITFSecurityTags, NITFSecurityTags0
from sarpy.compliance import string_types
__classification__ = "UNCLASSIFIED"
__author__ = "<NAME>"
#############
# NITF 2.1 version
class ImageSegmentsType(_ItemArrayHeaders):
"""
This holds the image subheader and item sizes.
"""
_subhead_len = 6
_item_len = 10
class GraphicsSegmentsType(_ItemArrayHeaders):
"""
This holds the graphics subheader and item sizes.
"""
_subhead_len = 4
_item_len = 6
class TextSegmentsType(_ItemArrayHeaders):
"""
This holds the text subheader size and item sizes.
"""
_subhead_len = 4
_item_len = 5
class DataExtensionsType(_ItemArrayHeaders):
"""
This holds the data extension subheader and item sizes.
"""
_subhead_len = 4
_item_len = 9
class ReservedExtensionsType(_ItemArrayHeaders):
"""
This holds the reserved extension subheader and item sizes.
"""
_subhead_len = 4
_item_len = 7
class NITFHeader(NITFElement):
"""
The main NITF file header for NITF version 2.1 - see standards document
MIL-STD-2500C for more information.
"""
_ordering = (
'FHDR', 'FVER', 'CLEVEL', 'STYPE',
'OSTAID', 'FDT', 'FTITLE', 'Security',
'FSCOP', 'FSCPYS', 'ENCRYP', 'FBKGC',
'ONAME', 'OPHONE', 'FL', 'HL',
'ImageSegments', 'GraphicsSegments', 'NUMX',
'TextSegments', 'DataExtensions', 'ReservedExtensions',
'UserHeader', 'ExtendedHeader')
_lengths = {
'FHDR': 4, 'FVER': 5, 'CLEVEL': 2, 'STYPE': 4,
'OSTAID': 10, 'FDT': 14, 'FTITLE': 80,
'FSCOP': 5, 'FSCPYS': 5, 'ENCRYP': 1, 'FBKGC': 3,
'ONAME': 24, 'OPHONE': 18, 'FL': 12, 'HL': 6,
'NUMX': 3}
CLEVEL = _IntegerDescriptor(
'CLEVEL', True, 2, default_value=0,
docstring='Complexity Level. This field shall contain the complexity level required to '
'interpret fully all components of the file. Valid entries are assigned in '
'accordance with complexity levels established in Table A-10.') # type: int
STYPE = _StringDescriptor(
'STYPE', True, 4, default_value='BF01',
docstring='Standard Type. Standard type or capability. A BCS-A character string `BF01` '
'which indicates that this file is formatted using ISO/IEC IS 12087-5. '
'NITF02.10 is intended to be registered as a profile of ISO/IEC IS 12087-5.') # type: str
OSTAID = _StringDescriptor(
'OSTAID', True, 10, default_value='',
docstring='Originating Station ID. This field shall contain the identification code or name of '
'the originating organization, system, station, or product. It shall not be '
                  'filled with BCS spaces.') # type: str
FDT = _StringDescriptor(
'FDT', True, 14, default_value='',
        docstring='File Date and Time. This field shall contain the time (UTC) of the file\'s '
'origination in the format `YYYYMMDDhhmmss`.') # type: str
FTITLE = _StringDescriptor(
'FTITLE', True, 80, default_value='',
docstring='File Title. This field shall contain the title of the file.') # type: str
Security = _NITFElementDescriptor(
'Security', True, NITFSecurityTags, default_args={},
docstring='The image security tags.') # type: NITFSecurityTags
FSCOP = _IntegerDescriptor(
'FSCOP', True, 5, default_value=0,
docstring='File Copy Number. This field shall contain the copy number of the file.') # type: int
FSCPYS = _IntegerDescriptor(
'FSCPYS', True, 5, default_value=0,
docstring='File Number of Copies. This field shall contain the total number of '
'copies of the file.') # type: int
ENCRYP = _StringEnumDescriptor(
'ENCRYP', True, 1, {'0'}, default_value='0',
docstring='Encryption.') # type: str
FBKGC = _RawDescriptor(
'FBKGC', True, 3, default_value=b'\x00\x00\x00',
docstring='File Background Color. This field shall contain the three color components of '
'the file background in the order Red, Green, Blue.') # type: bytes
ONAME = _StringDescriptor(
'ONAME', True, 24, default_value='',
docstring='Originator Name. This field shall contain a valid name for the operator '
'who originated the file.') # type: str
OPHONE = _StringDescriptor(
'OPHONE', True, 18, default_value='',
docstring='Originator Phone Number. This field shall contain a valid phone number '
'for the operator who originated the file.') # type: str
FL = _IntegerDescriptor(
'FL', True, 12, docstring='The size in bytes of the entire file.') # type: int
ImageSegments = _NITFElementDescriptor(
'ImageSegments', True, ImageSegmentsType, default_args={},
docstring='The image segment basic information.') # type: ImageSegmentsType
GraphicsSegments = _NITFElementDescriptor(
'GraphicsSegments', True, GraphicsSegmentsType, default_args={},
docstring='The graphics segment basic information.') # type: GraphicsSegmentsType
TextSegments = _NITFElementDescriptor(
'TextSegments', True, TextSegmentsType, default_args={},
docstring='The text segment basic information.') # type: TextSegmentsType
DataExtensions = _NITFElementDescriptor(
'DataExtensions', True, DataExtensionsType, default_args={},
docstring='The data extension basic information.') # type: DataExtensionsType
ReservedExtensions = _NITFElementDescriptor(
'ReservedExtensions', True, ReservedExtensionsType, default_args={},
docstring='The reserved extension basic information.') # type: ReservedExtensionsType
UserHeader = _NITFElementDescriptor(
'UserHeader', True, UserHeaderType, default_args={},
docstring='User defined header.') # type: UserHeaderType
ExtendedHeader = _NITFElementDescriptor(
'ExtendedHeader', True, UserHeaderType, default_args={},
docstring='Extended subheader - TRE list.') # type: UserHeaderType
def __init__(self, **kwargs):
self._FHDR = 'NITF'
self._FVER = '02.10'
self._NUMX = 0
super(NITFHeader, self).__init__(**kwargs)
@property
def FHDR(self):
"""
str: File Profile Name. This field shall contain the character string uniquely denoting
that the file is formatted using NITF. Always `NITF`.
"""
return self._FHDR
@FHDR.setter
def FHDR(self, value):
pass
@property
def FVER(self):
"""
str: File Version. This field shall contain a BCS-A character string uniquely
denoting the version. Always `02.10`.
"""
return self._FVER
@FVER.setter
def FVER(self, value):
pass
@property
def NUMX(self):
"""
int: Reserved for future use. Always :code:`0`.
"""
return self._NUMX
@NUMX.setter
def NUMX(self, value):
pass
@property
def HL(self):
"""
int: The length of this header object in bytes.
"""
return self.get_bytes_length()
@HL.setter
def HL(self, value):
pass
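# Usage sketch (illustrative values; fields are fixed-width per MIL-STD-2500C):
# header = NITFHeader(OSTAID='EXAMPLE', FTITLE='Sample NITF 2.1 file')
# header.FVER  # always '02.10' -- the setter is a deliberate no-op
# header.HL    # header length in bytes, derived from get_bytes_length()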
#############
# NITF 2.0 version
class SymbolSegmentsType(_ItemArrayHeaders):
"""
This holds the symbol subheader and item sizes.
"""
_subhead_len = 4
_item_len = 6
class LabelSegmentsType(_ItemArrayHeaders):
"""
This holds the label subheader and item sizes.
"""
_subhead_len = 4
_item_len = 3
class NITFHeader0(NITFElement):
"""
The main NITF file header for NITF version 2.0 - see standards document
MIL-STD-2500A for more information.
"""
_ordering = (
'FHDR', 'FVER', 'CLEVEL', 'STYPE', 'OSTAID', 'FDT', 'FTITLE', 'Security',
'FSCOP', 'FSCPYS', 'ENCRYP', 'ONAME', 'OPHONE', 'FL', 'HL',
'ImageSegments', 'SymbolSegments', 'LabelSegments', 'TextSegments',
'DataExtensions', 'ReservedExtensions', 'UserHeader', 'ExtendedHeader')
_lengths = {
'FHDR': 4, 'FVER': 5, 'CLEVEL': 2, 'STYPE': 4,
'OSTAID': 10, 'FDT': 14, 'FTITLE': 80,
'FSCOP': 5, 'FSCPYS': 5, 'ENCRYP': 1,
'ONAME': 27, 'OPHONE': 18, 'FL': 12, 'HL': 6}
CLEVEL = _IntegerDescriptor(
'CLEVEL', True, 2, default_value=0,
docstring='Complexity Level. This field shall contain the complexity level required to '
'interpret fully all components of the file. Valid entries are assigned in '
'accordance with complexity levels established in Table A-10.') # type: int
STYPE = _StringDescriptor(
'STYPE', True, 4, default_value='BF01',
docstring='Standard Type. Standard type or capability. A BCS-A character string `BF01` '
'which indicates that this file is formatted using ISO/IEC IS 12087-5. '
'NITF02.10 is intended to be registered as a profile of ISO/IEC IS 12087-5.') # type: str
OSTAID = _StringDescriptor(
'OSTAID', True, 10, default_value='',
docstring='Originating Station ID. This field shall contain the identification code or name of '
'the originating organization, system, station, or product. It shall not be '
                  'filled with BCS spaces.') # type: str
FDT = _StringDescriptor(
'FDT', True, 14, default_value='',
        docstring='File Date and Time. This field shall contain the time (UTC) of the file\'s '
'origination in the format `YYYYMMDDhhmmss`.') # type: str
FTITLE = _StringDescriptor(
'FTITLE', True, 80, default_value='',
docstring='File Title. This field shall contain the title of the file.') # type: str
Security = _NITFElementDescriptor(
'Security', True, NITFSecurityTags0, default_args={},
docstring='The image security tags.') # type: NITFSecurityTags0
FSCOP = _IntegerDescriptor(
'FSCOP', True, 5, default_value=0,
docstring='File Copy Number. This field shall contain the copy number of the file.') # type: int
FSCPYS = _IntegerDescriptor(
'FSCPYS', True, 5, default_value=0,
docstring='File Number of Copies. This field shall contain the total number of '
'copies of the file.') # type: int
ENCRYP = _StringEnumDescriptor(
'ENCRYP', True, 1, {'0'}, default_value='0',
docstring='Encryption.') # type: str
ONAME = _StringDescriptor(
'ONAME', True, 27, default_value='',
docstring='Originator Name. This field shall contain a valid name for the operator '
'who originated the file.') # type: str
OPHONE = _StringDescriptor(
'OPHONE', True, 18, default_value='',
docstring='Originator Phone Number. This field shall contain a valid phone number '
'for the operator who originated the file.') # type: str
FL = _IntegerDescriptor(
'FL', True, 12, docstring='The size in bytes of the entire file.') # type: int
ImageSegments = _NITFElementDescriptor(
'ImageSegments', True, ImageSegmentsType, default_args={},
docstring='The image segment basic information.') # type: ImageSegmentsType
SymbolSegments = _NITFElementDescriptor(
'SymbolSegments', True, SymbolSegmentsType, default_args={},
docstring='The symbols segment basic information.') # type: SymbolSegmentsType
LabelSegments = _NITFElementDescriptor(
'LabelSegments', True, LabelSegmentsType, default_args={},
docstring='The labels segment basic information.') # type: LabelSegmentsType
TextSegments = _NITFElementDescriptor(
'TextSegments', True, TextSegmentsType, default_args={},
docstring='The text segment basic information.') # type: TextSegmentsType
DataExtensions = _NITFElementDescriptor(
'DataExtensions', True, DataExtensionsType, default_args={},
docstring='The data extension basic information.') # type: DataExtensionsType
ReservedExtensions = _NITFElementDescriptor(
'ReservedExtensions', True, ReservedExtensionsType, default_args={},
docstring='The reserved extension basic information.') # type: ReservedExtensionsType
UserHeader = _NITFElementDescriptor(
'UserHeader', True, UserHeaderType, default_args={},
docstring='User defined header.') # type: UserHeaderType
ExtendedHeader = _NITFElementDescriptor(
'ExtendedHeader', True, UserHeaderType, default_args={},
docstring='Extended subheader - TRE list.') # type: UserHeaderType
def __init__(self, **kwargs):
self._FHDR = 'NITF'
self._FVER = '02.00'
super(NITFHeader0, self).__init__(**kwargs)
@property
def FHDR(self):
"""
str: File Profile Name. This field shall contain the character string uniquely denoting
that the file is formatted using NITF. Always `NITF`.
"""
return self._FHDR
@FHDR.setter
def FHDR(self, value):
pass
@property
def FVER(self):
"""
str: File Version. This field shall contain a BCS-A character string uniquely
denoting the version, should generally be `02.00` or `01.10`.
"""
return self._FVER
@FVER.setter
def FVER(self, value):
if isinstance(value, bytes) and not isinstance(value, string_types):
value = value.decode('utf-8')
if not isinstance(value, string_types):
raise TypeError('FVER is required to be a string')
if len(value) != 5:
raise ValueError('FVER must have length 5')
if value not in ['02.00', '01.10']:
logging.warning('Got unexpected version {}, and NITF parsing is likely to fail.'.format(value))
self._FVER = value
@property
def HL(self):
"""
int: The length of this header object in bytes.
"""
return self.get_bytes_length()
@HL.setter
def HL(self, value):
pass
``` |
{
"source": "jordanhitchcock/cred",
"score": 2
} |
#### File: cred/tests/test_borrowing.py
```python
from datetime import datetime
from dateutil.relativedelta import relativedelta
import pandas as pd
import pytest
from cred.borrowing import _Borrowing, FixedRateBorrowing
from cred.interest_rate import actual360, thirty360
from cred.businessdays import modified_following, NYBankHolidayCalendar
# Test _Borrowing and PeriodicBorrowing
@pytest.fixture
def borrowing():
return _Borrowing()
@pytest.fixture
def simple_borrowing_subclass():
class SubBorrowing(_Borrowing):
def set_period_values(self, period):
period.add_display_field(0.09, 'interest_rate')
period.add_payment(9, 'interest')
return period
return SubBorrowing()
def test_period(simple_borrowing_subclass):
assert simple_borrowing_subclass.period(0).index == 0
assert simple_borrowing_subclass.period(5).index == 5
with pytest.raises(IndexError) as error:
simple_borrowing_subclass.period(-1)
assert 'Cannot access period with index less than 0' in str(error.value)
# with pytest.raises(IndexError):
# borrowing.period(1000)
def test_context_manager(simple_borrowing_subclass):
assert len(simple_borrowing_subclass._cached_periods) == 0
with simple_borrowing_subclass as sbs:
sbs.period(2)
assert len(sbs._cached_periods) == 1
sbs.period(0)
assert len(sbs._cached_periods) == 2
assert len(simple_borrowing_subclass._cached_periods) == 0
def test_create_period(simple_borrowing_subclass):
assert simple_borrowing_subclass._create_period(2).index == 2
with pytest.raises(ValueError) as error:
simple_borrowing_subclass._create_period(-1)
assert 'Value for period index must be greater than or equal to 0' in str(error.value)
# TODO: Index out of upper bound range
# with pytest.raises(IndexError):
# borrowing.create_period(1000)
def test_set_period_values(borrowing, simple_borrowing_subclass):
with pytest.raises(NotImplementedError):
borrowing.set_period_values(0)
p = simple_borrowing_subclass.period(0)
assert p.get_payment() == 9
assert p.schedule() == {
'index': 0,
'interest_rate': 0.09,
'interest': 9
}
# Test FixedRateBorrowing
# Fixed rate interest only
@pytest.fixture
def fixed_io_no_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2022, 1, 1),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=None,
year_frac=actual360,
pmt_convention=modified_following,
holiday_calendar=NYBankHolidayCalendar()
)
@pytest.fixture
def fixed_io_start_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 16),
end_date=datetime(2022, 1, 1),
first_reg_start=datetime(2020, 2, 1),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=None,
year_frac=actual360,
pmt_convention=modified_following,
holiday_calendar=NYBankHolidayCalendar()
)
@pytest.fixture
def fixed_io_end_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2022, 1, 16),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=None,
year_frac=actual360,
pmt_convention=modified_following,
holiday_calendar=NYBankHolidayCalendar()
)
@pytest.fixture
def fixed_io_start_and_end_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 16),
end_date=datetime(2022, 1, 16),
freq=relativedelta(months=1),
first_reg_start=datetime(2020, 2, 1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=None,
year_frac=actual360,
pmt_convention=modified_following,
holiday_calendar=NYBankHolidayCalendar()
)
def test_fixed_io_no_stubs_schedule(fixed_io_no_stubs):
expected_no_stubs = pd.read_csv('tests/data/test_fixed_io_schedule_no_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_no_stubs, fixed_io_no_stubs.schedule())
def test_fixed_io_start_stub_schedule(fixed_io_start_stub):
expected_start_stub = pd.read_csv('tests/data/test_fixed_io_schedule_start_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_start_stub, fixed_io_start_stub.schedule())
def test_fixed_io_end_stub_schedule(fixed_io_end_stub):
expected_end_stub = pd.read_csv('tests/data/test_fixed_io_schedule_end_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_end_stub, fixed_io_end_stub.schedule())
def test_fixed_io_start_and_end_stubs_schedule(fixed_io_start_and_end_stubs):
expected_start_and_end_stub = pd.read_csv('tests/data/test_fixed_io_schedule_start_and_end_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_start_and_end_stub, fixed_io_start_and_end_stubs.schedule())
def test_date_index_no_stubs(fixed_io_no_stubs):
assert fixed_io_no_stubs.date_index(datetime(2020, 1, 1)) == 0 # borrowing start date
assert fixed_io_no_stubs.date_index(datetime(2020, 1, 2)) == 0
assert fixed_io_no_stubs.date_index(datetime(2020, 1, 31)) == 0
assert fixed_io_no_stubs.date_index(datetime(2020, 2, 1)) == 1
assert fixed_io_no_stubs.date_index(datetime(2021, 12, 31)) == 23
assert fixed_io_no_stubs.date_index(datetime(2022, 1, 1)) == 23 # borrowing end date
with pytest.raises(IndexError):
fixed_io_no_stubs.date_index(datetime(2019, 12, 31)) # before borrowing start date
with pytest.raises(IndexError):
fixed_io_no_stubs.date_index(datetime(2022, 1, 4)) # after final pmt date
def test_date_index_start_stub(fixed_io_start_stub):
assert fixed_io_start_stub.date_index(datetime(2020, 1, 16)) == 0 # borrowing start date
assert fixed_io_start_stub.date_index(datetime(2020, 1, 18)) == 0
assert fixed_io_start_stub.date_index(datetime(2020, 1, 31)) == 0
assert fixed_io_start_stub.date_index(datetime(2020, 2, 1)) == 1
assert fixed_io_start_stub.date_index(datetime(2021, 12, 31)) == 23
assert fixed_io_start_stub.date_index(datetime(2022, 1, 1)) == 23 # borrowing end date
with pytest.raises(IndexError):
fixed_io_start_stub.date_index(datetime(2020, 1, 1)) # before borrowing start date
with pytest.raises(IndexError):
fixed_io_start_stub.date_index(datetime(2022, 1, 4)) # after final pmt date
def test_date_index_end_stub(fixed_io_end_stub):
assert fixed_io_end_stub.date_index(datetime(2020, 1, 1)) == 0 # borrowing start date
assert fixed_io_end_stub.date_index(datetime(2020, 1, 18)) == 0
assert fixed_io_end_stub.date_index(datetime(2020, 1, 31)) == 0
assert fixed_io_end_stub.date_index(datetime(2020, 2, 1)) == 1
assert fixed_io_end_stub.date_index(datetime(2021, 12, 31)) == 23
assert fixed_io_end_stub.date_index(datetime(2022, 1, 1)) == 24
assert fixed_io_end_stub.date_index(datetime(2022, 1, 16)) == 24
with pytest.raises(IndexError):
fixed_io_end_stub.date_index(datetime(2019, 12, 31)) # before borrowing start date
with pytest.raises(IndexError):
fixed_io_end_stub.date_index(datetime(2022, 1, 19)) # after final pmt dt
def test_date_index_start_and_end_stubs(fixed_io_start_and_end_stubs):
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 1, 16)) == 0 # borrowing start date
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 1, 18)) == 0
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 1, 31)) == 0
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 2, 1)) == 1
assert fixed_io_start_and_end_stubs.date_index(datetime(2021, 12, 31)) == 23
assert fixed_io_start_and_end_stubs.date_index(datetime(2022, 1, 1)) == 24
assert fixed_io_start_and_end_stubs.date_index(datetime(2022, 1, 16)) == 24
with pytest.raises(IndexError):
fixed_io_start_and_end_stubs.date_index(datetime(2019, 12, 31)) # before borrowing start date
with pytest.raises(IndexError):
fixed_io_start_and_end_stubs.date_index(datetime(2022, 1, 19)) # after final pmt date
# one day stub periods
fixed_io_start_and_end_stubs.start_date = datetime(2020, 1, 31)
fixed_io_start_and_end_stubs.end_date = datetime(2022, 1, 2)
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 1, 31)) == 0
assert fixed_io_start_and_end_stubs.date_index(datetime(2020, 2, 1)) == 1
assert fixed_io_start_and_end_stubs.date_index(datetime(2022, 1, 1)) == 24
assert fixed_io_start_and_end_stubs.date_index(datetime(2022, 1, 2)) == 24
# Fixed rate with constant payment amortization
@pytest.fixture
def fixed_constant_amort_no_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2022, 1, 1),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=250,
year_frac=actual360
)
# TODO: No amort on stub payment
@pytest.fixture
def fixed_constant_amort_start_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 17),
end_date=datetime(2022, 1, 1),
freq=relativedelta(months=1),
first_reg_start=datetime(2020, 2, 1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=250,
year_frac=actual360
)
@pytest.fixture
def fixed_constant_amort_end_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2021, 12, 15),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=250,
year_frac=actual360
)
@pytest.fixture
def fixed_constant_amort_start_and_end_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 2),
end_date=datetime(2021, 12, 2),
freq=relativedelta(months=1),
first_reg_start=datetime(2020, 2, 1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=250,
year_frac=actual360
)
def test_fixed_constant_amort_no_stubs(fixed_constant_amort_no_stubs):
expected_no_stubs = pd.read_csv('tests/data/test_fixed_constant_amort_no_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_no_stubs, fixed_constant_amort_no_stubs.schedule())
def test_fixed_constant_amort_start_stub(fixed_constant_amort_start_stub):
expected_start_stub = pd.read_csv('tests/data/test_fixed_constant_amort_start_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_start_stub, fixed_constant_amort_start_stub.schedule())
def test_fixed_constant_amort_end_stub(fixed_constant_amort_end_stub):
expected_end_stub = pd.read_csv('tests/data/test_fixed_constant_amort_end_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_end_stub, fixed_constant_amort_end_stub.schedule())
def test_fixed_constant_amort_start_and_end_stubs(fixed_constant_amort_start_and_end_stubs):
expected_start_and_end_stub = pd.read_csv('tests/data/test_fixed_constant_amort_start_and_end_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_start_and_end_stub, fixed_constant_amort_start_and_end_stubs.schedule())
def test_fixed_constant_amort_partial_io(fixed_constant_amort_no_stubs, fixed_constant_amort_start_and_end_stubs):
fixed_constant_amort_no_stubs.io_periods = 6
expected_no_stub = pd.read_csv('tests/data/test_fixed_constant_amort_no_stubs_6mo_io.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_no_stub, fixed_constant_amort_no_stubs.schedule())
fixed_constant_amort_start_and_end_stubs.io_periods = 6
expected_end_and_start_stubs = pd.read_csv('tests/data/test_fixed_constant_amort_start_and_end_stubs_6mo_io.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_end_and_start_stubs, fixed_constant_amort_start_and_end_stubs.schedule())
# Fixed rate with custom amortization schedule
@pytest.fixture
def fixed_amortizing_custom_no_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2022, 1, 1),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=[5_000.0] * 23 + [885000.0],
year_frac=thirty360
)
@pytest.fixture
def fixed_amortizing_custom_start_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 16),
end_date=datetime(2022, 1, 1),
freq=relativedelta(months=1),
first_reg_start=datetime(2020, 2, 1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=[5_000.0] * 23 + [885000.0],
year_frac=thirty360
)
@pytest.fixture
def fixed_amortizing_custom_end_stub():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 1),
end_date=datetime(2022, 1, 15),
freq=relativedelta(months=1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=[5_000.0] * 24 + [880000.0],
year_frac=thirty360
)
@pytest.fixture
def fixed_amortizing_custom_start_and_end_stubs():
return FixedRateBorrowing(
start_date=datetime(2020, 1, 16),
end_date=datetime(2021, 12, 12),
freq=relativedelta(months=1),
first_reg_start=datetime(2020, 2, 1),
initial_principal=1_000_000.0,
coupon=0.12,
amort_periods=[5_000.0] * 23 + [885000.0],
year_frac=thirty360
)
def test_fixed_amortizing_custom_no_stubs(fixed_amortizing_custom_no_stubs):
expected_schedule = pd.read_csv('tests/data/test_fixed_amortizing_custom_no_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_schedule, fixed_amortizing_custom_no_stubs.schedule())
def test_fixed_amortizing_custom_start_stub(fixed_amortizing_custom_start_stub):
expected_schedule = pd.read_csv('tests/data/test_fixed_amortizing_custom_start_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_schedule, fixed_amortizing_custom_start_stub.schedule())
def test_fixed_amortizing_custom_end_stub(fixed_amortizing_custom_end_stub):
expected_schedule = pd.read_csv('tests/data/test_fixed_amortizing_custom_end_stub.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_schedule, fixed_amortizing_custom_end_stub.schedule())
def test_fixed_amortizing_custom_start_and_end_stubs(fixed_amortizing_custom_start_and_end_stubs):
expected_schedule = pd.read_csv('tests/data/test_fixed_amortizing_custom_start_and_end_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
pd.testing.assert_frame_equal(expected_schedule, fixed_amortizing_custom_start_and_end_stubs.schedule())
# Test outstanding balance
def test_outstanding_principal(fixed_constant_amort_start_and_end_stubs):
fixed_constant_amort_start_and_end_stubs.holiday_calendar = NYBankHolidayCalendar()
fixed_constant_amort_start_and_end_stubs.adjust_pmt_date = modified_following
fixed_constant_amort_start_and_end_stubs.end_date = datetime(2021, 12, 5)
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2020, 1, 1)) is None # before start date
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2020, 1, 2)) == pytest.approx(1000000.0) # closing date
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2020, 1, 15)) == pytest.approx(1000000.0)
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2020, 3, 1)) == pytest.approx(1000000.0) # period end date with pmt 3/2
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2020, 3, 2)) == pytest.approx(998760.225513185) # period end date with pmt 3/2
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2021, 12, 5)) == pytest.approx(981090.953492929) # maturity date
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2021, 12, 6)) == pytest.approx(0.0) # final pmt date
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2021, 12, 7)) == pytest.approx(0.0) # after final pmt date
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2021, 10, 1), include_dt=True) == pytest.approx(983991.709885) # on pmt dt
assert fixed_constant_amort_start_and_end_stubs.outstanding_principal(datetime(2021, 12, 6), include_dt=True) == pytest.approx(981090.953493) # final pmt date
def test_payments_scheduled_dt(fixed_constant_amort_start_and_end_stubs):
expected_schedule = pd.read_csv('tests/data/test_fixed_constant_amort_start_and_end_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
expected_dates = expected_schedule['end_date'].dt.to_pydatetime()
full_expected_output = list(zip(expected_dates, expected_schedule['payment']))
assert fixed_constant_amort_start_and_end_stubs.payments(datetime(2020, 1, 1), datetime(2020, 1, 1)) == []
assert fixed_constant_amort_start_and_end_stubs.payments(datetime(2020, 1, 2), datetime(2020, 1, 2)) == []
# Convert to Series/DF and compare due to issues with floats
pd.testing.assert_series_equal(
pd.Series(fixed_constant_amort_start_and_end_stubs.payments(datetime(2020, 1, 1), datetime(2020, 2, 1))[0]),
pd.Series(full_expected_output[0]))
pd.testing.assert_frame_equal(
pd.DataFrame(fixed_constant_amort_start_and_end_stubs.payments(datetime(2020, 2, 29), datetime(2020, 5, 1))),
pd.DataFrame(full_expected_output[1:4]))
pd.testing.assert_frame_equal(
pd.DataFrame(fixed_constant_amort_start_and_end_stubs.payments(None, datetime(2020, 5, 1))),
pd.DataFrame(full_expected_output[:4]))
pd.testing.assert_frame_equal(
pd.DataFrame(fixed_constant_amort_start_and_end_stubs.payments(datetime(2020, 5, 1), None)),
pd.DataFrame(full_expected_output[3:]))
assert fixed_constant_amort_start_and_end_stubs.payments(None, datetime(2020, 1, 1)) == []
assert fixed_constant_amort_start_and_end_stubs.payments(datetime(2025, 1, 1), None) == []
def test_payments_pmt_dt(fixed_io_start_and_end_stubs):
expected_schedule = pd.read_csv('tests/data/test_fixed_io_schedule_start_and_end_stubs.csv',
index_col='index',
parse_dates=[1, 2, 3])
expected_dates = expected_schedule['payment_date'].dt.to_pydatetime()
full_expected_output = list(zip(expected_dates, expected_schedule['payment']))
assert fixed_io_start_and_end_stubs.payments(datetime(2020, 1, 1), datetime(2020, 1, 1), pmt_dt=True) == []
pd.testing.assert_series_equal(
pd.Series(fixed_io_start_and_end_stubs.payments(datetime(2020, 1, 16), datetime(2020, 1, 16), pmt_dt=True)[0]),
pd.Series(full_expected_output[0]))
```
#### File: cred/tests/test_interest_rate.py
```python
from datetime import datetime
from dateutil.relativedelta import relativedelta
import pytest
from cred.interest_rate import actual360, thirty360, is_month_end
@pytest.mark.parametrize(
'dt1,dt2,expected',
[
(datetime(2019, 1, 1), datetime(2019, 1, 1), 0 / 360),
(datetime(2019, 1, 1), datetime(2021, 1, 1), 731 / 360),
(datetime(2019, 1, 1), datetime(2021, 1, 15), 745 / 360),
(datetime(2019, 1, 15), datetime(2021, 1, 17), 733 / 360),
(datetime(2019, 1, 31), datetime(2021, 1, 31), 731 / 360),
(datetime(2019, 1, 16), datetime(2017, 12, 31), -381 / 360),
]
)
def test_actual360(dt1, dt2, expected):
assert expected == actual360(dt1, dt2)
@pytest.mark.parametrize(
'dt1,dt2,expected',
[
(datetime(2019, 1, 1), datetime(2019, 1, 1), 0 / 360),
(datetime(2019, 1, 1), datetime(2021, 1, 1), 720 / 360),
(datetime(2019, 1, 1), datetime(2021, 1, 15), 734 / 360),
(datetime(2019, 1, 15), datetime(2021, 1, 17), 722 / 360),
(datetime(2019, 1, 31), datetime(2021, 1, 31), 720 / 360),
(datetime(2019, 1, 16), datetime(2017, 12, 31), -375 / 360),
]
)
def test_thirty360(dt1, dt2, expected):
assert expected == thirty360(dt1, dt2)
@pytest.mark.parametrize(
'dt,expected',
[
(datetime(2019, 1, 1), False),
(datetime(2019, 1, 31), True),
(datetime(2019, 6, 30), True),
(datetime(2019, 2, 28), True),
(datetime(2020, 2, 28), False),
(datetime(2020, 2, 29), True)
]
)
def test_is_month_end(dt, expected):
assert is_month_end(dt) == expected
# @pytest.mark.parametrize(
# 'rate,freq,expected',
# [
# (0, relativedelta(months=3), 0.0),
# (0.1, relativedelta(years=2), 0.21),
# (0.1, relativedelta(years=1), 0.1),
# (0.1, relativedelta(months=3), 0.0241136890844451),
# (0.1, relativedelta(months=1), 0.00797414042890376),
# (-0.05, relativedelta(months=3), -0.0127414550985662),
# (0.1, relativedelta(months=1, days=1), 0.008237380814549500)
# ]
# )
# def test_decompounded_periodic_rate(rate, freq, expected):
# assert decompounded_periodic_rate(rate, freq) + expected == pytest.approx(2 * expected)
#
#
# @pytest.mark.parametrize(
# 'rate,freq,expected',
# [
# (0, relativedelta(months=3), 0.0),
# (0.1, relativedelta(years=2), 0.2),
# (0.1, relativedelta(years=1), 0.1),
# (0.1, relativedelta(months=3), 0.025),
# (0.1, relativedelta(months=1), 0.008333333333333330),
# (-0.05, relativedelta(months=3), -0.0125),
# (0.1, relativedelta(months=1, days=1), 0.008607305936073060)
# ]
# )
# def test_simple_periodic_rate(rate, freq, expected):
# assert simple_periodic_rate(rate, freq) + expected == pytest.approx(2 * expected)
``` |
{
"source": "jordanhitchcock/holidaycal",
"score": 2
} |
#### File: holidaycal/test_holidaycal/test_easter.py
```python
from datetime import date
from dateutil.relativedelta import MO, relativedelta
import pytest
from holidaycal.easter import EasterDelta
# Fixtures
@pytest.fixture()
def start_date():
return date(2021, 1, 1)
# Tests
@pytest.mark.parametrize(
'easter_delta,expected',
[
(EasterDelta(), date(2021, 4, 4)),
(EasterDelta(years=1), date(2022, 4, 4)),
(EasterDelta(weeks=1), date(2021, 4, 11)),
(EasterDelta(weekday=MO(-1)), date(2021, 3, 29)),
(EasterDelta(month=11), date(2021, 11, 4))
]
)
def test_add(start_date, easter_delta, expected):
assert easter_delta + start_date == start_date + easter_delta == expected
# if other operand is not a date
assert EasterDelta().__add__(2) == NotImplemented
@pytest.mark.parametrize(
'easter_delta',
[
(EasterDelta()),
(EasterDelta(years=1)),
(EasterDelta(weeks=1)),
(EasterDelta(weekday=MO(-1))),
(EasterDelta(month=11))
]
)
def test_sub(start_date, easter_delta):
with pytest.raises(TypeError):
easter_delta - start_date
@pytest.mark.parametrize(
'easter_delta',
[
(EasterDelta()),
(EasterDelta(years=1)),
(EasterDelta(weeks=1)),
(EasterDelta(weekday=MO(-1))),
(EasterDelta(month=11))
]
)
def test_rsub(start_date, easter_delta):
    with pytest.raises(TypeError):
        start_date - easter_delta  # exercises __rsub__ (date - EasterDelta)
# if other operand is not a date
assert EasterDelta().__rsub__(2) == NotImplemented
def test_eq():
assert EasterDelta(days=1) == EasterDelta(days=1)
assert not EasterDelta(days=1) == EasterDelta(days=2)
assert not EasterDelta(days=1) == relativedelta(days=1)
def test_ne():
assert not EasterDelta(days=1) != EasterDelta(days=1)
assert EasterDelta(days=1) != EasterDelta(days=2)
def test_create_from_dates():
with pytest.raises(NotImplementedError):
EasterDelta(dt1=date(2021, 1, 1), dt2=date(2022, 1, 1))
def test_repr():
ed = EasterDelta(years=1, months=1, days=1,
year=2022, month=11, day=1)
assert ed.__repr__() == 'EasterDelta(years=+1, months=+1, days=+1, year=2022, month=11, day=1)'
def test_hashable():
assert hash(EasterDelta()) is not None
```
#### File: holidaycal/test_holidaycal/test_holiday.py
```python
from datetime import date
from dateutil.relativedelta import relativedelta, MO
import pytest
from holidaycal.easter import EasterDelta
from holidaycal.holiday import AbstractHoliday, ListHoliday, RecurringHoliday
from holidaycal.observance import nearest_weekday
def test_abstract_holiday_dates():
with pytest.raises(NotImplementedError):
AbstractHoliday('holiday').dates(date(2021, 1, 1), date(2022, 1, 1), False)
def test_recurring_construction():
with pytest.raises(ValueError):
RecurringHoliday('Incomplete Holiday')
with pytest.raises(ValueError):
RecurringHoliday('Overdefined Holiday', month=1, day=1, offset=relativedelta(month=11, weekday=MO(2)))
def test_recurring_dates_absolute():
def skip_years(dt):
if dt.year in [2023, 2024, 2025, 2026, 2029, 2030, 2031, 2032, 2033]:
return True
return False
holiday = RecurringHoliday('test', month=1, day=1, start_date=date(2022, 1, 1), end_date=date(2034, 1, 1),
observance=nearest_weekday, skip=skip_years)
assert holiday.dates(date(2021, 1, 1), date(2035, 1, 1)) == \
[date(2022, 1, 1), date(2027, 1, 1), date(2028, 1, 1), date(2034, 1, 1)]
assert holiday.dates(date(2021, 1, 1), date(2034, 1, 1), observed=True) == \
[date(2027, 1, 1), date(2027, 12, 31)]
assert holiday.__repr__() == 'RecurringHoliday: test (start year=2022, end year=2034, month=1, day=1, ' \
'observance=nearest_weekday, skip=skip_years)'
def test_recurring_dates_relative():
def skip_years(dt):
if dt.year in [2023, 2024, 2025, 2026, 2029, 2030, 2031, 2032, 2033]:
return True
return False
holiday = RecurringHoliday('test', offset=relativedelta(month=1, day=1), start_date=date(2022, 1, 1),
end_date=date(2034, 1, 1), observance=nearest_weekday, skip=skip_years)
assert holiday.dates(date(2021, 1, 1), date(2035, 1, 1)) == \
[date(2022, 1, 1), date(2027, 1, 1), date(2028, 1, 1), date(2034, 1, 1)]
assert holiday.dates(date(2021, 1, 1), date(2034, 1, 1), observed=True) == \
[date(2027, 1, 1), date(2027, 12, 31)]
assert holiday.__repr__() == 'RecurringHoliday: test (start year=2022, end year=2034, ' \
'offset=relativedelta(month=1, day=1), observance=nearest_weekday, skip=skip_years)'
def test_recurring_dates_list():
holiday = RecurringHoliday('test', offset=[relativedelta(month=11, weekday=MO(1)), relativedelta(days=1)],
start_date=date(2016, 11, 1), end_date=date(2021, 11, 2))
assert holiday.dates(date(2015, 1, 1), date(2022, 1, 1)) == [
date(2016, 11, 8), date(2017, 11, 7), date(2018, 11, 6), date(2019, 11, 5), date(2020, 11, 3), date(2021, 11, 2)
]
assert holiday.dates(date(2015, 1, 1), date(2022, 1, 1), observed=True) == [
date(2016, 11, 8), date(2017, 11, 7), date(2018, 11, 6), date(2019, 11, 5), date(2020, 11, 3), date(2021, 11, 2)
]
assert holiday.__repr__() == 'RecurringHoliday: test (start year=2016, end year=2021, ' \
'offset=[relativedelta(month=11, weekday=MO(+1)), relativedelta(days=+1)])'
def test_recurring_dates_easter():
holiday = RecurringHoliday('test easter', offset=EasterDelta(days=1))
assert holiday.dates(date(2016, 1, 1), date(2019, 12, 1)) == [date(2016, 3, 28), date(2017, 4, 17),
date(2018, 4, 2), date(2019, 4, 22)]
assert holiday.__repr__() == 'RecurringHoliday: test easter (offset=EasterDelta(days=+1))'
def test_list_holiday():
holiday = ListHoliday('List holiday',
[date(2021, 1, 1), date(2021, 1, 2), date(2021, 1, 3)],
observance=nearest_weekday)
assert holiday.dates(date(2021, 1, 1), date(2022, 1, 1)) == [date(2021, 1, 1), date(2021, 1, 2), date(2021, 1, 3)]
assert holiday.dates(date(2021, 1, 1), date(2022, 1, 1), observed=True) == \
[date(2021, 1, 1), date(2021, 1, 1), date(2021, 1, 4)]
assert holiday.__repr__() == 'ListHoliday: List holiday (number of dates=3, observance=nearest_weekday)'
``` |
{
"source": "jordanhoare/aws-upskilling",
"score": 2
} |
#### File: hit_counter/hit_counter/hitcounter.py
```python
from aws_cdk import aws_dynamodb as ddb
from aws_cdk import aws_lambda as _lambda
from constructs import Construct
class HitCounter(Construct):
@property
def handler(self):
return self._handler
@property
def table(self):
return self._table
def __init__(
self,
scope: Construct,
id: str,
downstream: _lambda.IFunction,
read_capacity: int = 5,
**kwargs
):
if read_capacity < 5 or read_capacity > 20:
            raise ValueError("readCapacity must be between 5 and 20, inclusive.")
super().__init__(scope, id, **kwargs)
# We defined a DynamoDB table with path as the partition key (every DynamoDB table must have a single partition key).
self._table = ddb.Table(
self,
"Hits",
partition_key={"name": "path", "type": ddb.AttributeType.STRING},
encryption=ddb.TableEncryption.AWS_MANAGED,
read_capacity=read_capacity,
)
# We defined a Lambda function which is bound to the lambda/hitcount.handler code.
# We wired the Lambda’s environment variables to the function_name and table_name of our resources.
self._handler = _lambda.Function(
self,
"HitCountHandler",
runtime=_lambda.Runtime.PYTHON_3_7,
handler="hitcount.handler",
code=_lambda.Code.from_asset("lambda"),
environment={
"DOWNSTREAM_FUNCTION_NAME": downstream.function_name,
"HITS_TABLE_NAME": self._table.table_name,
},
)
self._table.grant_read_write_data(self.handler)
downstream.grant_invoke(self.handler)
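# Wiring sketch inside a CDK stack (identifiers here are illustrative, not from
# this repo; `apigw` would be `aws_cdk.aws_apigateway`):
#
# hello = _lambda.Function(self, "HelloHandler",
#                          runtime=_lambda.Runtime.PYTHON_3_7,
#                          handler="hello.handler",
#                          code=_lambda.Code.from_asset("lambda"))
# hello_with_counter = HitCounter(self, "HelloHitCounter", downstream=hello)
# apigw.LambdaRestApi(self, "Endpoint", handler=hello_with_counter.handler)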
``` |
{
"source": "jordanhoare/digit-drawing-prediction",
"score": 3
} |
#### File: digit_recognition/classifier/predict.py
```python
import base64
import io
import numpy as np
import torch
import torch.nn.functional as F
import torchvision
from PIL import Image
from torch import nn
class Classifier_Prediction:
"""
(1) Instantiate the model & tokenizer
(2) Create_lenet
(3) Inference
(4) Predict & return list of prediction and probability
"""
def __init__(self, input_image):
if torch.cuda.is_available():
self.device = torch.device("cuda:0")
else:
self.device = torch.device("cpu")
print("No Cuda Available")
self.save_path = "lenet.pth"
self.path = input_image
self.T = torchvision.transforms.Compose([torchvision.transforms.ToTensor()])
        self.create_lenet()
        self.predict()  # predict() runs inference() internally
def create_lenet(self):
""" """
self.model = nn.Sequential(
nn.Conv2d(1, 6, 5, padding=2),
nn.ReLU(),
nn.AvgPool2d(2, stride=2),
nn.Conv2d(6, 16, 5, padding=0),
nn.ReLU(),
nn.AvgPool2d(2, stride=2),
nn.Flatten(),
nn.Linear(400, 120),
nn.ReLU(),
nn.Linear(120, 84),
nn.ReLU(),
nn.Linear(84, 10),
)
lenet = self.model.to(self.device)
lenet.load_state_dict(torch.load(self.save_path))
self.lenet = lenet.eval()
def inference(self):
image_encoded = self.path.split(",")[1]
image_bytes = io.BytesIO(base64.b64decode(image_encoded))
img = Image.open(image_bytes).convert("L")
img = img.resize((28, 28))
x = (255 - np.expand_dims(np.array(img), -1)) / 255.0
with torch.no_grad():
pred = self.model(
torch.unsqueeze(self.T(x), axis=0).float().to(self.device)
)
return F.softmax(pred, dim=-1).cpu().numpy()
def predict(self):
self.pred = self.inference()
self.pred_idx = np.argmax(self.pred)
self.prob = self.pred[0][self.pred_idx] * 100
# self.prob = "{:.0%}".format(self.prob)
def return_list(self):
return self.pred_idx, self.prob
# poe force-cuda11
# pred_idx, prob = Classifier_Prediction("").return_list()
# print(pred_idx)
``` |
{
"source": "jordanhoare/pybot-lostark",
"score": 3
} |
#### File: draft/core/repair_tool.py
```python
import os
import sys
from random import randint
from time import sleep
import pyautogui
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from core.vision import Vision
from core.window_capture import WindowCapture
class RepairTool:
"""
If there is no durability left on your tool:
> create a new window capture instance in the background
> navigate to the pet repair window using hotkeys
> return the window co-ords of the repair_tool_icon & repair_all_button & repair_all_ok
> repair tools on person then close the pet window
    @ takes: window_title
    @ returns: None
"""
# properties
catch_counter = 0
repair_all_button_path = "data/repair_all_button.jpeg"
repair_all_ok_path = "data/repair_all_ok.jpeg"
repair_icon_path = "data/repair_icon.jpeg"
def __init__(self, window_title):
# initialize the capture classes
self.wincap = WindowCapture(window_title)
self.repair_tool_icon = Vision(self.repair_icon_path)
self.repair_all_button = Vision(self.repair_all_button_path)
self.repair_all_ok_button = Vision(self.repair_all_ok_path)
# Console log call to repair function
print(f">>> No durability left on your tool. Attempting to repair tools.")
sleep(randint(0, 1))
self.repair_tool()
def get_click_points(self, rectangles):
# given a list of [x, y, w, h] rectangles returned by find(), convert those into a list of
# [x, y] positions in the center of those rectangles where we can click on those found items
points = []
for (x, y, w, h) in rectangles:
# Determine the center position
center_x = x + int(w / 2)
center_y = y + int(h / 2)
# Save the points
points.append((center_x, center_y))
return points
def repair_tool(self):
# Open up pet repair window
pyautogui.keyDown("alt")
pyautogui.press("p")
pyautogui.keyUp("alt")
sleep(randint(4, 5))
while True:
# get an updated image of the game
screenshot = self.wincap.get_screenshot()
# render object detection images
self.repair_rectangles = self.repair_tool_icon.find(screenshot, 0.85)
repair_button_rectangles = self.repair_all_button.find(screenshot, 0.90)
repair_ok_button_rectangles = self.repair_all_ok_button.find(
screenshot, 0.90
)
# given a list of [x, y, w, h] rectangles returned by find(), convert those into a list of
# [x, y] positions in the center of those rectangles where we can click on those found items
if len(self.repair_rectangles) > 0:
targets = self.get_click_points(self.repair_rectangles)
target = self.wincap.get_screen_position(targets[0])
sleep(randint(0, 1))
pyautogui.moveTo(x=target[0], y=target[1])
sleep(randint(1, 2))
pyautogui.click(clicks=2)
sleep(randint(1, 2))
if len(repair_button_rectangles) > 0:
# repair tools
targets = self.get_click_points(repair_button_rectangles)
target = self.wincap.get_screen_position(targets[0])
sleep(randint(1, 2))
pyautogui.moveTo(x=target[0], y=target[1])
sleep(randint(1, 2))
pyautogui.click(clicks=2)
sleep(randint(1, 2))
if len(repair_ok_button_rectangles) > 0:
# repair tools
targets = self.get_click_points(repair_ok_button_rectangles)
target = self.wincap.get_screen_position(targets[0])
sleep(randint(1, 2))
pyautogui.moveTo(x=target[0], y=target[1])
sleep(randint(1, 2))
pyautogui.click(clicks=2)
sleep(randint(2, 3))
# close pet window
print(f">>> Closing repair window and restarting bot.")
pyautogui.press("esc")
sleep(randint(0, 1))
pyautogui.press("esc")
sleep(randint(0, 1))
pyautogui.press("esc")
sleep(randint(0, 3))
break
########################
        ## TODO: add an idle timer here in case someone has no Crystalline Aura
########################
```
#### File: pybot_lostark/window_capture/get_hwnd.py
```python
import pygetwindow as gw
import win32gui
def get_hwnd(Title):
"""
Returns the HWND number for provided string
Example:
> window_title = "Lost Ark"
> hwnd = get_hwnd(window_title)
#### Lost Ark (3412402) window was located.
"""
try:
a = gw.getWindowsWithTitle(Title)
a = str(a)
b = a.split("=", 1)
b = b[1].split(")", 1)
hwnd = int(b[0])
return hwnd
except Exception as Ex:
print(f">>> From get_hwnd.py: ", Ex)
print(f">>> No window matched with the title: '{Title}'")
return 0
def list_window_names():
"""
Returns a list of windows currently opened
Example:
> list_window_names()
#### 0x20085c ""
#### 0x3411b2 "LOST ARK (64-bit, DX11) v.2.0.2.1"
#### 0x4a049c ""
"""
def winEnumHandler(hwnd, ctx):
if win32gui.IsWindowVisible(hwnd):
print(hex(hwnd), '"' + win32gui.GetWindowText(hwnd) + '"')
win32gui.EnumWindows(winEnumHandler, None)
``` |
{
"source": "jordanhoare/sentiment-analysis",
"score": 3
} |
#### File: sentiment_analysis/nlp_classifier/nlp_classifier.py
```python
import numpy as np
from scipy.special import softmax
from transformers import AutoModelForSequenceClassification, AutoTokenizer
class BertClassifier:
"""
(1) Instantiate the model & tokenizer
(2) Preprocessing/encoding
(3) Format scores
(4) Return list of scores
"""
def __init__(self, input_phrase):
save_dir = "sentiment_analysis/nlp_classifier"
model_name = "cardiffnlp/twitter-roberta-base-sentiment"
self.model = AutoModelForSequenceClassification.from_pretrained(save_dir)
self.tokenizer = AutoTokenizer.from_pretrained(model_name)
self.model.save_pretrained(save_dir)
self.preprocessing(input_phrase)
self.formatting()
self.calc_sentiment()
def preprocessing(self, input_phrase):
encoded_input = self.tokenizer(input_phrase, return_tensors="pt")
output = self.model(**encoded_input)
scores = output[0][0].detach().numpy()
self.scores = softmax(scores)
def formatting(self):
self.positive_score = "{:.2}".format(self.scores[2])
self.neutral_score = "{:.2}".format(self.scores[1])
self.negative_score = "{:.2}".format(self.scores[0])
    def calc_sentiment(self):
        # Compare the raw softmax scores: the values formatted above are
        # strings, and comparing those would sort lexicographically rather
        # than numerically. Index order is [negative, neutral, positive].
        labels = ["Negative", "Neutral", "Positive"]
        self.sentiment = labels[int(np.argmax(self.scores))]
def return_list(self):
return [
self.sentiment,
self.positive_score,
self.neutral_score,
self.negative_score,
]
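# Example usage (assumes the fine-tuned weights already exist in `save_dir`;
# the tokenizer is fetched from the Hugging Face hub on first run):
# sentiment, positive, neutral, negative = BertClassifier("Great product!").return_list()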
``` |
{
"source": "JordanHood/python-scanner",
"score": 3
} |
#### File: JordanHood/python-scanner/main.py
```python
import subprocess
import argparse
import sys
def system_call(command):
    return subprocess.getoutput(command)
# grab the host gateway
def get_gateway_address():
return system_call("route -n get default | grep 'gateway' | awk '{print $2}'")
# using nmap to populate the ARP-tables
def populate_arp_tables(gatewayAddress):
return system_call("nmap -T5 -sn {}-255".format(gatewayAddress))
# scan the populated arp table and filter results that aren't `incomplete`
def get_arp_table():
return system_call("arp -a -n | grep -v incomplete | awk '{print $2}' | grep -E -o '[0-9.]+'")
def port_scan(ipAddress):
return system_call("nmap {}".format(ipAddress))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--scan","-s", help="Scan the local network for all available devices", action="store_true")
parser.add_argument("--port","-p", help="Scan a device on the local network for all availabile ports")
args=parser.parse_args()
if args.scan:
gatewayAddress = get_gateway_address()
populate_arp_tables(gatewayAddress)
print(get_arp_table())
elif args.port:
print(port_scan(args.port))
else:
parser.print_help()
sys.exit(1)
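# Example invocations (requires `nmap` on the PATH; the `route`/`arp` pipelines
# above assume BSD/macOS syntax):
# python main.py --scan
# python main.py --port 192.168.1.10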
``` |
{
"source": "jordanhubbard/ledmonitor",
"score": 3
} |
#### File: jordanhubbard/ledmonitor/ledmonitor.py
```python
import sys
import os
import logging
from time import sleep
from datetime import datetime
from pythonping import ping
from ledcontrol import led_color, led_color_blink
addresses = {
"8.8.8.8": "green", # Google
"10.11.100.248": "yellow", # Near radio P2P
"10.11.100.249": "yellow", # Far radio P2P
"10.11.111.250": "cyan", # Far switch
"10.11.111.248": "violet", # Near ISP radio
"10.11.111.249": "violet", # Far ISP radio
"10.11.100.254": "red", # Great Firewall
}
logging.basicConfig(filename='/tmp/ledmonitor.log', level=logging.DEBUG)
def eep(msg, warn=True):
"""Scream about some important problem"""
today = datetime.now()
date_str = today.strftime("%Y/%m/%d %R")
log_str = date_str + " " + msg
if warn:
logging.warning(log_str)
else:
logging.error(log_str)
while True:
FAIL_CNT = 0
for adr in addresses.items():
ip = adr[0]
col = adr[1]
try:
x = ping(ip, count=1, size=992)
if x.success():
led_color(col, True)
sleep(5)
break
FAIL_CNT = FAIL_CNT + 1
if FAIL_CNT > 3:
led_color("red", True)
eep("fail count > 3 for ip " + ip)
sleep(2)
FAIL_CNT = 0
else:
# Let's have a blink spasm
led_color_blink(col, 5, 0.2)
eep("spazzing on ip " + ip + " with color " + col)
except PermissionError:
print("You have to run this as root")
eep("Attempt to run agent as non-root id " + os.getuid())
sys.exit(1)
except OSError:
# Usually means the network has violently disconnected
led_color_blink("red", 5, 0.2)
eep("exception path triggered on " + ip, False)
``` |
{
"source": "jordanisaacs/fastapi-sessions",
"score": 3
} |
#### File: fastapi_sessions/backends/session_backend.py
```python
from abc import ABC, abstractmethod
from typing import Generic, Optional, TypeVar
from fastapi_sessions.frontends.session_frontend import ID
from pydantic.main import BaseModel
SessionModel = TypeVar("SessionModel", bound=BaseModel)
class BackendError(Exception):
"""Error that is thrown by backends."""
pass
class SessionBackend(ABC, Generic[ID, SessionModel]):
"""Abstract class that defines methods for interacting with session data."""
@abstractmethod
async def create(self, session_id: ID, data: SessionModel) -> None:
"""Create a new session."""
raise NotImplementedError()
@abstractmethod
async def read(self, session_id: ID) -> Optional[SessionModel]:
"""Read session data from the storage."""
raise NotImplementedError()
@abstractmethod
async def update(self, session_id: ID, data: SessionModel) -> None:
"""Update session data to the storage"""
raise NotImplementedError()
@abstractmethod
async def delete(self, session_id: ID) -> None:
"""Remove session data from the storage."""
raise NotImplementedError()
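# A minimal in-memory backend sketch implementing this interface: a plain dict
# keyed by session ID, fine for tests but not for multi-process deployments.
# (The library ships its own InMemoryBackend; this is only an illustration.)
class DictBackend(SessionBackend[ID, SessionModel]):
    def __init__(self) -> None:
        self.data = {}
    async def create(self, session_id: ID, data: SessionModel) -> None:
        if session_id in self.data:
            raise BackendError("create can't overwrite an existing session")
        self.data[session_id] = data.copy(deep=True)
    async def read(self, session_id: ID) -> Optional[SessionModel]:
        # Return a copy so callers can't mutate stored state in place
        session = self.data.get(session_id)
        return session.copy(deep=True) if session else None
    async def update(self, session_id: ID, data: SessionModel) -> None:
        if session_id not in self.data:
            raise BackendError("session does not exist, cannot update")
        self.data[session_id] = data.copy(deep=True)
    async def delete(self, session_id: ID) -> None:
        del self.data[session_id]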
```
#### File: fastapi-sessions/fastapi_sessions/session_verifier.py
```python
from abc import abstractmethod
from typing import Generic, Union
from fastapi import HTTPException, Request
from fastapi_sessions.backends.session_backend import (
BackendError,
SessionBackend,
SessionModel,
)
from fastapi_sessions.frontends.session_frontend import ID, FrontendError
class SessionVerifier(Generic[ID, SessionModel]):
@property
@abstractmethod
def identifier(self) -> str:
raise NotImplementedError()
@property
@abstractmethod
def backend(self) -> SessionBackend[ID, SessionModel]:
raise NotImplementedError()
@property
@abstractmethod
def auto_error(self) -> bool:
raise NotImplementedError()
@property
@abstractmethod
def auth_http_exception(self) -> HTTPException:
raise NotImplementedError()
@abstractmethod
def verify_session(self, model: SessionModel) -> bool:
raise NotImplementedError()
async def __call__(self, request: Request):
try:
session_id: Union[ID, FrontendError] = request.state.session_ids[
self.identifier
]
except Exception:
if self.auto_error:
raise HTTPException(
status_code=500, detail="internal failure of session verification"
)
else:
                return BackendError(
                    "failed to extract the {} session from state".format(self.identifier)
                )
if isinstance(session_id, FrontendError):
if self.auto_error:
raise self.auth_http_exception
return
session_data = await self.backend.read(session_id)
if not session_data or not self.verify_session(session_data):
if self.auto_error:
raise self.auth_http_exception
return
return session_data
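# A sketch of a concrete verifier (hypothetical names; `UUID` and `SessionData`
# would come from the application, mirroring the pattern in the library docs):
#
# class BasicVerifier(SessionVerifier[UUID, SessionData]):
#     def __init__(self, *, identifier, auto_error, backend, auth_http_exception):
#         self._identifier = identifier
#         self._auto_error = auto_error
#         self._backend = backend
#         self._auth_http_exception = auth_http_exception
#     identifier = property(lambda self: self._identifier)
#     backend = property(lambda self: self._backend)
#     auto_error = property(lambda self: self._auto_error)
#     auth_http_exception = property(lambda self: self._auth_http_exception)
#     def verify_session(self, model: SessionData) -> bool:
#         return True  # accept any session that exists in the backend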
``` |
{
"source": "JordanJaner/web-scraping-challenge",
"score": 3
} |
#### File: web-scraping-challenge/Missions_to_Mars/scrape_mars.py
```python
import pymongo
import requests
from splinter import Browser
from bs4 import BeautifulSoup as bs
import pandas as pd
import time
# DB Setup
#
client = pymongo.MongoClient('mongodb://localhost:27017')
db = client.mars_db
collection = db.mars
def init_browser():
# @NOTE: Replace the path with your actual path to the chromedriver
executable_path = {'executable_path': 'chromedriver.exe'}
return Browser('chrome', **executable_path, headless=False)
def scrape():
browser = init_browser()
collection.drop()
# Nasa Mars news
news_url = 'https://mars.nasa.gov/news/'
browser.visit(news_url)
news_html = browser.html
news_soup = bs(news_html,'lxml')
news_title = news_soup.find("div",class_="content_title").text
news_para = news_soup.find("div", class_="rollover_description_inner").text
# JPL Mars Space Images - Featured Image
jurl = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'
browser.visit(jurl)
jhtml = browser.html
jpl_soup = bs(jhtml,"html.parser")
image_url = jpl_soup.find('div',class_='carousel_container').article.footer.a['data-fancybox-href']
base_link = "https:"+jpl_soup.find('div', class_='jpl_logo').a['href'].rstrip('/')
feature_url = base_link+image_url
featured_image_title = jpl_soup.find('h1', class_="media_feature_title").text.strip()
# Mars Weather
turl = 'https://twitter.com/marswxreport?lang=en'
browser.visit(turl)
thtml = browser.html
tw_soup = bs(thtml,"html.parser")
mars_weather = tw_soup.find("p", class_= "TweetTextSize TweetTextSize--normal js-tweet-text tweet-text").text
# Mars fact
murl = 'https://space-facts.com/mars/'
table = pd.read_html(murl)
mars_df = table[0]
mars_df = mars_df[['Mars - Earth Comparison', 'Mars']]
mars_fact_html = mars_df.to_html(header=False, index=False)
# Mars Hemispheres
mhurl = 'https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'
browser.visit(mhurl)
mhtml = browser.html
mh_soup = bs(mhtml,"html.parser")
results = mh_soup.find_all("div",class_='item')
hemisphere_image_urls = []
for result in results:
product_dict = {}
titles = result.find('h3').text
end_link = result.find("a")["href"]
image_link = "https://astrogeology.usgs.gov/" + end_link
browser.visit(image_link)
html = browser.html
soup= bs(html, "html.parser")
downloads = soup.find("div", class_="downloads")
image_url = downloads.find("a")["href"]
product_dict['title']= titles
product_dict['image_url']= image_url
hemisphere_image_urls.append(product_dict)
# Close the browser after scraping
browser.quit()
# Return results
mars_data ={
'news_title' : news_title,
'summary': news_para,
'featured_image': feature_url,
'featured_image_title': featured_image_title,
'weather': mars_weather,
'fact_table': mars_fact_html,
'hemisphere_image_urls': hemisphere_image_urls,
'news_url': news_url,
'jpl_url': jurl,
'weather_url': turl,
'fact_url': murl,
'hemisphere_url': mhurl,
}
    collection.insert_one(mars_data)  # Collection.insert is deprecated/removed in modern pymongo
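if __name__ == "__main__":
    # Minimal manual run sketch (assumes chromedriver.exe is reachable as
    # configured above and a local MongoDB instance is listening on 27017).
    scrape()
    print(collection.find_one())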
``` |
{
"source": "Jordanjiun/cd11_seepod_ground_risk",
"score": 3
} |
#### File: seedpod_ground_risk/layers/annotation_layer.py
```python
import abc
from typing import List, Dict, Tuple
import geopandas as gpd
import numpy as np
from holoviews.element.geom import Geometry
from seedpod_ground_risk.layers.layer import Layer
class AnnotationLayer(Layer, abc.ABC):
@abc.abstractmethod
    def annotate(self, data: List[gpd.GeoDataFrame], raster_data: Tuple[Dict[str, np.ndarray], np.ndarray],
**kwargs) -> Geometry:
"""
Annotate data
:param data: Input data to annotate
:param raster_data: Input raster data
:return: Holoviews Geometry to overlay
"""
pass
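# Minimal concrete sketch (hypothetical subclass, not part of this project):
# an annotation layer only has to turn its inputs into a Holoviews geometry,
# plus implement the Layer lifecycle hooks it inherits.
#
# class NoOpAnnotationLayer(AnnotationLayer):
#     def annotate(self, data, raster_data, **kwargs):
#         import geoviews as gv
#         return gv.Points([])  # overlays nothing
#     def preload_data(self):
#         pass
#     def clear_cache(self):
#         pass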
```
#### File: seedpod_ground_risk/layers/layer.py
```python
import abc
from typing import NoReturn
class Layer(abc.ABC):
"""
Abstract base class for a single layer
"""
key: str
def __init__(self, key):
self.key = key
@abc.abstractmethod
def preload_data(self) -> NoReturn:
"""
Load any data that is expected to remain static for the duration of the program execution.
This is called when the application is first initialised.
This method is guaranteed to complete execution before any requests for plot generation to this class.
"""
pass
@abc.abstractmethod
def clear_cache(self) -> NoReturn:
"""
Clear all cached dynamic data to the state AFTER `preload_data` was called.
All statically preloaded data should remain intact after calls to this method
"""
pass
```
#### File: seedpod_ground_risk/layers/residential_layer.py
```python
from typing import NoReturn, Tuple
import geopandas as gpd
import numpy as np
import shapely.geometry as sg
from holoviews.element import Geometry
from shapely import speedups
from seedpod_ground_risk.data import england_wa_2011_clipped_filepath, density_filepath
from seedpod_ground_risk.layers.osm_tag_layer import OSMTagLayer
gpd.options.use_pygeos = True # Use GEOS optimised C++ routines
speedups.enable() # Enable shapely speedups
class ResidentialLayer(OSMTagLayer):
_census_wards: gpd.GeoDataFrame
def __init__(self, key, **kwargs):
super(ResidentialLayer, self).__init__(key, 'landuse=residential', **kwargs)
delattr(self, '_colour')
self._census_wards = gpd.GeoDataFrame()
def preload_data(self):
print("Preloading Residential Layer")
self.ingest_census_data()
def generate(self, bounds_polygon: sg.Polygon, raster_shape: Tuple[int, int], from_cache: bool = False, **kwargs) -> \
Tuple[Geometry, np.ndarray, gpd.GeoDataFrame]:
import colorcet
import datashader as ds
from holoviews.operation.datashader import rasterize
import geoviews as gv
from copy import deepcopy
bounds = bounds_polygon.bounds
polys_df = self.query_osm_polygons(bounds_polygon)
bounded_census_wards = self._census_wards.cx[bounds[1]:bounds[3], bounds[0]:bounds[2]]
# Find landuse polygons intersecting/within census wards and merge left
census_df = gpd.overlay(polys_df,
bounded_census_wards,
how='intersection')
# Estimate the population of landuse polygons from the density of the census ward they are within
# EPSG:4326 is *not* an equal area projection so would give gibberish areas
        # Project geometries to an equidistant/equal area projection
        census_df['population'] = census_df['density'] * census_df['geometry'].to_crs('EPSG:3395').area * 1e-6  # density is people/km^2; .area is in m^2
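        # e.g. a ward at 3000 people/km^2 overlapping a 0.2 km^2 residential
        # polygon contributes an estimated 600 people to that polygon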
census_df['ln_density'] = np.log(census_df['density'])
# Construct the GeoViews Polygons
gv_polys = gv.Polygons(census_df, kdims=['Longitude', 'Latitude'],
vdims=['population', 'ln_density', 'density']) \
.opts(color='ln_density',
cmap=colorcet.CET_L18, alpha=0.8,
colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'}, show_legend=False,
line_color='ln_density')
if self.buffer_dist > 0:
buffered_df = deepcopy(census_df)
buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
.buffer(self.buffer_dist).to_crs('EPSG:4326')
buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'], vdims=['name', 'density'])
raster = rasterize(buffered_polys, aggregator=ds.max('density'), width=raster_shape[0],
height=raster_shape[1], x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]),
dynamic=False)
else:
raster = rasterize(gv_polys, aggregator=ds.max('density'), width=raster_shape[0], height=raster_shape[1],
x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]), dynamic=False)
        raster_grid = np.copy(list(raster.data.data_vars.items())[0][1].data.astype(float))
return gv_polys, raster_grid, gpd.GeoDataFrame(census_df)
def ingest_census_data(self) -> NoReturn:
"""
Ingest Census boundaries and density values and overlay/merge
"""
import pandas as pd
# Import Census boundaries in Ordnance Survey grid and reproject
census_wards_df = gpd.read_file(england_wa_2011_clipped_filepath()).drop(
['altname', 'oldcode'], axis=1)
if not census_wards_df.crs:
census_wards_df = census_wards_df.set_crs('EPSG:27700')
census_wards_df = census_wards_df.to_crs('EPSG:4326')
# Import census ward densities
density_df = pd.read_csv(density_filepath(), header=0)
# Scale from hectares to km^2
density_df['area'] = density_df['area'] * 0.01
density_df['density'] = density_df['density'] / 0.01
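        # (1 hectare = 0.01 km^2, so areas scale down and per-area densities scale up by a factor of 100)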
# These share a common UID, so merge together on it and store
self._census_wards = census_wards_df.merge(density_df, on='code')
```
#### File: seedpod_ground_risk/layers/temporal_population_estimate_layer.py
```python
from typing import NoReturn
import geopandas as gpd
from seedpod_ground_risk.data import england_wa_2011_clipped_filepath, nhaps_data_filepath, \
density_filepath
from seedpod_ground_risk.layers.blockable_data_layer import BlockableDataLayer
from seedpod_ground_risk.layers.osm_tag_layer import query_osm_polygons
nhaps_category_groupings = [
[0, 1],
[5, 9],
[7],
[6, 8],
]
nhaps_group_tags = [
['landuse=residential'],
['landuse=industrial', 'landuse=commercial'],
['building=school', 'building=college', 'building=university', 'building=public', 'building=government',
'building=civic', 'building=hospital'],
['landuse=retail']
]
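# The two lists above are parallel: NHAPS activity-category indices [0, 1] are
# redistributed over residential land use, [5, 9] over industrial/commercial
# areas, [7] over institutional buildings, and [6, 8] over retail land use.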
class TemporalPopulationEstimateLayer(BlockableDataLayer):
def __init__(self, key, colour: str = None, blocking=False, buffer_dist=0):
super().__init__(key, colour, blocking, buffer_dist)
delattr(self, '_colour')
def preload_data(self):
self._ingest_census_data()
self._ingest_nhaps_proportions()
def generate(self, bounds_polygon, raster_shape, from_cache: bool = False, hour: int = 8, **kwargs):
import pandas as pd
import numpy as np
import geoviews as gv
from copy import deepcopy
from holoviews.operation.datashader import rasterize
import colorcet
import datashader as ds
bounds = bounds_polygon.bounds
# Hardcode residential tag in as this is always the first OSM query made to find the total area population
residential_df = query_osm_polygons('landuse=residential', bounds_polygon)
bounded_census_wards = self._census_wards.cx[bounds[1]:bounds[3], bounds[0]:bounds[2]]
# Find landuse polygons intersecting/within census wards and merge left
census_df = gpd.overlay(residential_df,
bounded_census_wards,
how='intersection')
# Estimate the population of landuse polygons from the density of the census ward they are within
# EPSG:4326 is *not* an equal area projection so would give gibberish areas
        # Project geometries to an equidistant/equal area projection
census_reproj_areas = census_df['geometry'].to_crs('EPSG:3395').area * 1e-6 # km^2
census_df['population'] = census_df['density'] * census_reproj_areas
total_population = census_df['population'].sum()
df = None
# Ensure we have a large enough population for this approximation to be valid
if total_population > 200000:
hour_categories = self.nhaps_df.iloc[:, hour % 24]
nhaps_category_gdfs = []
for idx, categories in enumerate(nhaps_category_groupings):
group_proportion = hour_categories.iloc[categories].sum()
group_population = total_population * group_proportion
                # Residential areas are handled separately as they depend upon census data.
                # Otherwise they would be assigned a uniform density, even though the census data provides (unscaled) densities.
if idx == 0:
group_gdf = deepcopy(census_df)
group_gdf['population'] = group_gdf['population'] * group_proportion
group_gdf['density'] = group_gdf['population'] / census_reproj_areas
group_gdf['ln_density'] = np.log(group_gdf['density'])
else:
group_gdfs = [query_osm_polygons(tag, bounds_polygon) for tag in nhaps_group_tags[idx]]
group_gdf = gpd.GeoDataFrame(pd.concat(group_gdfs, ignore_index=True), crs='EPSG:4326')
areas = group_gdf.to_crs(epsg=3395).geometry.area * 1e-6 # km^2
group_density = group_population / areas.sum()
group_gdf['density'] = group_density
group_gdf['ln_density'] = np.log(group_gdf['density'])
group_gdf['population'] = group_density * areas
nhaps_category_gdfs.append(group_gdf)
nhaps_category_gdf = gpd.GeoDataFrame(pd.concat(nhaps_category_gdfs, ignore_index=True), crs='EPSG:4326')
# Construct the GeoViews Polygons
gv_polys = gv.Polygons(nhaps_category_gdf, kdims=['Longitude', 'Latitude'],
vdims=['population', 'ln_density', 'density']) \
.opts(color='ln_density',
cmap=colorcet.CET_L18, alpha=0.6,
colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'},
show_legend=False,
line_color='ln_density')
if self.buffer_dist > 0:
buffered_df = deepcopy(nhaps_category_gdf)
buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
.buffer(self.buffer_dist).to_crs('EPSG:4326')
buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'], vdims=['density'])
raster = rasterize(buffered_polys, aggregator=ds.max('density'), width=raster_shape[0],
height=raster_shape[1], x_range=(bounds[1], bounds[3]),
y_range=(bounds[0], bounds[2]),
dynamic=False)
else:
raster = rasterize(gv_polys, aggregator=ds.max('density'), width=raster_shape[0],
height=raster_shape[1],
x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]), dynamic=False)
df = nhaps_category_gdf
else:
census_df['ln_density'] = np.log(census_df['density'])
# Construct the GeoViews Polygons
gv_polys = gv.Polygons(census_df, kdims=['Longitude', 'Latitude'],
vdims=['population', 'ln_density', 'density']) \
.opts(color='ln_density',
cmap=colorcet.CET_L18, alpha=0.8,
colorbar=True, colorbar_opts={'title': 'Log Population Density [ln(people/km^2)]'},
show_legend=False,
line_color='ln_density')
if self.buffer_dist > 0:
buffered_df = deepcopy(census_df)
buffered_df.geometry = buffered_df.to_crs('EPSG:27700') \
.buffer(self.buffer_dist).to_crs('EPSG:4326')
buffered_polys = gv.Polygons(buffered_df, kdims=['Longitude', 'Latitude'], vdims=['name', 'density'])
raster = rasterize(buffered_polys, aggregator=ds.max('density'), width=raster_shape[0],
height=raster_shape[1], x_range=(bounds[1], bounds[3]),
y_range=(bounds[0], bounds[2]),
dynamic=False)
else:
raster = rasterize(gv_polys, aggregator=ds.max('density'), width=raster_shape[0],
height=raster_shape[1],
x_range=(bounds[1], bounds[3]), y_range=(bounds[0], bounds[2]), dynamic=False)
df = census_df
raster_grid = np.copy(list(raster.data.data_vars.items())[0][1].data.astype(float))
return gv_polys, raster_grid, gpd.GeoDataFrame(df)
def clear_cache(self):
pass
def _ingest_census_data(self) -> NoReturn:
"""
Ingest Census boundaries and density values and overlay/merge
"""
import pandas as pd
# Import Census boundaries in Ordnance Survey grid and reproject
census_wards_df = gpd.read_file(england_wa_2011_clipped_filepath()).drop(['altname', 'oldcode'], axis=1)
if not census_wards_df.crs:
census_wards_df = census_wards_df.set_crs('EPSG:27700')
census_wards_df = census_wards_df.to_crs('EPSG:4326')
# Import census ward densities
density_df = pd.read_csv(density_filepath(), header=0)
# Scale from hectares to km^2
density_df['area'] = density_df['area'] * 0.01
density_df['density'] = density_df['density'] / 0.01
# These share a common UID, so merge together on it and store
self._census_wards = census_wards_df.merge(density_df, on='code')
def _ingest_nhaps_proportions(self) -> NoReturn:
"""
Ingest NHAPS serialised spatiotemporal population location proportions
"""
import pandas as pd
self.nhaps_df = pd.read_json(nhaps_data_filepath())
```
#### File: seedpod_ground_risk/pathfinding/algorithm.py
```python
import abc
from seedpod_ground_risk.pathfinding.environment import GridEnvironment, Node
class Algorithm(abc.ABC):
@abc.abstractmethod
def find_path(self, environment: GridEnvironment, start: Node, goal: Node):
pass
```
#### File: seedpod_ground_risk/pathfinding/rjps_a_star.py
```python
from heapq import *
from typing import List, Union
import numpy as np
from seedpod_ground_risk.pathfinding.a_star import JumpPointSearchAStar, _reconstruct_path
from seedpod_ground_risk.pathfinding.environment import GridEnvironment, Node
from seedpod_ground_risk.pathfinding.heuristic import Heuristic, RiskHeuristic
global max_y, max_x
## Implementation is broken ##
def is_passable(grid, y, x):
if y > max_y or x > max_x or y < 0 or x < 0:
return False
return grid[y, x] != -1
def jump(grid: np.ndarray, cy: int, cx: int, dy: int, dx: int, gy: int, gx: int, start_cost: float, jump_gap: float,
jump_limit: float, jump_count: int) -> np.array:
ny, nx = cy + dy, cx + dx
out = np.full((3, 3), -1)
if not is_passable(grid, ny, nx):
return out
if nx == gx and ny == gy:
out[0, :] = [nx, ny, grid[ny, nx]]
return out
    # Return as a jump point if the cost changes by more than jump_gap from the start cost
if abs(grid[ny, nx] - start_cost) > jump_gap:
out[0, :] = [cx, cy, grid[cy, cx]]
return out
if jump_count > jump_limit:
out[0, :] = [cx, cy, grid[cy, cx]]
return out
if dx and dy:
# Diagonal case
if (is_passable(grid, nx - dx, ny + dy) and not is_passable(grid, nx - dx, ny)) or \
(is_passable(grid, nx + dx, ny - dy) and not is_passable(grid, nx, ny - dy)):
out[0, :] = [nx, ny, grid[ny, nx]]
return out
# Orthogonal searches
y_orthogonal_jump = jump(grid, ny, nx, dy, 0, gy, gx, start_cost, jump_gap, jump_limit, jump_count + 1)
x_orthogonal_jump = jump(grid, ny, nx, 0, dx, gy, gx, start_cost, jump_gap, jump_limit, jump_count + 1)
if not (y_orthogonal_jump == -1).all() or not (x_orthogonal_jump == -1).all():
out[0, :] = [cx, cy, grid[cy, cx]]
out[1, :] = x_orthogonal_jump[0, :]
out[2, :] = y_orthogonal_jump[0, :]
return out
else:
# Orthogonal case
if dx:
if (is_passable(grid, nx, ny + 1) and not is_passable(grid, nx - dx, ny + 1)) or \
(is_passable(grid, nx, ny - 1) and not is_passable(grid, nx - dx, ny - 1)):
out[0, :] = [nx, ny, grid[ny, nx]]
return out
else: # dy
if (is_passable(grid, nx + 1, ny) and not is_passable(grid, nx + 1, ny - dy)) or \
(is_passable(grid, nx - 1, ny) and not is_passable(grid, nx - 1, ny - dy)):
out[0, :] = [nx, ny, grid[ny, nx]]
return out
return jump(grid, ny, nx, dy, dx, gy, gx, start_cost, jump_gap, jump_limit, jump_count + 1)
class RiskJumpPointSearchAStar(JumpPointSearchAStar):
def __init__(self, heuristic: Heuristic, jump_gap=0, jump_limit=200):
raise NotImplementedError("Risk JPS A* no longer works")
if not isinstance(heuristic, RiskHeuristic):
raise ValueError('Risk based A* can only use Risk based heuristics')
if not heuristic.environment.diagonals:
raise ValueError('JPS relies on a grid environment with diagonals')
super().__init__(heuristic)
self.jump_gap = jump_gap
self.jump_limit = jump_limit
self.heuristic_env_hash = hash(heuristic.environment)
def find_path(self, environment: GridEnvironment, start: Node, end: Node) -> Union[
List[Node], None]:
if not environment.diagonals:
raise ValueError('JPS relies on a grid environment with diagonals')
if self.heuristic_env_hash != hash(environment):
raise ValueError("Risk based heuristic and algorithm should have the same environment")
global max_y, max_x
grid = environment.grid
max_y, max_x = grid.shape[0] - 1, grid.shape[1] - 1
self.goal = end
# Check if start and goal are the same
if start == end:
return [start]
        # Use heapq; the thread safety provided by PriorityQueue is not needed, as we only execute on a single thread
open = [start]
start.f = start.g = start.h = 0
open_cost = {start: start.f}
        closed = {}  # maps each visited node to its parent; used as a mapping below
        costs = np.full(grid.shape, np.inf)  # best known path cost per grid cell
        costs[start[0], start[1]] = 0
for neighbour in environment.get_neighbours(start):
heappush(open, neighbour)
closed[neighbour] = start
neighbour = self.heuristic(start, neighbour)
while open:
node = heappop(open)
if node == end:
return _reconstruct_path(end, grid)
parent = closed[node]
py, px = parent[0], parent[1]
cy, cx = node[0], node[1]
current_cost = costs[cy, cx]
dy, dx = np.clip(cy - py, -1, 1), np.clip(cx - px, -1, 1)
ny, nx = cy + dy, cx + dx
# If the next node is not passable, the current node will be a dead end
if not is_passable(grid, ny, nx):
continue
jump_points = jump(grid, cy, cx, dy, dx, self.goal[0], self.goal[1], grid[ny, nx], self.jump_gap,
self.jump_limit, 0)
for node_vals in jump_points:
if (node_vals == -1).all():
continue
x, y = node_vals[0], node_vals[1]
successor = (y, x)
cost = current_cost + self.heuristic(node, successor)
if costs[y, x] > cost:
costs[y, x] = cost
                    h = self.heuristic(successor, end)
heappush(open, (cost + h, successor))
closed[successor] = node
return None
```
#### File: seedpod_ground_risk/ui_resources/new_aircraft_wizard.py
```python
import typing
import PySide2
from PySide2.QtCore import QRegExp
from PySide2.QtGui import QRegExpValidator
from PySide2.QtWidgets import QWizard, QWizardPage, QLabel, QLineEdit, QGridLayout
from seedpod_ground_risk.ui_resources.layer_options import *
class NewAircraftInfoPage(QWizardPage):
def __init__(self, parent: typing.Optional[PySide2.QtWidgets.QWidget] = ...) -> None:
super().__init__(parent)
self.setTitle('New Aircraft Configuration')
def initializePage(self) -> None:
super().initializePage()
layout = QGridLayout()
for name, opt in AIRCRAFT_PARAMETERS.items():
regex = opt[0]
label = QLabel(name)
field = QLineEdit()
field.setValidator(QRegExpValidator(QRegExp(regex)))
label.setBuddy(field)
self.registerField(name + '*', field)
layout.addWidget(label)
layout.addWidget(field)
self.setLayout(layout)
class AircraftWizard(QWizard):
def __init__(self, parent: typing.Optional[PySide2.QtWidgets.QWidget] = ...,
flags: PySide2.QtCore.Qt.WindowFlags = ...) -> None:
super().__init__(parent, flags)
self.addPage(NewAircraftInfoPage(self))
        self.setWindowTitle('Add Aircraft')
# TODO: Going back in wizard does not clear page fields.
# Hook into back button click and remove and re add page.
def accept(self) -> None:
super().accept()
self.aircraftKey = self.field('name')
self.opts = {}
self.d = {}
for name, opt in AIRCRAFT_PARAMETERS.items():
self.d[f'{opt[1]}'] = opt[2](self.field(name))
return self.d
```
#### File: seedpod_ground_risk/ui_resources/plot_webview.py
```python
import PySide2
from PySide2.QtCore import Signal
from PySide2.QtWebEngineWidgets import QWebEngineView
class PlotWebview(QWebEngineView):
resize = Signal(int, int)
def __init__(self, *args, **kwargs):
super(PlotWebview, self).__init__(*args, **kwargs)
def resizeEvent(self, event: PySide2.QtGui.QResizeEvent) -> None:
super().resizeEvent(event)
webview_size = self.size()
self.resize.emit(webview_size.width() - 50, webview_size.height() - 30)
```
#### File: tests/layers/test_full_risk_map.py
```python
import os
import unittest
from itertools import chain
import casex
import numpy as np
import scipy.stats as ss
from seedpod_ground_risk.core.plot_server import PlotServer
from seedpod_ground_risk.core.utils import make_bounds_polygon, remove_raster_nans
from seedpod_ground_risk.layers.strike_risk_layer import wrap_pipeline_cuda, wrap_all_pipeline
from seedpod_ground_risk.layers.temporal_population_estimate_layer import TemporalPopulationEstimateLayer
from seedpod_ground_risk.path_analysis.descent_models.ballistic_model import BallisticModel
from seedpod_ground_risk.path_analysis.descent_models.glide_model import GlideDescentModel
from seedpod_ground_risk.path_analysis.harm_models.fatality_model import FatalityModel
from seedpod_ground_risk.path_analysis.harm_models.strike_model import StrikeModel
from seedpod_ground_risk.path_analysis.utils import velocity_to_kinetic_energy, bearing_to_angle
def offset_window_row(arr, shape, offset):
y, x = shape
off_y, off_x = offset
for j in range(y):
start_y = off_y - j
end_y = start_y + y
# row_windows = []
# app = row_windows.append
for i in range(x):
start_x = off_x - i
end_x = start_x + x
yield arr[start_y:end_y, start_x:end_x]
# app(arr[start_y:end_y, start_x:end_x])
            # yield row_windows  # Don't return an np array here, as it gets copied to contiguous memory and OOMs
class FullRiskMapTestCase(unittest.TestCase):
###
# This can take upwards of 10mins to run
###
def setUp(self) -> None:
super().setUp()
self.hour = 17
self.serialise = False
self.test_bound_coords = [-1.5, 50.87, -1.3, 51]
# self.test_bound_coords = [-1.55, 50.745, -1.3, 51]
self.resolution = 30
self.test_bounds = make_bounds_polygon((self.test_bound_coords[0], self.test_bound_coords[2]),
(self.test_bound_coords[1], self.test_bound_coords[3]))
self._setup_aircraft()
os.chdir(
os.sep.join((
os.path.dirname(os.path.realpath(__file__)),
'..', '..'))
)
ps = PlotServer()
ps.set_time(self.hour)
self.raster_shape = ps._get_raster_dimensions(self.test_bounds, self.resolution)
ps.data_layers = [TemporalPopulationEstimateLayer('tpe')]
[layer.preload_data() for layer in chain(ps.data_layers, ps.annotation_layers)]
ps.generate_layers(self.test_bounds, self.raster_shape)
self.raster_grid = np.flipud(np.sum(
[remove_raster_nans(res[1]) for res in ps._generated_data_layers.values() if
res[1] is not None],
axis=0))
self.raster_shape = self.raster_grid.shape
del ps
# self.path_coords = list(gpd.read_file('path.geojson').iloc[0].geometry.coords)
def test_full_risk_map(self):
bm = BallisticModel(self.aircraft)
gm = GlideDescentModel(self.aircraft)
fm = FatalityModel(0.3, 1e6, 34)
ac_mass = self.aircraft.mass
x, y = np.mgrid[0:self.raster_shape[0], 0:self.raster_shape[1]]
eval_grid = np.vstack((x.ravel(), y.ravel())).T
samples = 5000
# Conjure up our distributions for various things
alt = ss.norm(self.alt, 5).rvs(samples)
vel = ss.norm(self.vel, 2.5).rvs(samples)
wind_vels = ss.norm(self.wind_vel, 1).rvs(samples)
wind_dirs = bearing_to_angle(ss.norm(self.wind_dir, np.deg2rad(5)).rvs(samples))
wind_vel_y = wind_vels * np.sin(wind_dirs)
wind_vel_x = wind_vels * np.cos(wind_dirs)
(bm_mean, bm_cov), v_ib, a_ib = bm.transform(alt, vel,
ss.uniform(0, 360).rvs(samples),
wind_vel_y, wind_vel_x,
0, 0)
(gm_mean, gm_cov), v_ig, a_ig = gm.transform(alt, vel,
ss.uniform(0, 360).rvs(samples),
wind_vel_y, wind_vel_x,
0, 0)
sm_b = StrikeModel(self.raster_grid, self.resolution ** 2, self.aircraft.width, a_ib)
sm_g = StrikeModel(self.raster_grid, self.resolution ** 2, self.aircraft.width, a_ig)
premult = sm_b.premult_mat + sm_g.premult_mat
offset_y, offset_x = self.raster_shape[0] // 2, self.raster_shape[1] // 2
bm_pdf = ss.multivariate_normal(bm_mean + np.array([offset_y, offset_x]), bm_cov).pdf(eval_grid)
gm_pdf = ss.multivariate_normal(gm_mean + np.array([offset_y, offset_x]), gm_cov).pdf(eval_grid)
pdf = bm_pdf + gm_pdf
pdf = pdf.reshape(self.raster_shape)
padded_pdf = np.zeros(((self.raster_shape[0] * 3) + 1, (self.raster_shape[1] * 3) + 1))
padded_pdf[self.raster_shape[0]:self.raster_shape[0] * 2, self.raster_shape[1]:self.raster_shape[1] * 2] = pdf
padded_pdf = padded_pdf * self.event_prob
padded_centre_y, padded_centre_x = self.raster_shape[0] + offset_y, self.raster_shape[1] + offset_x
impact_ke_b = velocity_to_kinetic_energy(ac_mass, v_ib)
impact_ke_g = velocity_to_kinetic_energy(ac_mass, v_ig)
# Check if CUDA toolkit available through env var otherwise fallback to CPU bound numba version
if not os.getenv('CUDA_HOME'):
print('CUDA NOT found, falling back to Numba JITed CPU code')
# Leaving parallelisation to Numba seems to be faster
res = wrap_all_pipeline(self.raster_shape, padded_pdf, padded_centre_y, padded_centre_x, premult)
else:
res = np.zeros(self.raster_shape, dtype=float)
threads_per_block = (32, 32) # 1024 max per block
blocks_per_grid = (
int(np.ceil(self.raster_shape[1] / threads_per_block[1])),
int(np.ceil(self.raster_shape[0] / threads_per_block[0]))
)
print('CUDA found, using config <<<' + str(blocks_per_grid) + ',' + str(threads_per_block) + '>>>')
wrap_pipeline_cuda[blocks_per_grid, threads_per_block](self.raster_shape, padded_pdf, padded_centre_y,
padded_centre_x, premult, res)
# Alternative joblib parallelisation
# res = jl.Parallel(n_jobs=-1, prefer='threads', verbose=1)(
# jl.delayed(wrap_row_pipeline)(c, self.raster_shape, padded_pdf, (padded_centre_y, padded_centre_x), sm)
# for c in range(self.raster_shape[0]))
strike_pdf = res
# snapped_points = [snap_coords_to_grid(self.raster_indices, *coords) for coords in self.path_coords]
import matplotlib.pyplot as mpl
import matplotlib.colors as mc
fig1, ax1 = mpl.subplots(1, 1)
m1 = ax1.matshow(self.raster_grid, norm=mc.LogNorm())
fig1.colorbar(m1, label='Population Density [people/km$^2$]')
ax1.set_title(f'Population Density at t={self.hour}')
ax1.set_xticks([0, self.raster_shape[1] - 1])
ax1.set_yticks([0, self.raster_shape[0] - 1])
ax1.set_xticklabels([self.test_bound_coords[0], self.test_bound_coords[2]], )
ax1.set_yticklabels([self.test_bound_coords[3], self.test_bound_coords[1]], )
fig1.tight_layout()
fig1.savefig(f'figs/tpe_t{self.hour}.png', bbox_inches='tight')
fig1.show()
if self.serialise:
np.savetxt(f'strike_map_t{self.hour}', strike_pdf, delimiter=',')
fig2, ax2 = mpl.subplots(1, 1)
m2 = ax2.matshow(strike_pdf)
fig2.colorbar(m2, label='Strike Risk [h$^{-1}$]')
ax2.set_title(f'Strike Risk Map at t={self.hour}')
ax2.set_xticks([0, self.raster_shape[1] - 1])
ax2.set_yticks([0, self.raster_shape[0] - 1])
ax2.set_xticklabels([self.test_bound_coords[0], self.test_bound_coords[2]], )
ax2.set_yticklabels([self.test_bound_coords[3], self.test_bound_coords[1]], )
fig2.tight_layout()
fig2.savefig(f'figs/risk_strike_t{self.hour}.png', bbox_inches='tight')
fig2.show()
fatality_pdf = fm.transform(strike_pdf, impact_ke=impact_ke_g) + fm.transform(strike_pdf, impact_ke=impact_ke_b)
if self.serialise:
np.savetxt(f'fatality_map_t{self.hour}', fatality_pdf, delimiter=',')
fig3, ax3 = mpl.subplots(1, 1)
m3 = ax3.matshow(fatality_pdf)
fig3.colorbar(m3, label='Fatality Risk [h$^{-1}$]')
ax3.set_title(f'Fatality Risk Map at t={self.hour}')
ax3.set_xticks([0, self.raster_shape[1] - 1])
ax3.set_yticks([0, self.raster_shape[0] - 1])
ax3.set_xticklabels([self.test_bound_coords[0], self.test_bound_coords[2]], )
ax3.set_yticklabels([self.test_bound_coords[3], self.test_bound_coords[1]], )
fig3.tight_layout()
fig3.savefig(f'figs/risk_fatality_t{self.hour}.png', bbox_inches='tight')
fig3.show()
import rasterio
from rasterio import transform
        # raster_shape is (rows, cols), so the GeoTIFF width is shape[1] and height is shape[0]
        trans = transform.from_bounds(*self.test_bound_coords, self.raster_shape[1], self.raster_shape[0])
        rds = rasterio.open(f'tiffs/fatality_risk_h{self.hour}.tif', 'w', driver='GTiff', count=1,
                            dtype=rasterio.float64,
                            crs='EPSG:4326', transform=trans, compress='lzw',
                            width=self.raster_shape[1], height=self.raster_shape[0])
rds.write(fatality_pdf, 1)
rds.close()
def _setup_aircraft(self, ac_width: float = 2.22, ac_length: float = 1.63,
ac_mass: float = 17, ac_glide_ratio: float = 11, ac_glide_speed: float = 21,
ac_glide_drag_coeff: float = 0.1, ac_ballistic_drag_coeff: float = 0.8,
ac_ballistic_frontal_area: float = 0.5, ac_failure_prob: float = 5e-3, alt: float = 100,
vel: float = 31,
wind_vel: float = 5, wind_dir: float = 45):
self.aircraft = casex.AircraftSpecs(casex.enums.AircraftType.FIXED_WING, ac_width, ac_length, ac_mass)
self.aircraft.set_ballistic_drag_coefficient(ac_ballistic_drag_coeff)
self.aircraft.set_ballistic_frontal_area(ac_ballistic_frontal_area)
self.aircraft.set_glide_speed_ratio(ac_glide_speed, ac_glide_ratio)
self.aircraft.set_glide_drag_coefficient(ac_glide_drag_coeff)
self.alt = alt
self.vel = vel
self.wind_vel = wind_vel
self.wind_dir = np.deg2rad((wind_dir - 90) % 360)
self.event_prob = ac_failure_prob
def plot_path_risk(hour):
import matplotlib.pyplot as mpl
import shapely.geometry as sg
import numpy as np
import geopandas as gpd
# import os
# os.chdir(
# os.sep.join((
# os.path.dirname(os.path.realpath(__file__)),
# '..', '..'))
# )
path = np.genfromtxt('fr_map_path.csv', delimiter=',').astype(int)
raster_indices = dict(Longitude=np.genfromtxt('raster_indices_lon.csv', delimiter=','),
Latitude=np.genfromtxt('raster_indices_lat.csv', delimiter=','))
lat = raster_indices['Latitude'][path[:, 1]]
lon = raster_indices['Longitude'][path[:, 0]]
ls = sg.LineString([sg.Point(lon, lat) for lon, lat in zip(lon, lat)])
df = gpd.GeoDataFrame(geometry=[ls]).set_crs('EPSG:4326')
fatality_pdf = np.genfromtxt(f'fatality_map_t{hour}', delimiter=',')
strike_pdf = np.genfromtxt(f'strike_map_t{hour}', delimiter=',')
fig3, ax3 = mpl.subplots(1, 1)
ax3.tick_params(left=False, right=False,
bottom=False, top=False,
labelleft=False, labelbottom=False)
m3 = ax3.matshow(fatality_pdf)
ax3.plot(path[:, 0], path[:, 1], 'r')
fig3.colorbar(m3, label='Fatality Risk [h$^{-1}$]')
ax3.set_title(f'Fatality Risk Map at t={hour}')
fig3.show()
pathwise_strike_maxs = strike_pdf[path[:, 1], path[:, 0]]
pathwise_fatality_maxs = fatality_pdf[path[:, 1], path[:, 0]]
fig, ax = mpl.subplots(1, 1)
path_dist = df.to_crs('EPSG:27700').iloc[0].geometry.length
ax.set_yscale('log')
x = np.linspace(0, path_dist, len(pathwise_fatality_maxs))
ax.axhline(y=np.mean(pathwise_fatality_maxs), c='y',
label='Fatality Mean') # This seems to be as stable as fsum
ax.plot(x, pathwise_fatality_maxs, c='r', label='Fatality Risk')
ax.axhline(y=np.mean(pathwise_strike_maxs), c='g',
label='Strike Mean') # This seems to be as stable as fsum
ax.plot(x, pathwise_strike_maxs, c='b', label='Strike Risk')
ax.legend()
ax.set_ylabel('Risk [$h^{-1}$]')
ax.set_xlabel('Path Distance [m]')
ax.set_title(f'Casualty Risk along path at t={hour}')
fig.show()
if __name__ == '__main__':
unittest.main()
```
#### File: tests/path_analysis/test_vector_ops.py
```python
import unittest
import numpy as np
from seedpod_ground_risk.path_analysis.utils import rotate_2d, bearing_to_angle
class VectorRotationTestCase(unittest.TestCase):
def test_first_quad(self):
"""
Test rotation of vectors. Used in transformation between frames
"""
theta = np.deg2rad(45)
vec = np.array([0, 1]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_equal(out, np.array([val, val]))
def test_sec_quad(self):
"""
Test rotation of vectors. Used in transformation between frames
"""
theta = np.deg2rad(45)
vec = np.array([1, 0]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_equal(out, np.array([val, -val]))
def test_third_quad(self):
"""
Test rotation of vectors. Used in transformation between frames
"""
theta = np.deg2rad(45)
vec = np.array([0, -1]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_equal(out, np.array([-val, -val]))
def test_fourth_quad(self):
"""
Test rotation of vectors. Used in transformation between frames
"""
theta = np.deg2rad(45)
vec = np.array([-1, 0]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_equal(out, np.array([-val, val]))
def test_negative_theta(self):
theta = np.deg2rad(-45)
vec = np.array([-1, 0]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_almost_equal(out, np.array([-val, val]))
def test_over_2pi(self):
theta = np.deg2rad(45) + (2 * np.pi)
vec = np.array([0, 1]) # y, x order
out = rotate_2d(vec, theta)
val = np.sqrt(2) / 2
np.testing.assert_array_almost_equal(out, np.array([val, val]))
class BearingRotationTestCase(unittest.TestCase):
def test_first_quad(self):
self.assertEqual(bearing_to_angle(np.pi / 4), np.pi / 4)
self.assertEqual(bearing_to_angle(45, is_rad=False), 45)
self.assertEqual(bearing_to_angle(np.pi / 2), 0)
self.assertEqual(bearing_to_angle(90, is_rad=False), 0)
self.assertEqual(bearing_to_angle(0), np.pi / 2)
self.assertEqual(bearing_to_angle(0, is_rad=False), 90)
self.assertEqual(bearing_to_angle(2 * np.pi), np.pi / 2)
self.assertEqual(bearing_to_angle(360, is_rad=False), 90)
def test_second_quad(self):
self.assertEqual(bearing_to_angle(np.pi + 3 * np.pi / 4), 3 * np.pi / 4)
self.assertEqual(bearing_to_angle(315, is_rad=False), 135)
self.assertEqual(bearing_to_angle(np.pi + np.pi / 2), np.pi)
self.assertEqual(bearing_to_angle(270, is_rad=False), 180)
def test_third_quad(self):
self.assertEqual(bearing_to_angle(np.pi + np.pi / 4), np.pi + np.pi / 4)
self.assertEqual(bearing_to_angle(225, is_rad=False), 225)
self.assertEqual(bearing_to_angle(np.pi), np.pi + np.pi / 2)
self.assertEqual(bearing_to_angle(180, is_rad=False), 270)
def test_fourth_quad(self):
self.assertEqual(bearing_to_angle(3 * np.pi / 4), np.pi + 3 * np.pi / 4)
self.assertEqual(bearing_to_angle(135, is_rad=False), 315)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jordanjoewatson/natural-language-classifier",
"score": 2
} |
#### File: src/network/activation.py
```python
def sign(val):
    if val >= 0:
        return 1
    return -1
```
#### File: natural-language-classifier/src/processData.py
```python
from words import wordAnalysis as words
from words import syllableAnalysis as syllables
from characters import characterAnalysis as chars
from meanings import speechTags as speech
from meanings import entityAnalysis as entities
from meanings import sentimentAnalysis as sentiment
def convertText(text):
ls = []
wordsLs = words.wordList(text)
#WORD ANALYSIS
wordCount = words.wordCount(text)
stopwordsPercent = words.percentageWithoutStopwords(text)
avgWordLength = words.avgWordLen(text)
stringLength = words.stringLength(text)
averageSyllables = syllables.averageSyllables(text)
ls.append(wordCount)
ls.append(stopwordsPercent)
ls.append(avgWordLength)
#ls.append(stringLength)
ls.append(averageSyllables)
#CHARACTER ANALYSIS
charDict = chars.updateDict(text)
firstCharDict = chars.updateFirstChar(wordsLs)
consecutiveCharDict = chars.updateConsecutiveChar(text)
ls += charDict + firstCharDict# + consecutiveCharDict
#METASEMANTICS ANALYSIS
alphaPercent = entities.alphaPercentage(text,wordCount)
entityPercent = entities.entityPercentage(text,wordCount)
ls.append(alphaPercent)
ls.append(entityPercent)
speech.updateTags(text)
nounPercent = speech.nounPercentage(wordCount)
verbPercent = speech.verbPercentage(wordCount)
adjPercent = speech.adjPercentage(wordCount)
pronounPercent = speech.pronounPercentage(wordCount)
conjunctionPercent = speech.conjunctionPercentage(wordCount)
digitPercent = speech.digitPercentage(wordCount)
foreignPercent = speech.foreignPercentage(wordCount)
listPercent = speech.listPercentage(wordCount)
toPercent = speech.toPercentage(wordCount)
interjectionPercent = speech.interjectionPercentage(wordCount)
possessivePercent = speech.possessivePercentage(wordCount)
adverbPercent = speech.adverbPercentage(wordCount)
determinerPercent = speech.determinerPercentage(wordCount)
anonPercent = speech.anonPercentage(wordCount)
ls.append(verbPercent)
ls.append(nounPercent)
ls.append(adjPercent)
ls.append(pronounPercent)
ls.append(conjunctionPercent)
ls.append(digitPercent)
ls.append(foreignPercent)
ls.append(listPercent)
ls.append(toPercent)
ls.append(interjectionPercent)
ls.append(determinerPercent)
ls.append(possessivePercent)
ls.append(adverbPercent)
ls.append(anonPercent)
nounList = speech.getNounList()
verbList = speech.getVerbList()
adjList = speech.getAdjectiveList()
pronounList = speech.getPronounList()
conjunctionList = speech.getConjunctionList()
digitList = speech.getDigitList()
adverbList = speech.getAdverbList()
foreignList = speech.getForeignList()
listList = speech.getListList()
toList = speech.getToList()
interjectionList = speech.getInterjectionList()
determinerList = speech.getDeterminerList()
posList = speech.getPossessiveList()
anonList = speech.getAnonList()
ls += (adjList + pronounList + nounList + verbList + conjunctionList + digitList)
ls += (adverbList + foreignList + listList + toList + interjectionList + determinerList)
ls += (posList + anonList)
sentiment_ls = sentiment.sentiment_analysis(text)
ls.append(sentiment_ls[0])
ls.append(sentiment_ls[1])
return ls
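# Hypothetical call (the exact feature count and ordering depend on the words/,
# characters/ and meanings/ helper modules, which are not shown here):
# features = convertText("The quick brown fox jumps over the lazy dog.")
# print(len(features))  # one flat numeric feature vector per input string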
``` |
{
"source": "jordankeener/ncaa_rosters",
"score": 3
} |
#### File: ncaa_rosters/code/_proj_functions.py
```python
from urllib.request import urlopen
from urllib.request import FancyURLopener
from bs4 import BeautifulSoup
import pandas as pd
import requests
import re
class MyOpener(FancyURLopener):
version = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11)'
myopener = MyOpener()
def select_cols(table, colnames):
    # BeautifulSoup table, list of strings --> list of lists
    # For the given table, finds the first column whose header matches each
    # requested column name (case-insensitive) and returns the selected
    # columns as a list of rows (lists)
header = table.find_all('tr')[0]
rows = table.find_all('tr')[1:]
header = header.find_all('th')
header_items = []
for item in header:
x = item.get_text().strip()
header_items.append(x)
indexes_ordered = []
for colname in colnames:
col_matches = []
for item in header_items:
pattern = r'(.*)' + colname + r'(.*)'
x = re.match(pattern, item, flags = re.I) != None
col_matches.append(x)
try:
col_index = col_matches.index(True)
indexes_ordered.append(col_index)
except ValueError:
indexes_ordered.append(None)
print(indexes_ordered)
cols_text = []
for j in indexes_ordered:
if j is not None:
cols_text.append(header_items[j])
else:
cols_text.append('N/A')
print(cols_text)
roster = []
for row in rows:
row = row.find_all('td')
row_items = []
for item in row:
x = item.get_text().strip()
row_items.append(x)
result = []
for index in indexes_ordered:
if index is not None:
try:
result.append(row_items[index])
except IndexError:
print("Index Error - trying previous column")
result.append(row_items[index - 1])
else:
result.append("N/A")
result += cols_text
roster.append(result)
return roster
def get_table(url, tableid):
# for when rosters are stored in table objects
# url (str), tableid (str) --> BeautifulSoup table
html = myopener.open(url)
soup = BeautifulSoup(html, 'lxml')
table = soup.find(id = tableid)
return table
def get_list(url, classname, numlists=1):
# for when rosters are stored in unordered list objects
# url (str), list class (str) --> BeautifulSoup list
html = myopener.open(url)
soup = BeautifulSoup(html, 'lxml')
if numlists == 1:
mylist = soup.find('ul', class_ = classname)
elif numlists > 1:
slice = numlists - 1
mylist = soup.find_all('ul', class_ = classname)
print(len(mylist))
mylist = mylist[slice]
else:
mylist = []
return mylist
def get_roster(sport_id, url_template, sporturl, objectid_dict,
object_type = 'table',
subitem = 'tr',
offset = 0):
# uses either get_table() or get_list(), then collects rosters
url = url_template.format(sporturl = sporturl)
if (object_type == 'table'):
tableid = objectid_dict[sport_id]
table = get_table(url, tableid)
elif (object_type == 'list'):
classid = objectid_dict[sport_id]
table = get_list(url, classid)
if (offset > 0):
roster = table.find_all(subitem)[offset:]
else:
roster = table.find_all(subitem)
return roster
def get_grid(url, divid):
html = myopener.open(url)
soup = BeautifulSoup(html, 'lxml')
grid = soup.find('div', id = divid)
return grid
def make_player_df(name, hometown, sport_id, school):
player_df = pd.DataFrame(index=[0])
player_df['name'] = name
player_df['hometown'] = hometown
player_df['sport'] = sport_id
player_df['school'] = school
return player_df
def gather_rosters_ul(sports_dict, url_template):
    # takes dictionary with sport name keys and url/list info plus a url template
    # --> DataFrame with a roster for each sport: finds the player name, takes the
    # first hometown <span> containing a comma as the hometown, and takes the next
    # item as a guess for the high school/previous school
roster_list = []
classname = 'sidearm-roster-players'
for (sport_id, sport_info) in sports_dict.items():
sporturl = sport_info[0]
ulnum = sport_info[1]
print(sport_id)
url = url_template.format(sporturl = sporturl)
table = get_list(url, classname, numlists=ulnum)
players = table.find_all('li')
for player in players:
name = player.find('div',
class_ = 'sidearm-roster-player-name').find('a').getText().strip()
hometown_list = player.find('div',
class_ = 'sidearm-roster-player-class-hometown').find_all('span')
try:
hometown = 'N/A'
for (j, item) in enumerate(hometown_list):
x = item.getText().strip()
if ',' in x:
hometown = x
try:
high_school = hometown_list[j+1].getText().strip()
except IndexError:
high_school = 'N/A'
break
else:
continue
except IndexError:
hometown = 'N/A'
high_school = 'N/A'
else:
if hometown == 'N/A':
high_school = 'N/A'
player_row = [name, hometown, high_school, sport_id]
roster_list.append(player_row)
colnames = ['name', 'hometown', 'high school?', 'sport']
full_df = pd.DataFrame(roster_list, columns = colnames)
return full_df
def gather_rosters_grid(sports_dict):
# takes a dictionary with sport name key and url
# --> DataFrame with roster for each sport
full_df = pd.DataFrame()
def_gender = 'N/A'
for (sport_id, sporturl) in sports_dict.items():
cur_gender = def_gender
url = sporturl[0]
grid = get_grid(url, 'roster-grid-layout')
players = grid.find_all('div')
roster = []
print(sport_id)
for player in players:
try:
x = player.get_text().strip()
if x == 'Men':
cur_gender = 'Men'
elif x == 'Women':
cur_gender = 'Women'
else:
pass
gender = cur_gender
except:
try:
gender = cur_gender
except:
gender = def_gender
lev1 = player.find_all('div')
for div in lev1:
try:
name = div.find('div', class_ = 'player-name').find('a').get_text().strip()
lev2 = div.find('div', class_ = 'info')
town_school = lev2.find('div', class_ = 'hometown')
town_school = town_school.find('span', class_ = 'data').get_text().strip()
hs_start = town_school.rfind('(') + 1
hs_end = town_school.rfind(')')
ht_end = town_school.find('(')
if ht_end > 0:
hometown = town_school[:ht_end].strip()
if (hs_start > 0) & (hs_end > 0):
high_school = town_school[hs_start:hs_end].strip()
row = [name, hometown, high_school, gender]
roster.append(row)
except AttributeError:
continue
colnames = ['name', 'hometown', 'high_school', 'gender']
x = pd.DataFrame(roster, columns = colnames)
x['sport'] = sport_id
        full_df = pd.concat([full_df, x])
print('done' + '\n')
return full_df
def gather_rosters_table(sports_dict, find_cols, url_template):
# takes a dictionary with sport name keys and url/table info,
# list of column names to find, and url template
# --> DataFrame with roster for each sport based on columns given
cols_text = []
for c in find_cols:
cols_text.append(c + " text")
colnames = find_cols + cols_text
full_df = pd.DataFrame()
for (sport_id, sport_info) in sports_dict.items():
sporturl = sport_info[0]
table_id = sport_info[1]
url = url_template.format(sporturl=sporturl)
table = get_table(url, table_id)
print(sport_id + '\n')
roster = select_cols(table, find_cols)
x = pd.DataFrame(roster, columns = colnames)
x['sport'] = sport_id
        full_df = pd.concat([full_df, x])
return full_df
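# Hypothetical usage sketch (the sport name, table id and URL template below
# are made-up placeholders, not values from this project):
# sports = {'baseball': ('baseball', 'DataTables_Table_0')}
# template = 'https://athletics.example.edu/sports/{sporturl}/roster'
# df = gather_rosters_table(sports, ['Name', 'Hometown'], template)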
``` |
{
"source": "JordanKoeller/InterviewPrep",
"score": 4
} |
#### File: JordanKoeller/InterviewPrep/binary_heap.py
```python
class BinaryHeap:
def __init__(self):
self.values = []
    def parentIndex(self, index):
        return (index - 1) // 2
def swap(self, i, j):
tmp = self.values[i]
self.values[i] = self.values[j]
self.values[j] = tmp
def add(self, value):
if len(self.values) == 0:
self.values.append(value)
return
self.values.append(value)
i = len(self.values) - 1
        while i > 0 and self.values[i] < self.values[self.parentIndex(i)]:
self.swap(i, self.parentIndex(i))
i = self.parentIndex(i)
    def remove(self):
        if len(self.values) == 0:
            raise ValueError("Cannot pop a min value")
        ret = self.values[0]
        # Move the last element to the root, then sift it down to restore the heap property.
        last = self.values.pop()
        if self.values:
            self.values[0] = last
            i = 0
            while True:
                l = i * 2 + 1
                r = i * 2 + 2
                smallest = i
                if l < len(self.values) and self.values[l] < self.values[smallest]:
                    smallest = l
                if r < len(self.values) and self.values[r] < self.values[smallest]:
                    smallest = r
                if smallest == i:
                    break
                self.swap(i, smallest)
                i = smallest
        return ret
def testBinaryHeap():
values = [2, 5, 3, 1, 6, 7, 4, 5, 3, 7, 8, 9,6, 4,3,2, 1, 0]
sorts = sorted(values)
heap = BinaryHeap()
for v in values:
heap.add(v)
print(heap.values)
for v in values:
vv = heap.remove()
print(vv)
testBinaryHeap()
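def testAgainstSorted():
    # Cross-check sketch (assumption: random data is acceptable for this test):
    # draining the min-heap must reproduce the fully sorted input.
    import random
    data = [random.randint(0, 100) for _ in range(200)]
    heap = BinaryHeap()
    for v in data:
        heap.add(v)
    drained = [heap.remove() for _ in range(len(data))]
    assert drained == sorted(data), "heap drain order must equal sorted order"

testAgainstSorted()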
```
#### File: Hackerrank/graphs/shortest_reach_graph.py
```python
import math
class Node:
def __init__(self, index):
self.index = index
self.edges = {}
self.hopsFromS = math.inf
def addEdge(self, destinationNode):
self.edges[destinationNode.index] = destinationNode
destinationNode.edges[self.index] = self
class Graph:
def __init__(self, numNodes):
self.nodes = [Node(i) for i in range(0, numNodes)]
self.edges = []
def connect(self, i, j):
self.nodes[i].addEdge(self.nodes[j])
    # Breadth-first search; every edge has a fixed weight of 6
def find_all_distances(self, s):
start = self.nodes[s]
start.hopsFromS = 0
pq = [start]
while pq:
top = pq.pop(0)
for edgeK in top.edges:
if top.edges[edgeK].hopsFromS > top.hopsFromS + 6:
top.edges[edgeK].hopsFromS = top.hopsFromS + 6
pq.append(top.edges[edgeK])
print(" ".join([str(n.hopsFromS) if math.isfinite(n.hopsFromS) else '-1' for n in self.nodes if n.index != s]))
t = int(input())
for i in range(t):
n,m = [int(value) for value in input().split()]
graph = Graph(n)
for i in range(m):
x,y = [int(x) for x in input().split()]
graph.connect(x-1,y-1)
s = int(input())
graph.find_all_distances(s-1)
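# Example session (input format assumed from the read loop above):
#   1        <- number of test cases
#   4 2      <- n nodes, m edges
#   1 2
#   1 3
#   1        <- start node
# Expected output: "6 6 -1" (nodes 2 and 3 are one edge away at weight 6 each;
# node 4 is unreachable, reported as -1).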
```
#### File: JordanKoeller/InterviewPrep/split_sum.py
```python
def can_split_equally(arr):
if len(arr) == 0:
return 0
total = sum(arr)
if total % 2 == 1:
return -1
target = total // 2
matrix = [[0 for _ in range(target + 1)] for _ in range(len(arr) + 1)]
for value in range(1, target + 1):
if arr[0] <= value:
matrix[0][value] = arr[0]
for max_index in range(1, len(arr)):
for target_value in range(1, target + 1):
matrix[max_index][target_value] = matrix[max_index - 1][target_value] # Don't add current element
if arr[max_index] <= target_value:
best_including = matrix[max_index - 1][target_value - arr[max_index]] + arr[max_index]
if best_including <= target_value and matrix[max_index][target_value] < best_including:
matrix[max_index][target_value] = best_including
return matrix[len(arr) - 1][target]
def test_helper(*data):
result = can_split_equally(data)
print(data, " => ", result, " Sum=", sum(data), sum(data) // 2 == result)
test_helper(1, 1, 3, 4, 5)
test_helper(1, 1, 1)
test_helper(1, 2)
test_helper(1, 2, 3)
test_helper(1)
test_helper(0)
test_helper(1, 5, 2, 2, 3, 4, 3)
test_helper(5,100 ,3, 1, 1)
test_helper()
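# Worked example (hand-checked): arr = (1, 1, 3, 4, 5) has total 14, so
# target = 7; the DP finds a subset summing to exactly 7 (e.g. {3, 4} or
# {1, 1, 5}), so can_split_equally returns 7 == sum(arr) // 2 and the harness
# above reports True.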
``` |
{
"source": "JordanKoeller/Mirage2",
"score": 2
} |
#### File: mirage/lens_analysis/LightCurves.py
```python
from __future__ import division, print_function
from abc import ABC, abstractmethod
import random
from scipy.stats import ks_2samp, anderson_ksamp, mannwhitneyu, energy_distance
from scipy.signal import argrelmax
from scipy.signal import wiener
from scipy.optimize import minimize
from astropy import units as u
import numpy as np
# 1) Sobell Edge detection working beautifully
# 2) Doublet isolation
# 3) Asymmetry binning - show pulling from bins
# 4) Histograms
# 5) Best fit line for the peak shift
def get_analyzed_events(filename:str,base,min_sep_coeff,with_peaks=False,**event_finding_args):
from mirage import lens_analysis as la
data = la.load(filename)
matrix = data.lightcurve_matrix
ret_asyms = []
ret_shifts = []
ret_peaks = []
lc1 = data[base].lightcurves
r_g = data.simulation.parameters.quasar.r_g
peaks = map(lambda e: e.get_events(min_separation=min_sep_coeff*r_g,**event_finding_args),lc1)
err = 0
for ind in range(int(len(lc1)-1)):
peak_batch = next(peaks)
for peak in peak_batch:
try:
symm = peak.symmetry(min_sep_coeff*r_g)
ret_asyms.append(symm)
lines = data.correlate_lc_peaks([peak],matrix)
shifts = calculate_peak_shifts(lines)
ret_shifts.append(shifts)
if with_peaks:
ret_peaks.append(peak.curve)
except:
err += 1
print("Accumulated %d errors of %d total. Error rate of %.2f percent" % (err,len(ret_shifts)+err,100*err/((len(ret_shifts)+err))))
if with_peaks:
return {'shifts':ret_shifts, 'asymmetry':ret_asyms, 'peaks':ret_peaks}
else:
return {'shifts':ret_shifts, 'asymmetry':ret_asyms}
def get_all_lightcurves(filename:str,base,min_sep_coeff,**event_finding_args):
from mirage import lens_analysis as la
data = la.load(filename)
matrix = data.lightcurve_matrix
ret_asyms = []
ret_shifts = []
ret_peaks = []
lc1 = data[base].lightcurves
r_g = data.simulation.parameters.quasar.r_g
peaks = map(lambda e: e.get_events(min_separation=min_sep_coeff*r_g,**event_finding_args),lc1)
err = 0
for ind in range(int(len(lc1)-1)):
peak_batch = next(peaks)
for peak in peak_batch:
try:
symm = peak.symmetry(min_sep_coeff*r_g)
ret_asyms.append(symm)
lines = data.correlate_lc_peaks([peak],matrix)
shifts = calculate_peak_shifts(lines)
ret_shifts.append(shifts)
ret_peaks.append(peak)
except:
err += 1
peak_slices = data.correlate_lc_peaks(ret_peaks, matrix)
ret_df = []
for i in range(peak_slices.shape[0]):
for j in range(peak_slices.shape[1]):
asym = ret_asyms[i]
shifts = ret_shifts[i]
ret_df.append([i,j,asym,shifts,peak_slices[i,j]])
return ret_df
def calculate_peak_shifts(data:'np.ndarray'):
    shifts = np.ndarray(data.shape[:2], dtype=np.int16)  # one scalar shift per (event, curve) pair
for i in range(data.shape[0]):
baseline = np.argmax(data[i,0])
for j in range(data.shape[1]):
shift = abs(np.argmax(data[i,j]) - baseline)
shifts[i,j] = shift
return shifts
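# Example: for a (1, 3) stack of correlated curves where the baseline curve
# data[0, 0] peaks at index 10 and the other two peak at indices 12 and 7,
# calculate_peak_shifts returns array([[0, 2, 3]]).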
def into_buckets(dataset):
buckets = {}
for i in range(len(dataset['asymmetry'])):
asym = dataset['asymmetry'][i]
shifts = dataset['shifts'][i]
try:
ind = int(float(asym)*100)
if ind in buckets:
buckets[ind].append(shifts)
else:
buckets[ind] = [shifts]
except:
pass
return buckets
def bucket_and_clean(dataset):
buckets = into_buckets(dataset)
for k in buckets.keys():
arr = np.array(buckets[k])
mean = np.mean(arr,axis=0)
std = np.std(arr,axis=0)
buckets[k] = {'mean':mean.flatten(),'std':std.flatten(),'asym':k/100,'num':arr.shape[0]}
return buckets
def scatter_buckets(dataset):
    from matplotlib import pyplot as plt
    for k in sorted(dataset.keys()):
        for p in dataset[k]:
            plt.plot(p.flatten())
        plt.title(str(k))
        input("Press Enter!")
        plt.close()
def sample_buckets(dataset):
    from matplotlib import pyplot as plt
    for k in sorted(dataset.keys()):
        print(len(dataset[k]))
        for p in dataset[k][0:5]:
            plt.plot(p.flatten())
        plt.title(str(k))
        input("Press Enter!")
        plt.close()
class LightCurveBatch(object):
def __init__(self,data:'list[LightCurve]'):
# if isinstance(data,list):
# self._data = np.array(data)
# else:
self._data = list(data)
def plottables(self,unit='uas'):
for curve in self:
yield curve.plottable(unit)
def smooth_with_window(self,window:int):
d2 = self._data.copy()
for curveI in range(len(self)):
curve = self._data[curveI]
d2[curveI] = curve.smooth_with_window(window)
return LightCurveBatch(d2)
def __add__(self,other):
assert isinstance(other,LightCurveBatch)
total = self._data + other._data
return LightCurveBatch(total)
def __getitem__(self,ind):
if isinstance(ind,int):
return self._data[ind]
else:
return LightCurveBatch(self._data[ind])
def __len__(self):
return len(self._data)
@classmethod
def from_arrays(cls,data:np.ndarray, query_ends:u.Quantity,with_id=False):
ret_data = np.ndarray(len(data),dtype=object)
for i in range(len(data)):
datum = data[i]
if len(datum) > 0:
ends = query_ends[i]
s = ends[0:2]
e = ends[2:]
if with_id:
ret_data[i] = LightCurve(datum,s,e,i)
else:
ret_data[i] = LightCurve(datum,s,e)
return cls(ret_data)
class LightCurve(object):
def __init__(self,data,start,end,line_id = -1):
self._data = np.array(data).flatten()
# print(self._data.shape)
self._start = start
self._end = end
self._line_id = line_id
self._sample_density = self.distance_axis
self._sample_density = (self._sample_density[1] - self._sample_density[0]).to('uas')
def __len__(self):
return len(self._data)
def get_slices(self,slices):
ret1 = list(map(lambda slice_object: self[slice_object],slices))
return LightCurveBatch(ret1)
@property
def line_id(self):
if self._line_id != -1:
return self._line_id
else:
raise AttributeError("LightCurve instance does not have a trial id.")
@property
def sample_density(self):
return self._sample_density
@property
def ends(self):
return self._start,self._end
@property
def curve(self):
return -2.5*np.log10(self._data)
@property
def magnification_curve(self):
return self._data
@property
def query_points(self):
x = np.linspace(self._start[0].value,self._end[0].value,len(self))
y = np.linspace(self._start[1].value,self._end[1].value,len(self))
ret = np.ndarray((len(x),2))
ret[:,0] = x
ret[:,1] = y
return u.Quantity(ret,self._start.unit)
@property
def distance_axis(self):
qpts = self.query_points.value
x = qpts[:,0]
y = qpts[:,1]
xs = x[0]
ys = y[0]
diffx = x - xs
diffy = y - ys
res = (diffx**2+diffy**2)**0.5
return u.Quantity(res,self.query_points.unit)
@property
def length(self):
return self.distance_axis[-1]
def plottable(self,unit='uas'):
x = self.distance_axis.to(unit)
y = self.curve
return x,y
def get_event_slices(self,threshold=0.8/u.uas,smoothing_factor=1.1*u.uas,min_separation=u.Quantity(5.0,'uas'),require_isolation=False):
x = self.distance_axis.to(min_separation.unit)
dx = x[1] - x[0]
min_sep = int((min_separation/dx).to('').value)
threshold = (threshold*dx).to('').value
smoothing_factor = (smoothing_factor/dx).to('').value
peaks = self.get_peaks(threshold,smoothing_factor,min_sep,require_isolation)
obj_list = []
errors = 0
for p in peaks:
s_min = max([0,p-min_sep])
s_max = min([p+min_sep,len(x)-1])
if s_max - s_min > 3:
obj_list.append(slice(s_min,s_max,1))
else:
errors += 1
if errors > 0:
print("Accumulated %d errors" % errors)
return obj_list
def get_events(self,threshold=0.8/u.uas,smoothing_factor=1.1*u.uas,min_separation=u.Quantity(5.0,'uas'),require_isolation=False):
slice_list = self.get_event_slices(threshold, smoothing_factor, min_separation, require_isolation)
ret = []
for slicer in slice_list:
lc = LightCurveSlice(self,slicer.start,slicer.stop,self._line_id)
ret.append(lc)
# print("Returning batch with %d events" % len(ret))
return LightCurveBatch(ret)
def get_peaks(self,threshold=0.8,smoothing_factor=1.1,min_sep=1,require_isolation=False):
'''
Locate peaks of this light curve via a sobel edge detection convolution.
Recommended settings for my 80k batch, trail 5 R_g:
threshold = 0.8
smoothing_factor=1.1
'''
from mirage.calculator import sobel_detect
curve = self._data
return sobel_detect(curve,threshold,smoothing_factor,min_sep,require_isolation)
def smooth_with_window(self,window:int):
data = self._data
data = wiener(data,window)
return LightCurve(data,self._start,self._end,self._line_id)
@property
def asymmetry(self):
line = self.curve
peak = np.argmax(line)
slice_length = min(peak,len(line)-peak)-1
lhs = line[peak-slice_length:peak][::-1]*100
rhs = line[peak+1:peak+1+slice_length]*100
diffs = (rhs-lhs)**2
tot = np.sqrt(diffs.sum())
return tot
def __getitem__(self,given):
if isinstance(given,slice):
return LightCurveSlice(self,given.start,given.stop,self._line_id)
elif isinstance(given,int):
return (self.curve[given],self.query_points[given])
else:
raise TypeError("Must give a valid slice object")
class LightCurveSlice(LightCurve):
def __init__(self,parent_curve,start,stop,line_id=-1):
qpts = parent_curve.query_points
curve = parent_curve._data
begin = qpts[start]
end = qpts[stop]
LightCurve.__init__(self,curve[start:stop],begin,end,line_id)
self._s = start
self._e = stop
self._parent_curve = parent_curve
@property
def curve_segment(self):
y = self._parent_curve.curve
return y[self._s:self._e]
def plottable_segment(self,unit='uas'):
x, y = self._parent_curve.plottable(unit)
x = x[self._s:self._e]
y = y[self._s:self._e]
return x, y
def trimmed_to_size(self,size:u.Quantity):
from mirage.calculator import trimmed_to_size_slice
x,y = self.plottable_segment(size.unit)
dx = x[1] - x[0]
slice_length = int((size / dx).value)
slc = trimmed_to_size_slice(y,slice_length)
return self[slc[0]:slc[1]]
def __getitem__(self,slc):
if isinstance(slc,slice):
start,stop = (slc.start,slc.stop)
return self.parent_curve[self._s+start:self._s+stop]
@property
def slice_object(self):
return slice(self._s,self._e,1)
@property
def parent_curve(self):
return self._parent_curve
class Event(object):
def __init__(self,light_curves,parent_index):
self._data = np.array(list(map(lambda l: l._data,light_curves)))
self._parent_index = parent_index
self._asymmetry = light_curves[parent_index].asymmetry
@property
def asymmetry(self):
return self._asymmetry
@property
def curve(self):
return self._data[self._parent_index]
def plot(self):
from matplotlib import pyplot as plt
for lc in self._data:
plt.plot(lc)
def prominences_above(self,cutoff:float) -> int:
"""Computes the number of peaks with prominence `cutoff` or higher in the parent light curve."""
from mirage.calculator.peak_finding import prominence
candidates = np.arange(len(self._data[self._parent_index]))  # previously: argrelmax(self._data[self._parent_index], order=4)[0]
proms = len(list(filter(lambda x: prominence(self._data[self._parent_index], x) > cutoff, candidates)))
return proms
@property
def shift_array(self):
maxes = np.argmax(self._data,axis=1)
return abs(maxes - maxes[0])
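# Worked example for shift_array (hand-traced, not library output): if the
# per-curve argmax indices are [4, 6, 3], the shifts relative to the parent
# curve at index 0 are abs([4, 6, 3] - 4) == [0, 2, 1].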
class EventClassificationTable(object):
def __init__(self,events,group_count):
self._bins = {}
self._numGroups = group_count
events = list(events)
separations = list(map(lambda e: e.asymmetry,events))
min_sep = min(separations)
max_sep = max(separations)
dx = (max_sep - min_sep)/group_count
get_ind = lambda asym: int(round((asym - min_sep)/dx))
errors = 0
for event in events:
try:
key = get_ind(event.asymmetry)
if key not in self._bins:
self._bins.update({key:[event]})
else:
self._bins[key].append(event)
except IndexError:
errors += 1
# print("Accumulated %d errors" % errors)
@property
def keys(self):
return list(self._bins.keys())
def __getitem__(self,idd):
return self._bins[idd]
def plot_samples(self,key,num):
from matplotlib import pyplot as plt
import random
bucket = self[key]
samples = random.sample(bucket,num)
for sample in samples:
# Need to normalize our curves
curve = sample.curve
curve -= curve.min()
if curve[0] >= curve[-1]: curve = curve[::-1]
peak_index = np.argmax(curve)
x_ax = np.arange(-peak_index,len(curve)-peak_index)
plt.plot(x_ax,curve)
def merge_buckets(self,key_list):
ret = []
for key in key_list:
ret = ret + self[key]
return ret
def append(self,other):
for k in other.keys:
if k in self._bins:
self._bins[k] = self._bins[k] + other[k]
else:
self._bins.update({k:other[k]})
return self
def to_histograms(self,keys,density=20):
alls = self.merge_buckets(keys)
return EventClassificationTable.mk_histograms(alls,density)
@staticmethod
def mk_histograms(eventlist,density=20):
nparr = np.array(list(map(lambda event: event.shift_array,eventlist)))
ret = []
binArr = [i for i in range(density+1)]
for i in range(nparr.shape[1]):
cts, bins = np.histogram(nparr[:,i],bins=binArr)
ret.append((cts,bins[:-1]))
return ret
def __repr__(self):
lines = "EventClassificationTable"
for k,v in self._bins.items():
lines += ("\n\t" + str(k) + " : " + str(len(v)))
return lines
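# Binning sketch (hand-traced): with asymmetries spanning [0, 10] and
# group_count=2, dx == 5 and get_ind maps 0 -> key 0, 5 -> key 1, 10 -> key 2,
# so up to group_count + 1 keys can appear because the top endpoint rounds
# into its own bin.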
```
#### File: mirage/lens_analysis/MagnificationMap.py
```python
import numpy as np
from mirage.util import PixelRegion, zero_vector
class MagnificationMap(object):
def __init__(self,simulation,data):
sp = simulation.parameters.source_plane
theta_E = simulation.parameters.theta_E
r = simulation['magmap'].resolution
self._source_plane = PixelRegion(zero_vector(theta_E),sp.dimensions.to(theta_E),r)
self._data = np.flip(data,1)
self._scaling = 1
@property
def data(self):
if self._scaling == 1:
return -2.5*np.log10(self._data+0.001)
else:
return self._data
def setScaling(self,kind):
if kind == "linear":
self._scaling = 0
else:
self._scaling = 1
@property
def region(self):
return self._source_plane
def slice_line(self,start,end):
from mirage.calculator import arbitrary_slice_axis
return arbitrary_slice_axis(start,end,self.region,self._data)
def export(self,filename,fmt='fits',**kwargs):
'''
Saves the magnification map image to the specified file.
Parameters:
`filename` (:class:`str`): Name of the file to save the map to.
`fmt` (:class:`str`): File format to use. Options include `fits`, `png`, and `jpeg`, though only `fits` is currently implemented. Note that with `fits`, a default header
is also included, describing the map. Default option is `fits`.
If additional keyword arguments are supplied and `fmt` is `fits`, then the extra arguments will be converted to strings
and saved in the `fits` header.
'''
if fmt == 'fits':
from mirage.io import FITSFileManager
fm = FITSFileManager()
fm.open(filename+'.'+fmt)
# headers = {''}
fm.write(self.data,**kwargs)
fm.close()
return "Magmap saved to " + filename + '.' + fmt
else:
raise ValueError("Unsupported export format '%s'; only 'fits' is currently implemented" % fmt)
def smooth_and_detect(self,sigma):
from scipy.ndimage import gaussian_filter, sobel
rins = self._data
smoothed = gaussian_filter(rins, sigma,mode='nearest')
ret1 = sobel(smoothed,mode='nearest')
ret2 = sobel(smoothed.T,mode='nearest')
ret = abs(ret1) + abs(ret2.T)
return ret
def histogram(self,nbins=100,**kwargs):
from matplotlib import pyplot as plt
data = self.data
return plt.hist(data.flatten(),log=True,bins=nbins,histtype="step",**kwargs)
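# Scaling note (a sketch of the behaviour above, not official docs): with the
# default scaling, `data` returns -2.5*log10(value + 0.001), a magnitude-like
# scale where brighter pixels are more negative; call setScaling("linear")
# to get the raw map values back.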
```
#### File: mirage/parameters/CalculationDependency.py
```python
from abc import ABC, abstractmethod
class CalculationDependency(ABC):
"""Abstract base class that specifies any subclasses have attributes that when changed may cause any ray-tracing that has occurred prior to the change to need to be redone.
There is one abstract method that must be overridden. The :method:`is_similar`, which returns a `bool` specifying if the system must be recalculated.
"""
def __init__(self):
pass
@abstractmethod
def is_similar(self,other:'CalculationDependency') -> bool:
pass
# @abstractmethod
# def update(self,*args,**kwargs) -> None:
# pass
```
#### File: mirage/util/Jsonable.py
```python
from abc import ABC, abstractmethod, abstractproperty, abstractclassmethod
from astropy.units import Quantity
class Jsonable(ABC):
"""Abstract base class for objects that have methods for converting to and from JSON representations."""
def __init__(self):
pass
@abstractproperty
def json(self):
pass
@abstractclassmethod
def from_json(cls,js):
pass
def __repr__(self):
return str(self.json)
@staticmethod
def encode_quantity(quant: Quantity) -> dict:
"""Convenience function for converting an :class:`astropy.units.Quantity` instance into a JSON representation.
"""
ret = {}
if isinstance(quant.value,int) or isinstance(quant.value,float):
ret['values'] = quant.value
else:
ret['values'] = quant.value.tolist()
ret['unit'] = str(quant.unit)
return ret
@staticmethod
def decode_quantity(js: dict) -> Quantity:
"""
Convenience function for constructing a :class:`astropy.units.Quantity` from a JSON representation.
"""
values = js['values']
unit = js['unit']
return Quantity(values,unit)
#Method template
# @property
# def json(self):
# @classmethod
# def from_json(cls,js):
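# Round-trip sketch (hand-traced values):
#   js = Jsonable.encode_quantity(Quantity([1.0, 2.0], 'uas'))
#   # -> {'values': [1.0, 2.0], 'unit': 'uas'}
#   q = Jsonable.decode_quantity(js)  # Quantity([1.0, 2.0], 'uas')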
```
#### File: mirage/views/__init__.py
```python
def get_renderer(sim):
from .Renderer import LensRenderer
return LensRenderer(sim)
try:
from .View import ImageCurveView
from .MagmapView import MagnificationMapView
from .LensView import LensView, AnimationController
except ImportError as e:
print("Warning: Matplotlib not detected. Everything inside the View package is disabled.")
try:
from .Window import Window
from .Widgets import ParametersWidget, SimulationWidget
from PyQt5.QtWidgets import QApplication
import sys
_app = QApplication(sys.argv)
except ImportError as e:
print("Warning: PyQt5 not detected.")
```
#### File: mirage/views/Window.py
```python
from PyQt5.QtWidgets import QMainWindow
from PyQt5 import uic
class Window(QMainWindow):
"""Generic wndow object for loading views into. A very minimalist object with a "File" menu item to save, load, and quit."""
windowQML = "mainwindow.ui"
def __init__(self):
QMainWindow.__init__(self)
from os import environ, sep
prefix = None
if "MIRAGE_HOME" in environ:
prefix = environ['MIRAGE_HOME']+sep+"mirage"+sep+"views"+sep+"qml"+sep
else:
prefix = ""
uic.loadUi(prefix+self.windowQML,self)
self._save_action = lambda: print("Dummy")
self._open_action = lambda: print("Dummy")
self.actionOpen.triggered.connect(lambda: self._open_action())
self.actionSave.triggered.connect(lambda: self._save_action())
def bind_widget(self,widget):
box = self.layoutBox
box.addWidget(widget)
self._widget = widget
self._save_action, self._open_action = widget.actionTriggers()
def get_object(self,*args,**kwargs):
return self._widget.get_object(*args,**kwargs)
def set_object(self,*args,**kwargs):
return self._widget.set_object(*args,**kwargs)
@property
def widget(self):
return self._widget
``` |
{
"source": "JordanKoeller/OpenCensus",
"score": 2
} |
#### File: lambdas/app/main.py
```python
try:
import unzip_requirements
except ImportError:
pass
import json
import os
import csv
import logging
import numpy as np
import boto3
from botocore.exceptions import ClientError
from census_table import CensusTable
# Set up logging
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)s: %(asctime)s: %(message)s')
logger = logging.getLogger('Main')
S3_ERROR = {
"isBase64Encoded": False,
"statusCode": 500,
"headers": {"Content-Type": "application/json"},
"body": '{"Messsage": "Could not communicate with s3"}'
}
def main(event, context):
context.log("====================BEGIN TRANSACTION========================\n")
context.log("========================REQUEST==============================\n")
context.log(event)
context.log("\n")
path = event['path']
handlerLookup = {
'/can-you-migrate': handleCanYouMigrate,
'/get-country-headers': handleRequestHeaders,
'/get-country-headers/hierarchy': handleRequestCountryGroupsHierarchy,
'/migration-history': handleRequestHistoricalMigration,
}
resp = handlerLookup[path](event, context)
context.log("========================RESPONSE=============================\n")
context.log(resp)
context.log("\n")
context.log("======================END TRANSACTION========================\n")
return resp
def _respond(code, body):
return {
"isBase64Encoded": False,
"statusCode": code,
"headers": {
"Content-Type": "text/plain",
"Access-Control-Allow-Origin": "*",
},
"body": json.dumps(body)
}
def _getTable():
BUCKET_NAME = "open-justice-resources" # os.environ['CENSUS_BUCKET']
OBJECT_NAME = "aggregatedSheet.csv" # os.environ['CENSUS_OBJECT']
s3 = boto3.client('s3')
sheet = None
try:
response = s3.get_object(Bucket=BUCKET_NAME, Key=OBJECT_NAME)
sheet = response['Body'].iter_lines()
except ClientError as e:
logging.error(e)
return None
stringParser = map(lambda e: str(e, 'utf-8'), sheet)
parser = csv.reader(stringParser)
return CensusTable(parser)
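# Sketch of what _getTable feeds CensusTable (the CSV layout is an assumption,
# not documented here): iter_lines() yields bytes, the map() decodes each line
# to str, and csv.reader emits one list of fields per row, e.g.
#   list(csv.reader(iter(['year,Ireland,Italy', '1920,5,7'])))
#   # -> [['year', 'Ireland', 'Italy'], ['1920', '5', '7']]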
def handleCanYouMigrate(event, context):
table = _getTable()
if table:
requestInfo = json.loads(event['body'])
country = requestInfo['country']
year = requestInfo['year']
expectedWaitRange = table.expectedWaitTimeRange(country, year)
return _respond(200, {
'waitMin': int(expectedWaitRange[0]),
'waitMax': int(expectedWaitRange[1]),
'applied': int(table.applied.get(country, year)),
'accepted': int(table.accepted.get(country, year)),
'waitlist': int(table.waitlist.get(country,year))
})
return S3_ERROR
def handleRequestHeaders(event, context):
table = _getTable()
if table:
headers = table.headers
return _respond(200, {'countries': headers})
return S3_ERROR
def handleRequestHistoricalMigration(event, context):
table = _getTable()
if table:
body = {
'headers': table.headers,
'years': table.years.tolist(),
'applications': table.applied.tolist(),
'accepted': table.accepted.tolist(),
'waitlist': table.waitlist.tolist(),
}
return _respond(200, body)
return S3_ERROR
def handleRequestCountryGroupsHierarchy(event, context):
groupings = {
'North America & South America': {
"Canada & Newfoundland": ["Canada & Newfoundland"],
"Mexico": ["Mexico"],
"West Indies": ["West Indies"],
"Other America": ["Other America"]
},
'Asia': {
"Southeast Asia": ["Asian Turkey"],
"Central Asia": ["China", "India"],
"East Asia": ["Japan", "Korea", "Philippines"],
"Other Asia": ["Other Asia"]
},
'Europe': {
"Western Europe": ["Great Britain", "Ireland", "Other NW Europe"],
"Scandinavia": ["Scandinavia"],
"Central Europe": ["Germany", "Italy", "Other Central Europe"],
"Southern Europe": ["Italy", "Other Southern Europe"],
"Eastern Europe": ["USSR & Baltic States", "Other Eastern Europe", "Poland"]
},
# 'Africa' {},
# 'Australia': {},
}
return _respond(200, {'countryGroups': groupings})
if __name__ == '__main__':
import sys
args = sys.argv
os.environ.update({
'CENSUS_BUCKET': 'open-justice-resources',
'CENSUS_OBJECT': 'aggregatedSheet.csv'
})
if 'test' in args:
import unittest
unittest.main(module='test.test_handler')
else:
mockReq = {
"body": json.dumps({'country': 'Ireland', 'year': 1920}),
"path": '/can-you-migrate'
}
class MockContext:
"""Minimal stand-in for the Lambda context so main() can call context.log locally."""
def log(self, msg):
print(msg)
resp = main(mockReq, MockContext())
print(resp)
```
#### File: colonial-1970-migration/chart19/parse19.py
```python
import os
try:
os.chdir(os.path.join(os.getcwd(), '../../../tmp'))
print(os.getcwd())
except OSError:
pass
# %% [markdown]
# # Parsing the output of AWS Textract
#
# I tried to parse via tesseract. It didn't work: while I could identify specific cells in the tables, the OCR was not accurate enough to be useful, and I couldn't get a well-fitting product.
#
# ## I now try using AWS Textract
#
# Preliminary runs suggest it is more accurate. Still not error-free, but much closer. However, it outputs individual words rather than lines or columns, so there is still some work to be done to transform the sequence of words into a table.
#
#
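# The gist of the word-to-table transform, sketched (Textract's real output is
# richer than this): every WORD block carries a bounding box, so each word is
# assigned to the header column whose horizontal span it overlaps, then the
# words in a column are sorted by vertical center to recover row order. The
# BlockTable class below implements this idea.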
# %%
# Import packages and some setup.
import csv
import os
import json
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
chartDirs = ['chart19', 'chart20', 'chart21', 'chart22', 'chart23']
# %%
# Convert a specific page of the pdf to png.
import pdf2image
def getPngPage(fname) -> Image.Image:
tmp = pdf2image.convert_from_path(
fname,
dpi=600,
grayscale=True
)[0]
tmpData = np.asarray(tmp)
return Image.fromarray((tmpData > 128).astype(np.uint8)*255)
# %%
class BlockTable:
class Block:
# Page coordinates are based on the top left corner of the page. Increasing value right and down.
def __init__(self, text, confidence, boundingBox, padding=None):
self.text = text
self.confidence = confidence
self.boundingBox = boundingBox
self.padding = padding or [1.0, 1.0]
@property
def cx(self):
return (2 * self.boundingBox['Left'] + self.boundingBox['Width']) / 2
@property
def cy(self):
return (2 * self.boundingBox['Top'] + self.boundingBox['Height']) / 2
@property
def width(self):
return self.boundingBox['Width']
@property
def height(self):
return self.boundingBox['Height']
@property
def left(self):
return self.cx - (self.width * self.padding[0]) / 2
@property
def right(self):
return self.cx + (self.width * self.padding[0]) / 2
@property
def top(self):
return self.cy - (self.height * self.padding[1]) / 2
@property
def bottom(self):
return self.cy + (self.height * self.padding[1]) / 2
def overlapsColumns(self, other):
return (self.left < other.left and self.right > other.right) or \
(other.left < self.left and other.right > self.right) or \
(self.left < other.left and self.right > other.left) or \
(self.left < other.right and self.right > other.right)
@property
def asNumber(self):
if self.text == '(10)':
return -1
retStr = ""
for c in self.text:
if c in '0123456789':
retStr = retStr + c
if len(retStr):
return int(retStr)
return -1
def inspect(self, img):
x, y = img.size
box = ((self.left-0.02)*x, (self.top - 0.01)*y, (self.right+0.02)*x, (self.bottom + 0.01)*y)
cut = img.crop(box)
cut.show()
resp = input("Please enter the number shown \n(%s) > " % self.text)
if resp:
self.text = resp
print("Reset text to %s" % self.text)
def __repr__(self, *args, **kwargs):
return self.text
def __init__(self, headers, js, sourceImage):
self.headers = headers
self.img = sourceImage
blocks = filter(lambda blk: blk['BlockType'] == 'WORD', js['Blocks'])
blocks = list(map(lambda b: BlockTable.Block(b['Text'],
b['Confidence'],
b['Geometry']['BoundingBox'],
[1, 1]), blocks))
# print([b.text for b in blocks[:40]])
self.blockHeaders = {}
for block in blocks:
if list(map(lambda b: b.text, self.blockHeaders.keys())) == self.headers:
break
if block.text in headers and block.cy < 0.25:
block.padding = [3,1]
self.blockHeaders.update({block: []})
for block in blocks:
for k in self.blockHeaders.keys():
if k.cy < block.cy and block.overlapsColumns(k):
self.blockHeaders[k].append(block)
break
for header, column in self.blockHeaders.items():
column.sort(key=lambda e: e.cy)
@property
def numpyArray(self):
columns = list(map(lambda e: [e[0]] + e[1], self.blockHeaders.items()))
columns.sort(key=lambda lst: lst[0].cx)
numRows = max(map(lambda col: len(col) - 1, columns))
ret = np.full((numRows, len(columns)), -1, dtype=np.int32)
for i, col in enumerate(columns):
for j, cell in enumerate(col):
if j > 0:
ret[j-1,i] = cell.asNumber
return ret
def inspectMistakes(self, threshold):
"""
Given a confidence threshold, visually inspect and correct any boxes
with a confidence score lower than the threshold. Obvious OCR artifacts
(a stray '1' four characters from the end of a long number) are auto-corrected first.
"""
for k, v in self.blockHeaders.items():
for i, block in enumerate(v):
if len(block.text) >= 5 and block.text[-4] == '1' and \
i > 0 and i < len(v) - 1 and \
(block.asNumber / v[i - 1].asNumber > 5 or block.asNumber / v[i + 1].asNumber > 5):
print("Auto-fixing %s to %s" % (block.text, block.text[:-4] + block.text[-3:]))
block.text = block.text[:-4] + block.text[-3:]
break
if block.confidence < threshold or (k.text != 'Year' and len(block.text) >= 4 and block.text[-4] == '1'):
block.inspect(self.img)
# %%
# Function for getting the table from a singular page.
def getTable(colHeaders, js, tableImg):
table = BlockTable(colHeaders, js, tableImg)
threshold = 80
table.inspectMistakes(threshold)
return table.numpyArray
# %%
table19 = getTable(
['Year', *[str(i) for i in range(89, 102)]],
json.load(open('/home/jordan/OpenJustice/resources/colonial-1970-migration/chart19/apiResponse.json')),
getPngPage('/home/jordan/OpenJustice/resources/colonial-1970-migration/chart19/chart19.pdf'))
# %%
# print(table19.shape)
plt.plot(table19[:,1])
plt.show()
np.savetxt('table19.csv', table19, delimiter=',')
``` |
{
"source": "JordanKoeller/Pysch",
"score": 3
} |
#### File: pysh/bash_vm/shell_command.py
```python
from __future__ import annotations
import subprocess
import os
from typing import List, Dict, Iterator, Optional, Tuple
class ShellCommand:
def __init__(self, cmd: str):
self.run_args = [
"bash", "-c", f'{cmd}'
]
# self.run_args: List[str] = [executable, *args]
def exec(self, **extra_environ: str) -> ShellCommandOutput:
result = subprocess.run(self.run_args,
stdout=subprocess.PIPE,
env={
**os.environ,
**(extra_environ if extra_environ else {})
}
)
print("Finished shell command")
return ShellCommandOutput(str(result.stdout, 'utf-8'), result.returncode)
class ShellCommandOutput:
def __init__(self, output_body: str, code: int):
self._code = code
self._value = output_body
@property
def succeeded(self) -> bool:
return self._code == 0
@property
def code(self) -> int:
return self._code
@property
def value(self) -> str:
return self._value
def lines(self) -> List[ShellCommandOutput]:
return [
ShellCommandOutput(substr, self.code)
for substr in self.value.splitlines()
if substr
]
def __iter__(self) -> Iterator[str]:
return iter(self._split_tokens())
def __str__(self) -> str:
return f'<STDOUT value={self.value} code={self.code} >'
def _split_tokens(self) -> List[str]:
ret = []
in_quotes = None
accumulator: List[str] = []
for char in self.value:
if _whitespace(char) and not in_quotes:
# Flush the current token at unquoted whitespace; repeated
# whitespace simply falls through without starting a token.
if accumulator:
ret.append(''.join(accumulator))
accumulator = []
elif in_quotes is None and _quotes(char):
in_quotes = char
elif in_quotes and in_quotes == char:
in_quotes = None
if accumulator:
ret.append(''.join(accumulator))
accumulator = []
elif in_quotes and _quotes(char):
raise ValueError(
f"Found unmatched quote characters in string {self.value}")
else:
accumulator.append(char)
return ret
def _quotes(c: str) -> bool:
return c in ['"', "'"]
def _whitespace(c: str) -> bool:
return str.isspace(c)
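# Usage sketch (output hand-traced, not from a test run):
#   out = ShellCommand("echo \"a 'b c' d\"").exec()
#   out.succeeded  # True when bash exits 0
#   list(out)      # -> ['a', 'b c', 'd']: split on unquoted whitespace,
#                  #    with quoted substrings kept together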
```
#### File: pysh/terminal/statement_processor.py
```python
from typing import List, Optional
class StatementProcessor:
"""
StatementProcessor
Accepts lines as input, aggregates into statements.
Some Notes:
+ If the statement you are in consists of an indented block, an empty line is used
to signify the end of the block. In practice, this means the user needs to have one
empty line to cause the statement to flush and start executing.
+ To support indention blocks, this class needs to track indention level, as well as know
when the appropriate blank line has been entered to terminate the statement, if an indention
is present.
Has getters to get:
+ current indention level
+ text of completed statement
+ boolean indicating if the statement is complete or not
"""
def __init__(self):
self._statement_lines: List[str] = []
self._indention_level: int = 0
self._backtick_count = 0
def process_line(self, line: str) -> Optional[str]:
"""
Accepts Lines as input and appends them to an array of lines in the current statement.
Returns the full statement if this line completes a statement.
"""
trimmed_line = self._trim_comments(line)
self._statement_lines.append(line)
for c in line:
if c == '`':
self._backtick_count += 1
if trimmed_line.endswith(':'):
self._indention_level += 1
if self._ends_statement(line):
return self._flush_statement()
return None
##############################
# Private methods follow #
##############################
def _flush_statement(self) -> str:
"""
This method cleans up the state of the StatementProcessor after a statement
has been fully formed. Returns the fully formed statement.
"""
self._indention_level = 0
self._backtick_count = 0
ret = '\n'.join(self._statement_lines)
self._statement_lines = []
return ret
def _trim_comments(self, line: str) -> str:
if '#' in line:
return line.split('#')[0].rstrip()
return line.rstrip()
def _ends_statement(self, line: str) -> bool:
"""
Checks to see if the passed-in line is sufficient to end the current statement.
There are a few different things to check.
A statement is only over if all of the following are met:
+ There is an even number of backticks (no unclosed bash injections)
+ The indention level is zero AND the line is not an emptystring.
+ The indention level is greater than zero AND the passed-in line is an empty line.
"""
return self._backtick_count % 2 == 0 and (
(self._indention_level > 0 and line.rstrip() == '') or
(self._indention_level == 0 and line.rstrip() != ''))
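# Behaviour sketch (hand-traced):
#   sp = StatementProcessor()
#   sp.process_line("x = 1")      # -> "x = 1" (complete immediately)
#   sp.process_line("if x:")      # -> None (indented block opened)
#   sp.process_line("    y = 2")  # -> None
#   sp.process_line("")           # -> "if x:\n    y = 2\n" (blank line flushes)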
```
#### File: Pysch/test/test_py_vm.py
```python
from unittest import TestCase
from io import StringIO
from unittest.mock import patch
from pysh.py_vm.types import Primatives, UserType, UserValue
from pysh.py_vm.session import PyVm
class TestUserValues(TestCase):
def setUp(self):
self.s = UserType(Primatives.String)
self.i = UserType(Primatives.Int)
self.f = UserType(Primatives.Float)
self.b = UserType(Primatives.Bool)
self.ls = UserType(Primatives.Array, Primatives.String)
self.li = UserType(Primatives.Array, Primatives.Int)
self.lf = UserType(Primatives.Array, Primatives.Float)
self.lb = UserType(Primatives.Array, Primatives.Bool)
self.di = UserType(Primatives.Dict, Primatives.Int)
self.df = UserType(Primatives.Dict, Primatives.Float)
self.ds = UserType(Primatives.Dict, Primatives.String)
self.db = UserType(Primatives.Dict, Primatives.Bool)
def testPrimativesEquality(self):
self.assertTrue(Primatives.String == Primatives.String)
self.assertFalse(Primatives.String == Primatives.Int)
self.assertEqual(Primatives.String, Primatives.String)
self.assertNotEqual(Primatives.String, Primatives.Float)
self.assertEqual(Primatives.String, Primatives.String)
self.assertNotEqual(Primatives.String, Primatives.Bool)
self.assertNotEqual(Primatives.Array, Primatives.Dict)
self.assertEqual(Primatives.Array, Primatives.Array)
def testCanInstantiateAllPrimativeTypes(self):
self.assertTrue(self.s.convertable(self.i))
self.assertTrue(self.s.convertable(self.f))
self.assertTrue(self.s.convertable(self.b))
self.assertTrue(self.s.convertable(self.s))
self.assertTrue(self.ls.convertable(self.li))
self.assertTrue(self.ls.convertable(self.ls))
self.assertTrue(self.ls.convertable(self.lf))
self.assertTrue(self.ls.convertable(self.lb))
self.assertFalse(self.ls.convertable(self.f))
self.assertFalse(self.ls.convertable(self.ds))
def testCanConvertBetweenUserValues(self):
string5 = UserValue("5", UserType(Primatives.String))
int5 = string5.convert_type(UserType(Primatives.Int))
intArray = UserValue([1, 2, 3], UserType(
Primatives.Array, Primatives.Int))
self.assertEqual(int5.value, int(string5.value))
self.assertRaises(ValueError, lambda: intArray.convert_type(
UserType(Primatives.Dict, Primatives.Int)))
stringArray = intArray.convert_type(
UserType(Primatives.Array, Primatives.String))
self.assertEqual(stringArray.value, ["1", "2", "3"])
self.assertEqual(intArray.value, [1, 2, 3])
intDict = UserValue({"Key1": 1, "Key2": 2, "Key3": 4},
UserType(Primatives.Dict, Primatives.Int))
strDict = intDict.convert_type(
UserType(Primatives.Dict, Primatives.String))
self.assertEqual({"Key1": 1, "Key2": 2, "Key3": 4}, intDict.value)
self.assertEqual(
{"Key1": "1", "Key2": "2", "Key3": "4"}, strDict.value)
def testUserTypeFactoryCanDetermineVariableTypeAtRuntime(self):
string = "asdf"
integer = 123
double = 1.23
boolean = False
self.assertEqual(UserType.factory(string), UserType(Primatives.String))
self.assertEqual(UserType.factory(integer), UserType(Primatives.Int))
self.assertEqual(UserType.factory(double), UserType(Primatives.Float))
self.assertEqual(UserType.factory(boolean), UserType(Primatives.Bool))
class TestPythonSession(TestCase):
def testCanMockStdOut(self):
with patch('sys.stdout', new=StringIO()) as fakeOutput:
print('hello world')
self.assertEqual(fakeOutput.getvalue().strip(), 'hello world')
def testCanExecutePythonScriptAndCanSeePassedInScope(self):
testCmd = """print("In the command")"""
runner = PyVm()
with patch('sys.stdout', new=StringIO()) as fakeOutput:
runner.run_py_command(testCmd)
self.assertEqual(fakeOutput.getvalue().strip(), 'In the command')
with patch('sys.stdout', new=StringIO()) as fakeOutput:
runner.add_variable('hello', UserValue("world", UserType(Primatives.String)))
runner.run_py_command("print(hello)")
value = fakeOutput.getvalue().strip()
self.assertEqual(value, 'world')
def testPythonExecCodeCanAddVariablesToUserScope(self):
runner = PyVm()
runner.run_py_command("myNewVariable = 25")
self.assertIn("myNewVariable", runner._user_variables)
self.assertEqual(runner._user_variables["myNewVariable"], 25)
runner.run_py_command("""
def MyNewFunction(other):
return myNewVariable + other
""")
self.assertIn("MyNewFunction", runner._user_variables)
``` |
{
"source": "Jordan-Kowal/challenges",
"score": 3
} |
#### File: advent_of_code/2020/day_04.py
```python
import re
# Personal
from _shared import read_input
# --------------------------------------------------------------------------------
# > Helpers
# --------------------------------------------------------------------------------
class PassportForm:
LINE_REGEX = r"([a-z]{3}):([^ ]+)"
FIELDS = [
("byr", True), # field_name, required
("iyr", True),
("eyr", True),
("hgt", True),
("hcl", True),
("ecl", True),
("pid", True),
("cid", False),
]
def __init__(self, line):
"""
Read the passport info to fill a PassportForm
:param str line: Passport data from the input file
"""
self.line = line
self.fill_form()
self.find_invalid_fields()
def fill_form(self):
"""Parses the input file to set our form fields/values"""
for match in re.finditer(self.LINE_REGEX, self.line):
field = match.group(1)
value = match.group(2)
setattr(self, field, value)
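# Parsing sketch: for the line "byr:1937 iyr:2017 hgt:183cm", LINE_REGEX yields
# the pairs ('byr', '1937'), ('iyr', '2017'), ('hgt', '183cm'), each of which
# is stored on the form via setattr.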
def find_invalid_fields(self):
"""
Checks for missing fields
:return: The required fields that are missing from our form
:rtype: set
"""
invalid_fields = set()
for field_name, required in self.FIELDS:
value = getattr(self, field_name, None)
# Check required
if required and value is None:
invalid_fields.add(field_name)
# Custom validation
if value is not None:
function_name = f"validate_{field_name}"
field_validation_function = getattr(self, function_name)
if not field_validation_function():
invalid_fields.add(field_name)
self.invalid_fields = invalid_fields
@property
def is_valid(self):
"""
:return: Whether the form is valid
:rtype: bool
"""
return len(self.invalid_fields) == 0
def validate_byr(self):
"""
:return: Whether BYR is within the range
:rtype: bool
"""
value = int(self.byr)
return 1920 <= value <= 2002
def validate_iyr(self):
"""
:return: Whether IYR is within the range
:rtype: bool
"""
value = int(self.iyr)
return 2010 <= value <= 2020
def validate_eyr(self):
"""
:return: Whether EYR is within the range
:rtype: bool
"""
value = int(self.eyr)
return 2020 <= value <= 2030
def validate_hgt(self):
"""
Checks the HGT is valid and within the right range, depending on the unit of measure
:return: Whether HGT is within the range
:rtype: bool
"""
regex = r"^(\d+)(cm|in)$"
match = re.match(regex, self.hgt)
if match is not None:
value = int(match.group(1))
units = match.group(2)
if units == "cm":
return 150 <= value <= 193
else:
return 59 <= value <= 76
return False
def validate_hcl(self):
"""
:return: Whether the HCL format is valid
:rtype: bool
"""
regex = r"^#[a-f0-9]{6}$"
return not re.match(regex, self.hcl) is None
def validate_ecl(self):
"""
:return: Whether the ECL value is in the list of accepted values
:rtype: bool
"""
return self.ecl in {
"amb",
"blu",
"brn",
"gry",
"grn",
"hzl",
"oth",
}
def validate_pid(self):
"""
:return: Whether PID is a chain of 9 digits
:rtype: bool
"""
regex = r"^\d{9}$"
return not re.match(regex, self.pid) is None
@staticmethod
def validate_cid():
"""
:return: No custom validation. Always valid
:rtype: bool
"""
return True
def get_passport_info_from_input():
"""
Fetches the input file and rebuilds passport info as a one-liner
:return: List string of passport info
:rtype: [str]
"""
passport_list = []
text = ""
for line in read_input("day_04.txt"):
if line != "":
text += f" {line}"
else:
passport_list.append(text[1:])
text = ""
passport_list.append(text[1:]) # Adding the last one
return passport_list
# --------------------------------------------------------------------------------
# > Main
# --------------------------------------------------------------------------------
passport_info = get_passport_info_from_input()
passport_forms = [PassportForm(line) for line in passport_info]
valid_forms = [form for form in passport_forms if form.is_valid]
print(len(valid_forms))
```
#### File: advent_of_code/2020/day_11.py
```python
from enum import Enum
# Personal
from _shared import read_input
# --------------------------------------------------------------------------------
# > Helpers
# --------------------------------------------------------------------------------
class Grid:
def __init__(self, grid):
"""
Creates a grid from a list of spots
Then updates the spot instances by computing their adjacent spots
:param [[Spot]] grid: 2d array for Seat instances
"""
self.grid = grid
self.x_max = len(self.grid[0]) - 1
self.y_max = len(self.grid) - 1
self.compute_adjacent_spots()
self.compute_visible_seats()
def __getitem__(self, item):
"""
:param tuple item: Expects (x, y) coordinates
:return: The spot instance at the (x, y) coordinates
:rtype: Spot
"""
x, y = item
return self.grid[y][x]
def reset(self):
"""Every spot that was OCCUPIED is now set to EMPTY"""
for spot in self.spots:
if spot.status == Spot.Status.OCCUPIED:
spot.status = Spot.Status.EMPTY
def compute_adjacent_spots(self):
"""
Registers the adjacent spots of each spot from the grid
Includes diagonally-adjacent spots
"""
for spot in self.spots:
spot_x, spot_y = spot.pos
spot_x_min = 0 if spot_x == 0 else spot_x - 1
spot_x_max = self.x_max if spot_x == self.x_max else spot_x + 1
spot_y_min = 0 if spot_y == 0 else spot_y - 1
spot_y_max = self.y_max if spot_y == self.y_max else spot_y + 1
adjacent_spots = [
self[x, y]
for x in range(spot_x_min, spot_x_max + 1)
for y in range(spot_y_min, spot_y_max + 1)
]
adjacent_spots = [
spot for spot in adjacent_spots if spot.pos != (spot_x, spot_y)
]
spot.adjacent_spots = adjacent_spots
def compute_visible_seats(self):
"""
Finds the first/closest actual seat in each direction (8 total) for each seat
The list of seats is then stored in the Spot instance
"""
for spot in self.spots:
closest_visible_seats = []
left_iter = list(reversed(range(0, spot.x)))
right_iter = list(range(spot.x + 1, self.x_max + 1))
top_iter = list(reversed(range(0, spot.y)))
bottom_iter = list(range(spot.y + 1, self.y_max + 1))
# Left
for new_x in left_iter:
potential_seat = self[new_x, spot.y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Right
for new_x in right_iter:
potential_seat = self[new_x, spot.y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Top
for new_y in top_iter:
potential_seat = self[spot.x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Bottom
for new_y in bottom_iter:
potential_seat = self[spot.x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Top Left
for new_x, new_y in zip(left_iter, top_iter):
potential_seat = self[new_x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Top Right
for new_x, new_y in zip(right_iter, top_iter):
potential_seat = self[new_x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Bottom Left
for new_x, new_y in zip(left_iter, bottom_iter):
potential_seat = self[new_x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
# Bottom Right
for new_x, new_y in zip(right_iter, bottom_iter):
potential_seat = self[new_x, new_y]
if potential_seat.is_seat:
closest_visible_seats.append(potential_seat)
break
spot.closest_visible_seats = closest_visible_seats
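# Example of the scan above (hand-traced): from seat S in the row "L.S.#",
# looking left skips the floor tile and stops at the 'L'; looking right stops
# at the '#'. The diagonals pair the x and y iterators with zip, advancing one
# step along each axis per iteration.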
def problem_1(self):
"""Perform an turn update until nothing changes"""
while True:
[spot.guess_next_status("adjacent_spots", 4) for spot in self.spots]
[spot.apply_next_status() for spot in self.spots]
if all(not spot.has_changed for spot in self.spots):
break
def problem_2(self):
"""Perform an turn update until nothing changes"""
while True:
[spot.guess_next_status("closest_visible_seats", 5) for spot in self.spots]
[spot.apply_next_status() for spot in self.spots]
if all(not spot.has_changed for spot in self.spots):
break
@property
def spots(self):
"""
:return: List of all spots from left to right, top to bottom
:rtype: [Spot]
"""
return [
self[x, y] for x in range(self.x_max + 1) for y in range(self.y_max + 1)
]
@classmethod
def from_file_content(cls, file_content):
"""
Creates a Grid instance from the daily input file
:param [str] file_content:
:return: The generated Grid
:rtype: Grid
"""
grid = []
for y, line in enumerate(file_content):
row = []
for x, value in enumerate(line):
row.append(Spot(value, x, y))
grid.append(row)
return cls(grid)
class Spot:
"""Room emplacement where one might be able to seat"""
class Status(Enum):
"""The status of a spot"""
EMPTY = "L"
OCCUPIED = "#"
FLOOR = "."
def __init__(self, value, x, y):
"""
Creates a spot with coordinates and a status
:param str value: Initial value from the input file for this spot
:param int x: The X coordinate in a room
:param int y: The Y coordinate in a room
"""
self.pos = (x, y)
self.x, self.y = x, y
self.status = self.status_map[value]
self.next_status = None
self.adjacent_spots = []
self.closest_visible_seats = []
self.has_changed = False
def __repr__(self):
return f"Seat({self.x, self.y})"
def apply_next_status(self):
"""Set the next_status as our current status"""
self.has_changed = self.status != self.next_status
self.status = self.next_status
self.next_status = None
def guess_next_status(self, spot_referential, threshold):
"""
Based on the current status and the adjacent/visible spots,
Guesses what the next status for our spot will be.
However, the current status is not yet updated
:param str spot_referential: The attribute storing the related seats
:param int threshold: Amount of related occupied seats for the seat to be freed
"""
changed = False
referential = getattr(self, spot_referential)
# Empty: Becomes taken if all adjacent are empty or floor
if self.status == self.Status.EMPTY:
if all(
spot.status in {self.Status.EMPTY, self.Status.FLOOR}
for spot in referential
):
self.next_status = self.Status.OCCUPIED
changed = True
# Taken: Becomes empty if at least N adjacent are occupied
if self.status == self.Status.OCCUPIED:
occupied_spots = [
spot for spot in referential if spot.status == self.Status.OCCUPIED
]
if len(occupied_spots) >= threshold:
self.next_status = self.Status.EMPTY
changed = True
# Floor: No change
if self.status == self.Status.FLOOR:
pass
# No change
if not changed:
self.next_status = self.status
@property
def is_seat(self):
return self.status in {self.Status.EMPTY, self.Status.OCCUPIED}
@property
def status_map(self):
"""
:return: A hashmap that maps string inputs to Status enums
:rtype: dict
"""
return {
"L": self.Status.EMPTY,
"#": self.Status.OCCUPIED,
".": self.Status.FLOOR,
}
# --------------------------------------------------------------------------------
# > Main
# --------------------------------------------------------------------------------
file_content = read_input("day_11.txt")
grid = Grid.from_file_content(file_content)
# Problem 1
grid.problem_1()
occupied_spots = [spot for spot in grid.spots if spot.status == Spot.Status.OCCUPIED]
print(len(occupied_spots))
grid.reset()
# Problem 2
grid.problem_2()
occupied_spots = [spot for spot in grid.spots if spot.status == Spot.Status.OCCUPIED]
print(len(occupied_spots))
grid.reset()
```
#### File: advent_of_code/2020/day_20.py
```python
import re
from time import perf_counter
# Personal
from _shared import read_input
# --------------------------------------------------------------------------------
# > Helpers
# --------------------------------------------------------------------------------
class Camera:
def __init__(self, id, photo_grid):
self.id = id
self.grid = photo_grid
self.def_grid = None
self.cut_def_grid = None
self.size = len(self.grid)
self.possible_neighbors = set()
self.generate_grid_variations()
def __str__(self):
return self.__repr__()
def __repr__(self):
return f"Camera(id:{self.id})"
def compute_def_grid_left_match(self, left_camera):
length_iter = list(range(self.size))
border_to_match = "".join([left_camera.def_grid[i][-1] for i in length_iter])
# print(self, left_camera)
# print(border_to_match)
for grid in self.grids:
left_border = "".join([grid[i][0] for i in length_iter])
# print(left_border)
if left_border == border_to_match:
self.def_grid = grid
break
def compute_cut_def_grid(self):
temp = [list(row) for row in self.def_grid.copy()]
temp = temp[1:-1] # delete first and last row
result = []
for row in temp:
result.append(row[1:-1])
result = ["".join(row) for row in result]
self.cut_def_grid = result
def compute_def_grid_top_match(self, top_camera):
border_to_match = "".join(top_camera.def_grid[-1])
# print(self, top_camera)
# print(border_to_match)
for grid in self.grids:
top_border = "".join(grid[0])
# print(top_border)
if top_border == border_to_match:
self.def_grid = grid
break
def get_all_possible_borders(self):
all_borders = [get_grid_borders_as_string(grid) for grid in self.grids]
all_borders_flattened = [x for borders in all_borders for x in borders]
return set(all_borders_flattened)
def find_possible_neighbors(self, cameras):
valid_neighbors = set()
possible_borders = self.get_all_possible_borders()
for camera in cameras:
if camera.id == self.id:
continue
camera_borders = camera.get_all_possible_borders()
for border in possible_borders:
if border in camera_borders:
valid_neighbors.add(camera)
break
self.possible_neighbors = valid_neighbors
def generate_grid_variations(self):
grids = []
grid_1 = self.grid
grid_2 = rotate_grid_90_deg(self.grid)
grid_3 = rotate_grid_90_deg(grid_2)
grid_4 = rotate_grid_90_deg(grid_3)
for grid in [grid_1, grid_2, grid_3, grid_4]:
top_flipped = top_flip_grid_of_strings(grid)
# both_flipped = top_flip_grid_of_strings(side_flipped)
# grids.extend([grid, top_flipped, side_flipped, both_flipped])
grids.append(grid)
grids.append(top_flipped)
self.grids = grids
@classmethod
def from_file_content(cls, lines):
title = lines[0]
match = re.fullmatch(r"Tile (\d+):", title)
camera_id = int(match.group(1))
grid = [photo_line for photo_line in lines[1:]]
return cls(camera_id, grid)
def get_grid_borders_as_string(grid):
length = len(grid)
return (
"".join(grid[0]),
"".join(grid[-1]),
"".join([grid[i][0] for i in range(length)]),
"".join([grid[i][-1] for i in range(length)]),
)
def mirror_from_grid(grid):
grid_mirror = [row.copy() for row in grid.copy()]
grid_copy = [row.copy() for row in grid.copy()]
for i, row in enumerate(grid_copy):
for j, value in enumerate(row):
grid_mirror[j][i] = value
return grid_mirror
def top_flip_grid_of_strings(grid):
grid = [list(row) for row in grid]
grid_copy = grid.copy()
for i in range(len(grid)):
grid_copy[i] = grid[len(grid) - 1 - i]
return ["".join(row) for row in grid_copy]
def side_flip_grid_of_strings(grid):
grid = [list(row) for row in grid]
grid_copy = grid.copy()
for i, row in enumerate(grid):
for j, value in enumerate(row):
grid_copy[i][j] = grid[i][len(row) - 1 - j]
return ["".join(row) for row in grid_copy]
def rotate_grid_90_deg(grid):
grid_copy = grid.copy()
reverted = list(zip(*reversed(grid_copy)))
return ["".join(row) for row in reverted]
# --------------------------------------------------------------------------------
# > Main
# --------------------------------------------------------------------------------
# Initialization
start = perf_counter()
content = read_input("day_20.txt")
# Build cameras
cameras = []
acc = []
line_count = len(content)
for i, line in enumerate(content):
if i + 1 == line_count:
acc.append(line)
camera = Camera.from_file_content(acc)
cameras.append(camera)
if line == "":
camera = Camera.from_file_content(acc)
cameras.append(camera)
acc = []
else:
acc.append(line)
# >>> Problem 1
for camera in cameras:
camera.find_possible_neighbors(cameras)
corner_camera_ids = [c.id for c in cameras if len(c.possible_neighbors) == 2]
# >>> Problem 2:
# COMPUTE THE GRID PLACEMENTS
corner_cameras = [c for c in cameras if len(c.possible_neighbors) == 2]
start_camera = corner_cameras[0]
for start_grid_index in range(len(start_camera.grids)):
try:
camera_map = {camera.id: camera for camera in cameras}
grid = []
row_grid = []
row = 0
col = 0
last_camera = None
last_row_index = None # Computed at the end of the first row
while len(camera_map) > 0:
# --- First turn ---
if len(grid) == 0 and len(row_grid) == 0:
start_camera.def_grid = start_camera.grids[start_grid_index]
row_grid.append(start_camera)
del camera_map[start_camera.id]
last_camera = start_camera
col += 1
continue
remaining_neighbors = [
c for c in last_camera.possible_neighbors if c.id in camera_map
]
# --- First or last row ---
if row == 0 or row == last_row_index:
next_camera = [
c for c in remaining_neighbors if len(c.possible_neighbors) <= 3
][0]
if len(row_grid) == 0:
next_camera.compute_def_grid_top_match(last_camera)
else:
next_camera.compute_def_grid_left_match(row_grid[col - 1])
# Not the corner yet
if len(next_camera.possible_neighbors) == 3:
row_grid.append(next_camera)
del camera_map[next_camera.id]
last_camera = next_camera
col += 1
continue
# We've reached the corner, but start or end?
if len(next_camera.possible_neighbors) == 2:
row_grid.append(next_camera)
del camera_map[next_camera.id]
# Start of the line
if col == 0:
last_camera = next_camera
col += 1
continue
# End of the line
else:
grid.append(row_grid)
last_camera = row_grid[0]
last_row_index = int(len(cameras) / len(row_grid)) - 1
row_grid = []
row += 1
col = 0
continue
# --- Middle row ---
# Start of the line
if col == 0:
next_camera = remaining_neighbors[0]
next_camera.compute_def_grid_top_match(last_camera)
last_camera = next_camera
row_grid.append(next_camera)
del camera_map[next_camera.id]
col += 1
continue
# Rest
else:
above_camera = grid[row - 1][col]
next_camera = [
c for c in above_camera.possible_neighbors if c.id in camera_map
][0]
if len(row_grid) == 0:
next_camera.compute_def_grid_top_match(last_camera)
else:
next_camera.compute_def_grid_left_match(row_grid[col - 1])
row_grid.append(next_camera)
del camera_map[next_camera.id]
# Not the end of the line
if len(next_camera.possible_neighbors) == 4:
col += 1
last_camera = next_camera
# End of the line
else:
last_camera = row_grid[0]
col = 0
row += 1
grid.append(row_grid)
row_grid = []
continue
print("FOUND A VALID PATTERN")
break
except Exception as e:
print(e)
continue
# Remove the borders of each tile for the valid grid
for row in grid:
for camera in row:
camera.compute_cut_def_grid()
# We might have built the grid transposed, so handle both orientations just in case
grid_mirror = mirror_from_grid(grid)
# Merge tile rows together
photo_height = len(grid[0][0].cut_def_grid)
text_rows = []
for row in grid:
for i in range(photo_height):
text = ""
for camera in row:
text += camera.cut_def_grid[i]
text_rows.append(text)
mirror_text_rows = []
for row in grid_mirror:
for i in range(photo_height):
text = ""
for camera in row:
text += camera.cut_def_grid[i]
mirror_text_rows.append(text)
# Create all text variations by rotating and flipping
text_variations = []
text_1 = text_rows.copy()
text_2 = rotate_grid_90_deg(text_1)
text_3 = rotate_grid_90_deg(text_2)
text_4 = rotate_grid_90_deg(text_3)
mirror_1 = mirror_text_rows.copy()
mirror_2 = rotate_grid_90_deg(mirror_1)
mirror_3 = rotate_grid_90_deg(mirror_2)
mirror_4 = rotate_grid_90_deg(mirror_3)
for variation in [
text_1,
text_2,
text_3,
text_4,
mirror_1,
mirror_2,
mirror_3,
mirror_4,
]:
top_flipped = top_flip_grid_of_strings(variation)
variation = [list(row) for row in variation.copy()]
top_flipped = [list(row) for row in top_flipped.copy()]
text_variations.append(variation)
text_variations.append(top_flipped)
# The (column, row) offsets that make up the pattern, relative to the anchor cell
INDEXES_TO_CHECK = (
(0, 0),
(-18, 1),
(-13, 1),
(-12, 1),
(-7, 1),
(-6, 1),
(-1, 1),
(0, 1),
(1, 1),
(-17, 2),
(-14, 2),
(-11, 2),
(-8, 2),
(-5, 2),
(-2, 2),
)
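# These are (column, row) offsets relative to the anchor '#': a match counts
# only if every offset also lands on a '#' (the puzzle's sea-monster shape),
# and matched cells are then rewritten to 'O' so they are excluded from the
# final '#' tally.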
# One of our text should have the pattern (once or more)
valid_texts = []
for text in text_variations:
found = False
x_max = len(text)
y_max = len(text[0])
x = 0
y = 0
for x in range(x_max):
for y in range(y_max):
value = text[x][y]
if value == "#":
for a, b in INDEXES_TO_CHECK:
new_x = x + b
new_y = y + a
try:
if new_y > 0 and new_x > 0 and text[new_x][new_y] == "#":
continue
else:
break
except Exception as e:
break
else:
for a, b in INDEXES_TO_CHECK:
new_x = x + b
new_y = y + a
text[new_x][new_y] = "O"
found = True
if found:
valid_texts.append(text)
# Lets view the results
for text in valid_texts:
print()
for row in text:
print("".join(row))
print(sum([row.count("#") for row in text]))
# Terminate
end = perf_counter()
print(end - start)
```
#### File: challenges/codingame/2019_fall_challenge.py
```python
import math
import sys
# ------------------------------------------------------------
# SETTINGS
# ------------------------------------------------------------
RADAR_SETUPS = {
1: [
(5, 0), # 0 will be replaced by robot.y
(10, 3),
(10, 11),
(15, 7),
(20, 3),
(20, 11),
(25, 7),
(28, 3),
(28, 11),
(3, 2),
(3, 12),
(14, 1),
(14, 13),
],
}
X_SETUPS = {
1: 4,
}
SETUP = 1
HARDCODED_RADARS = RADAR_SETUPS[SETUP]
FIRST_X = X_SETUPS[SETUP]
GET_TRAP_MIN_TURN = 0
GET_TRAP_MAX_TURN = 0
DIG_ENEMY_TRAP_MIN_TURN = 100
LOW_ORE = 10
COMMENTS = True
# ------------------------------------------------------------
# INITAL DATA
# ------------------------------------------------------------
# Game
WIDTH, HEIGHT = [int(i) for i in input().split()]
# Robots
ALL_ROBOTS = {}
MY_ROBOTS = []
ENEMY_ROBOTS = []
# Items
COOLDOWNS = {
"RADAR": 0,
"TRAP": 0
}
ITEM_NAMES = {
-1: None,
2: "RADAR",
3: "TRAP",
4: "ORE",
}
# Game
MY_SCORE = 0
ENEMY_SCORE = 0
TURN = 0
# Ore
CELLS_SCANNED = set()
CELLS_WITH_ORE = set()
# Holes
CELLS_WITHOUT_HOLE = set()
MY_HOLES = set()
ENEMY_HOLES = set()
NEW_HOLES = set()
# Traps
MY_TRAPS = set()
INCOMING_TRAPS = set()
ENEMY_TRAPS = set()
# ------------------------------------------------------------
# CLASS
# ------------------------------------------------------------
class Robot:
# --------------------
# Constants
# --------------------
ACTIONS = [
("play_dead", []),
("first_turn_action", []),
("trigger_trap", []),
("go_to_destination", []),
("bring_back_ore", []),
("pick_up_item", []),
("move_to_hardcoded_radar", []),
("burry_hardcoded_radar", []),
("dig_adjacent_ore", [True, 0]),
("move_to_ore", [True, 0]),
("go_get_radar", []),
("dig_adjacent_ore", [False, DIG_ENEMY_TRAP_MIN_TURN]),
("move_to_ore", [False, DIG_ENEMY_TRAP_MIN_TURN]),
("dig_unknown_cell", []),
("move_to_unknown_cell", []),
("wait_it_out", []),
]
GETTING_RADAR = False
# --------------------
# Core Methods
# --------------------
def __init__(self, id, type, cell):
"""Initializes our robot and its attributes"""
# General
self.dead = False
self.id = id
self.type = type
self.last_action = None
self.index = id if id < 5 else id - 5
# Dig setup
self.last_dig_cell = None
self.dig_objective = None
# Mouvement setup
self.position = cell
self.previous_position = cell
self.destination = None
# Item setup
self.item = None
self.get_first_radar = False
self.getting_radar = False
self.hardcoded_radar_cell = None
# --------------------
# Game Actions
# --------------------
def dig(self, cell, comment=""):
"""Gives the "DIG" order. The robot will dig on the given cell (x, y)"""
self.real_time_dig_update(cell)
self.last_dig_cell = cell
self.dig_objective = None
self.last_action = "DIG"
if not COMMENTS:
comment = ""
print("DIG", *cell, comment)
def move(self, cell, closest=True, comment=""):
"""Gives the "MOVE" order. The robot will move towards the given cell (x, y)"""
if closest:
self.dig_objective = cell
cell = self.get_closest_unoccupied_cell(self.position, cell)
self.destination = cell
self.last_action = "MOVE"
comment += " " + str(self.dig_objective)
if not COMMENTS:
comment = ""
print("MOVE", *cell, comment)
def request(self, item, comment=""):
"""Gives the "REQUEST" order. The robots asks for a RADAR or a TRAP"""
COOLDOWNS[item] = 5
self.item = item
self.last_action = "REQUEST"
if not COMMENTS:
comment = ""
print("REQUEST", item, comment)
def wait(self, comment=""):
"""Gives the "WAIT" order. The robot does nothing"""
self.last_action = "WAIT"
if not COMMENTS:
comment = ""
print("WAIT", comment)
# --------------------
# ACTION CHOICES
# --------------------
def play(self):
for function_name, args in self.ACTIONS:
function = getattr(self, function_name)
if len(args) > 0:
done = function(*args)
else:
done = function()
if done:
break
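# Dispatch sketch: ACTIONS is a priority list, so play() walks it top to
# bottom and stops at the first handler that returns a truthy value; e.g. a
# robot carrying ore (and with no pending destination) hits bring_back_ore
# before any dig/move handler is considered.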
def play_dead(self):
if self.dead:
self.wait("play dead")
return True
def first_turn_action(self):
if TURN == 1:
if self.get_first_radar:
self.request("RADAR", "first turn action")
else:
cell = (FIRST_X, self.y)
self.move(cell, False, "first turn action")
return True
def trigger_trap(self):
adjacent_cells = get_adjacent_cells(self.position)
for cell in adjacent_cells:
if cell in MY_TRAPS:
friendly_robots = len(adjacent_robots(cell, 0))
enemy_robots = len(adjacent_robots(cell, 1))
if friendly_robots < enemy_robots:
self.dig(cell, "trigger trap")
return True
def go_to_destination(self):
if self.destination is not None:
self.move(self.destination, False, "go to destination")
return True
def bring_back_ore(self):
if self.item == "ORE":
cell = (0, self.y)
self.move(cell, False, "bring back ore")
return True
def pick_up_item(self):
if self.item is None and self.x == 0:
if not COOLDOWNS["RADAR"] and calculate_safe_ore() < LOW_ORE:
Robot.GETTING_RADAR = False
self.getting_radar = False
self.request("RADAR", "pick up item")
return True
elif not COOLDOWNS["TRAP"]:
if TURN >= GET_TRAP_MIN_TURN and TURN <= GET_TRAP_MAX_TURN and not most_alive_robots():
self.request("TRAP", "pick up item")
return True
def move_to_hardcoded_radar(self):
if self.item == "RADAR" and self.destination is None and self.x == 0:
if len(HARDCODED_RADARS) > 0:
cell = self.choose_which_hardcoded_radar()
if self.get_first_radar:
self.get_first_radar = False
cell = (cell[0], self.y)
self.hardcoded_radar_cell = cell
self.move(cell, True, "move to hardcoded radar")
return True
def burry_hardcoded_radar(self):
if self.hardcoded_radar_cell is not None and self.destination is None:
radar_cell = self.hardcoded_radar_cell
self.hardcoded_radar_cell = None
if radar_cell in MY_TRAPS.union(ENEMY_TRAPS):
cells = get_adjacent_cells(self.position)
for cell in cells:
if cell not in MY_TRAPS.union(ENEMY_TRAPS):
radar_cell = cell
break
else:
radar_cell = None
if radar_cell is not None:
self.dig(radar_cell, "burry hardcoded radar")
return True
def dig_adjacent_ore(self, avoid_enemy_traps=True, min_turn=0):
if TURN > min_turn:
alive_robots = [robot for robot in MY_ROBOTS if not robot.dead]
if avoid_enemy_traps or len(alive_robots) > 2:
traps = get_traps(avoid_enemy_traps)
adjacent_cells = get_adjacent_cells(self.position)
for cell in adjacent_cells:
if cell not in traps and cell in CELLS_WITH_ORE:
self.dig(cell, "dig adjacent ore ({})".format(avoid_enemy_traps))
return True
def move_to_ore(self, avoid_enemy_traps=True, min_turn=0):
if TURN > min_turn:
alive_robots = [robot for robot in MY_ROBOTS if not robot.dead]
if avoid_enemy_traps or len(alive_robots) > 2:
traps = get_traps(avoid_enemy_traps)
sorted_cells = sort_cells_closest(self.position, CELLS_WITH_ORE)
sorted_cells = list(filter(lambda x: x not in traps, sorted_cells))
for cell in sorted_cells:
robot_amount = len(friendly_robots_working_this_cell(cell))
ore = MAP_DATA[cell]["ore"]
if avoid_enemy_traps or robot_amount == 0:
if robot_amount < ore:
self.move(cell, True, "move to ore ({})".format(avoid_enemy_traps))
return True
def go_get_radar(self):
if len(HARDCODED_RADARS) > 0 and not Robot.GETTING_RADAR:
turn_to_base = math.ceil(self.x / 4)
if turn_to_base > COOLDOWNS["RADAR"]:
Robot.GETTING_RADAR = True
self.getting_radar = True
cell = (0, self.y)
self.move(cell, False, "go get radar")
return True
def dig_unknown_cell(self):
adjacent_cells = get_adjacent_cells(self.position)
for cell in adjacent_cells:
if not MAP_DATA[cell]["hole"] and cell not in CELLS_SCANNED and cell[0] > 0:
self.dig(cell, "dig unknown cell")
return True
def move_to_unknown_cell(self):
unknown_cells = CELLS_WITHOUT_HOLE.difference(CELLS_SCANNED)
if len(unknown_cells) > 0:
sorted_cells = sort_cells_closest(self.position, unknown_cells)
for cell in sorted_cells:
if cell[0] > 0:
self.move(cell, True, "move to unknown cell")
return True
def wait_it_out(self):
self.wait("wait it out")
return True
# --------------------
# Helper Methods
# --------------------
def choose_which_hardcoded_radar(self):
"""
Description:
Find the next closest hardcoded radar for your robot
The hardcoded radar is then removed from the list
Two radars are compared only when on the same column
Returns:
tuple: Coordinates (x, y) of the hardcoded radar
"""
found = False
if len(HARDCODED_RADARS) > 1:
x1, x2 = HARDCODED_RADARS[0][0], HARDCODED_RADARS[1][0]
if x1 == x2:
y1, y2 = HARDCODED_RADARS[0][1], HARDCODED_RADARS[1][1]
diff_y1, diff_y2 = abs(self.y - y1), abs(self.y - y2)
if diff_y2 < diff_y1:
cell = HARDCODED_RADARS.pop(1)
found = True
if not found:
cell = HARDCODED_RADARS.pop(0)
return cell
def get_closest_unoccupied_cell(self, start_cell, end_cell):
"""
Description:
Returns the closest adjacent cell of a "end_cell" relatively to a "start_cell"
Args:
start_cell (tuple): Coordinates (x, y) of the starting point
end_cell (tuple): Coordinates (x, y) of the ending point
Returns:
tuple: Coordinates (x, y) of the closest adjacent cell
"""
cells = get_adjacent_cells(end_cell)
sorted_cell = sort_cells_closest(start_cell, cells)
robots = [MY_ROBOTS[i] for i in range(self.index)]
for cell in sorted_cell:
occupied = False
for robot in robots:
if robot.position == cell:
occupied = True
break
if not occupied:
return cell
def guess_enemy_pickup_trap(self):
"""Guesses if an enemy has picked up a trap"""
if self.immobile and self.x == 0:
self.item = "TRAP"
def guess_enemy_potential_traps(self):
"""Guesses if a trap has been burried by an enemy"""
if self.immobile and self.x > 0 and self.item == "TRAP":
adjacent_cells = get_adjacent_cells(self.position)
        # They dug a new hole, 100% sure
for cell in adjacent_cells:
if cell in NEW_HOLES.intersection(ENEMY_HOLES):
robot_count = len(adjacent_robots(cell, 1))
if robot_count == 1:
self.item = None
self.last_dig_cell = cell
ENEMY_TRAPS.add(cell)
return
        # If holes already exist nearby, assume they now hold traps
for cell in adjacent_cells:
if cell in MY_HOLES.union(ENEMY_HOLES):
self.item = None
ENEMY_TRAPS.add(cell)
def just_died(self):
"""Checks if a robot just died this turn"""
if self.position == (-1, -1) and self.previous_position != self.position:
self.dead = True
self.dig_objective = None
if self.getting_radar:
Robot.GETTING_RADAR = False
self.getting_radar = False
return True
return False
def real_time_dig_update(self, cell):
"""
Description:
            Updates the MAP_DATA and our "cell sets" in real time for better decision making
            That way, our robots don't have to wait until next turn to get updated data
Args:
cell (tuple): Coordinates (x, y) of the cell
"""
# Update the "hole" info
MAP_DATA[cell]["hole"] = True
MY_HOLES.add(cell)
if cell in CELLS_WITHOUT_HOLE:
CELLS_WITHOUT_HOLE.remove(cell)
# Update the item drop
if self.item == "TRAP":
MY_TRAPS.add(cell)
self.item = None
# Update the ore
ore = MAP_DATA[cell]["ore"]
if ore not in {"?", 0}:
ore -= 1
self.item = "ORE"
if ore == 0:
CELLS_WITH_ORE.remove(cell)
MAP_DATA[cell]["ore"] = ore
def turn_update(self, cell, item):
"""
Description:
            Handles the per-turn update of a robot's information
            The methods used differ based on the robot's type
Args:
cell (tuple): Coordinates (x, y) of the cell
item (int): The currently carried item (represented by a number)
"""
self.position = cell
if self.position == (-1, -1):
self.dead = True
if self.type == 0:
self.turn_update_friendly(cell, item)
elif self.type == 1:
self.turn_update_enemy(cell)
self.previous_position = cell
def turn_update_enemy(self, cell):
"""
Description:
            Updates an ENEMY robot's information at the beginning of a turn
            Since we only know its position, we can only infer its actions from movement
Args:
cell (tuple): Coordinates (x, y) of the cell
"""
if not self.dead:
self.guess_enemy_pickup_trap()
self.guess_enemy_potential_traps()
def turn_update_friendly(self, cell, item):
"""
Description:
            Updates a FRIENDLY robot's information at the beginning of a turn
Args:
cell (tuple): Coordinates (x, y) of the cell
item (int): The currently carried item (represented by a number)
"""
# Item
item = ITEM_NAMES[item]
self.item = item
# Updating cell ore info
if self.last_action == "DIG" and self.item is None:
if self.last_dig_cell in CELLS_WITH_ORE:
CELLS_WITH_ORE.remove(self.last_dig_cell)
if self.just_died():
if self.last_action == "DIG":
remove_traps_recursively(self.last_dig_cell)
if self.destination == cell:
self.destination = None
self.previous_position = cell
# Map data
if min(cell) >= 0:
if self.type == 0:
MAP_DATA[cell]["robots"]["ME"] += 1
else:
MAP_DATA[cell]["robots"]["ENEMY"] += 1
# --------------------
# Custom Properties
# --------------------
@property
def immobile(self):
"""Dynamic property that indicates if the robot has moved"""
return self.position == self.previous_position
@property
def x(self):
"""Dynamic property that returns the robot's X coordinate"""
return self.position[0]
@property
def y(self):
"""Dynamic property that returns the robot's Y coordinate"""
return self.position[1]
# ------------------------------------------------------------
# FUNCTIONS
# ------------------------------------------------------------
def adjacent_robots(cell, type):
"""
Description:
Returns the list of robots that are adjacent to the given cell
        You can choose between friendly and enemy robots using "type"
    Args:
        cell (tuple): Coordinates (x, y) of a cell
        type (int): 0 for our robots, 1 for enemy robots
Returns:
list: List of robots on adjacent cells
"""
cells = get_adjacent_cells(cell)
all_robots = MY_ROBOTS + ENEMY_ROBOTS
robots = [robot for robot in all_robots if robot.type == type]
found_robots = []
for robot in robots:
if robot.position in cells:
found_robots.append(robot)
return found_robots
def calculate_safe_ore():
"""
Description:
Calculates the amount of SAFE ore available on the map
Returns:
int: Amount of ore available
"""
amount = 0
for cell, data in MAP_DATA.items():
if cell not in MY_TRAPS.union(ENEMY_TRAPS):
if data["ore"] not in {"?", 0}:
amount += data["ore"]
return amount
def choose_robot_for_first_radar():
"""
Description:
Finds and returns the robot that is the closest to the center of the map
Returns:
Robot: The closest friendly robot relative to the center
"""
robots = MY_ROBOTS.copy()
center = int(HEIGHT/2)
center_diff = [abs(robot.position[1] - center) for robot in robots]
index = center_diff.index(min(center_diff))
closest_robot = robots[index]
return closest_robot
def create_robot(id, type, cell):
"""
Description:
Creates a robot and puts it in our correct datasets
Args:
id (int): Unique ID of the entity
type (int): 0 for our robot, 1 for an enemy robot
cell (tuple): Coordinates (x, y) of the cell
"""
robot = Robot(id, type, cell)
ALL_ROBOTS[id] = robot
if type == 0:
MY_ROBOTS.append(robot)
else:
ENEMY_ROBOTS.append(robot)
def friendly_robots_working_this_cell(cell):
"""
Description:
Returns the list of friendly robots currently working on this cell
Args:
cell (tuple): Coordinates (x, y) of the cell
Returns:
list: List of the eligible robots
"""
robots = [robot for robot in MY_ROBOTS if robot.dig_objective == cell]
return robots
def generate_map_data(width, height):
"""
Description:
        Generates the map_data at the beginning of the game
Keys are coordinates (x, y) and they contain data regarding the cell
Args:
width (int): Width of the grid, given by the game
height (int): Height of the grid, given by the game
Returns:
dict: Dict with coordinates (x, y) as keys, and dicts as values
"""
map_data = {}
for i in range(width):
for j in range(height):
CELLS_WITHOUT_HOLE.add((i, j))
map_data[(i, j)] = {
"robots": {
"ME": 0,
"ENEMY": 0,
},
"hole": False,
"ore": "?",
}
return map_data
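# Illustrative note: for every cell (x, y), MAP_DATA starts out as
# {"robots": {"ME": 0, "ENEMY": 0}, "hole": False, "ore": "?"}
# and each coordinate is also registered in CELLS_WITHOUT_HOLE.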
def get_adjacent_cells(cell):
"""
Description:
Finds and returns the adjacent cells of a given cell (including the original cell)
        The result is a SET containing between 3 and 5 elements
Args:
cell (tuple): Coordinates (x, y) of the cell
Returns:
set: Set of 3 to 5 coordinates (x, y)
"""
x, y = cell
cells = set((
(x-1, y),
(x+1, y),
(x, y),
(x, y-1),
(x, y+1),
))
cells = set(filter(is_cell_in_grid, cells))
return cells
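# Example (assuming WIDTH > 1 and HEIGHT > 1): a corner cell keeps only its
# in-grid neighbours, e.g. get_adjacent_cells((0, 0)) == {(0, 0), (1, 0), (0, 1)}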
def get_traps(all_traps=True):
"""
Description:
Returns a set containing all the cells that have traps
Can be set to check either ALL traps or only OURS
Args:
all_traps (bool, optional): Determines if we get ALL traps. Defaults to True.
Returns:
set: Set of tuples which are cell coordinates
"""
if all_traps:
traps = MY_TRAPS.union(ENEMY_TRAPS)
else:
traps = MY_TRAPS
return traps
def is_cell_in_grid(cell):
"""
Description:
Checks if a given cell is within the limit of the grid
Args:
cell (tuple): Coordinates (x, y) of the cell
Returns:
bool: True if the cell is within WIDTH/HEIGHT
"""
x, y = cell
if x in range(WIDTH) and y in range(HEIGHT):
return True
return False
def most_alive_robots():
"""
Description:
Checks if we have more robots alive than our enemy
Returns:
        bool: True if we have strictly more robots
"""
my_robots = [robot for robot in MY_ROBOTS if not robot.dead]
enemy_robots = [robot for robot in ENEMY_ROBOTS if not robot.dead]
if len(my_robots) > len(enemy_robots):
return True
return False
def remove_my_triggered_traps():
"""Removes our missing trap from our info sets"""
removed_traps = MY_TRAPS.difference(MY_TRAPS_THIS_TURN)
for cell in removed_traps:
remove_traps_recursively(cell)
def remove_traps_recursively(initial_cell, first=True):
"""
Description:
Remove traps recursively by declaring which cell triggered the first trap
We spread the reaction only on our traps, since enemy traps are only guessed
Args:
        initial_cell (tuple): Coordinates (x, y) of a triggered trap
        first (bool): Whether this is the initial call of the recursion. Defaults to True.
    """
if initial_cell in MY_TRAPS:
MY_TRAPS.remove(initial_cell)
adjacent_cells = get_adjacent_cells(initial_cell)
for cell in adjacent_cells:
remove_traps_recursively(cell, False)
elif initial_cell in ENEMY_TRAPS and first:
ENEMY_TRAPS.remove(initial_cell)
adjacent_cells = get_adjacent_cells(initial_cell)
for cell in adjacent_cells:
if cell in MY_TRAPS:
remove_traps_recursively(cell, False)
def sort_cells_closest(start_cell, cells):
"""
Description:
        Returns the given cells sorted by Manhattan distance to "start_cell"
Args:
start_cell (tuple): Coordinates (x, y) of the reference cell
cells (list): List of (x, y) tuples that are the cells to compare
Returns:
list: A list containing all the "cells", sorted by closeness relative to "start_cell"
"""
sorted_list = sorted(cells, key=lambda cell: abs(cell[0]-start_cell[0]) + abs(cell[1] - start_cell[1]))
return sorted_list
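# Example (Manhattan distance): sort_cells_closest((0, 0), [(2, 2), (1, 0), (0, 3)])
# returns [(1, 0), (0, 3), (2, 2)] since their distances are 1, 3, and 4.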
def turn_cell_update(cell, ore, hole):
"""
Description:
Updates the MAP_DATA and our "cell sets" with ore and hole info
The game will send us this data for every cell, at the start of every turn
Args:
cell (tuple): Coordinates (x, y) of the cell
ore (str): String that can either be "?" or a number
hole (int): 0 if no hole has been dug, 1 otherwise
"""
# Ore update
if ore != "?":
ore = int(ore)
MAP_DATA[cell]["ore"] = ore
CELLS_SCANNED.add(cell)
if ore > 0:
CELLS_WITH_ORE.add(cell)
elif cell in CELLS_WITH_ORE:
CELLS_WITH_ORE.remove(cell)
else:
existing_ore_data = MAP_DATA[cell]["ore"]
if not isinstance(existing_ore_data, int):
MAP_DATA[cell]["ore"] = ore
# Hole update
if hole:
if cell not in MY_HOLES.union(ENEMY_HOLES):
NEW_HOLES.add(cell)
if cell not in MY_HOLES:
ENEMY_HOLES.add(cell)
if cell in CELLS_WITHOUT_HOLE:
CELLS_WITHOUT_HOLE.remove(cell)
MAP_DATA[cell]["hole"] = bool(hole)
# Robot count reset
MAP_DATA[cell]["robots"]["ME"] = 0
MAP_DATA[cell]["robots"]["ENEMY"] = 0
def turn_entity_update(id, type, cell, item):
"""
Description:
Updates either a robot or our dataset, based on the given information
This function is called at the start of each turn, for each identified entity
Args:
id (int): Unique ID of the entity
type (int): Number from 0 to 3 with (0: our robot, 1: enemy robot, 2: radar, 3: emp)
cell (tuple): Coordinates (x, y) of the cell
item (int): The currently carried item (represented by a number)
"""
if id in ALL_ROBOTS.keys():
robot = ALL_ROBOTS[id]
robot.turn_update(cell, item)
else:
if type == 2:
pass
if type == 3:
MY_TRAPS.add(cell)
MY_TRAPS_THIS_TURN.add(cell)
# ------------------------------------------------------------
# GAME LOOP
# ------------------------------------------------------------
MAP_DATA = generate_map_data(WIDTH, HEIGHT)
while True:
# Score update
NEW_HOLES = set()
MY_TRAPS_THIS_TURN = set()
MY_SCORE, ENEMY_SCORE = [int(i) for i in input().split()]
TURN += 1
# Ore and hole update
for i in range(HEIGHT):
inputs = input().split()
for j in range(WIDTH):
cell = (j, i)
ore = inputs[2*j]
hole = int(inputs[2*j+1])
turn_cell_update(cell, ore, hole)
    # Entity count and cooldown update
entity_count, COOLDOWNS["RADAR"], COOLDOWNS["TRAP"] = [int(i) for i in input().split()]
# Entity update
for i in range(entity_count):
id, type, x, y, item = [int(j) for j in input().split()]
cell = (x, y)
if TURN == 1:
create_robot(id, type, cell)
else:
turn_entity_update(id, type, cell, item)
remove_my_triggered_traps()
# First turn action setup
if TURN == 1:
closest_robot = choose_robot_for_first_radar()
closest_robot.get_first_radar = True
# Action
for i in range(5):
robot = MY_ROBOTS[i]
robot.play()
``` |
{
"source": "Jordan-Kowal/discord-dice-roller",
"score": 3
} |
#### File: discord_dice_roller/cogs/dice_rolling.py
```python
from discord.ext import commands
# Application
from utils.cog import ImprovedCog
from utils.dice_roll import DiceRoll
from utils.embed import create_warning_embed
from utils.settings import get_user_settings, get_user_shortcuts
# --------------------------------------------------------------------------------
# > Cog
# --------------------------------------------------------------------------------
class DiceRollingCog(ImprovedCog):
"""
Provides commands to roll dice with various options
> roll Rolls the dice using the provided instructions
> reroll Rolls the dice using the player's last VALID instructions
    > use       Rolls the dice using one of the user's shortcuts
"""
last_roll_per_user = {}
# ----------------------------------------
# roll
# ----------------------------------------
@commands.command()
async def roll(self, ctx, *args):
"""Rolls the dice using the provided instructions"""
self.log_command_call("roll", ctx.message)
user_id = str(ctx.message.author.id)
user_settings = get_user_settings(user_id)
dice_roll = DiceRoll(args, user_settings)
embed_output = dice_roll.roll()
if dice_roll.is_valid:
self.last_roll_per_user[user_id] = dice_roll
await ctx.send(embed=embed_output)
@roll.error
async def roll_error(self, ctx, error):
"""Base error handler for the `roll` command"""
await self.log_error_and_apologize(ctx, error)
# ----------------------------------------
# reroll
# ----------------------------------------
@commands.command()
async def reroll(self, ctx):
"""Rolls the dice using the player's last VALID instructions"""
self.log_command_call("reroll", ctx.message)
user_id = str(ctx.message.author.id)
last_dice_roll = self.last_roll_per_user.get(user_id, None)
if last_dice_roll is None:
description = "You have yet to send one valid `!roll` command"
embed_output = create_warning_embed(description=description)
else:
dice_roll = last_dice_roll.copy()
embed_output = dice_roll.roll()
await ctx.send(embed=embed_output)
@reroll.error
async def reroll_error(self, ctx, error):
"""Base error handler for the `reroll` command"""
await self.log_error_and_apologize(ctx, error)
# ----------------------------------------
# use
# ----------------------------------------
@commands.command()
async def use(self, ctx, name, *args):
"""Rolls the dice using a user's shortcut and maybe additional instructions"""
self.log_command_call("use", ctx.message)
user_id = str(ctx.message.author.id)
user_shortcuts = get_user_shortcuts(user_id)
if name not in user_shortcuts:
description = f"Found no shortcut with the name `{name}` in your settings"
embed = create_warning_embed(description=description)
else:
shortcut_instructions = user_shortcuts[name].split(" ")
instructions = shortcut_instructions + list(args)
user_settings = get_user_settings(user_id)
dice_roll = DiceRoll(instructions, user_settings)
embed = dice_roll.roll()
if dice_roll.is_valid:
self.last_roll_per_user[user_id] = dice_roll
await ctx.send(embed=embed)
@use.error
async def use_error(self, ctx, error):
"""Base error handler for the `use` command"""
await self.log_error_and_apologize(ctx, error)
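# A minimal wiring sketch (hypothetical entry point; the prefix, token variable,
# and setup call are assumptions, not files from this repository):
# bot = commands.Bot(command_prefix="!")
# bot.add_cog(DiceRollingCog(bot))
# bot.run(DISCORD_TOKEN)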
``` |
{
"source": "Jordan-Kowal/django-backbone",
"score": 2
} |
#### File: django_backbone/contact/models.py
```python
from collections import namedtuple
from datetime import timedelta
from enum import Enum
# Django
from django.contrib.auth import get_user_model
from django.db.models import (
SET_NULL,
EmailField,
ForeignKey,
GenericIPAddressField,
Index,
)
from django.utils import timezone
# Personal
from jklib.django.db.fields import RequiredField, TrimCharField, TrimTextField
from jklib.django.db.models import LifeCycleModel
from jklib.django.db.validators import LengthValidator
from jklib.django.utils.emails import send_html_email_async
from jklib.django.utils.settings import get_config
# Application
from core.utils import render_email_template
# --------------------------------------------------------------------------------
# > Helpers
# --------------------------------------------------------------------------------
EmailInfo = namedtuple("EmailInfo", ["template", "subject"])
# --------------------------------------------------------------------------------
# > Models
# --------------------------------------------------------------------------------
class Contact(LifeCycleModel):
"""Contact messages sent by our users through the API"""
# ----------------------------------------
# Constants
# ----------------------------------------
# Emails
class EmailTemplate(EmailInfo, Enum):
"""Enum of namedtuples that store our email template data"""
USER_NOTIFICATION = EmailInfo(
"contact/emails/user_notification.html", "Your message has been sent"
)
ADMIN_NOTIFICATION = EmailInfo(
"contact/emails/admin_notification.html", "New message received"
)
# Static
BODY_LENGTH = [10, 2000]
NAME_LENGTH = [2, 50]
SUBJECT_LENGTH = [2, 50]
# Overridable
DEFAULT_RETENTION_DAYS = 30 # settings.CONTACT_RETENTION_DAYS
DEFAULT_API_BAN_SETTINGS = { # settings.CONTACT_API_BAN_SETTINGS
"threshold": 3,
"period_in_days": 30,
"duration_in_days": 30,
}
# ----------------------------------------
# Fields
# ----------------------------------------
ip = RequiredField(GenericIPAddressField, db_index=True, verbose_name="IP Address",)
user = ForeignKey(
get_user_model(),
on_delete=SET_NULL,
null=True,
blank=True,
related_name="contacts",
verbose_name="User",
)
name = RequiredField(
TrimCharField,
max_length=NAME_LENGTH[1],
validators=[LengthValidator(*NAME_LENGTH)],
verbose_name="Name",
)
email = RequiredField(EmailField, verbose_name="Email")
subject = RequiredField(
TrimCharField,
max_length=SUBJECT_LENGTH[1],
validators=[LengthValidator(*SUBJECT_LENGTH)],
verbose_name="Subject",
)
body = RequiredField(
TrimTextField, validators=[LengthValidator(*BODY_LENGTH)], verbose_name="Body",
)
# ----------------------------------------
# Behavior (meta, str, save)
# ----------------------------------------
class Meta:
db_table = "contacts"
indexes = [Index(fields=["ip", "created_at"])]
ordering = ["-id"]
unique_together = []
verbose_name = "Contact"
verbose_name_plural = "Contacts"
def __str__(self):
"""
:return: The instance subject
:rtype: str
"""
return self.subject
# ----------------------------------------
# Properties
# ----------------------------------------
@property
def has_expired(self):
"""
:return: Whether the Contact instance has expired (should be removed from the database)
:rtype: bool
"""
expiration_date = self.created_at + timedelta(days=self.get_retention_days())
return expiration_date < timezone.now()
@classmethod
def get_ban_settings(cls):
"""
:return: The API ban config for the Contact model, with custom override
:rtype: dict
"""
custom_config = get_config("CONTACT_API_BAN_SETTINGS", {})
default_config = cls.DEFAULT_API_BAN_SETTINGS.copy()
default_config.update(custom_config)
return default_config
@classmethod
def get_retention_days(cls):
"""
:return: The number of days a Contact instance is kept in the database
:rtype: int
"""
return get_config("CONTACT_RETENTION_DAYS", cls.DEFAULT_RETENTION_DAYS)
# ----------------------------------------
# Public API
# ----------------------------------------
def send_notifications(self, to_admin, to_user):
"""
Sends notification emails to inform of a new contact message
:param bool to_admin: Whether the admin should receive a notification
:param bool to_user: Whether the user should receive a notification
"""
if to_admin:
admin_email = get_config("EMAIL_HOST_USER")
self._send_async_email(self.EmailTemplate.ADMIN_NOTIFICATION, admin_email)
if to_user:
self._send_async_email(self.EmailTemplate.USER_NOTIFICATION, self.email)
@classmethod
def should_ban_ip(cls, ip):
"""
Checks if an IP should be banned based on the amount of contact requests recently sent
:param str ip: The IP address to check
:return: Whether it should be banned
:rtype: bool
"""
ban_settings = cls.get_ban_settings()
threshold = ban_settings["threshold"]
# No threshold means no ban
if not threshold:
return False
# Else we check
creation_date_threshold = timezone.now() - timedelta(
days=ban_settings["period_in_days"]
)
count = cls.objects.filter(
ip=ip, created_at__gt=creation_date_threshold
).count()
return count >= threshold
# ----------------------------------------
# CRON jobs
# ----------------------------------------
@classmethod
def remove_old_entries(cls):
"""Based on the retention days, remove overdue entries"""
creation_limit = timezone.now() - timedelta(days=cls.get_retention_days())
Contact.objects.filter(created_at__lt=creation_limit).delete()
# ----------------------------------------
# Private methods
# ----------------------------------------
def _send_async_email(self, email_template, to):
"""
Sends an email to a target recipient, based on the provided template
:param EmailTemplate email_template: The EmailTemplate to use
:param str to: The recipient email address
"""
context = {"contact": self}
body = render_email_template(email_template.template, context)
send_html_email_async(email_template.subject, body, to=to)
```
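The retention and ban thresholds above are resolved through `get_config`, so a host project can override them from its Django settings without touching the model. A minimal sketch of such an override (the values below are illustrative assumptions, not defaults from this repository):
```python
# settings.py (illustrative values)
CONTACT_RETENTION_DAYS = 90  # Keep contact messages for 90 days instead of 30
CONTACT_API_BAN_SETTINGS = {
    "threshold": 5,  # Ban an IP after 5 messages...
    "period_in_days": 7,  # ...sent within any rolling 7-day window...
    "duration_in_days": 14,  # ...and blacklist it for 14 days
}
```
With these values, `Contact.should_ban_ip(ip)` returns True once 5 contact rows share the same IP within the last 7 days.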
#### File: contact/tests/test_viewsets.py
```python
from datetime import date, timedelta
from time import sleep
# Django
from django.core import mail
# Personal
from jklib.django.utils.settings import get_config
# Application
from core.tests import BaseActionTestCase
from security.models import NetworkRule
from users.factories import AdminFactory, UserFactory
# Local
from ..factories import ContactFactory
from ..models import Contact
# --------------------------------------------------------------------------------
# > Helpers
# --------------------------------------------------------------------------------
SERVICE_URL = "/api/contacts/"
class Base(BaseActionTestCase):
"""Base class for all the Contact action test cases"""
def setUp(self):
"""Creates and authenticates an Admin user"""
self.admin = AdminFactory()
self.api_client.force_authenticate(self.admin)
@staticmethod
def assert_instance_representation(instance, response_data):
"""
Compares a response data with a Contact instance
:param Contact instance: Contact instance from the database
:param dict response_data: Response data from the API
"""
assert instance.id == response_data["id"]
assert instance.ip == response_data["ip"]
assert instance.name == response_data["name"]
assert instance.email == response_data["email"]
assert instance.subject == response_data["subject"]
assert instance.body == response_data["body"]
if instance.user is not None:
user_data = response_data["user"]
assert instance.user.id == user_data["id"]
assert instance.user.first_name == user_data["first_name"]
assert instance.user.last_name == user_data["last_name"]
assert instance.user.email == user_data["email"]
assert instance.user.is_active == user_data["is_active"]
assert instance.user.is_verified == user_data["is_verified"]
else:
assert response_data["user"] is None
assert "notify_user" not in response_data
@staticmethod
def assert_payload_matches_instance(payload, instance):
"""
        Checks that the instance data matches its original request payload
        :param dict payload: The request payload to create/update the contact demand
        :param Contact instance: The related Contact instance
"""
assert payload["name"] == instance.name
assert payload["email"] == instance.email
assert payload["subject"] == instance.subject
assert payload["body"] == instance.body
# --------------------------------------------------------------------------------
# > TestCases
# --------------------------------------------------------------------------------
class TestCreateContact(Base):
"""TestCase for the 'list' action"""
url_template = SERVICE_URL
http_method_name = "POST"
success_code = 204
def setUp(self):
"""Also prepares a valid creation payload"""
super().setUp()
self.payload = {
"name": "Name",
"email": "<EMAIL>",
"subject": "Subject",
"body": "Sufficiently long body",
"notify_user": False,
}
def test_permissions(self):
"""Tests anybody can access this service. (We use 3 different IPs to avoid ban)"""
user = UserFactory()
admin = AdminFactory()
# Logged out
self.api_client.logout()
response = self.http_method(self.url(), data=self.payload)
assert response.status_code == self.success_code
# User
self.api_client.force_authenticate(user)
response = self.http_method(
self.url(), data=self.payload, REMOTE_ADDR="127.0.0.2"
)
assert response.status_code == self.success_code
        # Admin
self.api_client.logout()
self.api_client.force_authenticate(admin)
response = self.http_method(
self.url(), data=self.payload, REMOTE_ADDR="127.0.0.3"
)
assert response.status_code == self.success_code
assert Contact.objects.count() == 3
def test_automatic_ban(self):
"""Tests that spamming contacts gets your IP banned"""
ip = "127.0.0.2"
ban_settings = Contact.get_ban_settings()
threshold = ban_settings["threshold"]
# Never banned
if not threshold:
response = self.http_method(self.url(), self.payload)
assert response.status_code == self.success_code
assert not Contact.should_ban_ip(ip=ip)
return
# Could be banned
[
self.http_method(self.url(), self.payload, REMOTE_ADDR=ip)
for _ in range(threshold)
]
assert Contact.objects.count() == threshold
with self.assertLogs(logger="security", level="INFO") as logger:
response = self.http_method(self.url(), self.payload, REMOTE_ADDR=ip)
assert response.status_code == 403
assert Contact.objects.count() == threshold
# Check the associated NetworkRule
rule = NetworkRule.objects.get(ip=ip)
message = f"INFO:security:NetworkRule created for {rule.ip} (Status: {rule.computed_status})"
assert logger.output[0] == message
# Our IP should be blacklisted
expected_end_date = date.today() + timedelta(
days=ban_settings["duration_in_days"]
)
assert rule.is_blacklisted
assert rule.expires_on == expected_end_date
# Any subsequent request should fail
response = self.http_method(self.url(), self.payload, REMOTE_ADDR=ip)
assert response.status_code == 403
# Other IPs can pass through
response = self.http_method(self.url(), self.payload)
assert response.status_code == self.success_code
def test_success_notifications(self):
"""Tests that successful Contact creations send notifications"""
# Without notification
self._assert_creation_success_base(self.payload, 1)
assert Contact.objects.count() == 1
sleep(0.4)
assert len(mail.outbox) == 1
email = mail.outbox[0]
assert email.subject == Contact.EmailTemplate.ADMIN_NOTIFICATION.subject
assert email.to[0] == get_config("EMAIL_HOST_USER")
# With notification
mail.outbox = []
self.payload["notify_user"] = True
self._assert_creation_success_base(self.payload, 2)
assert Contact.objects.count() == 2
sleep(0.4)
assert len(mail.outbox) == 2
email_1, email_2 = mail.outbox[0], mail.outbox[1]
subjects = [email_1.subject, email_2.subject]
recipients = [email_1.to[0], email_2.to[0]]
assert Contact.EmailTemplate.ADMIN_NOTIFICATION.subject in subjects
assert Contact.EmailTemplate.USER_NOTIFICATION.subject in subjects
assert self.payload["email"] in recipients
assert get_config("EMAIL_HOST_USER") in recipients
def test_success_user(self):
"""Tests that the User is correctly attached to the created Contact"""
# Logged user
instance = self._assert_creation_success_base(self.payload, 1)
assert Contact.objects.count() == 1
assert instance.user.id == self.admin.id
# No user
self.api_client.logout()
instance = self._assert_creation_success_base(self.payload, 2)
assert Contact.objects.count() == 2
assert instance.user is None
def test_success_ip(self):
"""Tests that the IP is correctly computed from the request"""
ip = "127.0.0.3"
instance = self._assert_creation_success_base(self.payload, 1, REMOTE_ADDR=ip)
assert Contact.objects.count() == 1
assert instance.ip == ip
def _assert_creation_success_base(self, payload, id_, **params):
"""
Performs a creation request and checks its success
:param payload: The data to pass to our request
:param int id_: The expected id of the created instance
:param params: Extra parameters for the called method
:return: The created Contact instance
:rtype: Contact
"""
response = self.http_method(self.url(), data=payload, **params)
assert response.status_code == self.success_code
instance = Contact.objects.get(id=id_)
self.assert_payload_matches_instance(payload, instance)
return instance
class TestListContacts(Base):
"""TestCase for the 'list' action"""
url_template = SERVICE_URL
http_method_name = "GET"
success_code = 200
def test_permissions(self):
"""Tests only admins can access this service"""
self.assert_admin_permissions(self.url())
def test_success(self):
"""Tests we can successfully fetch the list of Contact instances"""
response = self.http_method(self.url())
assert response.status_code == self.success_code
assert Contact.objects.count() == len(response.data) == 0
contact_1 = ContactFactory()
contact_2 = ContactFactory()
response = self.http_method(self.url())
assert response.status_code == self.success_code
assert Contact.objects.count() == len(response.data) == 2
self.assert_instance_representation(contact_2, response.data[0])
self.assert_instance_representation(contact_1, response.data[1])
class TestRetrieveContact(Base):
"""TestCase for the 'retrieve' action"""
url_template = f"{SERVICE_URL}/{{id}}/"
http_method_name = "GET"
success_code = 200
def setUp(self):
"""Also creates a Contact instance"""
super().setUp()
self.contact = ContactFactory()
self.detail_url = self.url(context={"id": self.contact.id})
def test_permissions(self):
"""Tests only admins can access this service"""
self.assert_admin_permissions(self.detail_url)
def test_success(self):
"""Tests we can successfully retrieve a single Contact instance"""
response = self.http_method(self.detail_url)
assert response.status_code == self.success_code
self.assert_instance_representation(self.contact, response.data)
class TestDestroyContact(Base):
"""TestCase for the 'destroy' action"""
url_template = f"{SERVICE_URL}/{{id}}/"
http_method_name = "DELETE"
success_code = 204
def setUp(self):
"""Also creates 2 Contact instances"""
super().setUp()
self.contact_1 = ContactFactory()
self.contact_2 = ContactFactory()
self.url_1 = self.url(context={"id": self.contact_1.id})
self.url_2 = self.url(context={"id": self.contact_2.id})
def test_permissions(self):
"""Tests only admins can access this service"""
assert Contact.objects.count() == 2
self.assert_admin_permissions(self.url_1)
assert Contact.objects.count() == 1
def test_success(self):
"""Tests we can successfully delete individual Contact instances"""
assert Contact.objects.count() == 2
response = self.http_method(self.url_1)
assert response.status_code == self.success_code
assert Contact.objects.count() == 1
response = self.http_method(self.url_2)
assert response.status_code == self.success_code
assert Contact.objects.count() == 0
class TestBulkDestroyContacts(Base):
"""TestCase for the 'bulk_destroy' action"""
url_template = SERVICE_URL
http_method_name = "DELETE"
success_code = 204
def setUp(self):
"""Also creates 4 Contact instances"""
super().setUp()
[ContactFactory() for _ in range(4)]
self.payload = {"ids": [1, 4]}
def test_permissions(self):
"""Tests only admins can access this service"""
assert Contact.objects.count() == 4
self.assert_admin_permissions(url=self.url(), data=self.payload)
assert Contact.objects.count() == 2
def test_success(self):
"""Tests we can successfully delete multiple Contact instances at once"""
# Only valid IDs
response = self.http_method(self.url(), data=self.payload)
assert response.status_code == self.success_code
assert Contact.objects.count() == 2
# Some valid IDs
response = self.http_method(self.url(), data={"ids": [2, 6]})
assert response.status_code == self.success_code
assert Contact.objects.count() == 1
assert Contact.objects.first().id == Contact.objects.last().id == 3
```
#### File: django_backbone/core/signals.py
```python
from django.contrib.auth import get_user_model
from django.contrib.sessions.models import Session
from django.db.models.signals import pre_save
from django.dispatch import receiver
# --------------------------------------------------------------------------------
# > Shared
# --------------------------------------------------------------------------------
@receiver(pre_save)
def automatic_pre_save_full_clean(sender, instance, **kwargs):
"""
Runs the `full_clean` method before saving the instance, unless this model is exempted
:param Model sender: The model class
:param Model instance: The model instance
:param kwargs:
"""
whitelist = {Session, get_user_model()}
if sender not in whitelist:
instance.full_clean()
```
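Since the receiver above is registered for every model's `pre_save`, any non-exempt model is validated before it touches the database. A short sketch of the effect, using the `Contact` model defined earlier (assumes a configured Django environment; the field values are illustrative):
```python
from django.core.exceptions import ValidationError

contact = Contact(
    ip="127.0.0.1", name="Name", email="someone@example.com",
    subject="Subject", body="short",  # Below the 10-character minimum
)
try:
    contact.save()  # pre_save triggers full_clean(), which rejects the body
except ValidationError as error:
    print(error.message_dict)
```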
#### File: django_backbone/healthchecks/viewsets.py
```python
import logging
from enum import Enum
from functools import wraps
from secrets import token_urlsafe
# Django
from django.core.cache import cache
from django.core.exceptions import FieldError, ImproperlyConfigured, ObjectDoesNotExist
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_500_INTERNAL_SERVER_ERROR
# Personal
from jklib.django.drf.permissions import IsAdminUser
from jklib.django.drf.viewsets import ImprovedViewSet
# Local
from .models import HealthcheckDummy
# --------------------------------------------------------------------------------
# > Utilities
# --------------------------------------------------------------------------------
LOGGER = logging.getLogger("healthcheck")
class Service(Enum):
"""List of services with healthchecks"""
API = "API"
CACHE = "CACHE"
DATABASE = "DATABASE"
MIGRATIONS = "MIGRATIONS"
def error_catcher(service):
"""
Decorator for the healthchecks API endpoints
Logs the API call result, and returns a 500 if the service crashes
:param Service service: Which service is called
:return: Either the service success Response or a 500
:rtype: Response
"""
def decorator(function):
@wraps(function)
def wrapper(request, *args, **kwargs):
try:
response = function(request, *args, **kwargs)
LOGGER.info(f"Service {service.name} is OK")
return response
except Exception as error:
LOGGER.error(f"Service {service.name} is KO: {error}")
return Response(None, status=HTTP_500_INTERNAL_SERVER_ERROR)
return wrapper
return decorator
# --------------------------------------------------------------------------------
# > ViewSets
# --------------------------------------------------------------------------------
class HealthcheckViewSet(ImprovedViewSet):
"""Viewset for our various healthchecks"""
viewset_permission_classes = (IsAdminUser,)
serializer_classes = {"default": None}
@action(detail=False, methods=["get"])
@error_catcher(Service.API)
def api(self, request):
"""Checks if the API is up and running"""
return Response(None, status=HTTP_200_OK)
@action(detail=False, methods=["get"])
@error_catcher(Service.CACHE)
def cache(self, request):
"""Checks we can write/read/delete in the cache system"""
random_cache_key = token_urlsafe(30)
random_cache_value = token_urlsafe(30)
# Set value
cache.set(random_cache_key, random_cache_value)
cached_value = cache.get(random_cache_key, None)
if cached_value is None:
raise KeyError(f"Failed to set a key/value pair in the cache")
if cached_value != random_cache_value:
raise ValueError(
f"Unexpected value stored in the '{random_cache_key}' cache key"
)
        # Delete value
        cache.delete(random_cache_key)
        cached_value = cache.get(random_cache_key, None)
if cached_value is not None:
raise AttributeError(
f"Failed to properly delete the '{random_cache_key}' key in the cache"
)
return Response(None, status=HTTP_200_OK)
@action(detail=False, methods=["get"])
@error_catcher(Service.DATABASE)
def database(self, request):
"""Checks we can write/read/delete in the database"""
# Create
content = token_urlsafe(50)
instance = HealthcheckDummy.objects.create(content=content)
if instance is None:
raise LookupError("Failed to create the HealthcheckDummy instance")
# Get
fetched_instance = HealthcheckDummy.objects.get(pk=instance.id)
if fetched_instance is None:
raise ObjectDoesNotExist(
"Failed to fetch the created HealthcheckDummy instance"
)
if fetched_instance.content != content:
raise FieldError(
"Unexpected field value for the fetched HealthcheckDummy instance"
)
# Delete
HealthcheckDummy.objects.all().delete()
if HealthcheckDummy.objects.count() > 0:
raise RuntimeError(
"Failed to properly delete all HealthcheckDummy instances"
)
return Response(None, status=HTTP_200_OK)
@action(detail=False, methods=["get"])
@error_catcher(Service.MIGRATIONS)
def migrations(self, request):
"""Checks if all migrations have been applied to our database"""
executor = MigrationExecutor(connection)
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
if plan:
raise ImproperlyConfigured("There are migrations to apply")
return Response(None, status=HTTP_200_OK)
```
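The `error_catcher` decorator encapsulates the log-and-shield pattern shared by all four endpoints. A standalone sketch of that behaviour on a hypothetical plain function (reusing `Service`, `error_catcher`, and the DRF status constant imported above):
```python
@error_catcher(Service.API)
def flaky_endpoint(request):
    # Any uncaught exception is logged and converted into a 500 response
    raise RuntimeError("boom")

response = flaky_endpoint(None)  # Logs "Service API is KO: boom"
assert response.status_code == HTTP_500_INTERNAL_SERVER_ERROR
```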
#### File: security/models/network_rule.py
```python
from datetime import date, timedelta
# Django
from django.db.models import (
DateField,
GenericIPAddressField,
Index,
IntegerChoices,
IntegerField,
)
# Personal
from jklib.django.db.fields import ActiveField, RequiredField, TrimCharField
from jklib.django.db.models import LifeCycleModel
from jklib.django.db.queries import get_object_or_none
from jklib.django.utils.network import get_client_ip
from jklib.django.utils.settings import get_config
# --------------------------------------------------------------------------------
# > Models
# --------------------------------------------------------------------------------
class NetworkRule(LifeCycleModel):
"""Model to blacklist or whitelist IP addresses"""
# ----------------------------------------
# Constants
# ----------------------------------------
# Static
COMMENT_MAX_LENGTH = 255
# Overridable
DEFAULT_DURATION = 30 # settings.NETWORK_RULE_DEFAULT_DURATION
# Statuses
class Status(IntegerChoices):
"""Possible statuses for a NetworkRule"""
NONE = 0
WHITELISTED = 1
BLACKLISTED = 2
# ----------------------------------------
# Fields
# ----------------------------------------
ip = RequiredField(
GenericIPAddressField, protocol="IPv4", unique=True, verbose_name="IP Address",
)
status = RequiredField(
IntegerField,
choices=Status.choices,
default=Status.NONE,
verbose_name="Status",
db_index=True,
)
expires_on = DateField(
blank=True,
null=True,
default=None,
db_index=True, # clear_expired_entries
verbose_name="Expires on",
help_text="Expires at the end of said date",
)
active = ActiveField()
comment = TrimCharField(
max_length=COMMENT_MAX_LENGTH, blank=True, verbose_name="Comment"
)
# ----------------------------------------
# Behavior (meta, str, save)
# ----------------------------------------
class Meta:
db_table = "security_network_rules"
indexes = [
Index(fields=["active", "expires_on", "status"]) # For the `bulk_clear` API
]
ordering = ["-id"]
verbose_name = "Network Rule"
verbose_name_plural = "Network Rules"
def __str__(self):
"""
:return: Returns the network rule's IP address
:rtype: str
"""
return f"{self.ip}"
# ----------------------------------------
# Properties
# ----------------------------------------
@classmethod
def get_default_duration(cls):
"""
:return: The default duration for a status, which can be overridden in the settings
:rtype: int
"""
return get_config("NETWORK_RULE_DEFAULT_DURATION", cls.DEFAULT_DURATION)
@property
def computed_status(self):
"""
:return: The current state of the rule based on all its properties
:rtype: str
"""
if self.is_blacklisted:
return "blacklisted"
if self.is_whitelisted:
return "whitelisted"
return "inactive"
@property
def is_blacklisted(self):
"""
:return: Whether the rule/IP is currently active and blacklisted
:rtype: bool
"""
check = self.active and self.status == self.Status.BLACKLISTED
if self.expires_on is None:
return check
else:
return check and self.expires_on >= date.today()
@property
def is_whitelisted(self):
"""
:return: Whether the rule/IP is currently active and whitelisted
:rtype: bool
"""
check = self.active and self.status == self.Status.WHITELISTED
if self.expires_on is None:
return check
else:
return check and self.expires_on >= date.today()
# ----------------------------------------
# API for instance
# ----------------------------------------
def blacklist(self, end_date=None, comment=None, override=False):
"""
Updates the instance to blacklist its IP address
:param date end_date: The desired expiration date
:param str comment: The comment to add in the instance
:param bool override: Whether we allow blacklisting a whitelisted entry
"""
self._update_status("blacklist", end_date, comment, override)
def clear(self):
"""Clears the instance by defaulting its fields to neutral values"""
self.expires_on = None
self.active = False
self.status = self.Status.NONE
self.save()
def whitelist(self, end_date=None, comment=None, override=False):
"""
Updates the instance to whitelist its IP address
:param date end_date: The desired expiration date
:param str comment: The comment to add in the instance
:param bool override: Whether we allow whitelisting a blacklisted entry
"""
self._update_status("whitelist", end_date, comment, override)
def _compute_valid_end_date(self, end_date):
"""
Defaults the expiration date if none is provided
:param date end_date: The desired expiration date
:return: Either the provided date or the default one
:rtype: date
"""
if end_date is None:
delta_in_days = timedelta(days=self.get_default_duration())
end_date = date.today() + delta_in_days
return end_date
def _update_status(self, action, end_date, comment, override):
"""
Update all the required fields to whitelist or blacklist the rule's IP
:param str action: Action to perform, used to define the status check
:param date end_date: The desired expiration date
:param str comment: The comment to add in the instance
:param bool override: Whether we allow whitelisting a blacklisted entry
"""
if action == "whitelist":
status_check = self.Status.BLACKLISTED
new_status = self.Status.WHITELISTED
else:
status_check = self.Status.WHITELISTED
new_status = self.Status.BLACKLISTED
if override or self.status != status_check:
if comment is not None:
self.comment = comment
self.expires_on = self._compute_valid_end_date(end_date)
self.active = True
self.status = new_status
self.save()
# ----------------------------------------
# API for request
# ----------------------------------------
@classmethod
def blacklist_from_request(
cls, request, end_date=None, comment=None, override=False
):
"""
Creates or updates a blacklist rule for the request's IP, and returns the instance.
:param Request request: Request object used to get the IP address
:param date end_date: The desired expiration date
:param str comment: The comment to add in the instance
:param bool override: Whether we allow blacklisting a whitelisted entry
:return: The updated instance
:rtype: NetworkRule
"""
instance = cls._fetch_or_add(request)
instance.blacklist(end_date, comment, override)
return instance
@classmethod
def clear_from_request(cls, request):
"""
If it exists, clears and returns the NetworkRule model
:param Request request: Request object used to get the IP address
:return: The updated instance
:rtype: NetworkRule or None
"""
instance = cls._fetch(request)
if instance is not None:
instance.clear()
return instance
@classmethod
def whitelist_from_request(
cls, request, end_date=None, comment=None, override=False
):
"""
Creates or updates a whitelist rule for the request's IP, and returns the instance.
:param Request request: Request object used to get the IP address
:param date end_date: The desired expiration date
:param str comment: The comment to add in the instance
:param bool override: Whether we allow whitelisting a blacklisted entry
:return: The updated instance
:rtype: NetworkRule
"""
instance = cls._fetch_or_add(request)
instance.whitelist(end_date, comment, override)
return instance
@classmethod
def is_blacklisted_from_request(cls, request):
"""
Checks if a blacklisted NetworkRule linked to the request IP exists
:param Request request: Request object that could be used to get/add the instance
:return: Whether the IP is blacklisted
:rtype: bool
"""
instance = cls._fetch(request)
if instance is not None:
return instance.is_blacklisted
return False
@classmethod
def is_whitelisted_from_request(cls, request):
"""
Checks if a whitelisted NetworkRule linked to the request IP exists
:param Request request: Request object that could be used to get/add the instance
:return: Whether the IP is whitelisted
:rtype: bool
"""
instance = cls._fetch(request)
if instance is not None:
return instance.is_whitelisted
return False
@classmethod
def _fetch(cls, request):
"""
Fetches an existing NetworkRule instance using the Request object
:param Request request: A django Request object
:return: The existing instance linked to this IP
:rtype: NetworkRule
"""
ip_address = get_client_ip(request)
instance = get_object_or_none(cls, ip=ip_address)
return instance
@classmethod
def _fetch_or_add(cls, request):
"""
Fetches an existing NetworkRule instance or create a new one using the Request object
:param Request request: A django Request object
:return: The found (or newly-added) NetworkRule instance
:rtype: NetworkRule
"""
ip_address = get_client_ip(request)
instance = get_object_or_none(cls, ip=ip_address)
if instance is None:
instance = cls(ip=ip_address, active=False)
return instance
# ----------------------------------------
# CRON jobs
# ----------------------------------------
@classmethod
def clear_expired_entries(cls):
"""Clears all expired rules"""
today = date.today()
instances = cls.objects.filter(expires_on__isnull=False).filter(
expires_on__lt=today
)
for instance in instances:
instance.clear()
```
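A compact sketch of the rule lifecycle exposed by the instance API (assumes a configured Django project; the IP address is illustrative):
```python
rule = NetworkRule(ip="203.0.113.7")
rule.blacklist(comment="Suspicious traffic")  # Expires after the default duration
assert rule.computed_status == "blacklisted"

rule.whitelist()  # Ignored: whitelisting a blacklisted rule requires override
assert rule.is_blacklisted

rule.whitelist(override=True)  # Explicit override flips the status
assert rule.computed_status == "whitelisted"

rule.clear()  # Back to the neutral, inactive state
assert rule.computed_status == "inactive"
```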
#### File: security/models/security_token.py
```python
from datetime import datetime, timedelta, timezone
from secrets import token_urlsafe
# Django
from django.contrib.auth import get_user_model
from django.db.models import CharField, DateTimeField, Index
# Personal
from jklib.django.db.fields import ActiveField, ForeignKeyCascade, RequiredField
from jklib.django.db.models import LifeCycleModel
from jklib.django.db.queries import get_object_or_none
# --------------------------------------------------------------------------------
# > Models
# --------------------------------------------------------------------------------
class SecurityToken(LifeCycleModel):
"""
    Tokens are one-time passwords (OTPs) linked to users, to allow for special actions like password resets
    Only 1 active token per user/type
    Tokens have a limited duration/lifespan
Can only be used once
The expected workflow of the model API is:
create_new_token --> Creates a new token for a user/type
fetch_token_instance --> Fetches the Token instance linked to your token value
consume_token --> Token won't be usable anymore
Other utilities for clean up and security are also available
"""
# ----------------------------------------
# Constants
# ----------------------------------------
MIN_DURATION = 300 # 5 minutes
MAX_DURATION = 604800 # 7 days
TYPE_MAX_LENGTH = 50
# ----------------------------------------
# Fields
# ----------------------------------------
user = RequiredField(
ForeignKeyCascade, get_user_model(), related_name="tokens", verbose_name="User"
)
type = RequiredField(CharField, max_length=TYPE_MAX_LENGTH, verbose_name="Type")
value = RequiredField(
CharField, unique=True, max_length=1000, verbose_name="Token value"
)
expired_at = RequiredField(DateTimeField, verbose_name="Expires at")
used_at = DateTimeField(null=True, blank=True, verbose_name="Used at")
is_active_token = ActiveField()
# ----------------------------------------
# Behavior (meta, str, save)
# ----------------------------------------
class Meta:
db_table = "security_tokens"
indexes = [
Index(fields=["user", "type", "is_active_token"]), # deactivate_user_tokens
Index(fields=["type", "value"]), # fetch_token_instance
Index(fields=["used_at", "expired_at"]), # cleanup_expired_unused_tokens
]
ordering = ["-id"]
verbose_name = "Token"
verbose_name_plural = "Tokens"
def __str__(self):
"""
:return: Returns the token value
:rtype: str
"""
return f"{self.value}"
# ----------------------------------------
# Properties
# ----------------------------------------
@property
def can_be_used(self):
"""
        :return: Whether the token is active, unused, and not expired
:rtype: bool
"""
return self.is_active_token and (not self.is_used) and (not self.is_expired)
@property
def is_expired(self):
"""
:return: Whether the token has expired
:rtype: bool
"""
now = datetime.now(timezone.utc)
return self.expired_at < now
@property
def is_used(self):
"""
:return: Whether the token has been used
:rtype: bool
"""
return self.used_at is not None
# ----------------------------------------
# Public API
# ----------------------------------------
def consume_token(self):
"""Deactivates the token and stores its used timestamp"""
self.used_at = datetime.now(timezone.utc)
self.deactivate_token()
@classmethod
def create_new_token(cls, user, token_type, token_duration):
"""
Creates a new token for the user/type, and deactivates the previous ones
:param User user: Instance from the User model
:param str token_type: Type of the token
:param int token_duration: Token lifespan in seconds
        :return: The newly created token instance
:rtype: SecurityToken
"""
token_value = cls._generate_unique_token()
token_params = cls._get_valid_token_params(
user, token_value, token_type, token_duration
)
cls.deactivate_user_tokens(user, token_params["type"])
token_instance = cls.objects.create(**token_params)
return token_instance
def deactivate_token(self):
"""Marks a token as not being the active one anymore"""
self.is_active_token = False
self.save()
@classmethod
def deactivate_user_tokens(cls, user, token_type=None):
"""
Deactivates all tokens for a user. Can be narrowed down to a specific type.
:param User user: The user whose tokens must be deactivated
:param str token_type: Type of the token. Defaults to None
"""
tokens = cls.objects.filter(user=user, is_active_token=True)
if token_type is not None:
tokens = tokens.filter(type=token_type)
for token in tokens:
token.deactivate_token()
@classmethod
def fetch_token_instance(cls, token_value, token_type):
"""
Tries to fetch an ACTIVE Token instance using a token value and type
:param str token_value: Value of the token
:param str token_type: Type of the token
:return: The valid token instance or None
        :rtype: SecurityToken or None
"""
token = get_object_or_none(cls, value=token_value, type=token_type)
if token is not None and token.can_be_used:
return token
else:
return None
# ----------------------------------------
# Cron jobs
# ----------------------------------------
@classmethod
def cleanup_expired_unused_tokens(cls):
"""Deletes all the tokens that are expired and haven't been used"""
now = datetime.now(timezone.utc)
expired_unused_tokens = cls.objects.filter(used_at=None, expired_at__lt=now)
expired_unused_tokens.delete()
# ----------------------------------------
# Private
# ----------------------------------------
@classmethod
def _generate_unique_token(cls):
"""
:return: The unique value to be used for creating a new token
:rtype: str
"""
while True:
token_value = token_urlsafe(50)
results = cls.objects.filter(value=token_value)
if len(results) == 0:
break
return token_value
@classmethod
def _get_valid_token_params(cls, user, token_value, token_type, token_duration):
"""
Validates (and replaces if necessary) the parameters for creating a new token
:param User user: Instance of the User model
:param str token_value: Value of the token, which should be unique
:param str token_type: Type of the token
:param int token_duration: Token lifespan
:return: Parameters to be used for creating a new token
:rtype: dict
"""
token_type = cls._validate_token_type(token_type)
token_duration = cls._validate_token_duration(token_duration)
expiration_date = datetime.now(timezone.utc) + timedelta(seconds=token_duration)
return {
"user": user,
"type": token_type,
"value": token_value,
"expired_at": expiration_date,
"used_at": None,
"is_active_token": True,
}
@classmethod
def _validate_token_duration(cls, value):
"""
        Returns the initial duration if it is a valid integer, else raises an error
:param int value: Duration of the token in seconds
:raise TypeError: When the provided value is not an integer
:raise ValueError: When the provided value is out of bounds
:return: The initial value, if valid
:rtype: int
"""
if type(value) != int:
raise TypeError("Token duration must be an integer")
if value < cls.MIN_DURATION or value > cls.MAX_DURATION:
raise ValueError(
f"Token duration must be between {cls.MIN_DURATION} and {cls.MAX_DURATION} seconds"
)
return value
@classmethod
def _validate_token_type(cls, value):
"""
Returns the initial type if it is a non-empty string, else raises an error
:param str value: Type of the token
        :raise TypeError: When the provided value is not a string
:raise ValueError: When the provided value is empty
:return: The trimmed value, if valid
:rtype: str
"""
if type(value) != str:
raise TypeError("Token type must be a string")
value = value.strip()
if value == "":
raise ValueError("Token type cannot be empty")
return value
```
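The workflow described in the class docstring, as a short usage sketch (assumes an existing `user` and a configured Django project; the token type and duration are illustrative):
```python
# 1. Issue a one-time token (older "password_reset" tokens get deactivated)
token = SecurityToken.create_new_token(user, "password_reset", 3600)

# 2. Later, resolve the value sent back by the user to an active instance
instance = SecurityToken.fetch_token_instance(token.value, "password_reset")
assert instance is not None and instance.can_be_used

# 3. Consume it: the token is timestamped and can never be used again
instance.consume_token()
assert SecurityToken.fetch_token_instance(token.value, "password_reset") is None
```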
#### File: tests/models/test_network_rule.py
```python
from datetime import date, timedelta
# Django
from django.conf import settings
# Personal
from jklib.django.db.tests import ModelTestCase
from jklib.django.utils.network import get_client_ip
from jklib.django.utils.tests import assert_logs
# Local
from ...factories import NetworkRuleFactory
from ...models import NetworkRule
# --------------------------------------------------------------------------------
# > TestCase
# --------------------------------------------------------------------------------
class TestNetworkRule(ModelTestCase):
"""TestCase for the 'NetworkRule' model"""
model_class = NetworkRule
# ----------------------------------------
# Property tests
# ----------------------------------------
@assert_logs("security", "INFO")
def test_get_default_duration(self):
"""Tests the default duration returns the settings duration"""
instance = NetworkRuleFactory()
if hasattr(settings, "NETWORK_RULE_DEFAULT_DURATION"):
assert (
instance.get_default_duration()
== settings.NETWORK_RULE_DEFAULT_DURATION
)
else:
assert instance.get_default_duration() == instance.DEFAULT_DURATION
@assert_logs(logger="security", level="INFO")
def test_computed_status(self):
"""Tests the computed_status works as intended"""
instance = NetworkRuleFactory()
instance.whitelist(override=True)
assert instance.computed_status == "whitelisted"
instance.blacklist(override=True)
assert instance.computed_status == "blacklisted"
instance.clear()
assert instance.computed_status == "inactive"
@assert_logs(logger="security", level="INFO")
def test_is_blacklisted(self):
"""Tests that a blacklisted rule is correctly flagged as blacklisted"""
instance = NetworkRuleFactory()
instance.blacklist()
assert instance.is_blacklisted
instance.whitelist(override=True)
assert not instance.is_blacklisted
@assert_logs(logger="security", level="INFO")
def test_is_whitelisted(self):
"""Tests that a whitelisted rule is correctly flagged as whitelisted"""
instance = NetworkRuleFactory()
instance.whitelist()
assert instance.is_whitelisted
instance.blacklist(override=True)
assert not instance.is_whitelisted
# ----------------------------------------
# Instance API tests
# ----------------------------------------
@assert_logs(logger="security", level="INFO")
def test_blacklist(self):
"""Tests the 'blacklist' method"""
self._test_activate("blacklist")
@assert_logs(logger="security", level="INFO")
def test_clear(self):
"""Tests 'clear' correctly resets the model fields"""
instance = NetworkRuleFactory(do_blacklist=True)
instance.clear()
assert not instance.active
assert instance.status == NetworkRule.Status.NONE
assert instance.expires_on is None
@assert_logs(logger="security", level="INFO")
def test_whitelist(self):
"""Tests the 'blacklist' method"""
self._test_activate("whitelist")
# ----------------------------------------
# Request API tests
# ----------------------------------------
@assert_logs(logger="security", level="INFO")
def test_blacklist_from_request(self):
"""Tests the 'blacklist_from_request' method"""
self._test_activate_from_api("blacklist")
@assert_logs(logger="security", level="INFO")
def test_clear_from_request(self):
"""Tests 'clear_from_request' correctly resets the model fields"""
fake_request = self.build_fake_request()
fake_ip_address = get_client_ip(fake_request)
NetworkRuleFactory(ip=fake_ip_address, do_blacklist=True)
instance = self.model_class.clear_from_request(fake_request)
assert not instance.active
assert instance.status == NetworkRule.Status.NONE
assert instance.expires_on is None
@assert_logs(logger="security", level="INFO")
def test_whitelist_from_request(self):
"""Tests the 'whitelist_from_request' method"""
self._test_activate_from_api("whitelist")
@assert_logs(logger="security", level="INFO")
def test_is_blacklisted_from_request(self):
"""Tests that a blacklisted rule is correctly flagged as blacklisted"""
fake_request = self.build_fake_request()
fake_ip_address = get_client_ip(fake_request)
NetworkRuleFactory(ip=fake_ip_address)
self.model_class.blacklist_from_request(fake_request)
assert self.model_class.is_blacklisted_from_request(fake_request)
@assert_logs(logger="security", level="INFO")
def test_is_whitelisted_from_request(self):
"""Tests that a whitelisted rule is correctly flagged as whitelisted"""
fake_request = self.build_fake_request()
fake_ip_address = get_client_ip(fake_request)
NetworkRuleFactory(ip=fake_ip_address)
self.model_class.whitelist_from_request(fake_request)
assert self.model_class.is_whitelisted_from_request(fake_request)
# ----------------------------------------
# Signals
# ----------------------------------------
@assert_logs(logger="security", level="INFO")
def test_log_signals(self):
"""Tests that logs are generated on creation, update, and deletion"""
logs = self.logger_context.output
instance = NetworkRuleFactory() # Factory creates and updates, so 2 logs
assert logs[0] == self._build_log_message(instance, "created")
assert logs[1] == self._build_log_message(instance, "updated")
instance.save()
assert logs[2] == self._build_log_message(instance, "updated")
instance.delete()
assert logs[3] == self._build_log_message(instance, "deleted")
# ----------------------------------------
# Cron tests
# ----------------------------------------
@assert_logs(logger="security", level="INFO")
def test_clear_expired_entries(self):
"""Tests that only the eligible entries are correctly cleared"""
payloads, instances, clear_eligibility = self._create_instances_for_clear_test()
NetworkRule.clear_expired_entries()
for payload, instance, cleared in zip(payloads, instances, clear_eligibility):
if cleared:
updated_instance = self.model_class.objects.get(pk=instance.id)
assert updated_instance.expires_on is None
assert not updated_instance.active
assert updated_instance.status == NetworkRule.Status.NONE
else:
assert instance.expires_on == payload["expires_on"]
assert instance.active == payload["active"]
assert instance.status == payload["status"]
# ----------------------------------------
# Helpers
# ----------------------------------------
def _test_activate(self, name):
"""
Utility function to test the 'blacklist' or 'whitelist' methods
:param str name: Either blacklist or whitelist
"""
instance = NetworkRuleFactory()
opposite_name = "whitelist" if name == "blacklist" else "blacklist"
main_method = getattr(instance, name)
main_property = lambda: getattr(instance, f"is_{name}ed")
opposite_method = getattr(instance, opposite_name)
opposite_property = lambda: getattr(instance, f"is_{opposite_name}ed")
# Without end_date
new_comment = "Comment 1"
main_method(comment=new_comment)
default_end_date = date.today() + timedelta(
days=instance.get_default_duration()
)
assert main_property()
assert instance.expires_on == default_end_date
assert instance.comment == new_comment
# With end_date
instance.clear()
end_date = date.today() + timedelta(days=3)
main_method(end_date=end_date, comment=new_comment)
assert main_property()
assert instance.expires_on == end_date
# Without override
instance.clear()
opposite_method()
main_method(override=False, end_date=end_date)
assert not main_property()
assert opposite_property()
# With override
main_method(override=True, end_date=end_date)
assert not opposite_property()
assert main_property()
def _test_activate_from_api(self, name):
"""
Utility function to test the 'blacklist_from_request' or 'whitelist_from_request'
:param str name: Either blacklist or whitelist
"""
main_class_method = getattr(self.model_class, f"{name}_from_request")
fake_request = self.build_fake_request()
new_comment = "Comment 1"
instance = main_class_method(fake_request, comment=new_comment)
# Setup dynamic instance calls
opposite_name = "whitelist" if name == "blacklist" else "blacklist"
opposite_method = getattr(instance, f"{opposite_name}")
main_property = lambda: getattr(instance, f"is_{name}ed")
opposite_property = lambda: getattr(instance, f"is_{opposite_name}ed")
# Without end_date
default_end_date = date.today() + timedelta(
days=instance.get_default_duration()
)
assert main_property()
assert instance.expires_on == default_end_date
assert instance.comment == new_comment
# With end_date
instance.clear()
end_date = date.today() + timedelta(days=3)
instance = main_class_method(
fake_request, end_date=end_date, comment=new_comment
)
assert main_property()
assert instance.expires_on == end_date
# Without override
instance.clear()
opposite_method()
instance = main_class_method(fake_request, end_date=end_date, override=False)
assert not main_property()
assert opposite_property()
# With override
instance = main_class_method(fake_request, end_date=end_date, override=True)
assert not opposite_property()
assert main_property()
@staticmethod
def _build_log_message(instance, type_):
"""
        Generate the expected log message for an action on a NetworkRule
:param NetworkRule instance: Any NetworkRule instance
:param str type_: Should be 'created', 'updated', or 'deleted'
:return: The expected log message
:rtype: str
"""
return f"INFO:security:NetworkRule {type_} for {instance.ip} (Status: {instance.computed_status})"
def _create_instances_for_clear_test(self):
"""
Builds various NetworkRule instances
        Returns: the payloads used, the instances, and whether each instance is clearable
:return: 3 lists of identical sizes
:rtype: tuple(list, list, list)
"""
# Prepare data
expired_date = date.today() - timedelta(days=5)
valid_date = date.today() + timedelta(days=3)
data = [
            # Status, Expires on, Active, Whether it should be cleared
(NetworkRule.Status.NONE, None, False, False),
(NetworkRule.Status.BLACKLISTED, expired_date, True, True),
(NetworkRule.Status.WHITELISTED, None, False, False),
(NetworkRule.Status.BLACKLISTED, valid_date, True, False),
(NetworkRule.Status.WHITELISTED, expired_date, True, True),
]
instances = []
clear_eligibility = []
payloads = []
# Create instances and store data in lists
for row in data:
payload = {
"status": row[0],
"expires_on": row[1],
"active": row[2],
}
to_be_cleared = row[3]
instance = NetworkRuleFactory(**payload)
payloads.append(payload)
instances.append(instance)
clear_eligibility.append(to_be_cleared)
return payloads, instances, clear_eligibility
```
#### File: django_backbone/users/serializers.py
```python
from django.contrib.auth.password_validation import validate_password
from rest_framework import serializers
# Personal
from jklib.django.drf.serializers import (
ImprovedSerializer,
NoCreateMixin,
NoUpdateMixin,
required,
)
# Application
from security.models import SecurityToken
# Local
from .models import User
# --------------------------------------------------------------------------------
# > Utilities
# --------------------------------------------------------------------------------
PasswordField = lambda: serializers.CharField(write_only=True, **required())
class PasswordValidationMixin:
"""Provides validation for a `password` and `confirm_password` field"""
@staticmethod
def validate_password(value):
"""
Checks the password strength
:return: The raw password
:rtype: str
"""
validate_password(value)
return value
def validate_confirm_password(self, value):
"""
Checks it matches the provided password
:return: The provided value
:rtype: str
"""
password = self.initial_data["password"]
if value != password:
raise serializers.ValidationError("Passwords do not match")
return value
# --------------------------------------------------------------------------------
# > User Serializers
# --------------------------------------------------------------------------------
class BaseUserSerializer(NoCreateMixin, serializers.ModelSerializer):
"""Base serializer without the password data. Only for updates."""
class Meta:
model = User
fields = ["id", "email", "first_name", "last_name", "is_verified"]
read_only_fields = ["id", "is_verified"]
class UserCreateSerializer(NoUpdateMixin, PasswordValidationMixin, BaseUserSerializer):
"""Extends BaseUserSerializer to provide the password fields. Only for creations."""
password = PasswordField()
confirm_password = PasswordField()
class Meta(BaseUserSerializer.Meta):
fields = BaseUserSerializer.Meta.fields + ["password", "confirm_password"]
def create(self, validated_data):
"""
:param dict validated_data:
:return: The created user instance
:rtype: User
"""
validated_data.pop("confirm_password")
return self.Meta.model.create_user(**validated_data)
class BaseUserAdminSerializer(BaseUserSerializer):
"""Same as BaseUserSerializer with access to more fields (made for admins). Only for updates"""
class Meta(BaseUserSerializer.Meta):
fields = BaseUserSerializer.Meta.fields + [
"is_active",
"is_staff",
]
class UserAdminCreateSerializer(
NoUpdateMixin, PasswordValidationMixin, BaseUserAdminSerializer
):
"""Extends BaseUserAdminSerializer to provide the password fields. Only for creations."""
password = PasswordField()
confirm_password = PasswordField()
    class Meta(BaseUserAdminSerializer.Meta):
fields = BaseUserAdminSerializer.Meta.fields + ["password", "confirm_password"]
def create(self, validated_data):
"""
:param dict validated_data:
:return: The created user instance
:rtype: User
"""
validated_data.pop("confirm_password")
return self.Meta.model.create_user(**validated_data)
# --------------------------------------------------------------------------------
# > Password serializers
# --------------------------------------------------------------------------------
class UpdatePasswordSerializer(PasswordValidationMixin, ImprovedSerializer):
"""Similar to 'OverridePasswordSerializer' but asks for the user's current password"""
current_password = PasswordField()
password = PasswordField()
confirm_password = PasswordField()
class Meta:
fields = ["password", "confirm_password", "current_password"]
def update(self, user, validated_data):
"""
Updates the user's password and returns the instance
:param User user:
:param dict validated_data:
:return: The updated user
:rtype: User
"""
user.set_password(validated_data["password"])
user.save()
return user
def validate_current_password(self, current_password):
"""
Checks the value matches the user's current password
:param str current_password:
:return: The raw password
:rtype: str
"""
user = self.instance
if not user.check_password(current_password):
raise serializers.ValidationError("Invalid current password")
return current_password
class PasswordResetSerializer(PasswordValidationMixin, ImprovedSerializer):
"""Similar to 'OverridePasswordSerializer' but it uses a token to get the user instance"""
token = serializers.CharField(write_only=True, **required())
password = PasswordField()
confirm_password = PasswordField()
class Meta:
fields = ["password", "confirm_password", "token"]
def create(self, validated_data):
"""
Consumes the token, updates the user's password, and returns the updated user
:param dict validated_data:
:return: The found and updated user instance
:rtype: User
"""
token_instance = validated_data["token"]
user = token_instance.user
user.set_password(validated_data["password"])
user.save()
token_instance.consume_token()
return user
@staticmethod
def validate_token(value):
"""
Checks the token value matches an active RESET Token, and returns its instance
:param str value:
:return: The fetched token instance
:rtype: Token
"""
token_type, _ = User.RESET_TOKEN
token_instance = SecurityToken.fetch_token_instance(value, token_type)
if token_instance is None:
raise serializers.ValidationError("Invalid or expired token")
return token_instance
class RequestPasswordResetSerializer(ImprovedSerializer):
"""Serializer that asks for an email address"""
email = serializers.EmailField(**required())
class Meta:
fields = ["email"]
# --------------------------------------------------------------------------------
# > Others
# --------------------------------------------------------------------------------
class UserVerificationSerializer(ImprovedSerializer):
"""Serializer that checks for a VERIFY token"""
token = serializers.CharField(write_only=True, **required())
class Meta:
fields = ["token"]
def create(self, validated_data):
"""
        Consumes the token, flags the corresponding user as verified, and returns the user instance
:param dict validated_data:
:return: The user instance and whether it was updated
:rtype: User, bool
"""
token_instance = validated_data["token"]
user = token_instance.user
has_changed = False
if not user.is_verified:
user.is_verified = True
user.save()
has_changed = True
token_instance.consume_token()
return user, has_changed
@staticmethod
def validate_token(value):
"""
Checks the token value matches an active VERIFY Token, and returns its instance
:param str value:
:return: The fetched token instance
:rtype: Token
"""
token_type, _ = User.VERIFY_TOKEN
token_instance = SecurityToken.fetch_token_instance(value, token_type)
if token_instance is None:
raise serializers.ValidationError("Invalid or expired token")
return token_instance
```
#### File: django_backbone/users/viewsets.py
```python
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.status import (
HTTP_201_CREATED,
HTTP_202_ACCEPTED,
HTTP_204_NO_CONTENT,
HTTP_422_UNPROCESSABLE_ENTITY,
)
# Personal
from jklib.django.db.queries import get_object_or_none
from jklib.django.drf.permissions import (
AllowAny,
IsAdminUser,
IsNotAuthenticated,
IsObjectOwner,
)
from jklib.django.drf.serializers import IdListSerializer
from jklib.django.drf.viewsets import ImprovedModelViewSet, ImprovedViewSet
# Application
from core.permissions import IsNotVerified
from security.models import SecurityToken
# Local
from .models import User
from .serializers import (
BaseUserAdminSerializer,
BaseUserSerializer,
PasswordResetSerializer,
RequestPasswordResetSerializer,
UpdatePasswordSerializer,
UserAdminCreateSerializer,
UserCreateSerializer,
UserVerificationSerializer,
)
# --------------------------------------------------------------------------------
# > ViewSets
# --------------------------------------------------------------------------------
class UserAdminViewSet(ImprovedModelViewSet):
"""User API for admins"""
queryset = User.objects.all()
viewset_permissions = (IsAdminUser,)
permission_classes = {"default": None}
serializer_classes = {
"default": BaseUserAdminSerializer,
"bulk_destroy": IdListSerializer,
"create": UserAdminCreateSerializer,
"request_verification": None,
}
@action(detail=True, methods=["post"])
def request_verification(self, request, pk=None):
"""Sends an email to the user for him to verify his account"""
user = self.get_object()
if user.is_verified:
return Response(None, status=HTTP_422_UNPROCESSABLE_ENTITY)
token_type, token_duration = User.VERIFY_TOKEN
token = SecurityToken.create_new_token(user, token_type, token_duration)
user.send_verification_email(token, async_=False)
return Response(None, HTTP_204_NO_CONTENT)
class UserViewSet(
mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
ImprovedViewSet,
):
"""User API for users"""
queryset = User.objects.all()
viewset_permissions = None
permission_classes = {
"default": (IsObjectOwner,),
"create": (IsNotAuthenticated,),
"request_password_reset": (IsNotAuthenticated,),
"perform_password_reset": (IsNotAuthenticated,),
"request_verification": (IsObjectOwner, IsNotVerified),
"perform_verification": (AllowAny,),
}
serializer_classes = {
"default": BaseUserSerializer,
"create": UserCreateSerializer,
"perform_password_reset": PasswordResetSerializer,
"request_password_reset": RequestPasswordResetSerializer,
"request_verification": None,
"update_password": UpdatePasswordSerializer,
"perform_verification": UserVerificationSerializer,
}
def create(self, request, *args, **kwargs):
"""Overridden to send the user an email"""
serializer = self.get_valid_serializer(data=request.data)
user = serializer.save()
if not user.is_verified:
token_type, token_duration = User.VERIFY_TOKEN
token = SecurityToken.create_new_token(user, token_type, token_duration)
user.send_verification_email(token, async_=True)
else:
user.send_welcome_email(async_=True)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=HTTP_201_CREATED, headers=headers)
@action(detail=False, methods=["post"])
def perform_password_reset(self, request):
"""Resets the user password if the token is valid"""
serializer = self.get_valid_serializer(data=request.data)
user = serializer.save()
user.send_password_updated_email(async_=True)
return Response(None, HTTP_204_NO_CONTENT)
@action(detail=False, methods=["post"])
def perform_verification(self, request):
"""Flags the user linked to the token as verified"""
serializer = self.get_valid_serializer(data=request.data)
user, has_changed = serializer.save()
if has_changed:
user.send_welcome_email(async_=True)
return Response(None, HTTP_204_NO_CONTENT)
@action(detail=False, methods=["post"])
def request_password_reset(self, request):
"""Creates a token and ends the reset email to the user matching the provided email"""
serializer = self.get_valid_serializer(data=request.data)
email = serializer.validated_data["email"]
user = get_object_or_none(User, email=email)
if user is not None:
token_type, token_duration = User.RESET_TOKEN
token = SecurityToken.create_new_token(user, token_type, token_duration)
user.send_reset_password_email(token, async_=True)
return Response(None, HTTP_202_ACCEPTED)
@action(detail=True, methods=["post"])
def request_verification(self, request, pk=None):
"""Creates a token and sends the verification email to our user"""
user = self.get_object()
if user.is_verified:
return Response(None, status=HTTP_422_UNPROCESSABLE_ENTITY)
token_type, token_duration = User.VERIFY_TOKEN
token = SecurityToken.create_new_token(user, token_type, token_duration)
user.send_verification_email(token, async_=False)
return Response(None, HTTP_204_NO_CONTENT)
@action(detail=True, methods=["post"])
def update_password(self, request, pk=None):
"""Updates our user's current password"""
user = self.get_object()
serializer = self.get_valid_serializer(user, data=request.data)
user = serializer.save()
user.send_password_updated_email(async_=True)
return Response(None, HTTP_204_NO_CONTENT)
``` |
{
"source": "Jordan-Kowal/django_database_translation",
"score": 2
} |
#### File: django_database_translation/django_database_translation/forms.py
```python
from django import forms
# Third-party
# Local
from .models import Language
# --------------------------------------------------------------------------------
# > Classes
# --------------------------------------------------------------------------------
class DynamicTranslationForm(forms.ModelForm):
"""
Form to use in a ModelAdmin for any model that has fields to translate.
It will allow you to display and edit the Translation instances linked to the object.
Since fields are dynamically generated, you must override the get_fieldsets method in the admin (or else they won't show)
The "TranslatedAdmin" ModelAdmin natively use this form.
"""
# ----------------------------------------
# Core Methods
# ----------------------------------------
def __init__(self, *args, **kwargs):
"""Overridden method to dynamically add a new field for each Translation linked with our object"""
super(DynamicTranslationForm, self).__init__(*args, **kwargs)
if self.instance.pk:
self.set_translation_info()
for translation in self.translations:
self.fields[translation["fieldname"]] = translation["field"]
self.initial[translation["fieldname"]] = translation["instance"].text
def save(self, commit=True):
"""Overridden method to save the updated Translation texts"""
if self.instance.pk:
for translation in self.translations:
obj = translation["instance"]
fieldname = translation["fieldname"]
value = self.cleaned_data[fieldname]
obj.text = value
obj.save()
return super(DynamicTranslationForm, self).save(commit=commit)
# ----------------------------------------
# Custom Methods
# ----------------------------------------
def set_translation_info(self):
"""
Finds all the Translation instances linked to our object, and stores their info in an attribute
        The attribute is a list of dicts, each containing the information of one translation
"""
obj = self.instance
information = []
translations = obj.get_translations()
for translation in translations:
fieldname = create_translation_fieldname(translation)
information.append({
"instance": translation,
"fieldname": fieldname,
"field": forms.CharField(required=False, widget=forms.Textarea)
})
self.translations = information
class LanguageSelection(forms.Form):
"""
Generic form for the Language model, with only the ID field
    Can be useful if you need a frontend form where the user chooses their language
"""
# ----------------------------------------
# Choices
# ----------------------------------------
def available_languages():
"""Returns all the available language in the database"""
languages = Language.objects.all()
choices = [(language.id, language.name) for language in languages]
return choices
# ----------------------------------------
# Fields
# ----------------------------------------
language_id = forms.ChoiceField(
label="",
required=True,
choices=available_languages,
widget=forms.RadioSelect,
)
# ----------------------------------------
# Custom Validation Methods
# ----------------------------------------
def clean_language_id(self):
"""
Custom validator for the "language_id" field
Checks if the language exists, or raises an error
"""
language_id = self.cleaned_data.get("language_id")
try:
Language.objects.get(id=language_id)
return language_id
except Language.DoesNotExist:
raise forms.ValidationError("ID Language incorrecte")
# --------------------------------------------------------------------------------
# > Functions
# --------------------------------------------------------------------------------
def create_translation_fieldname(translation):
"""
Description:
Generates a unique fieldname based on a given Translation instance
Args:
translation (Translation): Translation instance from our Translation model
Returns:
str: The generated field name
"""
field = translation.item.field.name
language = translation.language.name
fieldname = "{} in {} (id={})".format(field, language, translation.id)
return fieldname
```
#### File: django_database_translation/django_database_translation/utils.py
```python
from django.db import models
from django.db.models.fields.files import ImageFieldFile, FieldFile
from django.utils.translation import activate, LANGUAGE_SESSION_KEY
# Third-party
# Local
from .models import Item, Language, Translation
# --------------------------------------------------------------------------------
# > Functions
# --------------------------------------------------------------------------------
def all_instances_as_translated_dict(instances, depth=True, language=None, request=None):
"""
Description:
Applies 'instance_as_translated_dict' to the iterable of instances
Returns a list of dicts which contains the fields of all your instances
Check the 'instance_as_translated_dict' for more info
Args:
instances (iterable): An iterable of your model instances
depth (bool, optional): Determines if FK will also be transformed into dicts. Defaults to True.
language (Language, optional): A Language instance from this app. Defaults to None.
        request (HttpRequest, optional): HttpRequest from Django. Defaults to None.
Returns:
list: A list of dicts, where each dict contains the fields/values of the initial instances
"""
# Checking arguments
if language is None and request is None:
raise TypeError("You must provide either 'language' or 'request'")
# Get the language from the session
if language is None:
language = get_current_language(request)
# Loop over instances
results = []
for instance in instances:
result = instance_as_translated_dict(instance, depth=depth, language=language)
results.append(result)
return results
def get_current_language(request, set_default=True, default_id=1):
"""
Description:
Returns the current active language. Will set a default language if none is found.
Args:
request (HttpRequest): HttpRequest from Django
        set_default (Boolean): Indicates if a default language must be activated (if none currently is). Defaults to True.
        default_id (Integer): The PK for the default Language instance. Defaults to 1
Returns:
Language: The currently used language from our app's Language model
"""
# Base variables
language = None
language_name = request.session.get(LANGUAGE_SESSION_KEY, False)
# Get the language
if language_name:
try:
language = Language.objects.get(django_language_name=language_name)
except Language.DoesNotExist:
pass
# Set a default language if necessary
if language is None and set_default:
language = set_default_language(request, default_id)
# Always return the active language
return language
def get_translation(language, item_id):
"""
Description:
Returns a translated text using an Item id and a Language instance
Args:
language (Language): Language instance from this app
item_id (int): Key contained in the 'translated field'
Returns:
str: The translated text
"""
translation = ""
try:
entry = Translation.objects.get(language=language, item_id=item_id)
translation = entry.text
except Translation.DoesNotExist:
pass
return translation
def instance_as_translated_dict(instance, depth=True, language=None, request=None):
"""
Description:
        Turns a model instance into a dict containing all of its fields
        Language can be given as an argument, or guessed through the use of "request"
With "depth" set to True, ForeignKey will also be transformed into sub-dict
Files and images are replaced by a subdict with 'path', 'url', and 'name' keys
Meaning you will be able to manipulate the dict in an HTML template much like an instance
Args:
instance (Model): An instance from any of your models
depth (bool, optional): Determines if FK will also be transformed into dicts. Defaults to True.
language (Language, optional): A Language instance from this app. Defaults to None.
        request (HttpRequest, optional): HttpRequest from Django. Defaults to None.
Returns:
dict: A dict with all of the instance's fields and values
"""
# Checking arguments
if language is None and request is None:
raise TypeError("You must provide either 'language' or 'request'")
# Get the language from the session
if language is None:
language = get_current_language(request)
# Loop over fields
translated_dict = {}
fields = instance._meta.get_fields()
for field in fields:
value = getattr(instance, field.name, None)
if value is not None:
value_type = type(value)
# Case 1: Get the translation
if value_type == Item:
new_value = Translation.objects.get(item=value, language=language).text
# Case 2: Go to the linked model and repeat the process (unless depth=False)
elif issubclass(value_type, models.Model):
if depth:
new_value = instance_as_translated_dict(value, depth=True, language=language)
else:
new_value = value
            # Case 3: Replace file/image fields with a dict of their name, url, and path
elif value_type in {ImageFieldFile, FieldFile}:
if value:
new_value = {
"name": getattr(value, "name", ""),
"url": getattr(value, "url", ""),
"path": getattr(value, "path", ""),
}
else:
new_value = ""
# Case 4: Keep the value as it is
else:
new_value = value
translated_dict[field.name] = new_value
return translated_dict
def set_default_language(request, pk=1):
"""Sets the default language if none is chosen"""
language = Language.objects.get(id=pk)
update_user_language(request, language=language)
return language
def update_user_language(request, language=None, language_id=None):
"""
Description:
        Updates the user's current language following Django guidelines
This will allow for both "Django" frontend translations and "our app" database translation
The new language must be passed either through a Language instance or an ID
Args:
request (HttpRequest): Request object from Django, used to get to the session
language (Language, optional): A Language instance from this app. Defaults to None.
language_id (id, optional): ID of the language in our database. Defaults to None.
"""
# Checking arguments
if language is None and language_id is None:
raise TypeError("You must provide either 'language' or 'language_id'")
# Get the language from the session
if language is None:
language = Language.objects.get(id=language_id)
# Update the user's language
activate(language.django_language_name)
request.session[LANGUAGE_SESSION_KEY] = language.django_language_name
``` |
{
"source": "Jordan-Kowal/jklib",
"score": 3
} |
#### File: django/db/managers.py
```python
from django.db import models
# --------------------------------------------------------------------------------
# > Managers
# --------------------------------------------------------------------------------
class NoBulkCreateManager(models.Manager):
"""Prevents the use of the bulk_create method"""
def bulk_create(self, objs, **kwargs):
"""
Overrides the 'bulk_create' method to make it non-usable
:raises NotImplementedError: Must be overridden to be usable
"""
raise NotImplementedError("Cannot use bulk_create on this model")
```
#### File: django/db/tests.py
```python
from django.core.exceptions import ValidationError
from django.db import IntegrityError, transaction
# Local
from ..utils.tests import ImprovedTestCase
# --------------------------------------------------------------------------------
# > Classes
# --------------------------------------------------------------------------------
class ModelTestCase(ImprovedTestCase):
"""
TestCase class specifically for testing our models
Inherits from ImprovedTestCase
Provides the following:
Assertions for field constraints (unique, required, choices, etc.)
"""
# ----------------------------------------
# Properties
# ----------------------------------------
model_class = None
@property
def common_errors(self):
"""
:return: A list of common error classes
:rtype: ValueError, ValidationError, IntegrityError
"""
return ValueError, ValidationError, IntegrityError
# ----------------------------------------
# Assertions
# ----------------------------------------
def assert_fields_are_required(self, valid_payload, fields=None):
"""
Tests that the required fields are truly required
For each field, we will:
Use a valid payload
Remove only the specific field
Try to create the object
:param dict valid_payload: A valid payload for the service
:param [str] fields: List of fields to check. Defaults to self.required_fields
"""
if fields is None:
fields = self.required_fields
for field in fields:
with transaction.atomic():
payload = valid_payload.copy()
payload[field] = None
with self.assertRaises(self.common_errors):
self.model_class(**payload).save()
def assert_instance_count_equals(self, n):
"""Tests the number of instances in the database for our model"""
assert self.model_class.objects.count() == n
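# Illustrative usage (hypothetical Book model and payload):
#   class TestBook(ModelTestCase):
#       model_class = Book
#       required_fields = ["title"]
#       def test_required_fields(self):
#           self.assert_fields_are_required({"title": "Dune", "author": "Herbert"})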
```
#### File: django/drf/actions.py
```python
from enum import Enum
# Django
from rest_framework.exceptions import MethodNotAllowed
from rest_framework.response import Response
from rest_framework.status import (
HTTP_200_OK,
HTTP_201_CREATED,
HTTP_204_NO_CONTENT,
HTTP_404_NOT_FOUND,
)
# --------------------------------------------------------------------------------
# > Enums
# --------------------------------------------------------------------------------
class SerializerMode(Enum):
"""
List of available serializer modes for an action
Refer to DynamicViewSet.get_serializer_class() to see how they resolve serializers
"""
NONE = 1
UNIQUE = 2
METHOD_BASED = 3
ROLE_BASED = 4
ROLE_AND_METHOD_BASED = 5
# --------------------------------------------------------------------------------
# > Base Handlers
# --------------------------------------------------------------------------------
class ActionHandler:
"""
---------- DESCRIPTION ----------
Custom class for processing action calls, and also provides more flexibility for action customization
This class must be used within the DynamicViewSet class we've created
Permissions and Serializers must be defined at the class level, respectively in 'permissions' and 'serializers'
Both can either directly contain the value, or be a dictionary with 1 value per method (get, post, ...)
To process the action, call the .run() method. It will either:
Call the [.get(), .post(), ...] function based on the action method
Fallback on the .main() function if none of the above is found
When initialized, the instance will store all the data from the request call, making it accessible at all times
Also provides utility with .super_view() and .run_action_from_super_view()
---------- HOW TO SETUP ----------
Make sure to define the following elements:
serializer_mode (NONE, UNIQUE, METHOD_BASED, ROLE_BASED, ROLE_AND_METHOD_BASED)
serializer
if NONE --> returns None
if UNIQUE --> directly returns a serializer
if METHOD_BASED --> dict where each method has a serializer
if ROLE_BASED --> dict with a different serializer for "user" and for "admin"
if ROLE_AND_METHOD_BASED --> dict with user/admin, then methods for serializers
[.get(), .post(), ...] if your action has several valid protocol/methods
.main() if your action has a single method
---------- HOW TO USE: with DynamicViewSet ----------
Simply match your actions with your ActionHandler classes, as described in the DynamicViewSet documentation
The viewset will then take care of the rest
"""
serializer_mode = SerializerMode.UNIQUE
serializer = None
def __init__(self, viewset, request, *args, **kwargs):
"""
Initialize the instance and sets up its attributes for later use
:param ViewSet viewset: Viewset from DRF where our action will take place
:param HttpRequest request: The request object from django that called our action
:param args: Additional args automatically passed to our action
:param kwargs: Additional kwargs automatically passed to our action
"""
# Storing args
self.viewset = viewset
self.request = request
self.args = args
self.kwargs = kwargs
# Useful shortcuts
self.user = request.user
self.data = request.data
self.method = request.method.lower()
def run(self):
"""
Process the service request by calling the appropriate method
It will look for a function matching the [method] name and will default to the "main" function
:return: Response instance from DRF, containing our results
:rtype: Response
"""
action_to_run = getattr(self, self.method, self.main)
return action_to_run()
def run_action_from_super_view(self, action_name):
"""
Calls an action from the parent viewset with the initial arguments
:param str action_name: Name of the method to call from the parent viewset
:return: The results from the parent function we called
"""
parent_viewset_action = getattr(self.super_view(), action_name)
return parent_viewset_action(self.request, *self.args, **self.kwargs)
def super_view(self):
"""
Equivalent to calling super() on the viewset
:return: The parent class of the viewset
:rtype: ViewSet
"""
return super(type(self.viewset), self.viewset)
    def main(self):
        """Default function for the service processing"""
        raise MethodNotAllowed(self.method)
def get_serializer(self, *args, **kwargs):
"""
Shortcut to get the serializer from the viewset
:return: The serializer attached to our current action
:rtype: Serializer
"""
return self.viewset.get_serializer(*args, **kwargs)
def get_valid_serializer(self, *args, **kwargs):
"""
Shortcut to get and validate the serializer from the viewset
:return: The validated serializer attached to our current action
:rtype: Serializer
"""
return self.viewset.get_valid_serializer(*args, **kwargs)
class ModelActionHandler(ActionHandler):
"""
Extension of ActionHandler that provides utility for Model-related viewset actions
Includes all the CRUD functions equivalent to the DRF viewset model mixins
"""
def get_object(self):
"""
Shortcut to fetch an model instance
:return: A Django instance model
:rtype: Model
"""
return self.viewset.get_object()
def model_create(self):
"""
Creates and saves a model instance
:return: HTTP 200 with the created instance data
:rtype: Response
"""
serializer = self.get_valid_serializer(data=self.data)
serializer.save()
return Response(serializer.data, status=HTTP_201_CREATED)
def model_retrieve(self):
"""
Fetches a single model instance based on the provided serializer
:return: HTTP 200 with the instance data
:rtype: Response
"""
instance = self.get_object()
serializer = self.get_serializer(instance)
return Response(serializer.data, status=HTTP_200_OK)
def model_list(self):
"""
Fetches, filters, paginates, and returns a list of instances for a given model
:return: HTTP 200 with the list of instances
:rtype: Response
"""
viewset = self.viewset
queryset = viewset.filter_queryset(viewset.get_queryset())
page = viewset.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return viewset.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data, status=HTTP_200_OK)
def model_update(self):
"""
Updates and saves a model instance using the provided serializer
:return: HTTP 200 with the updated instance data
:rtype: Response
"""
partial = self.kwargs.pop("partial", False)
instance = self.get_object()
serializer = self.get_serializer(instance, data=self.data, partial=partial)
serializer.is_valid(raise_exception=True)
serializer.save()
# Invalidate the pre-fetched cache if it had been applied
if getattr(instance, "_prefetched_objects_cache", None):
instance._prefetched_objects_cache = {}
return Response(serializer.data, status=HTTP_200_OK)
def model_partial_update(self):
"""
Sets the 'partial' kwarg to partial before calling the .model_update() method
:return: HTTP 200 with the partially updated instance data
:rtype: Response
"""
self.kwargs["partial"] = True
return self.model_update()
def model_destroy(self):
"""
Deletes a model instance from the database
:return: HTTP 204 response without data
:rtype: Response
"""
instance = self.get_object()
instance.delete()
return Response(status=HTTP_204_NO_CONTENT)
def model_bulk_destroy(self):
"""
Filters the viewset queryset with the provided IDs and removes the instances found
Expects the serializer to have an "ids" list field
:return: HTTP 204 response without data
:rtype: Response
"""
serializer = self.get_valid_serializer(data=self.data)
ids_to_delete = serializer.validated_data.pop("ids")
instances = self.viewset.get_queryset().filter(id__in=ids_to_delete)
if len(instances) == 0:
return Response(None, status=HTTP_404_NOT_FOUND)
else:
instances.delete()
return Response(None, status=HTTP_204_NO_CONTENT)
```
#### File: django/utils/emails.py
```python
from threading import Thread
# Django
from django.contrib.staticfiles import finders
from django.core.mail import EmailMessage
# --------------------------------------------------------------------------------
# > Functions
# --------------------------------------------------------------------------------
def extract_email_addresses(emails, sep=","):
"""
    Transforms a string of multiple email addresses (separated by commas) into a list
:param str emails: Single string of emails separated by a specific character
:param str sep: The separator used in the emails parameter. Defaults to ','
:return: A list of email addresses
:rtype: list(str)
"""
    if emails is None:
        return []
    if type(emails) == str:
        emails = emails.split(sep)
    emails = list(map(lambda x: x.strip(), emails))
return emails
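# Example: extract_email_addresses("a@example.com, b@example.com")
# returns ["a@example.com", "b@example.com"]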
def get_css_content(relative_path):
"""
Gets and returns the content of a css file
    Please make sure the CSS file does not use " or '
:param relative_path: Relative path to the CSS file (the same as the one you'd use in {% static %})
:return: The content of the CSS file
:rtype: str
"""
css_file = finders.find(relative_path)
with open(css_file, "r", encoding="utf-8") as f:
content = f.read()
return content
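# Example (hypothetical static path): get_css_content("myapp/css/email.css")
# returns the raw text of the file resolved by Django's staticfiles finders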
def send_html_email(subject, body, sep=",", to=None, cc=None, sender=None):
"""
Sends an HTML email with the given arguments
:param str subject: Subject of the email
:param str body: Body/content of the email
:param str sep: The separator used in the emails parameter. Defaults to ','
:param to: List or character-separated string of emails. Defaults to None.
:type to: list(str) or str
:param cc: List or character-separated string of emails. Defaults to None.
:type cc: list(str) or str
:param str sender: The sender. Defaults to django configuration.
"""
    to = extract_email_addresses(to, sep)
    cc = extract_email_addresses(cc, sep)
email = EmailMessage(subject=subject, body=body, to=to, cc=cc, from_email=sender,)
email.content_subtype = "html"
email.send()
def send_html_email_async(subject, body, sep=",", to=None, cc=None, sender=None):
"""
Similar to 'send_html_email', but uses a Thread instance to send it asynchronously
:param str subject: Subject of the email
:param str body: Body/content of the email
:param str sep: The separator used in the emails parameter. Defaults to ','
:param to: List or character-separated string of emails. Defaults to None.
:type to: list(str) or str
:param cc: List or character-separated string of emails. Defaults to None.
:type cc: list(str) or str
:param str sender: The sender. Defaults to django configuration.
"""
thread = Thread(target=send_html_email, args=(subject, body, sep, to, cc, sender))
thread.start()
```
#### File: django/utils/network.py
```python
from urllib.parse import urlencode
# Local
from .settings import get_config
# --------------------------------------------------------------------------------
# > Functions
# --------------------------------------------------------------------------------
def build_url(parts, params=None, end_slash=False):
"""
Builds a complete URL by joining its parts and adding params at the end
:param list parts: Ordered list of paths to join
:param dict params: The GET params for the url
:param bool end_slash: Whether we should add a / at the end
:return: The computed URL
:rtype: str
"""
# Remove extra slashes
cleaned_parts = []
for part in parts:
if part == "":
continue
if part[0] == "/":
part = part[1:]
if part[-1] == "/":
part = part[:-1]
cleaned_parts.append(part)
# Build URL
url = "/".join(cleaned_parts)
    if params is not None:
        url += "?" + urlencode(params)
if end_slash:
url += "/"
return url.replace("//", "/")
def get_client_ip(request):
"""
Returns the IP address of the current user
    Based on the environment, the address can come from different sources: FORWARDED_FOR, REAL_IP, REMOTE_ADDR
:param request: HttpRequest from django
:return: The user's IP address
:rtype: str
"""
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
# FORWARDED_FOR
ip = x_forwarded_for.split(",")[-1].strip()
elif request.META.get("HTTP_X_REAL_IP"):
# REAL_IP
ip = request.META.get("HTTP_X_REAL_IP")
else:
# REMOTE_ADDR
ip = request.META.get("REMOTE_ADDR")
return ip
def get_server_domain():
"""
Fetches the django server address from the settings
:return: The server domain/url
:rtype: str
"""
hosts = get_config("ALLOWED_HOSTS")
domain = hosts[0] if hosts else "http://127.0.0.1:8000/"
return domain
```
#### File: django/utils/templates.py
```python
from django.template import loader
# --------------------------------------------------------------------------------
# > Functions
# --------------------------------------------------------------------------------
def render_template(template_path, context):
"""
Renders an HTML page using a given template and a given context
:param str template_path: Path to the template (app/template_name)
:param dict context: The context values
:return: The dynamically-generated HTML
:rtype: str
"""
template = loader.get_template(template_path)
rendered = template.render(context)
return rendered
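# Example (hypothetical template): render_template("myapp/welcome.html", {"name": "Ada"})
# returns the rendered HTML as a string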
```
#### File: jklib/std/classes.py
```python
def is_subclass(obj, reference_class):
"""
Improvement of 'issubclass' that returns False if the first arg is not an actual class
:param obj: The object you want information on
:param reference_class: The reference class to compare to
:return: Whether 'obj' inherits from 'reference_class'
:rtype: bool
"""
try:
return issubclass(obj, reference_class)
except TypeError:
return False
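# Examples: is_subclass(bool, int) returns True, while is_subclass(42, int)
# returns False instead of raising TypeError like the builtin issubclass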
```
#### File: jklib/std/strings.py
```python
def clean_text(text, char_list, replacement=" "):
"""
Replaces specific characters with a 'replacement' character within a text
:param str text: The text we want to change
:param char_list: List of strings, which are the subtexts we will replace
:type char_list: list(str)
:param str replacement: The string used as replacement. Defaults to " ".
:return: The updated string
:rtype: str
"""
if char_list:
for char in char_list:
text = text.replace(char, replacement)
text = text.strip()
return text
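# Example: clean_text("a-b_c", ["-", "_"]) returns "a b c"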
def replace_every_nth(text, old, new, nth, start=1):
"""
Modifies a text by replacing "old" string with "new" string every "nth" time
:param str text: The text we want to change
:param str old: The string that will be replaced
:param str new: The string used as replacement
:param int nth: The frequency of replacement (every nth occurrences)
:param int start: Which occurrence to we start with. Defaults to 1.
:return: The updated text
:rtype: str
"""
i = start
index = text.find(old)
while index != -1:
if i == nth:
text = text[:index] + new + text[index + len(old) :]
i = 0
index = text.find(old, index + len(old) + 1)
i += 1
return text
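# Example: replace_every_nth("a.b.c.d", ".", "-", 2) returns "a.b-c.d"
# (every 2nd occurrence of "." is replaced)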
``` |
{
"source": "Jordan-Lane/Neural-Art",
"score": 3
} |
#### File: Neural-Art/src/activations.py
```python
import numpy
# Note: All activation functions here are dynamically added to numpy_image_nn during construction
# To add a new activation function write a function here that takes a numpy array
# and returns the new activated array.
def tanh(matrix):
return numpy.tanh(matrix)
def sigmoid(matrix):
return 1.0 / (1 + numpy.exp(-matrix))
def relu(matrix):
    # The third argument is numpy's `out` parameter, so this also modifies the input in place
    return numpy.maximum(matrix, 0, matrix)
def softmax(matrix):
    expo = numpy.exp(matrix)
    expo_sum = numpy.sum(expo)
    return expo / expo_sum
def sech(matrix):
return 1.0 / numpy.cosh(matrix)
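# Illustrative check: sigmoid(numpy.array([0.0])) returns array([0.5]); each
# activation above maps a numpy array elementwise to the activated array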
``` |
{
"source": "JordanLeich/Beginner-and-Practice-Projects",
"score": 4
} |
#### File: JordanLeich/Beginner-and-Practice-Projects/Simple Login System.py
```python
import time
# Variables
get_user = str()
new_user = str()
existing_user = str()
new_account = str()
new_email = str()
new_password = str()
name = str()
old_email = str()
old_password = str()
try_again = str()
x = float()
y = float()
result = float()
# Start
def beginning():
print()
print("Please keep in mind that this program is case sensitive with your name, email, and password! ")
print()
time.sleep(1)
existing_user = input("Are you an existing user? ")
print()
time.sleep(1)
if existing_user == "yes" or existing_user == "y":
old()
if existing_user == "no" or existing_user == "n":
new()
# Old Account
def old():
global new_account
name = input("Please enter the name on your account: ")
print()
time.sleep(1)
if name != "Jordan":
print("The info you provided doesn't currently exist on our records. ")
print()
try_again = input("Would you like to try again? ")
print()
if try_again == "yes" or try_again == "y" or try_again == "Yes":
old()
if try_again == "no" or try_again == "n" or try_again == "No":
new_account = input("Would you like to create a new account? ")
print()
if new_account == "yes" or new_account == "y" or new_account == "Yes":
make_account()
if new_account != "yes" or new_account == "y" or new_account == "Yes":
end()
old_email = input("Please enter your existing email: ")
print()
time.sleep(1)
if old_email != "<EMAIL>":
print("The info you provided doesn't currently exist on our records. ")
print()
try_again = input("Would you like to try again? ")
print()
if try_again == "yes" or try_again == "y" or try_again == "Yes":
old()
if try_again == "no" or try_again == "n" or try_again == "No":
new_account = input("Would you like to create a new account? ")
print()
if new_account == "yes" or new_account == "y" or new_account == "Yes":
make_account()
if new_account != "yes" or new_account == "y" or new_account == "Yes":
end()
old_password = input("Please enter your existing password: ")
print()
time.sleep(1)
if old_password == "<PASSWORD>":
logged_in()
else:
print("The info you provided doesn't currently exist on our records. ")
print()
try_again = input("Would you like to try again? ")
print()
if try_again == "yes" or try_again == "y" or try_again == "Yes":
old()
if try_again == "no" or try_again == "n" or try_again == "No":
new_account = input("Would you like to create a new account? ")
print()
if new_account == "yes" or new_account == "y" or new_account == "Yes":
make_account()
if new_account != "yes" or new_account == "y" or new_account == "Yes":
end()
# Want to make a new account?
def new():
new_account = input("Since you are currently not an existing user, would you like to become a new user? ")
print()
if new_account == "yes" or new_account == "y":
make_account()
if new_account == "no" or new_account == "n":
end()
# Making the new account
def make_account():
name = input("Enter your first name: ")
print()
time.sleep(1)
new_email = input("Please enter a valid email: ")
print()
time.sleep(1)
new_password = input("Please enter a password: ")
print()
time.sleep(1)
confirm_password = input("Please confirm your new password: ")
print()
time.sleep(1)
if confirm_password != new_password:
print("Your new password could not be confirmed! ")
print()
time.sleep(1)
print("You will now be prompted to another new account. ")
print()
time.sleep(1)
make_account()
logged_in()
# Logging in
def logged_in():
print("Logging in...")
print()
time.sleep(2)
print("Congrats, you are currently logged in. ")
print()
time.sleep(3)
actions()
# Actions to take while the user is logged in
def actions():
first_action = input("Would you like to use our simple math calculator (1) or Quit this program (2): ")
print()
time.sleep(2)
if first_action == "1":
calc()
elif first_action == '2':
end()
else:
print("Invalid input!")
print()
time.sleep(2)
print("restarting input...")
print()
time.sleep(1)
actions()
# Simple calculator that a logged in user can use
def calc():
print("Select an operation.")
print("1. Add")
print("2. Subtract")
print("3. Multiply")
print("4. Divide")
print()
choice = input("Enter choice(1/2/3/4): ")
print()
if choice == '1' or choice == 'add':
add(x, y)
elif choice == '2' or choice == 'subtract':
subtract(x, y)
elif choice == '3' or choice == 'multiply':
multiply(x, y)
elif choice == '4' or choice == 'divide':
divide(x, y)
else:
print("Invalid input!")
print()
time.sleep(2)
print("restarting calculator choice...")
print()
time.sleep(2)
calc()
def add(x, y):
num1 = input('Enter first number: ')
print()
num2 = input('Enter second number: ')
print()
    total = float(num1) + float(num2)
    print(total)
print()
moremath()
def subtract(x, y):
num1 = input('Enter first number: ')
print()
num2 = input('Enter second number: ')
print()
remainder = float(num1) - float(num2)
print(remainder)
print()
moremath()
def multiply(x, y):
num1 = input('Enter first number: ')
print()
num2 = input('Enter second number: ')
print()
product = float(num1) * float(num2)
print(product)
print()
moremath()
def divide(x, y):
num1 = input('Enter first number: ')
print()
num2 = input('Enter second number: ')
print()
    if float(num2) == 0:
        print("Cannot divide by zero!")
    else:
        quotient = float(num1) / float(num2)
        print(quotient)
print()
moremath()
def moremath():
more_math = input("Would you like to do a another math problem(1) or quit this program(2): ")
print()
time.sleep(2)
if more_math == '1':
calc()
elif more_math == '2':
end()
else:
print('Invalid input!')
print()
time.sleep(2)
print('restarting choice...')
print()
moremath()
# Ending
def end():
print("This is the end of the program - <NAME>")
quit()
# Main Engine
beginning()
```
#### File: JordanLeich/Beginner-and-Practice-Projects/Simple Timer & Stopwatch.py
```python
import time
import os
seconds = int()
minutes = int()
hours = int()
def begin():
userchoice1 = int(input("Would you like to make a timer(1) or a countdown stopwatch(2) or end this program(0): "))
if userchoice1 == 1:
timer()
    elif userchoice1 == 2:
        stopwatch()
    elif userchoice1 == 0:
forceend()
else:
print("Invalid Input! ")
time.sleep(3)
begin()
def timer():
seconds = int()
minutes = int()
hours = int()
print("Welcome to the timer! ")
time.sleep(2)
run = input("Enter R to start the timer! ")
while run.lower() == "r":
        seconds = seconds + 1
        if seconds > 59:
            seconds = 0
            minutes = minutes + 1
        if minutes > 59:
            minutes = 0
            hours = hours + 1
        print(hours, ":", minutes, ":", seconds)
time.sleep(1)
def stopwatch():
global whentostop
print("Welcome to the stopwatch!")
time.sleep(2)
while True:
userinput = input("Enter time to countdown in seconds: ")
try:
whentostop = abs(int(userinput))
except KeyboardInterrupt:
break
        except ValueError:
            print("Invalid Input!")
            time.sleep(3)
            continue
while whentostop > 0:
m, s = divmod(whentostop, 60)
h, m = divmod(m, 60)
time_left = str(h).zfill(2) + ":" + str(m).zfill(2) + ":" + str(s).zfill(2)
print(time_left)
time.sleep(1)
whentostop -= 1
print("Stopwatch finished!")
time.sleep(2)
restart()
def restart():
restartchoice = str(input("Would you like to restart this program(yes or no): "))
if restartchoice == 'yes':
print("restarting program...")
time.sleep(3)
begin()
    elif restartchoice == 'no':
forceend()
else:
print("Invalid Input!")
time.sleep(3)
restart()
def forceend():
print("ending program...")
time.sleep(1)
quit()
begin()
``` |
{
"source": "JordanLeich/command-line-chess",
"score": 4
} |
#### File: JordanLeich/command-line-chess/chess.py
```python
from ui import user_input, print_board
EMPTY = 0 # empty square
W_P = 1 # white pawn
W_N = 2 # white knight
W_B = 3 # white bishop
W_R = 4 # white rook
W_Q = 5 # white queen
W_K = 6 # white king
B_P = 7 # black pawn
B_N = 8 # black knight
B_B = 9 # black bishop
B_R = 10 # black rook
B_Q = 11 # black queen
B_K = 12 # black king
# castling moves
W_KING_SIDE = (95, 97, "")
W_QUEEN_SIDE = (95, 93, "")
B_KING_SIDE = (25, 27, "")
B_QUEEN_SIDE = (25, 23, "")
def main():
board = create_board()
white = True # current player, if False, player will be black
ep_sq = None # possible en passant capture square
# castling rights [white king side, queen side, black king side, queen side]
castling = [True, True, True, True]
half_moves = 0 # fifty-move rule counter
# for detecting threefold repetition. Position is defined by en_passant
# capture possibility, side to move, castling rights and piece placement
positions = {} # count of each position
position = ""
while True:
print_board(board)
pseudo_moves = gen_moves(board, white, castling, ep_sq)
# select legal moves from pseudo_moves
moves = []
king = W_K if white else B_K
for move in pseudo_moves:
copy_board = board[:] # for unmaking the move later
make_move(board, move, white, False)
# find king index
for i in range(len(board)):
if board[i] == king:
king_i = i
if not is_in_check(board, white, king_i): # legal move
moves.append(move)
# for detecting threefold repetition
if is_en_passant(board, move, ep_sq):
position += "e"
board = copy_board # unmake the move
# for detecting threefold repetition
position += "w" if white else "b"
if castling[0]:
position += "K"
if castling[1]:
position += "Q"
if castling[2]:
position += "k"
if castling[3]:
position += "q"
for square in board:
if square != -1:
position += str(square)
if half_moves == 0:
positions = {}
# increment or initialize the count of position
positions[position] = positions.get(position, 0) + 1
position = ""
if not moves: # Game over
king = W_K if white else B_K
# find king index
for i in range(len(board)):
if board[i] == king:
king_i = i
if is_in_check(board, white, king_i):
print("Black" if white else "White", "wins the game!")
return
print("Draw - stalemate")
return
for value in positions.values():
if value >= 3: # threefold repetition
print("Draw - threefold repetition")
return
if half_moves >= 100: # fifty-move rule
print("Draw - fifty-move rule")
return
print("White: " if white else "Black: ", end="")
move = user_input()
while move not in moves:
print_board(board)
print("Please enter a legal move")
print("White: " if white else "Black: ", end="")
move = user_input()
from_sq = move[0]
to_sq = move[1]
# fifty-move rule counter
if board[from_sq] in [W_P, B_P] \
or (board[to_sq] != EMPTY and board[from_sq] != board[to_sq]):
# current move is pawn move or capture
half_moves = 0
else:
half_moves += 1
make_move(board, move, white, ep_sq)
# set possible en passant capture square for the next move
if board[to_sq] in [W_P, B_P] and abs(to_sq - from_sq) == 20:
ep_sq = to_sq + 10 if white else to_sq - 10
else:
ep_sq = None
# set castling rights
if board[to_sq] in [W_K, B_K]: # king has moved
if white:
castling[0] = False
castling[1] = False
else:
castling[2] = False
castling[3] = False
# if a rook is not in initial position castling right is lost
if board[98] != W_R:
castling[0] = False
if board[91] != W_R:
castling[1] = False
if board[28] != B_R:
castling[2] = False
if board[21] != B_R:
castling[3] = False
white = not white # change player
def create_board():
# out of the board square = -1
board = [
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, B_R, B_N, B_B, B_Q, B_K, B_B, B_N, B_R, -1,
-1, B_P, B_P, B_P, B_P, B_P, B_P, B_P, B_P, -1,
-1, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, -1,
-1, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, -1,
-1, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, -1,
-1, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, EMPTY, -1,
-1, W_P, W_P, W_P, W_P, W_P, W_P, W_P, W_P, -1,
-1, W_R, W_N, W_B, W_Q, W_K, W_B, W_N, W_R, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
]
return board
def gen_moves(board, white, castling, ep_sq):
moves = []
pieces = [W_P, W_N, W_B, W_R, W_Q, W_K] if white else \
[B_P, B_N, B_B, B_R, B_Q, B_K]
opposite_pieces = [B_P, B_N, B_B, B_R, B_Q, B_K] if white else \
[W_P, W_N, W_B, W_R, W_Q, W_K]
offsets_list = [
None,
None,
[12, 21, 19, 8, -12, -21, -19, -8], # knight
[11, 9, -11, -9], # bishop
[10, -1, -10, 1], # rook
[10, -1, -10, 1, 11, 9, -11, -9], # queen
[10, -1, -10, 1, 11, 9, -11, -9] # king
]
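    # Offsets are mailbox deltas: +/-1 steps a file, +/-10 steps a rank, so a
    # knight's 21 is two ranks plus one file and a bishop's 11 is one diagonal
    # step; sliding pieces repeat their deltas until blocked.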
# loop over all indices in the board
for i in range(len(board)):
piece = board[i]
if piece in [-1, EMPTY] or piece in opposite_pieces:
continue
if piece in [W_P, B_P]: # found a pawn
moves.extend(gen_pawn_moves(board, white, i, ep_sq))
else: # found a piece other than pawn
offsets = offsets_list[piece] if white else offsets_list[piece - 6]
from_sq = i
for offset in offsets:
temp_sq = from_sq
while True:
to_sq = temp_sq + offset
if board[to_sq] == -1: # to_sq is off the board
break
elif board[to_sq] in pieces: # to_sq is same color piece
break
elif board[to_sq] in opposite_pieces:
moves.append((from_sq, to_sq, ""))
break
moves.append((from_sq, to_sq, ""))
# knight and king can only move one square in a direction
if piece in [W_N, W_K, B_N, B_K]:
break
temp_sq = to_sq
# for castling
if piece in [W_K, B_K]:
moves.extend(gen_castling_moves(board, white, castling))
return moves
def gen_pawn_moves(board, white, i, ep_sq):
moves = []
promotion = ""
last_rank = [21, 22, 23, 24, 25, 26, 27, 28] if white else \
[91, 92, 93, 94, 95, 96, 97, 98]
second_rank = [81, 82, 83, 84, 85, 86, 87, 88] if white else \
[31, 32, 33, 34, 35, 36, 37, 38]
opposite_pieces = [B_P, B_N, B_B, B_R, B_Q, B_K] if white else \
[W_P, W_N, W_B, W_R, W_Q, W_K]
normal_offsets = [-10, -20] if white else [10, 20]
capture_offsets = [-9, -11] if white else [9, 11]
from_sq = i
to_sq = from_sq + normal_offsets[0] # single square move
if board[to_sq] == EMPTY:
if to_sq in last_rank:
for promotion in "nbrq":
moves.append((from_sq, to_sq, promotion))
promotion = ""
else:
moves.append((from_sq, to_sq, promotion))
if from_sq in second_rank: # double square move
to_sq = from_sq + normal_offsets[1]
if board[to_sq] == EMPTY:
moves.append((from_sq, to_sq, promotion))
for offset in capture_offsets:
to_sq = from_sq + offset
if board[to_sq] in opposite_pieces: # capture
if to_sq in last_rank:
for promotion in "nbrq":
moves.append((from_sq, to_sq, promotion))
promotion = ""
else:
moves.append((from_sq, to_sq, promotion))
if ep_sq: # current move may be en passant capture
if to_sq == ep_sq:
moves.append((from_sq, to_sq, promotion))
return moves
def gen_castling_moves(board, white, castling):
moves = []
if white:
if castling[0] and (board[96] == EMPTY and board[97] == EMPTY):
if not (is_in_check(board, white, 95) or is_in_check(board, white, 96)):
moves.append(W_KING_SIDE)
if castling[1] and (board[94] == EMPTY and board[93] == EMPTY and board[92] == EMPTY):
if not (is_in_check(board, white, 95) or is_in_check(board, white, 94)):
moves.append(W_QUEEN_SIDE)
else:
if castling[2] and (board[26] == EMPTY and board[27] == EMPTY):
if not (is_in_check(board, white, 25) or is_in_check(board, white, 26)):
moves.append(B_KING_SIDE)
if castling[3] and (board[24] == EMPTY and board[23] == EMPTY and board[22] == EMPTY):
if not (is_in_check(board, white, 25) or is_in_check(board, white, 24)):
moves.append(B_QUEEN_SIDE)
return moves
def is_in_check(board, white, i):
pieces = [W_P, W_N, W_B, W_R, W_Q, W_K] if white else \
[B_P, B_N, B_B, B_R, B_Q, B_K]
opposite_pieces = [B_P, B_N, B_B, B_R, B_Q, B_K] if white else \
[W_P, W_N, W_B, W_R, W_Q, W_K]
# check if a pawn could capture the king
opposite_pawn = B_P if white else W_P
capture_offsets = [-9, -11] if white else [9, 11]
from_sq = i
for offset in capture_offsets:
to_sq = from_sq + offset
if board[to_sq] == opposite_pawn: # to_sq is opposite pawn
return True
# check if a king could capture the king
opposite_king = B_K if white else W_K
offsets = [10, -1, -10, 1, 11, 9, -11, -9]
from_sq = i
for offset in offsets:
to_sq = from_sq + offset
if board[to_sq] == opposite_king: # to_sq is opposite king
return True
# check if a knight could capture the king
opposite_knight = B_N if white else W_N
offsets = [12, 21, 19, 8, -12, -21, -19, -8]
from_sq = i
for offset in offsets:
to_sq = from_sq + offset
if board[to_sq] == opposite_knight: # to_sq is opposite knight
return True
# check if a rook or queen could capture the king
opposite_qr = [B_R, B_Q] if white else [W_R, W_Q]
offsets = [10, -1, -10, 1]
from_sq = i
for offset in offsets:
temp_sq = from_sq
while True:
to_sq = temp_sq + offset
if board[to_sq] == -1: # to_sq is off the board
break
elif board[to_sq] in pieces: # to_sq is same color piece
break
elif board[to_sq] in opposite_pieces:
if board[to_sq] in opposite_qr: # to_sq: opposite queen or rook
return True
break
temp_sq = to_sq
# check if a bishop or queen could capture the king
opposite_qb = [B_B, B_Q] if white else [W_B, W_Q]
offsets = [11, 9, -11, -9]
from_sq = i
for offset in offsets:
temp_sq = from_sq
while True:
to_sq = temp_sq + offset
if board[to_sq] == -1: # to_sq is off the board
break
elif board[to_sq] in pieces: # to_sq is same color piece
break
elif board[to_sq] in opposite_pieces:
if board[to_sq] in opposite_qb: # to_sq:opposite queen or bishop
return True
break
            temp_sq = to_sq
    return False
def is_en_passant(board, move, ep_sq):
to_sq = move[1]
return board[to_sq] in [W_P, B_P] and to_sq == ep_sq
def make_move(board, move, white, ep_sq):
from_sq = move[0]
to_sq = move[1]
promotion = move[2]
board[to_sq] = board[from_sq]
board[from_sq] = EMPTY
# pawn promotion
if promotion:
if promotion == "n":
board[to_sq] = W_N if white else B_N
elif promotion == "b":
board[to_sq] = W_B if white else B_B
elif promotion == "r":
board[to_sq] = W_R if white else B_R
elif promotion == "q":
board[to_sq] = W_Q if white else B_Q
# en passant capture
elif is_en_passant(board, move, ep_sq):
# remove the en passant captured pawn
remove_sq = to_sq + 10 if white else to_sq - 10
board[remove_sq] = EMPTY
# castling
elif board[to_sq] in [W_K, B_K] and abs(to_sq - from_sq) == 2:
# move the rook to the castled position
if to_sq == 97: # white king side
board[96] = W_R
board[98] = EMPTY
elif to_sq == 93: # white queen side
board[94] = W_R
board[91] = EMPTY
elif to_sq == 27: # black king side
board[26] = B_R
board[28] = EMPTY
elif to_sq == 23: # black queen side
board[24] = B_R
board[21] = EMPTY
if __name__ == "__main__":
main()
```
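A move everywhere in `chess.py` is a `(from_index, to_index, promotion)` triple in the mailbox coordinates above. A minimal round-trip sketch, assuming `chess.py` sits next to the `ui.py` module below:

```python
# Hypothetical demo of the move encoding shared by chess.py and ui.py.
from ui import INDICES, NOTATION

move = (INDICES["e2"], INDICES["e4"], "")  # the pawn push e2e4
assert move == (85, 65, "")
print(NOTATION[move[0]] + NOTATION[move[1]] + move[2])  # -> "e2e4"
```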
#### File: JordanLeich/command-line-chess/ui.py
```python
from os import name, system
# for printing the board
SYMBOLS = ".PNBRQKpnbrqk" if name == "nt" else \
" \u2659\u2658\u2657\u2656\u2655\u2654\u265F\u265E\u265D\u265C\u265B\u265A"
# board index position => chess notation
NOTATION = [
"-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1",
"-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1",
"-1", "a8", "b8", "c8", "d8", "e8", "f8", "g8", "h8", "-1",
"-1", "a7", "b7", "c7", "d7", "e7", "f7", "g7", "h7", "-1",
"-1", "a6", "b6", "c6", "d6", "e6", "f6", "g6", "h6", "-1",
"-1", "a5", "b5", "c5", "d5", "e5", "f5", "g5", "h5", "-1",
"-1", "a4", "b4", "c4", "d4", "e4", "f4", "g4", "h4", "-1",
"-1", "a3", "b3", "c3", "d3", "e3", "f3", "g3", "h3", "-1",
"-1", "a2", "b2", "c2", "d2", "e2", "f2", "g2", "h2", "-1",
"-1", "a1", "b1", "c1", "d1", "e1", "f1", "g1", "h1", "-1",
"-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1",
"-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1", "-1",
]
# chess notation => board index position
INDICES = {
"a8": 21, "b8": 22, "c8": 23, "d8": 24, "e8": 25, "f8": 26, "g8": 27, "h8": 28,
"a7": 31, "b7": 32, "c7": 33, "d7": 34, "e7": 35, "f7": 36, "g7": 37, "h7": 38,
"a6": 41, "b6": 42, "c6": 43, "d6": 44, "e6": 45, "f6": 46, "g6": 47, "h6": 48,
"a5": 51, "b5": 52, "c5": 53, "d5": 54, "e5": 55, "f5": 56, "g5": 57, "h5": 58,
"a4": 61, "b4": 62, "c4": 63, "d4": 64, "e4": 65, "f4": 66, "g4": 67, "h4": 68,
"a3": 71, "b3": 72, "c3": 73, "d3": 74, "e3": 75, "f3": 76, "g3": 77, "h3": 78,
"a2": 81, "b2": 82, "c2": 83, "d2": 84, "e2": 85, "f2": 86, "g2": 87, "h2": 88,
"a1": 91, "b1": 92, "c1": 93, "d1": 94, "e1": 95, "f1": 96, "g1": 97, "h1": 98
}
def user_input():
move_string = input()
if move_string == "quit":
quit()
from_sq = INDICES.get(move_string[:2])
to_sq = INDICES.get(move_string[2:4])
promotion = move_string[4:] # used for pawn promotion
return from_sq, to_sq, promotion
def print_board(board):
if name == "nt":
system("cls")
count = 0 # count the number of squares printed
for square in board:
if square != -1: # square is not outside of board
print(SYMBOLS[square], end=" ")
count += 1
if count % 8 == 0:
print()
else:
print_color_board(board)
def print_color_board(board):
system("clear")
i = 0 # count the number of squares printed
row_count = 0
print(8 - row_count, end=" ")
for square in board:
if square != -1: # square is not outside of board
if row_count % 2 == 0:
print("\033[107m" if i % 2 == 0 else "\033[106m", end="")
else:
print("\033[106m" if i % 2 == 0 else "\033[107m", end="")
print(SYMBOLS[square], end=" \033[0m")
i += 1
if i % 8 == 0:
print()
row_count += 1
if row_count < 8:
print(8 - row_count, end=" ")
print(" a b c d e f g h")
def print_moves(moves):
for move in moves:
print(NOTATION[move[0]], NOTATION[move[1]], move[2], sep="", end=" ")
print()
``` |
{
"source": "JordanLeich/Digital-Clock",
"score": 4
} |
#### File: JordanLeich/Digital-Clock/Clock.py
```python
import time
from tkinter import *
root = Tk()
root.title("Digital Clock")
root.geometry("250x100+0+0")
root.resizable(0, 0)
root.configure(bg="red")
label = Label(root, font=("Arial", 45, 'bold'))
label.grid(row=0, column=1)
def clock():
label.configure(bg="red")
label.configure(fg='gray')
text_input = time.strftime("%H:%M:%S")
label.config(text=text_input)
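    # Schedule the next refresh on Tk's event loop; 200 ms keeps the
    # seconds display smooth without busy-waiting.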
label.after(200, clock)
clock()
root.mainloop()
``` |
{
"source": "JordanLeich/Downloads-organizer",
"score": 3
} |
#### File: JordanLeich/Downloads-organizer/organize.py
```python
import os
import shutil
import sys
def create_folders(directories, directory_path):
"""
This function creates the folders in <directory_path> where the files
will be moved to.
:param directories: dictionary, this is a dictionary containing the
names of the sorted folders and the extensions that correspond to those
folders.
:param directory_path: string, this is a string of the path to the
directory that is to be sorted.
"""
for key in directories:
if key not in os.listdir(directory_path):
os.mkdir(os.path.join(directory_path, key))
if "OTHER" not in os.listdir(directory_path):
os.mkdir(os.path.join(directory_path, "OTHER"))
def organize_folders(directories, directory_path):
"""
This function organizes the files in the specified folder into folders
    :param directories: dictionary, this is a dictionary
containing the names of the sorted folders and the extensions that
correspond to those folders.
:param directory_path: string, this is a string of the path to the
directory that is to be sorted.
"""
for file in os.listdir(directory_path):
if os.path.isfile(os.path.join(directory_path, file)):
src_path = os.path.join(directory_path, file)
for key in directories:
extension = directories[key]
if file.endswith(extension):
dest_path = os.path.join(directory_path, key, file)
shutil.move(src_path, dest_path)
break
def organize_remaining_files(directory_path):
"""
    This function moves the files that don't have a corresponding folder to
    the <OTHER> directory.
:param directory_path: string, this is a string of the path to the
directory that is to be sorted.
"""
for file in os.listdir(directory_path):
if os.path.isfile(os.path.join(directory_path, file)):
src_path = os.path.join(directory_path, file)
dest_path = os.path.join(directory_path, "OTHER", file)
shutil.move(src_path, dest_path)
def organize_remaining_folders(directories, directory_path):
"""
    This function assigns the folders within the specified directory to the
    <FOLDERS> directory.
    :param directories: dictionary, this is a dictionary
    containing the names of the sorted folders and the extensions that
    correspond to those folders.
:param directory_path: string, this is a string of the path to the
directory that is to be sorted.
"""
list_dir = os.listdir(directory_path)
organized_folders = []
for folder in directories:
organized_folders.append(folder)
organized_folders = tuple(organized_folders)
for folder in list_dir:
if folder not in organized_folders:
src_path = os.path.join(directory_path, folder)
dest_path = os.path.join(directory_path, "FOLDERS", folder)
try:
shutil.move(src_path, dest_path)
except shutil.Error:
shutil.move(src_path, dest_path + " - copy")
print("That folder already exists in the destination folder."
"\nThe folder is renamed to '{}'".format(folder + " - copy"))
if __name__ == '__main__':
directory_path = "C:/Users/jan_b/Downloads"
    directories = {
        "HTML": (".html5", ".html", ".htm", ".xhtml"),
        "IMAGES": (".jpeg", ".jpg", ".tiff", ".gif", ".bmp", ".png", ".bpg",
                   ".svg", ".heif", ".psd"),
        "VIDEOS": (".avi", ".flv", ".wmv", ".mov", ".mp4", ".webm", ".vob",
                   ".mng", ".qt", ".mpg", ".mpeg", ".3gp", ".mkv"),
        "DOCUMENTS": (".oxps", ".epub", ".pages", ".docx", ".doc", ".fdf",
                      ".ods", ".odt", ".pwi", ".xsn", ".xps", ".dotx",
                      ".docm", ".dox", ".rvg", ".rtf", ".rtfd", ".wpd",
                      ".xls", ".xlsx", ".ppt", ".pptx"),
        "ARCHIVES": (".a", ".ar", ".cpio", ".iso", ".tar", ".gz", ".rz",
                     ".7z", ".dmg", ".rar", ".xar", ".zip"),
        "AUDIO": (".aac", ".aa", ".dvf", ".m4a", ".m4b", ".m4p", ".mp3",
                  ".msv", ".ogg", ".oga", ".raw", ".vox", ".wav", ".wma"),
        "PLAINTEXT": (".txt", ".in", ".out"),
        "PDF": ".pdf",
        "PYTHON": ".py",
        "EXE": ".exe",
        # endswith("") is always True, so these two catch everything left over
        "OTHER": "",
        "FOLDERS": ""
    }
try:
create_folders(directories, directory_path)
organize_folders(directories, directory_path)
organize_remaining_files(directory_path)
organize_remaining_folders(directories, directory_path)
except shutil.Error:
print("There was an error trying to move an item to its destination folder")
``` |
{
"source": "JordanLeich/music-downloader",
"score": 3
} |
#### File: JordanLeich/music-downloader/main.py
```python
from PyQt5.QtWidgets import QApplication, QWidget, QLineEdit, QMessageBox, QPushButton, QLabel
import sys
from PyQt5.QtGui import QIcon
from PyQt5.QtCore import pyqtSlot, QThread
import colors
import youtubedl
class Thread(QThread):
def __init__(self, name):
QThread.__init__(self)
self.name = name
# def setName(self,name):
# self.name = name
def run(self):
youtubedl.run(self.name)
print(colors.green + "Downloaded!\n", colors.reset)
class HomePage(QWidget):
def __init__(self):
super().__init__()
self.initUI()
status_thread = False
name = ""
def initUI(self):
self.setGeometry(600, 400, 600, 400)
self.setWindowTitle("Music-downloader")
self.setWindowIcon(QIcon('music.webp'))
self.textbox = QLineEdit(self)
self.label = QLabel("Song Name/Playlist link", self)
self.btn = QPushButton('Download', self)
self.label.move(20, 100)
self.label.resize(150, 30)
self.btn.move(270, 200)
self.btn.resize(70, 30)
self.textbox.move(150, 100)
self.textbox.resize(400, 30)
self.msg = QMessageBox(self)
self.btn.clicked.connect(self.on_click)
self.show()
@pyqtSlot()
def on_click(self):
self.setWindowTitle("Music-downloader(downloading)")
name = self.textbox.text()
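        # Keeping the thread on self stops Python from garbage-collecting
        # the QThread while the download is still running.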
self.thread = Thread(name)
self.thread.start()
def main():
app = QApplication(sys.argv)
    hp = HomePage()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
``` |
{
"source": "JordanLeich/PasswordGenerator",
"score": 4
} |
#### File: PasswordGenerator/Password_Generator/encryption.py
```python
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from art import tprint
import return_to_menu
import sys
# PUBLIC KEY IMPORT
def pwd_encryption():
tprint('''\nZPE\n''', font='isometric1')
print('Zorkol Password Encryptor'.center(43))
encryption_key = False
while not encryption_key:
try:
encryption_key = input('\nPublic Key File Name >> ')
pu_key = RSA.import_key(open(encryption_key, 'r').read())
encryption_key = PKCS1_OAEP.new(key=pu_key)
encryptor(encryption_key)
except (FileNotFoundError, ValueError):
print('''\nPossible errors:
Public key or password file does not exist.
Public key bits are not enough for encryption (please select ZH from the menu for more information about encryption).''')
encryption_key = False
except KeyboardInterrupt:
print('\n')
return_to_menu.back_to_menu()
# PASSWORD ENCRYPTOR
def encryptor(encryption_key):
file_input = False
while not file_input:
pwd_file_to_list = []
bytes_pwd_list = []
file_input = input('\nFile to encrypt >> ')
        with open(file_input, 'r') as pwd_file:
            for line in pwd_file:
                pwd_file_to_list.append(line)
        for line in pwd_file_to_list:
            bytes_pwd_list.append(bytes(line, 'utf-8'))
        plaintext = b''.join(bytes_pwd_list)
        encrypted_passwords = encryption_key.encrypt(plaintext)
encrypted_pwd_file_creation(encrypted_passwords)
# ENCRYPTED PASSWORD FILE CREATION/EXPORT
def encrypted_pwd_file_creation(encrypted_passwords):
new_file = False
while not new_file:
try:
new_file = input('\nNew file name >> (Do not include \'.txt\') ')
            with open(new_file + '.txt', 'wb') as enc:
                enc.write(encrypted_passwords)
            print('\nFile has been exported.\n')
return_to_menu.back_to_menu()
except ValueError:
print('Check your spelling')
new_file = False
```
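The encryptor above feeds the whole password file to a single `PKCS1_OAEP.encrypt` call, which is what the "public key bits are not enough" error hints at: OAEP caps the plaintext at the modulus size minus padding. A minimal sketch of that limit, assuming PyCryptodome with its default SHA-1 OAEP hash:

```python
# Hypothetical check of the OAEP payload ceiling referenced by the error above.
from Crypto.PublicKey import RSA

def max_oaep_payload(key, hash_bytes=20):  # SHA-1 digest is 20 bytes
    return key.size_in_bytes() - 2 * hash_bytes - 2

key = RSA.generate(2048)
print(max_oaep_payload(key))  # -> 214 bytes for a 2048-bit key
```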
#### File: PasswordGenerator/Password_Generator/password_generator.py
```python
import secrets
import string
import password_file_creation
# PASSWORD LIST CREATION
def pwd_lst_generator(password_amount, password_length, include_punctuation):
pwd_lst = []
for _ in range(password_amount):
if include_punctuation:
pwd_lst.append(''.join(
secrets.choice(list(string.ascii_letters) + list(string.digits) + list(string.punctuation)) for i in
range(password_length)))
else:
pwd_lst.append(''.join(
secrets.choice(list(string.ascii_letters) + list(string.digits)) for i in range(password_length)))
if len(pwd_lst) == password_amount:
password_file_creation.pwd_file_creation(pwd_lst)
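

# Hypothetical call: five 16-character passwords with punctuation enabled.
# pwd_lst_generator(5, 16, include_punctuation=True)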
``` |
{
"source": "JordanLeich/Temperature-Converter",
"score": 4
} |
#### File: JordanLeich/Temperature-Converter/Converter.py
```python
import time
import restart
def converter():
try:
user_choice = int(
input("Would you like to convert Celsius to Fahrenheit (1) or convert Fahrenheit to Celsius (2): "))
print()
if user_choice == 1:
user_celsius = float(input("Celsius: "))
print()
fahrenheit = float(user_celsius * 1.8 + 32)
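            # e.g. 100 °C -> 100 * 1.8 + 32 = 212 °F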
print(user_celsius, "in Celsius equals", fahrenheit, "in Fahrenheit.")
print()
time.sleep(2)
restart.restart()
elif user_choice == 2:
user_fahrenheit = float(input("Fahrenheit: "))
print()
            celsius = float((user_fahrenheit - 32) / 1.8)
            # e.g. 212 °F -> (212 - 32) / 1.8 = 100 °C
            print(user_fahrenheit, "in Fahrenheit equals", celsius, "in Celsius.")
print()
time.sleep(2)
restart.restart()
else:
print("Invalid input... Restarting input choice...")
print()
time.sleep(3)
converter()
    except ValueError:  # non-numeric input
print()
print("Error found... Restarting input choice...\n")
time.sleep(3)
converter()
converter()
``` |
{
"source": "JordanLevy/ChessGladiatorArena",
"score": 4
} |
#### File: ChessGladiatorArena/chessgame/bishop.py
```python
from move import Move
from piece import Piece
class Bishop(Piece):
def __init__(self, board, is_white, file, rank):
super().__init__(board, is_white, file, rank)
self.letter = 'B'
if is_white:
self.img = 'Images/WhiteBishop.png'
else:
self.img = 'Images/BlackBishop.png'
def get_defended_squares(self):
defended = []
w = self.get_is_white()
opposing = lambda x: x and w != x.get_is_white()
moveable = lambda x: x != -1 and x is None
captureable = lambda x: x != -1 and (x is not None)
add_move = lambda x: defended.append(''.join(map(str, self.get_offset(x))))
searching = [True, True, True, True] # True if still searching a direction [ur, ul, dr, dl]
for i in range(1, 9):
offsets = [(i, i), (-i, i), (i, -i), (-i, -i)]
for j in range(len(offsets)):
if not searching[j]:
continue
k = offsets[j]
s = self.get_piece_at_offset(k)
if moveable(s) or captureable(s):
add_move(k)
# x-ray the opposing king
if captureable(s) and not (s.letter == 'K' and opposing(s)):
searching[j] = False
else:
# defend your own piece and stop looking
if s != -1 and not opposing(s):
add_move(k)
searching[j] = False
return defended
def get_possible_moves(self):
possible = []
w = self.get_is_white()
opposing = lambda x: x and w != x.get_is_white()
moveable = lambda x: x != -1 and x is None
captureable = lambda x: x != -1 and (x is not None and opposing(x))
add_move = lambda x, c, e: possible.append(Move(w, self.letter, self.file, self.rank, self.get_offset(x)[0], int(self.get_offset(x)[1]), c, e))
searching = [True, True, True, True] # True if still searching a direction [ur, ul, dr, dl]
for i in range(1, 9):
offsets = [(i, i), (-i, i), (i, -i), (-i, -i)]
for j in range(len(offsets)):
if not searching[j]:
continue
k = offsets[j]
s = self.get_piece_at_offset(k)
if moveable(s) or captureable(s):
add_move(k, s, False)
if captureable(s):
searching[j] = False
else:
if s != -1 and not opposing(s):
searching[j] = False
return possible
```
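Both `get_defended_squares` and `get_possible_moves` above walk each diagonal with a per-direction `searching` flag. A standalone distillation of that ray scan, assuming a hypothetical `piece_at(square)` lookup that returns `-1` off the board, `None` for an empty square, and a piece object otherwise:

```python
# Hypothetical sketch of the ray walk Bishop (and any slider) performs.
def walk_ray(piece_at, start, delta):
    """Yield reachable squares along one direction until blocked."""
    square = start + delta
    while piece_at(square) != -1:         # the sentinel border ends the ray
        yield square                      # empty or occupied: square is seen
        if piece_at(square) is not None:  # any piece blocks further travel
            break
        square += delta
```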
#### File: ChessGladiatorArena/chessgame/king.py
```python
from move import Move
from piece import Piece
from rook import Rook
class King(Piece):
def __init__(self, board, is_white, file, rank):
super().__init__(board, is_white, file, rank)
self.letter = 'K'
if is_white:
self.img = 'Images/WhiteKing.png'
else:
self.img = 'Images/BlackKing.png'
def is_in_check(self):
return self.defended_by_enemy(self.file, self.rank)
def defended_by_enemy(self, f, r):
files = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
target = f + str(r)
for i in range(0, 8):
for j in range(1, 9):
s = self.board.get_piece(files[i], j)
if not s:
continue
if (self.is_white != s.get_is_white()) and (
target in s.get_defended_squares()):
return True
return False
def get_defended_squares(self):
defended = []
w = self.get_is_white()
opposing = lambda x: x and w != x.get_is_white()
moveable = lambda x: x != -1 and x is None
captureable = lambda x: x != -1 and (x is not None)
add_move = lambda x: defended.append(''.join(map(str, self.get_offset(x))))
for i in range(-1, 2):
for j in range(-1, 2):
if i == 0 and j == 0:
continue
s = self.get_piece_at_offset((i, j))
if moveable(s) or captureable(s):
add_move((i, j))
return defended
def get_possible_moves(self):
possible = []
w = self.get_is_white()
opposing = lambda x: x and w != x.get_is_white()
moveable = lambda x: x != -1 and x is None
captureable = lambda x: x != -1 and (x is not None and opposing(x))
add_move = lambda x, c, e, s, l: possible.append(Move(w, self.letter, self.file, self.rank, self.get_offset(x)[0], int(self.get_offset(x)[1]), c, e, s, l))
# regular king movement
for i in range(-1, 2):
for j in range(-1, 2):
if i == 0 and j == 0:
continue
s = self.get_piece_at_offset((i, j))
if moveable(s) or captureable(s):
add_move((i, j), s, False, False, False)
# short castling
# king cannot have moved
short_castle = not self.get_has_moved()
# h1/8 rook can't have moved
s = self.board.get_piece('h', self.rank)
if not (s and type(s) is Rook and not s.num_times_moved):
short_castle = False
        # f1/8 and g1/8 must be empty and undefended
for i in ['f', 'g']:
s = self.board.get_piece(i, self.rank)
if s or self.defended_by_enemy(i, self.rank):
short_castle = False
if short_castle:
add_move((2, 0), None, False, True, False)
# long castling
# king cannot have moved
long_castle = not self.get_has_moved()
# a1/8 rook can't have moved
s = self.board.get_piece('a', self.rank)
if not (s and type(s) is Rook and not s.num_times_moved):
long_castle = False
# c1/8 and d1/8 must be empty and undefended
for i in ['c', 'd']:
s = self.board.get_piece(i, self.rank)
if s or self.defended_by_enemy(i, self.rank):
long_castle = False
# b1/8 only has to be empty
s = self.board.get_piece('b', self.rank)
if s:
long_castle = False
if long_castle:
add_move((-2, 0), None, False, False, True)
return possible
def move(self, move):
f = move.to_file
r = move.to_rank
self.board.remove_piece(move.from_file, move.from_rank)
self.file = f
self.rank = r
self.increment_num_times_moved()
self.board.set_piece(self)
if move.is_short_castle:
s = self.board.get_piece('h', self.rank)
self.board.remove_piece_by_ref(s)
s.file = 'f'
self.board.set_piece(s)
if move.is_long_castle:
s = self.board.get_piece('a', self.rank)
self.board.remove_piece_by_ref(s)
s.file = 'd'
self.board.set_piece(s)
return True
``` |
{
"source": "jordanm88/Django-CRM",
"score": 2
} |
#### File: Django-CRM/common/access_decorators_mixins.py
```python
from django.contrib.auth.mixins import AccessMixin
from django.core.exceptions import PermissionDenied
def sales_access_required(function):
""" this function is a decorator used to authorize if a user has sales access """
def wrap(request, *args, **kwargs):
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user.has_sales_access
):
return function(request, *args, **kwargs)
raise PermissionDenied
return wrap
def marketing_access_required(function):
""" this function is a decorator used to authorize if a user has marketing access """
def wrap(request, *args, **kwargs):
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user.has_marketing_access
):
return function(request, *args, **kwargs)
raise PermissionDenied
return wrap
class SalesAccessRequiredMixin(AccessMixin):
""" Mixin used to authorize if a user has sales access """
def dispatch(self, request, *args, **kwargs):
if not request.user.is_authenticated:
return self.handle_no_permission()
self.raise_exception = True
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user.has_sales_access
):
return super().dispatch(
request, *args, **kwargs
)
return self.handle_no_permission()
class MarketingAccessRequiredMixin(AccessMixin):
""" Mixin used to authorize if a user has marketing access """
def dispatch(self, request, *args, **kwargs):
if not request.user.is_authenticated:
return self.handle_no_permission()
self.raise_exception = True
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user.has_marketing_access
):
return super().dispatch(
request, *args, **kwargs
)
return self.handle_no_permission()
def admin_login_required(function):
""" this function is a decorator used to authorize if a user is admin """
def wrap(request, *args, **kwargs):
if request.user.role == "ADMIN" or request.user.is_superuser:
return function(request, *args, **kwargs)
raise PermissionDenied
return wrap
```
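A minimal usage sketch for the decorator and mixin above, with hypothetical view names, assuming this module is importable as `common.access_decorators_mixins`:

```python
# Hypothetical views guarded by the sales-access decorator and mixin.
from django.http import HttpResponse
from django.views.generic import View

from common.access_decorators_mixins import (
    SalesAccessRequiredMixin,
    sales_access_required,
)

@sales_access_required
def sales_dashboard(request):
    return HttpResponse("sales data")

class SalesReportView(SalesAccessRequiredMixin, View):
    def get(self, request):
        return HttpResponse("sales report")
```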
#### File: common/middleware/get_company.py
```python
from rest_framework.response import Response
from rest_framework import status
from django.contrib.auth import logout
from common.models import Org, Profile
def set_profile_request(request, org):
if request.user.is_authenticated:
request.profile = Profile.objects.filter(
user=request.user, org=org, is_active=True).first()
if request.profile is None:
logout(request)
return Response(
{"error": False}, status=status.HTTP_200_OK,
)
class GetProfileAndOrg(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
self.process_request(request)
return self.get_response(request)
def process_request(self, request):
if request.headers.get("org"):
org_id = request.headers.get("org")
org = Org.objects.filter(id=org_id).first()
if org:
request.org = org
set_profile_request(request, org)
else:
request.org = None
else:
request.org = None
```
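For the middleware above to populate `request.org` and `request.profile`, it has to run after the authentication middleware; a sketch of the relevant `settings.py` entry, with the dotted path inferred from the file location:

```python
# Hypothetical settings.py excerpt; the surrounding entries are illustrative.
MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "common.middleware.get_company.GetProfileAndOrg",  # sets request.org/profile
]
```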
#### File: Django-CRM/common/serializer.py
```python
import re
from django.contrib.auth.hashers import check_password
from rest_framework import serializers
from common.models import (
User,
Org,
Comment,
Address,
Attachments,
Document,
APISettings,
Profile
)
from django.utils.http import urlsafe_base64_decode
from django.contrib.auth.tokens import default_token_generator
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Org
fields = ("id", "name")
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = (
"id",
"comment",
"commented_on",
"commented_by",
"account",
"lead",
"opportunity",
"contact",
"case",
"task",
"invoice",
"event",
"profile",
)
class LeadCommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = (
"id",
"comment",
"commented_on",
"commented_by",
"lead",
)
class RegisterOrganizationSerializer(serializers.Serializer):
email = serializers.EmailField()
first_name = serializers.CharField(max_length=255)
password = serializers.CharField(max_length=100)
org_name = serializers.CharField(max_length=100)
def validate_password(self, password):
if password:
if len(password) < 4:
raise serializers.ValidationError(
"Password must be at least 4 characters long!"
)
return password
def validate_email(self, email):
org_name = self.initial_data.get('org_name')
if Profile.objects.filter(user__email__iexact=email,
org__name=org_name).exists():
raise serializers.ValidationError(
"This email is already registered in this organization")
return email
def validate_org_name(self, org_name):
if bool(re.search(r"[~\!_.@#\$%\^&\*\ \(\)\+{}\":;'/\[\]]", org_name)):
raise serializers.ValidationError(
"organization name should not contain any special characters")
if Org.objects.filter(name=org_name).exists():
raise serializers.ValidationError(
"Organization already exists with this name")
return org_name
class BillingAddressSerializer(serializers.ModelSerializer):
country = serializers.SerializerMethodField()
def get_country(self, obj):
return obj.get_country_display()
class Meta:
model = Address
fields = ("address_line", "street", "city",
"state", "postcode", "country")
def __init__(self, *args, **kwargs):
account_view = kwargs.pop("account", False)
super().__init__(*args, **kwargs)
if account_view:
self.fields["address_line"].required = True
self.fields["street"].required = True
self.fields["city"].required = True
self.fields["state"].required = True
self.fields["postcode"].required = True
self.fields["country"].required = True
class CreateUserSerializer(serializers.ModelSerializer):
password = serializers.CharField()
class Meta:
model = User
fields = (
"first_name",
"last_name",
"email",
"alternate_email",
"skype_ID",
"description",
"profile_pic",
"password",
)
def __init__(self, *args, **kwargs):
self.org = kwargs.pop("org", None)
super().__init__(*args, **kwargs)
self.fields["first_name"].required = True
self.fields["password"].required = False
self.fields["profile_pic"].required = False
self.fields["skype_ID"].required = False
def validate_email(self, email):
if self.instance:
if self.instance.email != email:
if not Profile.objects.filter(
user__email=email, org=self.org).exists():
return email
raise serializers.ValidationError("Email already exists")
return email
if not Profile.objects.filter(user__email=email.lower(), org=self.org).exists():
return email
raise serializers.ValidationError('Given Email id already exists')
class CreateProfileSerializer(serializers.ModelSerializer):
class Meta:
model = Profile
fields = (
"role",
"phone",
"alternate_phone",
"has_sales_access",
"has_marketing_access",
"is_organization_admin"
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["alternate_phone"].required = False
self.fields["role"].required = True
self.fields["phone"].required = True
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = (
"id",
"first_name",
"last_name",
"email",
"alternate_email",
"skype_ID",
"description",
"profile_pic",
)
class ProfileSerializer(serializers.ModelSerializer):
user_details = serializers.SerializerMethodField()
address = BillingAddressSerializer()
def get_user_details(self, obj):
return UserSerializer(obj.user).data
class Meta:
model = Profile
fields = ("id", 'user_details', 'role', 'address',
'has_marketing_access', 'has_sales_access',
'phone', 'date_of_joining', 'is_active')
class ForgotPasswordSerializer(serializers.Serializer):
email = serializers.CharField(max_length=200)
def validate(self, data):
email = data.get("email")
user = User.objects.filter(email__iexact=email).last()
if not user:
raise serializers.ValidationError(
"You don't have an account. Please create one."
)
return data
class CheckTokenSerializer(serializers.Serializer):
uidb64_regex = r"[0-9A-Za-z_\-]+"
    token_regex = r"[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20}"
uidb64 = serializers.RegexField(uidb64_regex)
token = serializers.RegexField(token_regex)
error_message = {"__all__": ("Invalid password reset token")}
def get_user(self, uidb64):
try:
uid = urlsafe_base64_decode(uidb64).decode()
user = User._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, User.DoesNotExist):
user = None
return user
class ResetPasswordSerailizer(CheckTokenSerializer):
new_password1 = serializers.CharField()
new_password2 = serializers.CharField()
def validate(self, data):
        self.user = self.get_user(data.get("uidb64"))
if not self.user:
raise serializers.ValidationError(self.error_message)
is_valid_token = default_token_generator.check_token(
self.user, data.get("token")
)
if not is_valid_token:
raise serializers.ValidationError(self.error_message)
new_password2 = data.get("new_password2")
new_password1 = data.get("new_password1")
if new_password1 != new_password2:
raise serializers.ValidationError(
"The two password fields didn't match.")
return new_password2
class AttachmentsSerializer(serializers.ModelSerializer):
file_path = serializers.SerializerMethodField()
def get_file_path(self, obj):
if obj.attachment:
return obj.attachment.url
        return None
class Meta:
model = Attachments
fields = ["id", "created_by", "file_name", "created_on", "file_path"]
class DocumentSerializer(serializers.ModelSerializer):
shared_to = ProfileSerializer(read_only=True, many=True)
teams = serializers.SerializerMethodField()
created_by = ProfileSerializer()
org = OrganizationSerializer()
def get_teams(self, obj):
return obj.teams.all().values()
class Meta:
model = Document
fields = [
"id",
"title",
"document_file",
"status",
"shared_to",
"teams",
"created_on",
"created_by",
"org"
]
class DocumentCreateSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
request_obj = kwargs.pop("request_obj", None)
super().__init__(*args, **kwargs)
self.fields["title"].required = True
self.org = request_obj.org
def validate_title(self, title):
if self.instance:
if (
Document.objects.filter(
title__iexact=title, org=self.org)
.exclude(id=self.instance.id)
.exists()
):
raise serializers.ValidationError(
"Document with this Title already exists"
)
if Document.objects.filter(title__iexact=title, org=self.org).exists():
raise serializers.ValidationError(
"Document with this Title already exists"
)
return title
class Meta:
model = Document
fields = [
"title",
"document_file",
"status",
"org"
]
def find_urls(string):
# website_regex = "^((http|https)://)?([A-Za-z0-9.-]+\.[A-Za-z]{2,63})?$" # (http(s)://)google.com or google.com
# website_regex = "^https?://([A-Za-z0-9.-]+\.[A-Za-z]{2,63})?$" # (http(s)://)google.com
# http(s)://google.com
    website_regex = r"^https?://[A-Za-z0-9.-]+\.[A-Za-z]{2,63}$"
    # http(s)://google.com:8000
    website_regex_port = r"^https?://[A-Za-z0-9.-]+\.[A-Za-z]{2,63}:[0-9]{2,4}$"
url = re.findall(website_regex, string)
url_port = re.findall(website_regex_port, string)
if url and url[0] != "":
return url
return url_port
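# e.g. find_urls("http://google.com")      -> ["http://google.com"]
#      find_urls("http://google.com:8000") -> ["http://google.com:8000"]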
class APISettingsSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class Meta:
model = APISettings
fields = ("title", "website")
def validate_website(self, website):
if website and not (
website.startswith("http://") or website.startswith("https://")
):
raise serializers.ValidationError("Please provide valid schema")
if not len(find_urls(website)) > 0:
raise serializers.ValidationError(
"Please provide a valid URL with schema and without trailing slash - Example: http://google.com"
)
return website
class APISettingsListSerializer(serializers.ModelSerializer):
created_by = ProfileSerializer()
lead_assigned_to = ProfileSerializer(read_only=True, many=True)
tags = serializers.SerializerMethodField()
org = OrganizationSerializer()
def get_tags(self, obj):
return obj.tags.all().values()
class Meta:
model = APISettings
fields = [
"title",
"apikey",
"website",
"created_on",
"created_by",
"lead_assigned_to",
"tags",
"org"
]
class PasswordChangeSerializer(serializers.Serializer):
old_password = serializers.CharField(max_length=100)
new_password = serializers.CharField(max_length=100)
retype_password = serializers.CharField(max_length=100)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def validate_old_password(self, pwd):
if not check_password(pwd, self.context.get('user').password):
raise serializers.ValidationError(
"old password entered is incorrect.")
return pwd
def validate(self, data):
if len(data.get('new_password')) < 8:
raise serializers.ValidationError(
"Password must be at least 8 characters long!")
if data.get('new_password') == data.get('old_password'):
raise serializers.ValidationError(
"New_password and old password should not be the same")
if data.get('new_password') != data.get('retype_password'):
raise serializers.ValidationError(
"New_password and Retype_password did not match.")
return data
```
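A minimal sketch of driving `RegisterOrganizationSerializer`'s field validators, assuming a configured Django settings module and database (the email check queries `Profile`):

```python
# Hypothetical registration payload run through the serializer above.
from common.serializer import RegisterOrganizationSerializer

payload = {
    "email": "owner@example.com",
    "first_name": "Ada",
    "password": "s3cretpw",
    "org_name": "acme",
}
serializer = RegisterOrganizationSerializer(data=payload)
if serializer.is_valid():            # runs validate_password/_email/_org_name
    print(serializer.validated_data)
else:
    print(serializer.errors)         # e.g. an org name with special characters
```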
#### File: Django-CRM/emails/views.py
```python
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from emails.models import Email
from emails.forms import EmailForm
from django.core.mail import EmailMessage
from datetime import datetime
from datetime import timedelta
def emails_list(request):
filter_list = Email.objects.all()
if request.GET.get("from_date", ""):
from_date = request.GET.get("from_date", "")
fd = datetime.strptime(from_date, "%Y-%m-%d").date()
filter_list = filter_list.filter(send_time__gte=fd)
if request.GET.get("to_date", ""):
to_date = request.GET.get("to_date", "")
td = datetime.strptime(to_date, "%Y-%m-%d")
td = td + timedelta(seconds=(24 * 60 * 60 - 1))
filter_list = filter_list.filter(send_time__lte=td)
if request.GET.get("name", ""):
name = request.GET.get("name", "")
filter_list = filter_list.filter(to_email__startswith=name)
return render(request, "mail_all.html", {"filter_list": filter_list})
def email(request):
if request.method == "POST":
form = EmailForm(request.POST, request.FILES)
if form.is_valid():
subject = request.POST.get("subject", "")
message = request.POST.get("message", "")
from_email = request.POST.get("from_email", "")
to_email = request.POST.get("to_email", "")
file = request.FILES.get("files", None)
status = request.POST.get("email_draft", "")
email = EmailMessage(subject, message, from_email, [to_email])
email.content_subtype = "html"
f = form.save()
if file is not None:
email.attach(file.name, file.read(), file.content_type)
f.file = file
if status:
f.status = "draft"
else:
email.send(fail_silently=False)
f.save()
return HttpResponseRedirect(reverse("emails:list"))
else:
return render(request, "create_mail.html", {"form": form})
else:
form = EmailForm()
return render(request, "create_mail.html", {"form": form})
def email_sent(request):
filter_list = Email.objects.filter(status="sent")
if request.GET.get("from_date", ""):
from_date = request.GET.get("from_date", "")
fd = datetime.strptime(from_date, "%Y-%m-%d").date()
filter_list = filter_list.filter(send_time__gte=fd)
if request.GET.get("to_date", ""):
to_date = request.GET.get("to_date", "")
td = datetime.strptime(to_date, "%Y-%m-%d")
td = td + timedelta(seconds=(24 * 60 * 60 - 1))
filter_list = filter_list.filter(send_time__lte=td)
if request.GET.get("name", ""):
name = request.GET.get("name", "")
filter_list = filter_list.filter(to_email__startswith=name)
return render(request, "mail_sent.html", {"filter_list": filter_list})
def email_trash(request):
filter_list = Email.objects.filter(status="trash")
if request.GET.get("from_date", ""):
from_date = request.GET.get("from_date", "")
fd = datetime.strptime(from_date, "%Y-%m-%d").date()
filter_list = filter_list.filter(send_time__gte=fd)
if request.GET.get("to_date", ""):
to_date = request.GET.get("to_date", "")
td = datetime.strptime(to_date, "%Y-%m-%d")
td = td + timedelta(seconds=(24 * 60 * 60 - 1))
filter_list = filter_list.filter(send_time__lte=td)
if request.GET.get("name", ""):
name = request.GET.get("name", "")
filter_list = filter_list.filter(to_email__startswith=name)
return render(request, "mail_trash.html", {"filter_list": filter_list})
def email_trash_delete(request, pk):
get_object_or_404(Email, id=pk).delete()
return HttpResponseRedirect(reverse("emails:email_trash"))
def email_draft(request):
filter_list = Email.objects.filter(status="draft")
if request.GET.get("from_date", ""):
from_date = request.GET.get("from_date", "")
fd = datetime.strptime(from_date, "%Y-%m-%d").date()
filter_list = filter_list.filter(send_time__gte=fd)
if request.GET.get("to_date", ""):
to_date = request.GET.get("to_date", "")
td = datetime.strptime(to_date, "%Y-%m-%d")
td = td + timedelta(seconds=(24 * 60 * 60 - 1))
filter_list = filter_list.filter(send_time__lte=td)
if request.GET.get("name", ""):
name = request.GET.get("name", "")
filter_list = filter_list.filter(to_email__startswith=name)
return render(request, "mail_drafts.html", {"filter_list": filter_list})
def email_draft_delete(request, pk):
get_object_or_404(Email, id=pk).delete()
return HttpResponseRedirect(reverse("emails:email_draft"))
def email_delete(request, pk):
get_object_or_404(Email, id=pk).delete()
return HttpResponseRedirect(reverse("emails:email_sent"))
def email_move_to_trash(request, pk):
trashitem = get_object_or_404(Email, id=pk)
trashitem.status = "trash"
trashitem.save()
return HttpResponseRedirect(request.META["HTTP_REFERER"])
def email_imp(request, pk):
impitem = get_object_or_404(Email, id=pk)
impitem.important = True
impitem.save()
return HttpResponseRedirect(request.META["HTTP_REFERER"])
def email_imp_list(request):
filter_list = Email.objects.filter(important="True")
if request.GET.get("from_date", ""):
from_date = request.GET.get("from_date", "")
fd = datetime.strptime(from_date, "%Y-%m-%d").date()
filter_list = filter_list.filter(send_time__gte=fd)
if request.GET.get("to_date", ""):
to_date = request.GET.get("to_date", "")
td = datetime.strptime(to_date, "%Y-%m-%d")
td = td + timedelta(seconds=(24 * 60 * 60 - 1))
filter_list = filter_list.filter(send_time__lte=td)
if request.GET.get("name", ""):
name = request.GET.get("name", "")
filter_list = filter_list.filter(to_email__startswith=name)
return render(request, "mail_important.html", {"filter_list": filter_list})
def email_sent_edit(request, pk):
em = get_object_or_404(Email, pk=pk)
if request.method == "POST":
form = EmailForm(request.POST, instance=em)
if form.is_valid():
subject = request.POST.get("subject", "")
message = request.POST.get("message", "")
from_email = request.POST.get("from_email", "")
to_email = request.POST.get("to_email", "")
file = request.FILES.get("files", None)
status = request.POST.get("email_draft", "")
email = EmailMessage(subject, message, from_email, [to_email])
email.content_subtype = "html"
f = form.save()
if file is not None:
email.attach(file.name, file.read(), file.content_type)
f.file = file
if status:
f.status = "draft"
else:
email.send(fail_silently=False)
f.status = "sent"
f.save()
return HttpResponseRedirect(reverse("emails:list"))
return render(request, "create_mail.html", {"form": form, "em": em})
form = EmailForm()
return render(request, "create_mail.html", {"form": form, "em": em})
def email_unimp(request, pk):
unimpitem = get_object_or_404(Email, id=pk)
unimpitem.important = False
unimpitem.save()
return HttpResponseRedirect(request.META["HTTP_REFERER"])
def email_view(request, pk):
email_view = get_object_or_404(Email, pk=pk)
x = EmailForm(instance=email_view)
return render(request, "create_mail.html", {"x": x})
```
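The `from_date`/`to_date`/`name` filtering above is repeated verbatim in five views; a refactor sketch with a hypothetical shared helper each view could call before rendering:

```python
# Hypothetical helper consolidating the duplicated GET-parameter filtering.
from datetime import datetime, timedelta

def apply_email_filters(request, queryset):
    """Apply the shared from_date / to_date / name filters to a queryset."""
    if request.GET.get("from_date", ""):
        fd = datetime.strptime(request.GET["from_date"], "%Y-%m-%d").date()
        queryset = queryset.filter(send_time__gte=fd)
    if request.GET.get("to_date", ""):
        td = datetime.strptime(request.GET["to_date"], "%Y-%m-%d")
        td += timedelta(seconds=24 * 60 * 60 - 1)  # include the whole day
        queryset = queryset.filter(send_time__lte=td)
    if request.GET.get("name", ""):
        queryset = queryset.filter(to_email__startswith=request.GET["name"])
    return queryset
```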
#### File: Django-CRM/invoices/api_views.py
```python
import pytz
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.db.models import Q
from invoices.models import Invoice
from invoices.tasks import (
send_email,
send_invoice_email,
send_invoice_email_cancel,
create_invoice_history,
)
from invoices import swagger_params
from invoices.serializer import (
InvoiceSerailizer,
InvoiceHistorySerializer,
InvoiceCreateSerializer,
)
from accounts.models import Account
from accounts.serializer import AccountSerializer
from common.models import User, Attachments, Comment
from common.custom_auth import JSONWebTokenAuthentication
from common.serializer import (
UserSerializer,
CommentSerializer,
AttachmentsSerializer,
BillingAddressSerializer,
)
from common.utils import (
COUNTRIES,
CURRENCY_CODES,
)
from teams.serializer import TeamsSerializer
from teams.models import Teams
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.pagination import LimitOffsetPagination
from drf_yasg.utils import swagger_auto_schema
import json
INVOICE_STATUS = (
("Draft", "Draft"),
("Sent", "Sent"),
("Paid", "Paid"),
("Pending", "Pending"),
("Cancelled", "Cancel"),
)
class InvoiceListView(APIView, LimitOffsetPagination):
authentication_classes = (JSONWebTokenAuthentication,)
permission_classes = (IsAuthenticated,)
model = Invoice
def get_context_data(self, **kwargs):
params = (
self.request.query_params
if len(self.request.data) == 0
else self.request.data
)
queryset = self.model.objects.filter(company=self.request.company)
accounts = Account.objects.filter(company=self.request.company)
if self.request.user.role != "ADMIN" and not self.request.user.is_superuser:
queryset = queryset.filter(
Q(created_by=self.request.user) | Q(assigned_to=self.request.user)
).distinct()
accounts = accounts.filter(
Q(created_by=self.request.user) | Q(assigned_to=self.request.user)
).distinct()
if params:
if params.get("invoice_title_or_number"):
queryset = queryset.filter(
Q(invoice_title__icontains=params.get("invoice_title_or_number"))
| Q(invoice_number__icontains=params.get("invoice_title_or_number"))
).distinct()
if params.get("created_by"):
queryset = queryset.filter(created_by=params.get("created_by"))
if params.get("assigned_users"):
queryset = queryset.filter(
assigned_to__in=json.loads(params.get("assigned_users"))
)
if params.get("status"):
queryset = queryset.filter(status=params.get("status"))
if params.get("total_amount"):
queryset = queryset.filter(
total_amount__icontains=params.get("total_amount")
)
context = {}
search = False
if (
params.get("invoice_title_or_number")
or params.get("created_by")
or params.get("assigned_users")
or params.get("status")
or params.get("total_amount")
):
search = True
context["search"] = search
results_invoice = self.paginate_queryset(
queryset.distinct(), self.request, view=self
)
invoices = InvoiceSerailizer(results_invoice, many=True).data
context["per_page"] = 10
context.update(
{
"invoices_count": self.count,
"next": self.get_next_link(),
"previous": self.get_previous_link(),
"page_number": int(self.offset / 10) + 1,
}
)
context["invoices"] = invoices
context["users"] = UserSerializer(
User.objects.filter(is_active=True, company=self.request.company).order_by(
"email"
),
many=True,
).data
context["accounts_list"] = AccountSerializer(accounts, many=True).data
        if self.request.user.role == "ADMIN":
context["teams_list"] = TeamsSerializer(
Teams.objects.filter(company=self.request.company), many=True
).data
context["status"] = INVOICE_STATUS
context["currency"] = CURRENCY_CODES
context["countries"] = COUNTRIES
return context
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_list_get_params
)
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return Response(context)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_create_post_params
)
def post(self, request, *args, **kwargs):
params = request.query_params if len(request.data) == 0 else request.data
data = {}
serializer = InvoiceCreateSerializer(data=params, request_obj=request)
from_address_serializer = BillingAddressSerializer(data=params)
to_address_serializer = BillingAddressSerializer(data=params)
if not from_address_serializer.is_valid():
data["from_address_errors"] = from_address_serializer.errors
if not to_address_serializer.is_valid():
data["to_address_errors"] = to_address_serializer.errors
if data:
return Response({"error": True}, data)
if serializer.is_valid():
quality_hours = int(params.get("quality_hours"))
rate = float(params.get("rate"))
quantity = quality_hours * rate
tax = quantity * float(params.get("tax")) / 100
total_amount = quantity + tax
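            # e.g. 10 quality hours at rate 50.0 -> quantity 500.0;
            # 18% tax -> 90.0; total_amount 590.0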
from_address_obj = from_address_serializer.save(
address_line=params.get("from_address_line"),
street=params.get("from_street"),
city=params.get("from_city"),
state=params.get("from_state"),
postcode=params.get("from_postcode"),
country=params.get("from_country"),
)
to_address_obj = to_address_serializer.save(
address_line=params.get("to_address_line"),
street=params.get("to_street"),
city=params.get("to_city"),
state=params.get("to_state"),
postcode=params.get("to_postcode"),
country=params.get("to_country"),
)
invoice_obj = serializer.save(
created_by=request.user,
company=request.company,
quantity=params.get("quality_hours"),
total_amount=total_amount,
from_address_id=from_address_obj.id,
to_address_id=to_address_obj.id,
)
if params.get("accounts"):
accounts = json.loads(params.get("accounts"))
for account in accounts:
obj_account = Account.objects.filter(
id=account, company=request.company
)
if obj_account.exists():
invoice_obj.accounts.add(account)
else:
invoice_obj.delete()
data["accounts"] = "Please enter valid account"
return Response({"error": True}, data)
if self.request.user.role == "ADMIN":
if params.get("teams"):
teams = json.loads(params.get("teams"))
for team in teams:
obj_team = Teams.objects.filter(
id=team, company=request.company
)
if obj_team.exists():
invoice_obj.teams.add(team)
else:
invoice_obj.delete()
data["team"] = "Please enter valid Team"
return Response({"error": True}, data)
if params.get("assigned_to"):
                assigned_to_users_ids = json.loads(params.get("assigned_to"))
                for user_id in assigned_to_users_ids:
user = User.objects.filter(id=user_id, company=request.company)
if user.exists():
invoice_obj.assigned_to.add(user_id)
else:
invoice_obj.delete()
data["assigned_to"] = "Please enter valid user"
return Response({"error": True}, data)
create_invoice_history(invoice_obj.id, request.user.id, [])
assigned_to_list = list(
invoice_obj.assigned_to.all().values_list("id", flat=True)
)
recipients = assigned_to_list
send_email.delay(
recipients,
invoice_obj.id,
domain=settings.DOMAIN_NAME,
protocol=self.request.scheme,
)
return Response({"error": False, "message": "Invoice Created Successfully"})
return Response(
{"error": True, "errors": serializer.errors},
status=status.HTTP_400_BAD_REQUEST,
)
class InvoiceDetailView(APIView):
authentication_classes = (JSONWebTokenAuthentication,)
permission_classes = (IsAuthenticated,)
model = Invoice
def get_object(self, pk):
return self.model.objects.filter(id=pk).first()
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_create_post_params
)
def put(self, request, pk, format=None):
params = request.query_params if len(request.data) == 0 else request.data
invoice_obj = self.get_object(pk=pk)
from_address_obj = invoice_obj.from_address
to_address_obj = invoice_obj.to_address
data = {}
if invoice_obj.company != request.company:
return Response(
{"error": True, "errors": "User company doesnot match with header...."},
status=status.HTTP_404_NOT_FOUND,
)
if self.request.user.role != "ADMIN" and not self.request.user.is_superuser:
if not (
(self.request.user == invoice_obj.created_by)
or (self.request.user in invoice_obj.assigned_to.all())
):
return Response(
{
"error": True,
"errors": "You don't have Permission to perform this action",
},
status=status.HTTP_401_UNAUTHORIZED,
)
serializer = InvoiceCreateSerializer(
invoice_obj,
data=params,
request_obj=request,
invoice=True,
)
from_address_serializer = BillingAddressSerializer(
data=params, instance=from_address_obj
)
to_address_serializer = BillingAddressSerializer(
data=params, instance=to_address_obj
)
if not from_address_serializer.is_valid():
data["from_address_errors"] = from_address_serializer.errors
if not to_address_serializer.is_valid():
data["to_address_errors"] = to_address_serializer.errors
if data:
return Response({"error": True}, data)
if serializer.is_valid():
invoice_obj = serializer.save()
previous_assigned_to_users = list(
invoice_obj.assigned_to.all().values_list("id", flat=True)
)
from_address_obj = from_address_serializer.save(
address_line=params.get("from_address_line"),
street=params.get("from_street"),
city=params.get("from_city"),
state=params.get("from_state"),
postcode=params.get("from_postcode"),
country=params.get("from_country"),
)
to_address_obj = to_address_serializer.save(
address_line=params.get("to_address_line"),
street=params.get("to_street"),
city=params.get("to_city"),
state=params.get("to_state"),
postcode=params.get("to_postcode"),
country=params.get("to_country"),
)
invoice_obj.from_address = from_address_obj
invoice_obj.to_address = to_address_obj
quality_hours = int(params.get("quality_hours"))
rate = float(params.get("rate"))
quantity = quality_hours * rate
tax = quantity * float(params.get("tax")) / 100
invoice_obj.total_amount = quantity + tax
invoice_obj.save()
invoice_obj.accounts.clear()
if params.get("accounts"):
accounts = json.loads(params.get("accounts"))
for account in accounts:
obj_account = Account.objects.filter(
id=account, company=request.company
)
if obj_account.exists():
invoice_obj.accounts.add(account)
else:
data["accounts"] = "Please enter valid account"
return Response({"error": True}, data)
if self.request.user.role == "ADMIN":
invoice_obj.teams.clear()
if params.get("teams"):
teams = json.loads(params.get("teams"))
for team in teams:
obj_team = Teams.objects.filter(
id=team, company=request.company
)
if obj_team.exists():
invoice_obj.teams.add(team)
else:
data["team"] = "Please enter valid Team"
return Response({"error": True}, data)
invoice_obj.assigned_to.clear()
if params.get("assigned_to"):
                assigned_to_users_ids = json.loads(params.get("assigned_to"))
                for user_id in assigned_to_users_ids:
user = User.objects.filter(id=user_id, company=request.company)
if user.exists():
invoice_obj.assigned_to.add(user_id)
else:
data["assigned_to"] = "Please enter valid User"
return Response({"error": True}, data)
assigned_to_list = list(
invoice_obj.assigned_to.all().values_list("id", flat=True)
)
recipients = list(set(assigned_to_list) - set(previous_assigned_to_users))
send_email.delay(
recipients,
invoice_obj.id,
domain=settings.DOMAIN_NAME,
protocol=self.request.scheme,
)
return Response(
{"error": False, "message": "Invoice Updated Successfully"},
status=status.HTTP_200_OK,
)
return Response(
{"error": True, "errors": serializer.errors},
status=status.HTTP_400_BAD_REQUEST,
)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_delete_params
)
def delete(self, request, pk, format=None):
self.object = self.get_object(pk)
if self.object.company != request.company:
return Response(
{"error": True, "errors": "User company doesnot match with header...."}
)
if self.request.user.role != "ADMIN" and not self.request.user.is_superuser:
if self.request.user != self.object.created_by:
return Response(
{
"error": True,
"errors": "You do not have Permission to perform this action",
}
)
if self.object.from_address_id:
self.object.from_address.delete()
if self.object.to_address_id:
self.object.to_address.delete()
self.object.delete()
return Response(
{"error": False, "message": "Invoice Deleted Successfully."},
status=status.HTTP_200_OK,
)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_delete_params
)
def get(self, request, pk, format=None):
self.invoice = self.get_object(pk=pk)
if self.invoice.company != request.company:
return Response(
{"error": True, "errors": "User company doesnot match with header...."},
status=status.HTTP_404_NOT_FOUND,
)
context = {}
context["invoice_obj"] = InvoiceSerailizer(self.invoice).data
if self.request.user.role != "ADMIN" and not self.request.user.is_superuser:
if not (
(self.request.user == self.invoice.created_by)
or (self.request.user in self.invoice.assigned_to.all())
):
return Response(
{
"error": True,
"errors": "You don't have Permission to perform this action",
}
)
        comment_permission = (
            self.request.user == self.invoice.created_by
            or self.request.user.is_superuser
            or self.request.user.role == "ADMIN"
        )
if self.request.user.is_superuser or self.request.user.role == "ADMIN":
users_mention = list(
User.objects.filter(
is_active=True,
company=self.request.company,
).values("username")
)
elif self.request.user != self.invoice.created_by:
if self.invoice.created_by:
users_mention = [{"username": self.invoice.created_by.username}]
else:
users_mention = []
else:
users_mention = []
attachments = Attachments.objects.filter(invoice=self.invoice).order_by("-id")
comments = Comment.objects.filter(invoice=self.invoice).order_by("-id")
context.update(
{
"attachments": AttachmentsSerializer(attachments, many=True).data,
"comments": CommentSerializer(comments, many=True).data,
"invoice_history": InvoiceHistorySerializer(
self.invoice.invoice_history.all(), many=True
).data,
"accounts": AccountSerializer(
self.invoice.accounts.all(), many=True
).data,
"users": UserSerializer(
User.objects.filter(
is_active=True,
company=self.request.company,
).order_by("email"),
many=True,
).data,
"comment_permission": comment_permission,
"users_mention": users_mention,
"status": INVOICE_STATUS,
"currency": CURRENCY_CODES,
"countries": COUNTRIES,
}
)
return Response(context)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_detail_post_params
)
def post(self, request, pk, **kwargs):
params = (
self.request.query_params
if len(self.request.data) == 0
else self.request.data
)
context = {}
self.invoice_obj = Invoice.objects.get(pk=pk)
if self.invoice_obj.company != request.company:
return Response(
{"error": True, "errors": "User company doesnot match with header...."}
)
comment_serializer = CommentSerializer(data=params)
if self.request.user.role != "ADMIN" and not self.request.user.is_superuser:
if not (
(self.request.user == self.invoice_obj.created_by)
or (self.request.user in self.invoice_obj.assigned_to.all())
):
return Response(
{
"error": True,
"errors": "You don't have Permission to perform this action",
},
status=status.HTTP_401_UNAUTHORIZED,
)
if comment_serializer.is_valid():
if params.get("comment"):
comment_serializer.save(
invoice_id=self.invoice_obj.id,
commented_by_id=self.request.user.id,
)
if self.request.FILES.get("invoice_attachment"):
attachment = Attachments()
attachment.created_by = self.request.user
attachment.file_name = self.request.FILES.get("invoice_attachment").name
attachment.invoice = self.invoice_obj
attachment.attachment = self.request.FILES.get("invoice_attachment")
attachment.save()
comments = Comment.objects.filter(invoice=self.invoice_obj).order_by("-id")
attachments = Attachments.objects.filter(invoice=self.invoice_obj).order_by(
"-id"
)
context.update(
{
"invoice_obj": InvoiceSerailizer(self.invoice_obj).data,
"attachments": AttachmentsSerializer(attachments, many=True).data,
"comments": CommentSerializer(comments, many=True).data,
}
)
return Response(context)
class InvoiceCommentView(APIView):
model = Comment
authentication_classes = (JSONWebTokenAuthentication,)
permission_classes = (IsAuthenticated,)
def get_object(self, pk):
return self.model.objects.get(pk=pk)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_comment_edit_params
)
def put(self, request, pk, format=None):
params = request.query_params if len(request.data) == 0 else request.data
obj = self.get_object(pk)
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user == obj.commented_by
):
            serializer = CommentSerializer(obj, data=params)
            # Always run validation so serializer.errors is populated for the
            # failure response below (accessing .errors before .is_valid() raises).
            if serializer.is_valid() and params.get("comment"):
                serializer.save()
                return Response(
                    {"error": False, "message": "Comment Submitted"},
                    status=status.HTTP_200_OK,
                )
return Response(
{"error": True, "errors": serializer.errors},
status=status.HTTP_400_BAD_REQUEST,
)
else:
return Response(
{
"error": True,
"errors": "You don't have permission to perform this action.",
}
)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_delete_params
)
def delete(self, request, pk, format=None):
self.object = self.get_object(pk)
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user == self.object.commented_by
):
self.object.delete()
return Response(
{"error": False, "message": "Comment Deleted Successfully"},
status=status.HTTP_200_OK,
)
else:
return Response(
{
"error": True,
"errors": "You don't have permission to perform this action",
}
)
class InvoiceAttachmentView(APIView):
model = Attachments
authentication_classes = (JSONWebTokenAuthentication,)
permission_classes = (IsAuthenticated,)
@swagger_auto_schema(
tags=["Invoices"], manual_parameters=swagger_params.invoice_delete_params
)
def delete(self, request, pk, format=None):
self.object = self.model.objects.get(pk=pk)
if (
request.user.role == "ADMIN"
or request.user.is_superuser
or request.user == self.object.created_by
):
self.object.delete()
return Response(
{"error": False, "message": "Attachment Deleted Successfully"},
status=status.HTTP_200_OK,
)
else:
return Response(
{
"error": True,
"errors": "You don't have permission to perform this action.",
}
)
```
#### File: Django-CRM/tasks/celery_tasks.py
```python
from celery import Celery
from django.conf import settings
from django.core.mail import EmailMessage
from django.shortcuts import reverse
from django.template.loader import render_to_string
from accounts.models import Account, Email
from common.models import User
from contacts.models import Contact
from tasks.models import Task
app = Celery("redis://")
@app.task
def send_email(task_id, recipients, domain="demo.django-crm.io", protocol="http"):
task = Task.objects.filter(id=task_id).first()
created_by = task.created_by
for user in recipients:
recipients_list = []
user = User.objects.filter(id=user, is_active=True).first()
if user:
recipients_list.append(user.email)
subject = " Assigned a task for you ."
context = {}
context["task_title"] = task.title
context["task_id"] = task.id
context["task_created_by"] = task.created_by
context["url"] = protocol + "://" + domain
context["user"] = user
html_content = render_to_string(
"tasks_email_template.html", context=context
)
msg = EmailMessage(subject=subject, body=html_content, to=recipients_list)
msg.content_subtype = "html"
msg.send()
# if task:
# subject = ' Assigned a task for you .'
# context = {}
# context['task_title'] = task.title
# context['task_id'] = task.id
# context['task_created_by'] = task.created_by
# context["url"] = protocol + '://' + domain + \
# reverse('tasks:task_detail', args=(task.id,))
# recipients = task.assigned_to.filter(is_active=True)
# if recipients.count() > 0:
# for recipient in recipients:
# context['user'] = recipient.email
# html_content = render_to_string(
# 'tasks_email_template.html', context=context)
# msg = EmailMessage(
# subject=subject, body=html_content, to=[recipient.email, ])
# msg.content_subtype = "html"
# msg.send()
``` |
{
"source": "jordanmagally/cadCAD",
"score": 2
} |
#### File: testing/tests/external_dataset.py
```python
import unittest
import pandas as pd
from cadCAD import configs
from cadCAD.engine import ExecutionMode, ExecutionContext, Executor
from testing.generic_test import make_generic_test
exec_mode = ExecutionMode()
first_config = configs
single_proc_ctx = ExecutionContext(context=exec_mode.local_mode)
run = Executor(exec_context=single_proc_ctx, configs=first_config)
raw_result, tensor_field, sessions = run.execute()
result = pd.DataFrame(raw_result)
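# The expected-result keys below appear to be (simulation, run, timestep,
# substep) tuples, matching cadCAD's result indexing.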
def get_expected_results(run):
return {
(0, run, 0, 0): {
'external_data': {'ds1': None, 'ds2': None, 'ds3': None},
'increment': 0,
'policies': {'ds1': None, 'ds2': None, 'ds3': None}
},
(0, run, 1, 1): {
'external_data': {'ds1': 0, 'ds2': 0, 'ds3': 1},
'increment': 1,
'policies': {'ds1': 0, 'ds2': 0, 'ds3': 1}
},
(0, run, 1, 2): {
'external_data': {'ds1': 1, 'ds2': 40, 'ds3': 5},
'increment': 2,
'policies': {'ds1': 1, 'ds2': 40, 'ds3': 5}
},
(0, run, 1, 3): {
'external_data': {'ds1': 2, 'ds2': 40, 'ds3': 5},
'increment': 3,
'policies': {'ds1': 2, 'ds2': 40, 'ds3': 5}
},
(0, run, 2, 1): {
'external_data': {'ds1': 3, 'ds2': 40, 'ds3': 5},
'increment': 4,
'policies': {'ds1': 3, 'ds2': 40, 'ds3': 5}
},
(0, run, 2, 2): {
'external_data': {'ds1': 4, 'ds2': 40, 'ds3': 5},
'increment': 5,
'policies': {'ds1': 4, 'ds2': 40, 'ds3': 5}
},
(0, run, 2, 3): {
'external_data': {'ds1': 5, 'ds2': 40, 'ds3': 5},
'increment': 6,
'policies': {'ds1': 5, 'ds2': 40, 'ds3': 5}
},
(0, run, 3, 1): {
'external_data': {'ds1': 6, 'ds2': 40, 'ds3': 5},
'increment': 7,
'policies': {'ds1': 6, 'ds2': 40, 'ds3': 5}
},
(0, run, 3, 2): {
'external_data': {'ds1': 7, 'ds2': 40, 'ds3': 5},
'increment': 8,
'policies': {'ds1': 7, 'ds2': 40, 'ds3': 5}
},
(0, run, 3, 3): {
'external_data': {'ds1': 8, 'ds2': 40, 'ds3': 5},
'increment': 9,
'policies': {'ds1': 8, 'ds2': 40, 'ds3': 5}
},
(0, run, 4, 1): {
'external_data': {'ds1': 9, 'ds2': 40, 'ds3': 5},
'increment': 10,
'policies': {'ds1': 9, 'ds2': 40, 'ds3': 5}
},
(0, run, 4, 2): {
'external_data': {'ds1': 10, 'ds2': 40, 'ds3': 5},
'increment': 11,
'policies': {'ds1': 10, 'ds2': 40, 'ds3': 5}
},
(0, run, 4, 3): {
'external_data': {'ds1': 11, 'ds2': 40, 'ds3': 5},
'increment': 12,
'policies': {'ds1': 11, 'ds2': 40, 'ds3': 5}
}
}
expected_results = {}
expected_results_1 = get_expected_results(1)
expected_results_2 = get_expected_results(2)
expected_results.update(expected_results_1)
expected_results.update(expected_results_2)
def row(a, b):
return a == b
params = [["external_dataset", result, expected_results, ['increment', 'external_data', 'policies'], [row]]]
class GenericTest(make_generic_test(params)):
pass
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JordanMakesMaps/CoralNet-API",
"score": 3
} |
#### File: JordanMakesMaps/CoralNet-API/coralnet_api_utils.py
```python
import json
import numpy as np
import pandas as pd
import requests  # used by check_status below
def get_points(height, width, h_offset, w_offset, percentage, sampling_method):
'''
height: the height of the image (rows)
width: the width of the image (columns)
    h_offset, w_offset: margins, in pixels, on each side to avoid sampling (i.e. avoids edges of image)
percentage: the % of points to sample (1% of a 4MP image = 40,000 points)
sampling_method: either "random" or "grid"
'''
percentage = percentage * .01
num_points = int(height * width * percentage)
if(sampling_method == 'random'):
x = np.random.randint(w_offset, width - w_offset, num_points)
y = np.random.randint(h_offset, height - h_offset, num_points)
else:
density = int(np.sqrt(num_points))
# Creates an equally spaced grid, reshapes, converts into list
x_, y_ = np.meshgrid(np.linspace(w_offset, width - w_offset, density),
np.linspace(h_offset, height - h_offset, density))
xy = np.dstack([x_, y_]).reshape(-1, 2).astype(int)
x = [point[0] for point in xy]
y = [point[1] for point in xy]
# Any labels that did not fit in the grid will be sampled randomly
x += np.random.randint(w_offset, width - w_offset, num_points - len(xy)).tolist()
y += np.random.randint(h_offset, height - h_offset, num_points - len(xy)).tolist()
points = []
for _ in range(num_points):
points.append({'row': int(y[_]), 'column': int(x[_])})
return points
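# Usage sketch (hypothetical values): sample ~0.1% of a 2000x1500 image on a
# grid, keeping a 32-pixel margin on every side.
# points = get_points(height=1500, width=2000, h_offset=32, w_offset=32,
#                     percentage=0.1, sampling_method='grid')
# points[0]  # -> {'row': ..., 'column': ...}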
def decode_status(r_status):
curr_status = json.loads(r_status.content)
message = ''
if 'status' in curr_status['data'][0]['attributes'].keys():
s = curr_status['data'][0]['attributes']['successes']
f = curr_status['data'][0]['attributes']['failures']
t = curr_status['data'][0]['attributes']['total']
status = curr_status['data'][0]['attributes']['status']
ids = curr_status['data'][0]['id'].split(",")
ids = ''.join(str(_) for _ in ids)
message = 'Success: ' + str(s) + ' Failures: ' + str(f) + ' Total: ' + str(t) + ' Status: ' + str(status) + ' Ids: ' + ids
return curr_status, message
def check_status(r):
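    # Note: assumes a module-level `coralnet_token` (your CoralNet API token)
    # has been defined before this function is called.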
# Sends a request to retrieve the completed annotations, obtains status update
r_status = requests.get(url = 'https://coralnet.ucsd.edu' + r.headers['Location'],
headers = {"Authorization": f"Token {coralnet_token}"})
# Extracts the content from the status update
curr_status, message = decode_status(r_status)
return curr_status, message
def convert_to_csv(export):
all_preds = pd.DataFrame()
image_file = export['data'][0]['id'].split("/")[-1].split("?")[0]
for point in export['data'][0]['attributes']['points']:
per_point = dict()
per_point['image'] = image_file
per_point['X'] = point['column']
per_point['Y'] = point['row']
for index, classification in enumerate(point['classifications']):
per_point['score_' + str(index + 1)] = classification['score']
per_point['label_id_' + str(index + 1)] = classification['label_id']
per_point['label_code_' + str(index + 1)] = classification['label_code']
per_point['label_name_' + str(index + 1)] = classification['label_name']
all_preds = pd.concat([all_preds, pd.DataFrame.from_dict([per_point])])
return all_preds
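# Usage sketch: once a deploy job finishes, decode its JSON export and flatten
# it to one row per point (hypothetical `r_export` response object):
# export = json.loads(r_export.content)
# df = convert_to_csv(export)
# df.to_csv('predictions.csv', index=False)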
``` |
{
"source": "jordanmaulana/Face-Distance-Prediction",
"score": 3
} |
#### File: jordanmaulana/Face-Distance-Prediction/FaceDetect.py
```python
import cv2
cap = cv2.VideoCapture(0)
ret = cap.set(cv2.CAP_PROP_FRAME_WIDTH,640)
ret = cap.set(cv2.CAP_PROP_FRAME_HEIGHT,480)
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
f = open('dataset.csv','a')
actual_distance = 30 #cm
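# Data collection: each detected face logs one "box_height,distance" row to
# dataset.csv, so holding a face at the known distance above (30 cm here)
# builds training pairs for fitting a height-to-distance model later.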
def drawBoxAndWriteText(findfaces):
for (x, y, w, h) in findfaces:
line = "%d,"% (h) +str(actual_distance)+"\n"
f.write(line)
cv2.rectangle(color, (x, y), (x+w, y+h), (0, 255, 0), 2)
cv2.putText(color, str(h), (x, y+h),
cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 255))
# Main Program
while(True):
# Capture frame-by-frame
ret, color = cap.read()
# Detect face
gray = cv2.cvtColor(color, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30,30)
)
# Draw box and write text
drawBoxAndWriteText(faces)
# Show the resulting frame
cv2.imshow('color', color)
# check if key is pressed
if cv2.waitKey(1) & 0xFF == ord('q'):
f.close()
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
``` |
{
"source": "jordan-mazur/georinex",
"score": 3
} |
#### File: src/georinex/build.py
```python
import subprocess
import shutil
from pathlib import Path
R = Path(__file__).parent / "rnxcmp"
def build(cc: str = None, src: Path = R / "source/crx2rnx.c") -> int:
if cc:
return do_compile(cc, src)
compilers = ["cc", "gcc", "clang", "icc", "icl", "cl", "clang-cl"]
ret = 1
for cc in compilers:
if shutil.which(cc):
ret = do_compile(cc, src)
if ret == 0:
break
return ret
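# build() walks the compiler list in order and returns the first successful
# exit code, e.g. build() -> 0 once any available compiler builds crx2rnx.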
def do_compile(cc: str, src: Path) -> int:
if not src.is_file():
raise FileNotFoundError(src)
if cc.endswith("cl"): # msvc-like
cmd = [cc, str(src), f"/Fe:{R}"]
else:
cmd = [cc, str(src), "-O2", f"-o{R / 'crx2rnx'}"]
print(" ".join(cmd))
ret = subprocess.run(cmd).returncode
return ret
```
#### File: src/georinex/plots.py
```python
import xarray
from matplotlib.pyplot import figure
import logging
try:
    from .plots_geo import navtimeseries
except ImportError as e:
    logging.info(e)
    navtimeseries = None  # avoid NameError in the checks below
def timeseries(data: xarray.Dataset):
    # Handle (obs, nav) tuples first; the Dataset guard below would otherwise
    # reject them before this branch could run.
    if isinstance(data, tuple):
        obs, nav = data
        obstimeseries(obs)
        if navtimeseries is not None:
            navtimeseries(nav)
        return
    if not isinstance(data, xarray.Dataset):
        return
    if data.rinextype == "obs":
        obstimeseries(data)
    elif data.rinextype == "nav":
        if navtimeseries is not None:
            navtimeseries(data)
def obstimeseries(obs: xarray.Dataset):
if not isinstance(obs, xarray.Dataset):
return
for p in ("L1", "L1C"):
if p not in obs:
continue
dat = obs[p].dropna(how="all", dim="time")
time = dat.time.values
if time.size == 0:
continue
ax = figure().gca()
ax.plot(time, dat)
ax.set_title(obs.filename)
ax.set_xlabel("time [UTC]")
ax.set_ylabel(p)
ax.grid(True)
ax.legend(dat.sv.values.astype(str), loc="best")
```
#### File: georinex/tests/test_conv.py
```python
import pytest
import xarray
from datetime import datetime
from pytest import approx
from pathlib import Path
import georinex as gr
R = Path(__file__).parent / "data"
@pytest.mark.parametrize(
"time, exp_time",
[
(None, None),
(datetime(2019, 1, 1), datetime(2019, 1, 1)),
(xarray.DataArray(datetime(2019, 1, 1)), datetime(2019, 1, 1)),
],
)
def test_to_datetime(time, exp_time):
assert gr.to_datetime(time) == exp_time
def test_bad_files(tmp_path):
emptyfn = tmp_path / "nonexistingfilename"
emptyfn.touch()
emptyfnrinex = tmp_path / "nonexistingfilename.18o"
emptyfnrinex.touch()
emptyfnNC = tmp_path / "nonexistingfilename.nc"
emptyfnNC.touch()
nonexist = tmp_path / "nonexist" # don't touch
with pytest.raises(ValueError):
gr.load(emptyfn)
with pytest.raises(ValueError):
gr.load(emptyfnrinex)
with pytest.raises(FileNotFoundError):
gr.load(nonexist)
with pytest.raises(ValueError):
gr.load(emptyfnNC)
def test_netcdf_read():
pytest.importorskip("netCDF4")
dat = gr.load(R / "r2all.nc")
assert isinstance(dat, dict), f"{type(dat)}"
assert isinstance(dat["obs"], xarray.Dataset)
def test_netcdf_write(tmp_path):
"""
NetCDF4 wants suffix .nc -- arbitrary tempfile.NamedTemporaryFile names do NOT work!
"""
pytest.importorskip("netCDF4")
fn = tmp_path / "rw.nc"
obs = gr.load(R / "demo.10o", out=fn)
wobs = gr.load(fn)
assert obs.equals(wobs)
def test_locs():
pytest.importorskip("pymap3d") # need to have this
gg = pytest.importorskip("georinex.geo")
pat = ["*o", "*O.rnx", "*O.rnx.gz", "*O.crx", "*O.crx.gz"]
flist = gr.globber(R, pat)
locs = gg.get_locations(flist)
assert locs.loc["demo.10o"].values == approx([41.3887, 2.112, 30])
@pytest.mark.parametrize("dtype", ["OBS", "NAV"])
def test_nc_load(dtype):
pytest.importorskip("netCDF4")
truth = xarray.open_dataset(R / "r2all.nc", group=dtype)
obs = gr.load(R / f"demo.10{dtype[0].lower()}")
assert obs.equals(truth)
```
#### File: georinex/tests/test_kepler.py
```python
import pytest
import xarray
from pytest import approx
import georinex as gr
from datetime import datetime, timedelta
import numpy as np
@pytest.fixture()
def kep_data():
TGPS0 = datetime(1980, 1, 6)
sv = {
"GPSWeek": 910,
"Toe": 410400,
"Eccentricity": 4.27323824e-3,
"sqrtA": 5.15353571e3,
"Cic": 9.8720193e-8,
"Crc": 282.28125,
"Cis": -3.9115548e-8,
"Crs": -132.71875,
"Cuc": -6.60121440e-6,
"Cus": 5.31412661e-6,
"DeltaN": 4.3123e-9,
"Omega0": 2.29116688,
"omega": -0.88396725,
"Io": 0.97477102,
"OmegaDot": -8.025691e-9,
"IDOT": -4.23946e-10,
"M0": 2.24295542,
}
xref = -5.67841101e6
yref = -2.49239629e7
zref = 7.05651887e6
time = TGPS0 + timedelta(weeks=910, seconds=4.03272930e5)
dat = xarray.Dataset(
sv,
attrs={"svtype": "G", "xref": xref, "yref": yref, "zref": zref},
coords={"time": [time]},
)
return dat
def test_kepler(kep_data):
x, y, z = gr.keplerian2ecef(kep_data)
xref = kep_data.attrs["xref"]
yref = kep_data.attrs["yref"]
zref = kep_data.attrs["zref"]
assert x == approx(xref, rel=1e-4)
assert y == approx(yref, rel=1e-4)
assert z == approx(zref, rel=1e-4)
magerr = np.sqrt((x - xref) ** 2 + (y - yref) ** 2 + (z - zref) ** 2)
print("error magnitude [meters]", magerr)
```
#### File: georinex/tests/test_nav2.py
```python
import pytest
from pytest import approx
import xarray
from pathlib import Path
from datetime import datetime
import numpy as np
import georinex as gr
#
R = Path(__file__).parent / "data"
def test_time():
times = gr.gettime(R / "ab422100.18n")
assert times[0] == datetime(2018, 7, 29, 1, 59, 44)
assert times[-1] == datetime(2018, 7, 30)
def test_data():
nav = gr.load(R / "ab422100.18n")
nav0 = nav.sel(time="2018-07-29T03:59:44").dropna(dim="sv", how="all")
assert nav0.sv.values.tolist() == (["G18", "G20", "G24", "G27"])
G20 = nav0.sel(sv="G20")
assert G20.to_array().values == approx(
[
5.1321554929e-4,
6.821210263e-13,
0.0,
11,
-74.125,
4.944134514e-09,
0.736990015,
-3.810971975327e-06,
4.055858473293e-03,
1.130439341068e-5,
5.153679727554e3,
14384,
-2.980232238770e-8,
-2.942741,
-5.587935447693e-8,
9.291603197140e-01,
144.8125,
2.063514928857,
-8.198555788471e-09,
2.935836575092e-10,
1,
2012,
0.0,
2.0,
0.0,
-8.381903171539e-09,
11,
9456,
4,
]
)
def test_mangled():
fn = R / "14601736.18n"
nav = gr.load(fn)
times = gr.to_datetime(nav.time)
assert times == datetime(2018, 6, 22, 8)
def test_mangled2():
fn = R / "brdc2420.18n.gz"
nav = gr.load(fn)
G10 = nav.sel(sv="G10").dropna(dim="time", how="all")
assert G10["Crc"].values == approx([221.59375, 225.7421875])
assert np.isnan(G10["FitIntvl"][0])
assert G10["FitIntvl"][1] == approx(4)
times = gr.to_datetime(nav.time)
assert (
times
== [
datetime(2018, 8, 29, 22, 0),
datetime(2018, 8, 29, 23, 0),
datetime(2018, 8, 29, 23, 30),
datetime(2018, 8, 29, 23, 59, 12),
datetime(2018, 8, 29, 23, 59, 44),
]
).all()
def test_tlim_past_eof():
nav = gr.load(R / "p1462100.18g", tlim=("2018-07-29T23:45", "2018-07-30"))
times = gr.to_datetime(nav.time)
assert times == datetime(2018, 7, 29, 23, 45)
def test_galileo():
nav = gr.load(R / "ceda2100.18e")
E18 = nav.sel(sv="E18").dropna(dim="time", how="all")
assert gr.to_datetime(E18.time) == datetime(2018, 7, 29, 12, 40)
assert E18.to_array().values.squeeze() == approx(
[
6.023218797054e-3,
-2.854960712284e-11,
0.0,
76,
79.53125,
3.006910964197e-09,
-1.308337580849,
6.468966603279e-06,
1.659004657995e-01,
3.594905138016e-07,
5.289377120972e3,
45600,
5.243346095085e-06,
1.437602366755,
4.358589649200e-06,
8.809314114035e-01,
3.329375000000e2,
1.349316878658,
-4.092313318419e-09,
-6.553844422498e-10,
517,
2.012000000000e3,
3.12,
455,
1.396983861923e-08,
1.536682248116e-08,
47064,
]
)
def test_gps():
nav = gr.load(R / "brdc2800.15n")
times = gr.to_datetime(nav.time)
assert times[1] == datetime(2015, 10, 7, 1, 59, 28)
nav1 = nav.sel(time="2015-10-07T01:59:28").dropna(dim="sv", how="all")
assert nav1.to_array().values.squeeze() == approx(
[
-0.488562509417e-04,
-0.534328137292e-11,
0.0,
4.0,
51.125,
0.408659879467e-08,
-0.818212975386,
0.254809856415e-05,
0.463423598558e-02,
0.755488872528e-05,
0.515362124443e04,
266368,
0.800937414169e-07,
-0.124876382768,
0.819563865662e-07,
0.978795513619,
244.0,
0.737626996302,
-0.794890253227e-08,
0.621454457501e-10,
1.0,
1865,
0.0,
2.0,
0.0,
0.558793544769e-08,
4.0,
265170,
4.0,
]
)
def test_small():
pytest.importorskip("netCDF4")
truth = xarray.open_dataset(R / "r2all.nc", group="NAV")
nav = gr.load(R / "demo.10n")
assert nav.equals(truth)
def test_ionospheric_correction():
nav = gr.load(R / "14601736.18n")
assert nav.attrs["ionospheric_corr_GPS"] == approx(
[
0.4657e-08,
0.1490e-07,
-0.5960e-07,
-0.1192e-06,
0.8192e05,
0.9830e05,
-0.6554e05,
-0.5243e06,
]
)
```
#### File: georinex/tests/test_stringio.py
```python
import pytest
from pytest import approx
from pathlib import Path
import io
from datetime import datetime
import georinex as gr
R = Path(__file__).parent / "data"
@pytest.mark.parametrize(
"rinex_version, t", [(2, datetime(1999, 9, 2, 19)), (3, datetime(2010, 10, 18, 0, 1, 4))]
)
def test_nav3(rinex_version, t):
fn = R / f"minimal{rinex_version}.10n"
txt = fn.read_text()
with io.StringIO(txt) as f:
info = gr.rinexinfo(f)
assert info["rinextype"] == "nav"
times = gr.gettime(f)
nav = gr.load(f)
assert times == t
assert nav.equals(gr.load(fn)), "StringIO not matching direct file read"
@pytest.mark.parametrize("rinex_version", [2, 3])
def test_obs(rinex_version):
fn = R / f"minimal{rinex_version}.10o"
txt = fn.read_text()
with io.StringIO(txt) as f:
info = gr.rinexinfo(f)
assert info["rinextype"] == "obs"
times = gr.gettime(f)
obs = gr.load(f)
assert times == datetime(2010, 3, 5, 0, 0, 30)
assert obs.equals(gr.load(fn)), "StringIO not matching direct file read"
def test_locs():
gg = pytest.importorskip("georinex.geo")
txt = (R / "demo.10o").read_text()
with io.StringIO(txt) as f:
locs = gg.get_locations(f)
if locs.size == 0:
pytest.skip("no locs found")
assert locs.iloc[0].values == approx([41.3887, 2.112, 30])
``` |
{
"source": "jordanmcdougall/lsm303d-python",
"score": 2
} |
#### File: library/tests/test_setup.py
```python
import sys
import mock
import pytest
def test_setup_not_present():
sys.modules['smbus'] = mock.MagicMock()
from lsm303d import LSM303D
lsm303d = LSM303D()
with pytest.raises(RuntimeError):
lsm303d.setup()
def test_setup_mock_present():
from tools import SMBusFakeDevice
smbus = mock.Mock()
smbus.SMBus = SMBusFakeDevice
sys.modules['smbus'] = smbus
from lsm303d import LSM303D
lsm303d = LSM303D()
lsm303d.setup()
``` |
{
"source": "JordanMcGrathhhh/Vines",
"score": 2
} |
#### File: JordanMcGrathhhh/Vines/app.py
```python
from flask import Flask, render_template, redirect, url_for, request, flash, session
from flask_login import LoginManager, UserMixin, login_user, login_required
from werkzeug.security import generate_password_hash, check_password_hash
from flask_sqlalchemy import SQLAlchemy
import forms
import organizationCodes
# Setup
app = Flask(__name__)
login = LoginManager(app)
db = SQLAlchemy(app)
# Config
app.config['SECRET_KEY'] = '<KEY>'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
login.login_view = 'login'
# Databases
class accounts(UserMixin, db.Model):
id = db.Column('account_id', db.Integer, primary_key=True)
firstName = db.Column(db.String(32))
lastName = db.Column(db.String(32))
username = db.Column(db.String(16))
password = db.Column(db.String(32))
org_code = db.Column(db.String(8))
def __init__(self, firstName, lastName, username, password, org_code):
self.firstName = firstName
self.lastName = lastName
self.username = username
self.password = password
self.org_code = org_code
class organizations(db.Model):
id = db.Column('org_id', db.Integer, primary_key=True)
name = db.Column(db.String(32))
code = db.Column(db.String(8))
email = db.Column(db.String(128))
def __init__(self, name, code, email):
self.name = name
self.code = code
self.email = email
db.create_all()
@login.user_loader
def load_account(uuid):
return accounts.query.get(int(uuid))
# Routes
@app.route('/')
def index():
return redirect(url_for('login'))
@app.route('/signup', methods=['GET', 'POST'])
def signup():
form = forms.signupForm()
if form.validate_on_submit():
account = accounts(request.form['firstName'],
request.form['lastName'],
request.form['username'],
generate_password_hash(request.form['password']),
request.form['org_code'])
db.session.add(account)
db.session.commit()
return redirect(url_for('login'))
return render_template('signup.html', form=form)
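# generate_password_hash() stores a salted hash rather than the raw password;
# the check_password_hash() call in the login route below verifies against it.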
@app.route('/login', methods=['GET', 'POST'])
def login():
form = forms.loginForm()
if form.validate_on_submit():
account = accounts.query.filter_by(username=request.form['username']).first()
if account and check_password_hash(account.password, form.password.data):
print("Sign-in Attempted: Access Granted")
session['username'] = account.username
login_user(account)
return redirect(url_for('dashboard'))
else:
print("Sign-in Attempted: Access Denied")
flash("Incorrect Login Parameters.")
return render_template('login.html', form=form)
@app.route('/register', methods=['GET', 'POST'])
def register():
form = forms.registerForm()
# Generate the Random Organization Code for the user to accept when POSTed
if form.validate_on_submit():
code = organizationCodes.generateCode()
print(code)
organization = organizations(request.form['name'],
code,
request.form['email'])
db.session.add(organization)
db.session.commit()
return redirect(url_for('signup'))
return render_template('register.html', form=form)
@app.route('/dashboard', methods=['GET', 'POST'])
@login_required
def dashboard():
username = (session['username'])
return render_template('dashboard.html', username=username)
``` |
{
"source": "JordanMcManus/OpenSidescan",
"score": 3
} |
#### File: OpenSidescan/ScriptsPython/ComputeDistanceCarisVsOpenSidescan.py
```python
import pprint
from math import pi, cos
from Haversine import computeDistanceGreatCircleHaversine
import csv
class LongitudeLatitude :
def __init__( self, longitude, latitude ) :
self.longitude = longitude
self.latitude = latitude
# End of def
# End of class
fileCaris = '../test/data/wrecks/scotsman5info/ScotsmanLongitudeLatitudeCaris.csv'
tableCaris = []
dictionaryCaris = {}
with open( fileCaris, 'r' ) as fp :
fileReader = csv.reader( fp , delimiter = ',')
for line in fileReader :
# Don't put the first line of the file in the list and dictionary
if line[ 0 ] != 'name' :
tableCaris.append( [ line[ 0 ], float( line[ 1 ] ), float( line[ 2 ] ) ] )
dictionaryCaris[ line[ 0 ] ] = LongitudeLatitude( float( line[ 1 ] ), float( line[ 2 ] ) )
# End of if
# End of for
# End of with
pp = pprint.PrettyPrinter(indent=4)
# pp.pprint( tableCaris )
# pp.pprint( dictionaryCaris )
fileOpenSidescan = '../test/data/wrecks/scotsman5info/ScotsmanLongitudeLatitudeOpenSidescan2020-03-18.csv'
tableOpenSidescan = []
dictionaryOpenSidescan = {}
with open( fileOpenSidescan, 'r' ) as fp :
fileReader = csv.reader( fp , delimiter = ',')
for line in fileReader :
if line[ 0 ] != 'name' :
tableOpenSidescan.append( [ line[ 0 ], float( line[ 2 ] ), float( line[ 3 ] ) ] )
dictionaryOpenSidescan[ line[ 0 ] ] = LongitudeLatitude( float( line[ 2 ] ), float( line[ 3 ] ) )
# End of if
# End of for
# End of with
# pp.pprint( tableOpenSidescan )
# pp.pprint( dictionaryOpenSidescan )
# Test what happens when a key is not in the dictionary
# print( dictionaryCaris.get( 'JUNK' ).longitude )
allValues = []
greatCircleDistance = []
distanceDifferenceInLatitude = []
distanceDifferenceInLongitude = []
r = 6371000 # Earth radius
for lineCaris in tableCaris :
# Find this object name in dictionaryOpenSidescan
longitudeLatitudeOpenSidescan = dictionaryOpenSidescan.get( lineCaris[ 0 ] )
if longitudeLatitudeOpenSidescan != None :
# print( lineCaris[ 0 ], "\n",
# "Caris: ", "longitude: ", lineCaris[ 1 ],
# ", latitude: ", lineCaris[ 2 ], "\n"
# "OpenSidescan: ", "longitude: ", longitudeLatitudeOpenSidescan.longitude,
# ", latitude: ", longitudeLatitudeOpenSidescan.latitude, sep='')
allValues.append( [ lineCaris[ 0 ], lineCaris[ 1 ], lineCaris[ 2 ],
longitudeLatitudeOpenSidescan.longitude, longitudeLatitudeOpenSidescan.latitude ] )
greatCircleDistance.append( [ lineCaris[ 0 ],
computeDistanceGreatCircleHaversine(
lineCaris[ 1 ], lineCaris[ 2 ],
longitudeLatitudeOpenSidescan.longitude, longitudeLatitudeOpenSidescan.latitude ) ] )
distanceDifferenceInLatitude.append( [ lineCaris[ 0 ],
2 * pi * r / 360 * abs( lineCaris[ 2 ] -longitudeLatitudeOpenSidescan.latitude ) ] )
distanceDifferenceInLongitude.append( [ lineCaris[ 0 ],
2 * pi * r / 360 * cos( lineCaris[ 2 ]* pi / 180 )
* abs( lineCaris[ 1 ] -longitudeLatitudeOpenSidescan.longitude ) ] )
# End of if
# End of for line in tableCaris
print( "\n\nObject name, Caris longitude-latitude, OpenSidescan longitude-latitude")
pp.pprint( allValues )
print( "\n\n-----> ASSUMING earth is a sphere of radius = ", r/1000, " km", sep='')
print( "\nDistance on a great circle between points in Caris vs. OpenSidescan, in meters:")
pp.pprint( greatCircleDistance )
print( "\nDistance for the latitude difference, in meters:")
pp.pprint( distanceDifferenceInLatitude )
print( "\nDistance for the longitude difference (at the latitude in Caris), in meters:")
pp.pprint( distanceDifferenceInLongitude )
# Scotsman
# http://patrimoine-culturel.gouv.qc.ca/rpcq/detail.do?methode=consulter&id=210726&type=bien#.XnJUhqYpAUE
# "D'une longueur de près de 25 mètres et d'une largeur de 7 mètres,"
longitudeLatitudeCarisW = dictionaryCaris.get( 'W' )
longitudeLatitudeCarisY = dictionaryCaris.get( 'Y' )
print( "\nDistance on a great circle between W and Y points, Caris: ",
computeDistanceGreatCircleHaversine(
longitudeLatitudeCarisW.longitude, longitudeLatitudeCarisW.latitude,
longitudeLatitudeCarisY.longitude, longitudeLatitudeCarisY.latitude ) , sep='' )
longitudeLatitudeOpenSideScanW = dictionaryOpenSidescan.get( 'W' )
longitudeLatitudeOpenSideScanY = dictionaryOpenSidescan.get( 'Y' )
print( "\nDistance on a great circle between W and Y points, OpenSidescan: ",
computeDistanceGreatCircleHaversine(
longitudeLatitudeOpenSideScanW.longitude, longitudeLatitudeOpenSideScanW.latitude,
longitudeLatitudeOpenSideScanY.longitude, longitudeLatitudeOpenSideScanY.latitude ) , sep='' )
# # Tests of computation of the distance for a degree of latitude
# print( "\nDistance for one degree of latitude, assuming earth is a sphere of radius = ", r/1000, " km:\n",
# 2 * pi * r / 360, " meters", sep='' )
# # Tests of computation of the distance for a degree of longitude
# # To take into account longitude lines getting closer as going toward the poles
# # Based on area element when computing surface integral of a sphere,
# # http://math.mit.edu/~jorloff/suppnotes/suppnotes02/v9.pdf
# # To compare values:
# # https://www.thoughtco.com/degree-of-latitude-and-longitude-distance-4070616
# # 111.321 kilometers at equator
# # 85 kilometers at 40 degrees north or south
# lat = 0
# print( "\nDistance for one degree of longitude, assuming earth is a sphere of radius = ", r/1000, " km:\n",
# "at latitude of ", lat, " degrees: ",
# 2 * pi * r / 360 * cos( lat * pi / 180 ), " meters", sep='' )
# lat = 40
# print( "\nDistance for one degree of longitude, assuming earth is a sphere of radius = ", r/1000, " km:\n",
# "at latitude of ", lat, " degrees: ",
# 2 * pi * r / 360 * cos( lat * pi / 180 ), " meters", sep='' )
# lat = 90
# print( "\nDistance for one degree of longitude, assuming earth is a sphere of radius = ", r/1000, " km:\n",
# "at latitude of ", lat, " degrees: ",
# 2 * pi * r / 360 * cos( lat * pi / 180 ), " meters", sep='' )
``` |
{
"source": "jordan-melendez/dft_emulator",
"score": 3
} |
#### File: dft_emulator/dft/harmonic_oscillator.py
```python
from scipy.special import eval_genlaguerre, gammaln
import numpy as np
from .constants import hbar_c, pi
def compute_omega(mass, b):
    R"""Returns omega, the oscillator angular frequency, in units of MeV
    Parameters
    ----------
    mass :
        The mass in MeV
    b :
        The oscillator length in fm
    """
    return hbar_c ** 2 / (mass * b ** 2)
def ho_radial_wf(r, n, ell, b):
r"""The radial wave function u_{nl} for the 3d isotropic harmonic oscillator.
These are normalized such that \int |u_nl(r)|**2 dr = 1
Parameters
----------
r :
The distance in fm
n :
The n quantum number
ell :
The angular momentum quantum number
b :
The oscillator parameter
# mass :
# Mass in MeV
# omega :
# The harmonic oscillator angular frequency in MeV
Returns
-------
u_nl
"""
# b = 1 / np.sqrt(mass * omega / hbar_c)
# N_{nl} = 2 Gamma(n) / [b * Gamma(n + l + 1/2)]
norm = np.sqrt(2 * np.exp(gammaln(n) - np.log(b) - gammaln(n + ell + 0.5)))
y = r / b
y2 = y ** 2
laguerre = eval_genlaguerre(n - 1, ell + 0.5, y2)
return norm * y ** (ell + 1) * np.exp(-y2 / 2) * laguerre
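# Sanity check (sketch): the u_nl are normalized, so a simple trapezoidal
# integral of |u_nl|**2 over r should come out ~1 on a well-resolved grid:
# r = np.linspace(1e-6, 20.0, 2000)
# u = ho_radial_wf(r, n=1, ell=0, b=1.7)
# np.trapz(u**2, r)  # ~= 1.0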
def ho_energy(n, ell, omega):
    R"""The energy of the harmonic oscillator
    Note that N = 2 (n - 1) + ell.
    Parameters
    ----------
    n
    ell
    omega
    Returns
    -------
    The energy omega * (N + 3/2), in the same units as omega
    """
    return omega * (2 * (n - 1) + ell + 3 / 2)
def ho_density(r, n_max, g, b):
rho = np.zeros(len(r))
for ell in range(n_max+1):
max_idx = maximum_wf_index(n_max=n_max, ell=ell)
for i in range(max_idx+1):
n = i + 1 # n starts at 1, not 0
u_nl = ho_radial_wf(r=r, n=n, ell=ell, b=b)
rho += (2 * ell + 1) * u_nl ** 2
rho *= g / (4 * pi * r ** 2)
return rho
def total_fermions(n_max, g):
return g * (n_max + 1) * (n_max + 2) * (n_max + 3) / 6
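# The closed form above is the cumulative degeneracy of all filled oscillator
# shells: summing g * (N + 1) * (N + 2) / 2 (one shell, see below) over
# N = 0, ..., n_max gives g * (n_max + 1) * (n_max + 2) * (n_max + 3) / 6.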
def total_fermions_one_shell(N, g):
return g * (N + 1) * (N + 2) / 2
def maximum_wf_index(n_max, ell):
return int(np.floor((n_max - ell) / 2))
```
#### File: dft_emulator/dft/quadrature.py
```python
from fastcache import lru_cache
import numpy
from numpy import arange, array, diag, ones_like, sqrt, stack, zeros
from numpy.linalg import eigh, eigvalsh
from enum import IntEnum, unique as uniqueEnum
@uniqueEnum
class QuadratureType(IntEnum):
GaussLegendre = 0
GaussLabotto = 1
SemiInfinite = 2
ExponentialGaussLegendre = 3
ExponentialGaussLabotto = 4
@classmethod
def from_suffix(cls, suffix):
suffix = str(suffix).lower()
if suffix == "l":
return cls.GaussLabotto
if suffix == "i":
return cls.SemiInfinite
if suffix == "e":
return cls.ExponentialGaussLegendre
if suffix == "f":
return cls.ExponentialGaussLabotto
return cls.GaussLegendre
@lru_cache(maxsize=1024)
def gauss_lobatto_mesh(n):
if n < 2:
raise ValueError("n must be > 1")
if n == 2:
xi = array((-1.0, +1.0))
wi = array((+1.0, +1.0))
return stack((xi, wi))
xi = zeros(n)
wi = zeros(n)
i = arange(1, n - 2)
b = sqrt(
(i * (2.0 + i)) / (3.0 + 4.0 * i * (2.0 + i))
) # coeff for Jacobi Poly with a=b=1
M = diag(b, -1) + diag(b, 1)
xi[1 : n - 1] = eigvalsh(M)
xi[0] = -1.0
xi[-1] = 1.0
Pim2 = ones_like(xi) # P_{i-2}
Pim1 = xi # P_{i-1}
for j in range(2, n): # want P_{n-1}
wi = (1.0 / j) * ((2 * j - 1) * xi * Pim1 - (j - 1) * Pim2)
Pim2 = Pim1
Pim1 = wi
wi = 2.0 / (n * (n - 1) * wi ** 2)
wi[0] = wi[-1] = 2.0 / (n * (n - 1))
return stack((xi, wi))
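# Example: an n-point Gauss-Lobatto rule on [-1, 1] is exact for polynomials
# of degree up to 2n - 3, so with n = 4:
# x, w = gauss_lobatto_mesh(4)
# (w * x**2).sum()  # ~= 2/3, the exact integral of x**2 over [-1, 1]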
@lru_cache(maxsize=1024)
def gauss_legendre_mesh(n):
if n < 2:
raise ValueError("n must be > 1")
if n == 2:
xi = array((-0.5773502691896257, +0.5773502691896257))
wi = array((+1.0, +1.0))
return stack((xi, wi))
i = arange(1, n)
b = i / sqrt((2.0 * i - 1) * (2.0 * i + 1))
M = diag(b, -1) + diag(b, 1)
xi, Wi = eigh(M)
return stack((xi, 2 * Wi[0, :] ** 2))
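# This is the Golub-Welsch method: nodes are eigenvalues of the symmetric
# tridiagonal Jacobi matrix and weights come from the first eigenvector
# components. An n-point rule is exact up to degree 2n - 1, e.g.:
# x, w = gauss_legendre_mesh(5)
# (w * x**4).sum()  # ~= 2/5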
def _gll_mesh(nodes, num_points):
"""
Construct a compound Gauss-Legendre or Gauss-Lobatto-Legendre integration
quadrature. Adjacent Gauss-Lobatto-Legendre sub quadratures will have a
shared mesh point, therefore the total number of mesh points may be less
than sum(num_points).
:param nodes: List of nodes define each sub quadrature
:param num_points: List of number of points in each sub quadrature
:return: quadrature node, quadurature weights
"""
if nodes.size != num_points.size + 1:
raise ValueError("len(nodes) != len(num_points) + 1")
nn = len(num_points)
kind = zeros(num_points.size, numpy.int64)
for i in range(nn):
if num_points[i] < 0:
kind[i] = 1
num_points[i] = -num_points[i]
nt = num_points[0]
for i in range(1, nn):
        nt += num_points[i]
if kind[i] == 1 and kind[i - 1] == 1:
nt -= 1
xi = zeros(nt, numpy.float64)
wi = zeros(nt, numpy.float64)
o = 0
prev_k = 0
for i, (n, k) in enumerate(zip(num_points, kind)):
A = nodes[i]
B = nodes[i + 1]
if k == 1:
XW = gauss_lobatto_mesh(n)
else:
XW = gauss_legendre_mesh(n)
X = XW[0] * (B - A) / 2.0 + (A + B) / 2
W = XW[1] * (B - A) / 2.0
if k == 1 and prev_k == 1:
n -= 1
wi[o - 1] += W[0]
X = X[1:]
W = W[1:]
prev_k = k
xi[o : o + n] = X
wi[o : o + n] = W
o += n
return stack((xi, wi))
def gll_mesh(nodes, num_points):
"""
Construct a compound Gauss-Legendre or Gauss-Lobatto-Legendre integration
quadrature. Adjacent Gauss-Lobatto-Legendre sub quadratures will have a
shared mesh point, therefore the total number of mesh points may be less
than sum(num_points).
:param nodes: List of nodes define each sub quadrature
:param num_points: List of number of points in each sub quadrature
:return: quadrature node, quadurature weights
"""
from numpy import array, ndarray
import re
rec = re.compile(r"(?P<n>\d+)(?P<t>[lg]?)")
def to_int(n):
n, t = rec.match(n.lower()).groups("g")
if t == "l":
return -abs(int(n))
return abs(int(n))
if isinstance(nodes, str):
nodes = tuple(map(float, nodes.replace(",", " ").split()))
nodes = array(nodes, float).ravel()
if isinstance(num_points, str):
num_points = tuple(map(to_int, num_points.lower().split()))
num_points = array(num_points).ravel()
return _gll_mesh(nodes, num_points)
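# Example with string specs (an 'l'/'L' suffix marks a Gauss-Lobatto panel):
# x, w = gll_mesh("0 1 5", "8 16l")  # 8-pt GL on [0, 1], 16-pt GLL on [1, 5]
# w.sum()  # ~= 5.0, the total length of [0, 5]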
@lru_cache(maxsize=1024)
def Exponential_Mesh(x0, xf, npts, terrible=False):
from numpy import arange, log
if not terrible:
n, ni = gll_mesh("0 1", "{:d}L".format(npts))
else:
n = arange(npts) / (npts - 1)
ni = 1.0 / (npts - 1)
s, f = float(min(x0, xf)), float(max(x0, xf))
assert s > 0, "smaller scale must be greater than 0"
return s * (f / s) ** n, s * log(f / s) * (f / s) ** n * ni
from numpy import log  # arange and array are already imported above
# this function can be used in numba with nopython=True
def _compound_mesh(nodes, num_points, kind):
if (
nodes.size == 2
and num_points.size == 1
and kind[0] == QuadratureType.SemiInfinite
):
start = nodes[0]
scale = nodes[1]
XW = gauss_legendre_mesh(num_points[0])
xi = start + scale * (1.0 + XW[0]) / (1.0 - XW[0])
wi = 2.0 * scale * XW[1] / (1 - XW[0]) ** 2
return stack((xi, wi))
if num_points.size != kind.size:
raise ValueError("len(num_points) != len(kind)")
if nodes.size != num_points.size + 1 and nodes.size != num_points.size:
raise ValueError("len(nodes) - len(num_points) != 0 or 1 ")
nt = num_points[0]
for i in range(1, num_points.size):
N = num_points[i]
nt += N
if kind[i] == kind[i - 1] == QuadratureType.GaussLabotto:
nt -= 1
xi = zeros(nt, numpy.float64)
wi = zeros(nt, numpy.float64)
o = 0
prev_k = kind[0]
for i, (n, k) in enumerate(zip(num_points, kind)):
if k == QuadratureType.GaussLegendre or k == QuadratureType.GaussLabotto:
A = nodes[i]
B = nodes[i + 1]
if k == QuadratureType.GaussLabotto:
XW = gauss_lobatto_mesh(n)
else:
XW = gauss_legendre_mesh(n)
X = XW[0] * (B - A) / 2.0 + (A + B) / 2
W = XW[1] * (B - A) / 2.0
if k == prev_k == QuadratureType.GaussLabotto:
n -= 1
wi[o - 1] += W[0]
X = X[1:]
W = W[1:]
xi[o : o + n] = X
wi[o : o + n] = W
o += n
if k == QuadratureType.SemiInfinite:
if i != num_points.size - 1:
raise ValueError("SemiInfinite only valid for last interval")
scale = nodes[i]
XW = gauss_legendre_mesh(n)
X = scale * (1.0 + (1.0 + XW[0]) / (1.0 - XW[0]))
W = 2.0 * scale * XW[1] / (1 - XW[0]) ** 2
xi[o : o + n] = X
wi[o : o + n] = W
o += n
if (
k == QuadratureType.ExponentialGaussLegendre
or k == QuadratureType.ExponentialGaussLabotto
):
if k == QuadratureType.ExponentialGaussLabotto:
XW = gauss_lobatto_mesh(n)
else:
XW = gauss_legendre_mesh(n)
s = nodes[i]
f = nodes[i + 1]
X = s * (f / s) ** XW[0]
W = s * log(f / s) * (f / s) ** XW[0] * XW[1]
if k == prev_k == QuadratureType.GaussLabotto:
n -= 1
wi[o - 1] += W[0]
X = X[1:]
W = W[1:]
xi[o : o + n] = X
wi[o : o + n] = W
o += n
prev_k = k
return stack((xi, wi))
def compound_mesh(node_spec, points_spec):
"""
Parameters
----------
node_spec :
List of nodes define each sub quadrature
points_spec :
List of number of points in each sub quadrature
Returns
-------
quadrature node, quadurature weights
"""
from numpy import array, ndarray, zeros_like
import re
rec = re.compile(r"(?P<n>\d+)(?P<t>[glief]?)")
def to_int(n):
n, t = rec.match(n.lower()).groups()
t = t or "g"
return abs(int(n)), QuadratureType.from_suffix(t).value
if isinstance(node_spec, str):
node_spec = tuple(map(float, node_spec.replace(",", " ").split()))
node_spec = array(node_spec, float).ravel()
if isinstance(points_spec, str):
points_spec, type_spec = array(
tuple(map(to_int, points_spec.lower().split()))
).T
else:
points_spec = array(points_spec).ravel()
type_spec = zeros_like(points_spec)
if len(type_spec) == len(node_spec):
type_spec[-1] = QuadratureType.SemiInfinite
return _compound_mesh(node_spec, points_spec, type_spec)
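# Example (sketch): an 'i' suffix maps the final panel onto [node, inf) via a
# rational transform, so semi-infinite integrals work directly:
# x, w = compound_mesh("0 2", "12g 12i")  # [0, 2] Gauss-Legendre, then [2, inf)
# (w * numpy.exp(-x)).sum()  # ~= 1.0, the integral of exp(-x) over [0, inf)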
``` |
{
"source": "jordan-melendez/gp_project",
"score": 3
} |
#### File: gp_project/gp_project/kronecker.py
```python
from functools import reduce
from itertools import cycle, zip_longest
from operator import mul  # needed by MarginalKron.total_cov
import numpy as np
import scipy as sp
from scipy.stats import multivariate_normal
#################################################################
# Based on functions defined in Yunus Saatci's Thesis (Ch. 5):
# http://mlg.eng.cam.ac.uk/pub/pdf/Saa11.pdf
#################################################################
def kronecker(K):
"""Return the Kronecker product of list of arrays K:
K_1 \otimes K_2 \otimes ... \otimes K_D
Parameters
----------
K: List of array-like
[K_1, K_2, ..., K_D]
"""
return reduce(np.kron, K)
def cartesian(*arrays):
"""Makes the Cartesian product of arrays.
Parameters
----------
arrays: list of 1D array-like
1D arrays where earlier arrays loop more slowly than later ones
"""
N = len(arrays)
return np.stack(np.meshgrid(*arrays, indexing='ij'), -1).reshape(-1, N)
def flat_mtprod(tens, mat):
"""A matrix-tensor product
Z_{i_1, ..., i_D} = \sum_k M_{i_1,k} T_{k, i_2, ..., i_D}
where tens is the vectorized version of T.
Parameters
-----------
mat : 2D array-like
tens: (N,1)- or (N,)-shaped array-like
Returns
-------
Z: column vector
A (column) vectorized version of the matrix-tensor product
"""
Nm = mat.shape[1]
Tmat = tens.reshape((Nm, -1))
Z = np.dot(mat, Tmat)
return Z.T.reshape((-1, 1))
def kron_mvprod(kron_list, b):
"""Compute the matrix-vector product of kronecker(kron_list).b
Parameters
-----------
kron_list: list of 2D array-like objects
D matrices [A_1, A_2, ..., A_D] to be Kronecker'ed:
A = A_1 \otimes A_2 \otimes ... \otimes A_D
Product of column dimensions must be N
b : array-like
Nx1 column vector
"""
return reduce(flat_mtprod, kron_list, b)
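# Sanity check (sketch): the factored product matches the dense Kronecker one.
# A, B = np.random.rand(3, 3), np.random.rand(4, 4)
# v = np.random.rand(12, 1)
# np.allclose(kron_mvprod([A, B], v), kronecker([A, B]) @ v)  # -> True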
def kron_mmprod(kron_list, m):
"""Compute the matrix product of kronecker(kron_list).m
Parameters
-----------
kron_list: list of 2D array-like objects
D matrices [A_1, A_2, ..., A_D] to be Kronecker'ed:
A = A_1 \otimes A_2 \otimes ... \otimes A_D
Product of column dimensions must be N
m : array-like
NxM matrix
"""
if len(m.shape) == 1:
m = m[:, None] # Treat 1D array as Nx1 matrix
return np.concatenate([kron_mvprod(kron_list, b) for b in m.T], axis=1)
def flattened_outer(a, b):
return np.outer(a, b).ravel()
def kron_diag(diags):
"""Returns diagonal of kronecker product from list of diagonals.
"""
return reduce(flattened_outer, diags)
def flat_chol_solve(b, chol):
"""Solve A.x = b given cholesky decomposition of A
"""
N = chol.shape[1]
B = b.reshape((N, -1))
X = sp.linalg.cho_solve((chol, True), B)
return X.T.reshape((-1, 1))
def kron_chol_vsolve(chol_list, b):
"""Solve kronecker(kron_list).x = b where chol_list is the
cholesky decomposition of matrices to be kronecker'ed: kron_list
Parameters
-----------
chol_list: list of 2D array-like objects
Cholesky decompositions of D matrices [A_1, A_2, ..., A_D]
to be Kronecker'ed:
A = A_1 \otimes A_2 \otimes ... \otimes A_D
Product of column dimensions must be N
b : array-like
Nx1 column vector
"""
return reduce(flat_chol_solve, chol_list, b)
def kron_chol_msolve(chol_list, m):
"""Solve kronecker(kron_list).x = m where chol_list is the
cholesky decomposition of matrices to be kronecker'ed: kron_list
Parameters
-----------
chol_list: list of 2D array-like objects
Cholesky decompositions of D matrices [A_1, A_2, ..., A_D]
to be Kronecker'ed:
A = A_1 \otimes A_2 \otimes ... \otimes A_D
Product of column dimensions must be N
m : array-like
NxM matrix
"""
if len(m.shape) == 1:
m = m[:, None] # Treat 1D array as Nx1 matrix
return np.concatenate([kron_chol_vsolve(chol_list, b) for b in m.T], axis=1)
def flat_lower_solve(b, L):
"""Solve L.x = b given lower triangular matrix L
"""
N = L.shape[1]
B = b.reshape((N, -1))
X = sp.linalg.solve_triangular(L, B, lower=True)
return X.T.reshape((-1, 1))
def kron_lower_vsolve(lowers, b):
"""Solve kronecker(lowers).x = b where lowers is a list of lower
triangular matrices.
Parameters
-----------
lowers : list of 2D array-like objects
Lower triangular matrices
L = L_1 \otimes L_2 \otimes ... \otimes L_D
Product of column dimensions must be N
b : array-like
Nx1 column vector
"""
return reduce(flat_lower_solve, lowers, b)
def kron_lower_msolve(lowers, m):
"""Solve kronecker(lowers).x = m where lowers is a list of lower
triangular matrices.
Parameters
-----------
lowers : list of 2D array-like objects
Lower triangular matrices
L = L_1 \otimes L_2 \otimes ... \otimes L_D
Product of column dimensions must be N
m : array-like
NxM matrix
"""
if len(m.shape) == 1:
m = m[:, None] # Treat 1D array as Nx1 matrix
return np.concatenate([kron_lower_vsolve(lowers, b) for b in m.T], axis=1)
#################################################################
# Statistical classes for use in GP regression. Based on PyMC3's
# GP implementation and Yunus Saatci's Thesis mentioned above
#################################################################
def gaussian_kernel(x, xp, ell):
return np.exp(-np.subtract.outer(x, xp)**2/ell**2)
class KroneckerNormal:
"""A multivariate normal that makes use of Kronecker structure of covariance.
Parameters
----------
mu : array-like
covs : list of arrays
The set of covariance matrices to be Kroneckered
[K_1, K_2, ...]
such that K = K_1 \otimes K_2 \otimes ...
chols: list of arrays
The set of lower cholesky matrices to be Kroneckered
[chol_1, chol_2, ...]
such that K_i = chol_i * chol_i^T
EVDs : list of tuples
The set of eigenvalue-vector, eigenvector-matrix pairs, e.g.,
[(v1, Q1), (v2, Q2), ...]
such that K_i = Q_i^T * diag(v_i) * Q_i
noise: float
"""
def __init__(self, mu=0, covs=None, chols=None, EVDs=None, noise=None):
self._setup(covs, chols, EVDs, noise)
self.mu = mu
def _setup(self, covs, chols, EVDs, noise):
if len([i for i in [covs, chols, EVDs] if i is not None]) != 1:
raise ValueError('Incompatible parameterization. '
'Specify exactly one of covs, chols, '
'or EVDs.')
self.isEVD = False
if covs is not None:
self.covs = covs
if noise is not None and noise != 0:
# Noise requires eigendecomposition
self.isEVD = True
eigs_sep, self.Qs = zip(*map(np.linalg.eigh, covs)) # Unzip
self.QTs = list(map(np.transpose, self.Qs))
self.eigs = kron_diag(eigs_sep) # Combine separate eigs
self.eigs += noise
self.N = len(self.eigs)
else:
# Otherwise use cholesky
self.chols = list(map(np.linalg.cholesky, self.covs))
self.chol_diags = np.array(list(map(np.diag, self.chols)))
self.sizes = np.array([len(chol) for chol in self.chols])
self.N = np.prod(self.sizes)
elif chols is not None:
self.chols = chols
self.chol_diags = np.array(list(map(np.diag, self.chols)))
self.sizes = np.array([len(chol) for chol in self.chols])
self.N = np.prod(self.sizes)
else:
self.isEVD = True
eigs_sep, self.Qs = zip(*EVDs) # Unzip tuples
self.QTs = list(map(np.transpose, self.Qs))
self.eigs = kron_diag(eigs_sep) # Combine separate eigs
if noise is not None:
self.eigs += noise
self.N = len(self.eigs)
def random(self, size=None):
"""Drawn using x = mu + A.z for z~N(0,I) and
A = Q.sqrt(Lambda), if isEVD
A = chol, otherwise
Warning: EVD does not (yet) match with random draws from numpy
since A is only defined up to some unknown orthogonal transformation.
Numpy used svd while we must use eigendecomposition, which aren't
easily related due to sign ambiguities and permutations of eigenvalues.
"""
if size is None:
size = [self.N]
elif isinstance(size, int):
size = [size, self.N]
else:
raise NotImplementedError
z = np.random.standard_normal(size)
if self.isEVD:
sqrtLz = np.sqrt(self.eigs) * z
Az = kron_mmprod(self.Qs, sqrtLz.T).T
else:
Az = kron_mmprod(self.chols, z.T).T
return self.mu + Az
def _quaddist(self, value):
"""Computes the quadratic (x-mu)^T @ K^-1 @ (x-mu) and log(det(K))"""
delta = value - self.mu
if self.isEVD:
sqrt_quad = kron_mmprod(self.QTs, delta.T)
sqrt_quad = sqrt_quad/np.sqrt(self.eigs[:, None])
logdet = np.sum(np.log(self.eigs))
else:
sqrt_quad = kron_lower_msolve(self.chols, delta.T)
logchols = np.log(self.chol_diags) * self.N/self.sizes[:, None]
logdet = np.sum(2*logchols)
# Square each sample
quad = np.einsum('ij,ij->j', sqrt_quad, sqrt_quad)
# For theano: quad = tt.batched_dot(sqrt_quad.T, sqrt_quad.T)
return quad, logdet
def logp(self, value):
quad, logdet = self._quaddist(value)
return -1/2 * (quad + logdet + self.N*np.log(2*np.pi))
def update(self):
# How will updates to hyperparameters be performed?
raise NotImplementedError
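# Usage sketch (hypothetical grids; gaussian_kernel is defined above):
# x1, x2 = np.linspace(0, 1, 10), np.linspace(0, 2, 15)
# dist = KroneckerNormal(mu=0,
#                        covs=[gaussian_kernel(x1, x1, ell=0.5),
#                              gaussian_kernel(x2, x2, ell=0.5)],
#                        noise=1e-4)
# draws = dist.random(size=3)  # shape (3, 150)
# dist.logp(draws)             # one log-density per draw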
class MarginalKron:
"""
"""
def __init__(self, mean_func, cov_funcs):
self.mean_func = mean_func
try:
self.cov_funcs = list(cov_funcs)
except TypeError:
self.cov_funcs = [cov_funcs]
def _build_marginal_likelihood(self, Xs):
self.X = cartesian(*Xs)
mu = self.mean_func(self.X)
covs = [f(X) for f, X in zip_longest(cycle(self.cov_funcs), Xs)]
return mu, covs
def marginal_likelihood(self, Xs, y, noise, is_observed=True, **kwargs):
"""
Returns the marginal likelihood distribution, given the input
locations `X` and the data `y`.
"""
mu, covs = self._build_marginal_likelihood(Xs)
self.Xs = Xs
self.y = y
self.noise = noise
return KroneckerNormal(mu=mu, covs=covs, noise=noise)
    def total_cov(self, X, Xs=None, diag=False):
        if Xs is None:
            # Pass diag by keyword so it is not mistaken for the Xs argument
            # of the covariance functions (cf. the three-argument call below).
            covs = [f(x, diag=diag) for f, x in
                    zip_longest(cycle(self.cov_funcs), X.T)]
        else:
            covs = [f(x, xs, diag) for f, x, xs in
                    zip_longest(cycle(self.cov_funcs), X.T, Xs.T)]
        return reduce(mul, covs)
def _build_conditional(self, Xnew, pred_noise, diag, Xs, y, noise,
cov_total, mean_total):
# Old points
delta = y - self.mean_func(cartesian(*Xs))
Kns = [f(X) for f, X in zip_longest(cycle(self.cov_funcs), Xs)]
eigs_sep, Qs = zip(*map(np.linalg.eigh, Kns)) # Unzip
QTs = list(map(np.transpose, Qs))
eigs = kron_diag(eigs_sep) # Combine separate eigs
if noise is not None:
eigs += noise
        # New points
        Km = self.total_cov(Xnew, diag=diag)
        Knm = self.total_cov(cartesian(*Xs), Xnew)
        Kmn = Knm.T
        # Build conditional mu
        alpha = kron_mvprod(QTs, delta)
        alpha = alpha/eigs[:, None]
        alpha = kron_mvprod(Qs, alpha)
        mu = np.dot(Kmn, alpha) + self.mean_func(Xnew)
        # Build conditional cov
        A = kron_mmprod(QTs, Knm)
        A = A/np.sqrt(eigs[:, None])
        if diag:
            Asq = np.sum(np.square(A), 0)
            cov = Km - Asq
            if pred_noise:
                cov += noise
        else:
            Asq = np.dot(A.T, A)
            cov = Km - Asq
            if pred_noise:
                cov += noise*np.eye(cov.shape[0])
        return mu, cov
def conditional(self, name, Xnew, pred_noise=False, given=None, **kwargs):
"""
Returns the conditional distribution evaluated over new input
locations `Xnew`.
"""
        raise NotImplementedError
        # Unreachable sketch of the intended implementation (`givens` and
        # `MvNormal` are not defined in this module):
        # mu, cov = self._build_conditional(Xnew, pred_noise, False, *givens)
        # return MvNormal(mu=mu, cov=cov)
``` |
{
"source": "jordan-melendez/img_sanity",
"score": 3
} |
#### File: img_sanity/img_sanity/app.py
```python
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.graph_objs as go
import dash_table
import pandas as pd
import base64
from os import path
import argparse
import flask
def encode_image(name):
return base64.b64encode(open(name, 'rb').read())
def decode_image(name):
encoded_image = encode_image(name)
return 'data:image/png;base64,{}'.format(encoded_image.decode())
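# decode_image returns a base64 data URI that html.Img can embed directly;
# this app instead serves files through the static route defined at the
# bottom, which keeps the page payload small for large image sets.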
parser = argparse.ArgumentParser(
description='An interactive image filter.'
)
parser.add_argument(
'table', metavar='table', type=str,
help='The name of the table storing image attributes and names'
)
parser.add_argument(
'--img_column', default='name', type=str,
help='The column that stores the image file names'
)
parser.add_argument(
'--img_path', default='', type=str,
help='The path to the images. It is joined with the values in img_column'
)
parser.add_argument(
'--img_ext', default='', type=str,
help='The image extensions, if not part of the image names'
)
args = parser.parse_args()
file = args.table
img_column = args.img_column
img_path = args.img_path
img_path = path.abspath(img_path)
img_ext = args.img_ext
static_image_route = '/static/'
df = pd.read_csv(file)
list_of_images = [name for name in df[img_column]]
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
# external_stylesheets = [dbc.themes.BOOTSTRAP]
# app = dash.Dash()
app = dash.Dash(
__name__, external_stylesheets=external_stylesheets
)
server = app.server
app.layout = html.Div([
html.H1('Image Filter'),
dash_table.DataTable(
id='table',
columns=[{"name": i, "id": i, "deletable": False} for i in df.columns],
data=df.to_dict('records'),
filter_action="native",
sort_action="native",
page_action="native",
page_current=0,
page_size=7,
sort_mode="multi",
style_data_conditional=[
{"if": {"row_index": "odd"}, "backgroundColor": "rgb(248, 248, 248)"}
],
style_table={
'height': '290px',
'overflowY': 'scroll',
'border-bottom': 'thin lightgrey solid'
}
),
html.Div(id='img-grid', className='row',
# style={"maxHeight": "500px", "overflow": "scroll"}
# style={'flex': '1 1 auto', 'overflow': 'scroll'}
),
],
# style={'height': '100%', 'display': 'flex', 'flex-flow': 'column'}
)
@app.callback(
Output("img-grid", "children"),
[Input("table", "derived_virtual_data")]
)
def show_images(rows):
if rows is None:
_df = df
else:
_df = pd.DataFrame(rows)
images = [
# html.Img(src=decode_image(path.join(img_path, name)))
html.Img(src=path.join(static_image_route, name))
for name in _df[img_column]
]
return images
# Add a static image route that serves images from desktop
# Be *very* careful here - you don't want to serve arbitrary files
# from your computer or server
@app.server.route(path.join(static_image_route, '<image_name>'))
def serve_image(image_name):
if image_name not in list_of_images:
raise Exception('"{}" is excluded from the allowed static files'.format(image_name))
return flask.send_from_directory(img_path, image_name + img_ext)
if __name__ == '__main__':
app.run_server(debug=True)
``` |
{
"source": "jordan-melendez/rrplot",
"score": 2
} |
#### File: rrplot/rrplot/utils.py
```python
import matplotlib.pyplot as plt
def findTeXFigureNames():
pass
def placeFigures(currentDir, targetDir, names):
pass
def labelMultipart(fig, label, pos):
pass
``` |
{
"source": "JordanMicahBennett/DeepBrainSeg",
"score": 2
} |
#### File: DeepBrainSeg/radiomics/radiomics.py
```python
import os
import numpy as np
import pandas as pd
import SimpleITK as sitk
import six
import radiomics
from tqdm import tqdm
from radiomics import firstorder, glcm, imageoperations, glrlm, glszm, ngtdm, gldm, getTestCase
class ExtractRadiomicFeatures():
def __init__(self, input_image,
input_mask=None,
save_path=None,
seq='Flair',
class_ = 'ET',
all_=True):
self.input_image = input_image
        if input_mask is None:
            self.input_mask = np.ones(tuple(list(self.input_image.shape)[:-1]))
        else:
            self.input_mask = input_mask
self.img = sitk.GetImageFromArray(self.input_image)
self.GT = sitk.GetImageFromArray(self.input_mask)
self.save_path = save_path
self.seq = seq
self.all_ = all_
self.class_ = class_
self.feat_dict = {}
def first_order(self):
feat_dict = {}
firstOrderFeatures = firstorder.RadiomicsFirstOrder(self.img, self.GT)
firstOrderFeatures.enableAllFeatures()
firstOrderFeatures.execute()
for (key,val) in six.iteritems(firstOrderFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
feat_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(feat_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'firstorder_features.csv'), index=False)
return df
def glcm_features(self):
glcm_dict = {}
GLCMFeatures = glcm.RadiomicsGLCM(self.img, self.GT)
GLCMFeatures.enableAllFeatures()
GLCMFeatures.execute()
for (key,val) in six.iteritems(GLCMFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
glcm_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(glcm_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'glcm_features.csv'), index=False)
return df
def glszm_features(self):
glszm_dict = {}
GLSZMFeatures = glszm.RadiomicsGLSZM(self.img, self.GT)
GLSZMFeatures.enableAllFeatures() # On the feature class level, all features are disabled by default.
GLSZMFeatures.execute()
for (key,val) in six.iteritems(GLSZMFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
glszm_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(glszm_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'glszm_features.csv'), index=False)
return df
def glrlm_features(self):
glrlm_dict = {}
GLRLMFeatures = glrlm.RadiomicsGLRLM(self.img, self.GT)
GLRLMFeatures.enableAllFeatures() # On the feature class level, all features are disabled by default.
GLRLMFeatures.execute()
for (key,val) in six.iteritems(GLRLMFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
glrlm_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(glrlm_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'glrlm_features.csv'), index=False)
return df
def ngtdm_features(self):
ngtdm_dict = {}
NGTDMFeatures = ngtdm.RadiomicsNGTDM(self.img, self.GT)
NGTDMFeatures.enableAllFeatures() # On the feature class level, all features are disabled by default.
NGTDMFeatures.execute()
for (key,val) in six.iteritems(NGTDMFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
ngtdm_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(ngtdm_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'ngtdm_features.csv'), index=False)
return df
def gldm_features(self):
gldm_dict = {}
GLDMFeatures = gldm.RadiomicsGLDM(self.img, self.GT)
GLDMFeatures.enableAllFeatures() # On the feature class level, all features are disabled by default.
GLDMFeatures.execute()
for (key,val) in six.iteritems(GLDMFeatures.featureValues):
if self.all_:
self.feat_dict[self.seq + "_" + self.class_ + '_' + key] = val
else:
gldm_dict[self.seq + "_" + self.class_ + "_" + key] = val
df = pd.DataFrame(gldm_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'gldm_features.csv'), index=False)
return df
def all_features(self):
_ = self.first_order()
_ = self.glcm_features()
_ = self.glszm_features()
        _ = self.glrlm_features()
_ = self.gldm_features()
_ = self.ngtdm_features()
df = pd.DataFrame(self.feat_dict)
if self.save_path:
df.to_csv(os.path.join(self.save_path, 'all_features.csv'), index=False)
return df
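# Minimal usage sketch (volume/mask arrays and the output path are assumptions):
#   feats = ExtractRadiomicFeatures(volume, input_mask=mask,
#                                   save_path='features/', seq='Flair').all_features()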
```
#### File: DeepBrainSeg/tumor/Tester.py
```python
import torch
import SimpleITK as sitk
import numpy as np
import nibabel as nib
from torch.autograd import Variable
from skimage.transform import resize
from torchvision import transforms
from time import gmtime, strftime
from tqdm import tqdm
import pdb
import os
from ..helpers.helper import *
from os.path import expanduser
home = expanduser("~")
#========================================================================================
# prediction functions.....................
bin_path = os.path.join('/opt/ANTs/bin/')
class tumorSeg():
"""
class performs segmentation for a given sequence of patient data.
to main platform for segmentation mask estimation
one for the patient data in brats format
other with any random format
step followed for in estimation of segmentation mask
1. ABLnet for reducing false positives outside the brain
Air Brain Lesson model (2D model, 103 layered)
2. BNet3Dnet 3D network for inner class classification
Dual Path way network
3. MNet2D 57 layered convolutional network for inner class
classification
4. Tir3Dnet 57 layered 3D convolutional network for inner class
classification
more on training details and network information:
(https://link.springer.com/chapter/10.1007/978-3-030-11726-9_43<Paste>)
=========================
quick: True (just evaluates on Dual path network (BNet3D)
else copmutes an ensumble over all four networks
"""
def __init__(self,
quick = False,
ants_path = bin_path):
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# device = "cpu"
map_location = device
#========================================================================================
ckpt_tir2D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_2D_FC57_best_loss.pth.tar')
ckpt_tir3D = os.path.join(home, '.DeepBrainSeg/BestModels/Tramisu_3D_FC57_best_acc.pth.tar')
ckpt_BNET3D = os.path.join(home, '.DeepBrainSeg/BestModels/BrainNet_3D_best_acc.pth.tar')
ckpt_ABL = os.path.join(home, '.DeepBrainSeg/BestModels/ABL_CE_best_model_loss_based.pth.tar')
#========================================================================================
# air brain lesion segmentation..............
from .models.modelABL import FCDenseNet103
self.ABLnclasses = 3
self.ABLnet = FCDenseNet103(n_classes = self.ABLnclasses) ## intialize the graph
saved_parms=torch.load(ckpt_ABL, map_location=map_location)
self.ABLnet.load_state_dict(saved_parms['state_dict']) ## fill the model with trained params
print ("=================================== ABLNET2D Loaded =================================")
self.ABLnet.eval()
self.ABLnet = self.ABLnet.to(device)
#========================================================================================
# Tir2D net.......................
from .models.modelTir2D import FCDenseNet57
self.Mnclasses = 4
self.MNET2D = FCDenseNet57(self.Mnclasses)
ckpt = torch.load(ckpt_tir2D, map_location=map_location)
self.MNET2D.load_state_dict(ckpt['state_dict'])
print ("=================================== MNET2D Loaded ===================================")
self.MNET2D.eval()
self.MNET2D = self.MNET2D.to(device)
#========================================================================================
if not quick:
# BrainNet3D model......................
from .models.model3DBNET import BrainNet_3D_Inception
self.B3Dnclasses = 5
self.BNET3Dnet = BrainNet_3D_Inception()
ckpt = torch.load(ckpt_BNET3D, map_location=map_location)
self.BNET3Dnet.load_state_dict(ckpt['state_dict'])
print ("=================================== KAMNET3D Loaded =================================")
self.BNET3Dnet.eval()
self.BNET3Dnet = self.BNET3Dnet.to(device)
#========================================================================================
# Tir3D model...................
from .models.modelTir3D import FCDenseNet57
self.T3Dnclasses = 5
self.Tir3Dnet = FCDenseNet57(self.T3Dnclasses)
ckpt = torch.load(ckpt_tir3D, map_location=map_location)
self.Tir3Dnet.load_state_dict(ckpt['state_dict'])
print ("================================== TIRNET2D Loaded =================================")
self.Tir3Dnet.eval()
self.Tir3Dnet = self.Tir3Dnet.to(device)
#========================================================================================
self.device = device
self.quick = quick
self.ants_path = ants_path
def get_ants_mask(self, t1_path):
"""
We make use of ants framework for generalized skull stripping
t1_path: t1 volume path (str)
saves the mask in the same location as t1 data directory
returns: maskvolume (numpy uint8 type)
"""
mask_path = os.path.join(os.path.dirname(t1_path), 'mask.nii.gz')
os.system(self.ants_path +'ImageMath 3 '+ mask_path +' Normalize '+ t1_path)
os.system(self.ants_path +'ThresholdImage 3 '+ mask_path +' '+ mask_path +' 0.01 1')
os.system(self.ants_path +'ImageMath 3 '+ mask_path +' MD '+ mask_path +' 1')
os.system(self.ants_path +'ImageMath 3 '+ mask_path +' ME '+ mask_path +' 1')
os.system(self.ants_path +'CopyImageHeaderInformation '+ t1_path+' '+ mask_path +' '+ mask_path +' 1 1 1')
mask = np.uint8(nib.load(mask_path).get_data())
return mask
def get_localization(self, t1_v, t1c_v, t2_v, flair_v, brain_mask):
"""
ABLnetwork output, finds the brain, Whole tumor region
t1_v = t1 volume (numpy array)
t1c_v = t1c volume (numpy array)
t2_v = t2 volume (numpy array)
flair_v = flair volume (numpy array)
        brain_mask = brain, whole tumor mask (numpy array, output of ANTs pipeline)
"""
t1_v = normalize(t1_v, brain_mask)
t1c_v = normalize(t1c_v, brain_mask)
t2_v = normalize(t2_v, brain_mask)
flair_v = normalize(flair_v, brain_mask)
generated_output_logits = np.empty((self.ABLnclasses, flair_v.shape[0],flair_v.shape[1],flair_v.shape[2]))
for slices in tqdm(range(flair_v.shape[2])):
flair_slice = np.transpose(flair_v[:,:,slices])
t2_slice = np.transpose(t2_v[:,:,slices])
t1ce_slice = np.transpose(t1c_v[:,:,slices])
t1_slice = np.transpose(t1_v[:,:,slices])
array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],4))
array[:,:,0] = flair_slice
array[:,:,1] = t2_slice
array[:,:,2] = t1ce_slice
array[:,:,3] = t1_slice
transformed_array = torch.from_numpy(convert_image(array)).float()
            transformed_array = transformed_array.unsqueeze(0) ## necessary if batch size == 1
transformed_array = transformed_array.to(self.device)
logits = self.ABLnet(transformed_array).detach().cpu().numpy()# 3 x 240 x 240
generated_output_logits[:,:,:, slices] = logits.transpose(0, 1, 3, 2)
final_pred = apply_argmax_to_logits(generated_output_logits)
final_pred = perform_postprocessing(final_pred)
final_pred = adjust_classes_air_brain_tumour(np.uint8(final_pred))
return np.uint8(final_pred)
def inner_class_classification_with_logits_NCube(self, t1,
t1ce, t2, flair,
brain_mask, mask, N = 64):
"""
output of 3D tiramisu model (tir3Dnet)
mask = numpy array output of ABLnet
N = patch size during inference
"""
t1 = normalize(t1, brain_mask)
t1ce = normalize(t1ce, brain_mask)
t2 = normalize(t2, brain_mask)
flair = normalize(flair, brain_mask)
shape = t1.shape # to exclude batch_size
final_prediction = np.zeros((self.T3Dnclasses, shape[0], shape[1], shape[2]))
x_min, x_max, y_min, y_max, z_min, z_max = bbox(mask, pad = N)
x_min, x_max, y_min, y_max, z_min, z_max = x_min, min(shape[0] - N, x_max), y_min, min(shape[1] - N, y_max), z_min, min(shape[2] - N, z_max)
with torch.no_grad():
for x in tqdm(range(x_min, x_max, N//2)):
for y in range(y_min, y_max, N//2):
for z in range(z_min, z_max, N//2):
high = np.zeros((1, 4, N, N, N))
high[0, 0, :, :, :] = flair[x:x+N, y:y+N, z:z+N]
high[0, 1, :, :, :] = t2[x:x+N, y:y+N, z:z+N]
high[0, 2, :, :, :] = t1[x:x+N, y:y+N, z:z+N]
high[0, 3, :, :, :] = t1ce[x:x+N, y:y+N, z:z+N]
high = Variable(torch.from_numpy(high)).to(self.device).float()
pred = torch.nn.functional.softmax(self.Tir3Dnet(high).detach().cpu())
pred = pred.data.numpy()
final_prediction[:, x:x+N, y:y+N, z:z+N] = pred[0]
final_prediction = convert5class_logitsto_4class(final_prediction)
return final_prediction
def inner_class_classification_with_logits_DualPath(self, t1,
t1ce, t2, flair,
brain_mask, mask=None,
prediction_size = 9):
"""
output of BNet3D
prediction_size = mid inference patch size
"""
t1 = normalize(t1, brain_mask)
t1ce = normalize(t1ce, brain_mask)
t2 = normalize(t2, brain_mask)
flair = normalize(flair, brain_mask)
shape = t1.shape # to exclude batch_size
final_prediction = np.zeros((self.B3Dnclasses, shape[0], shape[1], shape[2]))
x_min, x_max, y_min, y_max, z_min, z_max = bbox(mask, pad = prediction_size)
# obtained by aspect ratio calculation
high_res_size = prediction_size + 16
resize_to = int(prediction_size ** 0.5) + 16
low_res_size = int(51*resize_to/19)
hl_pad = (high_res_size - prediction_size)//2
hr_pad = hl_pad + prediction_size
ll_pad = (low_res_size - prediction_size)//2
lr_pad = ll_pad + prediction_size
for x in tqdm(range(x_min, x_max - prediction_size, prediction_size)):
for y in (range(y_min, y_max - prediction_size, prediction_size)):
for z in (range(z_min, z_max - prediction_size, prediction_size)):
high = np.zeros((1, 4, high_res_size, high_res_size, high_res_size))
low = np.zeros((1, 4, low_res_size, low_res_size, low_res_size))
low1 = np.zeros((1, 4, resize_to, resize_to, resize_to))
                    # Pre-fill every channel with the corner voxel of its modality so
                    # regions outside the volume keep a background intensity.
                    high[0, 0], high[0, 1], high[0, 2], high[0, 3] = high[0, 0] + flair[0, 0, 0], high[0, 1] + t2[0, 0, 0], high[0, 2] + t1[0, 0, 0], high[0, 3] + t1ce[0, 0, 0]
                    low[0, 0], low[0, 1], low[0, 2], low[0, 3] = low[0, 0] + flair[0, 0, 0], low[0, 1] + t2[0, 0, 0], low[0, 2] + t1[0, 0, 0], low[0, 3] + t1ce[0, 0, 0]
                    low1[0, 0], low1[0, 1], low1[0, 2], low1[0, 3] = low1[0, 0] + flair[0, 0, 0], low1[0, 1] + t2[0, 0, 0], low1[0, 2] + t1[0, 0, 0], low1[0, 3] + t1ce[0, 0, 0]
# =========================================================================
vxf, vxt = max(0, x-hl_pad), min(shape[0], x+hr_pad)
vyf, vyt = max(0, y-hl_pad), min(shape[1], y+hr_pad)
vzf, vzt = max(0, z-hl_pad), min(shape[2], z+hr_pad)
txf, txt = max(0, hl_pad-x), max(0, hl_pad-x) + vxt - vxf
tyf, tyt = max(0, hl_pad-y), max(0, hl_pad-y) + vyt - vyf
tzf, tzt = max(0, hl_pad-z), max(0, hl_pad-z) + vzt - vzf
high[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt]
high[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt]
high[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt]
high[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt]
# =========================================================================
vxf, vxt = max(0, x-ll_pad), min(shape[0], x+lr_pad)
vyf, vyt = max(0, y-ll_pad), min(shape[1], y+lr_pad)
vzf, vzt = max(0, z-ll_pad), min(shape[2], z+lr_pad)
txf, txt = max(0, ll_pad-x), max(0, ll_pad-x) + vxt - vxf
tyf, tyt = max(0, ll_pad-y), max(0, ll_pad-y) + vyt - vyf
tzf, tzt = max(0, ll_pad-z), max(0, ll_pad-z) + vzt - vzf
low[0, 0, txf:txt, tyf:tyt, tzf:tzt] = flair[vxf:vxt, vyf:vyt, vzf:vzt]
low[0, 1, txf:txt, tyf:tyt, tzf:tzt] = t2[vxf:vxt, vyf:vyt, vzf:vzt]
low[0, 2, txf:txt, tyf:tyt, tzf:tzt] = t1[vxf:vxt, vyf:vyt, vzf:vzt]
low[0, 3, txf:txt, tyf:tyt, tzf:tzt] = t1ce[vxf:vxt, vyf:vyt, vzf:vzt]
# =========================================================================
low1[0] = [resize(low[0, i, :, :, :], (resize_to, resize_to, resize_to)) for i in range(4)]
high = Variable(torch.from_numpy(high)).to(self.device).float()
low1 = Variable(torch.from_numpy(low1)).to(self.device).float()
pred = torch.nn.functional.softmax(self.BNET3Dnet(high, low1, pred_size=prediction_size).detach().cpu())
pred = pred.numpy()
final_prediction[:, x:x+prediction_size, y:y+prediction_size, z:z+prediction_size] = pred[0]
final_prediction = convert5class_logitsto_4class(final_prediction)
return final_prediction
def inner_class_classification_with_logits_2D(self,
t1ce_volume,
t2_volume,
flair_volume):
"""
output of 2D tiramisu model (MNet)
"""
normalize = transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
transformList = []
transformList.append(transforms.ToTensor())
transformList.append(normalize)
transformSequence=transforms.Compose(transformList)
generated_output = np.empty((self.Mnclasses,flair_volume.shape[0],flair_volume.shape[1],flair_volume.shape[2]))
for slices in tqdm(range(flair_volume.shape[2])):
flair_slice = scale_every_slice_between_0_to_255(np.transpose(flair_volume[:,:,slices]))
t2_slice = scale_every_slice_between_0_to_255(np.transpose(t2_volume[:,:,slices]))
t1ce_slice = scale_every_slice_between_0_to_255(np.transpose(t1ce_volume[:,:,slices]))
array = np.zeros((flair_slice.shape[0],flair_slice.shape[1],3))
array[:,:,0] = flair_slice
array[:,:,1] = t2_slice
array[:,:,2] = t1ce_slice
array = np.uint8(array)
transformed_array = transformSequence(array)
transformed_array = transformed_array.unsqueeze(0)
transformed_array = transformed_array.to(self.device)
            outs = torch.nn.functional.softmax(self.MNET2D(transformed_array).detach().cpu(), dim=1).numpy()
            generated_output[:, :, :, slices] = outs[0]
        return np.swapaxes(generated_output, 1, 2)
def get_segmentation(self,
t1_path,
t2_path,
t1ce_path,
flair_path,
save_path = None):
"""
Generates segmentation for the data not in brats format
if save_path provided function saves the prediction with
        DeepBrainSeg_Prediction.nii.gz name in the provided
directory
returns: segmentation mask
"""
t1 = nib.load(t1_path).get_data()
t2 = nib.load(t2_path).get_data()
t1ce = nib.load(t1ce_path).get_data()
flair = nib.load(flair_path).get_data()
affine = nib.load(flair_path).affine
brain_mask = self.get_ants_mask(t2_path)
mask = self.get_localization(t1, t1ce, t2, flair, brain_mask)
# mask = np.swapaxes(mask,1, 0)
if not self.quick:
final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask)
final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask)
final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair).transpose(0, 2, 1, 3)
final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits])
else:
final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair)
final_prediction_array = np.array([final_predictionMnet_logits])
final_prediction_logits = combine_logits_AM(final_prediction_array)
final_pred = postprocessing_pydensecrf(final_prediction_logits)
final_pred = combine_mask_prediction(mask, final_pred)
final_pred = perform_postprocessing(final_pred)
final_pred = adjust_classes(final_pred)
if save_path:
os.makedirs(save_path, exist_ok=True)
save_volume(final_pred, affine, os.path.join(save_path, 'DeepBrainSeg_Prediction'))
return final_pred
def get_segmentation_brats(self,
path,
save = True):
"""
Generates segmentation for the data in BraTs format
if save True saves the prediction in the save directory
in the patients data path
returns : segmentation mask
"""
name = path.split("/")[-1] + "_"
flair = nib.load(os.path.join(path, name + 'flair.nii.gz')).get_data()
t1 = nib.load(os.path.join(path, name + 't1.nii.gz')).get_data()
t1ce = nib.load(os.path.join(path, name + 't1ce.nii.gz')).get_data()
t2 = nib.load(os.path.join(path, name + 't2.nii.gz')).get_data()
affine= nib.load(os.path.join(path, name + 'flair.nii.gz')).affine
print ("[INFO: DeepBrainSeg] (" + strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()) + ") Working on: ", path)
brain_mask = self.get_ants_mask(os.path.join(path, name + 't2.nii.gz'))
# brain_mask = get_brain_mask(t1)
mask = self.get_localization(t1, t1ce, t2, flair, brain_mask)
mask = np.swapaxes(mask,1, 0)
if not self.quick:
final_predictionTir3D_logits = self.inner_class_classification_with_logits_NCube(t1, t1ce, t2, flair, brain_mask, mask)
final_predictionBNET3D_logits = self.inner_class_classification_with_logits_DualPath(t1, t1ce, t2, flair, brain_mask, mask)
final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair)
final_prediction_array = np.array([final_predictionTir3D_logits, final_predictionBNET3D_logits, final_predictionMnet_logits])
else:
final_predictionMnet_logits = self.inner_class_classification_with_logits_2D(t1, t2, flair)
final_prediction_array = np.array([final_predictionMnet_logits])
final_prediction_logits = combine_logits_AM(final_prediction_array)
final_pred = postprocessing_pydensecrf(final_prediction_logits)
final_pred = combine_mask_prediction(mask, final_pred)
final_pred = perform_postprocessing(final_pred)
final_pred = adjust_classes(final_pred)
if save:
save_volume(final_pred, affine, os.path.join(path, 'DeepBrainSeg_Prediction'))
return final_pred
# ========================================================================================
if __name__ == '__main__':
    ext = tumorSeg(quick=True)
ext.get_segmentation_brats('../../sample_volume/Brats18_CBICA_AVG_1/')
``` |
{
"source": "JordanMicahBennett/SMART-IMAGE_BASED-COVID19_VIRUS_DETECTO",
"score": 3
} |
#### File: JordanMicahBennett/SMART-IMAGE_BASED-COVID19_VIRUS_DETECTO/covid19_ai_diagnoser_ui.py
```python
import covid19_ai_diagnoser
from tkinter import Frame, Tk, BOTH, Label, Menu, filedialog, messagebox, Text
from PIL import Image, ImageTk
import os
import codecs
screenWidth = "1560"
screenHeight = "840"
windowTitle = "Smart/Ai Coronavirus 2019 (Covid19) Diagnosis Tool"
import cv2
class Window(Frame):
_PRIOR_IMAGE = None
#establish variable to keep track of images added to Frame, for purpose of preventing stacking @ new image additions
#by using destroy() on each old image instance @ addition
#Added by <NAME>, based on suggestion by <NAME>, who suggested that xray images should not stack as new ones are loaded. (https://www.facebook.com/mvandrei)
DIAGNOSIS_RESULT = ""
DIAGNOSIS_RESULT_FIELD = None
#Jordan_note: Added to facilitate output window data
def __init__(self, master=None):
Frame.__init__(self, master)
self.master = master
self.pack(fill=BOTH, expand=1)
load = Image.open("covid19_ai_diagnoser_default__.jpg")
render = ImageTk.PhotoImage(load)
img = Label(self, image=render)
img.image = render
img.place(x=(int(screenWidth)/2)-load.width/2, y=((int(screenHeight)/2))-load.height/2-80)
self._PRIOR_IMAGE = img #setup prior image instance
self.DIAGNOSIS_RESULT_FIELD = Text(self, width=int(screenWidth), height=13)
self.DIAGNOSIS_RESULT_FIELD.pack ( )
self.DIAGNOSIS_RESULT_FIELD.place(x=0, y=int(screenHeight)-200)
def addDiagnosisResult (self, value):
self.DIAGNOSIS_RESULT_FIELD.delete("1.0","end") #clear diagnostic result text element
self.DIAGNOSIS_RESULT = "" #clear diagnostic result string variable
self.DIAGNOSIS_RESULT_FIELD.insert(1.0, value) #add new value
root = Tk()
app = Window(root)
############
#screen window size, window title
root.wm_title(windowTitle)
root.geometry(screenWidth + "x" + screenHeight)
############
#menu bar
menubar = Menu(root)
# Adding a cascade to the menu bar:
filemenu = Menu(menubar, tearoff=0)
menubar.add_cascade(label="Files", menu=filemenu)
CONSTANT_DIAGNOSIS_IMAGE_SPAN = 480
# Defining function to trigger file browser
def loadRegularPneumoniaImageFromDialog():
currdir = os.getcwd()
image_file = filedialog.askopenfile(mode ='r', parent=root, initialdir=currdir, title='Please select an Xray Image of suspected regular pneumonia case:')
root.wm_title(windowTitle + " : " + image_file.name)
loadRegularPneumoniaImageFromName(image_file.name)
def loadRegularPneumoniaImageFromName(filename):
app._PRIOR_IMAGE.destroy() #destroy old image
load = Image.open(filename)
load = load.resize((CONSTANT_DIAGNOSIS_IMAGE_SPAN, CONSTANT_DIAGNOSIS_IMAGE_SPAN),Image.ANTIALIAS) #Resized "load" image to constant size on screen. However, neural network still runs on on original image scale from filename.
render = ImageTk.PhotoImage(load)
img = Label(image=render)
img.image = render
img.place(x=(int(screenWidth)/2)-CONSTANT_DIAGNOSIS_IMAGE_SPAN/2, y=((int(screenHeight)/2))-CONSTANT_DIAGNOSIS_IMAGE_SPAN/2-80)
app.DIAGNOSIS_RESULT += "**Non-Covid19 Mode Result**\n" + filename+"\n\n"
app.DIAGNOSIS_RESULT += covid19_ai_diagnoser.doOnlineInference_regularPneumonia (filename)
print(app.DIAGNOSIS_RESULT)
app._PRIOR_IMAGE = img #set latest instance of old image
app.addDiagnosisResult(app.DIAGNOSIS_RESULT)
enableDiagnosisResultColouring ( )
def loadCovid19ImageFromDialog():
currdir = os.getcwd()
image_file = filedialog.askopenfile(mode ='r', parent=root, initialdir=currdir, title='Please select an Xray Image of suspected coronavirus2019 case:')
root.wm_title(windowTitle + " : " + image_file.name)
loadCovid19ImageFromName(image_file.name)
def loadCovid19ImageFromName(filename):
app._PRIOR_IMAGE.destroy() #destroy old image
load = Image.open(filename)
load = load.resize((CONSTANT_DIAGNOSIS_IMAGE_SPAN, CONSTANT_DIAGNOSIS_IMAGE_SPAN),Image.ANTIALIAS) #Resized "load" image to constant size on screen. However, neural network still runs on on original image scale from filename.
render = ImageTk.PhotoImage(load)
img = Label(image=render)
img.image = render
img.place(x=(int(screenWidth)/2)-CONSTANT_DIAGNOSIS_IMAGE_SPAN/2, y=((int(screenHeight)/2))-CONSTANT_DIAGNOSIS_IMAGE_SPAN/2-80)
app.DIAGNOSIS_RESULT += "**Covid19 Mode Result**\n" + filename+"\n\n"
app.DIAGNOSIS_RESULT += covid19_ai_diagnoser.doOnlineInference_covid19Pneumonia (filename)
print(app.DIAGNOSIS_RESULT)
app._PRIOR_IMAGE = img #set latest instance of old image
app.addDiagnosisResult(app.DIAGNOSIS_RESULT)
enableDiagnosisResultColouring ( )
# Adding a load image button to the cascade menu "File"
filemenu.add_command(label="Load image to test for pneumonia", command=loadRegularPneumoniaImageFromDialog)
filemenu.add_command(label="Load image to test for covid-19", command=loadCovid19ImageFromDialog)
def colourDiagnosisMessageText ( diagnosisContent, startIndexText, endIndexText ):
#If pneumonia or covid19 is detected
if ( covid19_ai_diagnoser.DIAGNOSIS_MESSAGES[0] in diagnosisContent or covid19_ai_diagnoser.DIAGNOSIS_MESSAGES[1] in diagnosisContent ):
app.DIAGNOSIS_RESULT_FIELD.tag_add("DIAGNOSIS_RESULT_MESSAGE", startIndexText, endIndexText)
app.DIAGNOSIS_RESULT_FIELD.tag_configure("DIAGNOSIS_RESULT_MESSAGE", background="red", foreground ="white")
#If normal lungs state is detected
if ( covid19_ai_diagnoser.DIAGNOSIS_MESSAGES[2] in diagnosisContent ):
app.DIAGNOSIS_RESULT_FIELD.tag_add("DIAGNOSIS_RESULT_MESSAGE", startIndexText, endIndexText)
app.DIAGNOSIS_RESULT_FIELD.tag_configure("DIAGNOSIS_RESULT_MESSAGE", background="green", foreground ="white")
def enableDiagnosisResultColouring ( ):
diagnosisResultFieldContent = app.DIAGNOSIS_RESULT_FIELD.get("1.0","end")
colourDiagnosisMessageText ( diagnosisResultFieldContent, "4.0", "4.21" )
############
#root cycle
root.config(menu=menubar)
root.mainloop()
``` |
{
"source": "JordanMilne/Redhawk",
"score": 3
} |
#### File: redhawk/common/parser.py
```python
from . import tree_converter as T
class Parser:
def GetTreeConverterClass(self):
raise NotImplementedError(
"GetTreeConverterClass method not implemented in the Parser base class.")
def Parse(self, filename):
""" Parse filename. """
raise NotImplementedError(
"Parse is not implemented in the Parser base class.")
def _Get_Converter(self, filename):
converter_class = self.GetTreeConverterClass()
assert(issubclass(converter_class, T.TreeConverter))
converter = converter_class(filename)
return converter
def Convert(self, ast, filename=None):
""" Convert language specific AST to the LAST """
return self._Get_Converter(filename).Convert(ast)
def GetLAST(self, filename):
""" Return the language agnostic abstract syntax tree for filename."""
assert(filename != None)
converter = self._Get_Converter(filename)
return converter.Convert(self.Parse(filename))
```
#### File: common/writers/dot_writer.py
```python
import redhawk.common.node as N
import redhawk.common.types as T
import redhawk.utils.util as U
from . import writer
import itertools
import pygraphviz
def WriteToDot(tree):
s = DotWriter()
return s.WriteTree(tree)
def WriteToImage(tree, fmt='png', filename=None):
s = DotWriter()
s.AddTree(tree)
s.Draw(path=filename, fmt=fmt)
return
def EscapeWhitespace(s):
a = s.replace("\n", "\\\\n").replace("\t", "\\\\t")
return a
class DotWriter(writer.Writer):
def __init__(self):
self.node_name_counter = itertools.count(0)
self.graph = pygraphviz.AGraph(directed=True, rankdir='LR')
self.graph.layout(prog='dot')
return
def WriteTree(self, tree):
""" Implementation of the base class method for writing the tree to a
string."""
self.AddTree(tree)
return self.graph.to_string()
def Draw(self, path, fmt='png'):
self.graph.draw(path=path, format=fmt, prog='dot')
return
def AddTree(self, tree):
""" Adds the tree to the graph."""
self.AddASTNodeToGraph(None, tree)
return
def __CreateGraphNode(self, **attrs):
""" Create a graph node with the give attributes."""
        node_index = next(self.node_name_counter)
self.graph.add_node(node_index, **attrs)
return node_index
def __CreateGraphNodeFromAST(self, ast_node):
""" Create a Graph Node (with the relevant attributes)
from the ast_node
Return the node index."""
name, attrs = ast_node.GetDotAttributes()
label = [name]
label += ["%s: %s"%(EscapeWhitespace(str(k)), EscapeWhitespace(str(v)))
for (k, v) in attrs.items() if type(v) is str]
if isinstance(ast_node, T.Type):
color = "gray"
fontcolor = "blue"
else:
color = "gray"
fontcolor = "black"
return self.__CreateGraphNode(label = ", ".join(label)
,shape = "box"
,color = color
,fontcolor = fontcolor
,fontname = "Sans"
,fontsize = "10")
def __CreateEmptyGraphNode(self):
""" Create an Empty Node (with style), and return its index."""
return self.__CreateGraphNode(shape='circle',
style='filled',
label="",
height='.1',
width='.1')
def AddASTNodeToGraph(self, parent_index, ast_node):
""" Creates a Graph Node from the given AST node,
marks its parent as the graph node with the given
`parent_index`, and recurses on the given AST
node's children."""
node_index = self.__CreateGraphNodeFromAST(ast_node)
if parent_index is not None:
self.graph.add_edge(parent_index, node_index)
children = ast_node.GetChildren()
for child in children:
if child is None:
continue
            if type(child) is list:
                empty_node = self.__CreateEmptyGraphNode()
                self.graph.add_edge(node_index, empty_node)
                # map() is lazy in Python 3, so iterate explicitly to recurse.
                for grandchild in child:
                    self.AddASTNodeToGraph(empty_node, grandchild)
            elif isinstance(child, N.Node):
                self.AddASTNodeToGraph(node_index, child)
            else:
                raise ValueError("%s's child (type: %s) was supposed to be a Node!\n %s"
                                 %(ast_node.GetName(), type(child), ast_node))
return
```
#### File: c/utils/display_tree.py
```python
import redhawk.utils.parse_ast as parse_ast
import sys
def GetNodeTypeShorthand(node):
cparser_type = ""
if hasattr(node, 'type'):
cparser_type = str(node.type).split(' ')[0].rsplit('.')[-1]
# python_type = str(type(node)).split('.')[-1][:-2]
python_type = node.__class__.__name__
if python_type and cparser_type:
return "p%s-c%s"%(python_type, cparser_type)
if python_type:
return "p"+python_type
return "c"+cparser_type
def GetNodeName(node):
name = ''
while hasattr(node, 'name'):
name = node.name
node = node.name
return name
def NodeToString(node):
if node.coord:
position = '[%s] %d:%d'%(
node.coord.file or '',
node.coord.line or 0,
node.coord.column or 0)
else:
position = '[] 0:0'
name = GetNodeName(node)
if name:
name = " '" + name
return "%s%s"%(str(GetNodeTypeShorthand(node)).lower(), name)
def PrintTree(tree, indent_level = 0, fp=sys.stdout):
fp.write("\n%s(%s"%(' '*indent_level, NodeToString(tree)))
if len(tree.children()) == 0:
fp.write(")")
else:
#fp.write("\n")
for c in tree.children():
PrintTree(c, indent_level+2)
fp.write(")")
try:
filename = sys.argv[1]
except IndexError as e:
sys.stderr.write("No C file specified to parse.\n")
sys.exit(1)
tree = parse_ast.ParseC(filename)
#tree.show(attrnames=True, showcoord=True)
PrintTree(tree)
```
#### File: redhawk/test/test_common_xml_writer.py
```python
import redhawk.common.writers.xml_writer as X
from . import common_test_utils as T
import nose.tools
import random
import itertools
import tempfile
import os
class TestXMLWriter:
def __init__(self):
self.counter = itertools.count(0)
self.temp_dir = tempfile.mkdtemp(prefix='xml')
return
def GetFilename(self):
i = next(self.counter)
return os.path.join(self.temp_dir, str(i))
def FunctionTestXML(self, ast):
v = self.GetFilename()
X.WriteToFile(ast, filename = v + '.xml')
return
def TestGenerator():
""" Testing XML Writer. """
PICK=5
c = TestXMLWriter()
all_asts = list(T.GetAllLASTs())
for i in range(PICK):
r_index = random.randrange(0, len(all_asts))
yield c.FunctionTestXML, all_asts[r_index]
# Disable the test by default.
@nose.tools.nottest
def TestAllPrograms():
""" Testing XML Writer (all programs) """
c = TestXMLWriter()
all_asts = list(T.GetAllLASTs())
for (i, ast) in enumerate(all_asts):
yield c.FunctionTestXML, ast
```
#### File: redhawk/test/test_utils_util.py
```python
from __future__ import print_function
import redhawk.utils.util as U
import nose.tools
import tempfile
import os
def TestConcat():
""" Test Concat."""
assert(U.Concat([[1], [2, 3], [4, 5]]) == [1, 2, 3, 4, 5])
assert(U.Concat([[1], [2, 3], [4, 5]]) == [1, 2, 3, 4, 5])
assert(U.Concat([[]]) == [])
return
def TestFlatten():
""" Test Flatten."""
assert(U.Flatten([1, 2, 3, 4]) == [1, 2, 3, 4])
assert(U.Flatten([[1], [2, 3], 4]) == [1, 2, 3, 4])
assert(U.Flatten([[1], [[2, 3]], 4]) == [1, 2, 3, 4])
assert(U.Flatten([[1], [[[2]]], 3]) == [1, 2, 3])
def TestGuessLanguageSuccess():
""" Test Guess Langague for Success."""
assert(U.GuessLanguage('foo.py') == 'python')
assert(U.GuessLanguage('foo.c') == 'c')
assert(U.GuessLanguage('foo.blah.py') == 'python')
assert(U.GuessLanguage('foo.blah.c') == 'c')
def TestIndexInto():
li = [1, 2, 3, 4, 5]
for i in range(len(li)):
assert(U.IndexInto(li, [i]) == li[i])
assert(U.IndexInto(li, [1, 2]) == None)
li = [[1, 2], [3, [4, 5], [6, 7]], [[[8]], 9], 10]
assert(U.IndexInto(li, [10, 19]) == None)
assert(U.IndexInto(li, [0]) == [1, 2])
assert(U.IndexInto(li, [0, 0]) == 1)
assert(U.IndexInto(li, [0, 1]) == 2)
assert(U.IndexInto(li, [0, 2]) == None)
assert(U.IndexInto(li, [1, 0]) == 3)
assert(U.IndexInto(li, [1, 1]) == [4, 5])
assert(U.IndexInto(li, [1, 2]) == [6, 7])
assert(U.IndexInto(li, [2, 0]) == [[8]])
assert(U.IndexInto(li, [2, 0, 1]) == None)
assert(U.IndexInto(li, [2, 0, 0]) == [8])
assert(U.IndexInto(li, [2, 0, 0, 0]) == 8)
assert(U.IndexInto(li, [2, 1]) == 9)
assert(U.IndexInto(li, [2, 1, 0, 0]) == 9)
assert(U.IndexInto(li, [3]) == 10)
assert(U.IndexInto(li, [4]) == None)
def TestFindFileInDirectoryOrAncestors():
""" Test FindFileInDirectoryOrAncestors"""
# Create an empty temp directory
root_dir = tempfile.mkdtemp()
a_dir = os.path.join(root_dir, "a")
b_dir = os.path.join(a_dir, "b")
# Create subdirectories
os.mkdir(a_dir)
os.mkdir(b_dir)
# Create temporary file
filepath = os.path.join(root_dir, "test_file")
fp = open(filepath, "w")
fp.close()
print(root_dir, a_dir, b_dir, filepath)
# Check if test_file can be found
assert(U.FindFileInDirectoryOrAncestors("test_file", b_dir) ==
filepath)
# Ensure that adding /. to the path does not
# change the result of the test
c_dir = os.path.join(b_dir, os.path.curdir)
assert(U.FindFileInDirectoryOrAncestors("test_file", c_dir) ==
filepath)
# Change Permissions to 000 and ensure that an
# IOError is thrown
os.chmod(filepath, 0)
raised = False
try:
U.FindFileInDirectoryOrAncestors("test_file", c_dir)
except IOError as e:
raised = True
assert(raised)
# Remove the file and temporary directories
os.remove(filepath)
assert(U.FindFileInDirectoryOrAncestors("test_file", b_dir) ==
None)
os.removedirs(b_dir)
return
```
#### File: redhawk/utils/code_generator_backend.py
```python
class CodeGeneratorBackend:
""" A class to help code generation by managing indents."""
def __init__(self, tab=" "):
self.code = []
self.tab = tab
self.level = 0
self.should_indent = True
def GetCode(self):
return "".join(self.code)
def NewLine(self):
self.code.append("\n")
self.should_indent = True
def Write(self, s):
if self.should_indent:
self.code.append(self.tab * self.level)
self.should_indent = False
self.code.append(s)
def WriteLine(self, s):
self.code.append(self.tab * self.level + s + "\n")
def Indent(self):
self.level = self.level + 1
def Dedent(self):
if self.level == 0:
raise SyntaxError("internal error in code generator")
self.level = self.level - 1
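# Minimal usage sketch:
#   cg = CodeGeneratorBackend(tab="    ")
#   cg.WriteLine("def f():")
#   cg.Indent()
#   cg.WriteLine("return 42")
#   cg.Dedent()
#   print(cg.GetCode())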
``` |
{
"source": "JordanMilne/ShenaniganFS",
"score": 3
} |
#### File: ShenaniganFS/examples/magicsymlinkserver.py
```python
import asyncio
import dataclasses
import os
from typing import *
from shenaniganfs.fs import (
FileType,
FSENTRY,
SimpleFS,
SimpleDirectory,
SimpleFSEntry,
Symlink,
utcnow,
VerifyingFileHandleEncoder,
)
from shenaniganfs.fs_manager import EvictingFileSystemManager, create_fs
from shenaniganfs.nfs_utils import serve_nfs
@dataclasses.dataclass
class MagicSymLink(SimpleFSEntry, Symlink):
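    # A symlink whose target changes between reads: readlink() cycles through
    # symlink_options, alternating the trusted target with a caller-derived
    # path (see contents below).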
read_count: int = dataclasses.field(default=0)
symlink_options: List[bytes] = dataclasses.field(default_factory=list)
type: FileType = dataclasses.field(default=FileType.LNK, init=False)
@property
def contents(self) -> bytearray:
return bytearray(self.symlink_options[self.read_count % len(self.symlink_options)])
@property
def size(self) -> int:
return len(self.contents)
class MagicSwitchingSymlinkFS(SimpleFS):
def __init__(self, trusted_target: bytes, size_quota=None, entries_quota=None):
super().__init__(size_quota=size_quota, entries_quota=entries_quota)
self.read_only = False
self.trusted_target = trusted_target
self.num_blocks = 1
self.free_blocks = 0
self.avail_blocks = 0
self.track_entry(SimpleDirectory(
mode=0o0777,
name=b"",
root_dir=True,
))
def lookup(self, directory: FSENTRY, name: bytes) -> Optional[FSENTRY]:
entry = super().lookup(directory, name)
if not entry:
attrs = dict(
mode=0o0777,
symlink_options=[self.trusted_target, name.replace(b"_", b"/")],
)
self._verify_size_quota(len(name) * 2)
entry = self._base_create(directory, name, attrs, MagicSymLink)
return entry
def readlink(self, entry: FSENTRY) -> bytes:
val = super().readlink(entry)
if isinstance(entry, MagicSymLink):
entry.read_count += 1
entry.ctime = utcnow()
entry.mtime = utcnow()
return val
async def main():
fs_manager = EvictingFileSystemManager(
VerifyingFileHandleEncoder(os.urandom(32)),
factories={
b"/symlinkfs": lambda call_ctx: create_fs(
MagicSwitchingSymlinkFS,
call_ctx,
trusted_target=b"/tmp/foobar",
# Only 100 entries total allowed in the FS
entries_quota=100,
# names + contents not allowed to exceed this in bytes
size_quota=100 * 1024
),
},
)
await serve_nfs(fs_manager, use_internal_rpcbind=True)
try:
asyncio.run(main())
except KeyboardInterrupt:
pass
```
#### File: ShenaniganFS/shenaniganfs/nfs_utils.py
```python
from shenaniganfs.fs_manager import FileSystemManager
from shenaniganfs.nfs2 import MountV1Service, NFSV2Service
from shenaniganfs.nfs3 import MountV3Service, NFSV3Service
from shenaniganfs.portmanager import PortManager, SimplePortMapper, SimpleRPCBind
from shenaniganfs.server import TCPTransportServer
async def serve_nfs(fs_manager: FileSystemManager, use_internal_rpcbind=True):
port_manager = PortManager()
if use_internal_rpcbind:
rpcbind_transport_server = TCPTransportServer("0.0.0.0", 111)
rpcbind_transport_server.register_prog(SimplePortMapper(port_manager))
rpcbind_transport_server.register_prog(SimpleRPCBind(port_manager))
rpcbind_transport_server.notify_port_manager(port_manager)
await rpcbind_transport_server.start()
transport_server = TCPTransportServer("0.0.0.0", 2049)
transport_server.register_prog(MountV1Service(fs_manager))
transport_server.register_prog(NFSV2Service(fs_manager))
transport_server.register_prog(MountV3Service(fs_manager))
transport_server.register_prog(NFSV3Service(fs_manager))
if use_internal_rpcbind:
transport_server.notify_port_manager(port_manager)
else:
await transport_server.notify_rpcbind()
server = await transport_server.start()
async with server:
await server.serve_forever()
```
#### File: ShenaniganFS/shenaniganfs/transport.py
```python
import abc
import asyncio
import struct
import xdrlib
from io import BytesIO
from typing import *
from shenaniganfs.generated.rfc1831 import *
from shenaniganfs.rpchelp import Proc
SPLIT_MSG = Tuple[RPCMsg, bytes]
_T = TypeVar("T")
ProcRet = Union[ReplyBody, _T]
class BaseTransport(abc.ABC):
@abc.abstractmethod
async def write_msg_bytes(self, msg: bytes):
pass
@abc.abstractmethod
async def read_msg_bytes(self) -> bytes:
pass
@property
def closed(self):
return False
@property
def client_addr(self) -> Tuple:
return ()
@abc.abstractmethod
def close(self):
pass
async def write_msg(self, header: RPCMsg, body: bytes) -> None:
p = xdrlib.Packer()
RPCMsg.pack(p, header)
p.pack_fstring(len(body), body)
await self.write_msg_bytes(p.get_buffer())
async def read_msg(self) -> SPLIT_MSG:
msg_bytes = await self.read_msg_bytes()
unpacker = xdrlib.Unpacker(msg_bytes)
msg = RPCMsg.unpack(unpacker)
return msg, unpacker.get_buffer()[unpacker.get_position():]
class TCPTransport(BaseTransport):
# 100KB, larger than UDP would allow anyway?
MAX_MSG_BYTES = 100_000
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
self.reader = reader
self.writer = writer
@property
def closed(self):
return self.reader.at_eof() or self.writer.is_closing()
@property
def client_addr(self) -> Tuple:
return self.writer.get_extra_info('peername')
def close(self):
if self.writer.can_write_eof():
self.writer.write_eof()
if not self.writer.is_closing():
self.writer.close()
async def write_msg_bytes(self, msg: bytes):
# Tack on the fragment size, mark as last frag
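        # RPC record marking: a 4-byte big-endian header whose low 31 bits hold
        # the fragment length and whose high bit flags the final fragment,
        # e.g. a 10-byte last fragment is framed as 0x8000000A.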
msg = struct.pack("!L", len(msg) | (1 << 31)) + msg
self.writer.write(msg)
await self.writer.drain()
async def read_msg_bytes(self) -> bytes:
last_frag = False
msg_bytes = BytesIO()
total_len = 0
while not last_frag:
frag_header = struct.unpack("!L", await self.reader.readexactly(4))[0]
last_frag = frag_header & (1 << 31)
frag_len = frag_header & (~(1 << 31))
total_len += frag_len
if total_len > self.MAX_MSG_BYTES:
raise ValueError(f"Overly large RPC message! {total_len}, {frag_len}")
msg_bytes.write(await self.reader.readexactly(frag_len))
return msg_bytes.getvalue()
class CallContext:
def __init__(self, transport: BaseTransport, msg: RPCMsg):
self.msg = msg
self.transport = transport
class Prog:
"""Base class for rpcgen-created server classes."""
prog: int
vers: int
min_vers: Optional[int] = None
procs: Dict[int, Proc]
def supports_version(self, vers: int) -> bool:
if self.min_vers is not None:
return self.min_vers <= vers <= self.vers
else:
return self.vers == vers
def get_handler(self, proc_id) -> Callable:
return getattr(self, self.procs[proc_id].name)
@staticmethod
def _make_reply_body(
accept_stat: Optional[AcceptStat] = None,
reject_stat: Optional[RejectStat] = None,
auth_stat: Optional[AuthStat] = None,
):
if sum(x is None for x in (accept_stat, reject_stat)) != 1:
raise Exception("Must specify either accept_stat OR reject_stat!")
return ReplyBody(
stat=ReplyStat.MSG_ACCEPTED if accept_stat is not None else ReplyStat.MSG_DENIED,
areply=AcceptedReply(
# TODO: this should be replaced on the way out,
# prog probably doesn't know which to use
verf=OpaqueAuth(
flavor=AuthFlavor.AUTH_NONE,
body=b"",
),
data=ReplyData(
stat=accept_stat,
)
) if accept_stat is not None else None,
rreply=RejectedReply(
r_stat=reject_stat,
auth_error=auth_stat,
) if reject_stat is not None else None,
)
async def handle_proc_call(self, call_ctx: CallContext, proc_id: int, call_body: bytes) \
-> Union[ReplyBody, bytes]:
proc = self.procs.get(proc_id)
if proc is None:
raise NotImplementedError()
unpacker = xdrlib.Unpacker(call_body)
argl = [arg_type.unpack(unpacker)
for arg_type in proc.arg_types]
handler: Callable = self.get_handler(proc_id)
rv = await handler(call_ctx, *argl)
if isinstance(rv, ReplyBody):
return rv
packer = xdrlib.Packer()
proc.ret_type.pack(packer, rv)
return packer.get_buffer()
``` |
{
"source": "jordan-mittleman/daml",
"score": 2
} |
#### File: daml/rules_daml/daml.bzl
```python
load("@bazel_skylib//lib:paths.bzl", "paths")
daml_provider = provider(doc = "DAML provider", fields = {
"dalf": "The DAML-LF file.",
"dar": "The packaged archive.",
"srcjar": "The generated Scala sources as srcjar.",
})
def _daml_impl_compile_dalf(ctx):
# Call damlc compile
compile_args = ctx.actions.args()
compile_args.add("compile")
compile_args.add(ctx.file.main_src)
compile_args.add("--output", ctx.outputs.dalf.path)
if ctx.attr.target:
compile_args.add("--target", ctx.attr.target)
ctx.actions.run(
inputs = depset([ctx.file.main_src] + ctx.files.srcs),
outputs = [ctx.outputs.dalf],
arguments = [compile_args],
progress_message = "Compiling DAML into DAML-LF archive %s" % ctx.outputs.dalf.short_path,
executable = ctx.executable.damlc,
)
def _daml_impl_package_dar(ctx):
# Call damlc package
package_args = ctx.actions.args()
package_args.add("package")
package_args.add(ctx.file.main_src)
package_args.add(ctx.attr.name)
if ctx.attr.target:
package_args.add("--target", ctx.attr.target)
package_args.add("--output")
package_args.add(ctx.outputs.dar.path)
ctx.actions.run(
inputs = [ctx.file.main_src] + ctx.files.srcs,
outputs = [ctx.outputs.dar],
arguments = [package_args],
progress_message = "Creating DAR package %s" % ctx.outputs.dar.basename,
executable = ctx.executable.damlc,
)
def _daml_outputs_impl(name):
patterns = {
"dalf": "{name}.dalf",
"dar": "{name}.dar",
"srcjar": "{name}.srcjar",
}
return {
k: v.format(name = name)
for (k, v) in patterns.items()
}
def _daml_compile_impl(ctx):
_daml_impl_compile_dalf(ctx)
_daml_impl_package_dar(ctx)
# DAML provider
daml = daml_provider(
dalf = ctx.outputs.dalf,
dar = ctx.outputs.dar,
)
return [daml]
def _daml_compile_outputs_impl(name):
patterns = {
"dalf": "{name}.dalf",
"dar": "{name}.dar",
}
return {
k: v.format(name = name)
for (k, v) in patterns.items()
}
# TODO(JM): The daml_compile() is same as daml(), but without the codegen bits.
# All of this needs a cleanup once we understand the needs for daml related rules.
daml_compile = rule(
implementation = _daml_compile_impl,
attrs = {
"main_src": attr.label(
allow_single_file = [".daml"],
mandatory = True,
doc = "The main DAML file that will be passed to the compiler.",
),
"srcs": attr.label_list(
allow_files = [".daml"],
default = [],
doc = "Other DAML files that compilation depends on.",
),
"target": attr.string(doc = "DAML-LF version to output"),
"damlc": attr.label(
executable = True,
cfg = "host",
allow_files = True,
default = Label("//compiler/damlc"),
),
},
executable = False,
outputs = _daml_compile_outputs_impl,
)
def _daml_test_impl(ctx):
script = """
set -eou pipefail
DAMLC=$(rlocation $TEST_WORKSPACE/{damlc})
rlocations () {{ for i in $@; do echo $(rlocation $TEST_WORKSPACE/$i); done; }}
$DAMLC test --files $(rlocations "{files}")
""".format(
damlc = ctx.executable.damlc.short_path,
files = " ".join([f.short_path for f in ctx.files.srcs]),
)
ctx.actions.write(
output = ctx.outputs.executable,
content = script,
)
damlc_runfiles = ctx.attr.damlc[DefaultInfo].data_runfiles
runfiles = ctx.runfiles(
collect_data = True,
files = ctx.files.srcs,
).merge(damlc_runfiles)
return [DefaultInfo(runfiles = runfiles)]
daml_test = rule(
implementation = _daml_test_impl,
attrs = {
"srcs": attr.label_list(
allow_files = [".daml"],
default = [],
doc = "DAML source files to test.",
),
"damlc": attr.label(
executable = True,
cfg = "host",
allow_files = True,
default = Label("//compiler/damlc"),
),
},
test = True,
)
_daml_binary_script_template = """#!/usr/bin/env sh
{java} -jar {sandbox} $@ {dar}
"""
def _daml_binary_impl(ctx):
script = _daml_binary_script_template.format(
java = ctx.executable._java.short_path,
sandbox = ctx.file._sandbox.short_path,
dar = ctx.file.dar.short_path,
)
ctx.actions.write(
output = ctx.outputs.executable,
content = script,
)
runfiles = ctx.runfiles(
files = [ctx.file.dar, ctx.file._sandbox, ctx.executable._java],
)
return [DefaultInfo(runfiles = runfiles)]
daml_binary = rule(
implementation = _daml_binary_impl,
attrs = {
"dar": attr.label(
allow_single_file = [".dar"],
mandatory = True,
doc = "The DAR to execute in the sandbox.",
),
"_sandbox": attr.label(
cfg = "target",
allow_single_file = [".jar"],
default = Label("//ledger/sandbox:sandbox-binary_deploy.jar"),
),
"_java": attr.label(
executable = True,
cfg = "target",
allow_files = True,
default = Label("@bazel_tools//tools/jdk:java"),
),
},
executable = True,
)
"""
Executable target that runs the DAML sandbox on the given DAR package.
Example:
```
daml_binary(
name = "example-exec",
dar = ":dar-out/com/digitalasset/sample/example/0.1/example-0.1.dar",
)
```
This target can be executed as follows:
```
$ bazel run //:example-exec
```
Command-line arguments can be passed to the sandbox as follows:
```
$ bazel run //:example-exec -- --help
```
"""
def _daml_compile_dalf_output_impl(name):
return {"dalf": name + ".dalf"}
dalf_compile = rule(
implementation = _daml_impl_compile_dalf,
attrs = {
"main_src": attr.label(
allow_single_file = [".daml"],
mandatory = True,
doc = "The main DAML file that will be passed to the compiler.",
),
"srcs": attr.label_list(
allow_files = [".daml"],
default = [],
doc = "Other DAML files that compilation depends on.",
),
"target": attr.string(doc = "DAML-LF version to output"),
"damlc": attr.label(
executable = True,
cfg = "host",
allow_files = True,
default = Label("//compiler/damlc"),
),
},
executable = False,
outputs = _daml_compile_dalf_output_impl,
)
"""
Stripped down version of daml_compile that does not package DALFs into DARs
"""
daml_sandbox_version = "6.0.0"
``` |
{
"source": "jordan-m-lee/yield_mimic",
"score": 3
} |
#### File: jordan-m-lee/yield_mimic/yield_main.py
```python
import numpy as np
import matplotlib.pyplot as plt
import os
import operator
import time
from quick_yield import yield_average
class wave:
def __init__(self, date, energy, filament, grid, focus, data, average):
self.date = date
self.energy = energy
self.filament = filament
self.grid = grid
self.focus = focus
self.data = data
self.average = average
#file = '--';
BIG = []
path = '--'
for file in os.listdir(path):
fullname = path + "/" + file
with open(fullname) as fp:
headers = []
for num in range(4):
line = fp.readline()
line = line.rstrip('\t')
headers.append(line.rstrip('\n'))
for num in range(4):
            holder = headers[num].split()
            headers[num] = float(holder[1])
        raw_data = np.loadtxt(fullname, skiprows=5)
uniqueness = time.ctime(os.path.getmtime(fullname))
BIG.append(wave(uniqueness, headers[0], headers[1], headers[2], headers[3], raw_data, yield_average(raw_data)))
#print('Energy: ' + str(headers[0]) + 'eV')
BIG.sort(key=operator.attrgetter('energy'))
``` |
{
"source": "jordan-moore/commander-randomizer",
"score": 3
} |
#### File: jordan-moore/commander-randomizer/commander-randomizer.py
```python
import requests
import json
import math
import time
from tqdm import tqdm
from random import randint
import webbrowser
MAX_CARD_RESULTS = 175
class Card(object):
name = ""
scryfall_uri = 0
png = ""
color_identity = ""
id = ""
# The class "constructor" - It's actually an initializer
def __init__(self, name, id, scryfall_uri, png, color_identity):
self.name = name
self.id = id
self.scryfall_uri = scryfall_uri
self.png = png
self.color_identity = color_identity
def __str__(self):
return '{\'name\': \'' + str(self.name) + '\', \'id\': \'' + str(self.id) + '\', \'scryfall_uri\': \'' + str(self.scryfall_uri) + '\', \'png\': \'' + str(self.png) + '\', \'color_identity\': ' + str(self.color_identity) + '}'
def get_commanders(page):
url = "https://api.scryfall.com/cards/search"
querystring = {"q": "f:commander is:commander", "order": "artist", "page": str(page)}
headers = {
'Content-Type': "application/json"
}
return requests.request("GET", url, headers=headers, params=querystring)
def pause():
    input("Press ENTER for another or CTRL + C to quit...")
def add_cards(new_card_array, cards, page, last_page):
cards = list(cards)
print('Loading Cards Page ' + str(page) + ' of ' + str(last_page))
for card in tqdm(new_card_array):
# name, id, scryfall_uri, png, color_identity
name = card['name']
card_id = card['id']
scryfall_uri = card['scryfall_uri']
try:
png = card['image_uris']['png']
except KeyError:
png = "No PNG Image"
color_identity = card['color_identity']
new_card = Card(name, card_id, scryfall_uri, png, color_identity)
time.sleep(0.01)
# print('Adding Card: ' + str(new_card))
# print()
cards.append(new_card)
return cards
def get_commander(commanders):
    rand = randint(0, len(commanders) - 1)
    return commanders[rand]
if __name__ == "__main__":
page = 1
last_page = 1
has_more = True
new_card_data = []
cards = list()
num_commanders = 0
while has_more is True:
response = get_commanders(page)
data = json.loads(response.text)
# print("Response Data:")
# print(str(data))
if page == 1:
num_commanders = 0
new_card_data = []
for key, value in data.items():
if key == 'total_cards' and page == 1:
num_commanders = int(value)
last_page = str(math.ceil(num_commanders / MAX_CARD_RESULTS))
print('Commanders Found: ' + str(num_commanders) + '\n')
if key == 'has_more':
has_more = bool(value)
time.sleep(0.02)
# print('Has More: ' + str(has_more))
time.sleep(0.02)
if key == 'data':
new_card_data = value
# print('New Card Data: ' + str(new_card_data))
print()
cards = add_cards(new_card_data, cards, page, last_page)
time.sleep(0.02)
        print('\nCommanders: ' + str(len(cards)) + ' out of ' + str(num_commanders))
time.sleep(0.02)
page += 1
all_commanders = cards
multicolored_commanders = []
monocolored_commanders = []
for commander_n in all_commanders:
print(str(commander_n.color_identity))
if len(commander_n.color_identity) < 2:
monocolored_commanders.append(commander_n)
print("Monocolored!")
else:
multicolored_commanders.append(commander_n)
print("Multicolored!")
print('Loading Complete!')
while True:
commander = get_commander(cards)
print('\nYour Random Commander Is: ')
time.sleep(0.2)
print(commander)
webbrowser.open(commander.scryfall_uri)
time.sleep(0.1)
print()
pause()
``` |
{
"source": "JordanMoose/MAD-Donations",
"score": 3
} |
#### File: backend/flaskr/routes.py
```python
from flask import jsonify, request
from mongoengine.connection import connect, disconnect
from mongoengine import DoesNotExist
from flaskr.server import app
from flaskr.models import User, Subscription, Transaction
import flaskr.constants as const
@app.route("/")
def hello():
return "Hello world"
@app.route("/connect/")
def connectToMongo():
connect(db='mad-donations', host=const.MONGO_URI)
return "Connected to mongo?"
@app.route("/disconnect/")
def resetMongo():
disconnect()
return "Maybe disconnected from Mongo"
# Test endpoint to get React, Flask, and Mongo hooked up
@app.route("/getUser/id/<string:id>/")
def getUserById(id):
return {
'_id': "512123",
'name': "danielle"
}
@app.route("/getUser/name/<string:name>/")
def getUserByName(name):
return {
'_id': '1234',
'firstName': 'Adam',
'lastName': 'Ash'
}
@app.route("/home/")
def home():
return "This is the homepage"
@app.route("/login/")
def login():
return "This is the login page"
@app.route("/account/")
def account():
return "You shouldn't be here!"
@app.route("/account/<int:id>/")
def getInfo(id):
return "This is account %s" % id
@app.route("/orgs/")
def orgs():
return "Here are all of our orgs"
@app.route("/orgs/featured/")
def featuredOrgs():
return "Here are our featured organizations of the month."
@app.route("/orgs/<int:id>")
def getOrg(id):
return "This is organization %s" % id
#–––––––––––––#
# User Routes #
#–––––––––––––#
@app.route("/user/create/", methods=["POST"])
def createUser():
userData = request.json
newUser = User(displayName=userData['displayName'], email=userData['email'])
try:
saved = newUser.save(force_insert=True)
except:
return "Error saving user to database."
return "User created: %s" % (saved.displayName)
@app.route("/user/<string:email>/", methods=["GET"])
def getUser(email):
try:
user = User.objects.get(email=email)
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
return str(user)
@app.route("/user/<string:email>/", methods=["PATCH"])
def editUserInfo(email):
try:
user = User.objects.get(email=email)
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
updateData = request.json
    app.logger.info(updateData)
for k, v in updateData.items():
user[k] = v
try:
saved = user.save()
except:
"Error updating user."
return "Info updated for user: %s" % (user.displayName)
@app.route("/user/<string:email>/", methods=["DELETE"])
def deleteUser(email):
try:
user = User.objects.get(email=email)
displayName = user.displayName
user.delete()
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
return "User deleted: %s" % (displayName)
@app.route("/user/<string:email>/causes/", methods=["GET"])
def getUserCauses(email):
try:
user = User.objects.get(email=email)
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
return str(user.supportedCauses).replace("'", '"')
@app.route("/user/<string:email>/activeSubscriptions/", methods=["GET"])
def getUserActiveSubscriptions(email):
try:
user = User.objects.get(email=email)
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
return str(user.activeSubscriptions)
@app.route("/user/<string:email>/expiredSubscriptions/", methods=["GET"])
def getUserExpiredSubscriptions(email):
try:
user = User.objects.get(email=email)
except DoesNotExist:
return "No user with that email."
except:
return "An unknown error occurred."
return str(user.expiredSubscriptions)
#–––––––––––––––––––––#
# Subscription Routes #
#–––––––––––––––––––––#
@app.route("/subscription/create/", methods=["POST"])
def createSubscription():
subData = request.json
newSub = Subscription(cause=subData['cause'], monthlyAmount=subData['monthlyAmount'], status=subData['status'])
try:
saved = newSub.save(force_insert=True)
except:
return "Error saving subscription to database."
return "Subscription created: %s" % (saved.id)
# Deleting a subscription means updating status to expired
@app.route("/subscription/<string:id>/delete/", methods=["PATCH"])
def deleteSubscription(id):
try:
sub = Subscription.objects.get(id=id)
except DoesNotExist:
return "No subscription with that id."
except:
return "An unknown error occurred."
if sub.status == "expired":
return "This subscription is already expired."
expired = sub.modify(status="expired")
if not expired:
return "Error moving subscription from active to expired."
return "Subscription deleted: %s" % (id)
@app.route("/subscription/<string:id>/edit/", methods=["PATCH"])
def editSubscriptionAmount(id):
try:
oldSub = Subscription.objects.get(id=id)
except DoesNotExist:
return "No subscription with that id."
except:
return "An unknown error occurred."
if oldSub.status == 'expired':
return "This subscription is expired."
if oldSub.status == 'updated':
        return "You tried to access a subscription whose monthly amount has already been updated. Make sure to use the most recent (active) subscription."
updateData = request.json
newMonthlyAmount = updateData['monthlyAmount']
if newMonthlyAmount == oldSub.monthlyAmount:
return "No change in subscription monthly amount. Skipping update."
updated = oldSub.modify(status='updated')
if not updated:
return "Error changing old subscription status to updated."
newSub = Subscription(cause=oldSub.cause, monthlyAmount=newMonthlyAmount, status='active').save()
return "Monthly amount for subscription %s updated from $%.2f to $%.2f." % (newSub.id, oldSub.monthlyAmount, newSub.monthlyAmount)
#––––––––––––––––––––––#
# Product Stats Routes #
#––––––––––––––––––––––#
@app.route("/stats/users/total/", methods=["GET"])
def getTotalUsers():
return str(User.objects.count())
# Total amount raised
@app.route("/stats/transactions/total/", methods=["GET"])
def getTotalAmountRaised():
return "$%.2f" % (Transaction.objects.sum('amount'))
@app.route("/listConnections/")
def listConnections():
s = ""
for user in User.objects:
s += (user.displayName) + "\n"
return s
```
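For reference, a minimal client-side sketch of exercising the user routes above with the `requests` library. The base URL is a hypothetical local deployment, not something the repository specifies.

```python
import requests

BASE = "http://localhost:5000"  # hypothetical host/port; adjust to your deployment

# Create a user, rename them via the PATCH route, then delete them.
resp = requests.post(BASE + "/user/create/",
                     json={"displayName": "Adam Ash", "email": "adam@example.com"})
print(resp.text)  # "User created: Adam Ash" on success

resp = requests.patch(BASE + "/user/adam@example.com/", json={"displayName": "A. Ash"})
print(resp.text)  # "Info updated for user: A. Ash"

resp = requests.delete(BASE + "/user/adam@example.com/")
print(resp.text)  # "User deleted: A. Ash"
```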
{
"source": "jordanm/scheme",
"score": 2
}
#### File: jordanm/scheme/bakefile.py
```python
from bake import *
@task()
def buildpkg(runtime):
Path('dist').rmtree(True)
runtime.shell('python setup.py sdist bdist_wheel')
Path('build').rmtree()
Path('scheme.egg-info').rmtree()
@task()
def buildhtml(runtime):
runtime.execute('sphinx.html', sourcedir='docs')
@task()
def clean(runtime):
for target in ('build', 'cover', 'dist', 'scheme.egg-info'):
Path(target).rmtree(True)
@task()
def runtests(runtime):
try:
import coverage
except ImportError:
coverage = None
cmdline = ['nosetests']
if coverage:
cmdline.extend(['--with-coverage', '--cover-html', '--cover-erase',
'--cover-package=scheme'])
runtime.shell(cmdline, passthrough=True)
```
#### File: scheme/scheme/element.py
```python
from scheme.fields import *
from scheme.util import with_metaclass
__all__ = ('Element',)
class ElementMeta(type):
def __new__(metatype, name, bases, namespace):
element = type.__new__(metatype, name, bases, namespace)
if element.polymorphic_identity:
base = bases[0]
if getattr(base, '__polymorphic_on__', None):
base.__polymorphic_impl__[element.polymorphic_identity] = element
return element
else:
raise ValueError("an Element declaration which specifies 'polymorphic_identity'"
" must inherit from a polymorphic base Element")
if element.schema is None:
return element
element.__polymorphic_on__ = None
schema = element.schema
if isinstance(schema, Structure):
element.__attrs__ = schema.generate_defaults(sparse=False)
if schema.polymorphic:
element.__polymorphic_on__ = schema.polymorphic_on.name
element.__polymorphic_impl__ = {}
elif isinstance(schema, Field):
if schema.name:
element.__attrs__ = {schema.name: schema.default}
else:
raise ValueError("class attribute 'schema' must have a valid 'name' attribute")
else:
raise TypeError("class attribute 'schema' must be either None or a Field instance")
schema.instantiator = element.instantiate
schema.extractor = element.extract
return element
@with_metaclass(ElementMeta)
class Element(object):
"""A representational class for schema-based objects.
A subclass of Element is defined by an associated schema, which is typically though not
always a :class:`Structure`, and provides a fluent, object-based approach to working
with values for that schema.
Consider this example::
class Document(Element):
schema = Structure({
'name': Text(nonempty=True),
'description': Text(),
}, nonnull=True)
    :var str key_attr: Optional, default is ``None``; if specified at the class level,
        indicates the instance attribute to which the key is assigned when an instance
        of this Element is created from a keyed container (see :meth:`instantiate`).
    :var str polymorphic_identity: Optional, default is ``None``; if specified at the class level,
        indicates the identity value which associates this subclass with its polymorphic
        base Element.
:var schema: The :class:`Field` instance which defines the schema for this subclass of
Element.
"""
key_attr = None
polymorphic_identity = None
schema = None
def __init__(self, **params):
polymorphic_on = self.__polymorphic_on__
if polymorphic_on:
defaults = self.__attrs__[params[polymorphic_on]]
else:
defaults = self.__attrs__
for attr, default in defaults.items():
setattr(self, attr, params.get(attr, default))
def __repr__(self):
aspects = []
for attr in ('id', 'name', 'title'):
value = getattr(self, attr, None)
if value is not None:
aspects.append('%s=%r' % (attr, value))
return '%s(%s)' % (type(self).__name__, ', '.join(aspects))
@classmethod
def extract(cls, field, subject):
if isinstance(field, Structure):
return subject.__dict__
else:
return getattr(subject, field.name)
@classmethod
def instantiate(cls, field, value, key=None):
instance = None
if isinstance(field, Structure):
polymorphic_on = cls.__polymorphic_on__
if polymorphic_on:
identity = value[polymorphic_on]
if identity in cls.__polymorphic_impl__:
impl = cls.__polymorphic_impl__[identity]
instance = impl(**value)
if instance is None:
instance = cls(**value)
else:
instance = cls(**{field.name: value})
if key is not None and cls.key_attr:
setattr(instance, cls.key_attr, key)
return instance
def serialize(self, format=None):
schema = self.__class__.schema
return schema.serialize(schema.extract(self), format)
@classmethod
def unserialize(cls, value, format=None):
return cls.schema.instantiate(cls.schema.unserialize(value, format))
```
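A short usage sketch of the Element machinery above, mirroring the Document example from the class docstring; it assumes Structure and Text are importable from the scheme package, as the tests later in this document do.

```python
from scheme import Element, Structure, Text

class Document(Element):
    schema = Structure({
        'name': Text(nonempty=True),
        'description': Text(),
    }, nonnull=True)

# unserialize() routes through the schema's instantiator (Element.instantiate),
# so the result is a Document instance rather than a plain dict.
doc = Document.unserialize({'name': 'spec', 'description': 'a design note'})
print(doc.name)         # 'spec'
print(doc.serialize())  # {'name': 'spec', 'description': 'a design note'}
```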
#### File: scheme/scheme/exceptions.py
```python
from traceback import format_exc
from scheme.util import format_structure, indent
__all__ = ('CannotDescribeError', 'CannotExtractError', 'CannotFilterError',
'CannotInterpolateError', 'FieldExcludedError', 'InvalidTypeError', 'StructuralError',
'UndefinedFieldError', 'UndefinedParameterError', 'ValidationError')
class SchemeError(Exception):
"""Base scheme exception."""
class CannotDescribeError(SchemeError):
"""Raised when a parameter to a field cannot be described for serialization."""
class CannotExtractError(SchemeError):
"""Raised when a proper value cannot be extracted for a field."""
class CannotFilterError(SchemeError):
"""Raised when a filtering operation on a field results in a broken field."""
class CannotInterpolateError(SchemeError):
"""Raised when a proper value cannot be interpolated for a field."""
class FieldExcludedError(SchemeError):
"""Raised when a field is excluded during the extraction of a value."""
class UndefinedFieldError(SchemeError):
    """Raised when a field is used while one of its sub-fields remains undefined."""
class UndefinedParameterError(SchemeError):
"""Raised when interpolation encounters an undefined parameter."""
class StructuralError(SchemeError):
"""A structural error.
:param *errors: All positional arguments are expected to be ``dict`` values representing
non-structural errors.
:param field: Optional, default is ``None``; if specified, the :class:`Field` instance
which is generating this ``StructuralError``.
:param identity: Optional, default is ``None``; if specified, a ``list`` of ``str`` values
which when concatenated will describe the location of ``field`` within a hierarchical
schema.
:param structure: Optional, default is ``None``; if specified, a potentially hierarchical
structure containing errors.
:param value: Optional, default is ``None``; if specified, the candidate value which caused
``field`` to generate this structural error.
:param **params: Additional keyword parameters, if any, are assumed to comprise another
non-structural error if ``token`` is present.
"""
def __init__(self, *errors, **params):
self.errors = list(errors)
self.field = params.pop('field', None)
self.identity = params.pop('identity', '(unknown)')
self.structure = params.pop('structure', None)
self.tracebacks = None
self.value = params.pop('value', None)
if params and 'token' in params:
self.errors.append(params)
def __str__(self):
return '\n'.join(['validation failed'] + self.format_errors())
@property
def substantive(self):
return (self.errors or self.structure)
def append(self, error):
self.errors.append(error)
return self
def attach(self, structure):
self.structure = structure
return self
def capture(self):
if self.tracebacks is None:
self.tracebacks = []
self.tracebacks.append(format_exc())
return self
def format_errors(self):
errors = []
if self.errors:
self._format_errors(errors)
if self.structure:
self._format_structure(errors)
enumerated_errors = []
for i, error in enumerate(errors):
enumerated_errors.append('[%02d] %s' % (i + 1, indent(error, 5, False)))
return enumerated_errors
def merge(self, exception):
self.errors.extend(exception.errors)
return self
def serialize(self, force=False):
if not force:
try:
return self._serialized_errors
except AttributeError:
pass
if self.errors:
errors = self._serialize_errors(self.errors)
else:
errors = None
if self.structure:
structure = self._serialize_structure()
else:
structure = None
self._serialized_errors = (errors, structure)
return self._serialized_errors
@classmethod
def unserialize(cls, value):
errors, structure = value
return cls(*errors, **{'structure': structure})
def _format_errors(self, errors):
field = self.field
if not field:
return
identity = ''.join(self.identity)
for error in self.errors:
definition = field.errors[error['token']]
lines = ['%s error at %s: %s' % (error['title'].capitalize(), identity,
error['message'])]
if definition.show_field:
lines.append('Field: %r' % field)
if definition.show_value and self.value is not None:
lines.append('Value: %r' % self.value)
if self.tracebacks:
lines.append('Captured tracebacks:')
for traceback in self.tracebacks:
lines.append(indent(traceback, 2))
errors.append('\n'.join(lines))
def _format_structure(self, errors):
structure = self.structure
if isinstance(structure, dict):
structure = structure.values()
for item in structure:
if isinstance(item, StructuralError):
if item.structure is not None:
item._format_structure(errors)
else:
item._format_errors(errors)
def _serialize_errors(self, errors):
serialized = []
for error in errors:
if isinstance(error, dict):
serialized.append(error)
else:
serialized.append({'message': error})
return serialized
    def _serialize_structure(self):
        structure = self.structure
        if isinstance(structure, list):
            errors = []
            for item in structure:
                if isinstance(item, StructuralError):
                    if item.structure is not None:
                        errors.append(item._serialize_structure())
                    else:
                        errors.append(self._serialize_errors(item.errors))
                else:
                    errors.append(None)
            return errors
        elif isinstance(structure, dict):
            errors = {}
            for attr, value in structure.items():
                if isinstance(value, StructuralError):
                    if value.structure is not None:
                        errors[attr] = value._serialize_structure()
                    else:
                        errors[attr] = self._serialize_errors(value.errors)
            return errors
class ValidationError(StructuralError):
"""Raised when field validation fails."""
def construct(self, error, **params):
error = self.field.errors[error]
return self.append({'token': error.token, 'title': error.title,
'message': error.format(self.field, params)})
class InvalidTypeError(ValidationError):
    """Raised when field validation fails due to the type of the value."""
```
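To make the error flow concrete, a hedged sketch of how a field reports a failure: the field calls construct() with a token from its errors list and raises the result. Float here is the field defined later in this document; the exact formatted message depends on FieldError.format, which is not shown.

```python
from scheme import Float, ValidationError

field = Float(minimum=0.0, name='rate')
try:
    field.process(-1.0)  # fails the 'minimum' check in Float._validate_value
except ValidationError as error:
    # format_errors() numbers each error, e.g.
    # "[01] Minimum value error at rate: ..."
    print(error.format_errors()[0])
```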
#### File: scheme/fields/datetime.py
```python
from __future__ import absolute_import
from datetime import datetime
from time import strptime
from scheme.exceptions import *
from scheme.field import *
from scheme.timezone import *
__all__ = ('DateTime',)
class DateTime(Field):
"""A field for ``datetime`` values."""
basetype = 'datetime'
equivalent = datetime
parameters = {'maximum': None, 'minimum': None, 'utc': False}
pattern = '%Y-%m-%dT%H:%M:%SZ'
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a datetime value'),
FieldError('minimum', 'minimum value', '%(field)s must not occur before %(minimum)s'),
FieldError('maximum', 'maximum value', '%(field)s must not occur after %(maximum)s'),
]
def __init__(self, minimum=None, maximum=None, utc=False, **params):
super(DateTime, self).__init__(**params)
if utc:
self.timezone = UTC
else:
self.timezone = LOCAL
if maximum is not None:
try:
maximum = self._normalize_value(self._unserialize_value(maximum))
except InvalidTypeError:
raise TypeError("argument 'maximum' must be either None, a datetime.datetime,"
" or a string in the format 'YYYY-MM-DDTHH:MM:SSZ'")
if minimum is not None:
try:
minimum = self._normalize_value(self._unserialize_value(minimum))
except InvalidTypeError:
raise TypeError("argument 'minimum' must be either None, a datetime.datetime,"
" or a string in the format 'YYYY-MM-DDTHH:MM:SSZ'")
self.maximum = maximum
self.minimum = minimum
self.utc = utc
def __repr__(self):
aspects = []
if self.minimum is not None:
aspects.append('minimum=%r' % self.minimum.strftime(self.pattern))
if self.maximum is not None:
aspects.append('maximum=%r' % self.maximum.strftime(self.pattern))
if self.utc:
aspects.append('utc=True')
return super(DateTime, self).__repr__(aspects)
def describe(self, parameters=None, verbose=False):
params = {}
if self.maximum:
params['maximum'] = self.maximum.strftime(self.pattern)
if self.minimum:
params['minimum'] = self.minimum.strftime(self.pattern)
return super(DateTime, self).describe(parameters, verbose, **params)
def _normalize_value(self, value):
timezone = self.timezone
if value.tzinfo is not None:
if value.tzinfo is timezone:
return value
else:
return value.astimezone(timezone)
else:
return value.replace(tzinfo=timezone)
def _serialize_value(self, value):
if value.tzinfo is not UTC:
value = value.astimezone(UTC)
return value.strftime(self.pattern)
def _unserialize_value(self, value, ancestry=None):
if isinstance(value, datetime):
return value
try:
unserialized = datetime(*strptime(value, self.pattern)[:6])
return unserialized.replace(tzinfo=UTC)
except Exception:
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
def _validate_value(self, value, ancestry):
if isinstance(value, datetime):
value = self._normalize_value(value)
else:
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
minimum = self.minimum
if minimum is not None and value < minimum:
raise ValidationError(identity=ancestry, field=self, value=value).construct(
'minimum', minimum=minimum.strftime(self.pattern))
maximum = self.maximum
if maximum is not None and value > maximum:
raise ValidationError(identity=ancestry, field=self, value=value).construct(
'maximum', maximum=maximum.strftime(self.pattern))
return value
```
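A brief sketch of the DateTime field in use; INBOUND, OUTBOUND, and the process() entry point come from the base Field class, which is not shown here but is exercised the same way in the tests below.

```python
from scheme import DateTime
from scheme.field import INBOUND, OUTBOUND

field = DateTime(utc=True)

# Serialized input is parsed by _unserialize_value, then normalized to UTC.
value = field.process('2021-06-01T12:00:00Z', INBOUND, serialized=True)
print(value)  # datetime(2021, 6, 1, 12, 0, tzinfo=UTC)

# The OUTBOUND phase reverses the trip via _serialize_value.
print(field.process(value, OUTBOUND, serialized=True))  # '2021-06-01T12:00:00Z'
```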
#### File: scheme/fields/float.py
```python
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import numbers
__all__ = ('Float',)
class Float(Field):
"""A field for ``float`` values."""
basetype = 'float'
parameters = {'maximum': None, 'minimum': None}
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a floating-point number'),
        FieldError('minimum', 'minimum value', '%(field)s must be greater than or equal to %(minimum)f'),
        FieldError('maximum', 'maximum value', '%(field)s must be less than or equal to %(maximum)f'),
]
def __init__(self, minimum=None, maximum=None, **params):
super(Float, self).__init__(**params)
if not (minimum is None or isinstance(minimum, float)):
raise TypeError("argument 'minimum' must be either None or a float")
if not (maximum is None or isinstance(maximum, float)):
raise TypeError("argument 'maximum' must be either None or a float")
self.maximum = maximum
self.minimum = minimum
def __repr__(self):
aspects = []
if self.minimum is not None:
aspects.append('minimum=%r' % self.minimum)
if self.maximum is not None:
aspects.append('maximum=%r' % self.maximum)
return super(Float, self).__repr__(aspects)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return None
elif isinstance(subject, numbers):
return float(subject)
else:
return float(interpolate_parameters(subject, parameters, True, interpolator))
def _unserialize_value(self, value, ancestry):
try:
return float(value)
except Exception:
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
def _validate_value(self, value, ancestry):
if not isinstance(value, float):
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
minimum = self.minimum
if minimum is not None and value < minimum:
raise ValidationError(identity=ancestry, field=self, value=value).construct(
'minimum', minimum=minimum)
maximum = self.maximum
if maximum is not None and value > maximum:
raise ValidationError(identity=ancestry, field=self, value=value).construct(
'maximum', maximum=maximum)
```
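A small sketch of Float's interpolate() behavior above: numbers pass straight through as floats, while template strings are resolved against the supplied parameters first. The ${...} syntax follows the interpolation tests later in this document.

```python
from scheme import Float

field = Float(minimum=0.0)
print(field.interpolate(3, {}))                   # 3.0 (numbers are coerced)
print(field.interpolate('${rate}', {'rate': 2}))  # 2.0 (parameter resolved, then coerced)
print(field.interpolate(None, {}))                # None passes through untouched
```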
#### File: scheme/fields/map.py
```python
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import string
__all__ = ('Map',)
class Map(Field):
"""A field for homogeneous mappings of key/value pairs.
    A map can contain any number of key/value pairs, but each key and each value must
    respectively be a valid value for the corresponding ``key`` and ``value`` fields
    defined for the map. In Python, a map value is expressed as a ``dict``.
In contrast to :class:`Structure`, the keys of a map are not specified as part of the field
definition.
:param value: A :class:`Field` instance which defines the values that can be contained by
a valid map value for this field.
:param key: Optional, default is ``None``; if specified, a :class:`Field` instance which
defines the keys that can be contained by a valid map value for this field. If ``None``,
the field will accept any ``str`` value for keys.
:param list required_keys: Optional, default is ``None``; if specified, a list of keys required
to be present in a valid map value for this field. Each such key must be a valid value for
the field specified for ``key``. Can also be specified as a single space-delimited string,
when ``key`` is string-based.
:raises TypeError: when a constructor parameter is invalid
"""
basetype = 'map'
parameters = {'required_keys': None}
structural = True
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a map'),
FieldError('invalidkeys', 'invalid keys', '%(field)s must have valid keys'),
FieldError('required', 'required key', "%(field)s is missing required key '%(name)s'"),
]
def __init__(self, value, key=None, required_keys=None, **params):
super(Map, self).__init__(**params)
if isinstance(value, Undefined):
if value.field:
value = value.field
else:
value.register(self._define_undefined_field)
elif not isinstance(value, Field):
raise TypeError("argument 'value' must be a Field instance")
if key is not None and not isinstance(key, Field):
raise TypeError("argument 'key' must be either None or a Field instance")
if isinstance(required_keys, string):
required_keys = required_keys.split(' ')
if required_keys is not None and not isinstance(required_keys, (list, tuple)):
raise TypeError("argument 'required_keys' must be either None, a list or tuple of"
" string values, or a string of space-separated values")
self.key = key
self.required_keys = required_keys
self.value = value
def __repr__(self):
aspects = []
if self.key:
aspects.append('key=%r' % self.key)
aspects.append('value=%r' % self.value)
if self.required_keys:
aspects.append('required_keys=%r' % sorted(self.required_keys))
return super(Map, self).__repr__(aspects)
def describe(self, parameters=None, verbose=False):
default = None
if self.default:
default = {}
for key, value in self.default.items():
default[key] = self.value.process(value, OUTBOUND, True)
params = {'value': self.value.describe(parameters, verbose), 'default': default}
if self.key:
params['key'] = self.key.describe(parameters, verbose)
return super(Map, self).describe(parameters, verbose, **params)
def extract(self, subject, strict=True, **params):
if params and not self.screen(**params):
raise FieldExcludedError(self)
if subject is None:
return None
if self.extractor:
try:
subject = self.extractor(self, subject)
except Exception:
raise CannotExtractError('extractor raised exception')
if not isinstance(subject, dict):
raise CannotExtractError('extraction candidate must be a dict value')
definition = self.value
extraction = {}
for key, value in subject.items():
try:
extraction[key] = definition.extract(value, strict, **params)
except FieldExcludedError:
pass
except AttributeError:
if isinstance(definition, Undefined):
raise UndefinedFieldError("the 'value' field of this map is undefined")
else:
raise
return extraction
def filter(self, all=False, **params):
if not super(Map, self).filter(all, **params):
return None
        value = self.value.filter(all, **params)
if value is self.value:
return self
elif value:
return self.clone(value=value)
else:
raise CannotFilterError(self)
def instantiate(self, value, key=None):
if value is None:
return None
definition = self.value
candidate = {}
for k, v in value.items():
try:
candidate[k] = definition.instantiate(v, k)
except AttributeError:
if isinstance(definition, Undefined):
raise UndefinedFieldError("the 'value' field of this map is undefined")
else:
raise
return super(Map, self).instantiate(candidate, key)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return None
if isinstance(subject, string):
subject = interpolate_parameters(subject, parameters, True, interpolator)
if not isinstance(subject, dict):
raise CannotInterpolateError('interpolation candidate must be a dict value')
definition = self.value
interpolation = {}
for key, value in subject.items():
try:
interpolation[key] = definition.interpolate(value, parameters, interpolator)
except UndefinedParameterError:
continue
except AttributeError:
if isinstance(definition, Undefined):
raise UndefinedFieldError("the 'value' field of this map is undefined")
else:
raise
return interpolation
def process(self, value, phase=INBOUND, serialized=False, ancestry=None):
if not ancestry:
ancestry = [self.guaranteed_name]
if self._is_null(value, ancestry):
return None
if not isinstance(value, dict):
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
if self.preprocessor:
value = self.preprocessor(value)
valid = True
key_field = self.key
value_field = self.value
map = {}
for name, subvalue in value.items():
if key_field:
try:
name = key_field.process(name, phase, serialized, ancestry + ['[%s]' % name])
except StructuralError as exception:
raise ValidationError(identity=ancestry, field=self, value=value).construct('invalidkeys')
elif not isinstance(name, string):
raise ValidationError(identity=ancestry, field=self, value=value).construct('invalidkeys')
try:
map[name] = value_field.process(subvalue, phase, serialized, ancestry + ['[%s]' % name])
except StructuralError as exception:
valid = False
map[name] = exception
except AttributeError:
if isinstance(value_field, Undefined):
raise UndefinedFieldError('the value field of this map is undefined')
else:
raise
if self.required_keys:
for name in self.required_keys:
if name not in map:
valid = False
map[name] = ValidationError(identity=ancestry, field=self).construct(
'required', name=name)
if valid:
return map
else:
raise ValidationError(identity=ancestry, field=self, value=value, structure=map)
def transform(self, transformer):
candidate = transformer(self)
if isinstance(candidate, Field):
return candidate
elif candidate is False:
return self
candidate = self.value.transform(transformer)
if candidate is self.value:
return self
else:
return self.clone(value=candidate)
def _define_undefined_field(self, field):
self.value = field
@classmethod
def _visit_field(cls, specification, callback):
params = {'value': callback(specification['value'])}
if 'key' in specification:
params['key'] = callback(specification['key'])
return params
```
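A quick sketch of the Map field above, showing the space-delimited required_keys shorthand and the structural error raised when a required key is absent.

```python
from scheme import Integer, Map, ValidationError

field = Map(Integer(), required_keys='total')
print(field.process({'total': 3, 'extra': 4}))   # {'total': 3, 'extra': 4}

try:
    field.process({'extra': 4})                  # required key 'total' is missing
except ValidationError as error:
    print(error.format_errors()[0])              # mentions "missing required key 'total'"
```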
#### File: scheme/fields/sequence.py
```python
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import pluralize, string
__all__ = ('Sequence',)
class Sequence(Field):
"""A field for variable-length sequences of homogeneous items.
A sequence can contain any number of items, but each such item must be a valid value for the
    particular ``item`` field defined for the sequence. In Python, a sequence is expressed as a
``list``.
:param item: A :class:`Field` instance which defines the items that can be contained by a
valid sequence value for this field.
:param int min_length: Optional, default is ``None``; if specified, as an ``int`` >= 0,
indicates the minimum number of items that a sequence value must contain to be
valid for this field.
:param int max_length: Optional, default is ``None``; if specified, as an ``int`` >= 0,
indicates the maximum number of items that a sequence value can contain to be
valid for this field.
:param boolean unique: Optional, default is ``False``; if ``True``, indicates that a
valid sequence value for this field cannot contain duplicate items.
:raises TypeError: when a parameter to the constructor is invalid
"""
basetype = 'sequence'
parameters = {'min_length': None, 'max_length': None, 'unique': False}
structural = True
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a sequence'),
FieldError('min_length', 'minimum length', '%(field)s must have at least %(min_length)d %(noun)s'),
FieldError('max_length', 'maximum length', '%(field)s must have at most %(max_length)d %(noun)s'),
FieldError('duplicate', 'duplicate value', '%(field)s must not have duplicate values'),
]
def __init__(self, item, min_length=None, max_length=None, unique=False, **params):
super(Sequence, self).__init__(**params)
if isinstance(item, Undefined):
if item.field:
item = item.field
else:
item.register(self._define_undefined_field)
elif not isinstance(item, Field):
raise TypeError("argument 'item' must be a Field instance")
if not (min_length is None or (isinstance(min_length, int) and min_length >= 0)):
raise TypeError("argument 'min_length' must be either None or an integer >= 0")
if not (max_length is None or (isinstance(max_length, int) and max_length >= 0)):
raise TypeError("argument 'max_length' must be either None or an integer >= 0")
self.item = item
self.max_length = max_length
self.min_length = min_length
self.unique = unique
def __repr__(self):
aspects = ['item=%r' % self.item]
if self.min_length is not None:
aspects.append('min_length=%r' % self.min_length)
if self.max_length is not None:
aspects.append('max_length=%r' % self.max_length)
if self.unique:
aspects.append('unique=True')
return super(Sequence, self).__repr__(aspects)
def describe(self, parameters=None, verbose=False):
default = None
if self.default:
default = [self.item.process(value, OUTBOUND, True) for value in self.default]
return super(Sequence, self).describe(parameters, verbose,
item=self.item.describe(parameters, verbose), default=default)
def extract(self, subject, strict=True, **params):
if params and not self.screen(**params):
raise FieldExcludedError(self)
if subject is None:
return None
if self.extractor:
try:
subject = self.extractor(self, subject)
except Exception:
                raise CannotExtractError('extractor raised exception')
if not isinstance(subject, (list, tuple)):
raise CannotExtractError('extraction candidate must be a list or tuple value')
definition = self.item
extraction = []
for item in subject:
try:
extraction.append(definition.extract(item, strict, **params))
except FieldExcludedError:
pass
except AttributeError:
if isinstance(definition, Undefined):
raise UndefinedFieldError('the item field of this sequence is undefined')
else:
raise
return extraction
def filter(self, all=False, **params):
if not super(Sequence, self).filter(all, **params):
return None
item = self.item.filter(all, **params)
if item is self.item:
return self
elif item:
return self.clone(item=item)
else:
raise CannotFilterError(self)
def instantiate(self, value, key=None):
if value is None:
return None
candidate = []
for v in value:
try:
candidate.append(self.item.instantiate(v))
except AttributeError:
if isinstance(self.item, Undefined):
raise UndefinedFieldError('the item field of this sequence is undefined')
else:
raise
return super(Sequence, self).instantiate(candidate, key)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return None
if isinstance(subject, string):
subject = interpolate_parameters(subject, parameters, True, interpolator)
if not isinstance(subject, (list, tuple)):
raise CannotInterpolateError('interpolation candidate must be a list or tuple value')
definition = self.item
interpolation = []
for item in subject:
try:
interpolation.append(definition.interpolate(item, parameters, interpolator))
except AttributeError:
if isinstance(definition, Undefined):
raise UndefinedFieldError('the item field of this sequence is undefined')
else:
raise
return interpolation
def process(self, value, phase=INBOUND, serialized=False, ancestry=None):
if not ancestry:
ancestry = [self.guaranteed_name]
if self._is_null(value, ancestry):
return None
if not isinstance(value, list):
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
if self.preprocessor:
value = self.preprocessor(value)
min_length = self.min_length
if min_length is not None and len(value) < min_length:
raise ValidationError(identity=ancestry, field=self, value=value).construct('min_length',
min_length=min_length, noun=pluralize('item', min_length))
max_length = self.max_length
if max_length is not None and len(value) > max_length:
raise ValidationError(identity=ancestry, field=self, value=value).construct('max_length',
max_length=max_length, noun=pluralize('item', max_length))
valid = True
item = self.item
sequence = []
for i, subvalue in enumerate(value):
try:
sequence.append(item.process(subvalue, phase, serialized, ancestry + ['[%s]' % i]))
except StructuralError as exception:
valid = False
sequence.append(exception)
except AttributeError:
if isinstance(item, Undefined):
raise UndefinedFieldError('the item field of this sequence is undefined')
else:
raise
if not valid:
raise ValidationError(identity=ancestry, field=self, value=value, structure=sequence)
elif self.unique and len(set(sequence)) != len(sequence):
raise ValidationError(identity=ancestry, field=self, value=value).construct('duplicate')
else:
return sequence
def transform(self, transformer):
candidate = transformer(self)
if isinstance(candidate, Field):
return candidate
elif candidate is False:
return self
candidate = self.item.transform(transformer)
if candidate is self.item:
return self
else:
return self.clone(item=candidate)
def _define_undefined_field(self, field):
self.item = field
@classmethod
def _visit_field(cls, specification, callback):
return {'item': callback(specification['item'])}
```
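Similarly, a brief sketch of the Sequence field above, covering the min_length and unique constraints.

```python
from scheme import Integer, Sequence, ValidationError

field = Sequence(Integer(), min_length=1, unique=True)
print(field.process([1, 2, 3]))   # [1, 2, 3]

for bad in ([], [1, 1]):          # too short, then a duplicate
    try:
        field.process(bad)
    except ValidationError as error:
        print(error.format_errors()[0])
```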
#### File: scheme/fields/text.py
```python
import re
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import string
__all__ = ('Text',)
class Text(Field):
"""A field for text values."""
basetype = 'text'
parameters = {'max_length': None, 'min_length': None, 'strip': True}
pattern = None
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a textual value'),
FieldError('pattern', 'invalid value', '%(field)s has an invalid value'),
FieldError('min_length', 'minimum length',
'%(field)s must contain at least %(min_length)d non-whitespace %(noun)s'),
FieldError('max_length', 'maximum length',
'%(field)s may contain at most %(max_length)d %(noun)s'),
]
def __init__(self, pattern=None, min_length=None, max_length=None, strip=True,
nonempty=False, **params):
if nonempty:
params.update(required=True, nonnull=True)
if min_length is None:
min_length = 1
super(Text, self).__init__(**params)
if isinstance(pattern, string):
pattern = re.compile(pattern)
if not (min_length is None or (isinstance(min_length, int) and min_length >= 0)):
raise TypeError("argument 'min_length' must be either None or an integer >= 0")
if not (max_length is None or (isinstance(max_length, int) and max_length >= 0)):
raise TypeError("argument 'max_length' must be either None or an integer >= 0")
self.max_length = max_length
self.min_length = min_length
self.pattern = pattern
self.strip = strip
def __repr__(self, aspects=None):
if not aspects:
aspects = []
if self.min_length is not None:
aspects.append('min_length=%r' % self.min_length)
if self.max_length is not None:
aspects.append('max_length=%r' % self.max_length)
if not self.strip:
aspects.append('strip=False')
return super(Text, self).__repr__(aspects)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return None
else:
return interpolate_parameters(subject, parameters, interpolator=interpolator)
def _validate_value(self, value, ancestry):
if not isinstance(value, string):
raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid')
if self.strip:
value = value.strip()
min_length = self.min_length
if min_length is not None and len(value) < min_length:
noun = 'character'
if min_length > 1:
noun = 'characters'
raise ValidationError(identity=ancestry, field=self, value=value).construct('min_length',
min_length=min_length, noun=noun)
max_length = self.max_length
if max_length is not None and len(value) > max_length:
noun = 'character'
if max_length > 1:
noun = 'characters'
raise ValidationError(identity=ancestry, field=self, value=value).construct('max_length',
max_length=max_length, noun=noun)
if self.pattern and not self.pattern.match(value):
raise ValidationError(identity=ancestry, field=self, value=value).construct('pattern')
return value
```
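A short sketch of the Text field above: nonempty=True is shorthand for required, non-null, and min_length=1, and string patterns are compiled in the constructor.

```python
from scheme import Text, ValidationError

field = Text(nonempty=True, pattern=r'^[a-z]+$')
print(field.process('slug'))      # 'slug'

try:
    field.process('Not A Slug')   # fails the compiled pattern check
except ValidationError as error:
    print(error.format_errors()[0])
```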
#### File: scheme/fields/token.py
```python
import re
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import string
__all__ = ('Token',)
class Token(Field):
"""A field for token values."""
basetype = 'token'
pattern = re.compile(r'^\w[-+.\w]*(?<=\w)(?::\w[-+.\w]*(?<=\w))*$')
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a valid token')
]
def __init__(self, segments=None, **params):
super(Token, self).__init__(**params)
self.segments = segments
def __repr__(self):
aspects = []
if self.segments is not None:
aspects.append('segments=%r' % self.segments)
return super(Token, self).__repr__(aspects)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return subject
else:
return interpolate_parameters(subject, parameters, interpolator=interpolator)
def _validate_value(self, value, ancestry):
if not (isinstance(value, string) and self.pattern.match(value)):
raise InvalidTypeError(identity=ancestry, field=self,
value=value).construct('invalid')
if self.segments is not None and value.count(':') + 1 != self.segments:
raise ValidationError(identity=ancestry, field=self,
value=value).construct('invalid')
```
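And a sketch of the Token field's segments check above, which counts colon-separated segments against the declared count.

```python
from scheme import Token, ValidationError

field = Token(segments=2)
field.process('alpha:beta')       # two colon-separated segments: passes

try:
    field.process('alpha')        # only one segment: rejected
except ValidationError as error:
    print(error.format_errors()[0])
```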
#### File: scheme/fields/uuid.py
```python
import re
from scheme.exceptions import *
from scheme.field import *
from scheme.interpolation import interpolate_parameters
from scheme.util import string
__all__ = ('UUID',)
class UUID(Field):
"""A field for UUID values."""
basetype = 'text'
pattern = re.compile(r'^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$')
errors = [
FieldError('invalid', 'invalid value', '%(field)s must be a UUID')
]
def __init__(self, **params):
super(UUID, self).__init__(**params)
def interpolate(self, subject, parameters, interpolator=None):
if subject is None:
return subject
else:
return interpolate_parameters(subject, parameters, True, interpolator)
def _validate_value(self, value, ancestry):
if not (isinstance(value, string) and self.pattern.match(value)):
raise InvalidTypeError(identity=ancestry, field=self,
value=value).construct('invalid')
```
#### File: scheme/formats/urlencoded.py
```python
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
from urllib.parse import parse_qsl
except ImportError:
from cgi import parse_qsl
from scheme.formats.structuredtext import StructuredText
from scheme.util import string
__all__ = ('UrlEncoded',)
class UrlEncoded(StructuredText):
mimetype = 'application/x-www-form-urlencoded'
name = 'urlencoded'
@classmethod
def serialize(cls, content):
if not isinstance(content, dict):
raise ValueError(content)
data = []
for name, value in content.items():
data.append((name, cls._serialize_content(value)))
return urlencode(data)
@classmethod
def unserialize(cls, content):
if not isinstance(content, string):
raise ValueError(content)
data = {}
for name, value in parse_qsl(content):
if value[0] in ('{', '['):
value = cls._unserialize_structured_value(value)
else:
value = cls._unserialize_simple_value(value)
data[name] = value
return data
```
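A round-trip sketch for UrlEncoded above, restricted to flat string values since nested values depend on StructuredText helpers not shown here; the exact encoding of non-string values is therefore an assumption.

```python
from scheme.formats.urlencoded import UrlEncoded

encoded = UrlEncoded.serialize({'name': 'adam', 'role': 'admin'})
print(encoded)                           # e.g. 'name=adam&role=admin'
print(UrlEncoded.unserialize(encoded))   # {'name': 'adam', 'role': 'admin'}
```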
#### File: scheme/tests/test_binary.py
```python
from scheme import *
from tests.util import *
class TestBinary(FieldTestCase):
def test_construction(self):
with self.assertRaises(TypeError):
Binary(min_length='invalid')
with self.assertRaises(TypeError):
Binary(max_length='invalid')
field = Binary(nonempty=True)
self.assertTrue(field.required and field.nonnull
and field.min_length == 1)
field = Binary(nonempty=True, min_length=10)
self.assertTrue(field.required and field.nonnull
and field.min_length == 10)
def test_processing(self):
field = Binary()
self.assert_processed(field, None, b'',
(b'testing', b'dGVzdGluZw=='),
(b'\x00\x00', b'AAA='))
self.assert_not_processed(field, 'invalid', True, 1.0, u'')
def test_min_length(self):
field = Binary(min_length=1)
self.assert_processed(field, (b'\x00', b'AA=='), (b'\x00\x00', b'AAA='), (b'\x00\x00\x00', b'AAAA'))
self.assert_not_processed(field, 'min_length', (b'', b''))
field = Binary(min_length=2)
self.assert_processed(field, (b'\x00\x00', b'AAA='), (b'\x00\x00\x00', b'AAAA'))
self.assert_not_processed(field, 'min_length', (b'', b''), (b'\x00', b'AA=='))
def test_max_length(self):
field = Binary(max_length=2)
self.assert_processed(field, (b'', b''), (b'\x00', b'AA=='), (b'\x00\x00', b'AAA='))
self.assert_not_processed(field, 'max_length', (b'\x00\x00\x00', b'AAAA'))
def test_interpolation(self):
field = Binary()
self.assertEqual(field.interpolate(None, {}), None)
self.assertEqual(field.interpolate(b'\x00\x01', {}), b'\x00\x01')
```
#### File: scheme/tests/test_element.py
```python
try:
from unittest2 import TestCase
except ImportError:
from unittest import TestCase
from scheme import *
class TestElement(TestCase):
def test_invalid_schema(self):
with self.assertRaises(TypeError):
class Example(Element):
schema = True
def test_unnamed_schema(self):
with self.assertRaises(ValueError):
class Example(Element):
schema = Text()
def test_invalid_polymorphic_inheritor(self):
with self.assertRaises(ValueError):
class Example(Element):
polymorphic_identity = 'test'
def test_value_element(self):
class Example(Element):
schema = Text(nonempty=True, name='value')
ex = Example(value='test')
self.assertEqual(ex.value, 'test')
ex = Example.unserialize('test')
self.assertIsInstance(ex, Example)
self.assertEqual(ex.value, 'test')
ex = ex.serialize()
self.assertEqual(ex, 'test')
def test_structural_element(self):
class Example(Element):
schema = Structure({
'name': Text(nonempty=True),
'description': Text(),
'important': Boolean(default=False),
}, nonnull=True)
ex = Example(name='test')
self.assertEqual(ex.name, 'test')
self.assertIs(ex.description, None)
self.assertEqual(ex.important, False)
ex = Example.unserialize({'name': 'test', 'description': 'test',
'important': True})
self.assertIsInstance(ex, Example)
self.assertEqual(ex.name, 'test')
self.assertEqual(ex.description, 'test')
self.assertEqual(ex.important, True)
ex = ex.serialize()
self.assertEqual(ex, {'name': 'test', 'description': 'test',
'important': True})
def test_polymorphic_element(self):
class Example(Element):
schema = Structure(
structure={
'alpha': {
'alpha': Integer(nonempty=True),
},
'beta': {
'beta': Text(nonempty=True),
},
'gamma': {},
},
nonempty=True,
polymorphic_on='type')
class Alpha(Example):
polymorphic_identity = 'alpha'
class Beta(Example):
polymorphic_identity = 'beta'
alpha = Example.unserialize({'type': 'alpha', 'alpha': 1})
self.assertIsInstance(alpha, Alpha)
self.assertEqual(alpha.type, 'alpha')
self.assertEqual(alpha.alpha, 1)
alpha = alpha.serialize()
self.assertEqual(alpha, {'type': 'alpha', 'alpha': 1})
beta = Example.unserialize({'type': 'beta', 'beta': 'beta'})
self.assertIsInstance(beta, Beta)
self.assertEqual(beta.type, 'beta')
self.assertEqual(beta.beta, 'beta')
beta = beta.serialize()
self.assertEqual(beta, {'type': 'beta', 'beta': 'beta'})
gamma = Example.unserialize({'type': 'gamma'})
self.assertIsInstance(gamma, Example)
self.assertEqual(gamma.type, 'gamma')
gamma = gamma.serialize()
self.assertEqual(gamma, {'type': 'gamma'})
def test_key_attr(self):
class Item(Element):
schema = Text(nonempty=True, name='value')
key_attr = 'key'
class Example(Element):
schema = Structure({
'items': Map(Item.schema),
})
ex = Example.unserialize({'items': {'alpha': 'alpha', 'beta': 'beta'}})
self.assertIsInstance(ex, Example)
self.assertIsInstance(ex.items, dict)
self.assertIn('alpha', ex.items)
self.assertIn('beta', ex.items)
alpha = ex.items['alpha']
self.assertIsInstance(alpha, Item)
self.assertEqual(alpha.key, 'alpha')
self.assertEqual(alpha.value, 'alpha')
beta = ex.items['beta']
self.assertIsInstance(beta, Item)
self.assertEqual(beta.key, 'beta')
self.assertEqual(beta.value, 'beta')
ex = Example.serialize(ex)
self.assertEqual(ex, {'items': {'alpha': 'alpha', 'beta': 'beta'}})
```
#### File: scheme/tests/test_error.py
```python
from scheme import *
from tests.util import *
class TestError(FieldTestCase):
def test_processing(self):
field = Error()
self.assert_processed(field, None)
self.assert_not_processed(field, 'invalid', True)
error = StructuralError({'token': 'field'})
self.assertEqual(field.process(error, OUTBOUND, True), ([{'token': 'field'}], None))
self.assertIs(field.process(error, INBOUND, True), error)
error = field.process(([{'token': 'field'}], None), INBOUND, True)
self.assertIsInstance(error, StructuralError)
self.assertEqual(error.errors, [{'token': 'field'}])
```
#### File: scheme/tests/test_map.py
```python
from scheme import *
from tests.util import *
class TestMap(FieldTestCase):
def test_construction(self):
with self.assertRaises(TypeError):
Map(True)
with self.assertRaises(TypeError):
Map(Text(), True)
with self.assertRaises(TypeError):
Map(Text(), required_keys=True)
field = Map(Text(), required_keys='one two three')
self.assertEqual(field.required_keys, ['one', 'two', 'three'])
def test_processing(self):
field = Map(Integer())
self.assert_processed(field, None)
self.assert_not_processed(field, 'invalidkeys', {1: 1})
for valid in [{}, {'a': 1}, {'a': 1, 'b': 2}, {'a': None}]:
self.assert_processed(field, (valid, valid))
expected_error = ValidationError(structure={'a': INVALID_ERROR, 'b': 2})
self.assert_not_processed(field, expected_error, {'a': '', 'b': 2})
def test_preprocessing(self):
def preprocess(value):
return dict((k, v.lower()) for k, v in value.items())
field = Map(Text(), preprocessor=preprocess)
self.assert_processed(field, None, {}, {'a': 'one'}, {'a': 'one', 'b': 'two'})
self.assertEqual(field.process({'a': 'TEST', 'b': 'test'}), {'a': 'test', 'b': 'test'})
def test_null_values(self):
field = Map(Integer(nonnull=True))
self.assert_processed(field, {}, {'a': 1})
expected_error = ValidationError(structure={'a': NULL_ERROR, 'b': 2})
self.assert_not_processed(field, expected_error, {'a': None, 'b': 2})
def test_required_keys(self):
field = Map(Integer(), required_keys=('a',))
self.assert_processed(field, {'a': 1})
expected_error = ValidationError(structure={'a': REQUIRED_ERROR})
self.assert_not_processed(field, expected_error, {})
def test_explicit_key(self):
field = Map(Integer(), key=Integer())
self.assert_processed(field, {}, {1: 1}, {1: 1, 2: 2})
self.assert_not_processed(field, 'invalidkeys', {'a': 1})
def test_undefined_fields(self):
undefined = Undefined(Integer())
field = Map(undefined)
self.assert_processed(field, None, {}, {'a': 1}, {'a': 1, 'b': 2})
undefined = Undefined()
field = Map(undefined)
with self.assertRaises(UndefinedFieldError):
field.process({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.extract({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.instantiate({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.interpolate({'a': 1}, {})
undefined.define(Integer())
self.assert_processed(field, None, {}, {'a': 1}, {'a': 1, 'b': 2})
def test_naive_extraction(self):
field = Map(Integer())
self.assertIs(field.extract(None), None)
with self.assertRaises(FieldExcludedError):
field.extract({}, required=True)
value = {'a': 1, 'b': 2}
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
def test_naive_extraction_with_nesting(self):
field = Map(Map(Integer()))
value = {'a': {'a': 1}, 'b': {'b': 2}}
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertIsNot(extracted['a'], value['a'])
self.assertEqual(extracted, value)
def test_mediated_extraction(self):
field = Map(Integer(), extractor=attrmap.extract)
with self.assertRaises(CannotExtractError):
field.extract([])
value = {'a': 1, 'b': 2}
extracted = field.extract(attrmap(None, value))
self.assertIsInstance(extracted, dict)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
def test_mediated_extraction_with_nested_extractors(self):
field = Map(Integer(extractor=valuewrapper.extract), extractor=attrmap.extract)
value = attrmap(None, {'a': valuewrapper(None, 1), 'b': valuewrapper(None, 2)})
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 2})
def test_naive_extraction_with_nested_extractors(self):
field = Map(Integer(extractor=valuewrapper.extract))
value = {'a': valuewrapper(None, 1), 'b': valuewrapper(None, 2)}
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 2})
def test_filter(self):
field = Map(Structure({'one': Integer(one=True), 'two': Integer()}), one=False)
self.assertIs(field.filter(), field)
self.assertIsNone(field.filter(one=True))
filtered = field.filter(one=False)
self.assertIsInstance(filtered, Map)
self.assertIsNot(filtered, field)
self.assertEqual(set(filtered.value.structure.keys()), set(['two']))
field = Map(Structure({'one': Integer()}, one=True))
with self.assertRaises(CannotFilterError):
field.filter(one=False)
def test_mediated_instantiation(self):
field = Map(Integer(), instantiator=attrmap)
self.assertIs(field.instantiate(None), None)
instance = field.instantiate({'a': 1, 'b': 2})
self.assertIsInstance(instance, attrmap)
self.assertEqual(instance.a, 1)
self.assertEqual(instance.b, 2)
instance = field.instantiate({})
self.assertIsInstance(instance, attrmap)
def test_mediated_instantiation_with_nested_instantiators(self):
field = Map(Integer(instantiator=valuewrapper), instantiator=attrmap)
instance = field.instantiate({'a': 1, 'b': 2})
self.assertIsInstance(instance, attrmap)
self.assertIsInstance(instance.a, valuewrapper)
self.assertEqual(instance.a.value, 1)
def test_interpolation(self):
field = Map(Integer())
with self.assertRaises(CannotInterpolateError):
field.interpolate([], {})
self.assert_interpolated(field, None, {}, ({'alpha': 1, 'beta': 2},
{'alpha': 1, 'beta': 2}))
self.assert_interpolated(field, ({'alpha': '${alpha}', 'beta': '${beta}'},
{'alpha': 1, 'beta': 2}), alpha=1, beta=2)
self.assert_interpolated(field, ({'alpha': '${alpha}', 'beta': '${beta}'},
{'alpha': 1}), alpha=1)
self.assert_interpolated(field, ('${value}', {'alpha': 1, 'beta': 2}),
value={'alpha': '${alpha}', 'beta': '${beta}'}, alpha=1, beta=2)
def test_transformation(self):
field = Map(Integer())
transformed = field.transform(lambda _: False)
self.assertIs(transformed, field)
transformed = field.transform(lambda f: f)
self.assertIs(transformed, field)
def will_transform(f):
if isinstance(f, Integer):
return f.clone(description='transformed')
transformed = field.transform(will_transform)
self.assertIsNot(transformed, field)
self.assertIsNot(transformed.value, field.value)
self.assertEqual(transformed.value.description, 'transformed')
def will_not_transform(f):
            if isinstance(f, Text):
return f.clone(description='transformed')
transformed = field.transform(will_not_transform)
self.assertIs(transformed, field)
def test_description(self):
field = Map(Text())
self.assertEqual(field.describe(), {'fieldtype': 'map', 'structural': True,
'value': {'fieldtype': 'text'}})
field = Map(Text(), Integer())
self.assertEqual(field.describe(), {'fieldtype': 'map', 'structural': True,
'value': {'fieldtype': 'text'}, 'key': {'fieldtype': 'integer'}})
field = Map(Text(), default={'alpha': 'one'})
self.assertEqual(field.describe(), {'fieldtype': 'map', 'structural': True,
'value': {'fieldtype': 'text'}, 'default': {'alpha': 'one'}})
def test_visit(self):
def visit(f):
return f['fieldtype']
field = Map(Text())
visited = Field.visit(field.describe(), visit)
self.assertEqual(visited, {'value': 'text'})
field = Map(Text(), Integer())
visited = Field.visit(field.describe(), visit)
self.assertEqual(visited, {'value': 'text', 'key': 'integer'})
```
#### File: scheme/tests/test_sequence.py
```python
from scheme import *
from tests.util import *
class TestSequence(FieldTestCase):
def _generate_sequences(self):
today, today_text = construct_today()
yesterday, yesterday_text = construct_today(-1)
tomorrow, tomorrow_text = construct_today(+1)
return ([yesterday, today, tomorrow],
[yesterday_text, today_text, tomorrow_text])
def test_construction(self):
with self.assertRaises(TypeError):
Sequence(True)
with self.assertRaises(TypeError):
Sequence(Text(), min_length='bad')
with self.assertRaises(TypeError):
Sequence(Text(), min_length=-2)
with self.assertRaises(TypeError):
Sequence(Text(), max_length='bad')
with self.assertRaises(TypeError):
Sequence(Text(), max_length=-2)
def test_processing(self):
field = Sequence(Date())
self.assert_processed(field, None, self._generate_sequences())
self.assert_processed(field, [], ancestry=['test'])
self.assert_not_processed(field, 'invalid', True)
field = Sequence(Integer())
self.assert_processed(field, [1, 2, 3], [1, None, 3])
expected_error = ValidationError(structure=[1, INVALID_ERROR, 3])
self.assert_not_processed(field, expected_error, [1, '', 3])
def test_preprocessing(self):
def preprocess(value):
return [v.lower() for v in value]
field = Sequence(Text(), preprocessor=preprocess)
self.assert_processed(field, None, [], ['one', 'two'])
self.assertEqual(field.process(['One', 'Two']), ['one', 'two'])
def test_null_values(self):
field = Sequence(Integer(nonnull=True))
self.assert_processed(field, [], [1, 2, 3])
expected_error = ValidationError(structure=[1, NULL_ERROR, 3])
self.assert_not_processed(field, expected_error, [1, None, 3])
def test_min_length(self):
field = Sequence(Date(), min_length=2)
a, b = self._generate_sequences()
self.assert_processed(field, (a, b), (a[:2], b[:2]))
self.assert_not_processed(field, 'min_length', (a[:1], b[:1]))
def test_max_length(self):
field = Sequence(Date(), max_length=2)
a, b = self._generate_sequences()
self.assert_processed(field, (a[:1], b[:1]), (a[:2], b[:2]))
self.assert_not_processed(field, 'max_length', (a, b))
def test_unique_values(self):
field = Sequence(Integer(), unique=True)
self.assert_processed(field, [], [1], [1, 2])
self.assert_not_processed(field, 'duplicate', [1, 1])
def test_undefined_field(self):
undefined = Undefined(Integer())
field = Sequence(undefined)
self.assert_processed(field, None, [], [1], [1, 2])
undefined = Undefined()
field = Sequence(undefined)
with self.assertRaises(UndefinedFieldError):
field.process([1, 2])
with self.assertRaises(UndefinedFieldError):
field.extract([1, 2])
with self.assertRaises(UndefinedFieldError):
field.instantiate([1, 2])
with self.assertRaises(UndefinedFieldError):
field.interpolate([1, 2], {})
undefined.define(Integer())
self.assert_processed(field, None, [], [1], [1, 2])
def test_naive_extraction(self):
field = Sequence(Integer())
self.assertIs(field.extract(None), None)
with self.assertRaises(FieldExcludedError):
field.extract([], required=True)
with self.assertRaises(CannotExtractError):
field.extract({})
value = [1, 2, 3]
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
def test_naive_extraction_with_nesting(self):
field = Sequence(Sequence(Integer()))
value = [[1], [2], [3]]
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
for i in (0, 1, 2):
            self.assertIsNot(extracted[i], value[i])
def test_naive_extraction_with_nested_extractors(self):
field = Sequence(Integer(extractor=valuewrapper.extract))
value = [valuewrapper(None, 1), valuewrapper(None, 2)]
extracted = field.extract(value)
self.assertIsInstance(extracted, list)
self.assertEqual(extracted, [1, 2])
def test_mediated_extraction(self):
field = Sequence(Integer(), extractor=listwrapper.extract)
with self.assertRaises(CannotExtractError):
field.extract({})
value = listwrapper(None, [1, 2])
extracted = field.extract(value)
self.assertIsInstance(extracted, list)
self.assertEqual(extracted, [1, 2])
def test_mediated_extraction_with_nested_extractors(self):
field = Sequence(Integer(extractor=valuewrapper.extract), extractor=listwrapper.extract)
value = listwrapper(None, [valuewrapper(None, 1), valuewrapper(None, 2)])
extracted = field.extract(value)
self.assertIsInstance(extracted, list)
self.assertEqual(extracted, [1, 2])
def test_filter(self):
field = Sequence(Structure({'one': Integer(one=True), 'two': Integer()}), one=False)
self.assertIs(field.filter(), field)
self.assertIsNone(field.filter(one=True))
filtered = field.filter(one=False)
self.assertIsInstance(filtered, Sequence)
self.assertIsNot(filtered, field)
self.assertEqual(set(filtered.item.structure.keys()), set(['two']))
field = Sequence(Structure({'one': Integer()}, one=True))
with self.assertRaises(CannotFilterError):
field.filter(one=False)
def test_mediated_instantiation(self):
field = Sequence(Integer(), instantiator=listwrapper)
self.assertIs(field.instantiate(None), None)
instance = field.instantiate([1, 2])
self.assertIsInstance(instance, listwrapper)
self.assertEqual(instance.list, [1, 2])
instance = field.instantiate([])
self.assertIsInstance(instance, listwrapper)
self.assertEqual(instance.list, [])
def test_mediated_instantiation_with_nested_instantiators(self):
field = Sequence(Integer(instantiator=valuewrapper), instantiator=listwrapper)
instance = field.instantiate([1, 2])
self.assertIsInstance(instance, listwrapper)
self.assertIsInstance(instance.list[0], valuewrapper)
self.assertEqual(instance.list[0].value, 1)
self.assertIsInstance(instance.list[1], valuewrapper)
self.assertEqual(instance.list[1].value, 2)
def test_naive_instantiation_with_nested_instantiators(self):
field = Sequence(Integer(instantiator=valuewrapper))
instance = field.instantiate([1, 2])
self.assertIsInstance(instance, list)
self.assertIsInstance(instance[0], valuewrapper)
self.assertEqual(instance[0].value, 1)
self.assertIsInstance(instance[1], valuewrapper)
self.assertEqual(instance[1].value, 2)
def test_interpolation(self):
field = Sequence(Integer())
with self.assertRaises(CannotInterpolateError):
field.interpolate({}, {})
self.assert_interpolated(field, None, [])
self.assert_interpolated(field, (['${alpha}', '${beta}'], [1, 2]), alpha=1, beta=2)
self.assert_interpolated(field, ([1, 2], [1, 2]))
self.assert_interpolated(field, ('${value}', [1, 2]), value=['${alpha}', '${beta}'],
alpha=1, beta=2)
def test_transformation(self):
field = Sequence(Integer())
transformed = field.transform(lambda _: False)
self.assertIs(transformed, field)
transformed = field.transform(lambda f: f)
self.assertIs(transformed, field)
def will_transform(f):
if isinstance(f, Integer):
return f.clone(description='transformed')
transformed = field.transform(will_transform)
self.assertIsNot(transformed, field)
self.assertIsNot(transformed.item, field.item)
self.assertEqual(transformed.item.description, 'transformed')
def will_not_transform(f):
if isinstance(f, Text):
return f.clone(description='transformed')
transformed = field.transform(will_not_transform)
self.assertIs(transformed, field)
self.assertIs(transformed.item, field.item)
def test_description(self):
field = Sequence(Integer())
self.assertEqual(field.describe(), {'fieldtype': 'sequence', 'structural': True,
'item': {'fieldtype': 'integer'}})
field = Sequence(Integer(), default=[1, 2])
self.assertEqual(field.describe(), {'fieldtype': 'sequence', 'structural': True,
'item': {'fieldtype': 'integer'}, 'default': [1, 2]})
def test_visit(self):
def visit(f):
return f['fieldtype']
field = Sequence(Text())
visited = Field.visit(field.describe(), visit)
self.assertEqual(visited, {'item': 'text'})
```
#### File: scheme/tests/test_structure.py
```python
from scheme import *
from tests.util import *
class TestStructure(FieldTestCase):
    maxDiff = None
class ExtractionTarget(object):
def __init__(self, **params):
self.__dict__.update(**params)
def test_instantiation(self):
field = Structure({}, key_order='one two')
self.assertEqual(field.key_order, ['one', 'two'])
field = Structure({'a': Integer(default=1), 'b': Boolean()}, generate_default=True)
self.assertEqual(field.default, {'a': 1})
def test_invalid_instantiation(self):
with self.assertRaises(TypeError):
Structure(True)
with self.assertRaises(TypeError):
Structure({'a': True})
with self.assertRaises(TypeError):
Structure({}, polymorphic_on=True)
with self.assertRaises(TypeError):
Structure({}, key_order=True)
with self.assertRaises(TypeError):
Structure({'a': True}, polymorphic_on='type')
with self.assertRaises(ValueError):
Structure({}, generate_default='test')
def test_processing(self):
field = Structure({})
self.assert_processed(field, None, {})
self.assert_processed(field, {}, ancestry=['test'])
self.assert_not_processed(field, 'invalid', True)
field = Structure({'a': Integer(), 'b': Text(), 'c': Boolean()})
self.assert_processed(field, None, {}, {'a': None}, {'a': 1},
{'a': 1, 'b': None}, {'a': 1, 'b': 'b', 'c': True})
expected_error = ValidationError(structure={'a': INVALID_ERROR, 'b': 'b', 'c': True})
self.assert_not_processed(field, expected_error, {'a': '', 'b': 'b', 'c': True})
def test_preprocessing(self):
def preprocess(value):
if 'a' in value:
value = value.copy()
value['a'] = value['a'].lower()
return value
field = Structure({'a': Text(), 'b': Integer()}, preprocessor=preprocess)
self.assert_processed(field, None, {}, {'a': 'test'}, {'b': 2}, {'a': 'test', 'b': 2})
self.assertEqual(field.process({'a': 'TEST', 'b': 2}), {'a': 'test', 'b': 2})
def test_processing_with_key_order(self):
field = Structure({'alpha': Text(), 'beta': Text(), 'gamma': Text()}, key_order='gamma alpha beta')
self.assert_processed(field, None, {})
self.assert_not_processed(field, 'invalid', True)
def test_required_values(self):
field = Structure({'a': Integer(required=True), 'b': Text()})
self.assert_processed(field, {'a': None}, {'a': 1}, {'a': 1, 'b': 'b'})
expected_error = ValidationError(structure={'a': REQUIRED_ERROR, 'b': 'b'})
self.assert_not_processed(field, expected_error, {'b': 'b'})
def test_ignore_null_values(self):
field = Structure({'a': Integer()})
self.assertEqual(field.process({'a': None}, INBOUND), {'a': None})
field = Structure({'a': Integer(ignore_null=True)})
self.assertEqual(field.process({'a': None}, INBOUND), {})
def test_unknown_values(self):
field = Structure({'a': Integer()})
self.assert_processed(field, {}, {'a': 1})
expected_error = ValidationError(structure={'a': 1, 'z': UNKNOWN_ERROR})
self.assert_not_processed(field, expected_error, {'a': 1, 'z': True})
field = Structure({'a': Integer()}, strict=False)
self.assert_processed(field, {}, {'a': 1})
self.assertEqual(field.process({'a': 1, 'z': True}, INBOUND), {'a': 1})
def test_default_values(self):
field = Structure({'a': Integer(default=2)})
self.assertEqual(field.process({'a': 1}, INBOUND), {'a': 1})
self.assertEqual(field.process({}, INBOUND), {'a': 2})
self.assertEqual(field.process({'a': 1}, OUTBOUND), {'a': 1})
self.assertEqual(field.process({}, OUTBOUND), {})
def test_polymorphism(self):
field = Structure({
'alpha': {'a': Integer()},
'beta': {'b': Integer()},
}, polymorphic_on=Text(name='id'))
self.assert_processed(field, None)
self.assert_not_processed(field, 'required', {})
self.assert_processed(field, {'id': 'alpha', 'a': 1}, {'id': 'beta', 'b': 2})
self.assert_not_processed(field, 'unrecognized', {'id': 'gamma', 'g': 3})
expected_error = ValidationError(structure={'id': 'alpha', 'b': UNKNOWN_ERROR})
self.assert_not_processed(field, expected_error, {'id': 'alpha', 'b': 2})
field = Structure({
'alpha': {'a': Integer(default=1)},
'beta': {'b': Integer(default=2)},
}, polymorphic_on='type', generate_default='alpha')
self.assertEqual(field.default, {'type': 'alpha', 'a': 1})
with self.assertRaises(ValueError):
Structure({'alpha': {}}, polymorphic_on='type', generate_default=True)
def test_polymorphism_with_field_autogeneration(self):
field = Structure({
'alpha': {'a': Integer()},
'beta': {'b': Integer()},
}, polymorphic_on='id')
self.assert_processed(field, None)
self.assert_not_processed(field, 'required', {})
self.assert_processed(field, {'id': 'alpha', 'a': 1}, {'id': 'beta', 'b': 2})
self.assert_not_processed(field, 'invalid', {'id': 'gamma', 'g': 3})
expected_error = ValidationError(structure={'id': 'alpha', 'b': UNKNOWN_ERROR})
self.assert_not_processed(field, expected_error, {'id': 'alpha', 'b': 2})
def test_polymorphism_with_common_fields(self):
field = Structure({
'*': {'n': Integer()},
'alpha': {'a': Integer()},
'beta': {'b': Integer()},
}, polymorphic_on='id')
self.assert_processed(field, None)
self.assert_processed(field, {'id': 'alpha', 'a': 1, 'n': 3},
{'id': 'beta', 'b': 2, 'n': 3})
def test_undefined_fields(self):
undefined = Undefined(Integer())
field = Structure({'a': undefined})
self.assert_processed(field, None, {}, {'a': 1})
undefined = Undefined()
field = Structure({'a': undefined})
with self.assertRaises(UndefinedFieldError):
field.process({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.extract({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.instantiate({'a': 1})
with self.assertRaises(UndefinedFieldError):
field.interpolate({'a': 1}, {})
undefined.define(Integer())
self.assert_processed(field, None, {}, {'a': 1})
def test_undefined_polymorphic_fields(self):
undefined = Undefined(Integer())
field = Structure({'a': {'a': undefined}}, polymorphic_on='type')
self.assert_processed(field, None, {'type': 'a', 'a': 1})
undefined = Undefined()
field = Structure({'a': {'a': undefined}}, polymorphic_on='type')
with self.assertRaises(UndefinedFieldError):
field.process({'type': 'a', 'a': 1})
with self.assertRaises(UndefinedFieldError):
field.extract({'type': 'a', 'a': 1})
with self.assertRaises(UndefinedFieldError):
field.instantiate({'type': 'a', 'a': 1})
with self.assertRaises(UndefinedFieldError):
field.interpolate({'type': 'a', 'a': 1}, {})
undefined.define(Integer())
self.assert_processed(field, None, {'type': 'a', 'a': 1})
def test_naive_extraction(self):
field = Structure({'a': Integer()})
self.assertIs(field.extract(None), None)
with self.assertRaises(FieldExcludedError):
field.extract({}, required=True)
with self.assertRaises(CannotExtractError):
field.extract([])
value = {'a': 1}
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
extracted = field.extract({'a': 1, 'b': 2})
self.assertEqual(extracted, value)
extracted = field.extract({})
self.assertEqual(extracted, {})
extracted = field.extract({'a': None})
self.assertEqual(extracted, {})
def test_naive_extraction_with_nesting(self):
field = Structure({'a': Structure({'a': Integer()})})
value = {'a': {'a': 1}}
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertIsNot(extracted['a'], value['a'])
self.assertEqual(extracted, value)
def test_naive_extraction_with_polymorphism(self):
field = Structure({
'alpha': {'a': Integer()},
'beta': {'b': Integer()},
}, polymorphic_on=Text(name='identity'))
for value in ({'identity': 'alpha', 'a': 1}, {'identity': 'beta', 'b': 2}):
extracted = field.extract(value)
self.assertIsNot(extracted, value)
self.assertEqual(extracted, value)
def test_naive_extraction_with_nested_extractors(self):
field = Structure({'a': Integer(), 'b': Text(extractor=valuewrapper.extract)})
with self.assertRaises(CannotExtractError):
field.extract([])
value = {'a': 1, 'b': valuewrapper(None, 'test')}
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 'test'})
def test_mediated_extraction(self):
field = Structure({'a': Integer(), 'b': Text()}, extractor=attrmap.extract)
with self.assertRaises(CannotExtractError):
field.extract([])
value = attrmap(None, {'a': 1, 'b': 'test'})
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 'test'})
def test_mediated_extraction_with_nested_extractors(self):
field = Structure({'a': Integer(extractor=valuewrapper.extract), 'b': Text()}, extractor=attrmap.extract)
with self.assertRaises(CannotExtractError):
field.extract([])
value = attrmap(None, {'a': valuewrapper(None, 1), 'b': 'test'})
extracted = field.extract(value)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 'test'})
def test_extraction_from_object(self):
field = Structure({'a': Integer(), 'b': Text()})
target = self.ExtractionTarget(a=1, b='b', c='c', d=4)
extracted = field.extract(target, strict=False)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1, 'b': 'b'})
target = self.ExtractionTarget(a=1, c='c')
extracted = field.extract(target, strict=False)
self.assertIsInstance(extracted, dict)
self.assertEqual(extracted, {'a': 1})
def test_filter(self):
field = Structure({'a': Integer(), 'b': Text(one=True)}, one=False)
self.assertIs(field.filter(), field)
self.assertIsNone(field.filter(one=True))
filtered = field.filter(one=False)
self.assertIsInstance(filtered, Structure)
self.assertIsNot(filtered, field)
self.assertEqual(set(filtered.structure.keys()), set(['a']))
field = Structure({
'alpha': {'a': Integer(), 'b': Text(one=True)},
'beta': {'c': Integer(), 'd': Text(one=True)},
}, polymorphic_on='type', one=False)
self.assertIs(field.filter(), field)
self.assertIsNone(field.filter(one=True))
filtered = field.filter(one=False)
self.assertIsInstance(filtered, Structure)
self.assertIsNot(filtered, field)
self.assertEqual(set(filtered.structure['alpha'].keys()), set(['a', 'type']))
self.assertEqual(set(filtered.structure['beta'].keys()), set(['c', 'type']))
field = Structure({'a': Structure({'b': Integer(one=True), 'c': Integer()})})
filtered = field.filter(one=False)
self.assertIsInstance(filtered, Structure)
self.assertIsNot(filtered, field)
self.assertIsInstance(filtered.structure['a'], Structure)
self.assertEqual(set(filtered.structure['a'].structure.keys()), set(['c']))
def test_mediated_instantiation(self):
field = Structure({'a': Integer(), 'b': Text()}, instantiator=attrmap)
self.assertIs(field.instantiate(None), None)
instance = field.instantiate({'a': 1, 'b': 'test'})
self.assertIsInstance(instance, attrmap)
self.assertEqual(instance.a, 1)
self.assertEqual(instance.b, 'test')
instance = field.instantiate({})
self.assertIsInstance(instance, attrmap)
def test_mediated_instantiation_with_nested_instantiators(self):
field = Structure({'a': Integer(instantiator=valuewrapper), 'b': Text()}, instantiator=attrmap)
instance = field.instantiate({'a': 1, 'b': 'test'})
self.assertIsInstance(instance, attrmap)
self.assertIsInstance(instance.a, valuewrapper)
self.assertEqual(instance.a.value, 1)
self.assertEqual(instance.b, 'test')
def test_naive_instantiation_with_nested_instantiators(self):
field = Structure({'a': Integer(), 'b': Text(instantiator=valuewrapper)})
instance = field.instantiate({'a': 1, 'b': 'test'})
self.assertIsInstance(instance, dict)
self.assertIsInstance(instance['b'], valuewrapper)
self.assertEqual(instance['a'], 1)
self.assertEqual(instance['b'].value, 'test')
def test_mediated_instantiation_with_polymorphism(self):
field = Structure({
'alpha': {'a': Integer()},
'beta': {'b': Integer()},
}, polymorphic_on=Text(name='identity'), instantiator=attrmap)
for value in ({'identity': 'alpha', 'a': 1}, {'identity': 'beta', 'b': 2}):
instance = field.instantiate(value)
self.assertIsInstance(instance, attrmap)
self.assertEqual(instance.identity, value['identity'])
def test_interpolation(self):
field = Structure({'alpha': Integer(), 'beta': Text()})
with self.assertRaises(CannotInterpolateError):
field.interpolate([], {})
self.assert_interpolated(field, None, {}, ({'alpha': 1, 'beta': 'two'},
{'alpha': 1, 'beta': 'two'}))
self.assert_interpolated(field, ({'alpha': '${alpha}', 'beta': '${beta}'},
{'alpha': 1, 'beta': 'two'}), alpha=1, beta='two')
self.assert_interpolated(field, ('${value}', {'alpha': 1, 'beta': 'two'}),
value={'alpha': '${alpha}', 'beta': '${beta}'}, alpha=1, beta='two')
def test_default_generation(self):
field = Structure({'a': Integer(default=1), 'b': Text(default='test'), 'c': Boolean()})
self.assertEqual(field.generate_defaults(), {'a': 1, 'b': 'test'})
self.assertEqual(field.generate_defaults(sparse=False), {'a': 1, 'b': 'test', 'c': None})
with self.assertRaises(ValueError):
field.generate_defaults('test')
field = Structure({'alpha': {'a': Integer(default=1)}, 'beta': {'b': Text(default='test')}},
polymorphic_on='type')
self.assertEqual(field.generate_defaults('alpha'), {'type': 'alpha', 'a': 1})
self.assertEqual(field.generate_defaults(), {'alpha': {'type': 'alpha', 'a': 1},
'beta': {'type': 'beta', 'b': 'test'}})
with self.assertRaises(ValueError):
field.generate_defaults('gamma')
def test_structure_extension(self):
field = Structure({'a': Integer()})
with self.assertRaises(TypeError):
field.extend({'b': True})
clone = field.extend({'b': Text()})
self.assertIsNot(clone, field)
self.assertEqual(set(field.structure.keys()), set(['a']))
self.assertEqual(set(clone.structure.keys()), set(['a', 'b']))
clone = field.extend({'b': Text(name='b')})
self.assertIsNot(clone, field)
self.assertEqual(set(field.structure.keys()), set(['a']))
self.assertEqual(set(clone.structure.keys()), set(['a', 'b']))
def test_field_insertion(self):
field = Structure({'a': Integer()})
self.assertEqual(set(field.structure.keys()), set(['a']))
field.insert(Text(name='b'))
self.assertEqual(set(field.structure.keys()), set(['a', 'b']))
field.insert(Boolean(name='a'), overwrite=False)
self.assertIsInstance(field.structure['a'], Integer)
field.insert(Boolean(name='a'), overwrite=True)
self.assertIsInstance(field.structure['a'], Boolean)
with self.assertRaises(TypeError):
field.insert(True)
with self.assertRaises(ValueError):
field.insert(Text())
def test_structure_merging(self):
field = Structure({'a': Integer()})
self.assertEqual(set(field.structure.keys()), set(['a']))
replacement = Text(name='b')
field.merge({'b': replacement})
self.assertEqual(set(field.structure.keys()), set(['a', 'b']))
self.assertIs(field.structure['b'], replacement)
field.merge({'a': Boolean()}, prefer=False)
self.assertIsInstance(field.structure['a'], Integer)
field.merge({'a': Boolean()}, prefer=True)
self.assertIsInstance(field.structure['a'], Boolean)
replacement = Text(name='z')
field.merge({'b': replacement})
self.assertEqual(set(field.structure.keys()), set(['a', 'b']))
self.assertIsInstance(field.structure['b'], Text)
self.assertEqual(field.structure['b'].name, 'b')
self.assertIsNot(field.structure['b'], replacement)
with self.assertRaises(TypeError):
field.merge({'b': True})
def test_remove(self):
field = Structure({'a': Integer(), 'b': Integer(), 'c': Integer()})
field.remove('a')
self.assertEqual(set(field.structure.keys()), set(['b', 'c']))
field.remove('a')
self.assertEqual(set(field.structure.keys()), set(['b', 'c']))
def test_structure_replacement(self):
field = Structure({'a': Integer(), 'b': Integer(), 'c': Integer()})
replaced = field.replace({'d': Integer()})
self.assertIs(replaced, field)
replaced = field.replace({'a': Text(), 'b': Text(name='b'), 'd': Text()})
self.assertIsNot(replaced, field)
self.assertIsInstance(replaced.structure['a'], Text)
self.assertIsInstance(replaced.structure['b'], Text)
self.assertIsInstance(replaced.structure['c'], Integer)
self.assertEqual(replaced.structure['a'].name, 'a')
self.assertEqual(set(replaced.structure.keys()), set(['a', 'b', 'c']))
with self.assertRaises(TypeError):
field.replace({'a': True})
def test_transformation(self):
field = Structure({'a': Integer()})
transformed = field.transform(lambda _: False)
self.assertIs(transformed, field)
transformed = field.transform(lambda f: f)
self.assertIs(transformed, field)
def will_transform(f):
if isinstance(f, Integer):
return f.clone(description='transformed')
transformed = field.transform(will_transform)
self.assertIsNot(transformed, field)
self.assertIsNot(transformed.structure['a'], field.structure['a'])
self.assertEqual(transformed.structure['a'].description, 'transformed')
def will_not_transform(f):
if isinstance(f, Text):
return f.clone(description='transformed')
transformed = field.transform(will_not_transform)
self.assertIs(transformed, field)
def test_partial_processing(self):
field = Structure({'a': Integer(required=True, nonnull=True),
'b': Text(required=True)})
self.assertEqual(field.process({'a': 2}, INBOUND, partial=True), {'a': 2})
def test_has_required_fields(self):
field = Structure({'alpha': Text(required=True), 'beta': Integer()})
self.assertTrue(field.has_required_fields)
field = Structure({'alpha': Text(), 'beta': Integer()})
self.assertFalse(field.has_required_fields)
field = Structure({'alpha': {'alpha': Text()}}, polymorphic_on='type')
self.assertTrue(field.has_required_fields)
def test_get_field(self):
field = Structure({'alpha': Text()})
self.assertIs(field.get('alpha'), field.structure['alpha'])
self.assertIs(field.get('beta'), None)
def test_description(self):
field = Structure({'alpha': Text()}, default={'alpha': 'alpha'})
self.assertEqual(field.describe(), {'fieldtype': 'structure', 'structural': True,
'structure': {'alpha': {'fieldtype': 'text', 'name': 'alpha'}},
'default': {'alpha': 'alpha'}})
field = Structure({'alpha': {'one': Integer()}, 'beta': {'one': Text()}}, polymorphic_on='type',
default={'type': 'alpha', 'one': 1})
self.assertEqual(field.describe(), {'fieldtype': 'structure', 'structural': True,
'structure': {
'alpha': {
'one': {'fieldtype': 'integer', 'name': 'one'},
'type': {'fieldtype': 'enumeration', 'name': 'type', 'constant': 'alpha',
'required': True, 'nonnull': True, 'enumeration': ['alpha', 'beta'],
'representation': "'alpha', 'beta'"},
},
'beta': {
'one': {'fieldtype': 'text', 'name': 'one'},
'type': {'fieldtype': 'enumeration', 'name': 'type', 'constant': 'beta',
'required': True, 'nonnull': True, 'enumeration': ['alpha', 'beta'],
'representation': "'alpha', 'beta'"},
},
},
'polymorphic_on': {'fieldtype': 'enumeration', 'enumeration': ['alpha', 'beta'],
'nonnull': True, 'required': True, 'name': 'type'},
'default': {'type': 'alpha', 'one': 1},
})
def test_visit(self):
def visit(f):
return (f['fieldtype'], f['name'])
field = Structure({'a': Text(), 'b': Integer()})
visited = Field.visit(field.describe(), visit)
self.assertEqual(visited, {'structure': {'a': ('text', 'a'), 'b': ('integer', 'b')}})
field = Structure({'alpha': {'a': Text()}, 'beta': {'b': Integer()}}, polymorphic_on='t')
visited = Field.visit(field.describe(), visit)
self.assertEqual(visited, {'structure': {
'alpha': {'a': ('text', 'a'), 't': ('enumeration', 't')},
'beta': {'b': ('integer', 'b'), 't': ('enumeration', 't')}
}})
```
#### File: scheme/tests/test_uuid.py
```python
from scheme import *
from tests.util import *
VALID_UUID = '9ddfe7e5-79b4-4179-8993-43f304d6b012'
SHORT_UUID = '9ddfe7e5-79b4-4179-8993-43f304d6b01'
class TestUUID(FieldTestCase):
def test_processing(self):
field = UUID()
self.assert_processed(field, None, VALID_UUID)
self.assert_not_processed(field, 'invalid', True, '', SHORT_UUID)
def test_interpolation(self):
field = UUID()
self.assert_interpolated(field, None, VALID_UUID)
self.assert_interpolated(field, ('${value}', VALID_UUID), value=VALID_UUID)
```
#### File: scheme/tests/test_yaml.py
```python
from datetime import date, datetime
from scheme import *
from tests.util import *
SINGLE_DICT = """a: 1
b: true
c: something"""
DICT_WITHIN_DICT = """a:
b: 1
c: true
d:
e: 2
f: false"""
SINGLE_LIST = "[1, 2, 3]"
LIST_WITHIN_LIST = """- [1, 2]
- [3, 4]"""
DICT_WITHIN_LIST = """- a: 1
b: true
- a: 2
b: false"""
LIST_WITHIN_DICT = """a: [1, 2]
b: [3, 4]"""
BLOCK_TEXT = """this is some block text that contains newlines, and
therefore should trigger the newline processing of yaml serialization, and
it would be rather disappointing if it doesn't"""
class TestYaml(FormatTestCase):
format = Yaml
def test_simple_values(self):
self.assert_correct([
(None, 'null'),
(True, 'true'),
(False, 'false'),
(1, '1'),
(1.0, '1.0'),
(date(2000, 1, 1), '2000-01-01'),
(datetime(2000, 1, 1, 0, 0, 0), '2000-01-01 00:00:00'),
])
with self.assertRaises(ValueError):
Yaml.serialize(object())
def test_required_quotes(self):
self.assert_correct([
('', "''"),
("'", "''''"),
('null', "'null'"),
('Null', "'Null'"),
('NULL', "'NULL'"),
('~', "'~'"),
('true', "'true'"),
('True', "'True'"),
('TRUE', "'TRUE'"),
('false', "'false'"),
('False', "'False'"),
('FALSE', "'FALSE'"),
('test: this', "'test: this'"),
])
def test_empty_values(self):
self.assert_correct([
({}, '{}'),
([], '[]'),
])
self.assert_correct([
(set(), '[]'),
((), '[]'),
], test_unserialize=False)
def test_strings(self):
self.assert_correct([
('short string', "short string"),
])
def test_complex_values(self):
self.assert_correct([
({'a': 1, 'b': True, 'c': 'something'}, SINGLE_DICT),
({'a': {'b': 1, 'c': True}, 'd': {'e': 2, 'f': False}}, DICT_WITHIN_DICT),
([1, 2, 3], SINGLE_LIST),
([[1, 2], [3, 4]], LIST_WITHIN_LIST),
([{'a': 1, 'b': True}, {'a': 2, 'b': False}], DICT_WITHIN_LIST),
({'a': [1, 2], 'b': [3, 4]}, LIST_WITHIN_DICT),
])
```
#### File: scheme/tests/util.py
```python
from datetime import date, datetime, time, timedelta
from inspect import getargspec
from uuid import uuid4
try:
from unittest2 import TestCase
except ImportError:
from unittest import TestCase
from scheme.exceptions import *
from scheme.field import *
from scheme.timezone import *
from scheme.util import string
def call_with_supported_params(callable, *args, **params):
arguments = getargspec(callable)[0]
for key in list(params):
if key not in arguments:
del params[key]
return callable(*args, **params)
def construct_now(delta=None):
now = datetime.now().replace(microsecond=0, tzinfo=LOCAL)
if delta is not None:
now += timedelta(seconds=delta)
now_text = now.astimezone(UTC).strftime('%Y-%m-%dT%H:%M:%SZ')
return now, now_text
def construct_today(delta=None):
today = date.today()
if delta is not None:
today += timedelta(days=delta)
return today, today.strftime('%Y-%m-%d')
def should_fail(callable, *args, **params):
try:
callable(*args, **params)
except Exception as exception:
return exception
else:
assert False, 'exception should be raised: %r(%r, %r)' % (callable, args, params)
class attrmap(object):
def __init__(self, field, value, key=None):
self.__dict__.update(value)
@classmethod
    def extract(cls, field, value):
        return value.__dict__
class listwrapper(object):
def __init__(self, field, value, key=None):
self.list = value
@classmethod
    def extract(cls, field, value):
        return value.list
class valuewrapper(object):
def __init__(self, field, value, key=None):
self.value = value
@classmethod
    def extract(cls, field, value):
        return value.value
INVALID_ERROR = ValidationError({'token': 'invalid'})
NULL_ERROR = ValidationError({'token': 'nonnull'})
REQUIRED_ERROR = ValidationError({'token': 'required'})
UNKNOWN_ERROR = ValidationError({'token': 'unknown'})
class FieldTestCase(TestCase):
def assert_processed(self, field, *tests, **params):
ancestry = params.get('ancestry', None)
for test in tests:
if isinstance(test, tuple):
unserialized, serialized = test
else:
unserialized, serialized = (test, test)
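            # e.g. (date(2000, 1, 1), '2000-01-01') pairs an unserialized value
            # with its serialized form; a bare value stands in for both.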
self.assertEqual(field.process(unserialized, INBOUND, ancestry=ancestry), unserialized)
self.assertEqual(field.process(unserialized, OUTBOUND, ancestry=ancestry), unserialized)
self.assertEqual(field.process(serialized, INBOUND, True, ancestry=ancestry), unserialized)
self.assertEqual(field.process(unserialized, OUTBOUND, True, ancestry=ancestry), serialized)
def assert_not_processed(self, field, expected, *tests):
if isinstance(expected, string):
expected = ValidationError().append({'token': expected})
for test in tests:
if not isinstance(test, tuple):
test = (test, test)
error = should_fail(field.process, test[0], INBOUND)
failed, reason = self.compare_structural_errors(expected, error)
assert failed, reason
for value, phase in zip(test, (OUTBOUND, INBOUND)):
error = should_fail(field.process, value, phase, True)
failed, reason = self.compare_structural_errors(expected, error)
assert failed, reason
def assert_interpolated(self, field, *tests, **params):
for test in tests:
if isinstance(test, tuple):
left, right = test
else:
left, right = test, test
self.assertEqual(field.interpolate(left, params), right)
def compare_structural_errors(self, expected, received):
if not isinstance(received, type(expected)):
return False, 'received error %r not expected type %r' % (received, type(expected))
if not self.compare_errors(expected, received):
return False, 'nonstructural errors do not match: %r, %r' % (expected.errors, received.errors)
if not self.compare_structure(expected, received):
return False, 'structural errors do not match: %r, %r' % (expected.structure, received.structure)
return True, ''
def compare_errors(self, expected, received):
if expected.errors:
if len(received.errors) != len(expected.errors):
return False
for expected_error, received_error in zip(expected.errors, received.errors):
if received_error.get('token') != expected_error['token']:
return False
elif received.errors:
return False
return True
def compare_structure(self, expected, received):
expected, received = expected.structure, received.structure
if isinstance(expected, list):
if not isinstance(received, list):
return False
elif len(received) != len(expected):
return False
for expected_item, received_item in zip(expected, received):
if isinstance(expected_item, StructuralError):
if not isinstance(received_item, StructuralError):
return False
elif expected_item.structure is not None:
if not self.compare_structure(expected_item, received_item):
return False
elif expected_item.errors is not None:
if not self.compare_errors(expected_item, received_item):
return False
elif received_item != expected_item:
return False
elif isinstance(expected, dict):
if not isinstance(received, dict):
return False
elif len(received) != len(expected):
return False
for expected_pair, received_pair in zip(sorted(expected.items()), sorted(received.items())):
if expected_pair[0] != received_pair[0]:
return False
expected_value, received_value = expected_pair[1], received_pair[1]
if isinstance(expected_value, StructuralError):
if not isinstance(received_value, StructuralError):
return False
elif expected_value.structure is not None:
if not self.compare_structure(expected_value, received_value):
return False
elif expected_value.errors is not None:
if not self.compare_errors(expected_value, received_value):
return False
elif received_value != expected_value:
return False
elif received:
return False
return True
class FormatTestCase(TestCase):
format = None
def assert_correct(self, pairs, test_serialize=True, test_unserialize=True, **params):
for unserialized, serialized in pairs:
if test_serialize:
self.assertEqual(call_with_supported_params(self.format.serialize,
unserialized, **params), serialized)
if test_unserialize:
self.assertEqual(call_with_supported_params(self.format.unserialize,
serialized, **params), unserialized)
``` |
{
"source": "jordanmslack/image_classifier",
"score": 3
} |
#### File: jordanmslack/image_classifier/predict.py
```python
from utils import *
def main():
args = arg_parser()
device = detect_device(args.gpu)
with open(args.category_names, 'r') as f:
cat_to_name = json.load(f)
model = load_checkpoint(args.checkpoint)
probabilities = predict(args.image, model, device, cat_to_name, args.top_k)
for label, prob in zip(np.array(probabilities[1][0]), np.array(probabilities[0][0])):
print(f"Picture of {cat_to_name.get(str(label),'')} predicted with a probability of {prob:.4f}")
if __name__ == '__main__':
main()
``` |
{
"source": "jordanmurray/FreightDamage",
"score": 4
} |
#### File: SeniorProject/src/script.py
```python
import sqlite3
from sqlite3 import Error
def create_connection(db_file):
""" create a database connection to a SQLite database """
print "HERE"
conn = None
try:
connection = sqlite3.connect(db_file)
cursor = connection.cursor()
results = cursor.execute("SELECT * FROM FragilityTable;")
for r in results:
print (r)
cursor.close()
connection.close()
except Error as e:
print(e)
finally:
if conn:
conn.close()
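
# A minimal alternative sketch (not part of the original script): wrapping the
# connection and cursor in contextlib.closing guarantees cleanup even when the
# query raises, without tracking the handles by hand.
def create_connection_with_closing(db_file):
    from contextlib import closing
    try:
        with closing(sqlite3.connect(db_file)) as connection:
            with closing(connection.cursor()) as cursor:
                for row in cursor.execute("SELECT * FROM FragilityTable;"):
                    print(row)
    except Error as e:
        print(e)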
if __name__ == '__main__':
print("I am here!!");
create_connection(r"C:\Users\elcha\Documents\dbFolder\fragilityDB.db") # This is my path... need to incorporate
``` |
{
"source": "Jordan-M-Young/Port_Maso",
"score": 2
} |
#### File: Jordan-M-Young/Port_Maso/optimization_window.py
```python
import sys
from PyQt5 import QtWidgets as qtw
import opt_view as ov
import opt_model as om
class Optimization_Window(qtw.QMainWindow):
def __init__(self,portfolio_dic,dir_path):
"""Optimization Window class object
        Window that allows the user to generate a space of portfolios based
        on the passed tickers and weight-set arguments, allows for analysis
        (in the future), and facilitates analysis reports.
"""
super().__init__()
#Window settings
self.setWindowTitle('Results')
self.left = 100
self.top = 100
self.width = 640
self.height = 480
self.setGeometry(self.left,self.top,self.width,self.height)
#Window arguments
self.dir_path = dir_path
self.portfolio_dic = portfolio_dic
self.data = {}
self.weights = None
self.selection = None
#Model portion of this window
self.model = om.Model()
#View portion of this window
self.view = ov.View(self.portfolio_dic,self.dir_path)
self.setCentralWidget(self.view)
"""Slots and Signals"""
#Directory update signals
self.view.new_dir.connect(self.model.update_directory)
self.model.new_dir.connect(self.view.update_directory)
#Parameter generation signals
self.view.get_params.connect(self.model.param_gen)
self.model.get_params.connect(self.view.param_gen)
#Report writing signals
self.view.gen_report.connect(self.model.gen_report)
#Data request signals
self.view.send_data.connect(self.model.get_data)
self.model.send_data.connect(self.view.get_data)
#Table widget update signals
self.view.send_table.connect(self.model.update_table)
self.model.send_table.connect(self.view.update_table)
self.show()
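
# The .connect() calls above assume that opt_view.View and opt_model.Model
# (not shown in this record) declare matching PyQt5 signals. A hypothetical
# sketch of the Model side:
#
#     from PyQt5 import QtCore as qtc
#
#     class Model(qtc.QObject):
#         new_dir = qtc.pyqtSignal(str)     # re-broadcasts directory changes
#         send_data = qtc.pyqtSignal(dict)  # answers a data request
#
# Each View signal is wired to a Model slot and vice versa, so the two halves
# stay decoupled from one another.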
if __name__ == '__main__':
pd = {'Hello':'AIG,BA,CVX','22':'CVX,IBM'}
app = qtw.QApplication(sys.argv)
mw = Optimization_Window(pd,'E:/PythonProjects/Stocks_with_Phil/Formatted_Stocks_Monthly')
sys.exit(app.exec_())
``` |
{
"source": "jordanmzlong/Subdivergence-Free-Trees",
"score": 4
} |
#### File: jordanmzlong/Subdivergence-Free-Trees/connectedperms.py
```python
import itertools
# basic factorial function
def fact(n):
sofar = 1
for i in range(1,n+1):
sofar *= i
return sofar
# decrements each element of S by i
def setsubtract(S,i):
return {j - i for j in S}
# returns the number of S-connected permutations of length n, where S is a set of integers
# that is, permutations of length n that don't fix prefixes of sizes given in S
# see paper for derivation
def sconnected(n, S):
total = 0
for i in S:
prefix = fact(i)
suffix = sconnected(n-i,{j for j in range(1,n-i)} & setsubtract(S,i))
total += prefix*suffix
return fact(n) - total
# returns the number of connected permutations of length n
# that is, permutations of length n that don't fix any prefixes
def c(n):
return sconnected(n,{i for i in range(1,n)})
# returns the number of subdivergence-free gluings of f_{k,i} in the paper
def third_family(k,i):
if k < i:
return 0
if i == k:
return c(k) - 2*sconnected(k-1,{j for j in range(i-1,k-1)}) + sconnected(k-2,{j for j in range(i-2,k-2)})
if i == 1:
return c(k)
if i < k:
return c(k) - 2*sconnected(k-1,{j for j in range(i-1,k-1)}) + sconnected(k-2,{j for j in range(i-2,k-2)}) - c(k-1)
def main():
# basic example:
assert(sconnected(5, {2,4}) == (c(5)+c(4)+c(2)*(c(3)+c(2))))
if __name__ == "__main__":
main()
```
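
The recurrence in `sconnected` subtracts, for each allowed fixed prefix size, the number of free arrangements of that prefix times the number of S-connected arrangements of the remainder. A quick sanity check is to compare `c(n)` against the known counts of indecomposable permutations (OEIS A003319); a minimal sketch, assuming the file is importable as `connectedperms`:
```python
# Hypothetical check script, not part of the original repository.
from connectedperms import c

if __name__ == "__main__":
    expected = [1, 1, 3, 13, 71, 461]  # OEIS A003319: indecomposable permutations
    assert [c(n) for n in range(1, 7)] == expected
    print("c(n) matches OEIS A003319 for n = 1..6")
```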
#### File: jordanmzlong/Subdivergence-Free-Trees/subfreegluings.py
```python
import copy
from collections import Counter, defaultdict
from functools import reduce
from itertools import chain, combinations, product
from multiset import Multiset
# used to generate new colour labels
leaf_counter = 1
# returns a list of all nonempty subsets (found on stack overflow)
def powerset(iterable):
"powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
s = list(iterable)
return list(chain.from_iterable(combinations(s,r) for r in range(1,len(s)+1)))
# children is a list of nodes, representing children of the node.
# half_edges is a list of numbers representing the half_edges with their colours
class Node:
def __init__(self, children, half_edges):
self.children = children
self.half_edges = half_edges
def __repr__(self):
"the printing function"
sofar = "["
for half_edge in self.half_edges:
sofar += str(half_edge)
for child in self.children:
sofar += repr(child)
sofar += "]"
return sofar
# given a node t, returns a multiset of all of the half-edge colours in the subtree rooted at t
def c(t):
leaves = []
leaves += t.half_edges
for child in t.children:
leaves += c(child)
return Multiset(leaves)
# your basic factorial function
def fact(n):
sofar = 1
for i in range(1,n+1):
sofar *= i
return sofar
# return the number of colour preserving gluings given a multiset of colours
def colour_preserving(colours):
total = 1
for (elt,multiplicity) in colours.items():
total *= fact(multiplicity)
return total
# our most important function!
# computes the number of subdivergence-free gluings of trees t1 and t2
def subfree(t1,t2):
t1_colours = c(t1)
if t1_colours != c(t2):
return 0
return colour_preserving(t1_colours) - subs(t1,t2)
# given a multiset of leaf labels called colours
# and a dictionary of leaf multisets to new leaf labels
# get the leaf label corresponding to colours
# Note: mutates multiset_to_leaf
def getlabel(colours, multiset_to_leaf):
global leaf_counter
found = False
for (key,val) in multiset_to_leaf:
if key == colours and found:
print("ERROR IN RELABELING MULTISET, MULTIPLE KEYS")
if key == colours and not found:
edge_label = val
found = True
if not found:
leaf_counter = leaf_counter + 1
edge_label = leaf_counter
multiset_to_leaf.append((colours, edge_label))
return edge_label
# return a list with an entry for every subset of siblings in t
# of the form (base tree, colourlist)
# colourlist is a list of colour multisets, base tree is a coloured rooted tree
# multiset_to_leaf is the dictionary for generating half-edge labels of the base tree
def deconstruct(t, multiset_to_leaf):
sofar = []
children = copy.deepcopy(t.children)
half_edges = copy.deepcopy(t.half_edges)
for children_subset in powerset(children):
# finding the multiset complement of children
diff = Counter(children) - Counter(children_subset)
leftover_children = list(diff.elements())
# this is basically the same as below except when the descend subset is empty
# need to copy since it's pass by reference by default, and we're mutating it
base = Node(copy.deepcopy(leftover_children), copy.deepcopy(half_edges))
colourlist = []
for i in children_subset:
icolours = c(i)
colourlist.append(icolours)
edge_label = getlabel(icolours, multiset_to_leaf)
base.half_edges.append(edge_label)
sofar += [(base,colourlist)]
# the descend_subset is the subset of children that we'll recursively deconstruct
for descend_subset in powerset(children_subset):
# new_childrenish is a list of tuples to be added as children
# we're taking the cross product of the results of deconstructing the children, in order
# to cover every possible set of siblings in the tree
# NOTE that this might be a source of error, since you may have multiple threads trying
# to access the same multiset_to_leaf dictionary, but I assume python is able to take care
# of this
new_childrenish = list(product(*map(lambda x: deconstruct(x, multiset_to_leaf), descend_subset)))
# don't strictly need this check, but it might speed things up
if new_childrenish == []:
continue
# finding set complement of the descend_subset
diff = Counter(children_subset) - Counter(descend_subset)
to_cut = list(diff.elements())
base = Node(copy.deepcopy(leftover_children), copy.deepcopy(half_edges))
colourlist = []
# for the children we're not descending down but still cutting, we need to modify the half-edges
for i in to_cut:
icolours = c(i)
colourlist.append(icolours)
base.half_edges.append(getlabel(icolours, multiset_to_leaf))
for tup in new_childrenish:
# tup is a tuple of tuples, first element being the new child, second being the colours
newcolourlist = copy.deepcopy(colourlist)
newbase = copy.deepcopy(base)
for (childtree,childcolourlist) in tup:
newbase.children.append(childtree)
newcolourlist += childcolourlist
sofar += [(newbase,newcolourlist)]
return sofar
# generates the number of gluings with subdivergences given trees t1 and t2
def subs(t1,t2):
total = 0
multiset_to_leaf = [] # this will be a list of (colour_multiset,label) tuples
# need a new dictionary of these for every call
for (base1, coloursets1) in deconstruct(t1, multiset_to_leaf):
for (base2, coloursets2) in deconstruct(t2, multiset_to_leaf):
basegluings = subfree(base1, base2)
if basegluings < 0:
print("ERROR: negative result from call to subfree")
if basegluings:
for colours in coloursets1:
basegluings *= colour_preserving(colours)
total += basegluings
return total
# count all of the subsets of siblings of this tree
def count_sibling_subsets(t):
sofar = 0
for children_subset in powerset(t.children):
subset_count = 1
for i in children_subset:
subset_count = subset_count*(1+count_sibling_subsets(i))
sofar += subset_count
return sofar
# removes internal two-valent vertices by contracting edges
# does not remove anything since we don't want to remove the
# edge after the root if the root has only one child
def remove_two_valent(t):
return Node(list(map(remove_two_valent_helper, t.children)), t.half_edges)
def remove_two_valent_helper(t):
if len(t.children) == 1 and t.half_edges == []:
return remove_two_valent_helper(t.children[0])
return Node(list(map(remove_two_valent_helper, t.children)), t.half_edges)
# if either tree might have two-valent vertices use this instead of subfree
def subfree_wrapper(t1,t2):
t1 = remove_two_valent(t1)
t2 = remove_two_valent(t2)
return subfree(t1,t2)
def main():
# some basic examples:
t1 = Node([],[1])
assert(subfree_wrapper(t1,t1) == 1)
t2 = Node([t1],[1])
assert(subfree_wrapper(t2,t2) == 1)
t3 = Node([t2],[1])
assert(subfree_wrapper(t3,t3) == 3)
t4 = Node([t3],[1])
assert(subfree_wrapper(t4,t4) == 13)
t5 = Node([t4],[1])
assert(subfree_wrapper(t5,t5) == 71)
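    # Hypothetical two-colour example (not from the original tests): a root
    # carrying half-edge colour 1 with one child carrying colour 2.
    t6 = Node([Node([], [2])], [1])
    assert c(t6) == Multiset([1, 2])
    # One leaf of each colour gives 1! * 1! = 1 colour-preserving gluing.
    assert colour_preserving(c(t6)) == 1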
if __name__ == "__main__":
main()
``` |
{
"source": "JordanOberstein/InnerCircle",
"score": 4
} |
#### File: JordanOberstein/InnerCircle/determine_moves.py
```python
from board1 import B1, B1_Data
from board2 import B2, B2_Data
from board3 import B3, B3_Data
from board4 import B4, B4_Data
directions = ["ul", "ur", "r", "br", "bl", "l"]
return_moves_data = [{}, {}, {}, {}]
moves_to_data = [{}, {}, {}, {}]
board_array = [B1, B2, B3, B4]
data_array = [B1_Data, B2_Data, B3_Data, B4_Data]
#slightly modified find_move function
def find_move(piece=False, direction=False, moves_remaining=False, board=B1):
if piece is False:
return False #not a legal move
x = int(piece[1]) #row
y = int(piece[2]) #collumn
if moves_remaining != 0: #any move that is NOT the final move
        return find_move(board[x][y]["adj"][direction], direction, moves_remaining - 1, board)
else:
return piece
def is_compatible_from(space, new_space, d0, board):
directions = ["ul", "ur", "r", "br", "bl", "l"]
d_index = directions.index(d0)
d1 = directions[(d_index + 3) % 6] #opposite direction
x = int(new_space[1])
y = int(new_space[2])
dots1 = board[x][y]["dots"]
if dots1 == "H":
return False
elif dots1 == "C":
return True
else:
return space == find_move(new_space, d1, dots1, board)
for n in range(4):
board = board_array[n]
#add return moves
for x in range(len(board)):
for y in range(len(board[x])):
space = "i{}{}".format(x, y)
            legal_moves = [(find_move(space, d0, dots, board), d0) for d0 in directions for dots in range(1, 5) if find_move(space, d0, dots, board)]
#print("l", legal_moves)
return_moves = [item[0] for item in legal_moves if is_compatible_from(space, item[0], item[1], board)]
#print("r", return_moves)
return_moves_data[n][space] = return_moves
for item in return_moves_data[n]:
data_array[n][item]["return_moves"] = return_moves_data[n][item]
#print(""{}": {}".format(item, return_moves_data[n][item]))
#print("\n\n")
for n in range(4):
board = board_array[n]
#add moves to
for x in range(len(board)):
for y in range(len(board[x])):
piece = "i{}{}".format(x, y)
dots = board[x][y]["dots"] #number of dots
if dots == "C": #piece is in center
dots = [1, 2, 3]
legal_spaces = [find_move(piece, direction, d, board) for direction in directions for d in dots if find_move(piece, direction, d, board)]
elif dots == "H":
legal_spaces = []
else: #any other space
legal_spaces = [find_move(piece, direction, dots, board) for direction in directions if find_move(piece, direction, dots, board)]
moves_to_data[n][piece] = legal_spaces
#print(legal_spaces)
#print("\n\n")
for n in range(4):
for item in return_moves_data[n]:
print("{} \"return_moves\": {}".format(item, return_moves_data[n][item]))
print("\n\n")
for n in range(4):
for item in moves_to_data[n]:
print("{} \"moves_to\": {}".format(item, moves_to_data[n][item]))
print("\n\n")
with open("moves.txt", "w") as outfile:
for n in range(4):
for item in return_moves_data[n]:
#outfile.write("{} \"return_moves\": {}\n".format(item, return_moves_data[n][item]))
outfile.write("\"return_moves\": {}\n".format(return_moves_data[n][item]))
outfile.write("\n\n")
for n in range(4):
for item in moves_to_data[n]:
#outfile.write("{} \"moves_to\": {}\n".format(item, moves_to_data[n][item]))
outfile.write("\"moves_to\": {}\n".format(moves_to_data[n][item]))
outfile.write("\n\n")
```
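
The reversibility test in `is_compatible_from` relies on the fact that, in the six-entry direction list, the opposite of a direction always sits three positions away cyclically. A small illustration (not part of the original file):
```python
directions = ["ul", "ur", "r", "br", "bl", "l"]
for d in directions:
    opposite = directions[(directions.index(d) + 3) % 6]
    print(d, "->", opposite)  # ul -> br, ur -> bl, r -> l, and so on
```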
#### File: JordanOberstein/InnerCircle/main.py
```python
import random
import sys
import os
from colored import fg, bg, attr
from board1 import B1
from board2 import B2
from board3 import B3
from board4 import B4
COLORIZE_BOARD = True
HARD_MODE = False
def flatten(l): return [item for sublist in l for item in sublist]
RESET = fg(15)+bg(0)+attr(0)
def colorize(text, foreground, background, attribute):
"""Colorize text."""
return fg(foreground)+bg(background)+attr(attribute)+text+RESET
class HiddenPrints:
"""Overides print function."""
def __enter__(self):
self._original_stdout = sys.stdout
sys.stdout = open(os.devnull, 'w')
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout.close()
sys.stdout = self._original_stdout
class Display(object):
def __init__(self, board, *args):
"""
Constructor for Display class.
Parameters:
board: board to be displayed.
*args: attributes in board data to be displayed.
"""
self.board = board
self.board_attributes = args
def board_output(self):
"""Formats board data into readable output."""
output = ""
if len(self.board_attributes) == 0:
output += "{:^120}{:^60}\n".format("(name, has_piece, dots)", "sub_dots")
for row in self.board:
new_line = str([(space["name"], space["has_piece"], space["dots"]) for space in row])
new_line = new_line.replace("False,", "--,") # False is default for has_piece
new_line = new_line.replace("'", "")
new_line = new_line.replace("), (", ") (")
sub_dots = str(["?" if HARD_MODE and space["has_piece"] and space["is_hole"] else space["sub_dots"] for space in row])
sub_dots = sub_dots.replace("False", "-")
sub_dots = sub_dots.replace("'", "")
output += "{:^120}{:^60}\n".format(new_line, sub_dots)
if HARD_MODE:
for player in range(1, 19):
for dots in range(1, 5):
output = output.replace("P{}, {}".format(player, dots), "P{}, ?".format(player))
if COLORIZE_BOARD:
output = output.replace("P1,", colorize("P1", 1, 0, 4) + ",") # Red
output = output.replace("P2", colorize("P2", 2, 0, 4)) # Green
output = output.replace("P3", colorize("P3", 4, 0, 4)) # Blue
output = output.replace("P4", colorize("P4", 3, 0, 4)) # Yellow
output = output.replace("P5", colorize("P5", 124, 0, 4)) # Red
output = output.replace("P6", colorize("P6", 114, 0, 4)) # Green
output = output.replace("P7", colorize("P7", 104, 0, 4)) # Blue
output = output.replace("P8", colorize("P8", 94, 0, 4)) # Yellow
output = output.replace("C", colorize("C", 0, 7, 1)) # White bg
output = output.replace("H", colorize("H", 0, 5, 1)) # Purple bg
else:
for attribute in self.board_attributes:
output += "{:^60}".format(attribute)
output += "\n"
for row in self.board:
for attribute in self.board_attributes:
output += "{:^60}".format(str([space[attribute] for space in row]))
output += "\n"
return output
def __str__(self):
"""Print board data."""
return self.board_output()
def retrieve_attr_data(self):
"""Retrieves data for board arguments in initialization."""
return (*[[[space[attribute] for space in row] for row in self.board] for attribute in self.board_attributes],)
class Actions(object):
def __init__(self, board):
"""Constructor for Actions class."""
self.board = board
def rotate(self, r):
"""Rotate a board r rotations counterclockwise."""
old_board = self.board
ring_1 = [old_board[2][2], old_board[2][3], old_board[3][4], old_board[4][3], old_board[4][2], old_board[3][2]]
ring_2 = [old_board[1][1], old_board[1][2], old_board[1][3], old_board[2][4], old_board[3][5], old_board[4][4],
old_board[5][3], old_board[5][2], old_board[5][1], old_board[4][1], old_board[3][1], old_board[2][1]]
ring_3 = [old_board[0][0], old_board[0][1], old_board[0][2], old_board[0][3], old_board[1][4], old_board[2][5],
old_board[3][6], old_board[4][5], old_board[5][4], old_board[6][3], old_board[6][2], old_board[6][1],
old_board[6][0], old_board[5][0], old_board[4][0], old_board[3][0], old_board[2][0], old_board[1][0]]
# Rotate each ring
inner_ring = ring_1[-r:] + ring_1[:-r]
middle_ring = ring_2[-2*r:] + ring_2[:-2*r]
outer_ring = ring_3[-3*r:] + ring_3[:-3*r]
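        # Example: with r = 1, ring_1[-1:] + ring_1[:-1] turns [a, b, c, d, e, f]
        # into [f, a, b, c, d, e]; the middle and outer rings shift by 2r and 3r
        # entries, since they hold twice and three times as many spaces per turn.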
new_board = [[0]*4, [0]*5, [0]*6, [0]*7, [0]*6, [0]*5, [0]*4]
new_board[2][2], new_board[2][3], new_board[3][4], new_board[4][3], new_board[4][2], new_board[3][2] = inner_ring
(new_board[1][1], new_board[1][2], new_board[1][3], new_board[2][4], new_board[3][5], new_board[4][4],
new_board[5][3], new_board[5][2], new_board[5][1], new_board[4][1], new_board[3][1], new_board[2][1]) = middle_ring
(new_board[0][0], new_board[0][1], new_board[0][2], new_board[0][3], new_board[1][4], new_board[2][5],
new_board[3][6], new_board[4][5], new_board[5][4], new_board[6][3], new_board[6][2], new_board[6][1],
new_board[6][0], new_board[5][0], new_board[4][0], new_board[3][0], new_board[2][0], new_board[1][0]) = outer_ring
new_board[3][3] = old_board[3][3]
return new_board
def find_correct_space(self, piece):
"""Determines correct space based on name of piece, required when board is rotated and index doesn't match name."""
for x in range(len(self.board)):
for y in range(len(self.board[x])):
if self.board[x][y]["name"] == piece:
return "i{}{}".format(x, y)
def legal_moves(self, piece):
"""Determines legal moves for a given piece."""
x = int(piece[1]) # Row
        y = int(piece[2]) # Column
legal_spaces = self.board[x][y]["moves_to"]
legal_spaces_without_pieces = []
for space in legal_spaces:
piece_index = self.find_correct_space(space)
if not self.board[int(piece_index[1])][int(piece_index[2])]["has_piece"]:
legal_spaces_without_pieces.append(space)
return legal_spaces_without_pieces
def take_turn_random(self, CP_pieces, CP_name):
"""Execute turn for player through random choice."""
center_name = "i33"
if center_name in CP_pieces:
piece = center_name
piece_index = center_name
print(CP_name, "has a piece in the center...")
else:
unblocked_pieces = [a_piece for a_piece in CP_pieces if len(self.legal_moves(self.find_correct_space(a_piece))) > 0]
print("Available pieces:", CP_pieces)
print("Unblocked_pieces:", unblocked_pieces)
if len(unblocked_pieces) == 0:
print(CP_name, "has no available pieces. All pieces are blocked")
return False
piece = random.choice(unblocked_pieces)
piece_index = self.find_correct_space(piece)
dots = self.board[int(piece_index[1])][int(piece_index[2])]["dots"]
legal_spaces = self.legal_moves(piece_index)
selected_move = random.choice(legal_spaces)
selected_move_index = self.find_correct_space(selected_move)
print("Selected piece from list:", piece)
print("Selected piece has index:", piece_index)
print("Piece at index {} moves {} spaces".format(piece_index, dots))
print("Legal spaces:", legal_spaces)
print("Selected move from list:", selected_move)
print("Selected move has index:", selected_move_index)
x0 = int(piece_index[1])
y0 = int(piece_index[2])
x1 = int(selected_move_index[1])
y1 = int(selected_move_index[2])
self.board[x0][y0]["has_piece"] = False
self.board[x1][y1]["has_piece"] = CP_name
def take_turn(self, CP_pieces, CP_name):
"""Execute turn for player through user's choice."""
center_name = "i33"
legal_spaces = []
available_pieces = CP_pieces.copy()
if center_name in CP_pieces:
piece = center_name
piece_index = center_name
dots = self.board[int(piece_index[1])][int(piece_index[2])]["dots"] # "C"
legal_spaces = self.legal_moves(piece_index)
print(CP_name, "has a piece in the center...")
else:
while len(legal_spaces) == 0:
print("Available pieces:", available_pieces)
selected_piece = input("These are the available pieces for {}... {}:\n==> ".format(CP_name, available_pieces))
while not selected_piece.isdigit() or int(selected_piece) >= len(available_pieces):
selected_piece = input("These are the available pieces for {}... {}:\n==> ".format(CP_name, available_pieces))
piece = available_pieces[int(selected_piece)]
piece_index = self.find_correct_space(piece)
dots = self.board[int(piece_index[1])][int(piece_index[2])]["dots"]
legal_spaces = self.legal_moves(piece_index)
print("Piece at index {} moves {} spaces".format(piece_index, dots))
print("Legal spaces:", legal_spaces)
if len(legal_spaces) == 0:
print("Selected piece is blocked")
if HARD_MODE:
return False
available_pieces.remove(piece)
if len(available_pieces) == 0:
print(CP_name, "has no available pieces; all pieces are blocked")
return False
selected_legal_space = input("These are the available moves for piece {}... {}:\n==> ".format(piece, legal_spaces))
while not selected_legal_space.isdigit() or int(selected_legal_space) >= len(legal_spaces):
selected_legal_space = input("These are the available moves for piece {}... {}:\n==> ".format(piece, legal_spaces))
selected_move = legal_spaces[int(selected_legal_space)]
selected_move_index = self.find_correct_space(selected_move)
print("Selected piece from list:", piece)
print("Selected piece has index:", piece_index)
print("Piece at index {} moves {} spaces".format(piece_index, dots))
print("Legal spaces:", legal_spaces)
print("Selected move from list:", selected_move)
print("Selected move has index:", selected_move_index)
x0 = int(piece_index[1])
y0 = int(piece_index[2])
x1 = int(selected_move_index[1])
y1 = int(selected_move_index[2])
self.board[x0][y0]["has_piece"] = False
self.board[x1][y1]["has_piece"] = CP_name
class Players(object):
def __init__(self, player_count):
"""
Constructor for Player class.
Parameters:
player_count: the number of players playing the game int in range(2, 19).
"""
self.player_count = player_count
self.players = {"P{}".format(n): {"pieces": [], "is_active": True} for n in range(1, self.player_count + 1)}
def get_active_players(self):
"""Update active players."""
return [player for player in self.players if self.players[player]["is_active"]]
def update_players(self, board):
"""Update player object."""
active_players = self.get_active_players()
for player in active_players:
self.players[player]["pieces"] = [space["name"] for space in flatten(board) if space["has_piece"] == player and not space["is_hole"]]
def update_player_status(self, board):
"""Update player object when moving down boards, removing eliminated players."""
active_players = self.get_active_players()
for player in active_players:
self.players[player]["pieces"] = [space["name"] for space in flatten(board) if space["has_piece"] == player]
if len(self.players[player]["pieces"]) == 0:
self.players[player]["is_active"] = False
def remove_inactive_players(self, starting_spaces_length):
"""Remove players when the number of players is greater than the number of starting spaces."""
if self.player_count > starting_spaces_length:
for player in self.players:
player_number = int(player[1:])
if player_number > starting_spaces_length:
self.players[player]["is_active"] = False
class NewGame(object):
def __init__(self, top_board, player_count, random_gameplay):
"""
Constructor for NewGame class.
Parameters:
board: the top board for gameplay (B4, B3, B2, B1).
player_count: the number of players playing the game int in range(2, 19).
random_gameplay: will gameplay will be executed through random choice or user input.
"""
self.board = top_board
self.board_array = [B4, B3, B2, B1][[B4, B3, B2, B1].index(self.board):] # List of boards being used
self.player_count = player_count
self.players = Players(self.player_count)
self.random_gameplay = random_gameplay
self.turn = 1
self.winner = False
def configure_boards(self):
"""Rotate boards in board_array, then add sub_dots."""
for i in range(1, len(self.board_array)):
r = random.randint(0, 5)
self.board_array[i] = Actions(self.board_array[i]).rotate(r)
upper_board = self.board_array[i-1]
lower_board = self.board_array[i]
for x in range(len(upper_board)):
for y in range(len(upper_board[x])):
if upper_board[x][y]["is_hole"]:
upper_board[x][y]["sub_dots"] = lower_board[x][y]["dots"]
def get_starting_spaces(self):
"""Get starting spaces and determine pieces per player."""
if self.board == B4:
starting_spaces = [space["name"] for space in flatten(self.board) if space["starting_space"]]
random.shuffle(starting_spaces)
equal_spaces = (18//self.player_count)*self.player_count
starting_spaces = starting_spaces[:equal_spaces]
else:
r = random.randint(0, 5)
upper_board = B2 if self.board == B1 else (B3 if self.board == B2 else B4)
flat_upper_board = flatten(Actions(upper_board).rotate(r))
flat_starting_board = flatten(self.board)
starting_spaces = [starting_board["name"] for starting_board,upper_board in zip(flat_starting_board, flat_upper_board) if upper_board["is_hole"]]
random.shuffle(starting_spaces)
self.starting_spaces_length = len(starting_spaces)
number_of_separations = min(self.starting_spaces_length, self.player_count)
minimum_pieces, extra_piece = divmod(self.starting_spaces_length, self.player_count)
separations = [minimum_pieces + (1 if i<extra_piece else 0) for i in range(number_of_separations)]
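        # e.g. 18 spaces split among 4 players: divmod(18, 4) == (4, 2), so the
        # separations come out as [5, 5, 4, 4] before shuffling.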
random.shuffle(separations)
return starting_spaces, separations
def configure_players_random(self):
"""Configure player object for random gameplay."""
starting_spaces, pieces_per_player = self.get_starting_spaces()
self.players.remove_inactive_players(self.starting_spaces_length)
active_players = self.players.get_active_players()
player_names = [player for player,total_pieces in zip(active_players, pieces_per_player) for i in range(total_pieces)]
for name,space in zip(player_names, starting_spaces):
self.board[int(space[1])][int(space[2])]["has_piece"] = name
self.players.update_players(self.board)
def configure_players(self):
"""Configure player object for user-selected gameplay"""
starting_spaces, pieces_per_player = self.get_starting_spaces()
self.players.remove_inactive_players(self.starting_spaces_length)
active_players = self.players.get_active_players()
extra_pieces = [player for player,total_pieces in zip(active_players, pieces_per_player) if total_pieces > min(pieces_per_player)]
player_names = active_players*min(pieces_per_player) + extra_pieces
#player_names = flatten([[player for player,total_pieces in zip(active_players, pieces_per_player) if i < total_pieces] for i in range(max(pieces_per_player))])
print("player names:", player_names) # Order that players place pieces
for name in player_names:
print("Pick a space for", name)
space_index = input("These are the remaining spaces... {}\n==> ".format(starting_spaces))
            while not space_index.isdigit() or int(space_index) >= len(starting_spaces):
space_index = input("These are the remaining spaces... {}\n==> ".format(starting_spaces))
selected_space = starting_spaces.pop(int(space_index))
x = int(selected_space[1]) # Row
            y = int(selected_space[2])  # Column
self.board[x][y]["has_piece"] = name
self.players.update_players(self.board)
print(Display(self.board))
def make_move_random(self):
"""Make random move for player."""
center = self.board[3][3]
if not center["has_piece"]:
active_players = self.players.get_active_players()
self.CP_name = "P1" if self.turn == 1 else active_players[(active_players.index(self.CP_name)+1) % len(active_players)]
self.CP_pieces = self.players.players[self.CP_name]["pieces"] # Wrong player if game starts with piece in center
print(self.CP_name, "has these pieces:", self.CP_pieces)
if len(self.CP_pieces) == 0:
print(self.CP_name, "has no valid moves")
else:
Actions(self.board).take_turn_random(self.CP_pieces, self.CP_name)
def make_move(self):
"""Make move for player."""
center = self.board[3][3]
if not center["has_piece"]:
active_players = self.players.get_active_players()
self.CP_name = "P1" if self.turn == 1 else active_players[(active_players.index(self.CP_name)+1) % len(active_players)]
self.CP_pieces = self.players.players[self.CP_name]["pieces"] # Wrong player if game starts with piece in center
print(self.CP_name, "has these pieces:", self.CP_pieces)
if len(self.CP_pieces) == 0:
print(self.CP_name, "has no valid moves")
else:
Actions(self.board).take_turn(self.CP_pieces, self.CP_name)
def check_for_winner(self):
"""Check for winner."""
center = self.board[3][3]
active_players = self.players.get_active_players()
if (len(self.board_array) == 1 and center["has_piece"]) or len(active_players) == 1:
self.winner = self.CP_name # Player who moves into center or last remaining player
return self.winner
def imprint_board(self):
"""Assigns pieces in holes to lower board."""
next_board = self.board_array[1]
for x in range(len(self.board)):
for y in range(len(self.board[x])):
if self.board[x][y]["is_hole"]:
next_board[x][y]["has_piece"] = self.board[x][y]["has_piece"]
self.board_array.pop(0)
self.board = self.board_array[0] # self.board is now next board in self.board_array
print("\n\n\nALL HOLES ARE FILLED ON BOARD B{}; NOW MOVING TO BOARD B{}\n\n\n".format(len(self.board_array)+1, len(self.board_array)))
def play(self):
"""Play a complete game."""
print(Display(self.board))
self.configure_boards()
if self.random_gameplay:
self.configure_players_random()
else:
self.configure_players()
print("Game Setup is now complete")
while True:
print("\n\n\nTURN NUMBER", self.turn)
print(Display(self.board))
self.players.update_players(self.board)
if self.random_gameplay:
self.make_move_random()
else:
self.make_move()
center = self.board[3][3]
if center["has_piece"]:
print(Display(self.board))
self.players.update_players(self.board)
if self.check_for_winner():
break
if self.random_gameplay:
self.make_move_random()
else:
self.make_move()
# Check if all pieces are in holes
if all([space["has_piece"] for space in flatten(self.board) if space["is_hole"]]):
print(Display(self.board))
self.imprint_board()
self.players.update_player_status(self.board)
if self.check_for_winner():
break
self.turn += 1
if self.winner:
print("\n\nThere is a winner...")
print(Display(self.board))
print("\nTHE WINNER IS", self.winner)
else:
print("\n\nGame incomplete.")
print(Display(self.board))
def main():
with open("out.txt", "w") as outfile:
pass
TOP_BOARD = B4
PLAYER_COUNT = 18
RANDOM_GAMEPLAY = True
PRINT_TO_CONSOLE = True
if TOP_BOARD not in [B4, B3, B2, B1]:
raise ValueError("Only (B4, B3, B2, B1) are allowed")
if PLAYER_COUNT not in range(2, 19):
raise ValueError("Valid input is int in range(2, 19)")
    if not isinstance(RANDOM_GAMEPLAY, bool):
        raise ValueError("Valid input is bool")
Game = NewGame(TOP_BOARD, PLAYER_COUNT, RANDOM_GAMEPLAY)
if PRINT_TO_CONSOLE:
Game.play()
else:
with HiddenPrints():
Game.play()
if __name__ == "__main__":
main()
"""
TO-DO:
Expand and clarify docstrings
Create GUI
IDEAS:
Create move trees and determine winning strategy
Create AI to learn game, find optimal strategy
DIFFERENCES FROM MAIN GAME:
- can support more than four players
- has an easy mode where a player's turn isn't skipped if they choose a blocked piece
"""
``` |
{
"source": "jordanondricka/keras",
"score": 3
} |
#### File: keras/engine/node_test.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from keras import keras_parameterized
from keras.engine import base_layer
from keras.engine import node as node_module
class DummyTensor(object):
def __init__(self, shape=None):
self.shape = shape
class DummyLayer(base_layer.Layer):
pass
class NetworkConstructionTest(keras_parameterized.TestCase):
def test_chained_node_construction(self):
# test basics
a = DummyTensor(shape=(None, 32))
b = DummyTensor(shape=(None, 32))
a_layer = DummyLayer()
node = node_module.Node(a_layer, outputs=a)
self.assertEqual(node.outbound_layer, a_layer)
self.assertTrue(node.is_input)
self.assertListEqual(node.inbound_layers, [])
self.assertListEqual(node.input_tensors, [a])
self.assertListEqual(node.input_shapes, [(None, 32)])
self.assertListEqual(node.output_tensors, [a])
self.assertListEqual(node.output_shapes, [(None, 32)])
b_layer = DummyLayer()
node_module.Node(b_layer, outputs=b)
dense = DummyLayer()
a_2 = DummyTensor()
node_a = node_module.Node(layer=dense, call_args=(a,), outputs=a_2)
b_2 = DummyTensor()
node_b = node_module.Node(layer=dense, call_args=(b,), outputs=b_2)
# test the node attributes
self.assertFalse(node_a.is_input)
self.assertFalse(node_b.is_input)
self.assertEqual(node_a.call_args, (a,))
self.assertEqual(node_a.call_kwargs, {})
self.assertEqual(node_a.outputs, a_2)
# Test the layer wiring
self.assertLen(dense._inbound_nodes, 2)
self.assertLen(dense._outbound_nodes, 0)
self.assertEqual(dense._inbound_nodes, [node_a, node_b])
self.assertEqual(dense._inbound_nodes[0].inbound_layers, a_layer)
self.assertEqual(dense._inbound_nodes[0].outbound_layer, dense)
self.assertEqual(dense._inbound_nodes[1].inbound_layers, b_layer)
self.assertEqual(dense._inbound_nodes[1].outbound_layer, dense)
self.assertIs(dense._inbound_nodes[0].input_tensors, a)
self.assertIs(dense._inbound_nodes[1].input_tensors, b)
def test_multi_input_node(self):
# test multi-input layer
a = DummyTensor()
b = DummyTensor()
dense = DummyLayer()
a_2 = DummyTensor()
node_module.Node(layer=dense, call_args=(a,), outputs=a_2)
b_2 = DummyTensor()
node_module.Node(layer=dense, call_args=(b,), outputs=b_2)
concat_layer = DummyLayer()
merged = DummyTensor()
node_module.Node(layer=concat_layer, call_args=([a_2, b_2],),
outputs=merged)
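    # `_keras_history` is the (layer, node_index, tensor_index) triple that
    # Node records on each of its output tensors when it is constructed.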
merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
self.assertEqual(merge_node_index, 0)
self.assertEqual(merge_tensor_index, 0)
self.assertLen(merge_layer._inbound_nodes, 1)
self.assertLen(merge_layer._outbound_nodes, 0)
self.assertLen(merge_layer._inbound_nodes[0].input_tensors, 2)
self.assertEqual(merge_layer._inbound_nodes[0].input_tensors, [a_2, b_2])
self.assertLen(merge_layer._inbound_nodes[0].inbound_layers, 2)
def test_arg_and_kwarg_mix(self):
input_layer = DummyLayer()
input_layer_2 = DummyLayer()
a = DummyTensor()
node_a = node_module.Node(layer=input_layer, outputs=a)
b = DummyTensor()
node_b = node_module.Node(layer=input_layer_2, outputs=b)
arg_2 = DummyTensor()
arg_3 = DummyTensor()
node_c = node_module.Node(layer=input_layer, outputs=arg_3)
kwarg_x = DummyTensor()
kwarg_y = DummyTensor()
node_d = node_module.Node(layer=input_layer, outputs=kwarg_y)
merge_layer = DummyLayer()
merged = DummyTensor()
node = node_module.Node(layer=merge_layer,
call_args=([a, b], arg_2, arg_3),
call_kwargs={'x': kwarg_x, 'y': kwarg_y},
outputs=merged)
merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
# Check the saved call args/kwargs
self.assertEqual(([a, b], arg_2, arg_3), node.call_args)
self.assertEqual({'x': kwarg_x, 'y': kwarg_y}, node.call_kwargs)
# Only the inputs that were produced by input nodes should appear in
# keras_tensors
self.assertEqual({a, b, arg_3, kwarg_y}, set(node.keras_inputs))
self.assertEqual(set(node.parent_nodes), {node_a, node_b, node_c, node_d})
# Check the layer wirings
self.assertEqual(merge_node_index, 0)
self.assertEqual(merge_tensor_index, 0)
self.assertLen(merge_layer._inbound_nodes, 1)
self.assertLen(merge_layer._outbound_nodes, 0)
self.assertLen(input_layer._outbound_nodes, 3)
self.assertLen(input_layer_2._outbound_nodes, 1)
# The 'backwards compatibility' attributes should only check the
# first call argument
self.assertLen(merge_layer._inbound_nodes[0].input_tensors, 2)
self.assertEqual(merge_layer._inbound_nodes[0].input_tensors, [a, b])
self.assertLen(merge_layer._inbound_nodes[0].inbound_layers, 2)
if __name__ == '__main__':
tf.test.main()
``` |
{
"source": "jordanopensource/data-science-bootcamp",
"score": 3
} |
#### File: data-science-bootcamp/pipelines/josagit.py
```python
import random
from collections import defaultdict
from heapq import nlargest
from luigi import six
import luigi
import luigi.postgres
import urllib2
import StringIO
import gzip
import json
class GetArchive(luigi.Task):
"""
Get the archive unit from github
"""
def run(self):
"""
Download the main archive data from the data.githubarchive.org
"""
baseURL = "http://data.githubarchive.org/2015-01-01-15.json.gz"
outpath = "github.json"
response = urllib2.urlopen(baseURL)
compressedFile = StringIO.StringIO()
compressedFile.write(response.read())
compressedFile.seek(0)
decompressedFile = gzip.GzipFile(fileobj=compressedFile, mode='rb')
with open(outpath, 'w+') as outfile:
outfile.write(decompressedFile.read())
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file in the local file system.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget('github.json')
class AggregateLanguages(luigi.Task):
"""
    Loops over all JSON entries,
    gets every "PullRequestEvent" from the dump,
    parses the JSON to extract the language,
    and increments a counter for each language.
    Outputs a new file mapping language -> count,
    logged in a format ready for Hadoop.
"""
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget("cleaned-data/aggregated-languages.json")
def requires(self):
"""
This task's dependencies:
* :py:class:`~.GetArchive`
:return: list of object (:py:class:`luigi.task.Task`)
"""
return [GetArchive()]
def run(self):
language_count = defaultdict(int)
for t in self.input():
with t.open('r') as in_file:
for line in in_file:
values = json.loads(line)
if values['type'] == "PullRequestEvent":
try:
language = values['payload']['pull_request']['head']['repo']['language']
language_count[language] += 1
                        except (KeyError, TypeError):
pass
with self.output().open('w') as out_file:
for language, count in six.iteritems(language_count):
out_file.write('{}\t{}\n'.format(language, count))
class histogram(luigi.Task):
"""
Get the count of each event available
"""
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget("cleaned-data/histogram.json")
def requires(self):
"""
This task's dependencies:
* :py:class:`~.GetArchive`
:return: list of object (:py:class:`luigi.task.Task`)
"""
return [GetArchive()]
def run(self):
event_count = defaultdict(int)
for t in self.input():
with t.open('r') as in_file:
for line in in_file:
values = json.loads(line)
try:
event_type = values['type']
event_count[event_type] += 1
                    except KeyError:
pass
with self.output().open('w') as out_file:
for event_type, count in six.iteritems(event_count):
out_file.write('{}\t{}\n'.format(event_type, count))
if __name__ == "__main__":
luigi.run()
```
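For completeness, a minimal sketch of how these tasks could be launched programmatically rather than via the `luigi` CLI (this driver module is hypothetical; `luigi.build` with `local_scheduler=True` avoids needing a running `luigid`):

```python
import luigi

# Hypothetical driver: build the language aggregation and the event
# histogram in one go, using the in-process local scheduler.
from josagit import AggregateLanguages, histogram

if __name__ == "__main__":
    luigi.build([AggregateLanguages(), histogram()], local_scheduler=True)
```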
#### File: data-science-bootcamp/pipelines/josatop.py
```python
import random
from collections import defaultdict
from heapq import nlargest
from luigi import six
import luigi
import luigi.contrib.hadoop
import luigi.contrib.hdfs
import luigi.postgres
class Streams(luigi.Task):
"""
Used to generate random data for demonstration.
"""
date = luigi.DateParameter()
def run(self):
"""
Writes to :py:meth:`~.Streams.output` target.
"""
with self.output().open('w') as output:
for _ in range(1000):
output.write('{} {} {}\n'.format(
random.randint(0, 999),
random.randint(0, 999),
random.randint(0, 999)))
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file in the local file system.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget(self.date.strftime('data/streams_%Y_%m_%d_faked.tsv'))
class AggregateArtists(luigi.Task):
"""
This task runs over the target data returned by :py:meth:`~/.Streams.output` and
writes the result into its :py:meth:`~.AggregateArtists.output` target (local file).
"""
date_interval = luigi.DateIntervalParameter()
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget("data/artist_streams_{}.tsv".format(self.date_interval))
def requires(self):
"""
This task's dependencies:
* :py:class:`~.Streams`
:return: list of object (:py:class:`luigi.task.Task`)
"""
return [Streams(date) for date in self.date_interval]
def run(self):
artist_count = defaultdict(int)
for t in self.input():
with t.open('r') as in_file:
for line in in_file:
_, artist, track = line.strip().split()
artist_count[artist] += 1
with self.output().open('w') as out_file:
for artist, count in six.iteritems(artist_count):
out_file.write('{}\t{}\n'.format(artist, count))
class Top10Artists(luigi.Task):
"""
This task runs over the target data returned by :py:meth:`~/.AggregateArtists.output` or
:py:meth:`~/.AggregateArtistsHadoop.output` in case :py:attr:`~/.Top10Artists.use_hadoop` is set and
writes the result into its :py:meth:`~.Top10Artists.output` target (a file in local filesystem).
"""
date_interval = luigi.DateIntervalParameter()
use_hadoop = luigi.BoolParameter()
def requires(self):
"""
This task's dependencies:
* :py:class:`~.AggregateArtists` or
* :py:class:`~.AggregateArtistsHadoop` if :py:attr:`~/.Top10Artists.use_hadoop` is set.
:return: object (:py:class:`luigi.task.Task`)
"""
if self.use_hadoop:
# Complete a hadoop MR job and output to output target (local file system)
# For now I'll just pass...
pass
else:
return AggregateArtists(self.date_interval)
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget("data/top_artists_%s.tsv" % self.date_interval)
def run(self):
top_10 = nlargest(10, self._input_iterator())
with self.output().open('w') as out_file:
for streams, artist in top_10:
out_line = '\t'.join([
str(self.date_interval.date_a),
str(self.date_interval.date_b),
artist,
str(streams)
])
out_file.write((out_line + '\n'))
def _input_iterator(self):
with self.input().open('r') as in_file:
for line in in_file:
artist, streams = line.strip().split()
yield int(streams), artist
if __name__ == "__main__":
luigi.run()
``` |
{
"source": "jordanosborn/MastersProject",
"score": 2
} |
#### File: code/analysis/analyse.py
```python
from data_clean import data_open
from sys import argv
from matplotlib import pyplot as plt
import numpy as np
from scipy.optimize import curve_fit
import os
from twilio.rest import Client
from typing import Any, List, Callable, Dict, Tuple
import json, sqlite3
from collections import OrderedDict
with open("secrets_template.json") as f:
secrets = json.loads(f.read())
def send_message(secrets: Any, body: str):
try:
account_sid = secrets["account_sid"]
auth_token = secrets["auth_token"]
client = Client(account_sid, auth_token)
message = client.messages.create(
body=body, from_=f'{secrets["twilio_number"]}', to=f'{secrets["phone_number"]}'
)
print(f'Sent message to {secrets["phone_number"]} message_ID = {message.sid}')
except KeyError:
pass
def func(x, a, b, c):
    return a * (1 - np.exp(-x / b)) + c
def get_fit(f, x, y, bounds):
try:
fit, _ = curve_fit(f, x, y, bounds=(bounds[0], bounds[1]))
except RuntimeError:
return [np.nan] * len(bounds[0])
else:
return fit
def analyse(
path: str,
function: Callable,
bounds_dict: Dict[str, Tuple[Any, ...]],
plot_param: int,
function_string="a * np.exp(-x / b) + c",
):
video_name = list(filter(lambda s: s != "", path.split("/")))[-1]
index, x_data, Y = data_open(path + "/radial_Avg.csv")
x_data = np.array(x_data)
data = []
parameters = list(bounds_dict.keys())
bounds = (
[v[0] for _, v in bounds_dict.items()],
[v[1] for _, v in bounds_dict.items()],
)
# Save all plots of I vs tau for each q
for i, v in enumerate(zip(index, Y)):
q, y = v
y_data = np.array(y)
fit = get_fit(function, x_data, y_data, bounds)
data.append(fit)
# plt.title(
# f"Plot of Intensity delta ({video_name}) for q={q} vs frame difference tau"
# )
# plt.ylabel(f"I(q={q}, tau)")
# plt.xlabel("tau")
# plt.plot(x_data, y_data, label="data")
# plt.plot(
# x_data,
# func(x_data, *fit),
# label=f"fit f(tau) = {function_string.replace('np.', '')} with {', '.join(map(lambda x: f'{x[0]}={x[1]}', zip(parameters, map(lambda s: round(s, 2), fit))))}",
# )
# plt.legend(loc="lower right")
# plt.savefig(f"{path}/I_vs_tau_for_q_{q}.png")
# plt.close()
if i % 10 == 0:
print(f"{round(100 * i/len(index), 0)}% complete.")
print(f"100% complete.")
    # Save raw fit data
conn = sqlite3.connect("crowd.sqlite")
with conn:
conn.execute(f"DROP TABLE IF EXISTS fit_{video_name}")
with conn:
conn.execute(
f"create table fit_{video_name} (q float primary key, function text, {', '.join(parameters)})"
)
with conn:
conn.executemany(
f"insert into fit_{video_name} values (?, ?, {', '.join(['?'] * len(data[0]))})",
map(
lambda x: [float(x[0])] + [function_string] + [*x[1]], zip(index, data)
),
)
# save log tau_c vs log q
tau_c = np.log(np.array(list(map(lambda x: x[plot_param], data))))
    q = np.log(np.array(index, dtype=float))
plt.title(f"log(tau_c) vs log(q) for {video_name}")
plt.ylabel("log(tau_c)")
plt.xlabel("log(q)")
plt.plot(q, tau_c)
plt.savefig(f"{path}/tau_c_plot.png")
plt.close()
# TODO: change this to query db instead
if __name__ == "__main__":
if os.path.isdir(argv[1]):
files: List[str] = []
for (dirpath, dirnames, filenames) in os.walk(argv[1]):
files.extend(map(lambda s: f"./{dirpath}/{s}", filenames))
files = list(filter(lambda s: s.find("radial_Avg.csv") != -1, files))
directories = list(map(lambda s: s.replace("/radial_Avg.csv", ""), files))
for i, v in enumerate(directories):
analyse(
v,
func,
OrderedDict(
{"a": (-np.inf, np.inf), "b": (0, np.inf), "c": (-np.inf, np.inf)}
),
1,
)
if i % 10 == 0:
send_message(
secrets["twilio"],
f"Completed approximately {round(i * 100 / len(directories))}%.",
)
elif os.path.isfile(argv[1]) and argv[1].find("radial_Avg.csv") != -1:
analyse(
argv[1].replace("radial_Avg.csv", ""),
func,
OrderedDict(
{"a": (-np.inf, np.inf), "b": (0, np.inf), "c": (-np.inf, np.inf)}
),
1,
)
elif (
os.path.isfile(argv[1])
and argv[1].find("radial_Avg.csv") != -1
and argv[2] == "custom"
):
print("Errors are not checked!")
params_str = input( # nosec
"Comma spaced parameter list with range e.g. A(0: np.inf)? "
)
params = params_str.replace(" ", "").replace("\t", "").split(",")
bounds: Dict[str, Tuple[Any, ...]] = OrderedDict()
for p in params:
name, values = p.replace(")", "").split("(")
bounds[name] = tuple(map(eval, values.split(":")))
independent_vars = input( # nosec
"Please enter comma separated list of independent variable names? "
).split(",")
independent_vars = list(
filter(
lambda s: s != "",
map(lambda s: s.replace(" ", "").replace("\t", ""), independent_vars),
)
)
function_string = input( # nosec
"Please enter function to fit to using params? "
)
plot_param = int(
input("Please enter the index (starting 0) of the final plot? ") # nosec
)
print(bounds, "\n", f"f({', '.join(independent_vars)}) = {function_string}")
if input("Are these correct (y/n)? ").strip() == "y": # nosec
function = eval(
f"lambda {','.join(independent_vars)}, {','.join(bounds.keys())}: {function_string}"
)
analyse(
argv[1].replace("/radial_Avg.csv", ""),
function,
bounds,
plot_param,
function_string,
)
else:
print("Try again!")
```
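For intuition, here is a minimal, self-contained sketch of the fitting step used above (same saturating-exponential `func`; the data is synthetic and the parameter values are purely illustrative):

```python
import numpy as np
from scipy.optimize import curve_fit

def func(x, a, b, c):
    return a * (1 - np.exp(-x / b)) + c

# Synthetic I(tau) curve with known parameters plus a little noise.
tau = np.linspace(0.1, 50, 200)
y = func(tau, 2.0, 5.0, 0.3) + np.random.normal(0, 0.02, tau.size)

bounds = ([-np.inf, 0, -np.inf], [np.inf, np.inf, np.inf])
fit, _ = curve_fit(func, tau, y, bounds=bounds)
print(fit)  # should recover roughly [2.0, 5.0, 0.3]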
#### File: code/analysis/data_clean.py
```python
import sys, os
from typing import List, Tuple, Optional
import sqlite3
import matplotlib.pyplot as plt
def data_open(
file: str
) -> Tuple[List[float], Optional[List[float]], List[List[float]]]:
with open(file) as f:
raw_data = f.readlines()
data: List[List[str]] = list(
map(lambda s: s.replace("\n", "").split(",")[:-1], raw_data)
)
X = None
index = list(map(float, data[0]))
data = data[1:]
cleaned = []
for d in data:
x = []
y = []
for v in d:
s = v.replace("(", "").replace(")", "").split(" ")
if X is None:
x.append(float(s[0]))
y.append(float(s[1]))
if X is None:
X = x
cleaned.append(y)
return (index, X, cleaned)
def plot():
_, x, y = data_open(sys.argv[2])
plt.plot(x, y[int(sys.argv[3])])
plt.show()
def modify_db(database: str, folder: str, filename: str, prefix: str = "video"):
conn = sqlite3.connect(database)
files: List[str] = []
for (dirpath, _, filenames) in os.walk(folder):
files.extend(map(lambda s: f"./{dirpath}/{s}", filenames))
files = list(
filter(lambda s: s.find(filename) != -1 and s.find(".csv") != -1, files)
)
names = list(
map(
lambda s: f"{prefix}_"
+ os.path.basename(s)
.replace(filename, os.path.basename(os.path.dirname(s)))
.replace(".csv", ""),
files,
)
)
create_table = (
lambda table, tau: f"create table {table} (q float primary key, {tau})"
)
insert = lambda table, q, tau, I: (
f"insert into {table} values (?, {', '.join(['?']*len(tau))})",
map(lambda x: [x[0]] + [*x[1]], zip(q, I)),
)
for f, name in zip(files, names):
q, tau_list, I_q_tau = data_open(f)
if tau_list is not None:
tau = ", ".join(map(lambda i: f"tau{int(i)} integer", tau_list))
with conn:
conn.execute(f"DROP TABLE IF EXISTS {name}")
with conn:
conn.execute(create_table(name, tau))
with conn:
conn.executemany(*insert(name, q, tau_list, I_q_tau))
print(f"{name} added!")
else:
continue
if __name__ == "__main__":
if len(sys.argv) == 4 and sys.argv[1] == "plot":
plot()
elif (
len(sys.argv) == 3
and sys.argv[1].find(".sqlite") != -1
and os.path.exists(sys.argv[1])
):
modify_db(sys.argv[1], sys.argv[2], "radial_Avg.csv")
elif (
len(sys.argv) == 5
and sys.argv[1].find(".sqlite") != -1
and os.path.exists(sys.argv[1])
and os.path.exists(sys.argv[2])
):
modify_db(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
```
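To make the expected input layout concrete, here is a tiny hand-written file in the format `data_open` parses (the values are made up; note the trailing commas, which the parser relies on, and that `data_open` is assumed imported from `data_clean`):

```python
# First row: q values; later rows: "(tau intensity)" pairs, one row per q.
sample = "0.1,0.2,\n(1 0.5),(2 0.7),\n(1 0.6),(2 0.8),\n"
with open("example_radial_Avg.csv", "w") as f:
    f.write(sample)

index, X, cleaned = data_open("example_radial_Avg.csv")
assert index == [0.1, 0.2]           # q values from the header row
assert X == [1.0, 2.0]               # tau values from the first data row
assert cleaned == [[0.5, 0.7], [0.6, 0.8]]
```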
#### File: MastersProject/code/run.py
```python
import json, math
import sys
import subprocess as sp # nosec
import os
from twilio.rest import Client
from typing import Union, Any, List, Iterable, Optional
from itertools import product
def get_allowed_dimension(
tiling_min: int, tiling_max: int, tiling_size_count: Optional[int]
) -> List[int]:
power2 = math.ceil(math.log2(tiling_max))
power3 = math.ceil(math.log(tiling_max, 3))
power5 = math.ceil(math.log(tiling_max, 5))
box_range_pre = product(range(power2 + 1), range(power3 + 1), range(power5 + 1))
box_range: List[int] = list(
filter(
lambda x: tiling_min <= x <= tiling_max,
map(lambda x: int(2 ** x[0] * 3 ** x[1] * 5 ** x[2]), box_range_pre),
)
)
box_range.sort()
if tiling_size_count is not None:
length = len(box_range)
        if tiling_size_count > length:
            tiling_size_count = length
new_vec = []
for i in range(tiling_size_count - 1):
new_vec.append(box_range[int(i * length / math.ceil(tiling_size_count))])
new_vec.append(box_range[length - 1])
return new_vec
else:
return box_range
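# Worked example (illustrative): get_allowed_dimension(16, 64, None) yields the
# 5-smooth sizes [16, 18, 20, 24, 25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64],
# i.e. products 2**a * 3**b * 5**c, the tile dimensions FFTs handle efficiently.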
with open("secrets_template.json") as f:
secrets = json.loads(f.read())
def send_message(secrets: Any, body: str):
try:
account_sid = secrets["account_sid"]
auth_token = secrets["auth_token"]
client = Client(account_sid, auth_token)
message = client.messages.create(
body=body, from_=f'{secrets["twilio_number"]}', to=f'{secrets["phone_number"]}'
)
print(f'Sent message to {secrets["phone_number"]} message_ID = {message.sid}')
except KeyError:
pass
def run(command: str, video: str, capacity: str, radial_width: str):
print(video)
if command == "video-multi-ddm":
# TODO: run on range of box sizes to prevent resource starvation
# size_range = get_allowed_dimension(16, 1024, 16)
sp.call(
[
"cargo",
"run",
"--release",
command,
str(capacity),
str(radial_width),
str(64),
str(1024),
str(16),
video,
]
)
else:
sp.call(
[
"cargo",
"run",
"--release",
command,
str(capacity),
str(radial_width),
video,
]
)
def upload():
sp.call(["git", "add", "."])
sp.call(["git", "commit", "-m", '"added more data"'])
sp.call(["git", "pull", "--rebase"])
sp.call(["git", "push"])
def contains_any(string: str, to_check: List[str]) -> bool:
return any(map(lambda x: string.find(x) != -1, to_check))
def incomplete_filter(files: List[str], directory: str) -> Iterable[str]:
completed_videos = []
for (_, dirnames, _) in os.walk(directory):
completed_videos.extend(dirnames)
return filter(lambda x: not contains_any(x, completed_videos), files)
def filter_non_videos(files: Union[Iterable[str], List[str]]) -> Iterable[str]:
video_filetypes = [".avi", ".mp4", ".m4v"]
return filter(lambda s: contains_any(s, video_filetypes), files)
def retranspose(files: List[str]):
for i, f in enumerate(files):
file_path = f.replace("./", "").replace("results", "results-transposed")
try:
os.mkdir("/".join(file_path.split("/")[0:-1]))
except FileExistsError:
pass
else:
sp.call(
[
"cargo",
"run",
"--release",
"retranspose",
f.replace("./", ""),
"output.csv",
]
)
sp.call(["mv", "output.csv", file_path])
print(f"Completed {(i+1) * 100 / len(files)}%.")
def add_to_db(
db: str, folder: str, filename: str = "radial_Avg.csv", prefix: str = "video"
):
sp.call(["python3", "analysis/data_clean.py", db, folder, filename, prefix])
if __name__ == "__main__":
files: List[str] = []
if (
len(sys.argv) == 3
and sys.argv[1] in ["video-multi-ddm", "video-ddm"]
and os.path.isdir(sys.argv[2])
):
sys.argv = sys.argv + ["80", "1"]
if (
len(sys.argv) == 5
and sys.argv[1] in ["video-multi-ddm", "video-ddm"]
and os.path.isdir(sys.argv[2])
):
files = []
capacity, radial_width = sys.argv[3], sys.argv[4]
for (dirpath, dirnames, filenames) in os.walk(sys.argv[2]):
files.extend(
map(
lambda s: f"./{dirpath}{s}",
filter(
lambda s: s.split(".")[-1] in ["avi", "mp4", "m4v"], filenames
),
)
)
files_filtered = incomplete_filter(files, "./results-multiDDM")
files_filtered = list(filter_non_videos(files_filtered))
print(f"{len(files_filtered)}/{len(files)} left to analyse.")
for index, video in enumerate(files_filtered):
run(sys.argv[1], video, capacity, radial_width)
if index % 5 == 0 and index != 0:
send_message(
secrets["twilio"],
f"Have completed approximately {round((index + len(files) - len(files_filtered)) * 100 / len(files), 2)}%.",
)
upload()
if sys.argv[1] == "video-multi-ddm":
add_to_db(
"crowd.sqlite", "results-multiDDM", "data_boxsize", "video_multi_ddm"
)
else:
print("Producing retranspose")
files = []
for (dirpath, dirnames, filenames) in os.walk("./results"):
files.extend(
filter(
lambda s: s.find("radial_Avg.csv") != -1,
map(lambda s: f"./{dirpath}/{s}", filenames),
)
)
retranspose(files)
add_to_db("crowd.sqlite", "results-transposed")
upload()
elif len(sys.argv) == 3 and sys.argv[1] == "fit" and os.path.isdir(sys.argv[2]):
sp.call(["python3", "./analysis/analyse.py", *sys.argv[2:]])
upload()
elif len(sys.argv) == 2 and sys.argv[1] == "plot":
sp.call(["python3", "./analysis/plotter.py", "search", "video"])
elif len(sys.argv) == 5 and sys.argv[1] == "resize" and os.path.isdir(sys.argv[2]):
files = []
for (dirpath, dirnames, filenames) in os.walk(sys.argv[2]):
files.extend(
map(
lambda f: os.path.join(dirpath, f),
filter(
lambda f: any(
[f.find(ext) != -1 for ext in ["avi", "mp4", "m4v"]]
),
filenames,
),
)
)
out_dir = f"{os.path.dirname(sys.argv[2])}_resized"
output = list(map(lambda s: os.path.join(out_dir, os.path.basename(s)), files))
print("Starting conversion")
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
for filename, out in zip(files, output):
sp.call(
["python3", "./analysis/video_resizer.py", filename, out, *sys.argv[3:]]
)
elif len(sys.argv) == 6 and sys.argv[1] == "resize":
sp.call(["python3", "./analysis/video_resizer.py", *sys.argv[2:]])
elif (
len(sys.argv) == 3
and sys.argv[1] == "retranspose"
and os.path.isdir(sys.argv[2])
):
files = []
for (dirpath, dirnames, filenames) in os.walk(sys.argv[2]):
files.extend(
filter(
lambda s: s.find("radial_Avg.csv") != -1,
map(lambda s: f"./{dirpath}/{s}", filenames),
)
)
retranspose(files)
upload()
elif len(sys.argv) == 6 and sys.argv[1] == "add_to_db":
add_to_db(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5])
elif len(sys.argv) == 2 and sys.argv[1] == "multiDDM_add_to_db":
add_to_db("crowd.sqlite", "results-multiDDM/", "data_boxsize", "video_multiDDM")
else:
print(
f"Arguments supplied are incorrect (_, directory, capacity, radial_width) - {sys.argv}"
)
``` |
{
"source": "jordanosborn/tf2-keras-pandas",
"score": 3
} |
#### File: tf2-keras-pandas/tests/testNumerical.py
```python
from tensorflow.keras import Model
from sklearn_pandas import DataFrameMapper
from keras_pandas import lib
from keras_pandas.data_types.Numerical import Numerical
from tests.testbase import TestBase
class TestNumerical(TestBase):
def test_init(self):
# Create datatype
datatype = Numerical()
# Check for output support (or not)
self.assertTrue(datatype.supports_output)
def test_datatype_signature(self):
datatype = Numerical()
lib.check_valid_datatype(datatype)
self.assertTrue(datatype.supports_output)
def test_whole(self):
# Create datatype
datatype = Numerical()
# Load observations
observations = lib.load_titanic()
# Transform observations
mapper = DataFrameMapper([(['fare'], datatype.default_transformation_pipeline)], df_out=True)
transformed_df = mapper.fit_transform(observations)
# Create network
input_layer, input_nub = datatype.input_nub_generator('fare', transformed_df)
output_nub = datatype.output_nub_generator('fare', transformed_df)
x = input_nub
x = output_nub(x)
model = Model(input_layer, x)
model.compile(optimizer='adam', loss=datatype.output_suggested_loss())
``` |
{
"source": "jordan-owen/pdm",
"score": 2
} |
#### File: cli/commands/plugin.py
```python
from __future__ import annotations
import argparse
import os
import shlex
import subprocess
import sys
import click
from pdm import termui
from pdm.cli.commands.base import BaseCommand
from pdm.cli.options import verbose_option
from pdm.cli.utils import Package, build_dependency_graph
from pdm.models.environment import WorkingSet
from pdm.project import Project
from pdm.utils import safe_name
if sys.version_info >= (3, 8):
import importlib.metadata as importlib_metadata
else:
import importlib_metadata
from pip import __file__ as pip_location
def _all_plugins() -> list[str]:
result: list[str] = []
for dist in importlib_metadata.distributions():
if any(ep.group == "pdm" for ep in dist.entry_points):
result.append(safe_name(dist.metadata["Name"]).lower())
return result
def run_pip(args: list[str]) -> bytes:
return subprocess.check_output(
[sys.executable, "-I", os.path.dirname(pip_location)] + args,
stderr=subprocess.STDOUT,
)
class Command(BaseCommand):
"""Manage the PDM plugins"""
arguments = [verbose_option]
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
subparsers = parser.add_subparsers()
ListCommand.register_to(subparsers)
AddCommand.register_to(subparsers)
RemoveCommand.register_to(subparsers)
parser.set_defaults(search_parent=False)
self.parser = parser
def handle(self, project: Project, options: argparse.Namespace) -> None:
self.parser.print_help()
class ListCommand(BaseCommand):
"""List all plugins installed with PDM"""
arguments = [verbose_option]
name = "list"
def handle(self, project: Project, options: argparse.Namespace) -> None:
plugins = _all_plugins()
echo = project.core.ui.echo
if not plugins:
echo("No plugin is installed with PDM", err=True)
sys.exit(1)
echo("Installed plugins:", err=True)
for plugin in plugins:
metadata = importlib_metadata.metadata(plugin)
echo(
f"{termui.green(metadata['Name'])} {termui.yellow(metadata['Version'])}"
)
if metadata["Summary"]:
echo(f" {metadata['Summary']}")
class AddCommand(BaseCommand):
"""Install new plugins with PDM"""
arguments = [verbose_option]
name = "add"
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--pip-args",
help="Arguments that will be passed to pip install",
default="",
)
parser.add_argument(
"packages",
nargs="+",
help="Specify one or many plugin names, "
"each package can have a version specifier",
)
def handle(self, project: Project, options: argparse.Namespace) -> None:
pip_args = ["install"] + shlex.split(options.pip_args) + options.packages
project.core.ui.echo(
f"Running pip command: {pip_args}", verbosity=termui.DETAIL
)
with project.core.ui.open_spinner(
f"Installing plugins: {options.packages}"
) as spinner:
try:
run_pip(pip_args)
except subprocess.CalledProcessError as e:
spinner.fail("Installation failed: \n" + e.output)
sys.exit(1)
else:
spinner.succeed("Installation succeeds.")
class RemoveCommand(BaseCommand):
"""Remove plugins from PDM's environment"""
arguments = [verbose_option]
name = "remove"
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--pip-args",
help="Arguments that will be passed to pip uninstall",
default="",
)
parser.add_argument(
"-y", "--yes", action="store_true", help="Answer yes on the question"
)
parser.add_argument(
"packages", nargs="+", help="Specify one or many plugin names"
)
def _resolve_dependencies_to_remove(self, packages: list[str]) -> list[str]:
"""Perform a BFS to find all unneeded dependencies"""
result: set[str] = set()
to_resolve = list(packages)
ws = WorkingSet()
graph = build_dependency_graph(ws)
while to_resolve:
temp: list[Package] = []
for name in to_resolve:
key = safe_name(name).lower()
if key in ws:
result.add(key)
package = Package(key, "0.0.0", {})
if package not in graph:
continue
for dep in graph.iter_children(package):
temp.append(dep)
graph.remove(package)
to_resolve.clear()
for dep in temp:
if not any(graph.iter_parents(dep)) and dep.name != "pdm":
to_resolve.append(dep.name)
return sorted(result)
def handle(self, project: Project, options: argparse.Namespace) -> None:
plugins = _all_plugins()
valid_packages = [
p for p in options.packages if safe_name(p).lower() in plugins
]
packages_to_remove = self._resolve_dependencies_to_remove(valid_packages)
if not packages_to_remove:
project.core.ui.echo("No package to remove.", err=True)
sys.exit(1)
if not (
options.yes
or click.confirm(f"Will remove: {packages_to_remove}, continue?")
):
return
pip_args = (
["uninstall", "-y"] + shlex.split(options.pip_args) + packages_to_remove
)
project.core.ui.echo(
f"Running pip command: {pip_args}", verbosity=termui.DETAIL
)
with project.core.ui.open_spinner(
f"Uninstalling plugins: {valid_packages}"
) as spinner:
try:
run_pip(pip_args)
except subprocess.CalledProcessError as e:
spinner.fail("Uninstallation failed: \n" + e.output)
sys.exit(1)
else:
spinner.succeed("Uninstallation succeeds.")
``` |
{
"source": "jordan-owen/tpqoa",
"score": 2
} |
#### File: tpqoa/tpqoa/tpqoa.py
```python
import v20
import json
import configparser
import pandas as pd
from v20.transaction import StopLossDetails, ClientExtensions
from v20.transaction import TrailingStopLossDetails, TakeProfitDetails
class tpqoa(object):
''' tpqoa is a Python wrapper class for the Oanda v20 API. '''
def __init__(self, conf_file):
''' Init function is expecting a configuration file with
the following content:
[oanda]
account_id = XYZ-ABC-...
access_token = ZYXCAB...
account_type = practice (default) or live
Parameters
==========
conf_file: string
path to and filename of the configuration file,
e.g. '/home/me/oanda.cfg'
'''
self.config = configparser.ConfigParser()
self.config.read(conf_file)
self.access_token = self.config['oanda']['access_token']
self.account_id = self.config['oanda']['account_id']
self.account_type = self.config['oanda']['account_type']
if self.account_type == 'live':
self.hostname = 'api-fxtrade.oanda.com'
self.stream_hostname = 'stream-fxtrade.oanda.com'
else:
self.hostname = 'api-fxpractice.oanda.com'
self.stream_hostname = 'stream-fxpractice.oanda.com'
self.ctx = v20.Context(
hostname=self.hostname,
port=443,
token=self.access_token,
poll_timeout=10
)
self.ctx_stream = v20.Context(
hostname=self.stream_hostname,
port=443,
token=self.access_token,
)
self.suffix = '.000000000Z'
self.stop_stream = False
def get_instruments(self):
''' Retrieves and returns all instruments for the given account. '''
resp = self.ctx.account.instruments(self.account_id)
instruments = resp.get('instruments')
instruments = [ins.dict() for ins in instruments]
instruments = [(ins['displayName'], ins['name'])
for ins in instruments]
return sorted(instruments)
def get_prices(self, instrument):
''' Returns the current BID/ASK prices for instrument. '''
r = self.ctx.pricing.get(self.account_id, instruments=instrument)
r = json.loads(r.raw_body)
bid = float(r['prices'][0]['closeoutBid'])
ask = float(r['prices'][0]['closeoutAsk'])
return r['time'], bid, ask
def transform_datetime(self, dati):
''' Transforms Python datetime object to string. '''
if isinstance(dati, str):
dati = pd.Timestamp(dati).to_pydatetime()
return dati.isoformat('T') + self.suffix
def retrieve_data(self, instrument, start, end, granularity, price):
raw = self.ctx.instrument.candles(
instrument=instrument,
fromTime=start, toTime=end,
granularity=granularity, price=price)
raw = raw.get('candles')
raw = [cs.dict() for cs in raw]
if price == 'A':
for cs in raw:
cs.update(cs['ask'])
del cs['ask']
elif price == 'B':
for cs in raw:
cs.update(cs['bid'])
del cs['bid']
elif price == 'M':
for cs in raw:
cs.update(cs['mid'])
del cs['mid']
else:
raise ValueError("price must be either 'B', 'A' or 'M'.")
if len(raw) == 0:
return pd.DataFrame() # return empty DataFrame if no data
data = pd.DataFrame(raw)
data['time'] = pd.to_datetime(data['time'])
data = data.set_index('time')
data.index = pd.DatetimeIndex(data.index)
for col in list('ohlc'):
data[col] = data[col].astype(float)
return data
def get_history(self, instrument, start, end,
granularity, price, localize=True):
''' Retrieves historical data for instrument.
Parameters
==========
instrument: string
valid instrument name
start, end: datetime, str
Python datetime or string objects for start and end
granularity: string
a string like 'S5', 'M1' or 'D'
price: string
one of 'A' (ask), 'B' (bid) or 'M' (middle)
Returns
=======
data: pd.DataFrame
pandas DataFrame object with data
'''
if granularity.startswith('S') or granularity.startswith('M'):
if granularity.startswith('S'):
freq = '1h'
else:
freq = 'D'
data = pd.DataFrame()
dr = pd.date_range(start, end, freq=freq)
for t in range(len(dr)):
batch_start = self.transform_datetime(dr[t])
if t != len(dr) - 1:
batch_end = self.transform_datetime(dr[t + 1])
else:
batch_end = self.transform_datetime(end)
batch = self.retrieve_data(instrument, batch_start, batch_end,
granularity, price)
                data = pd.concat([data, batch])
else:
start = self.transform_datetime(start)
end = self.transform_datetime(end)
data = self.retrieve_data(instrument, start, end,
granularity, price)
if localize:
data.index = data.index.tz_localize(None)
return data[['o', 'h', 'l', 'c', 'volume', 'complete']]
def create_order(self, instrument, units, price=None, sl_distance=None,
tsl_distance=None, tp_price=None, comment=None,
touch=False, suppress=False, ret=False):
''' Places order with Oanda.
Parameters
==========
instrument: string
valid instrument name
units: int
number of units of instrument to be bought
(positive int, eg 'units=50')
or to be sold (negative int, eg 'units=-100')
price: float
limit order price, touch order price
sl_distance: float
stop loss distance price, mandatory eg in Germany
tsl_distance: float
trailing stop loss distance
tp_price: float
take profit price to be used for the trade
comment: str
string
touch: boolean
market_if_touched order (requires price to be set)
suppress: boolean
whether to suppress print out
ret: boolean
whether to return the order object
'''
client_ext = ClientExtensions(
comment=comment) if comment is not None else None
sl_details = (StopLossDetails(distance=sl_distance,
clientExtensions=client_ext)
if sl_distance is not None else None)
tsl_details = (TrailingStopLossDetails(distance=tsl_distance,
clientExtensions=client_ext)
if tsl_distance is not None else None)
tp_details = (TakeProfitDetails(
price=tp_price, clientExtensions=client_ext)
if tp_price is not None else None)
if price is None:
request = self.ctx.order.market(
self.account_id,
instrument=instrument,
units=units,
stopLossOnFill=sl_details,
trailingStopLossOnFill=tsl_details,
takeProfitOnFill=tp_details,
)
elif touch:
request = self.ctx.order.market_if_touched(
self.account_id,
instrument=instrument,
price=price,
units=units,
stopLossOnFill=sl_details,
trailingStopLossOnFill=tsl_details,
takeProfitOnFill=tp_details
)
else:
request = self.ctx.order.limit(
self.account_id,
instrument=instrument,
price=price,
units=units,
stopLossOnFill=sl_details,
trailingStopLossOnFill=tsl_details,
takeProfitOnFill=tp_details
)
try:
order = request.get('orderFillTransaction')
except Exception:
order = request.get('orderCreateTransaction')
if not suppress:
print('\n\n', order.dict(), '\n')
if ret is True:
return order.dict()
def stream_data(self, instrument, stop=None, ret=False):
''' Starts a real-time data stream.
Parameters
==========
instrument: string
valid instrument name
'''
self.stream_instrument = instrument
self.ticks = 0
response = self.ctx_stream.pricing.stream(
self.account_id, snapshot=True,
instruments=instrument)
msgs = []
for msg_type, msg in response.parts():
msgs.append(msg)
# print(msg_type, msg)
if msg_type == 'pricing.ClientPrice':
self.ticks += 1
self.time = msg.time
self.on_success(msg.time,
float(msg.bids[0].dict()['price']),
float(msg.asks[0].dict()['price']))
if stop is not None:
if self.ticks >= stop:
if ret:
return msgs
break
if self.stop_stream:
if ret:
return msgs
break
def on_success(self, time, bid, ask):
''' Method called when new data is retrieved. '''
print(time, bid, ask)
def get_account_summary(self, detailed=False):
''' Returns summary data for Oanda account.'''
if detailed is True:
response = self.ctx.account.get(self.account_id)
else:
response = self.ctx.account.summary(self.account_id)
raw = response.get('account')
return raw.dict()
def get_transaction(self, tid=0):
''' Retrieves and returns transaction data. '''
response = self.ctx.transaction.get(self.account_id, tid)
transaction = response.get('transaction')
return transaction.dict()
def get_transactions(self, tid=0):
''' Retrieves and returns transactions data. '''
response = self.ctx.transaction.since(self.account_id, id=tid)
transactions = response.get('transactions')
transactions = [t.dict() for t in transactions]
return transactions
def print_transactions(self, tid=0):
''' Prints basic transactions data. '''
transactions = self.get_transactions(tid)
for trans in transactions:
try:
templ = '%4s | %s | %7s | %12s | %8s'
print(templ % (trans['id'],
trans['time'][:-8],
trans['instrument'],
trans['units'],
trans['pl']))
except Exception:
pass
def get_positions(self):
''' Retrieves and returns positions data. '''
response = self.ctx.position.list_open(self.account_id).body
positions = [p.dict() for p in response.get('positions')]
return positions
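# Minimal usage sketch (assumes a valid oanda.cfg as described in __init__):
#
#   api = tpqoa('oanda.cfg')
#   print(api.get_instruments()[:5])
#   data = api.get_history('EUR_USD', '2020-01-01', '2020-01-31',
#                          granularity='D', price='M')
#   api.stream_data('EUR_USD', stop=3)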
``` |
{
"source": "jordan-palmer/flamedisx",
"score": 2
} |
#### File: flamedisx/tests/test_utils.py
```python
import numpy as np
import wimprates as wr
import flamedisx as fd
def test_j2000_conversion():
    j2000_times = np.linspace(0., 10000., 100)
# convert j2000 -> event_time -> j2000
test_times = wr.j2000(fd.j2000_to_event_time(j2000_times))
    np.testing.assert_array_almost_equal(j2000_times,
                                         test_times,
                                         decimal=6)
``` |
{
"source": "jordanparker6/coronva-virus",
"score": 2
} |
#### File: coronva-virus/airflow/upload.py
```python
import boto3, os
from datetime import datetime as dt
from botocore.client import Config
def main():
ACCESS_KEY = os.environ['DIGITAL_OCEAN_ACCESS_KEY']
SECRET = os.environ['DIGITAL_OCEAN_SECRET_KEY']
date = dt.today().strftime('%Y.%m.%d')
files = ['data.csv', 'agg_data.csv', 'confirmed_cases.csv']
# Initialize a session using DigitalOcean Spaces.
session = boto3.session.Session()
client = session.client('s3',
region_name='nyc3',
endpoint_url='https://nyc3.digitaloceanspaces.com',
aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET)
# Upload Files
for file in files:
print('Uploading: ', file)
fn = f"{date}/{file}"
client.upload_file(fn, 'covid-19', file)
if __name__ == "__main__":
main()
``` |
{
"source": "jordanparker6/datascience-starter",
"score": 3
} |
#### File: ds/dataset/gridsearchcv.py
```python
import itertools
import numpy as np
import pandas as pd
from typing import Dict, Any, Tuple
from sklearn.model_selection import TimeSeriesSplit
from sklearn.model_selection import ShuffleSplit
from tqdm import tqdm
from abc import ABC
class GridsearchCVBase(ABC):
"""A base class for cross validated gridsearch.
Args:
        estimator: A scikit-learn estimator that implements the fit and score methods.
cv: The number of folds in kfold cross validation.
"""
def __init__(self, estimator, cv: int = 5):
super().__init__()
self.estimator = estimator #: A scikit learn estimator that implements fit and score methods.
        self.cv = cv  #: The number of folds in k-fold cross validation.
self.splitter = None #: A class for splitting the dataframe into k-folds.
    def crossval(self, df: pd.DataFrame, parameters: Dict[str, Any], cv: int = 5) -> float:
"""Performs k-fold cross validation using the estimators score method and the provided splitter.
Args:
df: A pandas dataframe of target and feature variables.
parameters: A dictionary of parameters and possible values.
cv: The number of folds in k-fold cross validation.
Returns:
The mean score for the cross validation.
"""
        if self.splitter is None:
raise NotImplementedError
else:
cv = self.splitter(n_splits=cv)
score = []
for train_index, test_index in cv.split(df):
train, test = df.iloc[train_index, :], df.iloc[test_index, :]
model = self.estimator(**parameters)
model.fit(train)
score.append(model.score(test))
return np.array(score).mean()
def fit(self, df: pd.DataFrame, parameters: Dict[str, Any], min_loss: bool = True) -> Tuple[Dict[str, Any], np.ndarray]:
"""Fit method for cross validated grid search.
Args:
df: A pandas dataframe of target and feature variables.
parameters: A dictionary of parameters and possible values.
min_loss: A boolean indicator to optimise for the min or max score in gridsearch.
"""
scores = []
params = []
values = parameters.values()
options = [dict(zip(parameters.keys(), v)) for v in itertools.product(*parameters.values())]
for option in tqdm(options):
score = self.crossval(df, option, self.cv)
scores.append(score)
params.append(option)
scores = np.array(scores)
if min_loss:
best = np.nanargmin(scores)
else:
best = np.nanargmax(scores)
return params[best], scores
class GridsearchCV(GridsearchCVBase):
""""A gridsearch and crossvalidation approach for iid datasets.
"""
def __init__(self, estimator, cv: int = 5):
super().__init__(estimator, cv)
self.splitter = ShuffleSplit
class TimeseriesGridsearchCV(GridsearchCVBase):
""""A gridsearch and crossvalidation approach for timeseries datasets.
"""
def __init__(self, estimator, cv=5):
super().__init__(estimator, cv)
self.splitter = TimeSeriesSplit
```
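A minimal usage sketch (the estimator here is a hypothetical stand-in; it only has to accept the searched parameters in its constructor and expose `fit` and `score`, and `GridsearchCV` is assumed imported from the module above):

```python
import pandas as pd

class ToyEstimator:
    """Stand-in estimator: its score is the distance of `c` from the data mean."""
    def __init__(self, c):
        self.c = c
    def fit(self, df: pd.DataFrame):
        return self
    def score(self, df: pd.DataFrame) -> float:
        return abs(df["y"].mean() - self.c)

df = pd.DataFrame({"y": range(100)})
search = GridsearchCV(ToyEstimator, cv=3)
best_params, scores = search.fit(df, {"c": [0, 25, 50, 75]}, min_loss=True)
print(best_params)  # expected to pick c=50, the value closest to the mean of y
```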
#### File: ds/io/excel.py
```python
import xlwings as xw
import pandas as pd
from typing import Optional
import logging
log = logging.getLogger(__name__)
class Excel:
"""A class for interacting with Excel workbooks
Args:
file: The file name for the Excel workbook.
"""
def __init__(self, file: str):
super().__init__()
self.file_name = file #: The file name of the Excel workbook.
self.wb = xw.Book(file) #: The xlwings workbook object.
log.info(f"Excel File Read: {file}")
def to_df(self, sheet: str, cell: str = "A1", expand: str = 'table'):
"""Convert an Excel table to a pandas dataframe.
Args:
sheet: The sheet name of the workbook.
cell: The table starting range of the table.
            expand: The xlwings expand parameter (e.g. 'table' for a table range, or a range reference like 'B10').
Returns:
A pandas dataframe.
"""
df = pd.DataFrame(self.wb.sheets(sheet).range(cell).expand(expand).value)
headers = df.iloc[0]
df = pd.DataFrame(df.values[1:], columns=headers)
return df
def to_excel(self, df: pd.DataFrame, sheet: str, cell: str = "A1", clear_range: Optional[str] = None):
"""Output a pandas DataFrame to an excel range.
Args:
df: A pandas dataframe.
sheet: The sheet name of the workbook.
cell: The starting range of the output.
            clear_range: The range of the workbook to clear before printing (e.g. "A1:D10").
"""
if clear_range is not None:
self.wb.sheets(sheet).range(clear_range).clear_contents()
print_range = self.wb.sheets(sheet).range(cell)
print_range.value = df
```
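A short usage sketch (the workbook, sheet names, and ranges are hypothetical; xlwings requires a live Excel installation):

```python
wb = Excel("report.xlsx")
df = wb.to_df("Data", cell="A1")  # read the table anchored at A1 into a DataFrame
summary = df.describe()
# Write the summary back, clearing any stale output first.
wb.to_excel(summary, "Summary", cell="B2", clear_range="B2:K50")
```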
#### File: ds/io/fetch.py
```python
import tqdm
import json
import asyncio
import aiohttp
import logging
from asyncio_throttle import Throttler
from typing import Dict, List, Any, Optional
log = logging.getLogger(__name__)
JsonType = Dict[str, Any]
class AsyncFetch:
""" A base class for asyncronus HTTP fetching.
"""
def __init__(self):
super().__init__()
def fetch(self, url: str) -> JsonType:
"""Executes an async fetch.
Args:
url: A url string.
Returns:
A json response object.
"""
return self.fetch_all([url], rate=None)
def fetch_all(self, urls: List[str], rate: Optional[int] = None) -> List[JsonType]:
"""Executes a throtled async fetch for a list of urls.
Args:
urls: A list of url strings.
rate (optional): The rate to throttle (calls per second).
Returns:
A list of json responses.
"""
async def _fetch_all(self, urls: List[str], rate: Optional[int] = None) -> List[JsonType]:
connector = aiohttp.TCPConnector(verify_ssl=False, limit=100)
throttler = None
if rate:
throttler = Throttler(rate_limit=rate, period=1)
async with aiohttp.ClientSession(connector=connector) as session:
tasks = [self._throttler(session, url, throttler, i) for i, url in enumerate(urls)]
responses = [await f for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(tasks))]
responses = sorted(responses, key=lambda x: x[1])
responses = list(map(lambda x: x[0], responses))
return responses
return asyncio.run(_fetch_all(self, urls, rate))
# ------------------------------------------------
# - Async Handling Functions ---------------------
# ------------------------------------------------
async def _fetch(self, session: aiohttp.ClientSession, url: str, i: int) -> JsonType:
"""A handler to execulte a async HTTP request.
Args:
session: context for making the http call.
url: URL to call.
i: index of fetch.
Returns:
A json response object.
"""
async with session.get(url, timeout=60*30) as response:
resp = await response.read()
            log.debug(f'Made request: {url}. Status: {response.status}', extra={"data": resp})
return json.loads(resp), i
async def _throttler(self, session: aiohttp.ClientSession, url: str, throttler: Throttler, i: int):
"""A throttling wrapper.
Args:
session: context for making the http call.
url: URL to call.
            throttler: asyncio-throttle instance used to rate-limit calls, or None.
i: index of fetch.
Return:
The json response object.
"""
if throttler:
async with throttler:
return await self._fetch(session, url, i)
else:
return await self._fetch(session, url, i)
```
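A usage sketch (the endpoint is a placeholder; `rate` caps calls per second through the throttler):

```python
fetcher = AsyncFetch()
urls = [f"https://api.example.com/items/{i}" for i in range(20)]
responses = fetcher.fetch_all(urls, rate=5)  # throttled to ~5 requests/second
single = fetcher.fetch("https://api.example.com/items/0")  # list with one response
```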
#### File: models/timeseries/linear_model.py
```python
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.base import BaseEstimator
from datascience_starter.feature_engineering import TimeseriesFeatures
class LinearTimeseriesModel(BaseEstimator):
"""A simple linear model for timeseries analysis.
    Provides a simple linear timeseries model that fits timeseries data
    on time dependency, without autocorrelation terms (cf. ARIMA).
    The model leverages exponentially decayed sample weights to prioritise
    recent entries and RBF features to engineer seasonality.
Args:
alpha (float): The hyperparameter for RBFFeatures. Values between (0, 1).
r (float): hyperparameter for exponential decay
Attributes:
params: The model hyperparameters.
model: The LinearRegression model class from scikit-learn.
"""
def __init__(self, alpha: float, r: float):
super().__init__()
self.params = { "alpha": alpha, "r": r }
self.model = LinearRegression()
def transform(self, df, ylabel='y'):
rbf = TimeseriesFeatures(self.params['alpha'])
df = rbf.transform(df)
X = df.drop([ylabel], axis=1)
y = df[ylabel]
return X, y
def fit(self, df, ylabel='y'):
X, y = self.transform(df, ylabel)
if self.params['r'] == 1:
self.model.fit(X, y)
else:
self.model.fit(X, y, sample_weight=self._ewa(len(y), self.params['r']))
return self.model
def predict(self, df, ylabel='y'):
X, y = self.transform(df, ylabel)
pred = self.model.predict(X)
return pred
def score(self, df, ylabel='y', metric='mape'):
X, y = self.transform(df, ylabel)
if metric == 'mape':
yhat = self.predict(df, ylabel)
return self._mape(y, yhat)
elif metric == 'r2':
return self.model.score(X, y)
        else:
            raise ValueError("metric must be either 'mape' or 'r2'")
def _ewa(self, n, r):
x = np.arange(0, n)
func = lambda i: r ** i * (1 - r) / (1 - np.power(r, n))
return np.flip(np.array([func(i) for i in x]))
def _mape(self, y, yhat):
return np.mean(np.abs((y - yhat) / y)) * 100
```
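For intuition, a small sketch of the exponential-decay weighting implemented by `_ewa` (the values of `n` and `r` are illustrative; rows are assumed time-ordered, oldest first):

```python
import numpy as np

n, r = 5, 0.5
# Weights proportional to r**i, normalised to sum to 1, then flipped so the
# most recent sample (last row) receives the largest weight.
w = np.flip(np.array([r**i * (1 - r) / (1 - r**n) for i in range(n)]))
print(w, w.sum())  # ascending weights, total 1.0
```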
#### File: models/timeseries/sarimax.py
```python
from sklearn.base import BaseEstimator
import statsmodels.api as sm
from statsmodels.tsa.statespace.sarimax import SARIMAX as SARIMAXModel
from statsmodels.tsa.stattools import adfuller
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from typing import Dict, Any
class SARIMAX(BaseEstimator):
"""A class to fit and predict using the SARIMAX model.
Implements the scikit-learn api for the SARIMAX model.
Also provides utility methods to check stationarity
with an Augmented Dickie Fuller test.
Args:
**kwargs: Key word arguements for the statsmodels SARIMAX class.
Please see statsmodels for documentation.
Attributes:
params: The model paramaters.
model: An instance of the statsmodels SARIMAX model.
"""
def __init__(self, **kwargs):
super().__init__()
self.params: Dict[str, Any] = kwargs
        self.model = SARIMAXModel  # statsmodels class; rebound to the fitted results object in fit()
def fit(self, y: np.ndarray, max_iter: int = 50, method: str = 'powell', **kwargs):
self.model = self.model(y, **self.params)
        self.model = self.model.fit(maxiter=max_iter, disp=0, method=method, **kwargs)
return self
def predict(self, X: np.ndarray):
pred = self.model.get_prediction(start=X.index[0], end=X.index[-1])
yhat = pred.predicted_mean
ci = pred.conf_int()
return yhat, ci
def plot_predictions(self, X: np.ndarray, shift: int = 0):
yhat, ci = self.predict(X)
yhat = yhat[shift:]
ci = ci.iloc[shift:, :]
ax = X.plot(label='observed')
yhat.plot(ax=ax, label='One-step ahead Forecast', alpha=.7, figsize=(14, 4))
ax.fill_between(ci.index, ci.iloc[:, 0], ci.iloc[:, 1], color='b', alpha=.05)
ax.set_xlabel('Date')
ax.set_ylabel('y')
plt.legend()
plt.show()
def forecast(self, start: str, end: str):
pred = self.model.get_prediction(start=pd.to_datetime(start), end=pd.to_datetime(end))
yhat = pred.predicted_mean
ci = pred.conf_int()
return yhat, ci
def plot_forecast(self, start: str, end: str):
yhat, ci = self.forecast(start, end)
ax = yhat.plot(figsize=(14, 4), label="Forecast")
ax.fill_between(ci.index, ci.iloc[:, 0], ci.iloc[:, 1], color='b', alpha=.05)
ax.set_xlabel('Date')
ax.set_ylabel('y')
plt.legend()
plt.show()
def summary(self):
print(self.model.summary().tables[1])
self.model.plot_diagnostics(figsize=(18, 6))
    def score(self, y: pd.Series, shift: int = 0):
yhat, _ = self.predict(y)
return self._mape(y[shift:].values, yhat[shift:].values)
def check_stationary(self, y: np.ndarray, alpha: float = 0.05):
result = adfuller(y)
print('ADF Statistic: %f' % result[0])
print('p-value: %f' % result[1])
for key, value in result[4].items():
print('\t%s: %.3f' % (key, value))
return result[1] <= alpha
def _mape(self, y: np.ndarray, yhat: np.ndarray):
return np.mean(np.abs((y - yhat) / y)) * 100
``` |
{
"source": "jordanparker6/newsanalyzer",
"score": 3
} |
#### File: newsanalyzer/newsreader/cli.py
```python
from questionary import Style, Validator, ValidationError, prompt, print as pprint
from pyfiglet import figlet_format
import datetime as dt
from . import scrapers
style = Style([
('qmark', '#07b05b bold'), # token in front of the question
('answer', '#00b3ff bold'), # submitted answer text behind the question
('pointer', '#07b05b bold'), # pointer used in select and checkbox prompts
('highlighted', 'bold'), # pointed-at choice in select and checkbox prompts
('selected', 'bold noreverse'), # style for a selected item of a checkbox
('separator', '#cc5454'), # separator in lists
('instruction', ''), # user instructions for select, rawselect, checkbox
('text', ''), # plain text
('disabled', 'fg:#858585 italic') # disabled choices for select and checkbox prompts
])
# ~~~ CLI Logging Utility ~~~~~~~~~~~~~~~
def log(string, style, font="slant", figlet=False):
if not figlet:
pprint(string, style)
else:
pprint(figlet_format(
string, font=font
), style)
# ~~~ Validators ~~~~~~~~~~~~~~~
class EmptyValidator(Validator):
def validate(self, value):
if len(value.text):
return True
else:
raise ValidationError(
message="You can't leave this blank",
cursor_position=len(value.text))
class ISODatetimeValidator(Validator):
    def validate(self, value):
        try:
            dt.datetime.fromisoformat(value.text)
            return True
        except ValueError:
            raise ValidationError(
                message="The period must be a valid ISO date string",
                cursor_position=len(value.text)
            )
# ~~~ Questions ~~~~~~~~~~~~~~~
def askQuestions(methods):
cfg = {}
if "scrapers" in methods:
cfg["scrapers"] = askScraperInfo()
if "nlp" in methods:
cfg["nlp"] = askNLPInfo()
return cfg
def askDatabaseInfo():
questions = [
{
'type': 'input',
'name': 'uri',
'message': 'What is the database URI?',
'default': "sqlite:///database.db",
'validate': EmptyValidator
}
]
answers = prompt(questions, style=style)
return answers
def askMethodInfo():
questions = [
{
'type': 'checkbox',
'name': 'methods',
'message': 'Which features do you wish to run?',
'choices': [
{ "value": "scrapers", "name": "1) Run Web Scrapers", "checked": False },
{ "value": "nlp", "name": "2) Run NLP Analysis", "checked": False },
{ "value": "dashboard", "name": "3) Serve Dashboard", "checked": False },
],
'validate': lambda answer: 'You must choose at least one operation.' if len(answer) == 0 else True
}
]
return prompt(questions, style=style)
def askScraperInfo():
now = dt.date.today()
scraper_classes = list(map(lambda x: x.__name__, scrapers.ScraperBase.__subclasses__()))
questions = [
{
'type': 'input',
'name': "period_to",
'message': "How far back do you wish to scrape?",
'default': now.replace(year=now.year - 1).isoformat(),
'validate': ISODatetimeValidator,
},
{
'type': 'checkbox',
'name': 'classes',
'message': 'Which scrapers to run?',
            'choices': list(map(lambda x: { "name": x, "checked": True }, scraper_classes)),
'validate': lambda answer: 'You must choose at least one operation.' if len(answer) == 0 else True
}
]
return prompt(questions, style=style)
def askNLPInfo():
questions = [
{
'type': 'select',
'name': "model",
'message': "Which SpaCy language model to use?",
'default': 'en_core_web_sm',
'choices': ['en_core_web_sm', 'en_core_web_md', 'en_core_web_lg', 'en_core_web_trf'],
}
]
return prompt(questions, style=style)
```
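A minimal sketch of how these prompts might be chained into an entrypoint. This wiring is an assumption about intended usage, not code from the package:
```python
def main():
    # Hypothetical CLI flow built from the prompt helpers above.
    log("newsreader", "#00b3ff bold", figlet=True)
    db_cfg = askDatabaseInfo()
    methods = askMethodInfo()["methods"]
    cfg = askQuestions(methods)
    print(db_cfg, cfg)

if __name__ == "__main__":
    main()
```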
#### File: newsanalyzer/newsreader/database.py
```python
from typing import Optional, Dict, List, Any, Union
import datetime as dt
from sqlmodel import Field, Session, SQLModel, create_engine, select
import threading as th
import queue
# ~~~ Database ~~~~~~~~~~~~~~~
class Database:
def __init__(self, uri: str):
self.engine = create_engine(uri)
SQLModel.metadata.create_all(self.engine)
def create_all(self, items: List[SQLModel]):
with Session(self.engine) as session:
for item in items:
session.add(item)
session.commit()
def get_by_id(self, id: Union[str, int], model: SQLModel):
with Session(self.engine) as session:
stmt = select(model).where(model.id == id)
return session.exec(stmt).first()
def get_by_field(self, key: str, value: Any, model: SQLModel):
stmt = select(model).where(getattr(model, key) == value)
return self.exec(stmt)
def exec(self, stmt: str, params = {}):
with Session(self.engine) as session:
return session.exec(stmt, params=params).all()
class DatabaseWorker(th.Thread):
def __init__(self,
uri: str,
queue: queue.Queue,
batch: int = None,
timeout: int = 10
):
super().__init__()
self.q = queue
self.db = None
self.uri = uri
self.timeout = timeout
self.batch = batch
    def run(self):
        self.db = Database(self.uri)
        cache = []
        while True:
            try:
                cache.append(self.q.get(timeout=self.timeout))
                if self.batch:
                    # Flush in batches of `batch` items.
                    if len(cache) >= self.batch:
                        self.db.create_all(cache)
                        cache = []
                else:
                    # No batching: write every item immediately.
                    self.db.create_all(cache)
                    cache = []
            except queue.Empty:
                # Flush whatever is left before shutting down.
                if cache:
                    self.db.create_all(cache)
                break
# ~~~ Models ~~~~~~~~~~~~~~~~~
class Document(SQLModel, table=True):
id: str = Field(primary_key=True)
name: str
href: str
date: dt.datetime
text: Optional[str] = None
date_collected: dt.datetime
collected_by: str
class Paragraph(SQLModel, table=True):
id: str = Field(primary_key=True)
text: str
document_id: str = Field(foreign_key="document.id")
sentiment: str
sent_score: float
class Entity(SQLModel, table=True):
id: str = Field(primary_key=True)
name: str
description: Optional[str]
class EntityMention(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
text: str
score: Optional[float]
label: str
start: int
end: int
paragraph_id: str = Field(foreign_key="paragraph.id")
kb_id: Optional[str] = Field(foreign_key="entity.id")
class EntityFeature(SQLModel, table=True):
id: int = Field(primary_key=True)
kb_id: str = Field(foreign_key="entity.id")
key: str
value: str
```
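A minimal sketch of feeding the batching worker, assuming an in-memory SQLite database; all field values are illustrative:
```python
import queue as queue_lib
import datetime as dt

q = queue_lib.Queue()
worker = DatabaseWorker("sqlite://", q, batch=2, timeout=2)
worker.start()
for i in range(5):
    q.put(Document(
        id=str(i),
        name=f"doc {i}",
        href=f"https://example.com/{i}",
        date=dt.datetime.utcnow(),
        date_collected=dt.datetime.utcnow(),
        collected_by="sketch",
    ))
worker.join()  # the worker flushes and exits once the queue stays empty past `timeout`
```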
#### File: newsreader/nlp/transformers.py
```python
from tqdm import tqdm
import logging
from transformers import AutoTokenizer, AutoModelForTokenClassification, AutoModelForSequenceClassification, pipeline
from ..database import Database, Paragraph, EntityMention
from .utils import split_paragraphs, truncate_text
log = logging.getLogger(__name__)
def clean_ner_schema(ent):
ent["label"] = ent["entity_group"]
ent["text"] = ent["word"]
del ent["entity_group"]
del ent["word"]
return ent
def analyse(
database: Database,
ner_model: str,
sent_model: str
):
print("NLP Analysis:")
    # ~~~~ Initiate the Language Model Pipelines ~~~~~~~
ner = pipeline(
task="ner",
model=AutoModelForTokenClassification.from_pretrained(ner_model),
tokenizer=AutoTokenizer.from_pretrained(ner_model),
aggregation_strategy="average"
)
sent = pipeline(
task="sentiment-analysis",
model=AutoModelForSequenceClassification.from_pretrained(sent_model),
tokenizer=AutoTokenizer.from_pretrained(sent_model)
)
# ~~~~ Imperative analysis and processing ~~~~~~~
while True:
docs = database.exec("""
SELECT document.id, document.text FROM document
WHERE NOT EXISTS (SELECT paragraph.id FROM paragraph WHERE document.id = paragraph.document_id)
""")
n = len(docs)
if n == 0:
break
for doc in tqdm(docs):
id, text = doc
paragraphs = list(map(lambda x: truncate_text(x, 700), split_paragraphs(text)))
if len(paragraphs) > 0:
try:
items = zip(
paragraphs,
sent(paragraphs),
ner(paragraphs)
)
except Exception as e:
log.error(e, extra={ "id": id, "text": text, "paragraphs": paragraphs })
continue
results = []
for j, item in enumerate(items):
results.append(
Paragraph(
id=f"{id}:{j}",
text=item[0],
sentiment=item[1]["label"],
sent_score=item[1]["score"],
document_id=id
)
)
for ent in item[2]:
ent = clean_ner_schema(ent)
results.append(
EntityMention(paragraph_id=f"{id}:{j}", **ent)
)
database.create_all(results)
```
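A minimal sketch of invoking the loop above, as it might be called from the package's CLI layer. The checkpoint names are common public Hugging Face models used as placeholders, not necessarily the project's defaults:
```python
db = Database("sqlite:///database.db")
analyse(
    database=db,
    ner_model="dslim/bert-base-NER",
    sent_model="distilbert-base-uncased-finetuned-sst-2-english",
)
```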
#### File: newsreader/scrapers/all.py
```python
from typing import Generator
from dateutil.parser import parse
import datetime as dt
from .base import ScraperBase
# ~~~ Scraper Implementations ~~~~~~~~~~~~~~~~~
class ITNewsScraper(ScraperBase):
"""
A web-scraper to collect all recent ITNews articles to a specified date.
"""
url = "https://www.itnews.com.au/news"
def _find_documents(self, to: dt.datetime) -> Generator:
i = 1
while True:
html = self.get_html(self.url + f"/page{i}")
i += 1
for el in html.select('a.article-list-container'):
doc = {}
doc["href"] = self.url + el["href"]
doc["name"] = el.select_one(".article-list-title").text.strip()
doc["date"] = parse(el.select_one(".article-list-details").text.strip())
yield doc
if to > doc["date"]:
break
def _scrape_document(self, href: str):
html = self.get_html(href)
text = ""
paragraphs = html.select("#article-body > p")
if len(paragraphs) > 0:
for p in paragraphs:
text += p.text + "\n\n"
else:
text = html.select_one("#article-body").text
return text
``` |
{
"source": "jordanparker6/python-api",
"score": 2
} |
#### File: python-api/app/loaders.py
```python
from typing import AsyncGenerator
from app.core.database import Database
async def load_db() -> AsyncGenerator:
with Database() as db:
yield db
``` |
{
"source": "jordanparker6/TransformerSeries",
"score": 2
} |
#### File: transformerseries/archive/evaluate.py
```python
import logging
import torch
import joblib
from pathlib import Path
import numpy as np
from torch.utils.data import Dataset, DataLoader
from torch.utils.tensorboard import SummaryWriter
import config
import models
from plot import plot_prediction
logger = logging.getLogger(__name__)
log_dir = config.MODEL_DIR.joinpath("logs")
writer = SummaryWriter(log_dir, comment="test")
def evaluate(
model_name: str,
data: Dataset,
model_path: Path,
forecast_window: int,
model_dir: Path
):
## could move this into train and pick the best model on the evaluation dataset
device = torch.device(config.DEVICE)
features = data.features
raw_features = data.raw_features
targets = data.targets
date_index = data.dates
dataloader = DataLoader(data, batch_size=1, shuffle=True)
model = models.ALL[model_name]
model = model(feature_size=len(features), output_size=len(targets)).double().to(device)
model.load_state_dict(torch.load(model_path))
writer = SummaryWriter(model_dir.joinpath("logs"), comment="training")
with torch.no_grad():
model.eval()
all_val_loss = []
all_metrics = { k: [] for k in config.MODEL["metrics"] }
for plot in range(25):
metrics = { k: 0 for k in config.MODEL["metrics"] }
for X_i, Y_i, X, Y, group in dataloader:
X = X.permute(1,0,2).double().to(device)[1:, :, :]
Y = Y.permute(1,0,2).double().to(device)
next_input = X
all_pred = []
for i in range(forecast_window - 1):
pred = model(next_input)
if all_pred == []:
all_pred = pred
else:
all_pred = torch.cat((all_pred, pred[-1,:,:].unsqueeze(0)))
old_features = X[i + 1:, :, len(targets):] # Size: [train_window - 1, 1, feature_size]
new_feature = Y[i + 1, :, len(targets):].unsqueeze(1) # Size: [1, 1, feature_size]
new_features = torch.cat((old_features, new_feature)) # Size: [train_window, 1, feature_size]
next_input = torch.cat((X[i+1:, :, 0:len(targets)], pred[-1,:,:].unsqueeze(0)))
next_input = torch.cat((next_input, new_features), dim = 2)
true = torch.cat((X[1:,:,0:len(targets)], Y[:-1,:,0:len(targets)]))
for metric in config.MODEL["metrics"]:
metrics[metric] += config.METRICS[metric](true, all_pred[:,:, 0:len(targets)])
all_metrics = { k: v + [metrics[k]] for k, v in all_metrics.items() }
logstr = f"{model_name} | Sample: {plot}, "
for k, v in metrics.items():
logstr += f"| Validation {k}: {metrics[k]} "
writer.add_scalar(f"Evaluation {k}", v, plot)
logger.info(logstr)
if plot % 5 == 0:
scalar = joblib.load('scalar_item.joblib')
X = data.inverse_transform(X, scalar)
Y = data.inverse_transform(Y, scalar)
all_pred = data.inverse_transform(all_pred, scalar)
X_dates = date_index[X_i.tolist()[0]].tolist()[1:]
Y_dates = date_index[Y_i.tolist()[0]].tolist()
for i, target in enumerate(targets):
writer.add_figure(
f"{model_name}_test_plot_'{target}'@sample-{plot}",
plot_prediction(X[:, i], Y[:, i], all_pred[:, i], X_dates, Y_dates),
plot
)
all_metrics = { k: torch.cat([x.unsqueeze(0) for x in v]) for k, v in all_metrics.items() }
for k, v in all_metrics.items():
logger.info(f"{model_name} | Average Validation {k}: {v.mean()}")
```
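The core of the evaluation loop is autoregressive decoding: the last prediction is appended to the input window for the next step. A self-contained toy sketch of that pattern (`TinyModel` is a stand-in, not the project's transformer):
```python
import torch

class TinyModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.lin = torch.nn.Linear(1, 1)
    def forward(self, x):               # x: [seq_len, batch, features]
        return self.lin(x)

model = TinyModel().eval()
window = torch.randn(8, 1, 1)           # seed window of 8 timesteps
preds = []
with torch.no_grad():
    for _ in range(4):                  # forecast 4 steps ahead
        out = model(window)
        next_step = out[-1:, :, :]      # prediction for the next timestep
        preds.append(next_step)
        # Slide the window: drop the oldest step, append the prediction.
        window = torch.cat((window[1:], next_step))
print(torch.cat(preds).shape)           # torch.Size([4, 1, 1])
```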
#### File: TransformerSeries/transformerseries/dataset.py
```python
import pandas as pd
import numpy as np
from pathlib import Path
from typing import List
import joblib
import torch
from torch.utils.data import Dataset
from sklearn import preprocessing
from sklearn.preprocessing import MinMaxScaler
from core import config
from processors.base import ProcessPipeline
class TimeSeriesDataset(Dataset):
def __init__(
self,
csv_file: Path,
targets: List[str],
training_length: int,
forecast_window: int,
        processor: ProcessPipeline = None,
        le: preprocessing.LabelEncoder = None,
        scaler: MinMaxScaler = None,
    ):
"""
Args:
csv_file (Path): The path object for the csv file.
targets: The column names of the variable being predicted.
training_length (int): The length of the sequence included in the training.
            forecast_window (int): The forecast window for predictions.
            processor (ProcessPipeline): An optional feature-engineering pipeline.
            le: An optional pre-fitted LabelEncoder for group_id (fitted here if None).
            scaler: An optional pre-fitted MinMaxScaler for the features (fitted here if None).
        """
df = pd.read_csv(csv_file, parse_dates=["timestamp"]).reset_index(drop=True)
position_encoded = ["sin_hour", "cos_hour", "sin_day", "cos_day", "sin_month", "cos_month"]
assert "timestamp" in df.columns, "Column 'timestamp' does not exist. Please ensure the dataset has been preprocessed."
assert "group_id" in df.columns, "Column 'group_id' does not exist. Please ensure the dataset has been preprocessed."
assert all(x in df.columns for x in targets), "A target column doesn't exist in the dataset."
assert all(x in df.columns for x in position_encoded), "The target dataset has not been position encoded. Please ensure the dataset has been preprocessed"
        self.dates = df["timestamp"]  # kept so get_parameters() can expose the date index
        df = df.drop(columns=["timestamp"])
        if le is None:
            le = preprocessing.LabelEncoder()
            df["group_id"] = le.fit_transform(df.group_id)
        else:
            df["group_id"] = le.transform(df.group_id)
        if scaler is None:
scaler = MinMaxScaler()
df = pd.DataFrame(scaler.fit_transform(df), columns=df.columns)
else:
df = pd.DataFrame(scaler.transform(df), columns=df.columns)
self.le = le
self.df = df
self.scaler = scaler # could make this also NormScaler
self.training_length = training_length
self.forecast_window = forecast_window
cols = [col for col in self.df.columns if col not in ["group_id"]]
self.raw_features = [col for col in cols if col not in position_encoded]
self.features = self.raw_features + position_encoded
self.targets = targets
self.build_dataset()
def build_dataset(self):
"""Apply feature engineering steps and creates test / train split."""
X = []
Y = []
        df = self.df.loc[:, self.features + ["group_id"]]
        unique_groups = df.group_id.unique()
        self.groups = unique_groups
        for group in unique_groups:
            tmp = df.loc[df.group_id == group, self.features].values
            tmp_target = df.loc[df.group_id == group, self.targets].values
itr = range(0, tmp.shape[0] - self.training_length - self.forecast_window, self.training_length + self.forecast_window)
for i in itr:
src = tmp[i: i+self.training_length].tolist()
trg = [[0]] + tmp_target[i+self.training_length: i+self.training_length+self.forecast_window].tolist()
X.append(src)
Y.append(trg)
self.X = np.array(X, dtype=np.float64)
self.Y = np.array(Y, dtype=np.float64)
def __len__(self):
return len(self.X)
def __getitem__(self, idx):
return self.X[idx], self.Y[idx]
def get_parameters(self):
return {
"targets": self.targets,
"features": self.features,
"raw_features": self.raw_features,
"training_length": self.training_length,
"forecast_window": self.forecast_window,
"groups": self.groups,
"dates": self.dates
}
def transform(self, df: pd.DataFrame) -> pd.DataFrame:
return pd.DataFrame(self.scaler.transform(df), columns=df.columns)
    def inverse_transform(self, out: np.ndarray) -> List[float]:
        # Create an empty array the same width as the training dataframe,
        # fill only the target column (index 1), inverse-scale it, then
        # retrieve just the target column.
        vals = np.empty((out.shape[0], 9))
        vals[:, 1] = out.reshape(-1)
        return self.scaler.inverse_transform(vals)[:, 1].tolist()
```
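`build_dataset` slices each group into non-overlapping chunks of `training_length + forecast_window` steps, splitting each chunk into a source and a target window. A minimal numpy illustration of the same slicing:
```python
import numpy as np

series = np.arange(20, dtype=np.float64)
training_length, forecast_window = 4, 2
step = training_length + forecast_window
X, Y = [], []
for i in range(0, len(series) - step, step):
    X.append(series[i:i + training_length])          # source window
    Y.append(series[i + training_length:i + step])   # target window
print(np.array(X))  # [[ 0.  1.  2.  3.] [ 6.  7.  8.  9.] [12. 13. 14. 15.]]
print(np.array(Y))  # [[ 4.  5.] [10. 11.] [16. 17.]]
```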
#### File: transformerseries/processors/timeseries.py
```python
from typing import List, Optional
import numpy as np
class PositionEncoding:
pass
class TimeseriesWindowBuilder:
def __init__(self, training_length: int, forecast_window: int):
self.training_length = training_length
self.forecast_window = forecast_window
    def fit(self, dataset):
        X = []
        Y = []
        df = dataset.df.loc[:, dataset.features + ["group_id"]]
        unique_groups = df.group_id.unique()
        for group in unique_groups:
            tmp = df.loc[df.group_id == group, dataset.features].values
            tmp_target = df.loc[df.group_id == group, dataset.targets].values
            itr = range(0, tmp.shape[0] - self.training_length - self.forecast_window, self.training_length + self.forecast_window)
            for i in itr:
                src = tmp[i: i + self.training_length].tolist()
                trg = [[0]] + tmp_target[i + self.training_length: i + self.training_length + self.forecast_window].tolist()
                X.append(src)
                Y.append(trg)
        self.X = np.array(X, dtype=np.float64)
        self.Y = np.array(Y, dtype=np.float64)
``` |
{
"source": "jordan-patterson/menu",
"score": 3
} |
#### File: menu/clmenu/clmenu.py
```python
from os import system
from termcolor import colored
class getch:
def __call__(self):
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
def printLogo(filename):
try:
file=open(filename,'r')
data=file.read()
system("clear")
print(data)
except FileNotFoundError:
print("\n\n\t\t\t\tError: File does not exist")
class Menu:
'''
A Class for making a custom menu in the command line
options: A list of options for the menu
instructions: A small piece of text telling the user what to do
logoName: The name of a file with your custom made logo. ASCII ART RECOMMENDED
'''
def __init__(self,options,instructions,logoName):
self.options=options
self.instructions=instructions
self.logoName=logoName
def arrow(self,count,tabs):
'''
Method for displaying the menu. The arrow is printed beside the option
that has index count
'''
printLogo(self.logoName)
print("\n\n\t\t\t\t "+colored(self.instructions,'magenta')+"\n\n")
for i in range(len(self.options)):
if(tabs):
print("\t\t\t\t ",end="")
else:
print("\t\t ",end="")
if(i==count):
print("->>>>>>[ "+colored(self.options[i],'green')+" \n")
else:
print(" [ "+colored(self.options[i],'cyan')+" \n")
def prompt(self,tabs=True):
'''
Method for handling the user input and increasing or decreasing count when either UP
        or DOWN arrow key was pressed
'''
count=0
getc=getch()
self.arrow(count,tabs)
key=" "
while(ord(key)!=13):
key=getc()
if(ord(key)==66):
if(count<len(self.options)-1):
count+=1
elif(ord(key)==65):
if(count>0):
count-=1
self.arrow(count,tabs)
return count
``` |
{
"source": "jordan-patterson/pyquotes",
"score": 3
} |
#### File: jordan-patterson/pyquotes/main.py
```python
from bs4 import BeautifulSoup
import requests
import os
import json
from random import choice
home = os.path.expanduser("~")
directory = home+"/bin/pyquotes/"
def initialize():
types=getTypes()
categories=[]
for t in types:
categories.append(getCategories(t))
links={"categories":categories}
if(not os.path.exists(directory)):
os.makedirs(directory)
with open(directory+"links.json","w") as file:
json.dump(links,file)
return links
def getTypes():
types=[]
r = requests.get("https://quotefancy.com")
if r.status_code==200:
        soup = BeautifulSoup(r.text, "html.parser")
links = soup.find_all('a')
for a in links:
link=a.get('href')
temp="quotes"
if(link not in types):
if(link):
if(temp in link):
types.append(link)
return types
def getCategories(link):
categories=[]
r = requests.get(link)
if r.status_code==200:
        soup = BeautifulSoup(r.text, "html.parser")
links=soup.find_all('a')
for a in links:
link=a.get('href')
temp="quotes"
if(link not in categories):
if(link):
if(temp in link):
categories.append(link)
return categories
def getQuotes(link):
links = []
r = requests.get(link)
if r.status_code==200:
print("successful")
        soup = BeautifulSoup(r.text, "html.parser")
quotes = soup.find_all('a')
for i in quotes:
link = i.get('href')
temp="quote/"
if(link not in links):
if(link):
if(temp in link):
links.append(link)
return links
def getImages(link):
links=[]
r = requests.get(link)
if r.status_code==200:
        soup = BeautifulSoup(r.text, "html.parser")
images = soup.find_all('img')
for i in images:
source = i.get('data-original')
temp="wallpaper/1600x900"
if(source not in links):
if(source):
if(temp in source):
links.append(source)
return links
def getImage(link):
chunk_size=1024
filename=link.split('/')[-1]
r = requests.get(link,stream=True)
with open(filename,"wb") as file:
for chunk in r.iter_content(chunk_size=chunk_size):
file.write(chunk)
os.system("xdg-open '"+filename+"'")
def main():
if(not os.path.exists(directory+"links.json")):
links=initialize()
else:
with open(directory+"links.json","r") as file:
links = json.load(file)
categories=links["categories"]
category=choice(categories)
#print(category)
link = choice(category)
print(link)
quotes = getQuotes(link)
#print(quotes)
quote = choice(quotes)
#print(quote)
images=getImages(quote)
#print(images)
image=choice(images)
getImage(image)
if __name__=="__main__":
main()
``` |
{
"source": "JordanPCF/LyricsGenius",
"score": 3
} |
#### File: lyricsgenius/api/api.py
```python
from .base import Sender
from .public_methods import (
SearchMethods,
SongMethods
)
class API(Sender):
"""Genius API.
The :obj:`API` class is in charge of making all the requests
to the developers' API (api.genius.com)
Use the methods of this class if you already have information
such as song ID to make direct requests to the API. Otherwise
the :class:`Genius` class provides a friendlier front-end
to search and retrieve data from Genius.com.
All methods of this class are available through the :class:`Genius` class.
Args:
access_token (:obj:`str`): API key provided by Genius.
response_format (:obj:`str`, optional): API response format (dom, plain, html).
timeout (:obj:`int`, optional): time before quitting on response (seconds).
sleep_time (:obj:`str`, optional): time to wait between requests.
retries (:obj:`int`, optional): Number of retries in case of timeouts and
errors with a >= 500 response code. By default, requests are only made once.
Attributes:
response_format (:obj:`str`, optional): API response format (dom, plain, html).
timeout (:obj:`int`, optional): time before quitting on response (seconds).
sleep_time (:obj:`str`, optional): time to wait between requests.
retries (:obj:`int`, optional): Number of retries in case of timeouts and
errors with a >= 500 response code. By default, requests are only made once.
Returns:
:class:`API`: An object of the `API` class.
"""
def __init__(self,
access_token,
response_format='plain',
timeout=5,
sleep_time=0.2,
retries=0,
):
super().__init__(
access_token=access_token,
response_format=response_format,
timeout=timeout,
sleep_time=sleep_time,
retries=retries,
)
def search_songs(self, search_term, per_page=None, page=None):
"""Searches songs hosted on Genius.
Args:
search_term (:obj:`str`): A term to search on Genius.
per_page (:obj:`int`, optional): Number of results to
return per page. It can't be more than 5 for this method.
page (:obj:`int`, optional): Number of the page.
Returns:
:obj:`dict`
"""
endpoint = "search"
params = {'q': search_term,
'per_page': per_page,
'page': page}
return self._make_request(endpoint, params_=params)
def song(self, song_id, text_format=None):
"""Gets data for a specific song.
Args:
song_id (:obj:`int`): Genius song ID
text_format (:obj:`str`, optional): Text format of the results
('dom', 'html', 'markdown' or 'plain').
Returns:
:obj:`dict`
Examples:
.. code:: python
genius = Genius(token)
song = genius.song(2857381)
print(song['full_title'])
"""
endpoint = "songs/{id}".format(id=song_id)
params = {'text_format': text_format or self.response_format}
return self._make_request(endpoint, params_=params)
class PublicAPI(
Sender,
SearchMethods,
SongMethods):
"""Genius public API.
The :obj:`PublicAPI` class is in charge of making all the requests
to the public API (genius.com/api)
    You can use this class without an access token since calls are made
to the public API.
All methods of this class are available through the :class:`Genius` class.
Args:
response_format (:obj:`str`, optional): API response format (dom, plain, html).
timeout (:obj:`int`, optional): time before quitting on response (seconds).
sleep_time (:obj:`str`, optional): time to wait between requests.
retries (:obj:`int`, optional): Number of retries in case of timeouts and
errors with a >= 500 response code. By default, requests are only made once.
Attributes:
response_format (:obj:`str`, optional): API response format (dom, plain, html).
timeout (:obj:`int`, optional): time before quitting on response (seconds).
sleep_time (:obj:`str`, optional): time to wait between requests.
retries (:obj:`int`, optional): Number of retries in case of timeouts and
errors with a >= 500 response code. By default, requests are only made once.
Returns:
:class:`PublicAPI`: An object of the `PublicAPI` class.
"""
def __init__(
self,
response_format='plain',
timeout=5,
sleep_time=0.2,
retries=0,
**kwargs
):
# Genius PublicAPI Constructor
super().__init__(
response_format=response_format,
timeout=timeout,
sleep_time=sleep_time,
retries=retries,
**kwargs
)
```
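A minimal usage sketch, assuming a valid client access token and the standard Genius response envelope (`response` -> `hits`):
```python
api = API("my-client-access-token")  # hypothetical token
results = api.search_songs("Begin Again", per_page=5)
for hit in results["response"]["hits"]:
    print(hit["result"]["full_title"])
```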
#### File: LyricsGenius/tests/test_auth.py
```python
import os
import unittest
from unittest.mock import MagicMock, patch
from lyricsgenius import OAuth2
client_id = os.environ["GENIUS_CLIENT_ID"]
client_secret = os.environ["GENIUS_CLIENT_SECRET"]
redirect_uri = os.environ["GENIUS_REDIRECT_URI"]
def mocked_requests_post(*args, **kwargs):
class MockResponse:
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
def json(self):
return self.json_data
def raise_for_status(self):
if self.status_code > 300:
raise ConnectionError
method, url = args[0], args[1]
data = kwargs['data']
code = data.get('code')
data_client_id = data.get('client_id')
data_client_secret = data.get('client_secret')
data_redirect_uri = data.get('redirect_uri')
data_grant_type = data.get('grant_type')
data_response_type = data.get('response_type')
if (method == 'POST'
and url == OAuth2.token_url
and code == 'some_code'
and data_client_id == client_id
and data_client_secret == client_secret
and data_redirect_uri == redirect_uri
and data_grant_type == 'authorization_code'
and data_response_type == 'code'):
return MockResponse({"access_token": "test"}, 200)
return MockResponse(None, 403)
class TestOAuth2(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("\n---------------------\nSetting up OAuth2 tests...\n")
def test_init(self):
with self.assertRaises(AssertionError):
OAuth2(client_id, redirect_uri)
scope = ('me', 'create_annotation', 'manage_annotation', 'vote')
auth = OAuth2(client_id, redirect_uri,
client_secret, scope='all')
self.assertEqual(auth.scope, scope)
def test_get_user_token_client_flow(self):
# client-only flow
auth = OAuth2(client_id, redirect_uri, client_only_app=True)
redirected = 'https://example.com/callback#access_token=test'
client_flow_token = 'test'
r = auth.get_user_token(redirected)
self.assertEqual(r, client_flow_token)
@patch('requests.Session.request',
side_effect=mocked_requests_post)
def test_get_user_token_code_flow(self, mock_post):
# full code exchange flow
auth = OAuth2(client_id, redirect_uri,
client_secret, scope='all')
redirected = 'https://example.com/callback?code=some_code'
code_flow_token = 'test'
r = auth.get_user_token(redirected)
self.assertEqual(r, code_flow_token)
def test_prompt_user(self):
auth = OAuth2(client_id, redirect_uri,
client_secret, scope='all')
token = '<PASSWORD>'
current_module = 'lyricsgenius.auth'
input_ = MagicMock(return_value='http://example.com?code=some_code')
with patch(current_module + '.webbrowser', MagicMock()), \
patch(current_module + '.input', input_), \
patch(current_module + '.print', MagicMock()), \
patch('requests.Session.request',
side_effect=mocked_requests_post):
r = auth.prompt_user()
self.assertEqual(r, token)
```
#### File: LyricsGenius/tests/test_genius.py
```python
import os
import unittest
from lyricsgenius import Genius
# Import client access token from environment variable
access_token = os.environ.get("GENIUS_ACCESS_TOKEN", None)
assert access_token is not None, (
"Must declare environment variable: GENIUS_ACCESS_TOKEN")
genius = Genius(access_token, sleep_time=1.0, timeout=15)
class TestEndpoints(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("\n---------------------\nSetting up Endpoint tests...\n")
cls.search_term = "<NAME>"
cls.song_title_only = "99 Problems"
cls.tag = genius.tag('pop')
def test_search_song(self):
artist = "Jay-Z"
# Empty response
response = genius.search_song('')
self.assertIsNone(response)
# Pass no title and ID
with self.assertRaises(AssertionError):
genius.search_song()
# Search by song ID
response = genius.search_song(song_id=1)
self.assertIsNotNone(response)
# Exact match exact search
response = genius.search_song(self.song_title_only)
self.assertTrue(response.title.lower() == self.song_title_only.lower())
# Song with artist name
response = genius.search_song(self.song_title_only, artist)
self.assertTrue(response.title.lower() == self.song_title_only.lower())
# Spaced out search
response = genius.search_song(" \t 99 \t \t\tProblems ", artist)
self.assertTrue(response.title.lower() == self.song_title_only.lower())
# No title match because of artist
response = genius.search_song(self.song_title_only, artist="Drake")
self.assertFalse(response.title.lower() == self.song_title_only.lower())
def test_song_annotations(self):
msg = "Incorrect song annotation response."
r = sorted(genius.song_annotations(1))
real = r[0][0]
expected = "(I’m at bat)"
self.assertEqual(real, expected, msg)
def test_tag_results(self):
r = self.tag
self.assertEqual(r['next_page'], 2)
self.assertEqual(len(r['hits']), 20)
def test_tag_first_result(self):
artists = ['<NAME>', '<NAME>']
featured_artists = ['<NAME>']
song_title = "Despacito (Remix)"
title_with_artists = (
"Despacito (Remix) by <NAME> & <NAME> (Ft. <NAME>)"
)
url = "https://genius.com/Luis-fonsi-and-daddy-yankee-despacito-remix-lyrics"
first_song = self.tag['hits'][0]
self.assertEqual(artists, first_song['artists'])
self.assertEqual(featured_artists, first_song['featured_artists'])
self.assertEqual(song_title, first_song['title'])
self.assertEqual(title_with_artists, first_song['title_with_artists'])
self.assertEqual(url, first_song['url'])
class TestLyrics(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("\n---------------------\nSetting up lyrics tests...\n")
cls.song_url = "https://genius.com/Andy-shauf-begin-again-lyrics"
cls.song_id = 2885745
cls.lyrics_ending = (
"[Outro]"
"\nNow I’m kicking leaves"
"\nCursing the one that I love and the one I don’t"
"\nI wonder who you’re thinking of"
)
def test_lyrics_with_url(self):
lyrics = genius.lyrics(self.song_url)
self.assertTrue(lyrics.endswith(self.lyrics_ending))
def test_lyrics_with_id(self):
lyrics = genius.lyrics(self.song_id)
self.assertTrue(lyrics.endswith(self.lyrics_ending))
``` |
{
"source": "JordanPedersen/pymarc-extraction",
"score": 3
} |
#### File: JordanPedersen/pymarc-extraction/pymarc_extract.py
```python
from pymarc import MARCReader
import traceback
import pandas as pd
def extractsub():
#ask user to define the variables for inputmarc file and outputtxt file
inputmarc = input("Insert 'filename.mrc' Here >>> ")
outputtxt = input("Insert 'filename.txt' Here >>> ")
#create dataframe
marc_data = []
#Configure the max width display for column
pd.set_option('display.max_colwidth', None)
reader = MARCReader(open(inputmarc, 'rb'), hide_utf8_warnings=True, force_utf8=True, utf8_handling='ignore', file_encoding='utf-8')
try:
for record in reader:
##this is a new line from old version
if record is not None:
# print(record) --- this is also a new line
if record.get_fields('651') is not None:
for f in record.get_fields('651'):
# print(f)
if f['a'] is not None:
catkey = record['001'].value()
                        geosub = f['a']
                        if geosub is None:
                            geosub = ""
#print((record['001']), "|", geosub)
marc_data.append(str(catkey)+"|"+ geosub)
else:
print("something wrong")
#store data in dataframe
df = pd.DataFrame(marc_data)
# export to tab delimited file
df.to_csv(outputtxt, sep='\t', encoding='utf-8', index = False)
    except Exception:
traceback.print_exc()
if __name__ == "__main__":
#Define extractsub as main
extractsub()
``` |
{
"source": "jordanperr/csci5636finalproject-windse-convergence",
"score": 3
} |
#### File: jordanperr/csci5636finalproject-windse-convergence/experiment_2d.py
```python
import windse_driver.driver_functions as df
import yaml
import pandas as pd
import glob
def run():
experiment_name = "2d-n"
params_dict = yaml.safe_load(open("convergence-2D-3-Turbine.yaml"))
results = []
for nx in range(110,200+1,10):
params_dict["domain"]["nx"] = nx
params_dict["domain"]["ny"] = nx
params_dict["general"]["name"] = f"{experiment_name}{nx}"
params, problem, solver = df.SetupSimulation(params_dict)
solver.Solve()
results.append({"params":params, "problem":problem, "solver":solver})
#pickle.dump(results, open(f"./output/{experiment_name}.results.pickle","wb"))
def load_power(path):
df = pd.read_csv(path, header=None, skiprows=[0], sep=" ")
df.columns = ["time", "Turbine 1", "Turbine 2", "Turbine 3", "sum"]
return df
def load_result(path):
df = load_power(path+"/data/2d_power_data.txt")
df["path"] = path
df["logfile"] = open(path+"/log.txt","r").read()
df["dofs"] = df["logfile"].str.extract("Total DOFS: +(\d+)").astype("int")
df["nx"] = df["path"].str.extract(r'-n(\d+)').astype("int")
return df
def get_results():
df = pd.concat(map(load_result, glob.glob("./output/2d-n*"))).sort_values("nx")
return df
if __name__=="__main__":
run()
``` |
{
"source": "JordanP/flask-microversions",
"score": 3
} |
#### File: flask-microversions/micro/utils.py
```python
import functools
import flask
import flask.json
import flask.testing
class JSONResponseMixin():
def json(self):
return flask.json.loads(self.data)
class Response(JSONResponseMixin, flask.Response):
pass
def add_json_kwarg(f):
"""Fill the headers and the body of an HTTP request for a JSON request."""
@functools.wraps(f)
def decorated(*args, json=None, **kwargs):
if not json:
return f(*args, **kwargs)
if 'headers' not in kwargs or 'Content-Type' not in kwargs['headers']:
headers = kwargs.setdefault('headers', {})
headers['Content-Type'] = 'application/json'
if 'data' not in kwargs or kwargs['data'] is None:
kwargs['data'] = flask.json.dumps(json)
return f(*args, **kwargs)
return decorated
class FlaskClient(flask.testing.FlaskClient):
post = add_json_kwarg(flask.testing.FlaskClient.post)
class ComparableMixin(object): # pragma: no cover
def _compare(self, other, method):
try:
return method(self._cmpkey(), other._cmpkey())
except (AttributeError, TypeError):
# _cmpkey not implemented, or return different type,
# so I can't compare with "other".
return NotImplemented
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
``` |
{
"source": "jordanphillips23/mate-rov",
"score": 4
} |
#### File: scripts/deprecated_scripts/Controller-pygame.py
```python
import pygame
import numpy as np
import rospy
# Define some colors.
BLACK = pygame.Color('black')
WHITE = pygame.Color('white')
# This is a simple class that will help us print to the screen.
# It has nothing to do with the joysticks, just outputting the
# information.
class TextPrint(object):
def __init__(self):
self.reset()
self.font = pygame.font.Font(None, 20)
def tprint(self, screen, textString):
textBitmap = self.font.render(textString, True, BLACK)
screen.blit(textBitmap, (self.x, self.y))
self.y += self.line_height
def reset(self):
self.x = 10
self.y = 10
self.line_height = 15
def indent(self):
self.x += 10
def unindent(self):
self.x -= 10
pygame.init()
# Set the width and height of the screen (width, height).
screen = pygame.display.set_mode((500, 700))
pygame.display.set_caption("My Game")
# Loop until the user clicks the close button.
done = False
# Used to manage how fast the screen updates.
clock = pygame.time.Clock()
# Initialize the joysticks.
pygame.joystick.init()
# Get ready to print.
textPrint = TextPrint()
dimensions = 25
emptyChar = "-"
drawChar = "+"
arx = np.chararray((dimensions,dimensions), unicode=True , offset = 3) # ( rows , columns )
arx.fill(emptyChar)
arx[ np.size(arx,0)//2 , np.size(arx,1)//2 ] = "*"
currentPosition = [ np.size(arx,0)//2 , np.size(arx,1)//2 ]
def update_Arx( x = 0, y = 0):
    global arx
    global drawChar
    global emptyChar
    global dimensions
    global currentPosition
x = round(x,1)
y = round(y,1)
if( round(x,1) > 0.5 ):
if( currentPosition[1] < dimensions-1 ):
currentPosition[1] += 1
elif( round(x,1) < -0.5 ):
if( currentPosition[1] > 0 ):
currentPosition[1] -= 1
if( round(y,1) > 0.5 ):
if( currentPosition[0] < dimensions-1 ):
currentPosition[0] += 1
elif( round(y,1) < -0.5 ):
if( currentPosition[0] > 0 ):
currentPosition[0] -= 1
if(arx[ currentPosition[0] , currentPosition[1] ] != drawChar ):
arx[ currentPosition[0] , currentPosition[1] ] = drawChar
# -------- Main Program Loop -----------
while not done:
#
# EVENT PROCESSING STEP
#
# Possible joystick actions: JOYAXISMOTION, JOYBALLMOTION, JOYBUTTONDOWN,
# JOYBUTTONUP, JOYHATMOTION
for event in pygame.event.get(): # User did something.
if event.type == pygame.QUIT: # If user clicked close.
done = True # Flag that we are done so we exit this loop.
elif event.type == pygame.JOYBUTTONDOWN:
print("Joystick button pressed.")
elif event.type == pygame.JOYBUTTONUP:
print("Joystick button released.")
#
# DRAWING STEP
#
# First, clear the screen to white. Don't put other drawing commands
# above this, or they will be erased with this command.
screen.fill(WHITE)
textPrint.reset()
# Get count of joysticks.
joystick_count = pygame.joystick.get_count()
textPrint.tprint(screen, "Number of joysticks: {}".format(joystick_count))
textPrint.indent()
# For each joystick:
ar = np.zeros((3,3)) # ( rows , columns )
ar2 = np.zeros((3,3)) # ( rows , columns )
for i in range(joystick_count):
joystick = pygame.joystick.Joystick(i)
joystick.init()
textPrint.tprint(screen, "Joystick {}".format(i))
textPrint.indent()
# Get the name from the OS for the controller/joystick.
name = joystick.get_name()
textPrint.tprint(screen, "Joystick name: {}".format(name))
# Usually axis run in pairs, up/down for one, and left/right for
# the other.
axes = joystick.get_numaxes()
textPrint.tprint(screen, "Number of axes: {}".format(axes))
textPrint.indent()
"""
Human readable version
JOYSTICK: LEFT RIGHT
axis 0 and 1 axis 3 and 4
Movement:
-1.ax1 -1.ax3
-1.ax0 1.ax0 -1.ax4 1.ax4
1.ax1 1.ax3
"""
for i in range(axes):
axis = joystick.get_axis(i)
if( i == 0):
#-----left_joy
# controls
# LEFT RIGHT
# -1 1
if( round(axis,6) < 0 ): #LEFT
ar[1,0] = round(axis,1)
elif( round(axis,6) > 0 ): #RIGHT
ar[1,2] = round(axis,1)
update_Arx( x = axis, y = 0)
# =======================================
# publish axis -- left joystick horizontal
if( i == 1):
#-----left_joy
# controls
# UP DOWN
# -1 1
if( round(axis,6) < 0 ): # UP
ar[0,1] = round(axis,1)
elif( round(axis,6) > 0 ): #DOWN
ar[2,1] = round(axis,1)
update_Arx( x = 0, y = axis)
# =======================================
# publish axis -- left joystick horizontal
if( i == 3):
#-----right_joy
# controls
# UP DOWN
# -1 1
if( round(axis,6) < 0 ): #UP
ar2[0,1] = round(axis,1)
elif( round(axis,6) > 0 ): #DOWN
ar2[2,1] = round(axis,1)
# =======================================
# publish axis -- left joystick horizontal
if( i == 4):
#-----left_joy
# controls
# LEFT RIGHT
# -1 1
if( round(axis,6) < 0 ): #LEFT
ar2[1,0] = round(axis,1)
elif( round(axis,6) > 0 ): #RIGHT
ar2[1,2] = round(axis,1)
# =======================================
# publish axis -- left joystick horizontal
textPrint.tprint(screen, "Axis {} value: {:>6.3f}".format(i, axis))
textPrint.unindent()
textPrint.tprint(screen, "")
textPrint.tprint(screen, " LEFT RIGHT")
textPrint.tprint(screen, "{:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f}".format( ar[0,0] , ar[0,1] , ar[0,2] , ar2[0,0] , ar2[0,1] , ar2[0,2] ))
textPrint.tprint(screen, "{:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f}".format( ar[1,0] , ar[1,1] , ar[1,2] , ar2[1,0] , ar2[1,1] , ar2[1,2]))
textPrint.tprint(screen, "{:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f} {:6.1f}".format( ar[2,0] , ar[2,1] , ar[2,2] , ar2[2,0] , ar2[2,1] , ar2[2,2]))
for j in range(np.size(arx , 0)):
rowString = ""
for k in range(np.size(arx , 1)):
if( arx[j,k] == emptyChar ):
rowString += " " +")("+ " "
else:
rowString += " " + arx[j,k] + " "
textPrint.tprint(screen, rowString)
"""
################################################################################
######### __INIT__ BUTTONS ################
######### THIS PART CONTROLS THE REST OF THE CONTROLLER BUTTONS ################
################################################################################
buttons = joystick.get_numbuttons()
textPrint.tprint(screen, "Number of buttons: {}".format(buttons))
textPrint.indent()
for i in range(buttons):
button = joystick.get_button(i)
textPrint.tprint(screen,
"Button {:>2} value: {}".format(i, button))
textPrint.unindent()
hats = joystick.get_numhats()
textPrint.tprint(screen, "Number of hats: {}".format(hats))
textPrint.indent()
# Hat position. All or nothing for direction, not a float like
# get_axis(). Position is a tuple of int values (x, y).
for i in range(hats):
hat = joystick.get_hat(i)
textPrint.tprint(screen, "Hat {} value: {}".format(i, str(hat)))
textPrint.unindent()
textPrint.unindent()
################################################################################
######### END BUTTONS ################
################################################################################
"""
#
# ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT
#
# Go ahead and update the screen with what we've drawn.
pygame.display.flip()
# Limit to 20 frames per second.
clock.tick(20)
# Close the window and quit.
# If you forget this line, the program will 'hang'
# on exit if running from IDLE.
pygame.quit()
```
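A pygame-free sketch of the axis-to-grid mapping used in the main loop: a stick deflection (x, y) in [-1, 1] is written into one cell of a 3x3 direction grid:
```python
import numpy as np

def axes_to_grid(x: float, y: float) -> np.ndarray:
    grid = np.zeros((3, 3))
    if round(x, 1) < 0:        # left
        grid[1, 0] = round(x, 1)
    elif round(x, 1) > 0:      # right
        grid[1, 2] = round(x, 1)
    if round(y, 1) < 0:        # up
        grid[0, 1] = round(y, 1)
    elif round(y, 1) > 0:      # down
        grid[2, 1] = round(y, 1)
    return grid

print(axes_to_grid(-0.8, 0.3))  # left + down deflection
```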
#### File: scripts/motion-tracking/motion-tracker.py
```python
from collections import deque
from imutils.video import VideoStream
import numpy as np
import argparse
import cv2
import imutils
import time
import os
import json
import tracker
def nothing(x):
pass
#will not use args -v, --video
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=64,
help="max buffer size")
args = vars(ap.parse_args())
setting = {'hue_min':0, 'hue_max': 180, 'sat_min': 0, 'sat_max': 255, 'val_min': 0, 'val_max': 255}
setting_file = os.path.join(os.path.expanduser('~'), '.multithresh.json') #for loading specific thresholds later on
if os.path.exists(setting_file):
with open(setting_file, 'r') as f:
setting = json.load(f)
#def lower/upper boundaries of the bottle cap, --> "blue"
#range in HSV values, intialize list of range of hue, saturation, value
#adjust HERE to filter for different colors
lower = np.array([setting['hue_min'], setting['sat_min'], setting['val_min']])
upper = np.array([setting['hue_max'], setting['sat_max'], setting['val_max']])
""" lower = (0, 0, 0)
upper = (0, 0, 255) """
pts = deque(maxlen=args["buffer"])
w_name = 'webcam'
cv2.namedWindow(w_name)
cv2.namedWindow('track')
cv2.createTrackbar('h_min', 'track', setting['hue_min'], 180, nothing)
cv2.createTrackbar('s_min', 'track', setting['sat_min'], 255, nothing)
cv2.createTrackbar('v_min', 'track', setting['val_min'], 255, nothing)
cv2.createTrackbar('h_max', 'track', setting['hue_max'], 180, nothing)
cv2.createTrackbar('s_max', 'track', setting['sat_max'], 255, nothing)
cv2.createTrackbar('v_max', 'track', setting['val_max'], 255, nothing)
#if no video, refer to webcam-->this is the desired behavior
if not args.get("video", False):
vs = VideoStream(src=0).start()
#if video specified, grab reference to file
#might hardcode this later to search for index 1 webcam
else:
vs = cv2.VideoCapture(args["video"])
#delay for camera init
time.sleep(2.0)
#takes input from cv2 sliders to adjust hsv min/max values
#shows trackbars on different window, 'track'
def refresh_color():
hsv_hue_min = cv2.getTrackbarPos('h_min', 'track')
hsv_saturation_min = cv2.getTrackbarPos('s_min', 'track')
hsv_value_min = cv2.getTrackbarPos('v_min', 'track')
hsv_hue_max = cv2.getTrackbarPos('h_max', 'track')
hsv_saturation_max = cv2.getTrackbarPos('s_max', 'track')
hsv_value_max = cv2.getTrackbarPos('v_max', 'track')
upper[2] = hsv_value_max
upper[1] = hsv_saturation_max
upper[0] = hsv_hue_max
lower[0] = hsv_hue_min
lower[1] = hsv_saturation_min
lower[2] = hsv_value_min
print("-"*50)
print(f"Hue: {lower[0]*2:d}-{upper[0]*2:d} deg")
print(f"Sat: {lower[1]/2.55:.0f}-{upper[1]/2.55:.0f} %")
print(f"Val: {lower[2]/2.55:.0f}-{upper[2]/2.55:.0f} %")
#takes raw frame and performs color filtering and noise reduction
def process(frame):
#resize frame, blur, convert to HSV registering
#frame = imutils.resize(frame, width=640)
blurred = cv2.GaussianBlur(frame, (11,11), 0)
hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
#construct mask for desired color value
#series of dilations and erosions to remove blobs in mask
mask = cv2.inRange(hsv, lower, upper)
mask = cv2.erode(mask, None, iterations=2)
mask = cv2.dilate(mask, None, iterations=2)
#kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(3,3))
#kernel = cv2.getStructuringElement(cv2.MORPH_CROSS,(3,3))
#erode followed by dilate to reduce noise
#mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
#by this point, should be able to detect filtered out blue bottle cap
return mask
#finds contours and does stuff to them
def track_targets(mask, frame):
#finding contours in mask, init (x,y) of ball
cntrs = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_NONE)
cntrs = imutils.grab_contours(cntrs)
center = None
for contour in cntrs:
cv2.drawContours(mask, [contour], 0, (255, 0, 255), 3)
#continue if >=1 contours found
#doin stuff to contours
if len(cntrs) > 0:
#find largest contour in mask
#computer minimum enclosing circle and centroid
circle = max(cntrs, key=cv2.contourArea)
((x,y), radius) = cv2.minEnclosingCircle(circle)
moments_arr = cv2.moments(circle)
center = (int(moments_arr["m10"] / moments_arr["m00"]),
int(moments_arr["m01"] / moments_arr["m00"]))
#cap must meet certain min radius
#adjust HERE for different sized balls
if radius > 0.5:
#draw circle, centroid on frame, update list of points
cv2.circle(frame, (int(x), int(y)), int(radius),
(0, 255, 255), 2)
cv2.circle(frame, center, 5, (0, 0, 255), -1)
cv2.circle(mask, (int(x), int(y)), int(radius),
(0, 255, 255), 2)
cv2.circle(mask, center, 5, (0, 0, 255), -1)
pts.appendleft(center)
#draw trail of centroid points
for i in range(1, len(pts)):
#if either of tracked points are None, ignore
if pts[i-1] is None or pts[i] is None:
continue
#otherwise, compute thickness of line and draw connector
thickness = int(np.sqrt(args["buffer"]
/ float(i + 1))*2.5)
cv2.line(frame, pts[i-1], pts[i], (0, 0, 255), thickness)
cv2.line(mask, pts[i-1], pts[i], (0, 0, 255), thickness)
while True:
    frame = vs.read()
    #take frame from webcam or video file
    frame = frame[1] if args.get("video", False) else frame
    if frame is None:
        break
#shows unfiltered frame
    refresh_color()
cv2.imshow('track', frame)
#processes frame for contour detection
mask = process(frame)
#performs contour detection as desired
#draws contours on mask and frame
track_targets(mask, frame)
#show process, detected name
cv2.imshow(w_name, mask)
key_press = cv2.waitKey(20)
if key_press == 27:
break
if not args.get("video", False):
vs.stop()
else:
vs.release()
cv2.destroyWindow(w_name)
``` |
{
"source": "jordan-pierre/tim-ferriss-search-engine",
"score": 3
} |
#### File: tim-ferriss-search-engine/scraper/tim_scraper.py
```python
import logging
import re
import json
import os
import sys
import requests
from bs4 import BeautifulSoup
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(message)s",
handlers=[logging.FileHandler("debug.log"), logging.StreamHandler()],
)
def get_past_episodes_urls():
"""_summary_"""
directory_url = (
"https://tim.blog/2018/09/20/all-transcripts-from-the-tim-ferriss-show/"
)
dir_site = requests.get(directory_url)
dir_site.raise_for_status()
soup = BeautifulSoup(dir_site.content, "html.parser")
site_content = str(soup.find(class_="entry-content"))
# Grab everything in the regex format
regex = r'^<p><a href="([^"]*)"([^>]*)>(#\d*:[^<]*)</a></p>'
matches = re.finditer(regex, site_content, re.MULTILINE)
# store in json structure [{index, title, url},]
bulk_transcript_urls = dict()
for match_num, match in enumerate(matches, start=1):
bulk_transcript_urls[match_num] = {
"title": match.group(3),
"url": match.group(1),
}
if len(bulk_transcript_urls) == 0:
logging.warning("No matches found. Exiting system to avoid file overwrite.")
sys.exit(0)
output_path = os.sep.join(["output", "transcript_urls.json"])
with open(output_path, "w", encoding="utf-8") as final:
json.dump(bulk_transcript_urls, final, indent=2)
logging.info(f"SUCCESS: Wrote URL list to {output_path}")
if __name__ == "__main__":
get_past_episodes_urls()
``` |
{
"source": "JordanP/monitor-http-log",
"score": 3
} |
#### File: monitor-http-log/monitor_http_log/main.py
```python
import argparse
import collections
import datetime
import logging
import logging.handlers
import os
import re
import select
import sys
import time
import urlparse
import monitor_http_log.heapq_oo as heapq_oo
from monitor_http_log import exceptions
# Keep all data points for the last `HISTORY` seconds
HISTORY = 10
# Store all data points in a heap
LAST_HITS = heapq_oo.HeapQ()
# Also aggregate the data to avoid traversing `LAST_HITS`
# This is a CPU/memory tradeoff
LAST_HITS_PER_SECTION = collections.Counter()
BYTES_PER_SECONDS = collections.defaultdict(int)
# Print statistics every `STATS_INTERVAL` seconds
STATS_INTERVAL = 10
# An alarm will be triggered if the bandwidth usage has been more than
# `ALARM_BW_TRESHOLD` bytes on average in the last `ALARM_BW_PERIOD` seconds
ALARM_BW_TRESHOLD = 1000
ALARM_BW_PERIOD = 20
# Overall traffic in the last `HISTORY` seconds
LAST_BW = 0
# Some constants
ALARM_STATE_HIGH = object()
ALARM_STATE_LOW = object()
COMMON_LOG_FORMAT = re.compile(
r'(?P<client_ip>[^ ]*) (?P<user_identifier>[^ ]*) (?P<user_id>[^ ]*)'
r' \[(?P<date>[^]]*)\] "(?P<http_method>[A-Z]*) (?P<http_url>[^"]*) '
r'(?P<http_version>HTTP/\d.\d)" (?P<status_code>[^ ]*) '
r'(?P<bytes_sent>[^ ]*)'
)
def parse_log_line(line):
matches = COMMON_LOG_FORMAT.match(line)
if matches is None:
# For instance an HTTP 408 (Request Timeout) could have no HTTP url
logging.warning("Unable to parse HTTP log line : '%s'", line)
raise exceptions.InvalidHTTPLogLine()
hit = matches.groupdict()
# Build section
parse_result = urlparse.urlparse(hit['http_url'])
path_section = "/".join(parse_result.path.split('/')[:2])
# According to instructions, a section also contains the scheme and netloc
hit['section'] = urlparse.urlunparse(
(parse_result.scheme, parse_result.netloc, path_section, '', '', '')
)
# Convert date to unix timestamp (we assume the log stream comes from the
# same server as where this program is running, to avoid dealing with TZ,
# "the %z escape that expands to the preferred hour/minute offset is not
# supported by all ANSI C libraries")
hit["time"] = time.mktime(
time.strptime(hit["date"].split()[0], "%d/%b/%Y:%H:%M:%S")
)
try:
hit['bytes_sent'] = int(hit['bytes_sent'])
except ValueError:
# An HTTP DELETE returns no data so bytes_sent is '-'
hit['bytes_sent'] = 0
return hit
def update_statistics():
global LAST_BW
# Discard hits that are old from the statistics
horizon = time.time() - HISTORY
for _, hit in LAST_HITS.popuntil((horizon,)):
LAST_HITS_PER_SECTION[hit['section']] -= 1
LAST_BW -= hit['bytes_sent']
def print_statistics():
total_bandwidth_in_kb = LAST_BW / 1024
print("In the last {} seconds").format(HISTORY)
print("Top sections : %r" % LAST_HITS_PER_SECTION.most_common(3))
print("Total Hits / Total bandwidth: {} / {} KiB".format(
len(LAST_HITS), total_bandwidth_in_kb))
print("-" * 80)
def update_and_print_stats():
# First discard old data in order to have accurate stats.
update_statistics()
print_statistics()
last_printed = time.time()
return last_printed
def evaluate_alarm(data, alarm_state, alarm_treshold, alarm_period):
new_alarm_state = alarm_state
aggregated_bandwidth = 0
# Data points older than `horizon` are not going to be looked at.
horizon = time.time() - alarm_period
    for timestamp in list(data.keys()):
if timestamp < horizon:
del data[timestamp]
else:
aggregated_bandwidth += data[timestamp]
avg = aggregated_bandwidth / alarm_period
if avg >= alarm_treshold and alarm_state == ALARM_STATE_LOW:
new_alarm_state = ALARM_STATE_HIGH
print("\033[93mHigh traffic generated an alert - traffic = {} B/s, "
"triggered at {}\033[0m").format(avg, datetime.datetime.now())
elif avg < alarm_treshold and alarm_state == ALARM_STATE_HIGH:
new_alarm_state = ALARM_STATE_LOW
print("\033[92mAlert recovered at {}\033[0m".format(
datetime.datetime.now()))
return new_alarm_state
def process_hit(hit):
global LAST_BW
logging.debug("Got hit: %s", hit)
LAST_HITS_PER_SECTION[hit['section']] += 1
LAST_HITS.add((hit['time'], hit))
LAST_BW += hit['bytes_sent']
BYTES_PER_SECONDS[hit['time']] += hit['bytes_sent']
def process_logs_forever(http_logs):
stats_last_printed = time.time()
alarm_state = ALARM_STATE_LOW
while True:
# We set a timeout to select because we don't want the program to
# freeze if no HTTP hit.
readable = select.select([http_logs], [], [], 0.5)[0]
if not readable:
# Nothing to read.
if time.time() - stats_last_printed >= STATS_INTERVAL:
stats_last_printed = update_and_print_stats()
alarm_state = evaluate_alarm(
BYTES_PER_SECONDS, alarm_state,
ALARM_BW_TRESHOLD, ALARM_BW_PERIOD
)
else:
log_line = readable[0].readline().strip()
# If we are at the end of the file (EOF) then `readline()` is
# going to return ''.
if not log_line:
if time.time() - stats_last_printed >= STATS_INTERVAL:
stats_last_printed = update_and_print_stats()
alarm_state = evaluate_alarm(
BYTES_PER_SECONDS, alarm_state,
ALARM_BW_TRESHOLD, ALARM_BW_PERIOD
)
# We don't want readline() to keep returning EOF
# so wait for new HTTP hits to come.
time.sleep(0.5)
else:
try:
hit = parse_log_line(log_line)
except exceptions.InvalidHTTPLogLine:
continue
process_hit(hit)
def main():
parser = argparse.ArgumentParser(
description='HTTP log monitoring console program',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'-v', '--verbose', help='Log at DEBUG level',
dest='verbose', action='store_true'
)
parser.add_argument(
'-q', '--quiet', help='Log at WARNING level',
dest='quiet', action='store_true'
)
parser.add_argument(
'-f', '--file', help='Path to the HTTP log file',
dest='httplog_file',
)
args = parser.parse_args()
logger = logging.getLogger()
if args.verbose:
logger.setLevel(logging.DEBUG)
elif args.quiet:
logger.setLevel(logging.WARNING)
else:
logger.setLevel(logging.INFO)
if args.httplog_file:
http_logs = open(args.httplog_file, 'rt')
http_logs.seek(0, os.SEEK_END)
else:
# We can also invoke this program with
# tail -f /var/log/apache2/other_vhosts_access.log | monitor_http_log
http_logs = sys.stdin
process_logs_forever(http_logs)
if __name__ == '__main__':
sys.exit(main())
``` |
{
"source": "JordanPowell/discord-play-register",
"score": 3
} |
#### File: JordanPowell/discord-play-register/game.py
```python
from db import db
import json
import os
def create_mention(player):
return '<@!%s>' % player.id
class Game:
    def __init__(self, name, aliases=None, min_players=0, max_players=100, known=False):
        self.name = name
        # Avoid the shared-mutable-default pitfall: normalise aliases here.
        self.aliases = aliases or []
self.min_players = min_players
self.max_players = max_players
self.known = known
def loosely_matches(self, string_with_game_name):
return any(string_with_game_name.lower().startswith(potential_name.lower())
for potential_name in [self.name] + (self.aliases or []))
def get_available_players(self):
return db.get_players_for_game(self)
def get_players_for_next_game(self):
return self.get_available_players()[:self.max_players]
def is_ready_to_play(self):
players = self.get_ready_players()
return len(players) >= self.min_players
def get_ready_messages(self):
players = self.get_ready_players()
if len(players) >= self.min_players:
if len(players) == self.max_players:
return ["%s are ready to play %s!\n@ me with 'clear %s' to clear the players." % (
','.join([create_mention(p) for p in players]),
self.name,
self.name,)]
return []
def get_ready_players(self):
return db.get_ready_players_for_game(self)
def get_unready_players(self):
return db.get_unready_players_for_game(self)
def __str__(self):
return '%s%s' % (self.name, ' (unknown game)' if not self.known else '')
def __eq__(self, other):
return self.name == other.name
def __hash__(self):
return hash(self.name)
class KnownGame(Game):
def __init__(self, *args, **kwargs):
super().__init__(*args, known=True, **kwargs)
def lookup_known_game_by_name_or_alias(name):
for game in get_known_games():
if game.loosely_matches(name):
return game
def lookup_game_by_name_or_alias(name):
# Name may contain extra junk, e.g. "I'd play cs later, after food" would mean name="cs later, after food"
game = lookup_known_game_by_name_or_alias(name)
    return game
def read_games_dict(json_filename=None):
json_filename = json_filename or os.path.join(os.path.dirname(__file__), os.environ.get('GAMES_DB_FILE', 'known_games.json'))
with open(json_filename) as json_file:
return json.load(json_file)
def write_games_dict(known_games_json, json_filename=None):
json_filename = json_filename or os.path.join(os.path.dirname(__file__), os.environ.get('GAMES_DB_FILE', 'known_games.json'))
with open(json_filename, "w") as json_file:
json.dump(known_games_json, json_file, sort_keys=True, indent=4)
def get_known_games():
known_game_dict = read_games_dict()
return [KnownGame(name=name, **props) for name, props in known_game_dict.items()]
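# For reference, a hypothetical `known_games.json` in the shape these helpers
# read and write (the real entries live in the repo's own file):
#
#   {
#       "Rocket League": {
#           "aliases": ["rl"],
#           "min_players": 2,
#           "max_players": 4
#       }
#   }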
```
#### File: JordanPowell/discord-play-register/handlers.py
```python
from db import db
from utils import extract_remainder_after_fragments, extract_time, epoch_time_to_digital
from game import lookup_game_by_name_or_alias, get_known_games, lookup_known_game_by_name_or_alias, \
write_games_dict, read_games_dict, create_mention
from response import TextResponse
from dotenv import load_dotenv
import itertools
import os
load_dotenv()
CLIENT_ID = os.getenv('CLIENT_ID')
def get_message_handlers():
return [
WouldPlayHandler(),
ClearHandler(),
CancelHandler(),
PingHandler(),
AccidentalRoleMentionHandler(),
StatusHandler(),
QueryHandler(),
AddHandler(),
HelpHandler()
]
def get_any_ready_messages(game):
    # Call the method: the bare attribute reference is always truthy.
    if game.is_ready_to_play():
return game.get_ready_messages()
return []
def replace_last_occurence(original, string_to_replace, replace_with):
return replace_with.join(original.rsplit(string_to_replace, 1))
def make_sentence_from_strings(string_list):
return replace_last_occurence(", ".join(string_list), ", ", " and ")
def split_by_first_mention(message):
msg = message.content
if msg.startswith('<@'):
idx = msg.index('>') + 1
return msg[:idx], msg[idx:].strip()
else:
return '', msg
def message_starts_with_any_fragment(message, fragments):
return any(message.lower().startswith(query_game_fragment.lower())
for query_game_fragment in fragments)
def message_pertains_to_all_games(message):
query_game_fragments = ['games', 'game', 'list', 'g']
return message_starts_with_any_fragment(message, query_game_fragments)
def get_game_name_or_alias_from_message(message):
if lookup_game_by_name_or_alias(message):
game = lookup_game_by_name_or_alias(message)
for potential_name in [game.name] + game.aliases:
if message.lower().startswith(potential_name.lower()):
return potential_name
def get_default_game_dict_representation(game_name):
return {
game_name:
{
'aliases': [],
'max_players': 5,
'min_players': 2
}
}
def is_bot_mention(mention):
return mention[3 if mention.startswith('<@!') else 2:-1] == CLIENT_ID
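# Discord renders a user mention as '<@123456789>' or, when the user has a
# server nickname, '<@!123456789>'; the slicing above strips either wrapper.
# A hypothetical check:
#
#   is_bot_mention('<@!%s>' % CLIENT_ID)   # True
#   is_bot_mention('<@&987654321>')        # False (role mention, not the bot)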
class GameExtractionMixin:
multi_game_delimiter = '/'
def get_all_responses_with_games(self, message, games):
responses = []
for game in games:
responses += self.get_all_responses_with_game(message, game) if game else []
return responses
def get_all_responses_without_game(self, message):
return []
def get_all_responses_with_game(self, message, game):
return []
def get_all_responses(self, message):
plays = extract_remainder_after_fragments(self.fragments, message.content)
game_names = plays.split(self.multi_game_delimiter)
games = [lookup_game_by_name_or_alias(game_name) for game_name in game_names if lookup_game_by_name_or_alias(game_name)]
responses = []
if games:
games = [game for game in games if game]
responses += self.get_all_responses_with_games(message, games)
else:
responses += self.get_all_responses_without_game(message)
return responses
class MessageHandler:
helper_command_list = []
def should_handle(self, message):
raise NotImplementedError()
def get_all_responses(self, message):
raise NotImplementedError()
class ContentBasedHandler(MessageHandler):
fragments = []
def should_handle(self, message):
return any(message.content.lower().startswith(f.lower()) for f in self.fragments)
class MentionMessageHandler(MessageHandler):
keywords = []
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fragments = self.keywords
def should_handle(self, message):
mention, remainder = split_by_first_mention(message)
return is_bot_mention(mention) and any(remainder.lower().startswith(kw.lower()) for kw in self.keywords)
def split_string_by_keywords(self, string):
for keyword in self.keywords:
kw_removed_string = string.replace(keyword, '', 1)
if kw_removed_string != string:
kw_removed_string = kw_removed_string.strip()
return keyword, kw_removed_string
return None, string
def generate_ready_at_time_messages(ready_would_plays_for_game, unready_would_plays_for_game, list_players=False):
done_times = set()
string = ""
number_of_ready_players = len(ready_would_plays_for_game)
if number_of_ready_players > 0:
if list_players:
string += "(%s. %s) " % (number_of_ready_players, ", ".join([wp.player.display_name for wp in ready_would_plays_for_game]))
else:
string += "(%s) " % number_of_ready_players
for uwp in unready_would_plays_for_game:
time_ready = uwp.for_time
if time_ready in done_times:
continue
done_times.add(time_ready)
ready_at_time_would_plays = db.get_would_plays_ready_at_time(uwp.game, time_ready)
number_ready_at_time = len(ready_at_time_would_plays)
if list_players:
string += "(%s at %s. %s)" % (number_ready_at_time, epoch_time_to_digital(time_ready), ", ".join([wp.player.display_name for wp in ready_at_time_would_plays]))
else:
string += "(%s at %s)" % (number_ready_at_time, epoch_time_to_digital(time_ready))
return string
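# Roughly what the string built above looks like with two players ready now
# and one ready later (names and time are invented):
#
#   generate_ready_at_time_messages(ready, unready, list_players=True)
#   # -> "(2. Alice, Bob) (1 at 19:30. Carol)"
#   generate_ready_at_time_messages(ready, unready)
#   # -> "(2) (1 at 19:30)"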
class WouldPlayHandler(GameExtractionMixin, ContentBasedHandler):
fragments = ["I'd play", "id play", "I'd paly", "id paly", "I’d play", "I’d paly", "I’dplay", "I’dpaly", "same to", "same"]
helper_command_list = [f"{fragments[0]} <game> - Add your name to the list of players that would play <game>."]
def get_all_responses_with_games(self, message, games):
for_time = extract_time(message.content)
game_and_players_strings = []
for game in games:
db.record_would_play(message.author, game, for_time)
ready_would_plays_for_game = db.get_ready_would_plays_for_game(game)
unready_would_plays_for_game = db.get_unready_would_plays_for_game(game)
if len(unready_would_plays_for_game) == 0:
game_and_players_strings += ["%s (%s)" % (game.name, len(game.get_ready_players()))]
else:
game_and_players_strings += ["%s %s" % (game.name, generate_ready_at_time_messages(ready_would_plays_for_game, unready_would_plays_for_game))]
messages = ["%s would play %s" % (message.author.display_name, make_sentence_from_strings(game_and_players_strings))]
for game in games:
messages += get_any_ready_messages(game)
return messages
    def get_all_responses_without_game(self, message):
        # "same"-style fragments re-use whatever was last requested; with no
        # recorded would-plays, fall back to asking the author directly.
        last_would_plays = db.get_last_would_plays_at_same_time()
        if not last_would_plays:
            return [TextResponse(message.channel, f"What? What would you play {message.author.name}?")]
        games = set(lwp.game for lwp in last_would_plays)
        return self.get_all_responses_with_games(message, games)
class StatusHandler(MentionMessageHandler):
keywords = ['status']
helper_command_list = [f"@BOT {keywords[0]} - allow the user to determine the status of games (number of players etc.)."]
def get_all_responses(self, message):
messages = ['Bot alive']
for game in get_known_games():
ready_would_plays_for_game = db.get_ready_would_plays_for_game(game)
unready_would_plays_for_game = db.get_unready_would_plays_for_game(game)
if ready_would_plays_for_game or unready_would_plays_for_game:
messages.append("%s %s" % (game.name, generate_ready_at_time_messages(ready_would_plays_for_game, unready_would_plays_for_game, list_players=True)))
return ['\n'.join(messages)]
class ClearHandler(GameExtractionMixin, MentionMessageHandler):
keywords = ['clear']
helper_command_list = [f"@BOT {keywords[0]} <game> - clear all names from the <game>'s' \"I'd play\" list."]
def get_all_responses_with_game(self, message, game):
if game:
db.clear_game(game)
return ['Cleared %s' % game]
else:
return ['No game specified!']
class CancelHandler(MentionMessageHandler):
keywords = ['cancel']
helper_command_list = [f"@BOT {keywords[0]} - Removes your name from all \"I'd play\" lists."]
def get_all_responses(self, message):
db.cancel_would_plays(message.author)
return ['Cancelled all play requests from %s' % (message.author.display_name)]
class PingHandler(GameExtractionMixin, MentionMessageHandler):
keywords = ['ping', 'p']
helper_command_list = [f"@BOT {keywords[0]} <game> - Ping all players that would currently play <game> and refresh the list for <game>."]
def get_all_responses_with_game(self, message, game):
players = game.get_players_for_next_game()
db.clear_game(game)
return ['%s - ready to play %s.' % (','.join([create_mention(p) for p in players]), game)]
class AccidentalRoleMentionHandler(MessageHandler):
def should_handle(self, message):
return 'Play Register' in message.clean_content and '<@&' in message.content
def get_all_responses(self, message):
return ['It looks like you tried to @ me but might have accidentally selected the role instead']
class QueryHandler(MentionMessageHandler):
keywords = ['query']
helper_command_list = [f"@BOT {keywords[0]} games - Determine what games are in the known_games list.",
f"@BOT {keywords[0]} <property> <game> - Determine the <property> value for <game> (i.e. min_players)."]
def get_all_responses(self, message):
mention, remainder = split_by_first_mention(message)
found_keyword, remainder = self.split_string_by_keywords(remainder)
if message_pertains_to_all_games(remainder):
return ['\n'.join([game.name for game in get_known_games()])]
else:
attribute, game_name = remainder.split(' ')[:2]
game = lookup_game_by_name_or_alias(game_name)
attribute_display = {
                'aliases': lambda z: ', '.join(z)
}
display_function = attribute_display.get(attribute, lambda x: str(x))
return ["%s: %s" % (attribute, display_function(getattr(game, attribute)))]
class AddHandler(MentionMessageHandler):
""" Called via '@bot add game <game>' or '@bot add <property> <game> <value>' """
keywords = ['add']
json_filename = os.path.join(os.path.dirname(__file__), 'known_games.json')
helper_command_list = [f"@BOT {keywords[0]} <games> - Add <game> to the known_games list.",
f"@BOT {keywords[0]} <property> <game> <value> - Add <value> to the <property> of <game> (Edits min/max_players)."]
def get_all_responses(self, message):
mention, remainder = split_by_first_mention(message)
found_keyword, remainder = self.split_string_by_keywords(remainder)
split_remainder = remainder.split(' ')
if len(split_remainder) == 1:
return ["Incorrect command: Try 'add game <game name>' or 'add <property> <game name> <value>"]
if message_pertains_to_all_games(split_remainder[0]):
new_game = ' '.join(split_remainder[1:])
if lookup_known_game_by_name_or_alias(new_game):
return ["That game already exists you absolute degenerate. Don't trigger me."]
else:
new_game_dict = get_default_game_dict_representation(new_game)
known_games_dict = read_games_dict()
known_games_dict.update(new_game_dict)
write_games_dict(known_games_dict)
return ["Congratulations - %s has been added to the known games list! Fantastic work there comrade, give yourself a pat on the back!" % new_game]
else:
property, remainder = split_remainder[0], ' '.join(split_remainder[1:])
if get_game_name_or_alias_from_message(remainder):
game_name = get_game_name_or_alias_from_message(remainder).lower()
else:
return ["Invalid game name/ alias"]
game = lookup_game_by_name_or_alias(game_name)
value = remainder[len(game_name) + 1:]
known_games_dict = read_games_dict()
if property.lower() == 'alias':
known_games_dict[game.name]['aliases'] += [value]
elif property.lower() in ['min_players', 'max_players']:
known_games_dict[game.name][property] = int(value)
else:
return ["Invalid property type"]
write_games_dict(known_games_dict)
return ["%s has been added to %s in %s" % (value, property, game.name)]
def add_commands_to_command_helper_list():
return ['\n'.join(f"`{cmd.split('-')[0]}` - {cmd.split('-')[1]}" for cmd in itertools.chain.from_iterable(handler.helper_command_list for handler in get_message_handlers()))]
class HelpHandler(MentionMessageHandler):
keywords = ['help', 'h', '?']
def get_all_responses(self, message):
return add_commands_to_command_helper_list()
``` |
{
"source": "jordanqin/qcloud-sdk-android-samples",
"score": 2
} |
#### File: cossign/cossign/cosconfig.py
```python
import os
# from app.main.cam.policy.policies import Policies
basedir = os.path.abspath(os.path.dirname(__file__))
VERSION = 'v0.0.3'
class Config(object):
def __init__(self):
pass
#
COMMON_POLICY = '''{"statement":[{"action":["name/cos:*"],"effect":"allow","resource":"*"}],"version":"2.0"}'''
    # User nickname, optional
NAME = "WANG"
    # Policy
POLICY = COMMON_POLICY
    # Validity period of the temporary credentials, in seconds
DURATION_SECOND = 1800
    # The user's secret id
SECRET_ID = ''
    # The user's secret key
SECRET_KEY = ''
PROXY_TYPE = ''
PROXY_HOST = ''
PORT = 5000
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
pass
class TestingConfig(Config):
pass
class ProductionConfig(Config):
pass
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}
``` |
{
"source": "jordan-rash/CICERO",
"score": 3
} |
#### File: home/dnanexus/cicero_annotations_to_html.py
```python
import os
import sys
import pandas as pd
def print_usage():
print("Usage: %s cicero_annotation_file.txt" % sys.argv[0])
def preprocess_dataframe(df):
# Order columns (instructions from Scott)
cols = ["chrA", "posA", "ortA",
"chrB", "posB", "ortB",
"type",
"geneA", "geneB",
"featureA", "featureB",
"matchA", "matchB",
"repeatA", "repeatB",
"coverageA", "coverageB",
"ratioA", "ratioB",
"readsA", "readsB",
"qposA", "qposB",
"total_readsA", "total_readsB",
"contig"]
return df[cols]
if __name__ == "__main__":
if len(sys.argv) != 2:
print_usage()
sys.exit(1)
tdt_file = sys.argv[1]
data = pd.read_table(tdt_file)
data = preprocess_dataframe(data)
html_data = data.to_html()
with open("results.html", "w") as f:
f.write("<html>\n")
f.write("<head>\n")
#f.write('<link rel="stylesheet" type="text/css" href="http://code.jquery.com/ui/1.9.2/themes/base/jquery-ui.css">')
f.write('<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.1/css/jquery.dataTables.css">')
f.write('<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/responsive/1.0.0/css/dataTables.responsive.css">')
f.write("</head>\n")
f.write("<body>\n")
f.write(html_data+"\n")
f.write('<script src="https://code.jquery.com/jquery-1.9.1.js"></script>')
f.write('<script src="http://code.jquery.com/ui/1.9.2/jquery-ui.js"></script>')
f.write('<script src="https://cdn.datatables.net/1.10.1/js/jquery.dataTables.min.js"></script>')
f.write('<script src="https://cdn.datatables.net/responsive/1.0.0/js/dataTables.responsive.js"></script>')
f.write('<script type="text/javascript">\n')
f.write('$(document).ready(function() {\n')
f.write(' $(".dataframe").attr("cellspacing", "0").addClass("display").DataTable({"iDisplayLength": 25});\n')
f.write('});\n')
f.write("</script>\n")
f.write("</body>\n")
f.write("</html>\n")
``` |
{
"source": "jordanrburger/keboola.app-hightouch",
"score": 2
} |
#### File: keboola.app-hightouch/src/component.py
```python
import logging
from keboola.component.base import ComponentBase
from keboola.component.exceptions import UserException
from hightouch import hightouchClient
# configuration variables
KEY_API_TOKEN = '#api_token'
KEY_ENDPOINT = 'endpoint'
KEY_SYNC_ID = 'sync_id'
# list of mandatory parameters => if some is missing,
# component will fail with readable message on initialization.
REQUIRED_PARAMETERS = [KEY_API_TOKEN, KEY_ENDPOINT, KEY_SYNC_ID]
REQUIRED_IMAGE_PARS = []
class Component(ComponentBase):
"""
Extends base class for general Python components. Initializes the CommonInterface
and performs configuration validation.
For easier debugging the data folder is picked up by default from `../data` path,
relative to working directory.
If `debug` parameter is present in the `config.json`, the default logger is set to verbose DEBUG mode.
"""
def __init__(self):
super().__init__()
def run(self):
"""
Main execution code
"""
# check for missing configuration parameters
self.validate_configuration_parameters(REQUIRED_PARAMETERS)
self.validate_image_parameters(REQUIRED_IMAGE_PARS)
params = self.configuration.parameters
endpoint = params.get(KEY_ENDPOINT)
client = hightouchClient(params.get(KEY_API_TOKEN))
if endpoint == "Run Sync":
sync_id = params.get(KEY_SYNC_ID)
response = client.run_sync(sync_id)
            logging.info(response)
if __name__ == "__main__":
try:
comp = Component()
# this triggers the run method by default and is controlled by the configuration.action parameter
comp.execute_action()
except UserException as exc:
logging.exception(exc)
exit(1)
except Exception as exc:
logging.exception(exc)
exit(2)
``` |
{
"source": "jordanreedy16/minemeld-core",
"score": 2
} |
#### File: minemeld/flask/metricsapi.py
```python
from builtins import str
import os
import os.path
import hashlib
import rrdtool
from flask import request, jsonify
import minemeld.collectd
from . import config
from .aaa import MMBlueprint
from .logger import LOG
__all__ = ['BLUEPRINT']
RRD_PATH = config.get('RRD_PATH', '/var/lib/collectd/rrd/minemeld/')
RRD_SOCKET_PATH = config.get('RRD_SOCKET_PATH', '/var/run/collectd.sock')
ALLOWED_CF = ['MAX', 'MIN', 'AVERAGE']
BLUEPRINT = MMBlueprint('metrics', __name__, url_prefix='/metrics')
def _list_metrics(prefix=None):
result = os.listdir(RRD_PATH)
if prefix is not None:
result = [m for m in result if m.startswith(prefix)]
return result
def _fetch_metric(cc, metric, type_=None,
cf='MAX', dt=86400, r=1800):
dirname = os.path.join(RRD_PATH, metric)
if type_ is None:
rrdname = os.listdir(dirname)[0]
type_ = rrdname.replace('.rrd', '')
else:
rrdname = type_+'.rrd'
if rrdname not in os.listdir(dirname):
raise RuntimeError('Unknown metric type')
cc.flush(identifier='minemeld/%s/%s' % (metric, type_))
(start, end, step), metrics, data = rrdtool.fetch(
str(os.path.join(dirname, rrdname)),
cf,
'--start', '-%d' % dt,
'--resolution', '%d' % r
)
result = []
if type_ != 'minemeld_delta':
curts = start
for v in data:
result.append([curts, v[0]])
curts += step
else:
curts = start+step
ov = data[0][0]
for v in data[1:]:
cv = v[0]
if cv is not None and ov is not None:
if cv >= ov:
cv = cv - ov
result.append([curts, cv])
ov = v[0]
curts += step
return result
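# Sketch of the two shapes _fetch_metric returns (timestamps and values are
# invented). Plain types pass RRD samples through as [ts, value] pairs; for
# 'minemeld_delta' consecutive samples are differenced instead:
#
#   raw counter samples: (t0, 100), (t1, 130), (t2, 150)
#   returned deltas:     [t1, 30], [t2, 20]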
@BLUEPRINT.route('/', read_write=False)
def get_metrics():
return jsonify(result=_list_metrics())
@BLUEPRINT.route('/minemeld/<nodetype>', read_write=False)
def get_node_type_metrics(nodetype):
cf = str(request.args.get('cf', 'MAX')).upper()
if cf not in ALLOWED_CF:
return jsonify(error={'message': 'Unknown function'}), 400
try:
dt = int(request.args.get('dt', '86400'))
except ValueError:
return jsonify(error={'message': 'Invalid delta'}), 400
if dt < 0:
return jsonify(error={'message': 'Invalid delta'}), 400
try:
resolution = int(request.args.get('r', '1800'))
except ValueError:
return jsonify(error={'message': 'Invalid resolution'}), 400
if resolution < 0:
return jsonify(error={'message': 'Invalid resolution'}), 400
type_ = request.args.get('t', None)
metrics = _list_metrics(prefix='minemeld.'+nodetype+'.')
cc = minemeld.collectd.CollectdClient(RRD_SOCKET_PATH)
result = []
for m in metrics:
v = _fetch_metric(cc, m, cf=cf, dt=dt, r=resolution, type_=type_)
_, _, mname = m.split('.', 2)
result.append({
'metric': mname,
'values': v
})
return jsonify(result=result)
@BLUEPRINT.route('/minemeld', read_write=False)
def get_global_metrics():
cf = str(request.args.get('cf', 'MAX')).upper()
if cf not in ALLOWED_CF:
return jsonify(error={'message': 'Unknown function'}), 400
try:
dt = int(request.args.get('dt', '86400'))
except ValueError:
return jsonify(error={'message': 'Invalid delta'}), 400
if dt < 0:
return jsonify(error={'message': 'Invalid delta'}), 400
try:
resolution = int(request.args.get('r', '1800'))
except ValueError:
return jsonify(error={'message': 'Invalid resolution'}), 400
if resolution < 0:
return jsonify(error={'message': 'Invalid resolution'}), 400
type_ = request.args.get('t', None)
metrics = _list_metrics(prefix='minemeld.')
metrics = [m for m in metrics if 'minemeld.sources' not in m]
metrics = [m for m in metrics if 'minemeld.outputs' not in m]
metrics = [m for m in metrics if 'minemeld.transits' not in m]
cc = minemeld.collectd.CollectdClient(RRD_SOCKET_PATH)
result = []
for m in metrics:
v = _fetch_metric(cc, m, cf=cf, dt=dt, r=resolution, type_=type_)
_, mname = m.split('.', 1)
result.append({
'metric': mname,
'values': v
})
return jsonify(result=result)
@BLUEPRINT.route('/<node>', read_write=False)
def get_node_metrics(node):
cf = str(request.args.get('cf', 'MAX')).upper()
if cf not in ALLOWED_CF:
return jsonify(error={'message': 'Unknown function'}), 400
try:
dt = int(request.args.get('dt', '86400'))
except ValueError:
return jsonify(error={'message': 'Invalid delta'}), 400
if dt < 0:
return jsonify(error={'message': 'Invalid delta'}), 400
try:
resolution = int(request.args.get('r', '1800'))
except ValueError:
return jsonify(error={'message': 'Invalid resolution'}), 400
if resolution < 0:
return jsonify(error={'message': 'Invalid resolution'}), 400
type_ = request.args.get('t', None)
    node = hashlib.md5(node.encode('utf-8')).hexdigest()[:10]
metrics = _list_metrics(prefix=node+'.')
cc = minemeld.collectd.CollectdClient(RRD_SOCKET_PATH)
result = []
for m in metrics:
v = _fetch_metric(cc, m, cf=cf, dt=dt, r=resolution, type_=type_)
_, mname = m.split('.', 1)
result.append({
'metric': mname,
'values': v
})
return jsonify(result=result)
@BLUEPRINT.route('/<node>/<metric>', methods=['GET'], read_write=False)
def get_metric(node, metric):
cf = str(request.args.get('cf', 'MAX')).upper()
if cf not in ALLOWED_CF:
return jsonify(error={'message': 'Unknown function'}), 400
try:
dt = int(request.args.get('dt', '86400'))
except ValueError:
return jsonify(error={'message': 'Invalid delta'}), 400
if dt < 0:
return jsonify(error={'message': 'Invalid delta'}), 400
try:
resolution = int(request.args.get('r', '1800'))
except ValueError:
return jsonify(error={'message': 'Invalid resolution'}), 400
if resolution < 0:
return jsonify(error={'message': 'Invalid resolution'}), 400
type_ = request.args.get('t', 'minemeld_counter')
    node = hashlib.md5(node.encode('utf-8')).hexdigest()[:10]
metric = node+'.'+metric
if metric not in _list_metrics():
return jsonify(error={'message': 'Unknown metric'}), 404
cc = minemeld.collectd.CollectdClient(RRD_SOCKET_PATH)
try:
result = _fetch_metric(cc, metric, type_=type_, cf=cf,
dt=dt, r=resolution)
except RuntimeError as e:
return jsonify(error={'message': str(e)}), 400
return jsonify(result=result)
```
#### File: minemeld/flask/session.py
```python
from builtins import str
import os
from datetime import timedelta
from uuid import uuid4
import ujson
import redis
import werkzeug.datastructures
import flask.sessions
from .logger import LOG
SESSION_EXPIRATION_ENV = 'SESSION_EXPIRATION'
DEFAULT_SESSION_EXPIRATION = 10
class RedisSession(werkzeug.datastructures.CallbackDict, flask.sessions.SessionMixin):
def __init__(self, initial=None, sid=None, new=False):
def on_update(self):
self.modified = True
werkzeug.datastructures.CallbackDict.__init__(self, initial, on_update)
self.sid = sid
self.new = new
self.modified = False
class RedisSessionInterface(flask.sessions.SessionInterface):
serializer = ujson
session_class = RedisSession
def __init__(self, redis_=None, prefix='mm-session:'):
if redis_ is None:
redis_ = redis.StrictRedis()
self.redis = redis_
self.prefix = prefix
        self.expiration_delta = timedelta(
            minutes=int(os.environ.get(
                SESSION_EXPIRATION_ENV,
                DEFAULT_SESSION_EXPIRATION
            ))
        )
def generate_sid(self):
return str(uuid4())
    def get_redis_expiration_time(self, app, session):
        # Use the configured expiration rather than a hardcoded 10 minutes.
        return self.expiration_delta
def open_session(self, app, request):
LOG.debug(
'redis session connection pool: in use: {} available: {}'.format(
len(self.redis.connection_pool._in_use_connections),
len(self.redis.connection_pool._available_connections)
)
)
sid = request.cookies.get(app.session_cookie_name)
if not sid:
sid = self.generate_sid()
return self.session_class(sid=sid, new=True)
val = self.redis.get(self.prefix + sid)
if val is not None:
data = self.serializer.loads(val)
return self.session_class(data, sid=sid)
return self.session_class(sid=sid, new=True)
def save_session(self, app, session, response):
domain = self.get_cookie_domain(app)
if 'user_id' not in session:
self.redis.delete(self.prefix + session.sid)
if session.modified:
response.delete_cookie(
app.session_cookie_name,
domain=domain
)
return
redis_exp = self.get_redis_expiration_time(app, session)
cookie_exp = self.get_expiration_time(app, session)
val = self.serializer.dumps(dict(session))
self.redis.setex(
self.prefix + session.sid,
int(redis_exp.total_seconds()),
val
)
response.set_cookie(
app.session_cookie_name,
session.sid,
expires=cookie_exp,
httponly=True,
domain=domain
)
def init_app(app, redis_url):
redis_cp = redis.ConnectionPool.from_url(
redis_url,
max_connections=int(os.environ.get('REDIS_SESSIONS_MAX_CONNECTIONS', 20))
)
app.session_interface = RedisSessionInterface(
redis_=redis.StrictRedis(connection_pool=redis_cp)
)
app.config.update(
SESSION_COOKIE_NAME='mm-session',
SESSION_COOKIE_SECURE=True
)
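# A minimal wiring sketch (the Redis URL is hypothetical):
#
#   from flask import Flask
#   app = Flask(__name__)
#   init_app(app, 'redis://localhost:6379/0')
#   # every request now keeps its session in Redis under 'mm-session:<sid>'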
```
#### File: minemeld/flask/taxiiutils.py
```python
import functools
from flask import request
from flask import make_response
from .mmrpc import MMMaster
from .logger import LOG
def taxii_make_response(m11):
h = {
'Content-Type': "application/xml",
'X-TAXII-Content-Type': 'urn:taxii.mitre.org:message:xml:1.1',
'X-TAXII-Protocol': 'urn:taxii.mitre.org:protocol:http:1.0'
}
r = make_response((m11.to_xml(pretty_print=True), 200, h))
return r
def taxii_make_response_10(m10):
h = {
'Content-Type': "application/xml",
'X-TAXII-Content-Type': 'urn:taxii.mitre.org:message:xml:1.0',
'X-TAXII-Protocol': 'urn:taxii.mitre.org:protocol:http:1.0'
}
r = make_response((m10.to_xml(pretty_print=True), 200, h))
return r
def taxii_check(f):
@functools.wraps(f)
def check(*args, **kwargs):
tct = request.headers.get('X-TAXII-Content-Type', None)
if tct not in [
'urn:taxii.mitre.org:message:xml:1.1',
'urn:taxii.mitre.org:message:xml:1.0'
]:
return 'Invalid TAXII Headers', 400
tct = request.headers.get('X-TAXII-Protocol', None)
if tct not in [
'urn:taxii.mitre.org:protocol:http:1.0',
'urn:taxii.mitre.org:protocol:https:1.0'
]:
return 'Invalid TAXII Headers', 400
tct = request.headers.get('X-TAXII-Services', None)
if tct not in [
'urn:taxii.mitre.org:services:1.1',
'urn:taxii.mitre.org:services:1.0'
]:
return 'Invalid TAXII Headers', 400
return f(*args, **kwargs)
return check
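# Hypothetical usage: the decorator wraps a Flask view so requests with
# malformed TAXII headers are rejected before the handler runs.
#
#   @app.route('/taxii-poll', methods=['POST'])
#   @taxii_check
#   def poll_service():
#       ...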
def get_taxii_feeds():
# check if feed exists
status = MMMaster.status()
status = status.get('result', None)
if status is None:
raise RuntimeError('Error retrieving engine status')
result = []
for node, node_status in list(status.items()):
class_ = node_status.get('class', None)
if class_ != 'minemeld.ft.taxii.DataFeed':
continue
_, _, feedname = node.split(':', 2)
result.append(feedname)
return result
```
#### File: minemeld/ft/csv.py
```python
from __future__ import absolute_import
from builtins import map
from builtins import str
import logging
import re
import os.path
import itertools
import csv
import requests
import yaml
import shutil
from urllib3.response import GzipDecoder
from . import basepoller
LOG = logging.getLogger(__name__)
class CSVFT(basepoller.BasePollerFT):
"""Implements class for miners of csv feeds over http/https.
**Config parameters**
:url: URL of the feed.
:polling_timeout: timeout of the polling request in seconds.
Default: 20
:verify_cert: boolean, if *true* feed HTTPS server certificate is
verified. Default: *true*
:ignore_regex: Python regular expression for lines that should be
ignored. Default: *null*
:fieldnames: list of field names in the file. If *null* the values
in the first row of the file are used as names. Default: *null*
:delimiter: see `csv Python module <https://docs.python.org/2/library/csv.html#dialects-and-formatting-parameters>`_.
Default: ,
:doublequote: see `csv Python module <https://docs.python.org/2/library/csv.html#dialects-and-formatting-parameters>`_.
Default: true
:escapechar: see `csv Python module <https://docs.python.org/2/library/csv.html#dialects-and-formatting-parameters>`_.
Default: null
:quotechar: see `csv Python module <https://docs.python.org/2/library/csv.html#dialects-and-formatting-parameters>`_.
Default: "
:skipinitialspace: see `csv Python module <https://docs.python.org/2/library/csv.html#dialects-and-formatting-parameters>`_.
Default: false
Example:
Example config in YAML::
url: https://sslbl.abuse.ch/blacklist/sslipblacklist.csv
ignore_regex: '^#'
fieldnames:
- indicator
- port
- sslblabusech_type
Args:
name (str): node name, should be unique inside the graph
chassis (object): parent chassis instance
config (dict): node config.
"""
def configure(self):
super(CSVFT, self).configure()
self.polling_timeout = self.config.get('polling_timeout', 20)
self.url = self.config.get('url', None)
self.verify_cert = self.config.get('verify_cert', True)
self.username = self.config.get('username', None)
self.password = self.config.get('password', None)
self.ignore_regex = self.config.get('ignore_regex', None)
if self.ignore_regex is not None:
self.ignore_regex = re.compile(self.ignore_regex)
self.fieldnames = self.config.get('fieldnames', None)
self.dialect = {
'delimiter': self.config.get('delimiter', ','),
'doublequote': self.config.get('doublequote', True),
'escapechar': self.config.get('escapechar', None),
'quotechar': self.config.get('quotechar', '"'),
'skipinitialspace': self.config.get('skipinitialspace', False)
}
self.decode_gzip = self.config.get('decode_gzip', False)
self.side_config_path = self.config.get('side_config', None)
if self.side_config_path is None:
self.side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'%s_side_config.yml' % self.name
)
self._load_side_config()
def _load_side_config(self):
try:
with open(self.side_config_path, 'r') as f:
sconfig = yaml.safe_load(f)
except Exception as e:
LOG.error('%s - Error loading side config: %s', self.name, str(e))
return
username = sconfig.get('username', None)
if username is not None:
self.username = username
LOG.info('%s - username set', self.name)
password = sconfig.get('password', None)
if password is not None:
self.password = password
LOG.info('%s - password set', self.name)
def _process_item(self, item):
item.pop(None, None) # I love this
indicator = item.pop('indicator', None)
return [[indicator, item]]
def _build_request(self, now):
auth = None
if self.username is not None and self.password is not None:
auth = (self.username, self.password)
r = requests.Request(
'GET',
self.url,
auth=auth
)
return r.prepare()
def _build_iterator(self, now):
def _debug(x):
LOG.info('{!r}'.format(x))
return x
_session = requests.Session()
prepreq = self._build_request(now)
# this is to honour the proxy environment variables
rkwargs = _session.merge_environment_settings(
prepreq.url,
{}, None, None, None # defaults
)
rkwargs['stream'] = True
rkwargs['verify'] = self.verify_cert
rkwargs['timeout'] = self.polling_timeout
r = _session.send(prepreq, **rkwargs)
try:
r.raise_for_status()
except:
LOG.debug('%s - exception in request: %s %s',
self.name, r.status_code, r.content)
raise
response = r.raw
if self.decode_gzip:
response = self._gzipped_line_splitter(r)
if self.ignore_regex is not None:
response = [x for x in response if self.ignore_regex.match(x) is None]
csvreader = csv.DictReader(
response,
fieldnames=self.fieldnames,
**self.dialect
)
return csvreader
def _gzipped_line_splitter(self, response):
# same logic used in urllib32.response.iter_lines
pending = None
decoder = GzipDecoder()
chunks = list(map(
decoder.decompress,
response.iter_content(chunk_size=1024*1024)
))
for chunk in chunks:
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
def hup(self, source=None):
LOG.info('%s - hup received, reload side config', self.name)
self._load_side_config()
super(CSVFT, self).hup(source=source)
@staticmethod
def gc(name, config=None):
basepoller.BasePollerFT.gc(name, config=config)
shutil.rmtree('{}_temp'.format(name), ignore_errors=True)
side_config_path = None
if config is not None:
side_config_path = config.get('side_config', None)
if side_config_path is None:
side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'{}_side_config.yml'.format(name)
)
try:
os.remove(side_config_path)
except:
pass
```
#### File: minemeld/ft/threatq.py
```python
from builtins import str
import requests
import logging
import os
import yaml
import netaddr
from . import basepoller
LOG = logging.getLogger(__name__)
class Export(basepoller.BasePollerFT):
"""Implements class for Miners of ThreatQ Export API.
**Config parameters**
:side_config (str): path to the side config file, defaults
to CONFIGDIR/<node name>_side_config.yml
:polling_timeout: timeout of the polling request in seconds.
Default: 20
**Side Config parameters**
:url: URL of the feed.
:verify_cert: boolean, if *true* feed HTTPS server certificate is
verified. Default: *true*
Example:
Example side config in YAML::
url: https://10.5.172.225/api/export/6e472a434efe34ceb5a99ff6c9a8124e/?token=<PASSWORD>
verify_cert: false
Args:
name (str): node name, should be unique inside the graph
chassis (object): parent chassis instance
config (dict): node config.
"""
def configure(self):
super(Export, self).configure()
self.polling_timeout = self.config.get('polling_timeout', 20)
self.side_config_path = self.config.get('side_config', None)
if self.side_config_path is None:
self.side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'%s_side_config.yml' % self.name
)
self._load_side_config()
def _load_side_config(self):
try:
with open(self.side_config_path, 'r') as f:
sconfig = yaml.safe_load(f)
except Exception as e:
LOG.error('%s - Error loading side config: %s', self.name, str(e))
return
self.url = sconfig.get('url', None)
if self.url is not None:
LOG.info('%s - url set', self.name)
self.verify_cert = sconfig.get('verify_cert', True)
def _process_item(self, line):
line = line.strip()
if not line:
return [[None, None]]
itype, indicator = line.split(',', 1)
attributes = {}
if itype == 'IP Address':
ipaddr = netaddr.IPAddress(indicator)
if ipaddr.version == 4:
attributes['type'] = 'IPv4'
elif ipaddr.version == 6:
attributes['type'] = 'IPv6'
else:
LOG.error(
'%s - %s: unknown IP version %s',
line,
self.name,
ipaddr.version
)
return [[None, None]]
elif itype == 'CIDR Block':
ipaddr = netaddr.IPNetwork(indicator)
if ipaddr.version == 4:
attributes['type'] = 'IPv4'
elif ipaddr.version == 6:
attributes['type'] = 'IPv6'
else:
LOG.error(
'%s - %s: unknown IP version %s',
line,
self.name,
ipaddr.version
)
return [[None, None]]
elif itype == 'FQDN':
attributes['type'] = 'domain'
elif itype == 'URL':
attributes['type'] = 'URL'
else:
LOG.error(
'%s - unknown indicator type %s - ignored',
self.name,
itype
)
return [[None, None]]
return [[indicator, attributes]]
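    # Sketch of the export line format _process_item expects (values invented):
    #   "IP Address,198.51.100.7" -> [['198.51.100.7', {'type': 'IPv4'}]]
    #   "FQDN,bad.example.com"    -> [['bad.example.com', {'type': 'domain'}]]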
def _build_iterator(self, now):
if self.url is None:
raise RuntimeError(
'%s - url not set, poll not performed' % self.name
)
rkwargs = dict(
stream=True,
verify=self.verify_cert,
timeout=self.polling_timeout
)
r = requests.get(
self.url,
**rkwargs
)
try:
r.raise_for_status()
except:
LOG.debug('%s - exception in request: %s %s',
self.name, r.status_code, r.content)
raise
result = r.iter_lines()
return result
def hup(self, source=None):
LOG.info('%s - hup received, reload side config', self.name)
self._load_side_config()
super(Export, self).hup(source=source)
@staticmethod
def gc(name, config=None):
basepoller.BasePollerFT.gc(name, config=config)
side_config_path = None
if config is not None:
side_config_path = config.get('side_config', None)
if side_config_path is None:
side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'{}_side_config.yml'.format(name)
)
try:
os.remove(side_config_path)
except:
pass
```
#### File: minemeld/ft/tmt.py
```python
from __future__ import absolute_import
from __future__ import division
from builtins import str
from past.utils import old_div
import logging
import requests
import os
import yaml
import itertools
import csv
import gevent
import shutil
from . import basepoller
from . import table
from .utils import interval_in_sec
LOG = logging.getLogger(__name__)
class DTIAPI(basepoller.BasePollerFT):
_AGE_OUT_BASES = ['first_seen', 'last_seen', 'tmt_last_sample_timestamp']
_DEFAULT_AGE_OUT_BASE = 'tmt_last_sample_timestamp'
def __init__(self, name, chassis, config):
self.ttable = None
super(DTIAPI, self).__init__(name, chassis, config)
def configure(self):
super(DTIAPI, self).configure()
self.polling_timeout = self.config.get('polling_timeout', 120)
self.verify_cert = self.config.get('verify_cert', True)
self.dialect = {
'delimiter': self.config.get('delimiter', ','),
'doublequote': self.config.get('doublequote', True),
'escapechar': self.config.get('escapechar', None),
'quotechar': self.config.get('quotechar', '"'),
'skipinitialspace': self.config.get('skipinitialspace', False)
}
self.include_suspicious = self.config.get('include_suspicious', True)
initial_interval = self.config.get('initial_interval', '2d')
self.initial_interval = interval_in_sec(initial_interval)
if self.initial_interval is None:
LOG.error(
'%s - wrong initial_interval format: %s',
self.name, initial_interval
)
self.initial_interval = interval_in_sec('2d')
self.source_name = 'themediatrust.dti'
self.api_key = None
self.side_config_path = self.config.get('side_config', None)
if self.side_config_path is None:
self.side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'%s_side_config.yml' % self.name
)
self._load_side_config()
def _load_side_config(self):
try:
with open(self.side_config_path, 'r') as f:
sconfig = yaml.safe_load(f)
except Exception as e:
LOG.error('%s - Error loading side config: %s', self.name, str(e))
return
self.api_key = sconfig.get('api_key', None)
if self.api_key is not None:
LOG.info('%s - authorization code set', self.name)
def _process_row(self, row):
ip = row.pop('ip_addres', None)
if ip == '0.0.0.0':
ip = None
domain = row.pop('host_name', None)
value = {}
for k, v in list(row.items()):
if k == 'last_sample_timestamp':
value['tmt_last_sample_timestamp'] = int(v)*1000
continue
key = k
if not k.startswith('tmt'):
key = 'tmt_%s' % k
value[key] = [v]
return ip, domain, value
def _process_item(self, item):
type_, indicator = item[0].split(':', 1)
value = {}
for k, v in list(item[1].items()):
value[k] = v
value['type'] = type_
return [[indicator, value]]
def _tmerge(self, indicator, value):
ov = self.ttable.get(indicator)
if ov is None:
self.ttable.put(indicator, value)
return
        for k, v in list(value.items()):
            if k == 'tmt_last_sample_timestamp':
                if v > ov[k]:  # confusing, this is just for PEP8 sake
                    ov[k] = v
                continue
            # v is a single-element list (see _process_row): merge the scalar,
            # not the list itself, to avoid nesting lists on every poll.
            if v[0] not in ov[k]:
                ov[k].append(v[0])
self.ttable.put(indicator, ov)
def _build_iterator(self, now):
if self.api_key is None:
raise RuntimeError('%s - api_key not set' % self.name)
if self.ttable is not None:
self.ttable.close()
self.ttable = None
self.ttable = table.Table(self.name+'_temp', truncate=True)
last_fetch = self.last_run
if last_fetch is None:
last_fetch = int(old_div(now,1000)) - self.initial_interval
params = dict(
key=self.api_key,
action='fjord_base',
include_suspicious=(1 if self.include_suspicious else 0),
last_fetch=last_fetch
)
rkwargs = dict(
stream=True,
verify=self.verify_cert,
timeout=self.polling_timeout,
params=params
)
r = requests.get(
'https://www.themediatrust.com/api',
**rkwargs
)
try:
r.raise_for_status()
except:
LOG.debug('%s - exception in request: %s %s',
self.name, r.status_code, r.content)
raise
response = [x for x in r.raw if not x.startswith('got commandoptions')]
csvreader = csv.DictReader(
response,
**self.dialect
)
for row in csvreader:
gevent.sleep(0)
ip, domain, value = self._process_row(row)
if ip is None and domain is None:
continue
if ip is not None:
self._tmerge('IPv4:%s' % ip, value)
if domain is not None:
self._tmerge('domain:%s' % domain, value)
return self.ttable.query(include_value=True)
def hup(self, source=None):
LOG.info('%s - hup received, reload side config', self.name)
self._load_side_config()
super(DTIAPI, self).hup(source=source)
@staticmethod
def gc(name, config=None):
basepoller.BasePollerFT.gc(name, config=config)
shutil.rmtree('{}_temp'.format(name), ignore_errors=True)
side_config_path = None
if config is not None:
side_config_path = config.get('side_config', None)
if side_config_path is None:
side_config_path = os.path.join(
os.environ['MM_CONFIG_DIR'],
'{}_side_config.yml'.format(name)
)
try:
os.remove(side_config_path)
except:
pass
```
#### File: packages/gevent_openssl/SSL.py
```python
from builtins import object
import logging
import OpenSSL.SSL
from gevent.socket import wait_read, wait_write
_real_connection = OpenSSL.SSL.Connection
LOG = logging.getLogger(__name__)
class Connection(object):
"""OpenSSL Connection wrapper
"""
_reverse_mapping = _real_connection._reverse_mapping
def __init__(self, context, sock):
self._context = context
self._sock = sock
self._connection = _real_connection(context, sock)
def __getattr__(self, attr):
return getattr(self._connection, attr)
def __iowait(self, io_func, *args, **kwargs):
fd = self._sock.fileno()
timeout = self._sock.gettimeout()
while True:
try:
return io_func(*args, **kwargs)
except (OpenSSL.SSL.WantReadError, OpenSSL.SSL.WantX509LookupError):
wait_read(fd, timeout=timeout)
except OpenSSL.SSL.WantWriteError:
wait_write(fd, timeout=timeout)
def accept(self):
sock, addr = self._sock.accept()
return Connection(self._context, sock), addr
def do_handshake(self):
# even if some sites are super sensible
# to handshake timeouts (to avoid DDoS),
# we have to make handshake not blocking to avoid issues
# with firewalls or other middle boxes dropping the connection
return self.__iowait(self._connection.do_handshake)
def connect(self, *args, **kwargs):
return self.__iowait(self._connection.connect, *args, **kwargs)
def send(self, data, flags=0):
return self.__send(self._connection.send, data, flags)
def sendall(self, data, flags=0):
# see https://github.com/mjs/gevent_openssl/issues/12
# Note: all of the types supported by OpenSSL's Connection.sendall,
# basestring, memoryview, and buffer, support len(...) and slicing,
# so they are safe to use here.
while len(data) > 0:
res = self.send(data, flags)
data = data[res:]
def __send(self, send_method, data, flags=0):
try:
return self.__iowait(send_method, data, flags)
        except OpenSSL.SSL.SysCallError as e:
            if e.args[0] == -1 and not data:
# errors when writing empty strings are expected and can be
# ignored
return 0
raise
def recv(self, bufsiz, flags=0):
pending = self._connection.pending()
if pending:
return self._connection.recv(min(pending, bufsiz))
try:
return self.__iowait(self._connection.recv, bufsiz, flags)
except OpenSSL.SSL.ZeroReturnError:
return ''
except OpenSSL.SSL.SysCallError as e:
            if e.args[0] == -1 and 'Unexpected EOF' in e.args[1]:
# errors when reading empty strings are expected and can be
# ignored
return ''
raise
def shutdown(self):
return self.__iowait(self._connection.shutdown)
```
#### File: minemeld-core/tests/test_traced_storage.py
```python
from __future__ import print_function
import unittest
import tempfile
import shutil
import random
import time
import mock
import logging
from nose.plugins.attrib import attr
import minemeld.traced.storage
import traced_mock
TABLENAME = tempfile.mktemp(prefix='minemeld.traced.storagetest')
LOG = logging.getLogger(__name__)
class MineMeldTracedStorage(unittest.TestCase):
def setUp(self):
traced_mock.table_cleanup()
try:
shutil.rmtree(TABLENAME)
except:
pass
def tearDown(self):
traced_mock.table_cleanup()
try:
shutil.rmtree(TABLENAME)
except:
pass
def test_table_constructor(self):
self.assertRaises(
minemeld.traced.storage.TableNotFound,
minemeld.traced.storage.Table, TABLENAME, create_if_missing=False
)
table = minemeld.traced.storage.Table(TABLENAME, create_if_missing=True)
self.assertEqual(table.max_counter, -1)
table.close()
table = None
table = minemeld.traced.storage.Table(TABLENAME, create_if_missing=False)
self.assertEqual(table.max_counter, -1)
table.close()
table = None
def test_table_write(self):
table = minemeld.traced.storage.Table(TABLENAME, create_if_missing=True)
table.put('%016x' % 0, 'value0')
self.assertEqual(table.max_counter, 0)
table.close()
table = None
table = minemeld.traced.storage.Table(TABLENAME, create_if_missing=False)
iterator = table.backwards_iterator(1, 0xFFFFFFFFFFFFFFFF)
ts, line = next(iterator)
self.assertEqual(line, 'value0')
self.assertEqual(int(ts[:16], 16), 0)
self.assertEqual(int(ts[16:], 16), 0)
self.assertRaises(StopIteration, next, iterator)
table.close()
table = None
def test_table_references(self):
table = minemeld.traced.storage.Table(TABLENAME, create_if_missing=True)
self.assertEqual(table.ref_count(), 0)
table.add_reference('ref1')
self.assertEqual(table.ref_count(), 1)
table.add_reference('ref2')
self.assertEqual(table.ref_count(), 2)
table.remove_reference('ref1')
self.assertEqual(table.ref_count(), 1)
table.remove_reference('ref1')
self.assertEqual(table.ref_count(), 1)
table.remove_reference('ref2')
self.assertEqual(table.ref_count(), 0)
def test_table_oldest(self):
old_ = '%016x' % (3*86400)
new_ = '%016x' % (4*86400)
oldest = minemeld.traced.storage.Table.oldest_table()
self.assertEqual(oldest, None)
table = minemeld.traced.storage.Table(old_, create_if_missing=True)
table.close()
table = minemeld.traced.storage.Table(new_, create_if_missing=True)
table.close()
oldest = minemeld.traced.storage.Table.oldest_table()
self.assertEqual(oldest, old_)
shutil.rmtree(old_)
shutil.rmtree(new_)
def test_store_simple(self):
store = minemeld.traced.storage.Store()
store.stop()
self.assertEqual(len(store.current_tables), 0)
@mock.patch.object(minemeld.traced.storage, 'Table', side_effect=traced_mock.table_factory)
def test_store_write(self, table_mock):
store = minemeld.traced.storage.Store()
store.write(0*86400*1000, 'log0')
self.assertEqual(traced_mock.MOCK_TABLES[0].name, '%016x' % 0)
store.write(1*86400*1000, 'log1')
self.assertEqual(traced_mock.MOCK_TABLES[1].name, '%016x' % (86400*1))
store.write(2*86400*1000, 'log2')
self.assertEqual(traced_mock.MOCK_TABLES[2].name, '%016x' % (86400*2))
store.write(3*86400*1000, 'log3')
self.assertEqual(traced_mock.MOCK_TABLES[3].name, '%016x' % (86400*3))
store.write(4*86400*1000, 'log4')
self.assertEqual(traced_mock.MOCK_TABLES[4].name, '%016x' % (86400*4))
store.write(5*86400*1000, 'log5')
self.assertEqual(traced_mock.MOCK_TABLES[5].name, '%016x' % (86400*5))
self.assertNotIn('%016x' % 0, store.current_tables)
store.write(6*86400*1000, 'log6')
self.assertEqual(traced_mock.MOCK_TABLES[6].name, '%016x' % (86400*6))
self.assertNotIn('%016x' % 86400, store.current_tables)
store.stop()
self.assertEqual(len(store.current_tables), 0)
@mock.patch.object(minemeld.traced.storage, 'Table', side_effect=traced_mock.table_factory)
def test_store_iterate_backwards(self, table_mock):
_oldest_table_mock = mock.MagicMock(side_effect=traced_mock.MockTable.oldest_table)
table_mock.attach_mock(_oldest_table_mock, 'oldest_table')
store = minemeld.traced.storage.Store()
store.write(1*86400*1000, 'log0')
store.write(2*86400*1000, 'log1')
store.write(3*86400*1000, 'log2')
store.write(4*86400*1000, 'log3')
store.write(5*86400*1000, 'log4')
self.assertEqual(minemeld.traced.storage.Table.oldest_table(), '%016x' % 86400)
iterator = store.iterate_backwards(
ref='test-iter1',
timestamp=6*86400*1000,
counter=0xFFFFFFFFFFFFFFFF
)
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-07')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-06')
self.assertEqual(next(iterator)['log'], 'log4')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-05')
self.assertEqual(next(iterator)['log'], 'log3')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-04')
self.assertEqual(next(iterator)['log'], 'log2')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-03')
self.assertEqual(next(iterator)['log'], 'log1')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-02')
self.assertEqual(next(iterator)['log'], 'log0')
self.assertEqual(next(iterator)['msg'], 'No more logs to check')
self.assertRaises(StopIteration, next, iterator)
store.stop()
store.stop() # just for coverage
@mock.patch.object(minemeld.traced.storage, 'Table', side_effect=traced_mock.table_factory)
def test_store_iterate_backwards_2(self, table_mock):
_oldest_table_mock = mock.MagicMock(side_effect=traced_mock.MockTable.oldest_table)
table_mock.attach_mock(_oldest_table_mock, 'oldest_table')
store = minemeld.traced.storage.Store()
store.write(0*86400*1000, 'log0')
store.write(2*86400*1000, 'log1')
self.assertEqual(minemeld.traced.storage.Table.oldest_table(), '%016x' % 0)
iterator = store.iterate_backwards(
ref='test-iter1',
timestamp=3*86400*1000,
counter=0xFFFFFFFFFFFFFFFF
)
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-04')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-03')
self.assertEqual(next(iterator)['log'], 'log1')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-02')
self.assertEqual(next(iterator)['msg'], 'Checking 1970-01-01')
self.assertEqual(next(iterator)['log'], 'log0')
self.assertEqual(next(iterator)['msg'], 'We haved reached the origins of time')
self.assertRaises(StopIteration, next, iterator)
store.stop()
@mock.patch.object(minemeld.traced.storage, 'Table', side_effect=traced_mock.table_factory)
def test_store_iterate_backwards_empty(self, table_mock):
_oldest_table_mock = mock.MagicMock(side_effect=traced_mock.MockTable.oldest_table)
table_mock.attach_mock(_oldest_table_mock, 'oldest_table')
store = minemeld.traced.storage.Store()
self.assertEqual(minemeld.traced.storage.Table.oldest_table(), None)
iterator = store.iterate_backwards(
ref='test-iter1',
timestamp=3*86400*1000,
counter=0xFFFFFFFFFFFFFFFF
)
self.assertEqual(next(iterator)['msg'], 'No more logs to check')
self.assertRaises(StopIteration, next, iterator)
table_mock.assert_not_called()
store.stop()
@attr('slow')
def test_stress_1(self):
num_lines = 200000
store = minemeld.traced.storage.Store()
t1 = time.time()
        for j in range(num_lines):
value = '{ "log": %d }' % random.randint(0, 0xFFFFFFFF)
t2 = time.time()
dt = t2-t1
t1 = time.time()
        for j in range(num_lines):
value = '{ "log": %d }' % random.randint(0, 0xFFFFFFFF)
store.write(j, value)
t2 = time.time()
print("TIME: Inserted %d lines in %d sec" % (num_lines, (t2-t1-dt)))
store.stop()
shutil.rmtree('1970-01-01')
```
#### File: minemeld-core/tests/traced_mock.py
```python
import gevent
import gevent.event
import logging
from minemeld.traced.storage import TableNotFound
LOG = logging.getLogger(__name__)
CLOCK = -1
def _get_clock():
global CLOCK
CLOCK += 1
return CLOCK
MOCK_TABLES = []
class MockTable(object):
def __init__(self, name, create_if_missing=True):
self.name = name
self.create_if_missing = create_if_missing
self.last_used = None
self.refs = []
self.db_open = True
self.db = {}
self.max_counter = -1
def add_reference(self, refid):
self.refs.append(refid)
def remove_reference(self, refid):
try:
self.refs.remove(refid)
except ValueError:
pass
def ref_count(self):
return len(self.refs)
def put(self, key, value):
self.last_used = _get_clock()
self.max_counter += 1
new_max_counter = '%016x' % self.max_counter
self.db[key+new_max_counter] = value
    def backwards_iterator(self, timestamp, counter):
        # Keys sort lexicographically as '<timestamp><counter>' hex pairs,
        # mirroring the real Table's key layout.
        starting_key = '%016x%016x' % (timestamp, counter)
        items = [[k, v] for k, v in self.db.items() if k <= starting_key]
        return sorted(items, key=lambda x: x[0], reverse=True)
def close(self):
self.db_open = False
@staticmethod
def oldest_table():
tables = [t.name for t in MOCK_TABLES]
LOG.debug(tables)
if len(tables) == 0:
return None
return sorted(tables)[0]
def table_factory(name, create_if_missing=True):
table = next((t for t in MOCK_TABLES if t.name == name), None)
if table is not None:
return table
if not create_if_missing:
raise TableNotFound()
mt = MockTable(name, create_if_missing=create_if_missing)
MOCK_TABLES.append(mt)
return mt
def table_cleanup():
global MOCK_TABLES
MOCK_TABLES = []
class MockStore(object):
def __init__(self, config=None):
if config is None:
config = {}
self.config = config
self.writes = []
self.db = {}
self.counter = 0
self.release_alls = []
def write(self, timestamp, log):
self.writes.append({
'timestamp': timestamp,
'log': log
})
self.db['%016x%016x' % (timestamp, self.counter)] = log
self.counter += 1
def iterate_backwards(self, ref, timestamp, counter):
starting_key = '%016x%016x' % (timestamp, counter)
items = [[k, v] for k, v in self.db.items() if k <= starting_key]
        items = sorted(items, key=lambda x: x[0], reverse=True)
for c, i in enumerate(items):
if c % 1 == 0:
yield {'msg': 'test message'}
yield {'timestamp': i[0], 'log': i[1]}
def release_all(self, ref):
self.release_alls.append(ref)
def store_factory(config=None):
return MockStore(config=config)
MOCK_QUERIES = []
class MockQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
LOG.debug("%s finished", self.uuid)
class MockEQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockEQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockEQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
raise RuntimeError("BAD BAD QUERY!")
def query_factory(store, query, timestamp, counter,
num_lines, uuid, redis_config):
if query == "bad":
mqf = MockEQuery
else:
mqf = MockQuery
mq = mqf(store, query, timestamp, counter,
num_lines, uuid, redis_config)
MOCK_QUERIES.append(mq)
return mq
def query_cleanup():
global MOCK_QUERIES
MOCK_QUERIES = []
``` |
{
"source": "JordanReiter/django-cas-provider",
"score": 2
} |
#### File: django-cas-provider/cas_provider/forms.py
```python
from django import forms
from django.conf import settings
from django.contrib.auth import authenticate
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from models import LoginTicket
import datetime
class LoginForm(forms.Form):
email = forms.CharField(widget=forms.TextInput(attrs={'autofocus': 'autofocus',
'placeholder': 'Email',
'max_length': '255'}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password'}))
service = forms.CharField(widget=forms.HiddenInput, required=False)
remember_me = forms.BooleanField(required=False, label="Keep me signed in",
widget=forms.CheckboxInput(attrs={'class': 'remember_me'}))
def __init__(self, *args, **kwargs):
# renew = kwargs.pop('renew', None)
# gateway = kwargs.pop('gateway', None)
request = kwargs.pop('request', None)
super(LoginForm, self).__init__(*args, **kwargs)
self.request = request
def clean_remember_me(self):
    remember = self.cleaned_data['remember_me']
    if not remember and self.request is not None:
        self.request.session.set_expiry(0)
    return remember
class MergeLoginForm(LoginForm):
email = forms.CharField(max_length=255, widget=forms.HiddenInput)
```
#### File: management/commands/cleanuptickets.py
```python
from django.core.management.base import NoArgsCommand
from django.conf import settings
import datetime
from cas_provider.models import ServiceTicket, LoginTicket
class Command(NoArgsCommand):
help = "Delete expired service tickets from the database"
def handle_noargs(self, **options):
print "Service tickets:"
tickets = ServiceTicket.objects.all()
for ticket in tickets:
expiration = datetime.timedelta(minutes=settings.CAS_TICKET_EXPIRATION)
if datetime.datetime.now() > ticket.created + expiration:
print "Deleting %s..." % ticket.ticket
ticket.delete()
else:
print "%s not expired..." % ticket.ticket
tickets = LoginTicket.objects.all()
print "Login tickets:"
for ticket in tickets:
expiration = datetime.timedelta(minutes=settings.CAS_TICKET_EXPIRATION)
if datetime.datetime.now() > ticket.created + expiration:
print "Deleting %s..." % ticket.ticket
ticket.delete()
else:
print "%s not expired..." % ticket.ticket
```
#### File: django-cas-provider/cas_provider/models.py
```python
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from random import Random
import string
import urllib
import urlparse
if hasattr(urlparse, 'parse_qs'):
parse_qs = urlparse.parse_qs
else:
# Python <2.6 compatibility
from cgi import parse_qs
__all__ = ['ServiceTicket', 'LoginTicket', 'ProxyGrantingTicket', 'ProxyTicket', 'ProxyGrantingTicketIOU']
class BaseTicket(models.Model):
ticket = models.CharField(_('ticket'), max_length=32)
created = models.DateTimeField(_('created'), auto_now=True)
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
if 'ticket' not in kwargs:
kwargs['ticket'] = self._generate_ticket()
super(BaseTicket, self).__init__(*args, **kwargs)
def __unicode__(self):
return self.ticket
def _generate_ticket(self, length=ticket.max_length, chars=string.ascii_letters + string.digits):
""" Generates a random string of the requested length. Used for creation of tickets. """
return u"%s-%s" % (self.prefix, ''.join(Random().sample(chars, length - (len(self.prefix) + 1))))
class ServiceTicket(BaseTicket):
user = models.ForeignKey(User, verbose_name=_('user'))
service = models.URLField(_('service'), verify_exists=False)
prefix = 'ST'
class Meta:
verbose_name = _('Service Ticket')
verbose_name_plural = _('Service Tickets')
def get_redirect_url(self):
parsed = urlparse.urlparse(self.service)
query = parse_qs(parsed.query)
query['ticket'] = [self.ticket]
query = [((k, v) if len(v) > 1 else (k, v[0])) for k, v in query.iteritems()]
parsed = urlparse.ParseResult(parsed.scheme, parsed.netloc,
parsed.path, parsed.params,
urllib.urlencode(query), parsed.fragment)
return parsed.geturl()
class LoginTicket(BaseTicket):
prefix = 'LT'
class Meta:
verbose_name = _('Login Ticket')
verbose_name_plural = _('Login Tickets')
class ProxyGrantingTicket(BaseTicket):
serviceTicket = models.ForeignKey(ServiceTicket, null=True)
pgtiou = models.CharField(max_length=256, verbose_name=_('PGTiou'))
prefix = 'PGT'
def __init__(self, *args, **kwargs):
if 'pgtiou' not in kwargs:
kwargs['pgtiou'] = u"PGTIOU-%s" % (''.join(Random().sample(string.ascii_letters + string.digits, 50)))
super(ProxyGrantingTicket, self).__init__(*args, **kwargs)
class Meta:
verbose_name = _('Proxy Granting Ticket')
verbose_name_plural = _('Proxy Granting Tickets')
class ProxyTicket(ServiceTicket):
proxyGrantingTicket = models.ForeignKey(ProxyGrantingTicket, verbose_name=_('Proxy Granting Ticket'))
prefix = 'PT'
class Meta:
verbose_name = _('Proxy Ticket')
verbose_name_plural = _('Proxy Tickets')
class ProxyGrantingTicketIOU(BaseTicket):
proxyGrantingTicket = models.ForeignKey(ProxyGrantingTicket, verbose_name=_('Proxy Granting Ticket'))
prefix = 'PGTIOU'
class Meta:
verbose_name = _('Proxy Granting Ticket IOU')
verbose_name_plural = _('Proxy Granting Tickets IOU')
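# Illustrative sketch (hypothetical value): _generate_ticket pads the random
# part so the whole string fits the ticket field's max_length of 32, so a
# ServiceTicket looks like 'ST-' followed by 29 random letters/digits, and a
# LoginTicket like 'LT-...'.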
```
#### File: django-cas-provider/cas_provider/views.py
```python
import logging
logger = logging.getLogger('cas_provider.views')
import urllib
from urllib import urlencode
import urllib2
import urlparse
from functools import wraps
from django.utils.decorators import available_attrs
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import cache_control
from django.utils.cache import patch_cache_control
from django.views.decorators.csrf import csrf_protect
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from django.contrib.auth import login as auth_login, logout as auth_logout
from django.core.urlresolvers import get_callable
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth import authenticate
from django.core.urlresolvers import reverse
from lxml import etree
from cas_provider.attribute_formatters import NSMAP, CAS
from cas_provider.models import ProxyGrantingTicket, ProxyTicket
from cas_provider.models import ServiceTicket
from cas_provider.exceptions import SameEmailMismatchedPasswords
from cas_provider.forms import LoginForm, MergeLoginForm
from . import signals
__all__ = ['login', 'validate', 'logout', 'service_validate']
INVALID_TICKET = 'INVALID_TICKET'
INVALID_SERVICE = 'INVALID_SERVICE'
INVALID_REQUEST = 'INVALID_REQUEST'
INTERNAL_ERROR = 'INTERNAL_ERROR'
ERROR_MESSAGES = (
(INVALID_TICKET, u'The provided ticket is invalid.'),
(INVALID_SERVICE, u'Service is invalid'),
(INVALID_REQUEST, u'Not all required parameters were sent.'),
(INTERNAL_ERROR, u'An internal error occurred during ticket validation'),
)
_never_cache = cache_control(no_cache=True, must_revalidate=True)
def never_cache(view_func):
"""
Decorator that adds headers to a response so that it will
never be cached.
"""
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view_func(request, *args, **kwargs):
response = view_func(request, *args, **kwargs)
patch_cache_control(response, no_cache=True,
must_revalidate=True, proxy_revalidate=True)
response['Pragma'] = 'no-cache'
return response
return _wrapped_view_func
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name='cas/login.html',
success_redirect=settings.LOGIN_REDIRECT_URL,
warn_template_name='cas/warn.html', **kwargs):
merge = kwargs.get('merge', False)
logging.debug('CAS Provider Login view. Method is %s, merge is %s, template is %s.',
request.method, merge, template_name)
service = request.GET.get('service', None)
if service is not None:
# Save the service on the session, for later use if we end up
# in one of the more complicated workflows.
request.session['service'] = service
user = request.user
errors = []
if request.method == 'POST':
if merge:
form = MergeLoginForm(request.POST, request=request)
else:
form = LoginForm(request.POST, request=request)
if form.is_valid():
service = form.cleaned_data.get('service', None)
try:
auth_args = dict(username=form.cleaned_data['email'],
password=form.cleaned_data['password'])
if merge:
# We only want to send the merge argument if it's
# True. If it it's False, we want it to propagate
# through the auth backends properly.
auth_args['merge'] = merge
user = authenticate(**auth_args)
except SameEmailMismatchedPasswords:
# Need to merge the accounts?
if merge:
# We shouldn't get here...
raise
else:
base_url = reverse('cas_provider_merge')
args = dict(
success_redirect=success_redirect,
email=form.cleaned_data['email'],
)
if service is not None:
args['service'] = service
args = urllib.urlencode(args)
url = '%s?%s' % (base_url, args)
logging.debug('Redirecting to %s', url)
return HttpResponseRedirect(url)
if user is None:
errors.append('Incorrect username and/or password.')
else:
if user.is_active:
auth_login(request, user)
else: # Not a POST...
if merge:
form = MergeLoginForm(initial={'service': service, 'email': request.GET.get('email')})
else:
form = LoginForm(initial={'service': service})
if user is not None and user.is_authenticated():
# We have an authenticated user.
if not user.is_active:
errors.append('This account is disabled.')
else:
# Send the on_cas_login signal. If we get an HttpResponse, return that.
for receiver, response in signals.on_cas_login.send(sender=login, request=request, **kwargs):
if isinstance(response, HttpResponse):
return response
if service is None:
# Try and pull the service off the session
service = request.session.pop('service', service)
signals.on_cas_login_success.send(sender=login, request=request,
service=service, **kwargs)
if service is None:
# Normal internal success redirection.
logging.debug('Redirecting to %s', success_redirect)
return HttpResponseRedirect(success_redirect)
else:
if request.GET.get('warn', False):
return render_to_response(warn_template_name, {
'service': service,
'warn': False
}, context_instance=RequestContext(request))
# Create a service ticket and redirect to the service.
ticket = ServiceTicket.objects.create(service=service, user=user)
if 'service' in request.session:
# Don't need this any more.
del request.session['service']
url = ticket.get_redirect_url()
logging.debug('Redirecting to %s', url)
return HttpResponseRedirect(url)
else:
if request.method == 'POST':
signals.on_cas_login_failure.send(sender=login, request=request,
service=service, **kwargs)
logging.debug('Rendering response on %s, merge is %s', template_name, merge)
return render_to_response(template_name, {'form': form, 'errors': errors}, context_instance=RequestContext(request))
@never_cache
def validate(request):
"""Validate ticket via CAS v.1 protocol
"""
service = request.GET.get('service', None)
ticket_string = request.GET.get('ticket', None)
logger.info('Validating ticket %s for %s', ticket_string, service)
if service is not None and ticket_string is not None:
#renew = request.GET.get('renew', True)
#if not renew:
# TODO: check user SSO session
try:
ticket = ServiceTicket.objects.get(ticket=ticket_string)
assert ticket.service == service
except ServiceTicket.DoesNotExist:
logger.exception("Tried to validate with an invalid ticket %s for %s", ticket_string, service)
except Exception as e:
logger.exception('Got an exception: %s', e)
else:
username = ticket.user.username
ticket.delete()
results = signals.on_cas_collect_histories.send(sender=validate, for_user=ticket.user)
histories = '\n'.join('\n'.join(rs) for rc, rs in results)
logger.info('Validated %s %s', username, "(also %s)" % histories if histories else '')
signals.on_cas_validation_success.send(sender=validate, version=1, service=service)
return HttpResponse("yes\n%s\n%s" % (username, histories))
logger.info('Validation failed.')
signals.on_cas_validation_failure.send(sender=validate, version=1, service=service)
return HttpResponse("no\n\n")
@never_cache
def logout(request, template_name='cas/logout.html',
auto_redirect=settings.CAS_AUTO_REDIRECT_AFTER_LOGOUT):
url = request.GET.get('url', None)
if request.user.is_authenticated():
for ticket in ServiceTicket.objects.filter(user=request.user):
ticket.delete()
auth_logout(request)
if url and auto_redirect:
return HttpResponseRedirect(url)
return render_to_response(template_name, {'url': url},
context_instance=RequestContext(request))
@never_cache
def proxy(request):
targetService = request.GET['targetService']
pgt_id = request.GET['pgt']
try:
proxyGrantingTicket = ProxyGrantingTicket.objects.get(ticket=pgt_id)
except ProxyGrantingTicket.DoesNotExist:
return _cas2_error_response(INVALID_TICKET, service=targetService)
pt = ProxyTicket.objects.create(proxyGrantingTicket=proxyGrantingTicket,
user=proxyGrantingTicket.serviceTicket.user,
service=targetService)
return _cas2_proxy_success(pt.ticket, service=targetService)
def ticket_validate(service, ticket_string, pgtUrl):
if service is None or ticket_string is None:
return _cas2_error_response(INVALID_REQUEST)
try:
if ticket_string.startswith('ST'):
ticket = ServiceTicket.objects.get(ticket=ticket_string)
elif ticket_string.startswith('PT'):
ticket = ProxyTicket.objects.get(ticket=ticket_string)
else:
return _cas2_error_response(INVALID_TICKET,
'%(ticket)s is neither Service (ST-...) nor Proxy Ticket (PT-...)' % {
'ticket': ticket_string},
service=service)
except ServiceTicket.DoesNotExist:
return _cas2_error_response(INVALID_TICKET, service=service)
ticketUrl = urlparse.urlparse(ticket.service)
serviceUrl = urlparse.urlparse(service)
if not(ticketUrl.hostname == serviceUrl.hostname and ticketUrl.path == serviceUrl.path and ticketUrl.port == serviceUrl.port):
return _cas2_error_response(INVALID_SERVICE, service=service)
pgtIouId = None
proxies = ()
if pgtUrl is not None:
pgt = generate_proxy_granting_ticket(pgtUrl, ticket)
if pgt:
pgtIouId = pgt.pgtiou
if hasattr(ticket, 'proxyticket'):
pgt = ticket.proxyticket.proxyGrantingTicket
# I am issued by this proxy granting ticket
if hasattr(pgt.serviceTicket, 'proxyticket'):
while pgt:
if hasattr(pgt.serviceTicket, 'proxyticket'):
proxies += (pgt.serviceTicket.service,)
pgt = pgt.serviceTicket.proxyticket.proxyGrantingTicket
else:
pgt = None
user = ticket.user
ticket.delete()
return _cas2_success_response(user, pgtIouId, proxies, service=service)
@never_cache
def service_validate(request):
"""Validate ticket via CAS v.2 protocol"""
service = request.GET.get('service', None)
ticket_string = request.GET.get('ticket', None)
pgtUrl = request.GET.get('pgtUrl', None)
if ticket_string and ticket_string.startswith('PT-'):
return _cas2_error_response(INVALID_TICKET, "serviceValidate cannot verify proxy tickets", service=service)
else:
return ticket_validate(service, ticket_string, pgtUrl)
@never_cache
def proxy_validate(request):
"""Validate ticket via CAS v.2 protocol"""
service = request.GET.get('service', None)
ticket_string = request.GET.get('ticket', None)
pgtUrl = request.GET.get('pgtUrl', None)
return ticket_validate(service, ticket_string, pgtUrl)
def generate_proxy_granting_ticket(pgt_url, ticket):
proxy_callback_good_status = (200, 202, 301, 302, 304)
uri = list(urlparse.urlsplit(pgt_url))
pgt = ProxyGrantingTicket()
pgt.serviceTicket = ticket
pgt.targetService = pgt_url
if hasattr(ticket, 'proxyGrantingTicket'):
# here we got a proxy ticket! tata!
pgt.pgt = ticket.proxyGrantingTicket
params = {'pgtId': pgt.ticket, 'pgtIou': pgt.pgtiou}
query = dict(urlparse.parse_qsl(uri[4]))
query.update(params)
uri[3] = urlencode(query)
try:
response = urllib2.urlopen(urlparse.urlunsplit(uri))
except urllib2.HTTPError as e:
if not e.code in proxy_callback_good_status:
logger.debug('Checking Proxy Callback URL {} returned {}. Not issuing PGT.'.format(uri, e.code))
return
except urllib2.URLError as e:
logger.debug('Checking Proxy Callback URL {} raised URLError. Not issuing PGT.'.format(uri))
return
pgt.save()
return pgt
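# Illustrative sketch (hypothetical URL): for pgt_url
# 'https://app.example.com/cas/callback' the verification request above is
# 'https://app.example.com/cas/callback?pgtId=PGT-...&pgtIou=PGTIOU-...';
# the PGT is only saved when that callback does not fail with a status
# outside proxy_callback_good_status.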
def _cas2_proxy_success(pt, service=None):
signals.on_cas_proxy_success.send(sender=proxy, service=service)
return HttpResponse(proxy_success(pt))
def _cas2_success_response(user, pgt=None, proxies=None, service=None):
signals.on_cas_validation_success.send(sender=ticket_validate, version=2, service=service)
return HttpResponse(auth_success_response(user, pgt, proxies), mimetype='text/xml')
def _cas2_error_response(code, message=None, service=None):
signals.on_cas_validation_failure.send(sender=service_validate,
version=2, code=code,
message=message, service=service)
return HttpResponse(u'''<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationFailure code="%(code)s">
%(message)s
</cas:authenticationFailure>
</cas:serviceResponse>''' % {
'code': code,
'message': message if message else dict(ERROR_MESSAGES).get(code)
}, mimetype='text/xml')
def proxy_success(pt):
response = etree.Element(CAS + 'serviceResponse', nsmap=NSMAP)
proxySuccess = etree.SubElement(response, CAS + 'proxySuccess')
proxyTicket = etree.SubElement(proxySuccess, CAS + 'proxyTicket')
proxyTicket.text = pt
return unicode(etree.tostring(response, encoding='utf-8'), 'utf-8')
def auth_success_response(user, pgt, proxies):
response = etree.Element(CAS + 'serviceResponse', nsmap=NSMAP)
auth_success = etree.SubElement(response, CAS + 'authenticationSuccess')
username = etree.SubElement(auth_success, CAS + 'user')
username.text = user.username
attrs = {}
for receiver, custom in signals.cas_collect_custom_attributes.send(sender=auth_success_response, user=user):
if custom:
attrs.update(custom)
identifiers = [i for sr, rr in signals.on_cas_collect_histories.send(sender=validate, for_user=user)
for i in rr]
if identifiers:
# Singular `identifier`, as that is the name of the element tag(s).
attrs['identifier'] = identifiers
if attrs:
formatter = get_callable(settings.CAS_CUSTOM_ATTRIBUTES_FORMATER)
formatter(auth_success, attrs)
if pgt:
pgtElement = etree.SubElement(auth_success, CAS + 'proxyGrantingTicket')
pgtElement.text = pgt
if proxies:
proxiesElement = etree.SubElement(auth_success, CAS + "proxies")
for proxy in proxies:
proxyElement = etree.SubElement(proxiesElement, CAS + "proxy")
proxyElement.text = proxy
return unicode(etree.tostring(response, encoding='utf-8'), 'utf-8')
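# Illustrative sketch of the XML built above (values hypothetical):
# <cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
#   <cas:authenticationSuccess>
#     <cas:user>alice</cas:user>
#     <cas:proxyGrantingTicket>PGTIOU-...</cas:proxyGrantingTicket>
#     <cas:proxies><cas:proxy>https://proxy.example.com/</cas:proxy></cas:proxies>
#   </cas:authenticationSuccess>
# </cas:serviceResponse>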
``` |
{
"source": "JordanReiter/django-impersonate-auth",
"score": 2
} |
#### File: django-impersonate-auth/tests/test_silly.py
```python
from django.test import TestCase
from impersonate_auth.backends import ImpersonationBackendMixin
from .base import TEST_USER, TEST_SUPERUSER
from .base import BaseImpersonationBackendTest
SILLY_BACKEND = 'tests.backends.SillyBackend'
SILLY_IMP_BACKEND = 'tests.backends.SillyImpersonationBackend'
class TestSillyImpersonationBackend(BaseImpersonationBackendTest, TestCase):
backends = [SILLY_IMP_BACKEND, SILLY_BACKEND]
user_name = TEST_USER['email']
user_pw = TEST_USER['email'][::-1]
superuser_name = TEST_SUPERUSER['email']
superuser_pw = TEST_SUPERUSER['email'][::-1]
impersonation_backend = SILLY_IMP_BACKEND
def test_impersonation_login_password_contains_sep(self):
pass # test doesn't apply for this backend
``` |
{
"source": "JordanReiter/django-messages",
"score": 2
} |
#### File: management/commands/process_message.py
```python
import logging
import datetime
import sys
import email
import re
from django.core.management.base import BaseCommand
from django.conf import settings
from django_messages.models import Message
try:
from notification import models as notification
except ImportError:
notification = None
SUBJECT_HEADER_ID_REGEX = getattr(settings,'MESSAGES_SUBJECT_HEADER_REGEX', r'^.+?\[([0-9a-z]+)\]\s*$')
class Command(BaseCommand):
help = "Process an incoming message."
def send_reply(self, parent_msg, body):
message_list = []
recipient = parent_msg.sender
sender = parent_msg.recipient
subject = "re: %s" % re.sub(r"^(re:\s*)+","",parent_msg.subject)
msg = Message(
sender = sender,
recipient = recipient,
subject = subject,
body = body,
)
msg.parent_msg = parent_msg
parent_msg.replied_at = datetime.datetime.now()
parent_msg.save()
msg.save()
message_list.append(msg)
if notification:
if parent_msg is not None:
notification.send([sender], "messages_replied", {'message': msg,})
notification.send([recipient], "messages_reply_received", {'message': msg,}, from_address=settings.MESSAGES_HANDLER_ADDRESS)
else:
notification.send([sender], "messages_sent", {'message': msg,})
notification.send([recipient], "messages_received", {'message': msg,}, from_address=settings.MESSAGES_HANDLER_ADDRESS)
return message_list
def get_message(self, msg):
maintype = msg.get_content_maintype()
if maintype == 'multipart':
for part in msg.get_payload():
if part.get_content_maintype() == 'text':
return part.get_payload()
elif maintype == 'text':
return msg.get_payload()
def handle(self, **options):
msg = email.message_from_file(sys.stdin)
if not len(msg.values()):
raise ValueError("E-mail was empty.")
content = self.get_message(msg)
if not len(content):
raise ValueError("Message was empty.")
try:
subject = re.sub(r'\s+',' ',msg['Subject'])
except KeyError:
raise ValueError("The email message did not have a valid header (Subject line required).")
try:
message_id = re.findall(SUBJECT_HEADER_ID_REGEX,subject.strip())[0]
except IndexError:
raise ValueError("The email message did not have a valid subject (id at the end omitted): %s." % subject)
parent_msg = Message.objects.get_for_key(message_id)
self.send_reply(parent_msg, content)
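# Illustrative sketch: with the default SUBJECT_HEADER_ID_REGEX, the message
# key is the bracketed id at the end of the subject, e.g.
#   re.findall(r'^.+?\[([0-9a-z]+)\]\s*$', 're: lunch plans [a1b2c3]')
#   -> ['a1b2c3']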
``` |
{
"source": "JordanReiter/pyoai",
"score": 3
} |
#### File: oaipmh/tests/test_deleted_records.py
```python
from unittest import TestCase, TestSuite, main, makeSuite
from fakeclient import FakeClient
import os
from oaipmh import metadata
from datetime import datetime
directory = os.path.dirname(__file__)
fake2 = os.path.join(directory, 'fake2')
fakeclient = FakeClient(fake2)
fakeclient.getMetadataRegistry().registerReader(
'oai_dc', metadata.oai_dc_reader)
class DeletedRecordsTestCase(TestCase):
def test_getRecord_deleted(self):
header, metadata, about = fakeclient.getRecord(
metadataPrefix='oai_dc', identifier='hdl:1765/1160')
self.assert_(metadata is None)
self.assert_(header.isDeleted())
def test_getRecord_not_deleted(self):
header, metadata, about = fakeclient.getRecord(
metadataPrefix='oai_dc', identifier='hdl:1765/1162')
self.assert_(metadata is not None)
self.assert_(not header.isDeleted())
def test_listRecords(self):
records = fakeclient.listRecords(from_=datetime(2004, 1, 1),
metadataPrefix='oai_dc')
# lazy, just test first one
for header, metadata, about in records:
if header.isDeleted():
self.assert_(metadata is None)
else:
self.assert_(metadata is not None)
def test_suite():
return TestSuite((makeSuite(DeletedRecordsTestCase), ))
if __name__=='__main__':
main(defaultTest='test_suite')
``` |
{
"source": "JordanReiter/validator-collection",
"score": 2
} |
#### File: validator-collection/validator_collection/_decorators.py
```python
import os
from functools import wraps
from validator_collection.errors import ValidatorUsageError
def disable_on_env(func):
"""Disable the ``func`` called if its name is present in ``VALIDATORS_DISABLED``.
:param func: The function/validator to be disabled.
:type func: callable
:returns: If disabled, the ``value`` (first positional argument) passed to
``func``. If enabled, the result of ``func``.
"""
@wraps(func)
def func_wrapper(*args, **kwargs):
# pylint: disable=C0111, C0103
function_name = func.__name__
VALIDATORS_DISABLED = os.getenv('VALIDATORS_DISABLED', '')
disabled_functions = [x.strip() for x in VALIDATORS_DISABLED.split(',')]
force_run = kwargs.get('force_run', False)
try:
value = args[0]
except IndexError:
raise ValidatorUsageError('no value was supplied')
if function_name in disabled_functions and not force_run:
return value
else:
updated_kwargs = {key : kwargs[key]
for key in kwargs
if key != 'force_run'}
return func(*args, **updated_kwargs)
return func_wrapper
def disable_checker_on_env(func):
"""Disable the ``func`` called if its name is present in ``CHECKERS_DISABLED``.
:param func: The function/validator to be disabled.
:type func: callable
:returns: If disabled, ``True``. If enabled, the result of ``func``.
"""
@wraps(func)
def func_wrapper(*args, **kwargs):
# pylint: disable=C0111, C0103
function_name = func.__name__
CHECKERS_DISABLED = os.getenv('CHECKERS_DISABLED', '')
disabled_functions = [x.strip() for x in CHECKERS_DISABLED.split(',')]
force_run = kwargs.get('force_run', False)
if function_name in disabled_functions and not force_run:
return True
else:
return func(*args, **kwargs)
return func_wrapper
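# Minimal usage sketch (names hypothetical, not part of the library API):
#
#   @disable_on_env
#   def is_positive(value):
#       if value <= 0:
#           raise ValueError('value is not positive')
#       return value
#
#   # With VALIDATORS_DISABLED='is_positive' set in the environment, the
#   # wrapper short-circuits and returns the value unvalidated:
#   #   is_positive(-5)                  -> -5
#   #   is_positive(-5, force_run=True)  -> raises ValueError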
``` |
{
"source": "Jordan-Ren/6.08-Final-Project",
"score": 3
} |
#### File: 6.08-Final-Project/spotify_api/voice_text_input.py
```python
import datetime
import json
import sqlite3
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials, SpotifyOAuth
SPOTIFY_CLIENT_ID = ""
SPOTIFY_CLIENT_SECRET = ""
ACCESS_TOKEN = ""
server_user = 'team15'
ht_db = f'/var/jail/home/{server_user}/final/song_queue.db'
# ht_db = 'test.db'
BASE_URL = 'https://api.spotify.com/v1/'
scope = "user-read-currently-playing user-top-read user-read-recently-played user-read-playback-state " \
"user-modify-playback-state streaming app-remote-control user-library-read"
VALID_GROUPS = {'test1': "pass1", 'test2': "pass2"}
def request_handler(request):
cache_handler = spotipy.cache_handler.CacheFileHandler(
cache_path=f'/var/jail/home/{server_user}/final/spotify_cache')
auth_manager = spotipy.oauth2.SpotifyOAuth(scope=scope,
cache_handler=cache_handler,
show_dialog=True, client_id=SPOTIFY_CLIENT_ID,
client_secret=SPOTIFY_CLIENT_SECRET,
redirect_uri="http://example.com")
if request['method'] == "POST":
if request["form"].get('code'):
auth_manager.get_access_token(request["form"]["code"])
return "Token added"
else:
if not auth_manager.validate_token(cache_handler.get_cached_token()):
auth_url = auth_manager.get_authorize_url()
return f'<h2><a href="{auth_url}">Sign in</a></h2>'
try:
sp = spotipy.Spotify(auth_manager=auth_manager)
username = request["form"].get("user")
group_name = request["form"].get("group")
password = request["form"].get("password")
voice_input = request["form"].get("voice")
command, data = parse_voice_input(voice_input)
response = None
if group_name in VALID_GROUPS and VALID_GROUPS[group_name] == password:
if command == "play" or command == "add" and data.get("song_name"):
response = get_song_uri(sp, data.get("song_name"), data.get("artist_name"))
add_song_to_db(sp, song_uri=response.get("track_uri"), song_name=response.get("song_name"),
group_name=group_name, user_name=username)
# add_song_to_queue(sp, response['track_uri'])
add_user(group_n=group_name, user_name=username)
return f"Song: {response.get('song_name')} added to the requests queue"
elif command == "pause":
pause(sp)
return "Paused playback"
elif command == "resume":
resume(sp)
return "Resumed playback"
elif command == "clear":
clear_queue()
return "Cleared Queue"
elif command == "skip":
sp.next_track()
current_song = skip_song(sp, group_name)
return f"The next song in the queue is {current_song}"
elif command == "like":
user = like_dislike_user(sp, 1)
return f"You liked {user}'s song"
elif command == "dislike":
user = like_dislike_user(sp, 0)
return f"You disliked {user}'s song"
elif command == "testing":
return [x[4] for x in get_queue()]
elif command == "None":
return "Invalid voice input, please try again"
return response
else:
return "Invalid Group or Invalid Group Password"
except Exception as e:
raise e
elif request["method"] == "GET":
try:
sp = spotipy.Spotify(auth_manager=auth_manager)
group_name = request["values"]["group"]
# Brandon and David added this field for returning more song info
if group_name in VALID_GROUPS:
if "requests" not in request["values"]:
queue_manager(sp, group_name)
with sqlite3.connect(ht_db) as c:
data = c.execute(
"""SELECT song_name, tempo, danceability, segments FROM song_queue WHERE status = ? AND group_name = ? ORDER BY time_ ASC LIMIT 1;""",
("queued", group_name,)).fetchone()
if not data:
return {"name": "", "tempo": 0, "genres": ["empty"]}
if not sp.currently_playing():
return {"name": "", "tempo": 0, "genres": ["empty"]}
artist_uri = sp.currently_playing()['item']['artists'][0]['uri']
genres = sp.artist(artist_uri).get('genres')
data = list(data)
data.append(genres)
data2 = {"name": data[0], "tempo": data[1], "genres": genres}
return data2
else:
with sqlite3.connect(ht_db) as c:
data = c.execute(
"""SELECT song_name, time_ FROM song_queue WHERE group_name = ? ORDER BY time_ ASC LIMIT 3;""",
(group_name,)).fetchall()
if not data:
return {"name": "", "tempo": 0, "genres": ["empty"]}
if len(data) == 0:
return {"name": "", "tempo": 0, "genres": ["empty"]}
return data
except Exception as e:
raise e
else:
return "Invalid group given"
else:
return "invalid HTTP method for this url."
def clear_queue():
with sqlite3.connect(ht_db) as c:
res = c.execute("""DELETE from song_queue""")
return res
def skip_song(sp, group_name):
with sqlite3.connect(ht_db) as c:
res = c.execute(
"""SELECT time_ FROM song_queue WHERE status = ? AND group_name = ? ORDER BY time_ ASC LIMIT 1;""",
("queued", group_name,)).fetchone()
if res:
c.execute("""DELETE FROM song_queue WHERE time_ = ?""", (res[0],))
# queue_manager(sp, group_name)
current_song = sp.currently_playing().get('item')
if current_song:
current_song = current_song.get("name")
return current_song
# currently_playing = sp.currently_playing().get('item')
# res = c.execute("""SELECT song_name FROM song_queue WHERE group_name = ? ORDER BY time_ ASC LIMIT 1;""",
# (group_name,)).fetchone()
# if res:
# next_song = res[0]
# else:
# next_song = None
return None
def get_queue():
try:
with sqlite3.connect(ht_db) as c:
res = c.execute("""SELECT * from song_queue WHERE status = 'queued'""").fetchall()
return res
except:
return "Database does not exist"
def create_db():
try:
with sqlite3.connect(ht_db) as c:
c.execute("""CREATE TABLE IF NOT EXISTS song_queue (time_ timestamp, group_name text, user_name text, status text, song_name text, song_uri text,
tempo real, energy real, time_signature integer, danceability real, segments text);""")
except:
raise Exception("Could not create song_queue table")
def create_users_db():
try:
with sqlite3.connect(ht_db) as c:
c.execute(
"""CREATE TABLE IF NOT EXISTS song_users (group_name text, user_name text, popularity real, votes real);""")
except:
raise Exception("Could not create users table")
def add_user(group_n, user_name):
with sqlite3.connect(ht_db) as c:
res = c.execute("""SELECT * FROM song_users WHERE group_name = ? AND user_name = ?;""",
(group_n, user_name,)).fetchall()
if res is None or len(res) == 0:
res = c.execute("""SELECT * FROM song_users;""").fetchall()
c.execute("""INSERT into song_users VALUES (?,?,?,?)""",
(group_n, user_name, .5, 1))
res = c.execute("""SELECT * FROM song_users;""").fetchall()
def update_user_popularity(group_n, user_n, vote):
with sqlite3.connect(ht_db) as c:
res = c.execute("""SELECT popularity, votes FROM song_users WHERE group_name = ? AND user_name = ?;""",
(group_n, user_n)).fetchall()
if len(res) > 0:
prev_pop, tot_votes = res[0]
new_popularity = (vote + prev_pop * tot_votes) / (tot_votes + 1)
c.execute("""UPDATE song_users SET popularity = ?, votes = ? WHERE group_name = ? AND user_name = ?""",
(new_popularity, tot_votes + 1, group_n, user_n))
res = c.execute("""SELECT * FROM song_users WHERE group_name = ? AND user_name = ?;""",
(group_n, user_n)).fetchall()
# print("Updated popularity values below!")
# print(res)
def like_dislike_user(sp, vote):
currently_playing = sp.currently_playing().get('item')
if currently_playing is not None:
song_uri = currently_playing.get('uri')
with sqlite3.connect(ht_db) as c:
data = c.execute("""SELECT group_name, user_name FROM song_queue WHERE status = ? AND song_uri = ? """,
("queued", song_uri)).fetchall()
try:
group_name = data[0][0]
user = data[0][1]
update_user_popularity(group_name, user, vote)
return user
except:
raise Exception("Could not find user for like/dislike")
def add_song_to_db(sp, song_uri, song_name, group_name, user_name):
create_db()
create_users_db()
if len(get_queue()) == 0:
play_song(sp, song_uri)
status = "queued"
else:
status = "requested"
try:
tempo, energy, time_signature, danceability, segments = get_audio_features(sp, song_uri)
now = datetime.datetime.now()
except:
raise Exception("Could not get audio analysis")
try:
with sqlite3.connect(ht_db) as c:
c.execute("""INSERT into song_queue VALUES (?,?,?,?,?,?,?,?,?,?,?)""",
(now, group_name, user_name, status, song_name, song_uri, tempo, energy,
time_signature, danceability,
json.dumps(segments)))
except:
raise Exception("Could not add song to db")
def clean_input(voice_input):
try:
voice_input = voice_input.lower()
voice_input = voice_input.replace('"', "")
if voice_input[-1] == '.':
voice_input = voice_input[:-1]
voice_input = voice_input.replace("to the queue", "")
voice_input = voice_input.replace("to the q", "")
voice_input = voice_input.replace("can you please", "")
# voice_input = voice_input.replace(voice_input.split("play")[0], "") # remove everything before "play [song]"
inp_list = voice_input.split(' ')
if "next song" not in voice_input and "next" == inp_list[-1]:
voice_input = voice_input.replace("next", "")
if "now" == inp_list[-1]:
voice_input = voice_input.replace("now", "")
return voice_input
except:
raise Exception("Could not clean voice input")
def parse_artist(song_desc):
try:
data = {}
if "by" in song_desc[:-1]:
song = " ".join(song_desc[:song_desc.index("by")])
artist = " ".join(song_desc[(song_desc.index("by") + 1):])
elif "bye" in song_desc[:-1]:
song = " ".join(song_desc[:song_desc.index("bye")])
artist = " ".join(song_desc[(song_desc.index("bye") + 1):])
else:
song = " ".join(song_desc)
artist = "None"
data["song_name"] = song
data["artist_name"] = artist
return data
except:
raise Exception("Could not parse artist")
def parse_voice_input(voice_input):
'''
Possible Commands:
# Skipping
* skip *
* next song *
# Add to queue (Only if the skipping phrases are missing)
* play ___ ("" | next | now)
* add ___ (to the queue | "" | next)
* queue up ___ (next | now | "")
# With a specific artist request within ___
"SONG" by "ARTIST"
# Pausing (Only if the add/play/queue-up key words are missing)
* pause *
# Resuming (Only if the add/play/queue-up/pause key words are missing)
* resume *
'''
try:
voice_input = clean_input(voice_input)
input_list = voice_input.split()
data = {}
if "skip" in input_list or "next song" in voice_input:
command = "skip"
elif "play" in input_list:
command = "play"
data = parse_artist(input_list[(input_list.index("play") + 1):])
elif "add" in input_list:
command = "add"
data = parse_artist(input_list[(input_list.index("add") + 1):])
elif "queue up" in voice_input or "q up" in voice_input:
command = "add"
data = parse_artist(input_list[(input_list.index("up") + 1):])
elif "pause" in input_list:
command = "pause"
elif "resume" in input_list:
command = "resume"
elif "clear" in input_list:
return "clear", None
elif "like" in input_list:
command = "like"
elif "dislike" in input_list:
command = "dislike"
elif "testing" in input_list:
command = "testing"
else:
command = "No Command"
return command, data
except Exception as e:
raise e
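# Illustrative examples of the parser's behaviour:
#   parse_voice_input('Play despacito by Luis Fonsi')
#   -> ('play', {'song_name': 'despacito', 'artist_name': 'luis fonsi'})
#   parse_voice_input('add sunburn to the queue')
#   -> ('add', {'song_name': 'sunburn', 'artist_name': 'None'})
#   parse_voice_input('skip this one')
#   -> ('skip', {})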
def get_song_uri(sp, song, artist):
try:
response_data = {}
lim = 1 if artist == "None" else 50
res = sp.search(song, limit=lim, type="track")
found = False
if len(res["tracks"]["items"]) > 0:
if artist == "None":
response_data['song_name'] = res["tracks"]["items"][0]["name"]
response_data['track_uri'] = res["tracks"]["items"][0]["uri"]
response_data['url'] = res["tracks"]["items"][0]["external_urls"]["spotify"]
found = True
else:
for song in res["tracks"]["items"]:
artists = {a['name'].lower() for a in song['artists']}
if artist in artists:
response_data['song_name'] = song["name"]
response_data['track_uri'] = song["uri"]
response_data['url'] = song["external_urls"]["spotify"]
found = True
if found:
return response_data
else:
return "Song not found"
except:
raise Exception("Could not get song uri")
def play_song(sp, song_uri):
sp.start_playback(uris=[song_uri], position_ms=0)
def add_song_to_queue(sp, song_uri):
sp.add_to_queue(uri=song_uri)
def pause(sp):
sp.pause_playback()
def resume(sp):
sp.start_playback()
def get_audio_features(sp, song_uri):
res = sp.audio_features(tracks=[song_uri])
tempo, danceability = res[0].get('tempo'), res[0].get('danceability')
energy, time_signature = res[0].get('energy'), res[0].get('time_signature')
res = sp.audio_analysis(song_uri)
segments = [{'start': x.get('start'), 'duration': x.get('duration'), 'loudness': x.get('loudness')} for x in
res.get('sections')]
return tempo, energy, time_signature, danceability, segments
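# Illustrative return shape (values hypothetical):
#   tempo=118.0, energy=0.78, time_signature=4, danceability=0.66,
#   segments=[{'start': 0.0, 'duration': 12.3, 'loudness': -23.1}, ...]
# Note the 'segments' list is actually built from the audio-analysis
# 'sections', one dict per section of the track.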
# def get_queue():
# with sqlite3.connect(ht_db) as c:
# res = c.execute("""SELECT song_uri FROM song_queue WHERE status = ? ORDER BY time_ ASC;""",
# ('queued',)).fetchall()
# return res
def queue_manager(sp, group_name):
songs_on_queue = get_queue()
if len(songs_on_queue) < 3:
with sqlite3.connect(ht_db) as c:
songs_to_add = 3 - len(songs_on_queue)
reqed_songs = c.execute(
"""SELECT song_uri, user_name FROM song_queue WHERE status = ? ORDER BY time_ ASC;""",
('requested',)).fetchall()
its = min(songs_to_add, len(reqed_songs))
for i in range(its):
reqed_users = {x[1] for x in reqed_songs}
user_pops = c.execute("""SELECT user_name, popularity FROM song_users WHERE group_name = ?""",
(group_name,)).fetchall()
users_ = [x for x in user_pops if x[0] in reqed_users]
if len(users_) > 0:
best_user = max(users_, key=lambda x: x[1])[0]
song = c.execute(
"""SELECT song_uri FROM song_queue WHERE user_name = ? AND group_name = ? AND status = 'requested' ORDER BY time_ ASC LIMIT 1""",
(best_user, group_name,)).fetchone()
add_song_to_queue(sp, song_uri=song[0])
c.execute(
"""UPDATE song_queue SET status = ?, time_ = ? WHERE group_name = ? AND user_name = ? AND song_uri = ?""",
("queued", datetime.datetime.now(), group_name, best_user, song[0]))
reqed_songs = c.execute(
"""SELECT song_uri, user_name FROM song_queue WHERE status = ? ORDER BY time_ ASC;""",
('requested',)).fetchall()
currently_playing = sp.currently_playing().get('item')
if currently_playing:
song_uri = currently_playing.get('uri')
with sqlite3.connect(ht_db) as c:
res = c.execute(
"""SELECT song_uri FROM song_queue WHERE status = ? AND group_name = ? ORDER BY time_ ASC LIMIT 1;""",
("queued", group_name,)).fetchone()
if res:
res = res[0]
if res and song_uri != res:
skip_song(sp, group_name)
queue_manager(sp, group_name)
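# Sketch of the policy implemented above: keep up to three songs queued;
# whenever there is room, promote the oldest request belonging to the
# requester with the highest stored popularity, then resync the head of the
# database queue with whatever Spotify reports as currently playing.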
if __name__ == "__main__":
# auth_manager = spotipy.oauth2.SpotifyOAuth(scope=scope,
# show_dialog=True, client_id=SPOTIFY_CLIENT_ID,
# client_secret=SPOTIFY_CLIENT_SECRET, redirect_uri="http://example.com")
# sp = spotipy.Spotify(auth_manager=auth_manager)
# u = sp.currently_playing()['item']['artists'][0]['uri']
# print(sp.artist(u)['genres'])
# print(sp.album(u).get('genres'))
# print(sp.audio_features(['spotify:track:2r6OAV3WsYtXuXjvJ1lIDi']))
# print(get_song_uri(sp, "fun", "None"))
# queue_manager(sp)
# req = {
# "method": "GET",
# "values": {
# "user": "acelli",
# "group": "group15",
# "voice": "Play despacito"
# }
# }
# request_handler(req)
# req2 = {
# "method": "GET",
# "values": {
# "user": "acelli",
# "group": "group15",
# "voice": "add sunburn to the queue"
# }
# }
# request_handler(req2)
# import time
# time.sleep(10)
# req3 = {
# "method": "GET",
# "values": {
# "user": "acelli",
# "voice": "pause"
# }
# }
# request_handler(req3)
# print(request_handler(req))
# print(get_audio_features('spotify:track:6habFhsOp2NvshLv26DqMb'))
pass
``` |
{
"source": "JordanRex/YAAML",
"score": 3
} |
#### File: src/algos/multiclass.py
```python
import xgboost as xgb
from sklearn.multiclass import OneVsRestClassifier, OneVsOneClassifier, OutputCodeClassifier
from sklearn.svm import SVC, LinearSVC, NuSVC
from sklearn.ensemble import GradientBoostingClassifier, ExtraTreesClassifier
from sklearn.ensemble import RandomForestClassifier, VotingClassifier, AdaBoostClassifier
from sklearn.linear_model import LogisticRegressionCV, LogisticRegression, RidgeClassifierCV, PassiveAggressiveClassifier, SGDClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier, RadiusNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis, QuadraticDiscriminantAnalysis
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.neural_network import MLPClassifier
#%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt
# multi-class classification/regression superclass
class opr_model():
""" Algorithms in each function are as follows:
1. opr_multiclass_inherant
2. opr_multiclass_oneVS
3. opr_regression
4. opr_ordinal
5. opr_neuralnets
6. ...
P.S: Not good enough algos commented
"""
def __init__(self, train_df, valid_df, ytrain_vec, yvalid_vec, algo=0):
# initialize the arguments
self.train = train_df
self.valid = valid_df
self.ytrain = ytrain_vec
self.yvalid = yvalid_vec
self.algo = algo
# print stats
print(train_df.shape, '\n', valid_df.shape, '\n', ytrain_vec.shape, '\n', yvalid_vec.shape)
print('the class values are:', yvalid_vec.unique(), '\n')
# run the various algorithm functions
self.opr_multiclass_inherant()
self.opr_neuralnets()
self.opr_multiclass_oneVS()
self.main()
self.ensemble()
def main(self):
names = ['Logit', 'AdaBoost', 'ExtraTrees', 'LinearSVC', 'Ridge', 'RandomForest',
         'GBM', 'OneVsRest', 'OneVsOne', 'OutputCode']
classifiers = [self.logr, self.adab, self.extt, self.lsvc, self.rdgc,
               self.rfcf, self.gbmc, self.ovrc, self.ovoc, self.occf]
#testnames = ['Logit', 'ExtraTrees', 'MLP']
#testclassifiers = [self.logr, self.extt, self.mlpc]
# iterate over classifiers
clf_scores = {}
clf_probs = {}
clf_predictions = {}
clf_insample = {}
for name, clf in zip(names, classifiers):
print(name, 'is happening \n')
clf.fit(self.train.values, self.ytrain)
clf_scores[name] = clf.score(self.valid.values, self.yvalid)
print(clf_scores[name], '\n')
# predict probs
if hasattr(clf, "predict_proba"):
clf_probs[name] = clf.predict_proba(self.valid.values)
clf_insample[name] = clf.predict_proba(self.train.values)
else:
clf_probs[name] = clf.predict(self.valid.values)
clf_insample[name] = clf.predict(self.train.values)
# predictions as well
clf_predictions[name] = clf.predict(self.valid.values)
self.scores = clf_scores
self.probs = clf_probs
self.predictions = clf_predictions
self.insample = clf_insample
return None
def opr_multiclass_inherant(self):
# all sklearn native algorithms
self.logr = LogisticRegressionCV(random_state=1, multi_class='ovr', max_iter=1000, penalty='l2') #has probs
self.adab = AdaBoostClassifier(DecisionTreeClassifier(max_depth=12, presort=True),
n_estimators=100, learning_rate=0.1) #has probs
self.extt = ExtraTreesClassifier(n_estimators=200, max_depth=10, n_jobs=-1) #has probs
#self.knnc = KNeighborsClassifier(n_neighbors=3, weights='distance') #has probs
#self.ldac = LinearDiscriminantAnalysis() #has probs
#self.qdac = QuadraticDiscriminantAnalysis() #has probs
self.lsvc = LinearSVC(multi_class='ovr', random_state=1) #multiclass='crammer_singer' another setting #no probs
self.rdgc = RidgeClassifierCV(cv=5) #no probs
self.rncf = RadiusNeighborsClassifier(n_jobs=-1, radius=2, outlier_label=[2091]) #no probs
self.rfcf = RandomForestClassifier(n_estimators=200, max_depth=10, n_jobs=-1, random_state=1,
class_weight={0:1, 1:1, 2:1, 3:1, 4:3, 5:3}) #has probs
#self.nsvc = NuSVC(kernel='linear', nu=0.7, probability=True, class_weight={0:2, 1:1, 2:1, 3:1, 4:2, 5:2}, random_state=1) #has probs
#self.ksvc = SVC(kernel='poly', probability=True, class_weight={0:1, 1:1, 2:1, 3:1, 4:3, 5:3}, random_state=1) #has probs
#self.gpcf = GaussianProcessClassifier(random_state=1, multi_class='one_vs_rest', n_jobs=-1) #has probs
self.gbmc = GradientBoostingClassifier(learning_rate=0.01, max_depth=12, n_estimators=200, subsample=0.8,
max_features=0.8, random_state=1) #has probs
self.sgdc = SGDClassifier(loss='log', penalty='elasticnet', max_iter=20, n_jobs=-1, early_stopping=True,
class_weight={0:2, 1:2, 2:1, 3:1, 4:3, 5:4}) #loss ='modified_huber' #has probs
return None
def opr_multiclass_oneVS(self):
""" best algorithms found from the opr_multiclass_inherant will be used as the starting base estimator for the
methods below. a separate tuning framework to find the best base estimator for oneVS methods will be
implemented later """
self.ovrc = OneVsRestClassifier(ExtraTreesClassifier(n_estimators=200, max_depth=10, n_jobs=-1), n_jobs=-1) #has probs
self.ovoc = OneVsOneClassifier(xgb.XGBClassifier(learning_rate=0.01, n_estimators=200, colsample_bytree=0.7, subsample=0.7,
scale_pos_weight=2, objective='multi:softmax', max_depth=10, num_class=6)) #no probs
self.occf = OutputCodeClassifier(ExtraTreesClassifier(n_estimators=200, max_depth=10, n_jobs=-1),
code_size=5, random_state=1) #no probs
return None
def opr_regression(self):
return None
def opr_ordinal(self):
return None
def opr_neuralnets(self):
### mlp classifier ###
self.mlpc = MLPClassifier(hidden_layer_sizes=(10,))
return None
def ensemble(self):
train_output = self.ytrain.copy()
valid_output = self.yvalid.copy()
for k,v in self.insample.items():
df_insample = pd.DataFrame(self.insample[k])
df_valid = pd.DataFrame(self.probs[k])
df_insample.columns = [k+str(i) for i in df_insample.columns.values.tolist()]
df_valid.columns = [k+str(i) for i in df_valid.columns.values.tolist()]
train_output = pd.concat([train_output, df_insample], axis=1, ignore_index=False)
valid_output = pd.concat([valid_output, df_valid], axis=1, ignore_index=False)
ens_ytrain = train_output.response.values
ens_yvalid = valid_output.response.values
self.ens_train = train_output.drop(['response'], axis=1, inplace=False)
self.ens_valid = valid_output.drop(['response'], axis=1, inplace=False)
ens_model = ExtraTreesClassifier(n_estimators=100, max_depth=5, n_jobs=-1)
ens_model.fit(self.ens_train, ens_ytrain)
print('ensemble score is:', ens_model.score(self.ens_valid, ens_yvalid))
self.ensmod = ens_model
return None
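# Sketch of the stacking step above: every base model contributes its
# per-class probabilities (or raw predictions, for models without
# predict_proba) as new feature columns named '<ModelName><class index>'
# (e.g. 'Logit0'..'Logit5'), and an ExtraTreesClassifier is fit on that
# matrix as the meta-learner.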
oprmod = opr_model(train, valid, ytrain, yvalid)
```
#### File: src/algos/one_class.py
```python
import numpy as np
import pandas as pd
import sklearn.metrics
from sklearn.covariance import EllipticEnvelope
from sklearn.ensemble import IsolationForest
from sklearn.model_selection import GridSearchCV
from sklearn.svm import OneClassSVM
### ONE-CLASS METHODS ###
class oneclass_models():
def __init__():
""" this class contains several modelling algorithms for one-class classification/anomaly detection """
def data_prepare(X_train, X_valid):
# split and create 2 dataframes corresponing to positive/negative classes
Negatives=X_train[X_train['response']==0]
Positives=X_train[X_train['response']==1]
Negatives.drop(['response'], axis=1, inplace=True)
Positives.drop(['response'], axis=1, inplace=True)
print(Negatives.shape)
print(Positives.shape)
# remove response from validation df too
X_v = X_valid.drop(['response'], axis=1, inplace=False)
print(X_v.shape)
# take a random fraction of the negatives to reduce computation time
Negatives = Negatives.sample(frac=0.1, replace=False, random_state=1)
return Positives, Negatives, X_v
def uni_svm(X_train, X_valid):
""" one-class svm by training separately on positives and negatives """
Positives, Negatives, X_v = oneclass_models.data_prepare(X_train, X_valid)
# Set the parameters by cross-validation
params = [{'kernel': ['rbf'],
'gamma': [0.01, 0.1, 0.5],
'nu': [0.01, 0.1, 0.5]}]
clf_P = GridSearchCV(OneClassSVM(), cv=3, param_grid=params, scoring='accuracy', verbose=True)
clf_N = GridSearchCV(OneClassSVM(), cv=3, param_grid=params, scoring='accuracy', verbose=True)
clf_P.fit(X=Positives, y=np.full(len(Positives),1))
print('positive model fit \n')
clf_N.fit(X=Negatives, y=np.full(len(Negatives),1))
print('negative model fit \n')
clf_AD_P = OneClassSVM(gamma=clf_P.best_params_['gamma'],
kernel=clf_P.best_params_['kernel'], nu=clf_P.best_params_['nu'], verbose=True)
clf_AD_P.fit(Positives)
clf_AD_N = OneClassSVM(gamma=clf_N.best_params_['gamma'],
kernel=clf_N.best_params_['kernel'], nu=clf_N.best_params_['nu'], verbose=True)
clf_AD_N.fit(Negatives)
valid_pred_P=clf_AD_P.predict(X_v)
valid_pred_N=clf_AD_N.predict(X_v)
return valid_pred_P, valid_pred_N, clf_AD_P, clf_AD_N
def score_table(valid_pred_P, valid_pred_N):
table = pd.DataFrame({'P': valid_pred_P,
'N': -1*valid_pred_N,
'O': y_valid})
table['P_N'] = np.where((table['P'] == 1) & (table['N'] == -1), 1, 0)
print(sklearn.metrics.accuracy_score(y_pred=table['P_N'], y_true=table['O']))
print(sklearn.metrics.precision_score(y_pred=table['P_N'], y_true=table['O']))
print(sklearn.metrics.recall_score(y_pred=table['P_N'], y_true=table['O']))
return table
# predictions
p, n, clf_p, clf_n = oneclass_models.uni_svm(X_train=X_train, X_valid=X_valid)
table=oneclass_models.score_table(valid_pred_N=n, valid_pred_P=p)
# ISOLATION FOREST
Positives, Negatives, X_v = oneclass_models.data_prepare(X_train, X_valid)
IFA = IsolationForest(n_estimators=200, max_features=0.3)
IFA.fit(Negatives)
train_IFA=IFA.predict(Negatives)
test_IFA=IFA.predict(Positives)
# accuracy custom function
def Train_Accuracy(Mat):
Sum=0
for i in Mat:
if(i==1):
Sum+=1.0
return (Sum/len(Mat)*100)
def Test_Accuracy(Mat):
Sum=0
for i in Mat:
if(i==-1):
Sum+=1.0
return (Sum/len(Mat)*100)
print("Training: Isolation Forest: ",(Train_Accuracy(train_IFA)),"%")
print("Test: Isolation Forest: ",(Test_Accuracy(test_IFA)),"%")
```
#### File: YAAML/src/hyperas.py
```python
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, BatchNormalization, Input, Flatten
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import load_model
from tensorflow.keras.callbacks import EarlyStopping
import pickle
import numpy as np
import sklearn.metrics as skm
def save_data(train, valid, ytrain, yvalid):
x = np.array(train)
X = np.array(valid)
y = to_categorical(ytrain)
Y = to_categorical(yvalid)
# save backup
nnbp = open('./backup.pkl','wb')
pickle.dump(x, nnbp)
pickle.dump(X, nnbp)
pickle.dump(y, nnbp)
pickle.dump(Y, nnbp)
nnbp.close()
return None
def data():
# load backup
nnbp = open('./backup.pkl', 'rb')
x = pickle.load(nnbp)
X = pickle.load(nnbp)
y = pickle.load(nnbp)
Y = pickle.load(nnbp)
nnbp.close()
return x, X, y, Y
def create_model(x, X, y, Y):
'''
Create Keras model with double curly brackets dropped-in as needed.
Return value has to be a valid python dictionary with two customary keys:
- loss: Specify a numeric evaluation metric to be minimized
- status: Just use STATUS_OK and see hyperopt documentation if not feasible
The last one is optional, though recommended, namely:
- model: specify the model just created so that we can later use it again.
'''
input_dim = x.shape[1]
model = Sequential()
model.add(Dense(input_dim, input_dim = input_dim , activation={{choice(['relu', 'sigmoid', 'tanh', 'elu'])}}))
model.add(BatchNormalization())
model.add(Dense({{choice([50, 100, 250, 500, 1000, 2000])}}, activation={{choice(['relu', 'sigmoid', 'tanh', 'elu'])}}))
model.add(Dropout({{uniform(0, 0.7)}}))
model.add(Dense({{choice([50, 100, 250, 500, 1000])}}, activation = {{choice(['relu', 'sigmoid', 'tanh', 'elu'])}}))
if {{choice(['true', 'false'])}} == 'true':
model.add(Dense({{choice([5, 20, 30, 50, 100])}}, activation = {{choice(['relu', 'sigmoid', 'tanh', 'elu'])}}))
if {{choice(['true', 'false'])}} == 'true':
model.add(Dense({{choice([5, 20, 30, 50, 100])}}, activation = {{choice(['relu', 'sigmoid', 'tanh', 'elu'])}}))
model.add(Dropout({{uniform(0, 0.7)}}))
model.add(Dense(9, activation={{choice(['softmax', 'sigmoid'])}}))
model.compile(loss='categorical_crossentropy', optimizer = {{choice(['rmsprop', 'adam', 'sgd', 'nadam', 'adadelta'])}},
metrics=['accuracy'])
model.fit(x, y, batch_size={{choice([10, 20])}}, epochs=5, verbose=2, validation_data=(X, Y), shuffle=True, callbacks=[EarlyStopping(monitor='val_loss', patience=7, min_delta=0.0001)])
score, acc = model.evaluate(X, Y, verbose=1)
print('Test accuracy:', acc)
return {'loss': -acc, 'status': STATUS_OK, 'model': model}
if __name__ == '__main__':
best_run, best_model, space = optim.minimize(model=create_model,
data=data,
algo=tpe.suggest,
max_evals=5,
trials=Trials(),
eval_space=True,
return_space=True)
x, X, y, Y = data()
print("Evalutation of best performing model:")
print(best_model.evaluate(X, Y))
print("Best performing model chosen hyper-parameters:")
print(best_run)
best_model.save('model.h5')
best_model.fit(x,y,batch_size=10, epochs=10, verbose=1, shuffle=True, validation_data=(X,Y))
best_model.evaluate(X, Y)
nn_pred = best_model.predict_classes(x=X)
skm.accuracy_score(y_pred=nn_pred, y_true=np.argmax(Y, axis=1))
skm.confusion_matrix(y_pred=nn_pred, y_true=np.argmax(Y, axis=1))
```
#### File: src/modules/misc.py
```python
import math
import numpy as np
import pandas as pd
import seaborn as sns
import scipy.stats as ss
import matplotlib.pyplot as plt
from collections import Counter
def conditional_entropy(x, y):
"""
Calculates the conditional entropy of x given y: S(x|y)
Wikipedia: https://en.wikipedia.org/wiki/Conditional_entropy
:param x: list / NumPy ndarray / Pandas Series
A sequence of measurements
:param y: list / NumPy ndarray / Pandas Series
A sequence of measurements
:return: float
"""
# entropy of x given y
y_counter = Counter(y)
xy_counter = Counter(list(zip(x,y)))
total_occurrences = sum(y_counter.values())
entropy = 0.0
for xy in xy_counter.keys():
p_xy = xy_counter[xy] / total_occurrences
p_y = y_counter[xy[1]] / total_occurrences
entropy += p_xy * math.log(p_y/p_xy)
return entropy
def cramers_v(x, y):
"""
Calculates Cramer's V statistic for categorical-categorical association.
Uses correction from Bergsma and Wicher, Journal of the Korean Statistical Society 42 (2013): 323-328.
This is a symmetric coefficient: V(x,y) = V(y,x)
Original function taken from: https://stackoverflow.com/a/46498792/5863503
Wikipedia: https://en.wikipedia.org/wiki/Cram%C3%A9r%27s_V
:param x: list / NumPy ndarray / Pandas Series
A sequence of categorical measurements
:param y: list / NumPy ndarray / Pandas Series
A sequence of categorical measurements
:return: float
in the range of [0,1]
"""
confusion_matrix = pd.crosstab(x,y)
chi2 = ss.chi2_contingency(confusion_matrix)[0]
n = confusion_matrix.sum().sum()
phi2 = chi2/n
r,k = confusion_matrix.shape
phi2corr = max(0, phi2-((k-1)*(r-1))/(n-1))
rcorr = r-((r-1)**2)/(n-1)
kcorr = k-((k-1)**2)/(n-1)
return np.sqrt(phi2corr/min((kcorr-1),(rcorr-1)))
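# Minimal usage sketch (toy data):
#   x = pd.Series(['a', 'a', 'b', 'b'] * 25)
#   cramers_v(x, x.map({'a': 'p', 'b': 'q'}))                   # -> 1.0
#   cramers_v(x, pd.Series(np.random.choice(['p', 'q'], 100)))  # ~ 0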
def theils_u(x, y):
"""
Calculates Theil's U statistic (Uncertainty coefficient) for categorical-categorical association.
This is the uncertainty of x given y: value is on the range of [0,1] - where 0 means y provides no information about
x, and 1 means y provides full information about x.
This is an asymmetric coefficient: U(x,y) != U(y,x)
Wikipedia: https://en.wikipedia.org/wiki/Uncertainty_coefficient
:param x: list / NumPy ndarray / Pandas Series
A sequence of categorical measurements
:param y: list / NumPy ndarray / Pandas Series
A sequence of categorical measurements
:return: float
in the range of [0,1]
"""
s_xy = conditional_entropy(x,y)
x_counter = Counter(x)
total_occurrences = sum(x_counter.values())
p_x = list(map(lambda n: n/total_occurrences, x_counter.values()))
s_x = ss.entropy(p_x)
if s_x == 0:
return 1
else:
return (s_x - s_xy) / s_x
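# Minimal sketch of the asymmetry: if x fully determines y but not the
# reverse, e.g. x = ['a', 'b', 'c', 'd'] and y = ['p', 'p', 'q', 'q'],
# then theils_u(y, x) == 1.0 while theils_u(x, y) < 1.0.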
def correlation_ratio(categories, measurements):
"""
Calculates the Correlation Ratio (sometimes marked by the greek letter Eta) for categorical-continuous association.
Answers the question - given a continuous value of a measurement, is it possible to know which category is it
associated with?
Value is in the range [0,1], where 0 means a category cannot be determined by a continuous measurement, and 1 means
a category can be determined with absolute certainty.
Wikipedia: https://en.wikipedia.org/wiki/Correlation_ratio
:param categories: list / NumPy ndarray / Pandas Series
A sequence of categorical measurements
:param measurements: list / NumPy ndarray / Pandas Series
A sequence of continuous measurements
:return: float
in the range of [0,1]
"""
categories = np.asarray(categories)
measurements = np.asarray(measurements)
fcat, _ = pd.factorize(categories)
cat_num = np.max(fcat)+1
y_avg_array = np.zeros(cat_num)
n_array = np.zeros(cat_num)
for i in range(0,cat_num):
cat_measures = measurements[np.argwhere(fcat == i).flatten()]
n_array[i] = len(cat_measures)
y_avg_array[i] = np.average(cat_measures)
y_total_avg = np.sum(np.multiply(y_avg_array,n_array))/np.sum(n_array)
numerator = np.sum(np.multiply(n_array,np.power(np.subtract(y_avg_array,y_total_avg),2)))
denominator = np.sum(np.power(np.subtract(measurements,y_total_avg),2))
if numerator == 0:
    eta = 0.0
else:
    eta = np.sqrt(numerator / denominator)
return eta
def associations(dataset, nominal_columns=None, mark_columns=False, theil_u=False, plot=True,
return_results = False, **kwargs):
"""
    Calculate the correlation/strength-of-association of features in data-set with both categorical (nominal) and
    continuous features using:
- Pearson's R for continuous-continuous cases
- Correlation Ratio for categorical-continuous cases
- Cramer's V or Theil's U for categorical-categorical cases
:param dataset: NumPy ndarray / Pandas DataFrame
The data-set for which the features' correlation is computed
:param nominal_columns: string / list / NumPy ndarray
Names of columns of the data-set which hold categorical values. Can also be the string 'all' to state that all
columns are categorical, or None (default) to state none are categorical
:param mark_columns: Boolean (default: False)
        if True, output's columns' names will have a suffix of '(nom)' or '(con)' based on their type (nominal or
        continuous), as provided by nominal_columns
:param theil_u: Boolean (default: False)
        In the case of categorical-categorical features, use Theil's U instead of Cramer's V
:param plot: Boolean (default: True)
If True, plot a heat-map of the correlation matrix
:param return_results: Boolean (default: False)
If True, the function will return a Pandas DataFrame of the computed associations
:param kwargs:
Arguments to be passed to used function and methods
:return: Pandas DataFrame
A DataFrame of the correlation/strength-of-association between all features
"""
dataset = convert(dataset, 'dataframe')
columns = dataset.columns
if nominal_columns is None:
nominal_columns = list()
elif nominal_columns == 'all':
nominal_columns = columns
corr = pd.DataFrame(index=columns, columns=columns)
for i in range(0,len(columns)):
for j in range(i,len(columns)):
if i == j:
corr[columns[i]][columns[j]] = 1.0
else:
if columns[i] in nominal_columns:
if columns[j] in nominal_columns:
if theil_u:
corr[columns[j]][columns[i]] = theils_u(dataset[columns[i]],dataset[columns[j]])
corr[columns[i]][columns[j]] = theils_u(dataset[columns[j]],dataset[columns[i]])
else:
cell = cramers_v(dataset[columns[i]],dataset[columns[j]])
corr[columns[i]][columns[j]] = cell
corr[columns[j]][columns[i]] = cell
else:
cell = correlation_ratio(dataset[columns[i]], dataset[columns[j]])
corr[columns[i]][columns[j]] = cell
corr[columns[j]][columns[i]] = cell
else:
if columns[j] in nominal_columns:
cell = correlation_ratio(dataset[columns[j]], dataset[columns[i]])
corr[columns[i]][columns[j]] = cell
corr[columns[j]][columns[i]] = cell
else:
cell, _ = ss.pearsonr(dataset[columns[i]], dataset[columns[j]])
corr[columns[i]][columns[j]] = cell
corr[columns[j]][columns[i]] = cell
corr.fillna(value=np.nan, inplace=True)
if mark_columns:
marked_columns = ['{} (nom)'.format(col) if col in nominal_columns else '{} (con)'.format(col) for col in columns]
corr.columns = marked_columns
corr.index = marked_columns
if plot:
plt.figure(figsize=kwargs.get('figsize',None))
sns.heatmap(corr, annot=kwargs.get('annot',True), fmt=kwargs.get('fmt','.2f'))
plt.show()
if return_results:
return corr
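# --- Added illustrative example (not part of the original source) ---
# A minimal end-to-end call: 'colour' is declared nominal and 'size' is
# continuous, so the off-diagonal cells are filled by correlation_ratio.
# plot=False keeps the sketch headless.
def _demo_associations():
    df = pd.DataFrame({'colour': ['r', 'g', 'b', 'r', 'g', 'b'],
                       'size': [1.0, 2.0, 3.0, 1.2, 2.1, 2.9]})
    return associations(df, nominal_columns=['colour'], plot=False, return_results=True)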
###############################################################################################################################
## ENCODING
###############################################################################################################################
"""
below class was taken from url=https://www.kaggle.com/superant/oh-my-cat
Thermometer encoding (believed to be working really good for GANs)
cannot handle unseen values in test. so use for situations where all levels for a cat variable has atleast 1 sample in train
"""
from sklearn.base import TransformerMixin
from itertools import repeat
import scipy
class ThermometerEncoder(TransformerMixin):
"""
Assumes all values are known at fit
"""
def __init__(self, sort_key=None):
self.sort_key = sort_key
self.value_map_ = None
def fit(self, X, y=None):
self.value_map_ = {val: i for i, val in enumerate(sorted(X.unique(), key=self.sort_key))}
return self
def transform(self, X, y=None):
values = X.map(self.value_map_)
possible_values = sorted(self.value_map_.values())
idx1 = []
idx2 = []
all_indices = np.arange(len(X))
for idx, val in enumerate(possible_values[:-1]):
new_idxs = all_indices[values > val]
idx1.extend(new_idxs)
idx2.extend(repeat(idx, len(new_idxs)))
result = scipy.sparse.coo_matrix(([1] * len(idx1), (idx1, idx2)), shape=(len(X), len(possible_values)), dtype="int8")
return result
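# --- Added illustrative example (not part of the original source) ---
# Thermometer-encodes a small ordinal Series: a value at level i becomes ones in
# columns 0..i-1, so 'low' -> [0,0,0], 'mid' -> [1,0,0], 'high' -> [1,1,0].
def _demo_thermometer():
    s = pd.Series(['low', 'mid', 'high', 'mid'])
    order = {'low': 0, 'mid': 1, 'high': 2}
    enc = ThermometerEncoder(sort_key=order.get)
    return enc.fit_transform(s).toarray()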
###############################################################################################################################
## MISC
###############################################################################################################################
# global function to flatten columns after a grouped operation and aggregation
# outside all classes since it is added as an attribute to pandas DataFrames
def __my_flatten_cols(self, how="_".join, reset_index=True):
how = (lambda iter: list(iter)[-1]) if how == "last" else how
self.columns = [how(filter(None, map(str, levels))) for levels in self.columns.values] \
if isinstance(self.columns, pd.MultiIndex) else self.columns
return self.reset_index(drop=True) if reset_index else self
pd.DataFrame.my_flatten_cols = __my_flatten_cols
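# --- Added illustrative example (not part of the original source) ---
# Flattens the MultiIndex columns produced by a grouped aggregation, e.g.
# ('sales', 'mean') -> 'sales_mean'; note that reset_index(drop=True) also
# discards the group keys, by this helper's design.
def _demo_flatten_cols():
    df = pd.DataFrame({'g': ['a', 'a', 'b'], 'sales': [1, 2, 3]})
    return df.groupby('g').agg({'sales': ['mean', 'sum']}).my_flatten_cols()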
# find and append multiple dataframes of the type specified in string
def append_datasets(cols_to_remove, string=('train', 'valid')):
    # pass a prefix string (or tuple of prefixes) such as 'train' or 'valid';
    # str.startswith accepts a str or tuple, not a list
temp_files = [name for name in os.listdir('../input/') if name.startswith(string)]
temp_dict = {}
for i in temp_files:
df_name = re.sub(string=i, pattern='.csv', repl='')
temp_dict[df_name] = pd.read_csv(str('../input/' + str(i)), na_values=['No Data', ' ', 'UNKNOWN', '', 'NA', 'nan', 'none'])
temp_dict[df_name].columns = map(str.lower, temp_dict[df_name].columns)
temp_dict[df_name].drop(cols_to_remove, axis=1, inplace=True)
        chars_to_remove = [' ', '.', '(', ')', '__', '-']
        for j in chars_to_remove:
            # literal (non-regex) replacement: '.' and '(' are regex metacharacters
            temp_dict[df_name].columns = temp_dict[df_name].columns.str.strip().str.lower().str.replace(j, '_', regex=False)
temp_list = [v for k, v in temp_dict.items()]
if len(temp_list) > 1:
temp = pd.concat(temp_list, axis=0, sort=True, ignore_index=True)
else:
temp = temp_list[0]
return temp
def read_file(path, format='csv', sheet_name='Sheet 1', skiprows=0, sep='|'):
if format=='csv':
try:
x=pd.read_csv(path, na_values=['No Data', ' ', 'UNKNOWN', '', 'Not Rated', 'Not Applicable'], encoding='utf-8', low_memory=False)
        except UnicodeDecodeError:
            # fall back to latin-1 when utf-8 decoding fails
            x=pd.read_csv(path, na_values=['No Data', ' ', 'UNKNOWN', '', 'Not Rated', 'Not Applicable'], encoding='latin-1', low_memory=False)
    elif format=='txt':
        x=pd.read_table(path, sep=sep, skiprows=skiprows, na_values=['No Data', ' ', 'UNKNOWN', '', 'Not Rated', 'Not Applicable'])
    elif format=='xlsx':
        x=pd.read_excel(path, na_values=['No Data', ' ', 'UNKNOWN', '', 'Not Rated', 'Not Applicable'], sheet_name=sheet_name)
else:
raise ValueError("format not supported")
    x.columns = x.columns.str.strip().str.lower().str.replace(r'[^\w\s]+', '_', regex=True)
x.drop_duplicates(inplace=True)
print(x.shape)
return x
``` |
{
"source": "jordanrhall/summix_py",
"score": 3
} |
#### File: summix_py/summix/adj_af.py
```python
import numpy as np
import pandas as pd
from summix.summix import data_processor
### adjAF: (ref, obs, pi_target, file_name, file_format, pi_hat) -> new_DF
### A data-processing function that takes 6 inputs:
## 1. ref: A list of strings containing which columns the user wants to use for reference allele frequencies.
## Pass the name of each column as a string.
## So for example, if the desired reference ancestries are called "ref_eur_1000G" and "ref_afr_1000G", then use
## ref=['ref_eur_1000G','ref_afr_1000G'].
## 2. obs: Which column to use for observed allele freq's. Pass the name of this column, as a string.
## So for example, if the desired observed ancestry is stored in a column called "gnomAD_afr", then obs='gnomAD_afr'.
## 3. pi_target: Updated proportions of each reference ancestry within some true population.
## Values should be provided in the same order as references are provided in!!!
## 4. file_name: A user-input genetic data "file".
## Must be a .txt or a .csv file (with the .txt or .csv as the last four characters of the actual file name).
## See data formatting standards for more info about required rows/columns.
## 5. file_format: The "file_format" of file, as a string.
## Default is 'tab' which is short for tab-delimited text files. Can also choose 'csv' for CSV files.
## 6. pi_hat: The estimated proportions of each reference ancestry within the observed or modeled population.
## Values should be provided in the same order as references are provided in!!!
## pi_hat is an optional argument if the user provided allele freq's for all references, and will be solved via summix
### and returns 1 output:
## 1. new_DF: the user's original data frame with an appended column 'adjusted_AF' holding the ancestry-adjusted allele frequencies.
def adjAF(ref, obs, pi_target, file_name, file_format, pi_hat=None):
k = len(ref) #grab number of provided reference ancestries
# Check that user has provided pi_target and pi_hat of correct length
if np.shape(pi_target)[0] != k:
print('Please ensure pi_target has as many entries as there are references provided.')
return
    # check pi_hat only when it was actually provided (np.shape(None) would raise an IndexError)
    if pi_hat is not None and np.shape(pi_hat)[0] != k:
print('Please ensure pi_hat has as many entries as there are references provided.')
return
pi_target = np.copy(pi_target)/np.sum(pi_target) # normalize pi_target
# Reads data file in using pandas, depending on file_format
    if file_format == 'csv':
D = pd.read_csv(file_name)
else:
D = pd.read_csv(file_name, sep='\t')
names = D.columns # collect list of column names in provided data frame
# Now we count how many references are actually in the data frame, because sometimes the user may be missing 1
# (which is OK here, we just need to know which one is missing...)
ref_count=0
missing_ref_index = k-1 # default
for i in range(0,np.shape(ref)[0]):
        if ref[i] in names:
ref_count=ref_count+1
else:
missing_ref_index = i
print('Note: There is no allele frequency data provided for the',ref[missing_ref_index],' ancestry. One missing ancestry is permitted in this formulation. \n \n \n')
# Confusing/not needed, but not quite ready to delete yet...
# if ref_count == k:
#print('Note: Because all allele frequencies were provided for all ancestries, the',ref[missing_ref_index],'ancestry is not used in the formulation.\n \n \n')
if pi_hat is None and ref_count==k:
        # NOTE: assumes the summix solver module (HA_script) is importable in this scope; the
        # original call passed an undefined name 'file', so file_name is forwarded instead
        answer_obj = HA_script.SUMMIX(ref=ref, obs=obs, file=file_name, k=k, x_guess=None, file_format=file_format)
pi_hat = answer_obj[0] # Defines pi_final as the solution vector
print('Because pi_hat was unspecified, pi_hat has been estimated using the HA script with your specified inputs. \n \n \n', 'The resulting pi_hat is shown in the full HA printout above. \n \n \n')
elif pi_hat is None and ref_count<k:
print('Because one of the reference ancestries cannot be found in the provided data frame, we cannot use summix to provide an estimate for pi_target. \n \n \n', ' Please provide an estimate for pi_target. \n \n \n')
return
pi_hat = np.copy(pi_hat)/np.sum(pi_hat) # normalize pi_hat
# Form needed constant from adjAF formula (see paper)
C = pi_target[missing_ref_index]/pi_hat[missing_ref_index]
# Remove the name for the missing reference from reference list. Default is the last one, if none are missing.
ref.pop(missing_ref_index)
D_pr_ref = D[ref] # grabs references for printing
D_pr_obs = D[obs] # grabs observed for printing
print('By default we print out the first 5 rows of the user-provided reference ancestries \n \n that will be used in the calculation. Please check column names for correctness: \n \n \n',D_pr_ref.head(5), '\n \n \n We also print out the first 5 rows of the user-provided observed ancestry \n \n to be used in the calculation. Please check column name for correctness: \n \n \n',D_pr_obs.head(5), '\n \n \n')
# Use the data_processor to take the info we need out of the data frame D
data_array = data_processor(file_name, file_format, k-1, ref, obs)
ref_AFs = data_array[0]
observed_AFs = data_array[1]
# Instantiate first term in summation
temp = C*observed_AFs
    # Perform the summation over every reference except the missing one (already excluded from ref_AFs)
for i in range(0,k-1):
temp = np.copy(temp) - C*pi_hat[i]*ref_AFs[:,i:(i+1)] + pi_target[i]*ref_AFs[:,i:(i+1)]
    # Clip estimates into the valid allele-frequency range [0, 1]
    for i in range(0, np.shape(temp)[0]):
        if temp[i] < 0:
            temp[i] = 0.0
        elif temp[i] > 1:
            temp[i] = 1.0
# This is our answer, a new vector/column of ancestry-adjusted allele frequencies
adj_AF = temp
# Merge the adj_AF into the original data (Kaichao's contribution! Thanks Kaichao!) as an additional column called adjusted_AF
updateAF = pd.DataFrame(data=adj_AF, columns=['adjusted_AF'])
new_DF = pd.concat([D, updateAF], axis=1)
new_DF['adjusted_AF'] = new_DF['adjusted_AF'].astype(str)
print('Adjustment complete! \n \n', 'A data frame called new_DF has been created for the user. \n \n', 'new_DF contains the original, user-provided data frame, \n \n', 'concatenated with the adjusted allele frequencies (appended as the last column).')
return new_DF
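# --- Added illustrative usage (not part of the original source) ---
# Hypothetical file and column names, shown commented out because a real input
# file is required; pi_target and pi_hat are ordered to match ref.
# new_DF = adjAF(ref=['ref_eur_1000G', 'ref_afr_1000G'],
#                obs='gnomAD_afr',
#                pi_target=[0.15, 0.85],
#                file_name='example_data.txt',
#                file_format='tab',
#                pi_hat=[0.16, 0.84])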
``` |
{
"source": "JordanRMcLean/PySweeper",
"score": 3
} |
#### File: PySweeper/src/Game.py
```python
import random
from datetime import datetime
import src.config as Config
from src.Board import Board
class Game:
started = False
finished = False
start_time = 0
finish_time = 0
board = None
# cell co-ords are
def __init__(self, columns, rows, mines):
self.columns = columns
self.rows = rows
self.mines = mines
self.start_time = 0
self.mines_left = mines
self.cells_left = (columns * rows) - mines
# the game only starts when clicking first cell.
# additionally the first cell must not be a mine and open up some play.
def start(self, first_cell):
self.started = True
self.start_time = datetime.now()
self.board = Board(self.columns, self.rows, self.mines)
def finish(self, lost):
self.finished = True
self.finish_time = datetime.now()
# reveal all other cells.
# finish time.
if lost:
self.lost()
else:
self.won()
def lost(self):
return
# reveal all other cells
# highlight incorrectly flagged mines
def won(self):
return
# check if finish time is a hi score.
# update UI
def flag_cell(self):
return
def click_cell(self):
return
```
#### File: PySweeper/src/UI.py
```python
import tkinter as tk
import tkinter.ttk as ttk
import src.config as Config
import src.Cell as Cell
class PySweeperUI:
name = 'PySweeper'
window = None
# dict of different UI elements.
elements = {}
# definition of all elements within the header/footer of the UI. For easy customisation
@property
def header(self):
return {
'title': {
'element': ttk.Label,
'attributes': {
'text': self.name
},
'insert': lambda elem: elem.grid(row=0, column=0)
},
'new_game': {
'element': ttk.Button,
'attributes': {
'text': 'New Game',
'command': self.new_game
},
'insert': lambda elem: elem.grid(row=0, column=3)
},
'selector': {
'element': ttk.Combobox,
'attributes': {
'values': [dif.capitalize() for dif in Config.DIFFICULTIES.keys()],
'state': 'readonly'
},
'after': self.__init_difficulty
}
}
@property
def footer(self):
return {
'timer': {
'element': ttk.Label,
'attributes': {
'text': '00:00'
}
}
}
def __init__(self, window=None):
if isinstance(window, tk.Tk):
self.window = window
else:
self.window = tk.Tk()
self.window.title('PySweeper')
#import our styles in. This is done after the main window creation.
import src.styles
# header/footer won't be changed after init, so not storage needed.
# main is stored for updating of grid.
header = tk.Frame(master=self.window)
self.main = tk.Frame(master=self.window)
footer = tk.Frame(master=self.window)
self.add_elements(header, self.header)
header.grid()
self.main.grid()
self.add_elements(footer, self.footer)
footer.grid()
def add_elements(self, frame, elements):
for name, obj in elements.items():
element = obj.get('element')
attributes = obj.get('attributes')
after = obj.get('after')
            insert = obj.get('insert')
if element:
item = element(master=frame)
if attributes:
for attr, value in attributes.items():
item[attr] = value
if after:
after(item)
if insert:
insert(item)
else:
item.pack()
self.elements[name] = item
def new_game(self):
selector = self.get_element('selector')
difficulty = selector.get() if selector else 'beginner'
difficulty = difficulty.lower()
game_specs = Config.DIFFICULTIES[difficulty]
self.create_grid(game_specs[0], game_specs[1])
print('Starting new game with difficulty: ' + difficulty)
def create_grid(self, rows, columns):
for y in range(rows):
for x in range(columns):
name = Cell.Cell.create_name(x, y)
cell = ttk.Button(self.main, name = name, style = 'PyCell.TButton')
cell.bind('<ButtonPress-1>', self.__cell_click)
cell.bind('<ButtonPress-2>', self.__cell_flag)
cell.grid(column=x, row=y)
def say_hi(self, e):
print(e)
def get_element(self, elem):
if elem in self.elements:
return self.elements[elem]
else:
print(f'Warning: element {elem} does not exist.')
return ttk.Label()
# private internal functions
def __init_difficulty(self, elem):
elem.current(0)
# elem.bind('<<ComboboxSelected>>', self.say_hi)
def __cell_click(self, e):
print('cell clicked')
def __cell_flag(self, e):
print('cell flagged')
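# --- Added illustrative usage (not part of the original source) ---
# Launch the UI with a fresh Tk root; mainloop() is the standard Tk event loop.
# app = PySweeperUI()
# app.new_game()
# app.window.mainloop()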
``` |
{
"source": "jordanrmerrick/Hackathon2020",
"score": 3
} |
#### File: jordanrmerrick/Hackathon2020/api.py
```python
import requests
import re
def poll_locs(address, key):
base_url = "https://www.googleapis.com/civicinfo/v2/voterinfo"
url = "{}?address={}&key={}".format(base_url, address, key)
request = requests.get(url=url)
return request.json()
def parse_address(station_info):
addr = station_info["line1"]
city = station_info["city"]
state = station_info["state"]
    zip_code = station_info["zip"]
    try:
        addr2 = station_info["line2"]
        address = "{}, {}, {}, {}, {}".format(addr, addr2, city, state, zip_code)
    except KeyError:
        address = "{}, {}, {}, {}".format(addr, city, state, zip_code)
return address
def parse_hours(hours_info):
hours = hours_info.split("\n")
if len(hours) == 1:
spt = hours[0].split(": ")
return {spt[0]: spt[1]}
else:
hours = hours[:7]
ret = {}
for time in hours:
day = time[:3]
time = re.findall(r"\d\d: (.*)", time)
try:
ret[day] = time[0]
except IndexError:
pass
return ret
def parse_polls(info):
ps = {}
ev = {}
try:
poll_stations = info["pollingLocations"]
for station in poll_stations:
st = station["address"]
address = parse_address(st)
name = st["locationName"]
time = parse_hours(station["pollingHours"])
ps[address] = {"name": name, "time": time}
except KeyError:
pass
try:
early_voting = info["earlyVoteSites"]
for station in early_voting:
st = station["address"]
address = parse_address(st)
name = st["locationName"]
time = parse_hours(station["pollingHours"])
ev[address] = {"name": name, "time": time}
except KeyError:
pass
return {"polling_stations": ps, "early_voting": ev}
def geoencoding(address, key):
base_url = "https://maps.googleapis.com/maps/api/geocode/json"
url = "{}?address={}&key={}".format(base_url, address, key)
request = requests.get(url=url)
return request.json()
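# --- Added illustrative usage (not part of the original source) ---
# Hypothetical address and API key: fetch polling places from the Google Civic
# Information API and reduce them to the dict shape that parse_polls returns.
# info = poll_locs('1600 Pennsylvania Ave NW, Washington, DC', key='YOUR_API_KEY')
# print(parse_polls(info))  # {'polling_stations': {...}, 'early_voting': {...}}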
```
#### File: jordanrmerrick/Hackathon2020/calculations.py
```python
from math import radians, cos, sqrt
from dbi import select_from_zip, select_from_id, create_connection
from api import *
import usaddress
def distance(lat1, lon1, lat2, lon2):
x = radians(lon1 - lon2) * cos(radians((lat1 + lat2) / 2))
y = radians(lat1 - lat2)
    # equirectangular approximation: 6371000 m is the mean radius of the Earth
dist = 6371000 * sqrt((x * x) + (y * y))
return dist
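# --- Added illustrative example (not part of the original source) ---
# The equirectangular approximation is accurate at city scale: two points
# 0.001 degrees of latitude apart come out roughly 111 m from each other.
def _demo_distance():
    return distance(40.7128, -74.0060, 40.7138, -74.0060)  # ~111.2 metres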
class Closest_boxes(object):
def __init__(self, address, key):
self.address = address
self.key = key
def geoencode(self):
geo = geoencoding(self.address, self.key)
g = geo["results"][0]["geometry"]
location = g["location"]
lat1 = location["lat"]
lon1 = location["lng"]
return [lat1, lon1]
def parse_address(self):
try:
ret = usaddress.tag(self.address)
except usaddress.RepeatedLabelError:
ret = "Please enter a valid address."
return ret
def mailbox_loc(self):
conn = create_connection("fulldata.sqlite")
parsed = self.parse_address()[0]
zipcode = parsed["ZipCode"]
return select_from_zip(conn, zipcode)
def closest_boxes(self):
high, med, low = -1, -1, -1
hi, mi, li = 0, 0, 0
selfaddr = self.geoencode()
boxes = self.mailbox_loc()
for box in boxes:
lat = box[-2]
lon = box[-1]
dist = distance(float(lat), float(lon), float(selfaddr[0]), float(selfaddr[1]))
            if high == -1 or med == -1 or low == -1:
                # first box seen: provisionally the 1st, 2nd and 3rd closest at once
                high, med, low = dist, dist, dist
                hi, mi, li = box[0], box[0], box[0]
            elif dist <= low:
                high, med, low, hi, mi, li = med, low, dist, mi, li, box[0]
            elif low < dist <= med:
                high, med, hi, mi = med, dist, mi, box[0]
            elif med < dist <= high:
                high, hi = dist, box[0]
            else:
                pass
conn = create_connection("fulldata.sqlite")
r0 = select_from_id(conn, hi)
r1 = select_from_id(conn, mi)
r2 = select_from_id(conn, li)
ret = [r0, r1, r2]
return ret
def create_address(self):
box_locs = self.closest_boxes()
print(box_locs)
if len(box_locs) == 0:
return {"No boxes found": ""}
else:
box_locs.reverse()
ret = {}
for box in box_locs:
if len(box) == 0:
ret["No close boxes found. Please visit https://mailboxlocate.com/ to find your nearest mailbox"] = ""
continue
box_ = box[0]
addr = box_[1]
city = box_[2]
state = box_[3]
zipcode = box_[4]
full = "{}, {}, {}, {}".format(addr, city, state, zipcode)
ret[full] = (box_[-2], box_[-1])
return ret
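# --- Added illustrative usage (not part of the original source) ---
# Hypothetical address and API key; requires the fulldata.sqlite mailbox database.
# finder = Closest_boxes('350 5th Ave, New York, NY 10118', key='YOUR_API_KEY')
# print(finder.create_address())  # {address: (lat, lon), ...} for the 3 closest boxes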
``` |
{
"source": "jordanrmerrick/pandas",
"score": 3
} |
#### File: reshape/concat/test_datetimes.py
```python
import datetime as dt
from datetime import datetime
import dateutil
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
DatetimeIndex,
Index,
MultiIndex,
Series,
Timestamp,
concat,
date_range,
to_timedelta,
)
import pandas._testing as tm
class TestDatetimeConcat:
def test_concat_datetime64_block(self):
from pandas.core.indexes.datetimes import date_range
rng = date_range("1/1/2000", periods=10)
df = DataFrame({"time": rng})
result = concat([df, df])
assert (result.iloc[:10]["time"] == rng).all()
assert (result.iloc[10:]["time"] == rng).all()
def test_concat_datetime_datetime64_frame(self):
# GH#2624
rows = []
rows.append([datetime(2010, 1, 1), 1])
rows.append([datetime(2010, 1, 2), "hi"])
df2_obj = DataFrame.from_records(rows, columns=["date", "test"])
ind = date_range(start="2000/1/1", freq="D", periods=10)
df1 = DataFrame({"date": ind, "test": range(10)})
# it works!
concat([df1, df2_obj])
def test_concat_datetime_timezone(self):
# GH 18523
idx1 = date_range("2011-01-01", periods=3, freq="H", tz="Europe/Paris")
idx2 = date_range(start=idx1[0], end=idx1[-1], freq="H")
df1 = DataFrame({"a": [1, 2, 3]}, index=idx1)
df2 = DataFrame({"b": [1, 2, 3]}, index=idx2)
result = concat([df1, df2], axis=1)
exp_idx = (
DatetimeIndex(
[
"2011-01-01 00:00:00+01:00",
"2011-01-01 01:00:00+01:00",
"2011-01-01 02:00:00+01:00",
],
freq="H",
)
.tz_convert("UTC")
.tz_convert("Europe/Paris")
)
expected = DataFrame(
[[1, 1], [2, 2], [3, 3]], index=exp_idx, columns=["a", "b"]
)
tm.assert_frame_equal(result, expected)
idx3 = date_range("2011-01-01", periods=3, freq="H", tz="Asia/Tokyo")
df3 = DataFrame({"b": [1, 2, 3]}, index=idx3)
result = concat([df1, df3], axis=1)
exp_idx = DatetimeIndex(
[
"2010-12-31 15:00:00+00:00",
"2010-12-31 16:00:00+00:00",
"2010-12-31 17:00:00+00:00",
"2010-12-31 23:00:00+00:00",
"2011-01-01 00:00:00+00:00",
"2011-01-01 01:00:00+00:00",
]
)
expected = DataFrame(
[
[np.nan, 1],
[np.nan, 2],
[np.nan, 3],
[1, np.nan],
[2, np.nan],
[3, np.nan],
],
index=exp_idx,
columns=["a", "b"],
)
tm.assert_frame_equal(result, expected)
# GH 13783: Concat after resample
result = concat([df1.resample("H").mean(), df2.resample("H").mean()], sort=True)
expected = DataFrame(
{"a": [1, 2, 3] + [np.nan] * 3, "b": [np.nan] * 3 + [1, 2, 3]},
index=idx1.append(idx1),
)
tm.assert_frame_equal(result, expected)
def test_concat_datetimeindex_freq(self):
# GH 3232
# Monotonic index result
dr = date_range("01-Jan-2013", periods=100, freq="50L", tz="UTC")
data = list(range(100))
expected = DataFrame(data, index=dr)
result = concat([expected[:50], expected[50:]])
tm.assert_frame_equal(result, expected)
# Non-monotonic index result
result = concat([expected[50:], expected[:50]])
expected = DataFrame(data[50:] + data[:50], index=dr[50:].append(dr[:50]))
expected.index._data.freq = None
tm.assert_frame_equal(result, expected)
def test_concat_multiindex_datetime_object_index(self):
# https://github.com/pandas-dev/pandas/issues/11058
idx = Index(
[dt.date(2013, 1, 1), dt.date(2014, 1, 1), dt.date(2015, 1, 1)],
dtype="object",
)
s = Series(
["a", "b"],
index=MultiIndex.from_arrays(
[
[1, 2],
idx[:-1],
],
names=["first", "second"],
),
)
s2 = Series(
["a", "b"],
index=MultiIndex.from_arrays(
[[1, 2], idx[::2]],
names=["first", "second"],
),
)
mi = MultiIndex.from_arrays(
[[1, 2, 2], idx],
names=["first", "second"],
)
assert mi.levels[1].dtype == object
expected = DataFrame(
[["a", "a"], ["b", np.nan], [np.nan, "b"]],
index=mi,
)
result = concat([s, s2], axis=1)
tm.assert_frame_equal(result, expected)
def test_concat_NaT_series(self):
# GH 11693
# test for merging NaT series with datetime series.
x = Series(
date_range("20151124 08:00", "20151124 09:00", freq="1h", tz="US/Eastern")
)
y = Series(pd.NaT, index=[0, 1], dtype="datetime64[ns, US/Eastern]")
expected = Series([x[0], x[1], pd.NaT, pd.NaT])
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
# all NaT with tz
expected = Series(pd.NaT, index=range(4), dtype="datetime64[ns, US/Eastern]")
result = concat([y, y], ignore_index=True)
tm.assert_series_equal(result, expected)
# without tz
x = Series(date_range("20151124 08:00", "20151124 09:00", freq="1h"))
y = Series(date_range("20151124 10:00", "20151124 11:00", freq="1h"))
y[:] = pd.NaT
expected = Series([x[0], x[1], pd.NaT, pd.NaT])
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
# all NaT without tz
x[:] = pd.NaT
expected = Series(pd.NaT, index=range(4), dtype="datetime64[ns]")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("tz", [None, "UTC"])
def test_concat_NaT_dataframes(self, tz):
# GH 12396
first = DataFrame([[pd.NaT], [pd.NaT]])
first = first.apply(lambda x: x.dt.tz_localize(tz))
second = DataFrame(
[[Timestamp("2015/01/01", tz=tz)], [Timestamp("2016/01/01", tz=tz)]],
index=[2, 3],
)
expected = DataFrame(
[
pd.NaT,
pd.NaT,
Timestamp("2015/01/01", tz=tz),
Timestamp("2016/01/01", tz=tz),
]
)
result = concat([first, second], axis=0)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("tz1", [None, "UTC"])
@pytest.mark.parametrize("tz2", [None, "UTC"])
@pytest.mark.parametrize("s", [pd.NaT, Timestamp("20150101")])
def test_concat_NaT_dataframes_all_NaT_axis_0(self, tz1, tz2, s):
# GH 12396
# tz-naive
first = DataFrame([[pd.NaT], [pd.NaT]]).apply(lambda x: x.dt.tz_localize(tz1))
second = DataFrame([s]).apply(lambda x: x.dt.tz_localize(tz2))
result = concat([first, second], axis=0)
expected = DataFrame(Series([pd.NaT, pd.NaT, s], index=[0, 1, 0]))
expected = expected.apply(lambda x: x.dt.tz_localize(tz2))
if tz1 != tz2:
expected = expected.astype(object)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("tz1", [None, "UTC"])
@pytest.mark.parametrize("tz2", [None, "UTC"])
def test_concat_NaT_dataframes_all_NaT_axis_1(self, tz1, tz2):
# GH 12396
first = DataFrame(Series([pd.NaT, pd.NaT]).dt.tz_localize(tz1))
second = DataFrame(Series([pd.NaT]).dt.tz_localize(tz2), columns=[1])
expected = DataFrame(
{
0: Series([pd.NaT, pd.NaT]).dt.tz_localize(tz1),
1: Series([pd.NaT, pd.NaT]).dt.tz_localize(tz2),
}
)
result = concat([first, second], axis=1)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("tz1", [None, "UTC"])
@pytest.mark.parametrize("tz2", [None, "UTC"])
def test_concat_NaT_series_dataframe_all_NaT(self, tz1, tz2):
# GH 12396
# tz-naive
first = Series([pd.NaT, pd.NaT]).dt.tz_localize(tz1)
second = DataFrame(
[
[Timestamp("2015/01/01", tz=tz2)],
[Timestamp("2016/01/01", tz=tz2)],
],
index=[2, 3],
)
expected = DataFrame(
[
pd.NaT,
pd.NaT,
Timestamp("2015/01/01", tz=tz2),
Timestamp("2016/01/01", tz=tz2),
]
)
if tz1 != tz2:
expected = expected.astype(object)
result = concat([first, second])
tm.assert_frame_equal(result, expected)
class TestTimezoneConcat:
def test_concat_tz_series(self):
# gh-11755: tz and no tz
x = Series(date_range("20151124 08:00", "20151124 09:00", freq="1h", tz="UTC"))
y = Series(date_range("2012-01-01", "2012-01-02"))
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
# gh-11887: concat tz and object
x = Series(date_range("20151124 08:00", "20151124 09:00", freq="1h", tz="UTC"))
y = Series(["a", "b"])
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
# see gh-12217 and gh-12306
# Concatenating two UTC times
first = DataFrame([[datetime(2016, 1, 1)]])
first[0] = first[0].dt.tz_localize("UTC")
second = DataFrame([[datetime(2016, 1, 2)]])
second[0] = second[0].dt.tz_localize("UTC")
result = concat([first, second])
assert result[0].dtype == "datetime64[ns, UTC]"
# Concatenating two London times
first = DataFrame([[datetime(2016, 1, 1)]])
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame([[datetime(2016, 1, 2)]])
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
assert result[0].dtype == "datetime64[ns, Europe/London]"
# Concatenating 2+1 London times
first = DataFrame([[datetime(2016, 1, 1)], [datetime(2016, 1, 2)]])
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame([[datetime(2016, 1, 3)]])
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
assert result[0].dtype == "datetime64[ns, Europe/London]"
# Concat'ing 1+2 London times
first = DataFrame([[datetime(2016, 1, 1)]])
first[0] = first[0].dt.tz_localize("Europe/London")
second = DataFrame([[datetime(2016, 1, 2)], [datetime(2016, 1, 3)]])
second[0] = second[0].dt.tz_localize("Europe/London")
result = concat([first, second])
assert result[0].dtype == "datetime64[ns, Europe/London]"
def test_concat_tz_series_tzlocal(self):
# see gh-13583
x = [
Timestamp("2011-01-01", tz=dateutil.tz.tzlocal()),
Timestamp("2011-02-01", tz=dateutil.tz.tzlocal()),
]
y = [
Timestamp("2012-01-01", tz=dateutil.tz.tzlocal()),
Timestamp("2012-02-01", tz=dateutil.tz.tzlocal()),
]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y))
assert result.dtype == "datetime64[ns, tzlocal()]"
def test_concat_tz_series_with_datetimelike(self):
# see gh-12620: tz and timedelta
x = [
Timestamp("2011-01-01", tz="US/Eastern"),
Timestamp("2011-02-01", tz="US/Eastern"),
]
y = [pd.Timedelta("1 day"), pd.Timedelta("2 day")]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y, dtype="object"))
# tz and period
y = [pd.Period("2011-03", freq="M"), pd.Period("2011-04", freq="M")]
result = concat([Series(x), Series(y)], ignore_index=True)
tm.assert_series_equal(result, Series(x + y, dtype="object"))
def test_concat_tz_frame(self):
df2 = DataFrame(
{
"A": Timestamp("20130102", tz="US/Eastern"),
"B": Timestamp("20130603", tz="CET"),
},
index=range(5),
)
# concat
df3 = concat([df2.A.to_frame(), df2.B.to_frame()], axis=1)
tm.assert_frame_equal(df2, df3)
def test_concat_multiple_tzs(self):
# GH#12467
# combining datetime tz-aware and naive DataFrames
ts1 = Timestamp("2015-01-01", tz=None)
ts2 = Timestamp("2015-01-01", tz="UTC")
ts3 = Timestamp("2015-01-01", tz="EST")
df1 = DataFrame({"time": [ts1]})
df2 = DataFrame({"time": [ts2]})
df3 = DataFrame({"time": [ts3]})
results = concat([df1, df2]).reset_index(drop=True)
expected = DataFrame({"time": [ts1, ts2]}, dtype=object)
tm.assert_frame_equal(results, expected)
results = concat([df1, df3]).reset_index(drop=True)
expected = DataFrame({"time": [ts1, ts3]}, dtype=object)
tm.assert_frame_equal(results, expected)
results = concat([df2, df3]).reset_index(drop=True)
expected = DataFrame({"time": [ts2, ts3]})
tm.assert_frame_equal(results, expected)
def test_concat_multiindex_with_tz(self):
# GH 6606
df = DataFrame(
{
"dt": [
datetime(2014, 1, 1),
datetime(2014, 1, 2),
datetime(2014, 1, 3),
],
"b": ["A", "B", "C"],
"c": [1, 2, 3],
"d": [4, 5, 6],
}
)
df["dt"] = df["dt"].apply(lambda d: Timestamp(d, tz="US/Pacific"))
df = df.set_index(["dt", "b"])
exp_idx1 = DatetimeIndex(
["2014-01-01", "2014-01-02", "2014-01-03"] * 2, tz="US/Pacific", name="dt"
)
exp_idx2 = Index(["A", "B", "C"] * 2, name="b")
exp_idx = MultiIndex.from_arrays([exp_idx1, exp_idx2])
expected = DataFrame(
{"c": [1, 2, 3] * 2, "d": [4, 5, 6] * 2}, index=exp_idx, columns=["c", "d"]
)
result = concat([df, df])
tm.assert_frame_equal(result, expected)
def test_concat_tz_not_aligned(self):
# GH#22796
ts = pd.to_datetime([1, 2]).tz_localize("UTC")
a = DataFrame({"A": ts})
b = DataFrame({"A": ts, "B": ts})
result = concat([a, b], sort=True, ignore_index=True)
expected = DataFrame(
{"A": list(ts) + list(ts), "B": [pd.NaT, pd.NaT] + list(ts)}
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"t1",
[
"2015-01-01",
pytest.param(
pd.NaT,
marks=pytest.mark.xfail(
reason="GH23037 incorrect dtype when concatenating"
),
),
],
)
def test_concat_tz_NaT(self, t1):
# GH#22796
# Concatenating tz-aware multicolumn DataFrames
ts1 = Timestamp(t1, tz="UTC")
ts2 = Timestamp("2015-01-01", tz="UTC")
ts3 = Timestamp("2015-01-01", tz="UTC")
df1 = DataFrame([[ts1, ts2]])
df2 = DataFrame([[ts3]])
result = concat([df1, df2])
expected = DataFrame([[ts1, ts2], [ts3, pd.NaT]], index=[0, 0])
tm.assert_frame_equal(result, expected)
def test_concat_tz_with_empty(self):
# GH 9188
result = concat(
[DataFrame(date_range("2000", periods=1, tz="UTC")), DataFrame()]
)
expected = DataFrame(date_range("2000", periods=1, tz="UTC"))
tm.assert_frame_equal(result, expected)
class TestPeriodConcat:
def test_concat_period_series(self):
x = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="D"))
y = Series(pd.PeriodIndex(["2015-10-01", "2016-01-01"], freq="D"))
expected = Series([x[0], x[1], y[0], y[1]], dtype="Period[D]")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
def test_concat_period_multiple_freq_series(self):
x = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="D"))
y = Series(pd.PeriodIndex(["2015-10-01", "2016-01-01"], freq="M"))
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
assert result.dtype == "object"
def test_concat_period_other_series(self):
x = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="D"))
y = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="M"))
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
assert result.dtype == "object"
# non-period
x = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="D"))
y = Series(DatetimeIndex(["2015-11-01", "2015-12-01"]))
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
assert result.dtype == "object"
x = Series(pd.PeriodIndex(["2015-11-01", "2015-12-01"], freq="D"))
y = Series(["A", "B"])
expected = Series([x[0], x[1], y[0], y[1]], dtype="object")
result = concat([x, y], ignore_index=True)
tm.assert_series_equal(result, expected)
assert result.dtype == "object"
def test_concat_timedelta64_block():
rng = to_timedelta(np.arange(10), unit="s")
df = DataFrame({"time": rng})
result = concat([df, df])
tm.assert_frame_equal(result.iloc[:10], df)
tm.assert_frame_equal(result.iloc[10:], df)
``` |
{
"source": "jordanrossetti/rtv",
"score": 2
} |
#### File: rtv/tests/test_mime_parsers.py
```python
from __future__ import unicode_literals
import re
from collections import OrderedDict
import pytest
from rtv.mime_parsers import parsers, ImgurApiMIMEParser
RegexpType = type(re.compile(''))
URLS = OrderedDict([
('simple_png', (
'http://www.example.com/i/image.png', # 1. URL
'http://www.example.com/i/image.png', # 2. Direct media link
'image/png')), # 3. MIME type of media
('simple_mpeg', (
'http://www.example.com/v/video.mpeg',
'http://www.example.com/v/video.mpeg',
'video/mpeg')),
('simple_unknown', (
'http://www.example.com/i/image',
'http://www.example.com/i/image',
None)),
('gfycat', (
'https://gfycat.com/DeliciousUnfortunateAdouri',
'https://giant.gfycat.com/DeliciousUnfortunateAdouri.webm',
'video/webm')),
('youtube', (
'https://www.youtube.com/watch?v=FjNdYp2gXRY',
'https://www.youtube.com/watch?v=FjNdYp2gXRY',
'video/x-youtube')),
('gifv', (
'http://i.imgur.com/i/image.gifv',
'http://i.imgur.com/i/image.mp4',
'video/mp4')),
('reddit_uploads', (
'https://i.reddituploads.com/a065472e47a4405da159189ee48bff46?fit=max&h=1536&w=1536&s=5639918a0c696b9bb3ec694dc3cf59ac',
'https://i.reddituploads.com/a065472e47a4405da159189ee48bff46?fit=max&h=1536&w=1536&s=5639918a0c696b9bb3ec694dc3cf59ac',
'image/jpeg')),
('imgur_1', (
'http://imgur.com/yW0kbMi',
'https://i.imgur.com/yW0kbMi.jpg',
'image/jpeg')),
('imgur_2', (
'http://imgur.com/gallery/yjP1v4B',
'https://i.imgur.com/yjP1v4B.mp4',
'video/mp4')),
('imgur_album', (
'https://imgur.com/a/pRYEi',
'https://i.imgur.com/tiUSazy.png https://i.imgur.com/phIsEAi.png https://i.imgur.com/TvT8V5m.png https://i.imgur.com/qETPFAi.png',
'image/x-imgur-album')),
('instagram_image', (
'https://www.instagram.com/p/BIxQ0vrBN2Y/?taken-by=kimchi_chic',
        re.compile(r'https://instagram(.*)\.jpg'),
'image/jpeg')),
('instagram_video', (
'https://www.instagram.com/p/BUm3cvEhFMt/',
        re.compile(r'https://instagram(.*)\.mp4'),
'video/mp4')),
('streamable_video', (
'https://streamable.com/vkc0y',
        re.compile(r'https://(.*)\.streamablevideo\.com/video/mp4/(.*)\.mp4(.*)'),
'video/mp4')),
('vidme_video', pytest.param(
'https://vid.me/rHlb',
        re.compile(r'https://(.*)\.cloudfront\.net/videos/15694926/52450725.mp4(.*)'),
'video/mp4',
marks=pytest.mark.xfail(reason="vidme API appears to have changed format"))),
('liveleak_video_1', (
'https://www.liveleak.com/view?i=08b_1499296574',
        re.compile(r'https://cdn\.liveleak\.com/(.*)\.mp4(.*)'),
'video/mp4')),
('liveleak_video_2', (
'https://www.liveleak.com/view?i=cad_1487311993',
        re.compile(r'www\.youtube\.com/embed/D4GrlOMlOBY'),
'video/x-youtube')),
('reddit_gif', (
'https://v.redd.it/wkm9zol8c6fz',
'https://v.redd.it/wkm9zol8c6fz/DASH_600_K',
'video/mp4')),
('reddit_video', (
'https://v.redd.it/zv89llsvexdz',
'https://v.redd.it/zv89llsvexdz/DASHPlaylist.mpd',
'video/x-youtube')),
('twitch_clip', (
'https://clips.twitch.tv/avaail/ExpensiveFishBCouch',
'https://clips-media-assets.twitch.tv/22467338656-index-0000000111.mp4',
'video/mp4')),
('oddshot', (
'https://oddshot.tv/s/5wN6Sy',
'https://oddshot.akamaized.net/m/render-captures/source/Unknown-YjBkNTcwZWFlZGJhMGYyNQ.mp4',
'video/mp4')),
('clippituser', (
'https://www.clippituser.tv/c/edqqld',
'https://clips.clippit.tv/edqqld/720.mp4',
'video/mp4')),
('gifs_com', (
'https://gifs.com/gif/cat-jumps-into-lake-after-guy-sneezes-0VqvRL',
'https://j.gifs.com/0VqvRL.gif',
'image/gif')),
('giphy', (
'https://giphy.com/gifs/cameron-dallas-OpesLQSjwdGj6',
'https://media.giphy.com/media/OpesLQSjwdGj6/giphy.mp4',
'video/mp4')),
('imgtc', (
'https://imgtc.com/w/Sa2whPE',
'https://imgtc.b-cdn.net/uploads/ZHI3OopOhKJ.mp4',
'video/mp4')),
('imgflip', (
'https://imgflip.com/i/1dtdbv',
'https://i.imgflip.com/1dtdbv.jpg',
'image/jpeg')),
('livememe', (
'http://www.livememe.com/v67cxy9',
'http://e.lvme.me/v67cxy9.jpg',
'image/jpeg')),
('makeameme', (
'http://makeameme.org/meme/joined-reddit-for',
'https://media.makeameme.org/created/joined-reddit-for.jpg',
'image/jpeg')),
('flickr', (
'https://www.flickr.com/photos/obamawhitehouse/8191317327/',
'https://c1.staticflickr.com/9/8065/8191317327_5180e95d98_b.jpg',
'image/jpeg')),
('worldstar_1', (
'http://www.worldstarhiphop.com/videos/video.php?v=wshh09DUN5MeDK2El4sO',
'https://hw-videos.worldstarhiphop.com/u/vid/2017/10/4OnndgEmNSME.mp4',
'video/mp4')),
('worldstar_2', (
'http://www.worldstarhiphop.com/videos/video.php?v=wshhJ6bVdAv0iMrNGFZG',
'http://www.youtube.com/embed/Bze53qwHS8o?autoplay=1',
'video/x-youtube')),
])
args, ids = URLS.values(), list(URLS)
@pytest.mark.parametrize('url,modified_url,mime_type', args, ids=ids)
def test_parser(url, modified_url, mime_type, reddit, config):
# Include the reddit fixture so the cassettes get generated
ImgurApiMIMEParser.CLIENT_ID = config['imgur_client_id']
for parser in parsers:
if parser.pattern.match(url):
parsed_url, parsed_type = parser.get_mimetype(url)
if isinstance(modified_url, RegexpType):
assert modified_url.match(parsed_url)
else:
assert modified_url == parsed_url
assert parsed_type == mime_type
break
else:
# The base parser should catch all urls before this point
assert False
def test_imgur_fallback(reddit):
"""
If something happens to the imgur API key, the code should fallback
to manually scraping the page.
"""
ImgurApiMIMEParser.CLIENT_ID = ''
for key in ['imgur_1', 'imgur_2', 'imgur_album']:
url, modified_url, mime_type = URLS[key]
assert ImgurApiMIMEParser.pattern.match(url)
parsed_url, parsed_type = ImgurApiMIMEParser.get_mimetype(url)
# Not sure why, but http://imgur.com/gallery/yjP1v4B (a .gif)
# appears to incorrectly return as a JPG type from the scraper
assert parsed_type is not None
ImgurApiMIMEParser.CLIENT_ID = 'invalid_api_key'
for key in ['imgur_1', 'imgur_2', 'imgur_album']:
url, modified_url, mime_type = URLS[key]
assert ImgurApiMIMEParser.pattern.match(url)
parsed_url, parsed_type = ImgurApiMIMEParser.get_mimetype(url)
# Not sure why, but http://imgur.com/gallery/yjP1v4B (a .gif)
# appears to incorrectly return as a JPG type from the scraper
assert parsed_type is not None
``` |
{
"source": "jordanrule/athena",
"score": 3
} |
#### File: athena/helpers/exception.py
```python
class EnvironmentException(EnvironmentError):
"""
Exception type for handling invalid environment configuration.
"""
def __init__(self, arg):
self.strerror = arg
        self.args = (arg,)  # BaseException.args is conventionally a tuple, not a set
class MonitorException(ValueError):
"""
Exception type for handling run-time monitoring.
May be further sub-typed per model for log/alerting clarity.
"""
def __init__(self, arg):
self.strerror = arg
        self.args = (arg,)  # BaseException.args is conventionally a tuple, not a set
```
#### File: athena/tests/test_exec.py
```python
from unittest import TestCase
from athena.exec import execute_message
from athena.state import State
class TestExec(TestCase):
def setUp(self):
pass
def test_execute_message(self):
"""
Test that execute_message generates expected output message
"""
func = lambda x: x.environment.Init.ENV
message = {'environment': {'ENV': 'NOSE'}}
        self.assertEqual(execute_message(func, State, message), 'NOSE')
``` |