id | text | dataset_id
---|---|---|
3357230 | <filename>wormil/migrations/0003_auto_20220513_0208.py
# Generated by Django 3.2.12 on 2022-05-13 02:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wormil', '0002_alter_specimen_description'),
]
operations = [
migrations.AddField(
model_name='fossil',
name='organism_id',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='fossil',
name='taxonomic_problem',
field=models.BooleanField(blank=True, null=True),
),
]
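    # After this migration the Fossil model gains fields equivalent to the
    # following (a sketch inferred directly from the AddField operations above):
    #
    #   organism_id = models.CharField(blank=True, max_length=255, null=True)
    #   taxonomic_problem = models.BooleanField(blank=True, null=True)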
| StarcoderdataPython |
1704728 | <reponame>ElgaSalvadore/watools
# -*- coding: utf-8 -*-
"""
Authors: <NAME> and <NAME>
UNESCO-IHE 2016
Contact: <EMAIL>
<EMAIL>
Repository: https://github.com/wateraccounting/watools
Module: Collect/CHIRPS
Description:
This module downloads daily and monthly CHIRPS 2.0 data from the
ftp://chg-ftpout.geog.ucsb.edu server. Use the CHIRPS.daily or CHIRPS.monthly
functions to download and create daily or monthly CHIRPS images in GTiff
format. CHIRPS data are available from 1981-01-01 to the present.
Examples:
from watools.Collect import CHIRPS
CHIRPS.daily(Dir='C:/Temp/', Startdate='1999-02-01', Enddate='1999-02-03',
latlim=[-10, 30], lonlim=[-20, 120])
CHIRPS.monthly(Dir='C:/Temp/', Startdate='1999-02-01', Enddate='1999-02-28',
latlim=[-10, 30], lonlim=[-20, 120])
"""
from .daily import main as daily
from .monthly import main as monthly
__all__ = ['daily', 'monthly']
__version__ = '0.1'
| StarcoderdataPython |
3338858 | <reponame>Stanford-PERTS/neptune
"""Project: A team from some organization participating in a program."""
from collections import OrderedDict
from google.appengine.api import memcache, taskqueue
from google.appengine.ext import ndb
import json
import logging
from gae_models import DatastoreModel, CachedPropertiesModel
import config
import model
import util
class Project(DatastoreModel, CachedPropertiesModel):
organization_id = ndb.StringProperty()
# These two additional properties are available to the client, added in
# to_client_dict().
# organization_name
# organization_status
program_label = ndb.StringProperty()
# The program_admin assigned to handle communication for this project.
account_manager_id = ndb.StringProperty()
# The non-admin user assigned to handle communication for this project.
liaison_id = ndb.StringProperty()
# PERTS has schools that we have a special connection with, so they are
# given priority in certain programs. This allows us to highlight these
# schools in admin interfaces.
priority = ndb.BooleanProperty(default=False)
deidentification_method = ndb.StringProperty()
# Admin notes regarding the Letter of Agreement.
loa_notes = ndb.TextProperty()
# The last time a project or survey task was updated by an org admin.
last_active = ndb.DateTimeProperty()
# After creation, this will have a tasklist object, which in turn will
# have checkpoints and tasks that need saving to their respective
# databases.
tasklist = None
@classmethod
def create(klass, **kwargs):
"""Validate program relationship, create task lists."""
project = super(klass, klass).create(**kwargs)
program_config = model.Program.get_config(project.program_label)
# Build the task list.
template = program_config['project_tasklist_template']
project.tasklist = model.Tasklist.create(
template, project, program_label=project.program_label,
organization_id=project.organization_id)
# Assign a default account manager, if available.
am_email = program_config.get('default_account_manager', '')
if model.User.email_exists(am_email):
am = model.User.get_by_auth('email', am_email)
project.account_manager_id = am.uid
return project
def after_put(self, *args, **kwargs):
if self.tasklist:
# Tasklist might not always be present; it is if created via
# create(), but not if fetched from the datastore.
self.tasklist.put()
# Reset memcache for cached properties of related objects.
        # This relationship is "down", so there may be many keys to clear;
        # don't try to refresh the cached values, just set up a cache miss
        # for their next read and they'll recover.
to_delete = []
for pc in model.ProjectCohort.get(n=float('inf'), project_id=self.uid):
# These keys are for individual project cohort entities.
to_delete.append(util.cached_properties_key(pc.uid))
# These are for caches of whole query results.
kwargs = {'program_label': pc.program_label,
'cohort_label': pc.cohort_label}
to_delete.append(util.cached_query_key('SuperDashboard', **kwargs))
taskqueue.add(
url='/task/cache_dashboard',
headers={'Content-Type': 'application/json; charset=utf-8'},
payload=json.dumps(kwargs),
countdown=config.task_consistency_countdown,
)
# Also clear the dashboard's organization query.
to_delete.append(
util.cached_query_key('SuperDashboard',
organization_id=self.organization_id)
)
memcache.delete_multi(to_delete)
def tasklist_name(self):
program_config = model.Program.get_config(self.program_label)
org = DatastoreModel.get_by_id(self.organization_id)
return 'Application for {org} to join {program}'.format(
org=org.name, program=program_config['name'])
def liaison(self):
return DatastoreModel.get_by_id(self.liaison_id)
@classmethod
def batch_cached_properties_from_db(
klass,
ids=[],
projects=[],
organizations=[],
programs=[],
):
if not ids and not projects:
return {}
if not projects:
projects = klass.get_by_id(ids)
if not organizations:
organizations = model.Organization.get_by_id(
[p.organization_id for p in projects]
)
if not programs:
labels = set(p.program_label for p in projects)
programs = [model.Program.get_config(l) for l in labels]
orgs_by_id = {o.uid: o for o in organizations}
programs_by_label = {p['label']: p for p in programs}
props_by_id = {}
for p in projects:
props_by_id[p.uid] = p.get_cached_properties_from_db(
organization=orgs_by_id[p.organization_id],
program=programs_by_label[p.program_label],
)
return props_by_id
def get_cached_properties_from_db(self, organization=None, program=None):
"""Add program- and organization-derived properties for convenience."""
if not organization:
organization = model.Organization.get_by_id(self.organization_id)
if not program:
program = model.Program.get_config(self.program_label)
return {
'program_name': program['name'],
'program_description': program['description'],
'organization_name': organization.name if organization else None,
'organization_status': organization.status if organization else None,
}
def to_client_dict(self):
"""Decorate with counts of related objects; cached."""
d = super(Project, self).to_client_dict()
d.update(self.get_cached_properties())
# Keep client dict ordered
return OrderedDict((k, d[k]) for k in sorted(d.keys()))
| StarcoderdataPython |
3259865 | from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
from typing import Callable
from functools import wraps
def plotaxes():
glClear(GL_COLOR_BUFFER_BIT)
glColor3f(1.0,1.0,1.0)
glBegin(GL_LINES)
glVertex2f(0,-100)
glVertex2f(0,100)
glEnd()
glBegin(GL_LINES)
glVertex2f(100,0)
glVertex2f(-100,0)
glEnd()
def plotgrid():
glColor3f(0.202, 0.202, 0.202)
for i in range(-100,100,10):
if i != 0:
glBegin(GL_LINES)
glVertex2f(i,100)
glVertex2f(i,-100)
glEnd()
glBegin(GL_LINES)
glVertex2f(100,i)
glVertex2f(-100,i)
glEnd()
def grid(func: Callable) -> Callable:
    """Decorator: clear the buffer and draw the axes and grid before the wrapped draw call."""
    @wraps(func)
    def inner(*args, **kwargs):
        plotaxes()
        plotgrid()
        func(*args, **kwargs)
    return inner
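
# A hypothetical usage sketch (the window/loop setup is ours, not part of the
# original file): decorate a GLUT display callback so every redraw starts with
# the axes and grid.
#
#   @grid
#   def display():
#       glFlush()
#
#   glutInit()
#   glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB)
#   glutCreateWindow(b"plot")
#   gluOrtho2D(-100, 100, -100, 100)
#   glutDisplayFunc(display)
#   glutMainLoop()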
| StarcoderdataPython |
96720 | from djitellopy import Tello
import time
tello = Tello()
tello.connect()
user_input = ' '
while user_input != 'x':
user_input = input()
if user_input == 't':
print("takeoff")
tello.takeoff()
if user_input == 'l':
print("land")
tello.land()
if user_input == 'a':
print("left")
tello.move_left(30)
if user_input == 'w':
print("forward")
tello.move_forward(30)
if user_input == 'd':
print("right")
tello.move_right(30)
if user_input == 's':
print("back")
tello.move_back(30)
if user_input == 'e':
print("up")
tello.move_up(30)
if user_input == 'q':
print("down")
tello.move_down(30)
print("exit")
| StarcoderdataPython |
72138 | import logging
class LogHelper():
handler = None
@staticmethod
def setup():
FORMAT = '[%(levelname)s] %(asctime)s - %(name)s - %(message)s'
LogHelper.handler = logging.StreamHandler()
LogHelper.handler.setLevel(logging.DEBUG)
LogHelper.handler.setFormatter(logging.Formatter(FORMAT))
LogHelper.get_logger(LogHelper.__name__).info("Log Helper set up")
@staticmethod
    def get_logger(name, level=logging.DEBUG):
        # Note: each call attaches the shared handler again, so repeated calls
        # for the same logger name will produce duplicate log lines.
        logger = logging.getLogger(name)
        logger.setLevel(level)
        logger.addHandler(LogHelper.handler)
        return logger
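
# Minimal usage sketch (ours, not part of the original module): call setup()
# once at program start, then request named loggers anywhere.
#
#   LogHelper.setup()
#   log = LogHelper.get_logger(__name__)
#   log.info("ready")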
| StarcoderdataPython |
176240 | <gh_stars>0
## This table stores the daily company earnings
## of every branch inside the company.
from tools.DataBase.Connect import conection
__maintainer__ = '<NAME>'
__email__ = '<EMAIL>'
__date__ = '1/23/2019'
from sqlalchemy import Column, BIGINT, MetaData, Table
from sqlalchemy.dialects.mysql.base import NUMERIC
from tools.DataBase.Definition.Base import Base
class ComapnyEarnings(Base):
metadata = MetaData()
__tablename__ = "companyearnings_reg"
__table_args__ = {"useexisting": True}
id = Column('id', BIGINT, primary_key=True)
code = Column("code", BIGINT, nullable=True)
company = Column("company", BIGINT, nullable=True)
total_earnings = Column("total_earnings", NUMERIC(20, 2), nullable=True)
_date = Column("_date", NUMERIC(20, 2), nullable=True)
company_earnings = Column("company_earnings", NUMERIC(20, 2), nullable=True, default=0.00)
status = Column("status", BIGINT, nullable=True)
    ComapnyEarnings_tbl = Table(__tablename__, metadata, id, code, company, total_earnings,
                                _date, company_earnings, status)
def __repr__(self):
return "<ComapnyEarnings(id='%s',code='%s',total_earnings='%s',_date='%s'," \
"comapany_earnings='%s',status='%s',company='%s')>" % \
(self.id,self.code,self.total_earnings, self._date,
self.company_earnings, self.status, self.company)
engine = conection().conORM()
metadata.create_all(engine)
engine.connect().close()
    def __Publish__(self):
        """Serialize this row to a plain dict, casting each value by its column type."""
        data = {}
        for column in self.__table__.columns.keys():
            value = self.__dict__[self.__table__.columns[column].name]
            # Compare against the string form of the SQLAlchemy type; the type
            # object itself never compares equal to a plain string.
            type_name = str(self.__table__.columns[column].type).upper()
            if type_name.startswith(("BIGINT", "INTEGER")):
                data[self.__table__.columns[column].name] = int(value)
            elif type_name.startswith(("NUMERIC", "DECIMAL")):
                data[self.__table__.columns[column].name] = float(value)
            elif type_name.startswith(("TIME", "DATETIME")):
                data[self.__table__.columns[column].name] = value.strftime('%H:%M:%S')
            else:
                data[self.__table__.columns[column].name] = str(value)
        return data
if __name__ == '__main__':
cashboxOpen()
| StarcoderdataPython |
1733625 | # -----------------------------------------------------------------------------
# Common config file for bridge.py, test_bridge.py, etc.
# -----------------------------------------------------------------------------
# For challenges 1 & 2:
Z_SPIN_PERIOD_SECS = 75
Z_SPIN_COMMS_WINDOW_SECS = 15
DO_ORBITAL_BLACKOUT = True # set True for release build!!!1
ORBIT_PERIOD_SECS = 60 * 15
ORBIT_COMMS_WINDOW_SECS = 60 * 7.5
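
# A minimal helper sketch (ours, not part of the original config): assuming
# each comms window opens at phase zero of its cycle, a client can check
# whether comms are currently open for a given period/window pair.
def in_comms_window(elapsed_secs, period_secs, window_secs):
    # True while the current point in the cycle falls inside the open window.
    return (elapsed_secs % period_secs) < window_secs
# e.g. in_comms_window(t, ORBIT_PERIOD_SECS, ORBIT_COMMS_WINDOW_SECS)
# or   in_comms_window(t, Z_SPIN_PERIOD_SECS, Z_SPIN_COMMS_WINDOW_SECS)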
# client client bridge bridge client
# IP rx IP rx name
# --------- ---- ---------- ---- --------
TEAMS = [
("127.0.0.1", 5000, "127.0.0.1", 5020 ), # team_1
("127.0.0.1", 5001, "127.0.0.1", 5021 ), # team_2
("127.0.0.1", 5002, "127.0.0.1", 5022 ), # team_3
("127.0.0.1", 5003, "127.0.0.1", 5023 ), # team_4
("127.0.0.1", 5004, "127.0.0.1", 5024 ), # team_5
("127.0.0.1", 5005, "127.0.0.1", 5025 ), # team_6
("127.0.0.1", 5006, "127.0.0.1", 5026 ), # team_7
("127.0.0.1", 5007, "127.0.0.1", 5027 ), # team_8
("127.0.0.1", 5008, "127.0.0.1", 5028 ), # team_9
("127.0.0.1", 5009, "127.0.0.1", 5029 ), # team_10
]
WATCHERS = [
("127.0.0.1", 5010, "127.0.0.1", 5030 ), # watcher_1 of team_1
("127.0.0.1", 5011, "127.0.0.1", 5031 ), # watcher_2 of team_2
("127.0.0.1", 5012, "127.0.0.1", 5032 ), # watcher_3 of team_3
("127.0.0.1", 5013, "127.0.0.1", 5033 ), # watcher_4 of team_4
("127.0.0.1", 5014, "127.0.0.1", 5034 ), # watcher_5 of team_5
("127.0.0.1", 5015, "127.0.0.1", 5035 ), # watcher_6 of team_6
("127.0.0.1", 5016, "127.0.0.1", 5036 ), # watcher_7 of team_7
("127.0.0.1", 5017, "127.0.0.1", 5037 ), # watcher_8 of team_8
("127.0.0.1", 5018, "127.0.0.1", 5038 ), # watcher_9 of team_9
("127.0.0.1", 5019, "127.0.0.1", 5039 ), # watcher_10 of team_10
]
BOSSES = [
("127.0.0.1", 5040, "127.0.0.1", 5050 ), # boss_1 of team_1
("127.0.0.1", 5041, "127.0.0.1", 5051 ), # boss_2 of team_2
("127.0.0.1", 5042, "127.0.0.1", 5052 ), # boss_3 of team_3
("127.0.0.1", 5043, "127.0.0.1", 5053 ), # boss_4 of team_4
("127.0.0.1", 5044, "127.0.0.1", 5054 ), # boss_5 of team_5
("127.0.0.1", 5045, "127.0.0.1", 5055 ), # boss_6 of team_6
("127.0.0.1", 5046, "127.0.0.1", 5056 ), # boss_7 of team_7
("127.0.0.1", 5047, "127.0.0.1", 5057 ), # boss_8 of team_8
("127.0.0.1", 5048, "127.0.0.1", 5058 ), # boss_9 of team_9
("127.0.0.1", 5049, "127.0.0.1", 5059 ), # boss_10 of team_10
]
# -----------------------------------------------------------------------------
# Types of Bridge Clients: (Enforced based on assigned IP)
# -----------------------------------------------------------------------------
#
# Teams:
# -----
# - Can only send/recv messages of type L3 and RSC, thru/to their own radio,
# with their own flatsat.
# - The src IP of a message is checked against the assigned IP for that team.
#
# Bosses:
# ------
# - One per team.
# - Can send any message type besides RSC. (OM, L3, RA, BR)
# - Can seize their Team's radio:
# . While siezed, normal team-radio comms semi-gracefully paused.
# . When Boss is done, it tells the bridge to release the radio.
#
# Watchers:
# --------
# - One per team.
# - Receive the same L3 messages as their team receives
# - Can only send/recv messages of type L3 and RSC, thru/to their own radio,
# with their own flatsat.
# - Not subject to boss radio seizure or comms blackouts
#
# -----------------------------------------------------------------------------
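
# A hypothetical helper (ours, not in the original file): since client type
# is enforced based on assigned IP, the bridge could map a packet's source
# address back to a role by scanning the tables above. This assumes clients
# send from their listed (IP, rx-port) pair, which may not hold in practice.
def classify_client(src_ip, src_port):
    for role, table in (("team", TEAMS), ("watcher", WATCHERS), ("boss", BOSSES)):
        for idx, (client_ip, client_rx, _bridge_ip, _bridge_rx) in enumerate(table):
            if (src_ip, src_port) == (client_ip, client_rx):
                return role, idx + 1  # e.g. ("team", 3) for team_3
    return None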
| StarcoderdataPython |
1672722 | # This script tests the Terminal class on computers which are running
# a Linux operating system
from terminal import Terminal
testTerminal = Terminal("test-terminal", "/bin/bash")
terminalOutput = testTerminal.executeCommand("ls")
print(
"The command '" + terminalOutput['executedCommand']
+ "' was executed in terminal '" + terminalOutput['terminalName']
+ "'.\n\n" + "The terminal returned the output:\n'"
+ terminalOutput['outputString'] + "'.\n\n"
+ "If there was an error message, it reads: '"
+ terminalOutput['errorString'] + "'.\n"
)
| StarcoderdataPython |
1696366 | <filename>sea5kg_cpplint/__pkginfo__.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020 <NAME> <<EMAIL>>
# pylint: disable=redefined-builtin,invalid-name
"""sea5kg_cpplint packaging information"""
# For an official release, use dev_version = None
numversion = (0, 0, 2)
version = ".".join(str(num) for num in numversion)
name = "sea5kg_cpplint"
dependency_links = []
license = "MIT"
description = "c++ code static checker"
web = "https://github.com/sea-kg/sea5kg_cpplint"
mailinglist = "mailto:<EMAIL>"
author = "<NAME>"
author_email = "<EMAIL>"
classifiers = [
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: PyPy"
]
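
# Sketch of how a setup.py might consume this metadata (our assumption; the
# repository's actual setup.py may differ):
#
#   from setuptools import setup, find_packages
#   import __pkginfo__ as pkg
#
#   setup(name=pkg.name, version=pkg.version, description=pkg.description,
#         author=pkg.author, author_email=pkg.author_email, url=pkg.web,
#         license=pkg.license, classifiers=pkg.classifiers,
#         packages=find_packages())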
| StarcoderdataPython |
16616 | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import pytest
import threading
import time
from azure.iot.device.common import handle_exceptions
from azure.iot.device.iothub import client_event
from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException
from azure.iot.device.iothub.sync_handler_manager import MESSAGE, METHOD, TWIN_DP_PATCH
from azure.iot.device.iothub.inbox_manager import InboxManager
from azure.iot.device.iothub.sync_inbox import SyncClientInbox
logging.basicConfig(level=logging.DEBUG)
# NOTE ON TEST IMPLEMENTATION:
# Despite having significant shared implementation between the sync and async handler managers,
# there are not shared tests. This is because while both have the same set of requirements and
# APIs, the internal implementation is different to an extent that it simply isn't really possible
# to test them to an appropriate degree of correctness with a shared set of tests.
# This means we must be very careful to always change both test modules when a change is made to
# shared behavior, or when shared features are added.
# NOTE ON TIMING/DELAY
# Several tests in this module have sleeps/delays in their implementation due to needing to wait
# for things to happen in other threads.
all_internal_receiver_handlers = [MESSAGE, METHOD, TWIN_DP_PATCH]
all_internal_client_event_handlers = [
"_on_connection_state_change",
"_on_new_sastoken_required",
"_on_background_exception",
]
all_internal_handlers = all_internal_receiver_handlers + all_internal_client_event_handlers
all_receiver_handlers = [s.lstrip("_") for s in all_internal_receiver_handlers]
all_client_event_handlers = [s.lstrip("_") for s in all_internal_client_event_handlers]
all_handlers = all_receiver_handlers + all_client_event_handlers
class ThreadsafeMock(object):
"""This class provides (some) Mock functionality in a threadsafe manner, specifically, it
ensures that the 'call_count' attribute will be accurate when the mock is called from another
thread.
It does not cover ALL mock functionality, but more features could be added to it as necessary
"""
def __init__(self):
self.call_count = 0
self.lock = threading.Lock()
def __call__(self, *args, **kwargs):
with self.lock:
self.call_count += 1
@pytest.fixture
def inbox_manager(mocker):
return InboxManager(inbox_type=SyncClientInbox)
@pytest.fixture
def handler():
def some_handler_fn(arg):
pass
return some_handler_fn
@pytest.mark.describe("SyncHandlerManager - Instantiation")
class TestInstantiation(object):
@pytest.mark.it("Initializes handler properties to None")
@pytest.mark.parametrize("handler_name", all_handlers)
def test_handlers(self, inbox_manager, handler_name):
hm = SyncHandlerManager(inbox_manager)
assert getattr(hm, handler_name) is None
@pytest.mark.it("Initializes receiver handler runner thread references to None")
@pytest.mark.parametrize(
"handler_name", all_internal_receiver_handlers, ids=all_receiver_handlers
)
def test_receiver_handler_runners(self, inbox_manager, handler_name):
hm = SyncHandlerManager(inbox_manager)
assert hm._receiver_handler_runners[handler_name] is None
@pytest.mark.it("Initializes client event handler runner thread reference to None")
def test_client_event_handler_runner(self, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
assert hm._client_event_runner is None
@pytest.mark.describe("SyncHandlerManager - .stop()")
class TestStop(object):
@pytest.fixture(
params=[
"No handlers running",
"Some receiver handlers running",
"Some client event handlers running",
"Some receiver and some client event handlers running",
"All handlers running",
]
)
def handler_manager(self, request, inbox_manager, handler):
hm = SyncHandlerManager(inbox_manager)
if request.param == "Some receiver handlers running":
# Set an arbitrary receiver handler
hm.on_message_received = handler
elif request.param == "Some client event handlers running":
# Set an arbitrary client event handler
hm.on_connection_state_change = handler
elif request.param == "Some receiver and some client event handlers running":
# Set an arbitrary receiver and client event handler
hm.on_message_received = handler
hm.on_connection_state_change = handler
elif request.param == "All handlers running":
# NOTE: this sets all handlers to be the same fn, but this doesn't really
# make a difference in this context
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
yield hm
hm.stop()
@pytest.mark.it("Stops all currently running handlers")
def test_stop_all(self, handler_manager):
handler_manager.stop()
for handler_name in all_internal_receiver_handlers:
assert handler_manager._receiver_handler_runners[handler_name] is None
assert handler_manager._client_event_runner is None
@pytest.mark.it(
"Stops only the currently running receiver handlers if the 'receiver_handlers_only' parameter is True"
)
def test_stop_only_receiver_handlers(self, handler_manager):
if handler_manager._client_event_runner is not None:
client_event_handlers_running = True
else:
client_event_handlers_running = False
handler_manager.stop(receiver_handlers_only=True)
# All receiver handlers have stopped
for handler_name in all_internal_receiver_handlers:
assert handler_manager._receiver_handler_runners[handler_name] is None
# If the client event handlers were running, they are STILL running
if client_event_handlers_running:
assert handler_manager._client_event_runner is not None
@pytest.mark.it("Completes all pending handler invocations before stopping the runner(s)")
def test_completes_pending(self, mocker, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
# NOTE: We use two handlers arbitrarily here to show this happens for all handler runners
mock_msg_handler = ThreadsafeMock()
mock_mth_handler = ThreadsafeMock()
msg_inbox = inbox_manager.get_unified_message_inbox()
mth_inbox = inbox_manager.get_method_request_inbox()
for _ in range(200): # sufficiently many items so can't complete quickly
msg_inbox.put(mocker.MagicMock())
mth_inbox.put(mocker.MagicMock())
hm.on_message_received = mock_msg_handler
hm.on_method_request_received = mock_mth_handler
assert mock_msg_handler.call_count < 200
assert mock_mth_handler.call_count < 200
hm.stop()
time.sleep(0.1)
assert mock_msg_handler.call_count == 200
assert mock_mth_handler.call_count == 200
assert msg_inbox.empty()
assert mth_inbox.empty()
@pytest.mark.describe("SyncHandlerManager - .ensure_running()")
class TestEnsureRunning(object):
@pytest.fixture(
params=[
"All handlers set, all stopped",
"All handlers set, receivers stopped, client events running",
"All handlers set, all running",
"Some receiver and client event handlers set, all stopped",
"Some receiver and client event handlers set, receivers stopped, client events running",
"Some receiver and client event handlers set, all running",
"Some receiver handlers set, all stopped",
"Some receiver handlers set, all running",
"Some client event handlers set, all stopped",
"Some client event handlers set, all running",
"No handlers set",
]
)
def handler_manager(self, request, inbox_manager, handler):
# NOTE: this sets all handlers to be the same fn, but this doesn't really
# make a difference in this context
hm = SyncHandlerManager(inbox_manager)
if request.param == "All handlers set, all stopped":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
hm.stop()
elif request.param == "All handlers set, receivers stopped, client events running":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
hm.stop(receiver_handlers_only=True)
elif request.param == "All handlers set, all running":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
elif request.param == "Some receiver and client event handlers set, all stopped":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop()
elif (
request.param
== "Some receiver and client event handlers set, receivers stopped, client events running"
):
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop(receiver_handlers_only=True)
elif request.param == "Some receiver and client event handlers set, all running":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
elif request.param == "Some receiver handlers set, all stopped":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.stop()
elif request.param == "Some receiver handlers set, all running":
hm.on_message_received = handler
hm.on_method_request_received = handler
elif request.param == "Some client event handlers set, all stopped":
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop()
elif request.param == "Some client event handlers set, all running":
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
yield hm
hm.stop()
@pytest.mark.it(
"Starts handler runners for any handler that is set, but does not have a handler runner running"
)
def test_starts_runners_if_necessary(self, handler_manager):
handler_manager.ensure_running()
# Check receiver handlers
for handler_name in all_receiver_handlers:
if getattr(handler_manager, handler_name) is not None:
# NOTE: this assumes the convention of internal names being the name of a handler
# prefixed with a "_". If this ever changes, you must change this test.
assert handler_manager._receiver_handler_runners["_" + handler_name] is not None
# Check client event handlers
for handler_name in all_client_event_handlers:
if getattr(handler_manager, handler_name) is not None:
assert handler_manager._client_event_runner is not None
# don't need to check the rest of the handlers since they all share a runner
break
# ##############
# # PROPERTIES #
# ##############
class SharedHandlerPropertyTests(object):
@pytest.fixture
def handler_manager(self, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
yield hm
hm.stop()
# NOTE: We use setattr() and getattr() in these tests so they're generic to all properties.
# This is functionally identical to doing explicit assignment to a property, it just
# doesn't read quite as well.
@pytest.mark.it("Can be both read and written to")
def test_read_write(self, handler_name, handler_manager, handler):
assert getattr(handler_manager, handler_name) is None
setattr(handler_manager, handler_name, handler)
assert getattr(handler_manager, handler_name) is handler
setattr(handler_manager, handler_name, None)
assert getattr(handler_manager, handler_name) is None
class SharedReceiverHandlerPropertyTests(SharedHandlerPropertyTests):
# NOTE: If there is ever any deviation in the convention of what the internal names of handlers
# are other than just a prefixed "_", we'll have to move this fixture to the child classes so
# it can be unique to each handler
@pytest.fixture
def handler_name_internal(self, handler_name):
return "_" + handler_name
@pytest.mark.it(
"Creates and starts a daemon Thread for the correpsonding handler runner when value is set to a function"
)
def test_thread_created(self, handler_name, handler_name_internal, handler_manager, handler):
assert handler_manager._receiver_handler_runners[handler_name_internal] is None
setattr(handler_manager, handler_name, handler)
assert isinstance(
handler_manager._receiver_handler_runners[handler_name_internal], threading.Thread
)
assert handler_manager._receiver_handler_runners[handler_name_internal].daemon is True
@pytest.mark.it(
"Stops the corresponding handler runner and completes any existing daemon Thread for it when the value is set back to None"
)
def test_thread_removed(self, handler_name, handler_name_internal, handler_manager, handler):
# Set handler
setattr(handler_manager, handler_name, handler)
# Thread has been created and is alive
t = handler_manager._receiver_handler_runners[handler_name_internal]
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set the handler back to None
setattr(handler_manager, handler_name, None)
# Thread has finished and the manager no longer has a reference to it
assert not t.is_alive()
assert handler_manager._receiver_handler_runners[handler_name_internal] is None
@pytest.mark.it(
"Does not delete, remove, or replace the Thread for the corresponding handler runner, when updated with a new function value"
)
def test_thread_unchanged_by_handler_update(
self, handler_name, handler_name_internal, handler_manager, handler
):
# Set the handler
setattr(handler_manager, handler_name, handler)
        # Thread has been created and is alive
t = handler_manager._receiver_handler_runners[handler_name_internal]
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set new handler
def new_handler(arg):
pass
setattr(handler_manager, handler_name, new_handler)
assert handler_manager._receiver_handler_runners[handler_name_internal] is t
assert t.is_alive()
@pytest.mark.it(
"Is invoked by the runner when the Inbox corresponding to the handler receives an object, passing that object to the handler"
)
def test_handler_invoked(self, mocker, handler_name, handler_manager, inbox):
# Set the handler
mock_handler = mocker.MagicMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add an item to corresponding inbox, triggering the handler
mock_obj = mocker.MagicMock()
inbox.put(mock_obj)
time.sleep(0.1)
# Handler has been called with the item from the inbox
assert mock_handler.call_count == 1
assert mock_handler.call_args == mocker.call(mock_obj)
@pytest.mark.it(
"Is invoked by the runner every time the Inbox corresponding to the handler receives an object"
)
def test_handler_invoked_multiple(self, mocker, handler_name, handler_manager, inbox):
# Set the handler
mock_handler = ThreadsafeMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add 5 items to the corresponding inbox, triggering the handler
for _ in range(5):
inbox.put(mocker.MagicMock())
time.sleep(0.2)
# Handler has been called 5 times
assert mock_handler.call_count == 5
@pytest.mark.it(
"Is invoked for every item already in the corresponding Inbox at the moment of handler removal"
)
def test_handler_resolve_pending_items_before_handler_removal(
self, mocker, handler_name, handler_manager, inbox
):
# Use a threadsafe mock to ensure accurate counts
mock_handler = ThreadsafeMock()
assert inbox.empty()
# Queue up a bunch of items in the inbox
for _ in range(100):
inbox.put(mocker.MagicMock())
# The handler has not yet been called
assert mock_handler.call_count == 0
# Items are still in the inbox
assert not inbox.empty()
# Set the handler
setattr(handler_manager, handler_name, mock_handler)
# The handler has not yet been called for everything that was in the inbox
# NOTE: I'd really like to show that the handler call count is also > 0 here, but
# it's pretty difficult to make the timing work
assert mock_handler.call_count < 100
# Immediately remove the handler
setattr(handler_manager, handler_name, None)
# Wait to give a chance for the handler runner to finish calling everything
time.sleep(0.2)
# Despite removal, handler has been called for everything that was in the inbox at the
# time of the removal
assert mock_handler.call_count == 100
assert inbox.empty()
# Add some more items
for _ in range(100):
inbox.put(mocker.MagicMock())
# Wait to give a chance for the handler to be called (it won't)
time.sleep(0.2)
# Despite more items added to inbox, no further handler calls have been made beyond the
# initial calls that were made when the original items were added
assert mock_handler.call_count == 100
@pytest.mark.it(
"Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"
)
def test_exception_in_handler(
self, mocker, handler_name, handler_manager, inbox, arbitrary_exception
):
background_exc_spy = mocker.spy(handle_exceptions, "handle_background_exception")
# Handler will raise exception when called
mock_handler = mocker.MagicMock()
mock_handler.side_effect = arbitrary_exception
# Set handler
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Background exception handler has not been called
assert background_exc_spy.call_count == 0
# Add an item to corresponding inbox, triggering the handler
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# Handler has now been called
assert mock_handler.call_count == 1
# Background exception handler was called
assert background_exc_spy.call_count == 1
e = background_exc_spy.call_args[0][0]
assert isinstance(e, HandlerManagerException)
assert e.__cause__ is arbitrary_exception
@pytest.mark.it(
"Can be updated with a new value that the corresponding handler runner will immediately begin using for handler invocations instead"
)
def test_handler_update_handler(self, mocker, handler_name, handler_manager, inbox):
def handler(arg):
# Invoking handler replaces the set handler with a mock
setattr(handler_manager, handler_name, mocker.MagicMock())
setattr(handler_manager, handler_name, handler)
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# Handler has been replaced with a mock, but the mock has not been invoked
assert getattr(handler_manager, handler_name) is not handler
assert getattr(handler_manager, handler_name).call_count == 0
# Add a new item to the inbox
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# The mock was now called
assert getattr(handler_manager, handler_name).call_count == 1
class SharedClientEventHandlerPropertyTests(SharedHandlerPropertyTests):
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_client_event_inbox()
@pytest.mark.it(
"Creates and starts a daemon Thread for the Client Event handler runner when value is set to a function if the Client Event handler runner does not already exist"
)
def test_no_client_event_runner(self, handler_name, handler_manager, handler):
assert handler_manager._client_event_runner is None
setattr(handler_manager, handler_name, handler)
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.daemon is True
@pytest.mark.it(
"Does not modify the Client Event handler runner thread when value is set to a function if the Client Event handler runner already exists"
)
def test_client_event_runner_already_exists(self, handler_name, handler_manager, handler):
# Add a fake client event runner thread
fake_runner_thread = threading.Thread()
fake_runner_thread.daemon = True
fake_runner_thread.start()
handler_manager._client_event_runner = fake_runner_thread
# Set handler
setattr(handler_manager, handler_name, handler)
# Fake thread was not changed
assert handler_manager._client_event_runner is fake_runner_thread
@pytest.mark.it(
"Does not delete, remove, or replace the Thread for the Client Event handler runner when value is set back to None"
)
def test_handler_removed(self, handler_name, handler_manager, handler):
# Set handler
setattr(handler_manager, handler_name, handler)
# Thread has been created and is alive
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set the handler back to None
setattr(handler_manager, handler_name, None)
# Thread is still maintained on the manager and alive
assert handler_manager._client_event_runner is t
assert t.is_alive()
@pytest.mark.it(
"Does not delete, remove, or replace the Thread for the Client Event handler runner when updated with a new function value"
)
def test_handler_update(self, handler_name, handler_manager, handler):
# Set handler
setattr(handler_manager, handler_name, handler)
# Thread has been created and is alive
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set new handler
def new_handler(arg):
pass
setattr(handler_manager, handler_name, new_handler)
# Thread is still maintained on the manager and alive
assert handler_manager._client_event_runner is t
assert t.is_alive()
@pytest.mark.it(
"Is invoked by the runner only when the Client Event Inbox receives a matching Client Event, passing any arguments to the handler"
)
def test_handler_invoked(self, mocker, handler_name, handler_manager, inbox, event):
# Set the handler
mock_handler = mocker.MagicMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add the event to the client event inbox
inbox.put(event)
time.sleep(0.1)
# Handler has been called with the arguments from the event
assert mock_handler.call_count == 1
assert mock_handler.call_args == mocker.call(*event.args_for_user)
# Add non-matching event to the client event inbox
non_matching_event = client_event.ClientEvent("NON_MATCHING_EVENT")
inbox.put(non_matching_event)
time.sleep(0.1)
# Handler has not been called again
assert mock_handler.call_count == 1
@pytest.mark.it(
"Is invoked by the runner every time the Client Event Inbox receives a matching Client Event"
)
def test_handler_invoked_multiple(self, handler_name, handler_manager, inbox, event):
# Set the handler
mock_handler = ThreadsafeMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add 5 matching events to the corresponding inbox, triggering the handler
for _ in range(5):
inbox.put(event)
time.sleep(0.2)
# Handler has been called 5 times
assert mock_handler.call_count == 5
@pytest.mark.it(
"Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"
)
def test_exception_in_handler(
self, mocker, handler_name, handler_manager, inbox, event, arbitrary_exception
):
background_exc_spy = mocker.spy(handle_exceptions, "handle_background_exception")
# Handler will raise exception when called
mock_handler = mocker.MagicMock()
mock_handler.side_effect = arbitrary_exception
# Set handler
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Background exception handler has not been called
assert background_exc_spy.call_count == 0
# Add the event to the client event inbox, triggering the handler
inbox.put(event)
time.sleep(0.1)
# Handler has now been called
assert mock_handler.call_count == 1
# Background exception handler was called
assert background_exc_spy.call_count == 1
e = background_exc_spy.call_args[0][0]
assert isinstance(e, HandlerManagerException)
assert e.__cause__ is arbitrary_exception
@pytest.mark.it(
"Can be updated with a new value that the Client Event handler runner will immediately begin using for handler invocations instead"
)
def test_updated_handler(self, mocker, handler_name, handler_manager, inbox, event):
def handler(*args):
# Invoking handler replaces the set handler with a mock
setattr(handler_manager, handler_name, mocker.MagicMock())
setattr(handler_manager, handler_name, handler)
inbox.put(event)
time.sleep(0.1)
# Handler has been replaced with a mock, but the mock has not been invoked
assert getattr(handler_manager, handler_name) is not handler
assert getattr(handler_manager, handler_name).call_count == 0
# Add a new event to the inbox
inbox.put(event)
time.sleep(0.1)
# The mock was now called
assert getattr(handler_manager, handler_name).call_count == 1
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_message_received")
class TestSyncHandlerManagerPropertyOnMessageReceived(SharedReceiverHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_message_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_unified_message_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_method_request_received")
class TestSyncHandlerManagerPropertyOnMethodRequestReceived(SharedReceiverHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_method_request_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_method_request_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_twin_desired_properties_patch_received")
class TestSyncHandlerManagerPropertyOnTwinDesiredPropertiesPatchReceived(
SharedReceiverHandlerPropertyTests
):
@pytest.fixture
def handler_name(self):
return "on_twin_desired_properties_patch_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_twin_patch_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_connection_state_change")
class TestSyncHandlerManagerPropertyOnConnectionStateChange(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_connection_state_change"
@pytest.fixture
def event(self):
return client_event.ClientEvent(client_event.CONNECTION_STATE_CHANGE)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_new_sastoken_required")
class TestSyncHandlerManagerPropertyOnNewSastokenRequired(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_new_sastoken_required"
@pytest.fixture
def event(self):
return client_event.ClientEvent(client_event.NEW_SASTOKEN_REQUIRED)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_background_exception")
class TestSyncHandlerManagerPropertyOnBackgroundException(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_background_exception"
@pytest.fixture
def event(self, arbitrary_exception):
return client_event.ClientEvent(client_event.BACKGROUND_EXCEPTION, arbitrary_exception)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .handling_client_events")
class TestSyncHandlerManagerPropertyHandlingClientEvents(object):
@pytest.fixture
def handler_manager(self, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
yield hm
hm.stop()
@pytest.mark.it("Is True if the Client Event Handler Runner is running")
def test_client_event_runner_running(self, handler_manager):
# Add a fake client event runner thread
fake_runner_thread = threading.Thread()
fake_runner_thread.daemon = True
fake_runner_thread.start()
handler_manager._client_event_runner = fake_runner_thread
assert handler_manager.handling_client_events is True
@pytest.mark.it("Is False if the Client Event Handler Runner is not running")
def test_client_event_runner_not_running(self, handler_manager):
assert handler_manager._client_event_runner is None
assert handler_manager.handling_client_events is False
| StarcoderdataPython |
28856 | <filename>api/model.py<gh_stars>0
from torchvision import models
import json
import numpy as np
import torch
from collections import OrderedDict
from operator import itemgetter
import os
def return_top_5(processed_image):
# inception = models.inception_v3(pretrained=True)
inception = models.inception_v3()
inception.load_state_dict(torch.load("data/inception_v3_google-1a9a5a14.pth"))
inception.eval()
result = inception(processed_image)
#load imagenet classes
class_idx = json.load(open('data/imagenet_class_index.json'))
idx2label = [class_idx[str(k)][1] for k in range(len(class_idx))]
result_idx = result.sort()[1][0][-5:]
#exponentiate and get probabilities
exps = np.exp(result.detach().numpy()[0])
exps_sum = np.sum(exps)
softmax = [np.round((j / exps_sum)*100, 2) for j in exps]
out = []
for idx in result_idx:
out.append((idx2label[idx], softmax[idx]))
# out = {k: v for k, v in dict(out).items()}
result = OrderedDict(sorted(dict(out).items(), key=itemgetter(1), reverse=True))
    return result
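
# A hypothetical usage sketch (the preprocessing pipeline is our assumption,
# not part of this module): Inception v3 expects a normalized 299x299 batch.
#
#   from PIL import Image
#   from torchvision import transforms
#
#   preprocess = transforms.Compose([
#       transforms.Resize(299),
#       transforms.CenterCrop(299),
#       transforms.ToTensor(),
#       transforms.Normalize(mean=[0.485, 0.456, 0.406],
#                            std=[0.229, 0.224, 0.225]),
#   ])
#   batch = preprocess(Image.open("cat.jpg")).unsqueeze(0)
#   print(return_top_5(batch))
| StarcoderdataPython |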
3331358 | # Generated by Django 2.0.1 on 2018-01-09 18:57
from django.db import migrations, models
import django.db.models.deletion
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0015_auto_20180109_0245'),
]
operations = [
migrations.CreateModel(
name='Dispute',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('stripe_id', djstripe.fields.StripeIdField(max_length=255, unique=True)),
('livemode', djstripe.fields.StripeNullBooleanField(default=None, help_text='Null here indicates that the livemode status is unknown or was previously unrecorded. Otherwise, this field indicates whether this record comes from Stripe test mode or live mode operation.')),
('stripe_timestamp', djstripe.fields.StripeDateTimeField(help_text='The datetime this object was created in stripe.', null=True)),
('metadata', djstripe.fields.StripeJSONField(blank=True, help_text='A set of key/value pairs that you can attach to an object. It can be useful for storing additional information about an object in a structured format.', null=True)),
('description', djstripe.fields.StripeTextField(blank=True, help_text='A description of this object.', null=True)),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('amount', djstripe.fields.StripeIntegerField(help_text='Disputed amount. Usually the amount of the charge, but can differ (usually because of currency fluctuation or because only part of the order is disputed).')),
('currency', djstripe.fields.StripeCharField(help_text='Three-letter ISO currency code.', max_length=3)),
('evidence', djstripe.fields.StripeJSONField(help_text='Evidence provided to respond to a dispute.')),
('evidence_details', djstripe.fields.StripeJSONField(help_text='Information about the evidence submission.')),
('is_charge_refundable', djstripe.fields.StripeBooleanField(help_text='If true, it is still possible to refund the disputed payment. Once the payment has been fully refunded, no further funds will be withdrawn from your Stripe account as a result of this dispute.')),
('reason', djstripe.fields.StripeCharField(choices=[('bank_cannot_process', 'Bank cannot process'), ('credit_not_processed', 'Credit not processed'), ('customer_initiated', 'Customer-initiated'), ('debit_not_authorized', 'Debit not authorized'), ('duplicate', 'Duplicate'), ('fraudulent', 'Fraudulent'), ('general', 'General'), ('incorrect_account_details', 'Incorrect account details'), ('insufficient_funds', 'Insufficient funds'), ('product_not_received', 'Product not received'), ('product_unacceptable', 'Product unacceptable'), ('subscription_canceled', 'Subscription canceled'), ('unrecognized', 'Unrecognized')], max_length=50)),
('status', djstripe.fields.StripeCharField(choices=[('charge_refunded', 'Charge refunded'), ('lost', 'Lost'), ('needs_response', 'Needs response'), ('under_review', 'Under review'), ('warning_closed', 'Warning closed'), ('warning_needs_response', 'Warning needs response'), ('warning_under_review', 'Warning under review'), ('won', 'Won')], max_length=50)),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='charge',
name='disputed',
),
migrations.AddField(
model_name='charge',
name='dispute',
field=models.ForeignKey(help_text='Details about the dispute if the charge has been disputed.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='charges', to='djstripe.Dispute'),
),
]
| StarcoderdataPython |
4820005 | <gh_stars>1-10
import re
import copy
from urlparse import urlparse, ParseResult
from util import *
import datetime
class Value(object):
@property
def is_graph(self):
return False
@property
def is_literal(self):
return False
@property
def is_node(self):
return False
@property
def is_resource(self):
return False
@property
def is_statement(self):
return False
@property
def is_iri(self):
return self.uri
@property
def is_variable(self):
return False
def to_rdf(self):
return self
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, str(self))
class Term(Value):
def __cmp__(self, other):
return cmp(str(self), str(other))
def evaluate(self, bindings):
return self
@property
def is_constant(self):
return not self.is_variable
class Resource(Term):
@property
def is_resource(self):
return True
@classmethod
def new(self, *args):
        args = list(args)  # *args arrives as a tuple, which has no pop()
        arg = args.pop(0)
if type(arg) == str:
match = re.match("^_:(.*)$", arg)
if match:
return Node(match.group(1), *args)
else:
return Node.intern(arg, *args)
else:
return URI(arg, *args)
class URI(Resource):
CACHE_SIZE = -1
_cache = None
@classmethod
def intern(self, s):
if not URI._cache:
URI._cache = Cache(URI.CACHE_SIZE)
uri = URI._cache.get(str(s))
if not uri:
uri = URI(s)
URI._cache.set(s, uri)
return uri
@classmethod
def parse(self, s):
return URI(s)
def __init__(self, uri_or_options):
#TODO: uri_or_options could be a dict e.g {'scheme' : 'http', 'host' : 'rdf.rubyforge.org', 'path' : '/'}
if type(uri_or_options) == ParseResult:
self.uri = uri_or_options
else:
self.uri = urlparse(str(uri_or_options))
self._qname = ""
    def starts_with(self, string):
return str(self).find(string) == 0
def ends_with(self, s):
return str(self)[-len(s):] == s
@property
def is_anonymous(self):
return False
@property
def is_uri(self):
return True
@property
def is_urn(self):
return self.starts_with('urn:')
@property
def is_url(self):
return not self.is_urn
@property
def length(self):
        return len(str(self))
@property
def size(self):
return self.length
def validate(self):
return self
def canonicalize(self):
return self
    def join(self, *uris):
        # ParseResult slices are tuples, so build the joined URI from strings.
        result = str(self)
        for uri in uris:
            result = result + str(uri)
        return URI(result)
    def __div__(self, fragment):  # the '/' operator under Python 2
if 'to_uri' in dir(fragment):
fragment = fragment.to_uri()
else:
            fragment = URI.intern(str(fragment))
if self.is_urn:
return URI.intern(re.sub(":+$", "", str(self)) + ":" + re.sub("^:+", "", fragment))
else:
if str(self)[-1] == "#":
if str(fragment)[0] == "/": # Base ending with '#', fragment beginning with '/'. The fragment wins, we use '/'.
return URI.intern(re.sub("#+$", "", str(self)) + "/" + re.sub("^\/+", "", str(fragment)))
else:
return URI.intern(re.sub("#+$", "", str(self)) + "#" + re.sub("^#+", "", str(fragment)))
else: # includes '/'. Results from bases ending in '/' are the same as if there were no trailing slash.
if str(fragment)[0] == "#": # Base ending with '/', fragment beginning with '#'. The fragment wins, we use '#'.
return URI.intern(re.sub("\/+$", "", str(self)) + "#" + re.sub("^#+", "", str(fragment)))
else:
return URI.intern(re.sub("\/+$", "", str(self)) + "/" + re.sub("^\/+", "", str(fragment)))
def __add__(self, other):
return URI.intern(str(self) + str(other))
def __eq__(self, other):
return str(self) == str(other)
@property
def is_root(self):
return (len(self.path) == 0) or (self.path == "/")
def root(self):
if self.is_root:
return self
else:
uri = self.dup()
uri.path = "/"
return uri
@property
def has_parent(self):
return not self.is_root
@property
def hash(self):
return self.uri.__hash__()
def to_uri(self):
return self
def parent(self):
if self.is_root:
return None
else:
path = Pathname(self.path).parent
if path:
uri = self.dup()
uri.path = str(path)
if uri.is_root:
uri.path += "/"
return uri
def qname(self):
if self._qname:
return self._qname
else:
import vocab
match = re.search("[:/#]([^:/#]*)$", str(self))
if match:
local_name = match.group(1)
if len(local_name) == 0:
vocab_uri = str(self)
else:
vocab_uri = str(self)[0:-len(local_name)]
for v in vocab.VOCABS():
if v.uri == vocab_uri:
prefix = v.__prefix__
return [prefix, local_name if len(local_name) else None]
else:
                for v in vocab.VOCABS():
vocab_uri = v.to_uri
if self.starts_with(vocab_uri):
prefix = v.__prefix__
local_name = str(self)[len(vocab_uri):]
return [prefix, local_name]
return None
def dup(self):
        return URI(self.uri)
def __str__(self):
return self.uri.geturl()
class Node(Resource):
def __init__(self, i = None):
if i:
self.id = i
else:
self.id = str(id(self))
@classmethod
def intern(self, id):
return Node(id)
@property
def is_node(self):
return True
@property
def is_labeled(self):
return not self.is_unlabeled
@property
def is_anonymous(self):
return True
@property
def is_unlabeled(self):
return self.is_anonymous
def __eq__(self, other):
return type(other) == Node and other.is_node and ('node' in dir(other)) and ('id' in dir(other)) and (self.id == other.id)
def __str__(self):
return "_:%s" % self.id
class Statement(Value):
def __init__(self, subject = None, predicate = None, object = None, options = {}):
if isinstance(subject, dict):
self.options = subject.copy()
self.subject = self.options.get('subject')
self.predicate = self.options.get('predicate')
self.object = self.options.get('object')
else:
self.options = options.copy()
self.subject = subject
self.predicate = predicate
self.object = object
self.context = options.get('context')
self.id = options.get('id')
if isinstance(self.context, str):
self.context = Node.intern(self.context)
if isinstance(self.subject, str):
self.subject = Node.intern(self.subject)
if isinstance(self.predicate, str):
            self.predicate = Node.intern(self.predicate)
if self.object != None:
if isinstance(self.object, str):
self.object = Node.intern(self.object)
elif isinstance(self.object, Term):
pass
else:
self.object = Literal(self.object)
    @classmethod
    def factory(self, statement, options = {}):
        # dict.update() returns None, so merge the options explicitly before
        # passing them on.
        if isinstance(statement, list):
            opts = dict(options, context=statement[3] if len(statement) > 3 else None)
            return Statement(statement[0], statement[1], statement[2], opts)
        elif isinstance(statement, Pattern):
            opts = dict(options, context=statement.context)
            return Statement(statement.subject, statement.predicate, statement.object, opts)
        elif isinstance(statement, Statement):
            return statement
        elif isinstance(statement, dict):
            options.update(statement)
            return Statement(options)
@property
def has_subject(self):
return self.subject != None
@property
def has_object(self):
return self.object != None
@property
def has_predicate(self):
return self.predicate != None
@property
def has_context(self):
return self.context != None
@property
def is_valid(self):
return self.has_subject and self.has_predicate and self.has_object
@property
def is_invalid(self):
return not self.is_valid
@property
def has_blank_nodes(self):
        return (self.has_object and self.object.is_node) or (self.has_subject and self.subject.is_node)
@property
def is_asserted(self):
return not self.is_quoted
@property
def is_quoted(self):
return False
@property
def is_inferred(self):
return False
@property
def has_graph(self):
return self.has_context
def __eq__(self, other):
return hasattr(other, 'to_a') and (self.to_triple() == other.to_a())
def to_triple(self):
return [self.subject, self.predicate, self.object]
def to_quad(self):
return [self.subject, self.predicate, self.object, self.context]
def to_dict(self):
return {'subject' : self.subject, 'predicate': self.predicate, 'object': self.object, 'context': self.context}
def __str__(self):
s = ""
if type(self.subject) == Node:
s += str(self.subject)
elif type(self.subject) == URI:
s += "<%s>" % self.subject
else:
s += repr(self.subject)
s += " <%s>" % self.predicate
if (type(self.object) == Node) or (type(self.object) == Literal):
s += str(self.object)
        elif type(self.object) == URI:
            s += "<%s>" % self.object
        else:
            s += repr(self.object)
if self.context == None:
s += " ."
else:
s += " <%s>" % self.context
return s
def __repr__(self):
return repr(self.to_sxa())
def to_sxa(self):
return ['triple', self.subject, self.predicate, self.object]
def reified(self, options = {}):
#TODO: not completed
pass
def Literal(value, options = {}):
import literal
import vocab
datatype = options.get('datatype')
if datatype:
datetype_uri = URI(datatype)
if datetype_uri == vocab.XSD.get_prop('boolean'):
cls = literal.Boolean
elif datetype_uri == vocab.XSD.get_prop('integer'):
cls = literal.Integer
elif datetype_uri == vocab.XSD.get_prop('decimal'):
cls = literal.Decimal
elif datetype_uri == vocab.XSD.get_prop('double'):
            cls = literal.Double
elif datetype_uri == vocab.XSD.get_prop('dateTime'):
cls = literal.DateTime
elif datetype_uri == vocab.XSD.get_prop('date'):
cls = literal.Date
elif datetype_uri == vocab.XSD.get_prop('time'):
cls = literal.Time
elif datetype_uri == vocab.XSD.get_prop('token'):
cls = literal.Token
else:
cls = None
else:
if type(value) == bool:
cls = literal.Boolean
elif type(value) == int:
cls = literal.Integer
elif type(value) == long:
cls = literal.Decimal
elif type(value) == float:
cls = literal.Double
elif type(value) == datetime.datetime:
cls = literal.DateTime
elif type(value) == datetime.date:
cls = literal.Date
elif type(value) == datetime.time:
cls = literal.Time
elif type(value) == str:
cls = literal.Token
else:
cls = None
if cls:
return cls(value, options)
else:
return None
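
# A brief usage sketch (ours, not from the original module): constructing and
# printing a statement from these classes. Note the Statement constructor
# wraps plain strings as blank nodes and other plain values via Literal().
#
#   s = Statement(URI("http://example.org/s"),
#                 URI("http://example.org/p"),
#                 42)
#   print(str(s))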
| StarcoderdataPython |
1640897 | <filename>age-dist-german-parliament-population.py
# coding: utf-8
# Age distribution of the 18th German Bundestag (WP18). The CSV's second
# column holds each member's birth year; ages are computed relative to 2017.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

data = np.genfromtxt('data/bundestag-WP18.csv', delimiter=',', skip_header=1)
df = pd.read_csv('data/bundestag-WP18.csv', sep=',')

plt.ion()
df.plot()

# Histogram over 20 bins (ages roughly 28 through 82 for this data), with the
# x tick marks placed on the bin edges returned by hist().
ages = 2017 - data[:, 1]
counts, bin_edges, _ = plt.hist(ages, bins=20, rwidth=0.8)
plt.gca().set_xticks(bin_edges)
plt.gca().set_xticklabels(np.round(bin_edges, 1))
plt.gca().set_xticks(np.array([ 28. , 30.7, 33.4, 36.1, 38.8, 41.5, 44.2, 46.9, 49.6, 52.3, 55. , 57.7, 60.4, 63.1, 65.8, 68.5, 71.2, 73.9, 76.6, 79.3, 82. ]))
plt.gca().set_xticklabels(np.cumsum(np.diff(np.array([ 28. , 30.7, 33.4, 36.1, 38.8, 41.5, 44.2, 46.9, 49.6, 52.3, 55. , 57.7, 60.4, 63.1, 65.8, 68.5, 71.2, 73.9, 76.6, 79.3, 82. ]))) + 28.0)
plt.hist(2017 - data[:,1], bins = 20, rwidth=0.8)
plt.gca().set_xticklabels(np.cumsum(np.diff(np.array([ 28. , 30.7, 33.4, 36.1, 38.8, 41.5, 44.2, 46.9, 49.6, 52.3, 55. , 57.7, 60.4, 63.1, 65.8, 68.5, 71.2, 73.9, 76.6, 79.3, 82. ]))) + 28.0)
plt.hist(2017 - data[:,1], bins = 20, rwidth=0.8)
get_ipython().magic(u'pinfo plt.hist')
plt.hist(2017 - data[:,1], bins = 20, rwidth='none')
plt.hist(2017 - data[:,1], bins = 20, rwidth=0.8)
plt.hist(2017 - data[:,1], bins = 20, rwidth=0.8)
np.min(data[:,1])
np.max(data[:,1])
1989-1935
plt.hist(2017 - data[:,1], bins = 54, rwidth=0.8)
plt.hist(2017 - data[:,1], bins = 54, rwidth=0.8)
plt.suptitle('age histogram of german parliament members')
plt.hist(2017 - data[:,1], bins = 54, rwidth=0.8)
plt.suptitle('age histogram of german parliament members')
plt.gca().set_xlim((0, 100))
df
df[:,Partei='SPD']
df[Partei='SPD']
df
df
df[Partei]
df[Partei='SPD']
df[...,Partei='SPD']
get_ipython().magic(u'pinfo df')
df[0]
df[1]
df = pd.read_csv('/home/lib/projects/zerotrust/bundestag-WP18.csv', sep=',')
df[1]
df[:]
df[:,0]
df[:][0]
df[:][Partei='SPD']
get_ipython().magic(u'pinfo pd.DataFrame')
pd.Index
get_ipython().magic(u'pinfo pd.Index')
df[col='Partei']
df['Partei']
df['Partei' == 'SPD']
df[Partei == 'SPD']
df['Partei' = 'SPD']
df['Partei']
df['Partei'][:10]
df['Partei']
df['Partei'] == 'SPD'
df[...,df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD',1]
df[df['Partei'] == 'SPD',[1]]
df[df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD'][1]
df[df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD',0]
df[df['Partei'] == 'SPD',1]
df[df['Partei'] == 'SPD'].shape
df[df['Partei'] == 'SPD'][0]
df[df['Partei'] == 'SPD'][1]
type(df[df['Partei'] == 'SPD'])
df[df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD'][:]
df[df['Partei'] == 'SPD'][:,1]
df[df['Partei'] == 'SPD'][:,0]
df[df['Partei'] == 'SPD'][:,[0]]
df[df['Partei'] == 'SPD'][:]
df[df['Partei'] == 'SPD']
df[df['Partei'] == 'SPD','Partei']
df[df['Partei'] == 'SPD']['Partei']
df[df['Partei'] == 'SPD']['geb.']
plt.plot(df[df['Partei'] == 'SPD']['geb.'])
plt.hist(df[df['Partei'] == 'SPD']['geb.'])
plt.hist(2017 - df[df['Partei'] == 'SPD']['geb.'])
plt.hist(2017 - df[df['Partei'] == 'CSU']['geb.'])
plt.hist(2017 - df[df['Partei'] == 'CDU']['geb.'])
df
df.columns
df.columns()
df.index
df.names
df.names()
df.columns
plt.hist(2017 - df[df['Partei'] == 'CDU']['Partei'])
plt.hist(2017 - df[df['Partei'] == 'CDU']['Partei'])
df.columns
plt.hist(2017 - df[df['Partei'] == 'CDU']['geb.'])
df[df['Partei'] == 'CDU']['Partei']
df['Partei']
df['Partei'].uniquie()
df['Partei'].unique()
df['Partei'].unique()
plt.hist(2017 - df[df['Partei'] == 'DIE LINKE']['geb.'])
plt.hist(2017 - df[df['Partei'] == 'SPD']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'CSU']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'CDU']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'GR\xc3\x9cNE']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'fraktionslos']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'fraktionslos']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'GR\xc3\x9cNE']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'fraktionslos']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'CDU']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'CSU']['geb.'], alpha = 0.3)
plt.hist(2017 - df[df['Partei'] == 'SPD']['geb.'], alpha = 0.3)
get_ipython().magic(u'save age-dist-german-parliament-population')
get_ipython().magic(u'save age-dist-german-parliament-population 0-141')
| StarcoderdataPython |
1607696 | from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from core.import_secondary import import_secondary_schools
class Command(BaseCommand):
help = "Import secondary schools from .csv file"
def handle(self, *args, **options):
import_secondary_schools()
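# Invoked through manage.py; the command name is this file's module name,
# which is not visible in this snippet, e.g.:
#   python manage.py import_secondary_schools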
| StarcoderdataPython |
3362062 | <reponame>ThomasBollmeier/komparse<filename>src/komparse/translators.py
from .ast import Ast
class TokenType(object):
def __init__(self, token_type, id=""):
self._token_type = token_type
self._id = id
def translate(self, grammar, token_stream):
if not token_stream.has_next():
return None # <- translation failed
token = token_stream.peek()
if self._token_type in token.types:
token_stream.advance()
return [Ast(self._token_type, token.value, self._id)]
else:
return None # <- translation failed
class Rule(object):
def __init__(self, name, id=""):
self._name = name
self._id = id
def translate(self, grammar, token_stream):
translator = grammar.get_rule(self._name)
nodes = translator.translate(grammar, token_stream)
if nodes is not None:
ast = Ast(self._name, id=self._id)
for node in nodes:
ast.add_child(node)
trans = grammar.get_ast_transform(self._name)
if trans:
ast = trans(ast)
ast.id = self._id
if self._name == grammar.get_root_rule():
ast.set_attr("root", "true")
return [ast]
else:
return None
class Sequence(object):
def __init__(self, *elements):
self._elements = elements
def translate(self, grammar, token_stream):
ret = []
token_stream.open_transaction()
for element in self._elements:
nodes = element.translate(grammar, token_stream)
if nodes is None:
token_stream.undo()
return None
ret += nodes
token_stream.commit()
return ret
class OneOf(object):
def __init__(self, *choices):
self._choices = choices
def translate(self, grammar, token_stream):
token_stream.open_transaction()
for choice in self._choices:
nodes = choice.translate(grammar, token_stream)
if nodes is not None:
token_stream.commit()
return nodes
token_stream.undo()
return None
class Optional(object):
def __init__(self, translator):
self._translator = translator
def translate(self, grammar, token_stream):
return self._translator.translate(grammar, token_stream) or []
class Many(object):
def __init__(self, translator):
self._translator = translator
def translate(self, grammar, token_stream):
ret = []
while True:
nodes = self._translator.translate(grammar, token_stream)
if nodes is not None:
ret += nodes
else:
break
return ret
class OneOrMore(object):
def __init__(self, translator):
self._translator = translator
def translate(self, grammar, token_stream):
ret = []
nodes = self._translator.translate(grammar, token_stream)
if nodes is None:
return None
ret += nodes
while True:
nodes = self._translator.translate(grammar, token_stream)
if nodes is not None:
ret += nodes
else:
break
return ret
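# Example composition (a sketch; the rule and token-type names below are
# illustrative, not part of this module):
#
#   expr := term (('+' | '-') term)*
#
# could be expressed with these combinators as:
#
#   Sequence(Rule('term'),
#            Many(Sequence(OneOf(TokenType('PLUS'), TokenType('MINUS')),
#                          Rule('term'))))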
| StarcoderdataPython |
1758901 | from typing import Union
from .field import Field
import requests
class Attachment:
def __init__(self, text: str, fallback: str = None, color: Union[str, int] = None, pretext: str = None,
author_name: str = None, author_link: str = None, author_icon: str = None,
fields: Union[Field, list] = None, image_url: str = None, thumb_url: str = None, title: str = None,
title_link: str = None):
self.json = {}
if fallback is None:
fallback = text
args = locals()
for arg in args:
if args[arg] is not None:
self.json[arg] = args[arg]
self.json.pop('self')
def send(self, hook):
if 'fields' in self.json:
for idx,field in enumerate(self.json['fields']):
self.json['fields'][idx] = self.json['fields'][idx].json
json = {
'attachments':[self.json]
}
        requests.post(url=str(hook), json=json)
| StarcoderdataPython |
3378052 | <gh_stars>0
'''
Generates the location data for the data.gov.uk alpha.
'''
import argparse
import json
import traceback
import csv
from pprint import pprint
import requests
import requests_cache
from running_stats import Stats
stats_types = Stats()
stats = Stats()
args = None
max_pk = None
one_day = 60 * 60 * 24
requests_cache.install_cache('.drupal_dump', expire_after=one_day)
# Is there a register? Probably easier to hard-code
countries_of_the_uk = [
('England', ), # leave room for geo-data
('Wales', ),
('Scotland', ),
('Northern Ireland', ),
]
regions_of_the_uk = [
('Great Britain', ),
('British Isles', ),
]
# http://register.alpha.openregister.org/record/local-authority-eng
def publish_data():
'''
Produces the location.json for publish_data form
e.g. {"model":"datasets.location",
"pk":0,
"fields":{"name":"England"
"location_type":"country"}}
'''
try:
locations = []
# countries
for country_tuple in countries_of_the_uk:
name = country_tuple[0]
locations.append(location_dict(name, 'country'))
stats_types.add('Country added ok', name)
for region_tuple in regions_of_the_uk:
name = region_tuple[0]
locations.append(location_dict(name, 'region of the UK'))
stats_types.add('Region of the UK added ok', name)
locations.append(location_dict('United Kingdom', 'UK'))
stats_types.add('United Kingdom added ok', 'United Kingdom')
# local authorities
# The "name" field is the place
la_eng = requests.get('https://local-authority-eng.register.gov.uk/records.json?page-size=5000').json()
la_nir = requests.get('https://local-authority-nir.discovery.openregister.org/records.json?page-size=5000').json()
la_wls = requests.get('https://local-authority-wls.discovery.openregister.org/records.json?page-size=5000').json()
la_sct = requests.get('https://local-authority-sct.discovery.openregister.org/records.json?page-size=5000').json()
# "STA": {
# "entry-number": "331",
# "entry-timestamp": "2016-10-21T16:11:20Z",
# "item-hash": "sha-256:0e6d3a5790abf0248d74e9e6cdacc422dea9f871f65587e62e6d3dca6964a344",
# "local-authority-type": "NMD",
# "official-name": "Stafford Borough Council",
# "local-authority-eng": "STA",
# "name": "Stafford"
# },
try:
def add_local_authorities(locations, la_dict, country):
for la in la_dict.values():
name = la['name']
locations.append(dict(
model='datasets.location',
fields=dict(
name=name,
location_type='local authority'
)
)
)
stats_types.add('LA %s added ok' % country, name)
add_local_authorities(locations, la_eng, 'eng')
add_local_authorities(locations, la_nir, 'nir')
add_local_authorities(locations, la_wls, 'wls')
add_local_authorities(locations, la_sct, 'sct')
except Exception:
traceback.print_exc()
import pdb; pdb.set_trace()
# Clinical Commissioning Groups
# Clinical Commissioning Groups (April 2016) Ultra Generalised Clipped Boundaries in England
url = 'http://geoportal.statistics.gov.uk/datasets/1bc1e6a77cdd4b3a9a0458b64af1ade4_4'
# objectid ccg16cd ccg16nm st_areashape st_lengthshape
# 1 E38000001 NHS Airedale, Wharfedale and Craven CCG 1224636590 193149.7401
def ccg_name_processor(name):
assert name.startswith('NHS '), name
assert name.endswith(' CCG'), name
return name[4:-4]
add_ons_data(url, 'ccg16cd', 'ccg16nm',
'NHS Clinical Commissioning Group area', locations,
name_processor=ccg_name_processor)
# fill in 'pk' - keys
existing_locations = []
if args.existing_locations:
with open(args.existing_locations, 'r', encoding='utf8') as f:
existing_locations = json.load(f)
global max_pk
max_pk = max((l['pk'] for l in existing_locations)
if existing_locations else [0])
add_keys_from_existing_data(locations, location_type='country',
existing_locations=existing_locations)
# write
print('\nLocations:\n', stats_types)
print('\nStats:\n', stats)
with open(args.output_fpath, 'w', encoding='utf8') as output_f:
json.dump(locations, output_f, ensure_ascii=False)
print('Written %s' % args.output_fpath)
except Exception:
traceback.print_exc()
import pdb; pdb.set_trace()
def add_keys_from_existing_data(locations, location_type, existing_locations):
existing_keys_by_name = dict(
(l['fields']['name'], l['pk'])
for l in existing_locations
if l['fields']['location_type'] == location_type)
for location in locations:
name = location['fields']['name']
if name in existing_keys_by_name:
location['pk'] = existing_keys_by_name[name]
stats.add('Key reused', name)
else:
location['pk'] = get_new_pk(locations, existing_locations)
stats.add('Key new', name)
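# Note: key reuse is scoped to the given location_type - a name whose pk
# appears in the existing fixture under that type keeps it, while all other
# locations are numbered upwards from the previous maximum via get_new_pk().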
def location_dict(name, location_type):
return dict(
model='datasets.location',
fields=dict(
name=name,
location_type=location_type
)
)
def get_new_pk(locations, existing_locations):
global max_pk
max_pk += 1
return max_pk
ons_codes_added = set()
def add_ons_data(page_url, column_with_code, column_with_name,
location_type, locations, name_processor=None):
global ons_codes_added
response = requests.get(page_url + '.csv')
content = response.content
# need to get rid of the bom which is first 3 bytes
if content[:3] == b'\xef\xbb\xbf':
content = content[3:]
decoded_content = content.decode('utf8')
csv_data = csv.DictReader(decoded_content.splitlines())
for row in csv_data:
code = row[column_with_code]
name = row[column_with_name]
if name_processor:
name = name_processor(name)
if code in ons_codes_added:
stats.add('ONS place already added. Ignore %s dupe' %
location_type, name)
continue
ons_codes_added.add(code)
locations.append(location_dict(name, location_type))
stats_types.add('%s added ok' % location_type, name)
def administrative_areas(locations):
# administrative areas
# Provided by ONS Geography http://geoportal.statistics.gov.uk/
# under menus: Boundaries | Administrative Boundaries |
# We want:
# * 'generalised resolution' - no need for full detail for searching
# * 'clipped to the coastline' - better to look good on a map than be
# perfectly accurate, when its only for searching anyway.
# i.e. Generalised Clipped
# We can start with CSVs and go to KMLs later when we need the outlines
# License is OGL, with attribution:
# Contains National Statistics data (c) Crown copyright and database right [year]
# Contains OS data (c) Crown copyright and database right [year]
# Combined Authorities - yes
# Combined Authorities (June 2016) Ultra Generalised Clipped Boundaries in England
url = 'http://geoportal.statistics.gov.uk/datasets/0293170f45ac4322868978b46dba822d_4'
# objectid cauth16cd cauth16nm st_areashape st_lengthshape
# 1 E47000001 Greater Manchester 1273598354 206057.3684
    add_ons_data(url, 'cauth16cd', 'cauth16nm', 'combined authority', locations)
# Local Authority Districts - yes
# Local Authority Districts (December 2015) Ultra Generalised Clipped Boundaries in Great Britain
url = 'http://geoportal.statistics.gov.uk/datasets/8edafbe3276d4b56aec60991cbddda50_4'
# lad15cd lad15nm lad15nmw objectid st_lengthshape st_areashape
# E06000001 Hartlepool 1 50790.07333 96334979.59
    add_ons_data(url, 'lad15cd', 'lad15nm', 'district', locations)
# Counties - yes
# Counties (December 2015) Ultra Generalised Clipped Boundaries in England
url = 'http://geoportal.statistics.gov.uk/datasets/97e17cbdddcb4c98b960d41104ef02e9_4'
# objectid cty15cd cty15nm st_areashape st_lengthshape
# 1 E10000002 Buckinghamshire 1572148102 303282.187
    add_ons_data(url, 'cty15cd', 'cty15nm', 'county', locations)
# Counties and UAs - yes but some overlap with previous
# Counties and Unitary Authorities (December 2015) Ultra Generalised Clipped Boundaries in England and Wales
# "counties, metropolitan districts, London boroughs and unitary authorities in England and Wales"
url = 'http://geoportal.statistics.gov.uk/datasets/0b09996863af4b5db78058225bac5d1b_4'
# ctyua15cd ctyua15nm ctyua15nmw objectid st_lengthshape st_areashape
# E06000001 Hartlepool 1 50778.094 96339578.63
    add_ons_data(url, 'ctyua15cd', 'ctyua15nm', 'local authority area', locations)
# Countries - no - only has 3 anyway
# Parishes - no - 11,000 of them
# Parishes (December 2016) Generalised Clipped Boundaries in England and Wales
url = 'http://geoportal.statistics.gov.uk/datasets/f13dad37854b4a1f869bf178489ff99a_2'
# Parishes and non-civil areas - no - almost the same as parishes
# Regions - no
# Upper tier LAs - lots of overlap but has London
# Upper Tier Local Authorities (December 2011) Boundaries
# Upper Tier Local Authorities in England and Wales as at 31 December 2011
url = 'http://geoportal.statistics.gov.uk/datasets/22264fcec9df4a7fafa56724ce14ad14_0'
# objectid utla11cd utla11nm st_areashape st_lengthshape
# 1 E06000001 Hartlepool 93886294.32 69010.01461
    add_ons_data(url, 'utla11cd', 'utla11nm', 'local authority', locations)
# Wards / Electoral Divisions - no
# Ones that dont match the name in the England register:
# ['Greater London', 'Kings Lynn and West Norfolk', 'City of Lincoln', 'Herefordshire', 'Kingston upon Hull', 'City of Bristol']
location_names = [l['fields']['name'] for l in locations]
la_eng = requests.get('https://local-authority-eng.register.gov.uk/records.json?page-size=5000').json()
for la in la_eng.values():
if la['name'] not in location_names:
stats.add('non matching name', la['name'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers()
subparser = subparsers.add_parser('publish_data')
subparser.set_defaults(func=publish_data)
subparser.add_argument(
'--output_fpath',
default='locations.json',
help='Location of the output locations.json, that is destined for: '
'src/datasets/fixtures/locations.json')
subparser.add_argument(
'--existing-locations',
help='Filepath to existing locations.json, so that keys can be kept '
'the same')
# subparser.add_argument('--users-from-drupal-user-table-dump',
# help='Filepath of drupal_users_table.csv.gz')
# subparser.add_argument('--users-tried-sequentially',
# action='store_true',
# help='Rather than try a given list of user ids, '
# 'just try all ids in order from 1 to 500000.')
# subparser.add_argument('-u', '--user',
# help='Only do it for a single user (eg 845)')
args = parser.parse_args()
# if args.cache_requests:
# requests_cache.install_cache('.drupal_dump') # doesn't expire
# call the function
    args.func()
| StarcoderdataPython |
3342957 | <gh_stars>0
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 kirmani <<EMAIL>>
#
# Distributed under terms of the MIT license.
"""
Timed Petri net for resource controller.
"""
from floor_listener import FloorListener
from ros_cadence.srv import *
from petri_net import *
import rospy
kVerbose = True
kDebug = True
kPlaces = ['requested_robot', 'free', 'owned_user', 'owned_robot',
'requested_user']
class BeliefControllerApi:
@staticmethod
def SetBelief(belief, value):
rospy.wait_for_service('set_belief')
try:
set_belief = rospy.ServiceProxy(
                'set_belief', SetBelief)
set_belief(belief, value)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def GetBelief(belief):
rospy.wait_for_service('get_belief')
try:
get_belief = rospy.ServiceProxy(
                'get_belief', GetBelief)
return get_belief(belief).value
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
return False
class ActionProcessApi:
@staticmethod
def AddIntendedAction(action):
rospy.wait_for_service('add_intended_action')
try:
add_intended_action = rospy.ServiceProxy(
'add_intended_action', Resource)
add_intended_action(action)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def RemoveIntendedAction(action):
rospy.wait_for_service('remove_intended_action')
try:
remove_intended_action = rospy.ServiceProxy(
'remove_intended_action', Resource)
remove_intended_action(action)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def AddIntendedResource(resource):
rospy.wait_for_service('add_intended_resource')
try:
add_intended_resource = rospy.ServiceProxy(
'add_intended_resource', Resource)
add_intended_resource(resource)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def RemoveIntendedResource(resource):
rospy.wait_for_service('remove_intended_resource')
try:
remove_intended_resource = rospy.ServiceProxy(
'remove_intended_resource', Resource)
remove_intended_resource(resource)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def AddRequestedResource(resource):
rospy.wait_for_service('add_requested_resource')
try:
add_requested_resource = rospy.ServiceProxy(
'add_requested_resource', Resource)
add_requested_resource(resource)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def RemoveRequestedResource(resource):
rospy.wait_for_service('remove_requested_resource')
try:
remove_requested_resource = rospy.ServiceProxy(
'remove_requested_resource', Resource)
remove_requested_resource(resource)
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
@staticmethod
def RobotOwnsResource(resource):
rospy.wait_for_service('robot_owns_resource')
try:
robot_owns_resource = rospy.ServiceProxy(
'robot_owns_resource', GetBelief)
return robot_owns_resource(resource).value
except rospy.ServiceException, e:
print("Service call failed: %s" % e)
class ReleaseRobotTransition(PetriNetTransition):
def __init__(self, requested_robot, owned_robot, free, beliefs):
PetriNetTransition.__init__(self, 'release_robot')
self.requested_robot_ = requested_robot
self.owned_robot_ = owned_robot
self.free_ = free
self.beliefs_ = beliefs
def fire(self):
for belief in self.beliefs_:
if (not self.requested_robot_.HasToken(belief)
and self.owned_robot_.HasToken(belief)):
if kVerbose:
print("Releasing resource (%s) from robot."
% belief)
self.owned_robot_.RemoveToken(belief)
self.free_.AddToken(belief)
def activated(self):
for belief in self.beliefs_:
if (not self.requested_robot_.HasToken(belief)
and self.owned_robot_.HasToken(belief)):
return True
return False
class YieldTransition(PetriNetTransition):
def __init__(self, requested_user, owned_robot, owned_user, beliefs):
PetriNetTransition.__init__(self, 'yield')
self.requested_user_ = requested_user
self.owned_robot_ = owned_robot
self.owned_user_ = owned_user
self.beliefs_ = beliefs
def fire(self):
for belief in self.beliefs_:
if (self.requested_user_.HasToken(belief)
and self.owned_robot_.HasToken(belief)):
if kVerbose:
print("Yielding resource (%s) from robot to human."
% belief)
self.requested_user_.RemoveToken(belief)
self.owned_robot_.RemoveToken(belief)
self.owned_user_.AddToken(belief)
def activated(self):
for belief in self.beliefs_:
if (self.requested_user_.HasToken(belief)
and self.owned_robot_.HasToken(belief)):
return True
return False
class RequestUserTransition(PetriNetTransition):
def __init__(self, owned_user, requested_user, beliefs):
PetriNetTransition.__init__(self, 'request_user')
self.owned_user_ = owned_user
self.requested_user_ = requested_user
self.beliefs_ = beliefs
def fire(self):
# Place resource tokens in requested place.
for belief in self.beliefs_:
if (not self.beliefs_[belief]
and not self.requested_user_.HasToken(belief)):
if kVerbose:
print("Requesting resource for user: %s"
% belief)
self.requested_user_.AddToken(belief)
def activated(self):
for belief in self.beliefs_:
if (not self.beliefs_[belief]
and not self.requested_user_.HasToken(belief)):
return True
return False
class SeizeUserTransition(PetriNetTransition):
def __init__(self, requested_user, free, owned_user, beliefs):
PetriNetTransition.__init__(self, 'seize_user')
self.requested_user_ = requested_user
self.free_ = free
self.owned_user_ = owned_user
self.beliefs_ = beliefs
def fire(self):
for belief in self.beliefs_:
if (self.requested_user_.HasToken(belief)
and self.free_.HasToken(belief)):
if kVerbose:
print("Seizing resource for user: %s"
% belief)
self.free_.RemoveToken(belief)
self.owned_user_.AddToken(belief)
def activated(self):
for belief in self.beliefs_:
if (self.requested_user_.HasToken(belief)
and self.free_.HasToken(belief)):
return True
return False
class ReleaseUserTransition(PetriNetTransition):
def __init__(self, requested_user, owned_user, free, beliefs):
PetriNetTransition.__init__(self, 'release_user')
self.requested_user_ = requested_user
self.owned_user_ = owned_user
self.free_ = free
self.beliefs_ = beliefs
def fire(self):
for belief in self.beliefs_:
if (self.beliefs_[belief]
and self.owned_user_.HasToken(belief)):
if kVerbose:
print("Releasing resource for user: %s"
% belief)
self.requested_user_.RemoveToken(belief)
self.owned_user_.RemoveToken(belief)
self.free_.AddToken(belief)
def activated(self):
for belief in self.beliefs_:
if (self.beliefs_[belief]
and self.owned_user_.HasToken(belief)):
return True
return False
class SeizeRobotTransition(PetriNetTransition):
def __init__(self, requested_robot, free, owned_robot, intended_resources):
PetriNetTransition.__init__(self, 'seize_robot')
self.requested_robot_ = requested_robot
self.free_ = free
self.owned_robot_ = owned_robot
self.intended_resources_ = intended_resources
def fire(self):
for resource in self.intended_resources_:
if (self.requested_robot_.HasToken(resource)
and self.free_.HasToken(resource)):
self.free_.RemoveToken(resource)
self.owned_robot_.AddToken(resource)
def activated(self):
for resource in self.intended_resources_:
if (self.requested_robot_.HasToken(resource)
and self.free_.HasToken(resource)):
return True
return False
class ResourceController(PetriNet):
def __init__(self):
PetriNet.__init__(self, 'resource_controller')
self.places_ = {}
self.intended_resources_ = []
self.intended_actions_ = []
self.beliefs_ = {}
# Places.
for place in kPlaces:
self.places_[place] = PetriNetPlace(place)
# Transitions.
self.transitions_.append(
SeizeRobotTransition(self.places_['requested_robot'],
self.places_['free'],
self.places_['owned_robot'],
self.intended_resources_))
self.transitions_.append(
ReleaseRobotTransition(self.places_['requested_robot'],
self.places_['owned_robot'],
self.places_['free'],
self.beliefs_))
self.transitions_.append(
YieldTransition(self.places_['requested_user'],
self.places_['owned_robot'],
self.places_['owned_user'],
self.beliefs_))
self.transitions_.append(
RequestUserTransition(self.places_['owned_user'],
self.places_['requested_user'],
self.beliefs_))
self.transitions_.append(
SeizeUserTransition(self.places_['requested_user'],
self.places_['free'],
self.places_['owned_user'],
self.beliefs_))
self.transitions_.append(
ReleaseUserTransition(self.places_['requested_user'],
self.places_['owned_user'],
self.places_['free'],
self.beliefs_))
# Perception Process API.
def SetBelief(self, belief, value):
if belief not in self.beliefs_:
if value:
self.places_['free'].AddToken(belief)
else:
self.places_['owned_user'].AddToken(belief)
print(self.GetMarking())
self.beliefs_[belief] = value
print("SetBelief: %s -> %s" % (belief, value))
print(self.beliefs_)
def GetBelief(self, belief):
if belief not in self.beliefs_:
raise ValueError("GetBelief: Error: belief (%s) does not exist"
% belief)
return self.beliefs_[belief]
# Action Process API.
def AddIntendedAction(self, action):
self.intended_actions_.append(action)
print("AddIntendedAction: added intended action (%s)"
% action)
print(self.intended_actions_)
self.Run()
print(self.GetMarking())
def RemoveIntendedAction(self, action):
self.intended_actions_.remove(action)
print("RemoveIntendedAction: removed intended action (%s)"
% action)
print(self.intended_actions_)
self.Run()
print(self.GetMarking())
def AddIntendedResource(self, resource):
self.intended_resources_.append(resource)
print("AddIntendedResource: added intended resource (%s)"
% resource)
print(self.intended_resources_)
self.Run()
print(self.GetMarking())
def RemoveIntendedResource(self, resource):
self.intended_resources_.remove(resource)
print("RemoveIntendedResource: removed intended resource (%s)"
% resource)
print(self.intended_resources_)
def AddRequestedResource(self, resource):
self.places_['requested_robot'].AddToken(resource)
print("AddRequestedResource: added requested resource (%s)"
% resource)
self.Run()
print(self.GetMarking())
def RemoveRequestedResource(self, resource):
self.places_['requested_robot'].RemoveToken(resource)
print("RemoveRequestedResource: removed requested resource (%s)"
% resource)
self.Run()
print(self.GetMarking())
def RobotOwnsResource(self, resource):
self.Run()
return self.places_['owned_robot'].HasToken(resource)
def GetMarking(self):
marking = {}
for place in self.places_:
marking[place] = self.places_[place].GetTokens()
return marking
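# Example token flow (a sketch; the belief name is illustrative):
#   rc = ResourceController()
#   rc.SetBelief('floor', True)        # new belief: token starts in 'free'
#   rc.AddIntendedResource('floor')    # robot intends to use it
#   rc.AddRequestedResource('floor')   # seize_robot fires: free -> owned_robot
#   rc.RobotOwnsResource('floor')      # -> True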
def handle_do_petri_net_arc(req):
if req.function == 'add':
resource_controller.AddTokenToPlace(req.place, req.token)
return DoPetriNetArcResponse(True)
if req.function == 'remove':
return DoPetriNetArcResponse(
resource_controller.RemoveTokenFromPlace(req.place, req.token))
if req.function == 'guard':
response = resource_controller.HasTokenInPlace(req.place, req.token)
return DoPetriNetArcResponse(response)
if req.function == 'add_action':
resource_controller.AddActiveAction(req.action)
return DoPetriNetArcResponse(True)
if req.function == 'remove_action':
response = resource_controller.RemoveActiveAction(req.action)
return DoPetriNetArcResponse(response)
raise rospy.ServiceException("Invalid function input: %s" % req.function)
def set_belief_handler(req):
resource_controller.SetBelief(req.belief, req.value)
return SetBeliefResponse()
def get_belief_handler(req):
return GetBeliefResponse(resource_controller.GetBelief(req.belief))
def add_intended_action_handler(req):
resource_controller.AddIntendedAction(req.resource)
return ResourceResponse()
def remove_intended_action_handler(req):
resource_controller.RemoveIntendedAction(req.resource)
return ResourceResponse()
def add_intended_resource_handler(req):
resource_controller.AddIntendedResource(req.resource)
return ResourceResponse()
def remove_intended_resource_handler(req):
resource_controller.RemoveIntendedResource(req.resource)
return ResourceResponse()
def add_requested_resource_handler(req):
resource_controller.AddRequestedResource(req.resource)
return ResourceResponse()
def remove_requested_resource_handler(req):
resource_controller.RemoveRequestedResource(req.resource)
return ResourceResponse()
def robot_owns_resource_handler(req):
return GetBeliefResponse(
resource_controller.RobotOwnsResource(req.belief))
def main():
global resource_controller
resource_controller = ResourceController()
if kDebug:
print("Initial marking: %s" % str(resource_controller.GetMarking()))
rospy.init_node('do_petri_net_arc')
s = rospy.Service('do_petri_net_arc', DoPetriNetArc, handle_do_petri_net_arc)
# TODO(kirmani): Rename Resource.srv to String.srv
# TODO(kirmani): Rename GetBelief.srv to StringBool.srv or something
# better.
set_belief_service = rospy.Service(
'set_belief', SetBelief, set_belief_handler)
get_belief_service = rospy.Service(
'get_belief', GetBelief, get_belief_handler)
add_intended_action_service = rospy.Service(
'add_intended_action', Resource, add_intended_action_handler)
remove_intended_action_service = rospy.Service(
'remove_intended_action', Resource, remove_intended_action_handler)
add_intended_resource_service = rospy.Service(
'add_intended_resource', Resource, add_intended_resource_handler)
remove_intended_resource_service = rospy.Service(
'remove_intended_resource', Resource, remove_intended_resource_handler)
add_requested_resource_service = rospy.Service(
'add_requested_resource', Resource, add_requested_resource_handler)
remove_requested_resource_service = rospy.Service(
'remove_requested_resource', Resource, remove_requested_resource_handler)
robot_owns_resource_service = rospy.Service(
'robot_owns_resource', GetBelief,
robot_owns_resource_handler)
print("Ready to do petri net arcs.")
rospy.spin()
if __name__ == '__main__':
main()
| StarcoderdataPython |
103236 | <filename>binding.gyp
{
"variables": {
"GTK_Root%": "c:\\gtk",
"conditions": [
[ "OS == 'mac'", {
"pkg_env": "PKG_CONFIG_PATH=/opt/X11/lib/pkgconfig"
}, {
"pkg_env": ""
}]
]
},
"targets": [
{
"target_name": "rsvg",
"sources": [
"src/Rsvg.cc",
"src/Enums.cc",
"src/Autocrop.cc"
],
"include_dirs": [
"<!(node -e \"require('nan')\")"
],
"variables": {
"packages": "librsvg-2.0 cairo-png cairo-pdf cairo-svg",
"conditions": [
[ "OS!='win'", {
"libraries": "<!(<(pkg_env) pkg-config --libs-only-l <(packages))",
"ldflags": "<!(<(pkg_env) pkg-config --libs-only-L --libs-only-other <(packages))",
"cflags": "<!(<(pkg_env) pkg-config --cflags <(packages))"
}, { # else OS!='win'
"include_dirs": "<!(<(python) tools/include_dirs.py <(GTK_Root) <(packages))"
} ]
]
},
"conditions": [
[ "OS!='mac' and OS!='win'", {
"cflags": [
"<@(cflags)",
"-std=c++0x"
],
"ldflags": [
"<@(ldflags)"
],
"libraries": [
"<@(libraries)"
],
} ],
[ "OS=='mac'", {
"xcode_settings": {
"OTHER_CFLAGS": [
"<@(cflags)"
],
"OTHER_LDFLAGS": [
"<@(ldflags)"
]
},
"libraries": [
"<@(libraries)"
],
} ],
[ "OS=='win'", {
"sources+": [
"src/win32-math.cc"
],
"include_dirs": [
"<@(include_dirs)"
],
"libraries": [
'librsvg-2.dll.a',
'glib-2.0.lib',
'gobject-2.0.lib',
'cairo.lib'
],
"msvs_settings": {
'VCCLCompilerTool': {
'AdditionalOptions': [
"/EHsc"
]
}
},
"msbuild_settings": {
"Link": {
"AdditionalLibraryDirectories": [
"<(GTK_Root)\\lib"
],
"ImageHasSafeExceptionHandlers": "false"
}
}
} ]
]
}
]
}
| StarcoderdataPython |
3363505 | <gh_stars>0
import re
def hyperop(exp: str) -> int:
"""
This function facilitates higher order repetitive operations (hyperoperations) such as tetration.\n
'exp' should be a string of the form: an integer a, followed by one or more *'s, followed by an integer b. Whitespace around args is ignored.\n
E.g. '2***3' or ' 2 *** 3 '\n
The number of *'s corresponds to the order of the hyperoperation minus 1\n
E.g. * = multiplication, ** = exponentiation, *** = tetration, ...
"""
if re.match("^\s*\d+\s*\*+\s*\d+\s*", exp) == None:
raise Exception("expression was not formatted correctly")
newStr = "".join(exp.split())
ind = newStr.index("*")
n = newStr.count('*')
a = int(newStr[0:ind])
b = int(newStr[ind+n:len(newStr)])
return hyperopHelper(a, b, n)
def hyperopHelper(a, b, n):
if n == 1:
return a * b
if n == 2:
return a ** b
ans = a
for i in range(b-1):
ans = hyperopHelper(ans, a, n-1)
return ans
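# Note: hyperopHelper folds from the left, e.g. hyperop("2***3") evaluates as
# (2 ** 2) ** 2 = 16. Standard tetration is right-associative (2 ** (2 ** 2)),
# which happens to coincide for this input but diverges for larger arguments,
# e.g. 2***4 gives 256 here versus 65536 under the right-associative reading.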
def main():
print(hyperop("2 * 3"))
print(hyperop(" 2 **3 "))
print(hyperop("2***3"))
print(hyperop("2 **** 3"))
print(hyperop("4 *** 4"))
print(hyperop("2 ********** 2"))
print(hyperop("wef wefwe f"))
if __name__ == "__main__":
    main()
| StarcoderdataPython |
1704486 | <filename>group/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-15 01:42
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import taggit.managers
class Migration(migrations.Migration):
initial = True
dependencies = [
('classification', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('taggit', '0002_auto_20150616_2121'),
]
operations = [
migrations.CreateModel(
name='Apply',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_time', models.DateTimeField(default=django.utils.timezone.now)),
],
options={
'permissions': [['view_apply', 'Can view apply']],
},
),
migrations.CreateModel(
name='Bookmark',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_time', models.DateTimeField(default=django.utils.timezone.now)),
],
options={
'permissions': [['view_bookmark', 'Can view bookmark']],
},
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.CharField(max_length=200)),
('created_time', models.DateTimeField(default=django.utils.timezone.now)),
],
options={
'permissions': [['view_comment', 'Can view comment']],
},
),
migrations.CreateModel(
name='Membership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('R', 'Request'), ('D', 'Denied'), ('A', 'Approved')], default='R', max_length=1)),
('updated_time', models.DateTimeField(default=django.utils.timezone.now)),
('requested_date', models.DateTimeField(default=django.utils.timezone.now)),
('joined_date', models.DateTimeField(null=True)),
],
options={
'permissions': [['view_membership', 'Can view membership']],
},
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.CharField(max_length=5120)),
('status', models.CharField(choices=[('O', 'Open'), ('C', 'Closed')], default='O', max_length=1)),
('created_time', models.DateTimeField(default=django.utils.timezone.now)),
('updated_time', models.DateTimeField(default=django.utils.timezone.now)),
('comment_count', models.IntegerField(default=0)),
('bookmark_count', models.IntegerField(default=0)),
('apply_count', models.IntegerField(default=0)),
('share_count', models.IntegerField(default=0)),
('category', models.ManyToManyField(to='classification.Category')),
('manager', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_project_leaders', to=settings.AUTH_USER_MODEL)),
('member', models.ManyToManyField(through='group.Membership', to=settings.AUTH_USER_MODEL)),
('tags', taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')),
],
options={
'permissions': [['view_project', 'Can view project']],
},
),
migrations.CreateModel(
name='Share',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_time', models.DateTimeField(default=django.utils.timezone.now)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='group.Project')),
('writer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_shares', to=settings.AUTH_USER_MODEL)),
],
options={
'permissions': [['view_share', 'Can view share']],
},
),
]
| StarcoderdataPython |
116080 | import numpy as np
import matplotlib.pyplot as plot
time = np.arange(0, 10, 0.1);
amplitude =np.sin(time)
plot.plot(time, amplitude)
plot.title('Sine Wave 1')
plot.xlabel('Time')
plot.ylabel('Amplitude = sin(time)')
plot.grid(True, which='both')
plot.axhline(y=0, color='k')
plot.show()
| StarcoderdataPython |
3244504 | <reponame>jethornton/7i97<filename>7i97/src/lib7i97/pcinfo.py
"""
Usage extcmd.job(self, cmd="something", args="",
dest=self.QPlainTextEdit, clean="file to delete when done")
To pipe the output of cmd1 to cmd2 use the following
Usage extcmd.pipe_job(self, cmd1="something", arg1="", cmd2="pipe to",
arg2, "", dest=self.QPlainTextEdit)
"""
def cpuInfo(parent):
parent.extcmd.job(cmd="lscpu", args=None, dest=parent.infoPTE)
def nicInfo(parent):
parent.extcmd.job(cmd="lspci", args=None, dest=parent.infoPTE)
def nicCalc(parent):
if parent.tMaxLE.text() != '' and parent.cpuSpeedLE.text() != '':
tMax = int(int(parent.tMaxLE.text()) / 1000)
cpuSpeed = float(parent.cpuSpeedLE.text()) * parent.cpuSpeedCB.currentData()
packetTime = tMax / cpuSpeed
parent.packetTimeLB.setText('{:.1%}'.format(packetTime))
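        # note: the expression below reduces to a constant 0.7 - the threshold
        # is defined as 70% of the CPU speed, expressed as a fraction of it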
threshold = (cpuSpeed * 0.7) / cpuSpeed
parent.thresholdLB.setText('{:.0%}'.format(threshold))
else:
errorText = []
if parent.cpuSpeedLE.text() == '':
errorText.append('CPU Speed can not be empty')
if parent.tMaxLE.text() == '':
errorText.append('tMax can not be empty')
parent.errorMsgOk('\n'.join(errorText))
def readTmax(parent):
parent.extcmd.job(cmd="halcmd", args=['show', 'param', 'hm2*.tmax'], dest=parent.tmaxPTE)
| StarcoderdataPython |
4811773 | <gh_stars>0
#!/usr/bin/env python
#
# Cork - Authentication module for the Bottle web framework
# Copyright (C) 2013 <NAME> and others, see AUTHORS file.
#
# This package is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This package is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from base64 import b64encode, b64decode
from beaker import crypto
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from logging import getLogger
from smtplib import SMTP, SMTP_SSL
from threading import Thread
from time import time
import bottle
import os
import re
import uuid
try:
import scrypt
scrypt_available = True
except ImportError: # pragma: no cover
scrypt_available = False
from backends import JsonBackend
log = getLogger(__name__)
class AAAException(Exception):
"""Generic Authentication/Authorization Exception"""
pass
class AuthException(AAAException):
"""Authentication Exception: incorrect username/password pair"""
pass
class Cork(object):
def __init__(self, directory=None, backend=None, email_sender=None,
initialize=False, session_domain=None, smtp_server=None,
smtp_url='localhost'):
"""Auth/Authorization/Accounting class
:param directory: configuration directory
:type directory: str.
:param users_fname: users filename (without .json), defaults to 'users'
:type users_fname: str.
:param roles_fname: roles filename (without .json), defaults to 'roles'
:type roles_fname: str.
"""
if smtp_server:
smtp_url = smtp_server
self.mailer = Mailer(email_sender, smtp_url)
self.password_reset_timeout = 3600 * 24
self.session_domain = session_domain
self.preferred_hashing_algorithm = 'PBKDF2'
# Setup JsonBackend by default for backward compatibility.
if backend is None:
self._store = JsonBackend(directory, users_fname='users',
roles_fname='roles', pending_reg_fname='register',
initialize=initialize)
else:
self._store = backend
def login(self, username, password, success_redirect=None,
fail_redirect=None):
"""Check login credentials for an existing user.
Optionally redirect the user to another page (typically /login)
:param username: username
:type username: str.
        :param password: cleartext password
:type password: str.
:param success_redirect: redirect authorized users (optional)
:type success_redirect: str.
:param fail_redirect: redirect unauthorized users (optional)
:type fail_redirect: str.
:returns: True for successful logins, else False
"""
assert isinstance(username, str), "the username must be a string"
assert isinstance(password, str), "the password must be a string"
if username in self._store.users:
if self._verify_password(username, password,
self._store.users[username]['hash']):
# Setup session data
self._setup_cookie(username)
self._store.users[username]['last_login'] = str(datetime.utcnow())
self._store.save_users()
if success_redirect:
bottle.redirect(success_redirect)
return True
if fail_redirect:
bottle.redirect(fail_redirect)
return False
def logout(self, success_redirect='/login', fail_redirect='/login'):
"""Log the user out, remove cookie
:param success_redirect: redirect the user after logging out
:type success_redirect: str.
:param fail_redirect: redirect the user if it is not logged in
:type fail_redirect: str.
"""
try:
session = self._beaker_session
session.delete()
except Exception, e:
log.debug("Exception %s while logging out." % repr(e))
bottle.redirect(fail_redirect)
bottle.redirect(success_redirect)
def require(self, username=None, role=None, fixed_role=False,
fail_redirect=None):
"""Ensure the user is logged in has the required role (or higher).
Optionally redirect the user to another page (typically /login)
If both `username` and `role` are specified, both conditions need to be
satisfied.
If none is specified, any authenticated user will be authorized.
By default, any role with higher level than `role` will be authorized;
set fixed_role=True to prevent this.
:param username: username (optional)
:type username: str.
:param role: role
:type role: str.
:param fixed_role: require user role to match `role` strictly
:type fixed_role: bool.
:param redirect: redirect unauthorized users (optional)
:type redirect: str.
"""
# Parameter validation
if username is not None:
if username not in self._store.users:
raise AAAException("Nonexistent user")
if fixed_role and role is None:
raise AAAException(
"""A role must be specified if fixed_role has been set""")
if role is not None and role not in self._store.roles:
raise AAAException("Role not found")
# Authentication
try:
cu = self.current_user
except AAAException:
if fail_redirect is None:
raise AuthException("Unauthenticated user")
else:
bottle.redirect(fail_redirect)
# Authorization
if cu.role not in self._store.roles:
raise AAAException("Role not found for the current user")
if username is not None:
if username != self.current_user.username:
if fail_redirect is None:
raise AuthException("Unauthorized access: incorrect"
" username")
else:
bottle.redirect(fail_redirect)
if fixed_role:
if role == self.current_user.role:
return
if fail_redirect is None:
raise AuthException("Unauthorized access: incorrect role")
else:
bottle.redirect(fail_redirect)
else:
if role is not None:
# Any role with higher level is allowed
current_lvl = self._store.roles[self.current_user.role]
threshold_lvl = self._store.roles[role]
if current_lvl >= threshold_lvl:
return
if fail_redirect is None:
raise AuthException("Unauthorized access: ")
else:
bottle.redirect(fail_redirect)
return
def create_role(self, role, level):
"""Create a new role.
:param role: role name
:type role: str.
:param level: role level (0=lowest, 100=admin)
:type level: int.
:raises: AuthException on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if role in self._store.roles:
raise AAAException("The role is already existing")
try:
int(level)
except ValueError:
raise AAAException("The level must be numeric.")
self._store.roles[role] = level
self._store.save_roles()
def delete_role(self, role):
"""Deleta a role.
:param role: role name
:type role: str.
:raises: AuthException on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if role not in self._store.roles:
raise AAAException("Nonexistent role.")
self._store.roles.pop(role)
self._store.save_roles()
def list_roles(self):
"""List roles.
:returns: (role, role_level) generator (sorted by role)
"""
for role in sorted(self._store.roles):
yield (role, self._store.roles[role])
def create_user(self, username, role, password, email_addr=None,
description=None):
"""Create a new user account.
This method is available to users with level>=100
:param username: username
:type username: str.
:param role: role
:type role: str.
        :param password: cleartext password
:type password: str.
:param email_addr: email address (optional)
:type email_addr: str.
:param description: description (free form)
:type description: str.
:raises: AuthException on errors
"""
assert username, "Username must be provided."
if self.current_user.level < 100:
raise AuthException("The current user is not authorized" \
" to create users.")
if username in self._store.users:
raise AAAException("User is already existing.")
if role not in self._store.roles:
raise AAAException("Nonexistent user role.")
tstamp = str(datetime.utcnow())
self._store.users[username] = {
'role': role,
'hash': self._hash(username, password),
'email_addr': email_addr,
'desc': description,
'creation_date': tstamp,
'last_login': tstamp
}
self._store.save_users()
def delete_user(self, username):
"""Delete a user account.
This method is available to users with level>=100
:param username: username
:type username: str.
:raises: Exceptions on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if username not in self._store.users:
raise AAAException("Nonexistent user.")
self.user(username).delete()
def list_users(self):
"""List users.
:return: (username, role, email_addr, description) generator (sorted by
username)
"""
for un in sorted(self._store.users):
d = self._store.users[un]
yield (un, d['role'], d['email_addr'], d['desc'])
@property
def current_user(self):
"""Current autenticated user
:returns: User() instance, if authenticated
:raises: AuthException otherwise
"""
session = self._beaker_session
username = session.get('username', None)
if username is None:
raise AuthException("Unauthenticated user")
if username is not None and username in self._store.users:
return User(username, self, session=session)
raise AuthException("Unknown user: %s" % username)
@property
def user_is_anonymous(self):
"""Check if the current user is anonymous.
:returns: True if the user is anonymous, False otherwise
:raises: AuthException if the session username is unknown
"""
try:
username = self._beaker_session['username']
except KeyError:
return True
if username not in self._store.users:
raise AuthException("Unknown user: %s" % username)
return False
def user(self, username):
"""Existing user
:returns: User() instance if the user exist, None otherwise
"""
if username is not None and username in self._store.users:
return User(username, self)
return None
def register(self, username, password, email_addr, role='user',
max_level=50, subject="Signup confirmation",
email_template='views/registration_email.tpl',
description=None):
"""Register a new user account. An email with a registration validation
is sent to the user.
WARNING: this method is available to unauthenticated users
:param username: username
:type username: str.
        :param password: cleartext password
:type password: str.
:param role: role (optional), defaults to 'user'
:type role: str.
:param max_level: maximum role level (optional), defaults to 50
:type max_level: int.
:param email_addr: email address
:type email_addr: str.
:param subject: email subject
:type subject: str.
:param email_template: email template filename
:type email_template: str.
:param description: description (free form)
:type description: str.
:raises: AssertError or AAAException on errors
"""
assert username, "Username must be provided."
assert password, "A password must be provided."
assert email_addr, "An email address must be provided."
if username in self._store.users:
raise AAAException("User is already existing.")
if role not in self._store.roles:
raise AAAException("Nonexistent role")
if self._store.roles[role] > max_level:
raise AAAException("Unauthorized role")
registration_code = uuid.uuid4().hex
creation_date = str(datetime.utcnow())
# send registration email
email_text = bottle.template(email_template,
username=username,
email_addr=email_addr,
role=role,
creation_date=creation_date,
registration_code=registration_code
)
self.mailer.send_email(email_addr, subject, email_text)
# store pending registration
self._store.pending_registrations[registration_code] = {
'username': username,
'role': role,
'hash': self._hash(username, password),
'email_addr': email_addr,
'desc': description,
'creation_date': creation_date,
}
self._store.save_pending_registrations()
def validate_registration(self, registration_code):
"""Validate pending account registration, create a new account if
successful.
:param registration_code: registration code
:type registration_code: str.
"""
try:
data = self._store.pending_registrations.pop(registration_code)
except KeyError:
raise AuthException("Invalid registration code.")
username = data['username']
if username in self._store.users:
raise AAAException("User is already existing.")
# the user data is moved from pending_registrations to _users
self._store.users[username] = {
'role': data['role'],
'hash': data['hash'],
'email_addr': data['email_addr'],
'desc': data['desc'],
'creation_date': data['creation_date'],
'last_login': str(datetime.utcnow())
}
self._store.save_users()
def send_password_reset_email(self, username=None, email_addr=None,
subject="Password reset confirmation",
email_template='views/password_reset_email'):
"""Email the user with a link to reset his/her password
If only one parameter is passed, fetch the other from the users
database. If both are passed they will be matched against the users
database as a security check.
:param username: username
:type username: str.
:param email_addr: email address
:type email_addr: str.
:param subject: email subject
:type subject: str.
:param email_template: email template filename
:type email_template: str.
:raises: AAAException on missing username or email_addr,
AuthException on incorrect username/email_addr pair
"""
if username is None:
if email_addr is None:
raise AAAException("At least `username` or `email_addr` must" \
" be specified.")
# only email_addr is specified: fetch the username
for k, v in self._store.users.iteritems():
if v['email_addr'] == email_addr:
username = k
break
else:
raise AAAException("Email address not found.")
else: # username is provided
if username not in self._store.users:
raise AAAException("Nonexistent user.")
if email_addr is None:
email_addr = self._store.users[username].get('email_addr', None)
if not email_addr:
raise AAAException("Email address not available.")
else:
# both username and email_addr are provided: check them
stored_email_addr = self._store.users[username]['email_addr']
if email_addr != stored_email_addr:
raise AuthException("Username/email address pair not found.")
# generate a reset_code token
reset_code = self._reset_code(username, email_addr)
# send reset email
email_text = bottle.template(email_template,
username=username,
email_addr=email_addr,
reset_code=reset_code
)
self.mailer.send_email(email_addr, subject, email_text)
def reset_password(self, reset_code, password):
"""Validate reset_code and update the account password
The username is extracted from the reset_code token
:param reset_code: reset token
:type reset_code: str.
        :param password: new password
:type password: str.
:raises: AuthException for invalid reset tokens, AAAException
"""
try:
reset_code = b64decode(reset_code)
username, email_addr, tstamp, h = reset_code.split(':', 3)
tstamp = int(tstamp)
except (TypeError, ValueError):
raise AuthException("Invalid reset code.")
if time() - tstamp > self.password_reset_timeout:
raise AuthException("Expired reset code.")
if not self._verify_password(username, email_addr, h):
raise AuthException("Invalid reset code.")
user = self.user(username)
if user is None:
raise AAAException("Nonexistent user.")
user.update(pwd=password)
def make_auth_decorator(self, username=None, role=None, fixed_role=False, fail_redirect='/login'):
'''
Create a decorator to be used for authentication and authorization
:param username: A resource can be protected for a specific user
:param role: Minimum role level required for authorization
:param fixed_role: Only this role gets authorized
:param fail_redirect: The URL to redirect to if a login is required.
'''
session_manager = self
def auth_require(username=username, role=role, fixed_role=fixed_role,
fail_redirect=fail_redirect):
def decorator(func):
import functools
@functools.wraps(func)
def wrapper(*a, **ka):
session_manager.require(username=username, role=role, fixed_role=fixed_role,
fail_redirect=fail_redirect)
return func(*a, **ka)
return wrapper
return decorator
return(auth_require)
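    # Hedged example of wiring the decorator factory (assumes `aaa` is a
    # configured Cork instance created elsewhere in the application):
    #
    #     authorize = aaa.make_auth_decorator(role='user', fail_redirect='/login')
    #
    #     @bottle.route('/admin')
    #     @authorize(role='admin')
    #     def admin_page():
    #         return 'admin page'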
## Private methods
@property
def _beaker_session(self):
"""Get Beaker session"""
return bottle.request.environ.get('beaker.session')
def _setup_cookie(self, username):
"""Setup cookie for a user that just logged in"""
session = self._beaker_session
session['username'] = username
if self.session_domain is not None:
session.domain = self.session_domain
session.save()
def _hash(self, username, pwd, salt=None, algo=None):
"""Hash username and password, generating salt value if required
"""
if algo is None:
algo = self.preferred_hashing_algorithm
if algo == 'PBKDF2':
return self._hash_pbkdf2(username, pwd, salt=salt)
if algo == 'scrypt':
return self._hash_scrypt(username, pwd, salt=salt)
raise RuntimeError("Unknown hashing algorithm requested: %s" % algo)
@staticmethod
def _hash_scrypt(username, pwd, salt=None):
"""Hash username and password, generating salt value if required
Use scrypt.
:returns: base-64 encoded str.
"""
if not scrypt_available:
raise Exception("scrypt.hash required."
" Please install the scrypt library.")
if salt is None:
salt = os.urandom(32)
assert len(salt) == 32, "Incorrect salt length"
cleartext = "%s\0%s" % (username, pwd)
h = scrypt.hash(cleartext, salt)
# 's' for scrypt
return b64encode('s' + salt + h)
@staticmethod
def _hash_pbkdf2(username, pwd, salt=None):
"""Hash username and password, generating salt value if required
Use PBKDF2 from Beaker
:returns: base-64 encoded str.
"""
if salt is None:
salt = os.urandom(32)
assert len(salt) == 32, "Incorrect salt length"
cleartext = "%s\0%s" % (username, pwd)
h = crypto.generateCryptoKeys(cleartext, salt, 10)
if len(h) != 32:
raise RuntimeError("The PBKDF2 hash is %d bytes long instead"
"of 32. The pycrypto library might be missing." % len(h))
# 'p' for PBKDF2
return b64encode('p' + salt + h)
def _verify_password(self, username, pwd, salted_hash):
"""Verity username/password pair against a salted hash
:returns: bool
"""
decoded = b64decode(salted_hash)
hash_type = decoded[0]
salt = decoded[1:33]
if hash_type == 'p': # PBKDF2
h = self._hash_pbkdf2(username, pwd, salt)
return salted_hash == h
if hash_type == 's': # scrypt
h = self._hash_scrypt(username, pwd, salt)
return salted_hash == h
raise RuntimeError("Unknown hashing algorithm: %s" % hash_type)
def _purge_expired_registrations(self, exp_time=96):
"""Purge expired registration requests.
:param exp_time: expiration time (hours)
:type exp_time: float.
"""
for uuid, data in self._store.pending_registrations.items():
creation = datetime.strptime(data['creation_date'],
"%Y-%m-%d %H:%M:%S.%f")
now = datetime.utcnow()
maxdelta = timedelta(hours=exp_time)
if now - creation > maxdelta:
self._store.pending_registrations.pop(uuid)
def _reset_code(self, username, email_addr):
"""generate a reset_code token
:param username: username
:type username: str.
:param email_addr: email address
:type email_addr: str.
:returns: Base-64 encoded token
"""
h = self._hash(username, email_addr)
t = "%d" % time()
reset_code = ':'.join((username, email_addr, t, h))
return b64encode(reset_code)
class User(object):
def __init__(self, username, cork_obj, session=None):
"""Represent an authenticated user, exposing useful attributes:
username, role, level, description, email_addr, session_creation_time,
session_accessed_time, session_id. The session-related attributes are
available for the current user only.
:param username: username
:type username: str.
:param cork_obj: instance of :class:`Cork`
"""
self._cork = cork_obj
assert username in self._cork._store.users, "Unknown user"
self.username = username
user_data = self._cork._store.users[username]
self.role = user_data['role']
self.description = user_data['desc']
self.email_addr = user_data['email_addr']
self.level = self._cork._store.roles[self.role]
if session is not None:
try:
self.session_creation_time = session['_creation_time']
self.session_accessed_time = session['_accessed_time']
self.session_id = session['_id']
            except Exception:
                pass
def update(self, role=None, pwd=None, email_addr=None):
"""Update an user account data
:param role: change user role, if specified
:type role: str.
:param pwd: change user password, if specified
:type pwd: str.
:param email_addr: change user email address, if specified
:type email_addr: str.
:raises: AAAException on nonexistent user or role.
"""
username = self.username
if username not in self._cork._store.users:
raise AAAException("User does not exist.")
if role is not None:
if role not in self._cork._store.roles:
raise AAAException("Nonexistent role.")
self._cork._store.users[username]['role'] = role
if pwd is not None:
self._cork._store.users[username]['hash'] = self._cork._hash(
username, pwd)
if email_addr is not None:
self._cork._store.users[username]['email_addr'] = email_addr
self._cork._store.save_users()
def delete(self):
"""Delete user account
:raises: AAAException on nonexistent user.
"""
try:
self._cork._store.users.pop(self.username)
except KeyError:
raise AAAException("Nonexistent user.")
self._cork._store.save_users()
class Mailer(object):
def __init__(self, sender, smtp_url, join_timeout=5):
"""Send emails asyncronously
:param sender: Sender email address
:type sender: str.
:param smtp_server: SMTP server
:type smtp_server: str.
"""
self.sender = sender
self.join_timeout = join_timeout
self._threads = []
self._conf = self._parse_smtp_url(smtp_url)
def _parse_smtp_url(self, url):
"""Parse SMTP URL"""
match = re.match(r"""
( # Optional protocol
(?P<proto>smtp|starttls|ssl) # Protocol name
://
)?
( # Optional user:pass@
(?P<user>[^:]*) # Match every char except ':'
(: (?P<pass>.*) )? @ # Optional :pass
)?
            (?P<fqdn>                    # Required FQDN or IP address
()| # Empty string
( # FQDN
[a-zA-Z_\-] # First character cannot be a number
[a-zA-Z0-9_\-\.]{,254}
)
|( # IPv4
([0-9]{1,3}\.){3}
[0-9]{1,3}
)
|( # IPv6
\[ # Square brackets
([0-9a-f]{,4}:){1,8}
[0-9a-f]{,4}
\]
)
)
( # Optional :port
:
(?P<port>[0-9]{,5}) # Up to 5-digits port
)?
[/]?
$
""", url, re.VERBOSE)
if not match:
raise RuntimeError("SMTP URL seems incorrect")
d = match.groupdict()
if d['proto'] is None:
d['proto'] = 'smtp'
if d['port'] is None:
d['port'] = 25
else:
d['port'] = int(d['port'])
if not 0 < d['port'] < 65536:
raise RuntimeError("Incorrect SMTP port")
return d
def send_email(self, email_addr, subject, email_text):
"""Send an email
:param email_addr: email address
:type email_addr: str.
:param subject: subject
:type subject: str.
:param email_text: email text
:type email_text: str.
:raises: AAAException if smtp_server and/or sender are not set
"""
if not (self._conf['fqdn'] and self.sender):
raise AAAException("SMTP server or sender not set")
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = self.sender
msg['To'] = email_addr
part = MIMEText(email_text, 'html')
msg.attach(part)
log.debug("Sending email using %s" % self._conf['fqdn'])
thread = Thread(target=self._send, args=(email_addr, msg.as_string()))
thread.start()
self._threads.append(thread)
def _send(self, email_addr, msg): # pragma: no cover
"""Deliver an email using SMTP
:param email_addr: recipient
:type email_addr: str.
:param msg: email text
:type msg: str.
"""
proto = self._conf['proto']
assert proto in ('smtp', 'starttls', 'ssl'), \
"Incorrect protocol: %s" % proto
try:
if proto == 'ssl':
log.debug("Setting up SSL")
session = SMTP_SSL(self._conf['fqdn'])
else:
session = SMTP(self._conf['fqdn'])
if proto == 'starttls':
log.debug('Sending EHLO and STARTTLS')
session.ehlo()
session.starttls()
session.ehlo()
if self._conf['user'] is not None:
log.debug('Performing login')
session.login(self._conf['user'], self._conf['pass'])
log.debug('Sending')
session.sendmail(self.sender, email_addr, msg)
session.quit()
log.info('Email sent')
except Exception as e:
log.error("Error sending email: %s" % e, exc_info=True)
def join(self):
"""Flush email queue by waiting the completion of the existing threads
:returns: None
"""
return [t.join(self.join_timeout) for t in self._threads]
def __del__(self):
"""Class destructor: wait for threads to terminate within a timeout"""
self.join()
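# Hedged usage sketch for Mailer (host, credentials and addresses below are
# placeholders, not values taken from this module):
#
#     m = Mailer('[email protected]', 'starttls://user:[email protected]:587')
#     m.send_email('[email protected]', 'Subject', '<p>body</p>')
#     m.join()  # wait for the delivery threads to finish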
| StarcoderdataPython |
1602119 | import math
import numpy as _np
import numba as _numba
import contextlib
@contextlib.contextmanager
def corrfunction(shape, z, qmax, xcenter=None, ycenter=None):
"""
CPU based radial Autocorrelation with q correction
parameters:
shape (tuple) of inputs in pixels
z (scalar) distance of detector in pixels
qmax (scalar): maximum distance
optional
xcenter (scalar): position of center in x direction, defaults to shape[0]/2
    ycenter (scalar): position of center in y direction, defaults to shape[1]/2
returns a function with signature float[:](float[:,:] image) that does the correlation
"""
xcenter = xcenter or shape[0] / 2.0
ycenter = ycenter or shape[1] / 2.0
y, x = _np.meshgrid(_np.arange(shape[1], dtype=_np.float64), _np.arange(shape[0], dtype=_np.float64))
x -= xcenter
y -= ycenter
d = _np.sqrt(x ** 2 + y ** 2 + z ** 2)
qx, qy, qz = [(k / d * z) for k in (x, y, z)]
del x, y, d
def inner(input, qx, qy, qz):
out = _np.zeros((shape[0] + 10, qmax), dtype=_np.float64)
for refx in _numba.prange(shape[0]):
for refy in range(shape[1]):
qxr = qx[refx, refy]
qyr = qy[refx, refy]
qzr = qz[refx, refy]
refv = input[refx, refy]
for direction in range(2):
dqx = 0
                    x = refx + direction  # don't process the dx=0 column twice
while -qmax <= dqx <= qmax and 0 <= x < input.shape[0]:
dqy = 0
y = refy
while dqy <= qmax and 0 <= y < input.shape[1]:
dq = (qx[x, y] - qxr) ** 2 + (qy[x, y] - qyr) ** 2 + (qz[x, y] - qzr) ** 2
qsave = int(round(math.sqrt(dq)))
if qsave >= qmax:
break
val = refv * input[x, y]
out[refx, qsave] += val
y += 1
x += -1 + 2 * direction
return out
finner = _numba.njit(inner, parallel=True, fastmath=True).compile("float64[:,:](float64[:,:],float64[:,:],float64[:,:],float64[:,:])")
def corr(input):
"""
Do the correlation
"""
if finner is None:
raise ValueError("already closed, use within with statement")
input = _np.asarray(input).astype(_np.float64, copy=False)
if not all(i == s for (i, s) in zip(input.shape, shape)):
raise ValueError("not the same shape")
return _np.sum(finner(input, qx, qy, qz), axis=0)
yield corr
    # Drop the references so the large arrays can be garbage collected;
    # corr() raises ValueError if called after the with-block has exited.
    qx = qy = qz = finner = shape = None
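# Hedged example (not in the original module): the shape, z and qmax values
# below are illustrative only.
if __name__ == "__main__":
    image = _np.random.rand(64, 64)
    with corrfunction((64, 64), z=500.0, qmax=32) as corr:
        radial = corr(image)
    print(radial.shape)  # -> (32,)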
| StarcoderdataPython |
1767271 | """Problem 29 of https://projecteuler.net"""
def problem_29():
"""Solution to problem 29."""
powers = [a ** b for a in range(2, 101) for b in range(2, 101)]
answer = len(set(powers))
return answer
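if __name__ == "__main__":
    # Assumed entry point, added for illustration; not part of the original file.
    print(problem_29())  # 9183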
| StarcoderdataPython |
4815259 | from node import Node
class NodeNetwork(object):
''' Handles a collection of nodes in a grid. '''
def __init__(self, d1=5, d2=9):
''' Initialize stuff like the node grid. '''
# TODO?: Consider using numpy arrays
self.node_grid = [[None for b in range(0,d2)] for a in range(0,d1)]
# Create a root node at the center of the grid
center_d1 = int((d1-1) / 2)
center_d2 = int((d2-1) / 2)
self.main_root_node = Node(self)
self.node_grid[center_d1][center_d2] = self.main_root_node
def __iter__(self):
''' Iterate over the nodes in grid, starting at node_grid[0][0]. '''
for d1 in self.node_grid:
for d2 in d1:
yield d2
def add_node(self, root_node, new_node):
'''Adds a new node as close as possible to the root node. '''
root_pos = (0,0)
for x in range(0, len(self.node_grid)):
if root_node in self.node_grid[x]:
root_pos = (x, self.node_grid[x].index(root_node))
# Create the pathfinder, starting at the root node position
pathfinder = PathfinderIterator2D(
self.node_grid,
start_at=root_pos
)
# Find an empty node space
for coords in pathfinder:
            if self.node_grid[coords[0]][coords[1]] is None:
self.node_grid[coords[0]][coords[1]] = new_node
print("Found Empty Node")
return
# No empty node found, raise exception
raise FullNodeGrid()
def display_active_nodes(self):
''' Print out a grid of what state each node is in. '''
num_rows = len(self.node_grid[0])
spacing = ' '
rows = ["" for row in range(num_rows)]
curr_row = 0
for node in self:
# Default print char, indicates a non-node/None obj in the grid
print_char = '?'
# Node is unoccupied
if node is None:
print_char = '-'
# Node is occupied by a node obj
elif isinstance(node, Node):
print_char = 'X'
# Add the node character + spacing to this row
rows[curr_row] += print_char
rows[curr_row] += spacing
# Move to the next row
curr_row += 1
# Go back to first row if just on last row
if curr_row >= num_rows:
curr_row = 0
# Print each row, reverse the rows so 0,0 is on lower left
for row in rows[::-1]:
print(row, sep='', end='\n')
def display_connection_count(self):
''' Print out a grid of the number of connections each node has. '''
# Figure out the highest # of connections
        max_con = max((len(node.connections) for node in self if node is not None), default=0)
num_rows = len(self.node_grid[0])
spacing = ' '
rows = ["" for row in range(num_rows)]
curr_row = 0
for node in self:
to_print = ""
# Determine number to display
if node is not None:
num_str = str(len(getattr(node, "connections")))
# Add padding characters
if len(num_str) < len(str(max_con)):
for char in range(len(str(max_con))-len(num_str)):
to_print += ' '
# Concate padding with actual connection count
to_print += num_str
# Otherwise print filler text
else:
for char in range(max(len(str(max_con)),1)):
to_print += '-'
# Add to row
rows[curr_row] += to_print
# Add spacing
rows[curr_row] += spacing
# Go to next row
curr_row += 1
# Go back to first row if just on last row
if curr_row >= num_rows:
curr_row = 0
# Print each row, reverse the rows so 0,0 is on lower left
for row in rows[::-1]:
print(row, sep='', end='\n')
class FullNodeGrid(Exception):
''' Exception raised when attempting to add a node to a full node grid. '''
def __init__(self):
pass
def __str__(self):
return "Cannot add a node to a full node grid."
class PathfinderIterator2D(object):
''' An iterable object used to iterate over a 2d grid for use in simple
pathfinding algorithms.
'''
def __init__(self, grid_list, start_at=(0,0)):
''' Initialize stuff like the reference to the grid to iterate over. '''
# The 2D list/array to iterate over
self.iter_over = grid_list
# What grid space to start at
self.first_tile = start_at
def __iter__(self):
''' Iterate over the path determined by the algorithm.
Returns : A coordinate pair as a tuple
'''
print("Finding An Empty Node...")
width = len(self.iter_over)
height = len(self.iter_over[0])
# Tiles already parsed
done = []
# Tiles to search, FIFO (first in first out)
queue = []
# Add the tile to start with
queue.append(self.first_tile)
while len(queue) > 0:
next_tile = queue.pop(0)
print("Current In Pathfinder:", next_tile)
done.append(next_tile)
cx = next_tile[0]
cy = next_tile[1]
# Potential tiles to parse
right = (cx + 1, cy)
down = (cx, cy - 1)
left = (cx - 1, cy)
up = (cx, cy + 1)
# Right tile
if right not in done and right not in queue and right[0] in range(0, width):
print("Queueing Right")
queue.append(right)
# Down tile
if down not in done and down not in queue and down[1] in range(0, height):
print("Queueing Lower")
queue.append(down)
# Left tile
if left not in done and left not in queue and left[0] in range(0, width):
print("Queueing Left")
queue.append(left)
# Up tile
if up not in done and up not in queue and up[1] in range(0, height):
print("Queueing Upper")
queue.append(up)
# Next item in iterable
# yield self.iter_over(next_tile[0], next_tile[1])
# Next coord pair
            yield next_tile
| StarcoderdataPython |
1713925 | from setuptools import setup
setup(
name='HospitalDBWeb',
version='1.0',
packages=[''],
url='https://github.com/nairachiclana/HospitalDBWeb',
license='MIT',
author='joseroma & nairachiclana',
description='Trabajo de la asignatura de Estandares de datos abiertos e integración de datos.'
)
| StarcoderdataPython |
3252300 | <filename>tcpPC.py
# -*- coding: utf-8 -*-
"""
Created on Mon May 14 16:18:14 2018
@author: fahad
@contributor: <NAME>
"""
import socket
import sys
import time
import tty, termios
#-------------Initialization-----------------------------------
def tcpPC():
# this is the tcp client to connect with sailboat server
TCP_IP = '192.168.31.148'
TCP_PORT = 50007
BUFFER_SIZE = 1024
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Client socket1 is created \n')
try:
s.connect((TCP_IP, TCP_PORT))
print(' TCP1 Connected \n')
except:
print('An Error Occured! \n')
sys.exit()
s.sendall('Hello'.encode('utf-8')) ## Converting into Bytes
time.sleep(0.2) ## 0.2 second delay
#-------------------Sending Commands ----------------------------
print('****************Manual Control Mode********************\n\
Please Enter W for Forward, A for Turning left, D for Turning right,\n\
S for Stop, Q for Quit to choose other modes.')
while True:
fd=sys.stdin.fileno()
old_settings=termios.tcgetattr(fd)
try:
tty.setraw(fd)
command=sys.stdin.read(1).upper()
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
if command == 'S' or command == 'W' or command == 'A' or command == 'D':
message_bytes = command.encode('utf-8') ## Converting into Bytes
s.sendall(message_bytes)
elif command == 'Q':
answer = raw_input('Quit the Manual Control System. Are you sure? Y/N ').upper()
if answer == 'Y':
s.sendall(command.encode('utf-8'))
break
elif answer == 'N':
print('Reinput please.')
continue
break
elif command == '':
pass
else:
            print('Wrong input: not W, A, S, D or Q. Retype please.')
continue
# time.sleep(0.2) ## 0.2 second pause
data = s.recv(BUFFER_SIZE)
print (data.decode('utf-8'))
s.close()
time.sleep(0.5) ## 0.5 second delay
print('TCP1 Communication Closed')
sys.exit()
#----------------------------------------------------------------
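if __name__ == '__main__':
    # Assumed entry point: the module defines tcpPC() but never invokes it.
    tcpPC()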
| StarcoderdataPython |
1602096 | <filename>src/data/make_first_raw_dataset.py
from src.utils.utils import get_file_path
import json
from pickle import dump
def import_dataset(dataset_name):
reviews_list = list()
with open(get_file_path("raw\\" + dataset_name + ".json"), encoding="utf8") as json_file:
for line in json_file:
sample = json.loads(line)
reviews_list.append(sample)
return reviews_list
def export_sampled_datasets(train, file):
with open(get_file_path("raw\\"+file + ".pkl"), "wb") as f:
dump(train, f)
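# Hedged usage sketch ("reviews" is a placeholder dataset name):
#
#     reviews = import_dataset("reviews")
#     export_sampled_datasets(reviews, "reviews")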
| StarcoderdataPython |
67999 | import time
import numpy as np
from kid_readout.interactive import *
from kid_readout.measurement import acquire
from kid_readout.roach import r2heterodyne, attenuator, hardware_tools
logger.setLevel(logging.DEBUG)
setup = hardware.Hardware()
ri = hardware_tools.r2h14_with_mk2(initialize=True, use_config=False)
ri.iq_delay=-1
dac_atten = 20
ri.set_dac_atten(dac_atten)
ri.set_fft_gain(6)
nsamp = 2**16
step = 1
nstep = 64
offset_bins = np.arange(-(nstep), (nstep)) * step
offsets = offset_bins * 512.0 / nsamp
ri.set_modulation_output('high')
#ri.set_lo(3200.)
ri.set_lo(2370.)
state = dict(magnetic_shield = 'on', cryostat='starcryo')
state.update(other=setup.state())
tic = time.time()
#for lo in 2200.+190*np.arange(0,2):
for lo in 2200. + 190 * np.arange(0, 2):
logger.info("Measuring at LO %.1f" % lo)
df = acquire.new_nc_file(suffix='scan_lo_%.1f_MHz_atten_%.1f_dB' % (lo, dac_atten))
ri.set_lo(lo)
state.update(other=setup.state(fast=True))
swa = acquire.run_sweep(ri, (np.arange(1, 257)[None, :] * 7 / 8. + ri.lo_frequency + offsets[:, None]),
num_tone_samples=nsamp, length_seconds=0.2, state=state, verbose=True)
df.write(swa)
df.close()
print "elapsed:", (time.time()-tic)/60.0,'minutes' | StarcoderdataPython |
3332997 | import redis
def connect_db():
"""Crear conexion a la base de datos."""
conexion = redis.StrictRedis(host='db-rentals',port=6379,db=0, decode_responses=True)
if(conexion.ping()):
print("Conectado al servidor de redis")
else:
print("Error")
    return conexion
| StarcoderdataPython |
3270064 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
------------------------------------
@Project : nightwalker
@Time : 2020/10/13 14:01
@Auth : chineseluo
@Email : <EMAIL>
@File : cli.py
@IDE : PyCharm
------------------------------------
"""
import os
import sys
import pytest
import argparse
from nightwalker import __version__, __description__
def init_scaffold_parser(subparsers):
sub_scaffold_parser = subparsers.add_parser(
"startproject", help="Create a new project with template structure."
)
sub_scaffold_parser.add_argument(
"project_name", type=str, nargs="?", help="Specify new project name."
)
return sub_scaffold_parser
def main():
# Generate subcommand object
parser = argparse.ArgumentParser(description=__description__)
parser.add_argument("-v", "-V", "--version", "--Version", dest="version", action="store_true", help="show version")
subparsers = parser.add_subparsers(help="Night walker sub-command help")
sub_scaffold_parser = init_scaffold_parser(subparsers)
nw_argv = sys.argv
print(nw_argv)
if len(nw_argv) == 1:
parser.print_help()
sys.exit()
elif len(nw_argv) == 2:
if nw_argv[1] in ["-V", "-v", "--Version", "--version"]:
print(f"The Night Walker version is {__version__}")
elif nw_argv[1] in ["-h", "-H", "--help", "--Help"]:
parser.print_help()
elif nw_argv[1] == "startproject":
sub_scaffold_parser.print_help()
else:
print("Please use nm - h to view help information")
sys.exit(0)
elif len(sys.argv) == 3 and sys.argv[1] == "startproject" and sys.argv[2] in ["-h", "-H", "--help", "--Help"]:
pytest.main(["-h"])
sys.exit(0)
if __name__ == "__main__":
main()
| StarcoderdataPython |
1640544 | <reponame>victor-gp/tfg-H16b
import yaml
from os.path import relpath, commonprefix
def parse_job_config(args):
job_config_file = args.job_config
validate(job_config_file)
with open(job_config_file) as f:
job_config = yaml.safe_load(f)
if args.job_type is not None:
job_config['job_type'] = args.job_type
return job_config
def validate(path):
given_path = relpath(path)
jobs_dir = relpath('jobs/')
if commonprefix([given_path, jobs_dir]) != jobs_dir:
raise ValueError(
f'Invalid path: {given_path}. It must be inside directory {jobs_dir}.'
)
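# Hedged usage sketch: the Namespace below stands in for the project's real
# CLI arguments; the YAML path is a placeholder.
#
#     import argparse
#     args = argparse.Namespace(job_config="jobs/example.yaml", job_type=None)
#     job_config = parse_job_config(args)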
| StarcoderdataPython |
162993 | <reponame>sqall01/LSMS<filename>scripts/monitor_ssh_authorized_keys.py
#!/usr/bin/env python3
# written by sqall
# twitter: https://twitter.com/sqall01
# blog: https://h4des.org
# github: https://github.com/sqall01
#
# Licensed under the MIT License.
"""
Short summary:
Monitor ~/.ssh/authorized_keys for changes to detect malicious backdoor attempts.
NOTE: The first execution of this script will only show you the current state of the environment which should be acknowledged before monitoring for changes will become an effective security measure.
Requirements:
None
"""
import os
import stat
from typing import List, Tuple, Dict, Any
from lib.state import load_state, store_state
from lib.util import output_error, output_finding
from lib.util_user import get_system_users
# Read configuration.
try:
from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR
from config.monitor_ssh_authorized_keys import ACTIVATED
STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__))
except Exception:
ALERTR_FIFO = None
FROM_ADDR = None
TO_ADDR = None
ACTIVATED = True
STATE_DIR = os.path.join("/tmp", os.path.basename(__file__))
class MonitorSSHException(Exception):
pass
def _get_home_dirs() -> List[Tuple[str, str]]:
return [(x.name, x.home) for x in get_system_users()]
def _get_system_ssh_data() -> List[Dict[str, Any]]:
ssh_data = []
user_home_list = _get_home_dirs()
for user, home in user_home_list:
# Monitor "authorized_keys2" too since SSH also checks this file for keys (even though it is deprecated).
for authorized_file_name in ["authorized_keys", "authorized_keys2"]:
authorized_keys_file = os.path.join(home, ".ssh", authorized_file_name)
if os.path.isfile(authorized_keys_file):
ssh_user_data = {"user": user,
"authorized_keys_file": authorized_keys_file,
"authorized_keys_entries": _parse_authorized_keys_file(authorized_keys_file)}
ssh_data.append(ssh_user_data)
return ssh_data
def _parse_authorized_keys_file(authorized_keys_file: str) -> List[str]:
entries = set()
try:
with open(authorized_keys_file, 'rt') as fp:
for line in fp:
entries.add(line.strip())
except Exception as e:
raise MonitorSSHException("Unable to parse file '%s'; Exception: '%s'" % (authorized_keys_file, str(e)))
return list(entries)
def monitor_ssh_authorized_keys():
# Decide where to output results.
print_output = False
if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
print_output = True
if not ACTIVATED:
if print_output:
print("Module deactivated.")
return
stored_ssh_data = []
curr_ssh_data = []
try:
state_data = load_state(STATE_DIR)
if "ssh_data" in state_data.keys():
stored_ssh_data = state_data["ssh_data"]
curr_ssh_data = _get_system_ssh_data()
except Exception as e:
output_error(__file__, str(e))
return
# Check if any authorized_keys file is world writable.
for curr_entry in curr_ssh_data:
authorized_keys_file = curr_entry["authorized_keys_file"]
file_stat = os.stat(authorized_keys_file)
if file_stat.st_mode & stat.S_IWOTH:
message = "SSH authorized_keys file for user '%s' is world writable." % curr_entry["user"]
output_finding(__file__, message)
# Compare stored data with current one.
for stored_entry in stored_ssh_data:
# Extract current entry belonging to the same user.
curr_user_entry = None
for curr_entry in curr_ssh_data:
if stored_entry["user"] == curr_entry["user"]:
curr_user_entry = curr_entry
break
if curr_user_entry is None:
message = "SSH authorized_keys file for user '%s' was deleted." % stored_entry["user"]
output_finding(__file__, message)
continue
# Check authorized_keys path has changed.
if stored_entry["authorized_keys_file"] != curr_user_entry["authorized_keys_file"]:
message = "SSH authorized_keys location for user '%s' changed from '%s' to '%s'." \
% (stored_entry["user"],
stored_entry["authorized_keys_file"],
curr_user_entry["authorized_keys_file"])
output_finding(__file__, message)
# Check authorized_key was removed.
for authorized_key in stored_entry["authorized_keys_entries"]:
if authorized_key not in curr_user_entry["authorized_keys_entries"]:
message = "SSH authorized_keys entry was removed.\n\n"
message += "Entry: %s" % authorized_key
output_finding(__file__, message)
# Check authorized_key was added.
for authorized_key in curr_user_entry["authorized_keys_entries"]:
if authorized_key not in stored_entry["authorized_keys_entries"]:
message = "SSH authorized_keys entry was added.\n\n"
message += "Entry: %s" % authorized_key
output_finding(__file__, message)
for curr_entry in curr_ssh_data:
found = False
for stored_entry in stored_ssh_data:
if curr_entry["user"] == stored_entry["user"]:
found = True
break
if not found:
message = "New authorized_keys file was added for user '%s'.\n\n" % curr_entry["user"]
message += "Entries:\n"
for authorized_key in curr_entry["authorized_keys_entries"]:
message += authorized_key
message += "\n"
output_finding(__file__, message)
try:
state_data["ssh_data"] = curr_ssh_data
store_state(STATE_DIR, state_data)
except Exception as e:
output_error(__file__, str(e))
if __name__ == '__main__':
monitor_ssh_authorized_keys()
| StarcoderdataPython |
1627872 | <reponame>killapop/hypha
import re
from datetime import timedelta
from bs4 import BeautifulSoup
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from django.utils import timezone
from django.utils.text import slugify
from hypha.apply.activity.models import TEAM, Activity
from hypha.apply.determinations.tests.factories import DeterminationFactory
from hypha.apply.funds.tests.factories import (
ApplicationRevisionFactory,
ApplicationSubmissionFactory,
AssignedReviewersFactory,
AssignedWithRoleReviewersFactory,
InvitedToProposalFactory,
LabSubmissionFactory,
ReminderFactory,
ReviewerRoleFactory,
ScreeningStatusFactory,
SealedRoundFactory,
SealedSubmissionFactory,
)
from hypha.apply.funds.workflow import INITIAL_STATE
from hypha.apply.home.factories import ApplySiteFactory
from hypha.apply.projects.models import Project
from hypha.apply.projects.tests.factories import ProjectFactory
from hypha.apply.review.tests.factories import ReviewFactory
from hypha.apply.users.tests.factories import (
ApplicantFactory,
CommunityReviewerFactory,
PartnerFactory,
ReviewerFactory,
StaffFactory,
SuperUserFactory,
)
from hypha.apply.utils.testing import make_request
from hypha.apply.utils.testing.tests import BaseViewTestCase
from ..models import (
ApplicationRevision,
ApplicationSubmission,
ReviewerSettings,
ScreeningStatus,
)
from ..views import SubmissionDetailSimplifiedView, SubmissionDetailView
from .factories import CustomFormFieldsFactory
def prepare_form_data(submission, **kwargs):
data = submission.raw_data
for field, value in kwargs.items():
# convert named fields into id
field_id = submission.field(field).id
data[field_id] = value
return CustomFormFieldsFactory.form_response(submission.form_fields, data)
class BaseSubmissionViewTestCase(BaseViewTestCase):
url_name = 'funds:submissions:{}'
base_view_name = 'detail'
def get_kwargs(self, instance):
return {'pk': instance.id}
class TestStaffSubmissionView(BaseSubmissionViewTestCase):
user_factory = StaffFactory
@classmethod
def setUpTestData(cls):
cls.submission = ApplicationSubmissionFactory()
super().setUpTestData()
    def setUp(self):
        super().setUp()
        self.refresh(self.submission)
def test_can_view_a_submission(self):
response = self.get_page(self.submission)
self.assertContains(response, self.submission.title)
def test_can_view_a_lab_submission(self):
submission = LabSubmissionFactory()
response = self.get_page(submission)
self.assertContains(response, submission.title)
def test_can_progress_phase(self):
next_status = 'internal_review'
self.post_page(self.submission, {'form-submitted-progress_form': '', 'action': next_status})
submission = self.refresh(self.submission)
self.assertEqual(submission.status, next_status)
def test_redirected_to_determination(self):
submission = ApplicationSubmissionFactory(status='concept_review_discussion', workflow_stages=2, lead=self.user)
response = self.post_page(submission, {'form-submitted-progress_form': '', 'action': 'invited_to_proposal'})
        # Invited to proposal is a determination, so this will redirect to the determination form.
url = self.url_from_pattern('funds:submissions:determinations:form', kwargs={'submission_pk': submission.id})
self.assertRedirects(response, f"{url}?action=invited_to_proposal")
def test_new_form_after_progress(self):
submission = ApplicationSubmissionFactory(status='invited_to_proposal', workflow_stages=2, lead=self.user)
stage = submission.stage
DeterminationFactory(submission=submission, accepted=True)
request = make_request(self.user, method='get', site=submission.page.get_site())
submission.progress_stage_when_possible(self.user, request)
submission = self.refresh(submission)
new_stage = submission.stage
self.assertNotEqual(stage, new_stage)
get_forms = submission.get_from_parent('get_defined_fields')
self.assertEqual(submission.form_fields, get_forms(new_stage))
self.assertNotEqual(submission.form_fields, get_forms(stage))
def test_cant_progress_stage_if_not_lead(self):
submission = ApplicationSubmissionFactory(status='concept_review_discussion', workflow_stages=2)
self.post_page(submission, {'form-submitted-progress_form': '', 'action': 'invited_to_proposal'})
submission = self.refresh(submission)
self.assertEqual(submission.status, 'concept_review_discussion')
self.assertIsNone(submission.next)
def test_not_redirected_if_determination_submitted(self):
submission = ApplicationSubmissionFactory(lead=self.user)
DeterminationFactory(submission=submission, rejected=True, submitted=True)
self.post_page(submission, {'form-submitted-progress_form': '', 'action': 'rejected'})
submission = self.refresh(submission)
self.assertEqual(submission.status, 'rejected')
def test_not_redirected_if_wrong_determination_selected(self):
submission = ApplicationSubmissionFactory(lead=self.user)
DeterminationFactory(submission=submission, accepted=True, submitted=True)
response = self.post_page(submission, {'form-submitted-progress_form': '', 'action': 'rejected'})
self.assertContains(response, 'you tried to progress')
submission = self.refresh(submission)
self.assertNotEqual(submission.status, 'accepted')
self.assertNotEqual(submission.status, 'rejected')
def test_cant_access_edit_button_when_applicant_editing(self):
submission = ApplicationSubmissionFactory(status='more_info')
response = self.get_page(submission)
self.assertNotContains(response, self.url(submission, 'edit', absolute=False))
def test_can_access_edit_button(self):
response = self.get_page(self.submission)
self.assertContains(response, self.url(self.submission, 'edit', absolute=False))
def test_can_access_edit(self):
response = self.get_page(self.submission, 'edit')
self.assertContains(response, self.submission.title)
def test_previous_and_next_appears_on_page(self):
proposal = InvitedToProposalFactory()
response = self.get_page(proposal)
self.assertContains(response, self.url(proposal.previous, absolute=False))
response = self.get_page(proposal.previous)
self.assertContains(response, self.url(proposal, absolute=False))
def test_can_edit_submission(self):
old_status = self.submission.status
new_title = 'A new Title'
data = prepare_form_data(self.submission, title=new_title)
response = self.post_page(self.submission, {'submit': True, **data}, 'edit')
url = self.url(self.submission)
self.assertRedirects(response, url)
submission = self.refresh(self.submission)
# Staff edits don't affect the status
self.assertEqual(old_status, submission.status)
self.assertEqual(new_title, submission.title)
def test_not_included_fields_render(self):
submission = ApplicationSubmissionFactory(form_fields__exclude__checkbox=True)
response = self.get_page(submission)
self.assertNotContains(response, 'check_one')
def test_can_screen_submission(self):
ScreeningStatus.objects.all().delete()
screening_outcome1 = ScreeningStatusFactory()
screening_outcome1.yes = True
screening_outcome1.save()
screening_outcome2 = ScreeningStatusFactory()
screening_outcome2.yes = True
screening_outcome2.default = True
screening_outcome2.save()
self.submission.screening_statuses.clear()
self.submission.screening_statuses.add(screening_outcome2)
self.post_page(self.submission, {'form-submitted-screening_form': '', 'screening_statuses': [screening_outcome1.id, screening_outcome2.id]})
submission = self.refresh(self.submission)
self.assertEqual(submission.screening_statuses.count(), 2)
def test_can_view_submission_screening_block(self):
ScreeningStatus.objects.all().delete()
screening_outcome1 = ScreeningStatusFactory()
        screening_outcome1.yes = True
        screening_outcome1.default = True
screening_outcome1.save()
screening_outcome2 = ScreeningStatusFactory()
screening_outcome2.yes = False
screening_outcome2.default = True
screening_outcome2.save()
self.submission.screening_statuses.clear()
response = self.get_page(self.submission)
self.assertContains(response, 'Screening status')
def test_cant_view_submission_screening_block(self):
"""
If defaults are not set screening status block is not visible
"""
ScreeningStatus.objects.all().delete()
self.submission.screening_statuses.clear()
response = self.get_page(self.submission)
self.assertNotContains(response, 'Screening status')
def test_can_create_project(self):
# check submission doesn't already have a Project
with self.assertRaisesMessage(Project.DoesNotExist, 'ApplicationSubmission has no project.'):
self.submission.project
self.post_page(self.submission, {
'form-submitted-project_form': '',
'submission': self.submission.id,
})
project = Project.objects.order_by('-pk').first()
submission = ApplicationSubmission.objects.get(pk=self.submission.pk)
self.assertTrue(hasattr(submission, 'project'))
        self.assertEqual(submission.project.id, project.id)
def test_can_see_add_determination_primary_action(self):
def assert_add_determination_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 1)
submission = ApplicationSubmissionFactory(status='determination')
# Phase: ready-for-determination, no determination
# "Add determination" should be displayed
assert_add_determination_displayed(submission, 'Add determination')
# Phase: ready-for-determination, draft determination
# "Complete draft determination" should be displayed
DeterminationFactory(submission=submission, author=self.user, accepted=True, submitted=False)
assert_add_determination_displayed(submission, 'Complete draft determination')
def test_cant_see_add_determination_primary_action(self):
def assert_add_determination_not_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory()
# Phase: received / in_discussion
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
# Phase: accepted
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
submission.perform_transition('accepted', self.user)
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
def test_screen_application_primary_action_is_displayed(self):
ScreeningStatus.objects.all().delete()
# Submission not screened
screening_outcome = ScreeningStatusFactory()
screening_outcome.yes = False
screening_outcome.default = True
screening_outcome.save()
self.submission.screening_statuses.clear()
self.submission.screening_statuses.add(screening_outcome)
response = self.get_page(self.submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', text='Screen application')
self.assertEqual(len(buttons), 1)
self.submission.screening_statuses.clear()
def test_screen_application_primary_action_is_not_displayed(self):
response = self.get_page(self.submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', text='Screen application')
self.assertEqual(len(buttons), 0)
def test_can_see_create_review_primary_action(self):
def assert_create_review_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 1)
submission = ApplicationSubmissionFactory(with_external_review=True, status='ext_internal_review')
# Phase: internal_review, no review
# "Add a review" should be displayed
assert_create_review_displayed(submission, 'Add a review')
# Phase: internal_review, draft review created
# "Complete draft review" should be displayed
review = ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=True)
assert_create_review_displayed(submission, 'Complete draft review')
review.delete()
# Phase: external_review, no review
# "Add a review" should be displayed
submission.perform_transition('ext_post_review_discussion', self.user)
submission.perform_transition('ext_external_review', self.user)
assert_create_review_displayed(submission, 'Add a review')
# Phase: external_review, draft review created
# "Complete draft review" should be displayed
ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=True)
assert_create_review_displayed(submission, 'Complete draft review')
def test_cant_see_create_review_primary_action(self):
def assert_create_review_not_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory(with_external_review=True)
# Phase: received / in_discussion
# "Add a review" should not be displayed
# "Complete draft review" should not be displayed
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
# Phase: internal_review, review completed
# "Add a review" should not be displayed
# "Update draft review" should not be displayed
submission.perform_transition('ext_internal_review', self.user)
ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=False)
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
# Phase: external_review, review completed
# "Add a review" should not be displayed
# "Update draft review" should not be displayed
submission.perform_transition('ext_post_review_discussion', self.user)
submission.perform_transition('ext_external_review', self.user)
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
def test_can_see_assign_reviewers_primary_action(self):
def assert_assign_reviewers_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='Assign reviewers')
self.assertEqual(len(buttons), 1)
submission = ApplicationSubmissionFactory(status='internal_review')
reviewer_role_a = ReviewerRoleFactory()
reviewer_role_b = ReviewerRoleFactory()
# Phase: internal_review - no reviewers assigned
# Assign reviewers should be displayed
assert_assign_reviewers_displayed(submission)
# Phase: internal_review - not all reviewer types assigned
# Assign reviewers should be displayed
AssignedReviewersFactory(submission=submission, reviewer=ReviewerFactory(), role=reviewer_role_a)
assert_assign_reviewers_displayed(submission)
# Phase: external_review - no reviewers assigned
# Assign reviewers should be displayed
submission = ApplicationSubmissionFactory(with_external_review=True, status='ext_external_review')
assert_assign_reviewers_displayed(submission)
# Phase: external_review - all reviewers types assigned
# Assign reviewers should still be displayed
AssignedReviewersFactory(submission=submission, reviewer=ReviewerFactory(), role=reviewer_role_a)
AssignedReviewersFactory(submission=submission, reviewer=ReviewerFactory(), role=reviewer_role_b)
assert_assign_reviewers_displayed(submission)
def test_cant_see_assign_reviewers_primary_action(self):
def assert_assign_reviewers_not_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='Assign reviewers')
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory()
reviewer_role = ReviewerRoleFactory()
# Phase: received / in_discussion
# Assign reviewers should not be displayed
assert_assign_reviewers_not_displayed(submission)
# Phase: internal_review - all reviewer types assigned
# Assign reviewers should not be displayed
AssignedReviewersFactory(submission=submission, reviewer=ReviewerFactory(), role=reviewer_role)
assert_assign_reviewers_not_displayed(submission)
def test_can_see_assign_reviewers_secondary_action(self):
def assert_assign_reviewers_secondary_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--white', text='Reviewers')
self.assertEqual(len(buttons), 1)
submission = ApplicationSubmissionFactory()
reviewer_role = ReviewerRoleFactory()
# Phase: received / in_discussion
assert_assign_reviewers_secondary_displayed(submission)
# Phase: internal_review - no reviewers assigned
submission.perform_transition('internal_review', self.user)
assert_assign_reviewers_secondary_displayed(submission)
# Phase: internal_review - all reviewer types assigned
AssignedReviewersFactory(submission=submission, reviewer=ReviewerFactory(), role=reviewer_role)
assert_assign_reviewers_secondary_displayed(submission)
def test_can_see_view_determination_primary_action(self):
def assert_view_determination_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 1)
# Phase: accepted
submission = ApplicationSubmissionFactory(status='accepted')
DeterminationFactory(submission=submission, author=self.user, accepted=True, submitted=True)
assert_view_determination_displayed(submission)
# Phase: rejected
submission = ApplicationSubmissionFactory(status='rejected')
DeterminationFactory(submission=submission, author=self.user, rejected=True, submitted=True)
assert_view_determination_displayed(submission)
def test_cant_see_view_determination_primary_action(self):
def assert_view_determination_not_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 0)
# Phase: received / in_discussion
submission = ApplicationSubmissionFactory()
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, no determination
submission.perform_transition('determination', self.user)
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, draft determination
DeterminationFactory(submission=submission, author=self.user, accepted=True, submitted=False)
assert_view_determination_not_displayed(submission)
def test_cant_see_application_draft_status(self):
factory = RequestFactory()
submission = ApplicationSubmissionFactory(status='draft')
ProjectFactory(submission=submission)
request = factory.get(f'/submission/{submission.pk}')
request.user = StaffFactory()
with self.assertRaises(Http404):
SubmissionDetailView.as_view()(request, pk=submission.pk)
def test_applicant_can_see_application_draft_status(self):
factory = RequestFactory()
user = ApplicantFactory()
submission = ApplicationSubmissionFactory(status='draft', user=user)
ProjectFactory(submission=submission)
request = factory.get(f'/submission/{submission.pk}')
request.user = user
response = SubmissionDetailView.as_view()(request, pk=submission.pk)
self.assertEqual(response.status_code, 200)
class TestReviewersUpdateView(BaseSubmissionViewTestCase):
user_factory = StaffFactory
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.staff = StaffFactory.create_batch(4)
cls.reviewers = ReviewerFactory.create_batch(4)
cls.roles = ReviewerRoleFactory.create_batch(2)
    def post_form(self, submission, reviewer_roles=(), reviewers=()):
data = {
'form-submitted-reviewer_form': '',
'reviewer_reviewers': [r.id for r in reviewers]
}
data.update(
**{
f'role_reviewer_{slugify(str(role))}': reviewer.id
for role, reviewer in zip(self.roles, reviewer_roles)
}
)
return self.post_page(submission, data)
def test_lead_can_add_staff_single(self):
submission = ApplicationSubmissionFactory(lead=self.user)
self.post_form(submission, reviewer_roles=[self.staff[0]])
self.assertCountEqual(submission.reviewers.all(), [self.staff[0]])
def test_lead_can_change_staff_single(self):
submission = ApplicationSubmissionFactory(lead=self.user)
AssignedWithRoleReviewersFactory(role=self.roles[0], submission=submission, reviewer=self.staff[0])
self.assertCountEqual(submission.reviewers.all(), [self.staff[0]])
self.post_form(submission, reviewer_roles=[self.staff[1]])
self.assertCountEqual(submission.reviewers.all(), [self.staff[1]])
self.assertEqual(submission.assigned.with_roles().first().reviewer, self.staff[1])
def test_lead_cant_add_reviewers_single(self):
submission = ApplicationSubmissionFactory(lead=self.user)
self.post_form(submission, reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), [])
def test_lead_can_add_reviewers_for_proposal(self):
submission = InvitedToProposalFactory(lead=self.user)
self.post_form(submission, reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), self.reviewers)
def test_lead_can_remove_reviewers_for_proposal(self):
submission = InvitedToProposalFactory(lead=self.user, reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), self.reviewers)
self.post_form(submission)
self.assertCountEqual(submission.reviewers.all(), [])
def test_lead_can_remove_some_reviewers_for_proposal(self):
submission = InvitedToProposalFactory(lead=self.user, reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), self.reviewers)
self.post_form(submission, reviewers=self.reviewers[0:2])
self.assertCountEqual(submission.reviewers.all(), self.reviewers[0:2])
def test_staff_cant_add_reviewers_proposal(self):
submission = ApplicationSubmissionFactory()
self.post_form(submission, reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), [])
def test_staff_cant_remove_reviewers_proposal(self):
submission = ApplicationSubmissionFactory(reviewers=self.reviewers)
self.assertCountEqual(submission.reviewers.all(), self.reviewers)
self.post_form(submission, reviewers=[])
self.assertCountEqual(submission.reviewers.all(), self.reviewers)
def test_lead_can_change_role_reviewer_and_review_remains(self):
submission = ApplicationSubmissionFactory()
AssignedWithRoleReviewersFactory(role=self.roles[0], submission=submission, reviewer=self.staff[0])
# Add a review from that staff reviewer
ReviewFactory(submission=submission, author__reviewer=self.staff[0], author__staff=True)
# Assign a different reviewer to the same role
self.post_form(submission, reviewer_roles=[self.staff[1]])
        # Make sure that the ex-role-reviewer still has an assigned record
self.assertCountEqual(submission.reviewers.all(), self.staff[0:2])
    def test_can_be_made_role_and_not_duplicated(self):
submission = ApplicationSubmissionFactory()
ReviewFactory(submission=submission, author__reviewer=self.staff[0], author__staff=True)
self.post_form(submission, reviewer_roles=[self.staff[0]])
self.assertCountEqual(submission.reviewers.all(), [self.staff[0]])
def test_can_remove_external_reviewer_and_review_remains(self):
submission = InvitedToProposalFactory(lead=self.user)
reviewer = self.reviewers[0]
AssignedReviewersFactory(submission=submission, reviewer=reviewer)
ReviewFactory(submission=submission, author__reviewer=reviewer)
self.post_form(submission, reviewers=[])
self.assertCountEqual(submission.reviewers.all(), [reviewer])
def test_can_add_external_reviewer_and_review_remains(self):
submission = InvitedToProposalFactory(lead=self.user)
reviewer = self.reviewers[0]
AssignedReviewersFactory(submission=submission, reviewer=reviewer)
ReviewFactory(submission=submission, author__reviewer=reviewer)
self.post_form(submission, reviewers=[self.reviewers[1]])
self.assertCountEqual(submission.reviewers.all(), [reviewer, self.reviewers[1]])
class TestReviewerSubmissionView(BaseSubmissionViewTestCase):
user_factory = ReviewerFactory
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.applicant = ApplicantFactory()
cls.reviewer_role = ReviewerRoleFactory()
apply_site = ApplySiteFactory()
cls.reviewer_settings, _ = ReviewerSettings.objects.get_or_create(site_id=apply_site.id)
cls.reviewer_settings.use_settings = True
cls.reviewer_settings.save()
def test_cant_see_add_determination_primary_action(self):
def assert_add_determination_not_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory(status='determination', user=self.applicant, reviewers=[self.user])
# Phase: ready-for-determination, no determination
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
# Phase: ready-for-determination, draft determination
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
DeterminationFactory(submission=submission, accepted=True, submitted=False)
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
def test_can_see_create_review_primary_action(self):
def assert_create_review_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 1)
submission = ApplicationSubmissionFactory(with_external_review=True, status='ext_external_review', user=self.applicant, reviewers=[self.user])
# Phase: external_review, no review
# "Add a review" should be displayed
submission.perform_transition('ext_post_review_discussion', self.user)
submission.perform_transition('ext_external_review', self.user)
assert_create_review_displayed(submission, 'Add a review')
# Phase: external_review, draft review created
# "Complete draft review" should be displayed
ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=True)
assert_create_review_displayed(submission, 'Complete draft review')
def test_cant_see_create_review_primary_action(self):
def assert_create_review_not_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory(with_external_review=True, user=self.applicant, reviewers=[self.user])
# Phase: received / in_discussion
# "Add a review" should not be displayed
# "Complete draft review" should not be displayed
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
# Phase: internal_review, only viewable by staff users
# "Add a review" should not be displayed
# "Update draft review" should not be displayed
submission.perform_transition('ext_internal_review', self.user)
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
# Phase: external_review, review completed
# "Add a review" should not be displayed
# "Update draft review" should not be displayed
submission.perform_transition('ext_post_review_discussion', self.user)
submission.perform_transition('ext_external_review', self.user)
ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=False)
assert_create_review_not_displayed(submission, 'Add a review')
assert_create_review_not_displayed(submission, 'Complete draft review')
def test_cant_see_assign_reviewers_primary_action(self):
submission = ApplicationSubmissionFactory(status='internal_review', user=self.applicant, reviewers=[self.user])
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='Assign reviewers')
self.assertEqual(len(buttons), 0)
def test_cant_see_assign_reviewers_secondary_action(self):
submission = ApplicationSubmissionFactory(status='internal_review', user=self.applicant, reviewers=[self.user])
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--white', text='Reviewers')
self.assertEqual(len(buttons), 0)
def test_can_see_view_determination_primary_action(self):
def assert_view_determination_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 1)
# Phase: accepted
submission = ApplicationSubmissionFactory(status='accepted', user=self.applicant, reviewers=[self.user])
DeterminationFactory(submission=submission, accepted=True, submitted=True)
assert_view_determination_displayed(submission)
# Phase: rejected
submission = ApplicationSubmissionFactory(status='rejected', user=self.applicant, reviewers=[self.user])
DeterminationFactory(submission=submission, rejected=True, submitted=True)
assert_view_determination_displayed(submission)
def test_cant_see_view_determination_primary_action(self):
def assert_view_determination_not_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 0)
# Phase: received / in_discussion
submission = ApplicationSubmissionFactory(user=self.applicant, reviewers=[self.user])
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, no determination
submission.perform_transition('determination', self.user)
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, draft determination
DeterminationFactory(submission=submission, author=self.user, accepted=True, submitted=False)
assert_view_determination_not_displayed(submission)
def test_can_access_any_submission(self):
"""
Reviewer settings are being used with default values.
"""
submission = ApplicationSubmissionFactory(user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_can_only_access_reviewed_submission(self):
self.reviewer_settings.submission = 'reviewed'
self.reviewer_settings.state = 'all'
self.reviewer_settings.outcome = 'all'
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(user=self.applicant, reviewers=[self.user])
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
ReviewFactory(submission=submission, author__reviewer=self.user, is_draft=False)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_can_only_access_external_review_or_higher_submission(self):
self.reviewer_settings.submission = 'all'
self.reviewer_settings.state = 'ext_state_or_higher'
self.reviewer_settings.outcome = 'all'
self.reviewer_settings.assigned = False
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
submission = ApplicationSubmissionFactory(with_external_review=True, user=self.applicant)
submission.perform_transition('ext_internal_review', self.user)
submission.perform_transition('ext_post_review_discussion', self.user)
submission.perform_transition('ext_external_review', self.user)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_cant_access_dismissed_submission(self):
self.reviewer_settings.submission = 'all'
self.reviewer_settings.state = 'all'
self.reviewer_settings.outcome = 'all'
self.reviewer_settings.assigned = False
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='rejected', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.reviewer_settings.outcome = 'all_except_dismissed'
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='rejected', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
def test_can_only_access_accepted_submission(self):
self.reviewer_settings.submission = 'all'
self.reviewer_settings.state = 'all'
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='rejected', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.reviewer_settings.outcome = 'accepted'
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='rejected', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
submission = ApplicationSubmissionFactory(status='accepted', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_can_only_access_assigned_submission(self):
self.reviewer_settings.submission = 'all'
self.reviewer_settings.state = 'all'
self.reviewer_settings.outcome = 'all'
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='accepted', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.reviewer_settings.assigned = True
self.reviewer_settings.save()
submission = ApplicationSubmissionFactory(status='accepted', user=self.applicant)
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
submission = ApplicationSubmissionFactory(status='accepted', user=self.applicant, reviewers=[self.user])
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
class TestApplicantSubmissionView(BaseSubmissionViewTestCase):
user_factory = ApplicantFactory
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.submission = ApplicationSubmissionFactory(user=cls.user)
cls.draft_proposal_submission = InvitedToProposalFactory(user=cls.user, draft=True)
    def setUp(self):
self.refresh(self.submission)
self.refresh(self.draft_proposal_submission)
def test_can_view_own_submission(self):
response = self.get_page(self.submission)
self.assertContains(response, self.submission.title)
def test_sees_latest_draft_if_it_exists(self):
draft_revision = ApplicationRevisionFactory(submission=self.submission)
self.submission.draft_revision = draft_revision
self.submission.save()
draft_submission = self.submission.from_draft()
response = self.get_page(self.submission)
self.assertContains(response, draft_submission.title)
def test_cant_view_others_submission(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
def test_get_edit_link_when_editable(self):
submission = ApplicationSubmissionFactory(user=self.user, status='more_info')
response = self.get_page(submission)
self.assertContains(response, 'Edit')
self.assertContains(response, self.url(submission, 'edit', absolute=False))
self.assertNotContains(response, 'Congratulations')
def test_get_congratulations_draft_proposal(self):
response = self.get_page(self.draft_proposal_submission)
self.assertContains(response, 'Congratulations')
def test_can_edit_own_submission(self):
response = self.get_page(self.draft_proposal_submission, 'edit')
self.assertContains(response, self.draft_proposal_submission.title)
def test_can_submit_submission(self):
old_status = self.draft_proposal_submission.status
data = prepare_form_data(self.draft_proposal_submission, title='This is different')
response = self.post_page(self.draft_proposal_submission, {'submit': True, **data}, 'edit')
url = self.url_from_pattern('funds:submissions:detail', kwargs={'pk': self.draft_proposal_submission.id})
self.assertRedirects(response, url)
submission = self.refresh(self.draft_proposal_submission)
self.assertNotEqual(old_status, submission.status)
def test_gets_draft_on_edit_submission(self):
draft_revision = ApplicationRevisionFactory(submission=self.draft_proposal_submission)
self.draft_proposal_submission.draft_revision = draft_revision
self.draft_proposal_submission.save()
response = self.get_page(self.draft_proposal_submission, 'edit')
self.assertDictEqual(response.context['object'].form_data, draft_revision.form_data)
def test_cant_edit_submission_incorrect_state(self):
submission = InvitedToProposalFactory(user=self.user)
response = self.get_page(submission, 'edit')
self.assertEqual(response.status_code, 403)
def test_cant_edit_other_submission(self):
submission = InvitedToProposalFactory(draft=True)
response = self.get_page(submission, 'edit')
self.assertEqual(response.status_code, 403)
def test_cant_screen_submission(self):
"""
Test that an applicant cannot set the screening status
and that they don't see the screening status form.
"""
screening_outcome = ScreeningStatusFactory()
response = self.post_page(self.submission, {'form-submitted-screening_form': '', 'screening_statuses': [screening_outcome.id]})
self.assertNotIn('screening_form', response.context_data)
submission = self.refresh(self.submission)
self.assertNotIn(screening_outcome, submission.screening_statuses.all())
def test_cant_see_screening_status_block(self):
response = self.get_page(self.submission)
self.assertNotContains(response, 'Screening status')
def test_cant_see_add_determination_primary_action(self):
def assert_add_determination_not_displayed(submission, button_text):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(rf'\s*{button_text}\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory(status='determination', user=self.user)
# Phase: ready-for-determination, no determination
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
# Phase: ready-for-determination, draft determination
# "Add determination" should not be displayed
# "Complete draft determination" should not be displayed
DeterminationFactory(submission=submission, accepted=True, submitted=False)
assert_add_determination_not_displayed(submission, 'Add determination')
assert_add_determination_not_displayed(submission, 'Complete draft determination')
def test_cant_see_create_review_primary_action(self):
def assert_create_review_not_displayed(submission):
response = self.get_page(submission)
# Ignore whitespace (including line breaks) in button text
pattern = re.compile(r'\s*Add a review\s*')
buttons = BeautifulSoup(response.content, 'html5lib').find_all('a', class_='button--primary', text=pattern)
self.assertEqual(len(buttons), 0)
submission = ApplicationSubmissionFactory(user=self.user)
# Phase: received / in_discussion
# "Add a review" should not be displayed
assert_create_review_not_displayed(submission)
# Phase: internal_review
# "Add a review" should not be displayed
staff_user = StaffFactory()
submission.perform_transition('internal_review', staff_user)
assert_create_review_not_displayed(submission)
def test_cant_see_assign_reviewers_primary_action(self):
submission = ApplicationSubmissionFactory(status='internal_review', user=self.user)
ReviewerRoleFactory()
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='Assign reviewers')
self.assertEqual(len(buttons), 0)
def test_cant_see_assign_reviewers_secondary_action(self):
submission = ApplicationSubmissionFactory(status='internal_review', user=self.user)
ReviewerRoleFactory()
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--white', text='Reviewers')
self.assertEqual(len(buttons), 0)
def test_can_see_view_determination_primary_action(self):
def assert_view_determination_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='js-actions-sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 1)
# Phase: accepted
submission = ApplicationSubmissionFactory(status='accepted', user=self.user)
DeterminationFactory(submission=submission, accepted=True, submitted=True)
assert_view_determination_displayed(submission)
# Phase: rejected
submission = ApplicationSubmissionFactory(status='rejected', user=self.user)
DeterminationFactory(submission=submission, rejected=True, submitted=True)
assert_view_determination_displayed(submission)
def test_cant_see_view_determination_primary_action(self):
def assert_view_determination_not_displayed(submission):
response = self.get_page(submission)
buttons = BeautifulSoup(response.content, 'html5lib').find(class_='sidebar').find_all('a', class_='button--primary', text='View determination')
self.assertEqual(len(buttons), 0)
# Phase: received / in_discussion
submission = ApplicationSubmissionFactory(user=self.user)
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, no determination
submission.perform_transition('determination', self.user)
assert_view_determination_not_displayed(submission)
# Phase: ready-for-determination, draft determination
DeterminationFactory(submission=submission, accepted=True, submitted=False)
assert_view_determination_not_displayed(submission)
class TestRevisionsView(BaseSubmissionViewTestCase):
user_factory = ApplicantFactory
def test_create_revisions_on_submit(self):
submission = ApplicationSubmissionFactory(status='draft_proposal', workflow_stages=2, user=self.user)
old_data = submission.form_data.copy()
new_title = 'New title'
new_data = prepare_form_data(submission, title=new_title)
self.post_page(submission, {'submit': True, **new_data}, 'edit')
submission = self.refresh(submission)
self.maxDiff = None
self.assertEqual(submission.status, 'proposal_discussion')
self.assertEqual(submission.revisions.count(), 2)
self.assertDictEqual(submission.revisions.last().form_data, old_data)
self.assertDictEqual(submission.live_revision.form_data, submission.form_data)
self.assertEqual(submission.live_revision.author, self.user)
self.assertEqual(submission.title, new_title)
def test_dont_update_live_revision_on_save(self):
submission = ApplicationSubmissionFactory(status='draft_proposal', workflow_stages=2, user=self.user)
old_data = submission.form_data.copy()
new_data = prepare_form_data(submission, title='New title')
self.post_page(submission, {'save': True, **new_data}, 'edit')
submission = self.refresh(submission)
self.maxDiff = None
self.assertEqual(submission.status, 'draft_proposal')
self.assertEqual(submission.revisions.count(), 2)
self.assertDictEqual(submission.draft_revision.form_data, submission.from_draft().form_data)
self.assertEqual(submission.draft_revision.author, self.user)
self.assertDictEqual(submission.live_revision.form_data, old_data)
def test_existing_draft_edit_and_submit(self):
submission = ApplicationSubmissionFactory(status='draft_proposal', workflow_stages=2, user=self.user)
draft_data = prepare_form_data(submission, title='A new title')
self.post_page(submission, {'save': True, **draft_data}, 'edit')
submission = self.refresh(submission)
newer_title = 'Newer title'
draft_data = prepare_form_data(submission, title=newer_title)
self.post_page(submission, {'submit': True, **draft_data}, 'edit')
submission = self.refresh(submission)
self.maxDiff = None
self.assertDictEqual(submission.draft_revision.form_data, submission.from_draft().form_data)
self.assertDictEqual(submission.live_revision.form_data, submission.form_data)
self.assertEqual(submission.revisions.count(), 2)
self.assertEqual(submission.title, newer_title)
class TestRevisionCompare(BaseSubmissionViewTestCase):
base_view_name = 'revisions:compare'
user_factory = StaffFactory
def get_kwargs(self, instance):
return {
'submission_pk': instance.pk,
'to': instance.live_revision.id,
'from': instance.revisions.last().id,
}
def test_renders_with_all_the_diffs(self):
submission = ApplicationSubmissionFactory()
new_data = ApplicationSubmissionFactory(round=submission.round, form_fields=submission.form_fields).form_data
submission.form_data = new_data
submission.create_revision()
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
class TestRevisionList(BaseSubmissionViewTestCase):
base_view_name = 'revisions:list'
user_factory = StaffFactory
def get_kwargs(self, instance):
return {'submission_pk': instance.pk}
def test_list_doesnt_include_draft(self):
submission = ApplicationSubmissionFactory()
draft_revision = ApplicationRevisionFactory(submission=submission)
submission.draft_revision = draft_revision
submission.save()
response = self.get_page(submission)
self.assertNotIn(draft_revision, response.context['object_list'])
def test_get_in_correct_order(self):
submission = ApplicationSubmissionFactory()
revision = ApplicationRevisionFactory(submission=submission)
ApplicationRevision.objects.filter(id=revision.id).update(timestamp=timezone.now() - timedelta(days=1))
revision_older = ApplicationRevisionFactory(submission=submission)
ApplicationRevision.objects.filter(id=revision_older.id).update(timestamp=timezone.now() - timedelta(days=2))
response = self.get_page(submission)
self.assertSequenceEqual(
response.context['object_list'],
[submission.live_revision, revision, revision_older],
)
class TestStaffSealedView(BaseSubmissionViewTestCase):
user_factory = StaffFactory
def test_redirected_to_sealed(self):
submission = SealedSubmissionFactory()
response = self.get_page(submission)
url = self.url_from_pattern('funds:submissions:sealed', kwargs={'pk': submission.id})
self.assertRedirects(response, url)
def test_cant_post_to_sealed(self):
submission = SealedSubmissionFactory()
response = self.post_page(submission, {'some': 'data'}, 'sealed')
# Because of the redirect chain the url returned is not absolute
url = self.url_from_pattern('funds:submissions:sealed', kwargs={'pk': submission.id}, absolute=False)
self.assertRedirects(response, url)
def test_non_sealed_unaffected(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_non_sealed_redirected_away(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission, 'sealed')
url = self.url_from_pattern('funds:submissions:detail', kwargs={'pk': submission.id})
self.assertRedirects(response, url)
class TestSuperUserSealedView(BaseSubmissionViewTestCase):
user_factory = SuperUserFactory
def test_redirected_to_sealed(self):
submission = SealedSubmissionFactory()
response = self.get_page(submission)
url = self.url_from_pattern('funds:submissions:sealed', kwargs={'pk': submission.id})
self.assertRedirects(response, url)
def test_can_post_to_sealed(self):
submission = SealedSubmissionFactory()
response = self.post_page(submission, {}, 'sealed')
url = self.url_from_pattern('funds:submissions:detail', kwargs={'pk': submission.id})
self.assertRedirects(response, url)
def test_peeking_is_logged(self):
submission = SealedSubmissionFactory()
self.post_page(submission, {}, 'sealed')
self.assertTrue('peeked' in self.client.session)
self.assertTrue(str(submission.id) in self.client.session['peeked'])
self.assertEqual(Activity.objects.count(), 1)
self.assertTrue('sealed' in Activity.objects.first().message)
def test_not_asked_again(self):
submission = SealedSubmissionFactory()
self.post_page(submission, {}, 'sealed')
# Now request the page again
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
def test_can_view_multiple_sealed(self):
sealed_round = SealedRoundFactory()
first, second = SealedSubmissionFactory.create_batch(2, round=sealed_round)
self.post_page(first, {}, 'sealed')
self.post_page(second, {}, 'sealed')
self.assertTrue('peeked' in self.client.session)
self.assertTrue(str(first.id) in self.client.session['peeked'])
self.assertTrue(str(second.id) in self.client.session['peeked'])
class TestSuperUserSubmissionView(BaseSubmissionViewTestCase):
user_factory = SuperUserFactory
@classmethod
def setUpTestData(cls):
cls.submission = ApplicationSubmissionFactory()
super().setUpTestData()
    def setUp(self):
self.refresh(self.submission)
def test_can_screen_submission(self):
ScreeningStatus.objects.all().delete()
screening_outcome1 = ScreeningStatusFactory()
screening_outcome1.yes = True
screening_outcome1.save()
screening_outcome2 = ScreeningStatusFactory()
screening_outcome2.yes = True
screening_outcome2.default = True
screening_outcome2.save()
self.submission.screening_statuses.clear()
self.submission.screening_statuses.add(screening_outcome2)
self.post_page(self.submission, {'form-submitted-screening_form': '', 'screening_statuses': [screening_outcome1.id, screening_outcome2.id]})
submission = self.refresh(self.submission)
self.assertEqual(submission.screening_statuses.count(), 2)
def test_can_screen_applications_in_final_status(self):
"""
        Now that the submission has been rejected (final determination),
        we can still screen it because we are a super user.
"""
submission = ApplicationSubmissionFactory(rejected=True)
ScreeningStatus.objects.all().delete()
screening_outcome1 = ScreeningStatusFactory()
screening_outcome1.yes = True
screening_outcome1.save()
screening_outcome2 = ScreeningStatusFactory()
screening_outcome2.yes = True
screening_outcome2.default = True
screening_outcome2.save()
submission.screening_statuses.add(screening_outcome2)
response = self.post_page(submission, {'form-submitted-screening_form': '', 'screening_statuses': [screening_outcome1.id, screening_outcome2.id]})
submission = self.refresh(submission)
self.assertEqual(response.context_data['screening_form'].should_show, True)
self.assertEqual(submission.screening_statuses.count(), 2)
# Check that an activity was created that should only be viewable internally
activity = Activity.objects.filter(message__contains='Screening status').first()
self.assertEqual(activity.visibility, TEAM)
class TestSubmissionDetailSimplifiedView(TestCase):
def test_staff_only(self):
factory = RequestFactory()
submission = ApplicationSubmissionFactory()
ProjectFactory(submission=submission)
request = factory.get(f'/submission/{submission.pk}')
request.user = StaffFactory()
response = SubmissionDetailSimplifiedView.as_view()(request, pk=submission.pk)
self.assertEqual(response.status_code, 200)
request.user = ApplicantFactory()
with self.assertRaises(PermissionDenied):
SubmissionDetailSimplifiedView.as_view()(request, pk=submission.pk)
def test_project_required(self):
factory = RequestFactory()
submission = ApplicationSubmissionFactory()
request = factory.get(f'/submission/{submission.pk}')
request.user = StaffFactory()
with self.assertRaises(Http404):
SubmissionDetailSimplifiedView.as_view()(request, pk=submission.pk)
ProjectFactory(submission=submission)
response = SubmissionDetailSimplifiedView.as_view()(request, pk=submission.pk)
self.assertEqual(response.status_code, 200)
class BaseSubmissionFileViewTestCase(BaseViewTestCase):
url_name = 'funds:submissions:{}'
base_view_name = 'serve_private_media'
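    # get_kwargs builds URL kwargs for the private-media view from the
    # submission's first uploaded file field (the field id plus the stored
    # document's basename).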
def get_kwargs(self, instance):
document_fields = list(instance.file_field_ids)
field_id = document_fields[0]
document = instance.data(field_id)
return {
'pk': instance.pk,
'field_id': field_id,
'file_name': document.basename,
}
class TestStaffSubmissionFileView(BaseSubmissionFileViewTestCase):
user_factory = StaffFactory
def test_staff_can_access(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.redirect_chain, [])
class TestUserSubmissionFileView(BaseSubmissionFileViewTestCase):
user_factory = ApplicantFactory
def test_owner_can_access(self):
submission = ApplicationSubmissionFactory(user=self.user)
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.redirect_chain, [])
def test_user_can_not_access(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission)
self.assertEqual(response.status_code, 403)
self.assertEqual(response.redirect_chain, [])
class TestAnonSubmissionFileView(BaseSubmissionFileViewTestCase):
user_factory = AnonymousUser
def test_anonymous_can_not_access(self):
submission = ApplicationSubmissionFactory()
response = self.get_page(submission)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.redirect_chain), 2)
for path, _ in response.redirect_chain:
self.assertIn(reverse('users_public:login'), path)
class BaseProjectDeleteTestCase(BaseViewTestCase):
url_name = 'funds:submissions:reminders:{}'
base_view_name = 'delete'
def get_kwargs(self, instance):
return {'pk': instance.id, 'submission_pk': instance.submission.id}
class TestStaffReminderDeleteView(BaseProjectDeleteTestCase):
user_factory = StaffFactory
def test_has_access(self):
reminder = ReminderFactory()
response = self.get_page(reminder)
self.assertEqual(response.status_code, 200)
def test_confirm_message(self):
reminder = ReminderFactory()
response = self.get_page(reminder)
self.assertContains(response, 'Are you sure you want to delete')
self.assertEqual(response.status_code, 200)
class TestUserReminderDeleteView(BaseProjectDeleteTestCase):
user_factory = ApplicantFactory
    def test_doesnt_have_access(self):
reminder = ReminderFactory()
response = self.get_page(reminder)
self.assertEqual(response.status_code, 403)
@override_settings(ROOT_URLCONF='hypha.apply.urls')
class TestReviewerLeaderboard(TestCase):
def test_applicant_cannot_access_reviewer_leaderboard(self):
self.client.force_login(ApplicantFactory())
response = self.client.get('/apply/submissions/reviews/', follow=True, secure=True)
self.assertEqual(response.status_code, 403)
def test_community_reviewer_cannot_access_reviewer_leaderboard(self):
self.client.force_login(CommunityReviewerFactory())
response = self.client.get('/apply/submissions/reviews/', follow=True, secure=True)
self.assertEqual(response.status_code, 403)
def test_partner_cannot_access_reviewer_leaderboard(self):
self.client.force_login(PartnerFactory())
response = self.client.get('/apply/submissions/reviews/', follow=True, secure=True)
self.assertEqual(response.status_code, 403)
def test_reviewer_cannot_access_leader_board(self):
self.client.force_login(ReviewerFactory())
response = self.client.get('/apply/submissions/reviews/', follow=True, secure=True)
self.assertEqual(response.status_code, 403)
def test_staff_can_access_leaderboard(self):
self.client.force_login(StaffFactory())
response = self.client.get('/apply/submissions/reviews/', follow=True, secure=True)
self.assertEqual(response.status_code, 200)
class TestUpdateReviewersMixin(BaseSubmissionViewTestCase):
user_factory = StaffFactory
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.staff = StaffFactory.create_batch(4)
cls.reviewers = ReviewerFactory.create_batch(4)
cls.roles = ReviewerRoleFactory.create_batch(2)
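    # Helper: posts the "update reviewers" form, assigning `reviewers` as
    # plain reviewers and pairing each role in self.roles with the
    # corresponding entry of `reviewer_roles`.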
    def post_form(self, submission, reviewer_roles=None, reviewers=None):
        # Avoid mutable default arguments; fall back to fresh empty lists
        reviewer_roles = reviewer_roles or []
        reviewers = reviewers or []
data = {
'form-submitted-reviewer_form': '',
'reviewer_reviewers': [r.id for r in reviewers]
}
data.update(
**{
f'role_reviewer_{slugify(str(role))}': reviewer.id
for role, reviewer in zip(self.roles, reviewer_roles)
}
)
return self.post_page(submission, data)
def test_submission_transition_all_reviewer_roles_not_assigned(self):
submission = ApplicationSubmissionFactory(lead=self.user, status=INITIAL_STATE)
self.post_form(submission, reviewer_roles=[self.staff[0]])
submission = ApplicationSubmission.objects.get(id=submission.id)
# Submission state shouldn't change when all_reviewer_roles_not_assigned
self.assertEqual(
submission.status,
INITIAL_STATE
)
def test_submission_transition_to_internal_review(self):
submission = ApplicationSubmissionFactory(lead=self.user, status=INITIAL_STATE)
self.post_form(submission, reviewer_roles=[self.staff[0], self.staff[1]])
submission = ApplicationSubmission.objects.get(id=submission.id)
# Automatically transition the application to "Internal review".
self.assertEqual(
submission.status,
submission.workflow.stepped_phases[2][0].name
)
def test_submission_transition_to_proposal_internal_review(self):
submission = ApplicationSubmissionFactory(lead=self.user, status='proposal_discussion', workflow_stages=2)
self.post_form(submission, reviewer_roles=[self.staff[0], self.staff[1]])
submission = ApplicationSubmission.objects.get(id=submission.id)
# Automatically transition the application to "Internal review".
self.assertEqual(
submission.status,
'proposal_internal_review'
)
import requests
import os
import sys
def remove_nonpicture_files(arr, file, extensions):
for extension in extensions:
if file.endswith("." + extension):
return
arr.remove(file)
return
print("Running " + sys.argv[0] + " with " + str(len(sys.argv)) + " args...")
if not(len(sys.argv) == 4):
print("Please provide the paths to the two folders holding the images, and an api key when you run this program (for example: python " + sys.argv[0] + " \"C:\\Devel\\random\\ImageMapper\\items_x128\\items\\\" \"C:\\\\Devel\\random\\ImageMapper\\items\\items\" \"8f48c3c5-812a-0902-c271-9f3518b76c7b\" )")
sys.exit(1)
print("This is the name of the script:", sys.argv[0])
print("Number of arguments:", len(sys.argv))
print("The arguments are:" , str(sys.argv))
api = r'https://api.deepai.org/api/image-similarity'
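# The image-similarity endpoint is expected to return JSON shaped like
# {"output": {"distance": <int>}}, where a lower distance means more similar
# images (assumption inferred from how the response is parsed below).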
F1 = sys.argv[1]
if F1[-1] not in ('\\', '/'):
    F1 = F1 + "\\"
F2 = sys.argv[2]
if F2[-1] not in ('\\', '/'):
    F2 = F2 + "\\"
api_key = sys.argv[3]
query_count = 0
empty_count = 0
f1arr = sorted(os.listdir(F1))
f2arr = sorted(os.listdir(F2))
valid_extensions = ["png", "jpg"]
print(f"original length: {len(f1arr)} * {len(f2arr)} = {len(f1arr)*len(f2arr)}")
# Iterate over copies: removing items from a list while looping over it skips elements
for file in list(f1arr):
    remove_nonpicture_files(f1arr, file, valid_extensions)
for file in list(f2arr):
    remove_nonpicture_files(f2arr, file, valid_extensions)
# Drop files that appear in both folders; iterate over a copy so removal is safe
for f1 in list(f1arr):
    if f1 in f2arr:
        f1arr.remove(f1)
        #f2arr.remove(f1) # TODO: make flag that enables this
print(f"reduced to: {len(f1arr)} * {len(f2arr)} = {len(f1arr)*len(f2arr)}")
# Maps each file in F1 to its best match in F2 as (filename, distance)
best_matches = {}
for f1 in f1arr:
for f2 in f2arr:
        # Use context managers so the image file handles are closed promptly
        with open(F1 + f1, 'rb') as img1, open(F2 + f2, 'rb') as img2:
            r = requests.post(
                api,
                files={
                    'image1': img1,
                    'image2': img2,
                },
                headers={'Api-Key': api_key}
            )
query_count += 1
        payload = r.json()
        if payload is None:
            print("OUTPUT WAS EMPTY")
            empty_count += 1
            continue  # without this, the lookup below would fail on None
        elif payload.get('output') is None:
            print(f1 + " + " + f2 + " -> None")
            continue
        val = payload['output']['distance']
print(f1 + " + " + f2 + " -> " + str(val))
        if f1 not in best_matches or val < best_matches[f1][1]:
            best_matches[f1] = (f2, val)
print(best_matches)
with open(r".\map.txt", 'w') as output_file:
    print(best_matches, file=output_file)
print("map.txt successfully saved to the current folder.")
print("successfully finished running " + sys.argv[0] + f" with {empty_count} empty queries out of {query_count} total queries.")
# File: ApproachV3/src/spam_metric/multinomial_bayes.py
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn import metrics
import pandas as pd
import pickle
import re
import string
import time
table = str.maketrans({key: None for key in string.punctuation})
def save(vectorizer, classifier):
"""
save classifier to disk
"""
with open('model.pkl', 'wb') as file:
pickle.dump((vectorizer, classifier), file)
def load():
"""
load classifier from disk
"""
with open('/Users/kanishksinha/Desktop/TwitterBotDetection/ApproachV3/src/spam_metric/model.pkl', 'rb') as file:
vectorizer, classifier = pickle.load(file)
return vectorizer, classifier
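# Illustrative usage of load() (assumes model.pkl was produced by main()):
#   vectorizer, classifier = load()
#   features = vectorizer.transform(["some tweet text"])
#   labels = classifier.predict(features)  # label encoding follows the CSV's 'category' column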
def load_data():
dtype = {"tweets": str, "category": int}
df = pd.read_csv('cleaned_tweets.csv')
features = list(df.get('tweets'))
labels = list(df.get('category'))
return features, labels
def remove_url(tweet):
"""
Regex based URL removed. Removes all nonwhitespace characters after http until a whitespace is reached
:param tweet: Tweet to be checked
:return: Tweet that is substituted with URL in the place of the actual URL
"""
return re.sub(r"http\S+", "URL", tweet)
def preprocess(tweet):
"""
    Substitutes URLs with the string "URL", strips leading and trailing whitespace,
    removes non-Latin characters, and drops punctuation.
:param tweet:
:return:
"""
# remove URL
line = remove_url(str(tweet.strip()))
# remove non Latin characters
stripped_text = ''
for c in line:
stripped_text += c if len(c.encode(encoding='utf_8')) == 1 else ''
return stripped_text.translate(table).strip()
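# Illustrative example: preprocess("check http://t.co/abc now!") returns
# "check URL now" (URL substituted, punctuation dropped, whitespace trimmed).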
def main():
# load features and labels
print('Loading data')
start = time.time()
features, labels = load_data()
end = time.time()
print('CSV Loading time:{diff}'.format(diff=end - start))
# split data into training / test sets
print('Splitting data')
features_train, features_test, labels_train, labels_test = train_test_split(
features,
labels,
        test_size=0.2,  # use 20% for testing
random_state=42)
print("no. of train features: {}".format(len(features_train)))
print("no. of train labels: {}".format(len(labels_train)))
print("no. of test features: {}".format(len(features_test)))
print("no. of test labels: {}".format(len(labels_test)))
# vectorize email text into tfidf matrix
# TfidfVectorizer converts collection of raw documents to a matrix of TF-IDF features.
# It's equivalent to CountVectorizer followed by TfidfTransformer.
vectorizer = TfidfVectorizer(
input='content', # input is actual text
lowercase=True, # convert to lower case before tokenizing
stop_words='english' # remove stop words
)
print('Transforming features')
start = time.time()
features_train_transformed = vectorizer.fit_transform(features_train)
features_test_transformed = vectorizer.transform(features_test)
end = time.time()
print('Transforming time:{diff}'.format(diff=end - start))
# train a classifier
print('Training')
start = time.time()
classifier = MultinomialNB()
classifier.fit(features_train_transformed, labels_train)
end = time.time()
print('Training time:{diff}'.format(diff=end - start))
# save the trained model
save(vectorizer, classifier)
# score the classifier accuracy
print('Scoring')
print("classifier accuracy {:.2f}%".format(classifier.score(features_test_transformed, labels_test) * 100))
start = time.time()
prediction = classifier.predict(features_test_transformed)
end = time.time()
print('Testing time:{diff}'.format(diff=end - start))
fscore = metrics.f1_score(labels_test, prediction, average='macro')
print("F score {:.2f}".format(fscore))
def test():
tweet_list = []
preprocessed = []
tweet = "hello twitter facebook congratulations"
tweet2 = "This is a test"
tweet_list.append(str(tweet))
tweet_list.append(str(tweet2))
vectorizer, classifier = load()
preprocessed.append(preprocess(tweet))
preprocessed.append(preprocess(tweet2))
vectorized_tweet = vectorizer.transform(preprocessed)
prediction = classifier.predict(vectorized_tweet)
print(prediction)
if __name__ == '__main__':
# main()
test()
import numpy as np
from tensorflow import keras
from sklearn.model_selection import train_test_split
import os
from sklearn.preprocessing import StandardScaler
root_logdir = os.path.join(os.curdir, "rna_logs")
FILENAME = 'menus.csv'
etapa = '-400relu-300relu'
def get_run_logdir():
import time
run_id = time.strftime("run_%Y_%m_%d-%H_%M_%S"+etapa)
return os.path.join(root_logdir, run_id)
def inttobin(v,nb):
res = np.zeros(nb)
for i in range(nb):
res[i]=int(v%2)
v=int(v/2)
return np.flip(res)
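# Illustrative example: inttobin(5, 4) returns array([0., 1., 0., 1.]),
# i.e. the binary digits of v with the most significant bit first.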
def loadData(maxceros):
contX=0
contY=0
dataset = np.loadtxt(FILENAME, delimiter=',')
print(dataset)
print("Menus cargados (",len(dataset),"):")
print("Iniciando generación de datos para red neuronal...")
n = (2 ** 14) - 1 # 14 unos en binario
for f in range(0,len(dataset)):
print("Creando datos a partir de menu ",f)
v = dataset[f]
r=np.array([v*inttobin(n,14)])
for i in range(1,n):
aux=inttobin(i,14)
if ((14-np.count_nonzero(aux))<maxceros):
r=np.append(r,[v*aux],axis=0)
if f==0:
X=r
Y=np.tile(dataset[f],(len(r),1))
else:
X=np.append(X,r,axis=0)
Y = np.append(Y,np.tile(dataset[f],(len(r),1)),axis=0)
print("Agregadas ",len(X)-contX," filas en X (total ",len(X),") y ",len(Y)-contY," en Y (total ",len(Y),")")
contX = len(X)
contY = len(Y)
return X,Y
def optimizeAndSplitData(X,Y,percent):
total=np.append(X,Y,axis=1)
train, val = train_test_split(total, test_size=(percent/100), random_state=42)
X_train=train[:,:14]
Y_train=train[:,14:]
X_val=val[:,:14]
Y_val=val[:,14:]
return X_train, Y_train, X_val, Y_val
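# Each concatenated row holds the 14 masked menu values first and the 14
# complete menu values last, so columns [:14] are the network inputs and
# columns [14:] are the targets.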
X,Y = loadData(7)
X_train, Y_train, X_val, Y_val = optimizeAndSplitData(X,Y,30)
print("Datos cargados.")
print("Escalando datos")
scalerX=StandardScaler()
X_train = scalerX.fit_transform(X_train)
X_val = scalerX.transform(X_val)
print("Datos escalados")
np.save('ScalerX.npy',[scalerX.mean_, scalerX.var_ ** 0.5])
model=keras.models.Sequential()
model.add(keras.layers.Dense(400, activation="relu" , input_shape=[14]))
model.add(keras.layers.Dense(300, activation="relu"))
model.add(keras.layers.Dense(14, activation="relu"))
model.compile(loss="mean_squared_error", optimizer="sgd", metrics=["accuracy"])
run_logdir = get_run_logdir()
tensorboard_cb = keras.callbacks.TensorBoard(run_logdir)
checkpoint_cb = keras.callbacks.ModelCheckpoint("rnaMenu_model"+etapa+".h5", save_best_only=True)
model.fit(X_train,Y_train,epochs=200,validation_data=(X_val,Y_val),callbacks=[checkpoint_cb,tensorboard_cb])
model=keras.models.load_model("rnaMenu_model"+etapa+".h5")
X_new=[[1,2,3,0,0,0,7,8,9,10,11,12,13,0]]
X_new = scalerX.transform(X_new)
Y_new = model.predict(X_new)
print('With X_new=',X_new,'\nThe prediction is: ',Y_new)
X_new=[[1,15,0,17,0,19,7,0,20,12,0,3,22,23]]
X_new = scalerX.transform(X_new)
Y_new = model.predict(X_new)
print('With X_new=',X_new,'\nThe prediction is: ',Y_new)
from flask import Flask, render_template, request, redirect, send_file
from werkzeug.utils import secure_filename
from functions import split,transform
import io
import os
import pdfrw
from reportlab.pdfgen import canvas
from reportlab.lib.units import cm,mm
# 12 questions adjustments
diff = -0.45*mm#(74.7/2-9.37)*mm
diff_name = -5*mm#diff -5*mm
diff_space = 0#-0.03*mm
space = 2.4*mm + diff_space
space_q = (2.6*mm,4.5*mm+diff_space)
max_h = 296.93*mm
w = -38.6*mm + 42.7*mm#-0.02*mm
h = (125.7*mm - 121.9*mm)#+1*mm
start_id = (35.7*mm, max_h-135.3*mm+0.45*mm+diff)
start_q = (36.8*mm,max_h-208.3*mm+0.45*mm+diff)
error = 0.5*mm
x_name = 110*mm
y_name = 171*mm - diff_name
MAX_ITENS = 12
parameters = [space,space_q,max_h,w,h,start_id,start_q,error,x_name,y_name,y_name]
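# Layout calibration constants (in ReportLab units) bundled for transform();
# the order must match what transform() unpacks. y_name appears twice, which
# is kept as-is here (assumed intentional).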
UPLOAD_FOLDER = 'uploads/'
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
@app.route("/",methods=["POST", "GET"])
def upload_page():
if request.method == "GET":
return render_template("index.html")
else:
uploaded_file = request.files['file']
        if uploaded_file.filename != '':
print(os.getcwd())
arq = os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(uploaded_file.filename))
print(arq)
uploaded_file.save(arq)
name= request.form["name"]
page = int(request.form["page"])
id = request.form["id"]
quest = ""
for i in range(1,MAX_ITENS+1):
key = "customRadioInline"+str(i)
try:
quest += request.form[key]
except KeyError:
quest += "."
output = os.path.join(app.config['UPLOAD_FOLDER'],
"Gabarito_"+"_".join(name.split()))
split(arq, page, output+".pdf")
transform(id,quest,output,name,parameters)
            # Read the PDF into memory so the temporary files can be deleted
            # before the response is returned (download_name assumes Flask >= 2.0)
            with open(output + ".pdf", 'rb') as f:
                pdf_bytes = io.BytesIO(f.read())
            os.remove(output + ".pdf")
            os.remove(arq)
            return send_file(pdf_bytes, as_attachment=True, mimetype='application/pdf',
                             download_name=os.path.basename(output) + ".pdf")
if __name__ == "__main__":
#app.run(debug=True, host= '0.0.0.0')
    app.run()
# Repo: Timh37/SeasonalDSLC_NWES
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 16:11:35 2019
Applies change in seasonal anomalies of wind-velocity change from CMIP6 model to ERA5 boundary conditions to ROMS model.
@author: thermans
"""
import numpy as np
import os
import fnmatch
import xarray as xr
import xesmf as xe
import datetime
from seasonal_deviations_from_monthly_means import seasonal_deviations_from_monthly_means
from cdo import Cdo
cdo = Cdo()
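# Note: the cdo bindings shell out to the Climate Data Operators binary, which
# must be installed and on PATH for the setmisstonn / sellonlatbox calls below.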
model = 'CNRM-ESM2-1' #CMIP6 model to derive wind from
cmip6_dir = '/Volumes/Naamloos/PhD_Data/CMIP6/time_merged/' #path to CMIP6 data
ssp = 'ssp585' #SSP
in_dir = '/Volumes/Naamloos/PhD_Data/ERA5/ROMS_format/'; #input path bdy conditions
out_dir = '/Volumes/Naamloos/PhD_Data/ERA5/ROMS_format/wind_mod/'; #output path new bdy conditions
out_prefix = model+'_DJF_wind_' #prefix to prepend to output filename
if not os.path.exists(out_dir):
os.makedirs(out_dir)
years_to_mod = np.arange(1993,1996) #years to modify in ROMS forcing
#averaging periods
basey = np.arange(1995,2015)
futy = np.arange(2081,2101)
#load variant averaged wind velocity components from cmip6 model
#uas
var_model_dir = os.path.join(cmip6_dir,'uas',model)
for f,fn in enumerate(fnmatch.filter(os.listdir(var_model_dir), "*"+ssp+"*nc")): #loop over variants
#load ssp & historical
ssp_ds = xr.open_dataset(os.path.join(var_model_dir,fn))
print(ssp_ds.variant_label)
hist_ds = xr.open_dataset(os.path.join(var_model_dir,fnmatch.filter(os.listdir(var_model_dir), "*historical*"+ssp_ds.variant_label+"*nc")[0]))
#concatenate historical and ssp and replace model specific time with common time
hist_ds['time'] = xr.cftime_range(start="1850", periods=len(hist_ds.time), freq="MS", calendar="noleap") #replace model specific time array by common
ssp_ds['time'] = xr.cftime_range(start="2015", periods=len(ssp_ds.time), freq="MS", calendar="noleap")
hist_ssp_ds = xr.concat((hist_ds,ssp_ds),dim='time') #concatenate historical & ssp over time dimension
hist_ssp_ds = xr.decode_cf(hist_ssp_ds)
if 'time_bounds' in hist_ssp_ds:
hist_ssp_ds = hist_ssp_ds.rename(time_bounds='time_bnds')
if f==0: #append variants in 1 ds
model_ds = []
model_ds = hist_ssp_ds
else:
model_ds = xr.concat((model_ds,hist_ssp_ds),dim='variant') #generating new dimension 'variant'
if 'variant' in model_ds.dims: #if multiple variants, take variant mean
uas = model_ds.mean(dim='variant')
else:
uas = model_ds
#vas, idem
var_model_dir = os.path.join(cmip6_dir,'vas',model)
for f,fn in enumerate(fnmatch.filter(os.listdir(var_model_dir), "*"+ssp+"*nc")):
ssp_ds = xr.open_dataset(os.path.join(var_model_dir,fn))
print(ssp_ds.variant_label)
hist_ds = xr.open_dataset(os.path.join(var_model_dir,fnmatch.filter(os.listdir(var_model_dir), "*historical*"+ssp_ds.variant_label+"*nc")[0]))
hist_ds['time'] = xr.cftime_range(start="1850", periods=len(hist_ds.time), freq="MS", calendar="noleap")
ssp_ds['time'] = xr.cftime_range(start="2015", periods=len(ssp_ds.time), freq="MS", calendar="noleap")
hist_ssp_ds = xr.concat((hist_ds,ssp_ds),dim='time')
hist_ssp_ds = xr.decode_cf(hist_ssp_ds)
if 'time_bounds' in hist_ssp_ds:
hist_ssp_ds = hist_ssp_ds.rename(time_bounds='time_bnds')
if f==0:
model_ds = []
model_ds = hist_ssp_ds
else:
        model_ds = xr.concat((model_ds,hist_ssp_ds),dim='variant') #generating new dimension 'variant'
if 'variant' in model_ds.dims:
vas = model_ds.mean(dim='variant')
else:
vas = model_ds
#compute change in seasonal anomalies of the wind components
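# The slice below assumes monthly data starting at 1850-01: index 11+124*12
# corresponds to December 1974 and the slice end (3011, exclusive) to November
# 2100, keeping whole Dec-Nov seasonal years (assumption inferred from the
# cftime ranges constructed above).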
uas = uas.isel(time=np.arange(11+124*12,3011))
vas = vas.isel(time=np.arange(11+124*12,3011))
uas_djf_mean_anom, uas_jja_mean_anom, uas_mam_mean_anom, uas_son_mean_anom, uas_dec2nov_mean = seasonal_deviations_from_monthly_means(uas.uas)
vas_djf_mean_anom, vas_jja_mean_anom, vas_mam_mean_anom, vas_son_mean_anom, vas_dec2nov_mean = seasonal_deviations_from_monthly_means(vas.vas)
uas_djf_mean_anom_d = uas_djf_mean_anom.sel(year=futy).mean(dim='year')-uas_djf_mean_anom.sel(year=basey).mean(dim='year')
vas_djf_mean_anom_d = vas_djf_mean_anom.sel(year=futy).mean(dim='year')-vas_djf_mean_anom.sel(year=basey).mean(dim='year')
#replace wind over land with wind from nearest over-ocean grid cell
lf_path = os.path.join('/Volumes/Naamloos/PhD_Data/CMIP6/raw/sftlf',model) #get land fraction of atmospheric grid cells
lf_file = os.path.join(lf_path,fnmatch.filter(os.listdir(lf_path),'*sftlf*')[0])
lf = xr.open_dataset(lf_file)
if model=='UKESM1-0-LL': #U&V points on different grids, get them on a common grid
v = np.empty(np.shape(lf.sftlf))
v[:] = np.nan
v = (vas_djf_mean_anom_d[0:-1,:].values + vas_djf_mean_anom_d[1:,:].values)/2
uas_djf_mean_anom_d_rep = np.empty((np.shape(lf.sftlf)[0],np.shape(lf.sftlf)[1] + 1))
uas_djf_mean_anom_d_rep[:,0:-1] = uas_djf_mean_anom_d.values
uas_djf_mean_anom_d_rep[:,-1] = uas_djf_mean_anom_d.values[:,0]
u = np.empty(np.shape(lf.sftlf))
u[:] = np.nan
u = (uas_djf_mean_anom_d_rep[:,0:-1] + uas_djf_mean_anom_d_rep[:,1:])/2
uas_djf_mean_anom_d = xr.DataArray(data=u,dims=["lat", "lon"],coords=[lf.lat, lf.lon])
vas_djf_mean_anom_d = xr.DataArray(data=v,dims=["lat", "lon"],coords=[lf.lat, lf.lon])
elif model == 'ACCESS-ESM1-5': #U&V points on different grids, get them on a common grid
v = np.empty((np.shape(vas_djf_mean_anom_d)[0]-1,np.shape(vas_djf_mean_anom_d)[1]))
v[:] = np.nan
v = (vas_djf_mean_anom_d[0:-1,:].values + vas_djf_mean_anom_d[1:,:].values)/2
u = np.empty((np.shape(uas_djf_mean_anom_d)[0],np.shape(uas_djf_mean_anom_d)[1]-1))
u[:] = np.nan
u = (uas_djf_mean_anom_d[:,0:-1].values + uas_djf_mean_anom_d[:,1:].values)/2
uas_djf_mean_anom_d = xr.DataArray(data=u,dims=["lat", "lon"],coords=[lf.lat, lf.lon[1:]])
vas_djf_mean_anom_d = xr.DataArray(data=v,dims=["lat", "lon"],coords=[lf.lat[1:-1], lf.lon])
elif model == 'MPI-ESM1-2-LR': #same grids u&V and land fraction, but some precision differences
lf['lon'] = uas_djf_mean_anom_d.lon
lf['lat'] = uas_djf_mean_anom_d.lat #differences order 10e-14
#apply simple extrapolation correction for land contamination
uas_djf_mean_anom_d_masked = uas_djf_mean_anom_d.where(lf.sftlf==0,drop=False) #mask over-land grid cells
vas_djf_mean_anom_d_masked = vas_djf_mean_anom_d.where(lf.sftlf==0,drop=False)
#extrapolate over-ocean to over-land winds using nearest neighbor using CDO (slightly cumbersome, this could be improved)
scratch = '/Users/thermans/Documents/PhD/Phase4_seasonal/'
for wind_var in ('uas', 'vas'):
    for suffix in ('masked', 'landfillednn', 'landfillednn_wrapped'):
        fpath = scratch + 'djf_anom_' + wind_var + '_d_' + model + '_' + suffix + '.nc'
        if os.path.exists(fpath):
            os.remove(fpath)
uas_djf_mean_anom_d_masked.to_netcdf('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_masked.nc')
vas_djf_mean_anom_d_masked.to_netcdf('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_masked.nc')
cdo.setmisstonn(input='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_masked.nc',
output='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_landfillednn.nc') #extrapolate ocean to land
cdo.sellonlatbox(-180,180,-90,90,input='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_landfillednn.nc',
output='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_landfillednn_wrapped.nc') #wrap around lon
cdo.setmisstonn(input='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_masked.nc',
output='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_landfillednn.nc')
cdo.sellonlatbox(-180,180,-90,90,input='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_landfillednn.nc',
output='/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_landfillednn_wrapped.nc')
if model == 'MPI-ESM1-2-LR':
uas_mod = xr.open_dataset('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_landfillednn.nc') #cdo wrapping somehow doesnt work
vas_mod = xr.open_dataset('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_landfillednn.nc')
uas_mod.coords['lon'] = ((uas_mod.coords['lon'] + 180) % 360) - 180 #wrap around 0
uas_mod = uas_mod.reindex({ 'lon' : np.sort(uas_mod['lon'])})
vas_mod.coords['lon'] = ((vas_mod.coords['lon'] + 180) % 360) - 180 #wrap around 0
vas_mod = vas_mod.reindex({ 'lon' : np.sort(vas_mod['lon'])})
else:
uas_mod = xr.open_dataset('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_uas_d_'+model+'_landfillednn_wrapped.nc')
vas_mod = xr.open_dataset('/Users/thermans/Documents/PhD/Phase4_seasonal/djf_anom_vas_d_'+model+'_landfillednn_wrapped.nc')
#wrap longitude
uas_djf_mean_anom_d.coords['lon'] = ((uas_djf_mean_anom_d.coords['lon'] + 180) % 360) - 180 #wrap around 0
uas_djf_mean_anom_d = uas_djf_mean_anom_d.reindex({ 'lon' : np.sort(uas_djf_mean_anom_d['lon'])})
vas_djf_mean_anom_d.coords['lon'] = ((vas_djf_mean_anom_d.coords['lon'] + 180) % 360) - 180 #wrap around 0
vas_djf_mean_anom_d = vas_djf_mean_anom_d.reindex({ 'lon' : np.sort(vas_djf_mean_anom_d['lon'])})
#extrapolation from over-ocean wind only if leading to higher wind speeds
if 'uas' in uas_mod:
uas_mod=uas_mod.where(abs(uas_mod.uas)>abs(uas_djf_mean_anom_d),uas_djf_mean_anom_d).uas
vas_mod=vas_mod.where(abs(vas_mod.vas)>abs(vas_djf_mean_anom_d),vas_djf_mean_anom_d).vas
else:
uas_mod=uas_mod.where(abs(uas_mod.__xarray_dataarray_variable__)>abs(uas_djf_mean_anom_d),uas_djf_mean_anom_d).__xarray_dataarray_variable__
vas_mod=vas_mod.where(abs(vas_mod.__xarray_dataarray_variable__)>abs(vas_djf_mean_anom_d),vas_djf_mean_anom_d).__xarray_dataarray_variable__
#apply the CMIP6 wind velocity change as a constant offset of ERA5 wind forcing
fn = 'ERA5_NorthAtlantic_an_1993_ForROMS.nc' #generate interpolation weights
ds = xr.open_dataset(os.path.join(in_dir,fn))
#regrid to ERA5 grid
regridder = xe.Regridder(uas_mod,ds,'bilinear')
uas_mod_regrid = regridder(uas_mod)
regridder = xe.Regridder(vas_mod,ds,'bilinear')
vas_mod_regrid = regridder(vas_mod)
for year in years_to_mod: #regrid, add and save
fn = 'ERA5_NorthAtlantic_an_'+str(year)+'_ForROMS.nc'
ds = xr.open_dataset(os.path.join(in_dir,fn))
with xr.set_options(keep_attrs=True):
ds['Uwind'] = ds['Uwind'] + uas_mod_regrid
ds['Vwind'] = ds['Vwind'] + vas_mod_regrid
#overwrite wind variables & save into new file
ds.attrs['comments'] = 'additional wind component added'
ds.attrs['mod_date'] = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
ds.Uwind.attrs['coordinates'] = 'lon lat'
ds.Vwind.attrs['coordinates'] = 'lon lat'
with xr.set_options(keep_attrs=True):
ds.to_netcdf(os.path.join(out_dir,out_prefix+fn),mode='w',encoding={'Uwind':{'zlib': True,'complevel': 4},'Vwind':{'zlib': True,'complevel': 4}}) #(over)write a new NetCDF
# Repo: MBHuman/NSD_TechSearch
from flask import Flask, render_template, request, send_file
import pickle
from lib.ut.robot import Robot
app = Flask(__name__)
results = []
@app.route('/', methods=['POST', 'GET'])
def index():
error = None
if request.method == 'POST':
search_field = request.form.get('search_field')
# Get data from search_engine
return( render_template('index.html'))
@app.route('/search', methods=['GET','POST'])
def search():
robot = Robot()
error, words = None, request.args.get('search_field')
results = robot.get_results_from_elastic(words)
# print(results)
if(len(results) == 0):
error = "По вашему запросу ничего не найдено"
elif isinstance(results, str):
error = results
results = []
robot.get_csv(results)
return( render_template('results.html', words=words, error=error, results=results))
@app.route('/download', methods=['POST', 'GET'])
def download():
return send_file('static/first.csv', as_attachment=True)
@app.route('/info')
def info():
return( render_template('info.html'))
if __name__ == '__main__':
app.run()
# SETTING THE SENSOR READING REFRESH PERIOD:
# Import the library for working with the I2C-flash temperature and humidity sensor (Sensor Humidity and Temperature).
from pyiArduinoI2Csht import *
from time import sleep
# Declare the sht object for accessing the functions and methods of the pyiArduinoI2Csht library.
# If an address is specified when declaring the object, for example sht(0x09),
# the example will work with the module at that address.
sht = pyiArduinoI2Csht()
# Tell the module to refresh the sensor readings every two seconds.
sht.setPeriod(2.0)
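# The library is assumed to cache readings between refreshes, so the 0.1 s
# polling loop below re-reads cached values rather than querying the sensor
# on every iteration.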
while True:
    # Print the current temperature and humidity ("%%" outside a format string
    # would print two percent signs, so a single "%" is used)
    print("t = %.1f" % sht.getTem(), "°C,",
          "RH = %.1f" % sht.getHum(), "%")
sleep(.1)
# Repo: bitech-bit/projet, file: projet.py
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def Afficher(self):
print("L'abscisse est :{}".format(self.x))
print("L'ordonnee est :{}".format(self.y))
class cercle(Point):
    def __init__(self, x, y, R):
        super().__init__(x, y)
        self.R = R
def getPerimetre(self):
p = 2*3.14*self.R
print("Le perimetre du cercle est:" +str(p))
def getSurface(self):
s = 3.14 * self.R * self.R
print("la surface du cercle est"+str(s))
def afficher(self):
print("CERCLE DE CENTRE O DE COORDONEES{}".format(self.x)+"{}".format(self.y))
print("ET DE RAYON R={}".format(self.R))
class cylindre(cercle):
def __init__(self,R,h):
self.R=R
self.h=h
def getVolume(self):
volume=3.14*self.R*self.R*self.h
print("le volume du cylindre est:"+str(volume))
# MAIN PROGRAM
print("___________________")
print("The Point class")
print("___________________")
h1 = Point(2, 5)
h1.Afficher()
print("___________________")
print("La classe cercle")
print("___________________")
h2 =cercle(2,4,3)
h2.afficher()
h2.getPerimetre()
h2.getSurface()
print("___________________")
print("La classe cylindre")
print("___________________")
h3=cylindre(3,5)
h3.getVolume()
# File: tools/convertCANData.py
# Tool to read in image times and the CANData.csv file and output the interpolated value for each
# car parameter at the time of the image.
import pandas as pd
import numpy as np
import struct
import os
import argparse
def convertToBytes(x):
if len(x) == 1:
x = "0" + x
out = bytes.fromhex(x)
return out
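# Illustrative example: convertToBytes("5") pads to "05" and returns b'\x05'.
# This module-level helper mirrors the convertToByte closure defined inside
# convertRow below.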
def convertRow(row):
def convertToByte(x):
if len(x) == 1:
x = "0" + x
out = bytes.fromhex(x)
return out
d1 = convertToByte(row.d1)
d2 = convertToByte(row.d2)
d3 = convertToByte(row.d3)
d4 = convertToByte(row.d4)
d5 = convertToByte(row.d5)
d6 = convertToByte(row.d6)
d7 = convertToByte(row.d7)
d8 = convertToByte(row.d8)
if row["ID"] == '00000738': #EPS unit
if d1 == b'\x05' and d2 == b'\x62' and d3 == b'\x33' and d4 == b'\x02':
# steering wheel angle, relative to vehicle start
row.output = struct.unpack("h",d6+d5)[0]/10-780
row.commonName = "steeringWheelAngle"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x33' and d4 == b'\x0b':
# steering torque
row.commonName = "steeringWheelTorque"
row.output = (struct.unpack("B",d5)[0]-127)/10
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x33' and d4 == b'\x01':
# steering rotation speed
row.commonName = "steeringRotationSpeed"
row.output = struct.unpack("B",d5)[0]*4
else:
print("Unknown packet from EPS: {}".format(row))
elif row["ID"] =='00000739': #
if d1 == b'\x05' and d2 == b'\x62' and d3 == b'\xd9' and d4 == b'\x80':
#turn signal indicator
row.commonName = "turnSignal"
if d5 == b'\x20': # none
row.output = 0
elif d5 == b'\x21': # left
row.output = -1
elif d5 == b'\x22': # right
row.output = 1
else:
print("Unknown value for turn signal: {}".format(row))
else:
print("Unknown packet from cluster: {}".format(row))
elif row["ID"] == '000007e8': # PCM
if d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x03' and d4 == b'\x2b':
# accelerator position, 0-100%
row.output = struct.unpack("B",d5)[0]/2
row.commonName = "acceleratorPosition"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\xf4' and d4 == b'\x45':
# throttle position 0-1
row.output = struct.unpack("B",d5)[0]/255
row.commonName = "throttlePosition"
elif d1 == b'\x07' and d2 == b'\x62' and d3 == b'\x1e' and d4 == b'\x04':
# clutch applied
row.output = struct.unpack("B",d7)[0]/255
row.commonName = "clutchApplied-NOTIMPLEMENTED"
else:
print("Unknown packet from PCM? : {}".format(row))
elif row["ID"] == '00000768': # ABS module
if d1 == b'\x05' and d2 == b'\x62' and d3 == b'\x20' and d4 == b'\x34':
# brake pressure
row.output = struct.unpack("h",d6 + d5)[0]*33.3
row.commonName = "brakePressure"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\xf4' and d4 == b'\x0d':
# vehicle speed
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "vehicleSpeed"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x06':
# left front wheel speed
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "leftFrontWheelSpeed"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x07':
# right front wheel speed
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "rightFrontWheelSpeed"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x08':
# left rear wheel speed
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "leftRearWheelSpeed"
elif d1 == b'\x04' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x09':
# right rear wheel speed
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "rightRearWheelSpeed"
elif d1 == b'\x05' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x11':
# Longitudinal acceleration
row.output = struct.unpack("h",d6 + d5)[0]/1000.
row.commonName = "longitudinalAcceleration"
elif d1 == b'\x05' and d2 == b'\x62' and d3 == b'\x2b' and d4 == b'\x0c':
# Lateral acceleration
row.output = struct.unpack("h",d6 + d5)[0]/255.
row.commonName = "lateralAcceleration"
else:
print("Unknown packet from ABS : {}".format(row))
else:
print("Unknown packet: {}".format(row))
return row
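# Hedged worked example (not part of the original tool): decoding a raw
# steering-wheel-angle payload the same way convertRow does for ID 00000738.
# The payload bytes d5/d6 are fabricated purely for illustration.
def _steering_angle_demo():
    d5, d6 = b'\x1e', b'\x85'
    return struct.unpack("h", d6 + d5)[0] / 10 - 780  # -> 1.3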
def GetImageTimes(path,extension="jpeg"):
'''
Given a path and extension(default=jpeg), get all the files in the path that match the extension
Return a list of times
This assumes that file names are decimal times in seconds
'''
assert os.path.exists(path), "Provided path does not exist!\n{}".format(path)
imgs = [x for x in os.listdir(path) if extension in x]
assert len(imgs) > 2, "There must be at least 2 images of type {} in the path {}".format(extension,path)
extensionLength = len(extension)+1
times = [float(t[:-extensionLength]) for t in imgs]
return np.sort(np.array(times))
def FilterDataByDelta(data,maxDelta=1.0):
'''
Takes in a data frame with the columns: TimeStamp and output
    Returns a dataframe with columns: TimeStamp and output that has 1 less row than the source dataframe
Filters the data frame so that any output row that does not have an output in the
next maxDeta seconds is removed
'''
ts = np.array(data.TimeStamp[:-1])
ts2 = np.array(data.TimeStamp[1:])
data = data[:-1] # remove last data point
    data = data.assign(delta = ts2-ts)  # time gap to the next sample (ts-ts2 was always negative, so nothing got filtered)
data = data[data.delta<maxDelta] # filter out deltas that are too big
data = data.reset_index() # need to reset the indices as there are gaps now
data = data.drop(labels="delta",axis=1) # get rid of delta column
return data
def FilterImgTimesByDataTimes(imgTimes,dataTimes,maxDelta=1.0):
'''
Given np arrays of image times and data times,
filter the image times so that there is always a data point within maxDelta of the image
1) get 1D array of times of images, imgTimes
2) get 1D array of times of samples, dataTimes
3) IMGTimes,DATATimes = np.meshgrid(imgTimes,dataTimes)
4) locs = np.where(np.abs(IMGTimes-DATATimes)<=maxDelta)
* The result in locs is (idx of dataTimes, idx of imgTimes)
5) imgLocs = np.unique(locs[1])
6) imgTimes = imgTimes[imgLocs]
'''
IMGTimes,DATATimes = np.meshgrid(imgTimes,dataTimes)
locs = np.where(np.abs(IMGTimes-DATATimes)<maxDelta)
imgLocs = np.unique(locs[1])
return imgTimes[imgLocs]
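# Hedged worked example (not part of the original tool): with imgTimes [0.0, 5.0],
# dataTimes [0.2] and maxDelta 1.0, only the image at t=0.0 survives, because
# no data point lies within 1 s of t=5.0.
def _filter_img_times_demo():
    return FilterImgTimesByDataTimes(
        np.array([0.0, 5.0]), np.array([0.2]), maxDelta=1.0)  # -> array([0.])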
if __name__ == "__main__":
knownBadFormats = ["throttlePosition","turnSignal","vehicleSpeed","steeringWheelTorque","acceleratorPosition"]
parser = argparse.ArgumentParser(description="Convert a csv of captured CAN packets to individual csv files of just the data and time")
parser.add_argument("inputPath",help="Path with CANData.csv and folder imgs/ with all the images in it")
parser.add_argument("--maxDelta",help="The maximum difference in time between an image and data points",default=1.0)
parser.add_argument("--outputFile",help="The output csv file",default="interpolatedData.csv")
args = parser.parse_args()
inputPath = args.inputPath
assert os.path.isdir(inputPath), "The specified path does not exist!\n{}".format(inputPath)
maxDelta = args.maxDelta
inputCSV = os.path.join(inputPath,"CANData.csv")
imgPath = os.path.join(inputPath,"imgs")
assert os.path.isfile(inputCSV), "CANData.csv does not exist in the provided path!"
assert os.path.isdir(imgPath), "There is no imgs folder in the path!"
outputCSV = args.outputFile
if not outputCSV.endswith(".csv"):
outputCSV = outputCSV + ".csv"
outputCSV = os.path.join(inputPath,outputCSV)
imageTimes = GetImageTimes(imgPath)
print("Found {} images".format(len(imageTimes)))
# read in the raw CANData.csv file and convert the bytes to real values
dtype = {"TimeStamp":float, "ID":bytes, "d1":bytes, "d2":bytes, "d3":bytes,"d4":bytes, "d5":bytes, "d6":bytes, "d7":bytes, "d8":bytes,"dummy":str}
data = pd.read_csv(inputCSV,index_col=False,dtype=dtype)
data.columns = ["TimeStamp","ID","d1","d2","d3","d4","d5","d6","d7","d8"]
data["output"] = 0
data["commonName"] = ""
data = data.apply(lambda row: convertRow(row),axis=1)
# For each type of data, filter out times that do not have another data point within maxDelta seconds
dataNames = list(set(data.commonName.tolist()))
for dataName in dataNames:
if ("NOTIMPLEMENTED" in dataName) or (dataName in knownBadFormats):
print("Skipping {}".format(dataName))
continue
d = data[data.commonName == dataName]
d = d.sort_values("TimeStamp")
d = FilterDataByDelta(d,maxDelta=maxDelta)
dataTimes = np.array(d.TimeStamp)
imageTimes = FilterImgTimesByDataTimes(imageTimes,dataTimes,maxDelta=maxDelta)
print("After filtering with {}, now have {} images".format(dataName,len(imageTimes)))
print("Finished filtering image times based on data\n")
# now get the values at each imageTime
interpolatedData = pd.DataFrame(imageTimes,columns=["TimeStamp"])
interpolatedData = interpolatedData.sort_values("TimeStamp")
for dataName in dataNames:
if ("NOTIMPLEMENTED" in dataName) or (dataName in knownBadFormats):
continue
print("Interpolating {}...".format(dataName))
d = data[data.commonName == dataName]
d = d.sort_values("TimeStamp")
rawX = np.array(d.TimeStamp)
rawY = np.array(d.output)
interpolatedData[dataName] = np.interp(imageTimes,rawX,rawY)
print("\nSaving data!")
interpolatedData.to_csv(outputCSV,index=False)
print("Data was interpolated for {} images with at least 1 point within {:.3f} seconds".format(imageTimes.shape[0],maxDelta))
print("The file is saved at {}".format(outputCSV))
| StarcoderdataPython |
3285924 | <filename>sudoku/tests/test_tl.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019-10-20 13:10
# @Author : sean10
# @Site :
# @File : test_tl.py
# @Software: PyCharm
"""
test the tuili (reasoning) sudoku from sudokufans.org
"""
import requests
import pytesseract
from PIL import Image
from bs4 import BeautifulSoup
import re
# Build a 256-entry lookup table that binarizes grayscale pixels:
# values below the threshold map to 0 (black), the rest to 1 (white).
threshold = 140
table = []
for i in range(256):
    if i < threshold:
        table.append(0)
    else:
        table.append(1)
cookies = "__cfduid=df2b6bdb5b18f04b6b441b9c07832ac8b1570632906; PHPSESSID=37pga7s1fctjb3t77cg0ehreh3; c_userid=29143; c_username=sean10; ips4_IPSSessionFront=9mh6mnbs14vpah5jgc536egum0; light=1; yjs_id=fDE1NzE0NzMwNTU3NjU; ctrl_time=1"
headers = {
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
"accept-language": "en,zh-CN;q=0.9,zh;q=0.8,en-US;q=0.7,zh-TW;q=0.6,ja;q=0.5,zu;q=0.4",
"cache-control": "no-cache", "pragma": "no-cache", "upgrade-insecure-requests": "1",
"cookie": cookies}
def main():
src_img = Image.open("tl.img.png")
ocr_img(src_img)
def ocr_img(src_img:Image)->list:
num = []
for cnt in range(9):
temp = []
img_1 = preprocess_img(src_img, "left", cnt)
img_1.save("temp.png")
img_2 = preprocess_img(src_img, "right", cnt)
img_2.save("temp2.png")
temp.append(pytesseract.image_to_string(img_1, lang='snum',
config='--psm 7 sudoku'))
temp.append(pytesseract.image_to_string(img_2, lang='snum',
config='--psm 7 sudoku'))
num.append(temp)
return postprocess_num(num)
def postprocess_num(num:list)->list:
for row in num:
print(row)
return num
def guess_truth(num):
if num < 30 and num > 0:
pass
def preprocess_img(src_img:Image, location:str, position:int)->Image:
    if location == "left":
        return src_img.crop((310, position * 40+15, 325, 40 * (position + 1))).convert('L').point(table, '1')
    elif location == "right":
        return src_img.crop((335, position*40+15, 360, 40*(position+1))).convert('L').point(table, '1')
else:
return None
def ocr_num()->tuple:
pass
def parse_img_src(src:str)->int:
model = re.compile(r'(?<=tl.img.php\?t=)\d+')
m = re.search(model, src).group(0)
return int(m)
def spider(params=None):
if not params:
url = "http://www.sudokufans.org.cn/lx/tl.index.php"
else:
url = "http://www.sudokufans.org.cn/lx/tl.img.php"
# cookies = {"__cfduid": "df2b6bdb5b18f04b6b441b9c07832ac8b1570632906", "PHPSESSID": "37pga7s1fctjb3t77cg0ehreh3",
# "c_userid": "29143", "c_username": "sean10", "ips4_IPSSessionFront": "9mh6mnbs14vpah5jgc536egum0",
# "light": "1", "yjs_id": "fDE1NzE0NzMwNTU3NjU", "ctrl_time": "1"}
html = requests.get(url, params=params, headers=headers)
if not params:
return parse_img_src(html.text)
print(html.url)
print(html.headers)
with open("temp.png", "wb") as f:
f.write(html.content)
content = BeautifulSoup(html.text, 'html.parser')
with open("output.html", "w") as f:
f.write(content.prettify())
def submit(ans):
data = {f"find[{i}]": ans[i] for i in range(len(ans))}
url = "http://www.sudokufans.org.cn/lx/fine2.php"
html = requests.post(url, data=data, headers=headers)
content = BeautifulSoup(html.text, 'html.parser')
with open("temp.html", "w") as f:
f.write(content.prettify())
if __name__ == "__main__":
main()
# submit(ans)
# num = spider()
# spider({"t": num})
| StarcoderdataPython |
3298635 | from functools import partial
import sqlalchemy as sa
RequiredColumn = partial(sa.Column, nullable=False)
metadata = sa.MetaData()
company = sa.Table(
'companies', metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
RequiredColumn('name', sa.types.String),
RequiredColumn('phone', sa.types.String),
sa.Column('description', sa.types.String),
    # Although it is possible to provide a separate contact
    # email for the company, I think it would be more legitimate
    # to contact the person who created the buy order for the company
# RequiredColumn('email', sa.types.String),
)
account = sa.Table(
'accounts', metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
RequiredColumn('company_id', sa.types.Integer,
sa.schema.ForeignKey('companies.id')),
RequiredColumn('email', sa.types.String, unique=True),
RequiredColumn('first_name', sa.types.String),
RequiredColumn('last_name', sa.types.String),
RequiredColumn('password', sa.types.String),
)
software = sa.Table(
'software', metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
RequiredColumn('version', sa.types.Integer),
RequiredColumn('distributor_id', sa.types.Integer,
sa.schema.ForeignKey('companies.id')),
RequiredColumn('name', sa.types.String),
RequiredColumn('description', sa.types.String),
sa.Column('icon', sa.types.Binary),
sa.schema.UniqueConstraint('id', 'version')
)
software_order = sa.Table(
'software_orders', metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('purchaser_id', sa.types.Integer,
sa.schema.ForeignKey('accounts.id')),
)
software_order_item = sa.Table(
'software_order_items', metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
RequiredColumn('order_id', sa.types.Integer,
sa.schema.ForeignKey('software_orders.id')),
RequiredColumn('software_id', sa.types.Integer),
RequiredColumn('software_version', sa.types.Integer),
RequiredColumn('amount', sa.types.Integer,
sa.schema.CheckConstraint('amount>0'), default=1),
RequiredColumn('price', sa.types.Numeric,
sa.schema.CheckConstraint('price>=0')),
sa.schema.ForeignKeyConstraint(('software_id', 'software_version'),
('software.id', 'software.version'))
)
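# Hedged usage sketch (not part of the original schema module): create the
# tables against an in-memory SQLite engine; the engine URL is an assumption
# chosen purely for illustration.
if __name__ == '__main__':
    engine = sa.create_engine('sqlite://')
    metadata.create_all(engine)
    print(sorted(metadata.tables))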
| StarcoderdataPython |
4837679 | <filename>wechat_django/tests/test_site_admin.py<gh_stars>100-1000
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .base import WeChatTestCase
class AdminSiteTestCase(WeChatTestCase):
def test_admin_view(self):
"""测试admin view"""
# 测试request能正确拿到appid
pass
# 测试响应有extra_context
pass
| StarcoderdataPython |
1617510 | """
Lidar
"""
# requies glob to be installed: "pip3 install glob2"
# requires rplidar to be installed: "pip3 install rplidar"
import time
import math
import pickle
import serial
import numpy as np
from donkeycar.utils import norm_deg, dist, deg2rad, arr_to_img
from PIL import Image, ImageDraw
class RPLidar(object):
'''
https://github.com/SkoltechRobotics/rplidar
'''
def __init__(self, lower_limit = 0, upper_limit = 360, debug=False):
from rplidar import RPLidar
import glob
port_found = False
self.lower_limit = lower_limit
self.upper_limit = upper_limit
temp_list = glob.glob ('/dev/ttyUSB*')
result = []
for a_port in temp_list:
try:
s = serial.Serial(a_port)
s.close()
result.append(a_port)
port_found = True
except serial.SerialException:
pass
if port_found:
self.port = result[0]
self.distances = [] #a list of distance measurements
self.angles = [] # a list of angles corresponding to dist meas above
self.lidar = RPLidar(self.port, baudrate=115200)
self.lidar.clear_input()
time.sleep(1)
self.on = True
#print(self.lidar.get_info())
#print(self.lidar.get_health())
else:
print("No Lidar found")
def update(self):
scans = self.lidar.iter_scans(550)
while self.on:
try:
for scan in scans:
self.distances = [item[2] for item in scan]
self.angles = [item[1] for item in scan]
except serial.serialutil.SerialException:
print('serial.serialutil.SerialException from Lidar. common when shutting down.')
def run_threaded(self):
sorted_distances = []
if (self.angles != []) and (self.distances != []):
angs = np.copy(self.angles)
dists = np.copy(self.distances)
filter_angs = angs[(angs > self.lower_limit) & (angs < self.upper_limit)]
            filter_dist = dists[(angs > self.lower_limit) & (angs < self.upper_limit)]  # distances within the angle limits
            angles_ind = np.argsort(filter_angs)  # returns the indexes that sort filter_angs
            if len(angles_ind) > 0:
                sorted_distances = filter_dist[angles_ind]  # distances reordered by ascending angle
return sorted_distances
def shutdown(self):
self.on = False
time.sleep(2)
self.lidar.stop()
self.lidar.stop_motor()
self.lidar.disconnect()
class YDLidar(object):
'''
https://pypi.org/project/PyLidar3/
'''
def __init__(self, port='/dev/ttyUSB0'):
import PyLidar3
self.port = port
self.distances = [] #a list of distance measurements
self.angles = [] # a list of angles corresponding to dist meas above
self.lidar = PyLidar3.YdLidarX4(port)
if(self.lidar.Connect()):
print(self.lidar.GetDeviceInfo())
self.gen = self.lidar.StartScanning()
else:
print("Error connecting to lidar")
self.on = True
def init(self, port='/dev/ttyUSB0'):
import PyLidar3
print("Starting lidar...")
self.port = port
self.distances = [] #a list of distance measurements
self.angles = [] # a list of angles corresponding to dist meas above
self.lidar = PyLidar3.YdLidarX4(port)
if(self.lidar.Connect()):
print(self.lidar.GetDeviceInfo())
gen = self.lidar.StartScanning()
return gen
else:
print("Error connecting to lidar")
self.on = True
#print(self.lidar.get_info())
#print(self.lidar.get_health())
def update(self, lidar, debug = False):
while self.on:
try:
self.data = next(lidar)
                angles, distances = [], []
                for angle in range(0, 360):
                    if self.data[angle] > 1000:
                        # accumulate all qualifying readings from this scan
                        angles.append(angle)
                        distances.append(self.data[angle])
                self.angles = angles
                self.distances = distances
if debug:
return self.distances, self.angles
except serial.serialutil.SerialException:
print('serial.serialutil.SerialException from Lidar. common when shutting down.')
def run_threaded(self):
return self.distances, self.angles
def shutdown(self):
self.on = False
time.sleep(2)
self.lidar.StopScanning()
self.lidar.Disconnect()
class LidarPlot(object):
'''
takes the raw lidar measurements and plots it to an image
'''
PLOT_TYPE_LINE = 0
PLOT_TYPE_CIRC = 1
def __init__(self, resolution=(500,500),
max_dist=1000, #mm
radius_plot=3,
plot_type=PLOT_TYPE_CIRC):
self.frame = Image.new('RGB', resolution)
self.max_dist = max_dist
self.rad = radius_plot
self.resolution = resolution
if plot_type == self.PLOT_TYPE_CIRC:
self.plot_fn = self.plot_circ
else:
self.plot_fn = self.plot_line
def plot_line(self, img, dist, theta, max_dist, draw):
'''
scale dist so that max_dist is edge of img (mm)
and img is PIL Image, draw the line using the draw ImageDraw object
'''
center = (img.width / 2, img.height / 2)
max_pixel = min(center[0], center[1])
dist = dist / max_dist * max_pixel
if dist < 0 :
dist = 0
elif dist > max_pixel:
dist = max_pixel
theta = np.radians(theta)
sx = math.cos(theta) * dist + center[0]
sy = math.sin(theta) * dist + center[1]
ex = math.cos(theta) * (dist + self.rad) + center[0]
ey = math.sin(theta) * (dist + self.rad) + center[1]
fill = 128
draw.line((sx,sy, ex, ey), fill=(fill, fill, fill), width=1)
def plot_circ(self, img, dist, theta, max_dist, draw):
'''
scale dist so that max_dist is edge of img (mm)
and img is PIL Image, draw the circle using the draw ImageDraw object
'''
center = (img.width / 2, img.height / 2)
max_pixel = min(center[0], center[1])
dist = dist / max_dist * max_pixel
if dist < 0 :
dist = 0
elif dist > max_pixel:
dist = max_pixel
theta = np.radians(theta)
sx = int(math.cos(theta) * dist + center[0])
sy = int(math.sin(theta) * dist + center[1])
ex = int(math.cos(theta) * (dist + 2 * self.rad) + center[0])
ey = int(math.sin(theta) * (dist + 2 * self.rad) + center[1])
fill = 128
draw.ellipse((min(sx, ex), min(sy, ey), max(sx, ex), max(sy, ey)), fill=(fill, fill, fill))
def plot_scan(self, img, distances, angles, max_dist, draw):
for dist, angle in zip(distances, angles):
self.plot_fn(img, dist, angle, max_dist, draw)
def run(self, distances, angles):
'''
takes two lists of equal length, one of distance values, the other of angles corresponding to the dist meas
'''
self.frame = Image.new('RGB', self.resolution, (255, 255, 255))
draw = ImageDraw.Draw(self.frame)
self.plot_scan(self.frame, distances, angles, self.max_dist, draw)
return self.frame
def shutdown(self):
pass
class BreezySLAM(object):
'''
https://github.com/simondlevy/BreezySLAM
'''
def __init__(self, MAP_SIZE_PIXELS=500, MAP_SIZE_METERS=10):
from breezyslam.algorithms import RMHC_SLAM
from breezyslam.sensors import Laser
laser_model = Laser(scan_size=360, scan_rate_hz=10., detection_angle_degrees=360, distance_no_detection_mm=12000)
MAP_QUALITY=5
self.slam = RMHC_SLAM(laser_model, MAP_SIZE_PIXELS, MAP_SIZE_METERS, MAP_QUALITY)
def run(self, distances, angles, map_bytes):
self.slam.update(distances, scan_angles_degrees=angles)
x, y, theta = self.slam.getpos()
if map_bytes is not None:
self.slam.getmap(map_bytes)
#print('x', x, 'y', y, 'theta', norm_deg(theta))
return x, y, deg2rad(norm_deg(theta))
def shutdown(self):
pass
class BreezyMap(object):
'''
bitmap that may optionally be constructed by BreezySLAM
'''
def __init__(self, MAP_SIZE_PIXELS=500):
self.mapbytes = bytearray(MAP_SIZE_PIXELS * MAP_SIZE_PIXELS)
def run(self):
return self.mapbytes
def shutdown(self):
pass
class MapToImage(object):
def __init__(self, resolution=(500, 500)):
self.resolution = resolution
def run(self, map_bytes):
np_arr = np.array(map_bytes).reshape(self.resolution)
return arr_to_img(np_arr)
def shutdown(self):
pass
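# Hedged wiring sketch (not part of the original module): render a single
# fabricated scan to a PIL image without a vehicle loop or real hardware.
def _lidar_plot_demo():
    plotter = LidarPlot(resolution=(200, 200), max_dist=1000)
    distances = [500.0] * 360  # mm, fabricated
    angles = list(range(360))  # degrees
    return plotter.run(distances, angles)  # PIL.Image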
| StarcoderdataPython |
3329834 | # coding: utf-8
import glob
import sys
import os
import csv
filedir = sys.argv[2]
outfilename = sys.argv[1]
dup_check = dict()
with open(outfilename, 'wt', encoding='utf-8') as f:
filelist = glob.glob( os.path.join(filedir,"*.csv"))
csv_writer = csv.writer(f, delimiter=',')
for filename in filelist:
try:
with open(filename, 'rt', encoding='cp949') as fp:
fp.readline()
for row in fp.readlines():
fields = row.split(',', 1)
strings = fields[1].strip()
                    if strings[-1] == ',':
                        strings = strings[:-1]  # drop only the trailing comma ([1:-1] also dropped the first character)
if strings not in dup_check:
keywords = strings.split('|')
if len(keywords) < 2:
print ("error keywords={}", keywords)
continue
try:
csv_writer.writerow(keywords)
dup_check[strings] = 1
except Exception as e:
print ("except:{}, error={},keywords={}".format(filename, e, keywords))
except Exception as e:
print ("except:{}, error={}".format(filename, e))
| StarcoderdataPython |
104241 | <gh_stars>0
from keybender import config
from keybender.knox import KnoX
from keybender.listener import Listener
from keybender.event import Event, EventLoop
from keybender.rctl import SocketMgr, SocketSender
import sys
import os
import argparse
import socket
import traceback
"""
argument parser
open named pipe for communication with external control
on startup a script can start urxvt with this running inside it, then send a request
to the pipe to find that PID's window, then ask it to remove the borders, remove the
window from the taskbar, set it to always stay under everything else, etc...
bindkeysequence -t urxvt
then run urxvt -e runme...
"""
# argumentparser,
# decide to start at start (1st level) or at a specific waiter or even an action
# or the same but at 2nd level (with opening tk root and exit on undefined key)
class Director:
def process_args(self):
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", metavar="FILENAME",
help="Configuration file",
dest="config", default=None, required=True)
parser.add_argument("-s", "--socket", metavar="SOCKET",
help="Socket path to listen on for commands.",
dest="socket_path", default=None)
parser.add_argument("-o", "--options", metavar="[SECTION:]OPTION=VALUE",
help=
"Option name and value to set in the opt section"
" in the configuration file.",
action="append",
dest="options", default=[])
self.options = parser.parse_args()
if not self.options:
parser.print_help()
sys.exit(1)
self.special_options = dict()
broken = False
for opt_str in self.options.options:
parts = opt_str.split('=')
if len(parts) != 2:
print("Bad option: %r" % opt_str, file=sys.stderr)
broken = True
continue
if parts[0] in self.special_options:
print("Repeated option name in: %r" % opt_str, file=sys.stderr)
broken = True
continue
self.special_options[parts[0]] = parts[1]
if broken:
sys.exit(2)
def __init__(self):
self.process_args()
self.knox = KnoX()
self.event_loop = EventLoop()
self.cfg = config.Config(self.knox,
self.options.config, self.event_loop,
extra_options=self.special_options)
self.cfg.start.execute()
if self.options.socket_path:
if os.path.exists(self.options.socket_path):
os.unlink(self.options.socket_path)
self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.socket.bind(self.options.socket_path)
self.socket.listen(0)
self.event_loop.register(
Event.READABLE, self.remote_control_connection, fd=self.socket)
else:
self.socket = None
self.event_loop.register(Event.IDLE, self.check_config, timeout=4)
self.ls = Listener(self.knox, self.event_loop, self.cfg.start.triggers)
def main(self):
while True:
self.ls.listen()
def remote_control_connection(self, event, event_loop):
(conn, _) = self.socket.accept()
print("Somebody connected on #%r" % conn.fileno())
conn = SocketMgr(conn)
self.event_loop.register(
Event.READABLE, self.remote_control_msg,
fd=conn, consultant=config.Consultant(self.cfg))
def remote_control_msg(self, event, event_loop):
data = event.fd.recv(1024)
if not data:
print("CLOSING #%r" % event.fd.fileno(), "==" * 30)
event.fd.close_rd()
#event.fd.close()
self.event_loop.unregister(event.key)
else:
r = event.consultant.incoming(data.decode().splitlines(),
responder=SocketSender(event.fd, event_loop))
def check_config(self, event, event_loop):
try:
if self.cfg.changed():
if self.ls.level > 1:
print("Config file changed, but cannot reload...")
else:
print("Config file changed, reloading...")
new_cfg = self.cfg.reload()
self.ls = Listener(self.knox, self.event_loop, new_cfg.start.triggers)
self.cfg = new_cfg
event_loop.quit()
except Exception as e:
traceback.print_exc(file=sys.stderr)
#print(e, file=sys.stderr)
Director().main()
| StarcoderdataPython |
3386453 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
from ax.metrics.noisy_function import NoisyFunctionMetric
from ax.utils.common.typeutils import checked_cast
from ax.utils.measurement.synthetic_functions import aug_hartmann6, hartmann6
class Hartmann6Metric(NoisyFunctionMetric):
def f(self, x: np.ndarray) -> float:
return checked_cast(float, hartmann6(x))
class AugmentedHartmann6Metric(NoisyFunctionMetric):
def f(self, x: np.ndarray) -> float:
return checked_cast(float, aug_hartmann6(x))
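# Hedged sketch (not part of the original file): the metrics above wrap the
# synthetic functions, which can also be evaluated directly on a 6-d point,
# exactly as Hartmann6Metric.f does.
if __name__ == "__main__":
    print(hartmann6(np.full(6, 0.5)))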
| StarcoderdataPython |
2861 | import unittest
from http import HTTPStatus
from unittest import TestCase
import bcrypt
from flask.ctx import AppContext
from flask.testing import FlaskClient
from app import create_app
from models.theme import Theme, SubTheme
from models.users import Users
class TestSubThemes(TestCase):
"""
    Unit tests for the creation, renaming and deletion of SubThemes
"""
def setUp(self):
"""
Setup a FlaskClient for testing, creates an admin user and creates the authorization header for requests to
the Flask Client and a dummy theme
"""
self.client, self.app_context = self.create_test_client()
self.user = self.create_admin_user()
self.auth_header = self.get_auth_header()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
if not self.theme:
self.theme = Theme("_test_add_Subtheme_")
self.theme.save()
self.theme.commit()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
self.subtheme = self.create_dummy_subtheme()
def create_test_client(self) -> (FlaskClient, AppContext):
"""
Create flask testing client
:return: FlaskClient for tests and AppContext
"""
test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True)
testing_client = test_app.test_client()
test_app_context = test_app.app_context()
test_app_context.push()
return testing_client, test_app_context
def create_dummy_subtheme(self) -> SubTheme:
"""
Create SubTheme for tests
:return: SubTheme for tests
"""
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
if not subtheme:
subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_')
subtheme.save()
subtheme.commit()
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
return subtheme
def create_admin_user(self) -> Users:
"""
Create Admin user for tests
:return: an admin user for tests
"""
password_hash = bcrypt.hashpw("<PASSWORD>".encode("utf-8"), bcrypt.gensalt())
user = Users.find_by_email("<EMAIL>")
if not user:
user = Users("Admin", "<EMAIL>", password_hash.decode("utf8"), True, True)
try:
user.save()
user.commit()
except Exception as e:
pass
return user
def get_auth_header(self) -> {str: str}:
"""
Create an Authorization header for test
:return: An authorization header
"""
response_login = self.client.post('/login', data=dict(email=self.user.email, password="<PASSWORD>", remember=True),
follow_redirects=True)
response_login_json = response_login.get_json()
return {'Authorization': 'Bearer {}'.format(response_login_json["access_token"])}
def test_add_subtheme(self):
"""
        Create a new SubTheme and check the client's response status code for http status 200 (OK)
        Check the JSON response data for the expected message 'sub theme created' and the
        SubTheme name
"""
response = self.client.post('/admin/themes/add_subtheme',
json={"theme_id": self.theme.id, "subtheme": "_TEST_SUB_THEME_2"},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
json_response = response.get_json()
self.assertEqual(json_response["message"], "sub theme created")
self.assertEqual(json_response["theme_id"], self.theme.id)
self.assertEqual(json_response["subtheme"], "_TEST_SUB_THEME_2")
def test_rename_subtheme_theme_id(self):
"""
        Rename a SubTheme by theme_id and check the client's response status code for http status 200 (OK)
        Check the response data for the expected message 'Subtheme renamed' and that the
        Subtheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": self.subtheme.t_id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
def test_rename_subtheme_id(self):
"""
        Rename a SubTheme by id and check the client's response status code for http status 200 (OK)
        Check the response data for the expected message 'Subtheme renamed' and that the
        Subtheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"id": self.subtheme.id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
    def test_rename_non_existent_subtheme(self):
"""
        Rename a SubTheme that does not exist and check the client's response status code for http status 404 (NOT_FOUND)
"""
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": -1,
"current_name": "a3d4f5g6h7j8k0",
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
    def test_delete_non_existent_subtheme(self):
"""
        Delete a SubTheme that does not exist and check the client's response status code for http status 404
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"name": "weA_gfj24fhurtyui", "theme_id": -1},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_delete_subtheme_by_id(self):
"""
        Delete a SubTheme by id and check the client's response status code for http status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme', json={"id": self.subtheme.id},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def test_delete_subtheme_by_theme_id_and_name(self):
"""
        Delete a SubTheme by theme_id and name: check the client's response status code for http status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"theme_id": self.subtheme.t_id, "name": self.subtheme.name},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def tearDown(self):
""" Handle the cleanup after tests"""
self.subtheme = SubTheme.get_by_name("new_name_not_1")
if not self.subtheme:
self.subtheme = SubTheme.get_by_name("_TEST_SUB_THEME_")
if self.subtheme:
self.subtheme.delete()
self.subtheme.commit()
test_sub = SubTheme.get_by_name("_TEST_SUB_THEME_2")
if test_sub:
test_sub.delete()
test_sub.commit()
if self.theme:
self.theme.delete()
self.theme.commit()
self.client.post('/logout', headers=self.auth_header)
if self.user:
self.user.delete()
self.user.commit()
self.app_context.pop()
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
3252087 | <reponame>mikacuy/point2cyl
import numpy as np
import torch.nn as nn
import torch
from torch.autograd import grad
import torch.nn.functional as F
from general import *
def gradient(inputs, outputs):
d_points = torch.ones_like(outputs, requires_grad=False, device=outputs.device)
points_grad = grad(
outputs=outputs,
inputs=inputs,
grad_outputs=d_points,
create_graph=True,
retain_graph=True,
only_inputs=True)[0][:, -2:]
return points_grad
class ImplicitNet(nn.Module):
def __init__(
self,
d_in,
dims,
skip_in=(),
geometric_init=True,
radius_init=1,
beta=100
):
super().__init__()
dims = [d_in] + dims + [1]
self.num_layers = len(dims)
self.skip_in = skip_in
for layer in range(0, self.num_layers - 1):
if layer + 1 in skip_in:
out_dim = dims[layer + 1] - d_in
else:
out_dim = dims[layer + 1]
lin = nn.Linear(dims[layer], out_dim)
# if true preform preform geometric initialization
if geometric_init:
if layer == self.num_layers - 2:
torch.nn.init.normal_(lin.weight, mean=np.sqrt(np.pi) / np.sqrt(dims[layer]), std=0.00001)
torch.nn.init.constant_(lin.bias, -radius_init)
else:
torch.nn.init.constant_(lin.bias, 0.0)
torch.nn.init.normal_(lin.weight, 0.0, np.sqrt(2) / np.sqrt(out_dim))
setattr(self, "lin" + str(layer), lin)
if beta > 0:
self.activation = nn.Softplus(beta=beta)
# vanilla relu
else:
self.activation = nn.ReLU()
def forward(self, input):
x = input
for layer in range(0, self.num_layers - 1):
lin = getattr(self, "lin" + str(layer))
if layer in self.skip_in:
x = torch.cat([x, input], -1) / np.sqrt(2)
x = lin(x)
if layer < self.num_layers - 2:
# if layer < self.num_layers - 3:
x = self.activation(x)
# else:
# x = torch.tanh(x)
            # The alternatives below performed poorly in practice:
# x = (torch.sigmoid(x) - 0.5) * 2.2
# x = torch.clamp(x, -2.0, 2.0)
return x
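# Hedged usage sketch (not part of the original file): query SDF values and
# their input gradients for 2-D points; d_in and the hidden dims are
# illustrative assumptions.
def _implicit_net_demo():
    net = ImplicitNet(d_in=2, dims=[64, 64])
    pts = torch.randn(8, 2, requires_grad=True)
    sdf = net(pts)
    normals = gradient(pts, sdf)  # gradient() keeps the last two input columns
    return sdf.shape, normals.shape  # -> (8, 1), (8, 2)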
### PointNet Encoder
class STN3D(nn.Module):
def __init__(self, input_channels=3):
super(STN3D, self).__init__()
self.input_channels = input_channels
self.mlp1 = nn.Sequential(
nn.Conv1d(input_channels, 64, 1),
nn.BatchNorm1d(64),
nn.ReLU(),
nn.Conv1d(64, 128, 1),
nn.BatchNorm1d(128),
nn.ReLU(),
nn.Conv1d(128, 1024, 1),
nn.BatchNorm1d(1024),
nn.ReLU(),
)
self.mlp2 = nn.Sequential(
nn.Linear(1024, 512),
nn.BatchNorm1d(512),
nn.ReLU(),
nn.Linear(512, 256),
nn.BatchNorm1d(256),
nn.ReLU(),
nn.Linear(256, input_channels * input_channels),
)
def forward(self, x):
batch_size = x.size(0)
num_points = x.size(2)
x = self.mlp1(x)
x = F.max_pool1d(x, num_points).squeeze(2)
x = self.mlp2(x)
I = torch.eye(self.input_channels).view(-1).to(x.device)
x = x + I
x = x.view(-1, self.input_channels, self.input_channels)
return x
class PointNetEncoder(nn.Module):
def __init__(self, embedding_size, input_channels=2, with_normals=False):
super(PointNetEncoder, self).__init__()
if not with_normals:
self.input_channels = input_channels
else:
self.input_channels = input_channels * 2
# self.stn1 = STN3D(input_channels)
# self.stn2 = STN3D(64)
self.mlp1 = nn.Sequential(
nn.Conv1d(self.input_channels, 64, 1),
nn.BatchNorm1d(64),
nn.ReLU(),
nn.Conv1d(64, 64, 1),
nn.BatchNorm1d(64),
nn.ReLU(),
)
self.mlp2 = nn.Sequential(
nn.Conv1d(64, 64, 1),
nn.BatchNorm1d(64),
nn.ReLU(),
nn.Conv1d(64, 128, 1),
nn.BatchNorm1d(128),
nn.ReLU(),
nn.Conv1d(128, 1024, 1),
nn.BatchNorm1d(1024),
nn.ReLU(),
)
self.fc = nn.Linear(1024, embedding_size)
def forward(self, x):
batch_size = x.shape[0]
num_points = x.shape[1]
x = x[:, :, : self.input_channels]
x = x.transpose(2, 1) # transpose to apply 1D convolution
x = self.mlp1(x)
x = self.mlp2(x)
x = F.max_pool1d(x, num_points).squeeze(2) # max pooling
x = self.fc(x)
x = F.normalize(x)
return x
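# Hedged shape check (not part of the original file): encode a small batch of
# 2-D point clouds; the batch and point counts are arbitrary assumptions.
def _pointnet_encoder_demo():
    enc = PointNetEncoder(embedding_size=256).eval()  # eval: fixed BatchNorm stats
    with torch.no_grad():
        z = enc(torch.randn(4, 1024, 2))
    return z.shape  # -> torch.Size([4, 256])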
def get_learning_rate_schedules(schedule_specs):
    schedules = []
    for specs in schedule_specs:  # avoid shadowing the argument name
        if specs["Type"] == "Step":
            schedules.append(
                # StepLearningRateSchedule is assumed to be defined elsewhere
                # in this codebase (DeepSDF-style training utilities).
                StepLearningRateSchedule(
                    specs["Initial"],
                    specs["Interval"],
                    specs["Factor"],
                )
            )
        else:
            raise Exception(
                'no known learning rate schedule of type "{}"'.format(
                    specs["Type"]
                )
            )
    return schedules
def add_latent(points, latent_codes):
batch_size, num_of_points, dim = points.shape
points = points.reshape(batch_size * num_of_points, dim)
latent_codes = latent_codes.unsqueeze(1).repeat(1,num_of_points,1).reshape(batch_size * num_of_points, -1)
out = torch.cat([latent_codes, points], 1)
return out
| StarcoderdataPython |
1625403 | '''
Various types of "assist", i.e. different methods for shared control
between neural control and machine control. Only applies in cases where
some knowledge of the task goals is available.
'''
import numpy as np
from riglib.stereo_opengl import ik
from riglib.bmi import feedback_controllers
import pickle
from utils.angle_utils import *
from utils.constants import *
class Assister(object):
'''
Parent class for various methods of assistive BMI. Children of this class
can compute an "optimal" input to the system, which is mixed in with the input
derived from the subject's neural input. The parent exists primarily for
interface standardization and type-checking.
'''
def calc_assisted_BMI_state(self, current_state, target_state, assist_level, mode=None, **kwargs):
'''
Main assist calculation function
Parameters
----------
current_state: np.ndarray of shape (n_states, 1)
Vector representing the current state of the prosthesis
target_state: np.ndarray of shape (n_states, 1)
Vector representing the target state of the prosthesis, i.e. the optimal state for the prosthesis to be in
assist_level: float
Number indicating the level of the assist. This can in general have arbitrary units but most assisters
will have this be a number in the range (0, 1) where 0 is no assist and 1 is full assist
mode: hashable type, optional, default=None
Indicator of which mode of the assistive controller to use. When applied, this 'mode' is used as a dictionary key and must be hashable
kwargs: additional keyword arguments
These are ignored
        Returns
        -------
        dict
            Assist quantities (for example 'Bu' or 'x_assist') to be mixed
            with the neural input; the exact keys depend on the subclass.
        '''
pass
def __call__(self, *args, **kwargs):
'''
Wrapper for self.calc_assisted_BMI_state
'''
return self.calc_assisted_BMI_state(*args, **kwargs)
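# Hedged sketch (not part of the original module): a minimal concrete
# Assister that nudges the plant toward the target with a fixed linear gain,
# illustrating the dict-based return convention used by the subclasses below.
class _LinearNudgeAssister(Assister):
    def calc_assisted_BMI_state(self, current_state, target_state,
                                assist_level, mode=None, **kwargs):
        Bu = assist_level * (target_state - current_state)
        return dict(Bu=Bu, assist_level=0)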
class FeedbackControllerAssist(Assister):
'''
Assister where the machine control is an LQR controller, possibly with different 'modes' depending on the state of the task
'''
def __init__(self, fb_ctrl, style='additive'):
'''
Parameters
----------
fb_ctrl : feedback_controllers.FeedbackController instance
            The feedback controller used to compute the machine's control input.
Returns
-------
FeedbackControllerAssist instance
'''
self.fb_ctrl = fb_ctrl
self.style = style
assert self.style in ['additive', 'mixing', 'additive_cov']
def calc_assisted_BMI_state(self, current_state, target_state, assist_level, mode=None, **kwargs):
'''
See docs for Assister.calc_assisted_BMI_state
'''
if self.style == 'additive':
Bu = assist_level * self.fb_ctrl(current_state, target_state, mode=mode)
return dict(Bu=Bu, assist_level=0)
elif self.style == 'mixing':
x_assist = self.fb_ctrl.calc_next_state(current_state, target_state, mode=mode)
return dict(x_assist=x_assist, assist_level=assist_level)
elif self.style == 'additive_cov':
F = self.get_F(assist_level)
return dict(F=F, x_target=target_state)
class FeedbackControllerAssist_StateSpecAssistLevels(FeedbackControllerAssist):
'''
Assister where machine controller is LQR controller, but different assist_levels for
different control variables (e.g. X,Y,PSI in ArmAssist vs. Rehand)
'''
def __init__(self, fb_ctrl, style='additive', **kwargs):
super(FeedbackControllerAssist_StateSpecAssistLevels, self).__init__(fb_ctrl, style)
# Currently this assister assumes that plant is IsMore Plant:
self.assist_level_state_ix = dict()
self.assist_level_state_ix[0] = np.array([0, 1, 2, 7, 8, 9]) # ARM ASSIST
self.assist_level_state_ix[1] = np.array([3, 4, 5, 6, 10, 11, 12, 13]) # REHAND
def calc_assisted_BMI_state(self, current_state, target_state, assist_level, mode=None, **kwargs):
if self.style == 'additive':
Bu = self.fb_ctrl(current_state, target_state, mode=mode)
for ia, al in enumerate(assist_level):
Bu[self.assist_level_state_ix[ia]] = al*Bu[self.assist_level_state_ix[ia]]
return dict(Bu=Bu, assist_level=0)
elif self.style == 'mixing':
x_assist = self.fb_ctrl.calc_next_state(current_state, target_state, mode=mode)
return dict(x_assist=x_assist, assist_level=assist_level, assist_level_ix=self.assist_level_state_ix)
class SSMLFCAssister(FeedbackControllerAssist):
'''
An LFC assister where the state-space matrices (A, B) are specified from the Decoder's 'ssm' attribute
'''
def __init__(self, ssm, Q, R, **kwargs):
'''
Constructor for SSMLFCAssister
Parameters
----------
ssm: riglib.bmi.state_space_models.StateSpace instance
The state-space model's A and B matrices represent the system to be controlled
args: positional arguments
These are ignored (none are necessary)
kwargs: keyword arguments
The constructor must be supplied with the 'kin_chain' kwarg, which must have the attribute 'link_lengths'
This is specific to 'KinematicChain' plants.
Returns
-------
SSMLFCAssister instance
'''
if ssm is None:
raise ValueError("SSMLFCAssister requires a state space model!")
A, B, W = ssm.get_ssm_matrices()
self.lqr_controller = feedback_controllers.LQRController(A, B, Q, R)
| StarcoderdataPython |
137138 | #!/usr/bin/env python3
file = open('file_example.txt', 'r')
contents = file.read()
file.close()
print(contents)
with open('file_example.txt', 'r') as file:
contents = file.read()
print(contents)
import os
print(os.getcwd())
os.chdir('/Users/denov/Downloads/python-book/')
print(os.getcwd())
os.chdir('/Users/denov/Downloads/python-book/file_examplesch10')
print(os.getcwd())
with open('file_example.txt', 'r') as example_file:
    first_ten_chars = example_file.read(10)
the_rest = example_file.read()
print("The first 10 characters:", first_ten_chars)
print()
print("The rest of the file:", the_rest)
with open('file_example.txt', 'r') as example_file:
lines = example_file.readlines()
print(lines)
with open('planets.txt', 'r') as planets_file:
planets = planets_file.readlines()
print(planets)
for planet in reversed(planets):
print(planet.strip())
print()
for planet in sorted(planets):
print(planet.strip())
print()
with open('planets.txt', 'r') as data_file:
for line in data_file:
print(len(line))
with open('hopedale.txt', 'r') as hopedale_file:
# Read and skip the description line.
hopedale_file.readline()
# Keep reading and skipping comment lines until we read the first piece
# of data.
data = hopedale_file.readline().strip()
while data.startswith('#'):
data = hopedale_file.readline().strip()
# Now we have the first piece of data. Accumulate the total number of
# pelts.
total_pelts = int(data)
# Read the rest of the data.
for data in hopedale_file:
total_pelts = total_pelts + int(data.strip())
print("Total number of pelts:", total_pelts)
with open('hopedale.txt', 'r') as hopedale_file:
# Read and skip the description line.
hopedale_file.readline()
# Keep reading and skipping comment lines until we read the first piece
# of data.
data = hopedale_file.readline().rstrip()
while data.startswith('#'):
data = hopedale_file.readline().rstrip()
# Now we have the first piece of data. Accumulate the total number of
# pelts.
print(data)
# Read the rest of the data.
for data in hopedale_file:
print(data.rstrip())
| StarcoderdataPython |
1724312 | import inspect
import nest_asyncio
from async_eval import async_eval, asyncio_patch
from . import code
def generate_main_script() -> str:
return "\n".join(
inspect.getsource(m)
for m in (
nest_asyncio,
asyncio_patch,
async_eval,
code,
)
)
__all__ = ["generate_main_script"]
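# Hedged usage sketch (not part of the original module): dump the
# concatenated bootstrap source to a file for injection elsewhere.
if __name__ == "__main__":
    with open("generated_main_script.py", "w") as f:
        f.write(generate_main_script())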
| StarcoderdataPython |
1608769 | #!/usr/bin/python
#csv upload to gsheet
import logging
import json
import gspread
import time
import re
from oauth2client.client import SignedJwtAssertionCredentials
from Naked.toolshed.shell import muterun_rb
logging.basicConfig(filename='/var/log/gspread.log',format='%(asctime)s %(levelname)s:%(message)s',level=logging.INFO)
filename = '<google sheet name>'
#OAuth login
json_key = json.load(open('oauth.json'))
"""
JSON in the form:
{
"private_key_id": "",
"private_key": "",
"client_email": "",
"client_id": "",
"type": "service_account"
}
"""
scope = ['https://spreadsheets.google.com/feeds']
credentials = SignedJwtAssertionCredentials(json_key['client_email'], json_key['private_key'], scope)
gc = gspread.authorize(credentials)
if gc:
logging.info('OAuth succeeded')
else:
logging.warn('Oauth failed')
now = time.strftime("%c")
# get data from ruby script
response = muterun_rb('script')
if response:
logging.info('Data collected')
else:
logging.warn('Could not collect data')
csv = response.stdout
csv = re.sub('/|"|,[0-9][0-9][0-9]Z|Z', '', csv)
csv_lines = csv.split('\n')
#get columns and rows for cell list
column = len(csv_lines[0].split(","))
row = 1
for line in csv_lines:
row += 1
#create cell range
columnletter = chr((column - 1) + ord('A'))
cell_range = 'A1:%s%s' % (columnletter, row)
#open the worksheet and create a new sheet
wks = gc.open(filename)
if wks:
logging.info('%s file opened for writing', filename)
else:
logging.warn('%s file could not be opened', filename)
sheet = wks.add_worksheet(title=now, rows=(row + 2), cols=(column + 2))
cell_list = sheet.range(cell_range)
#create values list
csv = re.split("\n|,", csv)
for item, cell in zip(csv, cell_list):
cell.value = item
# Update in batch
if sheet.update_cells(cell_list):
logging.info('upload to %s sheet in %s file done', now, filename)
else:
logging.warn('upload to %s sheet in %s file failed', now, filename)
| StarcoderdataPython |
1644216 | # Copyright 2021 The Kubeflow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test forecasting components to ensure they compile without error."""
import unittest
from google_cloud_pipeline_components.aiplatform import TimeSeriesDatasetCreateOp
from google_cloud_pipeline_components.experimental.forecasting import ForecastingTrainingWithExperimentsOp
import kfp
from kfp.v2 import compiler
class ForecastingComponetsCompileTest(unittest.TestCase):
def setUp(self):
super(ForecastingComponetsCompileTest, self).setUp()
self._project = 'test_project'
self._display_name = 'test_display_name'
self._package_path = 'pipeline.json'
self._location = 'us-central1'
self._bq_source = 'bq://test_project.test_dataset.training_input'
def test_tabular_data_pipeline_component_ops_compile(self):
@kfp.dsl.pipeline(name='forecasting-training')
def pipeline():
dataset_create_op = TimeSeriesDatasetCreateOp(
display_name=self._display_name,
bq_source=self._bq_source,
project=self._project,
location=self._location,
)
train_op = ForecastingTrainingWithExperimentsOp(
display_name=self._display_name,
time_series_identifier_column='datetime',
time_series_attribute_columns='["location_id", "product_id"]',
available_at_forecast_columns='["datetime", "year", "ml_use"]',
unavailable_at_forecast_columns='["gross_quantity"]',
column_transformations=(
'[{"numeric": {"column_name": "gross_quantity"}}]'
),
dataset=dataset_create_op.outputs['dataset'],
target_column='gross_quantity',
time_column='datetime',
forecast_horizon=7,
data_granularity_unit='day',
data_granularity_count=1,
budget_milli_node_hours=1000,
project=self._project,
location=self._location,
optimization_objective='minimize-rmse',
additional_experiments='["enable_model_compression"]',
)
compiler.Compiler().compile(
pipeline_func=pipeline, package_path=self._package_path
)
| StarcoderdataPython |
106994 | <gh_stars>0
"""Collection of Functions to convert API responses into python objects
and vice versa.
"""
from contextlib import contextmanager
from functools import wraps
from inspect import signature
import json
import re
import pandas as pd
def _dataframe_to_json(payload_df):
payload_df.index.name = 'timestamp'
json_vals = payload_df.tz_convert("UTC").reset_index().to_json(
orient="records", date_format='iso', date_unit='s')
return '{"values":' + json_vals + '}'
def observation_df_to_json_payload(
observation_df, default_quality_flag=None):
"""Extracts a variable from an observation DataFrame and formats it
into a JSON payload for posting to the Solar Forecast Arbiter API.
Parameters
----------
observation_df : DataFrame
Dataframe of observation data. Must contain a tz-aware DateTimeIndex
and a 'value' column. May contain a column of data quality
flags labeled 'quality_flag'.
default_quality_flag : int
If 'quality_flag' is not a column, the quality flag for each row is
set to this value.
Returns
-------
string
SolarForecastArbiter API JSON payload for posting to the observation
endpoint. See Notes section for example.
Notes
-----
Function returns an object in the following format:
.. code::
{
'values': [
{
“timestamp”: “2018-11-22T12:01:48Z”, # ISO 8601 datetime in UTC
“value”: 10.23, # floating point value of observation
“quality_flag”: 0
},...
]
}
Raises
------
KeyError
When 'value' is missing from the columns or 'quality_flag'
is missing and default_quality_flag is None
"""
if default_quality_flag is None:
payload_df = observation_df[['value', 'quality_flag']]
else:
        payload_df = observation_df[['value']].copy()  # copy to avoid mutating the caller's frame
        payload_df['quality_flag'] = int(default_quality_flag)
return _dataframe_to_json(payload_df)
def forecast_object_to_json(forecast_series):
"""
Converts a forecast Series to JSON to post to the
SolarForecastArbiter API.
Parameters
----------
forecast_series : pandas.Series
The series that contains the forecast values with a
datetime index.
Returns
-------
string
The JSON encoded forecast values dict
"""
payload_df = forecast_series.to_frame('value')
return _dataframe_to_json(payload_df)
def _json_to_dataframe(json_payload):
# in the future, might worry about reading the response in chunks
# to stream the data and avoid having it all in memory at once,
# but 30 days of 1 minute data is probably ~4 MB of text. A better
# approach would probably be to switch to a binary format.
vals = json_payload['values']
if len(vals) == 0:
df = pd.DataFrame([], columns=['value', 'quality_flag'],
index=pd.DatetimeIndex([], name='timestamp',
tz='UTC'))
else:
df = pd.DataFrame.from_dict(json_payload['values'])
df.index = pd.to_datetime(df['timestamp'], utc=True,
infer_datetime_format=True)
return df
def json_payload_to_observation_df(json_payload):
"""
Convert the JSON payload dict as returned by the SolarForecastArbiter API
observations/values endpoint into a DataFrame
Parameters
----------
json_payload : dict
Dictionary as returned by the API with a "values" key which is a list
of dicts like {'timestamp': <timestamp>, 'value': <float>,
'quality_flag': <int>}
Returns
-------
pandas.DataFrame
With a tz-aware DatetimeIndex and ['value', 'quality_flag'] columns
and dtypes {'value': float, 'quality_flag': int}
"""
df = _json_to_dataframe(json_payload)
return df[['value', 'quality_flag']].astype(
{'value': float, 'quality_flag': int})
def json_payload_to_forecast_series(json_payload):
"""
Convert the JSON payload dict as returned by the SolarForecastArbiter API
    forecasts/values endpoint into a Series
Parameters
----------
json_payload : dict
Dictionary as returned by the API with a "values" key which is a list
of dicts like {'timestamp': <timestamp>, 'value': <float>}
Returns
-------
pandas.Series
With a tz-aware DatetimeIndex and float dtype
"""
df = _json_to_dataframe(json_payload)
return df['value'].astype(float)
def adjust_start_end_for_interval_label(interval_label, start, end,
limit_instant=False):
"""
Adjusts the start and end times depending on the interval_label.
Parameters
----------
interval_label : str or None
The interval label for the the object the data represents
start : pandas.Timestamp
Start time to restrict data to
end : pandas.Timestamp
End time to restrict data to
limit_instant : boolean
If true, an interval label of 'instant' will remove a nanosecond
from end to ensure forecasts do not overlap. If False, instant
returns start, end unmodified
Returns
-------
start, end
Return the adjusted start and end
Raises
------
ValueError
If an invalid interval_label is given
Examples
--------
.. testsetup::
from solarforecastarbiter.io.utils import *
Define input start/end:
>>> start = pd.Timestamp('20190101 1200Z')
>>> end = pd.Timestamp('20190101 1300Z')
Beginning:
>>> adjust_start_end_for_interval_label('beginning', start, end)
(Timestamp('2019-01-01 12:00:00+0000', tz='UTC'), Timestamp('2019-01-01 12:59:59.999999999+0000', tz='UTC'))
Ending:
>>> adjust_start_end_for_interval_label('ending', start, end)
(Timestamp('2019-01-01 12:00:00.000000001+0000', tz='UTC'), Timestamp('2019-01-01 13:00:00+0000', tz='UTC'))
Instantaneous:
>>> adjust_start_end_for_interval_label('instant', start, end)
(Timestamp('2019-01-01 12:00:00+0000', tz='UTC'), Timestamp('2019-01-01 13:00:00+0000', tz='UTC'))
>>> adjust_start_end_for_interval_label('instant', start, end,
... limit_instant=True)
(Timestamp('2019-01-01 12:00:00+0000', tz='UTC'), Timestamp('2019-01-01 12:59:59.999999999+0000', tz='UTC'))
""" # NOQA
if (
interval_label is not None and
interval_label not in ('instant', 'beginning', 'ending')
):
raise ValueError('Invalid interval_label')
if (
interval_label == 'beginning' or
(interval_label == 'instant' and limit_instant)
):
end -= pd.Timedelta(1, unit='nano')
elif interval_label == 'ending':
start += pd.Timedelta(1, unit='nano')
return start, end
def adjust_timeseries_for_interval_label(data, interval_label, start, end):
"""
Adjusts the index of the data depending on the interval_label, start,
and end. Will always return the data located between start, end.
Parameters
----------
data : pandas.Series or pandas.DataFrame
The data with a localized DatetimeIndex
interval_label : str or None
The interval label for the the object the data represents
start : pandas.Timestamp
Start time to restrict data to
end : pandas.Timestamp
End time to restrict data to
Returns
-------
pandas.Series or pandas.DataFrame
Return data between start and end, in/excluding the endpoints
depending on interval_label
Raises
------
ValueError
If an invalid interval_label is given or data is not localized.
"""
start, end = adjust_start_end_for_interval_label(interval_label, start,
end)
data = data.sort_index(axis=0)
# pandas >= 0.25.1 requires start, end to have same tzinfo.
# unexpected behavior when data is not localized, so prevent that
if data.empty:
return data
if data.index.tzinfo is None:
raise ValueError('data must be localized')
start = start.tz_convert(data.index.tzinfo)
end = end.tz_convert(data.index.tzinfo)
return data.loc[start:end]
def serialize_timeseries(ser):
"""
Serialize a timeseries to JSON. Note that the microseconds
portion of the index will be discarded.
Parameters
----------
ser : {pandas.Series, pandas.DataFrame}
Must have a tz-localized datetime index
Returns
-------
str
The JSON serialized data along with a schema
Raises
------
TypeError
If the input is invalid
"""
if not (
isinstance(ser, (pd.Series, pd.DataFrame)) and
isinstance(ser.index, pd.DatetimeIndex) and
ser.index.tzinfo is not None
):
raise TypeError(
'Only pandas Series or DataFrame with a localized DatetimeIndex '
'is supported')
v = ser.copy()
v.index.name = 'timestamp'
if isinstance(v, pd.Series):
jsonvals = v.tz_convert('UTC').reset_index(name='value').to_json(
orient='records', date_format='iso', date_unit='s')
column = 'value'
dtype = str(v.dtype)
objtype = 'Series'
else:
v.index.name = 'timestamp'
jsonvals = v.tz_convert('UTC').reset_index().to_json(
orient='records', date_format='iso', date_unit='s')
column = v.columns.astype(str).to_list()
dtype = v.dtypes.astype(str).to_list()
objtype = 'DataFrame'
schema = {
'version': 1,
'orient': 'records',
'timezone': 'UTC',
'column': column,
'index': 'timestamp',
'dtype': dtype,
'objtype': objtype
}
out = '{"schema":' + json.dumps(schema) + ',"data":' + jsonvals + '}'
return out
def deserialize_timeseries(data):
"""
Deserializes a timeseries from JSON
Parameters
----------
data : str
JSON string to deserialize. Must have schema and data keys.
Returns
-------
pandas.Series or pandas.DataFrame
Deserialized timeseries
Raises
------
ValueError
If "schema" or "data" keys are not found in the JSON string
KeyError
If the schema object does not contain the proper keys
"""
schema_str = re.search('(?<="schema":)\\s*{[^{}]*}\\s*(?=(,|}))', data)
if schema_str is None:
raise ValueError('Could not locate schema in data string')
schema = json.loads(schema_str.group(0))
if schema['version'] == 0:
        # compatibility with data serialized and stored before the
        # objtype key was added to the schema and DataFrames were supported in v1
objtype_str = 'Series'
else:
objtype_str = schema['objtype']
# find between "data": and , or }, with only one set of []
data_str = re.search('(?<="data":)\\s*\\[[^\\[\\]]*\\](?=\\s*(,|}))', data)
if data_str is None:
raise ValueError('Could not locate data key in data string')
df = pd.read_json(data_str.group(0), orient=schema['orient'],
convert_dates=True)
if objtype_str == 'Series':
if df.empty:
return pd.Series([], name=schema['column'], index=pd.DatetimeIndex(
[], tz=schema['timezone'], name='timestamp'),
dtype=schema.get('dtype', float))
out = df.set_index(schema['index'])[schema['column']].astype(
schema['dtype'])
elif objtype_str == 'DataFrame':
if df.empty:
try:
dtype = schema['dtype'][0]
except IndexError:
dtype = float
return pd.DataFrame(
[], columns=schema['column'], index=pd.DatetimeIndex(
[], tz=schema['timezone'], name='timestamp'), dtype=dtype)
out = df.set_index(schema['index'])
# pd.read_json will set all column names to strings, so
# columns originally specified with float names need to be
# mapped back into the right name dtype. this code is not needed
# if columns are always strings.
# str_col_map = {str(col): col for col in schema['column']}
# out = out.rename(columns=str_col_map)
out = out.astype(dict(zip(schema['column'], schema['dtype'])))
if out.index.tzinfo is None:
out = out.tz_localize(schema['timezone'])
return out
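# Round-trip sketch (illustrative, not part of the original module):
#
#   idx = pd.date_range('20190101', periods=3, freq='1h', tz='UTC')
#   ser = pd.Series([1.0, 2.0, 3.0], index=idx)
#   assert deserialize_timeseries(serialize_timeseries(ser)).equals(ser)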
class HiddenToken:
"""
Obscure the representation of the input string `token` to avoid saving
or displaying access tokens in logs.
"""
def __init__(self, token):
self.token = str(token) # make sure it isn't a localproxy
def __repr__(self):
return '****ACCESS*TOKEN****'
def ensure_timestamps(*time_args):
"""
Decorator that converts the specified time arguments of the wrapped
function to pandas.Timestamp objects
Parameters
----------
strings
Function arguments to convert to pandas.Timestamp before
executing function
Raises
------
ValueError
If any of time_args cannot be converted to pandas.Timestamp
Examples
--------
.. testsetup::
import datetime as dt
from solarforecastarbiter.io.utils import *
>>> @ensure_timestamps('start', 'end')
... def get_values(start, end, other_arg):
... # do stuff with start, end assumed to be pandas.Timestamps
... if isinstance(start, pd.Timestamp):
... return True
>>> get_values('2019-01-01T00:00Z', dt.datetime(2019, 1, 2, 12), 'other')
True
"""
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
sig = signature(f)
inds = {k: None for k in time_args}
for i, k in enumerate(sig.parameters.keys()):
if k in inds:
inds[k] = i
nargs = list(args)
for k, ind in inds.items():
if k in kwargs:
kwargs[k] = pd.Timestamp(kwargs[k])
elif ind is not None:
nargs[ind] = pd.Timestamp(args[ind])
return f(*nargs, **kwargs)
return wrapper
return decorator
def load_report_values(raw_report, values):
"""
Load the processed forecast/observation data into the
datamodel.ProcessedForecastObservation objects of the raw_report.
Parameters
----------
raw_report : datamodel.RawReport
The raw report with processed_forecasts_observations to
be replaced
values : list
The report values dict as returned by the API.
Returns
-------
tuple
Of datamodel.ProcessedForecastObservation with values loaded into
`forecast_values` and `observation_values`
"""
val_dict = {v['id']: v['processed_values'] for v in values}
out = []
for fxobs in raw_report.processed_forecasts_observations:
fx_vals = val_dict.get(fxobs.forecast_values, None)
if fx_vals is not None:
fx_vals = deserialize_timeseries(fx_vals)
obs_vals = val_dict.get(fxobs.observation_values, None)
if obs_vals is not None:
obs_vals = deserialize_timeseries(obs_vals)
ref_fx_vals = val_dict.get(fxobs.reference_forecast_values)
if ref_fx_vals is not None:
ref_fx_vals = deserialize_timeseries(ref_fx_vals)
new_fxobs = fxobs.replace(forecast_values=fx_vals,
observation_values=obs_vals,
reference_forecast_values=ref_fx_vals)
out.append(new_fxobs)
return tuple(out)
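# Hypothetical usage (the session call is an assumption, not an API defined in
# this module):
#
#   values = session.get_raw_report_values(report_id)
#   processed = load_report_values(raw_report, values)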
@contextmanager
def mock_raw_report_endpoints(base_url):
"""
Mock API report endpoints under base_url to enable testing
of the report generation task run via the dashboard. Requires
requests_mock>=1.8.0. Catches all report endpoints, but if
report generation requires POSTing to other endpoints in the
future, they will need to be added here.
"""
import requests_mock
value_dict = {} # for raw processed values
def post_value_callback(request, context):
context.status_code = 200
rjson = request.json()
id_ = rjson['object_id']
value_dict[id_] = rjson['processed_values']
return id_
raw_dict = {} # for the raw reports
def post_raw_callback(request, context):
context.status_code = 200
raw_dict.update(request.json())
report_dict = {} # for new and full reports
def post_report_callback(request, context):
context.status_code = 200
report_dict.update(request.json())
return 'no_id'
def get_report_callback(request, context):
context.status_code = 200
out = report_dict.copy()
if raw_dict:
out['raw_report'] = raw_dict
out['values'] = [
{'id': k, 'processed_values': v}
for k, v in value_dict.items()]
out['status'] = 'complete'
return out
with requests_mock.Mocker(real_http=True) as m:
value_re = re.compile(f'{base_url}/reports/.*/values')
raw_re = re.compile(f'{base_url}/reports/.*/raw')
m.register_uri('GET', re.compile(
f'{base_url}/reports/.*'),
json=get_report_callback)
m.register_uri('POST', re.compile(
f'{base_url}/reports/*'),
text=post_report_callback)
m.register_uri('POST', re.compile(
f'{base_url}/reports/.*/status/.*'))
m.register_uri('POST', value_re, text=post_value_callback)
m.register_uri('POST', raw_re, json=post_raw_callback)
yield
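# Illustrative use of the mocker above (URL and task name are assumptions):
#
#   with mock_raw_report_endpoints('https://api.example.com'):
#       run_report_task()  # hypothetical task that POSTs/GETs report endpoints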
| StarcoderdataPython |
38127 | <filename>idact/detail/deployment_sync/nodes/get_nodes_deployment_definition.py
from idact.detail.deployment_sync.deployment_definition import \
DeploymentDefinition
from idact.detail.deployment_sync.nodes.get_expiration_date_from_nodes \
import get_expiration_date_from_nodes
from idact.detail.nodes.nodes_impl import NodesImpl
# pylint: disable=bad-continuation
def get_nodes_deployment_definition(
deployment: NodesImpl) -> DeploymentDefinition: # noqa
"""Obtains a definition from an allocation deployment.
Expiration date is the minimum node allocation end date,
or one day from now if the nodes were not yet allocated.
:param deployment: Deployment to obtain the definition of.
"""
expiration_date = get_expiration_date_from_nodes(nodes=deployment)
return DeploymentDefinition(value=deployment.serialize(),
expiration_date=expiration_date)
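# Illustrative (not part of the original module): given an allocated
# NodesImpl instance `nodes`:
#
#   definition = get_nodes_deployment_definition(nodes)
#   print(definition.expiration_date)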
| StarcoderdataPython |
1722354 | # This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class SegmentAnimalRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentAnimalResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentAnimalResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentAnimalResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentAnimalAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
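# Round-trip sketch for these generated models (illustrative; the URL is
# made up):
#
#   req = SegmentAnimalRequest(image_url='https://example.com/cat.jpg')
#   req.validate()
#   assert SegmentAnimalRequest().from_map(req.to_map()).image_url == req.image_url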
class SegmentHDBodyRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentHDBodyResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentHDBodyResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentHDBodyResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentHDBodyAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentSkyRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentSkyResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentSkyResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentSkyResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentSkyAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class GetAsyncJobResultRequest(TeaModel):
def __init__(self, job_id=None):
self.job_id = job_id
def validate(self):
self.validate_required(self.job_id, 'job_id')
def to_map(self):
result = {}
result['JobId'] = self.job_id
return result
def from_map(self, map={}):
self.job_id = map.get('JobId')
return self
class GetAsyncJobResultResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = GetAsyncJobResultResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class GetAsyncJobResultResponseData(TeaModel):
def __init__(self, error_code=None, error_message=None, job_id=None, result=None, status=None):
self.error_code = error_code
self.error_message = error_message
self.job_id = job_id
self.result = result
self.status = status
def validate(self):
self.validate_required(self.error_code, 'error_code')
self.validate_required(self.error_message, 'error_message')
self.validate_required(self.job_id, 'job_id')
self.validate_required(self.result, 'result')
self.validate_required(self.status, 'status')
def to_map(self):
result = {}
result['ErrorCode'] = self.error_code
result['ErrorMessage'] = self.error_message
result['JobId'] = self.job_id
result['Result'] = self.result
result['Status'] = self.status
return result
def from_map(self, map={}):
self.error_code = map.get('ErrorCode')
self.error_message = map.get('ErrorMessage')
self.job_id = map.get('JobId')
self.result = map.get('Result')
self.status = map.get('Status')
return self
class SegmentFurnitureRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentFurnitureResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentFurnitureResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentFurnitureResponseDataElements(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentFurnitureResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = SegmentFurnitureResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class SegmentFurnitureAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class RefineMaskRequest(TeaModel):
def __init__(self, mask_image_url=None, image_url=None):
self.mask_image_url = mask_image_url
self.image_url = image_url
def validate(self):
self.validate_required(self.mask_image_url, 'mask_image_url')
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['MaskImageURL'] = self.mask_image_url
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.mask_image_url = map.get('MaskImageURL')
self.image_url = map.get('ImageURL')
return self
class RefineMaskResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = RefineMaskResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class RefineMaskResponseDataElements(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class RefineMaskResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = RefineMaskResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class RefineMaskAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None, mask_image_url=None):
self.image_urlobject = image_urlobject
self.mask_image_url = mask_image_url
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
self.validate_required(self.mask_image_url, 'mask_image_url')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
result['MaskImageURL'] = self.mask_image_url
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
self.mask_image_url = map.get('MaskImageURL')
return self
class ParseFaceRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class ParseFaceResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = ParseFaceResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class ParseFaceResponseDataElements(TeaModel):
def __init__(self, name=None, image_url=None):
self.name = name
self.image_url = image_url
def validate(self):
self.validate_required(self.name, 'name')
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['Name'] = self.name
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.name = map.get('Name')
self.image_url = map.get('ImageURL')
return self
class ParseFaceResponseData(TeaModel):
    def __init__(self, origin_image_url=None, elements=None):
        self.origin_image_url = origin_image_url
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.origin_image_url, 'origin_image_url')
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['OriginImageURL'] = self.origin_image_url
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.origin_image_url = map.get('OriginImageURL')
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = ParseFaceResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class ParseFaceAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentVehicleRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentVehicleResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentVehicleResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentVehicleResponseDataElements(TeaModel):
def __init__(self, origin_image_url=None, image_url=None):
self.origin_image_url = origin_image_url
self.image_url = image_url
def validate(self):
self.validate_required(self.origin_image_url, 'origin_image_url')
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['OriginImageURL'] = self.origin_image_url
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.origin_image_url = map.get('OriginImageURL')
self.image_url = map.get('ImageURL')
return self
class SegmentVehicleResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = SegmentVehicleResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class SegmentVehicleAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentHairRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentHairResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentHairResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentHairResponseDataElements(TeaModel):
def __init__(self, image_url=None, x=None, y=None, width=None, height=None):
self.image_url = image_url
self.x = x
self.y = y
self.width = width
self.height = height
def validate(self):
self.validate_required(self.image_url, 'image_url')
self.validate_required(self.x, 'x')
self.validate_required(self.y, 'y')
self.validate_required(self.width, 'width')
self.validate_required(self.height, 'height')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
result['X'] = self.x
result['Y'] = self.y
result['Width'] = self.width
result['Height'] = self.height
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
self.x = map.get('X')
self.y = map.get('Y')
self.width = map.get('Width')
self.height = map.get('Height')
return self
class SegmentHairResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = SegmentHairResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class SegmentHairAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentFaceRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentFaceResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentFaceResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentFaceResponseDataElements(TeaModel):
def __init__(self, image_url=None, x=None, y=None, width=None, height=None):
self.image_url = image_url
self.x = x
self.y = y
self.width = width
self.height = height
def validate(self):
self.validate_required(self.image_url, 'image_url')
self.validate_required(self.x, 'x')
self.validate_required(self.y, 'y')
self.validate_required(self.width, 'width')
self.validate_required(self.height, 'height')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
result['X'] = self.x
result['Y'] = self.y
result['Width'] = self.width
result['Height'] = self.height
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
self.x = map.get('X')
self.y = map.get('Y')
self.width = map.get('Width')
self.height = map.get('Height')
return self
class SegmentFaceResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = SegmentFaceResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class SegmentFaceAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentHeadRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentHeadResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentHeadResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentHeadResponseDataElements(TeaModel):
def __init__(self, image_url=None, x=None, y=None, width=None, height=None):
self.image_url = image_url
self.x = x
self.y = y
self.width = width
self.height = height
def validate(self):
self.validate_required(self.image_url, 'image_url')
self.validate_required(self.x, 'x')
self.validate_required(self.y, 'y')
self.validate_required(self.width, 'width')
self.validate_required(self.height, 'height')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
result['X'] = self.x
result['Y'] = self.y
result['Width'] = self.width
result['Height'] = self.height
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
self.x = map.get('X')
self.y = map.get('Y')
self.width = map.get('Width')
self.height = map.get('Height')
return self
class SegmentHeadResponseData(TeaModel):
    def __init__(self, elements=None):
        self.elements = elements if elements is not None else []
    def validate(self):
        self.validate_required(self.elements, 'elements')
        if self.elements:
            for k in self.elements:
                if k:
                    k.validate()
def to_map(self):
result = {}
result['Elements'] = []
if self.elements is not None:
for k in self.elements:
result['Elements'].append(k.to_map() if k else None)
else:
result['Elements'] = None
return result
def from_map(self, map={}):
self.elements = []
if map.get('Elements') is not None:
for k in map.get('Elements'):
temp_model = SegmentHeadResponseDataElements()
temp_model = temp_model.from_map(k)
self.elements.append(temp_model)
else:
self.elements = None
return self
class SegmentHeadAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentCommodityRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentCommodityResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentCommodityResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentCommodityResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentCommodityAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
class SegmentBodyRequest(TeaModel):
    # 'async' is a reserved keyword in Python 3, so the attribute is named 'async_'
    def __init__(self, image_url=None, async_=None):
        self.image_url = image_url
        self.async_ = async_
    def validate(self):
        self.validate_required(self.image_url, 'image_url')
    def to_map(self):
        result = {}
        result['ImageURL'] = self.image_url
        result['Async'] = self.async_
        return result
    def from_map(self, map={}):
        self.image_url = map.get('ImageURL')
        self.async_ = map.get('Async')
return self
class SegmentBodyResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentBodyResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentBodyResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentBodyAdvanceRequest(TeaModel):
    # 'async' is a reserved keyword in Python 3, so the attribute is named 'async_'
    def __init__(self, image_urlobject=None, async_=None):
        self.image_urlobject = image_urlobject
        self.async_ = async_
    def validate(self):
        self.validate_required(self.image_urlobject, 'image_urlobject')
    def to_map(self):
        result = {}
        result['ImageURLObject'] = self.image_urlobject
        result['Async'] = self.async_
        return result
    def from_map(self, map={}):
        self.image_urlobject = map.get('ImageURLObject')
        self.async_ = map.get('Async')
return self
class SegmentCommonImageRequest(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentCommonImageResponse(TeaModel):
def __init__(self, request_id=None, data=None):
self.request_id = request_id
self.data = data
def validate(self):
self.validate_required(self.request_id, 'request_id')
self.validate_required(self.data, 'data')
if self.data:
self.data.validate()
def to_map(self):
result = {}
result['RequestId'] = self.request_id
if self.data is not None:
result['Data'] = self.data.to_map()
else:
result['Data'] = None
return result
def from_map(self, map={}):
self.request_id = map.get('RequestId')
if map.get('Data') is not None:
temp_model = SegmentCommonImageResponseData()
self.data = temp_model.from_map(map['Data'])
else:
self.data = None
return self
class SegmentCommonImageResponseData(TeaModel):
def __init__(self, image_url=None):
self.image_url = image_url
def validate(self):
self.validate_required(self.image_url, 'image_url')
def to_map(self):
result = {}
result['ImageURL'] = self.image_url
return result
def from_map(self, map={}):
self.image_url = map.get('ImageURL')
return self
class SegmentCommonImageAdvanceRequest(TeaModel):
def __init__(self, image_urlobject=None):
self.image_urlobject = image_urlobject
def validate(self):
self.validate_required(self.image_urlobject, 'image_urlobject')
def to_map(self):
result = {}
result['ImageURLObject'] = self.image_urlobject
return result
def from_map(self, map={}):
self.image_urlobject = map.get('ImageURLObject')
return self
| StarcoderdataPython |
191035 | import tensorflow as tf
import numpy as np
from utils import get_shape
try:
from tensorflow.contrib.rnn import LSTMStateTuple
except ImportError:
LSTMStateTuple = tf.nn.rnn_cell.LSTMStateTuple
def bidirectional_rnn(cell_fw, cell_bw, inputs, input_lengths,
initial_state_fw=None, initial_state_bw=None,
scope=None):
with tf.variable_scope(scope or 'bi_rnn') as scope:
(fw_outputs, bw_outputs), (fw_state, bw_state) = tf.nn.bidirectional_dynamic_rnn(
cell_fw=cell_fw,
cell_bw=cell_bw,
inputs=inputs,
sequence_length=input_lengths,
initial_state_fw=initial_state_fw,
initial_state_bw=initial_state_bw,
dtype=tf.float32,
scope=scope
)
outputs = tf.concat((fw_outputs, bw_outputs), axis=2)
def concatenate_state(fw_state, bw_state):
if isinstance(fw_state, LSTMStateTuple):
state_c = tf.concat(
(fw_state.c, bw_state.c), 1, name='bidirectional_concat_c')
state_h = tf.concat(
(fw_state.h, bw_state.h), 1, name='bidirectional_concat_h')
state = LSTMStateTuple(c=state_c, h=state_h)
return state
elif isinstance(fw_state, tf.Tensor):
state = tf.concat((fw_state, bw_state), 1,
name='bidirectional_concat')
return state
elif (isinstance(fw_state, tuple) and
isinstance(bw_state, tuple) and
len(fw_state) == len(bw_state)):
# multilayer
state = tuple(concatenate_state(fw, bw)
for fw, bw in zip(fw_state, bw_state))
return state
else:
raise ValueError(
'unknown state type: {}'.format((fw_state, bw_state)))
state = concatenate_state(fw_state, bw_state)
return outputs, state
def masking(scores, sequence_lengths, score_mask_value=tf.constant(-np.inf)):
score_mask = tf.sequence_mask(sequence_lengths, maxlen=tf.shape(scores)[1])
score_mask_values = score_mask_value * tf.ones_like(scores)
return tf.where(score_mask, scores, score_mask_values)
def attention(inputs, att_dim, sequence_lengths, scope=None):
assert len(inputs.get_shape()) == 3 and inputs.get_shape()[-1].value is not None
with tf.variable_scope(scope or 'attention'):
word_att_W = tf.get_variable(name='att_W', shape=[att_dim, 1])
projection = tf.layers.dense(inputs, att_dim, tf.nn.tanh, name='projection')
alpha = tf.matmul(tf.reshape(projection, shape=[-1, att_dim]), word_att_W)
alpha = tf.reshape(alpha, shape=[-1, get_shape(inputs)[1]])
alpha = masking(alpha, sequence_lengths, tf.constant(-1e15, dtype=tf.float32))
alpha = tf.nn.softmax(alpha)
outputs = tf.reduce_sum(inputs * tf.expand_dims(alpha, 2), axis=1)
return outputs, alpha
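# Minimal graph-mode sketch of the helpers above (illustrative; shapes and
# cell sizes are assumptions):
#
#   inputs = tf.placeholder(tf.float32, [None, 20, 8])
#   lengths = tf.placeholder(tf.int32, [None])
#   cell_fw = tf.nn.rnn_cell.GRUCell(16)
#   cell_bw = tf.nn.rnn_cell.GRUCell(16)
#   rnn_out, _ = bidirectional_rnn(cell_fw, cell_bw, inputs, lengths)
#   context, alpha = attention(rnn_out, att_dim=32, sequence_lengths=lengths)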
| StarcoderdataPython |
1676824 | import collections
import pytest
import itertools
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression
from sklearn.utils import estimator_checks
from skorecard.bucketers import (
DecisionTreeBucketer,
OptimalBucketer,
)
from skorecard.pipeline import (
BucketingProcess,
)
from category_encoders.woe import WOEEncoder
from tests.conftest import CLASSIFIERS, TRANSFORMERS
# checks lists shamelessly copied from
# https://github.com/koaning/human-learn/blob/master/tests/conftest.py
classifier_checks = (
estimator_checks.check_classifier_data_not_an_array,
estimator_checks.check_classifiers_one_label,
estimator_checks.check_classifiers_classes,
estimator_checks.check_estimators_partial_fit_n_features,
estimator_checks.check_classifiers_train,
estimator_checks.check_supervised_y_2d,
estimator_checks.check_supervised_y_no_nan,
estimator_checks.check_estimators_unfitted,
estimator_checks.check_non_transformer_estimators_n_iter,
estimator_checks.check_decision_proba_consistency,
)
transformer_checks = (
estimator_checks.check_transformer_data_not_an_array,
estimator_checks.check_transformer_general,
estimator_checks.check_transformers_unfitted,
)
general_checks = (
estimator_checks.check_fit2d_predict1d,
estimator_checks.check_methods_subset_invariance,
estimator_checks.check_fit2d_1sample,
estimator_checks.check_fit2d_1feature,
estimator_checks.check_fit1d,
estimator_checks.check_get_params_invariance,
estimator_checks.check_set_params,
estimator_checks.check_dict_unchanged,
estimator_checks.check_dont_overwrite_parameters,
)
nonmeta_checks = (
estimator_checks.check_estimators_pickle,
estimator_checks.check_estimators_dtypes,
estimator_checks.check_fit_score_takes_y,
estimator_checks.check_dtype_object,
estimator_checks.check_sample_weights_pandas_series,
estimator_checks.check_sample_weights_list,
estimator_checks.check_estimators_fit_returns_self,
estimator_checks.check_complex_data,
estimator_checks.check_estimators_empty_data_messages,
estimator_checks.check_pipeline_consistency,
estimator_checks.check_estimators_nan_inf,
estimator_checks.check_estimators_overwrite_params,
estimator_checks.check_estimator_sparse_data,
)
def select_tests(include, exclude=[]):
"""Return an iterable of include with all tests whose name is not in exclude.
Credits: https://github.com/koaning/human-learn/blob/master/tests/conftest.py
"""
for test in include:
if test.__name__ not in exclude:
yield test
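# Example (illustrative): drop one check from the classifier suite:
#
#   checks = select_tests(classifier_checks,
#                         exclude=['check_classifiers_one_label'])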
def flatten(nested_iterable):
"""
Returns an iterator of flattened values from an arbitrarily nested iterable.
Usage:
```python
from hulearn.common import flatten
res1 = list(flatten([['test1', 'test2'], ['a', 'b', ['c', 'd']]]))
res2 = list(flatten(['test1', ['test2']]))
assert res1 == ['test1', 'test2', 'a', 'b', 'c', 'd']
assert res2 == ['test1', 'test2']
```
Credits: https://github.com/koaning/human-learn/blob/master/hulearn/common.py
""" # noqa
for el in nested_iterable:
if isinstance(el, collections.abc.Iterable) and not isinstance(el, (str, bytes)):
yield from flatten(el)
else:
yield el
@pytest.mark.parametrize(
"transformer,test_fn",
list(
itertools.product(
TRANSFORMERS,
select_tests(
include=flatten([general_checks, transformer_checks, nonmeta_checks]),
exclude=[
"check_fit2d_1sample",
"check_methods_subset_invariance",
"check_estimators_nan_inf",
"check_estimators_empty_data_messages",
"check_transformer_data_not_an_array",
"check_dtype_object",
"check_complex_data",
"check_fit1d",
"check_transformers_unfitted",
],
),
)
),
)
def test_transformer_checks(transformer, test_fn):
"""
    Runs a scikit-learn check on a skorecard transformer.
"""
t = transformer()
test_fn(t.__class__.__name__, t)
@pytest.mark.parametrize(
"classifier,test_fn",
list(
itertools.product(
CLASSIFIERS,
select_tests(
include=flatten([general_checks, classifier_checks, nonmeta_checks]),
exclude=[
"check_methods_subset_invariance",
"check_fit2d_1sample",
"check_fit2d_1feature",
"check_classifier_data_not_an_array",
"check_classifiers_one_label",
"check_classifiers_classes",
"check_classifiers_train",
"check_supervised_y_2d",
"check_estimators_pickle",
"check_pipeline_consistency",
"check_fit2d_predict1d",
"check_fit1d",
"check_dtype_object",
"check_complex_data",
"check_estimators_empty_data_messages",
"check_estimators_nan_inf",
"check_estimator_sparse_data",
"check_supervised_y_no_nan",
"check_estimators_partial_fit_n_features",
],
),
)
),
)
def test_classifier_checks(classifier, test_fn):
"""
    Runs a scikit-learn check on a skorecard classifier.
"""
clf = classifier()
test_fn(clf.__class__.__name__, clf)
def test_cross_val(df):
"""
Test using CV.
When defining specials combined with using CV, we would get a
ValueError: Specials should be defined on the BucketingProcess level,
remove the specials from DecisionTreeBucketer(specials={'EDUCATION': {'Some specials': [1, 2]}})
This unit test ensures specials with CV keep working.
"""
y = df["default"].values
X = df.drop(columns=["default", "pet_ownership"])
specials = {"EDUCATION": {"Some specials": [1, 2]}}
bucketing_process = BucketingProcess(
prebucketing_pipeline=make_pipeline(
DecisionTreeBucketer(max_n_bins=100, min_bin_size=0.05),
),
bucketing_pipeline=make_pipeline(
OptimalBucketer(max_n_bins=10, min_bin_size=0.05),
),
specials=specials,
)
pipe = make_pipeline(bucketing_process, StandardScaler(), LogisticRegression(solver="liblinear", random_state=0))
cross_val_score(pipe, X, y, cv=5, scoring="roc_auc")
def test_cv_pipeline(df):
"""
    Another cross-validation run, this time with a WOEEncoder step in the pipeline.
"""
y = df["default"].values
X = df.drop(columns=["default", "pet_ownership"])
specials = {"EDUCATION": {"Some specials": [1, 2]}}
bucketing_process = BucketingProcess(
prebucketing_pipeline=make_pipeline(
DecisionTreeBucketer(max_n_bins=100, min_bin_size=0.05),
),
bucketing_pipeline=make_pipeline(
OptimalBucketer(max_n_bins=10, min_bin_size=0.05),
),
specials=specials,
)
pipe = make_pipeline(
bucketing_process, WOEEncoder(cols=X.columns), LogisticRegression(solver="liblinear", random_state=0)
)
    with pytest.warns(None) as record:
        cross_val_score(pipe, X, y, cv=5, scoring="roc_auc")
    # also make sure no warnings were raised
    # assert len(record) == 0
| StarcoderdataPython |
3352149 | from jproperties import Properties
def test_repeated():
p = Properties()
p.load(b"key:value\nkey=the value\nkey = value1\nkey : value2\nkey value3\nkey\tvalue4")
assert p.properties == {"key": "value4"}
def test_repeated_with_meta():
p = Properties()
p.load(b"""
key = value1
#: metakey = metaval1
#: metakey2 = metaval22
key = value2
# Expected: Metadata should ONLY contain the following
# 'metakey' key.
#: metakey = metaval2
key = value3
""")
assert p.properties == {"key": "value3"}
assert p["key"] == ("value3", {"metakey": "metaval2"})
| StarcoderdataPython |
3240447 | <reponame>hitachi-rd-yokohama-sato/deep_saucer
# -*- coding: utf-8 -*-
#******************************************************************************************
# Copyright (c) 2019
# School of Electronics and Computer Science, University of Southampton and Hitachi, Ltd.
# All rights reserved. This program and the accompanying materials are made available under
# the terms of the MIT License which accompanies this distribution, and is available at
# https://opensource.org/licenses/mit-license.php
#
# March 1st, 2019 : First version.
#******************************************************************************************
"""
# Model loading script used with DeepSaucer
## Directory Structure
Any Directory
`-- DeepSaucer
|-- downloaded_data (downloaded_path)
| `-- mnist_chainer (dl_dir)
| `-- chainer_model_mnist.npz (param_path)
`-- mnist
`-- model
`-- model_c2k.py @
"""
import sys
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
import pathlib
def model_load(downloaded_path):
"""
Load mnist model
Read parameters of model created by Chainer and generate model for Keras
:return:
"""
dl_dir = pathlib.Path(downloaded_path, 'mnist_chainer')
param_path = dl_dir.joinpath('chainer_model_mnist.npz')
model = _c2k_mnist(str(param_path))
return model
def _c2k_mnist(param_path):
"""
Chainer -> Keras (using model parameters)
Confirmed the same accuracy as "predict_chainer"
The loss function in training at chainer is "softmax_cross_entropy"
It was reproduced by using "softmax" for the output layer and
"categorical_crossentropy" as the loss function.
model compile parameters:
* loss='categorical_crossentropy'
* optimizer=Adam
* metrics=['accuracy']
:param param_path:
:return:
"""
# Model structure
input_shape = (28 * 28,)
# Layers
layers = [Dense, Dense, Dense]
# units
units = [1000, 1000, 10]
# layer activations
activations = ['relu', 'relu', 'softmax']
# Load chainer model
print('Load File (%s)' % param_path)
try:
w1, w2, w3, b1, b2, b3 = _load_param(param_path)
# weight, bias
params = [[w1, b1], [w2, b2], [w3, b3]]
except Exception as e:
sys.stderr.write(str(e))
return None
# Restore to the model of Keras
try:
model = _create_model(input_shape=input_shape,
layers=layers, units=units,
params=params, activations=activations)
# model compile parameters
loss_func = 'categorical_crossentropy'
optimizer = 'adam'
metrics = ['accuracy']
# compile
model.compile(loss=loss_func, optimizer=optimizer, metrics=metrics)
except Exception as e:
sys.stderr.write(str(e))
return None
print('Compiled model')
model.summary()
return model
def _create_model(input_shape, layers, units, params, activations):
model = Sequential()
# Dense:
# kernel_initializer='glorot_uniform'
# (chainer: i.i.d. Gaussian samples)
# bias_initializer='zeros'
# input layer
model.add(
layers[0](units=units[0], activation=activations[0], weights=params[0],
input_shape=input_shape, use_bias=True))
for layer, unit, activation, param in zip(layers[1:], units[1:],
activations[1:], params[1:]):
# add layer
model.add(
layer(units=unit, activation=activation, weights=param,
use_bias=True))
return model
def _load_param(param_path):
"""
Loaad the weight and bias of each layer
:param param_path:
:return: The weight and bias of the three layers
"""
# load npz
param = np.load(param_path)
name_list = param['arr_0']
param_list = param['arr_1']
w1 = w2 = w3 = b1 = b2 = b3 = []
    # Premise: name_list and param_list contain the same number of entries
for name, param in zip(name_list, param_list):
if 'W' in name:
# set weight
if '1' in name:
w1 = param.T
elif '2' in name:
w2 = param.T
elif '3' in name:
w3 = param.T
elif 'b' in name:
# set bias
if '1' in name:
b1 = param
elif '2' in name:
b2 = param
elif '3' in name:
b3 = param
return w1, w2, w3, b1, b2, b3
def _print_model_info(model):
print('')
sep = '_________________________________________________________________'
iter_weights = iter(model.get_weights())
for index, (weight, bias) in enumerate(zip(iter_weights, iter_weights)):
print(sep)
print('Layer :', index)
print('Weight :', np.shape(weight))
print(weight)
print('Bias :', np.shape(bias))
print(bias)
print(sep)
def _evaluate_model(model, x, y):
loss, accuracy = model.evaluate(x, y, verbose=1)
print('')
print('loss :', loss)
print('accuracy :', accuracy)
def _check_param(model):
# debug
print('check load param:')
p_path = pathlib.Path(load_dir, 'mnist_chainer/chainer_model_mnist.npz')
w1, w2, w3, b1, b2, b3 = _load_param(str(p_path))
for i, (v1, v2) in enumerate(
zip([w1, b1, w2, b2, w3, b3], model.get_weights())):
        if i % 2 == 0:
pf = 'w'
else:
pf = 'b'
flag = np.allclose(v1, v2)
print('%s_%d :' % (pf, i // 2), flag)
if not flag:
print(v1 - v2)
if __name__ == '__main__':
load_dir = '/home/sato/Documents/OSARA/downloaded_data'
k_model = model_load(load_dir)
# evaluate
from keras.datasets import mnist
import keras.utils
# Load Test Data
(X_train, Y_train), (X_test, Y_test) = mnist.load_data()
X_test = X_test.reshape(10000, 28 * 28).astype('float32') / 255
Y_test = keras.utils.to_categorical(Y_test, 10)
# train = True
train = False
if train:
# Save load model
k_model.save('mnist_mlp_load_chainer_model.h5')
# training load model
X_train = X_train.reshape(60000, 28 * 28).astype('float32') / 255
Y_train = keras.utils.to_categorical(Y_train, 10)
batch_size = 100
epochs = 20
verbose = 2
k_model.fit(X_train, Y_train,
batch_size=batch_size, epochs=epochs, verbose=verbose)
# save keras new model
k_model.save('mnist_mlp_train_model.h5')
else:
# Read the parameters of the model created with chainer
print('----------param_load_model----------')
_print_model_info(k_model)
_evaluate_model(k_model, X_test, Y_test)
_check_param(k_model)
# Read the parameters of the model created with chainer and
# load the model save by keras
print('----------keras_save_model----------')
keras_save_model = keras.models.load_model(
'mnist_mlp_load_chainer_model.h5')
_print_model_info(keras_save_model)
_evaluate_model(keras_save_model, X_test, Y_test)
_check_param(keras_save_model)
# Read the parameters of the model created with chainer,
# train by keras and load the saved model
print('----------new_train_model----------')
new_train_model = keras.models.load_model(
'mnist_mlp_train_model.h5')
_print_model_info(new_train_model)
_evaluate_model(new_train_model, X_test, Y_test)
_check_param(new_train_model)
| StarcoderdataPython |
1631966 | <filename>hyde/environment.py
from hyde.errors import BaseError
class RuntimeError(BaseError):
pass
class Environment:
def __init__(self, enclosing = None):
self.values = {}
self.enclosing = enclosing
def assign(self, name, value):
if name.lexeme in self.values:
self.values[name.lexeme] = value
return
if self.enclosing is not None:
return self.enclosing.assign(name, value)
raise RuntimeError(name, f'Undefined variable {name.lexeme}.')
def assign_at(self, distance, name, value):
self.ancestor(distance).values[name.lexeme] = value
def define(self, name, value):
self.values[name] = value
def get(self, name):
if name.lexeme in self.values:
return self.values[name.lexeme]
if self.enclosing is not None:
return self.enclosing.get(name)
raise RuntimeError(name, f'Undefined variable {name.lexeme}.')
def get_at(self, distance, name):
return self.ancestor(distance).values.get(name)
def ancestor(self, distance):
environment = self
while distance > 0:
distance -= 1
environment = environment.enclosing
return environment
    def merge(self, other):
        # Walk both chains in parallel, merging level by level and creating
        # enclosing scopes on this side as needed.
        target = self
        while other:
            target.values.update(other.values)
            if other.enclosing and not target.enclosing:
                target.enclosing = Environment()
            target = target.enclosing
            other = other.enclosing
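# Illustrative scoping sketch (not part of the original module; in the
# interpreter, `name` is normally a lexer token with a .lexeme attribute):
#
#   globals_ = Environment()
#   globals_.define('x', 1)
#   local = Environment(enclosing=globals_)
#   local.get(x_token)  # -> 1, resolved through the enclosing scope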
| StarcoderdataPython |
1793972 | <filename>pygfx/objects/_base.py
import random
import weakref
import threading
import numpy as np
from ._events import EventTarget
from ..linalg import Vector3, Matrix4, Quaternion
from ..linalg.utils import transform_aabb, aabb_to_sphere
from ..resources import Resource, Buffer
from ..utils import array_from_shadertype
class IdProvider:
"""Object for internal use to manage world object id's."""
def __init__(self):
self._ids_in_use = set([0])
self._map = weakref.WeakValueDictionary()
self._lock = threading.RLock()
def claim_id(self, wobject):
"""Used by wobjects to claim an id."""
# We don't simply count up, but keep a pool of ids. This is
# because an application *could* create and discard objects at
# a high rate, so we want to be able to re-use these ids.
#
# Some numbers:
# * 4_294_967_296 (2**32) max number for u32
# * 2_147_483_647 (2**31 -1) max number for i32.
# * 16_777_216 max integer that can be stored exactly in f32
# * 4_000_000 max integer that survives being passed as a varying (in my tests)
# * 1_048_575 is ~1M is 2**20 seems like a good max scene objects.
# * 67_108_864 is ~50M is 2**26 seems like a good max vertex count.
# which leaves 64-20-26=18 bits for any other picking info.
# Max allowed id, inclusive
id_max = 1_048_575 # 2*20-1
# The max number of ids. This is a bit less to avoid choking
# when there are few free id's left.
max_items = 1_000_000
with self._lock:
if len(self._ids_in_use) >= max_items:
raise RuntimeError("Max number of objects reached.")
id = 0
while id in self._ids_in_use:
id = random.randint(1, id_max)
self._ids_in_use.add(id)
self._map[id] = wobject
return id
def release_id(self, wobject, id):
"""Release an id associated with a wobject."""
with self._lock:
self._ids_in_use.discard(id)
self._map.pop(id, None)
def get_object_from_id(self, id):
"""Return the wobject associated with an id, or None."""
return self._map.get(id)
id_provider = IdProvider()
class ResourceContainer:
"""Base class for WorldObject, Geometry and Material."""
def __init__(self):
self._resource_parents = weakref.WeakSet()
self._rev = 0
@property
def rev(self):
"""Monotonically increasing integer that gets bumped when any
of its buffers or textures are set. (Not when updates are made
to these resources themselves).
"""
return self._rev
# NOTE: we could similarly let bumping of a resource's rev bump a
# data_rev here. But it is not clear whether the (minor?) increase
# in performance is worth the added complexity.
def _bump_rev(self):
"""Bump the rev (and that of any "resource parents"), to trigger a pipeline rebuild."""
self._rev += 1
for x in self._resource_parents:
x._rev += 1
def __setattr__(self, name, value):
super().__setattr__(name, value)
if isinstance(value, ResourceContainer):
value._resource_parents.add(self)
self._bump_rev()
elif isinstance(value, Resource):
self._bump_rev()
class WorldObject(EventTarget, ResourceContainer):
"""The base class for objects present in the "world", i.e. the scene graph.
    Each WorldObject has geometry to define its data, and material to define
its appearance. The object itself is only responsible for defining object
hierarchies (parent / children) and its position and orientation in the world.
This is considered a base class. Use Group to collect multiple world objects
into a single empty world object.
"""
# The uniform type describes the structured info for this object, which represents
# every "propery" that a renderer would need to know in order to visualize it.
# Put larger items first for alignment, also note that host-sharable structs
# align at power-of-two only, so e.g. vec3 needs padding.
# todo: rename uniform to info or something?
uniform_type = dict(
world_transform="4x4xf4",
world_transform_inv="4x4xf4",
id="i4",
)
_v = Vector3()
_m = Matrix4()
_q = Quaternion()
def __init__(
self,
geometry=None,
material=None,
*,
visible=True,
render_order=0,
render_mask="auto",
):
super().__init__()
self.geometry = geometry
self.material = material
# Init visibility and render props
self.visible = visible
self.render_order = render_order
self.render_mask = render_mask
# Init parent and children
self._parent_ref = None
self._children = []
self.position = Vector3()
self.rotation = Quaternion()
self.scale = Vector3(1, 1, 1)
self._transform_hash = ()
self.up = Vector3(0, 1, 0)
self._matrix = Matrix4()
self._matrix_auto_update = True
self._matrix_world = Matrix4()
self._matrix_world_dirty = True
# Compose complete uniform type
self.uniform_type = {}
for cls in reversed(self.__class__.mro()):
self.uniform_type.update(getattr(cls, "uniform_type", {}))
self.uniform_buffer = Buffer(array_from_shadertype(self.uniform_type))
# Set id
self._id = id_provider.claim_id(self)
self.uniform_buffer.data["id"] = self._id
def __del__(self):
id_provider.release_id(self, self.id)
@property
def id(self):
"""An integer id smaller than 2**31 (read-only)."""
return self._id
@property
def visible(self):
"""Wheter is object is rendered or not. Default True."""
return self._visible
@visible.setter
def visible(self, visible):
self._visible = bool(visible)
@property
def render_order(self):
"""This value allows the default rendering order of scene graph
objects to be controlled. Default 0. See ``Renderer.sort_objects``
for details.
"""
return self._render_order
@render_order.setter
def render_order(self, value):
self._render_order = float(value)
@property
def render_mask(self):
"""Indicates in what render passes to render this object:
* "auto": try to determine the best approach (default).
* "opaque": only in the opaque render pass.
* "transparent": only in the transparent render pass(es).
* "all": render in both opaque and transparent render passses.
If "auto" (the default), the renderer attempts to determine
whether all fragments will be either opaque or all transparent,
and only apply the needed render passes. If this cannot be
determined, it falls back to "all".
Some objects may contain both transparent and opaque fragments,
and should be rendered in all passes - the object's contribution
to each pass is determined on a per-fragment basis.
For clarity, rendering objects in all passes even though they
are fully opaque/transparent yields correct results and is
generally the "safest" option. The only cost is performance.
Rendering transparent fragments in the opaque pass makes them
invisible. Rendering opaque fragments in the transparent pass
blends them as if they are transparent with an alpha of 1.
"""
return self._render_mask
@render_mask.setter
def render_mask(self, value):
value = "auto" if value is None else value
assert isinstance(value, str), "render_mask should be string"
value = value.lower()
options = ("opaque", "transparent", "auto", "all")
if value not in options:
raise ValueError(
f"WorldObject.render_mask must be one of {options} not {value!r}"
)
self._render_mask = value
# Trigger a pipeline redraw, because this info is used in that code path
self._bump_rev()
@property
def geometry(self):
"""The object's geometry, the data that defines (the shape of) this object."""
return self._geometry
@geometry.setter
def geometry(self, geometry):
self._geometry = geometry
@property
def material(self):
"""Wheter is object is rendered or not. Default True."""
return self._material
@material.setter
def material(self, material):
self._material = material
@property
def parent(self):
"""Object's parent in the scene graph (read-only).
An object can have at most one parent.
"""
return self._parent_ref and self._parent_ref()
@property
def children(self):
"""The child objects of this wold object (read-only tuple).
Use ``.add()`` and ``.remove()`` to change this list.
"""
return tuple(self._children)
def add(self, *objects, before=None):
"""Adds object as child of this object. Any number of
objects may be added. Any current parent on an object passed
in here will be removed, since an object can have at most one
parent.
If ``before`` argument is given (and present in children), then
the items are inserted before the given element.
"""
idx = len(self._children)
if before:
try:
idx = self._children.index(before)
except ValueError:
pass
for obj in objects:
# orphan if needed
if obj._parent_ref is not None:
obj._parent_ref().remove(obj)
# attach to scene graph
obj._parent_ref = weakref.ref(self)
self._children.insert(idx, obj)
idx += 1
# flag world matrix as dirty
obj._matrix_world_dirty = True
return self
def remove(self, *objects):
"""Removes object as child of this object. Any number of objects may be removed.
If a given object is not a child, it is ignored.
"""
for obj in objects:
try:
self._children.remove(obj)
obj._parent_ref = None
except ValueError:
pass
return self
def clear(self):
"""Removes all children."""
for child in self._children:
child._parent_ref = None
self._children.clear()
def traverse(self, callback, skip_invisible=False):
"""Executes the callback on this object and all descendants.
If ``skip_invisible`` is given and True, objects whose
``visible`` property is False - and their children - are
skipped. Note that modifying the scene graph inside the callback
is discouraged.
"""
if skip_invisible and not self._visible:
return
callback(self)
for child in self._children:
child.traverse(callback, skip_invisible)
def update_matrix(self):
p, r, s = self.position, self.rotation, self.scale
hash = p.x, p.y, p.z, r.x, r.y, r.z, r.w, s.x, s.y, s.z
if hash != self._transform_hash:
self._transform_hash = hash
self._matrix.compose(self.position, self.rotation, self.scale)
self._matrix_world_dirty = True
@property
def matrix(self):
"""The (settable) transformation matrix."""
return self._matrix
@matrix.setter
def matrix(self, matrix):
self._matrix.copy(matrix)
self._matrix.decompose(self.position, self.rotation, self.scale)
self._matrix_world_dirty = True
@property
def matrix_world(self):
"""The world matrix (local matrix composed with any parent matrices)."""
return self._matrix_world
@property
def matrix_auto_update(self):
"""Whether or not the matrix auto-updates."""
return self._matrix_auto_update
@matrix_auto_update.setter
def matrix_auto_update(self, value):
self._matrix_auto_update = bool(value)
@property
def matrix_world_dirty(self):
"""Whether or not the matrix needs updating (readonly)."""
return self._matrix_world_dirty
def apply_matrix(self, matrix):
if self._matrix_auto_update:
self.update_matrix()
self._matrix.premultiply(matrix)
self._matrix.decompose(self.position, self.rotation, self.scale)
self._matrix_world_dirty = True
def update_matrix_world(
self, force=False, update_children=True, update_parents=False
):
if update_parents and self.parent:
self.parent.update_matrix_world(
force=force, update_children=False, update_parents=True
)
if self._matrix_auto_update:
self.update_matrix()
if self._matrix_world_dirty or force:
if self.parent is None:
self._matrix_world.copy(self._matrix)
else:
self._matrix_world.multiply_matrices(
self.parent._matrix_world, self._matrix
)
self.uniform_buffer.data[
"world_transform"
].flat = self._matrix_world.elements
tmp_inv_matrix = Matrix4().get_inverse(self._matrix_world)
self.uniform_buffer.data[
"world_transform_inv"
].flat = tmp_inv_matrix.elements
self.uniform_buffer.update_range(0, 1)
self._matrix_world_dirty = False
for child in self._children:
child._matrix_world_dirty = True
if update_children:
for child in self._children:
child.update_matrix_world()
def look_at(self, target: Vector3):
self.update_matrix_world(update_parents=True, update_children=False)
self._v.set_from_matrix_position(self._matrix_world)
self._m.look_at(self._v, target, self.up)
self.rotation.set_from_rotation_matrix(self._m)
if self.parent:
self._m.extract_rotation(self.parent._matrix_world)
self._q.set_from_rotation_matrix(self._m)
self.rotation.premultiply(self._q.inverse())
def get_world_position(self):
self.update_matrix_world(update_parents=True, update_children=False)
self._v.set_from_matrix_position(self._matrix_world)
return self._v.clone()
def get_world_bounding_box(self):
"""Updates all parent and children world matrices, and returns
a single world-space axis-aligned bounding box for this object's
geometry and all of its children (recursively)."""
self.update_matrix_world(update_parents=True, update_children=True)
return self._get_world_bounding_box()
def _get_world_bounding_box(self):
"""Returns a world-space axis-aligned bounding box for this object's
geometry and all of its children (recursively)."""
boxes = []
if self._geometry:
aabb = self._geometry.bounding_box()
aabb_world = transform_aabb(aabb, self._matrix_world.to_ndarray())
boxes.append(aabb_world)
if self._children:
boxes.extend(
[
b
for b in (c.get_world_bounding_box() for c in self._children)
if b is not None
]
)
if len(boxes) == 1:
return boxes[0]
if boxes:
boxes = np.array(boxes)
return np.array([boxes[:, 0].min(axis=0), boxes[:, 1].max(axis=0)])
def get_world_bounding_sphere(self):
"""Returns a world-space bounding sphere by converting an
axis-aligned bounding box to a sphere.
See WorldObject.get_world_bounding_box.
"""
aabb = self.get_world_bounding_box()
if aabb is not None:
return aabb_to_sphere(aabb)
def _wgpu_get_pick_info(self, pick_value):
# In most cases the material handles this.
return self.material._wgpu_get_pick_info(pick_value)
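# Illustrative sketch (not part of the original module): composing a tiny
# scene graph and resolving a world position. This assumes pygfx's
# three.js-style Vector3 API, where `set(x, y, z)` mutates the vector.
def _demo_scene_graph():
    parent = WorldObject()
    child = WorldObject()
    parent.add(child)
    parent.position.set(1.0, 0.0, 0.0)
    child.position.set(0.0, 2.0, 0.0)
    parent.update_matrix_world()  # composes child matrices recursively
    return child.get_world_position()  # expected: Vector3(1, 2, 0)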
| StarcoderdataPython |
147478 | import soundset
# create random score for C3~C5 notes == 130.8~523.3hz
#-> score object
s1 = soundset.score.random(length=32,tempo=120,beat=16,chord=3,pitch=3,register=25,random_state=None)
# create score piano roll
#-> 2-dim binaly numpy array, size of (length, 128)
roll = s1.to_roll(ignore_out_of_range=False)
assert roll.dtype==int, 'it is binaly array but dtype is int'
assert roll.shape==(32,128), 'shape is to (32,128)'
assert roll.min()==0 and roll.max()==1, 'binaly array'
assert all( roll.sum(axis=1)==3 ), 'each line has 3 notes'
roll = s1.to_roll(ignore_out_of_range=True)
assert roll.shape == (32,25), 'the roll shape is (32,25) when ignore_out_of_range is True'
# synthesize score
#-> 1 or 2-dim float on [-1,1] numpy array, size of (length):mono or (length, 2):stereo
wave = s1.to_wave(instrument=0,stereo=True,rate=44100) # inst0=piano
assert wave.dtype==float
assert wave.shape==(44100*32*4/16*60/120, 2)
assert -1<=wave.min() and wave.max()<=1
# synthesize score with system soundfont
wave = s1.to_wave(instrument=40) # synthesize violin with default font
# references
## score
## you can generate specific score with pianoroll
# s2 = soundset.score.generate(roll=roll) # the size must be (length, 128)
## (in future) you can generate score from midi file
# s2 = soundset.score.load(filename='midi/file/name.midi')
## (in future) you can save score as midi file
# s2.save(filename='midifilename.midi')
## synthesize
## (in future) wave is Periodic function which period is 2 pi, and range is [-1,1]
# s2.to_wave(wave=np.sin)
print('')
print('all tests pass.')
print('done.')
| StarcoderdataPython |
147003 | import os
import pusher
import hashlib
from dotenv import load_dotenv
from flask import Blueprint, request
from flask_login import login_required, current_user
from models import Session
project_folder = os.path.dirname(os.path.abspath(__file__))
load_dotenv(os.path.join(project_folder, os.pardir, 'app-env'))
pusher_bp = Blueprint('pusher', __name__)
pusher_client = pusher.Pusher(
app_id=os.getenv("PUSHER_APPID"),
key=os.getenv("PUSHER_KEY"),
secret=os.getenv("PUSHER_SECRET"),
cluster='eu',
ssl=True
)
@pusher_bp.route('/send-push', methods=["POST"])
@login_required
def send_push():
data = request.form
session = Session.query.filter_by(name=data['session_name'], open=True, owner=current_user).first()
if not session:
return "Error: No such session open", 400
all_channel = hashlib.md5("{}_{}".format(session.name, current_user.username).lower().encode()).hexdigest()
pusher_client.trigger(all_channel, 'message', {
'title': data['title'],
'body': data['body'],
'code': data['code'],
'type': data['type'],
'timeout': 0
})
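    # Illustrative client-side sketch (not part of the original module): a
    # logged-in client would POST form data like the following; the host and
    # session cookie below are placeholders.
    #
    #   requests.post('https://example.test/send-push',
    #                 data={'session_name': 'demo', 'title': 'Hello',
    #                       'body': 'First push', 'code': '', 'type': 'info'},
    #                 cookies={'session': '<login cookie>'})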
return "Message sent" | StarcoderdataPython |
3258974 | import math
import torch.nn as nn
from torch.optim import Adam
from gym import spaces
from common.distributions import *
from common.util import Flatten
def atari(env, **kwargs):
in_dim = env.observation_space.shape
policy_dim = env.action_space.n
network = CNN(in_dim, policy_dim)
optimizer = Adam(network.parameters(), 2.5e-4, eps=1e-5)
params = dict(
dist=Categorical,
network=network,
optimizer=optimizer,
gamma=0.99,
grad_norm=0.5,
timesteps_per_batch=128,
ent_coef=.01,
vf_coef=0.5,
gae_lam=0.95,
nminibatches=4,
opt_iter=4,
cliprange=0.1,
ob_scale=1.0 / 255
)
params.update(kwargs)
return params
def classic_control(env, **kwargs):
in_dim = env.observation_space.shape[0]
if isinstance(env.action_space, spaces.Box):
dist = DiagGaussian
policy_dim = env.action_space.shape[0] * 2
elif isinstance(env.action_space, spaces.Discrete):
dist = Categorical
policy_dim = env.action_space.n
else:
raise ValueError
network = MLP(in_dim, policy_dim)
optimizer = Adam(network.parameters(), 3e-4, eps=1e-5)
params = dict(
dist=dist,
network=network,
optimizer=optimizer,
gamma=0.99,
grad_norm=0.5,
timesteps_per_batch=2048,
ent_coef=0,
vf_coef=0.5,
gae_lam=0.95,
nminibatches=4,
opt_iter=4,
cliprange=0.2,
ob_scale=1
)
params.update(kwargs)
return params
def box2d(env, **kwargs):
return classic_control(env, **kwargs)
class CNN(nn.Module):
def __init__(self, in_shape, policy_dim):
super().__init__()
c, h, w = in_shape
cnn_out_dim = 64 * ((h - 28) // 8) * ((w - 28) // 8)
self.feature = nn.Sequential(
nn.Conv2d(c, 32, 8, 4),
nn.ReLU(True),
nn.Conv2d(32, 64, 4, 2),
nn.ReLU(True),
nn.Conv2d(64, 64, 3, 1),
nn.ReLU(True),
Flatten(),
nn.Linear(cnn_out_dim, 512),
nn.ReLU(True)
)
for _, m in self.named_modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
nn.init.orthogonal_(m.weight, math.sqrt(2))
nn.init.constant_(m.bias, 0)
self.policy = nn.Linear(512, policy_dim)
nn.init.orthogonal_(self.policy.weight, 1e-2)
nn.init.constant_(self.policy.bias, 0)
self.value = nn.Linear(512, 1)
nn.init.orthogonal_(self.value.weight, 1)
nn.init.constant_(self.value.bias, 0)
def forward(self, x):
latent = self.feature(x)
return self.policy(latent), self.value(latent)
class MLP(nn.Module):
def __init__(self, in_dim, policy_dim):
super().__init__()
self.feature = nn.Sequential(
nn.Linear(in_dim, 64),
nn.Tanh(),
nn.Linear(64, 64),
nn.Tanh()
)
for _, m in self.named_modules():
if isinstance(m, nn.Linear):
nn.init.orthogonal_(m.weight, math.sqrt(2))
nn.init.constant_(m.bias, 0)
self.policy = nn.Linear(64, policy_dim)
nn.init.orthogonal_(self.policy.weight, 1e-2)
nn.init.constant_(self.policy.bias, 0)
self.value = nn.Linear(64, 1)
nn.init.orthogonal_(self.value.weight, 1)
nn.init.constant_(self.value.bias, 0)
def forward(self, x):
latent = self.feature(x)
return self.policy(latent), self.value(latent)
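# Illustrative sketch (not part of the original module): building the
# hyperparameter dict for a classic-control task and running one forward
# pass. `gym` is assumed to be installed; the agent that consumes these
# params is not shown.
def _demo_params():
    import gym
    import torch
    env = gym.make('CartPole-v1')
    params = classic_control(env, gamma=0.995)  # kwargs override the defaults
    obs = torch.zeros(1, env.observation_space.shape[0])
    logits, value = params['network'](obs)
    return logits.shape, value.shape  # (1, n_actions), (1, 1)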
| StarcoderdataPython |
1697358 | import ujson
import usocket
import _thread
import time
import ussl
DEBUG=1
class VAR:
class auth:
list={}
surl="identitytoolkit.googleapis.com"
class rtdb:
url=None
secret=None
ruleurl=None
apikey=None
socklist={}
authct=None
class rtdb:
def put(PATH, DATA, DUMP=None, bg=True, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.rtdb.put, [PATH, ujson.dumps(DATA), DUMP, str(id), cb])
else:
INTERNAL.rtdb.put(PATH, ujson.dumps(DATA), DUMP, str(id), cb)
def patch(PATH, DATATAG, DUMP=None, bg=True, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.rtdb.patch, [PATH, ujson.dumps(DATATAG), DUMP, str(id), cb])
else:
INTERNAL.rtdb.patch(PATH, ujson.dumps(DATATAG), DUMP, str(id), cb)
def getfile(PATH, FILE, DUMP=None, bg=False, id=0, cb=None, limit=False):
if bg:
            _thread.start_new_thread(INTERNAL.rtdb.getfile, [PATH, FILE, DUMP, bg, str(id), cb, limit])
        else:
            INTERNAL.rtdb.getfile(PATH, FILE, DUMP, bg, str(id), cb, limit)
def get(PATH, DUMP, bg=False, cb=None, id=0, limit=False):
if bg:
_thread.start_new_thread(INTERNAL.rtdb.get, [PATH, DUMP, str(id), cb, limit])
else:
INTERNAL.rtdb.get(PATH, DUMP, str(id), cb, limit)
def delete(PATH, DUMP=None, bg=True, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.rtdb.delete, [PATH, DUMP, str(id), cb])
else:
INTERNAL.rtdb.delete(PATH, DUMP, str(id), cb)
def addto(PATH, DATA, DUMP=None, bg=True, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.rtdb.addto, [PATH, ujson.dumps(DATA), DUMP, str(id), cb])
else:
INTERNAL.rtdb.addto(PATH, ujson.dumps(DATA), DUMP, str(id), cb)
class conf:
def seturl(url):
try:
proto, dummy, host, path = url.split("/", 3)
except ValueError:
proto, dummy, host = url.split("/", 2)
path = ""
if proto == "http:":
port = 80
elif proto == "https:":
port = 443
else:
raise ValueError("Unsupported protocol: " + proto)
if ":" in host:
host, port = host.split(":", 1)
port = int(port)
VAR.rtdb.url={"proto": proto, "host": host, "port": port}
def setsecret(secret):
if VAR.rtdb.url:
VAR.rtdb.secret=secret
VAR.rtdb.ruleurl=("https://"+VAR.rtdb.url["host"]+"/.firebaseio/.settings/rules.json?access_token="+secret)
else:
raise OSError("Please specify the rtdb url first: firebase.rtdb.conf.seturl(url)")
def getrules(DUMP, bg=False, id=0, cb=None):
if VAR.rtdb.ruleurl:
if bg:
_thread.start_new_thread(INTERNAL.rtdb.getrules, [DUMP, str(id), cb])
else:
INTERNAL.rtdb.getrules(DUMP, str(id), cb)
else:
raise OSError("Please specify the rtdb secret first: firebase.rtdb.conf.setsecret(secret)")
class auth:
def selauth(email):
if str(email) in VAR.auth.list:
VAR.authct=email
return 1
else:
raise OSError("{email} not signed in. Use auth.signin_ep to enable this feature.".format(email=email))
def desauth():
VAR.authct=None
return 1
def clear():
VAR.auth.list={}
    def sign_in_ep(email, passwd, DUMP=None, bg=False, id=0, cb=None):
        if bg:
            _thread.start_new_thread(INTERNAL.auth.sign_in_ep, [email, passwd, DUMP, str(id), cb])
        else:
            INTERNAL.auth.sign_in_ep(email, passwd, DUMP, str(id), cb)
def send_password_reset_email(DUMP, bg=False, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.auth.send_password_reset_email,[DUMP, str(id), cb])
else:
INTERNAL.auth.send_password_reset_email(DUMP, str(id), cb)
def sign_up_ep(email, passwd, bg=False, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.auth.sign_up_ep, [email, passwd, str(id), cb])
else:
INTERNAL.auth.sign_up_ep(email, passwd, str(id), cb)
def sign_in_anonym(bg=False, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.auth.sign_in_anonym, [str(id), cb])
else:
INTERNAL.auth.sign_in_anonym(str(id), cb)
    def verify_password_reset_code(oobCode, DUMP, bg=False, id=0, cb=None):
        if bg:
            _thread.start_new_thread(INTERNAL.auth.verify_password_reset_code,[oobCode, DUMP, str(id), cb])
        else:
            INTERNAL.auth.verify_password_reset_code(oobCode, DUMP, str(id), cb)
def confirm_password_reset(oobCode, newpasswd, DUMP, bg=False, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.auth.confirm_password_reset,[oobCode, newpasswd, DUMP, str(id), cb])
else:
INTERNAL.auth.confirm_password_reset(oobCode, newpasswd, DUMP, str(id), cb)
def change_email(newemail, DUMP=None, bg=False, id=0, cb=None):
if bg:
_thread.start_new_thread(INTERNAL.auth.change_email,[newemail, DUMP, str(id), cb])
else:
INTERNAL.auth.change_email(newemail, DUMP, str(id), cb)
    def change_password(newpassword, DUMP=None, bg=False, id=0, cb=None):
        if bg:
            _thread.start_new_thread(INTERNAL.auth.change_password,[newpassword, DUMP, str(id), cb])
        else:
            INTERNAL.auth.change_password(newpassword, DUMP, str(id), cb)
class INTERNAL:
def checksockav(id):
try:
while VAR.socklist["SS"+id]:
time.sleep(2)
VAR.socklist["SS"+id]=True
except:
VAR.socklist["SS"+id]=True
def callback(cb):
try:
cb[0](*cb[1])
except:
try:
cb[0](cb[1])
except:
raise OSError("Callback function could not be executed. Try the function without ufirebase.py callback.")
def disconnect(id):
VAR.socklist["SS"+id].close()
VAR.socklist["SS"+id]=None
VAR.socklist["S"+id]=None
def connect(host, port, id):
LOCAL_ADINFO=usocket.getaddrinfo(host, port, 0, usocket.SOCK_STREAM)[0]
VAR.socklist["S"+id] = usocket.socket(LOCAL_ADINFO[0], LOCAL_ADINFO[1], LOCAL_ADINFO[2])
VAR.socklist["S"+id].connect(LOCAL_ADINFO[-1])
try:
VAR.socklist["SS"+id] = ussl.wrap_socket(VAR.socklist["S"+id], server_hostname=VAR.rtdb.url["host"])
except Exception as Exception:
INTERNAL.disconnect(id)
raise Exception
class rtdb:
def put(PATH, DATA, DUMP, id, cb):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"PUT /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"PUT /"+PATH+b".json HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def patch(PATH, DATATAG, DUMP, id, cb):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"PATCH /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"PATCH /"+PATH+b".json HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATATAG))+"\r\n\r\n")
LOCAL_SS.write(DATATAG)
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def get(PATH, DUMP, id, cb, limit):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"GET /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"GET /"+PATH+b".json?shallow="+str(limit)+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n\r\n")
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def getfile(PATH, FILE, DUMP, bg, id, cb, limit):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"GET /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"GET /"+PATH+b".json?shallow="+str(limit)+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n\r\n")
while not LOCAL_SS.readline()==b"\r\n":
pass
LOCAL_FILE=open(FILE, "wb")
if bg:
while True:
LOCAL_LINE=LOCAL_SS.read(1024)
if LOCAL_LINE==b"":
break
LOCAL_FILE.write(LOCAL_LINE)
time.sleep_ms(1)
else:
while True:
LOCAL_LINE=LOCAL_SS.read(1024)
if LOCAL_LINE==b"":
break
LOCAL_FILE.write(LOCAL_LINE)
LOCAL_FILE.close()
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def delete(PATH, DUMP, id, cb):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"DELETE /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"DELETE /"+PATH+b".json HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n\r\n")
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def addto(PATH, DATA, DUMP, id, cb):
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.rtdb.url["host"], VAR.rtdb.url["port"], id)
LOCAL_SS=VAR.socklist["SS"+id]
if VAR.authct:
INTERNAL.auth.update(VAR.authct)
LOCAL_SS.write(b"POST /"+PATH+b".json?auth="+VAR.auth.list[VAR.authct]["idToken"]+b" HTTP/1.0\r\n")
else:
LOCAL_SS.write(b"POST /"+PATH+b".json"+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def getrules(DUMP, id, cb):
INTERNAL.checksockav(id)
            INTERNAL.connect(VAR.rtdb.url["host"], 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"GET "+VAR.rtdb.ruleurl+" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: "+VAR.rtdb.url["host"]+b"\r\n")
LOCAL_DATA=LOCAL_SS.read()
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
INTERNAL.disconnect(id)
if cb:
INTERNAL.callback(cb)
def setrules():
pass
class auth:
def sign_in_ep(email, passwd, DUMP, id, cb):
DATA=ujson.dumps({"email":email,"password":<PASSWORD>,"returnSecureToken":True})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST /v1/accounts:signInWithPassword?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
if DUMP:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").replace(b" ",b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
try:
VAR.auth.list[str(email)]={"time": time.mktime(time.localtime()), "passwd": <PASSWORD>, "idToken": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["idToken"], "expiresIn": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["expiresIn"]}
except:
if DEBUG:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
else:
pass
if cb:
INTERNAL.callback(cb)
def update(email):
if ((time.mktime(time.localtime()) - int(VAR.auth.list[str(email)]["time"])) > int(VAR.auth.list[str(email)]['expiresIn']) - 600):
                INTERNAL.auth.sign_in_ep(email, VAR.auth.list[str(email)]["passwd"], None, "0", None)
def send_password_reset_email(DUMP, id, cb):
DATA=ujson.dumps({"requestType":"PASSWORD_RESET", "email":VAR.auth.list[VAR.authct]["email"]})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST https://identitytoolkit.googleapis.com/v1/accounts:sendOobCode?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
if cb:
INTERNAL.callback(cb)
def sign_up_ep(email, passwd, id, cb):
DATA=ujson.dumps({"email":email,"password":<PASSWORD>,"returnSecureToken":True})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST /v1/accounts:signUp?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
try:
VAR.auth.list[str(email)]={"time": time.mktime(time.localtime()), "passwd": <PASSWORD>, "idToken": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["idToken"], "expiresIn": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["expiresIn"]}
except Exception as Exception:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
if cb:
INTERNAL.callback(cb)
def sign_in_anonym(id, cb):
DATA=ujson.dumps({"returnSecureToken":True})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST /v1/accounts:signUp?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
try:
VAR.auth.list["anonym"]={"time": time.mktime(time.localtime()), "passwd": <PASSWORD>, "idToken": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["idToken"], "expiresIn": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["expiresIn"]}
except Exception as Exception:
raise Exception
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
if cb:
INTERNAL.callback(cb)
def verify_password_reset_code(oobCode, DUMP, id, cb):
DATA=ujson.dumps({"oobCode":oobCode})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST https://identitytoolkit.googleapis.com/v1/accounts:sendOobCode?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
if DEBUG:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
else:
pass
if cb:
INTERNAL.callback(cb)
def confirm_password_reset(oobCode, newpasswd, DUMP, id, cb):
DATA=ujson.dumps({"oobCode":oobCode, "newPassword":<PASSWORD>})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST https://identitytoolkit.googleapis.com/v1/accounts:sendOobCode?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
            try:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").replace(b" ",b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
except:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
if cb:
INTERNAL.callback(cb)
def change_email(newemail, DUMP, id, cb):
DATA=ujson.dumps({"idToken":VAR.auth.list[VAR.authct]["idToken"],"email":newemail,"returnSecureToken":True})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST /v1/accounts:update?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
if DUMP:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").replace(b" ",b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
try:
VAR.auth.list[str(newemail)]={"time": time.mktime(time.localtime()), "passwd": VAR.auth.list[VAR.authct]["passwd"], "idToken": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["idToken"], "expiresIn": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["expiresIn"]}
except:
if DEBUG:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
else:
pass
if cb:
INTERNAL.callback(cb)
def change_password(newpassword, DUMP, id, cb):
DATA=ujson.dumps({"idToken":VAR.auth.list[VAR.authct]["idToken"],"password":<PASSWORD>,"returnSecureToken":True})
INTERNAL.checksockav(id)
INTERNAL.connect(VAR.auth.surl, 443, id)
LOCAL_SS=VAR.socklist["SS"+id]
LOCAL_SS.write(b"POST /v1/accounts:update?key="+VAR.apikey+b" HTTP/1.0\r\n")
LOCAL_SS.write(b"Host: identitytoolkit.googleapis.com\r\n")
LOCAL_SS.write(b"Content-Length: "+str(len(DATA))+"\r\n\r\n")
LOCAL_SS.write(DATA)
LOCAL_DATA=LOCAL_SS.read()
INTERNAL.disconnect(id)
if DUMP:
LOCAL_OUTPUT=ujson.loads(LOCAL_DATA.replace(b"\n", b"").replace(b" ",b"").splitlines()[-1])
globals()[DUMP]=LOCAL_OUTPUT
try:
                VAR.auth.list[VAR.authct]={"time": time.mktime(time.localtime()), "passwd": newpassword, "idToken": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["idToken"], "expiresIn": ujson.loads(LOCAL_DATA.replace(b"\n", b"").splitlines()[-1])["expiresIn"]}
except:
if DEBUG:
raise OSError("parse error:\r\n {val}".format(val=LOCAL_DATA))
else:
pass
if cb:
INTERNAL.callback(cb)
def setapikey(key):
VAR.apikey=key
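# Illustrative sketch (not part of the original module): a minimal RTDB
# round trip. The project URL and API key are placeholders; `get` dumps its
# parsed response into this module's globals under the given name, and
# `setapikey` is assumed to be module-level, as in the published ufirebase API.
def _demo():
    setapikey('YOUR-WEB-API-KEY')
    rtdb.conf.seturl('https://your-project.firebaseio.com/')
    rtdb.put('demo/greeting', 'hello', bg=False)
    rtdb.get('demo/greeting', 'RESULT', bg=False)
    print(globals().get('RESULT'))  # -> 'hello'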
| StarcoderdataPython |
190369 | import mongoengine
from marvinbot.utils import localized_date
class ChatLink(mongoengine.Document):
source_chat_id = mongoengine.LongField(required=True, null=False)
target_chat_id = mongoengine.LongField(null=True)
first_name = mongoengine.StringField()
user_id = mongoengine.LongField(required=True, null=False)
date_added = mongoengine.DateTimeField(default=localized_date)
@classmethod
def all_by_source_chat_id(cls, id):
try:
return cls.objects.filter(source_chat_id=id)
except:
return []
@classmethod
def all_by_target_chat_id(cls, id):
try:
return cls.objects.filter(target_chat_id=id)
except:
return []
@classmethod
def by_id(cls, id):
try:
return cls.objects.get(id=id)
except:
return None
@classmethod
def by_source_chat_id(cls, id):
try:
return cls.objects.get(source_chat_id=id)
except:
return None
def __str__(self):
return "{{ id = \"{id}\", source_chat_id = {source_chat_id}, target_chat_id = {target_chat_id} }}".format(
id=self.id,
source_chat_id=self.source_chat_id,
target_chat_id=self.target_chat_id)
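# Illustrative sketch (not part of the original module): a typical
# get-or-create flow; assumes mongoengine.connect() was called elsewhere.
def _demo(chat_id, user_id):
    link = ChatLink.by_source_chat_id(chat_id)
    if link is None:
        link = ChatLink(source_chat_id=chat_id, user_id=user_id).save()
    return ChatLink.all_by_source_chat_id(chat_id)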
| StarcoderdataPython |
38400 | from vivid.core import BaseBlock, network_hash
def test_network_hash():
a = BaseBlock('a')
b = BaseBlock('b')
assert network_hash(a) != network_hash(b)
assert network_hash(a) == network_hash(a)
c = BaseBlock('c', parent=[a, b])
hash1 = network_hash(c)
a._parent = [BaseBlock('z')]
hash2 = network_hash(c)
assert hash1 != hash2
| StarcoderdataPython |
4832675 | <gh_stars>0
from typing import List
class Solution:
def XXX(self, n: int) -> List[str]:
res=[]
def backtrace(s: str,left: int,right: int):
if left==n and right==n:
res.append(s)
if left<n:backtrace(s+"(",left+1,right)
if right<left:backtrace(s+")",left,right+1)
backtrace("",0,0)
return res
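# Illustrative sketch (not part of the original snippet): the anonymized
# method name `XXX` corresponds to generating all balanced combinations of
# n parenthesis pairs.
if __name__ == '__main__':
    print(Solution().XXX(3))
    # ['((()))', '(()())', '(())()', '()(())', '()()()']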
| StarcoderdataPython |
1702259 | <filename>dgen/commands/model/templates/model.py
class [[ name ]](models.Model):
class Meta:
verbose_name = _('[[ name ]]')
verbose_name_plural = _('[[ name ]]s')
ordering = ['id']
[% for field in fields %][[ field ]][% endfor %]
def __str__(self):
return f'[[ name ]]{self.id}'
| StarcoderdataPython |
1453 | from bs4 import BeautifulSoup
import requests
from urllib.request import urlretrieve
ROOT = 'http://pdaotao.duytan.edu.vn'
def get_url_sub(sub, id_, page):
all_td_tag = []
for i in range(1, page+1):
print('http://pdaotao.duytan.edu.vn/EXAM_LIST/?page={}&lang=VN'.format(i))
r = requests.get('http://pdaotao.duytan.edu.vn/EXAM_LIST/?page={}&lang=VN'.format(i))
soup = BeautifulSoup(r.text, 'lxml')
list_td_tag = soup.find_all('td', attrs={'style': 'padding-top:10px'})
all_td_tag = all_td_tag + list_td_tag
for td_tag in all_td_tag:
if (((sub+id_) in str(td_tag.a.contents[0])) or
((sub+' '+id_) in str(td_tag.a.contents[0])) or
((sub+'_'+id_) in str(td_tag.a.contents[0]))):
print('\nComplete!!!')
print(' '.join(str(td_tag.a.string).split()))
print(str(td_tag.a['href']).replace('..', ROOT))
return str(td_tag.a['href']).replace('..', ROOT)
def get_excel_url(url):
r = requests.get(url)
soup = BeautifulSoup(r.text,'lxml')
list_span_tags = soup.find_all('span',class_='txt_l4')
excel_url = list_span_tags[1].a['href'].replace('..',ROOT)
return excel_url
# a = get_excel_url('http://pdaotao.duytan.edu.vn/EXAM_LIST_Detail/?ID=52289&lang=VN')
def main():
sub = input('Nhap ten mon: ')
id_ = input('Nhap id mon: ')
url = get_url_sub(sub,id_,4)
    if url is None:
print('Khong tim thay mon nao nhu nay ({} {}) ca :('.format(sub, id_))
return
else:
print('get excel URL!!!')
excel_url = get_excel_url(url)
excel_url = excel_url.replace(' ','%20')
print('Download excel file!!!')
save_at = 'C:/Users/truon/Desktop/'
filename = save_at + excel_url.split('/')[-1].replace('%20',' ')
urlretrieve(excel_url,filename)
print('Done!')
main()
| StarcoderdataPython |
3340084 | from __future__ import unicode_literals
from django.db import models
from django.conf import settings
from decimal import Decimal
from django.utils.translation import pgettext_lazy
from django.utils.timezone import now
from datetime import datetime
from django.contrib.auth.models import (AbstractBaseUser, BaseUserManager, PermissionsMixin)
from django.core.validators import MinValueValidator, RegexValidator
from saleor.billtypes.models import BillTypes
from saleor.customer.models import Customer
from saleor.room.models import Room
from saleor.booking.models import Book
class BillManager(BaseUserManager):
def payments(self):
query = self.get_queryset().filter(status='fully-paid')
return query
def bill_types(self):
billtypes = []
query = self.get_queryset()
for bill in query:
if bill.billtype.name not in billtypes:
billtypes.append(bill.billtype.name)
return billtypes
def customer_summary(self, status='fully-paid', billtype=None, start_date=None, end_date=None):
"""
Get bills query set based on query filters
:param status: payment status
:param billtype: type of bill (e.g Rent, Service ...)
:param start_date: filter between date range
:param end_date: filter between date range
:return: bill model query set
"""
query = self.get_queryset()
if status == 'fully-paid':
query = query.filter(status=status)
if status == 'pending':
query = query.filter(status=status)
if billtype:
query = query.filter(billtype__name=billtype)
if start_date and end_date:
query = query.filter(
models.Q(month__gte=start_date) &
models.Q(month__lte=end_date)
)
return query.aggregate(models.Sum('amount'))['amount__sum']
def customer_bills(self, customer, status='fully-paid', billtype=None, booking=None, check_in=None, check_out=None):
"""
Compute customers amount paid & amount pending or total amount
:param customer: customer model object
:param status: pending to return pending amount.
        :return: decimal (amount either pending/fully-paid or total amount)
"""
query = self.get_queryset()
if customer:
query = query.filter(customer=customer)
if billtype:
query = query.filter(billtype__name=billtype)
if booking:
query = query.filter(booking=booking)
if status == 'fully-paid':
query = query.filter(status=status)
if check_in:
query = query.filter(
models.Q(month=check_in)
)
if status == 'pending':
query = query.filter(status=status)
return query.aggregate(models.Sum('amount'))['amount__sum']
class Bill(models.Model):
''' invoice_number for generating invoices of that bill '''
invoice_number = models.CharField(
pgettext_lazy('Bill field', 'invoice_number'), default='', null=True, max_length=36)
description = models.TextField(
verbose_name=pgettext_lazy('Bill field', 'description'), blank=True, null=True)
billtype = models.ForeignKey(
BillTypes, blank=True, null=True, related_name='bill_types',
verbose_name=pgettext_lazy('Bill field', 'customer'), on_delete=models.SET_NULL)
customer = models.ForeignKey(
Customer, blank=True, null=True, related_name='bill_customers',
verbose_name=pgettext_lazy('Bill field', 'customer'), on_delete=models.SET_NULL)
room = models.ForeignKey(
Room, blank=True, null=True, related_name='bill_rooms',
verbose_name=pgettext_lazy('Bill field', 'room'), on_delete=models.SET_NULL)
booking = models.ForeignKey(
Book, blank=True, null=True, related_name='bill_booking',
verbose_name=pgettext_lazy('Bill field', 'bill'))
amount = models.DecimalField(
pgettext_lazy('Bill field', 'amount of the bill'), max_digits=12,
validators=[MinValueValidator(0)], default=Decimal(0), decimal_places=2)
tax = models.DecimalField(
pgettext_lazy('Bill field', 'tax of the bill based on the amount'), max_digits=12,
validators=[MinValueValidator(0)], default=Decimal(0), decimal_places=2)
is_taxable = models.BooleanField(
pgettext_lazy('Book field', 'is taxable'), default=False)
month = models.DateField(
pgettext_lazy('Bill field', 'month billed'),
default=now)
status = models.CharField(
pgettext_lazy('Bill field', 'status'), default='pending', null=True, max_length=128)
updated_at = models.DateTimeField(
pgettext_lazy('Bill field', 'updated at'), auto_now=True, null=True)
created = models.DateTimeField(pgettext_lazy('Bill field', 'created'), default=now, editable=False)
objects = BillManager()
class Meta:
app_label = 'bill'
verbose_name = pgettext_lazy('Bill model', 'Bill')
verbose_name_plural = pgettext_lazy('Bills model', 'Bills')
def __str__(self):
return self.status
def get_total_amount(self):
''' amount plus the tax '''
return (self.amount + self.tax)
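# Illustrative sketch (not part of the original module): querying one
# customer's paid and pending totals for a booking. Assumes a configured
# Django project and existing model instances.
def _demo_totals(customer, booking):
    paid = Bill.objects.customer_bills(customer, status='fully-paid', booking=booking)
    pending = Bill.objects.customer_bills(customer, status='pending', booking=booking)
    return paid, pending  # each is a Decimal or None if no rows matched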
| StarcoderdataPython |
1756772 | <reponame>restful-open-annotation/eve-restoa
#!/usr/bin/env python
"""RESTful Open Annotation server based on Eve.
The RESTful Open Annotation API is primarily implemented using two
ways of modifying the Eve default API:
1. global configuration of keys in settings.py to use OA names,
e.g. "annotatedAt" instead of the default "_created".
2. event hooks to modify incoming and outcoming documents in more
complex ways, such as removing the default "@context" value on POST
and adding it to the top-level graph on GET.
"""
__author__ = '<NAME>'
__license__ = 'MIT'
import sys
import os
from eve import Eve
from oaeve import setup_callbacks
from settings import PORT
# TODO: I think we need this for mod_wsgi, but make sure.
appdir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(appdir))
try:
from development import DEBUG
print >> sys.stderr, '########## Devel, DEBUG %s ##########' % DEBUG
except ImportError:
DEBUG = False
# Eve's "settings.py application folder" default fails with wsgi
app = Eve(settings=os.path.join(appdir, 'settings.py'))
setup_callbacks(app)
def main(argv):
if not DEBUG:
app.run(host='0.0.0.0', port=PORT, debug=False)
else:
app.run(debug=DEBUG, port=PORT)
return 1
if __name__ == '__main__':
sys.exit(main(sys.argv))
| StarcoderdataPython |
8344 | <filename>dragontail/content/models/basicpage.py<gh_stars>0
# encoding: utf-8
from django.db import models
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.fields import StreamField
from wagtail.wagtailcore import blocks
from wagtail.wagtailadmin.edit_handlers import FieldPanel, StreamFieldPanel
from wagtail.wagtailimages.blocks import ImageChooserBlock
class BasicPage(Page):
body = StreamField([
('paragraph', blocks.RichTextBlock()),
('image', ImageChooserBlock()),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
def get_template(self, request, *args, **kwargs):
from .templatesettings import TemplateSettings
template_settings = TemplateSettings.for_site(request.site)
return template_settings.basic_page_template | StarcoderdataPython |
48007 | # -*- coding: utf-8 -*-
# (The MIT License)
#
# Copyright (c) 2013-2021 Kura
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import os
import tempfile
import pytest
from blackhole.utils import Singleton
logging.getLogger("blackhole").addHandler(logging.NullHandler())
@pytest.fixture()
def cleandir():
newpath = tempfile.mkdtemp()
os.chdir(newpath)
@pytest.fixture()
def reset():
Singleton._instances = {}
def create_config(data):
cwd = os.getcwd()
path = os.path.join(cwd, "test.conf")
with open(path, "w") as cfile:
cfile.write("\n".join(data))
return path
def create_file(name, data=""):
cwd = os.getcwd()
path = os.path.join(cwd, name)
with open(path, "w") as ffile:
ffile.write(str(data))
return path
class Args(object):
def __init__(self, args=None):
if args is not None:
for arg in args:
setattr(self, arg[0], arg[1])
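# Illustrative sketch (not part of the original module): how a test could
# combine these helpers. The config line shown is a placeholder, not
# blackhole's actual option names.
#
#   def test_example(cleandir, reset):
#       path = create_config(('listen=:9000',))
#       args = Args((('config_file', path), ('debug', False)))
#       assert os.path.exists(path)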
| StarcoderdataPython |
73684 | <filename>manager/projects/models/snapshots.py
import os
from typing import Optional
import shortuuid
from django.db import models
from django.http import HttpRequest
from django.utils import timezone
from jobs.models import Job, JobMethod
from manager.storage import StorageUsageMixin, snapshots_storage
from projects.models.files import File, get_modified
from projects.models.projects import Project
from users.models import User
def generate_snapshot_id():
"""
Generate a unique snapshot id.
The is separate function to avoid new AlterField migrations
being created as happens when `default=shortuuid.uuid`.
"""
return shortuuid.uuid()
class Snapshot(StorageUsageMixin, models.Model):
"""
A project snapshot.
The `path` field is stored on the model to improve durability (if
the convention for creating paths changes, the existing paths will not change).
The `zip_name` field provides a way of providing a more useful filename
when downloading the archive (it is populated with the project name and snapshot number).
"""
id = models.CharField(
primary_key=True,
max_length=32,
editable=False,
default=generate_snapshot_id,
help_text="The unique id of the snapshot.",
)
project = models.ForeignKey(
Project,
on_delete=models.CASCADE,
related_name="snapshots",
null=False,
blank=False,
help_text="The project that the snapshot is for.",
)
number = models.IntegerField(
db_index=True, help_text="The number of the snapshot within the project.",
)
creator = models.ForeignKey(
User,
null=True,
on_delete=models.SET_NULL,
related_name="snapshots_created",
help_text="The user who created the snapshot.",
)
created = models.DateTimeField(
auto_now_add=True, help_text="The time the snapshot was created."
)
path = models.CharField(
max_length=1024,
null=True,
help_text="The path of the snapshot's directory within the snapshot storage volume.",
)
zip_name = models.CharField(
max_length=1024,
null=True,
help_text="The name of snapshot's Zip file (within the snapshot directory).",
)
container_image = models.TextField(
null=True,
blank=True,
help_text="The container image to use as the execution environment for this snapshot.",
)
job = models.ForeignKey(
Job,
on_delete=models.SET_NULL,
related_name="snapshot_created",
null=True,
blank=True,
help_text="The job that created the snapshot",
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["project", "number"], name="%(class)s_unique_project_number"
)
]
STORAGE = snapshots_storage()
def __str__(self):
"""
Get a string representation to use in select options etc.
"""
return "Snapshot #{0}".format(self.number)
def save(self, *args, **kwargs):
"""
Override to ensure certain fields are populated.
Ensures that:
- `number` is not null and monotonically increases
- `path` and `zip_name` are set
"""
if self.number is None:
result = Snapshot.objects.filter(project=self.project).aggregate(
models.Max("number")
)
self.number = (result["number__max"] or 0) + 1
if not self.path:
self.path = os.path.join(str(self.project.id), str(self.id))
if not self.zip_name:
self.zip_name = "{project}-v{number}.zip".format(
project=self.project.name, number=self.number
)
return super().save(*args, **kwargs)
@staticmethod
def create(project: Project, user: User) -> Job:
"""
Snapshot the project.
"""
snapshot = Snapshot.objects.create(project=project, creator=user)
subjobs = []
# Clean the project's working directory
subjobs.append(project.cleanup(user))
# Pull the project's sources
subjobs.append(project.pull(user))
# "Reflow" the project (regenerate derived files)
reflow = project.reflow(user)
if reflow:
subjobs.append(reflow)
# Pin the container image
subjobs.append(
project.pin(user, **Job.create_callback(snapshot, "pin_callback"))
)
# Create an index.html if a "main" file is defined
main = project.get_main()
if main:
options = {}
theme = project.get_theme()
if theme:
options["theme"] = theme
subjobs.append(main.convert(user, "index.html", options=options))
# This is currently required to populate field `zip_name` below
snapshot.save()
# Archive the working directory to the snapshot directory
subjobs.append(
project.archive(
user,
snapshot=snapshot.id,
path=f"{project.id}/{snapshot.id}/{snapshot.zip_name}",
**Job.create_callback(snapshot, "archive_callback"),
)
)
job = Job.objects.create(
method=JobMethod.series.name,
description="Snapshot project '{0}'".format(project.name),
project=project,
creator=user,
)
job.children.set(subjobs)
job.dispatch()
snapshot.job = job
snapshot.save()
return snapshot
def pin_callback(self, job: Job):
"""
Update the container image for this snapshot.
Called when the `pin` sub-job is complete.
"""
self.container_image = job.result
self.save()
def archive_callback(self, job: Job):
"""
Update the files associated with this snapshot.
Called when the `archive` sub-job is complete.
"""
result = job.result
if not result:
return
# Do a batch insert of files. This is much faster when there are a lot of file
# than inserting each file individually.
File.objects.bulk_create(
[
File(
project=self.project,
path=path,
current=False,
job=job,
snapshot=self,
updated=timezone.now(),
modified=get_modified(info),
size=info.get("size"),
mimetype=info.get("mimetype"),
encoding=info.get("encoding"),
fingerprint=info.get("fingerprint"),
)
for path, info in result.items()
]
)
def session(self, request: HttpRequest) -> Job:
"""
Create a session job for the snapshot.
"""
project = self.project
job = Job.objects.create(
project=project,
creator=request.user if request.user.is_authenticated else None,
snapshot=self,
method=JobMethod.session.name,
params=dict(
snapshot=self.id,
snapshot_url=self.file_url(self.zip_name),
container_image=self.container_image,
mem_request=project.session_memory,
mem_limit=project.session_memory,
timeout=project.session_timeout,
timelimit=project.session_timelimit,
),
description="Session for snapshot #{0}".format(self.number),
)
job.add_user(request)
return job
@property
def is_active(self) -> bool:
"""
Is the snapshot currently active.
"""
return self.job and self.job.is_active
@property
def has_index(self) -> bool:
"""
Determine if the snapshot has an index.html file, or not.
"""
try:
self.files.get(path="index.html")
return True
except File.DoesNotExist:
return False
def content_url(self, path: Optional[str] = None) -> str:
"""
Get the URL that this snapshot content is served from.
"""
return self.project.content_url(snapshot=self, path=path)
def file_location(self, file: str) -> str:
"""
Get the location of one of the snapshot's files relative to the root of the storage volume.
"""
return os.path.join(self.path, file)
| StarcoderdataPython |
4808593 | from random import randint
from sympy import Eq, solve, symbols
from homogeneous import *
def point_on_conic(conic_z_roots, x0, y0):
f, x, y = symbols('f, x, y')
return multiplied(x0, y0, conic_z_roots[randint(0, 1)].subs(x, x0).subs(y, y0))
def main():
a, b, c, d, e, f, x, y, z = symbols('a, b, c, d, e, f, x, y, z')
roots = solve(Eq(a*x**2 + 2*b*x*y + c*y**2 + 2*d*x*z + 2*e*y*z + f*z**2, 0), z)
print('z =', roots)
g, h, j, k, m, n, p, q, r, s, t, u = symbols('g, h, j, k, m, n, p, q, r, s, t, u')
A, B, C = point_on_conic(roots, g, h), point_on_conic(roots, j, k), point_on_conic(roots, m, n)
D, E, F = point_on_conic(roots, p, q), point_on_conic(roots, r, s), point_on_conic(roots, t, u)
print('A:', A)
print('B:', B)
print('C:', C)
print('D:', D)
print('E:', E)
print('F:', F)
AB, BC, CD, DE, EF, FA = cross(A, B), cross(B, C), cross(C, D), cross(D, E), cross(E, F), cross(F, A)
G, H, J = cross(AB, DE), cross(BC, EF), cross(CD, FA)
print('G:', G)
print('H:', H)
print('J:', J)
print('Are GHJ collinear/concurrent?', incidence(G, H, J) == 0)
if __name__ == '__main__':
main() | StarcoderdataPython |
3278377 | """Peak control."""
import logging
from datetime import timedelta
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from homeassistant.const import (
EVENT_HOMEASSISTANT_START,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
)
from homeassistant.core import callback
from homeassistant.helpers.event import (
async_track_state_change,
)
from homeassistant.util import dt as dt_util
DOMAIN = "peak_control"
STOPPED_DEVICES = "stopped_devices"
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
vol.Required("estimated_hourly_consumtion_sensor"): cv.entity_id,
vol.Required("max_hourly_consumption"): cv.entity_id,
vol.Required("devices"): cv.entity_ids,
}
},
extra=vol.ALLOW_EXTRA,
)
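# Example configuration.yaml entry matching the schema above (illustrative;
# the key spelling "consumtion" mirrors the schema as defined):
#
#   peak_control:
#     estimated_hourly_consumtion_sensor: sensor.estimated_hourly_consumption
#     max_hourly_consumption: input_number.max_hourly_consumption
#     devices:
#       - climate.living_room
#       - switch.water_heater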
def setup(hass, config):
"""Sets up the effect control."""
devices = config[DOMAIN]["devices"]
hass.data[STOPPED_DEVICES] = {}
last_update = dt_util.now() - timedelta(hours=1)
store = None
async def _async_initialize(_=None):
"""Get the cache data."""
async_track_state_change(
hass, config[DOMAIN]["estimated_hourly_consumtion_sensor"], _activate
)
async_track_state_change(
hass, config[DOMAIN]["max_hourly_consumption"], _activate
)
nonlocal store
store = hass.helpers.storage.Store(1, DOMAIN)
hass.data[STOPPED_DEVICES] = await store.async_load()
if hass.data[STOPPED_DEVICES] is None:
hass.data[STOPPED_DEVICES] = {}
print("STOPPED_DEVICES", hass.data[STOPPED_DEVICES])
@callback
def _data_to_save():
"""Return data of entity map to store in a file."""
return hass.data[STOPPED_DEVICES]
async def _activate(entity_id, old_state, new_state):
try:
est_hour_cons = float(new_state.state)
except ValueError:
return
try:
            max_cons = float(hass.states.get(config[DOMAIN]["max_hourly_consumption"]).state)
except ValueError:
return
except AttributeError:
_LOGGER.error("No state found for %s", config[DOMAIN]["max_hourly_consumption"], exc_info=True)
raise
now = dt_util.now()
if now.minute < 5 and not hass.data[STOPPED_DEVICES]:
return
nonlocal last_update
if (now - last_update) < timedelta(minutes=1):
return
_LOGGER.debug("%s %s %s", float(new_state.state), est_hour_cons, max_cons)
last_update = now
if now.minute > 45:
sec_left = 3600 - now.minute * 60 - now.second
factor = 0.99 - sec_left / (15 * 60) * 0.09
else:
factor = 0.90
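        # Threshold ramp (as computed above): outside the last quarter hour the
        # cutoff is 90% of the max; in the final 15 minutes it climbs linearly
        # from ~0.90 toward 0.99 as less time remains to compensate.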
# Restore
        _LOGGER.debug("est=%s factor=%s max=%s stopped=%s", est_hour_cons, factor, max_cons, hass.data[STOPPED_DEVICES])
if est_hour_cons < factor * max_cons and hass.data[STOPPED_DEVICES]:
for entity_id in devices[::-1]:
if entity_id not in hass.data[STOPPED_DEVICES]:
continue
state = hass.data[STOPPED_DEVICES].pop(entity_id)
_LOGGER.debug("Restore %s", entity_id)
if "climate" in entity_id:
_data = {
"entity_id": entity_id,
"temperature": int(float(state)),
}
await hass.services.async_call(
"climate", "set_temperature", _data, blocking=False
)
elif "switch" in entity_id or "input_boolean" in entity_id:
_data = {"entity_id": entity_id}
await hass.services.async_call(
"homeassistant", SERVICE_TURN_ON, _data, blocking=False
)
store.async_delay_save(_data_to_save, 10)
return
return
if est_hour_cons < factor * max_cons:
return
# Turn down
for entity_id in devices:
if entity_id in hass.data[STOPPED_DEVICES]:
continue
_LOGGER.debug("Turn down %s", entity_id)
if "climate" in entity_id:
_data = {"entity_id": entity_id, "temperature": 10}
hass.data[STOPPED_DEVICES][entity_id] = hass.states.get(entity_id).attributes.get("temperature")
await hass.services.async_call(
"climate", "set_temperature", _data, blocking=False
)
elif "switch" in entity_id or "input_boolean" in entity_id:
state = hass.states.get(entity_id).state
if state == STATE_OFF:
continue
_data = {"entity_id": entity_id}
hass.data[STOPPED_DEVICES][entity_id] = state
await hass.services.async_call(
"homeassistant", SERVICE_TURN_OFF, _data, blocking=False
)
store.async_delay_save(_data_to_save, 10)
return
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_initialize)
return True
| StarcoderdataPython |
1764882 | <filename>pystratis/api/interop/responsemodels/transactionresponsemodel.py
from pydantic import Field
from pystratis.api import Model
from pystratis.core import DestinationChain
from pystratis.core.types import Money, hexstr
class TransactionResponseModel(Model):
"""A pydantic model of a multisig transaction response."""
data: hexstr
"""The transaction hexstr."""
destination: DestinationChain
"""The destination chain."""
value: Money
"""The amount converted."""
executed: bool
"""True if the transaction has been processed."""
| StarcoderdataPython |
1763573 | <reponame>usgin/nrrc-repository
from django.http import HttpResponseNotAllowed, HttpResponseForbidden
from django.contrib.auth.decorators import login_required
from metadatadb.proxy import proxyRequest, can_edit, hide_unpublished
def oneFile(req, resourceId, fileName):
allowed = [ 'GET', 'DELETE' ]
if req.method not in allowed:
return HttpResponseNotAllowed(allowed)
def getFile(req, resourceId, fileName):
response = hide_unpublished(req.user, proxyRequest(path='/metadata/record/' + resourceId + '/', method='GET'))
if response.content == '':
return HttpResponseForbidden('You do not have permission to view this resource')
else:
kwargs = {
'path': '/metadata/record/' + resourceId + '/file/' + fileName,
'method': req.method
}
return proxyRequest(**kwargs)
@login_required # Registry tracking required?
def deleteFile(req, resourceId, fileName):
if not can_edit(req.user, resourceId):
return HttpResponseForbidden('You do not have permission to edit this resource')
kwargs = {
'path': '/metadata/record/' + resourceId + '/file/' + fileName,
'method': req.method
}
return proxyRequest(**kwargs)
if req.method == 'GET': return getFile(req, resourceId, fileName)
if req.method == 'DELETE': return deleteFile(req, resourceId, fileName) | StarcoderdataPython |
3340287 | import numpy as np
tinycycle = np.array([[0, 1, 0],
[1, 0, 1],
[0, 1, 0]], dtype=bool)
tinyline = np.array([0, 1, 1, 1, 0], dtype=bool)
skeleton0 = np.array([[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1]], dtype=bool)
skeleton1 = np.array([[0, 1, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 1, 1]], dtype=bool)
_zeros1 = np.zeros_like(skeleton1)
skeleton2 = np.column_stack((skeleton1, _zeros1))
skeleton2 = np.row_stack((skeleton2, skeleton2[:, ::-1]))
skeleton3d = np.array([[[1, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]],
[[0, 0, 0, 0, 0],
[0, 1, 0, 0, 1],
[0, 0, 0, 0, 1],
[0, 0, 1, 0, 1],
[1, 1, 0, 1, 0]],
[[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]],
[[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 1, 0, 0, 0]],
[[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 1, 0, 0, 1],
[1, 0, 1, 0, 1],
[0, 0, 0, 0, 1]]], dtype=bool)
topograph1d = np.array([3., 2., 3.])
skeleton4 = np.array([[1, 0, 0, 0, 0],
[0, 1, 1, 1, 1],
[0, 1, 0, 0, 0],
[0, 1, 0, 0, 0]], dtype=bool)
junction_first = np.array([[0, 1, 1, 1, 1],
[1, 1, 0, 0, 0],
[1, 0, 1, 0, 0],
[1, 0, 0, 1, 0],
[1, 0, 0, 0, 1]], dtype=bool)
| StarcoderdataPython |
67795 | <filename>All Stars Pattern/Hill Pattern.py
"""
Hill Pattern
"""
print("")
n = 5
# Method 1
print("Method 1")
for a in range(n):
for b in range(a, n):
print(" ", end="")
for c in range(a + 1):
print(" * ", end="")
for d in range(a):
print(" * ", end="")
print("")
print("\n*~*~*~*~*~*~*~*~*~*~*~*\n")
# Method 2
print("Method 2")
for a in range(n):
print(" " * (n - a), end="")
print(" * " * (a + 1), end="")
print(" * " * a)
print("")
"""
Author: <NAME>
Github Username: jxmked
""" | StarcoderdataPython |
1629438 | <gh_stars>10-100
#!/usr/bin/env python3
import json
import logging
import sys
import time
import click
import requests
from requests import Response
from bubuku.features.remote_exec import RemoteCommandExecutorCheck
from bubuku.utils import get_opt_broker_id, prepare_configs, is_cluster_healthy, get_max_bytes_in
from bubuku.zookeeper import load_exhibitor_proxy, BukuExhibitor, RebalanceThrottleManager
_LOG = logging.getLogger('bubuku.cli')
def _print_table(table: list, print_function=None):
if not print_function:
print_function = print
names = sorted(set([v for v in sum([list(k.keys()) for k in table], [])]))
lengths = {n: len(n) for n in names}
for d in table:
for k, v in d.items():
if lengths[k] < len(str(v)):
lengths[k] = len(str(v))
format_string = ' '.join(['{!s:' + str(lengths[n]) + 's}' for n in names])
print_function(format_string.format(*names))
for item in table:
print_function(format_string.format(*[item.get(n, '') for n in names]))
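# Illustrative example (not part of the CLI): columns are padded to the widest
# cell, so
#   _print_table([{'id': 1, 'name': 'alpha'}, {'id': 20, 'name': 'b'}])
# prints:
#   id name
#   1  alpha
#   20 b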
def __validate_not_empty(ctx, param, value):
if not value:
raise click.BadParameter('Parameter must have value')
return value
def __check_all_broker_ids_exist(broker_ids: list, zk: BukuExhibitor):
registered_brokers = zk.get_broker_ids()
unknown_brokers = [broker_id for broker_id in broker_ids if broker_id not in registered_brokers]
if len(unknown_brokers) == 1:
raise Exception('1 broker id is not valid: {}'.format(unknown_brokers[0]))
if len(unknown_brokers) > 1:
raise Exception('{} broker ids are not valid: {}'.format(len(unknown_brokers), ",".join(unknown_brokers)))
logging.basicConfig(level=getattr(logging, 'INFO', None))
@click.group()
def cli():
logo = """
____ __ __
/ __ )__ __/ /_ __ __/ /____ __
/ __ / / / / __ \/ / / / //_/ / / /
/ /_/ / /_/ / /_/ / /_/ / ,< / /_/ /
/_____/\__,_/_.___/\__,_/_/|_|\__,_/
"""
sys.stderr.write(logo + "\nStart, monitor and rebalance kafka cluster in AWS setup\n")
def _dump_replica_assignment_as_json(assignment: list) -> str:
json_element = {
"version": 1,
"partitions": [{'topic': v[0], 'partition': int(v[1])} for v in assignment]
}
return json.dumps(json_element, separators=(',', ':'))
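# Example (illustrative): _dump_replica_assignment_as_json([('topic-a', '0')])
# yields the compact payload written to /admin/preferred_replica_election:
#   {"version":1,"partitions":[{"topic":"topic-a","partition":0}]}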
@cli.command('preferred-replica-election',
help='Do preferred replica election, as command line tool from kafka have a number of limitations. '
'Only partitions, that are improperly allocated will be affected. In case if size of resulting json '
'is too big, it will be split into several parts, and they will be executed one after another.')
@click.option('--dry-run', is_flag=True, help="Do not apply the changes. Instead just prepare json file(s)")
@click.option('--max-json-size', type=click.INT, default=512000,
help="Maximum size of json data in bytes to write to zk", show_default=True)
def trigger_preferred_replica_election(dry_run: bool, max_json_size: int):
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
partitions_state = {}
for topic, partition, state in zookeeper.load_partition_states():
partitions_state[(topic, partition)] = state
wrong_assignment = []
for topic, partition, replica_list in zookeeper.load_partition_assignment():
key = (topic, partition)
if not replica_list:
_LOG.warning('Replica list is not defined for %s', key)
continue
if key not in partitions_state:
_LOG.warning("Topic partition %s is not found in active states list. will skip it", key)
continue
leader = partitions_state[key].get('leader')
if leader is None:
                _LOG.warning('Current leader is not defined for %s', key)
continue
expected_leader = replica_list[0]
if leader != expected_leader:
_LOG.info("Found incorrect assignment: %s, leader is %d, but should be the first one in %s",
key, leader, replica_list)
wrong_assignment.append(key)
if dry_run:
print(_dump_replica_assignment_as_json(wrong_assignment))
else:
while wrong_assignment:
items_to_take = len(wrong_assignment)
change_applied = False
while not change_applied:
payload = _dump_replica_assignment_as_json(wrong_assignment[:items_to_take])
payload_bytes = payload.encode('utf-8')
if len(payload_bytes) > max_json_size:
new_items_to_take = int(items_to_take / 2)
_LOG.info("Not fitting to %d bytes with %d items, will try %d items",
max_json_size, items_to_take, new_items_to_take)
items_to_take = new_items_to_take
if items_to_take <= 0:
_LOG.error("Incorrect configuration - even one key is not fitting to proposed size %d. "
"Stop playing and do the job!", max_json_size)
exit(1)
continue
_LOG.info("Applying %s", payload)
zookeeper.exhibitor.create('/admin/preferred_replica_election', payload_bytes)
while zookeeper.exhibitor.is_node_present('/admin/preferred_replica_election'):
_LOG.info("Waiting for node to disappear")
time.sleep(1)
change_applied = True
del wrong_assignment[:items_to_take]
_LOG.info("Done with assignment")
@cli.command('restart', help='Restart kafka instance')
@click.option('--broker', type=click.STRING,
help='Broker id to restart. By default current broker id is restarted')
def restart_broker(broker: str):
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
broker_id = get_opt_broker_id(broker, config, zookeeper, env_provider)
RemoteCommandExecutorCheck.register_restart(zookeeper, broker_id)
@cli.command('rolling-restart', help='Rolling restart of Kafka cluster')
@click.option('--image-tag', type=click.STRING, help='Docker image to run Kafka broker')
@click.option('--instance-type', type=click.STRING, required=True, help='AWS instance type to run Kafka broker on')
@click.option('--scalyr-key', type=click.STRING, help='Scalyr account key')
@click.option('--scalyr-region', type=click.STRING, help='Scalyr region to use')
@click.option('--kms-key-id', type=click.STRING, help='Kms key id to decrypt data with')
@click.option('--cool-down', type=click.INT, default=600, show_default=True,
help='Number of seconds to wait before passing the restart task to another broker, after cluster is '
'stable. Default value of 10 minutes is recommended for production in order to give consumers '
'enough time to stabilize in between restarts. This is particularly important for KStream '
'applications')
def rolling_restart_broker(image_tag: str, instance_type: str, scalyr_key: str, scalyr_region: str, kms_key_id: str,
cool_down: int):
if not is_cluster_healthy():
print('Cluster is not healthy, try again later :)')
return
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
broker_id = get_opt_broker_id(None, config, zookeeper, env_provider)
RemoteCommandExecutorCheck.register_rolling_restart(zookeeper, broker_id, image_tag, instance_type, scalyr_key,
scalyr_region, kms_key_id, cool_down)
@cli.command('rebalance', help='Run rebalance process on one of brokers. If rack-awareness is enabled, replicas will '
'only be move to other brokers in the same rack')
@click.option('--broker', type=click.STRING,
help="Broker instance on which to perform rebalance. By default, any free broker will start it")
@click.option('--empty_brokers', type=click.STRING,
help="Comma-separated list of brokers to empty. All partitions will be moved to other brokers")
@click.option('--exclude_topics', type=click.STRING, help="Comma-separated list of topics to exclude from rebalance")
@click.option('--bin-packing', is_flag=True, help="Use bin packing approach instead of one way processing")
@click.option('--parallelism', type=click.INT, default=1, show_default=True,
help="Amount of partitions to move in a single rebalance step")
@click.option('--throttle', type=click.INT, default=100000000, help="Upper bound on bandwidth (in bytes/sec) used for "
"rebalance")
@click.option('--remove-throttle', is_flag=True, help="Don't trigger rebalance but remove throttling "
"configuration from all the brokers and topics")
def rebalance_partitions(broker: str, empty_brokers: str, exclude_topics: str, parallelism: int, bin_packing: bool,
throttle: int, remove_throttle: bool):
if throttle and throttle < get_max_bytes_in():
print('Throttle value must be set above the max BytesIn for the replication to progress. '
'The current max BytesIn is {}'.format(get_max_bytes_in()))
exit(1)
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
if remove_throttle:
return RebalanceThrottleManager.remove_all_throttle_configurations(zookeeper)
empty_brokers_list = [] if empty_brokers is None else empty_brokers.split(',')
exclude_topics_list = [] if exclude_topics is None else exclude_topics.split(',')
__check_all_broker_ids_exist(empty_brokers_list, zookeeper)
broker_id = get_opt_broker_id(broker, config, zookeeper, env_provider) if broker else None
RemoteCommandExecutorCheck.register_rebalance(zookeeper, broker_id, empty_brokers_list,
exclude_topics_list, parallelism, bin_packing, throttle)
@cli.command('migrate', help='Replace one broker with another for all partitions')
@click.option('--from', 'from_', type=click.STRING, callback=__validate_not_empty,
help='List of brokers to migrate from (separated with ",")')
@click.option('--to', type=click.STRING, callback=__validate_not_empty,
help='List of brokers to migrate to (separated with ",")')
@click.option('--shrink', is_flag=True, default=False, show_default=True,
help='Whether or not to shrink replaced broker ids form partition assignment')
@click.option('--broker', type=click.STRING, help='Optional broker id to execute check on')
@click.option('--throttle', type=click.INT, default=100000000, help="Upper bound on bandwidth (in bytes/sec) used for "
"reassigning partitions")
@click.option('--parallelism', type=click.INT, show_default=True, default=1,
help="Amount of partitions to move in a single migration step")
@click.option('--remove-throttle', is_flag=True, help="Don't trigger rebalance but remove throttling "
"configuration from all the brokers and topics")
def migrate_broker(from_: str, to: str, shrink: bool, broker: str, throttle: int, parallelism: int,
remove_throttle: bool):
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
if remove_throttle:
return RebalanceThrottleManager.remove_all_throttle_configurations(zookeeper)
broker_id = get_opt_broker_id(broker, config, zookeeper, env_provider) if broker else None
RemoteCommandExecutorCheck.register_migration(zookeeper, from_.split(','), to.split(','), shrink, broker_id,
throttle, parallelism)
@cli.command('swap_fat_slim', help='Move one partition from fat broker to slim one')
@click.option('--threshold', type=click.INT, default=100000, show_default=True, help="Threshold in kb to run swap")
def swap_partitions(threshold: int):
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
RemoteCommandExecutorCheck.register_fatboy_slim(zookeeper, threshold_kb=threshold)
@cli.group(name='actions', help='Work with running actions')
def actions():
pass
@actions.command('list', help='List all the actions on broker(s)')
@click.option('--broker', type=click.STRING,
help='Broker id to list actions on. By default all brokers are enumerated')
def list_actions(broker: str):
table = []
config, env_provider = prepare_configs()
for broker_id, address in _list_broker_addresses(config, env_provider, broker):
try:
response = requests.get('http://{}:{}/api/controller/queue'.format(address, config.health_port))
except Exception as e:
print('Failed to query information on {} ({})'.format(broker_id, address))
_LOG.error('Failed to query information on {} ({})'.format(broker_id, address), exc_info=e)
continue
line = {
'_broker_id': broker_id,
'_broker_address': address,
}
if response.status_code != 200:
line['error'] = _extract_error(response)
table.append(line)
else:
changes = response.json()
if not changes:
line.update({
'type': None,
'description': None,
'running': None
})
table.append(line)
else:
for change in changes:
line_copy = dict(line)
line_copy.update(change)
table.append(line_copy)
if not table:
print('No brokers found')
else:
_print_table(table)
@actions.command('delete', help='Remove all actions of specified type on broker(s)')
@click.option('--action', type=click.STRING,
help='Action to delete')
@click.option('--broker', type=click.STRING,
help='Broker id to delete actions on. By default actions are deleted on all brokers')
def delete_actions(action: str, broker: str):
if not action:
print('No action specified. Please specify it')
config, env_provider = prepare_configs()
for broker_id, address in _list_broker_addresses(config, env_provider, broker):
try:
response = requests.delete(
'http://{}:{}/api/controller/queue/{}'.format(address, config.health_port, action))
except Exception as e:
print('Failed to query information on {} ({})'.format(broker_id, address))
_LOG.error('Failed to query information on {} ({})'.format(broker_id, address), exc_info=e)
continue
if response.status_code not in (200, 204):
print('Failed to delete action from {} ({}): {}'.format(broker, address, _extract_error(response)))
else:
print('Removed action {} from {} ({})'.format(action, broker_id, address))
def _extract_error(response: Response):
try:
return response.json()['message']
except Exception as e:
_LOG.error('Failed to parse response message', exc_info=e)
        return response.text  # Response.text is a property on requests responses, not a method
def _list_broker_addresses(config, env_provider, broker):
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
for broker_id in zookeeper.get_broker_ids():
if broker and broker != broker_id:
continue
yield broker_id, zookeeper.get_broker_address(broker_id)
@cli.command('stats', help='Display statistics about brokers')
def show_stats():
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
disk_stats = zookeeper.get_disk_stats()
table = []
for broker_id in zookeeper.get_broker_ids():
disk = disk_stats.get(broker_id, {}).get('disk') if disk_stats else {}
table.append({
'Broker Id': broker_id,
'Address': zookeeper.get_broker_address(broker_id),
'Free kb': disk.get('free_kb'),
'Used kb': disk.get('used_kb')
})
_print_table(table)
@cli.group(name='validate', help='Validates internal structures of kafka/zk')
def validate():
pass
@validate.command('replication', help='Returns all partitions whose ISR size differs from the replication factor or '
'have not registered broker ids')
@click.option('--factor', type=click.INT, default=3, show_default=True, help='Replication factor')
def validate_replication(factor: int):
config, env_provider = prepare_configs()
with load_exhibitor_proxy(env_provider.get_address_provider(), config.zk_prefix) as zookeeper:
brokers = {int(x) for x in zookeeper.get_broker_ids()}
table = []
for topic_name, partition, state in zookeeper.load_partition_states():
if len(state['isr']) != factor or not set(state['isr']).issubset(brokers):
table.append({
'Partition': partition,
'Topic': topic_name,
'State': state
})
if table:
_LOG.info('Invalid topics:')
_print_table(table)
else:
print('All replica lists look valid')
if __name__ == '__main__':
cli()
| StarcoderdataPython |
3505 | greeting = """
--------------- BEGIN SESSION ---------------
You have connected to a chat server. Welcome!
:: About
Chat is a small piece of server software
written by <NAME> to allow people to
    talk to each other from any computer as long
as it has an internet connection. (Even an
arduino!). Check out the project at:
https://github.com/Ewpratten/chat
:: Disclaimer
While chatting, keep in mind that, if there
is a rule or regulation about privacy, this
server does not follow it. All data is sent
to and from this server over a raw TCP socket
and data is temporarily stored in plaintext
while the server handles message broadcasting
Now that's out of the way so, happy chatting!
---------------------------------------------
""" | StarcoderdataPython |
1678286 | <filename>pyleecan/Methods/Geometry/PolarArc/discretize.py<gh_stars>10-100
# -*- coding: utf-8 -*-
def discretize(self, nb_point):
"""Returns the discretize version of the PolarArc
Parameters
----------
nb_point : int
number of points wanted per line
Returns
-------
point_list : list
List of complex coordinate of the points
"""
# check if the PolarArc is correct
self.check()
if not isinstance(nb_point, int):
raise NbPointPolarArcError("Discretize : the nb_point must be an integer")
if nb_point < 0:
raise NbPointPolarArcError("Discretize: nb_point must be >= 0")
# getting lines that delimit the PolarArc
lines = self.get_lines()
point_list = list()
for line in lines:
point_list.extend(line.discretize(nb_point))
        # The last point of each line duplicates the first point of the next
point_list.pop()
return point_list
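# Usage sketch (illustrative; assumes a constructed pyleecan PolarArc):
#
#   points = polar_arc.discretize(5)  # ~5 complex points per boundary line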
class NbPointPolarArcError(Exception):
""" """
pass
| StarcoderdataPython |
3256900 | <filename>tg_bot/longpool.py
from config import *
from astro_bot_vars import *
from db_functions import *
from sending_functions import *
import telebot
from keyboards import *
bot = telebot.TeleBot(main_token)
print('Long polling started...')
@bot.message_handler(func = lambda message: message.text.lower() in ["/subscribe", "subscribe", emojize(":bell: subscribe :bell:")])
def subscribe(message):
    send(message.from_user.id, 'Text', subscribe_keyboard)
    bot.register_next_step_handler(message, subscribe_get_degree)  # next step: subscribe_get_degree
def subscribe_get_degree(message):
    if message.text.lower() in [emojize("Fork")]:
        send(message.chat.id, 'Text', standart_keyboard)
    elif message.text.lower() in [emojize("Fork")]:  # NOTE: same placeholder condition as above, so this branch is unreachable as written
        send(message.chat.id, 'Text', standart_keyboard)
    else:
        insert_into_db(message.from_user.id, var)  # `var` presumably comes from a star import above
@bot.message_handler(func = lambda message: message.text.lower() in ["/unsubscribe", "unsubscribe", emojize(":bell_with_slash: unsubscribe :bell_with_slash:")])
def unsubscribe(message):
    send(message.from_user.id, 'Text', subscribe_keyboard)
    bot.register_next_step_handler(message, unsubscribe_get_degree)
def unsubscribe_get_degree(message):
    if message.text.lower() in [emojize("Fork")]:
        send(message.chat.id, 'Text', standart_keyboard)
    else:
        delete_from_db(message.from_user.id, var)
@bot.message_handler(func = lambda message: message.text.lower() in ["/bugreport", "help", emojize(":warning: bug report :warning:"), "bugreport", "bug-report"])
def bugreport(message):
    send(message.from_user.id, 'Please describe the problem', types.ReplyKeyboardRemove())
    bot.register_next_step_handler(message, get_bugreport)  # next step: get_bugreport
def get_bugreport(message):
    send(792302351, f'{emojize(":warning: Bug report :warning:")}\nBug report text: {message.text}\nSender: {message.from_user.id}', None)
    send(message.chat.id, 'Your bug report has been sent to the bot developer, who will look into the problem shortly. Thanks :)', standart_keyboard)
@bot.message_handler(func = lambda message: message.text.lower() in ["/admin"])
def admin_send(message):
    send(message.from_user.id, 'User ID: ...', types.ReplyKeyboardRemove())
    bot.register_next_step_handler(message, get_send_id)  # next step: get_send_id
def get_send_id(message):
    global id
    id = message.text
    send(message.from_user.id, 'Message text for the user: ...', None)
    bot.register_next_step_handler(message, get_send_msg)  # next step: get_send_msg
def get_send_msg(message):
    text = message.text
    send(id, text, standart_keyboard)
    send(message.from_user.id, 'Message sent', standart_keyboard)
@bot.message_handler(content_types=["text"])
def send_text(message):
    if message.text.lower() in ['start', 'begin', 'hello', '/start']:
        send(message.chat.id, hello, standart_keyboard)
    elif message.text.lower() in ['commands', '/help', emojize(":memo: commands :memo:")]:
        send(message.chat.id, commands, commands_keyboard)
    elif message.text.lower() in ['back to start', emojize(":counterclockwise_arrows_button: back to start :counterclockwise_arrows_button:")]:
        send(message.chat.id, 'Text', standart_keyboard)
    elif message.text.lower() in ['/stop', 'stop', emojize(":cross_mark: stop :cross_mark:")]:
delete_from_db_for_id(message.chat.id)
def job_longpool():
bot.infinity_polling()
| StarcoderdataPython |
59290 | import logging
from dotenv import load_dotenv
import sqlalchemy
import urllib
import pyodbc
import os
ROOT = os.path.dirname(os.path.abspath(__file__))  # __file__ (not __name__) resolves this module's directory
load_dotenv(os.path.join(ROOT, '.env'))
LOG = logging.getLogger('luigi-interface')
RPT_SERVER = os.environ['SERVER_A']
SCG_SERVER = os.environ['SERVER_B']
RPT_DB = os.environ['DB_A']
SCG_DB = os.environ['DB_B']
SCG_USR = os.environ['SERVER_B_USR']
SCG_PWD = os.environ['SERVER_B_PWD']
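# Usage sketch for the helper below (illustrative; assumes the env vars above
# are set and the 'SQL Server' ODBC driver is installed):
#
#   engine = Utils.get_db_connection(RPT_SERVER, RPT_DB)
#   with engine.connect() as conn:
#       rows = conn.execute(sqlalchemy.text('SELECT 1')).fetchall()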
class Utils:
def get_db_connection(server, database):
driver = '{SQL Server}'
server = '{%s}' % server
database = '{%s}' % database
conn_str = ('DRIVER={};SERVER={};DATABASE={}'
';Trusted_Connection=yes').format(
driver,
server,
database
)
s = urllib.parse.quote_plus(conn_str)
engine = sqlalchemy.create_engine(
'mssql+pyodbc:///?odbc_connect={}'.format(s))
return engine | StarcoderdataPython |
3273744 | <reponame>GustavoMendel/curso-python<filename>mundo-1/ex029.py
velocidade = int(input('Enter your speed: '))
if velocidade > 80:
    multa = (velocidade - 80) * 7
    print('STOP! You are over the permitted limit!')
    print('You are going {}Km/h, {}Km/h over the limit!'.format(velocidade, velocidade - 80))
    print('Because of that, you got a fine of R$ {:.2f}'.format(multa))
print('Have a nice day!')
| StarcoderdataPython |
30017 | # A python svg graph plotting library and creating interactive charts !
# PyPi: https://pypi.org/project/pygal/
# Docs: http://www.pygal.org/en/stable/index.html
# Chart types: http://www.pygal.org/en/stable/documentation/types/index.html
# Maps: http://www.pygal.org/en/stable/documentation/types/maps/pygal_maps_world.html
# pip install pygal
# pip install pygal_maps_world
import pygal
import seaborn as sns # just for datasets
from pygal.style import Style
# Loading Dataset
df = sns.load_dataset('tips')
# Simple Bar Chart
bar_chart = pygal.Bar()
bar_chart.add('Tip', df['tip'])
bar_chart.title = "Bla bla"
bar_chart.render_to_file('bar_chart.svg')
# bar_chart.render_in_browser()
# Customizing the graph and using a Style
custom_style = Style(colors=('#E80080', '#404040', '#9BC850'))
bar_chart = pygal.Bar(style=custom_style)
bar_chart.title = "Some text"
bar_chart.add("A", [0.95])
bar_chart.add("B", [1.25])
bar_chart.add("C", [1])
bar_chart.render_in_browser()
# Double Bar Chart
bar_chart.add('Tip', df['tip'][:10])
bar_chart.add('Total Bill', df['total_bill'][:10])  # the tips dataset column is 'total_bill'
bar_chart.render_to_file('bar_chart_2.svg')
# Horizontal bar diagram
line_chart = pygal.HorizontalBar()
line_chart.title = 'Browser usage in February 2012 (in %)'
line_chart.add('IE', 19.5)
line_chart.add('Firefox', 36.6)
line_chart.add('Chrome', 36.3)
line_chart.add('Safari', 4.5)
line_chart.add('Opera', 2.3)
line_chart.render()
# Line Chart
line_chart = pygal.Line()
line_chart.add('Total', df['total_bill'][:15])
line_chart.render_to_file('line.svg')
# Double Line Chart
line_chart.add('Total', df['total_bill'][:15])
line_chart.add('Tip', df['tip'][:15])
line_chart.render_to_file('line_2.svg')
# Box Plot
box_plot = pygal.Box()
box_plot.title = 'Tips'
box_plot.add('Tip', df['tip'])
box_plot.render_to_file('box1.svg')
# Funnel Chart
funnel_chart = pygal.Funnel()
funnel_chart.title = 'Total'
funnel_chart.add('Total', df['total_bill'][:15])
funnel_chart.add('Tip', df['tip'][:15])
funnel_chart.render_to_file('funnel.svg')
# Working with maps
worldmap_chart = pygal.maps.world.World()
worldmap_chart.title = 'Some countries'
worldmap_chart.add('F countries', ['fr', 'fi'])
worldmap_chart.add('M countries', ['ma', 'mc', 'md', 'me', 'mg',
'mk', 'ml', 'mm', 'mn', 'mo',
'mr', 'mt', 'mu', 'mv', 'mw',
'mx', 'my', 'mz'])
worldmap_chart.add('U countries', ['ua', 'ug', 'us', 'uy', 'uz'])
worldmap_chart.render()
# specify a value for a country
worldmap_chart = pygal.maps.world.World()
worldmap_chart.title = 'Minimum deaths by capital punishement (source: Amnesty International)'
worldmap_chart.add('In 2012', {
'af': 14,
'bd': 1,
'by': 3,
'cn': 1000,
'gm': 9,
'in': 1,
'ir': 314,
'iq': 129,
'jp': 7,
'kp': 6,
'pk': 1,
'ps': 6,
'sa': 79,
'so': 6,
'sd': 5,
'tw': 6,
'ae': 1,
'us': 43,
'ye': 28
})
worldmap_chart.render()
# access to continents
supra = pygal.maps.world.SupranationalWorld()
supra.add('Asia', [('asia', 1)])
supra.add('Europe', [('europe', 1)])
supra.add('Africa', [('africa', 1)])
supra.add('North america', [('north_america', 1)])
supra.add('South america', [('south_america', 1)])
supra.add('Oceania', [('oceania', 1)])
supra.add('Antartica', [('antartica', 1)])
supra.render()
| StarcoderdataPython |
103299 | <reponame>magical-eda/UT-AnLay
#
# @file util.py
# @author <NAME>
# @date July 2019
# @brief generate coordinate channel embeddings
#
import numpy as np
# Coordinate channel embeddings
def cordinate_img(img):
# img shape (dim, dim, chan)
new_img_x = np.zeros((img.shape))
new_img_y = np.zeros((img.shape))
for x,y,c in list(zip(*np.where(img>0))):
new_img_x[x,y,c] = x + 1
new_img_y[x,y,c] = y + 1
return new_img_x, new_img_y
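# Small usage sketch (illustrative): nonzero pixels are replaced by their
# 1-based row/column indices, channel by channel.
#
#   img = np.zeros((2, 2, 1)); img[1, 0, 0] = 1.0
#   xs, ys = cordinate_img(img)
#   # xs[1, 0, 0] == 2.0 (row + 1), ys[1, 0, 0] == 1.0 (col + 1)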
| StarcoderdataPython |
54073 | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from torchvision.models import resnet50, resnet101, resnext101_32x8d
import torch
import torch.nn as nn
import random
import numpy as np
class EncoderCNN(nn.Module):
def __init__(self, embed_size, dropout=0.5, image_model='resnet50', pretrained=True):
"""Load the pretrained model and replace top fc layer."""
super(EncoderCNN, self).__init__()
pretrained_net = globals()[image_model](pretrained=pretrained)
if 'resnet' in image_model or 'resnext' in image_model:
modules = list(pretrained_net.children())[:-2] # delete avg pooling and last fc layer
else:
raise ValueError('Invalid image_model {}'.format(image_model))
self.pretrained_net = nn.Sequential(*modules)
in_dim = pretrained_net.fc.in_features
if in_dim == embed_size:
self.last_module = None
else:
self.last_module = nn.Sequential(
nn.Conv2d(in_dim, embed_size, kernel_size=1, padding=0, bias=False),
nn.Dropout(dropout), nn.BatchNorm2d(embed_size, momentum=0.01), nn.ReLU())
def forward(self, images, keep_cnn_gradients=False):
"""Extract feature vectors from input images."""
if images is None:
return None
# Get encoder output
if keep_cnn_gradients:
raw_conv_feats = self.pretrained_net(images)
else:
with torch.no_grad():
raw_conv_feats = self.pretrained_net(images)
# Apply last_module to change the number of channels in the encoder output
if self.last_module is not None:
features = self.last_module(raw_conv_feats)
else:
features = raw_conv_feats
# Reshape features
features = features.view(features.size(0), features.size(1), -1)
return features
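    # Quick shape check (illustrative; pretrained=False avoids a weight download):
    #
    #   enc = EncoderCNN(embed_size=512, image_model='resnet50', pretrained=False)
    #   feats = enc(torch.randn(2, 3, 224, 224))
    #   assert feats.shape == (2, 512, 49)  # 2048->512 1x1 conv, 7x7 grid flattened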
| StarcoderdataPython |
89252 | #!/usr/bin/python3
import os
import os.path
import sys
from bottle import abort, redirect, request, route, run, static_file, template
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
STATIC_DIR = '{}/static'.format(SCRIPT_DIR)
NAVIGATION_SIZE = 7
PREFETCH_SIZE = 5
ROW_COUNT = 5
LARGE_GALLERY_SIZE = 100
def fs_path(path, *args):
return os.path.join(sys.argv[1], path.format(*args))
@route('/static/<path:path>')
def serve_static(path):
return static_file(path, root=STATIC_DIR)
@route('/img/<path:path>')
def serve_image(path):
response = static_file(path, root=sys.argv[1])
response.headers.pop('Content-Length')
return response
@route('/')
def index():
return template('index.html.tpl')
def window(index, width, items):
if len(items) <= width:
return items
left_width = int(width / 2.0) + 1
right_width = width - left_width
start = max(0, index - left_width)
end = min(len(items), index + right_width)
actual_width = end - start
if (index - start) < left_width:
end += width - actual_width
if (end - index) < right_width:
start -= width - actual_width
return items[start:end]
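# Worked example: with NAVIGATION_SIZE == 7,
#   window(5, 7, list(range(1, 11))) -> [2, 3, 4, 5, 6, 7, 8]
# i.e. a 7-wide slice roughly centred on the current page.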
def chunks(items, count):
for i in range(0, len(items), count):
yield items[i:i + count]
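# e.g. list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]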
def make_nav(page, count):
return {
'first': 1,
'previous': page - 1 if page > 1 else None,
'next': page + 1 if page < count else None,
'last': count,
'window': window(page, NAVIGATION_SIZE, list(range(1, count + 1))),
'current': page
}
def fs_content(path, *args):
with open(fs_path(path, *args), 'r') as f:
return f.read()
def fs_peek(path, *args):
with open(fs_path(path, *args), 'rb') as f:
f.read(1)
def results_metadata(base):
per_page = int(fs_content('{}/per_page', base))
galleries = []
for i in range(per_page):
try:
ID = fs_content('{}/{}/id', base, i)
except FileNotFoundError:
break
title = (fs_content('{}/{}/title/pretty', base, i) or
fs_content('{}/{}/title/english', base, i))
files = os.listdir(fs_path('{}/{}', base, i))
thumb = ['{}/{}/{}'.format(base, i, f) for f in files
if f.startswith('thumb.')][0]
tags = {ID: fs_content('{}/{}/tags/{}', base, i, ID)
for ID in os.listdir(fs_path('{}/{}/tags', base, i))}
grouped_tags = group_tags(tags)
language = 'language--{}'.format(guess_language(grouped_tags))
num_pages = int(fs_content('{}/{}/num_pages', base, i))
is_large = num_pages >= LARGE_GALLERY_SIZE
classes = [language, 'gallery--large'] if is_large else [language]
galleries.append({'id': ID, 'title': title, 'thumb': thumb,
'classes': ' '.join(classes)})
num_pages = int(fs_content('{}/num_pages', base))
return {'num_pages': num_pages, 'galleries': galleries}
@route('/all')
def frontpage():
title = 'Frontpage'
page = int(request.query.page or '1')
metadata = results_metadata('all/{}'.format(page))
num_pages = metadata['num_pages']
galleries = chunks(metadata['galleries'], ROW_COUNT)
nav = make_nav(page, num_pages)
base = '/all?page='
return template('results.html.tpl', title=title, base=base,
galleries=galleries, nav=nav)
@route('/search')
def search():
query = request.query.query
if not query:
redirect('/all')
title = 'Search: {}'.format(query)
page = int(request.query.page or '1')
metadata = results_metadata('search/{}/{}'.format(query, page))
num_pages = metadata['num_pages']
galleries = chunks(metadata['galleries'], ROW_COUNT)
nav = make_nav(page, num_pages)
base = '/search?query={}&page='.format(query)
return template('results.html.tpl', title=title, base=base,
galleries=galleries, nav=nav)
@route('/tagged/<tag_id:int>')
def tagged(tag_id):
title = 'Tag: {}'.format(tag_id)
page = int(request.query.page or '1')
metadata = results_metadata('tagged/{}/{}'.format(tag_id, page))
num_pages = metadata['num_pages']
galleries = chunks(metadata['galleries'], ROW_COUNT)
nav = make_nav(page, num_pages)
base = '/tagged/{}?page='.format(tag_id)
return template('results.html.tpl', title=title, base=base,
galleries=galleries, nav=nav)
def group_tags(tags):
result = {}
for ID, tag in tags.items():
key, value = tag.split(':')
if key not in result:
result[key] = {}
result[key][value] = ID
return result
def guess_language(grouped_tags):
if 'language' not in grouped_tags:
return 'unknown'
candidates = grouped_tags['language']
if 'translated' in candidates:
candidates.pop('translated')
languages = list(candidates.keys())
if 'english' in languages:
return 'english'
elif 'japanese' in languages:
return 'japanese'
elif 'chinese' in languages:
return 'chinese'
elif len(languages) > 0:
return languages[0]
else:
return 'unknown'
def gallery_metadata(gallery_id):
base = 'gallery/{}'.format(gallery_id)
files = os.listdir(fs_path(base))
tags = {ID: fs_content('{}/tags/{}', base, ID)
for ID in os.listdir(fs_path('{}/tags', base))}
return {
'id': gallery_id,
'title': {
'pretty': fs_content('{}/title/pretty', base),
'native': fs_content('{}/title/native', base),
'english': fs_content('{}/title/english', base)
},
'cover': ['{}/{}'.format(base, f) for f in files
if f.startswith('cover.')][0],
'tags': group_tags(tags),
'filenames': fs_content('{}/filenames', base).split('\n'),
'num_pages': int(fs_content('{}/num_pages', base)),
'uploaded': int(fs_content('{}/uploaded', base))
}
def related_metadata(gallery_id):
base = 'related/{}'.format(gallery_id)
directories = os.listdir(fs_path(base))
galleries = []
for directory in directories:
base = 'related/{}/{}'.format(gallery_id, directory)
files = os.listdir(fs_path(base))
ID = fs_content('{}/id', base, directory)
tags = {ID: fs_content('{}/tags/{}', base, ID)
for ID in os.listdir(fs_path('{}/tags', base))}
grouped_tags = group_tags(tags)
language = 'language--{}'.format(guess_language(grouped_tags))
num_pages = int(fs_content('{}/num_pages', base))
is_large = num_pages >= LARGE_GALLERY_SIZE
classes = [language, 'gallery--large'] if is_large else [language]
gallery = {
'id': ID,
'title': fs_content('{}/title/pretty', base),
'cover': ['gallery/{}/{}'.format(ID, f) for f in files
if f.startswith('cover.')][0],
'classes': ' '.join(classes)
}
galleries.append(gallery)
return galleries
@route('/gallery/<gallery_id:int>')
def gallery(gallery_id):
metadata = gallery_metadata(gallery_id)
title = metadata['title']['pretty'] or metadata['title']['english']
filenames = ['/img/gallery/{}/thumbs/{}'.format(gallery_id, f)
for f in metadata['filenames']]
thumbs = chunks(list(enumerate(filenames)), ROW_COUNT)
related = related_metadata(gallery_id)
return template('gallery.html.tpl', title=title,
thumbs=thumbs, metadata=metadata, related=related)
@route('/gallery/<gallery_id:int>/<page:int>')
def gallery_page(gallery_id, page):
index = page - 1
base = 'gallery/{}'.format(gallery_id)
title = fs_content('{}/title/pretty', base)
num_pages = int(fs_content('{}/num_pages', base))
if page < 1 or page > num_pages:
abort(404)
nav = make_nav(page, num_pages)
nav.pop('window')
filenames = fs_content('{}/filenames', base).split('\n')
page_url = '/img/gallery/{}/pages/{}'.format(gallery_id, filenames[index])
base = '/gallery/{}/'.format(gallery_id)
gallery_url = '/gallery/{}'.format(gallery_id)
return template('gallery_page.html.tpl', title=title, base=base,
nav=nav, page_url=page_url, gallery_url=gallery_url)
@route('/gallery/<gallery_id:int>/<page:int>/prefetch')
def gallery_page_prefetch(gallery_id, page):
base = 'gallery/{}'.format(gallery_id)
filenames = fs_content('{}/filenames', base).split('\n')
pages = window(page, PREFETCH_SIZE, filenames)
center = PREFETCH_SIZE // 2
pages.pop(center)
for page in pages:
fs_peek('{}/pages/{}', base, page)
return {'status': 0}
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Usage: {} <mountpoint>'.format(sys.argv[0]))
sys.exit(1)
try:
port = int(os.getenv('PORT') or '8080')
run(port=port)
except FileNotFoundError:
abort(404)
# TODO:
# CSS:
# - [ ] make it dark
# - [X] highlight large galleries
# JS:
# - [X] load thumbs as they scroll into view
# - [X] preload gallery images relative to the current one
# - [X] bind keys in gallery viewer
| StarcoderdataPython |