id | text | dataset_id
---|---|---
8106318
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RDeoptimr(RPackage):
"""Differential Evolution Optimization in Pure R.
Differential Evolution (DE) stochastic algorithms for global optimization
of problems with and without constraints. The aim is to curate a collection
of its state-of-the-art variants that (1) do not sacrifice simplicity of
design, (2) are essentially tuning-free, and (3) can be efficiently
implemented directly in the R language. Currently, it only provides an
implementation of the 'jDE' algorithm by Brest et al. (2006)
<doi:10.1109/TEVC.2006.872133>."""
cran = "DEoptimR"
version('1.0-10', sha256='774f7ba0ac9c73aaab4567024b98afdb58098905726e72bceeeb9e380e782ad5')
version('1.0-8', sha256='846911c1b2561a9fae73a8c60a21a5680963ebb0050af3c1f1147ae9a121e5ef')
|
StarcoderdataPython
|
8023415
|
import claripy
import logging
l = logging.getLogger('fidget.techniques')
class FidgetTechnique(object):
project = None
def constrain_variables(self, func, solver, stack):
raise NotImplementedError()
def set_project(self, project):
self.project = project
class FidgetDefaultTechnique(FidgetTechnique):
def __init__(self, largemode=False, safe=False):
self.largemode = largemode
self.safe = safe
def constrain_variables(self, func, solver, stack):
# Find the lowest sp-access that isn't an argument to the next function
# By starting at accesses to [esp] and stepping up a word at a time
if self.project.arch.name == 'X86':
last_addr = -stack.conc_size
for var in stack:
if var.conc_addr != last_addr:
break
last_addr += self.project.arch.bytes
#var.special_top = True
#l.debug("Marked TOP addr %d as special", var.conc_addr)
#last_addr = None
#for var in reversed(stack):
# if last_addr is None:
# if var.conc_addr < 0:
# break # why would this happen
# last_addr = var.conc_addr
# if var.conc_addr != last_addr:
# break
# last_addr -= self.project.arch.bytes
# var.special_bottom = True
# l.debug("Marked BOTTOM addr %d as special", var.conc_addr)
self.collapse(stack)
self.mark_sizes(stack)
stack.alloc_op.apply_constraints(solver)
solver.add(stack.alloc_op.symval == -stack.sym_size)
for op in stack.dealloc_ops:
op.apply_constraints(solver)
solver.add(op.symval == 0)
if self.largemode and not self.safe:
solver.add(stack.sym_size <= stack.conc_size + (1024 * stack.num_vars + 2048))
stack.unsafe_constraints.append(stack.sym_size >= stack.conc_size + (1024 * stack.num_vars))
stack.unsafe_constraints.append(stack.sym_size >= 0x78)
stack.unsafe_constraints.append(stack.sym_size >= 0xF8)
elif self.largemode and self.safe:
solver.add(stack.sym_size <= stack.conc_size + 1024*16)
stack.unsafe_constraints.append(stack.sym_size >= stack.conc_size + 1024*8)
stack.unsafe_constraints.append(stack.sym_size >= 0x78)
stack.unsafe_constraints.append(stack.sym_size >= 0xF0)
elif not self.largemode and self.safe:
solver.add(stack.sym_size <= stack.conc_size + 256)
elif not self.largemode and not self.safe:
solver.add(stack.sym_size <= stack.conc_size + (16 * stack.num_vars + 32))
self.sym_link(stack, solver)
def sym_link(self, stack, solver):
solver.add(stack.sym_size >= stack.conc_size)
solver.add(stack.sym_size % (stack.arch.bytes) == 0)
stack.unsafe_constraints.append(stack.sym_size > stack.conc_size)
first = stack.variables[stack.addr_list[0]]
solver.add(first.sym_addr >= (first.conc_addr + stack.conc_size) - stack.sym_size)
var_list = list(stack)
for var, next_var in zip(var_list, var_list[1:] + [None]):
var.sym_link(solver, stack)
stack.unsafe_constraints.extend(var.unsafe_constraints)
if var.conc_addr % (stack.arch.bytes) == 0:
solver.add(var.sym_addr % (stack.arch.bytes) == 0)
if var.special:
# We're one of the args that needs to stay fixed relative somewhere
pass
elif next_var is None or next_var.special_bottom:
# If we're the last free-floating variable, set a solid bottom
solver.add(var.sym_addr <= var.conc_addr)
if var.size is not None:
solver.add(claripy.SLE(var.sym_addr, var.sym_addr + var.size))
solver.add(var.sym_addr + var.size <= next_var.sym_addr)
stack.unsafe_constraints.append(var.sym_addr + var.size < next_var.sym_addr)
else:
# Otherwise we're one of the free-floating variables
solver.add(var.sym_addr <= var.sym_addr + var.size)
stack.unsafe_constraints.append(var.sym_addr + var.size < next_var.sym_addr)
if self.safe:
solver.add(var.sym_addr + var.size == next_var.sym_addr)
else:
solver.add(var.sym_addr + var.size <= next_var.sym_addr)
@staticmethod
def collapse(stack):
i = 0 # old fashioned loop because we're removing items
while i < len(stack.addr_list) - 1:
i += 1
var = stack.variables[stack.addr_list[i]]
if var.special:
continue
#if var.conc_addr % (stack.arch.bytes) != 0:
# stack.merge_up(i)
# i -= 1
if var.access_flags & 8:
stack.merge_up(i)
i -= 1
elif var.access_flags & 4:
pass
elif var.access_flags != 3:
stack.merge_up(i)
i -= 1
@staticmethod
def mark_sizes(stack):
for i, addr in enumerate(stack.addr_list[:-1]):
var = stack.variables[addr]
next_var = stack.variables[stack.addr_list[i+1]]
var.size = next_var.conc_addr - var.conc_addr
var = stack.variables[stack.addr_list[-1]]
var.size = None
class FidgetManualTechnique(FidgetTechnique):
def __init__(self, funcdata):
"""
:param funcdata: A dict mapping functions to dicts {addr: (size, fix, align)} of var info
- addr is an offset relative to the stack pointer at function entry
- size is the size in bytes
- fix is a string "TOP", "BOTTOM" or None, describing if a var needs
to be fixed relative to the top (low addresses) or bottom (high addrs)
of the stack frame
- align is the alignment required for the variable
"""
self.funcdata = funcdata
self.offsets = None
self.bounds_marked = None
self.stack = None
self.solver = None
def constrain_variables(self, func, solver, stack):
self.offsets = self.funcdata[func.addr]
self.bounds_marked = set()
self.stack = stack
self.solver = solver
# do some sanity checking first
top = min(self.offsets)
for addr in stack.addr_list:
if addr < top:
raise Exception("Provided vars miss an access (off the top!)")
base_addr = addr
while base_addr not in self.offsets:
base_addr -= 1
this_offset = addr - base_addr
if this_offset >= self.offsets[base_addr][0]:
raise Exception("Provided vars miss an access (between the cracks!)")
i = 0
while i < len(stack.addr_list):
addr = stack.addr_list[i]
if addr in self.offsets:
if i != 0 and self.offsets[stack.addr_list[i-1]][0] + stack.addr_list[i-1] > addr:
raise Exception("Provided vars have an overlap!")
i += 1
continue
stack.merge_up(i)
# standard stuff
stack.alloc_op.apply_constraints(solver)
solver.add(stack.alloc_op.symval == -stack.sym_size)
for op in stack.dealloc_ops:
op.apply_constraints(solver)
solver.add(op.symval == 0)
solver.add(stack.sym_size % stack.arch.bytes == 0)
solver.add(claripy.SGE(stack.sym_size, stack.conc_size))
stack.unsafe_constraints.append(claripy.SGT(stack.sym_size, stack.conc_size))
stack.unsafe_constraints.append(claripy.SGE(stack.sym_size, stack.conc_size * 2))
stack.unsafe_constraints.append(claripy.SLT(stack.sym_size, stack.conc_size * 3))
# loop through variables, add the important constraints!
i = 0
while i < len(stack.addr_list):
addr = stack.addr_list[i]
var = stack.variables[addr]
var.size = self.offsets[addr][0]
fix = self.offsets[addr][1]
if fix == 'TOP':
var.special_top = True
elif fix == 'BOTTOM':
var.special_bottom = True
align = self.offsets[addr][2]
if align != 1:
solver.add(var.sym_addr % align == 0)
            var.sym_link(solver, stack) # this hooks up the constraints to actual immediates
# also the top/bottom fixing happens in there
if i != 0:
prev_var = stack.variables[stack.addr_list[i-1]]
self.mark_boundaries(prev_var, var)
if i != len(stack.addr_list) - 1:
next_var = stack.variables[stack.addr_list[i+1]]
self.mark_boundaries(var, next_var)
# ew. ew ew ew ew ew ew!!!
diff = next_var.conc_addr - var.conc_addr
solver.add(claripy.SLT(var.sym_addr, var.sym_addr + diff))
if i == 0:
solver.add(claripy.SLE(-stack.sym_size, var.sym_addr))
i += 1
def mark_boundaries(self, var_1, var_2):
key = (var_1.conc_addr, var_2.conc_addr)
if key in self.bounds_marked:
return
self.bounds_marked.add(key)
diff = var_2.conc_addr - var_1.conc_addr
self.solver.add(claripy.SLE(var_1.sym_addr + diff, var_2.sym_addr))
self.stack.unsafe_constraints.append(claripy.SLT(var_1.sym_addr + diff, var_2.sym_addr))
|
StarcoderdataPython
|
6546560
|
from p1.foo import foo
|
StarcoderdataPython
|
11261475
|
# test/1.2.0/doi-identifiers/_I/test_1_2_0_doi_identifiers__I_endnote_object.py
import os
import pytest
from test.contracts.endnote_object import Contract
from cffconvert.behavior_1_2_x.endnote_object import EndnoteObject
from cffconvert import Citation
@pytest.fixture(scope="module")
def endnote_object():
fixture = os.path.join(os.path.dirname(__file__), "CITATION.cff")
with open(fixture, "rt", encoding="utf-8") as f:
cffstr = f.read()
citation = Citation(cffstr)
return EndnoteObject(citation.cffobj, initialize_empty=True)
class TestEndnoteObject(Contract):
def test_as_string(self, endnote_object):
actual_endnote = endnote_object.add_all().as_string()
fixture = os.path.join(os.path.dirname(__file__), "endnote.enw")
with open(fixture, "rt", encoding="utf-8") as f:
expected_endnote = f.read()
assert actual_endnote == expected_endnote
def test_author(self, endnote_object):
assert endnote_object.add_author().author == '%A Test author\n'
def test_check_cffobj(self, endnote_object):
endnote_object.check_cffobj()
# doesn't need an assert
def test_doi(self, endnote_object):
assert endnote_object.add_doi().doi == '%R 10.0000/from-identifiers\n'
def test_keyword(self, endnote_object):
assert endnote_object.add_keyword().keyword is None
def test_name(self, endnote_object):
assert endnote_object.add_name().name == '%T Test title\n'
def test_url(self, endnote_object):
assert endnote_object.add_url().url is None
def test_year(self, endnote_object):
assert endnote_object.add_year().year is None
|
StarcoderdataPython
|
8016661
|
from bs4 import BeautifulSoup
from urllib.request import urlopen
import pprint,webbrowser
gaana_com_url = urlopen('https://gaana.com/')
soup = BeautifulSoup(gaana_com_url, 'html.parser')
h2_link = soup.find('h2', {'id': 'themechange', 'class': 'themechange'}).find('a').get('href')
top_charts = 'https://gaana.com' + h2_link
link = urlopen(top_charts)
next_soup = BeautifulSoup(link, 'html.parser')
gaana_ul = next_soup.find('ul', class_='content-container artworkload clearfix a-list')
gaana_li = gaana_ul.findAll('div', class_='card_layout_data')
def scrape_songs_names(gaana):
    all_songs_list = []
    number = 0
    for title in gaana:
        print(number, title.text.strip())
        number += 1
        all_songs_list.append('https://gaana.com' + title.find('a').get('href'))
    return all_songs_list
All_type_songs = scrape_songs_names(gaana_li)
# pprint.pprint(All_type_songs)
def scrape_songs_link(gaana):
    user_choice = int(input('######### What do you want to listen to? ########## >'))
    get_url = urlopen(gaana[user_choice])
    top_songs = BeautifulSoup(get_url, 'html.parser')
    songs_total_list = top_songs.find('div', class_='s_c')
    gaana_names = songs_total_list.findAll('div', class_='playlist_thumb_det')
    count = 0
    song_link = []
    for songs_name in gaana_names:
        song_title = songs_name.find('a').get_text()
        song_link.append(songs_name.find('a').get('href'))
        print(count, song_title)
        count += 1
    return song_link
All_songs_links = scrape_songs_link(All_type_songs)
# print(All_songs_links)
def play_the_song(gaana):
    second_choice = int(input('~~~~~~~~~~~~~ Which song do you want to listen to? ~~~~~~~~~~~~~ >'))
    played = webbrowser.open(gaana[second_choice])
    print(played)
play_the_song(All_songs_links)
|
StarcoderdataPython
|
6462007
|
# Copyright (c) 2013 Shotgun Software Inc.
#
# CONFIDENTIAL AND PROPRIETARY
#
# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit
# Source Code License included in this distribution package. See LICENSE.
# By accessing, using, copying or modifying this work you indicate your
# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights
# not expressly granted therein are reserved by Shotgun Software Inc.
"""
A 3ds Max (2015+) engine for Toolkit that uses MaxPlus.
"""
import os
import time
import math
import sgtk
import MaxPlus
class MaxEngine(sgtk.platform.Engine):
"""
The main Toolkit engine for 3ds Max
"""
@property
def host_info(self):
"""
:returns: A dictionary with information about the application hosting this engine.
The returned dictionary is of the following form on success:
Note that the version field refers to the release year.
{
"name": "3ds Max",
"version": "2018",
}
        The returned dictionary is of the following form on an error preventing
the version identification.
{
"name": "3ds Max",
"version: "unknown"
}
References:
http://docs.autodesk.com/3DSMAX/16/ENU/3ds-Max-Python-API-Documentation/index.html
"""
host_info = {"name": "3ds Max", "version": "unknown"}
try:
host_info["version"] = str(self._max_version_to_year(self._get_max_version()))
except:
# Fallback to initialized values above
pass
return host_info
def __init__(self, *args, **kwargs):
"""
Engine Constructor
"""
# Add instance variables before calling our base class
# __init__() because the initialization may need those
# variables.
self._parent_to_max = True
self._on_menus_loaded_handler = None
self._dock_widgets = []
# proceed about your business
sgtk.platform.Engine.__init__(self, *args, **kwargs)
##########################################################################################
# properties
@property
def context_change_allowed(self):
"""
Tells the core API that context changes are allowed by this engine.
"""
return True
##########################################################################################
# init
def pre_app_init(self):
"""
Called before all apps have initialized
"""
from sgtk.platform.qt import QtCore
self.log_debug("%s: Initializing..." % self)
if self._get_max_version() > MaxEngine.MAXIMUM_SUPPORTED_VERSION:
# Untested max version
highest_supported_version = self._max_version_to_year(MaxEngine.MAXIMUM_SUPPORTED_VERSION)
msg = ("Shotgun Pipeline Toolkit!\n\n"
"The Shotgun Pipeline Toolkit has not yet been fully tested with 3ds Max versions greater than %s. "
"You can continue to use the Toolkit but you may experience bugs or instability. "
"Please report any issues you see to <EMAIL>" % highest_supported_version)
# Display warning dialog
max_year = self._max_version_to_year(self._get_max_version())
max_next_year = highest_supported_version + 1
if max_year >= self.get_setting("compatibility_dialog_min_version", max_next_year):
MaxPlus.Core.EvalMAXScript('messagebox "Warning - ' + msg + '" title: "Shotgun Warning"')
# and log the warning
self.log_warning(msg)
elif not self._is_at_least_max_2016():
# Unsupported max version
msg = ("Shotgun Pipeline Toolkit!\n\n"
"The Shotgun Pipeline Toolkit does not work with 3ds max versions prior to 2016.")
# Display warning dialog
MaxPlus.Core.EvalMAXScript('messagebox "Warning - ' + msg + '" title: "Shotgun Warning"')
# and log the warning
self.log_warning(msg)
self._safe_dialog = []
# Add image formats since max doesn't add the correct paths by default and jpeg won't be readable
maxpath = QtCore.QCoreApplication.applicationDirPath()
pluginsPath = os.path.join(maxpath, "plugins")
QtCore.QCoreApplication.addLibraryPath(pluginsPath)
# Window focus objects are used to enable proper keyboard handling by the window instead of 3dsMax's accelerators
engine = self
class DialogEvents(QtCore.QObject):
def eventFilter(self, obj, event):
if event.type() == QtCore.QEvent.WindowActivate:
MaxPlus.CUI.DisableAccelerators()
elif event.type() == QtCore.QEvent.WindowDeactivate:
MaxPlus.CUI.EnableAccelerators()
# Remove from tracked dialogs
if event.type() == QtCore.QEvent.Close:
if obj in engine._safe_dialog:
engine._safe_dialog.remove(obj)
return False
self.dialogEvents = DialogEvents()
# set up a qt style sheet
# note! - try to be smart about this and only run
# the style setup once per session - it looks like
# 3dsmax slows down if this is executed every engine restart.
qt_app_obj = sgtk.platform.qt.QtCore.QCoreApplication.instance()
curr_stylesheet = qt_app_obj.styleSheet()
if "toolkit 3dsmax style extension" not in curr_stylesheet:
# If we're in pre-2017 Max then we need to handle our own styling. Otherwise
# we just inherit from Max.
if self._max_version_to_year(self._get_max_version()) < 2017:
self._initialize_dark_look_and_feel()
curr_stylesheet += "\n\n /* toolkit 3dsmax style extension */ \n\n"
curr_stylesheet += "\n\n QDialog#TankDialog > QWidget { background-color: #343434; }\n\n"
qt_app_obj.setStyleSheet(curr_stylesheet)
# This needs to be present for apps as it will be used in show_dialog when perforce asks for login
# info very early on.
self.tk_3dsmax = self.import_module("tk_3dsmaxplus")
# The "qss_watcher" setting causes us to monitor the engine's
# style.qss file and re-apply it on the fly when it changes
# on disk. This is very useful for development work,
if self.get_setting("qss_watcher", False):
self._qss_watcher = QtCore.QFileSystemWatcher(
[os.path.join(self.disk_location, sgtk.platform.constants.BUNDLE_STYLESHEET_FILE)],
)
self._qss_watcher.fileChanged.connect(self.reload_qss)
def _add_shotgun_menu(self):
"""
Add Shotgun menu to the main menu bar.
"""
self.log_debug("Adding the shotgun menu to the main menu bar.")
self._menu_generator.create_menu()
self.tk_3dsmax.MaxScript.enable_menu()
def _remove_shotgun_menu(self):
"""
Remove Shotgun menu from the main menu bar.
"""
self.log_debug("Removing the shotgun menu from the main menu bar.")
self._menu_generator.destroy_menu()
def _on_menus_loaded(self, code):
"""
Called when receiving CuiMenusPostLoad from 3dsMax.
:param code: Notification code received
"""
self._add_shotgun_menu()
def post_app_init(self):
"""
Called when all apps have initialized
"""
# set up menu handler
self._menu_generator = self.tk_3dsmax.MenuGenerator(self)
self._add_shotgun_menu()
try:
# Listen to the CuiMenusPostLoad notification in order to add
# our shotgun menu after workspace reset/switch.
self._on_menus_loaded_handler = MaxPlus.NotificationManager.Register(
MaxPlus.NotificationCodes.CuiMenusPostLoad, self._on_menus_loaded)
except AttributeError:
self.log_debug("CuiMenusPostLoad notification code is not available in this version of MaxPlus.")
# Run a series of app instance commands at startup.
self._run_app_instance_commands()
def post_context_change(self, old_context, new_context):
"""
Handles necessary processing after a context change has been completed
successfully.
:param old_context: The previous context.
:param new_context: The current, new context.
"""
# Replacing the menu will cause the old one to be removed
# and the new one put into its place.
self._add_shotgun_menu()
def _run_app_instance_commands(self):
"""
Runs the series of app instance commands listed in the 'run_at_startup' setting
of the environment configuration yaml file.
"""
# Build a dictionary mapping app instance names to dictionaries of commands they registered with the engine.
app_instance_commands = {}
for (command_name, value) in self.commands.iteritems():
app_instance = value["properties"].get("app")
if app_instance:
# Add entry 'command name: command function' to the command dictionary of this app instance.
command_dict = app_instance_commands.setdefault(app_instance.instance_name, {})
command_dict[command_name] = value["callback"]
# Run the series of app instance commands listed in the 'run_at_startup' setting.
for app_setting_dict in self.get_setting("run_at_startup", []):
app_instance_name = app_setting_dict["app_instance"]
# Menu name of the command to run or '' to run all commands of the given app instance.
setting_command_name = app_setting_dict["name"]
# Retrieve the command dictionary of the given app instance.
command_dict = app_instance_commands.get(app_instance_name)
if command_dict is None:
self.log_warning(
"%s configuration setting 'run_at_startup' requests app '%s' that is not installed." %
(self.name, app_instance_name))
else:
if not setting_command_name:
# Run all commands of the given app instance.
for (command_name, command_function) in command_dict.iteritems():
self.log_debug("%s startup running app '%s' command '%s'." %
(self.name, app_instance_name, command_name))
command_function()
else:
# Run the command whose name is listed in the 'run_at_startup' setting.
command_function = command_dict.get(setting_command_name)
if command_function:
self.log_debug("%s startup running app '%s' command '%s'." %
(self.name, app_instance_name, setting_command_name))
command_function()
else:
known_commands = ', '.join("'%s'" % name for name in command_dict)
self.log_warning(
"%s configuration setting 'run_at_startup' requests app '%s' unknown command '%s'. "
"Known commands: %s" %
(self.name, app_instance_name, setting_command_name, known_commands))
def destroy_engine(self):
"""
Called when the engine is shutting down
"""
self.log_debug('%s: Destroying...' % self)
if self._on_menus_loaded_handler is not None:
MaxPlus.NotificationManager.Unregister(self._on_menus_loaded_handler)
self._remove_shotgun_menu()
def update_shotgun_menu(self):
"""
Rebuild the shotgun menu displayed in the main menu bar
"""
self._remove_shotgun_menu()
self._add_shotgun_menu()
##########################################################################################
# logging
# Should only call logging function from the main thread, although output to listener is
# supposed to be thread-safe.
# Note From the max team: Python scripts run in MAXScript are not thread-safe.
# Python commands are always executed in the main 3ds Max thread.
# You should not attempt to spawn separate threads in your scripts
# (for example, by using the Python threading module).
def log_debug(self, msg):
"""
Debug logging.
:param msg: The message string to log
"""
if self.get_setting("debug_logging", False):
self.async_execute_in_main_thread(self._print_output, "Shotgun Debug: %s" % msg)
def log_info(self, msg):
"""
Info logging.
:param msg: The message string to log
"""
self.async_execute_in_main_thread(self._print_output, "Shotgun Info: %s" % msg)
def log_warning(self, msg):
"""
Warning logging.
:param msg: The message string to log
"""
self.async_execute_in_main_thread(self._print_output, "Shotgun Warning: %s" % msg)
def log_error(self, msg):
"""
Error logging.
:param msg: The message string to log
"""
self.async_execute_in_main_thread(self._print_output, "Shotgun Error: %s" % msg)
def _print_output(self, msg):
"""
Print the specified message to the maxscript listener
:param msg: The message string to print
"""
print "[%-13s] %s" % (str(time.time()), msg)
##########################################################################################
# Engine
def show_panel(self, panel_id, title, bundle, widget_class, *args, **kwargs):
"""
Docks an app widget in a 3dsmax panel.
:param panel_id: Unique identifier for the panel, as obtained by register_panel().
:param title: The title of the panel
:param bundle: The app, engine or framework object that is associated with this window
:param widget_class: The class of the UI to be constructed. This must derive from QWidget.
Additional parameters specified will be passed through to the widget_class constructor.
:returns: the created widget_class instance
"""
from sgtk.platform.qt import QtCore, QtGui
self.log_debug("Begin showing panel %s" % panel_id)
if self._max_version_to_year(self._get_max_version()) <= 2017:
# Qt docking is supported in version 2018 and later.
self.log_warning("Panel functionality not implemented. Falling back to showing "
"panel '%s' in a modeless dialog" % panel_id)
return super(MaxEngine, self).show_panel(panel_id, title, bundle, widget_class, *args, **kwargs)
dock_widget_id = "sgtk_dock_widget_" + panel_id
main_window = MaxPlus.GetQMaxMainWindow()
# Check if the dock widget wrapper already exists.
dock_widget = main_window.findChild(QtGui.QDockWidget, dock_widget_id)
if dock_widget is None:
# The dock widget wrapper cannot be found in the main window's
# children list so that means it has not been created yet, so create it.
widget_instance = widget_class(*args, **kwargs)
widget_instance.setParent(self._get_dialog_parent())
widget_instance.setObjectName(panel_id)
dock_widget = QtGui.QDockWidget(title, parent=main_window)
dock_widget.setObjectName(dock_widget_id)
dock_widget.setWidget(widget_instance)
self.log_debug("Created new dock widget %s" % dock_widget_id)
# Disable 3dsMax accelerators, in order for QTextEdit and QLineEdit
# widgets to work properly.
widget_instance.setProperty("NoMaxAccelerators", True)
else:
# The dock widget wrapper already exists, so just get the
# shotgun panel from it.
widget_instance = dock_widget.widget()
self.log_debug("Found existing dock widget %s" % dock_widget_id)
# apply external stylesheet
self._apply_external_stylesheet(bundle, widget_instance)
if not main_window.restoreDockWidget(dock_widget):
# The dock widget cannot be restored from the main window's state,
# so dock it to the right dock area and make it float by default.
main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, dock_widget)
dock_widget.setFloating(True)
dock_widget.show()
# Remember the dock widget, so we can delete it later.
self._dock_widgets.append(dock_widget)
return widget_instance
def close_windows(self):
"""
Closes the various windows (dialogs, panels, etc.) opened by the engine.
"""
# Make a copy of the list of Tank dialogs that have been created by the engine and
# are still opened since the original list will be updated when each dialog is closed.
opened_dialog_list = self.created_qt_dialogs[:]
# Loop through the list of opened Tank dialogs.
for dialog in opened_dialog_list:
dialog_window_title = dialog.windowTitle()
try:
# Close the dialog and let its close callback remove it from the original dialog list.
self.log_debug("Closing dialog %s." % dialog_window_title)
dialog.close()
except Exception, exception:
self.log_error("Cannot close dialog %s: %s" % (dialog_window_title, exception))
# Delete all dock widgets previously added.
for dock_widget in self._dock_widgets:
# Keep MaxPlus.GetQMaxMainWindow() inside for-loop
# This will be executed only in version > 2017
# which supports Qt-docking.
MaxPlus.GetQMaxMainWindow().removeDockWidget(dock_widget)
dock_widget.deleteLater()
def _create_dialog(self, title, bundle, widget, parent):
"""
Parent function override to install event filtering in order to allow proper events to
reach window dialogs (such as keyboard events).
"""
dialog = sgtk.platform.Engine._create_dialog(self, title, bundle, widget, parent)
# Attaching the dialog to Max is a matter of whether this is a new
# enough version of 3ds Max. Anything short of 2016 SP1 is going to
# fail here with an AttributeError, so we can just catch that and
# continue on without the new-style parenting.
previous_parent = dialog.parent()
if self._parent_to_max:
try:
self.log_debug("Attempting to attach dialog to 3ds Max...")
# widget must be parentless when calling MaxPlus.AttachQWidgetToMax
dialog.setParent(None)
MaxPlus.AttachQWidgetToMax(dialog)
self.log_debug("AttachQWidgetToMax successful.")
except AttributeError:
dialog.setParent(previous_parent)
self.log_debug("AttachQWidgetToMax not available in this version of 3ds Max.")
dialog.installEventFilter(self.dialogEvents)
# Add to tracked dialogs (will be removed in eventFilter)
self._safe_dialog.append(dialog)
# Apply the engine-level stylesheet.
self._apply_external_styleshet(self, dialog)
return dialog
def reload_qss(self):
"""
        Causes the style.qss file that comes with this engine to
be re-applied to all dialogs that the engine has previously
launched.
"""
self.log_warning("Reloading engine QSS...")
for dialog in self.created_qt_dialogs:
self._apply_external_styleshet(self, dialog)
dialog.update()
def show_modal(self, title, bundle, widget_class, *args, **kwargs):
from sgtk.platform.qt import QtGui
if not self.has_ui:
self.log_error("Sorry, this environment does not support UI display! Cannot show "
"the requested window '%s'." % title)
return None
status = QtGui.QDialog.DialogCode.Rejected
try:
# Disable 'Shotgun' background menu while modals are there.
self.tk_3dsmax.MaxScript.disable_menu()
# create the dialog:
try:
self._parent_to_max = False
dialog, widget = self._create_dialog_with_widget(
title,
bundle,
widget_class,
*args, **kwargs
)
finally:
self._parent_to_max = True
# finally launch it, modal state
status = dialog.exec_()
except Exception:
import traceback
tb = traceback.format_exc()
self.log_error("Exception in modal window: %s" % tb)
finally:
# Re-enable 'Shotgun' background menu after modal has been closed
self.tk_3dsmax.MaxScript.enable_menu()
# lastly, return the instantiated widget
return (status, widget)
def safe_dialog_exec(self, func):
"""
If running a command from a dialog also creates a 3ds max window, this function tries to
ensure that the dialog will stay alive and that the max modal window becomes visible
and unobstructed.
        :param func: Function to execute (partial/lambda)
"""
        # Merge operations can cause Max dialogs to pop up, and closing the window results in a crash,
        # so keep our Qt windows alive and hidden while these operations are occurring.
from sgtk.platform.qt import QtGui
toggled = []
for dialog in self._safe_dialog:
needs_toggling = dialog.isVisible()
if needs_toggling:
self.log_debug("Toggling dialog off: %r" % dialog)
toggled.append(dialog)
dialog.hide()
dialog.lower()
QtGui.QApplication.processEvents()
else:
self.log_debug("Dialog is already hidden: %r" % dialog)
try:
func()
finally:
for dialog in toggled:
# Restore the window after the operation is completed
self.log_debug("Toggling dialog on: %r" % dialog)
dialog.show()
dialog.activateWindow() # for Windows
dialog.raise_() # for MacOS
##########################################################################################
# MaxPlus SDK Patching
# Version Id for 3dsmax 2016 Taken from Max Sdk (not currently available in maxplus)
MAX_RELEASE_R18 = 18000
# Latest supported max version
MAXIMUM_SUPPORTED_VERSION = 20000
def _max_version_to_year(self, version):
"""
Get the max year from the max release version.
Note that while 17000 is 2015, 17900 would be 2016 alpha
"""
year = 2000 + (math.ceil(version / 1000.0) - 2)
return year
def _get_max_version(self):
"""
Returns Version integer of max release number.
"""
# 3dsMax Version returns a number which contains max version, sdk version, etc...
version_id = MaxPlus.Application.Get3DSMAXVersion()
# Transform it to a version id
# (Macro to get 3ds max release from version id)
version_number = (version_id >> 16) & 0xffff
return version_number
def _is_at_least_max_2016(self):
"""
        Returns True if the current Max version is equal to or above 3ds Max 2016
"""
return self._get_max_version() >= MaxEngine.MAX_RELEASE_R18
|
StarcoderdataPython
|
9749856
|
from cart.models import CartItem
from catalog.models import Product
from BuyIT_Lite import settings
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from django.db.models import Max
from datetime import datetime, timedelta
import decimal
import random
CART_ID_SESSION_KEY = 'cart_id'
def _cart_id(request):
""" get the current user's cart id, sets new one if blank;
Note: the syntax below matches the text, but an alternative,
clearer way of checking for a cart ID would be the following:
if not CART_ID_SESSION_KEY in request.session:
"""
if request.session.get(CART_ID_SESSION_KEY, '') == '':
request.session[CART_ID_SESSION_KEY] = _generate_cart_id()
return request.session[CART_ID_SESSION_KEY]
def _generate_cart_id():
""" function for generating random cart ID values """
cart_id = ''
characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890!@#$%^&*()'
cart_id_length = 50
for y in range(cart_id_length):
cart_id += characters[random.randint(0, len(characters) - 1)]
return cart_id
def get_cart_items(request):
""" return all items from the current user's cart """
return CartItem.objects.filter(cart_id=_cart_id(request))
def add_to_cart(request):
""" function that takes a POST request and adds a product instance to the current customer's shopping cart """
postdata = request.POST.copy()
# get product slug from post data, return blank if empty
product_slug = postdata.get('product_slug', '')
# get quantity added, return 1 if empty
quantity = postdata.get('quantity', 1)
# fetch the product or return a missing page error
p = get_object_or_404(Product, slug=product_slug)
# get products in cart
cart_products = get_cart_items(request)
product_in_cart = False
# check to see if item is already in cart
for cart_item in cart_products:
if cart_item.product.id == p.id:
# update the quantity if found
cart_item.augment_quantity(quantity)
product_in_cart = True
if not product_in_cart:
# create and save a new cart item
ci = CartItem()
ci.product = p
ci.quantity = quantity
ci.cart_id = _cart_id(request)
ci.save()
def get_single_item(request, item_id):
return get_object_or_404(CartItem, id=item_id, cart_id=_cart_id(request))
# update quantity for single item
def update_cart(request):
""" function takes a POST request that updates the quantity for single product instance in the
current customer's shopping cart
"""
postdata = request.POST.copy()
item_id = postdata['item_id']
quantity = postdata['quantity']
cart_item = get_single_item(request, item_id)
if cart_item:
if int(quantity) > 0:
cart_item.quantity = int(quantity)
cart_item.save()
else:
remove_from_cart(request)
# remove a single item from cart
def remove_from_cart(request):
""" function that takes a POST request removes a single product instance from the current customer's
shopping cart
"""
postdata = request.POST.copy()
item_id = postdata['item_id']
cart_item = get_single_item(request, item_id)
if cart_item:
cart_item.delete()
def cart_subtotal(request):
""" gets the subtotal for the current shopping cart """
cart_total = decimal.Decimal('0.00')
cart_products = get_cart_items(request)
for cart_item in cart_products:
cart_total += cart_item.product.price * cart_item.quantity
return cart_total
# returns the number of distinct items in the user's cart
def cart_distinct_item_count(request):
return get_cart_items(request).count()
def is_empty(request):
return cart_distinct_item_count(request) == 0
def empty_cart(request):
""" empties the shopping cart of the current customer """
user_cart = get_cart_items(request)
user_cart.delete()
def remove_old_cart_items():
""" 1. calculate date of 90 days ago (or session lifespan)
2. create a list of cart IDs that haven't been modified
3. delete those CartItem instances
"""
    print("Removing old carts")
remove_before = datetime.now() + timedelta(days=-settings.SESSION_COOKIE_DAYS)
cart_ids = []
old_items = CartItem.objects.values('cart_id').annotate(last_change=Max('date_added')).filter(
last_change__lt=remove_before).order_by()
for item in old_items:
cart_ids.append(item['cart_id'])
to_remove = CartItem.objects.filter(cart_id__in=cart_ids)
to_remove.delete()
    print(str(len(cart_ids)) + " carts were removed")
|
StarcoderdataPython
|
6667146
|
from datetime import datetime
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from .models import Pressao
class PressaoListView(LoginRequiredMixin, ListView):
model = Pressao
template_name = 'registros/registros_list.html'
paginate_by = 100
def get_queryset(self):
return self.model.objects.filter(pessoa=self.request.user).order_by('-data')
class PressaoDetailView(LoginRequiredMixin, DetailView):
model = Pressao
template_name = 'registros/registro_detail.html'
class PressaoCreateView(LoginRequiredMixin, CreateView):
model = Pressao
template_name = 'registros/registro_new.html'
fields = ['sis', 'dia', 'pul']
def form_valid(self, form):
form.instance.pessoa = self.request.user
form.instance.data = datetime.now()
return super().form_valid(form)
class PressaoUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
model = Pressao
template_name = 'registros/registro_edit.html'
fields = ['sis', 'dia', 'pul', 'data']
def test_func(self):
obj = self.get_object()
return obj.pessoa == self.request.user
class PressaoDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
model = Pressao
template_name = 'registros/registro_delete.html'
success_url = reverse_lazy('registros_list')
def test_func(self):
obj = self.get_object()
return obj.pessoa == self.request.user
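# A minimal urls.py sketch for wiring these views up (paths and most route names
# below are hypothetical; 'registros_list' is assumed because PressaoDeleteView's
# success_url reverses it):
#
#   from django.urls import path
#   from .views import (PressaoListView, PressaoDetailView, PressaoCreateView,
#                       PressaoUpdateView, PressaoDeleteView)
#
#   urlpatterns = [
#       path('', PressaoListView.as_view(), name='registros_list'),
#       path('<int:pk>/', PressaoDetailView.as_view(), name='registro_detail'),
#       path('new/', PressaoCreateView.as_view(), name='registro_new'),
#       path('<int:pk>/edit/', PressaoUpdateView.as_view(), name='registro_edit'),
#       path('<int:pk>/delete/', PressaoDeleteView.as_view(), name='registro_delete'),
#   ]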
|
StarcoderdataPython
|
5066489
|
from rest_framework import permissions
from rest_framework.exceptions import PermissionDenied
ERROR_MESSAGES = {
    'update_denied': "Editing someone else's content is not allowed!",
    'delete_denied': "Deleting someone else's content is not allowed!"
}
class AuthorOrReadOnly(permissions.IsAuthenticatedOrReadOnly):
def has_object_permission(self, request, view, obj):
if obj.author != request.user:
if request.method in ['PUT', 'PATCH']:
raise PermissionDenied(ERROR_MESSAGES['update_denied'])
if request.method in ['DELETE']:
raise PermissionDenied(ERROR_MESSAGES['delete_denied'])
return True
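# A minimal usage sketch (the viewset, model and serializer names are hypothetical):
# attach the permission class so authors keep write access while other authenticated
# users get a PermissionDenied on PUT/PATCH/DELETE of objects they do not own.
#
#   from rest_framework import viewsets
#
#   class PostViewSet(viewsets.ModelViewSet):
#       queryset = Post.objects.all()
#       serializer_class = PostSerializer
#       permission_classes = (AuthorOrReadOnly,)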
|
StarcoderdataPython
|
379558
|
import uctypes as ct
SERCOM_I2CM = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'PINOUT' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'SDAHOLD' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'MEXTTOEN' : 0x00 | ct.BFUINT32 | 22 << ct.BF_POS | 1 << ct.BF_LEN,
'SEXTTOEN' : 0x00 | ct.BFUINT32 | 23 << ct.BF_POS | 1 << ct.BF_LEN,
'SPEED' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
'SCLSM' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
'INACTOUT' : 0x00 | ct.BFUINT32 | 28 << ct.BF_POS | 2 << ct.BF_LEN,
'LOWTOUTEN' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'SMEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'QCEN' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'CMD' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'ACKACT' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'BAUD' : ( 0x0C, {
'reg' : 0x00 | ct.UINT32,
'BAUD' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'BAUDLOW' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 8 << ct.BF_LEN,
'HSBAUD' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 8 << ct.BF_LEN,
'HSBAUDLOW' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 8 << ct.BF_LEN,
}),
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'MB' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'SB' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'MB' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'SB' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'MB' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'SB' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'BUSERR' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ARBLOST' : 0x00 | ct.BFUINT16 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXNACK' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'BUSSTATE' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 2 << ct.BF_LEN,
'LOWTOUT' : 0x00 | ct.BFUINT16 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'CLKHOLD' : 0x00 | ct.BFUINT16 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'MEXTTOUT' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'SEXTTOUT' : 0x00 | ct.BFUINT16 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'LENERR' : 0x00 | ct.BFUINT16 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'SYSOP' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'ADDR' : ( 0x24, {
'reg' : 0x00 | ct.UINT32,
'ADDR' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 11 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
'HS' : 0x00 | ct.BFUINT32 | 14 << ct.BF_POS | 1 << ct.BF_LEN,
'TENBITEN' : 0x00 | ct.BFUINT32 | 15 << ct.BF_POS | 1 << ct.BF_LEN,
'LEN' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 8 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT8,
'DBGCTRL' : ( 0x30, {
'reg' : 0x00 | ct.UINT8,
'DBGSTOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
}),
}
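# A minimal usage sketch (the base address below is hypothetical; take the real
# SERCOM instance address from the device datasheet). uctypes.struct() overlays
# the descriptor onto the peripheral registers so bit-fields can be accessed by name:
#
#   SERCOM_BASE = 0x40003000                         # hypothetical base address
#   i2cm = ct.struct(SERCOM_BASE, SERCOM_I2CM)
#   i2cm.CTRLA.ENABLE = 1                            # set the ENABLE bit-field
#   while i2cm.SYNCBUSY.ENABLE:                      # wait for enable to synchronize
#       pass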
SERCOM_I2CS = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'PINOUT' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'SDAHOLD' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'SEXTTOEN' : 0x00 | ct.BFUINT32 | 23 << ct.BF_POS | 1 << ct.BF_LEN,
'SPEED' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
'SCLSM' : 0x00 | ct.BFUINT32 | 27 << ct.BF_POS | 1 << ct.BF_LEN,
'LOWTOUTEN' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'SMEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'GCMD' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'AACKEN' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'AMODE' : 0x00 | ct.BFUINT32 | 14 << ct.BF_POS | 2 << ct.BF_LEN,
'CMD' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'ACKACT' : 0x00 | ct.BFUINT32 | 18 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'SDASETUP' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 4 << ct.BF_LEN,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'PREC' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'AMATCH' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'DRDY' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'PREC' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'AMATCH' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'DRDY' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'PREC' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'AMATCH' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'DRDY' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'BUSERR' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'COLL' : 0x00 | ct.BFUINT16 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXNACK' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'DIR' : 0x00 | ct.BFUINT16 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'SR' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'LOWTOUT' : 0x00 | ct.BFUINT16 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'CLKHOLD' : 0x00 | ct.BFUINT16 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'SEXTTOUT' : 0x00 | ct.BFUINT16 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'HS' : 0x00 | ct.BFUINT16 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'LENERR' : 0x00 | ct.BFUINT16 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'LENGTH' : ( 0x22, {
'reg' : 0x00 | ct.UINT16,
'LEN' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'ADDR' : ( 0x24, {
'reg' : 0x00 | ct.UINT32,
'GENCEN' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ADDR' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 10 << ct.BF_LEN,
'TENBITEN' : 0x00 | ct.BFUINT32 | 15 << ct.BF_POS | 1 << ct.BF_LEN,
'ADDRMASK' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 10 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT32,
}
SERCOM_SPIS = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'IBON' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'DOPO' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'DIPO' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'FORM' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 4 << ct.BF_LEN,
'CPHA' : 0x00 | ct.BFUINT32 | 28 << ct.BF_POS | 1 << ct.BF_LEN,
'CPOL' : 0x00 | ct.BFUINT32 | 29 << ct.BF_POS | 1 << ct.BF_LEN,
'DORD' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'CHSIZE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'PLOADEN' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'SSDE' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'MSSEN' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
'AMODE' : 0x00 | ct.BFUINT32 | 14 << ct.BF_POS | 2 << ct.BF_LEN,
'RXEN' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'ICSPACE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 6 << ct.BF_LEN,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'BAUD' : 0x0C | ct.UINT8,
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'BUFOVF' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'LENERR' : 0x00 | ct.BFUINT16 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'LENGTH' : ( 0x22, {
'reg' : 0x00 | ct.UINT16,
'LEN' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'ADDR' : ( 0x24, {
'reg' : 0x00 | ct.UINT32,
'ADDR' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'ADDRMASK' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 8 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT32,
'DBGCTRL' : ( 0x30, {
'reg' : 0x00 | ct.UINT8,
'DBGSTOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
}),
}
SERCOM_SPIM = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'IBON' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'DOPO' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'DIPO' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'FORM' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 4 << ct.BF_LEN,
'CPHA' : 0x00 | ct.BFUINT32 | 28 << ct.BF_POS | 1 << ct.BF_LEN,
'CPOL' : 0x00 | ct.BFUINT32 | 29 << ct.BF_POS | 1 << ct.BF_LEN,
'DORD' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'CHSIZE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'PLOADEN' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'SSDE' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'MSSEN' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
'AMODE' : 0x00 | ct.BFUINT32 | 14 << ct.BF_POS | 2 << ct.BF_LEN,
'RXEN' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'ICSPACE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 6 << ct.BF_LEN,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'BAUD' : 0x0C | ct.UINT8,
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'SSL' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'BUFOVF' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'LENERR' : 0x00 | ct.BFUINT16 | 11 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'LENGTH' : ( 0x22, {
'reg' : 0x00 | ct.UINT16,
'LEN' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'ADDR' : ( 0x24, {
'reg' : 0x00 | ct.UINT32,
'ADDR' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'ADDRMASK' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 8 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT32,
'DBGCTRL' : ( 0x30, {
'reg' : 0x00 | ct.UINT8,
'DBGSTOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
}),
}
SERCOM_USART_EXT = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'IBON' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'TXINV' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'RXINV' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'SAMPR' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
'TXPO' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'RXPO' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'SAMPA' : 0x00 | ct.BFUINT32 | 22 << ct.BF_POS | 2 << ct.BF_LEN,
'FORM' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 4 << ct.BF_LEN,
'CMODE' : 0x00 | ct.BFUINT32 | 28 << ct.BF_POS | 1 << ct.BF_LEN,
'CPOL' : 0x00 | ct.BFUINT32 | 29 << ct.BF_POS | 1 << ct.BF_LEN,
'DORD' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'CHSIZE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'SBMODE' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'COLDEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'SFDE' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'ENC' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'PMODE' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
'TXEN' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'RXEN' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
'LINCMD' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'GTIME' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'BRKLEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 2 << ct.BF_LEN,
'HDRDLY' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 2 << ct.BF_LEN,
'INACK' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'DSNACK' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
'MAXITER' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 3 << ct.BF_LEN,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'BAUD' : 0x0C | ct.UINT16,
'BAUD_FRAC_MODE' : ( 0x0C, {
'reg' : 0x00 | ct.UINT16,
'BAUD' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 13 << ct.BF_LEN,
'FP' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
}),
'BAUD_FRACFP_MODE' : ( 0x0C, {
'reg' : 0x00 | ct.UINT16,
'BAUD' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 13 << ct.BF_LEN,
'FP' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
}),
'BAUD_USARTFP_MODE' : 0x0C | ct.UINT16,
'RXPL' : 0x0E | ct.UINT8,
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'PERR' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'FERR' : 0x00 | ct.BFUINT16 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'BUFOVF' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'CTS' : 0x00 | ct.BFUINT16 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ISF' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'COLL' : 0x00 | ct.BFUINT16 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'TXE' : 0x00 | ct.BFUINT16 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'ITER' : 0x00 | ct.BFUINT16 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXERRCNT' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'RXERRCNT' : 0x20 | ct.UINT8,
'LENGTH' : ( 0x22, {
'reg' : 0x00 | ct.UINT16,
'LEN' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT32,
'DBGCTRL' : ( 0x30, {
'reg' : 0x00 | ct.UINT8,
'DBGSTOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
}),
}
SERCOM_USART_INT = {
'CTRLA' : ( 0x00, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'MODE' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 3 << ct.BF_LEN,
'RUNSTDBY' : 0x00 | ct.BFUINT32 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
'IBON' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'TXINV' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'RXINV' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'SAMPR' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
'TXPO' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 2 << ct.BF_LEN,
'RXPO' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 2 << ct.BF_LEN,
'SAMPA' : 0x00 | ct.BFUINT32 | 22 << ct.BF_POS | 2 << ct.BF_LEN,
'FORM' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 4 << ct.BF_LEN,
'CMODE' : 0x00 | ct.BFUINT32 | 28 << ct.BF_POS | 1 << ct.BF_LEN,
'CPOL' : 0x00 | ct.BFUINT32 | 29 << ct.BF_POS | 1 << ct.BF_LEN,
'DORD' : 0x00 | ct.BFUINT32 | 30 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'CTRLB' : ( 0x04, {
'reg' : 0x00 | ct.UINT32,
'CHSIZE' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'SBMODE' : 0x00 | ct.BFUINT32 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'COLDEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 1 << ct.BF_LEN,
'SFDE' : 0x00 | ct.BFUINT32 | 9 << ct.BF_POS | 1 << ct.BF_LEN,
'ENC' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 1 << ct.BF_LEN,
'PMODE' : 0x00 | ct.BFUINT32 | 13 << ct.BF_POS | 1 << ct.BF_LEN,
'TXEN' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'RXEN' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
'LINCMD' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'CTRLC' : ( 0x08, {
'reg' : 0x00 | ct.UINT32,
'GTIME' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 3 << ct.BF_LEN,
'BRKLEN' : 0x00 | ct.BFUINT32 | 8 << ct.BF_POS | 2 << ct.BF_LEN,
'HDRDLY' : 0x00 | ct.BFUINT32 | 10 << ct.BF_POS | 2 << ct.BF_LEN,
'INACK' : 0x00 | ct.BFUINT32 | 16 << ct.BF_POS | 1 << ct.BF_LEN,
'DSNACK' : 0x00 | ct.BFUINT32 | 17 << ct.BF_POS | 1 << ct.BF_LEN,
'MAXITER' : 0x00 | ct.BFUINT32 | 20 << ct.BF_POS | 3 << ct.BF_LEN,
'DATA32B' : 0x00 | ct.BFUINT32 | 24 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'BAUD' : 0x0C | ct.UINT16,
'BAUD_FRAC_MODE' : ( 0x0C, {
'reg' : 0x00 | ct.UINT16,
'BAUD' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 13 << ct.BF_LEN,
'FP' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
}),
'BAUD_FRACFP_MODE' : ( 0x0C, {
'reg' : 0x00 | ct.UINT16,
'BAUD' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 13 << ct.BF_LEN,
'FP' : 0x00 | ct.BFUINT16 | 13 << ct.BF_POS | 3 << ct.BF_LEN,
}),
'BAUD_USARTFP_MODE' : 0x0C | ct.UINT16,
'RXPL' : 0x0E | ct.UINT8,
'INTENCLR' : ( 0x14, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTENSET' : ( 0x16, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'INTFLAG' : ( 0x18, {
'reg' : 0x00 | ct.UINT8,
'DRE' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'TXC' : 0x00 | ct.BFUINT8 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'RXC' : 0x00 | ct.BFUINT8 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXS' : 0x00 | ct.BFUINT8 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'CTSIC' : 0x00 | ct.BFUINT8 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'RXBRK' : 0x00 | ct.BFUINT8 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'ERROR' : 0x00 | ct.BFUINT8 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'STATUS' : ( 0x1A, {
'reg' : 0x00 | ct.UINT16,
'PERR' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'FERR' : 0x00 | ct.BFUINT16 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'BUFOVF' : 0x00 | ct.BFUINT16 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'CTS' : 0x00 | ct.BFUINT16 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'ISF' : 0x00 | ct.BFUINT16 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
'COLL' : 0x00 | ct.BFUINT16 | 5 << ct.BF_POS | 1 << ct.BF_LEN,
'TXE' : 0x00 | ct.BFUINT16 | 6 << ct.BF_POS | 1 << ct.BF_LEN,
'ITER' : 0x00 | ct.BFUINT16 | 7 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'SYNCBUSY' : ( 0x1C, {
'reg' : 0x00 | ct.UINT32,
'SWRST' : 0x00 | ct.BFUINT32 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
'ENABLE' : 0x00 | ct.BFUINT32 | 1 << ct.BF_POS | 1 << ct.BF_LEN,
'CTRLB' : 0x00 | ct.BFUINT32 | 2 << ct.BF_POS | 1 << ct.BF_LEN,
'RXERRCNT' : 0x00 | ct.BFUINT32 | 3 << ct.BF_POS | 1 << ct.BF_LEN,
'LENGTH' : 0x00 | ct.BFUINT32 | 4 << ct.BF_POS | 1 << ct.BF_LEN,
}),
'RXERRCNT' : 0x20 | ct.UINT8,
'LENGTH' : ( 0x22, {
'reg' : 0x00 | ct.UINT16,
'LEN' : 0x00 | ct.BFUINT16 | 0 << ct.BF_POS | 8 << ct.BF_LEN,
'LENEN' : 0x00 | ct.BFUINT16 | 8 << ct.BF_POS | 2 << ct.BF_LEN,
}),
'DATA' : 0x28 | ct.UINT32,
'DBGCTRL' : ( 0x30, {
'reg' : 0x00 | ct.UINT8,
'DBGSTOP' : 0x00 | ct.BFUINT8 | 0 << ct.BF_POS | 1 << ct.BF_LEN,
}),
}
SERCOM_ = {
'I2CM' : ( 0x00, SERCOM_I2CM ),
'I2CS' : ( 0x00, SERCOM_I2CS ),
'SPIS' : ( 0x00, SERCOM_SPIS ),
'SPIM' : ( 0x00, SERCOM_SPIM ),
'USART_EXT' : ( 0x00, SERCOM_USART_EXT ),
'USART_INT' : ( 0x00, SERCOM_USART_INT ),
}
SERCOM0 = ct.struct(0x40003000, SERCOM_)
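# Usage sketch (illustrative, not part of the original register map): assuming `ct`
# is MicroPython's `uctypes` and SERCOM0 really sits at 0x40003000 on the target
# part, bitfields can be read and written directly through the struct. The reset
# and enable sequence below is only an example of that access pattern.
def _sercom0_usart_example():
    usart = SERCOM0.USART_INT
    usart.CTRLA.SWRST = 1              # software reset
    while usart.SYNCBUSY.SWRST:        # wait for reset to synchronise
        pass
    usart.CTRLA.MODE = 1               # USART with internal clock
    usart.CTRLB.TXEN = 1               # enable transmitter
    usart.CTRLB.RXEN = 1               # enable receiver
    usart.CTRLA.ENABLE = 1
    while usart.SYNCBUSY.ENABLE:
        pass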
|
StarcoderdataPython
|
168091
|
<gh_stars>0
import logging
from dataclasses import dataclass, field
from datetime import datetime
from typing import Optional
from thenewboston_node.business_logic.validators import (
validate_gte_value, validate_is_none, validate_not_none, validate_type
)
from thenewboston_node.core.logging import validates
from thenewboston_node.core.utils.cryptography import hash_normalized_dict
from thenewboston_node.core.utils.dataclass import cover_docstring, revert_docstring
from thenewboston_node.core.utils.types import hexstr
from .account_state import AccountState
from .base import BaseDataclass
from .mixins.compactable import MessagpackCompactableMixin
from .mixins.normalizable import NormalizableMixin
logger = logging.getLogger(__name__)
@revert_docstring
@dataclass
@cover_docstring
class BlockchainState(MessagpackCompactableMixin, NormalizableMixin, BaseDataclass):
account_states: dict[hexstr, AccountState] = field(
metadata={'example_value': {
'00f3d2477317d53bcc2a410decb68c769eea2f0d74b679369b7417e198bd97b6': {}
}}
)
"""Account number to account state map"""
last_block_number: Optional[int] = field(default=None, metadata={'example_value': 5})
"""Number of the last block included into the blockchain state (optional for blockchain genesis state)"""
# TODO(dmu) MEDIUM: Do we really need last_block_identifier?
last_block_identifier: Optional[hexstr] = field(
default=None, metadata={'example_value': 'b0dabd367eb1ed670ab9ce4cef9d45106332f211c7b50ddd60dec4ae62711fb7'}
)
"""Identifier of the last block included into the blockchain state (optional for blockchain genesis state)"""
# TODO(dmu) HIGH: Do we really need `last_block_timestamp`?
last_block_timestamp: Optional[datetime] = field(
default=None, metadata={'example_value': datetime(2021, 5, 19, 10, 34, 5, 54106)}
)
"""Timestamp of the last block included into the blockchain state (optional for blockchain genesis state)"""
next_block_identifier: Optional[hexstr] = field(
default=None, metadata={'example_value': 'dc6671e1132cbb7ecbc190bf145b5a5cfb139ca502b5d66aafef4d096f4d2709'}
)
"""Identifier of the next block to be added on top of the blockchain state
(optional for blockchain genesis state, blockchain state hash is used as next block identifier in this case)"""
def serialize_to_dict(self, skip_none_values=True, coerce_to_json_types=True, exclude=()):
serialized = super().serialize_to_dict(
skip_none_values=skip_none_values, coerce_to_json_types=coerce_to_json_types, exclude=exclude
)
for account_number, account_state in serialized['account_states'].items():
if account_state.get('balance_lock') == account_number:
del account_state['balance_lock']
return serialized
def get_account_state(self, account: hexstr) -> Optional[AccountState]:
return self.account_states.get(account)
def get_account_state_attribute_value(self, account: hexstr, attribute: str):
account_state = self.get_account_state(account)
if account_state is None:
from thenewboston_node.business_logic.utils.blockchain import get_attribute_default_value
return get_attribute_default_value(attribute, account)
return account_state.get_attribute_value(attribute, account)
def get_account_balance(self, account: hexstr) -> int:
return self.get_account_state_attribute_value(account, 'balance')
def get_account_balance_lock(self, account: hexstr) -> str:
return self.get_account_state_attribute_value(account, 'balance_lock')
def get_node(self, account: hexstr):
return self.get_account_state_attribute_value(account, 'node')
def get_next_block_number(self) -> int:
last_block_number = self.last_block_number
return 0 if last_block_number is None else last_block_number + 1
def get_next_block_identifier(self) -> hexstr:
next_block_identifier = self.next_block_identifier
if next_block_identifier:
return next_block_identifier
return self.get_hash() # initial blockchain state case
def get_hash(self):
return hash_normalized_dict(self.get_normalized())
def is_initial(self) -> bool:
return (
self.last_block_number is None and self.last_block_identifier is None and
self.next_block_identifier is None and self.last_block_timestamp is None
)
@validates('blockchain state')
def validate(self, is_initial=False):
self.validate_attributes(is_initial=is_initial)
self.validate_accounts()
@validates('blockchain state attributes', is_plural_target=True)
def validate_attributes(self, is_initial=False):
self.validate_last_block_number(is_initial)
self.validate_last_block_identifier(is_initial)
self.validate_last_block_timestamp(is_initial)
self.validate_next_block_identifier(is_initial)
@validates('blockchain state last_block_number')
def validate_last_block_number(self, is_initial):
if is_initial:
validate_is_none(f'Initial {self.humanized_class_name} last_block_number', self.last_block_number)
else:
validate_type(f'{self.humanized_class_name} last_block_number', self.last_block_number, int)
validate_gte_value(f'{self.humanized_class_name} last_block_number', self.last_block_number, 0)
@validates('blockchain state last_block_identifier')
def validate_last_block_identifier(self, is_initial):
if is_initial:
validate_is_none(f'Initial {self.humanized_class_name} last_block_identifier', self.last_block_identifier)
else:
validate_type(f'{self.humanized_class_name} last_block_identifier', self.last_block_identifier, str)
@validates('blockchain state last_block_timestamp')
def validate_last_block_timestamp(self, is_initial):
timestamp = self.last_block_timestamp
if is_initial:
validate_is_none(f'Initial {self.humanized_class_name} last_block_timestamp', timestamp)
else:
validate_not_none(f'{self.humanized_class_name} last_block_timestamp', timestamp)
validate_type(f'{self.humanized_class_name} last_block_timestamp', timestamp, datetime)
validate_is_none(f'{self.humanized_class_name} last_block_timestamp timezone', timestamp.tzinfo)
@validates('blockchain state next_block_identifier')
def validate_next_block_identifier(self, is_initial):
if is_initial:
validate_is_none(f'Initial {self.humanized_class_name} next_block_identifier', self.next_block_identifier)
else:
validate_type(f'{self.humanized_class_name} next_block_identifier', self.next_block_identifier, str)
@validates('blockchain state accounts', is_plural_target=True)
def validate_accounts(self):
for account, account_state in self.account_states.items():
with validates(f'blockchain state account {account}'):
validate_type(f'{self.humanized_class_name} account', account, str)
account_state.validate()
|
StarcoderdataPython
|
4844537
|
import sys
import numpy as np
from bitstring import BitArray
from util.log import *
def flipFloat(val, bit=None, log=False):
# Cast float to BitArray and flip (invert) random bit 0-31
faultValue = BitArray(float=val, length=32)
if bit is None:
bit = np.random.randint(0, faultValue.len)
faultValue.invert(bit)
if log:
logInjectionBit("\tFlipping bit ", bit)
logInjectionVal("\tOriginal: ", float(val), " Corrupted: ", faultValue.float)
return faultValue.float, bit
def flipInt(val, size, bit=None, log=False):
# Cast integer to BitArray and flip (invert) random bit 0-N
val = int(val)
faultValue = BitArray(int=val, length=size)
if bit is None:
bit = np.random.randint(0, faultValue.len)
faultValue.invert(bit)
if log:
logInjectionBit("\tFlipping bit ", bit)
logInjectionVal("\tOriginal: ", int(val), " Corrupted: ", faultValue.int)
return faultValue.int, bit
def bitFlip(value, size=8, bit=None, log=False, quantized=False):
if quantized:
return flipInt(value, size, bit, log)
else:
return flipFloat(value, bit, log)
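# Example (illustrative sketch, not part of the original module): flip one random
# bit of a 32-bit float value and of an 8-bit quantized integer value; the second
# return value reports which bit position was inverted.
def _bitflip_example():
    corrupted_f, bit_f = bitFlip(0.5, log=False, quantized=False)         # float path
    corrupted_q, bit_q = bitFlip(100, size=8, log=False, quantized=True)  # integer path
    return (corrupted_f, bit_f), (corrupted_q, bit_q)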
|
StarcoderdataPython
|
5184516
|
from flask import Flask, render_template, request, redirect
import os
from core.dbdriver import get_db, init_tables
app = Flask(__name__)
# Init tables in db
init_tables()
@app.route('/')
def index():
""" Index page
Show list of `asks`, and cheer count of each ask
"""
with get_db().cursor() as cursor :
cursor.execute("SELECT *, (SELECT COUNT(*) FROM `cheer` WHERE ask_id = ask.id) AS cheer_cnt FROM `ask`")
result = cursor.fetchall()
return render_template('main.html',
dataset=result,
)
@app.route('/ask/<int:ask_id>', methods=['GET'])
def view_ask(ask_id):
""" Show detail of one `ask`
See all cheers in this ask
:param ask_id: Primary key of `ask` table
"""
conn = get_db()
with conn.cursor() as cursor :
cursor.execute("SELECT * FROM `ask` WHERE id = %s", (ask_id, ))
row = cursor.fetchone()
cursor.execute("SELECT * FROM `cheer` WHERE ask_id = %s", (ask_id, ))
rows2 = cursor.fetchall()
return render_template('detail.html',
id=row[0],
message=row[1],
ip_address=row[2],
register_time=row[3],
current_url=request.url,
cheers=rows2,
)
@app.route('/ask', methods=['POST'])
def add_ask():
""" Add new ask
:post-param message: Message of `ask`
"""
conn = get_db()
message = request.form.get('message')
with conn.cursor() as cursor :
sql = "INSERT INTO `ask` (`message`, `ip_address`) VALUES (%s, %s)"
r = cursor.execute(sql, (message, request.remote_addr))
id = conn.insert_id()
conn.commit()
return redirect("/#a" + str(id))
@app.route('/ask/<int:ask_id>/cheer', methods=['POST'])
def add_cheer(ask_id):
""" Add new cheer to ask
:param ask_id: Primary key of `ask` table
:post-param message: Message of `cheer`
"""
conn = get_db()
message = request.form.get('message')
with conn.cursor() as cursor :
sql = "INSERT INTO `cheer` (`ask_id`, `message`, `ip_address`) VALUES (%s, %s, %s)"
r = cursor.execute(sql, (ask_id, message, request.remote_addr))
conn.commit()
redirect_url = request.form.get('back', '/#c' + str(ask_id))
return redirect(redirect_url)
@app.template_filter()
def hide_ip_address(ip_address):
"""
Template filter: <hide_ip_address>
Hide last sections of IP address
ex) 172.16.58.3 -> 172.16.*.*
"""
if not ip_address : return ""
else :
ipa = ip_address.split(".")
return "%s.%s.*.*" % (ipa[0], ipa[1])
if __name__ == '__main__':
app.run(
host='0.0.0.0',
debug=True,
port=os.environ.get('APP_PORT', 8080)
)
|
StarcoderdataPython
|
4887419
|
from collections import defaultdict
from datetime import timedelta
from typing import Dict, List
from celery import shared_task
from django.db import connection, transaction
from django.db.models import QuerySet
from django.utils import timezone
from django.utils.timezone import now
from sentry_sdk import capture_exception
from posthog.models import SessionRecordingEvent, Team
RETENTION_PERIOD = timedelta(days=7)
SESSION_CUTOFF = timedelta(minutes=30)
def session_recording_retention_scheduler() -> None:
for team in Team.objects.all().filter(session_recording_retention_period_days__isnull=False):
time_threshold = now() - timedelta(days=team.session_recording_retention_period_days)
session_recording_retention.delay(team_id=team.id, time_threshold=time_threshold.isoformat())
@shared_task(ignore_result=True, max_retries=1)
def session_recording_retention(team_id: int, time_threshold: str) -> None:
cursor = connection.cursor()
try:
# This deletes events, but may cut sessions in half, this is by choice for performance reasons
cursor.execute(
"DELETE FROM posthog_sessionrecordingevent WHERE team_id = %s AND timestamp < %s", [team_id, time_threshold]
)
except Exception as err:
capture_exception(err)
def build_sessions(events: QuerySet) -> Dict[str, List[SessionRecordingEvent]]:
sessions = defaultdict(list)
for event in events:
sessions[event.session_id].append(event)
return sessions
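# Example (illustrative sketch, not part of the original tasks module): build_sessions
# only needs objects exposing a `session_id` attribute, so a lightweight stand-in
# works for demonstration instead of a real SessionRecordingEvent queryset.
def _build_sessions_example():
    from collections import namedtuple
    Event = namedtuple("Event", ["session_id", "timestamp"])
    events = [Event("a", 1), Event("a", 2), Event("b", 3)]
    return build_sessions(events)  # {"a": [two events], "b": [one event]}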
|
StarcoderdataPython
|
208258
|
#!/usr/bin/env python
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""
Non-signalized junctions: crossing negotiation:
The hero vehicle is passing through a junction without traffic lights
And encounters another vehicle passing across the junction.
"""
import py_trees
import carla
from srunner.scenariomanager.carla_data_provider import CarlaActorPool
from srunner.scenariomanager.scenarioatomics.atomic_behaviors import (ActorTransformSetter,
ActorDestroy,
SyncArrival,
KeepVelocity,
StopVehicle)
from srunner.scenariomanager.scenarioatomics.atomic_criteria import CollisionTest, DrivenDistanceTest
from srunner.scenariomanager.scenarioatomics.atomic_trigger_conditions import InTriggerRegion
from srunner.scenarios.basic_scenario import BasicScenario
class NoSignalJunctionCrossing(BasicScenario):
"""
Implementation class for
'Non-signalized junctions: crossing negotiation' scenario,
(Traffic Scenario 10).
This is a single ego vehicle scenario
"""
# ego vehicle parameters
_ego_vehicle_max_velocity = 20
_ego_vehicle_driven_distance = 105
# other vehicle
_other_actor_max_brake = 1.0
_other_actor_target_velocity = 15
def __init__(self, world, ego_vehicles, config, randomize=False, debug_mode=False, criteria_enable=True,
timeout=0):
"""
Setup all relevant parameters and create scenario
"""
self._other_actor_transform = None
# Timeout of scenario in seconds
self.timeout = timeout
super(NoSignalJunctionCrossing, self).__init__("NoSignalJunctionCrossing",
ego_vehicles,
config,
world,
debug_mode,
                                               criteria_enable=criteria_enable)
def _initialize_actors(self, config):
"""
Custom initialization
"""
self._other_actor_transform = config.other_actors[0].transform
first_vehicle_transform = carla.Transform(
carla.Location(config.other_actors[0].transform.location.x,
config.other_actors[0].transform.location.y,
config.other_actors[0].transform.location.z - 500),
config.other_actors[0].transform.rotation)
first_vehicle = CarlaActorPool.request_new_actor(config.other_actors[0].model, first_vehicle_transform)
first_vehicle.set_simulate_physics(enabled=False)
self.other_actors.append(first_vehicle)
def _create_behavior(self):
"""
After invoking this scenario, it will wait for the user
controlled vehicle to enter the start region,
then make a traffic participant accelerate
until it is going fast enough to reach an intersection point
at the same time as the user controlled vehicle at the junction.
Once the user controlled vehicle comes close to the junction,
the traffic participant accelerates and passes through the junction.
After 60 seconds, a timeout stops the scenario.
"""
# Creating leaf nodes
start_other_trigger = InTriggerRegion(
self.ego_vehicles[0],
-80, -70,
-75, -60)
sync_arrival = SyncArrival(
self.other_actors[0], self.ego_vehicles[0],
carla.Location(x=-74.63, y=-136.34))
pass_through_trigger = InTriggerRegion(
self.ego_vehicles[0],
-90, -70,
-124, -119)
keep_velocity_other = KeepVelocity(
self.other_actors[0],
self._other_actor_target_velocity)
stop_other_trigger = InTriggerRegion(
self.other_actors[0],
-45, -35,
-140, -130)
stop_other = StopVehicle(
self.other_actors[0],
self._other_actor_max_brake)
end_condition = InTriggerRegion(
self.ego_vehicles[0],
-90, -70,
-170, -156
)
# Creating non-leaf nodes
root = py_trees.composites.Sequence()
scenario_sequence = py_trees.composites.Sequence()
sync_arrival_parallel = py_trees.composites.Parallel(
policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
keep_velocity_other_parallel = py_trees.composites.Parallel(
policy=py_trees.common.ParallelPolicy.SUCCESS_ON_ONE)
# Building tree
root.add_child(scenario_sequence)
scenario_sequence.add_child(ActorTransformSetter(self.other_actors[0], self._other_actor_transform))
scenario_sequence.add_child(start_other_trigger)
scenario_sequence.add_child(sync_arrival_parallel)
scenario_sequence.add_child(keep_velocity_other_parallel)
scenario_sequence.add_child(stop_other)
scenario_sequence.add_child(end_condition)
scenario_sequence.add_child(ActorDestroy(self.other_actors[0]))
sync_arrival_parallel.add_child(sync_arrival)
sync_arrival_parallel.add_child(pass_through_trigger)
keep_velocity_other_parallel.add_child(keep_velocity_other)
keep_velocity_other_parallel.add_child(stop_other_trigger)
return root
def _create_test_criteria(self):
"""
A list of all test criteria will be created that is later used
in a parallel behavior tree.
"""
criteria = []
# Adding checks for ego vehicle
collision_criterion_ego = CollisionTest(self.ego_vehicles[0])
driven_distance_criterion = DrivenDistanceTest(
self.ego_vehicles[0], self._ego_vehicle_driven_distance)
criteria.append(collision_criterion_ego)
criteria.append(driven_distance_criterion)
# Add appropriate checks for other vehicles
for vehicle in self.other_actors:
collision_criterion = CollisionTest(vehicle)
criteria.append(collision_criterion)
return criteria
def __del__(self):
"""
Remove all actors upon deletion
"""
self.remove_all_actors()
|
StarcoderdataPython
|
11264146
|
<reponame>karimbahgat/GeoStream
import pygeoj
class GeoJSON(object):
def __init__(self, filepath, **kwargs):
self.filepath = filepath
self.kwargs = kwargs
self.reader = self.load_reader()
self.fieldnames, self.fieldtypes = self.load_fields()
self.meta = self.load_meta()
def __len__(self):
return len(self.reader)
def __iter__(self):
return self.stream()
def stream(self):
for feat in self.reader:
props = feat.properties
row = [props[field] for field in self.fieldnames]
geom = feat.geometry.__geo_interface__
yield row, geom
def load_reader(self):
reader = pygeoj.load(filepath=self.filepath, **self.kwargs)
return reader
def load_fields(self):
fieldnames = self.reader.all_attributes
fieldtypes = [None for _ in fieldnames]
return fieldnames, fieldtypes
def load_meta(self):
meta = None
return meta
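# Example (illustrative sketch; 'countries.geojson' is a hypothetical path, not a
# file shipped with this module): rows and geometries are streamed lazily.
def _geojson_example(path='countries.geojson'):
    source = GeoJSON(path)
    print(source.fieldnames, len(source))
    for row, geom in source.stream():
        print(row, geom['type'])
        break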
|
StarcoderdataPython
|
6514550
|
# <NAME>
# https://github.com/danieljoserodriguez
import numpy as np
# Cross-entropy loss, or log loss, measures the performance of a classification model whose output is a
# probability value between 0 and 1
def cross_entropy(y_hat, y):
return -np.log(y_hat) if y == 1 else -np.log(1 - y_hat)
# Used for classification
def hinge(y_hat, y):
return np.maximum(0, 1 - y_hat * y)
# Typically used for regression. It’s less sensitive to outliers than the MSE as it treats error as square only
# inside an interval
def huber(y_hat, y, delta: int=1):
return np.where(np.abs(y - y_hat) < delta, .5 * (y - y_hat) ** 2, delta * (np.abs(y - y_hat) - 0.5 * delta))
# Mean Squared Error, or L2 loss.
def mean_square_error(true_values, predicted_values):
return ((true_values - predicted_values) ** 2.0).mean()
# Mean Absolute Error, or L1 loss.
def mean_absolute_error(y_hat, y):
return np.mean(np.absolute(y_hat - y))
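# Worked example (illustrative, not part of the original module): compare the losses
# on a tiny toy case, using the corrected definitions above.
def _loss_example():
    y_true = np.array([1.0, 2.0, 3.0])
    y_pred = np.array([1.5, 1.5, 2.0])
    print(mean_square_error(y_true, y_pred))    # (0.25 + 0.25 + 1.0) / 3 = 0.5
    print(mean_absolute_error(y_pred, y_true))  # (0.5 + 0.5 + 1.0) / 3 ~= 0.667
    print(huber(y_pred, y_true, delta=1))       # element-wise Huber values
    print(hinge(0.8, 1))                        # max(0, 1 - 0.8 * 1) = 0.2
    print(cross_entropy(0.9, 1))                # -log(0.9) ~= 0.105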
|
StarcoderdataPython
|
315783
|
<reponame>musslick/sweetpea-py
import operator as op
import pytest
from itertools import permutations
from sweetpea import factor, derived_level, else_level, within_trial, at_most_k_in_a_row, transition
from sweetpea import fully_cross_block, synthesize_trials_non_uniform
congruency = factor("congruency", ["congruent", "incongruent", "neutral"])
congruency_transition = factor("congruency_transition", [
derived_level("con-con", transition(lambda c: c[0] == "congruent" and c[1] == "congruent", [congruency])),
derived_level("con-inc", transition(lambda c: c[0] == "congruent" and c[1] == "incongruent", [congruency])),
derived_level("con-ntr", transition(lambda c: c[0] == "congruent" and c[1] == "neutral", [congruency])),
derived_level("inc-con", transition(lambda c: c[0] == "incongruent" and c[1] == "congruent", [congruency])),
derived_level("inc-inc", transition(lambda c: c[0] == "incongruent" and c[1] == "incongruent", [congruency])),
derived_level("inc-ntr", transition(lambda c: c[0] == "incongruent" and c[1] == "neutral", [congruency])),
derived_level("ntr-con", transition(lambda c: c[0] == "neutral" and c[1] == "congruent", [congruency])),
derived_level("ntr-inc", transition(lambda c: c[0] == "neutral" and c[1] == "incongruent", [congruency])),
else_level("ntr-ntr")
])
@pytest.mark.parametrize('design', permutations([congruency, congruency_transition]))
def test_correct_solution_count_with_congruence_factor_but_unconstrained(design):
crossing = [congruency]
constraints = []
block = fully_cross_block(design, crossing, constraints)
experiments = synthesize_trials_non_uniform(block, 100)
assert len(experiments) == 6
|
StarcoderdataPython
|
11376382
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Copyright: (c) 2020, Deutsches Zentrum fuer Luft- und Raumfahrt e.V.
Contact: <EMAIL>
"""
from typing import *
from open_turb_arch.architecting.choice import *
from open_turb_arch.architecting.opt_defs import *
__all__ = ['ArchitectingMetric', 'OutputMetric', 'ObjectiveDirection', 'Objective', 'ConstraintDirection',
'Constraint', 'ArchitectingChoice', 'AnalysisProblem', 'OperatingMetricsMap', 'OperatingCondition',
'DesignCondition', 'EvaluateCondition']
class ArchitectingMetric:
"""Base class that represents some output metrics that can be used for different roles in the optimization problem:
as objective, as constraints, or as generic output metric."""
def get_opt_objectives(self, choices: List[ArchitectingChoice]) -> List[Objective]:
raise NotImplementedError
def get_opt_constraints(self, choices: List[ArchitectingChoice]) -> List[Constraint]:
raise NotImplementedError
def get_opt_metrics(self, choices: List[ArchitectingChoice]) -> List[OutputMetric]:
raise NotImplementedError
def extract_met(self, analysis_problem: AnalysisProblem, result: OperatingMetricsMap, architecture: TurbofanArchitecture) -> Sequence[float]:
raise NotImplementedError
def extract_obj(self, analysis_problem: AnalysisProblem, result: OperatingMetricsMap, architecture: TurbofanArchitecture) -> Sequence[float]:
return self.extract_met(analysis_problem, result, architecture)
def extract_con(self, analysis_problem: AnalysisProblem, result: OperatingMetricsMap, architecture: TurbofanArchitecture) -> Sequence[float]:
return self.extract_met(analysis_problem, result, architecture)
|
StarcoderdataPython
|
9687257
|
import jsonschema
import json
with open("schema.json") as schema_file:
schema = json.load(schema_file)
def validate(manifest):
""" Validate a given manifest
"""
if type(manifest) is dict:
jsonschema.validate(manifest, schema)
elif type(manifest) is str:
decoded = json.loads(manifest)
jsonschema.validate(decoded, schema)
else:
raise TypeError()
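# Example (illustrative sketch, not part of the original module): both dict and
# JSON-string manifests are accepted; documents that do not satisfy schema.json
# raise jsonschema.ValidationError. The manifest content here is hypothetical.
def _validate_example():
    manifest = {"name": "demo"}
    validate(manifest)               # dict form
    validate(json.dumps(manifest))   # JSON string form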
|
StarcoderdataPython
|
271687
|
<reponame>lumisota/ietfdata<gh_stars>1-10
# Copyright (C) 2020 University of Glasgow
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import csv
import email.header
import email.utils
import os
import re
import string
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from dataclasses import dataclass, field
from pathlib import Path
from ietfdata.datatracker import *
from ietfdata.mailarchive import *
from ietfdata.mailhelper_headerdata import *
from ietfdata.mailhelper_datatracker import *
dt = DataTracker(cache_dir=Path("cache"))
archive = MailArchive(cache_dir=Path("cache"))
lists = list(archive.mailing_list_names())
addrs = {}
archive.download_all_messages()
index = 1
for ml_name in lists:
print(F"{index:5d} /{len(lists):5d} {ml_name:40}", end="")
index += 1
failed = 0
total = 0
for msg_id, msg in archive.mailing_list(ml_name).messages():
total += 1
try:
date_str = msg.message["Date"]
date = email.utils.parsedate_to_datetime(date_str)
year = date.timetuple().tm_year
if year == 2007:
n, e = email.utils.parseaddr(msg.message["from"])
if e != "" and e not in addrs:
addrs[e] = e
except:
failed += 1
print(F" {len(addrs):6}", end="")
if failed > 0:
print(F" (failed: {failed} of {total})")
else:
print("")
with open(Path("emails-2007.txt"), "w") as outf:
for e in addrs.values():
print(e, file=outf)
# =============================================================================
|
StarcoderdataPython
|
4854180
|
<gh_stars>1-10
from collections import defaultdict
import logging
from typing import Dict, List, Optional, Set, Tuple, Union
import torch
from torch.autograd import Variable
from allennlp.nn import util
from allennlp.nn.decoding.decoder_step import DecoderStep
from allennlp.nn.decoding.decoder_state import DecoderState
from allennlp.nn.decoding.decoder_trainers.decoder_trainer import DecoderTrainer
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class MaximumMarginalLikelihood(DecoderTrainer[Tuple[torch.Tensor, torch.Tensor]]):
"""
This class trains a decoder by maximizing the marginal likelihood of the targets. That is,
during training, we are given a `set` of acceptable or possible target sequences, and we
optimize the `sum` of the probability the model assigns to each item in the set. This allows
the model to distribute its probability mass over the set however it chooses, without forcing
`all` of the given target sequences to have high probability. This is helpful, for example, if
you have good reason to expect that the correct target sequence is in the set, but aren't sure
`which` of the sequences is actually correct.
This implementation of maximum marginal likelihood requires the model you use to be `locally
normalized`; that is, at each decoding timestep, we assume that the model creates a normalized
probability distribution over actions. This assumption is necessary, because we do no explicit
normalization in our loss function, we just sum the probabilities assigned to all correct
target sequences, relying on the local normalization at each time step to push probability mass
from bad actions to good ones.
"""
def decode(self,
initial_state: DecoderState,
decode_step: DecoderStep,
supervision: Tuple[torch.Tensor, torch.Tensor]) -> Dict[str, torch.Tensor]:
targets, target_mask = supervision
allowed_transitions = self._create_allowed_transitions(targets, target_mask)
finished_states = []
states = [initial_state]
step_num = 0
while states:
step_num += 1
next_states = []
# We group together all current states to get more efficient (batched) computation.
grouped_state = states[0].combine_states(states)
allowed_actions = self._get_allowed_actions(grouped_state, allowed_transitions)
# This will store a set of (batch_index, action_history) tuples, and we'll check it
# against the allowed actions to make sure we're actually scoring all of the actions we
# are supposed to.
actions_taken: Set[Tuple[int, Tuple[int, ...]]] = set()
for next_state in decode_step.take_step(grouped_state, allowed_actions=allowed_actions):
actions_taken.add((next_state.batch_indices[0], tuple(next_state.action_history[0])))
if next_state.is_finished():
finished_states.append(next_state)
else:
next_states.append(next_state)
states = next_states
self._check_all_actions_taken(actions_taken, grouped_state, allowed_actions)
# This is a dictionary of lists - for each batch instance, we want the score of all
# finished states. So this has shape (batch_size, num_target_action_sequences), though
# it's not actually a tensor, because different batch instance might have different numbers
# of finished states.
batch_scores = self._group_scores_by_batch(finished_states)
loss = 0
for scores in batch_scores.values(): # we don't care about the batch index, just the scores
loss += -util.logsumexp(torch.cat(scores))
return {'loss': loss / len(batch_scores)}
@staticmethod
def _create_allowed_transitions(targets: Union[torch.Tensor, List[List[List[int]]]],
target_mask: Optional[torch.Tensor] = None) -> List[Dict[Tuple[int, ...],
Set[int]]]:
"""
Takes a list of valid target action sequences and creates a mapping from all possible
(valid) action prefixes to allowed actions given that prefix. ``targets`` is assumed to be
a tensor of shape ``(batch_size, num_valid_sequences, sequence_length)``. If the mask is
not ``None``, it is assumed to have the same shape, and we will ignore any value in
``targets`` that has a value of ``0`` in the corresponding position in the mask. We assume
that the mask has the format 1*0* for each item in ``targets`` - that is, once we see our
first zero, we stop processing that target.
For example, if ``targets`` is the following tensor: ``[[1, 2, 3], [1, 4, 5]]``, the return
value will be: ``{(): set([1]), (1,): set([2, 4]), (1, 2): set([3]), (1, 4): set([5])}``.
We use this to prune the set of actions we consider during decoding, because we only need
to score the sequences in ``targets``.
"""
batched_allowed_transitions: List[Dict[Tuple[int, ...], Set[int]]] = []
if not isinstance(targets, list):
assert targets.dim() == 3, "targets tensor needs to be batched!"
targets = targets.data.cpu().numpy().tolist()
if target_mask is not None:
target_mask = target_mask.data.cpu().numpy().tolist()
else:
target_mask = [None for _ in targets]
for instance_targets, instance_mask in zip(targets, target_mask):
allowed_transitions: Dict[Tuple[int, ...], Set[int]] = defaultdict(set)
for i, target_sequence in enumerate(instance_targets):
history: Tuple[int, ...] = ()
for j, action in enumerate(target_sequence):
if instance_mask and instance_mask[i][j] == 0:
break
allowed_transitions[history].add(action)
history = history + (action,)
batched_allowed_transitions.append(allowed_transitions)
return batched_allowed_transitions
@staticmethod
def _check_all_actions_taken(action_histories: Set[Tuple[int, Tuple[int, ...]]],
grouped_state: DecoderState,
allowed_actions: List[Set[int]]) -> None:
expected_histories = set()
for i, batch_index in enumerate(grouped_state.batch_indices):
action_history = grouped_state.action_history[i]
for allowed_action in allowed_actions[i]:
expected_histories.add((batch_index, tuple(action_history + [allowed_action])))
assert action_histories == expected_histories, f'Expected: {expected_histories}'
@staticmethod
def _get_allowed_actions(state: DecoderState,
allowed_transitions: List[Dict[Tuple[int, ...], Set[int]]]) -> List[Set[int]]:
"""
Takes a list of allowed transitions for each element of a batch, and a decoder state that
contains the current action history for each `group` element, and returns a list of allowed
actions in the current state, also for each `group` element.
"""
allowed_actions = []
for batch_index, action_history in zip(state.batch_indices, state.action_history):
allowed_actions.append(allowed_transitions[batch_index][tuple(action_history)])
return allowed_actions
@staticmethod
def _group_scores_by_batch(finished_states: List[DecoderState]) -> Dict[int, List[Variable]]:
"""
Takes a list of finished states and groups all final scores for each batch element into a
list. This is not trivial because the instances in the batch all might "finish" at
different times, so we re-batch them during the training process. We need to recover the
original batch grouping so we can compute the loss correctly.
"""
batch_scores: Dict[int, List[Variable]] = defaultdict(list)
for state in finished_states:
for score, batch_index in zip(state.score, state.batch_indices):
batch_scores[batch_index].append(score)
return batch_scores
|
StarcoderdataPython
|
3559885
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""This is a class of ImageNet Dataset."""
import os
import tensorflow as tf
from official.r1.resnet.imagenet_preprocessing import preprocess_image
from vega.common import ClassFactory, ClassType
from vega.common import FileOps
from ..common.dataset import Dataset
from vega.datasets.conf.imagenet import ImagenetConfig
@ClassFactory.register(ClassType.DATASET)
class Imagenet(Dataset):
"""This is a class for Imagenet dataset.
:param data_dir: Imagenet data directory
:type data_dir: str
:param image_size: input imagenet size
:type image_size: int
:param batch_size: batch size
:type batch_size: int
:param mode: dataset mode, train or val
:type mode: str
:param fp16: whether to use fp16
:type fp16: bool, default False
:param num_parallel_batches: number of parallel batches
:type num_parallel_batches: int, default 8
:param drop_remainder: whether to drop data remainder
:type drop_remainder: bool, default False
:param transpose_input: whether to transpose input dimension
:type transpose_input: bool, default False
"""
config = ImagenetConfig()
def __init__(self, **kwargs):
"""Init Cifar10."""
super(Imagenet, self).__init__(**kwargs)
self.data_path = FileOps.download_dataset(self.args.data_path)
self.fp16 = self.args.fp16
self.num_parallel_batches = self.args.num_parallel_batches
self.image_size = self.args.image_size
self.drop_remainder = self.args.drop_last
if self.data_path == 'null' or not self.data_path:
self.data_path = None
self.num_parallel_calls = self.args.num_parallel_calls
def _record_parser(self, raw_record):
"""Parse dataset function."""
features_dict = {
'image/encoded': tf.FixedLenFeature((), tf.string, ''),
'image/class/label': tf.FixedLenFeature([], tf.int64, -1),
}
parsed = tf.parse_single_example(raw_record, features_dict)
image_buffer = parsed['image/encoded']
bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])
image = preprocess_image(image_buffer=image_buffer,
bbox=bbox,
output_height=self.image_size,
output_width=self.image_size,
num_channels=3,
is_training=self.train)
image = tf.cast(image, dtype=tf.float16 if self.fp16 else tf.float32)
label = tf.cast(parsed['image/class/label'], dtype=tf.int32) - 1
return image, label
def _read_raw_data(self, data_file):
"""Read raw data."""
dataset = tf.data.TFRecordDataset(data_file, buffer_size=8 * 1024 ** 2)
return dataset
def input_fn(self):
"""Define input_fn used by Tensorflow Estimator."""
data_files = os.path.join(
self.data_path, 'train/train-*' if self.mode == 'train' else 'val/val-*')
dataset = tf.data.Dataset.list_files(data_files, shuffle=False)
if self.world_size > 1:
dataset = dataset.shard(self.world_size, self.rank)
if self.mode == 'train':
dataset = dataset.shuffle(buffer_size=1024)
dataset = dataset.repeat()
dataset = dataset.apply(tf.contrib.data.parallel_interleave(
self._read_raw_data, cycle_length=self.num_parallel_calls, sloppy=True))
dataset = dataset.apply(
tf.contrib.data.map_and_batch(
self._record_parser, batch_size=self.args.batch_size,
num_parallel_batches=self.num_parallel_batches, drop_remainder=self.drop_remainder))
dataset = dataset.prefetch(tf.contrib.data.AUTOTUNE)
return dataset
|
StarcoderdataPython
|
1863691
|
#CASE14 Power flow data for IEEE 14 bus test case.
# Please see CASEFORMAT for details on the case file format.
# This data was converted from IEEE Common Data Format
# (ieee14cdf.txt) on 15-Oct-2014 by cdf2matp, rev. 2393
# See end of file for warnings generated during conversion.
#
# Converted from IEEE CDF file from:
# https://labs.ece.uw.edu/pstca/
#
# 08/19/93 UW ARCHIVE 100.0 1962 W IEEE 14 Bus Test Case
# MATPOWER
## MATPOWER Case Format : Version 2
import math
def return_data():
#
#
#
#
baseMVA = 100
#
## bus data
# bus_i type Pd Qd Gs Bs area Vm Va baseKV zone Vmax Vmin
bus = [
[1, 3, 0, 0, 0, 0, 1, 1.06, 0, 0, 1, 1.06, 0.94],
[2, 2, 21.7, 12.7, 0, 0, 1, 1.045, -4.98, 0, 1, 1.06, 0.94],
[3, 2, 94.2, 19, 0, 0, 1, 1.01, -12.72, 0, 1, 1.06, 0.94],
[4, 1, 47.8, -3.9, 0, 0, 1, 1.019, -10.33, 0, 1, 1.06, 0.94],
[5, 1, 7.6, 1.6, 0, 0, 1, 1.02, -8.78, 0, 1, 1.06, 0.94],
[6, 1, 0, 0, 0, 0, 1, 1.062, -13.37, 0, 1, 1.06, 0.94],
[7, 2, 0, 0, 0, 0, 1, 1.09, -13.36, 0, 1, 1.06, 0.94],
[8, 1, 29.5, 16.6, 0, 19, 1, 1.056, -14.94, 0, 1, 1.06, 0.94],
]
## generator data
# bus Pg Qg Qmax Qmin Vg mBase status Pmax Pmin Pc1, Pc2, Qc1min Qc1max Qc2min Qc2max ramp_agc ramp_10, ramp_30, ramp_q apf
gen = [
[1, 0, 0, 0, 0, 1.060, 100, 1, math.inf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2, 0, 0, 0, 0, 1.045, 100, 1, math.inf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[3, 0, 0, 0, 0, 1.010, 100, 1, math.inf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
## branch data
# fbus tbus r x b rateA rateB rateC ratio angle status angmin angmax
branch = [
[1, 2, 0.01938, 0.05917, 0.0528, 0, 0, 0, 0, 0, 1, -360, 360], # 0
[1, 5, 0.05403, 0.22304, 0.0492, 0, 0, 0, 0, 0, 1, -360, 360], # 1
[2, 3, 0.04699, 0.19797, 0.0438, 0, 0, 0, 0, 0, 1, -360, 360], # 2
[2, 4, 0.05811, 0.17632, 0.034, 0, 0, 0, 0, 0, 1, -360, 360], # 3
[2, 5, 0.05695, 0.17388, 0.0346, 20, 0, 0, 0, 0, 1, -360, 360], # 4
[3, 4, 0.06701, 0.17103, 0.0128, 0, 0, 0, 0, 0, 1, -360, 360], # 5
[4, 5, 0.01335, 0.04211, 0.0000, 0, 0, 0, 0, 0, 1, -360, 360], # 6
[4, 6, 0.00000, 0.20912, 0.0000, 0, 0, 0, 0, 0, 1, -360, 360], # 7
[4, 8, 0.00000, 0.55618, 0.0000, 0, 0, 0, 0, 0, 1, -360, 360], # 8
[6, 7, 0.00000, 0.17615, 0.0000, 0, 0, 0, 0, 0, 1, -360, 360], # 13
[6, 8, 0.00000, 0.11001, 0.0000, 0, 0, 0, 0, 0, 1, -360, 360], # 14
]
##----- OPF Data -----##
## generator cost data
# 1, startup shutdown n x1, y1, ... xn yn
# 2, startup shutdown n c(n-1) ... c0
gencost = [
[2, 0, 0, 3, 0.0430292599, 20, 0],
[2, 0, 0, 3, 0.25, 20, 0],
[2, 0, 0, 3, 0.01, 40, 0],
]
#
#
#
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
# modify the node numbers (nodes start at 0 in python as opposed to 1 in MATLAB)
num_of_nodes = len(bus)
num_of_gens = len(gen)
num_of_branches = len(branch)
for i in range(0, num_of_nodes):
bus[i][0] = int(bus[i][0]) - 1
#
for i in range(0, num_of_gens):
gen[i][0] = int(gen[i][0]) - 1
#
for i in range(0, num_of_branches):
branch[i][0] = int(branch[i][0]) - 1
branch[i][1] = int(branch[i][1]) - 1
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
#
return bus, branch, gen, gencost, baseMVA
# Warnings from cdf2matp conversion:
#
# ***** check the title format in the first line of the cdf file.
# ***** Qmax = Qmin at generator at bus 1 (Qmax set to Qmin + 10)
# ***** MVA limit of branch 1 - 2 not given, set to 0
# ***** MVA limit of branch 1 - 5 not given, set to 0
# ***** MVA limit of branch 2 - 3 not given, set to 0
# ***** MVA limit of branch 2 - 4 not given, set to 0
# ***** MVA limit of branch 2 - 5 not given, set to 0
# ***** MVA limit of branch 3 - 4 not given, set to 0
# ***** MVA limit of branch 4 - 5 not given, set to 0
# ***** MVA limit of branch 4 - 7 not given, set to 0
# ***** MVA limit of branch 4 - 9 not given, set to 0
# ***** MVA limit of branch 5 - 6 not given, set to 0
# ***** MVA limit of branch 6 - 11 not given, set to 0
# ***** MVA limit of branch 6 - 12 not given, set to 0
# ***** MVA limit of branch 6 - 13 not given, set to 0
# ***** MVA limit of branch 7 - 8 not given, set to 0
# ***** MVA limit of branch 7 - 9 not given, set to 0
# ***** MVA limit of branch 9 - 10 not given, set to 0
# ***** MVA limit of branch 9 - 14 not given, set to 0
# ***** MVA limit of branch 10 - 11 not given, set to 0
# ***** MVA limit of branch 12 - 13 not given, set to 0
# ***** MVA limit of branch 13 - 14 not given, set to 0
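# Example (illustrative, not part of the original case file): load the case data and
# inspect the zero-based numbering applied above.
def _case_example():
    bus, branch, gen, gencost, baseMVA = return_data()
    print(len(bus), "buses,", len(branch), "branches,", len(gen), "generators")
    print("first branch connects bus", branch[0][0], "to bus", branch[0][1])  # 0 -> 1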
|
StarcoderdataPython
|
3209418
|
<filename>docs/source/gallery/examples/stemplot.py
"""
Spectra : stemplot
====================
Once you have a table, one way to visualise how the ion peaks are distributed is to use
:func:`~interferences.plot.spectra.stemplot`, which you can also access from your
dataframe using the :class:`~interferences.mz` accessor:
"""
import matplotlib.pyplot as plt
from interferences import build_table
from pyrolite.geochem.ind import REE
########################################################################################
# Let's build a table to play with first, focusing on potential interferences for
# thulium (Tm), which has only one stable isotope (:math:`\mathrm{^{169}Tm}`),
# and is susceptible to interferences, especially for quadrupole ICP-MS:
#
window = ("Tm[169]", 0.1)
df = build_table(REE() + ["O", "N", "H"], window=window, max_atoms=2)
########################################################################################
# From this table, we can create our plot, here limiting the labelling to the
# five peaks with highest estimated intensity:
#
ax = df.mz.stemplot(window=window, max_labels=5, figsize=(8, 4))
plt.show()
########################################################################################
# While the production of the doubly-charged double-REE ions is likely less significant
# than shown here (no penalisation for higher charges/larger molecules is included
# in generating these spectra), we can see that :math:`\mathrm{^{153}Eu^{16}O}` could
# be a potential interference issue if the conditions are relatively oxidised,
# and if there's sufficient hydrogen, :math:`\mathrm{^{168}Er^{1}H}` may similarly
# contribute to problems.
#
# Notably, there's a number of other potential ions in vicinity of
# :math:`\mathrm{^{169}Tm}`. However, most of these are doubly-charged double-REE ions.
# Given the highly-correlated nature of the REE, these may not pose as significant
# issues for standardisation as the hydride and oxide ions.
#
|
StarcoderdataPython
|
3453955
|
''' Input Output of TSP related objects '''
from .population import (
City,
Route,
)
def read_cities(fp):
'''reads in the starter cities from a saved file'''
cities = []
with open(fp, 'r') as f:
f.readline()
for line in f:
xy = line.split('\t')
x = int(xy[0])
y = int(xy[1])
cities.append(City(x,y))
return cities
def make_cities(numcities: int):
cities = [City().randomize(0,200,0,200) for i in range(numcities)]
route = Route(cities)
return route
def write_cities(numcities: int):
route = make_cities(numcities)
route.to_csv(f'cities\\{numcities}cities.txt')
|
StarcoderdataPython
|
4938730
|
<reponame>biobdeveloper/p52
class SameCardsInOneDeckError(Exception):
pass
|
StarcoderdataPython
|
3518351
|
<filename>train_net.py
"""
Yolo Training script
This script is a simplified version of the script in detectron2/tools
"""
from pathlib import Path
import torch
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.engine import (
DefaultTrainer,
default_argument_parser,
default_setup,
launch
)
from detectron2.data import (
DatasetMapper,
build_detection_train_loader,
build_detection_test_loader
)
from detectron2.evaluation import COCOEvaluator
from detectron2.config import get_cfg
from yolo import add_yolo_config, build_yolo_aug
class Trainer(DefaultTrainer):
@classmethod
def build_evaluator(cls, cfg, dataset_name, output_folder=None):
if output_folder is None:
output_folder = Path(cfg.OUTPUT_DIR) / "inference"
return COCOEvaluator(dataset_name, output_dir=output_folder)
@classmethod
def build_train_loader(cls, cfg):
mapper = DatasetMapper(cfg, is_train=True, augmentations=build_yolo_aug(cfg))
return build_detection_train_loader(cfg, mapper=mapper)
@classmethod
def build_test_loader(cls, cfg, dataset_name):
mapper = DatasetMapper(cfg, is_train=False,
augmentations=build_yolo_aug(cfg, training=False))
return build_detection_test_loader(cfg, dataset_name, mapper=mapper)
def setup(args):
cfg = get_cfg()
add_yolo_config(cfg)
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.freeze()
default_setup(cfg, args)
return cfg
def main(args):
cfg = setup(args)
if args.eval_only:
model = Trainer.build_model(cfg)
DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.resume
)
res = Trainer.test(cfg, model)
return res
trainer = Trainer(cfg)
trainer.resume_or_load(resume=args.resume)
return trainer.train()
if __name__ == "__main__":
args = default_argument_parser().parse_args()
print("Command Line Args:", args)
launch(
main,
args.num_gpus,
num_machines=args.num_machines,
machine_rank=args.machine_rank,
dist_url=args.dist_url,
args=(args,),
)
|
StarcoderdataPython
|
3526808
|
<filename>core/urls.py
from django.urls import path
from .views import client_view
app_name = "core"
urlpatterns = [
path("", view=client_view, name="client-app"),
]
|
StarcoderdataPython
|
11323424
|
<gh_stars>10-100
from aser.database.db_API import KG_Connection
import time
from tqdm import tqdm
# import aser
import ujson as json
from multiprocessing import Pool
import spacy
import random
import pandas
import numpy as np
from itertools import combinations
from scipy import spatial
import os
def get_ConceptNet_info(file_path):
tmp_collection = dict()
with open(file_path, 'r', encoding='utf-8') as f:
for line in f:
tmp_words = line[:-1].split('\t')
if tmp_words[3] == '0':
continue
if tmp_words[0] not in tmp_collection:
tmp_collection[tmp_words[0]] = list()
tmp_collection[tmp_words[0]].append((tmp_words[1], tmp_words[2]))
return tmp_collection
def load_jsonlines(file_name):
extracted_file = list()
with open(file_name, 'r') as f:
for line in f:
tmp_info = json.loads(line)
extracted_file.append(tmp_info)
return extracted_file
def chunks(l, group_number):
if len(l) < 10:
return [l]
group_size = int(len(l) / group_number)
final_data_groups = list()
for i in range(0, len(l), group_size):
final_data_groups.append(l[i:i+group_size])
return final_data_groups
def match_commonsense_and_aser(sample_pairs, ASER):
tmp_dict = dict()
for tmp_tuple in sample_pairs:
head_words = tmp_tuple['head'].split(' ')
tail_words = tmp_tuple['tail'].split(' ')
all_words = head_words + tail_words
tmp_key = tmp_tuple['head'] + '$$' + tmp_tuple['tail']
matched_eventualities = list()
for tmp_event in ASER:
is_match = True
for w in all_words:
if w not in tmp_event['words'].split(' '):
is_match = False
break
if is_match:
matched_eventualities.append(tmp_event)
tmp_dict[tmp_key] = matched_eventualities
return tmp_dict
def match_commonsense_and_aser_edge(sample_pairs, ASER):
tmp_dict = dict()
for tmp_tuple in sample_pairs:
head_words = tmp_tuple['head'].split(' ')
tail_words = tmp_tuple['tail'].split(' ')
all_words = head_words + tail_words
tmp_key = tmp_tuple['head'] + '$$' + tmp_tuple['tail']
matched_eventualities = list()
for tmp_event in ASER:
is_match = True
edge_words = tmp_event['event_1_words'].split(' ') + tmp_event['event_2_words'].split(' ')
for w in all_words:
if w not in edge_words:
is_match = False
break
if is_match:
matched_eventualities.append(tmp_event)
tmp_dict[tmp_key] = matched_eventualities
return tmp_dict
def find_head_tail_position_from_graph(graph, pattern_keywords, direction, loop):
if loop == 5:
print('Current loop is 5, we need to stop, something is wrong, please check.')
return []
if len(pattern_keywords) == 0:
return []
if direction == '<':
if len(pattern_keywords) == 3:
potential_links = list()
for edge in graph:
if edge[1] == pattern_keywords[1]:
if pattern_keywords[0] == 'head':
potential_links.append([edge[2], edge[0]])
else:
if pattern_keywords[0] == edge[2][0]:
potential_links.append([edge[0]])
return potential_links
else:
potential_links = list()
for edge in graph:
if edge[1] == pattern_keywords[1]:
if pattern_keywords[0] == 'head':
tmp_link = [edge[2], edge[0]]
new_pattern_keywords = pattern_keywords[2:]
rest_links = find_head_tail_position_from_graph(graph, new_pattern_keywords, direction, loop+1)
for tmp_rest_link in rest_links:
tmp_link = tmp_link + tmp_rest_link
potential_links.append(tmp_link)
else:
if pattern_keywords[0] == edge[2][0]:
tmp_link = [edge[0]]
new_pattern_keywords = pattern_keywords[2:]
rest_links = find_head_tail_position_from_graph(graph, new_pattern_keywords, direction,
loop + 1)
for tmp_rest_link in rest_links:
tmp_link = tmp_link + tmp_rest_link
potential_links.append(tmp_link)
return potential_links
else:
if len(pattern_keywords) == 3:
potential_links = list()
for edge in graph:
if edge[1] == pattern_keywords[1]:
if pattern_keywords[0] == 'head':
potential_links.append([edge[0], edge[2]])
else:
if pattern_keywords[0] == edge[0][0]:
potential_links.append([edge[2]])
return potential_links
else:
potential_links = list()
for edge in graph:
if edge[1] == pattern_keywords[1]:
if pattern_keywords[0] == 'head':
tmp_link = [edge[0], edge[2]]
new_pattern_keywords = pattern_keywords[2:]
rest_links = find_head_tail_position_from_graph(graph, new_pattern_keywords, direction, loop+1)
for tmp_rest_link in rest_links:
tmp_link = tmp_link + tmp_rest_link
potential_links.append(tmp_link)
else:
if pattern_keywords[0] == edge[0][0]:
tmp_link = [edge[2]]
new_pattern_keywords = pattern_keywords[2:]
rest_links = find_head_tail_position_from_graph(graph, new_pattern_keywords, direction,
loop + 1)
for tmp_rest_link in rest_links:
tmp_link = tmp_link + tmp_rest_link
potential_links.append(tmp_link)
return potential_links
def extract_knowledge_with_focused_position(graph, pattern_keywords, focused_position):
if len(pattern_keywords) == 0:
return focused_position[0]
else:
extracted_pattern = list()
extracted_nodes = [focused_position]
while len(extracted_pattern) != len(pattern_keywords):
found_new_node = False
for edge in graph:
if edge[1] in pattern_keywords and edge[1] not in extracted_pattern:
if edge[0] in extracted_nodes:
extracted_nodes.append(edge[2])
found_new_node = True
extracted_pattern.append(edge[1])
elif edge[2] in extracted_nodes:
extracted_nodes.append(edge[0])
found_new_node = True
extracted_pattern.append(edge[1])
if not found_new_node:
break
if len(extracted_pattern) == len(pattern_keywords):
sorted_nodes = sorted(extracted_nodes, key=lambda x: x[1])
tmp_knowledge = ''
for w in sorted_nodes:
tmp_knowledge += w[0]
tmp_knowledge += ' '
return tmp_knowledge[:-1]
else:
return None
def extract_knowledge_from_graph_with_knowledge(graph, pattern):
head_pattern = pattern.split(')')[0][1:]
if head_pattern == '':
head_keywords = []
else:
head_keywords = head_pattern.split('-')[1:-1]
internal_pattern = pattern.split(')')[1].split('(')[0]
tail_pattern = pattern.split('(')[2][:-1]
if tail_pattern == '':
tail_keywords = []
else:
tail_keywords = tail_pattern.split('-')[1:-1]
focus_nodes = list()
# We need to detect double direction
if '<-' in internal_pattern and '->' in internal_pattern:
all_paths = list()
# we find a double direction
if internal_pattern[0] == '<':
middle_word = internal_pattern.split('<-')[-1].split('->')[0]
first_half_pattern = internal_pattern.split(middle_word)[0]
first_half_keywords = first_half_pattern.split('<-')
first_half_keywords[0] = 'head'
first_half_keywords[-1] = 'tail'
first_half_paths = find_head_tail_position_from_graph(graph=graph, pattern_keywords=first_half_keywords, direction='<', loop=0)
second_half_pattern = internal_pattern.split(middle_word)[1]
second_half_keywords = second_half_pattern.split('->')
second_half_keywords[0] = 'head'
second_half_keywords[-1] = 'tail'
second_half_paths = find_head_tail_position_from_graph(graph=graph, pattern_keywords=second_half_keywords,
direction='>', loop=0)
for tmp_first_half_path in first_half_paths:
for tmp_second_half_path in second_half_paths:
if tmp_first_half_path[-1] == tmp_second_half_path[0] and tmp_first_half_path[-1][0] == middle_word:
all_paths.append((tmp_first_half_path[0], tmp_second_half_path[-1]))
else:
middle_word = internal_pattern.split('->')[-1].split('<-')[0]
first_half_pattern = internal_pattern.split(middle_word)[0]
first_half_keywords = first_half_pattern.split('->')
first_half_keywords[0] = 'head'
first_half_keywords[-1] = 'tail'
first_half_paths = find_head_tail_position_from_graph(graph=graph, pattern_keywords=first_half_keywords,
direction='>', loop=0)
second_half_pattern = internal_pattern.split(middle_word)[1]
second_half_keywords = second_half_pattern.split('<-')
second_half_keywords[0] = 'head'
second_half_keywords[-1] = 'tail'
second_half_paths = find_head_tail_position_from_graph(graph=graph, pattern_keywords=second_half_keywords,
direction='<', loop=0)
for tmp_first_half_path in first_half_paths:
for tmp_second_half_path in second_half_paths:
if tmp_first_half_path[-1] == tmp_second_half_path[0] and tmp_first_half_path[-1][0] == middle_word:
all_paths.append((tmp_first_half_path[0], tmp_second_half_path[-1]))
else:
if internal_pattern[0] == '<':
pattern_keywords = internal_pattern.split('<-')
else:
pattern_keywords = internal_pattern.split('->')
pattern_keywords[0] = 'head'
pattern_keywords[-1] = 'tail'
all_paths = find_head_tail_position_from_graph(graph=graph, pattern_keywords=pattern_keywords, direction=internal_pattern[0], loop=0)
extracted_knowledge_list = list()
for tmp_path in all_paths:
head_knowledge = extract_knowledge_with_focused_position(graph, head_keywords, tmp_path[0])
tail_knowledge = extract_knowledge_with_focused_position(graph, tail_keywords, tmp_path[-1])
if head_knowledge and tail_knowledge:
extracted_knowledge_list.append(head_knowledge + '$$' + tail_knowledge)
return extracted_knowledge_list
def extract_knowledge_from_eventuality_set(patterns, eventuality_set):
tmp_eventuality_dict = dict()
tmp_extracted_knowledge = dict()
for r in patterns:
tmp_extracted_knowledge[r] = dict()
for tmp_pattern in patterns[r]:
tmp_extracted_knowledge[r][tmp_pattern[0]] = dict()
for i, tmp_e in enumerate(eventuality_set):
doc = nlp(tmp_e['words'])
all_dependency_edges = list()
for word in doc:
all_dependency_edges.append(((word.head.norm_, word.head.i), word.dep_, (word.norm_, word.i)))
for r in patterns:
for pattern in patterns[r]:
tmp_knowledge_list = extract_knowledge_from_graph_with_knowledge(all_dependency_edges, pattern[0])
for tmp_knowledge in tmp_knowledge_list:
if tmp_knowledge not in tmp_extracted_knowledge[r][pattern[0]]:
tmp_extracted_knowledge[r][pattern[0]][tmp_knowledge] = 0
tmp_extracted_knowledge[r][pattern[0]][tmp_knowledge] += tmp_e['frequency']
if tmp_knowledge not in tmp_eventuality_dict:
tmp_eventuality_dict[tmp_knowledge] = list()
tmp_e['graph'] = all_dependency_edges
tmp_eventuality_dict[tmp_knowledge].append(tmp_e)
if i % 1000 == 0:
print('finished:', i, '/', len(eventuality_set))
return tmp_extracted_knowledge, tmp_eventuality_dict
def eventuality_to_graph(tmp_eventuality):
doc = nlp(tmp_eventuality['words'])
all_dependency_edges = list()
for word in doc:
all_dependency_edges.append(((word.head.norm_, word.head.i), word.dep_, (word.norm_, word.i)))
return all_dependency_edges
def eventuality_set_to_graph_set(eventuality_set):
tmp_event_id_to_graph = dict()
for i, tmp_eventuality in enumerate(eventuality_set):
tmp_graph = eventuality_to_graph(tmp_eventuality)
tmp_event_id_to_graph[tmp_eventuality['id']] = tmp_graph
if i % 10000 == 0:
print(i, '/', len(eventuality_set))
return tmp_event_id_to_graph
def extract_knowledge_from_edge_set(patterns, edge_set):
tmp_edge_dict = dict()
tmp_extracted_knowledge = dict()
for r in patterns:
tmp_extracted_knowledge[r] = dict()
for tmp_pattern in patterns[r]:
tmp_extracted_knowledge[r][tmp_pattern[0]] = dict()
for i, tmp_edge in enumerate(edge_set):
parsed_eventuality1_words = list()
doc = nlp(tmp_edge['event_1_words'])
event1_dependency_edges = list()
event1_verb = []
for word in doc:
event1_dependency_edges.append(((word.head.norm_, word.head.i), word.dep_, (word.norm_, word.i)))
parsed_eventuality1_words.append(word.text)
if word.dep_ == 'ROOT':
event1_verb = (word.norm_, word.i)
doc = nlp(tmp_edge['event_2_words'])
event2_dependency_edges = list()
event2_verb = []
for word in doc:
event2_dependency_edges.append(((word.head.norm_, word.head.i + len(parsed_eventuality1_words)), word.dep_,
(word.norm_, word.i + len(parsed_eventuality1_words))))
if word.dep_ == 'ROOT':
event2_verb = (word.norm_, word.i + len(parsed_eventuality1_words))
all_dependency_edges = event1_dependency_edges + event2_dependency_edges
all_dependency_edges.append((event1_verb, tmp_edge['connective'], event2_verb))
for r in patterns:
for pattern in patterns[r]:
tmp_knowledge_list = extract_knowledge_from_graph_with_knowledge(all_dependency_edges, pattern[0])
for tmp_knowledge in tmp_knowledge_list:
if tmp_knowledge not in tmp_extracted_knowledge[r][pattern[0]]:
tmp_extracted_knowledge[r][pattern[0]][tmp_knowledge] = 0
tmp_extracted_knowledge[r][pattern[0]][tmp_knowledge] += tmp_edge['frequency']
if tmp_knowledge not in tmp_edge_dict:
tmp_edge_dict[tmp_knowledge] = list()
tmp_edge['graph'] = all_dependency_edges
tmp_edge_dict[tmp_knowledge].append(tmp_edge)
if i % 1000 == 0:
print('finished:', i, '/', len(edge_set))
return tmp_extracted_knowledge, tmp_edge_dict
def edge_to_graph(tmp_edge):
parsed_eventuality1_words = list()
doc = nlp(tmp_edge['event_1_words'])
event1_dependency_edges = list()
event1_verb = []
for word in doc:
event1_dependency_edges.append(((word.head.norm_, word.head.i), word.dep_, (word.norm_, word.i)))
parsed_eventuality1_words.append(word.text)
if word.dep_ == 'ROOT':
event1_verb = (word.norm_, word.i)
doc = nlp(tmp_edge['event_2_words'])
event2_dependency_edges = list()
event2_verb = []
for word in doc:
event2_dependency_edges.append(((word.head.norm_, word.head.i + len(parsed_eventuality1_words)), word.dep_,
(word.norm_, word.i + len(parsed_eventuality1_words))))
if word.dep_ == 'ROOT':
event2_verb = (word.norm_, word.i + len(parsed_eventuality1_words))
all_dependency_edges = event1_dependency_edges + event2_dependency_edges
all_dependency_edges.append((event1_verb, tmp_edge['connective'], event2_verb))
return all_dependency_edges
def merge_extracted_knowledge_from_multi_core(all_extracted_knowledge):
merged_knowledge = dict()
for r in selected_patterns:
merged_knowledge[r] = dict()
for tmp_pattern in selected_patterns[r]:
merged_knowledge[r][tmp_pattern[0]] = dict()
for tmp_extracted_knowledge in tqdm(all_extracted_knowledge):
for r in tmp_extracted_knowledge:
for tmp_pattern in tmp_extracted_knowledge[r]:
for tmp_k in tmp_extracted_knowledge[r][tmp_pattern]:
if tmp_k not in merged_knowledge[r][tmp_pattern]:
merged_knowledge[r][tmp_pattern][tmp_k] = tmp_extracted_knowledge[r][tmp_pattern][tmp_k]
else:
merged_knowledge[r][tmp_pattern][tmp_k] += tmp_extracted_knowledge[r][tmp_pattern][tmp_k]
return merged_knowledge
nlp = spacy.load('en_core_web_sm')
try:
    with open('selected_patterns.json', 'r') as f:
        selected_patterns = json.load(f)
    print('Finished loading the patterns')
except Exception:
    # Fall back to an empty pattern set if the file cannot be loaded.
    selected_patterns = dict()
    print('Could not load selected_patterns.json; starting with an empty pattern set.')
Connectives = ['Precedence', 'Succession', 'Synchronous', 'Reason', 'Result', 'Condition', 'Contrast', 'Concession', 'Conjunction', 'Instantiation', 'Restatement', 'ChosenAlternative', 'Alternative', 'Exception']
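# --- Hedged usage sketch (not part of the original pipeline) ---
# The toy pattern below is written only to match how the parsing code above splits
# pattern strings: '(head-part)internal-part(tail-part)'. The real patterns stored
# in selected_patterns.json may look different, and the sentence is illustrative.
if __name__ == '__main__':
    toy_doc = nlp('the dog chases the cat')
    toy_graph = [((w.head.norm_, w.head.i), w.dep_, (w.norm_, w.i)) for w in toy_doc]
    # Empty head/tail sub-patterns and a single 'nsubj' edge in the internal pattern:
    # this should yield governor$$dependent pairs, e.g. something like ['chases$$dog'].
    print(extract_knowledge_from_graph_with_knowledge(toy_graph, '()x->nsubj->y()'))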
|
StarcoderdataPython
|
239462
|
# -*- encoding: utf-8 -*-
'''
@project : LeetCode
@File : getLeastNumbers.py
@Contact : <EMAIL>
@Desc :
Given an integer array arr, find the k smallest numbers in it. For example, given the
8 numbers 4, 5, 1, 6, 2, 7, 3, 8, the 4 smallest numbers are 1, 2, 3 and 4.
Example 1:
Input: arr = [3,2,1], k = 2
Output: [1,2] or [2,1]
Example 2:
Input: arr = [0,1,2,1], k = 1
Output: [0]
Constraints:
0 <= k <= arr.length <= 10000
0 <= arr[i] <= 10000
@Modify Time @Author @Version @Description
------------ ------- -------- -----------
2020-03-20 zhan 1.0 None
'''
from typing import List
import heapq
class Solution:
def getLeastNumbers(self, arr: List[int], k: int) -> List[int]:
if k == 0: return []
if len(arr) <= k: return arr
hp = [-x for x in arr[:k]]
heapq.heapify(hp)
for x in arr[k:]:
if -x > hp[0]:
heapq.heappushpop(hp,-x)
ans = [-x for x in hp[:k]]
# ans = heapq.nsmallest(k,arr)
return ans
if __name__ == '__main__':
arr = [0, 1, 2, 1]
k = 2
# arr = [0, 0, 0, 2, 0, 5]
# k = 3
ans = Solution().getLeastNumbers(arr,k)
print(ans)
|
StarcoderdataPython
|
6477992
|
from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score , confusion_matrix
from sklearn.preprocessing import MinMaxScaler ,Normalizer
from sklearn.model_selection import train_test_split
data = load_breast_cancer()
X = data.data
y = data.target
# Scale all features to [0, 1] with MinMaxScaler
scale = MinMaxScaler()
scale.fit(X)
newx = scale.transform(X)
# Then normalize each sample by its maximum value. Note that this overwrites the
# MinMaxScaler output above, so only the Normalizer result is used for training.
nor = Normalizer(norm='max')
nor.fit(X)
newx = nor.transform(X)
x_train, x_test, y_train, y_test = train_test_split(newx, y, test_size = 0.2)
logreg = LogisticRegression()
logreg.fit(x_train , y_train)
result= logreg.predict(x_test)
print(accuracy_score(y_test , result))
conf = confusion_matrix(y_test , result)
print('confusion matrix \n', conf)
|
StarcoderdataPython
|
8071474
|
# File: mfservicemanager_connector.py
# Copyright (c) 2020 Splunk Inc.
#
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
import phantom.app as phantom
from phantom.base_connector import BaseConnector
from phantom.action_result import ActionResult
import requests
import json
from mfservicemanager_consts import *
from bs4 import BeautifulSoup, UnicodeDammit
class RetVal(tuple):
def __new__(cls, val1, val2=None):
return tuple.__new__(RetVal, (val1, val2))
class MfServiceManagerConnector(BaseConnector):
def __init__(self):
# Call the Base Connector's init first
super(MfServiceManagerConnector, self).__init__()
self._state = None
# Variable to hold a base_url in case the app makes REST calls
# Do note that the app json defines the asset config, so please
# modify this as you deem fit.
self._base_url = None
def _unicode_string_handler(self, input_str):
"""helper method for handling unicode strings
Arguments:
input_str -- Input string that needs to be processed
Returns:
-- Processed input string based on input_str
"""
try:
if input_str:
return UnicodeDammit(input_str).unicode_markup.encode('utf-8')
except:
self.debug_print("Error ocurred while Unicode handling of the string")
return input_str
    def _process_empty_response(self, response, action_result):
if response.status_code == 200:
return RetVal(phantom.APP_SUCCESS, {})
return RetVal(action_result.set_status(phantom.APP_ERROR, "Status code: {0}. Empty response and no information in the header".format(response.status_code)), None)
def _process_html_response(self, response, action_result):
# An html response, treat it like an error
status_code = response.status_code
try:
soup = BeautifulSoup(response.text, "html.parser")
error_text = soup.text
split_lines = error_text.split('\n')
split_lines = [x.strip() for x in split_lines if x.strip()]
error_text = '\n'.join(split_lines)
except:
error_text = "Cannot parse error details"
message = "Status Code: {0}. Data from server:\n{1}\n".format(status_code,
self._unicode_string_handler(error_text))
message = message.replace('{', '{{').replace('}', '}}')
return RetVal(action_result.set_status(phantom.APP_ERROR, message), None)
def _process_json_response(self, r, action_result):
# Try a json parse
try:
resp_json = r.json()
except Exception as e:
return RetVal(action_result.set_status(phantom.APP_ERROR, "Unable to parse JSON response. Error: {0}".format(str(e))), None)
# Please specify the status codes here
if 200 <= r.status_code < 399:
return RetVal(phantom.APP_SUCCESS, resp_json)
# You should process the error returned in the json
error_message = r.text.replace('{', '{{').replace('}', '}}')
error_message = self._unicode_string_handler(error_message)
message = "Error from server. Status Code: {0} Data from server: {1}".format(
r.status_code, error_message)
return RetVal(action_result.set_status(phantom.APP_ERROR, message), None)
def _process_response(self, r, action_result):
# store the r_text in debug data, it will get dumped in the logs if the action fails
if hasattr(action_result, 'add_debug_data'):
action_result.add_debug_data({'r_status_code': r.status_code})
action_result.add_debug_data({'r_text': r.text})
action_result.add_debug_data({'r_headers': r.headers})
# Process each 'Content-Type' of response separately
# Process a json response
if 'json' in r.headers.get('Content-Type', ''):
return self._process_json_response(r, action_result)
# Process an HTML response, Do this no matter what the API talks.
# There is a high chance of a PROXY in between phantom and the rest of
# world, in case of errors, PROXY's return HTML, this function parses
# the error and adds it to the action_result.
if 'html' in r.headers.get('Content-Type', ''):
return self._process_html_response(r, action_result)
# it's not content-type that is to be parsed, handle an empty response
if not r.text:
            return self._process_empty_response(r, action_result)
# everything else is actually an error at this point
message = "Can't process response from server. Status Code: {0} Data from server: {1}".format(
r.status_code, self._unicode_string_handler(r.text.replace('{', '{{').replace('}', '}}')))
return RetVal(action_result.set_status(phantom.APP_ERROR, message), None)
def _make_rest_call(self, endpoint, action_result, method="get", **kwargs):
# **kwargs can be any additional parameters that requests.request accepts
config = self.get_config()
resp_json = None
try:
request_func = getattr(requests, method)
except AttributeError:
return RetVal(action_result.set_status(phantom.APP_ERROR, "Invalid method: {0}".format(method)), resp_json)
# Create a URL to connect to
url = self._base_url + endpoint
try:
r = request_func(
url,
auth=(self._username, self._password), # basic authentication
verify=config.get('verify_server_cert', False),
**kwargs)
except requests.exceptions.ConnectionError:
message = 'Error Details: Connection Refused from the Server'
return RetVal(action_result.set_status(phantom.APP_ERROR, message), resp_json)
except Exception as e:
if e.message:
try:
error_msg = self._unicode_string_handler(e.message)
message = ('Error connecting to server. Details: {0}').format(error_msg)
except:
return RetVal(action_result.set_status(phantom.APP_ERROR, 'Error connecting to server. Please check the asset configuration parameters.'), resp_json)
else:
message = "Error message unavailable. Please check the asset configuration and|or action parameters."
return RetVal(action_result.set_status(phantom.APP_ERROR, message), resp_json)
return self._process_response(r, action_result)
def _handle_test_connectivity(self, param):
action_result = self.add_action_result(ActionResult(dict(param)))
self.save_progress("Retrieving list of incidents...")
ret_val, response = self._make_rest_call(HPSM_INCIDENTS_ENDPOINT, action_result, params=None, headers=None)
if (phantom.is_fail(ret_val)):
self.save_progress("Test Connectivity Failed")
return action_result.get_status()
self.save_progress("Test Connectivity Passed")
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_create_incident(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
title = param.get('title', '')
description = param.get('description', '')
# description is stored as a list of lines
description = description.splitlines()
service = param.get('service', '')
area = param.get('area', '')
subarea = param.get('subarea', '')
assignment_group = param.get('assignment_group', '')
fields = param.get('fields', '')
try:
if not len(fields):
fields = '{}'
fields = json.loads(fields)
except:
return action_result.set_status(phantom.APP_ERROR, "'fields' is not a valid JSON string. Please validate and try running the action again.")
incident = {
'Incident': {
'Title': title,
'Description': description,
'Service': service,
'Area': area,
'Subarea': subarea,
'AssignmentGroup': assignment_group
}
}
incident['Incident'].update(fields)
# make rest call
ret_val, response = self._make_rest_call(HPSM_INCIDENTS_ENDPOINT, action_result, method='post', json=incident)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
resource_data = response.get('Incident', {})
action_result.add_data(resource_data)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Create Failed'])
if not len(msgs):
msgs.append('Incident created successfully')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_get_incident(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
endpoint = HPSM_GET_RESOURCE.format(id=id, project_key='incidents')
ret_val, response = self._make_rest_call(endpoint, action_result, params=None, headers=None)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
resource_data = response.get('Incident', {})
action_result.add_data(resource_data)
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_update_incident(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
endpoint = HPSM_GET_RESOURCE.format(id=id, project_key='incidents')
update_fields = {
'Assignee': param['assignee']
}
if param.get('description'):
update_fields['Description'] = param.get('description')
if param.get('assignment_group'):
update_fields['AssignmentGroup'] = param.get('assignment_group')
if param.get('title'):
update_fields['Title'] = param.get('title')
if param.get('category'):
update_fields['Category'] = param.get('category')
if param.get('contact'):
update_fields['Contact'] = param.get('contact')
if param.get('impact'):
try:
impact = int(param.get('impact'))
except:
return action_result.set_status(phantom.APP_ERROR, "Please provide a valid integer value in the 'impact' parameter")
update_fields['Impact'] = str(impact)
if param.get('urgency'):
try:
urgency = int(param.get('urgency'))
except:
return action_result.set_status(phantom.APP_ERROR, "Please provide a valid integer value in the 'urgency' parameter")
update_fields['Urgency'] = str(urgency)
if param.get('affected_ci'):
update_fields['AffectedCI'] = param.get('affected_ci')
if param.get('area'):
update_fields['Area'] = param.get('area')
if param.get('subarea'):
update_fields['Subarea'] = param.get('subarea')
journal_updates = [
journal_update.strip()
for journal_update
in param.get('journal_updates').splitlines()
]
journal_updates = filter(None, journal_updates)
update_fields['JournalUpdates'] = journal_updates
if param.get('service'):
update_fields['Service'] = param.get('service')
if param.get('ticket_source'):
update_fields['mmmTicketSource'] = param.get('ticket_source')
update_obj = {
'Incident': update_fields
}
# update object
ret_val, response = self._make_rest_call(endpoint, action_result, method='put', json=update_obj)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
# grab relevant fields from the returned JSON response
resource_data = response.get('Incident', {})
action_result.add_data(resource_data)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Update Failed'])
if not len(msgs):
msgs.append('Update successful')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_close_incident(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
assignee = param['assignee']
closure_code = param['closure_code']
solution = [solution_line.strip() for solution_line in self._unicode_string_handler(param['solution']).splitlines()]
solution = filter(None, solution)
endpoint = HPSM_CLOSE_RESOURCE.format(id=id, project_key='incidents')
closure_data = {
'Incident': {
'Assignee': assignee,
'ClosureCode': closure_code,
'Solution': solution
}
}
# close incident
ret_val, response = self._make_rest_call(endpoint, action_result, method='post', json=closure_data)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
if response.get('Incident'):
action_result.add_data(response.get('Incident'))
else:
action_result.add_data(response)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Close Failed'])
if not len(msgs):
msgs.append('Close successful')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_create_change(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
# description is stored as a list of lines
description = param.get('description', '')
description = description.splitlines()
# noeffect is stored as a list of lines
noeffect = param.get('no_implementation_effect', '')
noeffect = noeffect.splitlines()
title = param.get('title', '')
service = param.get('service', '')
# risk_assessment = param.get('risk assessment', '')
change_coordinator = param.get('change_coordinator', '')
category = param.get('category', '')
subcategory = param.get('subcategory', '')
impact = param.get('impact', '')
reason = param.get('reason', '')
planned_end = param.get('implementation_end', '')
planned_start = param.get('implementation_start', '')
assignment_group = param.get('assignment_group', '')
fields = param.get('fields', '')
try:
if not len(fields):
fields = '{}'
fields = json.loads(fields)
except:
return action_result.set_status(phantom.APP_ERROR, "'fields' is not a valid JSON string. Please validate and try running the action again.")
change = {
'Change': {
'Impact': impact,
# 'RiskAssessment': risk_assessment,
"EffectOfNotImplementing": noeffect,
'Service': service,
'AssignmentGroup': assignment_group,
'header': {
'Title': title,
"ChangeCoordinator": change_coordinator,
"Category": category,
"Subcategory": subcategory,
"AssignmentGroup": assignment_group,
"Reason": reason,
"PlannedEnd": planned_end,
"PlannedStart": planned_start,
},
'description.structure': {
'Description': description
}
}
}
change.update(fields)
# make rest call
ret_val, response = self._make_rest_call(HPSM_CHANGES_ENDPOINT, action_result, method='post', json=change)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
resource_data = response.get('Change', {})
action_result.add_data(resource_data)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Create Failed'])
if not len(msgs):
msgs.append('Change created successfully')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_get_change(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
endpoint = HPSM_GET_RESOURCE.format(id=id, project_key='changes')
ret_val, response = self._make_rest_call(endpoint, action_result, params=None, headers=None)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
change_data = response.get('Change', {})
action_result.add_data(change_data)
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_close_change(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
try:
closure_code = int(param['closure_code'])
except:
return action_result.set_status(phantom.APP_ERROR, "Please provide valid numeric value in 'closure_code' action parameter")
closing_comments = [result_line.strip() for result_line in param['closure_comments'].splitlines()]
closing_comments = filter(None, closing_comments)
review_results = [result_line.strip() for result_line in param['review_results'].splitlines()]
review_results = filter(None, review_results)
endpoint = HPSM_CLOSE_RESOURCE.format(id=id, project_key='changes')
closure_data = {
'Change': {
'close': {
'ClosureCode': closure_code,
'ClosingComments': closing_comments
},
'ReviewResults': review_results
}
}
# close change
ret_val, response = self._make_rest_call(endpoint, action_result, method='post', json=closure_data)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
if response.get('Change'):
            action_result.add_data(response.get('Change'))
else:
action_result.add_data(response)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Close Failed'])
if not len(msgs):
msgs.append('Close successful')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_create_configitem(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
# description is stored as a list of lines
description = param.get('description', '')
description = description.splitlines()
fields = param.get('fields', '')
try:
if not len(fields):
fields = '{}'
fields = json.loads(fields)
except:
return action_result.set_status(phantom.APP_ERROR, "'fields' is not a valid JSON string. Please validate and try running the action again.")
device = {
"Device": {
"AssignmentGroup": param['assignment_group'],
"ConfigurationItemType": param['ci_type'],
"ConfigurationItemSubType": param['ci_subtype'],
"ContactName": param['owner_individual'],
"Department": param['department'],
"DepartmentOwner": param['department_owner'],
"DisplayName": param['display_name'],
"Status": param['status']
}
}
device.update(fields)
# make rest call
ret_val, response = self._make_rest_call(HPSM_CONFIGITEMS_ENDPOINT, action_result, method='post', json=device)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
resource_data = response.get('Device', {})
action_result.add_data(resource_data)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Create Failed'])
if not len(msgs):
msgs.append('Config Item created successfully')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_get_object(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
project_key = self._unicode_string_handler(param.get('project_key', 'incidents')).lower()
endpoint = HPSM_GET_RESOURCE.format(id=id, project_key=project_key)
ret_val, response = self._make_rest_call(endpoint, action_result, params=None, headers=None)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
# resource_data = response.get(project_key.capitalize()[:-1], {})
action_result.add_data(response)
return action_result.set_status(phantom.APP_SUCCESS)
def _handle_update_object(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
update_fields = param.get('update_fields', '')
try:
if not len(update_fields):
update_fields = '{}'
update_fields = json.loads(update_fields)
except:
return action_result.set_status(phantom.APP_ERROR, "'update_fields' is not a valid JSON string. Please validate and try running the action again.")
project_key = self._unicode_string_handler(param.get('project_key', 'incidents')).lower()
endpoint = HPSM_GET_RESOURCE.format(id=id, project_key=project_key)
if not update_fields.get('JournalUpdates'):
update_fields['JournalUpdates'] = [HPSM_DEFAULT_UPDATE_MESSAGE]
update_obj = {
project_key.capitalize()[:-1]: update_fields
}
# update object
ret_val, response = self._make_rest_call(endpoint, action_result, method='put', json=update_obj)
if (phantom.is_fail(ret_val)):
return action_result.get_status()
# grab relevant fields from the returned JSON response
resource_data = response.get(project_key.capitalize()[:-1], {})
action_result.add_data(resource_data)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Update Failed'])
if not len(msgs):
msgs.append('Update successful')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def _handle_close_object(self, param):
self.save_progress("In action handler for: {0}".format(self.get_action_identifier()))
action_result = self.add_action_result(ActionResult(dict(param)))
id = self._unicode_string_handler(param['id'])
project_key = self._unicode_string_handler(param.get('project_key', 'incidents')).lower()
endpoint = HPSM_CLOSE_RESOURCE.format(id=id, project_key=project_key)
# close object
ret_val, response = self._make_rest_call(endpoint, action_result, method='post')
if (phantom.is_fail(ret_val)):
return action_result.get_status()
action_result.add_data(response)
summary = action_result.update_summary({})
summary['success'] = response.get('ReturnCode', 0) == 0
msgs = response.get('Messages', ['Close Failed'])
if not len(msgs):
msgs.append('Close successful')
return action_result.set_status(phantom.APP_SUCCESS, msgs[0])
def handle_action(self, param):
ret_val = phantom.APP_SUCCESS
# Get the action that we are supposed to execute for this App Run
action_id = self.get_action_identifier()
self.debug_print("action_id", self.get_action_identifier())
if action_id == 'test_connectivity':
ret_val = self._handle_test_connectivity(param)
elif action_id == 'create_incident':
ret_val = self._handle_create_incident(param)
elif action_id == 'get_incident':
ret_val = self._handle_get_incident(param)
elif action_id == 'update_incident':
ret_val = self._handle_update_incident(param)
elif action_id == 'close_incident':
ret_val = self._handle_close_incident(param)
elif action_id == 'create_change':
ret_val = self._handle_create_change(param)
elif action_id == 'get_change':
ret_val = self._handle_get_change(param)
elif action_id == 'close_change':
ret_val = self._handle_close_change(param)
elif action_id == 'create_configitem':
ret_val = self._handle_create_configitem(param)
elif action_id == 'get_object':
ret_val = self._handle_get_object(param)
elif action_id == 'update_object':
ret_val = self._handle_update_object(param)
elif action_id == 'close_object':
ret_val = self._handle_close_object(param)
return ret_val
def initialize(self):
# Load the state in initialize, use it to store data
# that needs to be accessed across actions
self._state = self.load_state()
# get the asset config
config = self.get_config()
self._base_url = self._unicode_string_handler(config['base_url'])
if self._base_url.endswith('/'):
self._base_url = self._base_url[:-1]
self._username = self._unicode_string_handler(config['username'])
self._password = config['password']
return phantom.APP_SUCCESS
def finalize(self):
# Save the state, this data is saved across actions and app upgrades
self.save_state(self._state)
return phantom.APP_SUCCESS
if __name__ == '__main__':
import pudb
import argparse
pudb.set_trace()
argparser = argparse.ArgumentParser()
argparser.add_argument('input_test_json', help='Input Test JSON file')
argparser.add_argument('-u', '--username', help='username', required=False)
argparser.add_argument('-p', '--password', help='password', required=False)
args = argparser.parse_args()
session_id = None
username = args.username
password = args.password
if (username is not None and password is None):
# User specified a username but not a password, so ask
import getpass
        password = getpass.getpass("Password: ")  # prompt for the password when only a username is given
if (username and password):
try:
print ("Accessing the Login page")
login_url = '{}/login'.format(BaseConnector.get_phantom_base_url())
r = requests.get(login_url, verify=False)
csrftoken = r.cookies['csrftoken']
data = dict()
data['username'] = username
data['password'] = password
data['csrfmiddlewaretoken'] = csrftoken
headers = dict()
headers['Cookie'] = 'csrftoken=' + csrftoken
headers['Referer'] = login_url
print ("Logging into Platform to get the session id")
r2 = requests.post(login_url, verify=False, data=data, headers=headers)
session_id = r2.cookies['sessionid']
except Exception as e:
print ("Unable to get session id from the platfrom. Error: " + str(e))
exit(1)
with open(args.input_test_json) as f:
in_json = f.read()
in_json = json.loads(in_json)
print(json.dumps(in_json, indent=4))
connector = MfServiceManagerConnector()
connector.print_progress_message = True
if (session_id is not None):
in_json['user_session_token'] = session_id
connector._set_csrf_info(csrftoken, headers['Referer'])
ret_val = connector._handle_action(json.dumps(in_json), None)
print (json.dumps(json.loads(ret_val), indent=4))
exit(0)
|
StarcoderdataPython
|
311580
|
from Enemy.enemy import *
from Enemy.enemy1 import *
from Enemy.boss1 import *
from Enemy.boss2 import *
from Enemy.boss3 import *
class Stage():
def __init__(self):
self.deployEnemy()
def deployEnemy(self):
for i in range(0, 1):
x = 100 + int(i % 10) * 40
y = 50 + int(i / 10) * 40
enemy = Enemy((x, y))
|
StarcoderdataPython
|
9716904
|
"""
Client for interacting with the Neuromorphic Computing Platform of the Human Brain Project
as an administrator.
Authors: <NAME>, <NAME>, UNIC, CNRS
Copyright 2016 <NAME> and <NAME>, Centre National de la Recherche Scientifique
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import nmpi
TEST_QUOTAS = {
"BrainScaleS": {"limit": 0.1, "units": "wafer-hours"},
"BrainScaleS-2": {"limit": 1.0, "units": "chip-hours"},
"SpiNNaker": {"limit": 5000, "units": "core-hours"},
"BrainScaleS-ESS": {"limit": 10, "units": "hours"},
"Spikey": {"limit": 10, "units": "hours"}
}
class AdminClient(nmpi.Client):
"""
Client for interacting with the Neuromorphic Computing Platform of
the Human Brain Project, with additional methods only available to administrators.
"""
def resource_requests(self, collab_id=None, status=None):
"""
Return a list of compute-time resource requests.
Arguments
---------
`collab_id`: filter list by collab id (default: all collabs)
`status`: filter list by request status (default: all statuses)
Possible values for `status` are 'in preparation', 'under review',
'accepted', 'rejected'.
"""
projects = self._query(self.quotas_server + "/projects/")
# server-side filtering not yet supported, so we filter client side
if collab_id is not None:
projects = [p for p in projects if p['collab'] == str(collab_id)]
if status is not None:
projects = [p for p in projects if p['status'] == status]
return projects
def accept_resource_request(self, request_uri, with_quotas=False):
"""
Accept a resource (compute-time) allocation request.
"""
response = self._put(self.quotas_server + request_uri,
{"status": "accepted"})
if with_quotas:
for platform, values in with_quotas.items():
self.add_quota(request_uri,
platform=platform,
limit=values["limit"],
units=values["units"])
return response
def reject_resource_request(self, request_uri):
"""
Reject a resource (compute-time) allocation request.
"""
response = self._put(self.quotas_server + request_uri,
{"status": "rejected"})
return response
def add_quota(self, request_uri, platform, limit, units=None):
"""
Add a compute-time quota to a resource request.
"""
if units is None:
if platform in TEST_QUOTAS:
units = TEST_QUOTAS[platform]["units"]
else:
raise ValueError("Must specify units")
project_id = request_uri.split("/")[-1]
quota = {
"units": units,
"limit": limit,
"usage": 0.0,
"platform": platform,
"project" : project_id
}
response = self._post(self.quotas_server + request_uri + "/quotas/",
quota)
return response
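# --- Hedged usage sketch (not part of the original module) ---
# How an administrator might review and accept pending resource requests. The
# constructor arguments are assumptions about nmpi.Client (real HBP credentials
# are required), and 'resource_uri' is an assumed field name in the responses of
# the quotas service; treat this as a sketch rather than a tested recipe.
if __name__ == "__main__":
    admin = AdminClient("admin-username", token="EXAMPLE-TOKEN")  # assumed signature
    for request in admin.resource_requests(status="under review"):
        print(request["collab"], request["status"])
        admin.accept_resource_request(request["resource_uri"],  # assumed field name
                                      with_quotas=TEST_QUOTAS)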
|
StarcoderdataPython
|
30566
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from fclib.models.dilated_cnn import create_dcnn_model
def test_create_dcnn_model():
mod0 = create_dcnn_model(seq_len=1) # default args
assert mod0 is not None
mod1 = create_dcnn_model(
seq_len=1, n_dyn_fea=1, n_outputs=2, n_dilated_layers=1, kernel_size=2, dropout_rate=0.05, max_cat_id=[30, 120]
)
assert mod1 is not None
mod2 = create_dcnn_model(
seq_len=1, n_dyn_fea=1, n_outputs=2, n_dilated_layers=2, kernel_size=2, dropout_rate=0.05, max_cat_id=[30, 120]
)
assert mod2 is not None
|
StarcoderdataPython
|
6502904
|
from __future__ import annotations
# Standard library
from pathlib import Path
# Third-party
import yaml
__all__ = ['load_yaml_config']
def load_yaml_config(file_path: str | Path) -> dict:
"""
Parameters
----------
file_path : str or Path
Yaml config file name. The file is assumed to be in
the repo's config directory.
Returns
-------
config : dict
Configuration parameters stored in a dictionary.
"""
file_path = Path(file_path)
with open(file_path) as file:
config = yaml.load(file, Loader=yaml.CLoader)
return config
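# Hedged usage sketch: 'config/example.yaml' is a hypothetical path; pass the
# path of whichever YAML file holds the configuration you want to load.
if __name__ == '__main__':
    config = load_yaml_config('config/example.yaml')
    print(config)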
|
StarcoderdataPython
|
6674308
|
from pyVDMC import *
@vdm_module('n1', 'n2')
class fibonacci:
"""
state State of
n1 : nat
n2 : nat
init s == s = mk_State(0, 1)
end
operations
next : () ==> nat
next() == (dcl n : nat := n1 + n2; n1 := n2; n2 := n; return n)
post RESULT = n1~ + n2~ and n1 = n2~ and n2 = RESULT;
prev : () ==> nat
prev() == (dcl n : nat := n2 - n1; n2 := n1; n1 := n; return n2)
post n1 + n2 = n2~ and n2 = n1~ and n2 = RESULT;
"""
def __init__(self):
self.n1 = 0
self.n2 = 1
@vdm_method
def next(self):
"""
generates the next fibonacci number
"""
pass
@vdm_test
def prev(self):
"""
rollbacks to the previous fibonacci number
"""
n = self.n2 - self.n1
self.n2 = self.n1
self.n1 = n
return self.n2
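# --- Hedged usage sketch (not part of the original module) ---
# Running this requires the external VDM toolchain that pyVDMC drives; the
# expected values follow from the VDM specification in the class docstring.
if __name__ == '__main__':
    fib = fibonacci()
    print(fib.next())   # 1, since the state starts as (n1, n2) = (0, 1)
    print(fib.next())   # 2
    print(fib.prev())   # rolls the state back and returns 1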
|
StarcoderdataPython
|
9680173
|
from bisect import bisect
from typing import List, Callable, Dict
class OneTimeCancelFunction:
"""
A one time cancellation function
"""
def __init__(self, cancel: Callable):
self.cancelled: bool = False
self.__cancel = cancel
def run_cancel(self):
if self.cancelled:
return
self.cancelled = True
self.__cancel()
class UndoManager:
"""
Allows for actions to be undone / rolled back to certain points.
Note that if not enabled, registration and saving has no effect.
"""
def __init__(self):
# The current index in the undo history. (This is one ahead of the actual position in each list)
self.__current_index: int = 0
# Can enable and disable undo and redo history. This is done while undoing or redoing for example
self.enabled: bool = True
# A tuple of undo and redo actions
self.__undo_actions: List[Callable] = []
self.__redo_actions: List[Callable] = []
# A dictionary to remember locations in the undo history by name
self.__labels: Dict[str, List[int]] = dict()
def reset(self):
"""
Reset the undo manager so no undo commands are left in history
:return: nothing
"""
self.__current_index: int = 0
self.enabled: bool = True
self.__undo_actions: List[Callable] = []
self.__redo_actions: List[Callable] = []
self.__labels: Dict[str, List[int]] = dict()
def register(self, undo: Callable, redo: Callable):
"""
Register an undo and redo action so that the change can be reverted.
:param undo: a function to call to undo the change
:param redo: a function to call to redo the change.
:return: nothing
"""
if not self.enabled:
return
if self.__current_index < len(self.__undo_actions):
self.__undo_actions: List[Callable] = self.__undo_actions[:self.__current_index]
self.__redo_actions: List[Callable] = self.__redo_actions[:self.__current_index]
# Clear out any labels referring to positions we can no longer reach
for label in self.__labels:
self.__labels[label] = self.__labels[label][:bisect(self.__labels[label], self.__current_index)]
self.__undo_actions.append(undo)
self.__redo_actions.append(redo)
self.__current_index += 1
def register_cancel(self, cancel: Callable):
"""
Registers a function to "cancel" an action. This ensures an action is cancelled just once
on undo, and it is never redone.
:param cancel: the cancel function to invoke
:return: nothing
"""
canceller = OneTimeCancelFunction(cancel)
        self.register(canceller.run_cancel, lambda: True)
def save_position(self, label: str):
"""
Save the current position / state with the given label.
        The same label can be used more than once.
:param label: the label to save against
:return: nothing
"""
if not self.enabled:
return
if label not in self.__labels:
self.__labels[label] = [self.__current_index]
else:
if self.__current_index not in self.__labels[label]:
self.__labels[label].append(self.__current_index)
def delete_label(self, label: str):
"""
Forget about a label to free memory
:param label: the label to forget
:return: nothing
"""
self.__labels.pop(label, None)
def undo(self, label: str):
"""
Undo to the previous index stored against the given label.
If the label does not exist or has no position to return to, this does nothing.
        :param label: the label to undo until
:return: nothing
"""
if label not in self.__labels or len(self.__labels[label]) == 0:
return
# Calculate the new position to go back to
indices = self.__labels[label][:bisect(self.__labels[label], self.__current_index)]
if not indices:
return
target_index = indices[-1]
if target_index == self.__current_index:
if len(indices) <= 1:
return
target_index = indices[-2]
self.undo_to_index(target_index)
def undo_to_index(self, target_index: int):
"""
Undo to a specific index. Note that zero undoes everything.
Raises a value error if the index is greater than current position, or the index is negative.
:param target_index: the index to undo to
:return: nothing
"""
if target_index < 0:
raise ValueError("Cannot undo to a negative index.")
if target_index > self.__current_index:
raise ValueError(f"Cannot undo as ahead of index. Current index is {self.__current_index} "
f"and requested index was {target_index}")
# Undo until we return to the specified index
self.enabled = False
for index in reversed(range(target_index, self.__current_index)):
self.__undo_actions[index]()
self.enabled = True
self.__current_index = target_index
def redo(self, label: str):
"""
Redo to the next index stored against the given label.
If the label does not exist or has no position to return to, this does nothing.
:param label: the label to redo until
:return: nothing
"""
if label not in self.__labels or len(self.__labels[label]) == 0:
return
# Calculate the position to go forward to
indices = self.__labels[label][bisect(self.__labels[label], self.__current_index):]
if not indices:
return
self.redo_to_index(indices[0])
def redo_to_index(self, target_index: int):
"""
Redo to a specific index.
Raises a value error if the index is less than current position, or the index is greater than the largest known.
:return: nothing
"""
if target_index > len(self.__redo_actions):
raise ValueError("Cannot redo beyond redo history.")
if target_index < self.__current_index:
raise ValueError(f"Cannot redo as behind index. Current index is {self.__current_index} "
f"and requested index was {target_index}")
# Redo until we return to the specified index
self.enabled = False
for index in range(self.__current_index, target_index):
self.__redo_actions[index]()
self.enabled = True
self.__current_index = target_index
def test_undo_manager():
undo_manager = UndoManager()
undo_manager.save_position("saved")
ls = [1]
undo_manager.register(ls.pop, lambda: ls.append(1))
undo_manager.save_position("saved")
ls.append(2)
undo_manager.register(ls.pop, lambda: ls.append(2))
undo_manager.save_position("saved")
ls.append(3)
undo_manager.register(ls.pop, lambda: ls.append(3))
undo_manager.save_position("saved")
undo_manager.undo("saved")
assert ls == [1, 2]
undo_manager.undo("saved")
assert ls == [1]
undo_manager.undo("saved")
assert ls == []
undo_manager.undo("saved")
assert ls == []
undo_manager.redo("saved")
assert ls == [1]
undo_manager.redo("saved")
assert ls == [1, 2]
undo_manager.redo("saved")
assert ls == [1, 2, 3]
undo_manager.redo("saved")
assert ls == [1, 2, 3]
undo_manager.undo("saved")
undo_manager.undo("saved")
assert ls == [1]
ls.append(4)
undo_manager.register(ls.pop, lambda: ls.append(4))
undo_manager.save_position("saved")
undo_manager.redo("saved")
assert ls == [1, 4]
undo_manager.undo("saved")
assert ls == [1]
undo_manager.undo("saved")
assert ls == []
undo_manager.redo("saved")
assert ls == [1]
undo_manager.redo("saved")
assert ls == [1, 4]
undo_manager.redo("saved")
assert ls == [1, 4]
|
StarcoderdataPython
|
8048648
|
import threading
from pathlib import Path
#pylint: disable=no-name-in-module
from PyQt5 import QtCore, QtGui
from PyQt5.QtWidgets import (QWidget, QToolButton, QLineEdit, QPushButton,
QFrame, QLabel, QApplication, QMessageBox)
import src.images
from utils.logger import logger
from utils.vaultplusDB import validate_backupcode
class EnterBackupCode(object):
"""Display enter backup code GUI to the user."""
def setupUi(self, Form: QWidget) -> None:
"""Creates the widget objects in the proper containers and assigns the proper object names to them.
Args:
Form: Object of QWidget.
"""
self.Form = Form
Form.setObjectName("Form")
Form.setEnabled(True)
Form.setFixedSize(343, 404)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/newPrefix/new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Form.setWindowIcon(icon)
Form.setStyleSheet(
"*{\n"
"font-family:Calibri;\n"
"font-size:20px;\n"
"}\n"
"\n"
"QFrame\n"
"{\n"
"background: rgba(0,0,0,0.8);\n"
"border-radius:15px\n"
"}\n"
"\n"
"QToolButton\n"
"{\n"
"\n"
"background:#2671a0;\n"
"border-radius:40px;\n"
"}\n"
"\n"
"QLabel\n"
"{\n"
"color:white;\n"
"background:transparent;\n"
"font-weight:bold;\n"
"}\n"
"\n"
"#label_2{\n"
"font-weight:normal;\n"
"}\n"
"QPushButton\n"
"{\n"
"color:white;\n"
"background:#2671a0;\n"
"border-radius:15px;\n"
"font-weight:bold;\n"
"\n"
"}\n"
"QPushButton:hover\n"
"{\n"
"border-radius:15px;\n"
"}\n"
"\n"
"#pushbutton2\n"
"{\n"
"background:rgba(0,0,0,0);\n"
"font-weight:normal;\n"
"}\n"
"\n"
"QLineEdit\n"
"{\n"
"background:transparent;\n"
"border:none;\n"
"color:white;\n"
"border-bottom:1px solid #717072;\n"
"}"
)
self.frame = QFrame(Form)
self.frame.setGeometry(QtCore.QRect(10, 50, 321, 341))
self.frame.setFrameShape(QFrame.StyledPanel)
self.frame.setFrameShadow(QFrame.Raised)
self.frame.setObjectName("frame")
self.label = QLabel(self.frame)
self.label.setGeometry(QtCore.QRect(120, 40, 111, 41))
self.label.setStyleSheet("")
self.label.setFrameShadow(QFrame.Plain)
self.label.setTextFormat(QtCore.Qt.AutoText)
self.label.setObjectName("label")
self.pushButton = QPushButton(self.frame)
self.pushButton.setGeometry(QtCore.QRect(10, 220, 301, 51))
self.pushButton.setObjectName("pushButton")
self.lineEdit_2 = QLineEdit(self.frame)
self.lineEdit_2.setGeometry(QtCore.QRect(20, 150, 271, 31))
font = QtGui.QFont()
font.setFamily("Calibri")
self.lineEdit_2.setFont(font)
self.lineEdit_2.setStyleSheet("font-size:19px;")
self.lineEdit_2.setText("")
self.lineEdit_2.setEchoMode(QLineEdit.Normal)
self.lineEdit_2.setObjectName("lineEdit_2")
self.pushbutton2 = QPushButton(self.frame)
self.pushbutton2.setGeometry(QtCore.QRect(40, 290, 241, 31))
self.pushbutton2.setObjectName("pushbutton2")
self.label_2 = QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(20, 100, 201, 31))
self.label_2.setObjectName("label_2")
self.toolButton = QToolButton(Form)
self.toolButton.setGeometry(QtCore.QRect(130, 10, 81, 81))
self.toolButton.setStyleSheet("")
self.toolButton.setText("")
self.toolButton.setIcon(icon)
self.toolButton.setIconSize(QtCore.QSize(45, 50))
self.toolButton.setObjectName("toolButton")
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form: QWidget) -> None:
"""Sets the text and titles of the widgets.
Args:
Form: Object of QWidget.
"""
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Vault Plus"))
self.label.setText(_translate("Form", "Vault Plus"))
self.pushButton.setText(_translate("Form", "Log in"))
self.lineEdit_2.setPlaceholderText(_translate("Form", "8 digit code"))
self.pushbutton2.setText(_translate("Form", "Go back"))
self.label_2.setText(_translate("Form", "Enter backup code:"))
def validate(self) -> bool:
"""Validate the input provided by the user.
Returns:
True if the input is valid else False.
"""
msg = QMessageBox()
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/newPrefix/new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
msg.setWindowIcon(QtGui.QIcon(icon))
msg.setIcon(QMessageBox.Warning)
usercode = self.lineEdit_2.text()
if not usercode:
msg.setWindowTitle("Backup code")
msg.setText('Please fill all fields.')
msg.exec_()
else:
usercode = usercode.replace("-", "").replace(" ", "")
email = Path("modules", "user.txt").read_text()
if not validate_backupcode(email, usercode):
msg.setWindowTitle("Backup Code")
msg.setText("Invalid code. Try again.")
msg.exec_()
else:
self.Form.close()
msg.setIcon(QMessageBox.Information)
msg.setWindowTitle("Backup code")
msg.setText("Backup codes have been updated. Go to 'Backup codes' section and download the text file containing the new backup codes.")
msg.exec_()
return True
|
StarcoderdataPython
|
11365264
|
import random
import numpy as np
import cv2
import torch
import torch.utils.data as data
import data.util as util
import os.path as osp
class LQGT_dataset(data.Dataset):
def __init__(self, opt):
super(LQGT_dataset, self).__init__()
self.opt = opt
self.data_type = self.opt['data_type']
self.paths_LQ, self.paths_GT = None, None
self.sizes_GT, self.paths_GT = util.get_image_paths(self.data_type, opt['dataroot_GT'])
self.sizes_LQ, self.paths_LQ = util.get_image_paths(self.data_type, opt['dataroot_LQ'])
assert self.paths_GT, 'Error: GT path is empty.'
if self.paths_LQ and self.paths_GT:
assert len(self.paths_LQ) == len(
self.paths_GT
), 'GT and LQ datasets have different number of images - {}, {}.'.format(
len(self.paths_LQ), len(self.paths_GT))
self.mask_folder = opt['dataroot_mask']
def __getitem__(self, index):
GT_path, LQ_path = None, None
scale = self.opt['scale']
GT_size = self.opt['GT_size']
# get GT image
GT_path = self.paths_GT[index]
img_GT = util.read_img(None, GT_path)
# get LQ image
LQ_path = self.paths_LQ[index]
img_LQ = util.read_img(None, LQ_path)
# get mask when mask folder is not None
if self.mask_folder is not None:
mask_name = osp.basename(LQ_path)[:-4] + '.npy'
mask_path = osp.join(self.mask_folder, mask_name)
mask = util.read_npy(mask_path)
mask = np.expand_dims(mask, 2).repeat(3, axis=2)
if self.opt['phase'] == 'train':
H, W, C = img_LQ.shape
H_gt, W_gt, C = img_GT.shape
if H != H_gt:
print('*******wrong image*******:{}'.format(LQ_path))
LQ_size = GT_size // scale
# randomly crop
if GT_size is not None:
rnd_h = random.randint(0, max(0, H - LQ_size))
rnd_w = random.randint(0, max(0, W - LQ_size))
img_LQ = img_LQ[rnd_h:rnd_h + LQ_size, rnd_w:rnd_w + LQ_size, :]
rnd_h_GT, rnd_w_GT = int(rnd_h * scale), int(rnd_w * scale)
img_GT = img_GT[rnd_h_GT:rnd_h_GT + GT_size, rnd_w_GT:rnd_w_GT + GT_size, :]
# augmentation - flip, rotate
img_LQ, img_GT = util.augment([img_LQ, img_GT], self.opt['use_flip'],
self.opt['use_rot'])
# resize for alignment
H, W, C = img_LQ.shape
if H%32!=0 or W%32!=0:
H_new = int(np.ceil(H / 32) * 32)
W_new = int(np.ceil(W / 32) * 32)
img_LQ = cv2.resize(img_LQ, (W_new, H_new))
img_GT = cv2.resize(img_GT, (W_new, H_new))
if self.mask_folder is None:
r = 0.95
mask = np.max(img_LQ, 2)
mask = np.minimum(1.0, np.maximum(0, mask - r) / (1 - r))
mask = np.expand_dims(mask, 2).repeat(3, axis=2)
# BGR to RGB, HWC to CHW, numpy to tensor
if img_GT.shape[2] == 3:
img_GT = img_GT[:, :, [2, 1, 0]]
img_LQ = img_LQ[:, :, [2, 1, 0]]
img_GT = torch.from_numpy(np.ascontiguousarray(np.transpose(img_GT, (2, 0, 1)))).float()
img_LQ = torch.from_numpy(np.ascontiguousarray(np.transpose(img_LQ, (2, 0, 1)))).float()
mask = torch.from_numpy(np.ascontiguousarray(np.transpose(mask, (2, 0, 1)))).float()
if LQ_path is None:
LQ_path = GT_path
return {'LQ': img_LQ, 'GT': img_GT, 'mask': mask, 'LQ_path': LQ_path, 'GT_path': GT_path}
def __len__(self):
return len(self.paths_GT)
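# --- Hedged usage sketch (not part of the original module) ---
# The option keys below are the ones this dataset class actually reads; the paths
# are placeholders, 'img' is an assumed value for data_type, and real image
# folders are required before __getitem__ can return anything.
if __name__ == '__main__':
    example_opt = {
        'data_type': 'img',            # assumed value understood by util.get_image_paths
        'dataroot_GT': '/path/to/GT',  # hypothetical path
        'dataroot_LQ': '/path/to/LQ',  # hypothetical path
        'dataroot_mask': None,         # mask is derived from the LQ image when None
        'scale': 1,
        'GT_size': 160,
        'phase': 'train',
        'use_flip': True,
        'use_rot': True,
    }
    dataset = LQGT_dataset(example_opt)
    sample = dataset[0]
    print(sample['LQ'].shape, sample['GT'].shape, sample['mask'].shape)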
|
StarcoderdataPython
|
352195
|
"""
This module has a Master, which polls Splice Machine
for new jobs and dispatches them to Workers for execution
(in threads). This execution happens in parallel.
"""
import json
from os import environ as env_vars
from fastapi import FastAPI
from handlers.modifier_handlers import (DisableServiceHandler,
EnableServiceHandler)
from handlers.run_handlers import (AzureDeploymentHandler,
DatabaseDeploymentHandler,
DatabaseUndeploymentHandler,
KubernetesDeploymentHandler,
KubernetesUndeploymentHandler,
SageMakerDeploymentHandler)
from pyspark.sql import SparkSession
from pysparkling import H2OConf, H2OContext
from workerpool import Job as ThreadedTask
from workerpool import WorkerPool
from shared.db.connection import SQLAlchemyClient
from shared.db.sql import SQL
from shared.environments.cloud_environment import (CloudEnvironment,
CloudEnvironments)
from shared.logger.logging_config import logger
from shared.models.splice_models import Job, create_bobby_tables
from shared.services.handlers import (HandlerNames, KnownHandlers,
populate_handlers)
from shared.structures.ledger import JobLedger
__author__: str = "Splice Machine, Inc."
__copyright__: str = "Copyright 2019, Splice Machine Inc. All Rights Reserved"
__credits__: list = ["<NAME>"]
__license__: str = "Proprietary"
__version__: str = "2.0"
__maintainer__: str = "<NAME>"
__email__: str = "<EMAIL>"
APP = FastAPI()
# Jobs
POLL_INTERVAL: int = 5  # check for new jobs every 5 seconds
LEDGER_MAX_SIZE: int = int(env_vars['WORKER_THREADS']) * 2  # how many previous jobs to account for
# Spark
SPARK_SCHEDULING_FILE: str = "configuration/fair_scheduling.xml"
# We only need spark context for modifiable handlers
RUN_HANDLERS: tuple = KnownHandlers.get_modifiable()
WORKER_POOL: WorkerPool = WorkerPool(size=30)
LEDGER: JobLedger = JobLedger(LEDGER_MAX_SIZE)
def create_run_contexts():
"""
Create a Global Spark Context that runs in the FAIR scheduling mode, and an H2O context. This means that
it shares resources across threads. We need a Spark Context to create the directory structure from a
deserialized PipelineModel (formerly a byte stream in the database.py)
"""
SparkSession.builder \
.master("local[*]") \
.appName(env_vars.get('TASK_NAME', 'bobby-0')) \
.config('spark.scheduler.mode', 'FAIR') \
.config('spark.scheduler.allocation.file', f'{env_vars["SRC_HOME"]}/{SPARK_SCHEDULING_FILE}') \
.config('spark.driver.extraClassPath', f'{env_vars["SRC_HOME"]}/lib/*') \
.getOrCreate()
# Create pysparkling context for H2O model serialization/deserialization
conf = H2OConf().setInternalClusterMode()
H2OContext.getOrCreate(conf)
def register_handlers() -> None:
"""
Register all handlers
to their associated
handler classes
"""
current_environment: CloudEnvironment = CloudEnvironments.get_current()
if current_environment == CloudEnvironments.aws:
KnownHandlers.register(HandlerNames.deploy_csp, SageMakerDeploymentHandler)
elif current_environment == CloudEnvironments.azure:
KnownHandlers.register(HandlerNames.deploy_csp, AzureDeploymentHandler)
KnownHandlers.register(HandlerNames.enable_service, EnableServiceHandler)
KnownHandlers.register(HandlerNames.disable_service, DisableServiceHandler)
KnownHandlers.register(HandlerNames.deploy_k8s, KubernetesDeploymentHandler)
KnownHandlers.register(HandlerNames.undeploy_k8s, KubernetesUndeploymentHandler)
KnownHandlers.register(HandlerNames.deploy_database, DatabaseDeploymentHandler)
KnownHandlers.register(HandlerNames.undeploy_database, DatabaseUndeploymentHandler)
class Runner(ThreadedTask):
"""
A Threaded Worker that will be
scaled across a pool via threading
"""
def __init__(self, task_id: int, handler_name: str) -> None:
"""
:param task_id: (int) the job id to process.
Unfortunately, one of the limitations
of SQLAlchemy is that its Sessions aren't thread safe.
So that means we have to retrieve the id in
the main thread and then the actual object
            in the worker thread, rather than passing the
object directly from the main thread.
This conforms to SQLAlchemy's 'thread-local' architecture.
"""
super().__init__()
        self.task_id: int = task_id
self.handler_name = handler_name
def run(self) -> None:
"""
Execute the job
"""
try:
logger.info(f"Runner executing job id {self.task_id} --> {self.handler_name}")
KnownHandlers.get_class(self.handler_name)(self.task_id).handle()
        except Exception:  # uncaught exceptions shouldn't break the runner
logger.exception(
f"Uncaught Exception Encountered while processing task #{self.task_id}")
def check_db_for_jobs() -> None:
"""
    Gets the currently pending jobs for Bobby to handle. This is called both on Bobby startup (to populate the
    queue of jobs) and on the API POST request /job for every new job submitted through the job-tracker/mlflow API.
:return: str return code 200 or 500
"""
try:
jobs = SQLAlchemyClient().execute(SQL.retrieve_jobs)
for job_data in jobs:
if job_data[0] not in LEDGER:
job_id, handler_name = job_data
logger.info(f"Found New Job with id #{job_id} --> {handler_name}")
LEDGER.record(job_id)
WORKER_POOL.put(Runner(job_id, handler_name))
except Exception:
logger.exception("Error: Encountered Fatal Error while locating and executing jobs")
raise
def check_for_k8s_deployments() -> None:
"""
When the database pauses or Bobby restarts, all k8s deployed models will be removed as they are children deployments
of Bobby. This function checks for k8s models that should be deployed and redeploys them.
:return:
"""
k8s_payloads = SQLAlchemyClient().execute(SQL.get_k8s_deployments_on_restart)
for user, payload in k8s_payloads:
# Create a new job to redeploy the model
job: Job = Job(handler_name=HandlerNames.deploy_k8s,
user=user,
payload=payload)
SQLAlchemyClient().SessionFactory.add(job)
SQLAlchemyClient().SessionFactory.commit()
check_db_for_jobs() # Add new jobs to the Job Ledger
@APP.post('/job', summary="Have bobby search for new jobs in the database", response_model=dict)
def get_new_jobs():
"""
Calls the function to get the new pending jobs via the API endpoint /job
:return: HTTP response 200 or 500
"""
check_db_for_jobs()
return dict(data="Checked DB for Jobs")
def main():
logger.info("Creating Contexts...")
create_run_contexts()
logger.info("Creating Splice Tables...")
create_bobby_tables()
logger.info('Registering handlers...')
register_handlers()
logger.info('Populating handlers...')
populate_handlers(SQLAlchemyClient().SessionFactory)
logger.info('Checking for pre-existing k8s deployments')
check_for_k8s_deployments()
logger.info('Waiting for new jobs...')
check_db_for_jobs() # get initial set of jobs from the DB
main()
|
StarcoderdataPython
|
6675603
|
<reponame>junhoyeo/fastdj
from django.urls import path
from comment import views
urlpatterns = [
path('get_comments_view/', views.get_comments_view, name='get_comments_view'),
path('', views.create_comment_view.as_view(), name='create_comment_view'),
]
|
StarcoderdataPython
|
318038
|
<reponame>PacktPublishing/Raspberry-Pi-Cookbook-for-Python-Programmers-SecondEdition
#import the necessary packages
import cv2
import numpy as np
BLUR=(5,5)
HIBLUR=(30,30)
GAUSSIAN=(21,21)
imageBG=None
gray=True
movement=[]
AVG=2
avgX=0
avgY=0
count=0
def process_image(raw_image,control):
global imageBG
global count,avgX,avgY,movement,gray
text=[]
images=[]
reset=False
#Toggle Gray and reset background
if control == ord("g"):
if gray:
gray=not gray
reset=True
print("Toggle Gray")
#Display contour and hierarchy details
elif control == ord("i"):
print("Contour: %s"%contours)
print("Hierarchy: %s"%hierarchy)
#Reset the background image
elif control == ord("r"):
reset=True
#Clear movement record and reset background
if reset:
print("Reset Background")
imageBG=None
movement=[]
#Keep a copy of the raw image
text.append("Raw Image")
images.append(raw_image)
if gray:
raw_image=cv2.cvtColor(raw_image,cv2.COLOR_BGR2GRAY)
#Blur the raw image
text.append("with Gaussian Blur...")
images.append(cv2.GaussianBlur(raw_image, GAUSSIAN, 0))
#Initialise background
if imageBG is None:
imageBG=images[-1]
text.append("with image delta...")
images.append(cv2.absdiff(imageBG,images[-1]))
text.append("with threshold mask...")
images.append(cv2.threshold(images[-1], 25, 255,
cv2.THRESH_BINARY)[1])
text.append("with dilation...")
images.append(cv2.dilate(images[-1],None, iterations=3))
#text.append("with dilation kernel...")
#kernel=np.ones((1,1),np.uint8)
#images.append(cv2.dilate(images[-2],kernel, iterations=3))
#Find contours
if not gray:
#Require gray image to find contours
text.append("with dilation gray...")
images.append(cv2.cvtColor(images[-1],cv2.COLOR_BGR2GRAY))
text.append("with contours...")
images.append(images[-1].copy())
aimage, contours, hierarchy = cv2.findContours(images[-1],
cv2.RETR_LIST,
cv2.CHAIN_APPROX_SIMPLE)
#Determine the area of each of the contours
largest_area=0
found_contour=None
for cnt in contours:
area = cv2.contourArea(cnt)
#Find which one is largest
if area > largest_area:
largest_area=area
found_contour=cnt
    if found_contour is not None:
#Find the centre of the contour
M=cv2.moments(found_contour)
cx,cy=int(M['m10']/M['m00']),int(M['m01']/M['m00'])
#Calculate the average
if count<AVG:
avgX=(avgX+cx)/2
avgY=(avgY+cy)/2
count=count+1
else:
movement.append((int(avgX),int(avgY)))
avgX=cx
avgY=cy
count=0
#Display
    if found_contour is not None:
cv2.circle(images[0],(cx,cy),10,(255,255,255),-1)
if len(movement) > 1:
for i,j in enumerate(movement):
if i>1:
cv2.line(images[0],movement[i-1],movement[i],(255,255,255))
return(images,text)
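
# A minimal capture-loop sketch showing how process_image() above could be driven.
# This is an assumption for illustration only: the original recipe pairs this module
# with a separate camera script, and the window/key handling here is hypothetical.
if __name__ == "__main__":
    capture = cv2.VideoCapture(0)
    try:
        while True:
            ok, frame = capture.read()
            if not ok:
                break
            key = cv2.waitKey(10) & 0xFF
            if key == ord("q"):
                break
            frames, labels = process_image(frame, key)
            for label, img in zip(labels, frames):
                cv2.imshow(label, img)
    finally:
        capture.release()
        cv2.destroyAllWindows()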
#End
|
StarcoderdataPython
|
206596
|
<gh_stars>0
import numpy
from audionmf.transforms.nmf import NMF
def nmf_matrix(matrix, max_iter=100, rank=30):
# increment the matrix to make sure it's positive
matrix_inc, min_val = increment_by_min(matrix)
# TODO save
# use Kullback-Leibler divergence
# nmf = nimfa.Nmf(matrix_inc, max_iter=max_iter, rank=rank, objective='div', update='divergence')()
# W = nmf.basis()
# H = nmf.coef()
# calculate NMF
nmf = NMF(matrix_inc, max_iter=max_iter, rank=rank)
W, H = nmf.factorize()
return W, H, min_val
def nmf_matrix_original(W, H, min_val):
# get the original matrix
matrix = numpy.matmul(W, H) - min_val
return matrix
def increment_by_min(matrix):
# increments matrix by its lowest value and returns the structure and the absolute value
min_val = abs(numpy.amin(matrix))
return matrix + min_val, min_val
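
# A minimal round-trip sketch for the helpers above (the rank, iteration count and
# input size are illustrative assumptions, not project defaults):
if __name__ == "__main__":
    original = numpy.random.rand(128, 64) - 0.5  # may contain negative entries
    W, H, min_val = nmf_matrix(original, max_iter=50, rank=10)
    reconstruction = nmf_matrix_original(W, H, min_val)
    print("reconstruction error:", numpy.linalg.norm(original - reconstruction))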
|
StarcoderdataPython
|
113425
|
<reponame>garrethmartin/HSC_UML
import sys
import numpy
import config as cf
import log
import os
import inspect
import pandas as pd
import h5py
from time import gmtime, strftime
from helpers import boundingbox
from helpers import fitshelper
from astropy.stats import sigma_clipped_stats
from helpers.image_types import SkyArea, ImageFile, FieldRect, SigmaRect
from helpers.PatchFactory import PatchFactory
from features.PowerSpectrumFeature import PowerSpectrumFeature
from joblib import Parallel, delayed
def create_mask_files(options, output_folder_path, fh, image_files, positions, file_num, prefix):
sigma_multipliers = options.sigma_multipliers
xv = positions[:, 1] # x
yv = positions[:, 2] # y
for sigma_multiplier in sigma_multipliers:
sigma_multiplier = float(sigma_multiplier)
masks = []
for i in range(fh._num_images):
wavelength = fh._wavelengths[i]
sigma = image_files[wavelength].sigma
threshold = float(sigma) * sigma_multiplier
values = fh.get_adjusted_image_data(i, xv, yv)
mask = (values > threshold) * 1
masks.append(mask)
logger.info("image: {0} wavelength: {1} sigma: {2} sigma_multiplier: {3} threshold: {4}".format(
fh._image_paths[i], fh._wavelengths[i], sigma, sigma_multiplier, threshold))
if fh._wavelengths[i] not in fh._image_paths[i]:
print("Huge error, wave length not in image file name: {0}".format(fh._image_paths[i]))
masks = numpy.array(masks)
        # logical or to combine into one: if a pixel is greater than the threshold in any of the files
        # then it will be allowed
final_mask = numpy.logical_or(masks[0], masks[1])
if fh._num_images > 2:
final_mask = numpy.logical_or(final_mask, masks[2])
output_file_path = output_folder_path + "/{0}_sigma{1}_positions_mask_{2}.txt".format(
prefix, int(sigma_multiplier), file_num)
numpy.savetxt(output_file_path, final_mask, delimiter=",", fmt="%i")
logger.info("Writing file: {0}".format(output_file_path))
def calc_rectangle(image_shape, size_pc):
centre_x = image_shape[0] / 2
centre_y = image_shape[1] / 2
    # try size_pc (e.g. 20%) of each dimension, but at least 250 pixels
width = max(image_shape[0] * size_pc, 250)
height = max(image_shape[1] * size_pc, 250)
if width > image_shape[0]:
width = image_shape[0]
if height > image_shape[1]:
height = image_shape[1]
l = centre_x - width/2
r = centre_x + width/2
b = centre_y - height/2
t = centre_y + height/2
return int(b), int(t), int(l), int(r)
def guess_sigma_region(wavelength, fh):
# get the size of the first image
image_index = fh.get_image_index(wavelength)
image_shape = fh.get_image_shape()
#b = 0
#l = 0
#t = image_shape[0] - 1
#r = image_shape[1] - 1
#return b, t, l, r
factor = 0.2
b, t, l, r = calc_rectangle(image_shape, factor)
# check percentage of isnans and zeros
c = 0
i = 0
while i < 10:
num_pixels = (t - b) * (r - l)
sub_image_rect = fh.get_rectangle(image_index, b, t, l, r)
c = numpy.count_nonzero(sub_image_rect)
if c > (0.4 * num_pixels):
break
# widen
factor += 0.1
b, t, l, r = calc_rectangle(image_shape, factor)
logger.info("iterating sigma regon guess: num pix {0} factor {1} num nonzero {2}".format(num_pixels, factor, c))
i += 1
if i == 10:
logger.info("Warning: iter reached 10 but still no rect so using whole image: {0}".format(i))
b = 0
l = 0
t = image_shape[0] - 1
r = image_shape[1] - 1
#return b, t, l, r
return b, t, l, r
def calc_background_sigma_levels(options, sky_area, wavelength_image_files, fh):
#, sigma_clip=5, iterations=20
sigma_clip = int(options.sigma_clip)
sigma_iterations = int(options.sigma_clip_iterations)
# the types used to store the sky areas
#ImageFile = namedtuple("ImageFile", ["id", "wavelength", "sigma_patch", "sigma", "file_path"])
#SigmaPatch = namedtuple("SigmaPath", ["bottom", "top", "left", "right"])
for wavelength, image_file in wavelength_image_files.items():
rect = numpy.array([])
if sky_area.sigma_rect is not None:
b = sky_area.sigma_rect.bottom
t = sky_area.sigma_rect.top
l = sky_area.sigma_rect.left
r = sky_area.sigma_rect.right
rect = fh.get_wavelength_rectangle(wavelength, b, t, l, r)
else:
# guess location
b, t, l, r = guess_sigma_region(wavelength, fh)
rect = fh.get_wavelength_rectangle(wavelength, b, t, l, r)
logger.info("Calculating background level for: {0} {1}".format(wavelength, image_file.file_name))
clipped_mean, clipped_median, clipped_std = sigma_clipped_stats(
            rect, mask_value=0., sigma=sigma_clip, iters=sigma_iterations)
image_file.sigma = clipped_std
image_file.threshold = options.min_sigma_multiplier * clipped_std
# calc threshold by multiplying the estimated background (clipped_std) by the input variable multiplier (5?)
logger.info("Clipped values wavelength: {0} mean: {1} median: {2} std-sigma: {3} thresh: {4} file: {5}".format(
image_file.wavelength, clipped_mean, clipped_median,
clipped_std, image_file.threshold, image_file.file_name))
def remove_already_processed_sky_areas(root_folder, sky_areas, completed_file_name):
processed_sky_areas = []
dict_completed = dict()
if not os.path.isfile(root_folder + completed_file_name):
        return processed_sky_areas
completed_sky_areas = numpy.loadtxt(root_folder + completed_file_name, delimiter=",")
if len(completed_sky_areas.shape) == 0:
completed_sky_area_id = int(completed_sky_areas)
dict_completed[completed_sky_area_id] = completed_sky_area_id
else:
for i in range(completed_sky_areas.shape[0]):
completed_sky_area_id = completed_sky_areas[i]
dict_completed[completed_sky_area_id] = completed_sky_area_id
for sky_area_id, sky_area in sky_areas.items():
if sky_area_id in dict_completed:
processed_sky_areas.append(sky_area_id)
# don't delete during dictionary iteration, so delete after we have copied all the invalid ids
for processed_sky_area_id in processed_sky_areas:
if processed_sky_area_id in sky_areas:
del sky_areas[processed_sky_area_id]
else:
logger.info("processed sky area does not exist: {0}".format(processed_sky_area_id))
logger.info("already_processed sky areas: {0}".format(len(processed_sky_areas)))
return processed_sky_areas
def validate_sky_area_wavelengths(root_folder, image_folder, sky_areas, wavelengths, errors_file_name):
# validate
invalid_sky_areas = []
for sky_area_id, sky_area in sky_areas.items():
if len(sky_area.image_files) < len(wavelengths):
logger.info("removing due to missing wavelengths: {0} {1}".format(sky_area_id, len(sky_area.image_files)))
invalid_sky_areas.append(sky_area_id)
continue
# check if image files exist
for wavelength, image_file in sky_area.image_files.items():
if not os.path.isfile(image_folder + image_file.file_name):
logger.info("file does not exist. sky area: {0} wavelength {1} filename {2}".format(
sky_area_id, wavelength, image_folder + image_file.file_name))
invalid_sky_areas.append(sky_area_id)
# remove invalid sky areas
for i in range(len(invalid_sky_areas)):
if invalid_sky_areas[i] in sky_areas:
del sky_areas[invalid_sky_areas[i]]
# output the errors to file
numpy.array(invalid_sky_areas).tofile(root_folder + errors_file_name, sep=",", format="%s")
return invalid_sky_areas
def load_sky_areas(root_folder, sky_areas_file_name, image_file_name, wavelengths):
sky_areas = dict()
# the types used to store the sky areas
#SkyArea = namedtuple("SkyArea", ["id", "image_files", "field_rect", "sigma_rect"])
#ImageFile = namedtuple("ImageFile", ["id", "wavelength", "sigma", "file_name"])
#SigmaRect = namedtuple("SigmaRect", ["bottom", "top", "left", "right"])
#FieldRect = namedtuple("FieldRect",["bottom", "top", "left", "right"])
# id, field, field_rect, sigma_patch
sky_areas_input = pd.read_csv(root_folder + sky_areas_file_name, sep=',')
for row_idx in range(sky_areas_input.shape[0]):
sky_area_id = sky_areas_input.id[row_idx]
sigma_rect = sky_areas_input.sigma_rect[row_idx]
#sigma_rect = sky_areas_input.sigma_rect[row_idx].strip()
if sigma_rect != 'None':
bits = sigma_rect.strip().split('-')
sigma_rect = SigmaRect(bottom=int(bits[0]), top=int(bits[1]), left=int(bits[2]), right=int(bits[3]))
else:
sigma_rect = None
field_rect = sky_areas_input.field_rect[row_idx].strip()
if field_rect != 'None':
bits = field_rect.strip().split('-')
field_rect = FieldRect(bottom=int(bits[0]), top=int(bits[1]), left=int(bits[2]), right=int(bits[3]))
else:
field_rect = None
sky_area = SkyArea(id=sky_area_id, image_files={}, field_rect=field_rect, sigma_rect=sigma_rect)
sky_areas[sky_area_id] = sky_area
# id, field, wavelength, sigma_patch, file_path
gen_images = pd.read_csv(root_folder + image_file_name, sep=',')
for row_idx in range(gen_images.shape[0]):
id = gen_images.id[row_idx]
sky_area_id = gen_images.sky_area_id[row_idx]
wavelength = str(gen_images.wavelength[row_idx])
wavelength = wavelength.strip()
file_name = gen_images.file_name[row_idx].strip()
if wavelength not in wavelengths:
logger.info("wavelength doesn't exist: {0} {1} {2}".format(wavelength, sky_area_id, file_name))
continue
im = ImageFile(id=id, wavelength=wavelength, sigma=None, file_name=file_name)
if sky_area_id not in sky_areas:
logger.error("Error sky area from general not in sky areas: {0} {1} {2}".format(sky_area_id, wavelength,
file_name))
sky_areas[sky_area_id] = SkyArea(sky_area_id, {}, None, None)
sky_area = sky_areas[sky_area_id]
if wavelength not in sky_area.image_files:
sky_area.image_files[wavelength] = im
return sky_areas
def validate_files(root_folder, image_folder, sky_areas, wavelengths, completed_file_name, errors_file_name):
# remove areas that don't have all wavelengths
invalid_sky_areas = validate_sky_area_wavelengths(
root_folder, image_folder, sky_areas, wavelengths, errors_file_name)
# remove areas that have already been processed
process_sky_areas = remove_already_processed_sky_areas(root_folder, sky_areas, completed_file_name)
def check_output_folders(root_folder):
logger.info("Checking folders")
#if not os.path.isdir(root_folder + '/output'):
# os.makedirs(root_folder + '/output')
# logger.info("Creating output folder")
#if not os.path.isdir(root_folder + '/log'):
# os.makedirs(root_folder + '/log')
# logger.info("Creating log folder")
logger.info("Finished checking folders")
def get_bounding_box(sky_area, fits_image_shape, window_size, stride):
rect = []
if sky_area.field_rect is None:
rect = FieldRect(0, fits_image_shape[0], 0, fits_image_shape[1])
else:
rect = sky_area.field_rect
field_bb = boundingbox.BoundingBox(
left=rect.left+window_size, right=rect.right-window_size,
top=rect.top-window_size, bottom=rect.bottom+window_size, step=stride)
return field_bb
def extract_patch(options, feature_factory, sky_area_id, sky_area):
root_folder = options.root_folder
image_folder = options.image_folder_path
index = options.index
    required_wavelengths = options.required_wavelengths
n_threads = options.n_threads
sky_areas_file_name = 'sky_areas_{0}.txt'.format(index)
images_file_name = 'image_files_{0}.txt'.format(index)
completed_file_name = 'processed_{0}.txt'.format(index)
errors_file_name = 'errors_{0}.txt'.format(index)
# open fits files for sky area
fh = fitshelper.FitsHelper(
logger, image_folder, sky_area.image_files, required_wavelengths, sky_area.field_rect, use_mem_map=False)
# calc background levels for each image file
calc_background_sigma_levels(options, sky_area, sky_area.image_files, fh)
# assume all the wavelengths fits data are the same size
field_bb = get_bounding_box(sky_area, fh.get_image_shape(), options.window_size, options.stride)
# get the patches/sub images, positions and offsets (if multiple fields)
patch_factory = PatchFactory(logger, fh, feature_factory)
gen_samples, gen_positions = patch_factory.get_features_all_pixels(options, field_bb, sky_area.image_files)
output_folder_path = options.output_folder_path + str(sky_area_id) + '/'
# create output dir
if not os.path.isdir(output_folder_path):
os.makedirs(output_folder_path)
logger.info("saving raw samples and positions for: {0}".format(sky_area_id))
numpy.savetxt(output_folder_path + "samples.csv", gen_samples, delimiter=",")
save_hd5(output_folder_path + "samples.hd5", gen_samples, data_name='samples')
numpy.savetxt(output_folder_path + "positions.csv", gen_positions, delimiter=",", fmt="%i")
logger.info("create sigma lists for object detection for: ".format(sky_area_id))
create_mask_files(options, output_folder_path, fh,
sky_area.image_files, gen_positions, options.index, options.prefix)
fh.close()
del fh
del patch_factory
del gen_positions
del gen_samples
with open(root_folder + completed_file_name, "a") as comp_file:
comp_file.write("{0}\r\n".format(sky_area_id))
comp_file.close()
logger.info("Finished creating features for: {0}".format(sky_area_id))
def process(options, feature_factory):
root_folder = options.root_folder
image_folder = options.image_folder_path
index = options.index
    required_wavelengths = options.required_wavelengths
n_threads = options.n_threads
sky_areas_file_name = 'sky_areas_{0}.txt'.format(index)
images_file_name = 'image_files_{0}.txt'.format(index)
completed_file_name = 'processed_{0}.txt'.format(index)
errors_file_name = 'errors_{0}.txt'.format(index)
check_output_folders(root_folder)
# id, field, wavelength, sigma_patch, file_path
# remove any that don't have images in all wavelengths, or those that have already been processed
sky_areas = load_sky_areas(root_folder, sky_areas_file_name, images_file_name, required_wavelengths)
validate_files(root_folder, image_folder, sky_areas, required_wavelengths, completed_file_name, errors_file_name)
# process each sky area, listed in the general text file, one at a time
Parallel(n_jobs = n_threads, verbose=0)\
(delayed(extract_patch)\
(options, feature_factory, sky_area_id, sky_area)\
for sky_area_id, sky_area in sky_areas.items())
def save_hd5(file_name, data, data_name='samples'):
f = h5py.File(file_name, "w")
f.attrs['creator'] = 'alex'
f.attrs['test number'] = 'zz'
f.attrs['HDF5_Version'] = h5py.version.hdf5_version
f.attrs['h5py_version'] = h5py.version.version
entry = f.create_group('entry')
entry.attrs['default'] = 'data'
entry.create_dataset('title', data='1-D scan of I00 v. mr')
# write the data
ds = entry.create_dataset(data_name, data=data)
ds.attrs['units'] = 'test data'
ds.attrs['long_name'] = 'test data for saving'
f.close()
del f
print ("wrote file:", file_name)
def run(options):
#options = FeatureOptions(config)
#options.print_options()
#options.radial_width = 1
#options.window_size = 10
#options.stride = 1
options.patch_shape = numpy.array([options.window_size*2, options.window_size*2])
feature_factory = PowerSpectrumFeature(options.patch_shape, options.radial_width)
process(options, feature_factory)
# #######################################################################################
# # Main
# #######################################################################################
class FeatureOptions(object):
def __init__(self, config):
self.test_name = config['test_name']
self.index = config['index']
self.root_folder = config['root_folder']
self.image_folder_path = config['image_folder']
self.output_folder_path = self.root_folder + '/' + self.test_name + '/output_' + str(self.index) + '/'
self.n_threads = config['n_threads']
self.log_folder = self.output_folder_path + '/log/'
if not os.path.isdir(self.log_folder):
os.makedirs(self.log_folder)
self.log_file_name = '/feature_extraction_log_{0}_{1}_{2}.txt'.format(
config['test_name'], config['index'], strftime('%H_%M_%S', gmtime()))
self.required_wavelengths = config['required_wavelengths']
print(self.required_wavelengths)
self.required_wavelengths.sort() # sort to ensure always same order
self.stride = int(config['stride'])
if self.test_name.startswith('ps'):
self.radial_width = int(config['radial_width'])
self.window_size = int(config['window_size'])
self.prefix = "gen"
self.suffix = "_" + str(self.index)
self.min_sigma_multiplier = int(config['min_sigma_multiplier'])
self.RAW_SAMPLES_FILENAME = "/{0}_raw_samples{1}.csv".format(self.prefix, self.suffix)
self.POSITIONS_FILENAME = "/{0}_positions{1}.csv".format(self.prefix, self.suffix)
self.NORM_LOG_SAMPLES_FILENAME = "/{0}_normalized_logged_samples{1}.csv".format(self.prefix, self.suffix)
self.LOGGED_SAMPLES_FILENAME = "/{0}_logged_samples{1}.csv".format(self.prefix, self.suffix)
self.PCA_PLOT_FILENAME = "/{0}_normed_logged_samples_pca{1}.png".format(self.prefix, self.suffix)
self.sigma_clip = 3 #config['sigma_clip']
self.sigma_clip_iterations = 10 #config['sigma_iterations']
self.sigma_multipliers = config['sigma_multipliers']
#self.stride = 1
def print_options(self):
attributes = inspect.getmembers(self, lambda a:not(inspect.isroutine(a)))
for attr in attributes:
k = attr[0]
v = attr[1]
if k.startswith('_'):
continue
logger.info("{0} : {1} ".format(k, v))
logger = None
if __name__ == "__main__":
config = cf.get_config(sys.argv)
options = FeatureOptions(config)
log.configure_logging(options.log_folder + options.log_file_name)
logger = log.get_logger("feature_extraction_" + str(config['index']) + config['test_name'])
options.print_options()
logger.debug("*** Starting ***")
#try:
run(options)
#except Exception as excep:
# logger.error(excep)
logger.debug("*** Finished ***")
|
StarcoderdataPython
|
215193
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import logging
import threading
from natrixclient.common.config import NatrixConfig
from natrixclient.command.performance.webdriver import Firefox
from natrixclient.command.performance.webdriver import Chrome
from natrixclient.command.performance.webdriver import Curl
logger = logging.getLogger(__name__)
def execute(args):
print("---------PERFORMANCE--------")
print(args)
class BrowserPerformance():
def execute(self, nconfig):
print("==================Performance EXECUTE========================")
destination = nconfig.get_value("DEFAULT", "destination")
browser = nconfig.get_value("DEFAULT", "browser")
mode = nconfig.get_value("DEFAULT", "mode")
opt = nconfig.get_value("output", "type")
if opt == "rabbitmq":
conf = NatrixConfig()
# TODO. need to decrypt password
# TODO, for debug, log
if nconfig.get_config()["DEFAULT"]["debug"].lower() == "true":
print("rabbitmq_host = " + conf["host"])
print("rabbitmq_port = " + conf["port"])
print("rabbitmq_username = " + conf["username"])
print("rabbitmq_password = " + conf["password"])
print("rabbitmq_vhost = " + conf["vhost"])
if browser not in ("curl", "firefox", "chrome"):
msg = "invalid choice: '{}' (choose from 'firefox', 'chrome', 'curl')".format(browser)
raise BrowserChooseException(msg)
# rabbitmq_conf["destination"] = destination
# execute performance
PerformanceThread(destination, mode, browser, opt, **conf).start()
class PerformanceThread(threading.Thread):
def __init__(self, dest, mode, browser, opt, **param):
threading.Thread.__init__(self)
self.dest = "http://" + dest if not dest.startswith("http") else dest
self.mode = mode
self.browser = browser
self.opt = opt
self.param = param
def run(self):
if self.browser in ("firefox", "chrome"):
if self.mode == "time":
per_result = Performance(self.dest, self.browser).get_time()
elif self.mode == "resource":
per_result = Performance(self.dest, self.browser).get_resources()
elif self.mode == "data":
per_result = Performance(self.dest, self.browser).get_performance()
else:
raise ValueError("invalid choice: {} (choose from 'time', 'resource', 'data')".format(self.mode))
else:
if self.mode == "data":
per_result = Performance(self.dest, self.browser).curl_get_performance()
else:
raise ValueError("invalid choice: {} (curl only can choose 'data')".format(self.mode))
per_result = json.dumps(per_result)
print(per_result)
self.param["operation"] = "performance"
self.param["destination"] = self.dest
parameters = self.param
Output(self.opt, per_result, **parameters).store()
class Performance(object):
def __init__(self, dest, browser):
self.dest = dest
self.browser = browser
if self.browser == 'firefox':
self.browser = Firefox()
elif self.browser == 'chrome':
self.browser = Chrome()
elif self.browser == 'curl':
self.browser = Curl()
else:
raise ValueError("invalid choice: {} (choose from 'firefox', 'chrome')".format(self.browser))
    # Use the browser to collect the page's overall request timing metadata
def get_time(self):
try:
time_result = self.browser.get_performance_timing(self.dest)
data = json.loads(time_result)
if 'toJSON' in data:
del data['toJSON']
except Exception as e:
result = {
"status": 1,
"data": {
"errorinfo": "Invalid destination".format(self.dest),
"errorcode": 120
}
}
return result
result = {
"status": 0,
"data": data
}
return result
    # Use the browser to collect per-resource request data
def get_resources(self):
try:
resource_results = self.browser.get_performance_resource(self.dest)
data = json.loads(resource_results)
except Exception as e:
result = {
"status": 1,
"data": {
"errorinfo": "Invalid destination".format(self.dest),
"errorcode": 120
}
}
return result
for res in data:
if 'toJSON' in res:
del res['toJSON']
result = {
"status": 0,
"data": data,
}
return result
    # Use the browser to collect combined (resource and timing) data
def get_performance(self):
try:
data = self.browser.get_performance(self.dest)
except Exception as e:
result = {
"status": 1,
"data": {
"errorinfo": "Invalid destination".format(self.dest),
"errorcode": 120
}
}
return result
time_data = data['timing']
res_data = data['resources']
if 'toJSON' in time_data:
del time_data['toJSON']
for res in res_data:
if 'toJSON' in res:
del res['toJSON']
result = {
"status": 0,
"data": data
}
return result
def curl_get_performance(self):
try:
data = Curl().get_performance(self.dest)
except Exception as e:
result = {
"status": 1,
"data": {
"errorinfo": "Invalid destination".format(self.dest),
"errorcode": 120
}
}
return result
if 'status' in data and data['status'] == 1:
result = data
return result
data['status'] = 0
result = data
return result
# if __name__ == '__main__':
# result = Performance("http://www.baidu.com", "chrome").get_performance()
# print(result)
|
StarcoderdataPython
|
3353303
|
<reponame>yxcxx/upass<gh_stars>0
__author__ = 'yxc'
import tornado.web
import tornado.ioloop
import tornado.options
import tornado.httpserver
from tornado.options import define, options
from url import HANDLER
from setting import setting
define("port", default=8000, help="app run in this port")
class Application(tornado.web.Application):
def __init__(self):
tornado.web.Application.__init__(self, HANDLER, **setting)
if __name__ == "__main__":
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
|
StarcoderdataPython
|
1928945
|
from tqdm import tqdm
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from torch.utils import data
import math
from .dataset import Dataset
def get_dataloaders(task, text_encoder, test_split, validation_split, batch_size, device, verbose, sequence_dim=None):
train_file = task['train_file']
train_val_dataframe = load_dataframe(train_file['file_path'], train_file['file_type'], train_file['file_header'])
if 'test_file' in task:
test_file = task['test_file']
test_dataframe = load_dataframe(test_file['file_path'], test_file['file_type'], test_file['file_header'])
else:
train_val_dataframe, test_dataframe = split_dataframe(train_val_dataframe, test_split)
train_dataframe, validation_dataframe = split_dataframe(train_val_dataframe, validation_split)
raw_documents = task["documents"]
train_documents_dataframe, document_structure = create_documents(train_dataframe, raw_documents, text_encoder, verbose, sequence_dim)
validation_documents_dataframe, _ = create_documents(validation_dataframe, raw_documents, text_encoder, verbose, sequence_dim)
test_documents_dataframe, _ = create_documents(test_dataframe, raw_documents, text_encoder, verbose, sequence_dim)
max_sequence_length = max(
max([train_documents_dataframe[column].apply(lambda x: len(x)).max() for column in train_documents_dataframe.columns]),
max([validation_documents_dataframe[column].apply(lambda x: len(x)).max() for column in validation_documents_dataframe.columns]),
max([test_documents_dataframe[column].apply(lambda x: len(x)).max() for column in test_documents_dataframe.columns]))
if sequence_dim is not None:
max_sequence_length = min(sequence_dim, max_sequence_length)
train_document_matrix, train_mask_matrix = get_document_matrix(train_documents_dataframe, max_sequence_length)
train_matrices = (train_document_matrix, train_mask_matrix)
validation_document_matrix, validation_mask_matrix = get_document_matrix(validation_documents_dataframe, max_sequence_length)
validation_matrices = (validation_document_matrix, validation_mask_matrix)
test_document_matrix, test_mask_matrix = get_document_matrix(test_documents_dataframe, max_sequence_length)
test_matrices = (test_document_matrix, test_mask_matrix)
target_type = task['target']['target_type']
target_index = task['target']['column_index']
encoder = LabelEncoder()
encoder.fit_transform(np.concatenate([train_dataframe[train_dataframe.columns[target_index]].unique(),
validation_dataframe[validation_dataframe.columns[target_index]].unique(),
test_dataframe[test_dataframe.columns[target_index]].unique()]))
train_target_matrix, target_encoders = get_target_matrix(train_dataframe, target_index, target_type, [encoder])
train_matrices += (train_target_matrix,)
validation_target_matrix, _ = get_target_matrix(validation_dataframe, target_index, target_type, target_encoders)
validation_matrices += (validation_target_matrix,)
test_target_matrix, _ = get_target_matrix(test_dataframe, target_index, target_type, target_encoders)
test_matrices += (test_target_matrix,)
vocab_size = len(text_encoder.encoder)
train_set = Dataset(device, target_type, vocab_size, *train_matrices)
validation_set = Dataset(device, target_type, vocab_size, *validation_matrices)
test_set = Dataset(device, target_type, vocab_size, *test_matrices)
data_params = {
'batch_size': batch_size,
'shuffle': True
}
return data.DataLoader(train_set, **data_params), data.DataLoader(validation_set, **data_params), data.DataLoader(test_set, **data_params), document_structure
def load_dataframe(path, file_type, has_header):
if file_type == 'csv':
separator = ','
elif file_type == 'tsv':
separator = '\t'
else:
raise NotImplementedError('Cannot load {} file type'.format(file_type))
if has_header:
return pd.read_csv(path, sep=separator, header=0)
else:
return pd.read_csv(path, sep=separator)
def get_target_matrix(dataframe, target_col_index, target_type, encoders=None):
if target_type == 'regression':
return dataframe[dataframe.columns[target_col_index]].values, None
targets = []
if encoders is None:
target_col = dataframe[dataframe.columns[target_col_index]]
encoder = LabelEncoder()
targets.append(encoder.fit_transform(target_col).reshape(-1, 1))
encoders = (encoder,)
else:
for encoder, index in zip(encoders, [target_col_index]):
targets.append(encoder.transform(dataframe[dataframe.columns[index]]).reshape(-1,1))
target_matrix = np.concatenate(targets, axis=1)
if target_matrix.shape[1] == 1:
target_matrix = target_matrix.reshape(-1)
return target_matrix, encoders
def get_document_matrix(documents_dataframe, max_sequence_length):
document_matrices = [np.stack(documents_dataframe[column].apply(lambda x: np.pad(x, (0, max_sequence_length - len(x)), mode='constant')).values) for column in documents_dataframe.columns]
mask_matrices = [np.stack(documents_dataframe[column].apply(lambda x: np.pad(np.ones(len(x)), (0, max_sequence_length - len(x)), mode='constant')).values) for column in documents_dataframe.columns]
document_matrix = np.concatenate([document_matrix.reshape(-1, 1, max_sequence_length) for document_matrix in document_matrices], axis=1)
mask_matrix = np.concatenate([mask_matrix.reshape(-1, 1, max_sequence_length) for mask_matrix in mask_matrices], axis=1)
return document_matrix, mask_matrix
def create_documents(dataframe, documents, text_encoder, verbose, sequence_dim):
    assert len(documents) == 1 or len(documents) == 2
if len(documents) == 1:
return create_one_document(dataframe, documents['primary_document'], text_encoder, verbose, sequence_dim)
else:
assert len(documents['associated_documents']) > 0
if len(documents['associated_documents']) == 1:
return create_one_to_one_document(dataframe, documents["primary_document"], documents["associated_documents"][0], text_encoder, verbose, sequence_dim)
else:
return create_one_to_many_document(dataframe, documents["primary_document"], documents["associated_documents"], text_encoder, verbose, sequence_dim)
def create_one_document(dataframe, document, text_encoder, verbose, sequence_dim):
document_dataframe = encode_documents(dataframe, [document], text_encoder, verbose)
tqdm.pandas(disable=not verbose, ncols=150, desc='Appending special tokens to {} document(s) for each instance'.format(document_dataframe.shape[1] - 1))
tqdm.pandas(disable=not verbose, ncols=150, desc='Appending special tokens to 1 document for each instance')
assert (document_dataframe.shape[1] == 1)
num_tokens = 2
doc_length = sequence_dim - num_tokens if sequence_dim is not None else None
return document_dataframe.progress_apply(
lambda x: pd.Series([[text_encoder.start_token] + x[0][:doc_length] + [text_encoder.classify_token]]), axis=1), "one"
def create_one_to_one_document(dataframe, doc1, doc2, text_encoder, verbose, sequence_dim):
documents_dataframe = encode_documents(dataframe, [doc1, doc2], text_encoder, verbose)
tqdm.pandas(disable=not verbose, ncols=150, desc='Appending special tokens to {} document(s) for each instance'.format(documents_dataframe.shape[1] - 1))
tqdm.pandas(disable=not verbose, ncols=150, desc='Appending special tokens to 1 document for each instance')
num_tokens = 3
max_len = sequence_dim - num_tokens if sequence_dim is not None else None
doc1_length = math.ceil(max_len / 2) if sequence_dim is not None else None
doc2_length = math.floor(max_len / 2) if sequence_dim is not None else None
return documents_dataframe.progress_apply(
lambda x: [text_encoder.start_token] + x[documents_dataframe.columns[0]][:doc1_length] + [
text_encoder.delimeter_token] + x[documents_dataframe.columns[1]][:doc2_length] + [
text_encoder.classify_token], axis=1).to_frame(), "one_to_one"
def create_one_to_many_document(dataframe, primary_doc, secondary_docs, text_encoder, verbose, sequence_dim):
documents_dataframe = encode_documents(dataframe, [primary_doc] + secondary_docs, text_encoder, verbose)
tqdm.pandas(disable=not verbose, ncols=150, desc='Appending special tokens to {} document(s) for each instance'.format(documents_dataframe.shape[1] - 1))
multiple_choice_documents = []
common_column_name = documents_dataframe.columns[0]
if sequence_dim is not None:
num_tokens = 3
max_len = sequence_dim - num_tokens
documents_dataframe['scale'] = pd.Series(
documents_dataframe.apply(lambda x: max_len / (len(x[0]) + max([len(y) for y in x[1:]])), axis=1),
index=documents_dataframe.index)
scale_column_name = documents_dataframe.columns[-1]
for choice_column_name in documents_dataframe.columns[1:-1]:
multiple_choice_documents.append(
documents_dataframe[[scale_column_name, common_column_name, choice_column_name]].progress_apply(
lambda x:
[text_encoder.start_token] +
x[common_column_name][:math.floor(len(x[common_column_name]) * x[scale_column_name])] +
[text_encoder.delimeter_token] +
x[choice_column_name][:max_len - math.floor(len(x[common_column_name]) * x[scale_column_name])] +
[text_encoder.classify_token], axis=1))
else:
for choice_column_name in documents_dataframe.columns[1:]:
multiple_choice_documents.append(
documents_dataframe[[common_column_name, choice_column_name]].progress_apply(
lambda x:
[text_encoder.start_token] +
x[common_column_name] +
[text_encoder.delimeter_token] +
x[choice_column_name] +
[text_encoder.classify_token], axis=1))
return pd.concat(multiple_choice_documents, axis=1), "one_to_many"
def encode_documents(dataframe, documents, text_encoder, verbose):
encoded_documents = []
for document_index, document in enumerate(documents):
tqdm.pandas(disable=not verbose, ncols=150,
desc='Creating document {} of {} for each instance'.format(document_index + 1, len(documents)))
document_dataframe = dataframe[dataframe.columns[document['column_indices']]].progress_apply(
lambda x: ' '.join(x), axis=1)
tqdm.pandas(disable=not verbose, ncols=150,
desc='Encoding document {} of {} for each instance'.format(document_index + 1, len(documents)))
encoded_documents.append(document_dataframe.progress_apply(text_encoder.encode))
return pd.concat(encoded_documents, axis=1)
def create_multiple_choice_documents(documents_dataframe, sequence_dim, text_encoder):
multiple_choice_documents = []
common_column_name = documents_dataframe.columns[0]
if sequence_dim is not None:
num_tokens = 4
max_len = sequence_dim - num_tokens
documents_dataframe['scale'] = pd.Series(
documents_dataframe.apply(lambda x: max_len / (len(x[0]) + max([len(y) for y in x[1:]])), axis=1),
index=documents_dataframe.index)
scale_column_name = documents_dataframe.columns[-1]
for choice_column_name in documents_dataframe.columns[1:-1]:
multiple_choice_documents.append(
documents_dataframe[[scale_column_name, common_column_name, choice_column_name]].progress_apply(
lambda x:
[text_encoder.start_token] +
x[common_column_name][:math.floor(len(x[common_column_name]) * x[scale_column_name])] +
[text_encoder.delimeter_token] +
x[choice_column_name][:max_len - math.floor(len(x[common_column_name]) * x[scale_column_name])] +
[text_encoder.classify_token], axis=1))
else:
for choice_column_name in documents_dataframe.columns[1:]:
multiple_choice_documents.append(
documents_dataframe[[common_column_name, choice_column_name]].progress_apply(
lambda x:
[text_encoder.start_token] +
x[common_column_name] +
[text_encoder.delimeter_token] +
x[choice_column_name] +
[text_encoder.classify_token], axis=1))
return pd.concat(multiple_choice_documents, axis=1)
def split_dataframe(dataframe, split=.2):
split_df2 = dataframe.sample(frac=split, replace=False)
split_df1 = dataframe.drop(split_df2.index)
return split_df1, split_df2
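
# A small illustrative use of split_dataframe (hypothetical frame; 20% of rows are
# sampled into the second frame and the remainder kept in the first):
# df = pd.DataFrame({'text': list('abcde'), 'label': [0, 1, 0, 1, 0]})
# train_df, holdout_df = split_dataframe(df, split=0.2)  # -> 4 rows and 1 row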
|
StarcoderdataPython
|
3328979
|
def main(request, response):
def fmt(x):
return f'"{x.decode("utf-8")}"' if x is not None else "undefined"
purpose = request.headers.get("Purpose", b"").decode("utf-8")
sec_purpose = request.headers.get("Sec-Purpose", b"").decode("utf-8")
headers = [(b"Content-Type", b"text/html"), (b'WWW-Authenticate', 'Basic')]
status = 200 if request.auth.username is not None or sec_purpose.startswith(
"prefetch") else 401
content = f'''
<!DOCTYPE html>
<script src="/common/dispatcher/dispatcher.js"></script>
<script src="utils.sub.js"></script>
<script>
window.requestHeaders = {{
purpose: "{purpose}",
sec_purpose: "{sec_purpose}"
}};
window.requestCredentials = {{
username: {fmt(request.auth.username)},
password: {fmt(request.auth.password)}
}};
const uuid = new URLSearchParams(location.search).get('uuid');
window.executor = new Executor(uuid);
</script>
'''
return status, headers, content
|
StarcoderdataPython
|
11398312
|
<filename>src/coop_assembly/help_functions/shared_const.py
# small number epsilon
EPS = 1e-8
# large number (effectively infinite)
INF = 1e23
# TOL
TOL = 0.1
# vertex correction, deciding how low a new vertex can go for a three-bar group
NODE_CORRECTION_TOP_DISTANCE = 80 # millimeter
NODE_CORRECTION_SINE_ANGLE = 0.4
|
StarcoderdataPython
|
8039331
|
<gh_stars>0
__all__ = ['load', 'save', 'manhattan_distance', 'split_work', 'DEFAULT_TYPE']
import pickle
from pathlib import Path
from typing import Union
from typing import Any
from typing import List
from typing import Generator
import numpy as np
DEFAULT_TYPE = np.float32
def split_work(work: List[Any], num_batches: int) -> Generator[List[Any], None, None]:
"""
Divides a list of work into evenly (as best as possible) sub batches.
:param work: a list of work
:param num_batches: the number of sublists to split it into
:return: a generator that return lists
"""
return (work[index::num_batches] for index in range(num_batches))
def manhattan_distance(a: np.ndarray, b: np.ndarray) -> float:
"""
Uses the second point 'b' to hold temporary values in calculation.
Point 'a' is not modified.
:param a: a numpy array
:param b: a numpy array
:return: the manhattan distance between two arrays
"""
b -= a
np.fabs(b, out=b)
return b.sum()
def load(filename: Union[Path, str]) -> Any:
    """
    Load a pickled object from disk.
    :param filename: path of the pickle file to read
    :return: the unpickled object
    """
    with open(filename, 'rb') as pickled_obj_file:
        obj = pickle.load(pickled_obj_file)
    return obj
def save(data: Any,
         filename: Union[Path, str],
         protocol=pickle.DEFAULT_PROTOCOL):
    """
    Pickle an object to disk.
    :param data: the object to pickle
    :param filename: path of the pickle file to write
    :param protocol: pickle protocol version to use
    """
    with open(filename, 'wb') as pickled_obj_file:
        pickle.dump(data, pickled_obj_file, protocol=protocol)
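
# A short usage sketch for the helpers above (file name and sample values are
# illustrative assumptions, not part of the library):
if __name__ == "__main__":
    batches = list(split_work(list(range(7)), 3))
    print(batches)  # [[0, 3, 6], [1, 4], [2, 5]]
    a = np.array([1.0, 2.0, 3.0], dtype=DEFAULT_TYPE)
    b = np.array([4.0, 0.0, 3.0], dtype=DEFAULT_TYPE)
    print(manhattan_distance(a, b))  # 5.0; note that b is modified in place
    save(batches, 'batches.pkl')
    print(load('batches.pkl') == batches)  # True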
|
StarcoderdataPython
|
3212346
|
<reponame>gradecam/keras_to_deeplearnjs
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("input", help="keras model to convert in keras's h5py format")
parser.add_argument("output", help="output filename of javascript module")
parser.add_argument("--weights", help="output filename of weight file, if not present the weights will be inlined")
args = parser.parse_args()
from keras.models import load_model
import pdb
from convert import get_converter
model = load_model(args.input)
out_file = open(args.output, 'w')
layers = [ get_converter(layer) for layer in model.layers ]
def write_preamble(outf):
global args
outf.write(\
"""
let dl = require('deeplearn');
module.exports = {};
let weights = {};
""")
if args.weights:
outf.write("""\
console.log("Fetching Weights");
weightPromise = fetch('weights').then(function(response) { return response.arrayBuffer(); }).catch((err) => console.log("Error loading weights: ", err));
module.exports.load = function() { return weightPromise; }
""")
else:
outf.write("module.exports.load = function() { return Promise.resolve(); }\n")
def write_weights(outf, layers):
global args
data = bytearray() if args.weights else None
if args.weights: outf.write("weightPromise.then(function(weightBuf) {\n");
for layer in layers:
for key,value in layer.get_deeplearn_weights(data).items():
outf.write("weights['{key}'] = {value}\n".format(key=key, value=value))
outf.write("console.log('weights loaded');")
if args.weights: outf.write("});\n")
if data:
weight_file = open(args.weights, 'wb')
weight_file.write(data)
weight_file.close()
def write_infer(outf, model, layers):
outf.write(\
"""
module.exports.infer = function infer(input) {{
layers = {{}}
layers['{input_name}'] = input;
""".format(input_name = model.input.name))
for layer in layers:
outf.write(" layers['{key}'] = {op};\n".format(key = layer.get_output_name(), op = layer.get_deeplearn_op()))
outf.write(\
"""
return layers['{output_name}'];
}} // end infer
""".format(output_name = model.output.name))
write_preamble(out_file)
write_weights(out_file, layers)
write_infer(out_file, model, layers)
out_file.close()
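
# Example invocation of this conversion script (the script and file names below
# are illustrative placeholders, not part of the repository):
#   python convert_model.py model.h5 model.js --weights model_weights.bin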
|
StarcoderdataPython
|
397156
|
#!/usr/bin/env python
import os
try:
marsyas_datadir = os.environ['MARSYAS_DATADIR']
except KeyError:
marsyas_datadir = "."
class MarCollection():
def __init__(self, mf_filename=None):
self.data = []
if mf_filename is not None:
try:
self.read_mf(mf_filename)
except:
pass
def write(self, mf_filename=None):
if mf_filename is not None:
self.filename = mf_filename
out = open(self.filename, 'w')
for filename, label in self.data:
out.write("%s\t%s\n" % (filename, label))
out.close()
def read_mf(self, mf_filename):
self.filename = mf_filename
self.data = []
self.merge_mf(mf_filename)
def merge_mf(self, new_mf_filename):
lines = open(new_mf_filename).readlines()
for line in lines:
if len(line) < 2:
continue
if line[0] == '#':
continue
splitline = line.split('\t')
filename = splitline[0].rstrip().replace(
"MARSYAS_DATADIR", marsyas_datadir)
try:
label = splitline[1].rstrip()
except:
label = ""
self.set_item(filename, label)
def get_filenames(self):
return [f for f,l in self.data ]
def get_filename_index(self, filename):
for i, pair in enumerate(self.data):
if filename == pair[0]:
return i, pair
return False, None
def set_item(self, filename, label=""):
index, pair = self.get_filename_index(filename)
if index is not False:
self.data[index] = (pair[0], label)
else:
self.data.append( (filename, label) )
def get_filenames_matching_label(self, label_match):
return [f for f,l in self.data if l == label_match]
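
# A brief usage sketch (the file names and labels below are made up for illustration):
if __name__ == "__main__":
    collection = MarCollection()
    collection.set_item("clips/one.wav", "speech")
    collection.set_item("clips/two.wav", "music")
    print(collection.get_filenames_matching_label("music"))
    collection.write("demo.mf")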
|
StarcoderdataPython
|
3308686
|
<filename>ttBotDemo/BotDemo.py
# -*- coding: UTF-8 -*-
import logging
import os
from TamTamBot.TamTamBot import TamTamBot
from TamTamBot.utils.lng import set_use_django
class BotDemo(TamTamBot):
@property
def description(self):
# type: () -> str
return 'Простейший бот на python. Исходный код - https://github.com/asvbkr/ttBotDemo.\n\n' \
'Simple bot in python. Source code - https://github.com/asvbkr/ttBotDemo.'
@property
def token(self):
# type: () -> str
token = os.environ.get('TT_BOT_DEMO_API_TOKEN')
return token
if __name__ == '__main__':
set_use_django(False)
bot = BotDemo()
bot.polling()
|
StarcoderdataPython
|
3462120
|
# coding=utf-8
import mysql.connector.pooling
import random
class DbHandler(object):
def __init__(self, **db_config):
self.db_config = db_config
self.pool_size = self.db_config.get('pool_size', 1)
self.mysql_pool = mysql.connector.pooling.MySQLConnectionPool(
pool_name=self.db_config.get('database', 'test'),
**self.db_config)
    def get_conn(self):
        # hand out a pooled connection; callers are responsible for closing it
        return self.mysql_pool.get_connection()
def execute(self, *args, fetchall=True):
conn = self.mysql_pool.get_connection()
cursor = conn.cursor()
cursor.execute(*args)
if fetchall:
data = cursor.fetchall()
else:
data = cursor.fetchone()
cursor.close()
conn.close()
return data
class _DbHandlers(object):
def __init__(self):
self.dbhanders = {}
def set_dbhandler(self, key, config):
self.dbhanders[key] = DbHandler(**config)
def get_dbhandler(self, key):
return self.dbhanders[key]
@staticmethod
def instance():
if not hasattr(_DbHandlers, '_instance'):
_DbHandlers._instance = _DbHandlers()
return _DbHandlers._instance
DbHandlers = _DbHandlers.instance()
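
# A usage sketch for the pool wrapper above. The connection settings are
# placeholders; pass whatever keyword arguments mysql.connector accepts.
if __name__ == "__main__":
    DbHandlers.set_dbhandler('main', {
        'host': 'localhost',
        'user': 'root',
        'password': 'secret',
        'database': 'test',
        'pool_size': 2,
    })
    handler = DbHandlers.get_dbhandler('main')
    row = handler.execute("SELECT 1", fetchall=False)
    print(row)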
|
StarcoderdataPython
|
6442441
|
from flask import Flask, request, jsonify
from flask_basicauth import BasicAuth
from textblob import TextBlob
from sklearn.linear_model import LinearRegression
import pickle
import os
from googletrans import Translator
translator = Translator()
colunas = ['tamanho','ano','garagem']
modelo = pickle.load(open('../../models/modelo.sav','rb'))
app = Flask(__name__)
app.config['BASIC_AUTH_USERNAME'] = 'tifane'
app.config['BASIC_AUTH_PASSWORD'] = 'eu'
basic_auth = BasicAuth(app)
@app.route('/')
def home():
return "Minha primeira API."
@app.route('/sentimento/<frase>')
@basic_auth.required
def sentimento(frase):
frase_en = translator.translate(frase, dest='en')
tb_en = TextBlob(frase_en.text)
polaridade = tb_en.sentiment.polarity
return "polaridade: {}".format(polaridade)
@app.route('/cotacao/', methods=['POST'])
#<EMAIL>
def cotacao():
dados = request.get_json()
dados_input = [dados[col] for col in colunas]
preco = modelo.predict([dados_input])
return jsonify(preco=preco[0])
app.run(debug=True, host='0.0.0.0')
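
# A hypothetical client call for the /cotacao/ endpoint above (kept as comments
# because app.run() blocks; assumes the API is reachable on localhost:5000 and
# that the `requests` package is installed):
# import requests
# resp = requests.post('http://localhost:5000/cotacao/',
#                      json={'tamanho': 120, 'ano': 2005, 'garagem': 1},
#                      auth=('tifane', 'eu'))
# print(resp.json())  # {'preco': ...}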
|
StarcoderdataPython
|
9697316
|
<gh_stars>100-1000
#encoding:utf-8
subreddit = 'theydidthemath'
t_channel = '@TheyDidTheMath'
def send_post(submission, r2t):
return r2t.send_simple(submission)
|
StarcoderdataPython
|
9699607
|
from sympy import ratsimpmodprime, ratsimp, Rational, sqrt, pi, log, erf, GF
from sympy.abc import x, y, z, t, a, b, c, d, e
def test_ratsimp():
f, g = 1 / x + 1 / y, (x + y) / (x * y)
assert f != g and ratsimp(f) == g
f, g = 1 / (1 + 1 / x), 1 - 1 / (x + 1)
assert f != g and ratsimp(f) == g
f, g = x / (x + y) + y / (x + y), 1
assert f != g and ratsimp(f) == g
f, g = -x - y - y ** 2 / (x + y) + x ** 2 / (x + y), -2 * y
assert f != g and ratsimp(f) == g
f = (
a * c * x * y
+ a * c * z
- b * d * x * y
- b * d * z
- b * t * x * y
- b * t * x
- b * t * z
+ e * x
) / (x * y + z)
G = [
a * c - b * d - b * t + (-b * t * x + e * x) / (x * y + z),
a * c - b * d - b * t - (b * t * x - e * x) / (x * y + z),
]
assert f != g and ratsimp(f) in G
A = sqrt(pi)
B = log(erf(x) - 1)
C = log(erf(x) + 1)
D = 8 - 8 * erf(x)
f = A * B / D - A * C / D + A * C * erf(x) / D - A * B * erf(x) / D + 2 * A / D
assert ratsimp(f) == A * B / 8 - A * C / 8 - A / (4 * erf(x) - 4)
def test_ratsimpmodprime():
a = y ** 5 + x + y
b = x - y
F = [x * y ** 5 - x - y]
assert ratsimpmodprime(a / b, F, x, y, order="lex") == (x ** 2 + x * y + x + y) / (
x ** 2 - x * y
)
a = x + y ** 2 - 2
b = x + y ** 2 - y - 1
F = [x * y - 1]
assert ratsimpmodprime(a / b, F, x, y, order="lex") == (1 + y - x) / (y - x)
a = 5 * x ** 3 + 21 * x ** 2 + 4 * x * y + 23 * x + 12 * y + 15
b = 7 * x ** 3 - y * x ** 2 + 31 * x ** 2 + 2 * x * y + 15 * y + 37 * x + 21
F = [x ** 2 + y ** 2 - 1]
assert ratsimpmodprime(a / b, F, x, y, order="lex") == (1 + 5 * y - 5 * x) / (
8 * y - 6 * x
)
a = x * y - x - 2 * y + 4
b = x + y ** 2 - 2 * y
F = [x - 2, y - 3]
assert ratsimpmodprime(a / b, F, x, y, order="lex") == Rational(2, 5)
# Test a bug where denominators would be dropped
assert ratsimpmodprime(x, [y - 2 * x], order="lex") == y / 2
a = x ** 5 + 2 * x ** 4 + 2 * x ** 3 + 2 * x ** 2 + x + 2 / x + x ** (-2)
assert ratsimpmodprime(a, [x + 1], domain=GF(2)) == 1
assert ratsimpmodprime(a, [x + 1], domain=GF(3)) == -1
|
StarcoderdataPython
|
4928295
|
<filename>edgeql_queries/queries.py
"""Definition for main collection for queries."""
from __future__ import annotations
from typing import Callable, Dict, List, Set, Union
from edgeql_queries.executors.async_executor import create_async_executor
from edgeql_queries.executors.sync_executor import create_sync_executor
from edgeql_queries.models import Query
from edgeql_queries.typing import QueriesTree
def _create_handler_from_query(query: Query, use_async: bool = True) -> Callable:
if use_async:
return create_async_executor(query)
return create_sync_executor(query)
def load_from_list(queries_collection: Queries, queries: List[Query]) -> Queries:
"""Add queries from list.
Arguments:
queries_collection: already registered queries.
queries: list of queries to be added.
Returns:
Collection of queries to which method was applied.
"""
for query in queries:
queries_collection.add_query(query.name, query)
return queries_collection
def load_from_tree(queries_collection: Queries, query_tree: QueriesTree) -> Queries:
"""Add queries from tree.
Arguments:
queries_collection: already registered queries.
query_tree: tree of queries that should be added.
Returns:
Collection of queries to which method was applied.
"""
for group_name, queries in query_tree.items():
if isinstance(queries, dict):
queries_collection.add_query(
group_name,
load_from_tree(Queries(queries_collection.is_async), queries),
)
else:
queries_collection.add_query(queries.name, queries)
return queries_collection
class Queries:
"""Collection and executor for queries."""
def __init__(self, is_async: bool = True) -> None:
"""Initialize collection and executor for queries.
Arguments:
is_async: use async driver for creating queries.
"""
self._query_handlers: Dict[str, Union[Callable, "Queries"]] = {}
self._available_queries: Set[Query] = set()
self._available_queries_groups: Dict[str, Queries] = {}
self._is_async = is_async
@property
def available_queries(self) -> List[Query]:
"""Sorted list of queries available on this collection.
Returns:
List of queries.
"""
return sorted(self._available_queries, key=lambda query: query.name)
@property
def is_async(self) -> bool:
"""Will be query handlers generated for async execution.
Returns:
Will be query handlers generated for async execution.
"""
return self._is_async
def add_query(self, name: str, query_handler: Union[Queries, Query]) -> None:
"""Add a single query to collection.
Arguments:
name: name of query or sub-queries to be added.
query_handler: a single [query][edgeql_queries.models.Query] that
will be transformed to executor or
[collection of queries][edgeql_queries.queries.Queries]
that will be registered as sub-queries.
"""
handler_for_query: Union[Callable, Queries]
if isinstance(query_handler, Query):
self._available_queries.add(query_handler)
handler_for_query = _create_handler_from_query(
query_handler,
self._is_async,
)
else:
handler_for_query = query_handler
self._available_queries_groups[name] = handler_for_query
self._query_handlers[name] = handler_for_query
def get_executor(self, query_name: str) -> Union[Callable, "Queries"]:
"""Return executor for query by name.
Arguments:
query_name: name of query for which executor should be returned.
Returns:
Executor for query.
"""
return self._query_handlers[query_name]
def __getattr__(self, query_name: str) -> Union[Callable, "Queries"]:
"""Get executor for query by name.
Arguments:
query_name: name of query or group.
Returns:
Executor for query.
"""
return self.get_executor(query_name)
def __repr__(self) -> str:
"""Return special string representation of collection.
Returns:
Raw string for queries collection.
"""
return "Queries(queries: {0}, groups: {1})".format(
self.available_queries,
self._available_queries_groups,
)
|
StarcoderdataPython
|
3285587
|
#!/usr/bin/python2.7
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration options for the application.
OAuth 2.0 Client Settings:
Visit the APIs Console (https://code.google.com/apis/console/) to create or
obtain client details for a project.
Authorized Redirect URIs for your client should include the hostname of your
app with /admin/auth appended to the end.
e.g. http://example.appspot.com/admin/auth
XSRF Settings:
This is used to generate a unique key for each user of the app.
Replace this with a unique phrase or random set of characters.
Keep this a secret.
"""
__author__ = '<EMAIL> (NZ house of reps)'
# OAuth 2.0 Client Settings
AUTH_CONFIG = {
'OAUTH_CLIENT_ID': '742357081841-gdovo1s4nb7ucc4cs446bj4n34bn7c04.apps.googleusercontent.com',
'OAUTH_CLIENT_SECRET': '<KEY>',
# E.g. Local Dev Env on port 8080: http://localhost:8080
# E.g. Hosted on App Engine: https://your-application-id.appspot.com
'OAUTH_REDIRECT_URI': '%s%s' % (
'https://parliamentanalytics-159403.appspot.com',
'/admin/auth')
}
# XSRF Settings
XSRF_KEY = '<KEY>'
|
StarcoderdataPython
|
11235097
|
<reponame>croxis/SpaceDrive<filename>spacedrive/render_components.py
from panda3d.core import PerlinNoise2
class CelestialRenderComponent(object):
body = None
atmosphere = None
light = None
noise = PerlinNoise2(64, 64)
noise_texture = None
mesh = None
temperature = 0
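# --- Usage note (illustrative) ------------------------------------------------
# The attributes above are class-level defaults, so every component shares the
# same PerlinNoise2 table until a value is assigned on an instance.
if __name__ == '__main__':
    planet = CelestialRenderComponent()
    planet.temperature = 288  # per-instance override; Kelvin is an assumed unit
    print(planet.noise is CelestialRenderComponent.noise)  # True: shared noise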
|
StarcoderdataPython
|
4862496
|
<filename>tests/operators/ci_gpu/test_all.py<gh_stars>0
#!/usr/bin/env python3
# coding: utf-8
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
from test_utils import gen_random_shape
from test_ms_add import test_ms_add
from test_ms_addn import test_ms_addn
from test_ms_batch_matmul import test_ms_bmm
from test_ms_exp import test_ms_exp
from test_ms_maximum import test_ms_maximum
from test_ms_minimum import test_ms_minimum
from test_ms_mul import test_ms_mul
from test_ms_divide import test_ms_divide
from test_ms_rsqrt import test_ms_rsqrt
from test_ms_sub import test_ms_sub
from test_ms_tile import test_ms_tile
from test_ms_one_hot import test_ms_one_hot
from test_ms_sqrt import test_ms_sqrt
from test_ms_cast import test_ms_cast
from test_ms_reshape import test_ms_reshape
from test_ms_expand_dims import test_expand_dims
from test_ms_trans_data import test_ms_trans_data
from test_ms_log import test_ms_log
from test_ms_pow import test_ms_pow
from test_ms_reduce_sum import test_ms_reduce_sum
from test_ms_abs import test_ms_abs
from test_ms_neg import test_ms_neg
from test_ms_round import test_ms_round
from test_ms_select import test_ms_select
from test_ms_equal import test_ms_equal
from test_ms_less_equal import test_ms_less_equal
from test_ms_greater_equal import test_ms_greater_equal
from test_ms_reciprocal import test_ms_reciprocal
from test_ms_reduce_max import test_ms_reduce_max
from test_ms_reduce_min import test_ms_reduce_min
from test_fused_pad import test_fused_pad
from test_fused_bn_reduce import test_fused_bn_reduce
from test_fused_bn_update import test_fused_bn_update
from test_fused_bn_follow_relu import test_fused_bn_follow_relu
from test_fused_bn_follow_relu_avgpool import test_fused_bn_follow_relu_avgpool
from test_fused_bn_double_follow_relu import test_fused_bn_double_follow_relu
from test_fused_bn_reduce_grad import test_fused_bn_reduce_grad
from test_fused_relu_grad_bn_reduce_grad import test_fused_relu_grad_bn_reduce_grad
from test_fused_relu_grad_bn_double_reduce_grad import test_fused_relu_grad_bn_double_reduce_grad
from test_fused_l2loss_grad import test_fused_l2loss_grad
from test_fused_is_finite import test_fused_is_finite
from test_fused_relu_grad_bn_update_grad import test_fused_relu_grad_bn_update_grad
from test_fused_relu_grad_bn_double_update_grad import test_fused_relu_grad_bn_double_update_grad
from test_fused_relu_grad import test_fused_relu_grad
from test_fused_bn_update_grad import test_fused_bn_update_grad
from test_fused_mul_div_rsqrt_mul_isfinite_red import test_fused_mul_div_rsqrt_mul_isfinite_red
import pytest
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_add():
test_ms_add((1, 1024), (1, 1024), 'float32', poly_sch=True)
test_ms_add((2, 32, 256, 32, 32), (2, 32, 256, 32, 32),
'float32', poly_sch=True)
test_ms_add((1, 1024), (1, 1024), 'float32', poly_sch=False)
test_ms_add((2, 32, 256, 32, 32), (2, 32, 256, 32, 32),
'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_addn():
test_ms_addn((1, 1024, 1024), "float32", 2, poly_sch=True)
test_ms_addn((1, 1024, 1024), "float16", 2, poly_sch=True)
test_ms_addn((1, 1024, 1024), "float32", 2, poly_sch=False)
test_ms_addn((1, 1024, 1024), "float16", 2, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_bmm():
test_ms_bmm((8, 16, 32), (8, 64, 32), 'float32', poly_sch=True) # matmul with batch
test_ms_bmm((1, 2, 32), (1, 1, 32), 'float32', poly_sch=True) # matmul with some axis equals to 1
test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', poly_sch=True)
# test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', (1, 1024, 1024)) # cannot store type float32
test_ms_bmm((1, 1024, 512), (1, 256, 512), 'float32', poly_sch=True)
# test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', (1, 1024, 1024), poly_sch=True) # storage_flatten fail
test_ms_bmm((8, 16, 32), (8, 64, 32), 'float32', poly_sch=False) # matmul with batch
test_ms_bmm((1, 2, 32), (1, 1, 32), 'float32', poly_sch=False) # matmul with some axis equals to 1
test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', poly_sch=False)
# test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', (1, 1024, 1024)) # cannot store type float32
test_ms_bmm((1, 1024, 512), (1, 256, 512), 'float32', poly_sch=False)
# test_ms_bmm((1, 1024, 1024), (1, 1024, 1024), 'float32', (1, 1024, 1024), poly_sch=False) # storage_flatten fail
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_cast():
test_ms_cast((32, 32, 14, 14, 16), "float16", "float32", poly_sch=True)
test_ms_cast((32, 32, 14, 14, 16), "float32", "float16", poly_sch=True)
test_ms_cast((32, 32, 14, 14, 16), "float16", "float32", poly_sch=False)
test_ms_cast((32, 32, 14, 14, 16), "float32", "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_exp():
test_ms_exp((1024, 4096), 'float32', poly_sch=True)
test_ms_exp((1024, 4096), 'float16', poly_sch=True)
test_ms_exp((1024, 4095), 'float16', poly_sch=True)
test_ms_exp((1024, 799), 'float16', poly_sch=True)
test_ms_exp((1024, 4096), 'float32', poly_sch=False)
test_ms_exp((1024, 4096), 'float16', poly_sch=False)
test_ms_exp((1024, 4095), 'float16', poly_sch=False)
test_ms_exp((1024, 799), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_maximum():
test_ms_maximum((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=True)
test_ms_maximum((32, 1024, 1024), (1, 1024, 1024), 'float16', poly_sch=True)
test_ms_maximum((32, 32, 32, 256), (32, 32, 1, 256), 'float16', poly_sch=True)
test_ms_maximum((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=False)
test_ms_maximum((32, 1024, 1024), (1, 1024, 1024), 'float16', poly_sch=False)
test_ms_maximum((32, 32, 32, 256), (32, 32, 1, 256), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_minimum():
test_ms_minimum((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=True)
test_ms_minimum((32, 1024, 1024), (1, 1024, 1024), 'float16', poly_sch=True)
test_ms_minimum((32, 32, 32, 256), (32, 32, 1, 256), 'float16', poly_sch=True)
test_ms_minimum((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=False)
test_ms_minimum((32, 1024, 1024), (1, 1024, 1024), 'float16', poly_sch=False)
test_ms_minimum((32, 32, 32, 256), (32, 32, 1, 256), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_mul():
test_ms_mul((1024, 4096), 'float32', poly_sch=True)
test_ms_mul((1024, 4096), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_divide():
test_ms_divide((1024, 1024), 'float32', poly_sch=True)
test_ms_divide((1024, 1024), 'float16', poly_sch=True)
test_ms_divide((1024, 1024), 'float32', poly_sch=False)
test_ms_divide((1024, 1024), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reshape():
test_ms_reshape("float32", (64, 128, 1024), (8192, 1024), poly_sch=True)
test_ms_reshape("float16", (64, 128, 1024), (8192, 1024), poly_sch=True)
test_ms_reshape("float32", (64, 128, 1024), (8192, 1024), poly_sch=False)
test_ms_reshape("float16", (64, 128, 1024), (8192, 1024), poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_rsqrt():
test_ms_rsqrt((32, 1024, 1024), 'float32', poly_sch=True)
test_ms_rsqrt((32, 1024, 1024), 'float16', poly_sch=True)
test_ms_rsqrt((32, 1024, 1024), 'float32', poly_sch=False)
test_ms_rsqrt((32, 1024, 1024), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_sqrt():
test_ms_sqrt((1024, 1024), "float32", poly_sch=True)
test_ms_sqrt((1024, 1024), "float16", poly_sch=True)
test_ms_sqrt((1024, 1024), "float32", poly_sch=False)
test_ms_sqrt((1024, 1024), "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_sub():
test_ms_sub((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=True)
test_ms_sub((32, 1024, 1024), (32, 1024, 1024), 'float16', poly_sch=True)
test_ms_sub((32, 1024, 1024), (1, 1024, 1024), 'float32', poly_sch=True)
test_ms_sub((4, 4, 4), (1, 4, 4), 'float32', poly_sch=True)
test_ms_sub((32, 1024, 1024), (32, 1024, 1024), 'float32', poly_sch=False)
test_ms_sub((32, 1024, 1024), (32, 1024, 1024), 'float16', poly_sch=False)
test_ms_sub((32, 1024, 1024), (1, 1024, 1024), 'float32', poly_sch=False)
test_ms_sub((4, 4, 4), (1, 4, 4), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_tile():
test_ms_tile((1024, 4096), (3,), 'float32', poly_sch=True)
test_ms_tile((1024, 4096), (3,), 'float16', poly_sch=True)
test_ms_tile((1024, 4096), (3,), 'float32', poly_sch=False)
test_ms_tile((1024, 4096), (3,), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_one_hot():
test_ms_one_hot((1024,), 16, "int32", 1, 0, 0, poly_sch=True)
test_ms_one_hot((1024,), 16, "float32", 1, 0, 0, poly_sch=True)
test_ms_one_hot((32,), 16, "int32", 1, 0, 0, poly_sch=True)
test_ms_one_hot((32,), 16, "float32", 1, 0, 0, poly_sch=True)
test_ms_one_hot((1024,), 16, "int32", 1, 0, 0, poly_sch=False)
test_ms_one_hot((1024,), 16, "float32", 1, 0, 0, poly_sch=False)
test_ms_one_hot((32,), 16, "int32", 1, 0, 0, poly_sch=False)
test_ms_one_hot((32,), 16, "float32", 1, 0, 0, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_expand_dims():
test_expand_dims((32, 1024, 1024), 1, 'float32', poly_sch=True)
test_expand_dims((32, 1024, 1024), 2, 'float16', poly_sch=True)
test_expand_dims((32, 1024, 1024), 1, 'float32', poly_sch=False)
test_expand_dims((32, 1024, 1024), 2, 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_trans_data():
test_ms_trans_data((8, 24, 38, 38), (0, 2, 1, 3), 'float32', poly_sch=True)
test_ms_trans_data((8, 24, 38, 38), (0, 2, 1, 3), 'float16', poly_sch=True)
test_ms_trans_data((8, 24, 38, 38), (0, 2, 1, 3), 'float32', poly_sch=False)
test_ms_trans_data((8, 24, 38, 38), (0, 2, 1, 3), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_log():
test_ms_log((9, 1024, 1024), 'float16', poly_sch=True)
test_ms_log((9, 1024, 1024), 'float32', poly_sch=True)
test_ms_log((9, 1024, 1024), 'float16', poly_sch=False)
test_ms_log((9, 1024, 1024), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_pow():
test_ms_pow((9, 1024, 1024), (9, 1024, 1024), 'float32', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1024, 1), 'float32', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1, 1), 'float32', poly_sch=True)
test_ms_pow((9, 1024, 1024), (1, 1, 1), 'float32', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1024, 1024), 'float16', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1024, 1), 'float16', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1, 1), 'float16', poly_sch=True)
test_ms_pow((9, 1024, 1024), (1, 1, 1), 'float16', poly_sch=True)
test_ms_pow((9, 1024, 1024), (9, 1024, 1024), 'float32', poly_sch=False)
test_ms_pow((9, 1024, 1024), (9, 1024, 1), 'float32', poly_sch=False)
test_ms_pow((9, 1024, 1024), (9, 1, 1), 'float32', poly_sch=False)
test_ms_pow((9, 1024, 1024), (1, 1, 1), 'float32', poly_sch=False)
test_ms_pow((9, 1024, 1024), (9, 1024, 1024), 'float16', poly_sch=False)
test_ms_pow((9, 1024, 1024), (9, 1024, 1), 'float16', poly_sch=False)
test_ms_pow((9, 1024, 1024), (9, 1, 1), 'float16', poly_sch=False)
test_ms_pow((9, 1024, 1024), (1, 1, 1), 'float16', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_abs():
test_ms_abs((1024, 1024), "float32", poly_sch=True)
test_ms_abs((1024, 1024), "float16", poly_sch=True)
test_ms_abs((1, ), "float32", poly_sch=True)
test_ms_abs((1, 1), "float32", poly_sch=True)
test_ms_abs((1, ), "float16", poly_sch=True)
test_ms_abs((1, 1), "float16", poly_sch=True)
test_ms_abs((1024, 1024), "float32", poly_sch=False)
test_ms_abs((1024, 1024), "float16", poly_sch=False)
test_ms_abs((1, ), "float32", poly_sch=False)
test_ms_abs((1, 1), "float32", poly_sch=False)
test_ms_abs((1, ), "float16", poly_sch=False)
test_ms_abs((1, 1), "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_neg():
test_ms_neg((1024, 1024), "float32", poly_sch=True)
test_ms_neg((1024, 1024), "float16", poly_sch=True)
test_ms_neg((1, ), "float32", poly_sch=True)
test_ms_neg((1, 1), "float32", poly_sch=True)
test_ms_neg((1, ), "float16", poly_sch=True)
test_ms_neg((1, 1), "float16", poly_sch=True)
test_ms_neg((1024, 1024), "float32", poly_sch=False)
test_ms_neg((1024, 1024), "float16", poly_sch=False)
test_ms_neg((1, ), "float32", poly_sch=False)
test_ms_neg((1, 1), "float32", poly_sch=False)
test_ms_neg((1, ), "float16", poly_sch=False)
test_ms_neg((1, 1), "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_round():
test_ms_round((1024, 1024), "float32", poly_sch=True)
test_ms_round((1024, 1024), "float16", poly_sch=True)
test_ms_round((1, ), "float32", poly_sch=True)
test_ms_round((1, 1), "float32", poly_sch=True)
test_ms_round((1, ), "float16", poly_sch=True)
test_ms_round((1, 1), "float16", poly_sch=True)
test_ms_round((1024, 1024), "float32", poly_sch=False)
test_ms_round((1024, 1024), "float16", poly_sch=False)
test_ms_round((1, ), "float32", poly_sch=False)
test_ms_round((1, 1), "float32", poly_sch=False)
test_ms_round((1, ), "float16", poly_sch=False)
test_ms_round((1, 1), "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reduce_sum():
test_ms_reduce_sum((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_sum((9, 1024, 1024), 'float32', axis=2, keepdims=True, poly_sch=True)
test_ms_reduce_sum((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_sum((9, 1024, 1024), 'float16', axis=2, keepdims=True, poly_sch=True)
test_ms_reduce_sum((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_sum((9, 1024, 1024), 'float32', axis=2, keepdims=True, poly_sch=False)
test_ms_reduce_sum((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_sum((9, 1024, 1024), 'float16', axis=2, keepdims=True, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_select():
test_ms_select((2, ), (2, 2, 2), "int8", "float16", poly_sch=True)
test_ms_select((2, ), (2, 2, 2), "int8", "float16", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_equal():
test_ms_equal(((1, 1024), (1, 1024)), 'float16', poly_sch=True)
test_ms_equal(((1, 1024), (1, 1024)), 'float32', poly_sch=True)
test_ms_equal(((1, 1024), (1, 1024)), 'float16', poly_sch=False)
test_ms_equal(((1, 1024), (1, 1024)), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_less_equal():
test_ms_less_equal((1, 1024), (1, 1024), 'float16', poly_sch=True)
test_ms_less_equal((1, 1024), (1, 1024), 'float32', poly_sch=True)
test_ms_less_equal((1, 1024), (1, 1024), 'float16', poly_sch=False)
test_ms_less_equal((1, 1024), (1, 1024), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_greater_equal():
test_ms_greater_equal((1, 1024), (1, 1024), 'float16', poly_sch=True)
test_ms_greater_equal((1, 1024), (1, 1024), 'float32', poly_sch=True)
test_ms_greater_equal((1, 1024), (1, 1024), 'float16', poly_sch=False)
test_ms_greater_equal((1, 1024), (1, 1024), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reciprocal():
test_ms_reciprocal((1, 1024), 'float16', poly_sch=True)
test_ms_reciprocal((1, 1024), 'float32', poly_sch=True)
test_ms_reciprocal((1, 1024), 'float16', poly_sch=False)
test_ms_reciprocal((1, 1024), 'float32', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reduce_min():
test_ms_reduce_min((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_min((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_min((9, 1024, 1024), 'float32', axis=2, keepdims=False, poly_sch=True)
test_ms_reduce_min((9, 1024, 1024), 'float16', axis=2, keepdims=False, poly_sch=True)
test_ms_reduce_min((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_min((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_min((9, 1024, 1024), 'float32', axis=2, keepdims=False, poly_sch=False)
test_ms_reduce_min((9, 1024, 1024), 'float16', axis=2, keepdims=False, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_reduce_max():
test_ms_reduce_max((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_max((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=True)
test_ms_reduce_max((9, 1024, 1024), 'float32', axis=2, keepdims=False, poly_sch=True)
test_ms_reduce_max((9, 1024, 1024), 'float16', axis=2, keepdims=False, poly_sch=True)
test_ms_reduce_max((9, 1024, 1024), 'float32', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_max((9, 1024, 1024), 'float16', axis=None, keepdims=False, poly_sch=False)
test_ms_reduce_max((9, 1024, 1024), 'float32', axis=2, keepdims=False, poly_sch=False)
test_ms_reduce_max((9, 1024, 1024), 'float16', axis=2, keepdims=False, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_pad():
test_fused_pad((7, 7, 3, 64), (0, 0, 0, 0), (0, 0, 1, 0), layout='NHWC', pad_value=0.0, poly_sch=True)
test_fused_pad((7, 7, 3, 64), (0, 0, 0, 0), (0, 0, 1, 0), layout='NHWC', pad_value=0.0, poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_reduce():
test_fused_bn_reduce((256, 7, 7, 2048), layout='NHWC', poly_sch=True)
test_fused_bn_reduce((256, 7, 7, 2048), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_update():
test_fused_bn_update((2048,), poly_sch=True)
test_fused_bn_update((2048,), poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_follow_relu():
test_fused_bn_follow_relu((256, 7, 7, 2048), layout='NHWC', poly_sch=True)
test_fused_bn_follow_relu((256, 7, 7, 2048), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_follow_relu_avgpool():
test_fused_bn_follow_relu_avgpool((256, 7, 7, 2048), layout='NHWC', poly_sch=True)
test_fused_bn_follow_relu_avgpool((256, 7, 7, 2048), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_double_follow_relu():
test_fused_bn_double_follow_relu((256, 7, 7, 2048), layout='NHWC', poly_sch=True)
test_fused_bn_double_follow_relu((256, 7, 7, 2048), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_reduce_grad():
test_fused_bn_reduce_grad((256, 56, 56, 256), layout='NHWC', poly_sch=True)
test_fused_bn_reduce_grad((256, 56, 56, 256), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_relu_grad_bn_reduce_grad():
test_fused_relu_grad_bn_reduce_grad((64, ), (256, 112, 112, 64), layout='NHWC', poly_sch=True)
test_fused_relu_grad_bn_reduce_grad((64, ), (256, 112, 112, 64), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_relu_grad_bn_double_reduce_grad():
test_fused_relu_grad_bn_double_reduce_grad((256,), (256, 56, 56, 256), layout="NHWC", poly_sch=True)
test_fused_relu_grad_bn_double_reduce_grad((256,), (256, 56, 56, 256), layout="NHWC", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_l2loss_grad():
test_fused_l2loss_grad((1, 1, 256, 1024), layout='NHWC', poly_sch=True)
test_fused_l2loss_grad((1, 1, 256, 1024), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_is_finite():
test_fused_is_finite((1, 1, 256, 1024), layout='NHWC', poly_sch=True)
test_fused_is_finite((1, 1, 256, 1024), layout='NHWC', poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_relu_grad_bn_update_grad():
test_fused_relu_grad_bn_update_grad((256, 112, 112, 64), (64,), layout="NHWC", poly_sch=False)
test_fused_relu_grad_bn_update_grad((256, 112, 112, 64), (64,), layout="NHWC", poly_sch=True)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_relu_grad_bn_double_update_grad():
test_fused_relu_grad_bn_double_update_grad((256, 56, 56, 256), (256, ), layout='NHWC', poly_sch=False)
test_fused_relu_grad_bn_double_update_grad((256, 56, 56, 256), (256, ), layout='NHWC', poly_sch=True)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_relu_grad():
test_fused_relu_grad((256, 56, 56, 256), poly_sch=True)
test_fused_relu_grad((256, 56, 56, 256), poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_bn_update_grad():
test_fused_bn_update_grad((256, 56, 56, 256), (256,), layout="NHWC", poly_sch=True)
test_fused_bn_update_grad((256, 56, 56, 256), (256,), layout="NHWC", poly_sch=False)
return True
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_ms_fused_mul_div_rsqrt_mul_isfinite_red():
test_fused_mul_div_rsqrt_mul_isfinite_red((64,), poly_sch=True)
test_fused_mul_div_rsqrt_mul_isfinite_red((64,), poly_sch=False)
return True
class Logger(object):
def __init__(self, filename, stream):
self.terminal = stream
self.log = open(filename, 'a')
def write(self, message):
self.terminal.write(message)
self.log.write(message)
def flush(self):
pass
def usage(op_map):
print("Usage:")
print("1. Run func1 and func2 with manual schedule:")
print("\t$python test_all.py -m func_name1 func_name2")
print("\t$python test_all.py --manual func_name1 func_name2")
print("2. Run all with auto schedule:")
print("\t$python test_all.py -a all\n")
print("\t$python test_all.py --auto all\n")
print("3. Both schedule methods will be tested if no option is specified")
print("\t$python test_all.py func_name")
print("4. Run fuzz test of add op with maximal dimension of shape equals 3")
print("\t$python test_all.py -f 3 add")
print("Available func:")
print("\t", list(op_map.keys()), "\n")
|
StarcoderdataPython
|
4890735
|
<reponame>ilay09/neutron
# Copyright 2016 Hewlett Packard Enterprise Development Company, LP
# Copyright 2016 Red Hat Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api import validators as lib_validators
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from oslo_log import log as logging
from neutron.services.qos import qos_consts
LOG = logging.getLogger(__name__)
@registry.has_registry_receivers
class DriverBase(object):
def __init__(self, name, vif_types, vnic_types,
supported_rules,
requires_rpc_notifications=False):
"""Instantiate a qos driver.
:param name: driver name.
:param vif_types: list of interfaces (VIFs) supported.
:param vnic_types: list of vnic types supported.
:param supported_rules: dict of supported rules.
:param requires_rpc_notifications: indicates if this driver
expects rpc push notifications to be sent from the driver.
"""
self.name = name
self.vif_types = vif_types
self.vnic_types = vnic_types
self.supported_rules = supported_rules
self.requires_rpc_notifications = requires_rpc_notifications
@registry.receives(qos_consts.QOS_PLUGIN, [events.AFTER_INIT])
def _register(self, resource, event, trigger, **kwargs):
if self.is_loaded:
# trigger is the QosServiceDriverManager
trigger.register_driver(self)
    @property
    def is_loaded(self):
"""True if the driver is active for the Neutron Server.
Implement this property to determine if your driver is actively
configured for this Neutron Server deployment.
"""
return True
def is_vif_type_compatible(self, vif_type):
"""True if the driver is compatible with the VIF type."""
return vif_type in self.vif_types
def is_vnic_compatible(self, vnic_type):
"""True if the driver is compatible with the specific VNIC type."""
return vnic_type in self.vnic_types
def is_rule_supported(self, rule):
supported_parameters = self.supported_rules.get(rule.rule_type)
if not supported_parameters:
LOG.debug("Rule type %(rule_type)s is not supported by "
"%(driver_name)s",
{'rule_type': rule.rule_type,
'driver_name': self.name})
return False
for parameter, validators in supported_parameters.items():
parameter_value = rule.get(parameter)
for validator_type, validator_data in validators.items():
validator_function = lib_validators.get_validator(
validator_type)
validate_result = validator_function(parameter_value,
validator_data)
# NOTE(slaweq): validator functions returns None if data is
# valid or string with reason why data is not valid
if validate_result:
LOG.debug("Parameter %(parameter)s=%(value)s in "
"rule type %(rule_type)s is not "
"supported by %(driver_name)s. "
"Validate result: %(validate_result)s",
{'parameter': parameter,
'value': parameter_value,
'rule_type': rule.rule_type,
'driver_name': self.name,
'validate_result': validate_result})
return False
return True
def create_policy(self, context, policy):
"""Create policy invocation.
This method can be implemented by the specific driver subclass
to update the backend where necessary with the specific policy
information.
:param context: current running context information
:param policy: a QoSPolicy object being created, which will have no
rules.
"""
def update_policy(self, context, policy):
"""Update policy invocation.
This method can be implemented by the specific driver subclass
to update the backend where necessary.
:param context: current running context information
:param policy: a QoSPolicy object being updated.
"""
def delete_policy(self, context, policy):
"""Delete policy invocation.
This method can be implemented by the specific driver subclass
to delete the backend policy where necessary.
:param context: current running context information
:param policy: a QoSPolicy object being deleted
"""
|
StarcoderdataPython
|
4986145
|
'''Test module docstring'''
import json
import pprint
def test_uno():
'''Test function uno docstring'''
json_str = '''{
"info": {
"author": "The Python Packaging Authority",
"author_email": "<EMAIL>",
"home_page": "https://github.com/pypa/sampleproject",
"classifiers": ["Development Status :: 5 - Stable"]
}
}'''
project_info = json.loads(json_str)['info']
assert project_info['author_email'] == '<EMAIL>'
def test_dos(capsys):
'''Test function dos docstring'''
verses = [
'We have all received an education',
'In something', 'somehow', 'have we not?',
'So thank the Lord that in this nation',
'A little learning means a lot.',
'Onegin was, so some decided',
'A learned, if not pedantic, sort'
]
pprint.pprint(verses)
captured = capsys.readouterr()
assert captured.out == \
'''['We have all received an education',
'In something',
'somehow',
'have we not?',
'So thank the Lord that in this nation',
'A little learning means a lot.',
'Onegin was, so some decided',
'A learned, if not pedantic, sort']
'''
|
StarcoderdataPython
|
3262845
|
<filename>tor_starter.py<gh_stars>0
import os
import re
os.system("sudo /etc/init.d/tor restart")
os.system("sudo killall tor")
os.system("sudo killall tor")
f=open("/home/kc/.mozilla/firefox/profiles.ini","r")
file = f.read()
profile_list=re.findall(r'Path=[\w.]+',file)
profile = profile_list[0][5:]
f.close()
profile_path="/home/kc/.mozilla/firefox/"+profile
pref_file=profile_path+"/prefs.js"
f=open(pref_file,"r")
pref_js_data=f.read()
f.close()
pref_js_data_list=pref_js_data.split("\n")
present_network_proxy_setting=re.findall(r'''user_pref\(\"network.proxy.type\", \d''',pref_js_data)
if present_network_proxy_setting[0][-1] != '1':
pref_js_data_list[pref_js_data_list.index('''user_pref("network.proxy.type", 0);''')]='''user_pref("network.proxy.type", 1);'''
pref_js_data_list[pref_js_data_list.index('''user_pref("network.proxy.type", 1);''')-1]='''user_pref("network.proxy.socks_remote_dns", true);'''
pref_js_data_list[pref_js_data_list.index('''user_pref("network.proxy.type", 1);''')-2]='''user_pref("network.proxy.socks_port", 9050);'''
pref_js_data_list[pref_js_data_list.index('''user_pref("network.proxy.type", 1);''')-3]='''user_pref("network.proxy.socks", "127.0.0.1");'''
pref_torred_data='\n'.join(pref_js_data_list)
f=open(pref_file,"w")
f.write(pref_torred_data)
f.close()
|
StarcoderdataPython
|
9625586
|
from setuptools import find_packages, setup
setup(
name="SchedSlackBot",
version='1.0.0',
author="<NAME> <<EMAIL>>",
description="Setup and manage Rotating Schedules in Slack",
packages=find_packages(include=["sched_slack_bot"]),
)
|
StarcoderdataPython
|
9673332
|
<filename>test/test_download_models_class1.py
import logging
logging.getLogger('tensorflow').disabled = True
logging.getLogger('matplotlib').disabled = True
import numpy
numpy.random.seed(0)
import pickle
import tempfile
from numpy.testing import assert_equal
from mhcflurry import Class1AffinityPredictor, Class1NeuralNetwork
from mhcflurry.testing_utils import cleanup, startup
DOWNLOADED_PREDICTOR = None
def setup():
global DOWNLOADED_PREDICTOR
startup()
DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()
def teardown():
global DOWNLOADED_PREDICTOR
DOWNLOADED_PREDICTOR = None
cleanup()
def predict_and_check(
allele,
peptide,
        predictor=None,
        expected_range=(0, 500)):
    # Resolve the predictor at call time: a default argument would be bound at
    # import time, before setup() has loaded the downloaded models.
    if predictor is None:
        predictor = DOWNLOADED_PREDICTOR
def debug():
print("\n%s" % (
predictor.predict_to_dataframe(
peptides=[peptide],
allele=allele,
include_individual_model_predictions=True)))
(prediction,) = predictor.predict(allele=allele, peptides=[peptide])
assert prediction >= expected_range[0], (predictor, prediction, debug())
assert prediction <= expected_range[1], (predictor, prediction, debug())
def test_a1_titin_epitope_downloaded_models():
# Test the A1 Titin epitope ESDPIVAQY from
# Identification of a Titin-Derived HLA-A1-Presented Peptide
# as a Cross-Reactive Target for Engineered MAGE A3-Directed
# T Cells
predict_and_check("HLA-A*01:01", "ESDPIVAQY")
def test_a1_mage_epitope_downloaded_models():
# Test the A1 MAGE epitope EVDPIGHLY from
# Identification of a Titin-Derived HLA-A1-Presented Peptide
# as a Cross-Reactive Target for Engineered MAGE A3-Directed
# T Cells
predict_and_check("HLA-A*01:01", "EVDPIGHLY")
def test_a2_hiv_epitope_downloaded_models():
# Test the A2 HIV epitope SLYNTVATL from
# The HIV-1 HLA-A2-SLYNTVATL Is a Help-Independent CTL Epitope
predict_and_check("HLA-A*02:01", "SLYNTVATL")
def test_caching():
if not DOWNLOADED_PREDICTOR.allele_to_sequence:
# Only run this test on allele-specific predictors.
Class1NeuralNetwork.KERAS_MODELS_CACHE.clear()
DOWNLOADED_PREDICTOR.predict(
peptides=["SIINFEKL"],
allele="HLA-A*02:01")
num_cached = len(Class1NeuralNetwork.KERAS_MODELS_CACHE)
assert num_cached > 0
def test_downloaded_predictor_is_serializable():
predictor_copy = pickle.loads(pickle.dumps(DOWNLOADED_PREDICTOR))
numpy.testing.assert_equal(
DOWNLOADED_PREDICTOR.predict(
["RSKERAVVVAW"], allele="HLA-A*01:01")[0],
predictor_copy.predict(
["RSKERAVVVAW"], allele="HLA-A*01:01")[0])
def test_downloaded_predictor_is_savable():
models_dir = tempfile.mkdtemp("_models")
print(models_dir)
DOWNLOADED_PREDICTOR.save(models_dir)
predictor_copy = Class1AffinityPredictor.load(models_dir)
numpy.testing.assert_equal(
DOWNLOADED_PREDICTOR.predict(
["RSKERAVVVAW"], allele="HLA-A*01:01")[0],
predictor_copy.predict(
["RSKERAVVVAW"], allele="HLA-A*01:01")[0])
|
StarcoderdataPython
|
265212
|
import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Run3_cff import Run3
process = cms.Process('DUMP',Run3)
process.source = cms.Source('EmptySource')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.load('Configuration.Geometry.GeometryExtended2021_cff')
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load("Geometry.MuonNumbering.muonNumberingInitialization_cfi")
process.load("Geometry.CSCGeometryBuilder.cscGeometry_cfi")
process.load("Geometry.CSCGeometryBuilder.cscGeometryDump_cfi")
process.CSCGeometryESModule.applyAlignment = False
if 'MessageLogger' in process.__dict__:
process.MessageLogger.Geometry=dict()
process.MessageLogger.CSCNumberingScheme=dict()
process.MessageLogger.CSCGeometry=dict()
process.source = cms.Source('EmptySource')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.cscGeometryDump.verbose = True
process.p = cms.Path(process.cscGeometryDump)
|
StarcoderdataPython
|
374709
|
# -*- coding: utf-8 -*-
"""
wow_addon_manager.app
~~~~~~~~~~~~~~~~~~~~~
The main application.
:author: qwezarty
:date: 04:03 pm Nov 14 2017
:email: <EMAIL>
"""
import sys
from .config import Config
from .request import Request
import zipfile
from wow_addon_manager import helpers
from os import path, getcwd, mkdir, makedirs
class App:
"""main application"""
def __init__(self, import_name, root_path=None):
self.import_name = import_name
if root_path is None:
root_path = self.get_root_path(import_name)
self.root_path = root_path
self.system_config = self.make_config('configs/system.json', relative=True)
self.user_config = self.make_config(self.get_user_config_path(), relative=False)
self.addons_dir = path.join(self.user_config['wow_root_folder'], '_retail_', 'Interface', 'Addons')
self.request = self.make_request(self.user_config['source'])
def get_root_path(self, import_name):
"""find the root path of the package"""
mod = sys.modules.get(import_name)
if mod is not None and hasattr(mod, '__file__'):
return path.dirname(path.abspath(mod.__file__))
# loader = pkgutil.get_loader(import_name)
# if loader is None or import_name == '__main__':
return getcwd()
def init_install_dirs(self):
"""init folders if not existed."""
if self.system_config['run_mode'] == 'test':
import pdb
pdb.set_trace()
self.addons_dir = path.join(self.root_path, 'tests', 'dst', 'Interface', 'Addons')
if not path.exists(self.addons_dir):
makedirs(self.addons_dir)
def get_user_config_path(self):
"""get user config file in system.json, or using default file, i.e. user.sample.json"""
user_path = path.join(self.root_path, self.system_config['user_config'])
default_path = path.join(self.root_path, 'configs', 'user.sample.json')
if path.exists(user_path):
return user_path
        print('user config file does not exist! using default config.')
if path.exists(default_path):
return default_path
        print('default config file does not exist either!')
import sys
sys.exit(1)
def make_config(self, file_path, relative=False):
"""get an instance of Config, and load a json file."""
config = Config(self.root_path)
if relative:
config.from_rel_json(file_path)
else:
config.from_abs_json(file_path)
return config
def make_request(self, source_name):
"""get an instance of SourceRequest, according to source_name."""
return Request(root_path=self.root_path)
def info(self, addon_id):
"""show the infomations of a specific addon."""
addon = self.request.get_info(addon_id)
print('name: ', addon['name'])
print('install-id: ', addon['id'])
print('game-version: ', addon['game_version'])
print('last-update', addon['last_update'])
print('addon-homepage', addon['addon_url'])
def install(self, addon_id):
"""install a specific addon."""
self.init_install_dirs()
zip_file = self.request.download_to_cache(addon_id)
print('===> Installing...', end='')
helpers.extract_to_dst(zip_file, self.addons_dir)
print('Done!')
def search(self, addon_name):
"""search the addon you want."""
addons = self.request.get_list(addon_name)
for a in addons:
print('%-40s%-20s' % (a['id'], a['last_update']))
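# --- Usage sketch (illustrative; assumes the packaged config files exist) ----
# App.__init__ eagerly reads configs/system.json and the user config, so this
# only works inside a checkout that ships them; run it as
# `python -m wow_addon_manager.app` because of the relative imports above.
if __name__ == '__main__':
    app = App('wow_addon_manager')
    print('addons directory:', app.addons_dir)
    app.search('auction')  # hypothetical search term; prints id / last_update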
|
StarcoderdataPython
|
3322980
|
<filename>generate-list-of-projects.py
#!/usr/bin/env python3
from collections import defaultdict
import csv
from datetime import datetime
from glob import glob
import os.path
import re
from urllib.request import urlopen
from jinja2 import Environment
from jinja2 import FileSystemLoader
import yaml
#
# Variables
#
with open('config.yml', 'r') as file:
CONFIG = yaml.safe_load(file)
SPREADSHEET_ID = CONFIG.get('spreadsheet_id')
SHEET_ID = CONFIG.get('sheet_id')
SHEET_URL = f'https://docs.google.com/spreadsheets/d/{SPREADSHEET_ID}/export'
SHEET_URL += f'?format=csv&id={SPREADSHEET_ID}&gid={SHEET_ID}'
TEMPLATES_DIR = CONFIG.get('templates_dir')
TEMPLATE_FILE = CONFIG.get('html_template_file')
OUTPUT_FILE = CONFIG.get('html_output_file')
LOGOS_DIR = CONFIG.get('logos_dir')
LOGOS_URL = CONFIG.get('logos_url')
COLORS = CONFIG.get('colors', {})
COMPANIES = defaultdict(dict)
PROJECTS = defaultdict(list)
#
# Text sanitizer
#
# – removes whitespace from the beginning and the end of a string
# – replaces < and > with HTML entities to not allow extra tags in text
# – replaces newline characters with HTML line break tag
# – inserts non-breaking space after single letter or number
#
def sanitize(text: str) -> str:
sanitized = text.strip()
    sanitized = sanitized.replace('<', '&lt;').replace('>', '&gt;')
sanitized = sanitized.replace('\n', '<br />')
    sanitized = re.sub('[ ]([a-z0-9])[ ]', r' \1&nbsp;', sanitized)  # keep single-letter words glued to the next word
return sanitized
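# Example (illustrative): sanitize('Opis <b>projektu</b>\nciąg dalszy') returns
# 'Opis &lt;b&gt;projektu&lt;/b&gt;<br />ciąg dalszy'; single-letter words such
# as "i" or "w" additionally get a non-breaking space appended so they are not
# left dangling at the end of a rendered line.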
#
# Read template
#
template_loader = FileSystemLoader(searchpath=TEMPLATES_DIR)
template_env = Environment(loader=template_loader)
template = template_env.get_template(TEMPLATE_FILE)
#
# Fetch data
#
data = urlopen(SHEET_URL).read().decode('utf-8')
lines = data.replace('\xa0', ' ').split('\r\n')
reader = csv.reader(lines[1:])
#
# Process projects
#
for row in reader:
    # time = row[0]  # Sygnatura czasowa (timestamp)
    # email = row[1]  # Adres e-mail (e-mail address)
    # name = row[2]  # Imię i nazwisko (full name)
    # phone = row[3]  # Numer telefonu (phone number)
    company = sanitize(row[4])  # Nazwa firmy (company name)
    title = sanitize(row[5])  # Tytuł projektu (project title)
    description = sanitize(row[6])  # Opis projektu (project description)
    offers = row[7]  # Planowane formy współpracy (planned forms of cooperation)
    sizes = sanitize(row[8])  # Akceptowana wielkość grupy (accepted group size)
    groups = sanitize(row[9])  # Liczba równoległych realizacji (number of parallel groups)
    english = (row[10] == 'TAK')  # Czy może być angielskojęzyczny? (can it be in English?)
    appendix = sanitize(row[11])  # Dodatkowe uwagi (additional remarks)
    # reserved = row[12]  # Przed-rezerwacja (pre-reservation)
    verified = (row[13] == 'TAK')  # ZWERYFIKOWANY (verified)
    available = sanitize(row[14])  # DOSTĘPNE GRUPY (available groups)
if verified:
offers = [sanitize(offer) for offer in offers.split(', ')]
language = 'angielski, polski' if english else 'polski'
if not appendix:
appendix = '(brak)'
company_ID = ''.join(filter(str.isalnum,
company.lower().strip().split()[0]))
logos = glob(f'{LOGOS_DIR}/{company_ID}.*')
if logos:
logo = logos[0]
logo = os.path.join(LOGOS_URL, os.path.basename(logo))
else:
logo = None
company = {
'name': company,
'logo': logo,
}
project = {
'title': title,
'description': description,
'offers': offers,
'sizes': sizes,
'groups': groups,
'language': language,
'appendix': appendix,
'available': available,
}
COMPANIES[company_ID] = company
PROJECTS[company_ID].append(project)
#
# Generate output
#
with open(OUTPUT_FILE, 'w') as file:
last_update = str(datetime.now().astimezone().isoformat())
file.write(template.render(
COLORS=COLORS,
COMPANIES=COMPANIES,
PROJECTS=PROJECTS,
last_update=last_update,
))
|
StarcoderdataPython
|
1708994
|
<filename>youtube_podcast_gateway/config.py
class Key:
def __init__(self, name, type, default_value=None):
self.name = name
self.type = type
self.default_value = default_value
class Configuration:
def __init__(self, values: dict):
self._values = values
def get(self, key: Key):
value_str = self._values.get(key.name)
        # Use the default value if no value has been set for the key or the
# value is empty.
if value_str:
try:
return key.type(value_str)
except ValueError:
raise Exception('Error parsing value "{}" as type {} for configuration key {}.'.format(value_str, key.type.__name__, key.name))
else:
return key.default_value
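# --- Usage sketch (illustrative) ------------------------------------------------
# Values are stored as raw strings and coerced through Key.type on access;
# missing or empty values fall back to the key's default_value.
if __name__ == '__main__':
    port_key = Key('port', int, default_value=8080)
    print(Configuration({'port': '9000'}).get(port_key))  # 9000
    print(Configuration({}).get(port_key))                # 8080 (default)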
|
StarcoderdataPython
|
4898032
|
<reponame>nick66551/VectorSpace
from __future__ import division, unicode_literals
import math
from textblob import TextBlob as tb
def tf(word, blob):
return blob.count(word) / len(blob)
def n_containing(word, bloblist):
return sum(1 for blob in bloblist if word in blob)
def idf(word, bloblist):
return math.log(len(bloblist) / (1 + n_containing(word, bloblist)))
def tfidf(word, blob, bloblist):
return tf(word, blob) * idf(word, bloblist)
document1 = tb("""Python is a 2000 made-for-TV horror movie directed by <NAME>. The film features several cult favorite actors, including <NAME> of The Karate Kid fame, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> (best known for his role as <NAME> in the A Nightmare on Elm Street series of films), <NAME>, <NAME>, and <NAME>. The film concerns a genetically engineered snake, a python, that escapes and unleashes itself on a small town. It includes the classic final girl scenario evident in films like Friday the 13th. It was filmed in Los Angeles, California and Malibu, California. Python was followed by two sequels: Python II (2002) and Boa vs. Python (2004), both also made-for-TV films.""")
document2 = tb("""Python, from the Greek word, is a genus of nonvenomous pythons[2] found in Africa and Asia. Currently, 7 species are recognised.[2] A member of this genus, P. reticulatus, is among the longest snakes known.""")
document3 = tb("""The Colt Python is a .357 Magnum caliber revolver formerly manufactured by Colt's Manufacturing Company of Hartford, Connecticut. It is sometimes referred to as a "Combat Magnum".[1] It was first introduced in 1955, the same year as Smith & Wesson's M29 .44 Magnum. The now discontinued Colt Python targeted the premium revolver market segment. Some firearm collectors and writers such as <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and <NAME> have described the Python as the finest production revolver ever made.""")
bloblist = [document1, document2, document3]
#for i, blob in enumerate(bloblist):
# print("Top words in document {}".format(i + 1))
# print blob
# scores = {word: tfidf(word, blob, bloblist) for word in blob.words}
# print scores
# sorted_words = sorted(scores.items(), key=lambda x: x[1], reverse=True)
# for word, score in sorted_words[:3]:
# print("\tWtfidfratings=tfidfratings=ord: {}, TF-IDF: {}".format(word, round(score, 5)))
|
StarcoderdataPython
|
36584
|
from jobbergate import appform
def mainflow(data):
return [appform.Const("val", default=10)]
|
StarcoderdataPython
|
5077819
|
""" Klei rowid communication
Methods:
* getRegionalLobbies(regionPathURL) : list
* getServerRowID(ip, port) : dictionary or None
* getServerInfoKlei(rowID) : dictionary
By: github.com/iamflea
Licence: who cares July 2019
"""
import http.client
import json
from config import *
import zlib
import re
import os.path as op # For checking when the file `KLEI_SERVER_LIST_FILENAME` was modified
import datetime as dt
# Not sure if this is correct address
KEY_REGEX = re.compile(r'<Key>([^<]*)</Key>')
""" Get lobbies from regionPathURL
@param regionPathURL string
@return JSON data (list)
"""
def getRegionalLobbies(regionPathURL):
# Parse only `.json.gz` files
if not regionPathURL.endswith(".json.gz"):
return []
#print(regionPathURL)
# Parse steam versions
if not "Steam" in regionPathURL:
return []
# China: 1989: Man vs. Chinese tank Tiananmen square
if "China" in regionPathURL:
return [] # sudo make revolution
# Connect
conn = http.client.HTTPSConnection(KLEI_CLOUD_URL, 443)
conn.request("GET", f'/{regionPathURL}')
res = conn.getresponse()
# Check if everything is all right
if res.status != 200:
return []
# Get data and close connection
data = res.read()
conn.close()
# Deflate and parse json
data = zlib.decompress(data, zlib.MAX_WBITS|32).decode("utf-8")
data = json.loads(data)['GET']
if data is None:
return []
else:
return data
""" Get lobbies regions from the cloud.
@raise StopIteration
@yield Region from a cloud
"""
def getRegions():
#print("Getting new list")
# Connect
conn = http.client.HTTPSConnection(KLEI_CLOUD_URL, 443)
conn.request("GET", '/')
res = conn.getresponse()
if res.status != 200:
        return  # PEP 479: raising StopIteration inside a generator would become a RuntimeError
data = res.read()
data = data.decode("utf-8") # TEST comment it?
conn.close()
# Here we have some server groups
for i in re.finditer(KEY_REGEX, data):
yield i.group(1)
""" Returns list of lobbies """
def getLobbies():
return [lobby for region in getRegions() for lobby in getRegionalLobbies(region)]
""" Load servers from file, if needed
@return list of lobbies (servers)
"""
def getServerListKlei():
try:
# Get time of the last modification of `KLEI_SERVER_LIST_FILENAME`
t = op.getmtime(KLEI_SERVER_LIST_FILENAME)
        # If the file is older than 60 minutes, we will update it
needUpdate = dt.datetime.fromtimestamp(t) < dt.datetime.now() - dt.timedelta(minutes=60)
except FileNotFoundError:
needUpdate = True
if needUpdate:
# Load new lobbies
lobbies = getLobbies()
# Save lobbies into the file
with open(KLEI_SERVER_LIST_FILENAME, 'w') as fp:
json.dump(lobbies, fp)
# Return lobbies
return lobbies
else:
        # We updated the list recently
with open(KLEI_SERVER_LIST_FILENAME, 'r') as fp:
return json.load(fp)
""" Get server's Klei row id
@param ip string
@param port int DST game port (Do not mistake with Steam port)
@return string klei row id
"""
def getServerRowID(ip, port):
port = int(port)
for lobby in getServerListKlei():
if lobby['__addr'] == ip and int(lobby['port']) == port:
#print(server)
return lobby
return None
""" Lua string to dictionary """
def parsePlayerNames(names):
# remove `return {...}` and makes it list `[...]`
names = "["+names[8:-1]+"]"
#print(names)
x = re.sub(r"{\n *colour=", '{"colour":',names)
x = re.sub(r",\n *eventlevel=", ',"eventlevel":',x)
x = re.sub(r",\n *name=", ',"name":',x)
x = re.sub(r",\n *netid=", ',"netid":',x)
x = re.sub(r",\n *prefab=", ',"prefab":',x)
return json.loads(x)
""" Lua string to dictionary """
def parseData(data):
data = data[7:]
x = re.sub(r" *day=", '"day":',data)
x = re.sub(r" *dayselapsedinseason=", '"dayselapsedinseason":',x)
x = re.sub(r" *daysleftinseason=", '"daysleftinseason":',x)
return json.loads(x)
""" Returns server info in JSON format (Klei API?)
Returns empty dictionary on error
@param string rowID
@return dictionary (json)
"""
def getServerInfoKlei(rowID):
#print("getServerInfoKlei()")
# Is it walid row ID?
if rowID == '':
return {}
# Prepare connection and
conn = http.client.HTTPSConnection(KLEI_LOBBY_EU_URL, 443)
post = { "__gameId": "DontStarveTogether",
"__token": KLEI_TOKEN, # How the hack can i get token
"query": {
"__rowId": rowID
}
}
conn.request("POST", '/lobby/read', json.dumps(post))
res = conn.getresponse()
# If everything is all right, then parse data, else returns the stuff
if res.status != 200:
return {}
data = res.read()
conn.close()
data = json.loads(data)
if 'error' in data: # AUTH_ERROR_E_EXPIRED_TOKEN
return {}
data = data['GET'][0]
data['players'] = parsePlayerNames(data['players'])
data['data'] = parseData(data['data'])
return data
if __name__ == '__main__':
getServerListKlei()
x = getServerRowID('172.16.31.10', 11000)
print(x)
#print(getServerInfoKlei('waat'))
pass
|
StarcoderdataPython
|
3210338
|
<filename>pyusermanager/user_funcs.py
from email.utils import parseaddr
from pony.orm import *
import bcrypt
from . import custom_exceptions as PyUserExceptions
from .auth_type_enum import AUTH_TYPE
class user:
"""
A Class to manage Users in the Database
"""
def __str__(self):
if len(self.__dict__) > 0:
return str(self.__dict__)
        return ""  # __str__ must return a string, never None
def __init__(self, config, username=None, auth_type=AUTH_TYPE.LOCAL):
"""Function to init a User Object
Parameters:
cfg (General_Config): General Config Object used for stuff like simple Parameter Verification
username (str): Username for the specified User
auth_type (AUTH_TYPE enum): Specifies the User Type specified in the AUTH_TYPE enum
"""
self.cfg = config
if username is not None:
self.verify_inputs(username=username)
self.username = str(username)
self.auth_type = auth_type
def get_users(self):
"""
        Gets all users, including avatars, as a list of dictionaries
Returns:
List filled with dicts
example:
[{"username": "admin","avatar":"admin.png"},{"username": "testuser","avatar":"default.png"}]
"""
userlist = []
with db_session:
users = self.cfg.db.User.select()
for user in users:
user_dict = {
"username": user.username,
"avatar": user.avatar,
}
userlist.append(user_dict)
return userlist
@staticmethod
def hash_pw(password=None):
"""A Function to hash specified Password (or any other string)
Parameters:
password (str): a string which will get hashed
Returns:
byte: pw_salt (salt used to hash input)
byte: pw_hash (hash of input)
"""
if password is None:
return None, None
else:
pw_salt = bcrypt.gensalt()
pw_hash = bcrypt.hashpw(password.encode("utf-8"), pw_salt)
return pw_salt, pw_hash
def verify_inputs(self, **kwargs):
"""A Function to check some qualitys of parameters
Exceptions:
ValueError -> if any parameter does not match requirements written down in the passed general config (self.cfg)
"""
found_email = False
if (
"email" in kwargs
and kwargs.get("email") == parseaddr(kwargs.get("email"))[1]
):
found_email = True
# verify activated if given
if "activated" in kwargs and not isinstance(kwargs.get("activated"), bool):
raise ValueError("Activated is not bool")
        # verify password if given
if (
"password" in kwargs
and kwargs.get("password",None) is not None
and len(kwargs.get("password")) < self.cfg.password_min_len
):
raise ValueError("password to short")
        # verify username if given
if "username" in kwargs and (
kwargs.get("username") == None
or len(kwargs.get("username")) < self.cfg.username_min_len
):
raise ValueError("username to short")
if self.cfg.email_required and not found_email:
raise ValueError("Email required but no valid provided!")
    def create(self, password=None, **kwargs):
"""A Function to create a User in the Database
Parameters:
password (str) mandatory
self.auth_type (AUTH_TYPE) <- provided by object!
email (str) optional
avatar (str) optional (is a path to the avatar)
activated (bool) if user is already activated
Returns:
            success (bool) -> Usually True, since everything else would raise an exception
Exceptions:
PyUserExceptions.AlreadyExistsException -> if the user already exists
ValueError -> if parameters do not pass according to verify_inputs
"""
if self.auth_type != AUTH_TYPE.AD and "@" in str(self.username):
raise ValueError("@ in username is reserved for ad Users!")
with db_session:
try:
self.cfg.db.User[self.username]
raise PyUserExceptions.AlreadyExistsException
except ObjectNotFound as err:
self.verify_inputs(**kwargs, password=password)
pw_salt, pw_hash = self.hash_pw(password)
self.cfg.db.User(
username=self.username,
password_hash=pw_hash,
auth_type=self.auth_type,
**kwargs,
)
return True
def delete(self):
"""A Function to delete a User in the Database
Returns:
            success (bool) -> Usually True, since everything else would raise an exception
Exceptions:
PyUserExceptions.MissingUserException -> if user to delete does not exist!
"""
with db_session:
# check if user exists
requested_user = self.cfg.db.User.get(username=self.username)
if requested_user is None:
raise PyUserExceptions.MissingUserException(
"user to delete does not exist!"
)
else:
requested_user.delete()
return True
def check(self):
"""A Function to check if a user exists
Returns:
success (bool) -> true = user exists, false = user does not exist
"""
with db_session:
# check if user exists
requested_user = self.cfg.db.User.get(username=self.username)
if requested_user is None:
return False
else:
return True
def change(self, **kwargs):
"""A Function to change multiple user Attributes
Parameters: (keyword params only!)
password (str)
email (str)
avatar (str)
Exceptions
see changepw(), changeemail(), changeavatar()
"""
if "email" in kwargs:
self.changeemail(kwargs["email"])
if "password" in kwargs:
self.changepw(kwargs["password"])
if "avatar" in kwargs:
self.changeavatar(kwargs["avatar"])
def changepw(self, password):
"""A Function to change the users password
Parameters:
password (str)
Exceptions
ValueError -> if password is to short or None
"""
if password is None:
raise ValueError("password empty!")
self.verify_inputs(password=password)
with db_session:
try:
user = self.cfg.db.User[self.username]
pw_salt, pw_hash = self.hash_pw(password)
user.password_hash = pw_hash
return True
except ObjectNotFound:
raise PyUserExceptions.MissingUserException
def changeemail(self, email):
"""A Function to change the users email
Parameters:
email (str)
Exceptions
ValueError -> if email is not "valid"
"""
if email is None:
raise ValueError("email is empty!")
self.verify_inputs(email=email)
with db_session:
try:
user = self.cfg.db.User[self.username]
user.email = email
return True
except ObjectNotFound:
raise PyUserExceptions.MissingUserException
def changeavatar(self, avatar):
"""A Function to change the users avatar
Parameters:
avatar (str)
Exceptions
ValueError -> if avatar is None
"""
if avatar is None:
raise ValueError("avatar name is invalid!")
with db_session:
try:
user = self.cfg.db.User[self.username]
user.avatar = avatar
return True
except ObjectNotFound:
raise PyUserExceptions.MissingUserException
def info(self, include_email=False):
"""A Function to return a users public information
Parameters:
include_email (bool) -> if set to true the returned dictionary will include the email address of the user
return:
Dictionary with user information
example:
{"username":"admin", "avatar":"default.png", "activated":True, "email":"<EMAIL>"}
Exceptions
PyUserExceptions.MissingUserException -> if requested user is not found
"""
with db_session:
try:
user = self.cfg.db.User[self.username]
return_dict = {
"username": user.username,
"avatar": user.avatar,
"activated": user.activated,
}
if include_email:
return_dict["email"] = user.email
return return_dict
except ObjectNotFound:
raise PyUserExceptions.MissingUserException
def info_extended(self):
"""A Function to return userinfo + auth token info + perms
return:
Dictionary with user information
example:
{"username":"admin", "avatar":"default.png", "activated":True, "email":"<EMAIL>", token:{"last_login":"01.01.2022 13:37", "valid_until":"02.01.2022 13:37"....},"perms":["admin","testgroup"]}
Exceptions
PyUserExceptions.MissingUserException -> if requested user is not found
"""
with db_session:
try:
user = self.cfg.db.User[self.username]
return_dict = self.info(include_email=True)
token_dict = {}
if user.token is not None:
token_dict["last_login"] = str(user.token.last_login)
token_dict["valid_until"] = str(user.token.valid_until)
token_dict["valid_for"] = user.token.ip
token_dict["token"] = user.token.token
# add perms to dict!
perm_array = []
for perm in user.perms:
perm_array.append(perm.perm_name)
return_dict["token"] = token_dict
return_dict["perms"] = perm_array
return return_dict
except ObjectNotFound:
raise PyUserExceptions.MissingUserException
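# Illustrative sketch (not part of the original module): how the methods above are
# meant to be combined. The constructor of the user class is not shown here, so the
# `user` argument is assumed to be an already constructed instance of it; nothing in
# this helper is authoritative about the class itself.
def _example_user_flow(user, password, email):
    """Create the user if missing, then update and return its public info."""
    try:
        user.create(password, email=email)
    except PyUserExceptions.AlreadyExistsException:
        # The account already exists; fall through and just update it.
        pass
    if user.check():
        user.change(email=email)
        return user.info(include_email=True)
    return None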
|
StarcoderdataPython
|
3225656
|
# -*- coding: utf-8 -*-
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_etcdutils
~~~~~~~~~~~~~~
Tests for etcd utility functions.
"""
import logging
import types
from etcd import EtcdResult
from mock import Mock
from calico.etcdutils import PathDispatcher
from calico.felix.test.base import BaseTestCase
_log = logging.getLogger(__name__)
SAME_AS_KEY = object()
class _TestPathDispatcherBase(BaseTestCase):
"""
Abstract base class for Dispatcher tests.
"""
# Etcd action that this class tests.
action = None
# Expected handler type, "set" or "delete".
expected_handlers = None
def setUp(self):
super(_TestPathDispatcherBase, self).setUp()
self.dispatcher = PathDispatcher()
self.handlers = {
"delete": {},
"set": {},
}
self.register("/")
self.register("/a")
self.register("/a/<b>")
self.register("/a/<b>/c")
self.register("/a/<b>/d")
self.register("/a/<b>/d/<e>")
def register(self, key):
m_on_set = Mock()
m_on_del = Mock()
self.dispatcher.register(key, on_set=m_on_set, on_del=m_on_del)
self.handlers["set"][key.strip("/")] = m_on_set
self.handlers["delete"][key.strip("/")] = m_on_del
def assert_handled(self, key, exp_handler=SAME_AS_KEY, **exp_captures):
if exp_handler is SAME_AS_KEY:
exp_handler = key
if isinstance(exp_handler, types.StringTypes):
exp_handler = exp_handler.strip("/")
m_response = Mock(spec=EtcdResult)
m_response.key = key
m_response.action = self.action
self.dispatcher.handle_event(m_response)
exp_handlers = self.handlers[self.expected_handlers]
for handler_key, handler in exp_handlers.iteritems():
assert isinstance(handler, Mock)
if handler_key == exp_handler:
continue
self.assertFalse(handler.called,
"Unexpected set handler %s was called for "
"key %s" % (handler_key, key))
unexp_handlers = self.handlers[self.unexpected_handlers]
for handler_key, handler in unexp_handlers.iteritems():
assert isinstance(handler, Mock)
self.assertFalse(handler.called,
"Unexpected del handler %s was called for "
"key %s" % (handler_key, key))
if exp_handler is not None:
exp_handlers[exp_handler].assert_called_once_with(
m_response, **exp_captures)
@property
def unexpected_handlers(self):
if self.expected_handlers == "set":
return "delete"
else:
return "set"
def test_dispatch_root(self):
self.assert_handled("/")
def test_dispatch_no_captures(self):
self.assert_handled("/a")
def test_dispatch_capture(self):
self.assert_handled("/a/bval", exp_handler="/a/<b>", b="bval")
def test_dispatch_after_capture(self):
self.assert_handled("/a/bval/c", exp_handler="/a/<b>/c", b="bval")
def test_dispatch_after_capture_2(self):
self.assert_handled("/a/bval/d", exp_handler="/a/<b>/d", b="bval")
def test_multi_capture(self):
self.assert_handled("/a/bval/d/eval",
exp_handler="/a/<b>/d/<e>",
b="bval", e="eval")
def test_non_match(self):
self.assert_handled("/a/bval/c/eval", exp_handler=None)
self.assert_handled("/foo", exp_handler=None)
def test_cover_no_match(self):
m_result = Mock(spec=EtcdResult)
m_result.key = "/a"
m_result.action = "unknown"
self.dispatcher.handle_event(m_result)
for handlers in self.handlers.itervalues():
for key, handler in handlers.iteritems():
self.assertFalse(handler.called,
msg="Unexpected handler called: %s" % key)
class TestDispatcherSet(_TestPathDispatcherBase):
action = "set"
expected_handlers = "set"
class TestDispatcherCaS(_TestPathDispatcherBase):
action = "compareAndSwap"
expected_handlers = "set"
class TestDispatcherCreate(_TestPathDispatcherBase):
action = "create"
expected_handlers = "set"
class TestDispatcherUpdate(_TestPathDispatcherBase):
action = "update"
expected_handlers = "set"
class TestDispatcherDel(_TestPathDispatcherBase):
action = "delete"
expected_handlers = "delete"
class TestDispatcherCaD(_TestPathDispatcherBase):
action = "compareAndDelete"
expected_handlers = "delete"
class TestDispatcherExpire(_TestPathDispatcherBase):
action = "expire"
expected_handlers = "delete"
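# Illustrative sketch (not part of the original tests): the dispatch pattern the
# classes above exercise. Handlers are registered per path template and invoked with
# the EtcdResult plus any <captured> path segments as keyword arguments, mirroring
# what assert_handled() checks.
def _example_dispatch():
    dispatcher = PathDispatcher()
    on_set, on_del = Mock(), Mock()
    dispatcher.register("/a/<b>/c", on_set=on_set, on_del=on_del)
    event = Mock(spec=EtcdResult)
    event.key = "/a/bval/c"
    event.action = "set"
    dispatcher.handle_event(event)
    on_set.assert_called_once_with(event, b="bval")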
|
StarcoderdataPython
|
6617670
|
from .sections import read_sections, read_dat_header, append_section
import struct
from collections import namedtuple
from ...text import decode_text, encode_text
import csv
ItemCategory = namedtuple('ItemCategory', ['name', 'recordSize'])
_ITEM_CATEGORIES = [
ItemCategory('Use', 0x4C),
ItemCategory('Weapon', 0x54),
ItemCategory('Armor', 0x54),
ItemCategory('Helm', 0x54),
ItemCategory('Acc', 0x5C),
ItemCategory('Material', 0x3C),
ItemCategory('Event', 0x3C),
ItemCategory('DLC', 0x54),
ItemCategory('CodeName', 0x50),
ItemCategory('Recipe', 0x80),
ItemCategory('RaveAbility', 0x48),
ItemCategory('OperationCond', 0x3c),
]
def read_items(category, names, descriptions):
count, = struct.unpack_from('<L', names, 0)
items = []
for i in range(count):
name = decode_text(names, 4 + i * category.recordSize + 0xf)
description = decode_text(descriptions, 4 + i * 0x92)
items.append({
'name': name,
'description': description,
})
return items
def _extract_items(binary):
items = {}
sections = read_sections(binary)
for i in range(0, len(_ITEM_CATEGORIES)):
items[_ITEM_CATEGORIES[i].name] = read_items(_ITEM_CATEGORIES[i],
sections[2*i],
sections[2*i+1])
return items
def extract_items(l7cdir, outputdir):
with open(l7cdir / '_Data/System/ItemDataPack.dat', 'rb') as f:
binary = f.read()
items = _extract_items(binary)
with open(outputdir / 'ItemDataPack.csv', 'w', encoding='utf-8', newline='') as f:
writer = csv.DictWriter(f, ['category', 'index', 'field', 'text'])
for category, items in items.items():
for i, item in enumerate(items):
writer.writerow({
'category': category,
'index': i,
'field': 'name',
'text': item['name'],
})
writer.writerow({
'category': category,
'index': i,
'field': 'description',
'text': item['description'],
})
def read_item_csv(csvdir):
items = {}
with open(csvdir / 'ItemDataPack.csv', 'r', encoding='utf-8', newline='') as f:
reader = csv.DictReader(f, ['category', 'index', 'field', 'japanese', 'translation'])
for row in reader:
category = row['category']
if not category:
continue
index = int(row['index'])
field = row['field']
translation = row['translation']
if category not in items:
items[category] = {}
if index not in items[category]:
items[category][index] = {}
if field == 'name':
items[category][index]['name'] = translation
elif field == 'description':
items[category][index]['description'] = translation
else:
raise ValueError('unknown field in ItemDataPack.csv')
return items
def write_items(category, names, descriptions, items):
count, = struct.unpack_from('<L', names, 0)
if count != len(items):
raise ValueError('number of items does not match original')
for i in range(count):
name = encode_text(items[i]['name'])
if len(name) > 0x2C:
print(f'"{category.name},{i},name" is too long (44 bytes allowed), truncating...')
name = name[:0x2B] # one less for trailing zero
name += bytes(0x2C - len(name))
name_start = 4 + i * category.recordSize + 0xf
names[name_start:name_start+0x2C] = name
description = encode_text(items[i]['description'])
if len(description) > 0x92:
print(f'"{category.name},{i},description" is too long (146 bytes allowed), truncating...')
description = description[:0x91] # one less for trailing zero
description += bytes(0x92 - len(description))
desc_start = 4 + i * 0x92
descriptions[desc_start:desc_start+0x92] = description
def insert_items(binary, items):
newbinary = read_dat_header(binary)
sections = [bytearray(section) for section in read_sections(binary)]
for i in range(0, len(_ITEM_CATEGORIES)):
write_items(_ITEM_CATEGORIES[i], sections[2*i], sections[2*i+1],
items[_ITEM_CATEGORIES[i].name])
newbinary = append_section(newbinary, sections[2*i])
newbinary = append_section(newbinary, sections[2*i+1])
newbinary = append_section(newbinary, sections[-1])
assert(len(binary) == len(newbinary))
return newbinary
def recompile_items(l7cdir, csvdir, outputdir):
items = read_item_csv(csvdir)
with open(l7cdir / '_Data/System/ItemDataPack.dat', 'rb') as f:
binary = f.read()
binary = insert_items(binary, items)
outputdir = outputdir / '_Data/System'
outputdir.mkdir(parents=True, exist_ok=True)
with open(outputdir / 'ItemDataPack.dat', 'wb') as f:
f.write(binary)
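# Illustrative sketch (not part of the original module): extract_items() writes a
# four-column CSV, while read_item_csv() expects that file to have been extended with
# 'japanese'/'translation' columns by a translator, so the two steps are normally run
# separately. The directories are joined with '/', so pathlib.Path objects are
# assumed; the paths below are placeholders.
def _example_usage():
    from pathlib import Path
    extract_items(Path('extracted_l7c'), Path('csv_out'))  # dump names/descriptions
    # ... edit csv_out/ItemDataPack.csv, adding the translation column ...
    recompile_items(Path('extracted_l7c'), Path('csv_out'), Path('rebuilt'))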
|
StarcoderdataPython
|
364094
|
def minimum2(L):
    """Return the second smallest value of L (L must contain at least two elements)."""
    min_1 = L[0]
    min_2 = L[1]
    if min_1 < min_2:
        min_1, min_2 = min_2, min_1
    i = 0
    while i < len(L):
        if L[i] < min_1:
            # New smallest value found; the previous smallest becomes the runner-up.
            min_2, min_1 = min_1, L[i]
        elif L[i] < min_2 and L[i] > min_1:
            # Value lies strictly between the two tracked minima.
            min_2 = L[i]
        i += 1
    return min_2
print(minimum2([3,2,5,7,2]))
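# Illustrative cross-check (not part of the original snippet): for the example above
# the second smallest distinct value can also be computed with the standard library,
# which gives an easy way to sanity-check minimum2().
def minimum2_reference(L):
    distinct = sorted(set(L))
    return distinct[1] if len(distinct) > 1 else distinct[0]

assert minimum2_reference([3, 2, 5, 7, 2]) == minimum2([3, 2, 5, 7, 2]) == 3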
|
StarcoderdataPython
|
4866034
|
# filename: venv/lib/python3.6/site-packages/ansible_collections/ansible/utils/plugins/test/in_one_network.py
# -*- coding: utf-8 -*-
# Copyright 2021 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
Test plugin file for netaddr tests: in_one_network
"""
from __future__ import absolute_import, division, print_function
from ansible_collections.ansible.utils.plugins.test.in_network import (
_in_network,
)
from ansible_collections.ansible.utils.plugins.plugin_utils.base.ipaddress_utils import (
_validate_args,
)
__metaclass__ = type
DOCUMENTATION = """
name: in_one_network
author: <NAME> (@priyamsahoo)
version_added: "2.2.0"
short_description: Test if IP address belongs in any one of the networks in the list
description:
- This plugin checks if the provided IP address belongs to exactly one of the provided network addresses
options:
ip:
description:
- A string that represents an IP address
- For example: "10.1.1.1"
type: str
required: True
networks:
description:
- A list of string and each string represents a network address in CIDR form
- For example: ['10.0.0.0/8', '192.168.1.0/24']
type: list
required: True
notes:
"""
EXAMPLES = r"""
#### Simple examples
- name: Set network list
ansible.builtin.set_fact:
networks:
- "10.0.0.0/8"
- "192.168.1.0/24"
- name: Check if 10.1.1.1 is in the provided network list
ansible.builtin.set_fact:
data: "{{ '10.1.1.1' is ansible.utils.in_one_network networks }}"
# TASK [Check if 10.1.1.1 is in the provided network list] **********************
# ok: [localhost] => {
# "ansible_facts": {
# "data": true
# },
#     "changed": false
# }
- name: Set network list
ansible.builtin.set_fact:
networks:
- "10.0.0.0/8"
- "10.1.1.0/24"
- name: Check if 10.1.1.1 is not in the provided network list
ansible.builtin.set_fact:
data: "{{ '10.1.1.1' is not ansible.utils.in_one_network networks }}"
# TASK [Check if 10.1.1.1 is not in the provided network list] ************************
# ok: [localhost] => {
# "ansible_facts": {
# "data": true
# },
# "changed": false
# }
"""
RETURN = """
data:
description:
- If jinja test satisfies plugin expression C(true)
- If jinja test does not satisfy plugin expression C(false)
"""
def _in_one_network(ip, networks):
"""Test if an IP or network is in one network"""
params = {"ip": ip, "networks": networks}
_validate_args("in_one_network", DOCUMENTATION, params)
bools = [_in_network(ip, network) for network in networks]
if bools.count(True) == 1:
return True
return False
class TestModule(object):
""" network jinja test"""
test_map = {"in_one_network": _in_one_network}
def tests(self):
return self.test_map
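# Illustrative sketch (not part of the plugin): the same "member of exactly one
# network" check expressed with the standard ipaddress module, to make explicit what
# the jinja test evaluates. The addresses below are placeholders.
def _in_one_network_stdlib(ip, networks):
    import ipaddress
    addr = ipaddress.ip_address(ip)
    hits = sum(addr in ipaddress.ip_network(net) for net in networks)
    return hits == 1

# _in_one_network_stdlib("10.1.1.1", ["10.0.0.0/8", "192.168.1.0/24"])  -> True
# _in_one_network_stdlib("10.1.1.1", ["10.0.0.0/8", "10.1.1.0/24"])     -> False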
|
StarcoderdataPython
|
11398140
|
# repository: kyouko-taiga/tango
import unittest
from tango.builtin import Bool, Double, Int, Nothing, String, Type
from tango.errors import InferenceError
from tango.parser import parse
from tango.scope_binder import bind_scopes
from tango.type_builder import build_types
from tango.type_solver import TypeVariable, infer_types
from tango.types import EnumType, FunctionType, GenericType, StructType, TypeUnion
from tango.utils import find
class TestTypeSolver(unittest.TestCase):
def test_container_decl(self):
module = self.prepare('cst x = 0')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare('cst x: Int')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare('cst x: Int = 0')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare('cst x: String = 0')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare('cst x: (cst _: Int) -> Nothing')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertIsInstance(x_type, FunctionType)
self.assertEqual(x_type.domain, [Int])
self.assertEqual(x_type.codomain, Nothing)
self.assertEqual(x_type.labels, [None])
module = self.prepare(
'''
mut x
x = 1.0
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Double)
module = self.prepare(
'''
mut x
mut y = x
mut z: Int = y
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
for node in declaration_nodes:
self.assertEqual(self.type_of(node, environment), Int)
# TODO Declaring a container without any inferrable type should raise
# an error.
def test_function_decl(self):
module = self.prepare('fun f() {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertFalse(f_type.domain)
self.assertFalse(f_type.labels)
self.assertFalse(f_type.attributes)
self.assertEqual(f_type.codomain, Nothing)
module = self.prepare('fun f(cst x: Int, cst _ y: String) -> Double {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertEqual(f_type.domain, [Int, String])
self.assertEqual(f_type.labels, ['x', None])
self.assertEqual(f_type.attributes, [set(), set()])
self.assertEqual(f_type.codomain, Double)
module = self.prepare('fun f(mut x: Int, mut _ y: String) -> Double {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertEqual(f_type.domain, [Int, String])
self.assertEqual(f_type.labels, ['x', None])
self.assertEqual(f_type.attributes, [{'mutable'}, {'mutable'}])
self.assertEqual(f_type.codomain, Double)
module = self.prepare('fun f<T, U>(cst x: T, cst _ y: U) -> T {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertTrue(f_type.is_generic())
self.assertEqual(len(f_type.domain), 2)
self.assertIsInstance(f_type.domain[0], GenericType)
self.assertEqual(f_type.domain[0].signature, 'T')
self.assertIsInstance(f_type.domain[1], GenericType)
self.assertEqual(f_type.domain[1].signature, 'U')
self.assertEqual(len(f_type.labels), 2)
self.assertEqual(f_type.labels[0], 'x')
self.assertIsNone(f_type.labels[1])
self.assertEqual(f_type.attributes, [set(), set()])
self.assertIsInstance(f_type.codomain, GenericType)
self.assertEqual(f_type.codomain.signature, 'T')
module = self.prepare('fun f(cst x: Int = 0) {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertEqual(f_type.domain, [Int])
self.assertEqual(f_type.labels, ['x'])
self.assertEqual(f_type.attributes, [set()])
self.assertEqual(f_type.codomain, Nothing)
module = self.prepare('fun f(cst x: Int = 1.0) {}')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare('fun f<T>(cst x: T = 1.0) {}')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare('fun f(cst x: (cst y: Int) -> Int) {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertEqual(len(f_type.domain), 1)
self.assertIsInstance(f_type.domain[0], FunctionType)
self.assertEqual(f_type.domain[0].domain, [Int])
self.assertEqual(f_type.domain[0].labels, ['y'])
self.assertEqual(f_type.domain[0].attributes, [set()])
self.assertEqual(f_type.domain[0].codomain, Int)
self.assertEqual(f_type.labels, ['x'])
self.assertEqual(f_type.attributes, [set()])
self.assertEqual(f_type.codomain, Nothing)
module = self.prepare('fun f() -> (cst y: Int) -> Int {}')
(module, environment) = infer_types(module)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_type, FunctionType)
self.assertFalse(f_type.is_generic())
self.assertFalse(f_type.domain)
self.assertFalse(f_type.labels)
self.assertIsInstance(f_type.codomain, FunctionType)
self.assertEqual(f_type.codomain.domain, [Int])
self.assertEqual(f_type.codomain.labels, ['y'])
self.assertEqual(f_type.codomain.attributes, [set()])
self.assertEqual(f_type.codomain.codomain, Int)
def test_specialization_by_type_annotation(self):
module = self.prepare(
'''
fun f<T, U>(cst _ a: T) -> U {}
cst x: (cst _: Int) -> String = f
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertIsInstance(x_type, FunctionType)
self.assertEqual(x_type.domain, [Int])
self.assertEqual(x_type.codomain, String)
module = self.prepare(
'''
fun f<T, U> (cst _ a: T) -> U {}
fun g(cst x: (cst _: Int) -> String = f) {}
'''
)
(module, environment) = infer_types(module)
g_type = self.type_of(find('FunctionDecl', module)[1], environment)
self.assertIsInstance(g_type, FunctionType)
self.assertIsInstance(g_type.domain[0], FunctionType)
self.assertEqual(g_type.domain[0].domain, [Int])
self.assertEqual(g_type.domain[0].codomain, String)
def test_function_return_type_unification(self):
module = self.prepare('fun f() -> Int { return 0 }')
infer_types(module)
module = self.prepare('fun f() -> Int { return 0.0 }')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare('fun f() { return 0.0 }')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare('fun f<T>() -> T { return 0.0 }')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
fun f() -> Int {
if true {
return 0
} else if false {
return 1
} else {
return 2
}
}
''')
infer_types(module)
module = self.prepare(
'''
fun f() -> Int {
if true {
return 0
} else if false {
return '1'
} else {
return 2
}
}
''')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
fun f() -> Int {
if true {
return 0
} else if false {
return 1
} else {
return '2'
}
}
''')
with self.assertRaises(InferenceError):
infer_types(module)
def test_parameter_overloading(self):
module = self.prepare(
'''
fun f(cst x: Int) {}
fun f(cst x: String) {}
''')
(module, environment) = infer_types(module)
f_types = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_types, TypeUnion)
self.assertEqual(len(f_types), 2)
self.assertIn(Int, (f.domain[0] for f in f_types.types))
self.assertIn(String, (f.domain[0] for f in f_types.types))
self.assertEqual(len(f_types.types[0].domain), 1)
self.assertEqual(f_types.types[0].codomain, Nothing)
self.assertEqual(len(f_types.types[1].domain), 1)
self.assertEqual(f_types.types[1].codomain, Nothing)
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) {}
fun f(cst x: Int) {}
''')
(module, environment) = infer_types(module)
f_types = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_types, TypeUnion)
self.assertEqual(len(f_types), 2)
self.assertTrue((len(f_types.types[0].domain) == 1) or (len(f_types.types[1].domain) == 1))
self.assertTrue((len(f_types.types[0].domain) == 2) or (len(f_types.types[1].domain) == 2))
f0 = f_types.types[0] if len(f_types.types[0].domain) == 1 else f_types.types[1]
self.assertEqual(f0.domain, [Int])
self.assertEqual(f0.codomain, Nothing)
f1 = f_types.types[0] if len(f_types.types[0].domain) == 2 else f_types.types[1]
self.assertEqual(f1.domain, [Int, Int])
self.assertEqual(f1.codomain, Nothing)
# TODO Declaring multiple functions with the same signature should
# raise an error.
def test_parameter_mutability_overloading(self):
module = self.prepare(
'''
fun f(cst x: Int) {}
fun f(mut x: Int) {}
''')
(module, environment) = infer_types(module)
f_types = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_types, TypeUnion)
self.assertEqual(len(f_types), 2)
self.assertTrue(
('mutable' in f_types.types[0].attributes[0]) !=
('mutable' in f_types.types[1].attributes[0]))
self.assertEqual(f_types.types[0].domain, [Int])
self.assertEqual(f_types.types[0].codomain, Nothing)
self.assertEqual(f_types.types[1].domain, [Int])
self.assertEqual(f_types.types[1].codomain, Nothing)
# TODO Declaring multiple functions with the same signature should
# raise an error.
def test_label_overloading(self):
module = self.prepare(
'''
fun f(cst a x: Int) {}
fun f(cst b x: Int) {}
''')
(module, environment) = infer_types(module)
f_types = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_types, TypeUnion)
self.assertEqual(len(f_types), 2)
self.assertIn('a', (f.labels[0] for f in f_types))
self.assertIn('b', (f.labels[0] for f in f_types))
self.assertEqual(len(f_types.types[0].domain), 1)
self.assertEqual(f_types.types[0].domain, [Int])
self.assertEqual(f_types.types[0].codomain, Nothing)
self.assertEqual(len(f_types.types[1].domain), 1)
self.assertEqual(f_types.types[1].domain, [Int])
self.assertEqual(f_types.types[1].codomain, Nothing)
# TODO Declaring multiple functions with the same signature should
# raise an error.
def test_return_type_overloading(self):
module = self.prepare(
'''
fun f() -> Int {}
fun f() -> String {}
''')
(module, environment) = infer_types(module)
f_types = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertIsInstance(f_types, TypeUnion)
self.assertEqual(len(f_types), 2)
self.assertIn(Int, (f.codomain for f in f_types))
self.assertIn(String, (f.codomain for f in f_types))
self.assertFalse(f_types.types[0].domain)
self.assertFalse(f_types.types[1].domain)
# TODO Declaring multiple functions with the same signature should
# raise an error.
def test_inner_scoped_overloading(self):
module = self.prepare(
'''
fun f() -> Int {
fun f() -> String {}
}
''')
(module, environment) = infer_types(module)
function_nodes = find('FunctionDecl:*', module)
outer_f = self.type_of(function_nodes[0], environment)
self.assertIsInstance(outer_f, FunctionType)
self.assertEqual(outer_f.codomain, Int)
inner_f = self.type_of(function_nodes[1], environment)
self.assertIsInstance(inner_f, TypeUnion)
self.assertEqual(len(inner_f), 2)
self.assertTrue(any(f == outer_f for f in inner_f))
self.assertIn(Int, (f.codomain for f in inner_f))
self.assertIn(String, (f.codomain for f in inner_f))
# TODO Declaring multiple functions with the same signature should
# raise an error.
def test_shadowing(self):
module = self.prepare(
'''
cst x = 0
fun f() { cst x = 'Hello, World!' }
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
self.assertEqual(self.type_of(declaration_nodes[0], environment), Int)
self.assertEqual(self.type_of(declaration_nodes[1], environment), String)
module = self.prepare(
'''
cst x = 0
fun f() { cst x = x }
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
self.assertEqual(self.type_of(declaration_nodes[0], environment), Int)
self.assertEqual(self.type_of(declaration_nodes[1], environment), Int)
def test_struct_decl(self):
module = self.prepare('struct S {}')
(module, environment) = infer_types(module)
s_type = self.type_of(find('StructDecl:first', module)[0], environment)
self.assertIsInstance(s_type, StructType)
self.assertEqual(s_type.name, 'S')
module = self.prepare(
'''
struct S {
cst foo: Int
cst bar: String
}
'''
)
(module, environment) = infer_types(module)
s_type = self.type_of(find('StructDecl:first', module)[0], environment)
self.assertIsInstance(s_type, StructType)
self.assertEqual(s_type.name, 'S')
# TODO Fix reification of nominal types.
self.assertEqual(environment[s_type.members['foo']], Int)
self.assertEqual(environment[s_type.members['bar']], String)
module = self.prepare(
'''
struct S {
cst foo: Int
cst bar: String
fun baz(mut self: Self) {}
}
'''
)
(module, environment) = infer_types(module)
s_type = self.type_of(find('StructDecl:first', module)[0], environment)
self.assertIsInstance(s_type, StructType)
self.assertEqual(s_type.name, 'S')
# TODO Fix reification of nominal types.
self.assertEqual(environment[s_type.members['foo']], Int)
self.assertEqual(environment[s_type.members['bar']], String)
self.assertIsInstance(environment[s_type.members['baz']], FunctionType)
# TODO Declaring a method without Self as its first parameter should
# raise an error.
def test_enum_decl(self):
module = self.prepare('enum E {}')
(module, environment) = infer_types(module)
e_type = self.type_of(find('EnumDecl:first', module)[0], environment)
self.assertIsInstance(e_type, EnumType)
self.assertEqual(e_type.name, 'E')
module = self.prepare(
'''
enum E {
case foo
case bar(x: Int, y: Self)
}
'''
)
(module, environment) = infer_types(module)
e_type = self.type_of(find('EnumDecl:first', module)[0], environment)
self.assertIsInstance(e_type, EnumType)
self.assertEqual(e_type.name, 'E')
# TODO Fix reification of nominal types.
self.assertEqual(environment[e_type.members['foo']], e_type)
bar_type = environment[e_type.members['bar']]
self.assertIsInstance(bar_type, FunctionType)
self.assertEqual(len(bar_type.domain), 2)
self.assertEqual(bar_type.domain, [Int, e_type])
self.assertEqual(bar_type.codomain, e_type)
module = self.prepare(
'''
enum E {
case foo
case bar(x: Int, y: Self)
fun baz(mut self: Self) {}
}
'''
)
(module, environment) = infer_types(module)
e_type = self.type_of(find('EnumDecl:first', module)[0], environment)
self.assertIsInstance(e_type, EnumType)
self.assertEqual(e_type.name, 'E')
# TODO Fix reification of nominal types.
self.assertEqual(environment[e_type.members['foo']], e_type)
self.assertIsInstance(environment[e_type.members['bar']], FunctionType)
self.assertIsInstance(environment[e_type.members['baz']], FunctionType)
def test_annotating_custom_nominal_type(self):
module = self.prepare(
'''
struct S {}
cst x: S
'''
)
(module, environment) = infer_types(module)
s_type = self.type_of(find('StructDecl:first', module)[0], environment)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, s_type)
module = self.prepare(
'''
cst x: S
struct S {}
'''
)
(module, environment) = infer_types(module)
s_type = self.type_of(find('StructDecl:first', module)[0], environment)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, s_type)
module = self.prepare(
'''
enum E {}
cst x: E
'''
)
(module, environment) = infer_types(module)
e_type = self.type_of(find('EnumDecl:first', module)[0], environment)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, e_type)
module = self.prepare(
'''
cst x: E
enum E {}
'''
)
(module, environment) = infer_types(module)
e_type = self.type_of(find('EnumDecl:first', module)[0], environment)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, e_type)
def test_nested_types(self):
module = self.prepare(
'''
enum E {
struct S { cst y: Self }
}
cst x: E.S
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
self.assertEqual(self.type_of(declaration_nodes[0], environment).name, 'S')
self.assertEqual(self.type_of(declaration_nodes[1], environment).name, 'S')
module = self.prepare(
'''
cst x: E.S.F
enum E {
struct S {
enum F {}
}
}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type.name, 'F')
def test_assignment(self):
module = self.prepare(
'''
cst x
x = 0
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst x: Int
cst y
x = y
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
self.assertEqual(self.type_of(declaration_nodes[0], environment), Int)
self.assertEqual(self.type_of(declaration_nodes[1], environment), Int)
module = self.prepare(
'''
cst x: Int
cst y: String
x = y
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def test_functions_as_first_class(self):
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
cst x = f
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertEqual(x_type, f_type)
module = self.prepare(
'''
cst x = f
fun f(cst x: Int, cst y: Int) -> Int {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertEqual(x_type, f_type)
module = self.prepare(
'''
cst x = f
fun f(cst x: Int, cst y: Int) -> Int {}
fun f(cst x: String, cst y: String) -> String {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
f_type = self.type_of(find('FunctionDecl:first', module)[0], environment)
self.assertEqual(x_type, f_type)
def test_type_as_first_class(self):
module = self.prepare('cst x = Int.self')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Type)
module = self.prepare('cst x = Int')
with self.assertRaises(SyntaxError):
infer_types(module)
module = self.prepare(
'''
struct S {}
cst x = S.self
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Type)
module = self.prepare(
'''
cst x = S.self
struct S {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Type)
module = self.prepare(
'''
enum E {}
cst x = E.self
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Type)
module = self.prepare(
'''
cst x = E.self
enum E {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Type)
def test_call_without_overloading(self):
module = self.prepare(
'''
fun f() -> Int {}
cst x = f()
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst x = f(x: 0, y: 0)
fun f(cst x: Int, cst y: Int) -> Int {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst x = f(x: g)
fun f(cst x: (cst y: Int) -> Int) -> Int {}
fun g(cst y: Int) -> Int {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl:first', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
cst x = f(x: 0, z: 0)
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
cst x = f(x: 0, 0)
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
cst x = f(x: 0, y: 0.0)
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
cst x = f(x: g)
fun f(cst x: (cst y: Int) -> Int) -> Int {}
fun g(cst x: Int) -> Int {}
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def test_call_with_overloading(self):
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
fun f(cst x: String, cst y: String) -> String {}
cst x = f(x: 0, y: 0)
cst y = f(x: 'hello', y: 'world')
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
module = self.prepare(
'''
cst x = f(x: 0, y: 0)
cst y = f(x: 'hello', y: 'world')
fun f(cst x: Int, cst y: Int) -> Int {}
fun f(cst x: String, cst y: String) -> String {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
module = self.prepare(
'''
fun f(cst x: Int, cst y: Int) -> Int {}
fun f(cst _ x: String, cst _ y: String) -> String {}
cst x = f(x: 0, y: 0)
cst y = f('hello', 'world')
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
module = self.prepare(
'''
cst x = f()
fun f() -> Int {}
fun f() -> String {}
'''
)
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertIsInstance(x_type, TypeUnion)
self.assertIn(Int, x_type)
self.assertIn(String, x_type)
module = self.prepare(
'''
mut x = 9
cst y = f(x)
fun f(mut _ a: Int) -> Int {}
fun f(cst _ a: Int) -> String {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertIsInstance(y_type, TypeUnion)
self.assertIn(Int, y_type)
self.assertIn(String, y_type)
module = self.prepare(
'''
cst x = 9
cst y = f(x)
fun f(mut _ a: Int) -> Int {}
fun f(cst _ a: Int) -> String {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
def test_call_with_generic_parameters(self):
module = self.prepare(
'''
fun f<T, U>(cst x: T, cst y: U) -> T {}
cst x = f(x: 0, y: 'hello world')
cst y = f(x: 'hello world', y: 0)
cst z = f(x: 1.0, y: 2.0)
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
y_type = self.type_of(declaration_nodes[2], environment)
self.assertEqual(y_type, Double)
module = self.prepare(
'''
cst x = f(x: g)
fun f<T>(cst x: T) -> T {}
fun g() {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, FunctionType)
self.assertFalse(x_type.domain)
self.assertFalse(x_type.labels)
self.assertEqual(x_type.codomain, Nothing)
module = self.prepare(
'''
cst x = f(x: g)
cst y = f(x: h)
fun f<T, U>(cst x: (cst y: T, cst z: U) -> U) -> T {}
fun g(cst y: Int, cst z: Int) -> Int {}
fun h(cst y: String, cst z: Double) -> Double {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, String)
module = self.prepare(
'''
cst x = f(x: f(x: g))
fun f<T>(cst x: T) -> T {}
fun g(cst x: Int) -> Int {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, FunctionType)
self.assertEqual(x_type.domain, [Int])
self.assertEqual(x_type.codomain, Int)
def test_call_constraints_propagation(self):
module = self.prepare(
'''
cst x = f()
cst y = g(x)
cst z = h(y)
fun f() -> Int {}
fun f() -> String {}
fun f() -> Double {}
fun g(cst _ arg: Int) -> Int {}
fun g(cst _ arg: String) -> String {}
fun g<T>(cst _ arg: T) -> T {}
fun h(cst _ arg: Int) -> Int {}
fun h(cst _ arg: Double) -> Int {}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, TypeUnion)
self.assertIn(Int, x_type)
self.assertIn(Double, x_type)
y_type = self.type_of(declaration_nodes[1], environment)
self.assertIsInstance(y_type, TypeUnion)
self.assertIn(Int, y_type)
self.assertIn(Double, y_type)
z_type = self.type_of(declaration_nodes[2], environment)
self.assertEqual(z_type, Int)
def test_select(self):
module = self.prepare(
'''
cst s: S
cst x = s.foo
cst y = s.bar
struct S {
cst foo: Int
cst bar: String
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(x_type, Int)
y_type = self.type_of(declaration_nodes[2], environment)
self.assertEqual(y_type, String)
# TODO Selecting a property statically (e.g. `cst x = S.foo`) should
# raise an error.
def test_auto_self_binding(self):
module = self.prepare(
'''
cst s: S
cst x = s.baz
cst y = S.baz
struct S {
fun baz(mut self: Self) -> Int {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
s_type = self.type_of(declaration_nodes[0], environment)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertIsInstance(x_type, FunctionType)
self.assertFalse(x_type.domain)
self.assertFalse(x_type.labels)
self.assertEqual(x_type.codomain, Int)
y_type = self.type_of(declaration_nodes[2], environment)
self.assertIsInstance(y_type, FunctionType)
self.assertEqual(y_type.domain, [s_type])
self.assertEqual(y_type.labels, ['self'])
self.assertEqual(y_type.codomain, Int)
module = self.prepare(
'''
cst x = Point()
cst y = x.distance(to: x)
struct Point {
fun new(cst self: Self) -> Self {}
fun distance(cst self: Self, cst to other: Self) -> Double {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, StructType)
self.assertEqual(x_type.name, 'Point')
y_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(y_type, Double)
def test_constructor(self):
module = self.prepare(
'''
cst s = S()
struct S {
fun new(mut self: Self) -> Self {}
}
'''
)
(module, environment) = infer_types(module)
s_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertIsInstance(s_type, StructType)
self.assertEqual(s_type.name, 'S')
def test_enum_case_constructor(self):
module = self.prepare(
'''
cst x = E.foo
cst y = E.bar(x: 0, y: E.foo)
enum E {
case foo
case bar(x: Int, y: Self)
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, EnumType)
self.assertEqual(x_type.name, 'E')
y_type = self.type_of(declaration_nodes[1], environment)
self.assertIsInstance(y_type, EnumType)
self.assertEqual(y_type.name, 'E')
module = self.prepare(
'''
cst x: E = .foo
cst y: E = .bar(x: 0, y: .foo)
enum E {
case foo
case bar(x: Int, y: Self)
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertIsInstance(x_type, EnumType)
self.assertEqual(x_type.name, 'E')
y_type = self.type_of(declaration_nodes[1], environment)
self.assertIsInstance(y_type, EnumType)
self.assertEqual(y_type.name, 'E')
def test_prefixed_expression(self):
module = self.prepare('cst x = -0')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst s: S
cst x = not s
struct S {
fun not(cst _ self: Self) -> Self {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
s_type = self.type_of(declaration_nodes[0], environment)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(x_type, s_type)
module = self.prepare(
'''
cst s: S
cst x = not s
struct S {
fun not(cst _ self: Self) -> Bool {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(x_type, Bool)
def test_binary_expression(self):
module = self.prepare('cst x = 0 + 2')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst s: S
cst x = s + s
struct S {
fun +(cst _ lhs: Self, cst _ rhs: Self) -> Self {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
s_type = self.type_of(declaration_nodes[0], environment)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(x_type, s_type)
module = self.prepare(
'''
cst s: S
cst x = s + 0
struct S {
fun +(cst _ lhs: Self, cst _ rhs: Int) -> Int {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
x_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(x_type, Int)
def test_if_as_statement(self):
module = self.prepare('if true {} else {}')
infer_types(module)
module = self.prepare('if false {} else if 1 < 3 {}')
infer_types(module)
module = self.prepare(
'''
cst x: Bool
if x {}
'''
)
infer_types(module)
module = self.prepare('if 0 {}')
with self.assertRaises(InferenceError):
infer_types(module)
module = self.prepare(
'''
mut x = 1
if true {
x = 2
} else {
x = 'foo'
}
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def test_if_with_pattern(self):
module = self.prepare(
'''
cst e: E = .bar(x: 0, y: .foo)
if let cst a, cst b in e == .bar(x: a, y: b) {}
enum E {
case foo
case bar(x: Int, y: Self)
fun == (cst _ lhs: Self, cst _ rhs: Self) -> Bool {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
a_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(a_type, Int)
b_type = self.type_of(declaration_nodes[2], environment)
self.assertIsInstance(b_type, EnumType)
self.assertEqual(b_type.name, 'E')
def test_if_as_expression(self):
module = self.prepare('cst x = if true { return 0 } else { return 1 }')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertEqual(x_type, Int)
module = self.prepare(
'''
cst x = if true {
cst a = 9
return 1
} else if true {
cst a = 'bar'
return 2
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl:*', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, Int)
a0_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(a0_type, Int)
a1_type = self.type_of(declaration_nodes[2], environment)
self.assertEqual(a1_type, String)
module = self.prepare('cst x = true and if true { return true } else { return false }')
(module, environment) = infer_types(module)
x_type = self.type_of(find('ContainerDecl', module)[0], environment)
self.assertEqual(x_type, Bool)
module = self.prepare(
'''
cst x = if true {
return 1
} else if true {
return 'bar'
}
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def test_switch_as_statement(self):
module = self.prepare(
'''
mut x = 1
switch x {
case 1 { x = 2 }
case 2 { x = 'foo' }
}
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def test_switch_with_pattern(self):
module = self.prepare(
'''
cst e: E = .bar(x: 0, y: .foo)
switch e {
case let cst a, cst b in .bar(x: a, y: b) {}
}
enum E {
case foo
case bar(x: Int, y: Self)
fun == (cst _ lhs: Self, cst _ rhs: Self) -> Bool {}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl', module)
a_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(a_type, Int)
b_type = self.type_of(declaration_nodes[2], environment)
self.assertIsInstance(b_type, EnumType)
self.assertEqual(b_type.name, 'E')
def test_switch_as_expression(self):
module = self.prepare(
'''
cst x = switch 0 {
case 0 {
cst a = 9
return 'foo'
}
case _ {
cst a = '9'
return 'bar'
}
}
'''
)
(module, environment) = infer_types(module)
declaration_nodes = find('ContainerDecl:*', module)
x_type = self.type_of(declaration_nodes[0], environment)
self.assertEqual(x_type, String)
a0_type = self.type_of(declaration_nodes[1], environment)
self.assertEqual(a0_type, Int)
a1_type = self.type_of(declaration_nodes[2], environment)
self.assertEqual(a1_type, String)
module = self.prepare(
'''
cst x = switch 0 {
case 0 { return 'foo' }
case _ { return 0 }
}
'''
)
with self.assertRaises(InferenceError):
infer_types(module)
def type_of(self, node, environment):
return environment.storage[TypeVariable(node)]
def prepare(self, source):
module = parse(source)
module = build_types(module)
module = bind_scopes(module)
return module
|
StarcoderdataPython
|
4856473
|
# repository: cluco91/Django_Farmacia
from unipath import Path
import os
BASE_DIR = Path(__file__).ancestor(3)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
SECRET_KEY = '<KEY>'
DJANGO_APPS = (
'suit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
LOCAL_APPS = (
'django.contrib.humanize',
'apps.clientes',
'apps.ventas',
'apps.medicamentos',
'apps.compras',
'apps.distribuidor',
'apps.users',
'apps.laboratorio',
'apps.inline',
'apps.factura',
)
THIRD_PARTY_APPS = (
)
INSTALLED_APPS = DJANGO_APPS + LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'farmacia.urls'
WSGI_APPLICATION = 'farmacia.wsgi.application'
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP
TEMPLATE_CONTEXT_PROCESSORS = TCP + (
'django.core.context_processors.request',
)
AUTH_USER_MODEL = 'users.User'
LANGUAGE_CODE = 'es-CL'
TIME_ZONE = 'America/Santiago'
USE_I18N = True
USE_L10N = True
USE_TZ = True
AUTH_USER_MODEL = 'users.User'
|
StarcoderdataPython
|
1865702
|
from pytest import approx
import torch
from torch_geometric.transforms import RandomRotate
from torch_geometric.data import Data
def test_spherical():
assert RandomRotate(-180).__repr__() == 'RandomRotate((-180, 180))'
pos = torch.tensor([[1, 0], [0, 1]], dtype=torch.float)
data = Data(pos=pos)
out = RandomRotate((90, 90))(data).pos.view(-1).tolist()
assert approx(out) == [0, 1, -1, 0]
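# Illustrative sketch (not part of the original test): the expected values above come
# from an exact 90 degree rotation; this helper reproduces them with a plain rotation
# matrix so the magic numbers are easy to verify by hand.
def _rotate_90(pos):
    import math
    angle = math.radians(90)
    rot = torch.tensor([[math.cos(angle), -math.sin(angle)],
                        [math.sin(angle), math.cos(angle)]])
    return (pos @ rot.t()).view(-1).tolist()

# _rotate_90(torch.tensor([[1., 0.], [0., 1.]])) is approximately [0, 1, -1, 0]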
|
StarcoderdataPython
|
12818818
|
# django imports
from django.db import models
from django.db.models.query import QuerySet
from django.conf import settings
from django.core.cache import cache
from django.http import Http404
from django.shortcuts import _get_queryset
def key_from_instance(instance):
opts = instance._meta
return '%s.%s:%s' % (opts.app_label, opts.module_name, instance.pk)
class SimpleCacheQuerySet(QuerySet):
def filter(self, *args, **kwargs):
pk = None
for val in ('pk', 'pk__exact', 'id', 'id__exact'):
if val in kwargs:
pk = kwargs[val]
break
if pk is not None:
opts = self.model._meta
key = '%s.%s.%s:%s' % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, opts.app_label, opts.module_name, pk)
obj = cache.get(key)
if obj is not None:
self._result_cache = [obj]
return super(SimpleCacheQuerySet, self).filter(*args, **kwargs)
class SimpleCacheManager(models.Manager):
def get_query_set(self):
return SimpleCacheQuerySet(self.model)
def lfs_get_object(klass, *args, **kwargs):
"""
Uses get() to return an object, or returns None if the object
does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), a MultipleObjectsReturned will be raised if more than one
object is found.
"""
cache_key = "%s-%s-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, klass.__name__.lower(), kwargs.values()[0])
object = cache.get(cache_key)
if object is not None:
return object
queryset = _get_queryset(klass)
try:
object = queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
return None
else:
cache.set(cache_key, object)
return object
def lfs_get_object_or_404(klass, *args, **kwargs):
"""
Uses get() to return an object, or raises a Http404 exception if the object
does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
Note: Like with get(), a MultipleObjectsReturned will be raised if more than one
object is found.
"""
cache_key = "%s-%s-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, klass.__name__.lower(), kwargs.values()[0])
object = cache.get(cache_key)
if object is not None:
return object
queryset = _get_queryset(klass)
try:
object = queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
else:
cache.set(cache_key, object)
return object
def clear_cache():
"""Clears the complete cache.
"""
# memcached
try:
cache._cache.flush_all()
except AttributeError:
pass
else:
return
try:
cache._cache.clear()
except AttributeError:
pass
try:
cache._expire_info.clear()
except AttributeError:
pass
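# Illustrative sketch (not part of the original module): how the pieces above are
# typically wired together. The Product model below is a placeholder; any model whose
# default manager is SimpleCacheManager gets the pk-based cache lookup in filter(),
# and the helper functions can be used in views in place of get_object_or_404().
#
#   class Product(models.Model):
#       name = models.CharField(max_length=50)
#       objects = SimpleCacheManager()
#
#   product = lfs_get_object(Product, pk=42)          # returns None when missing
#   product = lfs_get_object_or_404(Product, pk=42)   # raises Http404 when missing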
|
StarcoderdataPython
|
6459389
|
import argparse
from datetime import datetime
from random import normalvariate, choice
import struct
import sys
import time
import traceback
from uuid import UUID
import pigpio
from nrf24 import *
#
# A simple NRF24L client that connects to a PIGPIO instance on a hostname and port, default "localhost" and 8888, and
# starts sending request to a remote server expecting to receive a reply from it (client/server).
# Use the companion program "rr-server.py" to provide the server functionality.
#
if __name__ == "__main__":
print("Python NRF24 Request/Reply Client Example.")
# Parse command line argument.
parser = argparse.ArgumentParser(prog="rr-client.py", description="Simple NRF24 Request/Reply Client Example.")
parser.add_argument('-n', '--hostname', type=str, default='localhost', help="Hostname for the Raspberry running the pigpio daemon.")
parser.add_argument('-p', '--port', type=int, default=8888, help="Port number of the pigpio daemon.")
parser.add_argument('client', type=str, nargs='?', default='1CLNT', help="Address of this client (3 to 5 ASCII characters).")
parser.add_argument('server', type=str, nargs='?', default='1SRVR', help="Address of server (3 to 5 ASCII characters).")
args = parser.parse_args()
hostname = args.hostname
port = args.port
client = args.client
server = args.server
if not (2 < len(client) < 6):
print(f'Invalid client address {client}. Addresses must be 3 to 5 ASCII characters.')
sys.exit(1)
if not (2 < len(server) < 6):
print(f'Invalid server address {server}. Addresses must be 3 to 5 ASCII characters.')
sys.exit(1)
if len(client) != len(server):
print(f'Invalid client ({client}) and server ({server}) address, they must be same length.')
sys.exit(1)
# Connect to pigpiod
print(f'Connecting to GPIO daemon on {hostname}:{port} ...')
pi = pigpio.pi(hostname, port)
if not pi.connected:
print("Not connected to Raspberry Pi ... goodbye.")
sys.exit()
# Create NRF24 object.
# PLEASE NOTE: PA level is set to MIN, because test sender/receivers are often close to each other, and then MIN works better.
nrf = NRF24(pi, ce=25, payload_size=RF24_PAYLOAD.DYNAMIC, channel=100, data_rate=RF24_DATA_RATE.RATE_250KBPS, pa_level=RF24_PA.MIN)
nrf.set_address_bytes(len(client))
# Open the server address as a writing pipe (request).
nrf.open_writing_pipe(server)
# Open the client address as a reading pipe (response).
nrf.open_reading_pipe(RF24_RX_ADDR.P1, client)
# Display the content of NRF24L01 device registers.
nrf.show_registers()
try:
print(f'Send to {server}, with reply to {client}')
count = 0
while True:
# Pick a random command to send to the server.
command = choice([0x01, 0x02])
# Pack the request.
request = struct.pack('<H6p', command, bytes(client, 'ascii'))
print(f'Request: command={command}, reply_to={client}, {":".join(f"{c:02x}" for c in request)}')
# Send the request.
nrf.reset_packages_lost()
nrf.send(request)
try:
nrf.wait_until_sent()
except:
print("Timeout waiting for transmission to complete.")
print('Wait 10 seconds before sending new request.')
time.sleep(10)
continue
if nrf.get_packages_lost() == 0:
print(f'Success: lost={nrf.get_packages_lost()}, retries={nrf.get_retries()}')
else:
print(f'Error: lost={nrf.get_packages_lost()}, retries={nrf.get_retries()}')
if nrf.get_packages_lost() == 0:
# If we successfully sent a request we expect to receive a response so fire up RX.
nrf.power_up_rx()
reply_start = time.monotonic()
while True:
if nrf.data_ready():
response = nrf.get_payload()
if response[0] == 0x01:
# The response is a response to a 0x01 command.
command, uuid_bytes = struct.unpack('<H17p', response)
uuid = UUID(bytes=uuid_bytes)
print(f'Response: command={command}, uuid={uuid}')
break
elif response[0] == 0x02:
# The response is a response to a 0x02 command.
command, relay = struct.unpack('<H?', response)
print(f'Response: command={command}, relay on={relay}')
break
else:
# Invalid response.
print('Invalid response received.')
if time.monotonic() - reply_start > 1:
# If we have waited more than 1 second on a response, we time out.
# This obviously depends on the application.
print('Timeout waiting for response.')
break
# Wait 10 seconds before sending the next request.
print('Wait 10 seconds before sending new request.')
time.sleep(10)
except:
traceback.print_exc()
nrf.power_down()
pi.stop()
|
StarcoderdataPython
|
1785677
|
<filename>networking_cisco/plugins/cisco/db/device_manager/hosting_devices_db.py
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from sqlalchemy import exc as sql_exc
from sqlalchemy.orm import exc
from neutron.db import common_db_mixin
from neutron.plugins.common import constants as svc_constants
from networking_cisco.plugins.cisco.db.device_manager import hd_models
from networking_cisco.plugins.cisco.extensions import ciscohostingdevicemanager
LOG = logging.getLogger(__name__)
AUTO_DELETE_DEFAULT = ciscohostingdevicemanager.AUTO_DELETE_DEFAULT
class HostingDeviceDBMixin(
ciscohostingdevicemanager.CiscoHostingDevicePluginBase,
common_db_mixin.CommonDbMixin):
"""A class implementing DB functionality for hosting devices."""
def create_hosting_device(self, context, hosting_device):
LOG.debug("create_hosting_device() called")
hd = hosting_device['hosting_device']
with context.session.begin(subtransactions=True):
credentials_id = hd.get('credentials_id')
if credentials_id is None:
hdt_db = self._get_hosting_device_template(context,
hd['template_id'])
credentials_id = hdt_db['default_credentials_id']
hd_db = hd_models.HostingDevice(
id=self._get_id(hd),
complementary_id=hd.get('complementary_id'),
tenant_id=hd['tenant_id'],
template_id=hd['template_id'],
credentials_id=credentials_id,
name=hd.get('name'),
description=hd.get('description'),
device_id=hd.get('device_id'),
admin_state_up=hd.get('admin_state_up', True),
management_ip_address=hd['management_ip_address'],
management_port_id=hd['management_port_id'],
protocol_port=hd.get('protocol_port'),
cfg_agent_id=hd.get('cfg_agent_id'),
created_at=hd.get('created_at', timeutils.utcnow()),
status=hd.get('status', svc_constants.ACTIVE),
tenant_bound=hd.get('tenant_bound'),
auto_delete=hd.get('auto_delete', AUTO_DELETE_DEFAULT))
context.session.add(hd_db)
return self._make_hosting_device_dict(hd_db)
def update_hosting_device(self, context, id, hosting_device):
LOG.debug("update_hosting_device() called")
hd = hosting_device['hosting_device']
with context.session.begin(subtransactions=True):
#TODO(bobmel): handle tenant_bound changes
hd_query = context.session.query(hd_models.HostingDevice)
if not hd_query.filter_by(id=id).update(hd):
raise ciscohostingdevicemanager.HostingDeviceNotFound(id=id)
#TODO(bobmel): notify_agent on changes to credentials,
# admin_state_up, tenant_bound
return self.get_hosting_device(context, id)
def delete_hosting_device(self, context, id):
LOG.debug("delete_hosting_device() called")
try:
with context.session.begin(subtransactions=True):
hd_query = context.session.query(hd_models.HostingDevice)
if not hd_query.filter_by(id=id).delete():
raise ciscohostingdevicemanager.HostingDeviceNotFound(
id=id)
except db_exc.DBError as e:
with excutils.save_and_reraise_exception() as ctxt:
if isinstance(e.inner_exception, sql_exc.IntegrityError):
ctxt.reraise = False
raise ciscohostingdevicemanager.HostingDeviceInUse(id=id)
def get_hosting_device(self, context, id, fields=None):
LOG.debug("get_hosting_device() called")
hd_db = self._get_hosting_device(context, id)
return self._make_hosting_device_dict(hd_db)
def get_hosting_devices(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
LOG.debug("get_hosting_devices() called")
return self._get_collection(context, hd_models.HostingDevice,
self._make_hosting_device_dict,
filters=filters, fields=fields,
sorts=sorts, limit=limit,
marker_obj=marker,
page_reverse=page_reverse)
def get_hosting_devices_db(self, context, filters=None, sorts=None,
limit=None, marker=None, page_reverse=False):
LOG.debug("get_hosting_devices_db() called")
return self._get_collection_query(
context, hd_models.HostingDevice, filters=filters, sorts=sorts,
limit=limit, marker_obj=marker, page_reverse=page_reverse)
def create_hosting_device_template(self, context, hosting_device_template):
LOG.debug("create_hosting_device_template() called")
hdt = hosting_device_template['hosting_device_template']
#TODO(bobmel): check service types
with context.session.begin(subtransactions=True):
hdt_db = hd_models.HostingDeviceTemplate(
id=self._get_id(hdt),
tenant_id=hdt['tenant_id'],
name=hdt.get('name'),
enabled=hdt.get('enabled', True),
host_category=hdt['host_category'],
service_types=hdt.get('service_types'),
image=hdt.get('image'),
flavor=hdt.get('flavor'),
default_credentials_id=hdt.get('default_credentials_id'),
configuration_mechanism=hdt.get('configuration_mechanism'),
protocol_port=hdt.get('protocol_port'),
booting_time=hdt.get('booting_time'),
slot_capacity=hdt['slot_capacity'],
desired_slots_free=hdt['desired_slots_free'],
tenant_bound=':'.join(hdt['tenant_bound']),
device_driver=hdt['device_driver'],
plugging_driver=hdt['plugging_driver'])
context.session.add(hdt_db)
return self._make_hosting_device_template_dict(hdt_db)
def update_hosting_device_template(self, context,
id, hosting_device_template):
LOG.debug("update_hosting_device_template() called")
hdt = hosting_device_template['hosting_device_template']
tenant_bound = hdt.get('tenant_bound')
if tenant_bound is not None:
hdt['tenant_bound'] = ':'.join(tenant_bound)
with context.session.begin(subtransactions=True):
hdt_query = context.session.query(hd_models.HostingDeviceTemplate)
if not hdt_query.filter_by(id=id).update(hdt):
raise ciscohostingdevicemanager.HostingDeviceTemplateNotFound(
id=id)
return self.get_hosting_device_template(context, id)
def delete_hosting_device_template(self, context, id):
LOG.debug("delete_hosting_device_template() called")
try:
with context.session.begin(subtransactions=True):
hdt_query = context.session.query(
hd_models.HostingDeviceTemplate)
if not hdt_query.filter_by(id=id).delete():
raise (ciscohostingdevicemanager.
HostingDeviceTemplateNotFound(id=id))
except db_exc.DBError as e:
with excutils.save_and_reraise_exception() as ctxt:
if isinstance(e.inner_exception, sql_exc.IntegrityError):
ctxt.reraise = False
raise (ciscohostingdevicemanager.
HostingDeviceTemplateInUse(id=id))
def get_hosting_device_template(self, context, id, fields=None):
LOG.debug("get_hosting_device_template() called")
hdt_db = self._get_hosting_device_template(context, id)
return self._make_hosting_device_template_dict(hdt_db)
def get_hosting_device_templates(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
LOG.debug("get_hosting_device_templates() called")
return self._get_collection(context, hd_models.HostingDeviceTemplate,
self._make_hosting_device_template_dict,
filters=filters, fields=fields,
sorts=sorts, limit=limit,
marker_obj=marker,
page_reverse=page_reverse)
def _get_id(self, res):
uuid = res.get('id')
if uuid:
return uuid
return uuidutils.generate_uuid()
def _get_hosting_device(self, context, id):
try:
return self._get_by_id(context, hd_models.HostingDevice, id)
except exc.NoResultFound:
raise ciscohostingdevicemanager.HostingDeviceNotFound(id=id)
def _make_hosting_device_dict(self, hd, fields=None):
res = {'id': hd['id'],
'complementary_id': hd['complementary_id'],
'tenant_id': hd['tenant_id'],
'template_id': hd['template_id'],
'credentials_id': hd['credentials_id'],
'name': hd['name'],
'description': hd['description'],
'device_id': hd['device_id'],
'admin_state_up': hd['admin_state_up'],
'management_ip_address': hd['management_ip_address'],
'management_port_id': hd['management_port_id'],
'protocol_port': hd['protocol_port'],
'cfg_agent_id': hd['cfg_agent_id'],
'created_at': hd['created_at'],
'status': hd['status'],
'tenant_bound': hd['tenant_bound'],
'auto_delete': hd['auto_delete']}
return self._fields(res, fields)
def _get_hosting_device_template(self, context, id):
try:
return self._get_by_id(context, hd_models.HostingDeviceTemplate,
id)
except exc.NoResultFound:
raise ciscohostingdevicemanager.HostingDeviceTemplateNotFound(
id=id)
def _make_hosting_device_template_dict(self, hdt, fields=None):
tb = hdt['tenant_bound'].split(':') if len(hdt['tenant_bound']) else []
res = {'id': hdt['id'],
'tenant_id': hdt['tenant_id'],
'name': hdt['name'],
'enabled': hdt['enabled'],
'host_category': hdt['host_category'],
'service_types': hdt['service_types'],
'image': hdt['image'],
'flavor': hdt['flavor'],
'default_credentials_id': hdt['default_credentials_id'],
'configuration_mechanism': hdt['configuration_mechanism'],
'protocol_port': hdt['protocol_port'],
'booting_time': hdt['booting_time'],
'slot_capacity': hdt['slot_capacity'],
'desired_slots_free': hdt['desired_slots_free'],
'tenant_bound': tb,
'device_driver': hdt['device_driver'],
'plugging_driver': hdt['plugging_driver']}
return self._fields(res, fields)
|
StarcoderdataPython
|
8192467
|
<filename>myfaker/code/constants.py
# define all the constants used in this project
MAX_SAMPLE_SIZE = 1000
REPEAT_SAMPLE = True
|
StarcoderdataPython
|
335766
|
#!/usr/bin/env python
import sys
import subprocess
import json
description = """
generate a json that contains UUID and path-to-data of the data set for all data sets.
print the json to stdout
"""
def get_irods_path_list(base_dir):
path_list = []
# list files inside base_dir
ls_output = subprocess.check_output(["ls", base_dir]).decode("utf-8")
path_list = ls_output.split()
    # Reassigning the loop variable had no effect; strip each entry explicitly.
    path_list = [path.strip() for path in path_list]
    return path_list
def find_UUID_for_all_path(base_dir, path_list):
data_sets = []
for path in path_list:
# get all data files in a data set via ls
files = subprocess.check_output(["ls", base_dir + path]).decode("utf-8")[1:]
for filename in files.split():
index = filename.find("_metadata.json")
# find the UUID from the filename of metadata file
if index != -1:
UUID = filename[:index]
data_sets.append((base_dir + path, UUID))
return data_sets
def main():
try:
parse_args()
except:
print_help()
raise
# get the base path from cmd line arg
base_dir = sys.argv[1]
if(base_dir[-1] != "/"):
base_dir += "/"
path_list = get_irods_path_list(base_dir)
data_sets = find_UUID_for_all_path(base_dir, path_list)
#if len(path_list) != len(data_sets):
# raise Exception("Error! number of directory is not matching with the number of data sets")
print_json(data_sets)
def parse_args():
if len(sys.argv) == 1:
raise Exception("Error! base path for the archive directory is not specified")
elif len(sys.argv) != 2:
raise Exception("Error! wrong number of args")
if len(sys.argv[1]) == 0:
raise Exception("Error! path empty")
def print_help():
print(description)
print("Usage:")
print("./gen_files_list.py <path-to-base-data-dir>")
print()
def print_json(data_sets):
# Generate & print the json file to stdout
json_obj = {}
json_obj["DATA_FILE_LIST"] = []
for data in data_sets:
data_obj = {}
data_obj["RAW_DATA_PATH"] = data[0] + "/"
data_obj["UUID"] = data[1]
json_obj["DATA_FILE_LIST"].append(data_obj)
dump_str = json.dumps(json_obj, indent=2, sort_keys=True)
print(dump_str)
main()
|
StarcoderdataPython
|
12829443
|
# -*- coding: utf-8 -*-
from .fields import ImportPathField
__all__ = ['ImportPathField']
|
StarcoderdataPython
|
3520173
|
"""
C/C++ integration
=================
NOTE: this module is deprecated and will be removed from Scipy before
the 1.0 release -- use the standalone weave package
(`https://github.com/scipy/weave`_) instead.
inline -- a function for including C/C++ code within Python
blitz -- a function for compiling Numeric expressions to C++
ext_tools -- a module that helps construct C/C++ extension modules.
accelerate -- a module that inline accelerates Python functions
.. note:: On Linux one needs to have the Python development headers installed
in order to be able to compile things with the `weave` module.
Since this is a runtime dependency these headers (typically in a
pythonX.Y-dev package) are not always installed when installing
scipy.
"""
from __future__ import absolute_import, print_function
import sys
from numpy import deprecate
if sys.version_info[0] >= 3:
raise ImportError("scipy.weave only supports Python 2.x")
@deprecate(old_name="scipy.weave", new_name="weave")
def _deprecated():
pass
try:
_deprecated()
except DeprecationWarning as e:
# don't fail import if DeprecationWarnings raise error -- works around
# the situation with Numpy's test framework
pass
from .weave_version import weave_version as __version__
try:
from .blitz_tools import blitz, BlitzWarning
except ImportError:
pass # scipy (core) wasn't available
from .inline_tools import inline
from . import ext_tools
from .ext_tools import ext_module, ext_function
try:
from .accelerate_tools import accelerate
except:
pass
from numpy.testing import Tester
test = Tester().test
|
StarcoderdataPython
|
6676469
|
from propsettings.setting import Setting
from propsettings_qt.input_handlers.input_handler import SettingDrawer
class ObjectHandler(SettingDrawer):
"""
    InputHandler responsible for settings whose value is a plain object.
    These can only be edited if they expose members of type Setting or if they are of a data type that can be loaded.
"""
def __init__(self, setting_owner, setting: Setting):
from propsettings_qt.ui_settings_area import SettingsAreaWidget
super().__init__(setting_owner=setting_owner, setting=setting)
self.widget = SettingsAreaWidget()
self.widget.populate_object(setting.fget(self._setting_owner))
def get_widget(self):
return self.widget
def _on_configurable_object_selected(self, obj):
"""
        Capture the object-selection event from the combobox. Only used when the object being configured can be
        loaded from external storage and can therefore be selected in a combobox.
:param obj:
:return:
"""
self._set_value(obj)
|
StarcoderdataPython
|
3406154
|
import re
def mem_to_bytes(mem):
if isinstance(mem, (float, int)):
return mem
m = re.match(r"^((?:0|[1-9]\d*)(?:\.\d+)?)\s*([A-Za-z]+)?$", mem)
if m is None:
raise Exception("Invalid memory format: {}".format(mem))
val = float(m.group(1))
unit = m.group(2)
if unit is None:
return int(val)
units = ['K', 'M', 'G', 'T', 'P', 'E']
if unit in units:
return int(val * 10**((units.index(unit) + 1) * 3))
binary_units = ['Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei']
if unit in binary_units:
return int(val * 1024**(binary_units.index(unit) + 1))
raise Exception("Unknown unit: {}".format(unit))
def cpu_to_millicores(cpu):
try:
m = re.match(r"^([1-9]\d*)m$", cpu)
if m:
return int(m.group(1))
except TypeError:
pass
try:
cores = float(cpu)
except ValueError:
raise Exception('Invalid cpu format: {}'.format(cpu))
return int(cores * 1000)
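# Minimal usage sketch (added for illustration; values follow the conversion
# rules implemented above: decimal suffixes use powers of 10, binary suffixes
# use powers of 1024, and fractional CPU cores map to millicores):
if __name__ == "__main__":
    assert mem_to_bytes("1K") == 1000
    assert mem_to_bytes("1Ki") == 1024
    assert mem_to_bytes("1.5Gi") == int(1.5 * 1024 ** 3)
    assert cpu_to_millicores("500m") == 500
    assert cpu_to_millicores("0.5") == 500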
|
StarcoderdataPython
|
333681
|
<gh_stars>0
#!/usr/local/bin/python3
# MIT License
# Copyright (c) 2021 HLSAnalyzer.com
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import urllib.request
import json
from urllib.request import Request, HTTPSHandler, build_opener, install_opener
import ssl
import os
import mysql.connector
from mysql.connector import errorcode
import utils
import time
import hashlib
import re
INTERVAL_MINUTES=400 #The update can be run every 30 minutes, with a 10 minute overlap in time
DBHOST = os.environ.get('DBHOST')
DBUSER = os.environ.get('DBUSER')
DBPW = os.environ.get('DBPW')
def create_database(cursor, db_name):
try:
cursor.execute(
"CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(db_name))
except mysql.connector.Error as err:
print("Failed creating database: {}".format(err))
def connect_db():
try:
connection = mysql.connector.connect(user=DBUSER, password=DBPW,
host=DBHOST)
return connection
except:
return None
def define_tables():
TABLES = {}
TABLES['AlertRecord'] = "CREATE TABLE AlertRecord (Timestamp INT, CreateTime INT, MasterID VARCHAR(32), VariantID VARCHAR(32), "\
"RecordHash VARCHAR(8), Record VARCHAR(255), PRIMARY KEY(Timestamp, VariantID, RecordHash))"
TABLES['AlertSummary'] = "CREATE TABLE AlertSummary (Timestamp INT, CreateTime INT, MasterID VARCHAR(32), VariantID VARCHAR(32), "\
"RecordHash VARCHAR(8), Type VARCHAR(32), Status VARCHAR(32), Duration DOUBLE, Units VARCHAR(12), PRIMARY KEY(Timestamp, VariantID, RecordHash))"
TABLES['SCTE35Record'] = "CREATE TABLE SCTE35Record (Timestamp INT, CreateTime INT, MasterID VARCHAR(32), VariantID VARCHAR(32), "\
"RecordHash VARCHAR(8), Record VARCHAR(255), PRIMARY KEY(Timestamp, VariantID, RecordHash))"
TABLES['SCTE35Summary'] = "CREATE TABLE SCTE35Summary (Timestamp INT, CreateTime INT, MasterID VARCHAR(32), VariantID VARCHAR(32), "\
"RecordHash VARCHAR(8), Duration DOUBLE, PRIMARY KEY(Timestamp, VariantID, RecordHash))"
return TABLES
def populate_scte35(db, cursor, records, master_id, link_id, create_time):
if records is None:
print("No records found for: %s, %s" %(master_id, link_id))
return
val_summary = []
val_record = []
for cur in records:
ts = cur['timestamp']
record = cur["scte35"]
record_hash = hashlib.sha1(record.encode("UTF-8")).hexdigest()
record_hash = record_hash[0:8]
val_record.append ((ts, create_time, master_id, link_id, record_hash, record))
m = re.search("Cue In (\d+.\d+) seconds", record)
if (m):
duration = m.group(1)
val_summary.append((ts, create_time, master_id, link_id, record_hash, duration))
if len(val_record) > 0:
sql = """INSERT INTO SCTE35Record (Timestamp, CreateTime, MasterID, VariantID, RecordHash, Record) VALUES (%s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE Timestamp=Timestamp,VariantID=VariantID,RecordHash=RecordHash"""
try:
cursor.executemany(sql, val_record)
except mysql.connector.Error as err:
print(err.msg)
if len(val_summary) > 0:
sql = """INSERT INTO SCTE35Summary (Timestamp, CreateTime, MasterID, VariantID, RecordHash, Duration) VALUES (%s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE Timestamp=Timestamp,VariantID=VariantID,RecordHash=RecordHash"""
try:
cursor.executemany(sql, val_summary)
except mysql.connector.Error as err:
print(err.msg)
db.commit()
def populate_alerts(db, cursor, records, master_id, link_id, create_time):
if records is None:
print("No records found for: %s, %s" %(master_id, link_id))
return
val_summary = []
val_record = []
for cur in records:
ts = cur['timestamp']
record = cur["alerts"]
record_hash = hashlib.sha1(record.encode("UTF-8")).hexdigest()
record_hash = record_hash[0:8]
val_record.append ((ts, create_time, master_id, link_id, record_hash, record))
m = re.search("(SCTE-35|STREAM) (OUTAGE ALERT|ALERT CLEARED) .* (\d+) (minutes|seconds)", record)
if (m):
type=m.group(1)
status=m.group(2)
duration = m.group(3)
units=m.group(4)
val_summary.append((ts, create_time, master_id, link_id, record_hash, type, status, duration, units))
if len(val_record) > 0:
sql = """INSERT INTO AlertRecord (Timestamp, CreateTime, MasterID, VariantID, RecordHash, Record) VALUES (%s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE Timestamp=Timestamp,VariantID=VariantID,RecordHash=RecordHash"""
try:
cursor.executemany(sql, val_record)
except mysql.connector.Error as err:
print(err.msg)
if len(val_summary) > 0:
sql = """INSERT INTO AlertSummary (Timestamp, CreateTime, MasterID, VariantID, RecordHash, Type, Status, Duration, Units) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE Timestamp=Timestamp,VariantID=VariantID,RecordHash=RecordHash"""
try:
cursor.executemany(sql, val_summary)
except mysql.connector.Error as err:
print(err.msg)
db.commit()
def update_hlsanalyzer_content(apikey, apihost):
db = connect_db()
if db is None:
raise Exception("Could not connect to database!")
else:
print("Connected.")
cursor = db.cursor()
if apikey is None:
raise Exception("API Key not found!")
db_name = apikey.replace("-","")
try:
cursor.execute("USE {}".format(db_name))
except mysql.connector.Error as err:
print("Database {} does not exists.".format(db_name))
if err.errno == errorcode.ER_BAD_DB_ERROR:
create_database(cursor, db_name)
print("Database {} created successfully.".format(db_name))
db.database = db_name
else:
print(err)
TABLES = define_tables()
for table_name in TABLES:
table_description = TABLES[table_name]
try:
print("Creating table {}: ".format(table_name), end='')
cursor.execute(table_description)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print("already exists.")
else:
print(err.msg)
else:
print("OK")
create_time = int(time.time())
duration = INTERVAL_MINUTES*60
result = utils.get_all_status(apihost, apikey)
if result is not None:
#Traverse all HLS links being monitored.
# Each link can be either a master playlist with variants, or a single Media playlist
variant_list = []
for hls_link in result['status'].keys():
link_status = result['status'][hls_link]
has_variants = False
timestamp = link_status["Timestamp"]
cur_id = link_status["LinkID"]
if 'Variants' in link_status:
print("MASTER [%s]" %(hls_link))
master_id = cur_id
variant_status = result['status'][hls_link]['Variants']
for variant in variant_status.keys():
print("|-- Variant [%s] "%(variant))
variant_id = variant_status[variant]["LinkID"]
variant_list.append( (master_id, variant_id, timestamp))
else:
print("SINGLE MEDIA [%s]" %(hls_link))
variant_list.append((None, cur_id, timestamp))
for (master_id, cur_id, timestamp) in variant_list:
records = utils.get_records(apihost, apikey, cur_id, timestamp - duration, timestamp, mode="stream/scte35cues")
populate_scte35(db, cursor, records, master_id, cur_id, create_time)
records = utils.get_records(apihost, apikey, cur_id, timestamp - duration, timestamp, mode="stream/alertevents")
populate_alerts(db, cursor, records, master_id, cur_id, create_time)
print("Finished processing database ", db_name)
cursor.close()
db.close()
if __name__ == '__main__':
apikey = os.environ.get('APIKEY')
apihost = "https://staging.hlsanalyzer.com"
update_hlsanalyzer_content(apikey, apihost)
|
StarcoderdataPython
|
11382822
|
# Write a program that makes the computer "think" of an integer between 0 and 5 and asks the user to try to guess which number the computer picked.
# The program should print on the screen whether the user won or lost.
from numpy.random import randint
chute = int(input("Guess a number from 1 to 5: "))
n = randint(1, 6)  # numpy's randint excludes the upper bound, so 6 is needed to include 5
print("The number was {}".format(n))
print("You got it!" if n == chute else "You missed!")
|
StarcoderdataPython
|
8076357
|
<reponame>harry-7/mycodes<gh_stars>1-10
"""
Author: <NAME>
Handle:harry7
"""
#!/usr/bin/python
# Read the bit length and a binary string given least-significant bit first.
n = input()
s = raw_input()
# Interpret the string as an integer (LSB-first binary).
p = 0
k = 1
for i in xrange(n):
    p = p + k * int(s[i])
    k *= 2
# Increment the value by one.
p += 1
# Convert back to an n-bit LSB-first binary string.
f = ""
for i in xrange(n):
    f += str(p % 2)
    p /= 2
# Count how many bit positions changed after the increment.
cnt = 0
for i in xrange(n):
    if s[i] != f[i]:
        cnt += 1
print cnt
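# Example (worked by hand from the logic above): n = 4, s = "1011" represents
# 13; adding one gives 14, which is "0111" in LSB-first form. That differs from
# the input in two positions, so the program prints 2.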
|
StarcoderdataPython
|
9790578
|
<filename>ark/opal/driver/execute_driver/bdcloud_execute_driver.py
# -*- coding: UTF-8 -*-
################################################################################
#
# Copyright (c) 2018 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
"""
from base_execute_driver import BaseExecuteDriver
class BdcloudExecuteDriver(BaseExecuteDriver):
"""
    Baidu Cloud operations driver.
"""
pass
|
StarcoderdataPython
|
11260787
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SupplementalFile'
db.create_table('collection_record_supplementalfile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('collection_record', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['collection_record.CollectionRecord'])),
('filename', self.gf('django.db.models.fields.CharField')(max_length=512)),
('label', self.gf('django.db.models.fields.CharField')(max_length=512, blank=True)),
))
db.send_create_signal('collection_record', ['SupplementalFile'])
# Changing field 'CollectionRecord.title_filing'
# only changing Django rep, no db change needed db.alter_column('collection_record_collectionrecord', 'title_filing', self.gf('django.db.models.fields.CharField')(max_length=255))
# Removing index on 'CollectionRecord', fields ['title_filing']
#db.delete_index('collection_record_collectionrecord', ['title_filing'])
def backwards(self, orm):
# Adding index on 'CollectionRecord', fields ['title_filing']
#db.create_index('collection_record_collectionrecord', ['title_filing'])
# Deleting model 'SupplementalFile'
db.delete_table('collection_record_supplementalfile')
# Changing field 'CollectionRecord.title_filing'
#NO DB CHANGE REQUIRED db.alter_column('collection_record_collectionrecord', 'title_filing', self.gf('django.db.models.fields.SlugField')(max_length=255))
models = {
'dublincore.qualifieddublincoreelement': {
'Meta': {'ordering': "['term']", 'object_name': 'QualifiedDublinCoreElement'},
'content': ('django.db.models.fields.TextField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'qualifier': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('<PASSWORD>', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'collection_record.collectionrecord': {
'Meta': {'unique_together': "(('title_filing', 'publisher'), ('local_identifier', 'publisher'))", 'object_name': 'CollectionRecord'},
'abstract': ('django.db.models.fields.TextField', [], {}),
'accessrestrict': ('django.db.models.fields.TextField', [], {}),
'acqinfo': ('django.db.models.fields.TextField', [], {}),
'ark': ('django.db.models.fields.CharField', [], {'max_length': '255', 'primary_key': 'True'}),
'bioghist': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_dacs': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'date_iso': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'extent': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'local_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'online_items_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oac.Institution']"}),
'scopecontent': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'title_filing': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'userestrict': ('django.db.models.fields.TextField', [], {})
},
'collection_record.publishinginstitution': {
'Meta': {'ordering': "['name']", 'object_name': 'PublishingInstitution', 'db_table': "'oac_institution'", '_ormbases': ['oac.Institution'], 'proxy': 'True'}
},
'collection_record.supplementalfile': {
'Meta': {'object_name': 'SupplementalFile'},
'collection_record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['collection_record.CollectionRecord']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oac.city': {
'Meta': {'ordering': "['name']", 'object_name': 'City'},
'county': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oac.County']"}),
'custom_zoom_level': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'oac.county': {
'Meta': {'ordering': "['name']", 'object_name': 'County'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'oac.institution': {
'Meta': {'ordering': "['name']", 'object_name': 'Institution'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'archivegrid_harvest': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'ark': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'cdlpath': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oac.City']"}),
'county': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oac.County']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_zoom_level': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'google_analytics_tracking_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isa_campus': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {}),
'longitude': ('django.db.models.fields.FloatField', [], {}),
'mainagency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent_ark': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'parent_institution': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['oac.Institution']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'primary_contact': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'contact_for_institution'", 'null': 'True', 'to': "orm['auth.User']"}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'show_subjects': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'worldcat_harvest': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'zip4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['collection_record']
|
StarcoderdataPython
|
11203319
|
# Copyright (c) 2021, <NAME> and/or its affiliates. All rights reserved.
#
# This software is licensed under the Apache License, Version 2.0 (the
# "License") as published by the Apache Software Foundation.
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from typing import Any, Dict, List, Union
from supertokens_python.async_to_sync_wrapper import sync
from supertokens_python.recipe.passwordless import asyncio
from supertokens_python.recipe.passwordless.interfaces import (
ConsumeCodeOkResult, ConsumeCodeResult, CreateCodeResult,
CreateNewCodeForDeviceResult, RevokeAllCodesResult, RevokeCodeResult,
UpdateUserResult)
from supertokens_python.recipe.passwordless.types import DeviceType, User
def create_code(email: Union[None, str] = None,
phone_number: Union[None, str] = None,
user_input_code: Union[None, str] = None,
user_context: Union[None, Dict[str, Any]] = None) -> CreateCodeResult:
return sync(asyncio.create_code(email=email, phone_number=phone_number,
user_input_code=user_input_code, user_context=user_context))
def create_new_code_for_device(device_id: str,
user_input_code: Union[str, None] = None,
user_context: Union[None, Dict[str, Any]] = None) -> CreateNewCodeForDeviceResult:
return sync(asyncio.create_new_code_for_device(device_id=device_id,
user_input_code=user_input_code,
user_context=user_context))
def consume_code(pre_auth_session_id: str,
user_input_code: Union[str, None] = None,
device_id: Union[str, None] = None,
link_code: Union[str, None] = None,
user_context: Union[None, Dict[str, Any]] = None) -> ConsumeCodeResult:
return sync(asyncio.consume_code(pre_auth_session_id=pre_auth_session_id, user_input_code=user_input_code,
device_id=device_id, link_code=link_code, user_context=user_context))
def get_user_by_id(user_id: str, user_context: Union[None, Dict[str, Any]] = None) -> Union[User, None]:
return sync(asyncio.get_user_by_id(
user_id=user_id, user_context=user_context))
def get_user_by_email(email: str, user_context: Union[None, Dict[str, Any]] = None) -> Union[User, None]:
return sync(asyncio.get_user_by_email(
email=email, user_context=user_context))
def get_user_by_phone_number(
phone_number: str, user_context: Union[None, Dict[str, Any]] = None) -> Union[User, None]:
return sync(asyncio.get_user_by_phone_number(
phone_number=phone_number, user_context=user_context))
def update_user(user_id: str, email: Union[str, None] = None,
phone_number: Union[str, None] = None, user_context: Union[None, Dict[str, Any]] = None) -> UpdateUserResult:
return sync(asyncio.update_user(user_id=user_id, email=email,
phone_number=phone_number, user_context=user_context))
def revoke_all_codes(email: Union[str, None] = None,
phone_number: Union[str, None] = None,
user_context: Union[None, Dict[str, Any]] = None) -> RevokeAllCodesResult:
return sync(asyncio.revoke_all_codes(
email=email, phone_number=phone_number, user_context=user_context))
def revoke_code(code_id: str, user_context: Union[None, Dict[str, Any]] = None) -> RevokeCodeResult:
return sync(asyncio.revoke_code(
code_id=code_id, user_context=user_context))
def list_codes_by_email(email: str, user_context: Union[None, Dict[str, Any]] = None) -> List[DeviceType]:
return sync(asyncio.list_codes_by_email(
email=email, user_context=user_context))
def list_codes_by_phone_number(
phone_number: str, user_context: Union[None, Dict[str, Any]] = None) -> List[DeviceType]:
return sync(asyncio.list_codes_by_phone_number(
phone_number=phone_number, user_context=user_context))
def list_codes_by_device_id(
device_id: str, user_context: Union[None, Dict[str, Any]] = None) -> Union[DeviceType, None]:
return sync(asyncio.list_codes_by_device_id(
device_id=device_id, user_context=user_context))
def list_codes_by_pre_auth_session_id(
pre_auth_session_id: str, user_context: Union[None, Dict[str, Any]] = None) -> Union[DeviceType, None]:
return sync(asyncio.list_codes_by_pre_auth_session_id(pre_auth_session_id=pre_auth_session_id,
user_context=user_context))
def create_magic_link(
email: Union[str, None], phone_number: Union[str, None], user_context: Union[None, Dict[str, Any]] = None) -> str:
return sync(asyncio.create_magic_link(
email=email, phone_number=phone_number, user_context=user_context))
def signinup(email: Union[str, None], phone_number: Union[str, None],
             user_context: Union[None, Dict[str, Any]] = None) -> ConsumeCodeOkResult:
return sync(asyncio.signinup(
email=email, phone_number=phone_number, user_context=user_context))
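# Illustrative usage (not part of the original module; the email address is a
# placeholder): these synchronous wrappers can be called directly once
# SuperTokens has been initialised, e.g.
#
#   result = signinup(email="user@example.com", phone_number=None)
#   link = create_magic_link(email="user@example.com", phone_number=None)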
|
StarcoderdataPython
|
1699769
|
import cv2
import numpy as np
#Sketch generation function:
def sketch(image):
#Convert image to greyscale
img_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#Clean up image using Gaussian Blur
img_gray_blur = cv2.GaussianBlur(img_gray, (5,5), 0)
#Extract edges
canny_edges = cv2.Canny(img_gray_blur, 10, 70)
#Do an invert binarize the image
ret, mask = cv2.threshold(canny_edges, 70, 255, cv2.THRESH_BINARY_INV)
return mask
# Initialize webcam, cap is object provided by VideoCapture
# It contains a boolean indicating if it was successful (ret)
# It also contains the images collected from the webcam (frame)
cap = cv2.VideoCapture(0)
while True:
ret, frame = cap.read()
cv2.imshow("Live Sketcher", sketch(frame))
if cv2.waitKey(1) == 13: #13 is the Enter key
break
# Release camera and close window
cap.release()
cv2.destroyAllWindows()
|
StarcoderdataPython
|
6670736
|
<gh_stars>1-10
from . import RENAMES
from ..common.fix_strings import BaseFixStrings
class FixStrings(BaseFixStrings):
renames = RENAMES
|
StarcoderdataPython
|