repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
kojo1/tinyTLS | examples/echo-client.py | 1 | 1056 | # echo-client.py
#
# Copyright (C) 2006-2017 wolfSSL Inc.
#
# This file is part of wolfSSL. (formerly known as CyaSSL)
#
# wolfSSL is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# wolfSSL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import socket
import sys
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = ('localhost', 11111)
sock.connect(server_address)
sock.sendall('Hello server')
data = sock.recv(32)
print 'received:' + data
sock.close()
| gpl-2.0 | -4,838,476,359,355,766,000 | 32 | 79 | 0.754735 | false |
tgbugs/pyontutils | ttlser/ttlser/utils.py | 1 | 1846 | import rdflib
rdflib.plugin.register('nifttl', rdflib.serializer.Serializer,
'ttlser', 'CustomTurtleSerializer')
rdflib.plugin.register('cmpttl', rdflib.serializer.Serializer,
'ttlser', 'CompactTurtleSerializer')
rdflib.plugin.register('uncmpttl', rdflib.serializer.Serializer,
'ttlser', 'UncompactTurtleSerializer')
rdflib.plugin.register('scottl', rdflib.serializer.Serializer,
'ttlser', 'SubClassOfTurtleSerializer')
rdflib.plugin.register('rktttl', rdflib.serializer.Serializer,
'ttlser', 'RacketTurtleSerializer')
rdflib.plugin.register('htmlttl', rdflib.serializer.Serializer,
'ttlser', 'HtmlTurtleSerializer')
def readFromStdIn(stdin=None):
from select import select
if stdin is None:
from sys import stdin
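    # zero-timeout select: only return stdin when data is already waiting to be read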
if select([stdin], [], [], 0.0)[0]:
return stdin
def subclasses(start):
for sc in start.__subclasses__():
yield sc
yield from subclasses(sc)
__jsonld_reg = False
def regjsonld():
global __jsonld_reg
if __jsonld_reg:
return
import rdflib_jsonld
rdflib.plugin.register('json-ld',
rdflib.parser.Parser,
'rdflib_jsonld.parser', 'JsonLDParser')
rdflib.plugin.register('application/ld+json',
rdflib.parser.Parser,
'rdflib_jsonld.parser', 'JsonLDParser')
rdflib.plugin.register('json-ld',
rdflib.serializer.Serializer,
'rdflib_jsonld.serializer', 'JsonLDSerializer')
rdflib.plugin.register('application/ld+json',
rdflib.serializer.Serializer,
'rdflib_jsonld.serializer', 'JsonLDSerializer')
__jsonld_reg = True
| mit | -1,344,507,736,967,548,200 | 34.5 | 71 | 0.602925 | false |
CNR-Engineering/TelTools | cli/slf_3d_to_2d.py | 1 | 4035 | #!/usr/bin/env python
"""
Perform a vertical operation on a 3D results file to get 2D
"""
import numpy as np
import sys
from tqdm import tqdm
from pyteltools.geom.transformation import Transformation
import pyteltools.slf.misc as operations
from pyteltools.slf import Serafin
from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
def slf_3d_to_2d(args):
with Serafin.Read(args.in_slf, args.lang) as resin:
resin.read_header()
logger.info(resin.header.summary())
resin.get_time()
if resin.header.is_2d:
logger.critical('The input file is not 3D.')
sys.exit(1)
if 'Z' not in resin.header.var_IDs:
logger.critical('The elevation variable Z is not found in the Serafin file.')
sys.exit(1)
if args.layer is not None:
upper_plane = resin.header.nb_planes
if args.layer < 1 or args.layer > upper_plane:
logger.critical('Layer has to be in [1, %i]' % upper_plane)
sys.exit(1)
output_header = resin.header.copy_as_2d()
# Shift mesh coordinates if necessary
if args.shift:
output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])
# Toggle output file endianness if necessary
if args.toggle_endianness:
output_header.toggle_endianness()
# Convert to single precision
if args.to_single_precision:
if resin.header.is_double_precision():
output_header.to_single_precision()
else:
logger.warn('Input file is already single precision! Argument `--to_single_precision` is ignored')
if args.aggregation is not None:
if args.aggregation == 'max':
operation_type = operations.MAX
elif args.aggregation == 'min':
operation_type = operations.MIN
else: # args.aggregation == 'mean'
operation_type = operations.MEAN
selected_vars = [var for var in output_header.iter_on_all_variables()]
vertical_calculator = operations.VerticalMaxMinMeanCalculator(operation_type, resin, output_header,
selected_vars, args.vars)
output_header.set_variables(vertical_calculator.get_variables()) # sort variables
# Add some elevation variables
for var_ID in args.vars:
output_header.add_variable_from_ID(var_ID)
with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
resout.write_header(output_header)
vars_2d = np.empty((output_header.nb_var, output_header.nb_nodes_2d), dtype=output_header.np_float_type)
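            # one row per output variable and one column per 2D node; refilled for every frame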
for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
if args.aggregation is not None:
vars_2d = vertical_calculator.max_min_mean_in_frame(time_index)
else:
for i, var in enumerate(output_header.var_IDs):
vars_2d[i, :] = resin.read_var_in_frame_as_3d(time_index, var)[args.layer - 1, :]
resout.write_entire_frame(output_header, time, vars_2d)
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'shift'])
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--layer', help='layer number (1=lower, nb_planes=upper)', type=int, metavar=1)
group.add_argument('--aggregation', help='operation over the vertical', choices=('max', 'min', 'mean'))
parser.add_argument('--vars', nargs='+', help='variable(s) deduced from Z', default=[], choices=('B', 'S', 'H'))
parser.add_group_general(['force', 'verbose'])
if __name__ == '__main__':
args = parser.parse_args()
try:
slf_3d_to_2d(args)
except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
# Message is already reported by slf logger
sys.exit(1)
| gpl-3.0 | 2,957,268,180,725,916,700 | 41.925532 | 116 | 0.615366 | false |
storpool/python-storpool | storpool/sputils.py | 1 | 1408 | #
# Copyright (c) 2014 - 2019 StorPool.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Utility functions and constants for the StorPool API bindings. """
from __future__ import print_function
import os.path
import time
import six.moves
sec = 1.0
msec = 1.0e-3 * sec
usec = 1e-6 * sec
KB = 1024
MB = 1024 ** 2
GB = 1024 ** 3
TB = 1024 ** 4
def pr(x):
""" Display a value and return it; useful for lambdas. """
print(x)
return x
def pathPollWait(path, shouldExist, isLink, pollTime, maxTime):
""" Poll/listen for path to appear/disappear. """
for i in six.moves.range(int(maxTime / pollTime)):
pathExists = os.path.exists(path)
if pathExists and isLink:
assert os.path.islink(path)
if pathExists == shouldExist:
return True
else:
time.sleep(pollTime)
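    # for/else: reached only if the loop finished without returning, i.e. the wait timed out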
else:
return False
| apache-2.0 | 3,257,509,325,098,804,700 | 24.6 | 74 | 0.674006 | false |
loggrio/loggr-unit-raspberry | raspi_loggr/util.py | 1 | 3137 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import logging
from enum import Enum
class LedStatusTypes(Enum):
"""Enum of led status types resp. colors"""
ok = 1 # green
sensor_broken = 2 # red
request_error = 3 # orange
pairing_succeeded = 4 # blue
class SensorTypes(Enum):
"""Enum of sensor types"""
temperature = 1
brightness = 2
humidity = 3
pressure = 4
def log_info(info):
"""Log info messages in logfile and console
Args:
info (str): info message which has to be logged
"""
logging.info(info)
print info
def log_error(err):
"""Log error messages in logfile and console
Args:
err (str): error message which has to be logged
"""
logging.error(err)
print err
def treat_sensor_errors(cpe):
"""Log sensor errors
Args:
cpe (subprocess.CalledProcessError): called process error exception object
"""
log_error('called process error: ' + str(cpe.cmd) + ' returned ' + str(cpe.returncode) + ': ' + cpe.output)
def treat_os_errors(ose):
"""Log os errors
Args:
ose (OSError): os error exception object
"""
log_error('oserror: ' + str(ose.strerror))
def treat_led_errors(cpe):
"""Log led errors
Args:
cpe (subprocess.CalledProcessError): called process error exception object
"""
if cpe.returncode == 1:
log_error('called process error: ' + str(cpe.cmd[0]) + ' returned 1: setup wiringPi failed')
elif cpe.returncode == 2:
log_error('called process error: ' + str(cpe.cmd[0]) + ' returned 2: invalid arguments')
def treat_requests_errors(re):
"""Log requests errors and set status led color to orange
Args:
re (requests.exceptions.RequestException): request exception object
"""
log_error('requests failure: ' + str(re))
set_status_led(LedStatusTypes.request_error.name)
def treat_sensor_broken_errors(sensortype):
"""Log broken sensor errors and set status led color to red
Args:
sensortype (str): type of sensor
"""
log_error(str(sensortype) + ' sensor broken')
set_status_led(LedStatusTypes.sensor_broken.name)
def treat_missing_config_errors():
"""Log missing config file errors"""
log_error('No valid config file found! Please start config server!')
def treat_pairing_errors():
"""Log pairing errors"""
log_error('No Token and/or UserId set in config file. Please pair your Raspberry Pi!')
def set_status_led(status):
"""Set status led color
Args:
status (LedStatusTypes): led status type resp. color of rgb led
"""
command = ['sensors/rgb.out', str(status)]
try:
subproc = subprocess.check_call(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except subprocess.CalledProcessError as cpe:
# catch invalid arguments errors
# catch wiringPi errors
treat_led_errors(cpe)
except OSError as ose:
# catch os errors, e.g. file-not-found
treat_os_errors(ose)
| gpl-3.0 | -4,851,364,773,348,961,000 | 24.92562 | 111 | 0.629582 | false |
wwrechard/pydlm | doc/source/conf.py | 1 | 10025 | # -*- coding: utf-8 -*-
#
# PyDLM documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 9 23:34:57 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
import sys
# sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, '/Users/samuel/Documents/Github/PyDLM')
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
#collapse_navigation = True
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyDLM'
copyright = u'2016, Xiangyu Wang'
author = u'Xiangyu Wang'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'pyramid'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'PyDLM v0.1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PyDLMdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PyDLM.tex', u'PyDLM Documentation',
u'Xiangyu Wang', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pydlm', u'PyDLM Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PyDLM', u'PyDLM Documentation',
author, 'PyDLM', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| bsd-3-clause | -657,461,185,511,227,300 | 27.724928 | 80 | 0.691671 | false |
chapering/PyVolRender | stlineRender.py | 1 | 1041 | #!/usr/bin/env python
import vtk
from vtk.util.colors import *
from tgdataReader import tgdataReader
import sys
stlineModel = tgdataReader( sys.argv[1] )
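# read the streamline data file given on the command line into polydata for rendering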
mapStreamLine = vtk.vtkPolyDataMapper()
mapStreamLine.SetInput( stlineModel )
streamLineActor = vtk.vtkActor()
streamLineActor.SetMapper(mapStreamLine)
streamLineActor.GetProperty().BackfaceCullingOn()
# Now create the usual graphics stuff.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
ren.AddActor(streamLineActor)
ren.SetBackground(slate_grey)
# Here we specify a particular view.
# aCamera = vtk.vtkCamera()
# aCamera.SetClippingRange(0.726079, 36.3039)
# aCamera.SetFocalPoint(2.43584, 2.15046, 1.11104)
# aCamera.SetPosition(-4.76183, -10.4426, 3.17203)
# aCamera.SetViewUp(0.0511273, 0.132773, 0.989827)
# aCamera.SetViewAngle(18.604)
# aCamera.Zoom(1.2)
# ren.SetActiveCamera(aCamera)
renWin.SetSize(800, 600)
iren.Initialize()
renWin.Render()
iren.Start()
| gpl-2.0 | -3,682,871,427,551,950,000 | 24.390244 | 50 | 0.770413 | false |
libyal/libyal | yaldevtools/source_generators/common.py | 1 | 1219 | # -*- coding: utf-8 -*-
"""The source file generator for common source files."""
from __future__ import unicode_literals
import os
from yaldevtools.source_generators import interface
class CommonSourceFileGenerator(interface.SourceFileGenerator):
"""Common source files generator."""
_AUTHORS = 'Joachim Metz <[email protected]>'
_AUTHORS_SEPARATOR = ',\n * '
def Generate(self, project_configuration, output_writer):
"""Generates common source files.
Args:
project_configuration (ProjectConfiguration): project configuration.
output_writer (OutputWriter): output writer.
"""
template_mappings = self._GetTemplateMappings(
project_configuration, authors_separator=self._AUTHORS_SEPARATOR)
template_mappings['authors'] = self._AUTHORS
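    # render each file in the template directory into the common/ output directory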
for directory_entry in os.listdir(self._template_directory):
template_filename = os.path.join(
self._template_directory, directory_entry)
if not os.path.isfile(template_filename):
continue
output_filename = os.path.join('common', directory_entry)
self._GenerateSection(
template_filename, template_mappings, output_writer, output_filename)
| apache-2.0 | 6,604,040,997,463,279,000 | 31.945946 | 79 | 0.696473 | false |
fniephaus/power-system-simulation | systems/producers.py | 1 | 4740 | import random
class GasPoweredGenerator(object):
def __init__(self, env):
self.env = env
self.gas_price_per_kwh = 0.0655 # Euro
self.running = False
self.workload = 0
self.current_gas_consumption = 0 # kWh
self.current_thermal_production = 0 # kWh
self.total_gas_consumption = 0.0 # kWh
self.total_thermal_production = 0.0 # kWh
def start(self):
self.running = True
def stop(self):
self.running = False
def consume_gas(self):
self.total_gas_consumption += self.current_gas_consumption
self.total_thermal_production += self.current_thermal_production
def get_operating_costs(self):
return self.total_gas_consumption * self.gas_price_per_kwh
class CogenerationUnit(GasPoweredGenerator):
def __init__(self, env, heat_storage, electrical_infeed):
GasPoweredGenerator.__init__(self, env)
self.heat_storage = heat_storage
# XRGI 15kW
self.max_gas_input = 49.0 # kW
self.electrical_efficiency = 0.3 # max 14.7 kW
self.thermal_efficiency = 0.62 # max 30.38 kW
self.maintenance_interval = 8500 # hours
self.electrical_infeed = electrical_infeed
self.minimal_workload = 40.0
self.noise = True
self.current_electrical_production = 0 # kWh
self.total_electrical_production = 0.0 # kWh
def calculate_workload(self):
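        # workload follows the heat storage deficit (target level minus stored energy) plus the minimal workload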
calculated_workload = self.heat_storage.target_energy + \
self.minimal_workload - self.heat_storage.energy_stored()
if self.noise:
calculated_workload += random.random() - 0.5
# make sure that minimal_workload <= workload <= 99.0 or workload = 0
if calculated_workload >= self.minimal_workload:
self.workload = min(calculated_workload, 99.0)
else:
self.workload = 0.0
# calulate current consumption and production values
self.current_gas_consumption = self.workload / \
99.0 * self.max_gas_input
self.current_electrical_production = self.current_gas_consumption * \
self.electrical_efficiency
self.current_thermal_production = self.current_gas_consumption * \
self.thermal_efficiency
def consume_gas(self):
super(CogenerationUnit, self).consume_gas()
self.total_electrical_production += self.current_electrical_production
def update(self):
self.env.log('Starting cogeneration unit...')
self.start()
while True:
if self.running:
self.calculate_workload()
self.env.log(
'CU workload:', '%f %%' % self.workload, 'Total:', '%f kWh (%f Euro)' %
(self.total_gas_consumption, self.get_operating_costs()))
self.electrical_infeed.add_energy(
self.current_electrical_production)
self.heat_storage.add_energy(self.current_thermal_production)
self.consume_gas()
else:
self.env.log('Cogeneration unit stopped')
yield self.env.timeout(3600)
class PeakLoadBoiler(GasPoweredGenerator):
def __init__(self, env, heat_storage):
GasPoweredGenerator.__init__(self, env)
self.heat_storage = heat_storage
self.max_gas_input = 100.0 # kW
self.thermal_efficiency = 0.8
def calculate_workload(self):
# turn on if heat_storage is undersupplied
if self.heat_storage.undersupplied():
self.workload = 99.0
# turn off if heat storage's target_energy is almost reached
elif self.heat_storage.energy_stored() + self.current_thermal_production >= self.heat_storage.target_energy:
self.workload = 0
# calulate current consumption and production values
self.current_gas_consumption = self.workload / \
99.0 * self.max_gas_input
self.current_thermal_production = self.current_gas_consumption * \
self.thermal_efficiency
def update(self):
self.env.log('Starting peak load boiler...')
self.start()
while True:
if self.running:
self.calculate_workload()
self.env.log(
'PLB workload:', '%f %%' % self.workload, 'Total:', '%f kWh (%f Euro)' %
(self.total_gas_consumption, self.get_operating_costs()))
self.heat_storage.add_energy(self.current_thermal_production)
self.consume_gas()
else:
self.env.log('PLB stopped.')
self.env.log('=' * 80)
yield self.env.timeout(3600)
| mit | 1,032,790,854,854,087,200 | 33.59854 | 116 | 0.597679 | false |
rusty1s/graph-based-image-classification | patchy/patchy.py | 1 | 9132 | import os
import sys
import json
import tensorflow as tf
from data import DataSet, Record, datasets
from data import iterator, read_tfrecord, write_tfrecord
from grapher import graphers
from .helper.labeling import labelings, scanline
from .helper.neighborhood_assembly import neighborhood_assemblies as neighb,\
neighborhoods_weights_to_root
from .helper.node_sequence import node_sequence
DATA_DIR = '/tmp/patchy_san_data'
FORCE_WRITE = False
WRITE_NUM_EPOCHS = 1
DISTORT_INPUTS = False
NUM_NODES = 100
NODE_STRIDE = 1
NEIGHBORHOOD_SIZE = 9
INFO_FILENAME = 'info.json'
TRAIN_FILENAME = 'train.tfrecords'
TRAIN_INFO_FILENAME = 'train_info.json'
TRAIN_EVAL_FILENAME = 'train_eval.tfrecords'
TRAIN_EVAL_INFO_FILENAME = 'train_eval_info.json'
EVAL_FILENAME = 'eval.tfrecords'
EVAL_INFO_FILENAME = 'eval_info.json'
class PatchySan(DataSet):
def __init__(self, dataset, grapher, data_dir=DATA_DIR,
force_write=FORCE_WRITE, write_num_epochs=WRITE_NUM_EPOCHS,
distort_inputs=DISTORT_INPUTS, node_labeling=None,
num_nodes=NUM_NODES, node_stride=NODE_STRIDE,
neighborhood_assembly=None,
neighborhood_size=NEIGHBORHOOD_SIZE):
node_labeling = scanline if node_labeling is None else node_labeling
neighborhood_assembly = neighborhoods_weights_to_root if\
neighborhood_assembly is None else neighborhood_assembly
self._dataset = dataset
self._grapher = grapher
self._num_nodes = num_nodes
self._neighborhood_size = neighborhood_size
self._distort_inputs = distort_inputs
super().__init__(data_dir)
if tf.gfile.Exists(data_dir) and force_write:
tf.gfile.DeleteRecursively(data_dir)
tf.gfile.MakeDirs(data_dir)
info_file = os.path.join(data_dir, INFO_FILENAME)
if not tf.gfile.Exists(info_file) or force_write:
with open(info_file, 'w') as f:
json.dump({'max_num_epochs': write_num_epochs,
'distort_inputs': distort_inputs,
'node_labeling': node_labeling.__name__,
'num_nodes': num_nodes,
'num_node_channels': grapher.num_node_channels,
'node_stride': node_stride,
'neighborhood_assembly':
neighborhood_assembly.__name__,
'neighborhood_size': neighborhood_size,
'num_edge_channels': grapher.num_edge_channels}, f)
train_file = os.path.join(data_dir, TRAIN_FILENAME)
train_info_file = os.path.join(data_dir, TRAIN_INFO_FILENAME)
if not tf.gfile.Exists(train_file):
_write(dataset, grapher, False, train_file, train_info_file,
write_num_epochs, distort_inputs, True, node_labeling,
num_nodes, node_stride, neighborhood_assembly,
neighborhood_size)
eval_file = os.path.join(data_dir, EVAL_FILENAME)
eval_info_file = os.path.join(data_dir, EVAL_INFO_FILENAME)
if not tf.gfile.Exists(eval_file):
_write(dataset, grapher, True, eval_file, eval_info_file,
1, distort_inputs, False, node_labeling, num_nodes,
node_stride, neighborhood_assembly, neighborhood_size)
train_eval_file = os.path.join(data_dir, TRAIN_EVAL_FILENAME)
train_eval_info_file = os.path.join(data_dir, TRAIN_EVAL_INFO_FILENAME)
if distort_inputs and not tf.gfile.Exists(train_eval_file):
_write(dataset, grapher, False, train_eval_file,
train_eval_info_file, 1, distort_inputs, False,
node_labeling, num_nodes, node_stride,
neighborhood_assembly, neighborhood_size)
@classmethod
def create(cls, config):
"""Static constructor to create a PatchySan dataset based on a json
object.
Args:
config: A configuration object with sensible defaults for
missing values.
Returns:
A PatchySan dataset.
"""
dataset_config = config['dataset']
grapher_config = config['grapher']
return cls(datasets[dataset_config['name']].create(dataset_config),
graphers[grapher_config['name']].create(grapher_config),
config.get('data_dir', DATA_DIR),
config.get('force_write', FORCE_WRITE),
config.get('write_num_epochs', WRITE_NUM_EPOCHS),
config.get('distort_inputs', DISTORT_INPUTS),
labelings.get(config.get('node_labeling')),
config.get('num_nodes', NUM_NODES),
config.get('node_stride', NODE_STRIDE),
neighb.get(config.get('neighborhood_assembly')),
config.get('neighborhood_size', NEIGHBORHOOD_SIZE))
@property
def train_filenames(self):
return [os.path.join(self.data_dir, TRAIN_FILENAME)]
@property
def eval_filenames(self):
return [os.path.join(self.data_dir, EVAL_FILENAME)]
@property
def train_eval_filenames(self):
if self._distort_inputs:
return [os.path.join(self.data_dir, TRAIN_EVAL_FILENAME)]
else:
return [os.path.join(self.data_dir, TRAIN_FILENAME)]
@property
def labels(self):
return self._dataset.labels
@property
def num_examples_per_epoch_for_train(self):
with open(os.path.join(self._data_dir, TRAIN_INFO_FILENAME), 'r') as f:
count = json.load(f)['count']
return min(count, self._dataset.num_examples_per_epoch_for_train)
@property
def num_examples_per_epoch_for_eval(self):
with open(os.path.join(self._data_dir, EVAL_INFO_FILENAME), 'r') as f:
count = json.load(f)['count']
return min(count, self._dataset.num_examples_per_epoch_for_eval)
@property
def num_examples_per_epoch_for_train_eval(self):
if self._distort_inputs:
filename = os.path.join(self._data_dir, TRAIN_EVAL_INFO_FILENAME)
with open(filename, 'r') as f:
count = json.load(f)['count']
return min(count,
self._dataset.num_examples_per_epoch_for_train_eval)
else:
return self._dataset.num_examples_per_epoch_for_train
def read(self, filename_queue):
data, label = read_tfrecord(
filename_queue,
{'nodes': [-1, self._grapher.num_node_channels],
'neighborhood': [self._num_nodes, self._neighborhood_size]})
nodes = data['nodes']
# Convert the neighborhood to a feature map.
def _map_features(node):
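            # negative indices mark padded neighborhood slots and map to all-zero feature vectors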
i = tf.maximum(node, 0)
positive = tf.strided_slice(nodes, [i], [i+1], [1])
negative = tf.zeros([1, self._grapher.num_node_channels])
            return tf.where(node < 0, negative, positive)
data = tf.reshape(data['neighborhood'], [-1])
data = tf.cast(data, tf.int32)
data = tf.map_fn(_map_features, data, dtype=tf.float32)
shape = [self._num_nodes, self._neighborhood_size,
self._grapher.num_node_channels]
data = tf.reshape(data, shape)
return Record(data, shape, label)
def _write(dataset, grapher, eval_data, tfrecord_file, info_file,
write_num_epochs, distort_inputs, shuffle,
node_labeling, num_nodes, node_stride, neighborhood_assembly,
neighborhood_size):
writer = tf.python_io.TFRecordWriter(tfrecord_file)
iterate = iterator(dataset, eval_data, distort_inputs=distort_inputs,
num_epochs=write_num_epochs, shuffle=shuffle)
def _before(image, label):
nodes, adjacencies = grapher.create_graph(image)
# Only take the first adjacency matrix.
count = tf.shape(adjacencies)[0]
adjacency = tf.strided_slice(
adjacencies, [0, 0, 0], [count, count, 1], [1, 1, 1])
adjacency = tf.squeeze(adjacency, axis=2)
sequence = node_labeling(adjacency)
sequence = node_sequence(sequence, num_nodes, node_stride)
neighborhood = neighborhood_assembly(adjacency, sequence,
neighborhood_size)
return [nodes, neighborhood, label]
def _each(output, index, last_index):
write_tfrecord(writer,
{'nodes': output[0], 'neighborhood': output[1]},
output[2])
sys.stdout.write(
'\r>> Saving graphs to {} {:.1f}%'
.format(tfrecord_file, 100.0 * index / last_index))
sys.stdout.flush()
def _done(index, last_index):
print('')
print('Successfully saved {} graphs to {}.'
.format(index, tfrecord_file))
with open(info_file, 'w') as f:
json.dump({'count': index}, f)
iterate(_each, _before, _done)
| mit | 1,591,233,635,957,854,200 | 36.892116 | 79 | 0.591984 | false |
AlanJAS/iknowAmerica | recursos/0ecuador/datos/0ecuador.py | 1 | 6073 | # -*- coding: utf-8 -*-
from gettext import gettext as _
NAME = _('Ecuador')
STATES = [
(_('Pichincha'), 254, 335, 210, 0),
(_('Guayas'), 253, 138, 472, 0),
(_('Azuay'), 252, 242, 622, 0),
(_('Manabí'), 251, 122, 316, 0),
(_('Esmeraldas'), 250, 234, 117, 0),
(_('El Oro'), 249, 158, 693, 0),
(_('Los Rios'), 248, 189, 415, 70),
(_('Loja'), 247, 177, 771, 0),
(_('Cañar'), 246, 276, 556, 0),
(_('Chimborazo'), 245, 302, 474, 60),
(_('Bolívar'), 244, 255, 425, 90),
(_('Tungurahua'), 243, 333, 386, 0),
(_('Cotopaxi'), 242, 291, 327, 0),
(_('Imbabura'), 241, 365, 160, 0),
(_('Carchi'), 240, 392, 103, -30),
(_('Zamora Chinchipe'), 239, 283, 784, 60),
(_('Morona Santiago'), 238, 432, 558, 0),
(_('Pastaza'), 237, 558, 449, 0),
(_('Napo'), 236, 409, 302, 0),
(_('Orellana'), 235, 630, 218, 0),
(_('Sucumbíos'), 234, 630, 320, 0),
(_('Santo Domingo de los Tsáchilas'), 233, 238, 249, 0),
(_('Colombia'), 231, 607, 66, 0),
(_('Perú'), 230, 570, 741, 0)
]
CAPITALS = [
(_('Quito'), 339, 243, 0, 0, -14),
(_('Ambato'), 325, 380, 1, 0, -14),
(_('Azogues'), 294, 582, 1, 0, -14),
(_('Babahoyo'), 203, 456, 1, 0, -14),
(_('Cuenca'), 275, 605, 1, 0, 14),
(_('Esmeraldas'), 179, 80, 1, 35, 14),
(_('Guaranda'), 273, 426, 1, 0, -14),
(_('Guayaquil'), 145, 504, 1, 0, 14),
(_('Ibarra'), 392, 162, 1, 0, 14),
(_('Latacunga'), 326, 335, 1, -20, -14),
(_('Loja'), 246, 755, 1, -15, -14),
(_('Macas'), 388, 526, 1, 0, -14),
(_('Machala'), 144, 652, 1, -30, -14),
(_('Nueva Loja'), 559, 198, 1, 0, -14),
(_('Portoviejo'), 75, 355, 1, 0, 14),
(_('Puerto Fsco. de Orellana'), 539, 270, 1, 0, 14),
(_('Puyo'), 414, 410, 1, 0, -14),
(_('Tena'), 435, 344, 1, 0, -14),
(_('Tulcán'), 441, 98, 1, -25, -14),
(_('Zamora'), 277, 764, 1, 15, 14)
]
CITIES = [
(_('Alausí'), 297, 507, 2, 0, -14),
(_('Baeza'), 422, 263, 2, 0, -14),
(_('Bahía de Caráquez'), 78, 295, 2, 0, -14),
(_('Cayambe'), 386, 202, 2, 0, 14),
(_('Chone'), 122, 303, 2, 0, 14),
(_('Cononaco'), 724, 414, 2, -30, -14),
(_('Jipijapa'), 58, 390, 2, 0, 14),
(_('Macará'), 144, 800, 2, 0, 14),
(_('Machachi'), 332, 279, 2, 0, 14),
(_('Manta'), 37, 341, 2, 0, -14),
(_('Milagro'), 191, 497, 2, 0, -14),
(_('Montalvo'), 547, 482, 2, 0, -14),
(_('Muisne'), 137, 120, 2, 0, 14),
(_('Naranjal'), 190, 575, 2, 0, 14),
(_('Nuevo Rocafuerte'), 751, 329, 2, -40, -14),
(_('Pasaje'), 157, 660, 2, 10, 14),
(_('Playas'), 86, 562, 2, 0, -14),
(_('Posorja'), 101, 574, 2, 0, 14),
(_('Puerto El Carmen'), 689, 195, 2, 0, 14),
(_('Quevedo'), 207, 347, 2, 20, -14),
(_('Río Corrientes'), 619, 527, 2, 0, 14),
(_('Río Tigre'), 663, 492, 2, 0, -14),
(_('Riobamba'), 325, 436, 2, 0, 14),
(_('Salinas'), 8, 508, 2, 30, -14),
(_('San Gabriel'), 434, 128, 2, 0, 14),
(_('San Lorenzo'), 295, 37, 2, 0, 14),
(_('Santo Domingo'), 251, 246, 2, 0, 14),
(_('Zumba'), 253, 877, 2, 0, -14)
]
RIVERS = [
(_('Curaray River'), 254, 596, 404, -10),
(_('Pintoyacu River'), 253, 533, 418, -5),
(_('Guayusa River'), 252, 646, 455, -30),
(_('Cunambo River'), 251, 630, 498, -25),
(_('Corrientes River'), 250, 647, 544, -40),
(_('Pastaza River'), 249, 421, 428, -60),
(_('Macuma River'), 248, 477, 553, 90),
(_('Morona River'), 247, 462, 637, -70),
(_('Mangosiza River'), 246, 407, 539, 90),
(_('Santiago River'), 245, 415, 667, 90),
(_('Numbala River'), 244, 275, 819, 70),
(_('Alamaor River'), 243, 90, 790, 50),
(_('Catamayo River'), 242, 179, 763, 30),
(_('Puyango River'), 241, 185, 732, 30),
(_('Zamora River'), 240, 333, 649, 70),
(_('Tenguel River'), 239, 185, 618, -40),
(_('Jagua River'), 238, 199, 581, 0),
(_('Caliu River'), 237, 191, 547, 0),
(_('Balsas River'), 236, 59, 472, 0),
(_('Daule River'), 235, 152, 401, 70),
(_('Chone River'), 234, 100, 323, -40),
(_('Reservoir Daule Peripa River'), 233, 174, 324, 45),
(_('Briceño River'), 232, 97, 271, -10),
(_('Blanco River'), 229, 253, 210, 30),
(_('Toachi River'), 228, 256, 262, -30),
(_('Esmeraldas River'), 227, 204, 105, -60),
(_('Onzole River'), 226, 255, 83, 70),
(_('Mira River'), 225, 357, 114, -45),
(_('San Juan River'), 224, 369, 81, -30),
(_('Nashiño River'), 223, 642, 348, -10),
(_('Coronaco River'), 222, 674, 400, -20),
(_('Tiguiño River'), 221, 536, 349, 0),
(_('Napo River'), 220, 502, 309, 45),
(_('Coca River'), 219, 515, 244, -60),
(_('Salado River'), 218, 412, 216, -50),
(_('Quijos River'), 217, 443, 262, 60),
(_('Aguarico River'), 216, 652, 237, 0),
(_('Lagarto Cocha River'), 215, 732, 250, -50),
(_('Putumayo River'), 214, 653, 149, -30),
(_('San Miguel River'), 213, 510, 173, -10),
(_('Babahoyo River'), 212, 201, 392, 75),
(_('Canal of Jambeli'), 211, 127, 632, -40),
(_('Gulf of Guayaquil'), 210, 32, 622, 90),
(_('Caráquez Bay'), 209, 106, 134, 0),
(_('Manta Bay'), 208, 93, 213, 0),
(_('Pacific Ocean'), 207, 174, 34, 0)
]
ROUTES = []
STATS = [
(_('Capital:'), _('Quito') + _("(0º13' S - 78º31' W)")),
(_('Language:'), _('Spanish')),
(_('Government:'), _('Presidential republic')),
(_('President:'), _('Rafael Correa')),
(_('Vice President:'), _('Jorge Glas')),
(_('Independence:'), _('from Spain')),
('', _('May 24, 1822')),
('', _('of Colombia: May 13, 1830')),
    (_('Area:'), '%(sup)s %(u)s (%(p)s)' % {'sup': _('256.370'), 'u': _('km²'), 'p': _('71st')}),
(_('Population:'), '%(v)s (%(p)s)' % {'v': _('16.144.000'), 'p': _('65th')}),
(_('GDP:'), '%(c)s %(v)s %(u)s (%(p)s)' % {'c': _('USD'), 'v': _('100.755'), 'u': _('billion'), 'p': _('64th')}),
(_('HDI:'), '%(l)s - %(v)s (%(p)s)' % {'l': _('High'), 'v': _('0,732'), 'p': _('88th')}),
(_('Currency:'), _('United States Dollar')),
(_('Updated:'), _('April 5, 2016'))
]
| gpl-3.0 | 55,632,034,487,499,710 | 38.051613 | 117 | 0.455972 | false |
internap/almanach | tests/builder.py | 1 | 4540 | # Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytz
from copy import copy
from datetime import datetime
from uuid import uuid4
from almanach.core.model import build_entity_from_dict, Instance, Volume, VolumeType
class Builder(object):
def __init__(self, dict_object):
self.dict_object = dict_object
class EntityBuilder(Builder):
def build(self):
return build_entity_from_dict(self.dict_object)
def with_id(self, entity_id):
self.dict_object["entity_id"] = entity_id
return self
def with_project_id(self, project_id):
self.dict_object["project_id"] = project_id
return self
def with_last_event(self, last_event):
self.dict_object["last_event"] = last_event
return self
def with_start(self, year, month, day, hour, minute, second):
self.with_datetime_start(datetime(year, month, day, hour, minute, second, tzinfo=pytz.utc))
return self
def with_datetime_start(self, date):
self.dict_object["start"] = date
return self
def with_end(self, year, month, day, hour, minute, second):
self.dict_object["end"] = datetime(year, month, day, hour, minute, second, tzinfo=pytz.utc)
return self
def with_no_end(self):
self.dict_object["end"] = None
return self
def with_flavor(self, flavor):
self.dict_object["flavor"] = flavor
return self
def with_metadata(self, metadata):
self.dict_object['metadata'] = metadata
return self
def build_from(self, other):
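        # start from a copy of another entity's attribute dictionary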
self.dict_object = copy(other.__dict__)
return self
def with_all_dates_in_string(self):
self.dict_object['start'] = self.dict_object['start'].strftime("%Y-%m-%dT%H:%M:%S.%fZ")
self.dict_object['last_event'] = self.dict_object['last_event'].strftime("%Y-%m-%dT%H:%M:%S.%fZ")
return self
class VolumeBuilder(EntityBuilder):
def with_attached_to(self, attached_to):
self.dict_object["attached_to"] = attached_to
return self
def with_no_attachment(self):
self.dict_object["attached_to"] = []
return self
def with_display_name(self, display_name):
self.dict_object["name"] = display_name
return self
def with_volume_type(self, volume_type):
self.dict_object["volume_type"] = volume_type
return self
class VolumeTypeBuilder(Builder):
def build(self):
return VolumeType(**self.dict_object)
def with_volume_type_id(self, volume_type_id):
self.dict_object["volume_type_id"] = volume_type_id
return self
def with_volume_type_name(self, volume_type_name):
self.dict_object["volume_type_name"] = volume_type_name
return self
def instance():
return EntityBuilder({
"entity_id": str(uuid4()),
"project_id": str(uuid4()),
"start": datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc),
"end": None,
"last_event": datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc),
"flavor": "A1.1",
"os": {
"os_type": "windows",
"distro": "windows",
"version": "2012r2"
},
"entity_type": Instance.TYPE,
"name": "some-instance",
"metadata": {
"a_metadata.to_filter": "include.this",
"a_metadata.to_exclude": "exclude.this"
}
})
def volume():
return VolumeBuilder({
"entity_id": str(uuid4()),
"project_id": str(uuid4()),
"start": datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc),
"end": None,
"last_event": datetime(2014, 1, 1, 0, 0, 0, 0, pytz.utc),
"volume_type": "SF400",
"size": 1000000,
"entity_type": Volume.TYPE,
"name": "some-volume",
"attached_to": None,
})
def volume_type():
return VolumeTypeBuilder({
"volume_type_id": str(uuid4()),
"volume_type_name": "a_type_name"
})
def a(builder):
return builder.build()
| apache-2.0 | 7,253,645,098,605,232,000 | 27.553459 | 105 | 0.609471 | false |
SegFaultAX/hotchip | tests/test_main.py | 1 | 1293 | # -*- coding: utf-8 -*-
from pytest import raises
# The parametrize function is generated, so this doesn't work:
#
# from pytest.mark import parametrize
#
import pytest
parametrize = pytest.mark.parametrize
from hotchip import metadata
from hotchip.main import main
class TestMain(object):
@parametrize('helparg', ['-h', '--help'])
def test_help(self, helparg, capsys):
with raises(SystemExit) as exc_info:
main(['progname', helparg])
out, err = capsys.readouterr()
# Should have printed some sort of usage message. We don't
# need to explicitly test the content of the message.
assert 'usage' in out
# Should have used the program name from the argument
# vector.
assert 'progname' in out
# Should exit with zero return code.
assert exc_info.value.code == 0
@parametrize('versionarg', ['-V', '--version'])
def test_version(self, versionarg, capsys):
with raises(SystemExit) as exc_info:
main(['progname', versionarg])
out, err = capsys.readouterr()
# Should print out version.
assert err == '{0} {1}\n'.format(metadata.project, metadata.version)
# Should exit with zero return code.
assert exc_info.value.code == 0
| mit | 4,576,581,993,130,779,600 | 33.026316 | 76 | 0.637278 | false |
crichardson17/starburst_atlas | SFH_comparison/data/Padova_inst/padova_inst_6/fullgrid/peaks_reader.py | 1 | 5057 | import csv
import matplotlib.pyplot as plt
from numpy import *
import scipy.interpolate
import math
from pylab import *
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import matplotlib.patches as patches
from matplotlib.path import Path
import os
# ---------------------------------------------------
headerloc = "/Users/helen/Documents/Elon/Thesis_Research/github_repo/starburst_atlas/headers_dir/headers.txt"
# ------------------------------------------------------------------------------------------------------
#data files' names from source directory constructed here. default source directory is working directory
numFiles = 3 #change this if you have more/less files
gridFiles = [None]*numFiles
emissionFiles = [None]*numFiles
for i in range(numFiles):
for file in os.listdir('.'):
if file.endswith("{:d}.grd".format(i+1)):
gridFiles[i] = file
#keep track of all the files you'll be importing by printing
#print file
if file.endswith("{:d}.txt".format(i+1)):
emissionFiles[i] = file
#keep track of all the files you'll be importing by printing
#print file
print ("Files names constructed")
# ---------------------------------------------------
#this is where the grid information (phi and hdens) is read in and saved to grid.
print("Beginning file import")
for i in range(numFiles):
gridI = [];
with open(gridFiles[i], 'rb') as f:
csvReader = csv.reader(f, delimiter='\t')
for row in csvReader:
gridI.append(row)
gridI = asarray(gridI)
gridI = gridI[1:,6:8]
if ( i == 0 ):
grid = gridI
else :
grid = concatenate((grid,gridI))
for i in range(numFiles):
emissionLineI = [];
with open(emissionFiles[i], 'rb') as f:
csvReader = csv.reader(f, delimiter='\t')
headers = csvReader.next()
for row in csvReader:
emissionLineI.append(row)
emissionLineI = asarray(emissionLineI)
emissionLineI = emissionLineI[:,1:]
if ( i == 0 ):
Emissionlines = emissionLineI
else :
Emissionlines = concatenate((Emissionlines,emissionLineI))
hdens_values = grid[:,1]
phi_values = grid[:,0]
print("Import files complete")
#To fix when hdens > 10
#many of my grids were run off with hdens up to 12 so we needed to cut off part of the data
#first create temorary arrays
print("modifications begun")
hdens_values_2 = empty(shape=[0, 1])
phi_values_2 = empty(shape=[0, 1])
Emissionlines_2 = empty(shape=[0, len(Emissionlines[0,:])])
#save data in range desired to temp arrays
for i in range(len(hdens_values)):
if (float(hdens_values[i]) < 6.100) & (float(phi_values[i]) < 17.100) :
hdens_values_2 = append(hdens_values_2, hdens_values[i])
phi_values_2 = append(phi_values_2, phi_values[i])
Emissionlines_2 = vstack([Emissionlines_2, Emissionlines[i,:]])
#overwrite old arrays
hdens_values = hdens_values_2
phi_values = phi_values_2
Emissionlines = Emissionlines_2
print("modifications complete")
# ---------------------------------------------------
# ---------------------------------------------------
#there are the emission line names properly formatted
print("Importing headers from header file")
headersFile = open(headerloc,'r')
headers = headersFile.read().splitlines()
headersFile.close()
# ---------------------------------------------------
#constructing grid by scaling
#select the scaling factor
#for 1215
#incident = Emissionlines[1:,4]
concatenated_data = zeros((len(Emissionlines),len(Emissionlines[0])))
max_values = zeros((len(concatenated_data[0]),4))
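#columns of max_values: peak value, row index of the peak, hdens at the peak, phi at the peak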
#for 4860
incident = concatenated_data[:,57]
#take the ratio of incident and all the lines and put it all in an array concatenated_data
for i in range(len(Emissionlines)):
for j in range(len(Emissionlines[0])):
if math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10) > 0:
concatenated_data[i,j] = math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10)
else:
			concatenated_data[i,j] = 0
# for 1215
#for i in range(len(Emissionlines)):
# for j in range(len(Emissionlines[0])):
# if math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10) > 0:
# concatenated_data[i,j] = math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10)
# else:
# concatenated_data[i,j] == 0
# ---------------------------------------------------
#find the maxima to plot onto the contour plots
for j in range(len(concatenated_data[0])):
max_values[j,0] = max(concatenated_data[:,j])
max_values[j,1] = argmax(concatenated_data[:,j], axis = 0)
max_values[j,2] = hdens_values[max_values[j,1]]
max_values[j,3] = phi_values[max_values[j,1]]
#to round off the maxima
max_values[:,0] = [ '%.1f' % elem for elem in max_values[:,0] ]
print "data arranged"
# ---------------------------------------------------
#Creating the grid to interpolate with for contours.
gridarray = zeros((len(concatenated_data),2))
gridarray[:,0] = hdens_values
gridarray[:,1] = phi_values
x = gridarray[:,0]
y = gridarray[:,1]
# ---------------------------------------------------
savetxt('peaks_Padova_inst_6', max_values, delimiter='\t')
| gpl-2.0 | -5,176,362,807,463,443,000 | 32.490066 | 109 | 0.636741 | false |
luoshao23/ML_algorithm | luolearn/metrics/classification.py | 1 | 1875 | import numpy as np
from scipy.sparse import csr_matrix
from ..utils import column_or_1d
from ..utils import check_consistent_length
from ..utils.multiclass import type_of_target
from ..utils.sparsefuncs import count_nonzero
def _check_targets(y_true, y_pred):
check_consistent_length(y_true, y_pred)
type_true = type_of_target(y_true)
type_pred = type_of_target(y_pred)
y_type = set([type_true, type_pred])
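    # a mixture of binary and multiclass targets is treated as multiclass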
if y_type == set(["binary", "multiclass"]):
y_type = set(["multiclass"])
if len(y_type) > 1:
raise ValueError("Cannot handle!")
y_type = y_type.pop()
if (y_type not in ["binary", "multiclass", "multilabel-indicator"]):
raise ValueError("{0} is not supported".format(y_type))
if y_type in ["binary", "multiclass"]:
y_true = column_or_1d(y_true)
y_pred = column_or_1d(y_pred)
if y_pred == "binary":
unique_values = np.union1d(y_true, y_pred)
if len(unique_values) > 2:
y_type = "multiclass"
if y_type.startswith('multilabel'):
y_true = csr_matrix(y_true)
y_pred = csr_matrix(y_pred)
y_type = 'multilabel-indicator'
return y_type, y_true, y_pred
def _weighted_sum(sample_score, sample_weight, normalize=False):
if normalize:
return np.average(sample_score, weights=sample_weight)
elif sample_weight is not None:
return np.dot(sample_score, sample_weight)
else:
return sample_score.sum()
def accuracy_score(y_true, y_pred, normalize=True, sample_weight=None):
y_type, y_true, y_pred = _check_targets(y_true, y_pred)
if y_type.startswith('multilabel'):
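        # a multilabel sample counts as correct only when no label entry differs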
differing_labels = count_nonzero(y_true - y_pred, axis=1)
score = differing_labels == 0
else:
score = y_true == y_pred
return _weighted_sum(score, sample_weight, normalize)
| mit | -3,364,809,980,190,395,400 | 30.25 | 72 | 0.629333 | false |
yuhaya/vim | .ycm_extra_conf.py | 1 | 6555 | # This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
'-Wc++98-compat',
'-Wno-long-long',
'-Wno-variadic-macros',
'-fexceptions',
'-DNDEBUG',
# You 100% do NOT need -DUSE_CLANG_COMPLETER in your flags; only the YCM
# source code needs it.
'-DUSE_CLANG_COMPLETER',
# THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which
# language to use when compiling headers. So it will guess. Badly. So C++
# headers will be compiled as C headers. You don't want that so ALWAYS specify
# a "-std=<something>".
# For a C project, you would set this to something like 'c99' instead of
# 'c++11'.
'-std=c++11',
#'-std=c99',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c++',
#'c',
'-isystem',
'../BoostParts',
'-isystem',
# This path will only work on OS X, but extra paths that don't exist are not
# harmful
'/System/Library/Frameworks/Python.framework/Headers',
'-isystem',
'../llvm/include',
'-isystem',
'../llvm/tools/clang/include',
'-I',
'.',
'-I',
'./ClangCompleter',
'-isystem',
'./tests/gmock/gtest',
'-isystem',
'./tests/gmock/gtest/include',
'-isystem',
'./tests/gmock',
'-isystem',
'./tests/gmock/include',
# me
'-isystem',
'/Library/Developer/CommandLineTools/usr/include',
'-isystem',
'/usr/include',
'-isystem',
'/usr/include/c++/4.2.1',
'-isystem',
'/usr/local/include',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
# NOTE: This is just for YouCompleteMe; it's highly likely that your project
# does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR
# ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.
try:
final_flags.remove( '-stdlib=libc++' )
except ValueError:
pass
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
| apache-2.0 | 1,071,250,335,781,771,500 | 32.106061 | 80 | 0.700686 | false |
amboycharlie/Child-Friendly-LCMS | leonardo/module/web/widgets/mixins.py | 1 | 6783 |
from __future__ import unicode_literals
import json
from django.db import models
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from .const import PAGINATION_CHOICES
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
class ListWidgetMixin(models.Model):
"""Provide API for listing items
"""
objects_per_page = models.PositiveIntegerField(
verbose_name=_('Objects per page'), blank=True, default=6)
objects_per_row = models.PositiveIntegerField(
verbose_name=_('Objects per row'), blank=True, default=3)
pagination_style = models.CharField(
verbose_name=_("Pagination Style"), max_length=50,
choices=PAGINATION_CHOICES, default='paginator')
def get_items(self, request=None):
'''returns queryset or array of items for listing'''
raise NotImplementedError
def filter_items(self, items):
        '''performs filtering of items by specific criteria'''
return items
def set_items(self, items):
'''just setter for items'''
self._items = items
@cached_property
def items(self):
'''access for filtered items'''
if hasattr(self, '_items'):
return self.filter_items(self._items)
self._items = self.get_items()
return self.filter_items(self._items)
def populate_items(self, request):
        '''populates and returns filtered items'''
self._items = self.get_items(request)
return self.items
@cached_property
def get_list_template(self):
'''returns base list template by pagination_style'''
return "base/widget/list/_%s.html" % self.pagination_style
@cached_property
def get_rows(self):
        '''returns rows of items, grouped by ``objects_per_row``,
        e.g. [[item1, item2, item3], [item4]]'''
rows = []
row = []
for i, item in enumerate(self.items):
if self.objects_per_row == i:
rows.append(row)
row = []
i = 0
row.append(item)
rows.append(row)
return rows
@cached_property
def columns_classes(self):
        '''returns (md, sm, xs) grid column widths based on ``objects_per_row``'''
md = 12 / self.objects_per_row
sm = None
if self.objects_per_row > 2:
sm = 12 / (self.objects_per_row / 2)
return md, (sm or md), 12
@cached_property
def get_pages(self):
'''returns pages with rows'''
pages = []
page = []
for i, item in enumerate(self.get_rows):
if self.objects_per_page == i:
pages.append(page)
page = []
i = 0
page.append(item)
pages.append(page)
return pages
@cached_property
def needs_pagination(self):
if self.objects_per_page == 0:
return False
if len(self.items) > self.objects_per_page \
or len(self.get_pages[0]) >= self.objects_per_page:
return True
return False
@cached_property
def get_item_template(self):
'''returns template for one item from queryset'''
return "widget/%s/_item.html" % self.widget_name
def __init__(self, *args, **kwargs):
super(ListWidgetMixin, self).__init__(*args, **kwargs)
get_items = getattr(self, 'get_items', None)
render = getattr(self, 'render', None)
if not callable(get_items) or not callable(render):
            raise Exception('classes based on ListWidgetMixin must '
                            'implement get_items and render methods')
class Meta:
abstract = True
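# Illustrative sketch (not part of the original module): a concrete widget is
# assumed to mix ListWidgetMixin into a widget model that already provides
# ``widget_name`` and ``render``, overriding only the item lookup. The
# ``Article`` model and ``Widget`` base class here are invented names.
#
#     class ArticleListWidget(ListWidgetMixin, Widget):
#
#         def get_items(self, request=None):
#             return Article.objects.filter(published=True)
#
#         def filter_items(self, items):
#             return items.order_by('-created')
#
# Templates would then iterate ``widget.get_rows`` (or ``widget.get_pages``)
# and use ``widget.columns_classes`` for the grid column widths.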
class ContentProxyWidgetMixin(models.Model):
    """Provide basic fields and routines
    for loading and caching data from an external resource.
    Define your implementation for getting data in ``get_data``
    and use the ``data`` property in your templates.
    """
source_address = models.CharField(
verbose_name=_("Source Address"), max_length=255)
cache_validity = models.PositiveIntegerField(
verbose_name=_('Cache validity'), default=3600)
cache_data = models.TextField(
verbose_name=_("Cache data"), blank=True)
cache_updated = models.DateTimeField(
verbose_name=_('Cache update'),
editable=False, null=True, blank=True)
@cached_property
def address_parser(self):
return urlparse(self.source_address)
@cached_property
def get_port(self):
"""returns parsed port from ``source_address``
"""
return self.address_parser.port
@cached_property
def get_host(self):
"""returns parsed host from ``source_address``
"""
return self.address_parser.hostname
    def is_obsolete(self):
        """returns True if data is obsolete and needs revalidation
"""
if self.cache_updated:
now = timezone.now()
delta = now - self.cache_updated
if delta.seconds < self.cache_validity:
return False
return True
def update_cache(self, data=None):
"""call with new data or set data to self.cache_data and call this
"""
if data:
self.cache_data = data
self.cache_updated = timezone.now()
self.save()
def get_data(self, *args, **kwargs):
"""define your behavior for loading raw data
"""
raise NotImplementedError
@property
    def data(self):
        """this property just calls ``get_data``,
        but here you can serialize your data or render it as html;
        the result is saved to ``self.cache_data``
        and is also accessible from templates
        """
if self.is_obsolete():
self.update_cache(self.get_data())
return self.cache_data
class Meta:
abstract = True
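# Illustrative sketch (not part of the original module): a widget based on
# ContentProxyWidgetMixin only has to implement ``get_data``; caching and
# revalidation are handled by ``data``/``update_cache``. The ``Widget`` base
# class and the ``requests`` dependency below are assumptions of this sketch.
#
#     class RemoteHtmlWidget(ContentProxyWidgetMixin, Widget):
#
#         def get_data(self, *args, **kwargs):
#             import requests
#             return requests.get(self.source_address).text
#
# Reading ``widget.data`` in a template then returns the cached content and
# refreshes it only when ``is_obsolete()`` is True.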
class JSONContentMixin(object):
    """expects JSON data from the ``get_data`` method
"""
@property
def data(self):
"""load and cache data in json format
"""
if self.is_obsolete():
self.cache_data = json.dumps(self.get_data())
self.update_cache()
return json.loads(self.cache_data)
class AuthContentProxyWidgetMixin(models.Model):
"""widget mixin for getting remote content with credentials
"""
username = models.CharField(
verbose_name=_("Username"), max_length=255, blank=True, null=True)
password = models.CharField(
verbose_name=_('Password'), max_length=255, blank=True, null=True)
token = models.CharField(
verbose_name=_('API Token'), max_length=255, blank=True, null=True)
class Meta:
abstract = True
| apache-2.0 | 7,673,815,014,192,175,000 | 27.620253 | 75 | 0.600914 | false |
AQuadroTeam/CellsCycle | doc/tests/list-communication/KeyTest.py | 1 | 6180 | from CellCycle.ChainModule.ListThread import *
from start import loadSettings
from start import loadLogger
from CellCycle.MemoryModule.calculateSon import calculateSonId
def add_check():
currentProfile = {"profile_name": "alessandro_fazio", "key_pair": "AWSCellCycle", "branch": "ListUtilities"}
settings_to_launch = loadSettings(currentProfile=currentProfile)
logger_to_launch = loadLogger(settings_to_launch)
n1 = Node("1", "172.10.1.1", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "0", "19")
n2 = Node("2", "172.10.1.2", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "20", "39")
n3 = Node("3", "172.10.1.3", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "40", "59")
n4 = Node("4", "172.10.1.4", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "60", "79")
n5 = Node("5", "172.10.1.5", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "80", "99")
i3 = ListThread(master_of_master=n1, master=n2, myself=n3, slave=n4, slave_of_slave=n5, logger=logger_to_launch,
settings=settings_to_launch, name=n3.id)
i4 = ListThread(master_of_master=n2, master=n3, myself=n4, slave=n5, slave_of_slave=n1, logger=logger_to_launch,
settings=settings_to_launch, name=n4.id)
i5 = ListThread(master_of_master=n3, master=n4, myself=n5, slave=n1, slave_of_slave=n2, logger=logger_to_launch,
settings=settings_to_launch, name=n5.id)
i1 = ListThread(master_of_master=n4, master=n5, myself=n1, slave=n2, slave_of_slave=n3, logger=logger_to_launch,
settings=settings_to_launch, name=n1.id)
i2 = ListThread(master_of_master=n5, master=n1, myself=n2, slave=n3, slave_of_slave=n4, logger=logger_to_launch,
settings=settings_to_launch, name=n2.id)
# pretend that we add the new node
m_o = MemoryObject(n1, n2, n3, n4, n5)
new_min_max_key = keyCalcToCreateANewNode(m_o).newNode
new_node_id_to_add = str(calculateSonId(float(n3.id), float(n4.id)))
new_node_instance_to_add = Node(new_node_id_to_add, None, settings_to_launch.getIntPort(),
settings_to_launch.getExtPort(),
new_min_max_key.min_key, new_min_max_key.max_key)
'''
logger_to_launch.debug("########## BEFORE ADD ############")
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
'''
logger_to_launch.debug("########## AFTER ADD #############")
i4.change_added_keys_to(n3.id)
i4.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i4.change_parents_from_list()
i5.change_added_keys_to(n3.id)
i5.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i5.change_parents_from_list()
i1.change_added_keys_to(n3.id)
i1.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i1.change_parents_from_list()
i2.change_added_keys_to(n3.id)
i2.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i2.change_parents_from_list()
i3.change_added_keys_to(n3.id)
i3.test_update(source_id=n3.id, target_relative_id=n4.id, node_to_add=new_node_instance_to_add)
i3.change_parents_from_list()
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
def dead_check():
currentProfile = {"profile_name": "alessandro_fazio", "key_pair": "AWSCellCycle", "branch": "ListUtilities"}
settings_to_launch = loadSettings(currentProfile=currentProfile)
logger_to_launch = loadLogger(settings_to_launch)
n1 = Node("1", "172.10.1.1", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "12", "19")
n2 = Node("2", "172.10.1.2", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "20", "39")
n3 = Node("3", "172.10.1.3", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "40", "59")
n4 = Node("4", "172.10.1.4", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "60", "79")
n5 = Node("5", "172.10.1.5", settings_to_launch.getIntPort(), settings_to_launch.getExtPort(), "80", "11")
i3 = ListThread(master_of_master=n1, master=n2, myself=n3, slave=n4, slave_of_slave=n5, logger=logger_to_launch,
settings=settings_to_launch, name=n3.id)
i4 = ListThread(master_of_master=n2, master=n3, myself=n4, slave=n5, slave_of_slave=n1, logger=logger_to_launch,
settings=settings_to_launch, name=n4.id)
i5 = ListThread(master_of_master=n3, master=n4, myself=n5, slave=n1, slave_of_slave=n2, logger=logger_to_launch,
settings=settings_to_launch, name=n5.id)
i1 = ListThread(master_of_master=n4, master=n5, myself=n1, slave=n2, slave_of_slave=n3, logger=logger_to_launch,
settings=settings_to_launch, name=n1.id)
'''
logger_to_launch.debug("########## BEFORE ADD ############")
i1.print_relatives()
i2.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
'''
logger_to_launch.debug("########## AFTER DEAD #############")
i4.change_dead_keys_to(n3.id)
i4.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i4.change_parents_from_list()
i5.change_dead_keys_to(n3.id)
i5.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i5.change_parents_from_list()
i1.change_dead_keys_to(n3.id)
i1.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i1.change_parents_from_list()
i3.change_dead_keys_to(n3.id)
i3.test_remove(target_id=n2.id, source_id=n3.id, target_relative_id=n1.id)
i3.change_parents_from_list()
i1.print_relatives()
i3.print_relatives()
i4.print_relatives()
i5.print_relatives()
logger_to_launch.debug("this is the ip found {}".format((i1.node_list.find_memory_key(0)).target.ip))
| mit | -7,520,850,627,014,340,000 | 49.243902 | 116 | 0.652427 | false |
sweon/edx-dl | edx_dl/common.py | 1 | 5361 | # -*- coding: utf-8 -*-
"""
Common type definitions and constants for edx-dl
The classes in this module represent the structure of courses in edX. The
structure is:
* A Course contains Sections
* Each Section contains Subsections
* Each Subsection contains Units
Notice that we don't represent the full tree structure for both performance
and UX reasons:
Course -> [Section] -> [SubSection] -> [Unit] -> [Video]
In the script the data structures used are:
1. The data structures to represent the course information:
Course, Section->[SubSection]
2. The data structures to represent the chosen courses and sections:
selections = {Course, [Section]}
3. The data structure of all the downloadable resources, which represents each
subsection via its URL and the list of resources that can be extracted from the
Units it contains:
all_units = {Subsection.url: [Unit]}
4. The units can contain multiple videos:
Unit -> [Video]
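For illustration only (an assumed reading of the structures above; the literal
values are invented, not taken from edX):

    course = Course(id='org/CS101/2015', name='Intro', url='...', state='Started')
    selections = {course: [section1, section2]}
    all_units = {subsection.url: [unit1, unit2]}

where each Unit in turn holds its Video objects and extra resource URLs.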
"""
class Course(object):
"""
Course class represents course information.
"""
def __init__(self, id, name, url, state):
"""
        @param id: The id of a course in edX is composed of the path
{organization}/{course_number}/{course_run}
@type id: str or None
@param name: Name of the course. The name is taken from course page
h3 header.
@type name: str
@param url: URL of the course.
@type url: str or None
@param state: State of the course. One of the following values:
* 'Not yet'
* 'Started'
@type state: str
"""
self.id = id
self.name = name
self.url = url
self.state = state
def __repr__(self):
url = self.url if self.url else "None"
return self.name + ": " + url
class Section(object):
"""
Representation of a section of the course.
"""
def __init__(self, position, name, url, navsections):
"""
@param position: Integer position of the section in the list of
sections. Starts at 1.
@type position: int
@param name: Name of the section.
@type name: str
@param url: URL of the section. None when section contains no
subsections.
@type url: str or None
@param navsections: List of navsections.
@type navsections: [NavSection]
"""
self.position = position
self.name = name
self.url = url
self.navsections = navsections
class NavSection(object):
def __init__(self, position, name, subsections):
self.position = position
self.name = name
self.subsections = subsections
class SubSection(object):
"""
Representation of a subsection in a section.
"""
def __init__(self, position, name, url):
"""
@param position: Integer position of the subsection in the subsection
list. Starts at 1.
@type position: int
@param name: Name of the subsection.
@type name: str
@param url: URL of the subsection.
@type url: str
"""
self.position = position
self.name = name
self.url = url
def __repr__(self):
return self.name + ": " + self.url
class Unit(object):
"""
Representation of a single unit of the course.
"""
def __init__(self, videos, resources_urls, position):
"""
@param videos: List of videos present in the unit.
@type videos: [Video]
        @param resources_urls: List of additional resources that come along
with the unit. Resources include files with certain extensions
and youtube links.
@type resources_urls: [str]
@param position: Integer position of the subsection in the subsection
list. Starts at 1.
@type position: int
"""
self.position = position
self.videos = videos
self.resources_urls = resources_urls
class Video(object):
"""
Representation of a single video.
"""
def __init__(self, video_youtube_url, available_subs_url,
sub_template_url, mp4_urls):
"""
@param video_youtube_url: Youtube link (if any).
@type video_youtube_url: str or None
@param available_subs_url: URL to the available subtitles.
@type available_subs_url: str
@param sub_template_url: ???
@type sub_template_url: str
@param mp4_urls: List of URLs to mp4 video files.
@type mp4_urls: [str]
"""
self.video_youtube_url = video_youtube_url
self.available_subs_url = available_subs_url
self.sub_template_url = sub_template_url
self.mp4_urls = mp4_urls
class ExitCode(object):
"""
Class that contains all exit codes of the program.
"""
OK = 0
MISSING_CREDENTIALS = 1
WRONG_EMAIL_OR_PASSWORD = 2
MISSING_COURSE_URL = 3
INVALID_COURSE_URL = 4
UNKNOWN_PLATFORM = 5
NO_DOWNLOADABLE_VIDEO = 6
YOUTUBE_DL_CMD = ['youtube-dl', '--ignore-config']
DEFAULT_CACHE_FILENAME = 'edx-dl.cache'
DEFAULT_FILE_FORMATS = ['e?ps', 'pdf', 'txt', 'doc', 'xls', 'ppt',
'docx', 'xlsx', 'pptx', 'odt', 'ods', 'odp', 'odg',
'zip', 'rar', 'gz', 'mp3', 'R', 'Rmd', 'ipynb', 'py']
| lgpl-3.0 | -4,726,805,692,538,409,000 | 27.365079 | 79 | 0.598769 | false |
jbasko/pytest-random-order | random_order/bucket_types.py | 1 | 1572 | import functools
import os.path
from collections import OrderedDict
bucket_type_keys = OrderedDict()
def bucket_type_key(bucket_type):
"""
Registers a function that calculates test item key for the specified bucket type.
"""
def decorator(f):
@functools.wraps(f)
def wrapped(item, session):
key = f(item)
if session is not None:
for handler in session.random_order_bucket_type_key_handlers:
key = handler(item, key)
return key
bucket_type_keys[bucket_type] = wrapped
return wrapped
return decorator
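# Illustrative note (not part of the original module): plugins can refine the
# computed key because ``wrapped`` passes it through every callable found in
# ``session.random_order_bucket_type_key_handlers``. Appending, for example,
#     lambda item, key: (getattr(item, 'own_marker', None), key)
# would additionally group items by a hypothetical ``own_marker`` attribute.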
@bucket_type_key('global')
def get_global_key(item):
return None
@bucket_type_key('package')
def get_package_key(item):
if not hasattr(item, "module"):
return os.path.split(item.location[0])[0]
return item.module.__package__
@bucket_type_key('module')
def get_module_key(item):
return item.location[0]
@bucket_type_key('class')
def get_class_key(item):
if not hasattr(item, "cls"):
return item.location[0]
if item.cls:
return item.module.__name__, item.cls.__name__
else:
return item.module.__name__
@bucket_type_key('parent')
def get_parent_key(item):
return item.parent
@bucket_type_key('grandparent')
def get_grandparent_key(item):
return item.parent.parent
@bucket_type_key('none')
def get_none_key(item):
raise RuntimeError('When shuffling is disabled (bucket_type=none), item key should not be calculated')
bucket_types = bucket_type_keys.keys()
| mit | 7,146,111,292,078,620,000 | 20.534247 | 106 | 0.646947 | false |
okfse/froide | froide/foirequest/urls.py | 1 | 3247 | from django.utils.six import text_type as str
from django.core.urlresolvers import reverse
from django.conf.urls import patterns
from django.utils.translation import pgettext
from django.shortcuts import redirect
from .models import FoiRequest
urlpatterns = patterns("froide.foirequest.views",
(r'^%s/$' % pgettext('URL part', 'not-foi'), 'list_requests',
{'not_foi': True}, 'foirequest-list_not_foi'),
# Old feed URL
(r'^%s/feed/$' % pgettext('URL part', 'latest'),
lambda r: redirect(reverse('foirequest-list_feed_atom'), permanent=True),
{}, 'foirequest-feed_latest_atom'),
(r'^%s/rss/$' % pgettext('URL part', 'latest'),
lambda r: redirect(reverse('foirequest-list_feed'), permanent=True),
{}, 'foirequest-feed_latest'),
(r'^unchecked/$', 'list_unchecked', {}, 'foirequest-list_unchecked'),
# Translators: part in /request/to/public-body-slug URL
(r'^submit$', 'submit_request', {}, 'foirequest-submit_request'),
)
foirequest_urls = [
(r'^$', 'list_requests', {}, 'foirequest-list'),
(r'^feed/$', 'list_requests',
{'feed': 'atom'}, 'foirequest-list_feed_atom'),
(r'^rss/$', 'list_requests',
{'feed': 'rss'}, 'foirequest-list_feed'),
# Translators: part in request filter URL
(r'^%s/(?P<topic>[-\w]+)/$' % pgettext('URL part', 'topic'), 'list_requests', {},
'foirequest-list'),
(r'^%s/(?P<topic>[-\w]+)/feed/$' % pgettext('URL part', 'topic'), 'list_requests',
{'feed': 'atom'}, 'foirequest-list_feed_atom'),
(r'^%s/(?P<topic>[-\w]+)/rss/$' % pgettext('URL part', 'topic'), 'list_requests',
{'feed': 'rss'}, 'foirequest-list_feed'),
# Translators: part in request filter URL
(r'^%s/(?P<tag>[-\w]+)/$' % pgettext('URL part', 'tag'), 'list_requests', {},
'foirequest-list'),
(r'^%s/(?P<tag>[-\w]+)/feed/$' % pgettext('URL part', 'tag'), 'list_requests',
{'feed': 'atom'}, 'foirequest-list_feed_atom'),
(r'^%s/(?P<tag>[-\w]+)/rss/$' % pgettext('URL part', 'tag'), 'list_requests',
{'feed': 'rss'}, 'foirequest-list_feed'),
# Translators: part in request filter URL
(r'^%s/(?P<public_body>[-\w]+)/$' % pgettext('URL part', 'to'), 'list_requests', {},
'foirequest-list'),
(r'^%s/(?P<public_body>[-\w]+)/feed/$' % pgettext('URL part', 'to'), 'list_requests',
{'feed': 'atom'}, 'foirequest-list_feed_atom'),
(r'^%s/(?P<public_body>[-\w]+)/rss/$' % pgettext('URL part', 'to'), 'list_requests',
{'feed': 'rss'}, 'foirequest-list_feed'),
] + [(r'^(?P<status>%s)/$' % str(urlinfo[0]), 'list_requests', {},
'foirequest-list') for urlinfo in FoiRequest.STATUS_URLS
] + [(r'^(?P<status>%s)/feed/$' % str(urlinfo[0]), 'list_requests',
{'feed': 'atom'},
'foirequest-list_feed_atom') for urlinfo in FoiRequest.STATUS_URLS
] + [(r'^(?P<status>%s)/rss/$' % str(urlinfo[0]), 'list_requests',
{'feed': 'rss'},
'foirequest-list_feed') for urlinfo in FoiRequest.STATUS_URLS]
urlpatterns += patterns("froide.foirequest.views",
*foirequest_urls
)
urlpatterns += patterns("froide.foirequest.views",
*[(r'^(?P<jurisdiction>[-\w]+)/%s' % r[0][1:], r[1], r[2], r[3]) for r in foirequest_urls]
)
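# Illustrative note (not part of the original file): assuming each entry of
# FoiRequest.STATUS_URLS is a (slug, status) pair, a hypothetical slug such as
# 'successful' expands to patterns like
#     r'^successful/$'      -> 'foirequest-list'
#     r'^successful/feed/$' -> 'foirequest-list_feed_atom'
# and the second ``patterns()`` call above additionally prefixes every entry
# with r'^(?P<jurisdiction>[-\w]+)/'.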
| mit | -6,307,762,861,912,977,000 | 42.293333 | 94 | 0.581152 | false |
owlabs/incubator-airflow | tests/contrib/operators/test_aws_athena_operator.py | 1 | 7752 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator
from airflow.contrib.hooks.aws_athena_hook import AWSAthenaHook
from airflow.models import DAG, TaskInstance
from airflow.utils import timezone
from airflow.utils.timezone import datetime
from tests.compat import mock
TEST_DAG_ID = 'unit_tests'
DEFAULT_DATE = datetime(2018, 1, 1)
ATHENA_QUERY_ID = 'eac29bf8-daa1-4ffc-b19a-0db31dc3b784'
MOCK_DATA = {
'task_id': 'test_aws_athena_operator',
'query': 'SELECT * FROM TEST_TABLE',
'database': 'TEST_DATABASE',
'outputLocation': 's3://test_s3_bucket/',
'client_request_token': 'eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
'workgroup': 'primary'
}
query_context = {
'Database': MOCK_DATA['database']
}
result_configuration = {
'OutputLocation': MOCK_DATA['outputLocation']
}
# noinspection PyUnusedLocal
# pylint: disable=unused-argument
class TestAWSAthenaOperator(unittest.TestCase):
def setUp(self):
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'provide_context': True
}
self.dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
default_args=args,
schedule_interval='@once')
self.athena = AWSAthenaOperator(task_id='test_aws_athena_operator', query='SELECT * FROM TEST_TABLE',
database='TEST_DATABASE', output_location='s3://test_s3_bucket/',
client_request_token='eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
sleep_time=1, max_tries=3, dag=self.dag)
def test_init(self):
self.assertEqual(self.athena.task_id, MOCK_DATA['task_id'])
self.assertEqual(self.athena.query, MOCK_DATA['query'])
self.assertEqual(self.athena.database, MOCK_DATA['database'])
self.assertEqual(self.athena.aws_conn_id, 'aws_default')
self.assertEqual(self.athena.client_request_token, MOCK_DATA['client_request_token'])
self.assertEqual(self.athena.sleep_time, 1)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("SUCCESS",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_small_success_query(self, mock_conn, mock_run_query, mock_check_query_status):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 1)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "SUCCESS",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_big_success_query(self, mock_conn, mock_run_query, mock_check_query_status):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 3)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=(None, None,))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_failed_query_with_none(self, mock_conn, mock_run_query, mock_check_query_status):
with self.assertRaises(Exception):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 3)
@mock.patch.object(AWSAthenaHook, 'get_state_change_reason')
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "FAILED",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_failure_query(self, mock_conn, mock_run_query, mock_check_query_status,
mock_get_state_change_reason):
with self.assertRaises(Exception):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 2)
self.assertEqual(mock_get_state_change_reason.call_count, 1)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "CANCELLED",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_cancelled_query(self, mock_conn, mock_run_query, mock_check_query_status):
with self.assertRaises(Exception):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 3)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "RUNNING",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_hook_run_failed_query_with_max_tries(self, mock_conn, mock_run_query, mock_check_query_status):
with self.assertRaises(Exception):
self.athena.execute(None)
mock_run_query.assert_called_once_with(MOCK_DATA['query'], query_context, result_configuration,
MOCK_DATA['client_request_token'], MOCK_DATA['workgroup'])
self.assertEqual(mock_check_query_status.call_count, 3)
@mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("SUCCESS",))
@mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
@mock.patch.object(AWSAthenaHook, 'get_conn')
def test_xcom_push_and_pull(self, mock_conn, mock_run_query, mock_check_query_status):
ti = TaskInstance(task=self.athena, execution_date=timezone.utcnow())
ti.run()
self.assertEqual(ti.xcom_pull(task_ids='test_aws_athena_operator'),
ATHENA_QUERY_ID)
# pylint: enable=unused-argument
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 7,077,962,466,399,328,000 | 50 | 109 | 0.664603 | false |
Dioptas/pymatgen | pymatgen/core/tests/test_surface.py | 2 | 10627 | #!/usr/bin/python
import unittest
import os
import random
import numpy as np
from pymatgen.core.structure import Structure
from pymatgen.core.lattice import Lattice
from pymatgen.core.surface import Slab, SlabGenerator, generate_all_slabs, \
get_symmetrically_distinct_miller_indices
from pymatgen.symmetry.groups import SpaceGroup
from pymatgen.util.testing import PymatgenTest
def get_path(path_str):
cwd = os.path.abspath(os.path.dirname(__file__))
path = os.path.join(cwd, "..", "..", "..", "test_files", "surface_tests",
path_str)
return path
class SlabTest(PymatgenTest):
def setUp(self):
zno1 = Structure.from_file(get_path("ZnO-wz.cif"), primitive=False)
zno55 = SlabGenerator(zno1, [1, 0, 0], 5, 5, lll_reduce=False,
center_slab=False).get_slab()
self.zno1 = zno1
self.zno55 = zno55
self.h = Structure(Lattice.cubic(3), ["H"],
[[0, 0, 0]])
self.libcc = Structure(Lattice.cubic(3.51004), ["Li", "Li"],
[[0, 0, 0], [0.5, 0.5, 0.5]])
def test_init(self):
zno_slab = Slab(self.zno55.lattice, self.zno55.species,
self.zno55.frac_coords,
self.zno55.miller_index,
self.zno55.oriented_unit_cell,
0, self.zno55.scale_factor)
m =self.zno55.lattice.matrix
area = np.linalg.norm(np.cross(m[0], m[1]))
self.assertAlmostEqual(zno_slab.surface_area, area)
self.assertEqual(zno_slab.lattice.lengths_and_angles,
self.zno55.lattice.lengths_and_angles)
self.assertEqual(zno_slab.oriented_unit_cell.composition,
self.zno1.composition)
self.assertEqual(len(zno_slab), 8)
def test_add_adsorbate_atom(self):
zno_slab = Slab(self.zno55.lattice, self.zno55.species,
self.zno55.frac_coords,
self.zno55.miller_index,
self.zno55.oriented_unit_cell,
0, self.zno55.scale_factor)
zno_slab.add_adsorbate_atom([1], 'H', 1)
self.assertEqual(len(zno_slab), 9)
self.assertEqual(str(zno_slab[8].specie), 'H')
self.assertAlmostEqual(zno_slab.get_distance(1, 8), 1.0)
self.assertTrue(zno_slab[8].c > zno_slab[0].c)
m = self.zno55.lattice.matrix
area = np.linalg.norm(np.cross(m[0], m[1]))
self.assertAlmostEqual(zno_slab.surface_area, area)
self.assertEqual(zno_slab.lattice.lengths_and_angles,
self.zno55.lattice.lengths_and_angles)
def test_get_sorted_structure(self):
species = [str(site.specie) for site in
self.zno55.get_sorted_structure()]
self.assertEqual(species, ["Zn2+"] * 4 + ["O2-"] * 4)
def test_methods(self):
#Test various structure methods
self.zno55.get_primitive_structure()
def test_as_from_dict(self):
d = self.zno55.as_dict()
obj = Slab.from_dict(d)
self.assertEqual(obj.miller_index, (1, 0, 0))
def test_dipole_and_is_polar(self):
self.assertArrayAlmostEqual(self.zno55.dipole, [0, 0, 0])
self.assertFalse(self.zno55.is_polar())
cscl = self.get_structure("CsCl")
cscl.add_oxidation_state_by_element({"Cs": 1, "Cl": -1})
slab = SlabGenerator(cscl, [1, 0, 0], 5, 5,
lll_reduce=False, center_slab=False).get_slab()
self.assertArrayAlmostEqual(slab.dipole, [-4.209, 0, 0])
self.assertTrue(slab.is_polar())
class SlabGeneratorTest(PymatgenTest):
def test_get_slab(self):
s = self.get_structure("LiFePO4")
gen = SlabGenerator(s, [0, 0, 1], 10, 10)
s = gen.get_slab(0.25)
self.assertAlmostEqual(s.lattice.abc[2], 20.820740000000001)
fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"],
[[0, 0, 0]])
gen = SlabGenerator(fcc, [1, 1, 1], 10, 10)
slab = gen.get_slab()
gen = SlabGenerator(fcc, [1, 1, 1], 10, 10, primitive=False)
slab_non_prim = gen.get_slab()
self.assertEqual(len(slab), 6)
self.assertEqual(len(slab_non_prim), len(slab) * 4)
#Some randomized testing of cell vectors
for i in range(1, 231):
i = random.randint(1, 230)
sg = SpaceGroup.from_int_number(i)
if sg.crystal_system == "hexagonal" or (sg.crystal_system == \
"trigonal" and sg.symbol.endswith("H")):
latt = Lattice.hexagonal(5, 10)
else:
#Cubic lattice is compatible with all other space groups.
latt = Lattice.cubic(5)
s = Structure.from_spacegroup(i, latt, ["H"], [[0, 0, 0]])
miller = (0, 0, 0)
while miller == (0, 0, 0):
miller = (random.randint(0, 6), random.randint(0, 6),
random.randint(0, 6))
gen = SlabGenerator(s, miller, 10, 10)
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
def test_get_slabs(self):
gen = SlabGenerator(self.get_structure("CsCl"), [0, 0, 1], 10, 10)
#Test orthogonality of some internal variables.
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
self.assertEqual(len(gen.get_slabs()), 1)
s = self.get_structure("LiFePO4")
gen = SlabGenerator(s, [0, 0, 1], 10, 10)
self.assertEqual(len(gen.get_slabs()), 5)
self.assertEqual(len(gen.get_slabs(bonds={("P", "O"): 3})), 2)
        # There are no slabs in LFP that do not break either P-O or Fe-O
# bonds for a miller index of [0, 0, 1].
self.assertEqual(len(gen.get_slabs(
bonds={("P", "O"): 3, ("Fe", "O"): 3})), 0)
#If we allow some broken bonds, there are a few slabs.
self.assertEqual(len(gen.get_slabs(
bonds={("P", "O"): 3, ("Fe", "O"): 3},
max_broken_bonds=2)), 2)
        # At this threshold, only the origin and center Li result in
        # clustering. All other sites are non-clustered. So the number of
        # slabs is the number of sites in the LiFePO4 unit cell - 2 + 1.
self.assertEqual(len(gen.get_slabs(tol=1e-4)), 15)
LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"),
primitive=False)
gen = SlabGenerator(LiCoO2, [0, 0, 1], 10, 10)
lco = gen.get_slabs(bonds={("Co", "O"): 3})
self.assertEqual(len(lco), 1)
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
def test_triclinic_TeI(self):
# Test case for a triclinic structure of TeI. Only these three
# Miller indices are used because it is easier to identify which
# atoms should be in a surface together. The closeness of the sites
# in other Miller indices can cause some ambiguity when choosing a
# higher tolerance.
numb_slabs = {(0, 0, 1): 5, (0, 1, 0): 3, (1, 0, 0): 7}
TeI = Structure.from_file(get_path("icsd_TeI.cif"),
primitive=False)
for k, v in numb_slabs.items():
trclnc_TeI = SlabGenerator(TeI, k, 10, 10)
TeI_slabs = trclnc_TeI.get_slabs()
self.assertEqual(v, len(TeI_slabs))
class FuncTest(PymatgenTest):
def setUp(self):
self.cscl = self.get_structure("CsCl")
self.lifepo4 = self.get_structure("LiFePO4")
self.tei = Structure.from_file(get_path("icsd_TeI.cif"),
primitive=False)
self.LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"),
primitive=False)
self.p1 = Structure(Lattice.from_parameters(3, 4, 5, 31, 43, 50),
["H", "He"], [[0, 0, 0], [0.1, 0.2, 0.3]])
def test_get_symmetrically_distinct_miller_indices(self):
indices = get_symmetrically_distinct_miller_indices(self.cscl, 1)
self.assertEqual(len(indices), 3)
indices = get_symmetrically_distinct_miller_indices(self.cscl, 2)
self.assertEqual(len(indices), 6)
self.assertEqual(len(get_symmetrically_distinct_miller_indices(
self.lifepo4, 1)), 7)
# The TeI P-1 structure should have 13 unique millers (only inversion
# symmetry eliminates pairs)
indices = get_symmetrically_distinct_miller_indices(self.tei, 1)
self.assertEqual(len(indices), 13)
# P1 and P-1 should have the same # of miller indices since surfaces
# always have inversion symmetry.
indices = get_symmetrically_distinct_miller_indices(self.p1, 1)
self.assertEqual(len(indices), 13)
def test_generate_all_slabs(self):
slabs = generate_all_slabs(self.cscl, 1, 10, 10)
# Only three possible slabs, one each in (100), (110) and (111).
self.assertEqual(len(slabs), 3)
slabs = generate_all_slabs(self.cscl, 1, 10, 10,
bonds={("Cs", "Cl"): 4})
# No slabs if we don't allow broken Cs-Cl
self.assertEqual(len(slabs), 0)
slabs = generate_all_slabs(self.cscl, 1, 10, 10,
bonds={("Cs", "Cl"): 4},
max_broken_bonds=100)
self.assertEqual(len(slabs), 3)
slabs1 = generate_all_slabs(self.lifepo4, 1, 10, 10, tol=0.1,
bonds={("P", "O"): 3})
self.assertEqual(len(slabs1), 4)
slabs2 = generate_all_slabs(self.lifepo4, 1, 10, 10,
bonds={("P", "O"): 3, ("Fe", "O"): 3})
self.assertEqual(len(slabs2), 0)
        # There should be only one possible stable surface, and it lies
        # in the (001) oriented unit cell
slabs3 = generate_all_slabs(self.LiCoO2, 1, 10, 10,
bonds={("Co", "O"): 3})
self.assertEqual(len(slabs3), 1)
mill = (0, 0, 1)
for s in slabs3:
self.assertEqual(s.miller_index, mill)
if __name__ == "__main__":
unittest.main()
| mit | 3,577,662,483,260,128,000 | 40.838583 | 77 | 0.559612 | false |
bluesquall/okeanidanalysis | examples/sensors/rowe-adcp-bottom-track-summary.py | 1 | 3719 | #!/bin/env python
"""
e.g.: $ python rowe-adcp-bottom-track-summary.py /mbari/LRAUV/makai/missionlogs/devel/20150617-ADCP-in-tank/20150617T172914/ADCP-2015061717.ENS.mat
"""
import numpy as np
import scipy as sp
import scipy.io
import matplotlib.pyplot as plt
def plot_adcp_bottom_track_summary(infile, save=True, show=True, autoscale_ylims=False):
if infile.endswith('ENS.mat'):
bt = sp.io.loadmat(infile)['E000010'].squeeze()
idx = 14 # TODO: There may be some sort of misalignment in ENS files...
vr = bt[:,idx:idx+4].squeeze()
snr = bt[:,idx+4:idx+8].squeeze()
amp = bt[:,idx+8:idx+12].squeeze()
cor = bt[:,idx+12:idx+16].squeeze()
bv = bt[:,idx+16:idx+20].squeeze()
bnum = bt[:,idx+20:idx+24].squeeze()
iv = bt[:,idx+24:idx+28].squeeze()
inum = bt[:,idx+28:idx+32].squeeze()
elif infile.endswith('mat'):
import okeanidanalysis
s = okeanidanalysis.logs.OkeanidLog(infile)
vr, t_vr = s.timeseries('Rowe_600.vertical_range')
snr, t_snr = s.timeseries('Rowe_600.signal_to_noise')
amp, t_amp = s.timeseries('Rowe_600.bottom_track_amplitude')
cor, t_cor = s.timeseries('Rowe_600.bottom_track_correlation')
bv, t_bv = s.timeseries('Rowe_600.bottom_track_beam_velocity')
iv, t_iv = s.timeseries('Rowe_600.bottom_track_instrument_velocity')
fig, axs = plt.subplots(6, 4, sharex=True, sharey='row', figsize=(6.5,9))
vrax = axs[0]
snrax = axs[1]
ampax = axs[2]
corax = axs[3]
bvax = axs[4]
ivax = axs[5]
for i in range(4):
vrax[i].plot(vr[:,i])
snrax[i].plot(snr[:,i])
ampax[i].plot(amp[:,i])
corax[i].plot(cor[:,i])
bvax[i].plot(bv[:,i])
ivax[i].plot(iv[:,i])
ylkw = dict(rotation='horizontal', horizontalalignment='right')
vrax[0].set_ylabel('vertical\nrange [m]', **ylkw)
snrax[0].set_ylabel('SNR [dB]', **ylkw)
ampax[0].set_ylabel('amplitude [dB]', **ylkw)
corax[0].set_ylabel('correlation [-]', **ylkw)
bvax[0].set_ylabel('beam\nvelocity [m/s]', **ylkw)
ivax[0].set_ylabel('instrument\nvelocity [m/s]', **ylkw)
ivax[0].set_xlabel('ensemble number')
for i, ax in enumerate(vrax): ax.set_title('beam {0}'.format(i))
if not autoscale_ylims:
vrax[0].set_ylim([0,125])
snrax[0].set_ylim([0,100])
ampax[0].set_ylim([0,200])
corax[0].set_ylim([0,1])
bvax[0].set_ylim([-2,2])
ivax[0].set_ylim([-2,2])
# TODO: Get the lines below to work.
#print([t.get_text() for t in ivax[0].xaxis.get_majorticklabels()])
#ivax[0].xaxis.set_ticklabels([t.get_text() for t in ivax[0].xaxis.get_majorticklabels()], rotation='vertical') # should propogate to ther x axes
for ax in ivax:
plt.sca(ax)
plt.setp(plt.xticks()[1], rotation=90, fontsize=6)
fig.suptitle(infile.rsplit('/')[-1])
plt.subplots_adjust(left=0.25, right=0.95, top=0.9, bottom=0.1, wspace=0)
if save: fig.savefig('/tmp/{0}.png'.format(infile.rsplit('/')[-1]))
if show: plt.show()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='plot summary of ADCP bottom track data')
parser.add_argument('-V', '--version', action='version',
version='%(prog)s 0.0.1',
help='display version information and exit')
parser.add_argument('infile', metavar='filename',
type=str, help='LRAUV slate or RTI .ENS unpacked into .mat')
parser.add_argument('-y', '--autoscale-ylims', action='store_true')
args = parser.parse_args()
plot_adcp_bottom_track_summary(**args.__dict__)
| mit | -4,899,205,760,434,004,000 | 37.340206 | 149 | 0.600161 | false |
ofilipowicz/owndb | store/views.py | 1 | 34136 | from django.contrib.auth.models import User
from django.views.generic import ListView, DetailView, View, CreateView, DeleteView
from django.views.generic.base import TemplateView
from django.db.models import Q
from django.core.urlresolvers import reverse, reverse_lazy
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, JsonResponse, Http404
from django.template.response import TemplateResponse as TR
from django.template import RequestContext
from django.template.defaultfilters import slugify
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.shortcuts import render, get_object_or_404, redirect
from allauth.account.decorators import verified_email_required
from friendship.models import Friend, Follow
from store.forms import FormShareForm # Check if guest is a logged user
from store import models
from datetime import datetime
import re, json
class VerifiedMixin(object):
@method_decorator(verified_email_required)
def dispatch(self, *args, **kwargs):
return super(VerifiedMixin, self).dispatch(*args, **kwargs)
class FormAdd(VerifiedMixin, TemplateView):
template_name = 'store/form_add.html'
def get(self, request, *args, **kwargs):
project = get_object_or_404(models.Project, pk=self.kwargs['project'])
if project.owner == request.user:
return super(FormAdd, self).get(request, *args, **kwargs)
else:
            messages.error(request, _("You are not allowed to add a form."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormAdd, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
print(request.POST)
print(request.FILES)
if request.POST.get('connection') == "forms":
if request.POST.get('form'):
fields = "<ul>"
for field in models.FormField.objects.filter(form=request.POST.get('form')).exclude(type__pk__in=[6,8,9,10]).order_by('position'):
fields += '<li><input type="checkbox" class="spec_field" name="'+str(field.pk)+'" /> <span>' + field.caption + '</span></li>'
return HttpResponse(fields + "</ul>")
forms = ""
for form in models.Form.objects.filter(project=self.kwargs['project']):
forms += '<option value="' + str(form.pk) + '">' + form.title + '</option>'
return HttpResponse(forms)
else:
form_title = request.POST.get('title')
if form_title.isspace() or form_title=='':
return HttpResponse(_("Form name is invalid!"))
names = json.loads(request.POST.get('names'))
types = json.loads(request.POST.get('types'))
settings = json.loads(request.POST.get('settings'))
c = 0
for type in types:
if type == "LabelImage":
c = c + 1
if c > 0:
if request.FILES:
if len(request.FILES) < c:
return HttpResponse(_("You should provide all images for labels."))
else:
return HttpResponse(_("You should provide image for label."))
p = models.Project.objects.get(pk=self.kwargs['project'])
f = models.Form(
title=form_title,
project=p,
slug = slugify(form_title)
)
f.save()
try:
i = 0
for name in names:
t = models.Type.objects.get(name=types[i])
s = settings[i]
ff = models.FormField(
form=f,
type=t,
caption=name,
settings=s,
position=i
)
ff.save()
if (t.name == "LabelText"):
data = models.DataText(
formfield = ff,
data = s
)
data.save()
elif (t.name == "LabelImage"):
imgname = "labelimage" + str(i)
img = models.Image(
formfield=ff,
image=request.FILES[imgname]
)
img.save()
elif (t.name == "Connection"):
d = s.split(';')
cf = models.Form.objects.get(pk=d[0])
c = models.Connection(
formfield = ff,
form = cf
)
c.save()
i += 1
except:
f.delete()
return HttpResponse(_("Error occurred while creating form!"))
messages.success(request, _("Form successfully added!"))
return HttpResponse("OK")
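# Illustrative note (not part of the original code): the form-builder page that
# talks to FormAdd.post above is assumed to POST parallel JSON arrays, one
# entry per field, e.g.
#     title    = "Contacts"
#     names    = '["Intro", "Related entry"]'
#     types    = '["LabelText", "Connection"]'
#     settings = '["Some static text", "<form_pk>;<visible_field_pk>;..."]'
# plus one uploaded file named "labelimage<i>" for every LabelImage field.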
class FormEdit(VerifiedMixin, TemplateView):
template_name = 'store/form_edit.html'
def get(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
if form.project.owner == self.request.user and not models.FormInstance.objects.filter(form__pk=self.kwargs['form']).exists():
return super(FormEdit, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You cannot edit this form."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormEdit, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
fields = models.FormField.objects.filter(form=self.kwargs['form']).order_by('position')
for field in fields:
if field.type.pk == 7:
v = field.settings.split(';')
fpk = v[0]
field.fpk = fpk
con = models.Form.objects.get(pk=fpk)
field.conname = con.title
del v[0]
field.visibles = models.FormField.objects.filter(form=fpk).exclude(type__pk__in=[6,8,9,10]).order_by('pk')
for vis in field.visibles:
if str(vis.pk) in v:
vis.checked = True
context['fields'] = fields
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
print(request.POST)
print(request.FILES)
if request.POST.get('connection') == "forms":
if request.POST.get('form'):
fields = "<ul>"
for field in models.FormField.objects.filter(form=request.POST.get('form')).exclude(type__pk__in=[6,8,9,10]).order_by('position'):
fields += '<li><input type="checkbox" class="spec_field" name="'+str(field.pk)+'" /> <span>' + field.caption + '</span></li>'
return HttpResponse(fields + "</ul>")
forms = ""
for form in models.Form.objects.filter(project=self.kwargs['project']):
forms += '<option value="' + str(form.pk) + '">' + form.title + '</option>'
return HttpResponse(forms)
else:
form_title = request.POST.get('title')
if form_title.isspace() or form_title=='':
return HttpResponse(_("Form name is invalid!"))
names = json.loads(request.POST.get('names'))
types = json.loads(request.POST.get('types'))
settings = json.loads(request.POST.get('settings'))
c = 0
for type in types:
if type == "LabelImage":
c = c + 1
if c > 0:
if request.FILES:
if len(request.FILES) < c:
return HttpResponse(_("You should provide all images for labels."))
else:
return HttpResponse(_("You should provide image for label."))
f = models.Form.objects.get(pk=self.kwargs['form'])
f.title = form_title
f.slug = slugify(form_title)
f.save()
models.FormInstance.objects.filter(form=f).delete()
models.FormField.objects.filter(form=f).delete()
try:
i = 0
for name in names:
t = models.Type.objects.get(name=types[i])
s = settings[i]
ff = models.FormField(
form=f,
type=t,
caption=name,
settings=s,
position=i
)
ff.save()
if t.name == "LabelText":
data = models.DataText(
formfield = ff,
data = s
)
data.save()
elif t.name == "LabelImage":
imgname = "labelimage" + str(i)
img = models.Image(
formfield=ff,
image=request.FILES[imgname]
)
img.save()
elif t.name == "Connection":
d = s.split(';')
cf = models.Form.objects.get(pk=d[0])
c = models.Connection(
formfield = ff,
form = cf
)
c.save()
i += 1
for c in models.Connection.objects.filter(form=f):
models.FormField.objects.filter(pk=c.formfield.pk).delete()
except:
f.delete()
return HttpResponse(_("Error occurred while saving changes!"))
messages.success(request, _("Form updated successfully!"))
return HttpResponse("OK")
class FormInstanceAdd(VerifiedMixin, TemplateView):
template_name = 'store/forminstance_add.html'
def get(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
sharing = models.Sharing.objects.filter(owner=self.request.user).filter(form=form)
if form.project.owner == self.request.user or sharing:
return super(FormInstanceAdd, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You are not allowed to add instances to this form."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormInstanceAdd, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
fields = models.FormField.objects.filter(form=self.kwargs['form']).order_by('position')
for field in fields:
if models.Image.objects.filter(formfield=field, forminstance__isnull=True).exists():
field.labelimage = models.Image.objects.get(formfield=field, forminstance__isnull=True)
elif field.type.pk == 7:
field.fpk = field.settings.split(';')[0]
context['fields'] = fields
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
print(request.POST)
print(request.FILES)
if request.POST.get('connection') == "instances":
visibles = models.FormField.objects.get(pk=request.POST.get('formfield')).settings.split(';')
del visibles[0]
fpk = request.POST.get('form')
forms = '<div class="table-responsive"><table class="instances table table-hover"><thead><tr>'
for field in models.FormField.objects.filter(form=fpk).order_by('position'):
if (str(field.pk) in visibles and field.type.pk != 5 and field.type.pk != 8 and field.type.pk != 9 and field.type.pk != 10):
forms += '<th>'+ field.caption +'</th>'
forms += "</tr></thead><tbody>"
i = 0
for instance in models.FormInstance.objects.filter(form=models.Form.objects.get(pk=fpk)).order_by('-pk'):
forms += '<tr class="cmodal-select" name="'+str(instance.pk)+'">'
for field in models.FormField.objects.filter(form=fpk).order_by('position'):
if (str(field.pk) in visibles and field.type.pk != 8 and field.type.pk != 9 and field.type.pk != 10):
if field.type.pk == 7:
insd = models.ConnectionInstance.objects.get(connection__formfield = field, forminstance = instance)
elif field.type.pk == 6:
insd = models.File.objects.get(formfield = field, forminstance = instance)
elif field.type.pk == 5:
insd = models.Image.objects.get(formfield = field, forminstance = instance)
else:
insd = models.DataText.objects.get(formfield = field, forminstance = instance)
forms += '<td>' + insd.display() + '</td>'
forms += '</tr>'
i += 1
forms += '</tbody></table></div>'
if i==0:
forms = _('Connected form is empty! There is no data to show.')
return HttpResponse(forms)
else:
fields = models.FormField.objects.filter(form=self.kwargs['form']).order_by('position')
contents = json.loads(request.POST.get('contents'))
i = 0
c = 0
for field in fields:
if field.type.pk == 6 or field.type.pk == 5:
c = c + 1
elif field.type.pk == 7:
if contents[i] == '':
return HttpResponse(_("You have to choose all instances!"))
i += 1
if c > 0:
if request.FILES:
if len(request.FILES) < c:
return HttpResponse(_("You should choose all images or files."))
else:
return HttpResponse(_("You should choose image or file."))
f = models.Form.objects.get(pk=self.kwargs['form'])
fi = models.FormInstance(
form = f,
user = self.request.user
)
if fi.form.project.owner != self.request.user:
fi.approved = False
fi.save()
i = 0
for field in fields:
if (field.type.pk != 8 and field.type.pk != 9 and field.type.pk != 10):
if field.type.pk == 7:
if contents[i] != '':
con = models.Connection.objects.get(formfield=field)
chfi = models.FormInstance.objects.get(pk=contents[i])
ins = models.ConnectionInstance(
connection=con,
forminstance = fi,
choseninstance = chfi
)
ins.save()
elif field.type.pk == 6:
filename = "file" + str(i)
file = models.File(
formfield=field,
forminstance = fi,
file=request.FILES[filename]
)
file.save()
elif field.type.pk == 5:
imgname = "image" + str(i)
img = models.Image(
formfield=field,
forminstance = fi,
image=request.FILES[imgname]
)
img.save()
else:
data = models.DataText(
formfield = field,
forminstance = fi,
data = contents[i]
)
data.save()
i += 1
messages.success(request, _("Form instance added successfully!"))
return HttpResponse("OK")
class ProjectList(VerifiedMixin, ListView):
model = models.Project
paginate_by = 4
context_object_name = 'project_list'
def get_queryset(self):
q = self.request.GET.get('search')
if q:
ret = self.model.objects.filter(owner__pk=self.request.user.pk, title__icontains=q)
if not ret.exists():
messages.error(self.request, _("Projects with \"") + q + _("\" were not found!"))
else:
messages.success(self.request, _("List of projects with \"") + q + _("\" term."))
return ret
return self.model.objects.filter(owner__pk=self.request.user.pk).order_by('-pk')
def get_context_data(self, **kwargs):
context = super(ProjectList, self).get_context_data(**kwargs)
return context
class FormList(VerifiedMixin, ListView):
model = models.Form
paginate_by = 4
context_object_name = 'form_list'
def get(self, request, *args, **kwargs):
project = get_object_or_404(models.Project, pk=self.kwargs['project'])
if project.owner == request.user:
return super(FormList, self).get(request, *args, **kwargs)
else:
            messages.error(request, _("You are not allowed to see this form list because you are not its owner."))
raise Http404()
def get_queryset(self):
q = self.request.GET.get('search')
if q:
ret = self.model.objects.filter(project__pk=self.kwargs['project'], title__icontains=q)
if not ret.exists():
messages.error(self.request, _("Forms with \"") + q + _("\" were not found!"))
else:
messages.success(self.request, _("List of forms with \"") + q + _("\" term."))
return ret
return self.model.objects.filter(project__pk=self.kwargs['project']).order_by('-pk')
def get_context_data(self, **kwargs):
context = super(FormList, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
return context
class SharingSomeones(VerifiedMixin, ListView):
model = models.Sharing
paginate_by = 2
context_object_name = 'sharedform_list'
template_name = 'store/sharing_someones.html'
def get_queryset(self):
return self.request.user.sharing_set.all()
class SharingMy(VerifiedMixin, ListView):
model = models.Sharing
paginate_by = 2
context_object_name = 'sharedform_list'
template_name = 'store/sharing_my.html'
def get_queryset(self):
return self.model.objects.filter(form__project__owner=self.request.user)
class SharingDelete(VerifiedMixin, DeleteView):
model = models.Sharing
slug_field = 'id'
slug_url_kwarg = 'shared_form'
success_url = reverse_lazy('project-list')
def get_success_url(self):
messages.success(self.request, _('Shared form successfully deleted!'))
return super(SharingDelete, self).get_success_url()
class FormInstanceList(VerifiedMixin, ListView):
model = models.FormInstance
paginate_by = 4
context_object_name = 'forminstance_list'
def get(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
sharing = models.Sharing.objects.filter(owner=self.request.user).filter(
form=form)
if form.project.owner == self.request.user or sharing:
return super(FormInstanceList, self).get(request, *args, **kwargs)
else:
            messages.error(request, _("You are not allowed to view instances of this form."))
raise Http404()
def get_queryset(self):
q = self.request.GET.get('search')
if q:
datalist = models.DataText.objects.filter(formfield__form__pk=self.kwargs['form'], forminstance__isnull=False, data__icontains=q)
instanceslist = []
for i in datalist:
instanceslist.append(i.forminstance.pk)
ret = self.model.objects.filter(Q(form__pk=self.kwargs['form']), Q(approved=True), Q(pk__in=instanceslist) | Q(date__icontains=q) | Q(user__username__icontains=q)).order_by('-pk')
if not ret.exists():
messages.error(self.request, _("Instances with \"") + q + _("\" were not found!"))
else:
messages.success(self.request, _("List of instances with \"") + q + _("\" term."))
return ret
return self.model.objects.filter(form__pk=self.kwargs['form'], approved=True).order_by('-pk')
def get_context_data(self, **kwargs):
context = super(FormInstanceList, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
context['fields'] = models.FormField.objects.filter(form__pk=self.kwargs['form']).exclude(type__pk__in=[7,8,9,10]).order_by('position')
return context
class Dashboard(VerifiedMixin, ListView):
model = models.FormInstance
paginate_by = 10
context_object_name = 'instances'
template_name = 'store/dashboard.html'
def get_queryset(self):
return self.model.objects.filter(
form__project__owner=self.request.user,
approved=False)
@verified_email_required
def approve_instance(request, forminstance):
instance_obj = get_object_or_404(models.FormInstance, pk=forminstance)
if instance_obj.form.project.owner == request.user:
instance_obj.approved = True
instance_obj.save()
messages.success(request, _('Form instance approved'))
return HttpResponseRedirect(reverse_lazy('dashboard'))
else:
messages.error(request, _("You are not allowed to approve this instance."))
raise Http404()
class DeleteInstance(VerifiedMixin, DeleteView):
model = models.FormInstance
slug_field = 'id'
slug_url_kwarg = 'forminstance'
success_url = reverse_lazy('dashboard')
def get_success_url(self):
messages.success(self.request, _('Form instance deleted'))
return super(DeleteInstance, self).get_success_url()
class FormInstanceDetail(VerifiedMixin, DetailView):
model = models.FormInstance
context_object_name = 'forminstance'
slug_field = 'id'
slug_url_kwarg = 'forminstance'
def get(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
if form.project.owner == self.request.user or self.get_object().user == self.request.user:
return super(FormInstanceDetail, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You can't view this instance details because it wasn't added by you."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormInstanceDetail, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
formfield_list = models.FormField.objects.filter(form__pk=self.kwargs['form']).exclude(Q(type=10)).order_by('position')
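        # field.type.pk appears to encode the widget kind: 3/4 look like choice-style
        # fields whose options are stored ';'-separated in settings, 7 links to another
        # form's instance, and 10 is a display-only type (excluded above).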
for field in formfield_list:
if field.type.pk == 3 or field.type.pk == 4:
t = field.settings.split(';')
c = models.DataText.objects.get(formfield=field, forminstance=self.kwargs['forminstance']).data.split(';')
del t[0]
del c[0]
field.options = zip(t,c)
elif field.type.pk == 7:
field.fpk = field.settings.split(';')[0]
context['formfield_list'] = formfield_list
context['instances_count'] = models.FormInstance.objects.filter(form__pk=self.kwargs['form']).count()
return context
def post(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
if not form.project.owner == self.request.user:
return HttpResponse(_("You can't update instances of this form because you are not an owner."))
print(request.POST)
print(request.FILES)
fields = models.FormField.objects.filter(form=self.kwargs['form']).order_by('position')
contents = json.loads(request.POST.get('contents'))
i = 0
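        # field.type.pk appears to select the storage model per field: 5 = image,
        # 6 = file upload, 7 = connection to another form instance, anything else
        # plain text; 8/9/10 seem to be layout-only types that store no data.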
for field in fields:
if (field.type.pk != 8 and field.type.pk != 9 and field.type.pk != 10):
if field.type.pk == 7:
if contents[i] != '':
chfi = models.FormInstance.objects.get(pk=contents[i])
con = models.ConnectionInstance.objects.get(connection__formfield=field, forminstance=self.kwargs['forminstance'])
con.choseninstance = chfi
con.save()
elif field.type.pk == 6:
filename = "file" + str(i)
if request.FILES.get(filename):
f = models.File.objects.get(formfield=field, forminstance=self.kwargs['forminstance'])
f.file.delete(save=False)
f.file=request.FILES[filename]
f.save()
elif field.type.pk == 5:
imgname = "image" + str(i)
if request.FILES.get(imgname):
f = models.Image.objects.get(formfield=field, forminstance=self.kwargs['forminstance'])
f.image.delete(save=False)
f.image=request.FILES[imgname]
f.save()
else:
f = models.DataText.objects.get(formfield=field, forminstance=self.kwargs['forminstance'])
f.data = contents[i]
f.save()
i += 1
messages.success(request, _("Instance successfully updated!"))
return HttpResponse("OK")
class FormInstanceDelete(VerifiedMixin, TemplateView):
template_name = 'store/forminstance_delete.html'
def get_context_data(self, **kwargs):
context = super(FormInstanceDelete, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
context['forminstance'] = models.FormInstance.objects.get(pk=self.kwargs['forminstance'])
context['dependent_count'] = models.ConnectionInstance.objects.filter(choseninstance__pk=self.kwargs['forminstance']).count()
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
try:
models.FormInstance.objects.filter(form__pk=self.kwargs['form'], pk=self.kwargs['forminstance']).delete()
messages.success(request, _("Form instance successfully deleted!"))
except:
messages.error(request, _("Error occurred while deleting form instance!"))
return HttpResponseRedirect(reverse('forminstance-list', kwargs={'project': self.kwargs['project'], 'form': self.kwargs['form'] } ))
class ProjectAdd(VerifiedMixin, TemplateView):
template_name = 'store/project_add.html'
def get_context_data(self, **kwargs):
context = super(ProjectAdd, self).get_context_data(**kwargs)
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
name = self.request.POST.get('project_name')
if name.isspace() or name=='':
messages.error(request, _("Bad project name!"))
return HttpResponseRedirect(reverse('project-add'))
p = models.Project(
title=name,
owner=self.request.user,
slug=slugify(name)
)
p.save()
messages.success(request, _("Project successfully added!"))
return HttpResponseRedirect(reverse('form-list', kwargs={'project': p.pk} ))
class ProjectEdit(VerifiedMixin, TemplateView):
template_name = 'store/project_edit.html'
def get(self, request, *args, **kwargs):
project = get_object_or_404(models.Project, pk=self.kwargs['project'])
if project.owner == request.user:
return super(ProjectEdit, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You are not allowed to see this forms list"))
raise Http404()
def get_context_data(self, **kwargs):
context = super(ProjectEdit, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
name = self.request.POST.get('project_name')
if name.isspace() or name=='':
messages.error(request, _("Bad project name!"))
return HttpResponseRedirect(reverse('project-edit', kwargs={'project': self.kwargs['project'] } ))
p = models.Project.objects.get(pk=self.kwargs['project'])
p.title = name
p.slug = slugify(name)
p.save()
messages.success(request, _("Project successfully updated!"))
return HttpResponseRedirect(reverse('form-list', kwargs={'project': self.kwargs['project'] } ))
class FormDelete(VerifiedMixin, TemplateView):
template_name = 'store/form_delete.html'
def get(self, request, *args, **kwargs):
project = get_object_or_404(models.Project, pk=self.kwargs['project'])
if project.owner == request.user:
return super(FormDelete, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You are not allowed to delete this form."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormDelete, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form'] = models.Form.objects.get(pk=self.kwargs['form'])
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
try:
models.Form.objects.get(pk=self.kwargs['form']).delete()
messages.success(request, _("Form successfully deleted!"))
except:
messages.error(request, _("Error occurred while deleting form!"))
return HttpResponseRedirect(reverse('form-list', kwargs={'project': self.kwargs['project'] } ))
class ProjectDelete(VerifiedMixin, TemplateView):
template_name = 'store/project_delete.html'
def get(self, request, *args, **kwargs):
project = get_object_or_404(models.Project, pk=self.kwargs['project'])
if project.owner == request.user:
return super(ProjectDelete, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You are not allowed to delete this project."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(ProjectDelete, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data()
try:
models.Project.objects.get(pk=self.kwargs['project']).delete()
messages.success(request, _("Project successfully deleted!"))
except:
messages.error(request, _("Error occurred while deleting project!"))
return HttpResponseRedirect(reverse('project-list'))
class FormShare(VerifiedMixin, CreateView):
model = models.Sharing
template_name = 'store/form_share.html'
form_class = FormShareForm
def get(self, request, *args, **kwargs):
form = get_object_or_404(models.Form, pk=self.kwargs['form'])
if form.project.owner == self.request.user:
return super(FormShare, self).get(request, *args, **kwargs)
else:
messages.error(request, _("You can't share this form."))
raise Http404()
def get_context_data(self, **kwargs):
context = super(FormShare, self).get_context_data(**kwargs)
context['project'] = models.Project.objects.get(pk=self.kwargs['project'])
context['form_id'] = models.Form.objects.get(pk=self.kwargs['form'])
return context
def form_valid(self, form):
form.instance.form = models.Form.objects.get(pk=self.kwargs['form'])
form.instance.owner = User.objects.get(pk=form.cleaned_data.get('user'))
return super(FormShare, self).form_valid(form)
def get_success_url(self):
messages.success(self.request, _("Form successfully shared!"))
return reverse_lazy('forminstance-list', args=[self.kwargs['project'], self.kwargs['form']])
def get_form_kwargs(self):
kwargs = super(FormShare, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
| mit | 6,421,516,021,437,837,000 | 44.031662 | 191 | 0.555516 | false |
esanchezm/sqjobs | sqjobs/tests/worker_test.py | 1 | 3311 | import pytest
from ..connectors.dummy import Dummy
from ..worker import Worker
from ..broker import Broker
from .fixtures import Adder, FakeAdder, AbstractAdder
class TestWorker(object):
@property
def connector(self):
return Dummy()
@property
def broker(self):
return Broker(self.connector)
def test_worker_repr(self):
worker = Worker(self.broker, 'default')
assert repr(worker) == 'Worker(Dummy)'
def test_register_job(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
assert len(worker.registered_jobs) == 1
assert worker.registered_jobs[Adder.name] == Adder
def test_register_abstract_job(self):
worker = Worker(self.broker, 'default')
worker.register_job(AbstractAdder)
assert len(worker.registered_jobs) == 0
def test_register_job_twice(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
worker.register_job(Adder)
assert len(worker.registered_jobs) == 1
assert worker.registered_jobs[Adder.name] == Adder
def test_register_job_overwrite(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
worker.register_job(FakeAdder)
assert len(worker.registered_jobs) == 1
assert worker.registered_jobs[Adder.name] == FakeAdder
def test_job_builder(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
payload = self._job_payload(1, 'adder', 'default', 2, [1, 2], {})
job, args, kwargs = worker._build_job(payload)
assert isinstance(job, Adder)
assert args == [1, 2]
assert kwargs == {}
assert job.id == 1
assert job.queue == 'default'
assert job.retries == 2
def test_invalid_job_builder(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
payload = self._job_payload(1, 'WRONG', 'default', 2, [1, 2], {})
with pytest.raises(ValueError):
worker._build_job(payload)
def test_change_retry_time(self):
worker = Worker(self.broker, 'default')
worker.register_job(Adder)
payload = self._job_payload(1, 'adder', 'default', 2, [1, 2], {})
job, _, _ = worker._build_job(payload)
worker._change_retry_time(job)
assert len(worker.broker.connector.retried_jobs['default']) == 1
assert worker.broker.connector.retried_jobs['default'][0] == (1, 10)
def test_not_change_retry_time(self):
worker = Worker(self.broker, 'default')
worker.register_job(FakeAdder)
payload = self._job_payload(1, 'adder', 'default', 2, [1, 2], {})
job, _, _ = worker._build_job(payload)
worker._change_retry_time(job)
assert len(worker.broker.connector.retried_jobs) == 0
def _job_payload(self, jid, name, queue, retries, args, kwargs):
return {
'name': name,
'queue': queue,
'args': args,
'kwargs': kwargs,
'_metadata': {
'id': jid,
'retries': retries,
'created_on': 'NOW',
'first_execution_on': 'NOW',
},
}
| bsd-3-clause | -5,957,581,397,920,828,000 | 29.1 | 76 | 0.58804 | false |
Eldinnie/python-telegram-bot | tests/test_choseninlineresulthandler.py | 1 | 5421 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2018
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import pytest
from telegram import (Update, Chat, Bot, ChosenInlineResult, User, Message, CallbackQuery,
InlineQuery, ShippingQuery, PreCheckoutQuery)
from telegram.ext import ChosenInlineResultHandler
message = Message(1, User(1, '', False), None, Chat(1, ''), text='Text')
params = [
{'message': message},
{'edited_message': message},
{'callback_query': CallbackQuery(1, User(1, '', False), 'chat', message=message)},
{'channel_post': message},
{'edited_channel_post': message},
{'inline_query': InlineQuery(1, User(1, '', False), '', '')},
{'shipping_query': ShippingQuery('id', User(1, '', False), '', None)},
{'pre_checkout_query': PreCheckoutQuery('id', User(1, '', False), '', 0, '')},
{'callback_query': CallbackQuery(1, User(1, '', False), 'chat')}
]
ids = ('message', 'edited_message', 'callback_query', 'channel_post',
'edited_channel_post', 'inline_query',
'shipping_query', 'pre_checkout_query', 'callback_query_without_message')
@pytest.fixture(scope='class', params=params, ids=ids)
def false_update(request):
return Update(update_id=1, **request.param)
@pytest.fixture(scope='class')
def chosen_inline_result():
return Update(1, chosen_inline_result=ChosenInlineResult('result_id',
User(1, 'test_user', False),
'query'))
class TestChosenInlineResultHandler(object):
test_flag = False
@pytest.fixture(autouse=True)
def reset(self):
self.test_flag = False
def callback_basic(self, bot, update):
test_bot = isinstance(bot, Bot)
test_update = isinstance(update, Update)
self.test_flag = test_bot and test_update
def callback_data_1(self, bot, update, user_data=None, chat_data=None):
self.test_flag = (user_data is not None) or (chat_data is not None)
def callback_data_2(self, bot, update, user_data=None, chat_data=None):
self.test_flag = (user_data is not None) and (chat_data is not None)
def callback_queue_1(self, bot, update, job_queue=None, update_queue=None):
self.test_flag = (job_queue is not None) or (update_queue is not None)
def callback_queue_2(self, bot, update, job_queue=None, update_queue=None):
self.test_flag = (job_queue is not None) and (update_queue is not None)
def test_basic(self, dp, chosen_inline_result):
handler = ChosenInlineResultHandler(self.callback_basic)
dp.add_handler(handler)
assert handler.check_update(chosen_inline_result)
dp.process_update(chosen_inline_result)
assert self.test_flag
def test_pass_user_or_chat_data(self, dp, chosen_inline_result):
handler = ChosenInlineResultHandler(self.callback_data_1, pass_user_data=True)
dp.add_handler(handler)
dp.process_update(chosen_inline_result)
assert self.test_flag
dp.remove_handler(handler)
handler = ChosenInlineResultHandler(self.callback_data_1, pass_chat_data=True)
dp.add_handler(handler)
self.test_flag = False
dp.process_update(chosen_inline_result)
assert self.test_flag
dp.remove_handler(handler)
handler = ChosenInlineResultHandler(self.callback_data_2, pass_chat_data=True,
pass_user_data=True)
dp.add_handler(handler)
self.test_flag = False
dp.process_update(chosen_inline_result)
assert self.test_flag
def test_pass_job_or_update_queue(self, dp, chosen_inline_result):
handler = ChosenInlineResultHandler(self.callback_queue_1, pass_job_queue=True)
dp.add_handler(handler)
dp.process_update(chosen_inline_result)
assert self.test_flag
dp.remove_handler(handler)
handler = ChosenInlineResultHandler(self.callback_queue_1, pass_update_queue=True)
dp.add_handler(handler)
self.test_flag = False
dp.process_update(chosen_inline_result)
assert self.test_flag
dp.remove_handler(handler)
handler = ChosenInlineResultHandler(self.callback_queue_2, pass_job_queue=True,
pass_update_queue=True)
dp.add_handler(handler)
self.test_flag = False
dp.process_update(chosen_inline_result)
assert self.test_flag
def test_other_update_types(self, false_update):
handler = ChosenInlineResultHandler(self.callback_basic)
assert not handler.check_update(false_update)
| gpl-3.0 | -3,852,272,266,382,345,700 | 38 | 90 | 0.654307 | false |
drytoastman/scorekeeperbackend | sync/tests/test_conflicts.py | 1 | 2530 | #!/usr/bin/env python3
import time
from helpers import *
def test_keyinsert(syncdbs, syncdata):
""" Merge drivers, delete on one while linking to a car on the other, should undelete driver and maintain car """
syncx, mergex = syncdbs
testid = '00000000-0000-0000-0000-000000000042'
testcid = '00000000-0000-0000-0000-000000000043'
# Insert remote
with syncx['B'].cursor() as cur:
cur.execute("INSERT INTO drivers (driverid, firstname, lastname, email, username) VALUES (%s, 'first', 'last', 'email', 'other')", (testid,))
syncx['B'].commit()
time.sleep(0.5)
dosync(syncx['A'], mergex['A'])
verify_driver(syncx, testid, (('firstname', 'first'), ('lastname', 'last'), ('email', 'email')), ())
with syncx['A'].cursor() as cur:
cur.execute("DELETE FROM drivers WHERE driverid=%s", (testid,))
syncx['A'].commit()
time.sleep(0.5)
with syncx['B'].cursor() as cur:
cur.execute("INSERT INTO cars (carid, driverid, classcode, indexcode, number, useclsmult, attr, modified) VALUES (%s, %s, 'c1', 'i1', 2, 'f', '{}', now())", (testcid, testid))
syncx['B'].commit()
time.sleep(0.5)
dosync(syncx['A'], mergex['A'])
verify_driver(syncx, testid, (('firstname', 'first'), ('lastname', 'last'), ('email', 'email')), ())
verify_car(syncx, testcid, (('classcode', 'c1'),), ())
def test_keyupdate(syncdbs, syncdata):
""" Test for updating a key column that references another deleted row """
syncx, mergex = syncdbs
testid = '00000000-0000-0000-0000-000000000142'
with syncx['B'].cursor() as cur:
cur.execute("INSERT INTO indexlist (indexcode, descrip, value) VALUES ('i2', '', 0.999)")
cur.execute("INSERT INTO cars (carid, driverid, classcode, indexcode, number, useclsmult, attr, modified) VALUES (%s, %s, 'c1', 'i1', 2, 'f', '{}', now())", (testid, syncdata.driverid))
syncx['B'].commit()
time.sleep(0.5)
dosync(syncx['A'], mergex['A'])
verify_car(syncx, testid, (('classcode', 'c1'),), ())
with syncx['A'].cursor() as cur:
cur.execute("DELETE FROM indexlist WHERE indexcode='i2'")
syncx['A'].commit()
with syncx['B'].cursor() as cur:
cur.execute("UPDATE cars SET indexcode='i2',modified=now() WHERE carid=%s", (testid,))
syncx['B'].commit()
time.sleep(0.5)
dosync(syncx['A'], mergex['A'])
verify_car(syncx, testid, (('classcode', 'c1'), ('indexcode', 'i2')), ())
verify_index(syncx, 'i2', (('value', 0.999),))
| gpl-3.0 | 8,740,280,239,374,381,000 | 40.47541 | 193 | 0.607115 | false |
rapkis/prekiu-automatas | aparatas1.py | 1 | 4408 | #!/usr/bin/python
import MySQLdb
import datetime
prekes = [None] * 16
automatoId=1
data = str(datetime.date.today())
prekesId = None
prekesPav = "Niekas"
def prisijungimas():
db = MySQLdb.connect(host="pardavimuaparatas2.mysql.database.azure.com",
user="pi@pardavimuaparatas2",
passwd="Aparatas1",
db="prekiuautomatai")
cur = db.cursor()
return cur
def ikelimas():
db = MySQLdb.connect(host="pardavimuaparatas2.mysql.database.azure.com",
user="pi@pardavimuaparatas2",
passwd="Aparatas1",
db="prekiuautomatai")
cur = db.cursor()
    # load the data with an SQL query
cur.execute("SELECT * FROM prekesautomate WHERE Automatai_id='%d';" % automatoId)
    # write the rows into the list
i = 0
for row in cur.fetchall():
prekes[i] = int(row[3])
i = i + 1
    # close the database connection
db.close()
def saugojimas():
db = MySQLdb.connect(host="pardavimuaparatas2.mysql.database.azure.com",
user="pi@pardavimuaparatas2",
passwd="Aparatas1",
db="prekiuautomatai")
cur = db.cursor()
    # update the database table with the new data
for x in range(1, 17):
cur.execute("UPDATE prekesautomate SET Kiekis=%d WHERE Automatai_id = %d AND NumerisAutomate = %d;" % (prekes[x-1], automatoId, x))
cur.execute("INSERT INTO pardavimustatistika (Automatai_id, Prekes_pavadinimas, data) VALUES ('%d', '%s', '%s' );" % (automatoId, prekesPav, data))
    # commit to the database and close the connection
db.commit()
db.close()
# method to restore the default quantity values in the db from the machine side
def reset():
db = MySQLdb.connect(host="pardavimuaparatas2.mysql.database.azure.com",
user="pi@pardavimuaparatas2",
passwd="Aparatas1",
db="prekiuautomatai")
cur = db.cursor()
for x in range(1, 17):
cur.execute("UPDATE prekesautomate SET Kiekis=10 WHERE Automatai_id = %d AND NumerisAutomate = %d;" % (automatoId, x))
db.commit()
db.close()
# read the data from a file
def nuskaitymas():
file = open("duomenys1.txt", "r")
temp = file.read().splitlines()
i = 0
for line in temp:
prekes[i] = int(line)
i = i + 1
file.close()
# enter the product number from the keyboard
def ivedimas():
db = MySQLdb.connect(host="pardavimuaparatas2.mysql.database.azure.com",
user="pi@pardavimuaparatas2",
passwd="Aparatas1",
db="prekiuautomatai")
cur = db.cursor()
try:
perkamaPreke = input('Iveskite perkamos prekes ID:')
except SyntaxError:
perkamaPreke = None
if perkamaPreke >= 1 and perkamaPreke <= len(prekes) and isinstance(perkamaPreke, int) and perkamaPreke != None:
print 'Perkamos prekes id: %s' % perkamaPreke
prekes[perkamaPreke-1] = prekes[perkamaPreke-1] - 1
cur.execute("SELECT Prekes_Pavadinimas FROM prekesautomate WHERE Automatai_id = %d AND NumerisAutomate = %d;" % (automatoId, perkamaPreke))
#prekesPav = str(cur.fetchone())
kiekis = 0
for row in cur.fetchall():
prekesPav = str(row[0])
#print(prekes)
#print(prekesPav)
else:
print('Neivestas arba neteisingai ivestas prekes kodas')
        return ivedimas()
return prekesPav
# write the data to a file (kept as a backup copy)
def irasymas():
file = open("duomenys1.txt", "w")
for item in prekes:
file.write("%s\n" % item)
file.close()
while True:
#reset()
ikelimas()
prekesPav = ivedimas()
#print(prekesPav)
irasymas()
saugojimas()
| mit | 849,941,584,960,501,600 | 35.675214 | 155 | 0.533348 | false |
bistromath/gr-air-modes | python/sbs1.py | 1 | 7992 | #
# Copyright 2010 Nick Foster
#
# This file is part of gr-air-modes
#
# gr-air-modes is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# gr-air-modes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gr-air-modes; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import time, os, sys, socket
import air_modes
import datetime
from air_modes.exceptions import *
import threading
class dumb_task_runner(threading.Thread):
def __init__(self, task, interval):
threading.Thread.__init__(self)
self._task = task
self._interval = interval
self.shutdown = threading.Event()
self.finished = threading.Event()
self.setDaemon(True)
self.start()
def run(self):
while not self.shutdown.is_set():
self._task()
time.sleep(self._interval)
self.finished.set()
def close(self):
self.shutdown.set()
self.finished.wait(self._interval)
class output_sbs1:
def __init__(self, cprdec, port, pub):
self._s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self._s.bind(('', port))
self._s.listen(1)
self._s.setblocking(0) #nonblocking
self._conns = [] #list of active connections
self._aircraft_id_map = {} # dictionary of icao24 to aircraft IDs
self._aircraft_id_count = 0 # Current Aircraft ID count
self._cpr = cprdec
#it could be cleaner if there were separate output_* fns
#but this works
for i in (0, 4, 5, 11, 17):
pub.subscribe("type%i_dl" % i, self.output)
#spawn thread to add new connections as they come in
self._runner = dumb_task_runner(self.add_pending_conns, 0.1)
def __del__(self):
self._s.close()
def get_aircraft_id(self, icao24):
if icao24 in self._aircraft_id_map:
return self._aircraft_id_map[icao24]
# Adding this new ID to the dictionary
self._aircraft_id_count += 1
self._aircraft_id_map[icao24] = self._aircraft_id_count
# Checking to see if we need to clean up in the event that the
# dictionary is getting too large.
if len(self._aircraft_id_map) > 1e4:
minimum = min(self._aircraft_id_map.values()) + (len(self._aircraft_id_map) - 1e4)
for icao, _id in dict(self._aircraft_id_map).iteritems():
if _id < minimum:
del self._aircraft_id_map[icao]
# Finally return the new pair
return self._aircraft_id_count
def output(self, msg):
try:
sbs1_msg = self.parse(msg)
if sbs1_msg is not None:
sbs1_bytes = sbs1_msg.encode('utf-8')
for conn in self._conns[:]: #iterate over a copy of the list
conn.send(sbs1_bytes)
except socket.error:
self._conns.remove(conn)
print("Connections: ", len(self._conns))
except ADSBError:
pass
def add_pending_conns(self):
try:
conn, addr = self._s.accept()
self._conns.append(conn)
print("Connections: ", len(self._conns))
except socket.error:
pass
def current_time(self):
timenow = datetime.datetime.now()
return [timenow.strftime("%Y/%m/%d"), timenow.strftime("%H:%M:%S.%f")[0:-3]]
def decode_fs(self, fs):
if fs == 0:
return "0,0,0,0"
elif fs == 1:
return "0,0,0,1"
elif fs == 2:
return "1,0,0,0"
elif fs == 3:
return "1,0,0,1"
elif fs == 4:
return "1,0,1,"
elif fs == 5:
return "0,0,1,"
else:
return ",,,"
def parse(self, msg):
#assembles a SBS-1-style output string from the received message
msgtype = msg.data["df"]
outmsg = None
if msgtype == 0:
outmsg = self.pp0(msg.data, msg.ecc)
elif msgtype == 4:
outmsg = self.pp4(msg.data, msg.ecc)
elif msgtype == 5:
outmsg = self.pp5(msg.data, msg.ecc)
elif msgtype == 11:
outmsg = self.pp11(msg.data, msg.ecc)
elif msgtype == 17:
outmsg = self.pp17(msg.data)
else:
raise NoHandlerError(msgtype)
return outmsg
def pp0(self, shortdata, ecc):
[datestr, timestr] = self.current_time()
aircraft_id = self.get_aircraft_id(ecc)
retstr = "MSG,7,0,%i,%06X,%i,%s,%s,%s,%s,,%s,,,,,,,,,," % (aircraft_id, ecc, aircraft_id+100, datestr, timestr, datestr, timestr, air_modes.decode_alt(shortdata["ac"], True))
if shortdata["vs"]:
retstr += "1\r\n"
else:
retstr += "0\r\n"
return retstr
def pp4(self, shortdata, ecc):
[datestr, timestr] = self.current_time()
aircraft_id = self.get_aircraft_id(ecc)
retstr = "MSG,5,0,%i,%06X,%i,%s,%s,%s,%s,,%s,,,,,,," % (aircraft_id, ecc, aircraft_id+100, datestr, timestr, datestr, timestr, air_modes.decode_alt(shortdata["ac"], True))
return retstr + self.decode_fs(shortdata["fs"]) + "\r\n"
def pp5(self, shortdata, ecc):
[datestr, timestr] = self.current_time()
aircraft_id = self.get_aircraft_id(ecc)
retstr = "MSG,6,0,%i,%06X,%i,%s,%s,%s,%s,,,,,,,,%04i," % (aircraft_id, ecc, aircraft_id+100, datestr, timestr, datestr, timestr, air_modes.decode_id(shortdata["id"]))
return retstr + self.decode_fs(shortdata["fs"]) + "\r\n"
def pp11(self, shortdata, ecc):
[datestr, timestr] = self.current_time()
aircraft_id = self.get_aircraft_id(shortdata["aa"])
return "MSG,8,0,%i,%06X,%i,%s,%s,%s,%s,,,,,,,,,,,,\r\n" % (aircraft_id, shortdata["aa"], aircraft_id+100, datestr, timestr, datestr, timestr)
def pp17(self, data):
icao24 = data["aa"]
aircraft_id = self.get_aircraft_id(icao24)
bdsreg = data["me"].get_type()
retstr = None
#we'll get better timestamps later, hopefully with actual VRT time
#in them
[datestr, timestr] = self.current_time()
if bdsreg == 0x08:
# Aircraft Identification
(msg, typestring) = air_modes.parseBDS08(data)
retstr = "MSG,1,0,%i,%06X,%i,%s,%s,%s,%s,%s,,,,,,,,,,,\r\n" % (aircraft_id, icao24, aircraft_id+100, datestr, timestr, datestr, timestr, msg)
elif bdsreg == 0x06:
# Surface position measurement
[ground_track, decoded_lat, decoded_lon, rnge, bearing] = air_modes.parseBDS06(data, self._cpr)
altitude = 0
if decoded_lat is None: #no unambiguously valid position available
retstr = None
else:
retstr = "MSG,2,0,%i,%06X,%i,%s,%s,%s,%s,,%i,,,%.5f,%.5f,,,,0,0,0\r\n" % (aircraft_id, icao24, aircraft_id+100, datestr, timestr, datestr, timestr, altitude, decoded_lat, decoded_lon)
elif bdsreg == 0x05:
# Airborne position measurements
# WRONG (rnge, bearing), is this still true?
[altitude, decoded_lat, decoded_lon, rnge, bearing] = air_modes.parseBDS05(data, self._cpr)
if decoded_lat is None: #no unambiguously valid position available
retstr = None
else:
retstr = "MSG,3,0,%i,%06X,%i,%s,%s,%s,%s,,%i,,,%.5f,%.5f,,,,0,0,0\r\n" % (aircraft_id, icao24, aircraft_id+100, datestr, timestr, datestr, timestr, altitude, decoded_lat, decoded_lon)
elif bdsreg == 0x09:
# Airborne velocity measurements
# WRONG (heading, vert_spd), Is this still true?
subtype = data["bds09"].get_type()
if subtype == 0 or subtype == 1:
parser = air_modes.parseBDS09_0 if subtype == 0 else air_modes.parseBDS09_1
[velocity, heading, vert_spd] = parser(data)
retstr = "MSG,4,0,%i,%06X,%i,%s,%s,%s,%s,,,%.1f,%.1f,,,%i,,,,,\r\n" % (aircraft_id, icao24, aircraft_id+100, datestr, timestr, datestr, timestr, velocity, heading, vert_spd)
return retstr
| gpl-3.0 | 689,276,427,596,621,600 | 35 | 191 | 0.624875 | false |
ifuding/Kaggle | ADDC/Code/blend.py | 1 | 5606 | import numpy as np
import pandas as pd
import time
from time import gmtime, strftime
from main import *
from sklearn import metrics
import lightgbm as lgb
from tensorflow.python.keras.models import load_model
from keras_train import DNN_Model
# len_train = 20905996
# len_valide = 20000001
# df = pd.read_pickle('MinMaxNormTrainValTest.pickle')
def load_val():
valide_df = load_valide_data()
# valide_df = df[len_train: len_train + len_valide]
valide_id = valide_df['id'].values
valide_data = valide_df[keras_train.USED_FEATURE_LIST].values.astype(DENSE_FEATURE_TYPE)
valide_label = valide_df['is_attributed'].values.astype(np.uint8)
del valide_df
gc.collect()
pos_cnt = valide_label.sum()
neg_cnt = len(valide_label) - pos_cnt
print ("valide type: {0} valide size: {1} valide data pos: {2} neg: {3}".format(
valide_data.dtype, len(valide_data), pos_cnt, neg_cnt))
return valide_data, valide_label, valide_id
def load_test():
test_df = load_test_data()
test_data = test_df[keras_train.USED_FEATURE_LIST].values.astype(DENSE_FEATURE_TYPE)
test_id = test_df['click_id'].values #.astype(np.uint32)
print ("test type {0}".format(test_data.dtype))
del test_df
gc.collect()
return test_data, test_id
def lgb_pred(valide_data, valide_label):
# load lightgbm model to predict
bst = lgb.Booster(model_file= FLAGS.input_previous_model_path + '/model_098597.txt')
lgb_pred = bst.predict(valide_data, num_iteration=FLAGS.best_iteration)
score = metrics.roc_auc_score(valide_label, lgb_pred)
print ("LightGbm AUC: {0}".format(score))
return lgb_pred
def keras_pred(valide_data, valide_label):
model = load_model(FLAGS.input_previous_model_path + '/model_0986303.h5')
print (model.summary())
y_pred = model.predict(DNN_Model.DNN_DataSet(None, valide_data), verbose=0, batch_size=10240)
score = metrics.roc_auc_score(valide_label, y_pred)
print ("Keras AUC: {0}".format(score))
return y_pred
def blend(sub1, sub2):
data_dir = "../Data/"
sub1 = pd.read_csv(data_dir + 'sub_2018_05_04_03_53_23.csv')
sub2 = pd.read_csv(data_dir + 'sub_2018_05_07_09_48_13.csv')
target = 'is_attributed'
#blend 1
blend = pd.merge(sub1, sub2, how='left', on='click_id')
print (blend.info())
blend[target] = np.sqrt(blend[target + "_x"] * blend[target+'_y'])
    blend[target] = blend[target].clip(0 + 1e-12, 1 - 1e-12)
time_label = strftime('_%Y_%m_%d_%H_%M_%S', gmtime())
sub_name = data_dir + "sub" + time_label + ".csv"
blend[['click_id', target]].to_csv(sub_name, index=False)
def blend_tune(valide_label, sub1, sub2):
sub1 = sub1.reshape((len(valide_label), -1))
sub2 = sub2.reshape((len(valide_label), -1))
print (sub1.shape)
print (sub2.shape)
blend1 = 0.97 * sub1 + 0.03 * sub2
blend2 = np.sqrt((sub1 ** 0.45) * (sub2 ** 0.55))
#blend = np.sqrt(sub1 * sub2)
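    # Grid-search the mixing exponent r in [0.30, 1.00]: the candidate blend is
    # blend1**r * blend2**(1-r), and the loop reports the validation AUC for each r.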
for i in range(30, 101, 1):
r = float(i) / 100
blend = (blend1 ** r) * (blend2 ** (1 - r))
score = metrics.roc_auc_score(valide_label, blend)
print ("r : {0} Blend AUC: {1}".format(r, score))
if __name__ == "__main__":
if FLAGS.blend_tune:
valide_data, valide_label, valide_id = load_val()
k_pred = keras_pred(valide_data, valide_label)
# df = pd.DataFrame()
# df['id'] = valide_id
# df['label'] = valide_label
# df['re'] = k_pred
# df = pd.read_pickle(path + 'valide_label_re.pickle')
# pre_k_pred = np.load('../Data/TrainValNuniqueVarCumNextClickReversecum/k_pred.npy')
# pre_l_pred = np.load('../Data/TrainValNuniqueVarCumNextClickReversecum/l_pred.npy')
# pre_label = np.load('../Data/TrainValNuniqueVarCumNextClickReversecum/valide_label.npy')
# pre_valide_id = np.load('../Data/TrainValNuniqueVarCumNextClickReversecum/valide_id.npy')
# pre_df = pd.DataFrame()
# pre_df['id'] = pre_valide_id
# pre_df['label'] = pre_label
# pre_df['re'] = np.sqrt(pre_k_pred.reshape((len(pre_label), -1)) * pre_l_pred.reshape((len(pre_label), -1)))
# print (pre_df.head)
# pre_df.to_pickle('../Data/TrainValNuniqueVarCumNextClickReversecum/valide_label_re.pickle')
# pre_df = pd.read_pickle('../Data/TrainValNuniqueVarCumNextClickReversecum/valide_label_re.pickle')
# df = pd.merge(df, pre_df, how = 'left', on = 'id')
# print (df.info())
# score = metrics.roc_auc_score(df['label_x'].values, df['re_y'].values)
# print ("pre Blend AUC: {0}".format(score))
# score = metrics.roc_auc_score(df['label_x'].values, np.sqrt(df['re_x'].values * df['re_y'].values))
# print ("Blend AUC: {0}".format(score))
# # l_pred = lgb_pred(valide_data, valide_label)
# np.save(path + '/valide_id.npy', valide_id)
# np.save('k_pred.npy', k_pred)
# np.save('l_pred.npy', l_pred)
# np.save('valide_label.npy', valide_label)
# valide_label = np.load('valide_label.npy')
# k_pred = np.load('k_pred.npy')
# l_pred = np.load('l_pred.npy')
# blend_tune(valide_label, k_pred, l_pred)
else:
# test_data, test_id = load_test()
# k_pred = keras_pred(test_data, test_id)
# sub = pd.DataFrame()
# sub['click_id'] = test_id
# sub['is_attributed'] = k_pred
blend(None, None)
# l_pred = lgb_pred(valide_data, valide_label)
| apache-2.0 | -9,144,364,364,217,793,000 | 42.492063 | 117 | 0.603639 | false |
fulfilio/trytond-picking-list-report | picking_list_report.py | 1 | 2960 | # -*- coding: utf-8 -*-
"""
picking_list_report.py
"""
from trytond.pool import PoolMeta, Pool
from trytond.transaction import Transaction
from openlabs_report_webkit import ReportWebkit
__metaclass__ = PoolMeta
__all__ = ['PickingListReport']
class ReportMixin(ReportWebkit):
"""
Mixin Class to inherit from, for all HTML reports.
"""
@classmethod
def wkhtml_to_pdf(cls, data, options=None):
"""
Call wkhtmltopdf to convert the html to pdf
"""
Company = Pool().get('company.company')
company = ''
if Transaction().context.get('company'):
company = Company(Transaction().context.get('company')).party.name
options = {
'margin-bottom': '0.50in',
'margin-left': '0.50in',
'margin-right': '0.50in',
'margin-top': '0.50in',
'footer-font-size': '8',
'footer-left': company,
'footer-line': '',
'footer-right': '[page]/[toPage]',
'footer-spacing': '5',
'page-size': 'Letter',
}
return super(ReportMixin, cls).wkhtml_to_pdf(
data, options=options
)
class PickingListReport(ReportMixin):
"""
HTML Report for Picking List
"""
__name__ = 'stock.shipment.out.picking_list.html'
@classmethod
def parse(cls, report, records, data, localcontext):
compare_context = cls.get_compare_context(report, records, data)
sorted_moves = {}
for shipment in records:
sorted_moves[shipment.id] = sorted(
shipment.inventory_moves,
lambda x, y: cmp(
cls.get_compare_key(x, compare_context),
cls.get_compare_key(y, compare_context)
)
)
localcontext['moves'] = sorted_moves
return super(PickingListReport, cls).parse(
report, records, data, localcontext
)
@staticmethod
def get_compare_context(report, records, data):
Location = Pool().get('stock.location')
from_location_ids = set()
to_location_ids = set()
for record in records:
for move in record.inventory_moves:
from_location_ids.add(move.from_location)
to_location_ids.add(move.to_location)
from_locations = Location.browse(list(from_location_ids))
to_locations = Location.browse(list(to_location_ids))
return {
'from_location_ids': [l.id for l in from_locations],
'to_location_ids': [l.id for l in to_locations],
}
@staticmethod
def get_compare_key(move, compare_context):
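        # Sort key: position of the move's source location, then of its destination,
        # in the pre-browsed location lists, so picking follows a stable location order.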
from_location_ids = compare_context['from_location_ids']
to_location_ids = compare_context['to_location_ids']
return [from_location_ids.index(move.from_location.id),
to_location_ids.index(move.to_location.id)]
| bsd-3-clause | -1,181,353,906,449,676,300 | 28.89899 | 78 | 0.571284 | false |
Cecca/lydoc | setup.py | 1 | 1336 | from setuptools import setup, find_packages
import io
version = dict()
with io.open("lydoc/_version.py", "r", encoding='utf-8') as fp:
exec(fp.read(), version)
with io.open("README.rst", "r", encoding='utf-8') as fp:
long_desc = fp.read()
setup(
name='lydoc',
version=version['__version__'],
author='Matteo Ceccarello',
author_email='[email protected]',
license='GPLv3',
url='https://github.com/Cecca/lydoc',
description='An API documentation generator for Lilypond files',
long_description=long_desc,
packages=find_packages(exclude=['docs', 'tests*']),
include_package_data=True,
install_requires=[
'jinja2',
'grako'
],
extras_require={
'dev': ['pyinstaller'],
'test': ['coverage', 'nose'],
},
    classifiers=[
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Development Status :: 3 - Alpha",
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
    ],
entry_points={
'console_scripts': [
'lydoc=lydoc:main'
]
}
)
| gpl-3.0 | 2,782,565,075,269,225,000 | 28.043478 | 75 | 0.583832 | false |
susundberg/Networkdroid | src/main_client_speak.py | 1 | 4093 |
import json
import argparse
import sys
import zmq
import datetime
from Sundberg.Logger import *
from subprocess import call
def get_command_line_arguments( ):
parser = argparse.ArgumentParser(description='Speak-aloud espeak client')
parser.add_argument("configfile", help = "Set configuration file to be used")
return parser.parse_args()
def main( args ):
log = Logger("client_speak.log")
log.info("Using configuration file: '%s'" % args.configfile )
with open( args.configfile ) as fid:
config = json.loads( fid.read() )
# Handle the module print messages
client = SpeakClient( config, log )
client.mainloop()
class SpeakClient:
def __init__(self, config, log ):
self.module_alives = {}
self.speak_lines = {
"pingalive" : "Connection to droid server",
"pingip" : "Connection to internet",
"pinghost" : "Domain name server",
"ssh" : "Nat compromised"
}
self.timeout = int( config["client_speak_timeout"] )
self.address = config["protocol"] + ":" + config["port_client_pub"]
self.log = log
self.speak_command = config["client_speak_cmd"]
def mainloop(self):
context = zmq.Context(1)
receive_socket = context.socket(zmq.SUB)
receive_socket.connect( self.address )
receive_socket.setsockopt(zmq.SUBSCRIBE, "")
receive_socket.setsockopt(zmq.RCVTIMEO, self.timeout )
deadline = datetime.timedelta( milliseconds = self.timeout )
while( True ):
# We need to first check if we have messages waiting, if yes, process those
# If not, do dead module check and enter timeout receive
# We need to do this to avoid a) dead modules check omitted b) dead modules check done while we have lines
# waiting to be processed (since the speak can take several secs)
# First check if we have messages waiting
try:
message = receive_socket.recv( flags = zmq.NOBLOCK )
self.process_message( message )
continue
except zmq.ZMQError as error:
if error.errno != zmq.EAGAIN :
raise( error )
self.check_for_dead_modules( deadline )
# No messages ready, do timeout receive
try:
message = receive_socket.recv( )
self.process_message( message )
except zmq.ZMQError as error:
if error.errno != zmq.EAGAIN :
raise( error )
def process_message( self, message ):
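    # Heartbeat lines look like "<module name> : HB"; the module name picks the phrase
    # to speak when the module first appears (and later when it times out).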
fields = message.split(":")
if len(fields) == 2 and fields[1].strip() == "HB":
module_name = fields[0].strip().lower()
if module_name in self.speak_lines:
if module_name not in self.module_alives:
self.speak_aloud( self.speak_lines[ module_name ] + " OK" )
self.log.info("Module '%s' ONLINE" % module_name )
self.module_alives[ module_name ] = datetime.datetime.now()
else:
print "GOT LINE:" + message
def check_for_dead_modules(self, deadline ):
# Check for timeouted speak aloud 'connection dropped'
current_time = datetime.datetime.now()
to_remove = []
for module_name in self.module_alives:
elapsed_since_last_hb = current_time - self.module_alives[ module_name ]
if elapsed_since_last_hb > deadline:
self.speak_aloud( self.speak_lines[ module_name ] + " got dropped")
to_remove.append( module_name )
self.log.info("Module '%s' went offline" % module_name )
for module_name in to_remove:
del self.module_alives[ module_name ]
def speak_aloud( self, line ):
retcode = call( [ self.speak_command, line ] )
if retcode != 0 :
self.log.error("Call '%s' returned nonzero: %d" % ( self.speak_command, retcode ) )
self.log.debug("Speak aloud: " + line )
if __name__ == "__main__":
sys.exit( main( get_command_line_arguments() ) )
| gpl-2.0 | -7,579,788,459,842,370,000 | 33.686441 | 113 | 0.597361 | false |
open-synergy/opnsynid-stock-logistics-warehouse | stock_picking_type_accounting_configuration/models/stock_move_account_source.py | 1 | 1177 | # 2020 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, api, fields
from openerp.tools.safe_eval import safe_eval as eval
class StockMoveAccountSource(models.Model):
_name = "stock.move_account_source"
_description = "Stock Move Account Source"
name = fields.Char(
string="Source Name",
required=True,
)
active = fields.Boolean(
string="Active",
default=True,
)
note = fields.Text(
string="Note",
)
python_code = fields.Text(
string="Python Code for Account Source",
required=True,
default="result = False",
)
def _get_localdict(self, move):
self.ensure_one()
return {
"env": self.env,
"move": move,
}
@api.multi
def _get_account(self, move):
self.ensure_one()
localdict = self._get_localdict(move)
try:
eval(self.python_code,
localdict, mode="exec", nocopy=True)
result = localdict["result"]
except: # noqa: E722
result = False
return result
| agpl-3.0 | -1,646,716,042,506,528,800 | 24.586957 | 68 | 0.571793 | false |
F1ashhimself/UISoup | uisoup/interfaces/i_soup.py | 1 | 2577 | #!/usr/bin/env python
# Copyright (c) 2014-2017 Max Beloborodko.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__author__ = '[email protected]'
from abc import ABCMeta, abstractmethod, abstractproperty
class ISoup(object):
"""
Class to work with UI objects.
"""
__metaclass__ = ABCMeta
@abstractproperty
def mouse(self):
"""
Instance of IMouse implementation.
"""
@abstractproperty
def keyboard(self):
"""
Instance of IKeyboard implementation.
"""
@abstractmethod
def get_object_by_coordinates(self, x, y):
"""
Gets object by coordinates.
:param int x: x coordinate.
:param int y: y coordinate.
:rtype: uisoup.interfaces.i_element.IElement
:return: object that was found by given coordinates.
"""
@abstractmethod
def is_window_exists(self, obj_handle):
"""
Verifies is window exists.
:param str | int obj_handle: window name (string) or window
handler (int) otherwise Desktop Window will be checked.
:rtype: bool
:return: True if window exists otherwise False.
"""
@abstractmethod
def get_window(self, obj_handle=None):
"""
Gets window.
:param str | int obj_handle: window name (string) or window
handler (int) otherwise Desktop Window will be checked.
:rtype: uisoup.interfaces.i_element.IElement
:return: window object.
"""
@abstractmethod
def get_visible_window_list(self):
"""
Gets list of visible windows.
:rtype: list[uisoup.interfaces.i_element.IElement]
:return: list of visible windows.
"""
@abstractmethod
def get_visible_object_list(self, window_name):
"""
Gets list of visible objects for specified window.
:param str window_name: window name.
:rtype: list[uisoup.interfaces.i_element.IElement]
:return: list of visible windows.
"""
| apache-2.0 | 5,912,929,335,668,427,000 | 27.318681 | 78 | 0.629802 | false |
desktopbsd/gbi | src/create_cfg.py | 1 | 22837 | #!/usr/bin/env python
#
# Copyright (c) 2013 GhostBSD
#
# See COPYING for licence terms.
#
# create_cfg.py v 1.4 Friday, January 17 2014 Eric Turgeon
#
import os
import pickle
from subprocess import Popen
# Directory use from the installer.
tmp = "/tmp/.gbi/"
installer = "/usr/local/lib/gbi/"
start_Install = 'python %sinstall.py' % installer
# Installer data file.
disk = '%sdisk' % tmp
layout = '%slayout' % tmp
model = '%smodel' % tmp
pcinstallcfg = '%spcinstall.cfg' % tmp
user_passwd = '%suser' % tmp
language = '%slanguage' % tmp
dslice = '%sslice' % tmp
left = '%sleft' % tmp
partlabel = '%spartlabel' % tmp
timezone = '%stimezone' % tmp
KBFile = '%skeyboard' % tmp
boot_file = '%sboot' % tmp
disk_schem = '%sscheme' % tmp
zfs_config = '%szfs_config' % tmp
class gbsd_cfg():
def __init__(self):
f = open('%spcinstall.cfg' % tmp, 'w')
# Installation Mode
f.writelines('# Installation Mode\n')
f.writelines('installMode=fresh\n')
f.writelines('installInteractive=no\n')
f.writelines('installType=GhostBSD\n')
f.writelines('installMedium=dvd\n')
f.writelines('packageType=livecd\n')
# System Language
lang = open(language, 'r')
lang_output = lang.readlines()[0].strip().split()[0].strip()
f.writelines('\n# System Language\n\n')
f.writelines('localizeLang=%s\n' % lang_output)
os.remove(language)
# Keyboard Setting
if os.path.exists(model):
f.writelines('\n# Keyboard Setting\n')
os.remove(model)
if os.path.exists(KBFile):
rkb = open(KBFile, 'r')
kb = rkb.readlines()
if len(kb) == 2:
l_output = kb[0].strip().partition('-')[2].strip()
f.writelines('localizeKeyLayout=%s\n' % l_output)
v_output = kb[1].strip().partition(':')[2].strip()
f.writelines('localizeKeyVariant=%s\n' % v_output)
else:
l_output = kb[0].strip().partition('-')[2].strip()
f.writelines('localizeKeyLayout=%s\n' % l_output)
os.remove(KBFile)
# Timezone
if os.path.exists(timezone):
time = open(timezone, 'r')
t_output = time.readlines()[0].strip()
f.writelines('\n# Timezone\n')
f.writelines('timeZone=%s\n' % t_output)
f.writelines('enableNTP=yes\n')
os.remove(timezone)
if os.path.exists(zfs_config):
# Disk Setup
r = open(zfs_config, 'r')
zfsconf = r.readlines()
for line in zfsconf:
if 'partscheme' in line:
f.writelines(line)
read = open(boot_file, 'r')
boot = read.readlines()[0].strip()
f.writelines('bootManager=%s\n' % boot)
os.remove(boot_file)
else:
f.writelines(line)
# os.remove(zfs_config)
else:
# Disk Setup
r = open(disk, 'r')
drive = r.readlines()
d_output = drive[0].strip()
f.writelines('\n# Disk Setup\n')
f.writelines('disk0=%s\n' % d_output)
os.remove(disk)
# Partition Slice.
p = open(dslice, 'r')
line = p.readlines()
part = line[0].rstrip()
f.writelines('partition=%s\n' % part)
os.remove(dslice)
# Boot Menu
read = open(boot_file, 'r')
line = read.readlines()
boot = line[0].strip()
f.writelines('bootManager=%s\n' % boot)
#os.remove(boot_file)
# Sheme sheme
read = open(disk_schem, 'r')
shem = read.readlines()[0]
f.writelines(shem + '\n')
f.writelines('commitDiskPart\n')
# os.remove(disk_schem)
# Partition Setup
f.writelines('\n# Partition Setup\n')
part = open(partlabel, 'r')
# If slice and auto file exist add first partition line.
# But Swap need to be 0 it will take the rest of the freespace.
for line in part:
if 'BOOT' in line or 'BIOS' in line or 'UEFI' in line:
pass
else:
f.writelines('disk0-part=%s\n' % line.strip())
f.writelines('commitDiskLabel\n')
os.remove(partlabel)
# Network Configuration
f.writelines('\n# Network Configuration\n')
readu = open(user_passwd, 'rb')
uf = pickle.load(readu)
net = uf[5]
f.writelines('hostname=%s\n' % net)
# Set the root pass
f.writelines('\n# Network Configuration\n')
readr = open('%sroot' % tmp, 'rb')
rf = pickle.load(readr)
root = rf[0]
f.writelines('\n# Set the root pass\n')
f.writelines('rootPass=%s\n' % root)
# Setup our users
user = uf[0]
f.writelines('\n# Setup user\n')
f.writelines('userName=%s\n' % user)
name = uf[1]
f.writelines('userComment=%s\n' % name)
passwd = uf[2]
f.writelines('userPass=%s\n' % passwd.rstrip())
shell = uf[3]
f.writelines('userShell=%s\n' % shell)
upath = uf[4]
f.writelines('userHome=%s\n' % upath.rstrip())
f.writelines('defaultGroup=wheel\n')
f.writelines('userGroups=operator\n')
f.writelines('commitUser\n')
f.writelines('runScript=/usr/local/bin/iso_to_hd\n')
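        # Queue the LibreOffice localization package that matches the selected
        # system language (one pkg install command per supported locale).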
if "af" == lang_output:
f.writelines('runCommand=pkg install -y af-libreoffice\n')
elif "ar" == lang_output:
f.writelines('runCommand=pkg install -y ar-libreoffice\n')
elif "bg" == lang_output:
f.writelines('runCommand=pkg install -y bg-libreoffice\n')
elif "bn" == lang_output:
f.writelines('runCommand=pkg install -y bn-libreoffice\n')
elif "br" == lang_output:
f.writelines('runCommand=pkg install -y br-libreoffice\n')
elif "bs" == lang_output:
f.writelines('runCommand=pkg install -y bs-libreoffice\n')
elif "ca" == lang_output:
f.writelines('runCommand=pkg install -y ca-libreoffice\n')
elif "cs" == lang_output:
f.writelines('runCommand=pkg install -y cs-libreoffice\n')
elif "cy" == lang_output:
f.writelines('runCommand=pkg install -y cy-libreoffice\n')
elif "da" == lang_output:
f.writelines('runCommand=pkg install -y da-libreoffice\n')
elif "de" == lang_output:
f.writelines('runCommand=pkg install -y de-libreoffice\n')
elif "el" == lang_output:
f.writelines('runCommand=pkg install -y el-libreoffice\n')
elif "en_GB" == lang_output:
f.writelines('runCommand=pkg install -y en_GB-libreoffice\n')
elif "en_ZA" == lang_output:
f.writelines('runCommand=pkg install -y en_ZA-libreoffice\n')
elif "es" == lang_output:
f.writelines('runCommand=pkg install -y es-libreoffice\n')
elif "et" == lang_output:
f.writelines('runCommand=pkg install -y et-libreoffice\n')
elif "eu" == lang_output:
f.writelines('runCommand=pkg install -y eu-libreoffice\n')
elif "fa" == lang_output:
f.writelines('runCommand=pkg install -y fa-libreoffice\n')
elif "fi" == lang_output:
f.writelines('runCommand=pkg install -y fi-libreoffice\n')
elif "fr" in lang_output:
f.writelines('runCommand=pkg install -y fr-libreoffice\n')
elif "ga" == lang_output:
f.writelines('runCommand=pkg install -y ga-libreoffice\n')
elif "gb" == lang_output:
f.writelines('runCommand=pkg install -y gd-libreoffice\n')
elif "gl" == lang_output:
f.writelines('runCommand=pkg install -y gl-libreoffice\n')
elif "he" == lang_output:
f.writelines('runCommand=pkg install -y he-libreoffice\n')
elif "hi" == lang_output:
f.writelines('runCommand=pkg install -y hi-libreoffice\n')
elif "hr" == lang_output:
f.writelines('runCommand=pkg install -y hr-libreoffice\n')
elif "hu" == lang_output:
f.writelines('runCommand=pkg install -y hu-libreoffice\n')
elif "id" == lang_output:
f.writelines('runCommand=pkg install -y id-libreoffice\n')
elif "is" == lang_output:
f.writelines('runCommand=pkg install -y is-libreoffice\n')
elif "it" == lang_output:
f.writelines('runCommand=pkg install -y it-libreoffice\n')
elif "ja" == lang_output:
f.writelines('runCommand=pkg install -y ja-libreoffice\n')
elif "ko" == lang_output:
f.writelines('runCommand=pkg install -y ko-libreoffice\n')
elif "lt" == lang_output:
f.writelines('runCommand=pkg install -y lt-libreoffice\n')
elif "lv" == lang_output:
f.writelines('runCommand=pkg install -y lv-libreoffice\n')
elif "mk" == lang_output:
f.writelines('runCommand=pkg install -y mk-libreoffice\n')
elif "mn" == lang_output:
f.writelines('runCommand=pkg install -y mn-libreoffice\n')
elif "nb" == lang_output:
f.writelines('runCommand=pkg install -y nb-libreoffice\n')
elif "ne" == lang_output:
f.writelines('runCommand=pkg install -y ne-libreoffice\n')
elif "nl" == lang_output:
f.writelines('runCommand=pkg install -y nl-libreoffice\n')
elif "pa_IN" == lang_output:
f.writelines('runCommand=pkg install -y pa_IN-libreoffice\n')
elif "pl" == lang_output:
f.writelines('runCommand=pkg install -y pl-libreoffice\n')
elif "pt" == lang_output:
f.writelines('runCommand=pkg install -y pt-libreoffice\n')
elif "pt_BR" == lang_output:
f.writelines('runCommand=pkg install -y pt_BR-libreoffice\n')
elif "ro" == lang_output:
f.writelines('runCommand=pkg install -y ro-libreoffice\n')
elif "ru" == lang_output:
f.writelines('runCommand=pkg install -y ru-libreoffice\n')
elif "sd" == lang_output:
f.writelines('runCommand=pkg install -y sd-libreoffice\n')
elif "sk" == lang_output:
f.writelines('runCommand=pkg install -y sk-libreoffice\n')
elif "sl" == lang_output:
f.writelines('runCommand=pkg install -y sl-libreoffice\n')
elif "sr" == lang_output:
f.writelines('runCommand=pkg install -y sr-libreoffice\n')
elif "sv" == lang_output:
f.writelines('runCommand=pkg install -y sv-libreoffice\n')
elif "ta" == lang_output:
f.writelines('runCommand=pkg install -y ta-libreoffice\n')
elif "tg" == lang_output:
f.writelines('runCommand=pkg install -y tg-libreoffice\n')
elif "tr" == lang_output:
f.writelines('runCommand=pkg install -y tr-libreoffice\n')
elif "uk" == lang_output:
f.writelines('runCommand=pkg install -y uk-libreoffice\n')
elif "vi" == lang_output:
f.writelines('runCommand=pkg install -y vi-libreoffice\n')
elif "zh_CN" == lang_output:
f.writelines('runCommand=pkg install -y zh_CN-libreoffice\n')
elif "zh_TW" == lang_output:
f.writelines('runCommand=pkg install -y zh_TW-libreoffice\n')
elif "zu" == lang_output:
f.writelines('runCommand=pkg install -y zu-libreoffice\n')
f.close()
os.remove(user_passwd)
Popen(start_Install, shell=True)
class dbsd_cfg():
def __init__(self):
f = open('%spcinstall.cfg' % tmp, 'w')
# Installation Mode
f.writelines('# Installation Mode\n')
f.writelines('installMode=fresh\n')
f.writelines('installInteractive=no\n')
f.writelines('installType=DesktopBSD\n')
f.writelines('installMedium=dvd\n')
f.writelines('packageType=livecd\n')
# System Language
lang = open(language, 'r')
lang_output = lang.readlines()[0].strip().split()[0].strip()
f.writelines('\n# System Language\n\n')
f.writelines('localizeLang=%s\n' % lang_output)
os.remove(language)
# Keyboard Setting
if os.path.exists(model):
f.writelines('\n# Keyboard Setting\n')
os.remove(model)
if os.path.exists(KBFile):
rkb = open(KBFile, 'r')
kb = rkb.readlines()
if len(kb) == 2:
l_output = kb[0].strip().partition('-')[2].strip()
f.writelines('localizeKeyLayout=%s\n' % l_output)
v_output = kb[1].strip().partition(':')[2].strip()
f.writelines('localizeKeyVariant=%s\n' % v_output)
else:
l_output = kb[0].strip().partition('-')[2].strip()
f.writelines('localizeKeyLayout=%s\n' % l_output)
os.remove(KBFile)
# Timezone
if os.path.exists(timezone):
time = open(timezone, 'r')
t_output = time.readlines()[0].strip()
f.writelines('\n# Timezone\n')
f.writelines('timeZone=%s\n' % t_output)
f.writelines('enableNTP=yes\n')
os.remove(timezone)
if os.path.exists(zfs_config):
# Disk Setup
r = open(zfs_config, 'r')
zfsconf = r.readlines()
for line in zfsconf:
if 'partscheme' in line:
f.writelines(line)
read = open(boot_file, 'r')
boot = read.readlines()[0].strip()
f.writelines('bootManager=%s\n' % boot)
os.remove(boot_file)
else:
f.writelines(line)
# os.remove(zfs_config)
else:
# Disk Setup
r = open(disk, 'r')
drive = r.readlines()
d_output = drive[0].strip()
f.writelines('\n# Disk Setup\n')
f.writelines('disk0=%s\n' % d_output)
os.remove(disk)
# Partition Slice.
p = open(dslice, 'r')
line = p.readlines()
part = line[0].rstrip()
f.writelines('partition=%s\n' % part)
os.remove(dslice)
# Boot Menu
read = open(boot_file, 'r')
line = read.readlines()
boot = line[0].strip()
f.writelines('bootManager=%s\n' % boot)
os.remove(boot_file)
# Sheme sheme
read = open(disk_schem, 'r')
shem = read.readlines()[0]
f.writelines(shem + '\n')
f.writelines('commitDiskPart\n')
# os.remove(disk_schem)
# Partition Setup
f.writelines('\n# Partition Setup\n')
part = open(partlabel, 'r')
# If slice and auto file exist add first partition line.
# But Swap need to be 0 it will take the rest of the freespace.
for line in part:
if 'BOOT' in line:
pass
else:
f.writelines('disk0-part=%s\n' % line.strip())
f.writelines('commitDiskLabel\n')
os.remove(partlabel)
# Network Configuration
f.writelines('\n# Network Configuration\n')
readu = open(user_passwd, 'rb')
uf = pickle.load(readu)
net = uf[5]
f.writelines('hostname=%s\n' % net)
# Set the root pass
f.writelines('\n# Network Configuration\n')
readr = open('%sroot' % tmp, 'rb')
rf = pickle.load(readr)
root = rf[0]
f.writelines('\n# Set the root pass\n')
f.writelines('rootPass=%s\n' % root)
# Setup our users
user = uf[0]
f.writelines('\n# Setup user\n')
f.writelines('userName=%s\n' % user)
name = uf[1]
f.writelines('userComment=%s\n' % name)
passwd = uf[2]
f.writelines('userPass=%s\n' % passwd.rstrip())
shell = uf[3]
f.writelines('userShell=%s\n' % shell)
upath = uf[4]
f.writelines('userHome=%s\n' % upath.rstrip())
f.writelines('defaultGroup=wheel\n')
f.writelines('userGroups=operator\n')
f.writelines('commitUser\n')
f.writelines('runScript=/usr/local/bin/iso_to_hd\n')
if "af" == lang_output:
f.writelines('runCommand=pkg install -y af-libreoffice\n')
elif "ar" == lang_output:
f.writelines('runCommand=pkg install -y ar-libreoffice\n')
elif "bg" == lang_output:
f.writelines('runCommand=pkg install -y bg-libreoffice\n')
elif "bn" == lang_output:
f.writelines('runCommand=pkg install -y bn-libreoffice\n')
elif "br" == lang_output:
f.writelines('runCommand=pkg install -y br-libreoffice\n')
elif "bs" == lang_output:
f.writelines('runCommand=pkg install -y bs-libreoffice\n')
elif "ca" == lang_output:
f.writelines('runCommand=pkg install -y ca-libreoffice\n')
elif "cs" == lang_output:
f.writelines('runCommand=pkg install -y cs-libreoffice\n')
elif "cy" == lang_output:
f.writelines('runCommand=pkg install -y cy-libreoffice\n')
elif "da" == lang_output:
f.writelines('runCommand=pkg install -y da-libreoffice\n')
elif "de" == lang_output:
f.writelines('runCommand=pkg install -y de-libreoffice\n')
elif "el" == lang_output:
f.writelines('runCommand=pkg install -y el-libreoffice\n')
elif "en_GB" == lang_output:
f.writelines('runCommand=pkg install -y en_GB-libreoffice\n')
elif "en_ZA" == lang_output:
f.writelines('runCommand=pkg install -y en_ZA-libreoffice\n')
elif "es" == lang_output:
f.writelines('runCommand=pkg install -y es-libreoffice\n')
elif "et" == lang_output:
f.writelines('runCommand=pkg install -y et-libreoffice\n')
elif "eu" == lang_output:
f.writelines('runCommand=pkg install -y eu-libreoffice\n')
elif "fa" == lang_output:
f.writelines('runCommand=pkg install -y fa-libreoffice\n')
elif "fi" == lang_output:
f.writelines('runCommand=pkg install -y fi-libreoffice\n')
elif "fr" in lang_output:
f.writelines('runCommand=pkg install -y fr-libreoffice\n')
elif "ga" == lang_output:
f.writelines('runCommand=pkg install -y ga-libreoffice\n')
elif "gb" == lang_output:
f.writelines('runCommand=pkg install -y gd-libreoffice\n')
elif "gl" == lang_output:
f.writelines('runCommand=pkg install -y gl-libreoffice\n')
elif "he" == lang_output:
f.writelines('runCommand=pkg install -y he-libreoffice\n')
elif "hi" == lang_output:
f.writelines('runCommand=pkg install -y hi-libreoffice\n')
elif "hr" == lang_output:
f.writelines('runCommand=pkg install -y hr-libreoffice\n')
elif "hu" == lang_output:
f.writelines('runCommand=pkg install -y hu-libreoffice\n')
elif "id" == lang_output:
f.writelines('runCommand=pkg install -y id-libreoffice\n')
elif "is" == lang_output:
f.writelines('runCommand=pkg install -y is-libreoffice\n')
elif "it" == lang_output:
f.writelines('runCommand=pkg install -y it-libreoffice\n')
elif "ja" == lang_output:
f.writelines('runCommand=pkg install -y ja-libreoffice\n')
elif "ko" == lang_output:
f.writelines('runCommand=pkg install -y ko-libreoffice\n')
elif "lt" == lang_output:
f.writelines('runCommand=pkg install -y lt-libreoffice\n')
elif "lv" == lang_output:
f.writelines('runCommand=pkg install -y lv-libreoffice\n')
elif "mk" == lang_output:
f.writelines('runCommand=pkg install -y mk-libreoffice\n')
elif "mn" == lang_output:
f.writelines('runCommand=pkg install -y mn-libreoffice\n')
elif "nb" == lang_output:
f.writelines('runCommand=pkg install -y nb-libreoffice\n')
elif "ne" == lang_output:
f.writelines('runCommand=pkg install -y ne-libreoffice\n')
elif "nl" == lang_output:
f.writelines('runCommand=pkg install -y nl-libreoffice\n')
elif "pa_IN" == lang_output:
f.writelines('runCommand=pkg install -y pa_IN-libreoffice\n')
elif "pl" == lang_output:
f.writelines('runCommand=pkg install -y pl-libreoffice\n')
elif "pt" == lang_output:
f.writelines('runCommand=pkg install -y pt-libreoffice\n')
elif "pt_BR" == lang_output:
f.writelines('runCommand=pkg install -y pt_BR-libreoffice\n')
elif "ro" == lang_output:
f.writelines('runCommand=pkg install -y ro-libreoffice\n')
elif "ru" == lang_output:
f.writelines('runCommand=pkg install -y ru-libreoffice\n')
elif "sd" == lang_output:
f.writelines('runCommand=pkg install -y sd-libreoffice\n')
elif "sk" == lang_output:
f.writelines('runCommand=pkg install -y sk-libreoffice\n')
elif "sl" == lang_output:
f.writelines('runCommand=pkg install -y sl-libreoffice\n')
elif "sr" == lang_output:
f.writelines('runCommand=pkg install -y sr-libreoffice\n')
elif "sv" == lang_output:
f.writelines('runCommand=pkg install -y sv-libreoffice\n')
elif "ta" == lang_output:
f.writelines('runCommand=pkg install -y ta-libreoffice\n')
elif "tg" == lang_output:
f.writelines('runCommand=pkg install -y tg-libreoffice\n')
elif "tr" == lang_output:
f.writelines('runCommand=pkg install -y tr-libreoffice\n')
elif "uk" == lang_output:
f.writelines('runCommand=pkg install -y uk-libreoffice\n')
elif "vi" == lang_output:
f.writelines('runCommand=pkg install -y vi-libreoffice\n')
elif "zh_CN" == lang_output:
f.writelines('runCommand=pkg install -y zh_CN-libreoffice\n')
elif "zh_TW" == lang_output:
f.writelines('runCommand=pkg install -y zh_TW-libreoffice\n')
elif "zu" == lang_output:
f.writelines('runCommand=pkg install -y zu-libreoffice\n')
f.close()
os.remove(user_passwd)
Popen(start_Install, shell=True)
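# --- Editor's sketch: illustrative alternative, not part of the original installer ---
# The long if/elif chain above maps the detected locale to its LibreOffice
# language pack. Assuming the same "<locale>-libreoffice" package naming (and
# keeping the substring match for "fr" and the "gb" -> "gd" alias), the same
# decision can be expressed as data:
SUPPORTED_LO_LOCALES = frozenset([
    "af", "ar", "bg", "bn", "br", "bs", "ca", "cs", "cy", "da", "de", "el",
    "en_GB", "en_ZA", "es", "et", "eu", "fa", "fi", "ga", "gl", "he", "hi",
    "hr", "hu", "id", "is", "it", "ja", "ko", "lt", "lv", "mk", "mn", "nb",
    "ne", "nl", "pa_IN", "pl", "pt", "pt_BR", "ro", "ru", "sd", "sk", "sl",
    "sr", "sv", "ta", "tg", "tr", "uk", "vi", "zh_CN", "zh_TW", "zu"])
def libreoffice_run_command(lang_output):
    """Return the pcinstall.cfg runCommand line for a locale, or None."""
    if "fr" in lang_output:
        locale = "fr"    # original code matches French by substring
    elif lang_output == "gb":
        locale = "gd"    # original code installs the Gaelic pack for "gb"
    elif lang_output in SUPPORTED_LO_LOCALES:
        locale = lang_output
    else:
        return None      # unsupported locale: write no runCommand line
    return 'runCommand=pkg install -y %s-libreoffice\n' % locale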
| bsd-3-clause | 3,646,173,461,506,960,000 | 43.343689 | 75 | 0.556071 | false |
Juniper/tempest | tempest/api/compute/test_extensions.py | 1 | 2103 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.api.compute import base
from tempest.common import utils
from tempest import config
from tempest.lib import decorators
CONF = config.CONF
LOG = logging.getLogger(__name__)
class ExtensionsTest(base.BaseV2ComputeTest):
@decorators.idempotent_id('3bb27738-b759-4e0d-a5fa-37d7a6df07d1')
def test_list_extensions(self):
# List of all extensions
if not CONF.compute_feature_enabled.api_extensions:
raise self.skipException('There are not any extensions configured')
extensions = self.extensions_client.list_extensions()['extensions']
ext = CONF.compute_feature_enabled.api_extensions[0]
# Log extensions list
extension_list = map(lambda x: x['alias'], extensions)
LOG.debug("Nova extensions: %s", ','.join(extension_list))
if ext == 'all':
self.assertIn('Hosts', map(lambda x: x['name'], extensions))
elif ext:
self.assertIn(ext, extension_list)
else:
raise self.skipException('There are not any extensions configured')
@decorators.idempotent_id('05762f39-bdfa-4cdb-9b46-b78f8e78e2fd')
@utils.requires_ext(extension='os-consoles', service='compute')
def test_get_extension(self):
# get the specified extensions
extension = self.extensions_client.show_extension('os-consoles')
self.assertEqual('os-consoles', extension['extension']['alias'])
| apache-2.0 | -5,326,168,622,325,801,000 | 37.236364 | 79 | 0.69805 | false |
XianliangJ/collections | CNUpdates/updates/examples/experiment_base_waxman.py | 1 | 1647 | from waxman_topo import Topology
global networkSize
networkSize = None
def set_size(size):
global networkSize
networkSize = size
def links_to_remove(version, graph):
if version == 0:
return []
if len(graph.coreSwitches) < 2:
return []
return [ (graph.coreSwitches[idx], graph.edgeSwitches[idx+version]) for idx in range(len(graph.coreSwitches))]
def nodes_to_remove(version, graph):
if version == 0:
return []
return [ host for host in graph.hosts() if host % 10 == (version + 1) ]
edges_to_remove = [ [(101,107),(103,108),(104,108)],
[(101,108),(103,107),(105,108)],
[] ]
def switches_to_remove(version, graph):
if version == 0:
return []
return [ core for core in graph.coreSwitches if core % 5 == (version + 1) ]
def _topology1(version, topology=Topology):
global networkSize
graph = topology(networkSize).nx_graph()
graph.remove_nodes_from(nodes_to_remove(version, graph))
graph.remove_edges_from(edges_to_remove[0])
return graph
def _topology2(version, topology=Topology):
global networkSize
graph = topology(networkSize).nx_graph()
graph.remove_nodes_from(nodes_to_remove(0, graph))
graph.remove_edges_from(edges_to_remove[version])
return graph
def _topology3(version, topology=Topology):
global networkSize
graph = topology(networkSize).nx_graph()
graph.remove_nodes_from(nodes_to_remove(version, graph))
graph.remove_edges_from(edges_to_remove[version])
return graph
topologies = [ _topology1,
_topology2,
_topology3 ]
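# --- Editor's sketch: illustrative usage only (assumes waxman_topo is importable) ---
# Each callable in `topologies` builds the Waxman graph for one failure scenario:
# version 0 leaves the network intact, higher versions drop hosts/links/switches.
if __name__ == "__main__":
    set_size(20)  # hypothetical network size
    for scenario, build in enumerate(topologies):
        graph = build(1)  # graph after the version-1 failures
        print("scenario %d: %d nodes, %d edges" % (
            scenario, graph.number_of_nodes(), graph.number_of_edges()))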
| gpl-3.0 | -3,425,362,455,485,318,700 | 30.075472 | 114 | 0.645416 | false |
Sybrand/digital-panda | AutoUpdatePy/updater.py | 1 | 10275 | import os
import urllib
import httplib
import json
import hashlib
import logging
import time
import zipfile
import pythoncom
import ConfigParser
from win32com.client import Dispatch
class AutoUpdate:
def __init__(self, parent, upgradeHost):
self.parent = parent
        # TODO: the config stuff is very much stolen from panda-tray - need
# to pull this into a common place!
appDataFolder = os.environ['APPDATA']
configFolder = os.path.join(appDataFolder, 'Digital Panda')
configFilePath = os.path.join(configFolder, 'settings')
if not upgradeHost:
self.upgradeHost = self._get_config_key(configFilePath, "advanced", "upgrade_url")
if not self.upgradeHost:
# we default to the normal place to get this
self.upgradeHost = 'www.digitalpanda.co.za'
else:
self.upgradeHost = upgradeHost
logging.debug('AutoUpdate.__init__: self.upgradeHost is %s' % self.upgradeHost)
def _get_config_key(self, configFilePath, section, key):
config = ConfigParser.RawConfigParser()
config.read(configFilePath)
try:
return config.get(section, key)
except:
return ''
def IsInstalled(self):
# check to see if digital panda is installed
versionPath = self.GetCurrentVersionPath()
if os.path.exists(versionPath):
# is the application installed?
applicationPath = self.GetPandaPath()
return os.path.exists(applicationPath)
return False
def GetCurrentVersionPath(self):
return os.path.join(self.GetApplicationPath(), "version.txt")
def GetPandaExeName(self):
return "panda-tray-w.exe"
def GetPandaPath(self):
currentVersion, currentLocation = self.GetCurrentVersion()
return os.path.join(self.GetApplicationPath(), currentLocation,
self.GetPandaExeName())
def GetApplicationPath(self):
return os.path.join(os.environ['APPDATA'], 'Digital Panda')
# we have a rather elaborate way of figuring out the current version
    # there are two ways to run this class - from within an application - in which case
# using the compiled version number would make sense
# or - from an auto-update application, in which case the compiled version number
# doesn't make sense - since we're asking about another application!
def GetCurrentVersion(self):
versionPath = self.GetCurrentVersionPath()
version = 0
location = None
if os.path.exists(versionPath):
f = open(versionPath, 'rt')
data = f.read()
versionJson = json.loads(data)
f.close()
logging.info('%r' % versionJson['version'])
version = versionJson['version']
location = versionJson['location']
return (version, location)
def GetAvailableVersion(self):
logging.debug('creating http connection to %s' % self.upgradeHost)
connection = httplib.HTTPConnection(host=self.upgradeHost, port=80, strict=False)
infoLocation = '/update/win7_32.txt'
logging.debug('looking for update info @ %s' % infoLocation)
connection.request('GET', infoLocation)
result = connection.getresponse()
if result.status != 200:
raise Exception('unexpected response: %r' % result.status)
data = result.read()
return json.loads(data)
def GetUpdatePath(self, version):
return os.path.join(self.GetApplicationPath(),
"updates",
os.path.basename(version['location']))
def GetHashFromFile(self, path):
md5 = hashlib.md5()
with open(path, 'rb') as f:
while True:
data = f.read(1048576)
if not data:
break
md5.update(data)
return md5.hexdigest()
def IsFileOk(self, filePath, expectedHash):
if os.path.exists(filePath):
logging.info('%r already exists!' % (filePath))
fileHash = self.GetHashFromFile(filePath)
logging.debug('comparing %r with %r' % (fileHash, expectedHash))
return fileHash == expectedHash
return False
def GetFileResumePosition(self, path):
if os.path.exists(path):
return os.path.getsize(path)
return 0
def DownloadUpdate(self, version):
filePath = self.GetUpdatePath(version)
if self.IsFileOk(filePath, version['hash']):
# file is there - and ok - no need to download
return True
logging.debug('going to download file...')
# download the file...
logging.debug('version info: %r' % version)
fileSize = version['fileSize']
logging.debug('getting the resume position...')
totalBytesRead = self.GetFileResumePosition(filePath)
logging.debug('current file size = %r' % totalBytesRead)
if totalBytesRead >= fileSize:
logging.info('deleting the existing file - it''s too big!')
# if the total bytes read is more than the expected
# file size - we need to get rid of the existing file
os.remove(filePath)
totalBytesRead = 0
makingProgress = True
while totalBytesRead < fileSize and makingProgress:
makingProgress = False
resumePosition = self.GetFileResumePosition(filePath)
totalBytesRead = resumePosition
connection = httplib.HTTPConnection(version['host'])
resume = 'bytes=%d-%d' % (resumePosition, fileSize)
logging.info('Range: %s' % resume)
headers = {'Range': resume}
location = urllib.quote(version['location'])
logging.info(location)
connection.request('GET', location, None, headers)
result = connection.getresponse()
logging.info('request status: %r' % result.status)
if not (result.status == 200 or result.status == 206):
raise Exception(result.status)
if totalBytesRead < fileSize:
# if the file is greater than 0, and smaller than the file size,
# we read it appended
targetFile = open(filePath, 'a+b')
else:
# make sure totalBytes read is reset to 0!
targetFileDir = os.path.dirname(filePath)
if not os.path.exists(targetFileDir):
os.makedirs(targetFileDir)
targetFile = open(filePath, 'wb')
chunkSize = 1048576
data = result.read(chunkSize)
bytesRead = len(data)
logging.info('read %r bytes' % bytesRead)
while totalBytesRead < fileSize:
if data:
bytesRead = len(data)
totalBytesRead += bytesRead
logging.info('read %d / %d bytes' %
(totalBytesRead, fileSize))
makingProgress = True
targetFile.write(data)
self.UpdateProgress(totalBytesRead, fileSize)
else:
time.sleep(1)
data = result.read(chunkSize)
logging.debug('done reading file!')
targetFile.flush()
targetFile.close()
fileHash = self.GetHashFromFile(filePath)
logging.info('done downloading file - comparing hash: %r == %r' %
(fileHash, version['hash']))
return fileHash == version['hash']
def GetShortcutPath(self):
return os.path.join(self.GetApplicationPath(), "Digital Panda.lnk")
def InstallUpdate(self, version):
updatePath = self.GetUpdatePath(version)
applicationPath = self.GetApplicationPath()
fileName = os.path.basename(updatePath)
end = fileName.rfind('.')
if not (end > 0):
end = len(fileName)
directoryName = fileName[0: end]
targetPath = os.path.join(applicationPath, directoryName)
if not os.path.exists(targetPath):
os.makedirs(targetPath)
zfile = zipfile.ZipFile(updatePath)
for name in zfile.namelist():
(dirname, filename) = os.path.split(name)
targetDir = os.path.join(targetPath, dirname)
if not os.path.exists(targetDir):
os.makedirs(targetDir)
if filename:
logging.info('filename = %r' % filename)
targetFile = os.path.join(targetPath, dirname, filename)
fd = open(targetFile, 'wb')
fd.write(zfile.read(name))
fd.close()
pandaPath = os.path.join(targetPath, self.GetPandaExeName())
logging.info('pandaPath = %r' % pandaPath)
workingDirectory = os.path.dirname(pandaPath)
shortcutPath = self.GetShortcutPath()
pythoncom.CoInitialize()
shell = Dispatch('WScript.Shell')
shortcut = shell.CreateShortCut(shortcutPath)
shortcut.Targetpath = pandaPath
shortcut.WorkingDirectory = workingDirectory
shortcut.Description = 'The Digital Panda'
shortcut.IconLocation = pandaPath
shortcut.save()
fd = open(self.GetCurrentVersionPath(), 'w')
versionFile = ('{"version": %r, "location":"%s"}' %
(version['version'], directoryName))
fd.write(versionFile)
fd.flush()
fd.close()
return True
def Install(self):
version = self.GetAvailableVersion()
if self.DownloadUpdate(version):
return self.InstallUpdate(version)
return False
def UpdateProgress(self, bytesRead, expectedBytes):
if self.parent:
self.parent.SignalDownloadProgress(bytesRead, expectedBytes)
def UpdateAvailable(self):
version, location = self.GetCurrentVersion()
availableVersion = self.GetAvailableVersion()
logging.debug('current version: %r, available version: %r' % (version, availableVersion['version']))
return availableVersion['version'] > version
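# --- Editor's sketch: illustrative usage only, not part of the original module ---
# Typical flow with the class above: compare the installed version against the
# published one, then Install() downloads, unzips and rewrites version.txt.
# `parent` may be None when no GUI needs download-progress callbacks.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    updater = AutoUpdate(parent=None, upgradeHost=None)  # None -> default host
    if not updater.IsInstalled() or updater.UpdateAvailable():
        if updater.Install():
            logging.info("Updated; shortcut at %s", updater.GetShortcutPath())
    else:
        logging.info("Already up to date: version %r",
                     updater.GetCurrentVersion()[0])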
| mit | 3,438,924,125,686,284,300 | 38.671815 | 108 | 0.598443 | false |
egtaonline/quiesce | egta/script/innerloop.py | 1 | 5342 | """Script utility for running inner loop"""
import asyncio
import json
import logging
from concurrent import futures
from gameanalysis import regret
from egta import innerloop
from egta import schedgame
from egta.script import schedspec
from egta.script import utils
def add_parser(subparsers):
"""Create innerloop parser"""
parser = subparsers.add_parser(
"quiesce",
help="""Compute equilibria using the quiesce procedure""",
description="""Samples profiles from small restricted strategy sets,
expanding set support by best responses to candidate restricted game
equilibria. For games with a large number of players, a reduction
should be specified. The result is a list where each element specifies
an "equilibrium".""",
)
parser.add_argument(
"scheduler",
metavar="<sched-spec>",
help="""A scheduler specification,
see `egta spec` for more info.""",
)
parser.add_argument(
"--regret-thresh",
metavar="<reg>",
type=float,
default=1e-3,
help="""Regret threshold for a mixture to be considered an equilibrium.
(default: %(default)g)""",
)
parser.add_argument(
"--dist-thresh",
metavar="<norm>",
type=float,
default=0.1,
help="""Norm threshold for two mixtures to be considered distinct.
(default: %(default)g)""",
)
parser.add_argument(
"--max-restrict-size",
metavar="<support>",
type=int,
default=3,
help="""Support size threshold, beyond which restricted games are not
required to be explored. (default: %(default)d)""",
)
parser.add_argument(
"--num-equilibria",
metavar="<num>",
type=int,
default=1,
help="""Number of equilibria requested to be found. This is mainly
        useful when a game contains known degenerate equilibria, but those
strategies are still useful as deviating strategies. (default:
%(default)d)""",
)
parser.add_argument(
"--num-backups",
metavar="<num>",
type=int,
default=1,
help="""Number
        of backup restricted strategy sets to pop at a time, when no equilibria
        are confirmed in initial required set. When games get to this point
        they can quiesce slowly because this by default pops one at a time.
        Increasing this number can get games like this to quiesce more quickly,
but naturally, also schedules more, potentially unnecessary,
simulations. (default: %(default)d)""",
)
parser.add_argument(
"--dev-by-role",
action="store_true",
help="""Explore deviations in
role order instead of all at once. By default, when checking for
beneficial deviations, all role deviations are scheduled at the same
time. Setting this will check one role at a time. If a beneficial
deviation is found, then that restricted strategy set is scheduled
without exploring deviations from the other roles.""",
)
parser.add_argument(
"--style",
default="best",
choices=["fast", "more", "best", "one"],
help="""Style of equilibrium finding to use. `fast` is the fastests but
least thorough, `one` will guarantee an equilibrium is found in
potentially exponential time.""",
)
parser.add_argument(
"--procs",
type=int,
default=2,
metavar="<num-procs>",
help="""Number
        of processes to use. This will speed up computation if doing
computationally intensive things simultaneously, i.e. nash finding.
(default: %(default)d)""",
)
utils.add_reductions(parser)
parser.run = run
async def run(args):
"""Entry point for cli"""
sched = await schedspec.parse_scheduler(args.scheduler)
red, red_players = utils.parse_reduction(sched, args)
agame = schedgame.schedgame(sched, red, red_players)
async def get_regret(eqm):
"""Gets the regret of an equilibrium"""
game = await agame.get_deviation_game(eqm > 0)
return float(regret.mixture_regret(game, eqm))
async with sched:
with futures.ProcessPoolExecutor(args.procs) as executor:
eqa = await innerloop.inner_loop(
agame,
regret_thresh=args.regret_thresh,
dist_thresh=args.dist_thresh,
restricted_game_size=args.max_restrict_size,
num_equilibria=args.num_equilibria,
num_backups=args.num_backups,
devs_by_role=args.dev_by_role,
style=args.style,
executor=executor,
)
regrets = await asyncio.gather(*[get_regret(eqm) for eqm in eqa])
logging.error(
"quiesce finished finding %d equilibria:\n%s",
eqa.shape[0],
"\n".join(
"{:d}) {} with regret {:g}".format(i, sched.mixture_to_repr(eqm), reg)
for i, (eqm, reg) in enumerate(zip(eqa, regrets), 1)
),
)
json.dump(
[
{"equilibrium": sched.mixture_to_json(eqm), "regret": reg}
for eqm, reg in zip(eqa, regrets)
],
args.output,
)
args.output.write("\n")
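# --- Editor's note: hypothetical invocation, shown for illustration only ---
# Wired into the egta command line, this parser would be driven roughly as:
#   egta quiesce <sched-spec> --regret-thresh 1e-3 --max-restrict-size 3 \
#       --num-equilibria 1 --style best --procs 2
# where <sched-spec> is a scheduler specification as described by `egta spec`;
# the run writes a JSON list of {"equilibrium": ..., "regret": ...} records.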
| apache-2.0 | 7,585,665,042,589,189,000 | 33.915033 | 82 | 0.60745 | false |
MarkusHackspacher/PythonFarmGame | farmlib/gamemanager.py | 1 | 8848 | '''
Created on 17-07-2012
@author: orneo1212
'''
import os
import time
from farmlib import DictMapper
from farmlib.farm import FarmField, FarmObject, FarmTile, Seed, objects
from farmlib.player import Player
try:
xrange
except NameError:
xrange = range
class GameManager(object):
"""Game Manager class
"""
def __init__(self):
self.farms = []
self.gameseed = int(time.time())
self.gametime = int(time.time())
self.current_farm = 0
self.player = Player()
def getfarm(self, farmid=None):
"""getfarm
:param farmid:
:return:
"""
if farmid is None:
farmid = self.current_farm
if not self.farms:
self.addfarm()
try:
return self.farms[farmid]
except IndexError:
return None
def getfarmcount(self):
"""get farm count
:return:
"""
return len(self.farms)
def getcurrentfarmid(self):
"""get current farm id
:return:
"""
return self.current_farm
def getnextfarmcost(self):
"""get next farm cost
:return:
"""
farmcount = self.getfarmcount() - 1
cost = 10000 + 12000 * farmcount
return cost
def addfarm(self):
"""add farm
:return:
"""
newfarm = FarmField(self)
self.farms.append(newfarm)
return newfarm
def setcurrentfarm(self, farmid):
"""set current farm
:param farmid:
:return:
"""
if farmid > self.getfarmcount():
farmid = self.getfarmcount() - 1
self.current_farm = farmid
return farmid
def getgameseed(self):
"""get game seed
:return:
"""
return self.gameseed
def setgameseed(self, newseed):
"""set game seed
:param newseed:
:return:
"""
self.gameseed = newseed
def getplayer(self):
"""get player
:return:
"""
return self.player
def update(self):
"""should be called 20 times per second"""
# update selected item
if self.player.selecteditem is not None and \
not self.player.item_in_inventory(self.player.selecteditem):
# clear selected item if player dont have it
self.player.selecteditem = None
# update farms
for farm in self.farms:
farm.update()
def start_new_game(self):
"""Start new game
:return:
"""
farm = self.getfarm(0)
farm.generate_random_stones()
farm.generate_random_planks()
def savegame(self):
"""save game
:return:
"""
self.save_gamestate(self.player)
def loadgame(self):
"""load game
:return:
"""
result = self.load_gamestate('field.json', self.player)
return result
def timeforward(self):
"""time forward
:return:
"""
farm = self.getfarm(0)
if farm.seconds_to_update > 1000:
farm.seconds_to_update = 1000
if farm.seconds_to_update:
# 1 second is equal 20 updates
for _ in xrange(farm.seconds_to_update):
self.update()
def save_gamestate(self, player):
"""Saving game state
:param player:
:return:
"""
print("Saving game state...")
data = DictMapper()
# Save player data
data["inventory"] = player.inventory
data["itemscounter"] = player.itemscounter
data["money"] = player.money
data["watercanuses"] = player.watercanuses
data["exp"] = player.exp
data["nextlvlexp"] = player.nextlvlexp
data["level"] = player.level
# Save time
data["gametime"] = int(time.time())
data["gameseed"] = self.getgameseed()
# save tiles
data["fields"] = []
try:
dict.iteritems
except AttributeError:
# Python 3
def listkeys(d):
"""listkeys
:param d:
:return:
"""
return list(d)
else:
# Python 2
def listkeys(d):
"""listkeys
:param d:
:return:
"""
return d.keys()
# fill tiles
for farmid in xrange(self.getfarmcount()):
farm = self.getfarm(farmid)
data["fields"].append({"tiles": []})
for ftt in listkeys(farm.farmtiles):
ft = farm.farmtiles[ftt]
# skip when no seed
if not ft['object']:
continue
gameobject = ft['object']
tile = {}
tile["px"] = int(ftt.split('x')[0])
tile["py"] = int(ftt.split('x')[1])
tile["water"] = ft["water"]
tile["object"] = {}
# seed data
tile["object"]["type"] = gameobject.type
tile["object"]['id'] = gameobject.id
if isinstance(gameobject, Seed):
tile["object"]['growstarttime'] = gameobject.growstarttime
tile["object"]['growendtime'] = gameobject.growendtime
tile["object"]['growing'] = bool(gameobject.growing)
tile["object"]['to_harvest'] = bool(gameobject.to_harvest)
tile["object"]['harvestcount'] = gameobject.harvestcount
# set tile
data["fields"][farmid]["tiles"].append(tile)
# save data
data.save("field.json")
return True
def load_gamestate(self, filename, player):
"""Loading game state
:param filename:
:param player:
:return:
"""
if not os.path.isfile(filename):
return False
print("Loading game state...")
data = DictMapper()
data.load(filename)
player.inventory = data["inventory"]
player.itemscounter = data["itemscounter"]
player.watercanuses = data.get("watercanuses", 100)
player.exp = data.get("exp", 0.0)
player.nextlvlexp = data.get("nextlvlexp", 100.0)
player.money = int(data.get("money", 1))
player.level = int(data.get("level", 1))
# load game time
self.seconds_to_update = int(time.time()) - data.get("gametime",
int(time.time()))
seed = data.get("gameseed", int(time.time()))
self.setgameseed(seed)
# Migrate old farm
if "fields" not in data.keys():
data["fields"] = []
data['fields'].append({})
data['fields'][0]["tiles"] = data["tiles"]
# load tiles
for farmid in xrange(len(data["fields"])):
farm = self.getfarm(farmid)
if farm is None:
farm = self.addfarm()
# Restore tiles
for tile in data["fields"][farmid]["tiles"]:
px = tile["px"]
py = tile["py"]
# Avoid null objects
if not tile["object"]:
continue
# Restore seed or object
if tile["object"]["type"] == "seed":
objectdata = tile["object"]
newobject = Seed()
newobject.id = objectdata["id"]
newobject.type = objectdata["type"]
newobject.to_harvest = objectdata["to_harvest"]
newobject.growing = objectdata["growing"]
newobject.growendtime = objectdata["growendtime"]
newobject.growstarttime = objectdata["growstarttime"]
farmtile = FarmTile(newobject)
farmtile["water"] = tile["water"]
# Apply global object data
newobject.apply_dict(objects[newobject.id])
# Restore harvest count
newobject.harvestcount = objectdata.get(
"harvestcount", 1)
newobject.requiredlevel = objectdata.get(
"requiredlevel", 1)
else:
newobject = FarmObject()
newobject.id = tile["object"]["id"]
newobject.type = tile["object"]["type"]
# apply dict
newobject.apply_dict(objects[newobject.id])
farmtile = FarmTile(newobject)
# set farmtile
farm.set_farmtile(px, py, farmtile)
# return
return True
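# --- Editor's sketch: illustrative usage only, not part of the original module ---
# Minimal loop around GameManager: resume or start a game, tick it at the
# 20 updates per second assumed by update(), and persist the state on exit.
if __name__ == "__main__":
    manager = GameManager()
    if manager.loadgame():
        manager.timeforward()  # replay time that passed while the game was closed
    else:
        manager.start_new_game()
    for _ in xrange(20 * 5):  # run five in-game seconds
        manager.update()
        time.sleep(1.0 / 20)
    manager.savegame()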
| gpl-3.0 | -7,167,441,936,136,486,000 | 27.541935 | 78 | 0.493897 | false |
anortef/calico | calico/felix/config.py | 1 | 17129 | # -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015 Metaswitch Networks
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
felix.config
~~~~~~~~~~~~
Configuration management for Felix.
On instantiation, this module automatically parses the configuration file and
builds a singleton configuration object. That object may (once) be changed by
etcd configuration being reported back to it.
"""
import os
import ConfigParser
import logging
import socket
from calico import common
# Logger
log = logging.getLogger(__name__)
# Convert log level names into python log levels.
LOGLEVELS = {"none": None,
"debug": logging.DEBUG,
"info": logging.INFO,
"warn": logging.WARNING,
"warning": logging.WARNING,
"err": logging.ERROR,
"error": logging.ERROR,
"crit": logging.CRITICAL,
"critical": logging.CRITICAL}
# Sources of a configuration parameter. The order is highest-priority first.
DEFAULT = "Default"
ENV = "Environment variable"
FILE = "Configuration file"
GLOBAL_ETCD = "Global etcd configuration"
LOCAL_ETCD = "Host specific etcd configuration"
DEFAULT_SOURCES = [ ENV, FILE, GLOBAL_ETCD, LOCAL_ETCD ]
class ConfigException(Exception):
def __init__(self, message, parameter):
super(ConfigException, self).__init__(message)
self.message = message
self.parameter = parameter
def __str__(self):
return "%s (value %r for %s (%s), read from %r)" \
% (self.message,
self.parameter.value,
self.parameter.name,
self.parameter.description,
self.parameter.active_source)
class ConfigParameter(object):
"""
A configuration parameter. This contains the following information.
- The name of the field.
- Where the location can validly be read from
- The current value
- Where the value was read from
"""
def __init__(self, name, description, default,
sources=DEFAULT_SOURCES, value_is_int=False,
value_is_bool=False):
"""
Create a configuration parameter.
:param str description: Description for logging
:param list sources: List of valid sources to try
:param str default: Default value
:param bool value_is_int: Integer value?
"""
self.description = description
self.name = name
self.sources = sources
self.value = default
self.active_source = None
self.value_is_int = value_is_int
self.value_is_bool = value_is_bool
def set(self, value, source):
"""
Set a value of a parameter - unless already set.
:param value: value
:param source: source; for example "Configuration file /etc/felix.cfg"
"""
if self.active_source is None:
log.debug("Read value %r for %s (%s) from %r",
value,
self.name,
self.description,
source)
self.active_source = source
if self.value_is_int:
# Set value before the call to int, so the ConfigException has
# the right value if / when it goes wrong.
self.value = value
try:
self.value = int(value)
except ValueError:
raise ConfigException("Field was not integer",
self)
elif self.value_is_bool:
lower_val = str(value).lower()
log.debug("Parsing %r as a Boolean.", lower_val)
if lower_val in ("true", "1", "yes", "y", "t"):
self.value = True
elif lower_val in ("false", "0", "no", "n", "f"):
self.value = False
else:
raise ConfigException("Field was not a valid Boolean",
self)
else:
# Calling str in principle can throw an exception, but it's
# hard to see how in practice, so don't catch and wrap.
self.value = str(value)
else:
log.warning("Ignore %r value for %s (%s) - already set from %r",
source,
self.name,
self.description,
self.active_source)
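# --- Editor's sketch: illustrative only, shows the precedence rule of set() ---
# The first source to supply a value wins; later, lower-priority sources are
# ignored. ENV and FILE are the module-level source constants defined above.
def _example_parameter_precedence():
    param = ConfigParameter("MetadataPort", "Metadata Port", 8775,
                            value_is_int=True)
    param.set("9775", ENV)   # first value read becomes the active one
    param.set("8775", FILE)  # ignored: ENV already supplied a value
    assert param.value == 9775 and param.active_source == ENV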
class Config(object):
def __init__(self, config_path):
"""
Create a config. This reads data from the following sources.
- Environment variables
- Configuration file
- per-host etcd (/calico/vX/config)
- global etcd (/calico/vX/host/<host>/config)
After object creation, the environment variables and config file have
been read, and the variables ETCD_ADDR and HOSTNAME have been set and
validated. The caller is then responsible for reading the remaining
config from etcd and calling report_etcd_config with the returned
values before the rest of the config structure can be used.
:raises EtcdException
"""
self.parameters = {}
self.add_parameter("EtcdAddr", "Address and port for etcd",
"localhost:4001", sources=[ENV, FILE])
self.add_parameter("FelixHostname", "Felix compute host hostname",
socket.gethostname(), sources=[ENV, FILE])
self.add_parameter("StartupCleanupDelay", "Delay before cleanup starts",
30, value_is_int=True)
self.add_parameter("PeriodicResyncInterval",
"How often to do cleanups, seconds",
60 * 60, value_is_int=True)
self.add_parameter("IptablesRefreshInterval",
"How often to refresh iptables state, in seconds",
60, value_is_int=True)
self.add_parameter("MetadataAddr", "Metadata IP address or hostname",
"127.0.0.1")
self.add_parameter("MetadataPort", "Metadata Port",
8775, value_is_int=True)
self.add_parameter("InterfacePrefix", "Interface name prefix", None)
self.add_parameter("DefaultEndpointToHostAction",
"Action to take for packets that arrive from"
"an endpoint to the host.", "DROP")
self.add_parameter("LogFilePath",
"Path to log file", "/var/log/calico/felix.log")
self.add_parameter("LogSeverityFile",
"Log severity for logging to file", "INFO")
self.add_parameter("LogSeveritySys",
"Log severity for logging to syslog", "ERROR")
self.add_parameter("LogSeverityScreen",
"Log severity for logging to screen", "ERROR")
self.add_parameter("IpInIpEnabled",
"IP-in-IP device support enabled", False,
value_is_bool=True)
# Read the environment variables, then the configuration file.
self._read_env_vars()
self._read_cfg_file(config_path)
self._finish_update(final=False)
def add_parameter(self, name, description, default, **kwargs):
"""
Put a parameter in the parameter dictionary.
"""
self.parameters[name] = ConfigParameter(
name, description, default, **kwargs)
def _finish_update(self, final=False):
"""
Config has been completely read. Called twice - once after reading from
environment and config file (so we should be able to access etcd), and
once after reading from etcd (so we have all the config ready to go).
Responsible for :
- storing the parameters in the relevant fields in the structure
- validating the configuration is valid (for this stage in the process)
- updating logging parameters
Note that we complete the logging even before etcd configuration
changes are read. Hence, for example, if logging to file is turned on
after reading environment variables and config file, then the log file
is created and logging to it starts - even if later on etcd
configuration turns the file off. That's because we must log if etcd
configuration load fails, and not having the log file early enough is
worse.
:param final: Have we completed (rather than just read env and config file)
"""
self.ETCD_ADDR = self.parameters["EtcdAddr"].value
self.HOSTNAME = self.parameters["FelixHostname"].value
self.STARTUP_CLEANUP_DELAY = self.parameters["StartupCleanupDelay"].value
self.RESYNC_INTERVAL = self.parameters["PeriodicResyncInterval"].value
self.REFRESH_INTERVAL = self.parameters["IptablesRefreshInterval"].value
self.METADATA_IP = self.parameters["MetadataAddr"].value
self.METADATA_PORT = self.parameters["MetadataPort"].value
self.IFACE_PREFIX = self.parameters["InterfacePrefix"].value
self.DEFAULT_INPUT_CHAIN_ACTION = \
self.parameters["DefaultEndpointToHostAction"].value
self.LOGFILE = self.parameters["LogFilePath"].value
self.LOGLEVFILE = self.parameters["LogSeverityFile"].value
self.LOGLEVSYS = self.parameters["LogSeveritySys"].value
self.LOGLEVSCR = self.parameters["LogSeverityScreen"].value
self.IP_IN_IP_ENABLED = self.parameters["IpInIpEnabled"].value
self._validate_cfg(final=final)
# Update logging.
common.complete_logging(self.LOGFILE,
self.LOGLEVFILE,
self.LOGLEVSYS,
self.LOGLEVSCR)
if final:
# Log configuration - the whole lot of it.
for name, parameter in self.parameters.iteritems():
log.info("Parameter %s (%s) has value %r read from %s",
name,
parameter.description,
parameter.value,
parameter.active_source)
def _read_env_vars(self):
"""
Read all of the variables from the environment.
"""
for name, parameter in self.parameters.iteritems():
# All currently defined config parameters have ENV as a valid source.
assert(ENV in parameter.sources)
# ENV is the first source, so we can assert that using defaults.
assert(parameter.active_source is None)
env_var = ("FELIX_%s" % name).upper()
if env_var in os.environ:
parameter.set(os.environ[env_var],
"Environment variable %s" % env_var)
def _read_cfg_file(self, config_file):
parser = ConfigParser.ConfigParser()
parser.read(config_file)
cfg_dict = {}
# Build up the cfg dictionary from the file.
for section in parser.sections():
cfg_dict.update(dict(parser.items(section)))
source = "Configuration file %s" % config_file
for name, parameter in self.parameters.iteritems():
# Config parameters are lower-cased by ConfigParser
name = name.lower()
if FILE in parameter.sources and name in cfg_dict:
# This can validly be read from file.
parameter.set(cfg_dict.pop(name), source)
self._warn_unused_cfg(cfg_dict, source)
def report_etcd_config(self, host_dict, global_dict):
"""
Report configuration parameters read from etcd to the config
component. This must be called only once, after configuration is
initially read and before the config structure is used (except for
ETCD_ADDR and HOSTNAME).
:param host_dict: Dictionary of etcd parameters
:param global_dict: Dictionary of global parameters
:raises ConfigException
"""
log.debug("Configuration reported from etcd")
for source, cfg_dict in ((LOCAL_ETCD, host_dict),
(GLOBAL_ETCD, global_dict)):
for name, parameter in self.parameters.iteritems():
if source in parameter.sources and name in cfg_dict:
parameter.set(cfg_dict.pop(name), source)
self._warn_unused_cfg(cfg_dict, source)
self._finish_update(final=True)
def _validate_cfg(self, final=True):
"""
        Check that the configuration is not invalid. Called twice, once when
environment variables and config file have been read, and once
after those plus the etcd configuration have been read.
:param final: Is this after final etcd config has been read?
:raises ConfigException
"""
fields = self.ETCD_ADDR.split(":")
if len(fields) != 2:
raise ConfigException("Invalid format for field - must be "
"hostname:port", self.parameters["EtcdAddr"])
self._validate_addr("EtcdAddr", fields[0])
try:
int(fields[1])
except ValueError:
raise ConfigException("Invalid port in field",
self.parameters["EtcdAddr"])
try:
self.LOGLEVFILE = LOGLEVELS[self.LOGLEVFILE.lower()]
except KeyError:
raise ConfigException("Invalid log level",
self.parameters["LogSeverityFile"])
try:
self.LOGLEVSYS = LOGLEVELS[self.LOGLEVSYS.lower()]
except KeyError:
raise ConfigException("Invalid log level",
self.parameters["LogSeveritySys"])
try:
self.LOGLEVSCR = LOGLEVELS[self.LOGLEVSCR.lower()]
except KeyError:
raise ConfigException("Invalid log level",
self.parameters["LogSeverityScreen"])
# Log file may be "None" (the literal string, case insensitive). In
# this case no log file should be written.
if self.LOGFILE.lower() == "none":
self.LOGFILE = None
if self.METADATA_IP.lower() == "none":
# Metadata is not required.
self.METADATA_IP = None
self.METADATA_PORT = None
else:
# Metadata must be supplied as IP or address, but we store as IP
self.METADATA_IP = self._validate_addr("MetadataAddr",
self.METADATA_IP)
if not common.validate_port(self.METADATA_PORT):
raise ConfigException("Invalid field value",
self.parameters["MetadataPort"])
if self.DEFAULT_INPUT_CHAIN_ACTION not in ("DROP", "RETURN", "ACCEPT"):
raise ConfigException(
"Invalid field value",
self.parameters["DefaultEndpointToHostAction"]
)
if not final:
# Do not check that unset parameters are defaulted; we have more
# config to read.
return
for name, parameter in self.parameters.iteritems():
if parameter.value is None:
# No value, not even a default
raise ConfigException("Missing undefaulted value",
self.parameters["InterfacePrefix"])
def _warn_unused_cfg(self, cfg_dict, source):
# Warn about any unexpected items - i.e. ones we have not used.
for lKey in cfg_dict:
log.warning("Got unexpected config item %s=%s",
lKey, cfg_dict[lKey])
def _validate_addr(self, name, addr):
"""
Validate an address, returning the IP address it resolves to. If the
address cannot be resolved then an exception is returned.
Parameters :
- name of the field, for use in logging
- address to resolve
"""
try:
stripped_addr = addr.strip()
if not stripped_addr:
raise ConfigException("Blank value",
self.parameters[name])
return socket.gethostbyname(addr)
except socket.gaierror:
raise ConfigException("Invalid or unresolvable value",
self.parameters[name])
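# --- Editor's sketch: illustrative bootstrap order, hypothetical helper/path ---
# Per the docstrings above, Config is built from the environment and the config
# file first (so ETCD_ADDR and HOSTNAME are usable), the two etcd dictionaries
# are fetched elsewhere, and report_etcd_config() must be called once before
# the remaining attributes are read. `etcd_client.read_config` is hypothetical.
def _example_load_config(etcd_client):
    config = Config("/etc/calico/felix.cfg")  # hypothetical config file path
    global_cfg = etcd_client.read_config("/calico/v1/config")
    host_cfg = etcd_client.read_config(
        "/calico/v1/host/%s/config" % config.HOSTNAME)
    config.report_etcd_config(host_cfg, global_cfg)
    return config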
| apache-2.0 | -5,891,084,876,158,930,000 | 40.175481 | 83 | 0.578142 | false |
oesteban/preprocessing-workflow | fmriprep/workflows/bold/util.py | 1 | 18323 | # -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Utility workflows
^^^^^^^^^^^^^^^^^
.. autofunction:: init_bold_reference_wf
.. autofunction:: init_enhance_and_skullstrip_bold_wf
.. autofunction:: init_skullstrip_bold_wf
"""
from packaging.version import parse as parseversion, Version
from pkg_resources import resource_filename as pkgr_fn
from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu, fsl, afni, ants
from templateflow.api import get as get_template
from niworkflows.engine.workflows import LiterateWorkflow as Workflow
from niworkflows.interfaces.ants import AI
from niworkflows.interfaces.fixes import (
FixHeaderRegistration as Registration,
FixHeaderApplyTransforms as ApplyTransforms,
)
from niworkflows.interfaces.images import ValidateImage, MatchHeader
from niworkflows.interfaces.masks import SimpleShowMaskRPT
from niworkflows.interfaces.registration import EstimateReferenceImage
from niworkflows.interfaces.utils import CopyXForm
DEFAULT_MEMORY_MIN_GB = 0.01
def init_bold_reference_wf(omp_nthreads, bold_file=None, pre_mask=False,
name='bold_reference_wf', gen_report=False):
"""
This workflow generates reference BOLD images for a series
The raw reference image is the target of :abbr:`HMC (head motion correction)`, and a
contrast-enhanced reference is the subject of distortion correction, as well as
boundary-based registration to T1w and template spaces.
.. workflow::
:graph2use: orig
:simple_form: yes
from fmriprep.workflows.bold import init_bold_reference_wf
wf = init_bold_reference_wf(omp_nthreads=1)
**Parameters**
bold_file : str
BOLD series NIfTI file
omp_nthreads : int
Maximum number of threads an individual process may use
name : str
Name of workflow (default: ``bold_reference_wf``)
gen_report : bool
            Whether a mask report node should be appended at the end
**Inputs**
bold_file
BOLD series NIfTI file
bold_mask : bool
A tentative brain mask to initialize the workflow (requires ``pre_mask``
parameter set ``True``).
dummy_scans : int or None
Number of non-steady-state volumes specified by user at beginning of ``bold_file``
sbref_file
single band (as opposed to multi band) reference NIfTI file
**Outputs**
bold_file
Validated BOLD series NIfTI file
raw_ref_image
Reference image to which BOLD series is motion corrected
skip_vols
Number of non-steady-state volumes selected at beginning of ``bold_file``
algo_dummy_scans
            Number of non-steady-state volumes algorithmically detected at
beginning of ``bold_file``
ref_image
Contrast-enhanced reference image
ref_image_brain
Skull-stripped reference image
bold_mask
Skull-stripping mask of reference image
validation_report
HTML reportlet indicating whether ``bold_file`` had a valid affine
**Subworkflows**
        * :py:func:`~fmriprep.workflows.bold.util.init_enhance_and_skullstrip_bold_wf`
"""
workflow = Workflow(name=name)
workflow.__desc__ = """\
First, a reference volume and its skull-stripped version were generated
using a custom methodology of *fMRIPrep*.
"""
inputnode = pe.Node(niu.IdentityInterface(fields=['bold_file', 'bold_mask', 'dummy_scans',
'sbref_file']),
name='inputnode')
outputnode = pe.Node(
niu.IdentityInterface(fields=['bold_file', 'raw_ref_image', 'skip_vols',
'algo_dummy_scans', 'ref_image', 'ref_image_brain',
                                      'bold_mask', 'validation_report', 'mask_report']),
name='outputnode')
# Simplify manually setting input image
if bold_file is not None:
inputnode.inputs.bold_file = bold_file
validate = pe.Node(ValidateImage(), name='validate', mem_gb=DEFAULT_MEMORY_MIN_GB)
gen_ref = pe.Node(EstimateReferenceImage(), name="gen_ref",
mem_gb=1) # OE: 128x128x128x50 * 64 / 8 ~ 900MB.
enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
omp_nthreads=omp_nthreads, pre_mask=pre_mask)
calc_dummy_scans = pe.Node(niu.Function(function=_pass_dummy_scans,
output_names=['skip_vols_num']),
name='calc_dummy_scans',
run_without_submitting=True,
mem_gb=DEFAULT_MEMORY_MIN_GB)
workflow.connect([
(inputnode, enhance_and_skullstrip_bold_wf, [('bold_mask', 'inputnode.pre_mask')]),
(inputnode, validate, [('bold_file', 'in_file')]),
(inputnode, gen_ref, [('sbref_file', 'sbref_file')]),
(inputnode, calc_dummy_scans, [('dummy_scans', 'dummy_scans')]),
(validate, gen_ref, [('out_file', 'in_file')]),
(gen_ref, enhance_and_skullstrip_bold_wf, [('ref_image', 'inputnode.in_file')]),
(validate, outputnode, [('out_file', 'bold_file'),
('out_report', 'validation_report')]),
(gen_ref, calc_dummy_scans, [('n_volumes_to_discard', 'algo_dummy_scans')]),
(calc_dummy_scans, outputnode, [('skip_vols_num', 'skip_vols')]),
(gen_ref, outputnode, [('ref_image', 'raw_ref_image'),
('n_volumes_to_discard', 'algo_dummy_scans')]),
(enhance_and_skullstrip_bold_wf, outputnode, [
('outputnode.bias_corrected_file', 'ref_image'),
('outputnode.mask_file', 'bold_mask'),
('outputnode.skull_stripped_file', 'ref_image_brain')]),
])
if gen_report:
mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
workflow.connect([
(enhance_and_skullstrip_bold_wf, mask_reportlet, [
('outputnode.bias_corrected_file', 'background_file'),
('outputnode.mask_file', 'mask_file'),
]),
])
return workflow
def init_enhance_and_skullstrip_bold_wf(
name='enhance_and_skullstrip_bold_wf',
pre_mask=False,
omp_nthreads=1):
"""
This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
:abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
averaging non-steady-state timepoints), and sharpens the histogram
with the application of the N4 algorithm for removing the
:abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
mask.
Steps of this workflow are:
1. Calculate a tentative mask by registering (9-parameters) to *fMRIPrep*'s
:abbr:`EPI (echo-planar imaging)` -*boldref* template, which
is in MNI space.
The tentative mask is obtained by resampling the MNI template's
brainmask into *boldref*-space.
2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
3. Run ANTs' ``N4BiasFieldCorrection`` on the input
:abbr:`BOLD (blood-oxygen level-dependant)` average, using the
mask generated in 1) instead of the internal Otsu thresholding.
    4. Calculate a loose mask using FSL's ``bet``, with a mathematical morphology
       dilation of one iteration and a sphere of 6mm as structuring element.
    5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
       with the latest mask calculated in 4), then use AFNI's ``3dUnifize``
       to *standardize* the T2* contrast distribution.
    6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
       enhancement of 5).
7. Calculate a final mask as the intersection of 4) and 6).
8. Apply final mask on the enhanced reference.
Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed in to the workflow through the ``pre_mask``
Nipype input.
.. workflow ::
:graph2use: orig
:simple_form: yes
from fmriprep.workflows.bold.util import init_enhance_and_skullstrip_bold_wf
wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)
**Parameters**
name : str
Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
pre_mask : bool
Indicates whether the ``pre_mask`` input will be set (and thus, step 1
should be skipped).
omp_nthreads : int
number of threads available to parallel nodes
**Inputs**
in_file
BOLD image (single volume)
pre_mask : bool
A tentative brain mask to initialize the workflow (requires ``pre_mask``
parameter set ``True``).
**Outputs**
bias_corrected_file
the ``in_file`` after `N4BiasFieldCorrection`_
skull_stripped_file
the ``bias_corrected_file`` after skull-stripping
mask_file
mask of the skull-stripped input file
out_report
reportlet for the skull-stripping
.. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
"""
workflow = Workflow(name=name)
inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
name='inputnode')
outputnode = pe.Node(niu.IdentityInterface(fields=[
'mask_file', 'skull_stripped_file', 'bias_corrected_file']), name='outputnode')
# Dilate pre_mask
pre_dilate = pe.Node(fsl.DilateImage(
operation='max', kernel_shape='sphere', kernel_size=3.0,
internal_datatype='char'), name='pre_mask_dilate')
# Ensure mask's header matches reference's
check_hdr = pe.Node(MatchHeader(), name='check_hdr',
run_without_submitting=True)
# Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
n4_correct = pe.Node(ants.N4BiasFieldCorrection(
dimension=3, copy_header=True, bspline_fitting_distance=200),
name='n4_correct', n_procs=1)
# Create a generous BET mask out of the bias-corrected EPI
skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
name='skullstrip_first_pass')
bet_dilate = pe.Node(fsl.DilateImage(
operation='max', kernel_shape='sphere', kernel_size=6.0,
internal_datatype='char'), name='skullstrip_first_dilate')
bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')
    # Use AFNI's unifize for T2 contrast & fix header
unifize = pe.Node(afni.Unifize(
t2=True, outputtype='NIFTI_GZ',
# Default -clfrac is 0.1, 0.4 was too conservative
# -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
out_file="uni.nii.gz"), name='unifize')
fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)
    # Run AFNI's 3dAutomask to extract a refined brain mask
skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
outputtype='NIFTI_GZ'),
name='skullstrip_second_pass')
fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2', mem_gb=0.1)
# Take intersection of both masks
combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
name='combine_masks')
# Compute masked brain
apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
if not pre_mask:
bold_template = get_template(
'MNI152NLin2009cAsym', resolution=2, desc='fMRIPrep', suffix='boldref')
brain_mask = get_template(
'MNI152NLin2009cAsym', resolution=2, desc='brain', suffix='mask')
# Initialize transforms with antsAI
init_aff = pe.Node(AI(
fixed_image=str(bold_template),
fixed_image_mask=str(brain_mask),
metric=('Mattes', 32, 'Regular', 0.2),
transform=('Affine', 0.1),
search_factor=(20, 0.12),
principal_axes=False,
convergence=(10, 1e-6, 10),
verbose=True),
name='init_aff',
n_procs=omp_nthreads)
# Registration().version may be None
if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
init_aff.inputs.search_grid = (40, (0, 40, 40))
# Set up spatial normalization
norm = pe.Node(Registration(
from_file=pkgr_fn(
'fmriprep.data',
'epi_atlasbased_brainmask.json')),
name='norm',
n_procs=omp_nthreads)
norm.inputs.fixed_image = str(bold_template)
map_brainmask = pe.Node(
ApplyTransforms(interpolation='MultiLabel', float=True, input_image=str(brain_mask)),
name='map_brainmask'
)
workflow.connect([
(inputnode, init_aff, [('in_file', 'moving_image')]),
(inputnode, map_brainmask, [('in_file', 'reference_image')]),
(inputnode, norm, [('in_file', 'moving_image')]),
(init_aff, norm, [('output_transform', 'initial_moving_transform')]),
(norm, map_brainmask, [
('reverse_invert_flags', 'invert_transform_flags'),
('reverse_transforms', 'transforms')]),
(map_brainmask, pre_dilate, [('output_image', 'in_file')]),
])
else:
workflow.connect([
(inputnode, pre_dilate, [('pre_mask', 'in_file')]),
])
workflow.connect([
(inputnode, check_hdr, [('in_file', 'reference')]),
(pre_dilate, check_hdr, [('out_file', 'in_file')]),
(check_hdr, n4_correct, [('out_file', 'mask_image')]),
(inputnode, n4_correct, [('in_file', 'input_image')]),
(inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
(inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
(n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
(skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
(bet_dilate, bet_mask, [('out_file', 'mask_file')]),
(skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
(bet_mask, unifize, [('out_file', 'in_file')]),
(unifize, fixhdr_unifize, [('out_file', 'in_file')]),
(fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
(skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
(skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')]),
(fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
(fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
(combine_masks, apply_mask, [('out_file', 'mask_file')]),
(combine_masks, outputnode, [('out_file', 'mask_file')]),
(apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
(n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
])
return workflow
def init_skullstrip_bold_wf(name='skullstrip_bold_wf'):
"""
This workflow applies skull-stripping to a BOLD image.
It is intended to be used on an image that has previously been
bias-corrected with
:py:func:`~fmriprep.workflows.bold.util.init_enhance_and_skullstrip_bold_wf`
.. workflow ::
:graph2use: orig
:simple_form: yes
from fmriprep.workflows.bold.util import init_skullstrip_bold_wf
wf = init_skullstrip_bold_wf()
Inputs
in_file
BOLD image (single volume)
Outputs
skull_stripped_file
the ``in_file`` after skull-stripping
mask_file
mask of the skull-stripped input file
out_report
reportlet for the skull-stripping
"""
workflow = Workflow(name=name)
inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
name='inputnode')
outputnode = pe.Node(niu.IdentityInterface(fields=['mask_file',
'skull_stripped_file',
'out_report']),
name='outputnode')
skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
name='skullstrip_first_pass')
skullstrip_second_pass = pe.Node(afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
name='skullstrip_second_pass')
combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'), name='combine_masks')
apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
workflow.connect([
(inputnode, skullstrip_first_pass, [('in_file', 'in_file')]),
(skullstrip_first_pass, skullstrip_second_pass, [('out_file', 'in_file')]),
(skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
(skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')]),
(combine_masks, outputnode, [('out_file', 'mask_file')]),
# Masked file
(inputnode, apply_mask, [('in_file', 'in_file')]),
(combine_masks, apply_mask, [('out_file', 'mask_file')]),
(apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
# Reportlet
(inputnode, mask_reportlet, [('in_file', 'background_file')]),
(combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
(mask_reportlet, outputnode, [('out_report', 'out_report')]),
])
return workflow
def _pass_dummy_scans(algo_dummy_scans, dummy_scans=None):
"""
**Parameters**
algo_dummy_scans : int
number of volumes to skip determined by an algorithm
dummy_scans : int or None
number of volumes to skip determined by the user
**Returns**
skip_vols_num : int
number of volumes to skip
"""
return dummy_scans or algo_dummy_scans
| bsd-3-clause | 8,292,137,848,406,605,000 | 40.08296 | 97 | 0.601867 | false |
googleapis/python-error-reporting | google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py | 1 | 12859 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import error_group_service
from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ErrorGroupServiceGrpcTransport
class ErrorGroupServiceGrpcAsyncIOTransport(ErrorGroupServiceTransport):
"""gRPC AsyncIO backend transport for ErrorGroupService.
Service for retrieving and updating individual error groups.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "clouderrorreporting.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "clouderrorreporting.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def get_group(
self,
) -> Callable[[error_group_service.GetGroupRequest], Awaitable[common.ErrorGroup]]:
r"""Return a callable for the get group method over gRPC.
Get the specified group.
Returns:
Callable[[~.GetGroupRequest],
Awaitable[~.ErrorGroup]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_group" not in self._stubs:
self._stubs["get_group"] = self.grpc_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup",
request_serializer=error_group_service.GetGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
)
return self._stubs["get_group"]
@property
def update_group(
self,
) -> Callable[
[error_group_service.UpdateGroupRequest], Awaitable[common.ErrorGroup]
]:
r"""Return a callable for the update group method over gRPC.
Replace the data for the specified group.
Fails if the group does not exist.
Returns:
Callable[[~.UpdateGroupRequest],
Awaitable[~.ErrorGroup]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_group" not in self._stubs:
self._stubs["update_group"] = self.grpc_channel.unary_unary(
"/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup",
request_serializer=error_group_service.UpdateGroupRequest.serialize,
response_deserializer=common.ErrorGroup.deserialize,
)
return self._stubs["update_group"]
__all__ = ("ErrorGroupServiceGrpcAsyncIOTransport",)
| apache-2.0 | -2,248,714,581,095,441,200 | 43.649306 | 93 | 0.621199 | false |
dnsforever/dnsforever-web | dnsforever/web/__init__.py | 1 | 1055 | from flask import Flask, g
from dnsforever.config import secret_key
from dnsforever.models import Session
from dnsforever.web.tools.session import get_user
blueprints = ['apis', 'index', 'account', 'domain',
'domain_a', 'domain_ddns', 'domain_aaaa',
'domain_cname', 'domain_mx',
'domain_txt', 'domain_subdomain']
def create_app():
app = Flask(__name__)
app.secret_key = secret_key
for name in blueprints:
app.register_blueprint(load_blueprint(name))
@app.before_request
def define_session():
g.service_name = 'DNS Forever beta'
g.session = Session()
g.user = get_user()
if g.user:
g.domain_list = [ownership.domain.name
for ownership in g.user.ownership]
else:
g.domain_list = []
g.debug = app.debug
return app
def load_blueprint(name):
module = __import__('dnsforever.web.' + name, None, None, ['app'])
blueprint = getattr(module, 'app')
return blueprint
| gpl-3.0 | 4,566,098,043,050,509,000 | 24.731707 | 70 | 0.589573 | false |
pbanaszkiewicz/amy | amy/extforms/tests/test_selforganised_submission_form.py | 1 | 4753 | from datetime import date
from django.conf import settings
from django.core import mail
from django.urls import reverse
from extforms.forms import SelfOrganisedSubmissionExternalForm
from extrequests.models import SelfOrganisedSubmission
from workshops.models import Curriculum, Language
from workshops.tests.base import TestBase
class TestSelfOrganisedSubmissionExternalForm(TestBase):
"""Test external (accessible to non-logged in users) form."""
def test_fields_presence(self):
"""Test if the form shows correct fields."""
form = SelfOrganisedSubmissionExternalForm()
fields_left = set(form.fields.keys())
fields_right = set(
[
"personal",
"family",
"email",
"secondary_email",
"institution",
"institution_other_name",
"institution_other_URL",
"institution_department",
"workshop_format",
"workshop_format_other",
"start",
"end",
"workshop_url",
"workshop_types",
"workshop_types_other_explain",
"country",
"language",
"public_event",
"public_event_other",
"additional_contact",
"data_privacy_agreement",
"code_of_conduct_agreement",
"host_responsibilities",
"workshop_listed",
"online_inperson",
"captcha",
]
)
self.assertEqual(fields_left, fields_right)
def test_request_added(self):
"""Ensure the request is successfully added to the pool, and
notification email is sent."""
data = {
"personal": "Harry",
"family": "Potter",
"email": "[email protected]",
"institution_other_name": "Ministry of Magic",
"institution_other_URL": "magic.gov.uk",
"workshop_format": "periodic",
"workshop_format_other": "",
"start": date(2020, 11, 7),
"end": date(2020, 11, 8),
"workshop_url": "",
"workshop_types": [
Curriculum.objects.filter(active=True)
.exclude(mix_match=True)
.first()
.pk,
],
"workshop_types_other_explain": "",
"country": "GB",
"language": Language.objects.get(name="English").pk,
"public_event": "closed",
"public_event_other": "",
"additional_contact": "",
"data_privacy_agreement": True,
"code_of_conduct_agreement": True,
"host_responsibilities": True,
"online_inperson": "inperson",
}
self.passCaptcha(data)
rv = self.client.post(reverse("selforganised_submission"), data, follow=True)
self.assertEqual(rv.status_code, 200)
content = rv.content.decode("utf-8")
if "form" in rv.context:
self.assertEqual(
rv.context["form"].is_valid(), True, dict(rv.context["form"].errors)
)
self.assertNotIn("Please fix errors in the form below", content)
self.assertIn("Thank you for submitting self-organised workshop", content)
self.assertEqual(SelfOrganisedSubmission.objects.all().count(), 1)
self.assertEqual(SelfOrganisedSubmission.objects.all()[0].state, "p")
# 1 email for autoresponder, 1 email for admins
self.assertEqual(len(mail.outbox), 2)
# save the email messages for test debuggig
# with open('email0.eml', 'wb') as f:
# f.write(mail.outbox[0].message().as_bytes())
# with open('email1.eml', 'wb') as f:
# f.write(mail.outbox[1].message().as_bytes())
# before tests, check if the template invalid string exists
self.assertTrue(settings.TEMPLATES[0]["OPTIONS"]["string_if_invalid"])
# test autoresponder email
msg = mail.outbox[0]
self.assertEqual(msg.subject, "Self-organised submission confirmation")
self.assertEqual(msg.recipients(), ["[email protected]"])
self.assertNotIn(
settings.TEMPLATES[0]["OPTIONS"]["string_if_invalid"], msg.body
)
# test email for admins
msg = mail.outbox[1]
self.assertEqual(
msg.subject,
"New self-organised submission: Ministry of Magic",
)
self.assertEqual(msg.recipients(), ["[email protected]"])
self.assertNotIn(
settings.TEMPLATES[0]["OPTIONS"]["string_if_invalid"], msg.body
)
| mit | 2,736,142,694,196,750,300 | 37.024 | 85 | 0.555228 | false |
FishyFing/FishBot | cogs/google.py | 1 | 2253 | import discord
from discord.ext import commands
from .utils import checks
import urllib
class SimplyGoogle:
"""A non sarcastic google command"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True)
async def google(self, ctx, text):
"""Its google, you search with it.
Example: google A french pug
Special search options are avaiable; Image, Maps
Example: google image You know, for kids!
Another example: google maps New York"""
search_type = ctx.message.content[len(ctx.prefix+ctx.command.name)+1:].lower().split(" ")
#Start of Image
if search_type[0] == "image":
search_valid = str(ctx.message.content[len(ctx.prefix+ctx.command.name)+1:].lower())
if search_valid == "image":
await self.bot.say("Please actually search something")
else:
uri = "https://www.google.com/search?tbm=isch&q="
quary = str(ctx.message.content[len(ctx.prefix+ctx.command.name)+7:].lower())
encode = urllib.parse.quote_plus(quary,encoding='utf-8',errors='replace')
await self.bot.say(uri+encode)
#End of Image
#Start of Maps
elif search_type[0] == "maps":
search_valid = str(ctx.message.content[len(ctx.prefix+ctx.command.name)+1:].lower())
if search_valid == "maps":
await self.bot.say("Please actually search something")
else:
uri = "https://www.google.com/maps/search/"
quary = str(ctx.message.content[len(ctx.prefix+ctx.command.name)+6:].lower())
encode = urllib.parse.quote_plus(quary,encoding='utf-8',errors='replace')
await self.bot.say(uri+encode)
#End of Maps
#Start of generic search
else:
uri = "https://www.google.com/search?q="
quary = str(ctx.message.content[len(ctx.prefix+ctx.command.name)+1:])
encode = urllib.parse.quote_plus(quary,encoding='utf-8',errors='replace')
await self.bot.say(uri+encode)
#End of generic search
def setup(bot):
n = SimplyGoogle(bot)
bot.add_cog(n)
| gpl-3.0 | -4,040,341,036,851,990,500 | 41.509434 | 97 | 0.590324 | false |
nomadicfm/pyramid-views | tests/_test_dates.py | 1 | 31005 | from __future__ import unicode_literals
import datetime
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from django.utils import timezone
from .models import Book, BookSigning
def _make_books(n, base_date):
for i in range(n):
Book.objects.create(
name='Book %d' % i,
slug='book-%d' % i,
pages=100 + i,
pubdate=base_date - datetime.timedelta(days=i))
class ArchiveIndexViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_archive_view(self):
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_archive_view_context_object_name(self):
res = self.client.get('/dates/books/context_object_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['thingies']), list(Book.objects.all()))
self.assertFalse('latest' in res.context)
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 404)
def test_allow_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_archive_view_template(self):
res = self.client.get('/dates/books/template_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/list.html')
def test_archive_view_template_suffix(self):
res = self.client.get('/dates/books/template_name_suffix/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_archive_view_invalid(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/dates/books/invalid/')
def test_archive_view_by_month(self):
res = self.client.get('/dates/books/by_month/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'month', 'DESC')))
def test_paginated_archive_view(self):
_make_books(20, base_date=datetime.date.today())
res = self.client.get('/dates/books/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[0:10]))
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
res = self.client.get('/dates/books/paginated/?page=2')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 2)
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[10:20]))
def test_paginated_archive_view_does_not_load_entire_table(self):
# Regression test for #18087
_make_books(20, base_date=datetime.date.today())
# 1 query for years list + 1 query for books
with self.assertNumQueries(2):
self.client.get('/dates/books/')
# same as above + 1 query to test if books exist + 1 query to count them
with self.assertNumQueries(4):
self.client.get('/dates/books/paginated/')
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(2):
self.client.get('/dates/books/reverse/')
def test_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
@requires_tz_support
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted descending in index"""
_make_books(5, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(reversed(sorted(res.context['date_list']))))
class YearArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_year_view(self):
res = self.client.get('/dates/books/2008/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(2008, 10, 1)])
self.assertEqual(res.context['year'], datetime.date(2008, 1, 1))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
# Since allow_empty=False, next/prev years must be valid (#7164)
self.assertEqual(res.context['next_year'], None)
self.assertEqual(res.context['previous_year'], datetime.date(2006, 1, 1))
def test_year_view_make_object_list(self):
res = self.client.get('/dates/books/2006/make_object_list/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(2006, 5, 1)])
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
def test_year_view_empty(self):
res = self.client.get('/dates/books/1999/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/1999/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
# Since allow_empty=True, next/prev are allowed to be empty years (#7164)
self.assertEqual(res.context['next_year'], datetime.date(2000, 1, 1))
self.assertEqual(res.context['previous_year'], datetime.date(1998, 1, 1))
def test_year_view_allow_future(self):
# Create a new book in the future
year = datetime.date.today().year + 1
Book.objects.create(name="The New New Testement", pages=600, pubdate=datetime.date(year, 1, 1))
res = self.client.get('/dates/books/%s/' % year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/allow_empty/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
res = self.client.get('/dates/books/%s/allow_future/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(year, 1, 1)])
def test_year_view_paginated(self):
res = self.client.get('/dates/books/2006/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
def test_year_view_invalid_pattern(self):
res = self.client.get('/dates/books/no_year/')
self.assertEqual(res.status_code, 404)
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(4):
self.client.get('/dates/books/2008/reverse/')
def test_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in year view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/2011/')
self.assertEqual(list(res.context['date_list']), list(sorted(res.context['date_list'])))
class MonthArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_month_view(self):
res = self.client.get('/dates/books/2008/oct/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_month.html')
self.assertEqual(list(res.context['date_list']), [datetime.date(2008, 10, 1)])
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['month'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev months must be valid (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get('/dates/books/2000/jan/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2000/jan/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['month'], datetime.date(2000, 1, 1))
# Since allow_empty=True, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], datetime.date(2000, 2, 1))
self.assertEqual(res.context['previous_month'], datetime.date(1999, 12, 1))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], None)
def test_month_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60)).replace(day=1)
urlbit = future.strftime('%Y/%b').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'][0], b.pubdate)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['month'], future)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month. So next
# should be in the future (yup, #7164, again)
res = self.client.get('/dates/books/2008/oct/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], future)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_month.html')
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/')
self.assertEqual(res.status_code, 200)
def test_month_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/no_month/')
self.assertEqual(res.status_code, 404)
def test_previous_month_without_content(self):
"Content can exist on any day of the previous month. Refs #14711"
self.pubdate_list = [
datetime.date(2010, month, day)
for month, day in ((9, 1), (10, 2), (11, 3))
]
for pubdate in self.pubdate_list:
name = str(pubdate)
Book.objects.create(name=name, slug=name, pages=100, pubdate=pubdate)
res = self.client.get('/dates/books/2010/nov/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 10, 1))
# The following test demonstrates the bug
res = self.client.get('/dates/books/2010/nov/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 10, 1))
# The bug does not occur here because a Book with pubdate of Sep 1 exists
res = self.client.get('/dates/books/2010/oct/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 9, 1))
def test_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in month view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/2011/dec/')
self.assertEqual(list(res.context['date_list']), list(sorted(res.context['date_list'])))
class WeekArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_week_view(self):
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_week.html')
self.assertEqual(res.context['book_list'][0], Book.objects.get(pubdate=datetime.date(2008, 10, 1)))
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
# Since allow_empty=False, next/prev weeks must be valid
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_allow_empty(self):
# allow_empty = False, empty week
res = self.client.get('/dates/books/2008/week/12/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2008/week/12/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['week'], datetime.date(2008, 3, 23))
# Since allow_empty=True, next/prev are allowed to be empty weeks
self.assertEqual(res.context['next_week'], datetime.date(2008, 3, 30))
self.assertEqual(res.context['previous_week'], datetime.date(2008, 3, 16))
# allow_empty but not allow_future: next_week should be empty
url = datetime.date.today().strftime('/dates/books/%Y/week/%U/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], None)
def test_week_view_allow_future(self):
# January 7th always falls in week 1, given Python's definition of week numbers
future = datetime.date(datetime.date.today().year + 1, 1, 7)
future_sunday = future - datetime.timedelta(days=(future.weekday() + 1) % 7)
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/week/1/' % future.year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/week/1/allow_future/' % future.year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['week'], future_sunday)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty weeks
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2008, 9, 28))
# allow_future, but not allow_empty, with a current week. So next
# should be in the future
res = self.client.get('/dates/books/2008/week/39/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], future_sunday)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_paginated(self):
week_start = datetime.date(2008, 9, 28)
week_end = week_start + datetime.timedelta(days=7)
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_week.html')
def test_week_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/week/no_week/')
self.assertEqual(res.status_code, 404)
def test_week_start_Monday(self):
# Regression for #14752
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
res = self.client.get('/dates/books/2008/week/39/monday/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 29))
def test_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
class DayArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_day_view(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_day.html')
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['day'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev days must be valid.
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
def test_day_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get('/dates/books/2000/jan/1/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2000/jan/1/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['day'], datetime.date(2000, 1, 1))
# Since it's allow empty, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context['next_day'], datetime.date(2000, 1, 2))
self.assertEqual(res.context['previous_day'], datetime.date(1999, 12, 31))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/%d/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], None)
def test_day_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['day'], future)
# allow_future but not allow_empty, next/prev must be valid
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month.
res = self.client.get('/dates/books/2008/oct/01/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], future)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
# allow_future for yesterday, next_day is today (#17192)
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
res = self.client.get('/dates/books/%s/allow_empty_and_future/'
% yesterday.strftime('%Y/%b/%d').lower())
self.assertEqual(res.context['next_day'], today)
def test_day_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_day.html')
def test_next_prev_context(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.content, b"Archive for Oct. 1, 2008. Previous day is May 1, 2006")
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/')
self.assertEqual(res.status_code, 200)
def test_day_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/oct/no_day/')
self.assertEqual(res.status_code, 404)
def test_today_view(self):
res = self.client.get('/dates/books/today/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/today/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['day'], datetime.date.today())
def test_datetime_day_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_day_view(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 404)
class DateDetailViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_date_detail_by_pk(self):
res = self.client.get('/dates/books/2008/oct/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=1))
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_date_detail_by_slug(self):
res = self.client.get('/dates/books/2006/may/01/byslug/dreaming-in-code/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(slug='dreaming-in-code'))
def test_date_detail_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
def test_date_detail_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", slug="new-new", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/new-new/' % urlbit)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/%s/allow_future/' % (urlbit, b.id))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], b)
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_invalid_url(self):
self.assertRaises(AttributeError, self.client.get, "/dates/books/2008/oct/01/nopk/")
def test_get_object_custom_query(self):
"""
Ensure that custom querys are used when provided to
BaseDateDetailView.get_object()
Refs #16918.
"""
res = self.client.get(
'/dates/books/get_object_custom_query/2006/may/01/2/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=2))
self.assertEqual(res.context['book'], Book.objects.get(pk=2))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
res = self.client.get(
'/dates/books/get_object_custom_query/2008/oct/01/1/')
self.assertEqual(res.status_code, 404)
def test_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 404)
| mit | 4,528,030,635,448,044,000 | 48.608 | 140 | 0.652959 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.1/Lib/sqlite3/test/hooks.py | 1 | 6677 | #-*- coding: ISO-8859-1 -*-
# pysqlite2/test/hooks.py: tests for various SQLite-specific hooks
#
# Copyright (C) 2006-2007 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import sqlite3 as sqlite
class CollationTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def CheckCreateCollationNotCallable(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("X", 42)
self.fail("should have raised a TypeError")
except TypeError as e:
self.failUnlessEqual(e.args[0], "parameter must be callable")
def CheckCreateCollationNotAscii(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("collä", lambda x, y: (x > y) - (x < y))
self.fail("should have raised a ProgrammingError")
except sqlite.ProgrammingError as e:
pass
def CheckCollationIsUsed(self):
if sqlite.version_info < (3, 2, 1): # old SQLite versions crash on this test
return
def mycoll(x, y):
# reverse order
return -((x > y) - (x < y))
con = sqlite.connect(":memory:")
con.create_collation("mycoll", mycoll)
sql = """
select x from (
select 'a' as x
union
select 'b' as x
union
select 'c' as x
) order by x collate mycoll
"""
result = con.execute(sql).fetchall()
if result[0][0] != "c" or result[1][0] != "b" or result[2][0] != "a":
self.fail("the expected order was not returned")
con.create_collation("mycoll", None)
try:
result = con.execute(sql).fetchall()
self.fail("should have raised an OperationalError")
except sqlite.OperationalError as e:
self.failUnlessEqual(e.args[0].lower(), "no such collation sequence: mycoll")
def CheckCollationRegisterTwice(self):
"""
Register two different collation functions under the same name.
Verify that the last one is actually used.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", lambda x, y: (x > y) - (x < y))
con.create_collation("mycoll", lambda x, y: -((x > y) - (x < y)))
result = con.execute("""
select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
""").fetchall()
if result[0][0] != 'b' or result[1][0] != 'a':
self.fail("wrong collation function is used")
def CheckDeregisterCollation(self):
"""
Register a collation, then deregister it. Make sure an error is raised if we try
to use it.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", lambda x, y: (x > y) - (x < y))
con.create_collation("mycoll", None)
try:
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
self.fail("should have raised an OperationalError")
except sqlite.OperationalError as e:
if not e.args[0].startswith("no such collation sequence"):
self.fail("wrong OperationalError raised")
class ProgressTests(unittest.TestCase):
def CheckProgressHandlerUsed(self):
"""
Test that the progress handler is invoked once it is set.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
con.execute("""
create table foo(a, b)
""")
self.failUnless(progress_calls)
def CheckOpcodeCount(self):
"""
Test that the opcode argument is respected.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
curs = con.cursor()
curs.execute("""
create table foo (a, b)
""")
first_count = len(progress_calls)
progress_calls = []
con.set_progress_handler(progress, 2)
curs.execute("""
create table bar (a, b)
""")
second_count = len(progress_calls)
self.failUnless(first_count > second_count)
def CheckCancelOperation(self):
"""
Test that returning a non-zero value stops the operation in progress.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 1
con.set_progress_handler(progress, 1)
curs = con.cursor()
self.assertRaises(
sqlite.OperationalError,
curs.execute,
"create table bar (a, b)")
def CheckClearHandler(self):
"""
Test that setting the progress handler to None clears the previously set handler.
"""
con = sqlite.connect(":memory:")
action = 0
def progress():
action = 1
return 0
con.set_progress_handler(progress, 1)
con.set_progress_handler(None, 1)
con.execute("select 1 union select 2 union select 3").fetchall()
self.failUnlessEqual(action, 0, "progress handler was not cleared")
def suite():
collation_suite = unittest.makeSuite(CollationTests, "Check")
progress_suite = unittest.makeSuite(ProgressTests, "Check")
return unittest.TestSuite((collation_suite, progress_suite))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| mit | -6,211,855,525,565,105,000 | 34.515957 | 91 | 0.59323 | false |
evansd/django-envsettings | envsettings/cache.py | 1 | 4531 | from .base import URLSettingsBase, is_importable
class CacheSettings(URLSettingsBase):
REDIS_CONFIG = {'BACKEND': 'django_redis.cache.RedisCache', 'OPTIONS': {}}
CONFIG = {
'locmem': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'file': {'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache'},
# Memcached backends are auto-selected based on what packages are installed
'memcached': {'BACKEND': None},
'memcached-binary': {'BACKEND': None, 'BINARY': True},
'redis': REDIS_CONFIG,
'rediss': REDIS_CONFIG
}
def handle_file_url(self, parsed_url, config):
if parsed_url.path == '/dev/null':
config['BACKEND'] = 'django.core.cache.backends.dummy.DummyCache'
else:
config['LOCATION'] = parsed_url.path
return config
def handle_locmem_url(self, parsed_url, config):
config['LOCATION'] = '{0}{1}'.format(
parsed_url.hostname or '', parsed_url.path or '')
return config
def handle_redis_url(self, parsed_url, config):
if not parsed_url.hostname:
parsed_url = parsed_url._replace(scheme='unix')
config['LOCATION'] = parsed_url.geturl()
return config
def handle_rediss_url(self, parsed_url, config):
return self.handle_redis_url(parsed_url, config)
def handle_memcached_url(self, parsed_url, config):
if parsed_url.hostname:
netloc = parsed_url.netloc.split('@')[-1]
if ',' in netloc:
location = netloc.split(',')
else:
location = '{}:{}'.format(
parsed_url.hostname,
parsed_url.port or 11211)
else:
location = 'unix:{}'.format(parsed_url.path)
config['LOCATION'] = location
if parsed_url.username:
config['USERNAME'] = parsed_url.username
if parsed_url.password:
config['PASSWORD'] = parsed_url.password
# Only auto-select backend if one hasn't been explicitly configured
if not config['BACKEND']:
self.set_memcached_backend(config)
return config
def handle_memcached_binary_url(self, parsed_url, config):
return self.handle_memcached_url(parsed_url, config)
def set_memcached_backend(self, config):
"""
Select the most suitable Memcached backend based on the config and
on what's installed
"""
# This is the preferred backend as it is the fastest and most fully
# featured, so we use this by default
config['BACKEND'] = 'django_pylibmc.memcached.PyLibMCCache'
if is_importable(config['BACKEND']):
return
# Otherwise, binary connections can use this pure Python implementation
if config.get('BINARY') and is_importable('django_bmemcached'):
config['BACKEND'] = 'django_bmemcached.memcached.BMemcached'
return
# For text-based connections without any authentication we can fall
# back to Django's core backends if the supporting libraries are
# installed
if not any([config.get(key) for key in ('BINARY', 'USERNAME', 'PASSWORD')]):
if is_importable('pylibmc'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.PyLibMCCache'
elif is_importable('memcached'):
config['BACKEND'] = \
'django.core.cache.backends.memcached.MemcachedCache'
def auto_config_memcachier(self, env, prefix='MEMCACHIER'):
try:
servers, username, password = [
env[prefix + key] for key in [
'_SERVERS', '_USERNAME', '_PASSWORD']]
except KeyError:
return
return 'memcached-binary://{username}:{password}@{servers}/'.format(
servers=servers, username=username, password=password)
def auto_config_memcachedcloud(self, env):
return self.auto_config_memcachier(env, prefix='MEMCACHEDCLOUD')
def auto_config_redis_url(self, env):
return env.get('REDIS_URL')
def auto_config_redistogo(self, env):
return env.get('REDISTOGO_URL')
def auto_config_rediscloud(self, env):
return env.get('REDISCLOUD_URL')
def auto_config_openredis(self, env):
return env.get('OPENREDIS_URL')
def auto_config_redisgreen(self, env):
return env.get('REDISGREEN_URL')
| mit | 6,058,745,335,508,903,000 | 38.745614 | 84 | 0.601854 | false |
akimtke/arke | textserver/main.py | 1 | 2812 | # -*- coding: utf-8 -*-
import pyjsonrpc
from gsmmodem.modem import GsmModem, SentSms, Sms
from gsmmodem.exceptions import TimeoutException, PinRequiredError, IncorrectPinError
def text(number, message, key):
if key.strip() == '9703BB8D5A':
print "Creating modem instance"
modem = GsmModem('/dev/ttyUSB0', 9600)
try:
print "Connecting modem"
modem.connect()
except PinRequiredError:
print "Pin required"
try:
print "Waiting for Network coverage info"
modem.waitForNetworkCoverage(5)
except TimeoutException:
print "Signal strength not strong enough"
return "No signal"
else:
try:
print "Sending %s to %s" % (message, number)
sms = modem.sendSms(number, message)
except TimeoutException:
print "Failed to send message"
return 'Error encountered'
print "Closing modem"
modem.close()
return True
else:
return 'Key is not correct'
def getUnreadText(key):
if key.strip() == '9703BB8D5A':
modem = GsmModem('/dev/ttyUSB0', 9600)
try:
print "Connecting mode"
modem.connect()
except:
return "Error connecting"
try:
messages = modem.listStoredSms(status=Sms.STATUS_RECEIVED_UNREAD)
except Exception as e:
return str(e)
modem.close()
retString = ""
print "Got %d messages" % len(messages)
for message in messages:
retString = retString + "%s : %s" % (message.number, message.text)
return retString
else:
return "Incorrect key"
def getAllText(key):
if key.strip() == '9703BB8D5A':
modem = GsmModem('/dev/ttyUSB0', 9600)
try:
print "Connecting modem"
modem.connect()
except Exception as e:
return str(e)
try:
messages = modem.listStoredSms()
except Exception as e:
return str(e)
modem.close()
retString = ""
print "Got %d messages" % len(messages)
for message in messages:
retString = retString + "%s : %s" % (message.number, message.text) + "\n"
return retString
else:
return "Incorrect key"
class RequestHandler(pyjsonrpc.HttpRequestHandler):
methods = {
"text": text,
"getUnreadText": getUnreadText,
"getAllText": getAllText
}
http_server = pyjsonrpc.ThreadingHttpServer(
server_address = ('192.168.0.20', 8081),
RequestHandlerClass = RequestHandler
)
print "Starting HTTP Server..."
http_server.serve_forever()
| gpl-3.0 | 7,310,618,908,439,127,000 | 26.841584 | 85 | 0.562945 | false |
schleichdi2/OPENNFR-6.3-CORE | opennfr-openembedded-core/meta/lib/oeqa/selftest/cases/resulttooltests.py | 1 | 6605 | #
# SPDX-License-Identifier: MIT
#
import os
import sys
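# Make the resulttool modules under scripts/lib importable when running these selftests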
basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
lib_path = basepath + '/scripts/lib'
sys.path = sys.path + [lib_path]
from resulttool.report import ResultsTextReport
from resulttool import regression as regression
from resulttool import resultutils as resultutils
from oeqa.selftest.case import OESelftestTestCase
class ResultToolTests(OESelftestTestCase):
base_results_data = {'base_result1': {'configuration': {"TEST_TYPE": "runtime",
"TESTSERIES": "series1",
"IMAGE_BASENAME": "image",
"IMAGE_PKGTYPE": "ipk",
"DISTRO": "mydistro",
"MACHINE": "qemux86"},
'result': {}},
'base_result2': {'configuration': {"TEST_TYPE": "runtime",
"TESTSERIES": "series1",
"IMAGE_BASENAME": "image",
"IMAGE_PKGTYPE": "ipk",
"DISTRO": "mydistro",
"MACHINE": "qemux86-64"},
'result': {}}}
target_results_data = {'target_result1': {'configuration': {"TEST_TYPE": "runtime",
"TESTSERIES": "series1",
"IMAGE_BASENAME": "image",
"IMAGE_PKGTYPE": "ipk",
"DISTRO": "mydistro",
"MACHINE": "qemux86"},
'result': {}},
'target_result2': {'configuration': {"TEST_TYPE": "runtime",
"TESTSERIES": "series1",
"IMAGE_BASENAME": "image",
"IMAGE_PKGTYPE": "ipk",
"DISTRO": "mydistro",
"MACHINE": "qemux86"},
'result': {}},
'target_result3': {'configuration': {"TEST_TYPE": "runtime",
"TESTSERIES": "series1",
"IMAGE_BASENAME": "image",
"IMAGE_PKGTYPE": "ipk",
"DISTRO": "mydistro",
"MACHINE": "qemux86-64"},
'result': {}}}
def test_report_can_aggregate_test_result(self):
result_data = {'result': {'test1': {'status': 'PASSED'},
'test2': {'status': 'PASSED'},
'test3': {'status': 'FAILED'},
'test4': {'status': 'ERROR'},
'test5': {'status': 'SKIPPED'}}}
report = ResultsTextReport()
result_report = report.get_aggregated_test_result(None, result_data, 'DummyMachine')
self.assertTrue(result_report['passed'] == 2, msg="Passed count not correct:%s" % result_report['passed'])
self.assertTrue(result_report['failed'] == 2, msg="Failed count not correct:%s" % result_report['failed'])
self.assertTrue(result_report['skipped'] == 1, msg="Skipped count not correct:%s" % result_report['skipped'])
def test_regression_can_get_regression_base_target_pair(self):
results = {}
resultutils.append_resultsdata(results, ResultToolTests.base_results_data)
resultutils.append_resultsdata(results, ResultToolTests.target_results_data)
self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
    def test_regression_can_get_regression_result(self):
base_result_data = {'result': {'test1': {'status': 'PASSED'},
'test2': {'status': 'PASSED'},
'test3': {'status': 'FAILED'},
'test4': {'status': 'ERROR'},
'test5': {'status': 'SKIPPED'}}}
target_result_data = {'result': {'test1': {'status': 'PASSED'},
'test2': {'status': 'FAILED'},
'test3': {'status': 'PASSED'},
'test4': {'status': 'ERROR'},
'test5': {'status': 'SKIPPED'}}}
result, text = regression.compare_result(self.logger, "BaseTestRunName", "TargetTestRunName", base_result_data, target_result_data)
self.assertTrue(result['test2']['base'] == 'PASSED',
msg="regression not correct:%s" % result['test2']['base'])
self.assertTrue(result['test2']['target'] == 'FAILED',
msg="regression not correct:%s" % result['test2']['target'])
self.assertTrue(result['test3']['base'] == 'FAILED',
msg="regression not correct:%s" % result['test3']['base'])
self.assertTrue(result['test3']['target'] == 'PASSED',
msg="regression not correct:%s" % result['test3']['target'])
def test_merge_can_merged_results(self):
results = {}
resultutils.append_resultsdata(results, ResultToolTests.base_results_data, configmap=resultutils.flatten_map)
resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
| gpl-2.0 | -8,300,766,894,159,079,000 | 66.397959 | 139 | 0.421953 | false |
elifesciences/elife-metrics | src/article_metrics/ga_metrics/elife_v5.py | 1 | 1848 | "elife_v5, the addition of /executable paths"
from . import elife_v1
from article_metrics.utils import lfilter
import re
import logging
LOG = logging.getLogger(__name__)
event_counts_query = elife_v1.event_counts_query
event_counts = elife_v1.event_counts
# views counting
def path_counts_query(table_id, from_date, to_date):
"returns a query specific to this era that we can send to Google Analytics"
# use the v1 query as a template
new_query = elife_v1.path_counts_query(table_id, from_date, to_date)
new_query['filters'] = ','.join([
# ga:pagePath=~^/articles/50101$
r'ga:pagePath=~^/articles/[0-9]+$', # note: GA doesn't support {n,m} syntax ...
# ga:pagePath=~^/articles/50101/executable$
r'ga:pagePath=~^/articles/[0-9]+/executable$',
])
return new_query
# ...python *does* support {n,m} though, so we can filter bad article IDs in post
# parse the article ID from a path that may include an optional '/executable'
REGEX = r"/articles/(?P<artid>\d{1,5})(/executable)?$"
PATH_RE = re.compile(REGEX, re.IGNORECASE)
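# e.g. '/articles/50101' and '/articles/50101/executable' both yield
# artid '50101'; paths whose article ID is longer than five digits do not
# match and are skipped by path_count() below.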
def path_count(pair):
"given a pair of (path, count), returns a triple of (art-id, art-type, count)"
path, count = pair
regex_obj = re.match(PATH_RE, path.lower())
if not regex_obj:
LOG.debug("skpping unhandled path %s", pair)
return
# "/articles/12345/executable" => {'artid': 12345}
data = regex_obj.groupdict()
count_type = 'full' # vs 'abstract' or 'digest' from previous eras
return data['artid'], count_type, int(count)
def path_counts(path_count_pairs):
"""takes raw path data from GA and groups by article, returning a
list of (artid, count-type, count)"""
path_count_triples = lfilter(None, [path_count(pair) for pair in path_count_pairs])
return elife_v1.group_results(path_count_triples)
| gpl-3.0 | -3,663,562,856,745,356,300 | 36.714286 | 87 | 0.671537 | false |
credativ/gofer | src/gofer/messaging/consumer.py | 1 | 4309 | # Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from time import sleep
from logging import getLogger
from gofer.common import Thread, released
from gofer.messaging.model import InvalidDocument
from gofer.messaging.adapter.model import Reader
log = getLogger(__name__)
class ConsumerThread(Thread):
"""
An AMQP (abstract) consumer.
"""
def __init__(self, node, url, wait=3):
"""
:param node: An AMQP queue.
:type node: gofer.messaging.adapter.model.Node
:param url: The broker URL.
:type url: str
:param wait: Number of seconds to wait for a message.
:type wait: int
"""
Thread.__init__(self, name=node.name)
self.url = url
self.node = node
self.wait = wait
self.authenticator = None
self.reader = None
self.setDaemon(True)
def shutdown(self):
"""
Shutdown the consumer.
"""
self.abort()
@released
def run(self):
"""
Main consumer loop.
"""
self.reader = Reader(self.node, self.url)
self.reader.authenticator = self.authenticator
self.open()
try:
while not Thread.aborted():
self.read()
finally:
self.close()
def open(self):
"""
Open the reader.
"""
while not Thread.aborted():
try:
self.reader.open()
break
except Exception:
log.exception(self.getName())
sleep(30)
def close(self):
"""
Close the reader.
"""
try:
self.reader.close()
except Exception:
log.exception(self.getName())
def read(self):
"""
Read and process incoming documents.
"""
try:
wait = self.wait
reader = self.reader
message, document = reader.next(wait)
if message is None:
# wait expired
return
log.debug('{%s} read: %s', self.getName(), document)
self.dispatch(document)
message.ack()
except InvalidDocument, invalid:
self.rejected(invalid.code, invalid.description, invalid.document, invalid.details)
except Exception:
log.exception(self.getName())
sleep(60)
self.close()
self.open()
def rejected(self, code, description, document, details):
"""
Called to process the received (invalid) document.
This method intended to be overridden by subclasses.
:param code: The rejection code.
:type code: str
:param description: rejection description
:type description: str
:param document: The received *json* document.
:type document: str
:param details: The explanation.
:type details: str
"""
log.debug('rejected: %s', document)
def dispatch(self, document):
"""
Called to process the received document.
This method intended to be overridden by subclasses.
:param document: The received *json* document.
:type document: str
"""
log.debug('dispatched: %s', document)
class Consumer(ConsumerThread):
"""
An AMQP consumer.
    Thread used to consume messages from the specified queue.
    On receipt, each message is used to build a document
and passed to dispatch().
"""
def __init__(self, node, url=None):
"""
:param node: The AMQP node.
:type node: gofer.messaging.adapter.model.Node
:param url: The broker URL.
:type url: str
"""
super(Consumer, self).__init__(node, url)
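
# A minimal usage sketch (illustrative only; the queue name and broker URL
# are placeholders, and Node is assumed to be constructible from a queue
# name as suggested by the docstrings above):
#
#     from gofer.messaging.adapter.model import Node
#
#     class Printer(Consumer):
#         def dispatch(self, document):
#             print(document)
#
#     Printer(Node('my-queue'), url='amqp://localhost').start()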
| lgpl-2.1 | 3,765,145,563,174,778,400 | 28.114865 | 95 | 0.576468 | false |
pyslackers/sirbot-slack | sirbot/slack/store/channel.py | 1 | 3727 | import json
import logging
import time
from sirbot.core import registry
from .store import SlackStore, SlackChannelItem
from .. import database
logger = logging.getLogger(__name__)
class Channel(SlackChannelItem):
"""
Class representing a slack channel.
"""
def __init__(self, id_, raw=None, last_update=None):
"""
:param id_: id_ of the channel
"""
super().__init__(id_, raw, last_update)
@property
def member(self):
return self._raw.get('is_member', False)
@member.setter
def member(self, _):
raise NotImplementedError
class ChannelStore(SlackStore):
"""
Store for the slack channels
"""
def __init__(self, client, refresh=3600):
super().__init__(client, refresh)
async def all(self):
channels = list()
channels_raw = await self._client.get_channels()
for channel_raw in channels_raw:
channel = await self.get(channel_raw['id'])
channels.append(channel)
return channels
async def get(self, id_=None, name=None, fetch=False):
"""
Return a Channel from the Channel Manager
:param id_: id of the channel
:param name: name of the channel
        :param fetch: if True, query the slack api for updated channel info
:return: Channel
"""
if not id_ and not name:
raise SyntaxError('id_ or name must be supplied')
db = registry.get('database')
if name:
data = await database.__dict__[db.type].channel.find_by_name(db,
name)
else:
data = await database.__dict__[db.type].channel.find_by_id(db, id_)
if data and (
fetch or data['last_update'] < (time.time() - self._refresh)):
channel = await self._query_by_id(data['id'])
if channel:
await self._add(channel, db=db)
else:
await self._delete(channel, db=db)
elif data:
channel = Channel(
id_=data['id'],
raw=json.loads(data['raw']),
last_update=data['last_update']
)
else:
logger.debug('Channel "%s" not found in the channel store. '
'Querying the Slack API', (id_ or name))
if id_:
channel = await self._query_by_id(id_)
else:
channel = await self._query_by_name(name)
if channel:
await self._add(channel, db=db)
return channel
async def _add(self, channel, db=None):
"""
Add a channel to the channel store
"""
if not db:
db = registry.get('database')
await database.__dict__[db.type].channel.add(db, channel)
await db.commit()
async def _delete(self, id_, db=None):
"""
Delete a channel from the channel store
:param id_: id of the channel
:return: None
"""
if not db:
db = registry.get('database')
await database.__dict__[db.type].channel.delete(db, id_)
await db.commit()
async def _query_by_id(self, id_):
raw = await self._client.get_channel(id_)
channel = Channel(
id_=id_,
raw=raw,
last_update=time.time(),
)
return channel
async def _query_by_name(self, name):
channels = await self._client.get_channels()
for channel in channels:
if channel['name'] == name:
c = await self.get(id_=channel['id'])
return c
| mit | -1,458,524,759,698,175,000 | 26.007246 | 79 | 0.523209 | false |
JoshuaBaunach/RollerCoasterDataBot | main.py | 1 | 3525 | #!/usr/bin/python
# This file is the main file that manages everything
from redditManager import RedditManager
import json
import time
import getpass
import sys
import traceback
def main():
# Clear the logfile
logfile = open('logfile.log', 'w')
logfile.write('')
logfile.close()
writeLogfile('Starting Program')
non_api = False
config_name = 'config.json'
# Determine if the program is going to run in non-api mode (i.e. read input from console)
if (len(sys.argv) != 1):
if ('-n' in sys.argv):
print 'Running in non-api mode'
non_api = True
            manager = RedditManager(non_api=True)
if ('-c' in sys.argv):
config_name = sys.argv[sys.argv.index('-c')+1]
if ('-n' not in sys.argv):
# Read from the config file
configFile = open(config_name)
configJSON = json.load(configFile)
# If no password is provided, prompt for one
if (configJSON['password'] == ''):
configJSON['password'] = getpass.getpass('Password for /u/' + configJSON['username'] + ': ')
# If no user agent is provided, use a default one
if (configJSON['userAgent'] == ''):
configJSON['userAgent'] = 'RollerCoasterDataBot: Created by /u/MrManGuy16, being used under /u/RollerCoasterDataBot'
# Set up the Reddit Manager
manager = RedditManager(configJSON['clientId'], configJSON['clientSecret'], configJSON['username'], configJSON['password'], configJSON['userAgent'])
if (not non_api):
# Check the newest 50 posts for unaccounted queries in each of the whitelisted Subreddits
try:
for sub in configJSON['whitelistedSubreddits']:
manager.findQueries(sub, 50)
except Exception as excep:
writeLogfile('Error encountered while searching through comments. Details Below: \n' + excep.message)
time.sleep(60)
# Main loop
while True:
# Check the newest 10 posts for unaccounted queries in each of the whitelisted Subreddits
try:
for sub in configJSON['whitelistedSubreddits']:
manager.findQueries(sub, 10)
except Exception as excep:
writeLogfile('Error encountered while searching through comments. Details Below: \n' + excep.message)
time.sleep(60)
else:
while True:
fullText = raw_input('Query to process: ')
# Try to see if it is a query
beginIndex = fullText.find('{')
if (beginIndex != -1):
endIndex = fullText.find('}', beginIndex)
if (endIndex != -1):
query = fullText[beginIndex+1:endIndex]
try:
commentText = manager.buildComment(query)
print commentText.encode('utf-8')
except Exception as excep:
commentText = manager.buildErrorComment(excep.message)
print commentText.encode('utf-8')
print 'Here is what went wrong:'
traceback.print_exc()
# Function to write data to the logfile
def writeLogfile(message):
logfile = open('logfile.log', 'a')
# Get the timestamp
timestamp = '[' + time.strftime('%d/%m/%y %H:%M:%S', time.gmtime()) + '] '
logfile.write(timestamp + message + '\n')
logfile.close()
if __name__ == '__main__':
main()
| gpl-3.0 | 3,522,323,599,883,457,500 | 32.894231 | 156 | 0.582695 | false |
pjh/vm-analyze | app_scripts/app_python.py | 1 | 5200 | # Virtual memory analysis scripts.
# Developed 2012-2014 by Peter Hornyack, [email protected]
# Copyright (c) 2012-2014 Peter Hornyack and University of Washington
# Python script to run another Python script as a test application.
#
# Build + setup instructions: do these once / occasionally by hand, not
# done automatically by this script.
# Ensure that py_version is in your path.
# ...
# Run the firefox app script first, then copy its trace-events-full
# file and its target_pids file to match the locations specified
# by py_inputpids and py_inputfile below.
#
#
# Note: this script uses timeout features that were added to Python 3.3
# (available in Ubuntu 13.04) - if this is a problem, they should be
# fairly easy to eliminate from the code, just search for "timeout".
from app_scripts.app_to_run_class import *
from trace.run_common import *
from trace.traceinfo_class import traceinfo
py_version = 'python3.3'
py_scriptname = "{}/analyze_trace.py".format(scripts_dir)
# Run the analyze_trace.py script in the vm-analyze repository
# directly; don't try to copy it to another location and run
# it from there, as it makes the module imports / dependencies
# too hard to deal with.
py_app_dir = "{}/pythonapp".format(appscripts_dir)
py_inputpids = "{}/ffox-target-pids".format(py_app_dir)
py_inputfile = "{}/ffox-trace-events-full".format(py_app_dir)
py_outputdir = py_app_dir
py_cmd = ("{} {} -a ffox -p {} {} {}").format(py_version,
py_scriptname, py_inputpids, py_inputfile, py_outputdir)
# Enable (or leave enabled) options that will require more memory:
# physical page events.
# As of 20140703, running analyze_trace.py on a 420 MB trace-events-full
# from a firefox run (visiting 30 websites) with Rss events enabled
# takes just over five minutes, with ~600 MB virtual and ~450 MB physical
# memory used during the analysis.
poll_period = 10
##############################################################################
# Tracing should already be activated when this method is called - it
# will call trace_wait() while the python script runs.
# Returns a tuple:
# (True on success, False on error;
# pid of the python process on success)
def run_py_script(outputdir, py_stdout, py_stderr, tracer):
tag = 'run_py_script'
# http://docs.python.org/3.2/library/subprocess.html
args = shlex.split(py_cmd)
print_debug(tag, ("executing py_cmd=\"{}\"").format(py_cmd))
py_p = subprocess.Popen(args, stdout=py_stdout, stderr=py_stderr)
if not py_p:
print_error(tag, ("subprocess.Popen returned None; "
"py_cmd={}").format(py_cmd))
return (False, -1)
if not tracer.perf_on():
print_error(tag, ("perf_on() failed, but continuing"))
prefix = 'py'
retcode = tracer.trace_wait(py_p, poll_period, prefix)
tracer.perf_off()
if retcode != "success":
# Assume that trace buffer filling up is an error for this app.
print_error(tag, ("trace_wait() returned {}, either due to process "
"error or trace error; py_p.returncode is {}").format(
retcode, py_p.returncode))
return (False, -1)
elif py_p.returncode is None:
print_error(tag, ("py process' returncode not set?!?").format())
return (False, -1)
elif py_p.returncode != 0:
print_error(tag, ("py process returned error {}").format(
py_p.returncode))
return (False, -1)
print_debug(tag, ("py process exited successfully, output is "
"in directory {}").format(outputdir))
return (True, py_p.pid)
def py_init(outputdir):
tag = 'py_init'
py_stdout_fname = "{}/python-stdout".format(outputdir)
py_stderr_fname = "{}/python-stderr".format(outputdir)
py_stdout = open(py_stdout_fname, 'w')
py_stderr = open(py_stderr_fname, 'w')
return (py_stdout, py_stderr)
def py_cleanup(files_to_close):
for f in files_to_close:
f.close()
return
# Returns: a target_pids list containing the top-level pid of the
# python process, or an empty list on error.
def py_exec(outputdir):
tag = 'py_exec'
target_pids = []
(py_stdout, py_stderr) = py_init(outputdir)
tracer = traceinfo('python')
success = tracer.trace_on(outputdir, "starting python")
if not success:
print_error(tag, ("trace_on failed, returning [] now").format())
py_cleanup([py_stdout, py_stderr])
return []
(success, py_pid) = run_py_script(outputdir, py_stdout, py_stderr,
tracer)
if success and py_pid > 1:
target_pids.append(py_pid)
print_debug(tag, ("run_py_script() successful, target_pids: "
"{}").format(target_pids))
else:
print_error(tag, ("run_py_script() returned {} and {}; will "
"return empty target_pids list").format(success, py_pid))
(tracesuccess, buffer_full) = tracer.trace_off(
descr="python done".format())
if not tracesuccess or buffer_full:
print_error(tag, ("trace buffer filled up before "
"tracing turned off - considering this an error "
"here, but echo {} > target_pids file to analyze "
"trace anyway").format(py_pid))
success = False
target_pids = []
py_cleanup([py_stdout, py_stderr])
return target_pids
# First arg is "appname" member: used to construct output directory.
python_app = app_to_run('python', py_exec)
if __name__ == '__main__':
print_error_exit("not an executable module")
| bsd-3-clause | 8,262,389,718,545,666,000 | 33.666667 | 78 | 0.688077 | false |
BBN-Q/Quince | quince/node.py | 1 | 24377 | # coding: utf-8
# Raytheon BBN Technologies 2016
# Contributors: Graham Rowlands
#
# This file contains the node descriptions
from qtpy.QtGui import *
from qtpy.QtCore import *
from qtpy.QtWidgets import *
from .wire import *
class Node(QGraphicsRectItem):
"""docstring for Node"""
def __init__(self, name, scene, parent=None):
super(Node, self).__init__(parent=parent)
self.name = name
self.scene = scene
self.setFlag(QGraphicsItem.ItemIsMovable)
self.setFlag(QGraphicsItem.ItemIsSelectable)
self.setFlag(QGraphicsItem.ItemSendsGeometryChanges)
self.outputs = {}
self.inputs = {}
self.allowed_destinations = {}
self.parameters = {}
self.parameter_order = {}
self.collapsed = False
self.bg_color = self.default_bg_color = QColor(240,240,240,235)
self.edge_color = self.default_edge_color = QColor(200,200,200)
self.edge_thick = 0.75
self.setRect(0,0,100,30)
# Title bar
self.title_bar = QGraphicsRectItem(parent=self)
self.title_bar.setRect(0,0,100,20)
self.title_color = self.default_title_color = QColor(80,80,100)
self.label = TitleText(self.name, parent=self)
self.label.setDefaultTextColor(Qt.white)
# Glossy flair
shiny_part = QGraphicsPolygonItem(QPolygonF([QPointF(0,0), QPointF(120,0), QPointF(0,8)]),
parent=self)
shiny_part.setBrush(QBrush(QColor(200,200,250,50)))
shiny_part.setPen(QPen(Qt.NoPen))
# Enabled by default
self.enabled = True
# For auspex interoperability
self.type = None
self.is_instrument = False
self.auspex_object = None
# Any additional json we should retain
self.base_params = None
# Dividing line and collapse button
self.divider = QGraphicsLineItem(20, 0, self.rect().width()-5, 0, self)
self.collapse_box = CollapseBox(parent=self)
self.collapse_box.setX(10)
# Make sure things are properly sized
self.min_height = 30.0
self.min_width = 120.0
self.update_min_width()
# if self.label.boundingRect().topRight().x() > 120:
# self.min_width = self.label.boundingRect().topRight().x()+20
# self.setRect(0,0,self.label.boundingRect().topRight().x()+20,30)
# else:
# self.min_width = 120.0
# Resize Handle
self.resize_handle = ResizeHandle(parent=self)
self.resize_handle.setPos(self.rect().width()-8, self.rect().height()-8)
# Disable box
self.disable_box = None
# Synchronizing parameters
self.changing = False
# Set up hovering
self.setAcceptHoverEvents(True)
self.update_screen(self.scene.window.devicePixelRatio())
def update_screen(self, pixel_ratio):
if pixel_ratio < 2:
# Render a nice Drop Shadow
shadow = QGraphicsDropShadowEffect()
shadow.setBlurRadius(18.0)
shadow.setOffset(0.0, 10.0)
shadow.setColor(QColor("#99121212"))
self.setGraphicsEffect(shadow)
@property
def enabled(self):
return self._enabled
@enabled.setter
def enabled(self, value):
self._enabled = value
if value:
self.bg_color = self.default_bg_color
self.title_color = QColor(80,80,100)
else:
self.bg_color = QColor(140,140,140)
self.title_color = QColor(100,100,100)
self.update()
def hoverEnterEvent(self, event):
self.prev_edge_color = self.edge_color
self.prev_edge_thick = self.edge_thick
self.edge_color = QColor(247,247,247)
self.edge_thick = 1.5
self.update()
def hoverLeaveEvent(self, event):
self.edge_color = self.prev_edge_color
self.edge_thick = self.prev_edge_thick
self.update()
def update_min_width(self):
widths = [p.label.boundingRect().topRight().x() for p in self.parameters.values()]
widths.extend([o.label.boundingRect().topRight().x() for o in self.outputs.values()])
widths.extend([i.label.boundingRect().topRight().x() for i in self.inputs.values()])
widths.append(self.label.boundingRect().topRight().x())
self.min_width = max(widths)+20
if self.min_width < 120:
self.min_width = 120.0
self.itemResize(QPointF(self.min_width - self.rect().width(),0.0))
def value_changed(self, name):
# Update the sweep parameters accordingly
if self.name == "Sweep":
stop = self.parameters['Stop'].value()
start = self.parameters['Start'].value()
incr = self.parameters['Incr.'].value()
steps = self.parameters['Steps'].value()
if name == "Incr.":
if incr != 0.0:
steps = int(float(stop-start)/float(incr))
self.parameters['Steps'].set_value(steps if steps>0 else 1)
elif name == "Steps":
self.parameters['Incr.'].set_value((stop-start)/steps)
else:
self.parameters['Incr.'].set_value((stop-start)/steps)
self.changing = False
def add_output(self, connector):
connector.setParentItem(self)
connector.parent = self
connector.setPos(self.rect().width(),30+15*(len(self.outputs)+len(self.inputs)))
self.outputs[connector.name] = connector
self.change_collapsed_state(self.collapsed) # Just for resizing in this case
def add_input(self, connector):
connector.setParentItem(self)
connector.parent = self
connector.setPos(0,30+15*(len(self.inputs)+len(self.outputs)))
self.inputs[connector.name] = connector
self.change_collapsed_state(self.collapsed) # Just for resizing in this case
def add_parameter(self, param):
param.setParentItem(self)
param.parent = self
self.parameters[param.name] = param
self.parameter_order[len(self.parameter_order)] = param.name
self.change_collapsed_state(self.collapsed) # Just for resizing in this case
def change_collapsed_state(self, collapsed):
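        """Collapse or expand the node: parameters without inputs are hidden
        when collapsed, the remaining connectors and parameters are restacked,
        and the node is resized to fit."""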
self.collapsed = collapsed
self.collapse_box.setRotation(0.0 if self.collapsed else 90.0)
# Update the positions
pos = 32+15*(len(self.inputs)+len(self.outputs))
if len(self.parameters) > 0:
self.divider.setY(pos)
self.collapse_box.setY(pos)
self.divider.setVisible(True)
self.collapse_box.setVisible(True)
pos += 10
else:
self.divider.setVisible(False)
self.collapse_box.setVisible(False)
for i in range(len(self.parameter_order)):
# We completely hide parameters without inputs
if not self.parameters[self.parameter_order[i]].has_input:
if self.collapsed:
self.parameters[self.parameter_order[i]].setVisible(False)
else:
self.parameters[self.parameter_order[i]].setPos(0, pos)
pos += self.parameters[self.parameter_order[i]].height
self.parameters[self.parameter_order[i]].setVisible(True)
else:
self.parameters[self.parameter_order[i]].setVisible(True)
self.parameters[self.parameter_order[i]].setPos(0, pos)
self.parameters[self.parameter_order[i]].set_collapsed(self.collapsed)
if self.collapsed:
pos += self.parameters[self.parameter_order[i]].height_collapsed
else:
pos += self.parameters[self.parameter_order[i]].height
self.setRect(0,0,self.rect().width(), pos)
self.min_height = pos
self.update_min_width()
self.itemResize(QPointF(0.0,0.0))
for k, v in self.parameters.items():
for w in v.wires_in:
w.set_end(v.scenePos())
def update_fields_from_connector(self):
# This is peculiar to the "Sweep Nodes"
wires_out = self.outputs['Swept Param.'].wires_out
if len(wires_out) > 0:
wire_end = wires_out[0].end_obj
self.parameters['Start'].datatype = wire_end.value_box.datatype
self.parameters['Start'].value_box.datatype = wire_end.value_box.datatype
self.parameters['Start'].value_box.min_value = wire_end.value_box.min_value
self.parameters['Start'].value_box.max_value = wire_end.value_box.max_value
self.parameters['Start'].value_box.increment = wire_end.value_box.increment
self.parameters['Start'].value_box.snap = wire_end.value_box.snap
self.parameters['Start'].value_box.set_value(self.parameters['Start'].value())
self.parameters['Stop'].datatype = wire_end.value_box.datatype
self.parameters['Stop'].value_box.datatype = wire_end.value_box.datatype
self.parameters['Stop'].value_box.min_value = wire_end.value_box.min_value
self.parameters['Stop'].value_box.max_value = wire_end.value_box.max_value
self.parameters['Stop'].value_box.increment = wire_end.value_box.increment
self.parameters['Stop'].value_box.snap = wire_end.value_box.snap
self.parameters['Stop'].value_box.set_value(self.parameters['Stop'].value())
self.parameters['Incr.'].datatype = wire_end.value_box.datatype
self.parameters['Incr.'].value_box.datatype = wire_end.value_box.datatype
self.parameters['Incr.'].value_box.min_value = -2*abs(wire_end.value_box.max_value)
self.parameters['Incr.'].value_box.max_value = 2*abs(wire_end.value_box.max_value)
self.parameters['Incr.'].value_box.increment = wire_end.value_box.increment
self.parameters['Incr.'].value_box.snap = wire_end.value_box.snap
self.parameters['Incr.'].value_box.set_value(self.parameters['Incr.'].value())
def update_parameters_from(self, other_node):
# Make sure they are of the same type
if other_node.name == self.name:
for k, v in other_node.parameters.items():
self.parameters[k].set_value(v.value())
def itemChange(self, change, value):
if change == QGraphicsItem.ItemPositionChange:
for k, v in self.outputs.items():
v.setX(self.rect().width())
for w in v.wires_out:
w.set_start(v.pos()+value)
for k, v in self.inputs.items():
for w in v.wires_in:
w.set_end(v.pos()+value)
for k, v in self.parameters.items():
for w in v.wires_in:
w.set_end(v.pos()+value)
elif change == QGraphicsItem.ItemSelectedChange:
if value:
self.edge_color = QColor(247,217,17)
self.edge_thick = 1.25
self.title_color = QColor(110,110,80)
self.prev_edge_color = self.edge_color
self.prev_edge_thick = self.edge_thick
else:
self.edge_color = self.default_edge_color
self.edge_thick = 0.75
self.title_color = self.default_title_color
return QGraphicsRectItem.itemChange(self, change, value)
def itemResize(self, delta):
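        """Resize the node by ``delta``, clamped to the minimum width/height,
        reposition the handle, title bar, outputs and parameter boxes, and
        return the delta that was actually applied."""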
# Keep track of actual change
actual_delta = QPointF(0,0)
r = self.rect()
if r.width()+delta.x() >= self.min_width:
r.adjust(0, 0, delta.x(), 0)
actual_delta.setX(delta.x())
if r.height()+delta.y() >= self.min_height:
r.adjust(0, 0, 0, delta.y())
actual_delta.setY(delta.y())
self.setRect(r)
delta.setY(0.0)
if hasattr(self, 'resize_handle'):
self.resize_handle.setPos(self.rect().width()-8, self.rect().height()-8)
if hasattr(self, 'title_bar'):
self.title_bar.setRect(0,0,self.rect().width(),20)
conn_delta = actual_delta.toPoint()
conn_delta.setY(0.0)
self.divider.setLine(20, 0, self.rect().width()-5, 0)
# Move the outputs
for k, v in self.outputs.items():
v.setX(self.rect().width())
for w in v.wires_out:
w.set_start(v.scenePos()+conn_delta)
# Resize the parameters
for k, v in self.parameters.items():
v.set_box_width(self.rect().width())
return actual_delta
def create_wire(self, parent):
return Wire(parent)
def paint(self, painter, options, widget):
painter.setPen(QPen(self.edge_color, self.edge_thick))
self.title_bar.setPen(QPen(self.edge_color, self.edge_thick))
self.title_bar.setBrush(QBrush(self.title_color))
painter.setBrush(QBrush(self.bg_color))
painter.drawRoundedRect(self.rect(), 5.0, 5.0)
def dict_repr(self):
# First spit out any json that can't be modified in Quince.
# Base_params holds any parameters that aren't flagged as
# being "quince_parameters" in the auspex filters.
if self.base_params is not None:
dict_repr = dict(self.base_params)
else:
dict_repr = {}
# Now update and of the parameters that are set within
# Quince.
for name, param in self.parameters.items():
dict_repr[name] = param.value()
# Find the name of the source connectors (assuming one connection)
# The default connector name is "source", in which case data_source
# is just the name of the node. Otherwise, we return a data_source
# of the form "node_name:connector_name", e.g.
# "averager:partial_averages"
# If we have multiple inputs, they are simply separated by commas
# and some arbitrary and optional amount of whitespace.
if ('sink' in self.inputs.keys()) and len(self.inputs['sink'].wires_in) > 0:
connectors = [w.start_obj for w in self.inputs['sink'].wires_in]
source_text = []
for conn in connectors:
node_name = conn.parent.label.toPlainText()
conn_name = conn.name
if conn_name == "source":
source_text.append(node_name)
else:
source_text.append(node_name + " " + conn_name)
dict_repr['source'] = ", ".join(source_text)
else:
dict_repr['source'] = ""
# data_source not applicable for digitizers
if self.is_instrument:
dict_repr.pop('source')
dict_repr['enabled'] = self.enabled
dict_repr['type'] = self.type
return dict_repr
class TitleText(QGraphicsTextItem):
'''QGraphicsTextItem with textChanged() signal.'''
textChanged = Signal(str)
def __init__(self, text, parent=None):
super(TitleText, self).__init__(text, parent)
self.setTextInteractionFlags(Qt.TextEditorInteraction)
self.setFlag(QGraphicsItem.ItemIsFocusable)
self._value = text
self.parent = parent
def setPlainText(self, text):
if hasattr(self.scene(), 'items'):
nodes = [i for i in self.scene().items() if isinstance(i, Node)]
nodes.remove(self.parent)
node_names = [n.label.toPlainText() for n in nodes]
if text in node_names:
self.scene().window.set_status("Node name already exists")
else:
# self.scene().inspector_change_name(self._value, text)
self._value = text
self.textChanged.emit(self.toPlainText())
else:
self._value = text
super(TitleText, self).setPlainText(self._value)
def focusOutEvent(self, event):
self.setPlainText(self.toPlainText())
super(TitleText, self).focusOutEvent(event)
self.clearFocus()
def focusInEvent(self, event):
super(TitleText, self).focusInEvent(event)
self.setFocus()
def keyPressEvent(self, event):
if event.key() == Qt.Key_Return or event.key() == Qt.Key_Enter:
c = self.textCursor()
c.clearSelection()
self.setTextCursor(c)
self.clearFocus()
else:
return super(TitleText, self).keyPressEvent(event)
class ResizeHandle(QGraphicsRectItem):
"""docstring for ResizeHandle"""
def __init__(self, parent=None):
super(ResizeHandle, self).__init__()
self.dragging = False
self.parent = parent
self.drag_start = None
self.setParentItem(parent)
self.setRect(0,0,5,5)
self.setBrush(QColor(20,20,20))
def mousePressEvent(self, event):
self.dragging = True
self.drag_start = event.scenePos()
def mouseMoveEvent(self, event):
if self.dragging:
delta = event.scenePos() - self.drag_start
actual_delta = self.parent.itemResize(delta)
self.drag_start = self.drag_start + actual_delta
def mouseReleaseEvent(self, event):
self.dragging = False
class CollapseBox(QGraphicsItem):
"""docstring for CollapseBox"""
def __init__(self, parent=None):
super(CollapseBox, self).__init__(parent=parent)
self.parent = parent
self.clicking = False
self.height = 8
self.width = 8
self.setRotation(90.0)
def paint(self, painter, options, widget):
# Draw our triangle
painter.setPen(QPen(QColor(0,0,0), 1.0))
painter.setBrush(QColor(160,200,220))
path = QPainterPath()
path.moveTo(-4,4)
path.lineTo(4,0)
path.lineTo(-4,-4)
path.lineTo(-4,4)
painter.drawPath(path)
def boundingRect(self):
return QRectF(QPointF(-5,-6), QSizeF(15, 15))
def shape(self):
p = QPainterPath()
p.addRect(-5, -6, 15, 15)
return p
def mousePressEvent(self, event):
self.clicking = True
def mouseReleaseEvent(self, event):
if self.clicking:
self.parent.change_collapsed_state(not self.parent.collapsed)
self.setRotation(0.0 if self.parent.collapsed else 90.0)
self.clicking = False
class CommandAddNode(QUndoCommand):
def __init__(self, node_name, create_func, scene):
super(CommandAddNode, self).__init__("Add node {}".format(node_name))
self.create_func = create_func
self.scene = scene
def redo(self):
self.new_node = self.create_func()
def undo(self):
self.scene.removeItem(self.new_node)
class CommandDeleteNodes(QUndoCommand):
def __init__(self, nodes, scene):
super(CommandDeleteNodes, self).__init__("Delete nodes {}".format(",".join([n.name for n in nodes])))
self.scene = scene
self.nodes = nodes
def redo(self):
self.output_wires = []
self.input_wires = []
self.parameter_wires = []
for node in self.nodes:
for k, v in node.outputs.items():
for w in v.wires_out:
w.end_obj.wires_in.pop(w.end_obj.wires_in.index(w))
self.output_wires.append(w)
self.scene.removeItem(w)
for k, v in node.inputs.items():
for w in v.wires_in:
w.start_obj.wires_out.pop(w.start_obj.wires_out.index(w))
self.input_wires.append(w)
self.scene.removeItem(w)
for k, v in node.parameters.items():
for w in v.wires_in:
w.end_obj.wires_in.pop(w.end_obj.wires_in.index(w))
self.parameter_wires.append(w)
self.scene.removeItem(w)
self.scene.removeItem(node)
node.update()
self.scene.update()
def undo(self):
for node in self.nodes:
self.scene.addItem(node)
for w in self.output_wires:
w.end_obj.wires_in.append(w)
self.scene.addItem(w)
for w in self.input_wires:
w.start_obj.wires_out.append(w)
self.scene.addItem(w)
for w in self.parameter_wires:
w.end_obj.wires_in.append(w)
self.scene.addItem(w)
self.output_wires = []
self.input_wires = []
self.parameter_wires = []
self.scene.update()
class CommandDuplicateNodes(QUndoCommand):
def __init__(self, nodes, scene):
super(CommandDuplicateNodes, self).__init__("Duplicate nodes {}".format(",".join([n.name for n in nodes])))
self.nodes = nodes
self.scene = scene
self.new_nodes = []
def redo(self):
old_to_new = {}
for sn in self.nodes:
node_names = [i.label.toPlainText() for i in self.scene.items() if isinstance(i, Node)]
new_node = self.scene.create_node_by_name(sn.name)
nan = next_available_name(node_names, strip_numbers(sn.label.toPlainText()))
new_node.label.setPlainText(nan)
# Set base parameters from old
new_node.update_parameters_from(sn)
if sn.base_params:
new_node.base_params = dict(sn.base_params)
new_node.enabled = sn.enabled
# Update the mapping
old_to_new[sn] = new_node
self.new_nodes.append(new_node)
# Stagger and update the selection to include the new nodes
new_node.setPos(sn.pos()+QPointF(20,20))
sn.setSelected(False)
new_node.setSelected(True)
# Rewire the new nodes according to the old nodes
for sn in self.nodes:
new_node = old_to_new[sn]
for k, v in sn.outputs.items():
for w in v.wires_out:
# See if the user wants to copy nodes on both ends, otherwise don't make a wire
if w.start_obj.parent in old_to_new:
if w.end_obj.parent in old_to_new:
# Create the wire and set the start
new_wire = Wire(new_node.outputs[w.start_obj.name])
new_wire.set_start(new_node.outputs[w.start_obj.name].scenePos())
new_node.outputs[w.start_obj.name].wires_out.append(new_wire)
end_conn_name = w.end_obj.name
end_node = old_to_new[w.end_obj.parent]
if end_conn_name in end_node.inputs.keys():
new_wire.end_obj = end_node.inputs[end_conn_name]
new_wire.set_end(end_node.inputs[end_conn_name].scenePos())
end_node.inputs[end_conn_name].wires_in.append(new_wire)
elif end_conn_name in end_node.parameters.keys():
new_wire.end_obj = end_node.parameters[end_conn_name]
new_wire.set_end(end_node.parameters[end_conn_name].scenePos())
end_node.parameters[end_conn_name].wires_in.append(new_wire)
self.scene.addItem(new_wire)
self.scene.update()
def undo(self):
for node in self.new_nodes:
for k, v in node.outputs.items():
for w in v.wires_out:
w.end_obj.wires_in.pop(w.end_obj.wires_in.index(w))
self.scene.removeItem(w)
for k, v in node.inputs.items():
for w in v.wires_in:
w.start_obj.wires_out.pop(w.start_obj.wires_out.index(w))
self.scene.removeItem(w)
for k, v in node.parameters.items():
for w in v.wires_in:
w.end_obj.wires_in.pop(w.end_obj.wires_in.index(w))
self.scene.removeItem(w)
self.scene.removeItem(node)
node.update()
for sn in self.nodes:
sn.setSelected(True)
self.scene.update()
| apache-2.0 | 7,390,747,057,331,082,000 | 38.254428 | 115 | 0.57345 | false |
orcasgit/py-wsse | wsse/encryption.py | 1 | 11246 | """Functions for WS-Security (WSSE) encryption and decryption.
Heavily based on test examples in https://github.com/mehcode/python-xmlsec as
well as the xmlsec documentation at https://www.aleksey.com/xmlsec/. Some
functions from https://github.com/mvantellingen/py-soap-wsse.
Reading the xmldsig, xmlenc, and ws-security standards documents, though
admittedly painful, will likely assist in understanding the code in this
module.
"""
import base64
from lxml import etree
from OpenSSL import crypto
import xmlsec
from .constants import BASE64B, X509TOKEN, DS_NS, ENC_NS, SOAP_NS, WSSE_NS
from .xml import ensure_id, ns
def encrypt(envelope, certfile):
"""Encrypt body contents of given SOAP envelope using given X509 cert.
Currently only encrypts the first child node of the body, so doesn't really
support a body with multiple child nodes (the later ones won't be
encrypted), and doesn't support encryption of multiple nodes.
Expects to encrypt an incoming document something like this (xmlns
attributes omitted for readability):
<soap:Envelope>
<soap:Header>
<wsse:Security mustUnderstand="true">
<wsu:Timestamp>
<wsu:Created>2015-06-25T21:53:25.246276+00:00</wsu:Created>
<wsu:Expires>2015-06-25T21:58:25.246276+00:00</wsu:Expires>
</wsu:Timestamp>
</wsse:Security>
</soap:Header>
<soap:Body>
...
</soap:Body>
</soap:Envelope>
Encryption results in an XML structure something like this (note the added
wsse:BinarySecurityToken and xenc:EncryptedKey nodes in the wsse:Security
header, and that the contents of the soap:Body have now been replaced by a
wsse:EncryptedData node):
<soap:Envelope>
<soap:Header>
<wsse:Security mustUnderstand="true">
<wsse:BinarySecurityToken
wsu:Id="id-31e55a42-adef-4312-aa02-6da738177b25"
EncodingType="...-wss-soap-message-security-1.0#Base64Binary"
ValueType=".../oasis-200401-wss-x509-token-profile-1.0#X509v3">
MIIGRTCC...7RaVeFVB/w==
</wsse:BinarySecurityToken>
<xenc:EncryptedKey>
<xenc:EncryptionMethod
Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
<ds:KeyInfo>
<wsse:SecurityTokenReference
wsse:TokenType="...wss-x509-token-profile-1.0#X509v3">
<wsse:Reference
ValueType="...-wss-x509-token-profile-1.0#X509v3"
URI="#id-31e55a42-adef-4312-aa02-6da738177b25"
/>
</wsse:SecurityTokenReference>
</ds:KeyInfo>
<xenc:CipherData>
<xenc:CipherValue>0m23u5UVh...YLcEcmgzng==</xenc:CipherValue>
</xenc:CipherData>
<xenc:ReferenceList>
<xenc:DataReference
URI="#id-094305bf-f73e-4940-88d9-00688bc78718"/>
</xenc:ReferenceList>
</xenc:EncryptedKey>
<wsu:Timestamp wsu:Id="id-d449ec14-f31c-4174-b51c-2a56843eeda5">
<wsu:Created>2015-06-25T22:26:57.618091+00:00</wsu:Created>
<wsu:Expires>2015-06-25T22:31:57.618091+00:00</wsu:Expires>
</wsu:Timestamp>
</wsse:Security>
</soap:Header>
<soap:Body wsu:Id="id-73bc3f79-1597-4e35-91d5-354fc6197858">
<xenc:EncryptedData
Type="http://www.w3.org/2001/04/xmlenc#Element"
wsu:Id="id-094305bf-f73e-4940-88d9-00688bc78718">
<xenc:EncryptionMethod
Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
<xenc:CipherData>
<xenc:CipherValue>rSJC8m...js2RQfw/5</xenc:CipherValue>
</xenc:CipherData>
</xenc:EncryptedData>
</soap:Body>
</soap:Envelope>
(In practice, we'll generally be encrypting an already-signed document, so
the Signature node would also be present in the header, but we aren't
encrypting it and for simplicity it's omitted in this example.)
"""
doc = etree.fromstring(envelope)
header = doc.find(ns(SOAP_NS, 'Header'))
security = header.find(ns(WSSE_NS, 'Security'))
# Create a keys manager and load the cert into it.
manager = xmlsec.KeysManager()
key = xmlsec.Key.from_file(certfile, xmlsec.KeyFormat.CERT_PEM, None)
manager.add_key(key)
# Encrypt first child node of the soap:Body.
body = doc.find(ns(SOAP_NS, 'Body'))
target = body[0]
# Create the EncryptedData node we will replace the target node with,
# and make sure it has the contents XMLSec expects (a CipherValue node,
# a KeyInfo node, and an EncryptedKey node within the KeyInfo which
# itself has a CipherValue).
enc_data = xmlsec.template.encrypted_data_create(
doc,
xmlsec.Transform.DES3,
type=xmlsec.EncryptionType.ELEMENT,
ns='xenc',
)
xmlsec.template.encrypted_data_ensure_cipher_value(enc_data)
key_info = xmlsec.template.encrypted_data_ensure_key_info(
enc_data, ns='dsig')
enc_key = xmlsec.template.add_encrypted_key(
key_info, xmlsec.Transform.RSA_OAEP)
xmlsec.template.encrypted_data_ensure_cipher_value(enc_key)
enc_ctx = xmlsec.EncryptionContext(manager)
# Generate a per-session DES key (will be encrypted using the cert).
enc_ctx.key = xmlsec.Key.generate(
xmlsec.KeyData.DES, 192, xmlsec.KeyDataType.SESSION)
# Ask XMLSec to actually do the encryption.
enc_data = enc_ctx.encrypt_xml(enc_data, target)
# XMLSec inserts the EncryptedKey node directly within EncryptedData,
# but WSSE wants it in the Security header instead, and referencing the
# EncryptedData as well as the actual cert in a BinarySecurityToken.
# Move the EncryptedKey node up into the wsse:Security header.
security.insert(0, enc_key)
# Create a wsse:BinarySecurityToken node containing the cert and add it
# to the Security header.
cert_bst = create_binary_security_token(certfile)
security.insert(0, cert_bst)
# Create a ds:KeyInfo node referencing the BinarySecurityToken we just
# created, and insert it into the EncryptedKey node.
enc_key.insert(1, create_key_info_bst(cert_bst))
# Add a DataReference from the EncryptedKey node to the EncryptedData.
add_data_reference(enc_key, enc_data)
# Remove the now-empty KeyInfo node from EncryptedData (it used to
# contain EncryptedKey, but we moved that up into the Security header).
enc_data.remove(key_info)
return etree.tostring(doc)
def decrypt(envelope, keyfile):
"""Decrypt all EncryptedData, using EncryptedKey from Security header.
EncryptedKey should be a session key encrypted for given ``keyfile``.
Expects XML similar to the example in the ``encrypt`` docstring.
"""
# Create a key manager and load our key into it.
manager = xmlsec.KeysManager()
key = xmlsec.Key.from_file(keyfile, xmlsec.KeyFormat.PEM)
manager.add_key(key)
doc = etree.fromstring(envelope)
header = doc.find(ns(SOAP_NS, 'Header'))
security = header.find(ns(WSSE_NS, 'Security'))
enc_key = security.find(ns(ENC_NS, 'EncryptedKey'))
# Find each referenced encrypted block (each DataReference in the
# ReferenceList of the EncryptedKey) and decrypt it.
ref_list = enc_key.find(ns(ENC_NS, 'ReferenceList'))
for ref in ref_list:
# Find the EncryptedData node referenced by this DataReference.
ref_uri = ref.get('URI')
referenced_id = ref_uri[1:]
enc_data = doc.xpath(
"//enc:EncryptedData[@Id='%s']" % referenced_id,
namespaces={'enc': ENC_NS},
)[0]
# XMLSec doesn't understand WSSE, therefore it doesn't understand
# SecurityTokenReference. It expects to find EncryptedKey within the
# KeyInfo of the EncryptedData. So we get rid of the
# SecurityTokenReference and replace it with the EncryptedKey before
# trying to decrypt.
key_info = enc_data.find(ns(DS_NS, 'KeyInfo'))
key_info.remove(key_info[0])
key_info.append(enc_key)
# When XMLSec decrypts, it automatically replaces the EncryptedData
# node with the decrypted contents.
ctx = xmlsec.EncryptionContext(manager)
ctx.decrypt(enc_data)
return etree.tostring(doc)
def add_data_reference(enc_key, enc_data):
"""Add DataReference to ``enc_data`` in ReferenceList of ``enc_key``.
``enc_data`` should be an EncryptedData node; ``enc_key`` an EncryptedKey
node.
Add a wsu:Id attribute to the EncryptedData if it doesn't already have one,
so the EncryptedKey's URI attribute can reference it.
(See the example XML in the ``encrypt()`` docstring.)
Return created DataReference node.
"""
# Ensure the target EncryptedData has a wsu:Id.
data_id = ensure_id(enc_data)
# Ensure the EncryptedKey has a ReferenceList.
ref_list = ensure_reference_list(enc_key)
# Create the DataReference, with URI attribute referencing the target
# node's id, add it to the ReferenceList, and return it.
data_ref = etree.SubElement(ref_list, ns(ENC_NS, 'DataReference'))
data_ref.set('URI', '#' + data_id)
return data_ref
def ensure_reference_list(encrypted_key):
"""Ensure that given EncryptedKey node has a ReferenceList node.
Return the found or created ReferenceList node.
"""
ref_list = encrypted_key.find(ns(ENC_NS, 'ReferenceList'))
if ref_list is None:
ref_list = etree.SubElement(encrypted_key, ns(ENC_NS, 'ReferenceList'))
return ref_list
def create_key_info_bst(security_token):
"""Create and return a KeyInfo node referencing given BinarySecurityToken.
(See the example XML in the ``encrypt()`` docstring.)
Modified from https://github.com/mvantellingen/py-soap-wsse.
"""
# Create the KeyInfo node.
key_info = etree.Element(ns(DS_NS, 'KeyInfo'), nsmap={'ds': DS_NS})
# Create a wsse:SecurityTokenReference node within KeyInfo.
sec_token_ref = etree.SubElement(
key_info, ns(WSSE_NS, 'SecurityTokenReference'))
sec_token_ref.set(
ns(WSSE_NS, 'TokenType'), security_token.get('ValueType'))
# Add a Reference to the BinarySecurityToken in the SecurityTokenReference.
bst_id = ensure_id(security_token)
reference = etree.SubElement(sec_token_ref, ns(WSSE_NS, 'Reference'))
reference.set('ValueType', security_token.get('ValueType'))
reference.set('URI', '#%s' % bst_id)
return key_info
def create_binary_security_token(certfile):
"""Create a BinarySecurityToken node containing the x509 certificate.
Modified from https://github.com/mvantellingen/py-soap-wsse.
"""
# Create the BinarySecurityToken node with appropriate attributes.
node = etree.Element(ns(WSSE_NS, 'BinarySecurityToken'))
node.set('EncodingType', BASE64B)
node.set('ValueType', X509TOKEN)
# Set the node contents.
with open(certfile) as fh:
cert = crypto.load_certificate(crypto.FILETYPE_PEM, fh.read())
node.text = base64.b64encode(
crypto.dump_certificate(crypto.FILETYPE_ASN1, cert))
return node
| bsd-3-clause | 482,553,226,064,850,500 | 36.738255 | 79 | 0.669749 | false |
alanfbaird/PyTASA | tests/anisotropy_index_tests.py | 1 | 1660 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test the IO routines - based on the MSAT test cases
"""
import unittest
import numpy as np
import pytasa.anisotropy_index
class TestAnisotropyIndex(unittest.TestCase):
def setUp(self):
"""Some useful matricies for testing"""
self.olivine = np.array([[320.5, 68.1, 71.6, 0.0, 0.0, 0.0],
[68.1, 196.5, 76.8, 0.0, 0.0, 0.0],
[71.6, 76.8, 233.5, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 64.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 77.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 78.7]])
self.isotropic = np.array([[166.6667, 66.6667, 66.6667, 0.0, 0.0, 0.0],
[66.6667, 166.6667, 66.6667, 0.0, 0.0, 0.0],
[66.6667, 66.6667, 166.6667, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 50.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 50.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 50.0]])
def test_isotropic_zenner(self):
"""Test from MSAT - are isotropic results isotropic"""
np.testing.assert_almost_equal(pytasa.anisotropy_index.zenerAniso(
self.isotropic), [1.0, 0.0])
def test_isotropic_universal(self):
"""Test from MSAT - are isotropic results isotropic"""
np.testing.assert_almost_equal(pytasa.anisotropy_index.uAniso(
self.isotropic), [0.0, 0.0])
def suite():
    return unittest.makeSuite(TestAnisotropyIndex, 'test')
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| mit | 8,203,978,501,412,045,000 | 35.086957 | 79 | 0.487349 | false |
kingsdigitallab/tvof-kiln | preprocess/prepare/xml_parser.py | 1 | 11492 | # -*- coding: utf-8 -*-
import sys
import os
import re
import glob
import datetime
import xml.etree.ElementTree as ET
class XMLParser(object):
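    """Shared helper for the TVOF preprocessing scripts.

    Loads TEI XML (optionally restricted to a range of paragraphs), keeps XML
    comments safe across ElementTree round trips, and writes the result back
    out; subclasses provide run_custom() to do the actual processing.
    """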
suppressed_output = False
default_output = u'parsed.xml'
# TODO: implement this, but not possible with ElementTree...
is_output_beautified = False
def __init__(self):
self.convert_only = False
self.xml_comments = []
self.reset()
def reset(self):
self.paragraphs = []
self.namespaces_implicit = {
'xml': 'http://www.w3.org/XML/1998/namespace',
}
self.xml = None
def has_xml(self):
return self.xml is not None
def transform(self, xml_path, xsl_path):
import lxml.etree as LET
dom = LET.parse(xml_path)
xslt = LET.parse(xsl_path)
trans = LET.XSLT(xslt)
newdom = trans(dom)
ret = LET.tostring(newdom, pretty_print=True)
# 2-space indent -> 4-space indent
ret = re.sub(r'(?m)^ +', lambda m: ' ' * (2 * len(m.group(0))), ret)
return ret
@classmethod
def run(cls, args=None):
if args is None and cls.__module__ != '__main__':
return
script_file = '%s.py' % cls.__module__
if args is None:
args = sys.argv
script_file = args.pop(0)
parser = cls()
print('python2 %s %s' % (script_file, ' '.join(args)))
if len(args) < 1:
print(
'Usage: {} INPUT.xml [-o OUTPUT.xml] [-r PARA_RANGE]'.format(
os.path.basename(script_file)))
exit()
output_path = cls.default_output
input_str = []
input_path_list = []
while len(args):
arg = (args.pop(0)).strip()
if arg.strip() == '-r':
if len(args) > 0:
arg = args.pop(0)
parser.set_paragraphs(arg)
elif arg.strip() == '-o':
if len(args) > 0:
arg = args.pop(0)
output_path = arg
elif arg.strip() == '-m':
if len(args) > 0:
arg = args.pop(0)
parser.ms_name = arg
elif arg.strip() == '-c':
# aggregate and convert only, don't tokenise or kwic
# TODO: this should really go into doall.py
parser.convert_only = True
else:
input_str.append(arg)
for input_paths in cls.get_expanded_paths(arg):
input_path_list += glob.glob(input_paths)
if input_path_list:
parser.run_custom(input_path_list, output_path)
if parser.has_xml():
parser.write_xml(output_path)
print('written %s' % output_path)
else:
if not getattr(cls, 'suppressed_output', False):
print(
'WARNING: Nothing to output, please check the input argument (%s)' %
', '.join(input_str))
print('done')
def set_paragraphs(self, paragraphs_string=None):
ret = []
if paragraphs_string:
# edfr20125_00589 in range '589-614'
for paras in paragraphs_string.strip().split(','):
paras = paras.split('-')
if len(paras) < 2:
paras[-1] = paras[0]
ret += range(int(paras[0]), int(paras[-1]) + 1)
self.paragraphs = ret
return ret
def is_para_in_range(self, parentid):
ret = False
if not self.paragraphs:
return True
if parentid:
# edfr20125_00589 in range '589-614'
para = re.findall('\d+$', parentid)
if para:
ret = int(para[0]) in self.paragraphs
return ret
@classmethod
def get_expanded_paths(cls, path):
# get_expanded_paths
# e.g. {16,18}X => [16X, 18X]
# e.g. {16-18}X => [16X, 17X, 18X]
ret = [path]
parts = re.findall(ur'^(.*)\{([-\d,]+)\}(.*)$', path)
if parts:
parts = parts[0]
ranges = parts[1].split(',')
for range in ranges:
ends = range.split('-')
if len(ends) == 1:
ends.append(ends[0])
ends = [int(end) for end in ends]
ends[-1] += 1
for end in xrange(*ends):
ret.append(ur'%s%s%s' % (parts[0], end, parts[-1]))
return ret
def set_namespaces_from_unicode(self, xml_string):
# grab all the namespaces
self.namespaces = {
prefix: uri
for definition, prefix, uri
in re.findall(ur'(xmlns:?(\w+)?\s*=\s*"([^"]+)")', xml_string)
}
self.namespaces.update(self.namespaces_implicit)
def set_xml_from_unicode(self, xml_string):
# grab all the namespaces
self.set_namespaces_from_unicode(xml_string)
# remove the default namespace definition
# to simplify parsing
# we'll put it back in get_unicode_from_xml()
xml_string = re.sub(ur'\sxmlns="[^"]+"', '', xml_string, count=1)
# note that ET takes a utf-8 encoded string
try:
self.xml = ET.fromstring(xml_string.encode('utf-8'))
except Exception as e:
f = open('error.log', 'w')
f.write(xml_string.encode('utf-8'))
f.close()
raise e
def get_unicode_from_xml(self, xml=None):
if xml is None:
for prefix, url in self.namespaces.iteritems():
# skip xml namespace, it's implicitly defined
if prefix == 'xml':
continue
aprefix = 'xmlns'
if prefix:
aprefix += ':' + prefix
self.xml.set(aprefix, url)
if xml is None:
xml = self.xml
return ET.tostring(xml, encoding='utf-8').decode('utf-8')
def read_xml(self, filepath):
ret = True
import codecs
with codecs.open(filepath, 'r', 'utf-8') as f:
content = f.read()
content = self.save_xml_comments(content)
try:
self.set_xml_from_unicode(content)
# self.is_wellformed(self.get_unicode_from_xml())
except ET.ParseError as e:
print(e)
ret = False
return ret
def forget_xml_comments(self):
self.xml_comments = []
def restore_xml_comments(self, content):
# xml.etree.ElementTree does NOT preserve <!-- -->
# We could use lxml but that would mean asking project partners
# to install that... let's do it manually.
# return content
def replace_comment(match):
ret = ur''
if self.xml_comments:
ret = self.xml_comments[int(match.group(1))]
return ret
return re.sub(ur'(?musi)<comment\s*id\s*=\s*"c-(\d+)"\s*/>',
replace_comment, content)
def save_xml_comments(self, content):
# xml.etree.ElementTree does NOT preserve <!-- -->
# We could use lxml but that would mean asking project partners
# to install that... let's do it manually.
# TODO: Alternatively
# https://stackoverflow.com/questions/33573807/faithfully-preserve-comments-in-parsed-xml-python-2-7
# return content
first_element_index = (re.search(ur'<\s*\w', content)).start()
def replace_comment(match):
ret = match.group(0)
if match.start() > first_element_index:
commentid = len(self.xml_comments)
self.xml_comments.append(ret)
ret = ur'<comment id="c-%s"/>' % commentid
return ret
return re.sub(ur'(?musi)<!--.*?-->', replace_comment, content)
def write_xml(self, file_path, encoding='utf-8'):
f = open(file_path, 'wb')
content = u'<?xml version="1.0" encoding="{}"?>\n'.format(encoding)
# insert the generated date
date_generated = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
content += u'<!-- AUTO-GENERATED by {} - {} -->\n'.format(
self.__class__.__name__,
date_generated
)
#
content += self.get_unicode_from_xml()
# insert date in tei headers
content = re.sub(
ur'(?musi)(\s+)(<publicationStmt>)(.*?)(</publicationStmt>)',
ur'\1\2\1 <publisher>King' + "'" +
ur's Digital Laboratory</publisher>\1 <date>{}</date>\1\4'.format(
date_generated
),
content
)
content = self.restore_xml_comments(content)
content = content.encode(encoding)
f.write(content)
f.close()
def get_element_text(self, element, recursive=False):
if recursive:
ret = element.itertext()
else:
ret = [(element.text or u'')] +\
[(child.tail or u'') for child in list(element)]
return u''.join(ret)
def expand_prefix(self, expression):
expression = re.sub(
ur'(\w+):',
lambda m: ur'{%s}' % self.namespaces[m.group(1)],
expression
)
return expression
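# Illustrative sketch (editor's addition, not from the original source):
# assuming self.namespaces maps the prefix 'tei' to the TEI namespace URI,
#   self.expand_prefix(ur'.//tei:div')
# would return ur'.//{http://www.tei-c.org/ns/1.0}div', i.e. the
# Clark-notation form that xml.etree.ElementTree expects in find()/findall().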
def is_wellformed(self, xml_string):
ret = True
try:
xml_string = ET.fromstring(xml_string.encode('utf-8'))
except ET.ParseError as e:
print(u'%s' % e)
# (3056, 242) = (line, char)
lines = xml_string.split('\n')
print(lines[e.position[0] - 1])
print((' ' * (e.position[1] - 1)) + '^')
ret = False
return ret
def remove_elements(self, filters, condition_function=None):
# Remove all elements in the xml that match any of the given filters.
# e.g. filters = ['del', 'orig', 'seg[@type="semi-dip"]', 'sic', 'pb']
# self.remove_elements(filters)
if condition_function is None:
def condition_function(parent, element): return True
for filter in filters:
c = 0
matches = re.findall('^([^\[]*)(\[.*\])?', filter)
tag, condition = matches[0]
for parent in self.xml.findall(ur'.//*[' + tag + ur']'):
# slower version that completely removes the elements
elements = parent.findall(filter)
if len(elements):
previous = None
for element in list(parent):
if element in elements and condition_function(
parent, element):
# make sure we keep the tail
tail = element.tail
parent.remove(element)
c += 1
if tail:
if previous is not None:
previous.tail = (
previous.tail or ur'') + tail
else:
parent.text = (
parent.text or ur'') + tail
else:
previous = element
print('\t removed %s %s' % (c, filter))
| apache-2.0 | -6,888,239,231,041,055,000 | 31.463277 | 108 | 0.488688 | false |
novapost/django-email-change | src/email_change/urls.py | 1 | 1900 | # -*- coding: utf-8 -*-
#
# This file is part of django-email-change.
#
# django-email-change adds support for email address change and confirmation.
#
# Development Web Site:
# - http://www.codetrax.org/projects/django-email-change
# Public Source Code Repository:
# - https://source.codetrax.org/hgroot/django-email-change
#
# Copyright 2010 George Notaras <gnot [at] g-loaded.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.conf.urls.defaults import *
from django.views.generic import TemplateView
urlpatterns = patterns('',
url(r'^email/change/$', 'email_change.views.email_change_view', name='email_change'),
url(r'^email/verification/sent/$',
TemplateView.as_view(template_name='email_change/email_verification_sent.html'),
name='email_verification_sent'),
# Note taken from django-registration
# Verification keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad verification key should still get to the view;
# that way it can return a sensible "invalid key" message instead of a
# confusing 404.
url(r'^email/verify/(?P<verification_key>\w+)/$', 'email_change.views.email_verify_view', name='email_verify'),
url(r'^email/change/complete/$',
TemplateView.as_view(template_name='email_change/email_change_complete.html'),
name='email_change_complete'),
)
| apache-2.0 | -2,762,859,119,669,229,000 | 41.222222 | 115 | 0.715263 | false |
b3j0f/schema | b3j0f/schema/test/base.py | 1 | 7880 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016 Jonathan Labéjof <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
from unittest import main
from b3j0f.utils.ut import UTCase
from ..base import Schema, DynamicValue
class DynamicValueTest(UTCase):
def test(self):
dvalue = DynamicValue(lambda: 'test')
self.assertEqual('test', dvalue())
class SchemaTest(UTCase):
def test_init(self):
schema = Schema()
self.assertIsNone(schema._fget_)
self.assertIsNone(schema._fset_)
self.assertIsNone(schema._fdel_)
def test_uuid(self):
schema1 = Schema()
schema2 = Schema()
self.assertNotEqual(schema1.uuid, schema2.uuid)
def test_uuid_inheritance(self):
class BaseTest(Schema):
pass
class Test(BaseTest):
pass
basetest = BaseTest()
test = Test()
self.assertNotEqual(basetest.uuid, test.uuid)
def test_init_gsd(self):
class Test(object):
test = Schema()
test = Test()
res = test.test
self.assertIsNone(res)
schema = Schema()
test.test = schema
self.assertIsInstance(test.test, Schema)
del test.test
self.assertFalse(hasattr(test, Test.test._attrname()))
test.test = Schema()
self.assertIsInstance(test.test, Schema)
class DecorateTest(UTCase):
def setUp(self):
class TestSchema(Schema):
test = Schema()
def __init__(self, *args, **kwargs):
super(TestSchema, self).__init__(*args, **kwargs)
self.required = ['test']
self.testschema = TestSchema
def _assert(self, cls, processing):
test = cls()
self.assertNotIn('getter', processing)
self.assertNotIn('setter', processing)
self.assertNotIn('deleter', processing)
res = test.test
self.assertEqual(res, test)
self.assertIn('getter', processing)
self.assertNotIn('setter', processing)
self.assertNotIn('deleter', processing)
test.test = self.testschema()
self.assertIsInstance(test.test, Schema)
self.assertIn('getter', processing)
self.assertIn('setter', processing)
self.assertNotIn('deleter', processing)
test.test = None
cls.test.nullable = False
self.assertRaises(ValueError, setattr, test, 'test', None)
self.assertRaises(ValueError, setattr, test, 'test', 1)
test.test = DynamicValue(lambda: self.testschema())
self.assertRaises(ValueError, setattr, test, 'test', lambda: None)
self.assertRaises(ValueError, setattr, test, 'test', lambda: 1)
cls.test.nullable = True
test.test = DynamicValue(lambda: None)
del test.test
self.assertFalse(hasattr(test, '_value'))
self.assertIn('getter', processing)
self.assertIn('setter', processing)
self.assertIn('deleter', processing)
test.test = self.testschema()
self.assertIsInstance(test.test, Schema)
self.assertIn('getter', processing)
self.assertIn('setter', processing)
self.assertIn('deleter', processing)
def test_init_gsd_custom(self):
processing = []
class Test(object):
@self.testschema
def test(self):
processing.append('getter')
return getattr(self, '_value', self)
@test.setter
def test(self, value):
processing.append('setter')
self._value = value
@test.deleter
def test(self):
processing.append('deleter')
del self._value
self._assert(Test, processing)
def test_init_gsd_custom_params(self):
processing = []
class Test(object):
@self.testschema.apply()
def test(self):
processing.append('getter')
return getattr(self, '_value', self)
@test.setter
def test(self, value):
processing.append('setter')
self._value = value
@test.deleter
def test(self):
processing.append('deleter')
del self._value
self._assert(Test, processing)
def test__validate(self):
schema = Schema()
schema._validate(None)
schema.nullable = False
self.assertRaises(ValueError, schema._validate, None)
schema.nullable = True
schema._validate(None)
def test_getschemas(self):
class TestSchema(Schema):
a = Schema()
b = Schema()
schemas = TestSchema.getschemas()
self.assertEqual(len(Schema.getschemas()) + 2, len(schemas))
schema = TestSchema()
schemas = schema.getschemas()
self.assertEqual(len(Schema.getschemas()) + 2, len(schemas))
def test_notify_get(self):
class TestSchema(Schema):
test = Schema(name='test')
schema = None
value = None
def _getvalue(self, schema, value):
if schema.name == 'test':
self.schema = schema
self.value = value
self.assertIsNone(TestSchema.schema)
self.assertIsNone(TestSchema.value)
schema = TestSchema()
schema.test = Schema()
schema.test
self.assertIs(schema.schema, TestSchema.test)
self.assertIs(schema.value, schema.test)
def test_notify_set(self):
class TestSchema(Schema):
test = Schema(name='test')
schema = None
value = None
def _setvalue(self, schema, value):
if schema.name == 'test':
self.schema = schema
self.value = value
self.assertIsNone(TestSchema.schema)
self.assertIsNone(TestSchema.value)
schema = TestSchema()
schema.test = Schema()
self.assertIs(schema.schema, TestSchema.test)
self.assertIs(schema.value, schema.test)
def test_notify_del(self):
class TestSchema(Schema):
test = Schema(name='test')
schema = None
def _delvalue(self, schema, *args, **kwargs):
if schema.name == 'test':
self.schema = schema
self.assertIsNone(TestSchema.schema)
schema = TestSchema()
del schema.test
self.assertIs(schema.schema, TestSchema.test)
if __name__ == '__main__':
main()
| mit | 1,619,674,455,909,920,300 | 25.089404 | 79 | 0.582688 | false |
shacknetisp/vepybot | plugins/protocols/irc/core/users.py | 1 | 8208 | # -*- coding: utf-8 -*-
import bot
import time
class Whois:
def __init__(self):
self.time = time.time()
self.channels = {}
ident = ""
host = ""
name = ""
auth = ""
idle = 0
signon = 0
server = ""
class M_Whois(bot.Module):
index = "whois"
hidden = False
def register(self):
self.addhook("recv", "recv", self.recv)
self.addhook("whois.fromcontext", "fromcontext", self.fromcontext)
self.addhook("whois.fromtuple", "fromtuple", self.fromtuple)
self.addhook("chanmodes", "chanmodes", self.chanmodes)
self.addtimer(self.timer, "whois", 60 * 1000)
self.addtimer(self.chantimer, "chantimer", 60 * 1000)
self.whois = {}
self.server.whois = self.whois
self.tmp = {}
self.addcommand(self.getwhois, "whois",
"Get information about a nick. Space-delimited values."
" Values can be: nick, ident, host, or auth.",
["nick", "[values...]"])
self.addcommand(self.runwhois, "authme",
"Auth via WHOIS.", [])
self.serverset('whois.updatechannels', self.updatechannels)
def runwhois(self, context, args):
if not self.server.settings.get('server.whois'):
return "WHOIS is disabled."
self.server.send("WHOIS %s" % context.user[0])
return "Done, check your rights with: rights get"
def getwhois(self, context, args):
args.default('values', 'nick host channels')
info = {}
if args.getstr("nick") in self.whois:
w = self.whois[args.getstr("nick")]
info['nick'] = args.getstr("nick")
info['ident'] = w.ident
info['name'] = w.name
info['host'] = w.host
info['auth'] = w.auth
info['idstring'] = "irc:{nick}!{ident}@{host}!{auth}".format(**info)
else:
return "Nick not found."
out = []
values = args.getstr("values").split(' ')
for v in values:
if v in info and type(info[v]) in [str, int]:
if len(values) == 1:
out.append(str(info[v]))
else:
out.append("%s: %s" % (v, str(info[v])))
return ', '.join(out) or "No results."
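# Illustrative sketch (editor's addition, names hypothetical): over IRC this
# command might be invoked as "whois SomeNick host auth", which would reply
# with something like "host: example.host.net, auth: SomeAccount" for a nick
# present in the cached whois data, or "Nick not found." otherwise.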
def updatechannels(self, snick=None):
nicks = {}
for chan in self.server.channels:
v = self.server.channels[chan].names
for n in v:
if snick is not None and n not in snick:
continue
if n not in nicks:
nicks[n] = {}
nicks[n][chan] = v[n]
for nick in nicks:
if nick in self.whois:
self.whois[nick].channels = nicks[nick]
def chantimer(self):
self.updatechannels()
def timer(self):
tod = []
for w in self.whois:
if time.time() - self.whois[w].time > 250:
tod.append(w)
for d in tod:
self.whois.pop(d)
def fromcontext(self, context):
nicks = [context.user[0]]
if context.code('nick'):
nicks.append(context.rawsplit[2])
for nick in nicks:
if nick:
if nick not in self.whois:
self.whois[nick] = Whois()
w = self.whois[nick]
w.ident = context.user[1]
w.host = context.user[2]
def fromtuple(self, t):
nick = t[0]
if nick:
if nick not in self.whois:
self.whois[nick] = Whois()
w = self.whois[nick]
w.ident = t[1]
w.host = t[2]
self.server.dohook('whois.found', nick,
"%s!%s@%s!%s" % (nick, w.ident, w.host, w.auth),
w)
def recv(self, context):
if context.code("311"):
self.tmp[context.rawsplit[3]] = Whois()
w = self.tmp[context.rawsplit[3]]
w.ident = context.rawsplit[4]
w.host = context.rawsplit[5]
w.name = context.text
elif context.code("312"):
w = self.tmp[context.rawsplit[3]]
w.server = context.rawsplit[4]
elif context.code("317"):
w = self.tmp[context.rawsplit[3]]
w.idle = int(context.rawsplit[4])
w.signon = int(context.rawsplit[5])
elif context.code("318"):
if context.rawsplit[3] in self.whois:
self.tmp[context.rawsplit[3]].channels = self.whois[
context.rawsplit[3]].channels
self.whois[context.rawsplit[3]] = self.tmp[context.rawsplit[3]]
self.server.dohook("whois", context.rawsplit[3])
w = self.whois[context.rawsplit[3]]
self.server.dohook('whois.found', context.rawsplit[3],
"%s!%s@%s!%s" % (context.rawsplit[3], w.ident, w.host, w.auth),
w)
elif context.code("330"):
# Freenode
w = self.tmp[context.rawsplit[3]]
w.auth = context.rawsplit[4]
elif context.code("JOIN"):
self.handlejoin(context)
elif context.code("PART"):
self.handlepart(context)
elif context.code("QUIT"):
self.fromcontext(context)
w = self.whois[context.user[0]]
self.server.dohook('log', 'quit', (context.rawsplit[0],
context.user),
(list(w.channels.keys()), context.text))
elif context.code("MODE"):
self.handlemode(context)
def handlejoin(self, context):
self.fromcontext(context)
w = self.whois[context.user[0]]
channel = context.rawsplit[2].strip(':')
if channel not in w.channels:
w.channels[channel] = []
self.server.dohook('join', channel, context.user[0])
self.server.dohook('log', 'join', context.user,
channel)
self.server.dohook('whois.lightfound', context.user[0],
"%s!%s@%s!%s" % (context.user[0], w.ident, w.host, w.auth),
w)
def handlepart(self, context):
self.fromcontext(context)
w = self.whois[context.user[0]]
channel = context.rawsplit[2]
if channel in w.channels:
w.channels.pop(channel)
self.server.dohook('log', 'part', context.user,
(channel, context.text))
def handlemode(self, context):
channel = context.rawsplit[2]
modes = context.rawsplit[3]
final = {}
nicks = context.rawsplit[4:]
for n in nicks:
final[n] = []
now = ''
idx = 0
for cchar in modes:
if cchar in '-+':
now = cchar
elif now and idx in range(len(nicks)):
final[nicks[idx]].append(now + cchar)
self.server.dohook('chanmodes', channel, final)
self.server.dohook('log', 'mode', context.rawsplit[0],
(channel, modes, ' '.join(nicks)))
def chanmodes(self, channel, modes):
for target in modes:
if target not in self.whois:
continue
w = self.whois[target]
if channel not in w.channels:
w.channels[channel] = []
if channel not in self.server.channels:
c = channel.strip(':')
self.server.channels[c] = self.server.Channel(self.server, c)
names = self.server.channels[channel].names
for mode in modes[target]:
for mchar in 'ov':
if mode == '+%s' % mchar:
if mchar not in w.channels[channel]:
w.channels[channel].append(mchar)
elif mode == '-%s' % mchar:
if mchar in w.channels[channel]:
i = w.channels[channel].index(mchar)
w.channels[channel].pop(i)
names[target] = w.channels[channel]
bot.register.module(M_Whois)
| mit | -1,344,099,156,478,500,600 | 35 | 80 | 0.500731 | false |
pascalgutjahr/Praktikum-1 | V704_Absorp_Beta_Gamma/gammaPB.py | 1 | 1439 | import matplotlib as mpl
from scipy.optimize import curve_fit
mpl.use('pgf')
import matplotlib.pyplot as plt
plt.rcParams['lines.linewidth'] = 1
import numpy as np
mpl.rcParams.update({
'font.family': 'serif',
'text.usetex': True,
'pgf.rcfonts': False,
'pgf.texsystem': 'lualatex',
'pgf.preamble': r'\usepackage{unicode-math}\usepackage{siunitx}'
})
N0 = 1113  # for t = 1100 s
N_0 = N0 / 1100  # for t = 1 s
N_gamma = 131.62  # for t = 1 s
N = np.array([6803, 10219, 7897, 7889, 6041, 3363, 3232, 2065, 1911, 1684])
d = np.array([0.1, 0.4, 1.03, 1.3, 2.01, 3.06, 3.45, 4.06, 4.56, 5.10])
t = np.array([60, 110, 180, 230, 400, 600, 700, 800, 900, 1000])
N = (N-N_0) / t  # normalized to t = 1 s, with the background (null effect) subtracted
N_log = np.log(N/N_gamma)
F = np.sqrt(N) / t
print('Error sqrt(N) / t:', np.vstack(F))
def f(d, a, b):
return a * d + b
params, covariance = curve_fit(f, d, N_log)
errors = np.sqrt(np.diag(covariance))
print('a =', params[0], '±', errors[0])
print('b =', params[1], '±', errors[1])
plt.plot(d, f(d, *params), '-', color='deeppink', label='Linear least-squares fit')
plt.plot(d, N_log, 'bx', label='Measured values for lead absorber')
plt.grid()
plt.xlabel(r'$d \,/\, \si{\centi\meter}$')
plt.ylabel(r'$\log{(\frac{N-N_0}{N_\symup{ohne}})}$')  # N_ohne: count rate without absorber, N_0: count rate without source
plt.legend(loc='best')
plt.tight_layout()
plt.savefig('bilder/gammaPB.pdf')
plt.show()
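# Editor's note (not part of the original script): with the attenuation law
# N(d) = N_gamma * exp(-mu * d), the fitted line log(N / N_gamma) = a*d + b
# gives the attenuation coefficient of lead as mu = -a (in 1/cm, since d is
# given in cm). E.g. a fitted slope of a = -1.2 would correspond to
# mu ~ 1.2 cm^-1; the numbers here are purely illustrative.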
| mit | -4,312,687,204,794,683,000 | 27.62 | 115 | 0.638015 | false |
marcuskelly/recover | app/manage_commands.py | 1 | 1623 | # This file defines command line commands for manage.py
import datetime
from app.init_app import app, db, manager
from app.models import User, Role
@manager.command
def init_db():
""" Initialize the database."""
# Create all tables
db.create_all()
# Add all Users
add_users()
def add_users():
""" Create users when app starts """
# Adding roles
admin_role = find_or_create_role('admin', u'Admin')
# Add users
#user = find_or_create_user(u'Admin', u'Example', u'[email protected]', 'Password1', admin_role)
#user = find_or_create_user(u'Admin1', u'Example', u'[email protected]', 'Password1', admin_role)
#user = find_or_create_user(u'User', u'Example', u'[email protected]', 'Password1')
# Save to DB
db.session.commit()
def find_or_create_role(name, label):
""" Find existing role or create new role """
role = Role.query.filter(Role.name == name).first()
if not role:
role = Role(name=name, label=label)
db.session.add(role)
return role
def find_or_create_user(user_name, dob, email, password, role=None):
""" Find existing user or create new user """
user = User.query.filter(User.email == email).first()
if not user:
user = User(username=user_name,
email=email,
dob=datetime.datetime.utcnow(),
password=app.user_manager.hash_password(password),
active=True,
confirmed_at=datetime.datetime.utcnow())
if role:
user.roles.append(role)
db.session.add(user)
return user
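# Illustrative sketch (editor's addition): these helpers are idempotent, e.g.
#   admin_role = find_or_create_role('admin', u'Admin')
#   find_or_create_user(u'Admin', u'Example', u'[email protected]', 'Password1', admin_role)
# creates the role/user on the first run of `manage.py init_db` and simply
# returns the already existing records on subsequent runs.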
| bsd-2-clause | -1,381,855,864,617,946,000 | 27.982143 | 102 | 0.612446 | false |
lifemapper/LmQGIS | scripts/createQgisPackage.py | 1 | 4111 | """
@summary: This module creates a zip file for a plugin that can be uploaded to
the QGIS repository.
@author: CJ Grady
@status: alpha
@version: 1.0
@license: gpl2
@copyright: Copyright (C) 2014, University of Kansas Center for Research
Lifemapper Project, lifemapper [at] ku [dot] edu,
Biodiversity Institute,
1345 Jayhawk Boulevard, Lawrence, Kansas, 66045, USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
"""
import fnmatch
import os
import re
from zipfile import ZipFile
import ConfigParser
import itertools
import StringIO
from LmCommon.common.config import Config
# Jeff: Change these to the locations on your system
IN_DIR = "/home/jcavner/workspace/lm3/components/LmClient/LmQGIS/V2/"
OUT_LOCATION = "/home/jcavner/plugin/V2/lifemapperTools_Testdfsdf2.zip"
#CONFIG_LOCATION = "/home/jcavner/workspace/lm3/components/config/lmconfigfile.jeff"
#SITE_CONFIG = "/home/jcavner/workspace/lm3/components/config/config.site.ini"
SECTIONS = ['LmClient - contact','LmCommon - common','LmClient - Open Tree of Life','SiteConfig']
EXCLUDES = ['.svn', '*.pyc','*.ini']
# .............................................................................
def getFilenames(inDir):
"""
@summary: Gets all of the files and directories in the input directory that
don't match the exclude patterns
@param inDir: The input directory to find files in
"""
excludes = r'|'.join([fnmatch.translate(x) for x in EXCLUDES]) or r'$.'
matches = []
for root, dirnames, fns in os.walk(inDir, topdown=True, followlinks=True):
dirnames[:] = [d for d in dirnames if not re.match(excludes, d)]
files = [os.path.join(root, f) for f in fns]
files = [f for f in files if not re.match(excludes, f)]
matches.extend(files)
return matches
# .............................................................................
def createZipFile(matches, inDir, outFn, configStrIO):
"""
@summary: Creates a zip file containing all of the files in matches
@param matches: Files to include in the zip file
@param inDir: The base directory for these files. The zip file will store
the directory structure under this location
@param outFn: The output zip file name to use
"""
with ZipFile(outFn, mode='w') as zf:
for fn in matches:
zf.write(fn, fn[len(inDir):])
zf.writestr('lifemapperTools/config/config.ini', configStrIO.getvalue())
def getConfigSections():
#config = ConfigParser.SafeConfigParser()
config = Config().config
#config.read(CONFIG_LOCATION)
#config.read(SITE_CONFIG)
allSec = {}
for sec in SECTIONS:
allSec[sec] = config.items(sec)
return allSec
def createNewConfig(sections):
newConfig = ConfigParser.SafeConfigParser()
for key in sections.keys():
newConfig.add_section(key)
for k,v in sections[key]:
newConfig.set(key,k,v)
output = StringIO.StringIO()
newConfig.write(output)
return output
# .............................................................................
if __name__ == "__main__":
#print Config().config
filenames = getFilenames(IN_DIR)
sections = getConfigSections()
configStr = createNewConfig(sections)
createZipFile(filenames, IN_DIR, OUT_LOCATION,configStr)
| gpl-2.0 | 5,150,867,926,875,265,000 | 37.064815 | 97 | 0.640477 | false |
klenwell/mushpup-demo | main.py | 1 | 1071 | """`main` is the top level module for your Flask application."""
# Imports
from os.path import dirname, join
from datetime import date
from flask import Flask
import jinja2
# Constants
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(join(dirname(__file__), 'templates')),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
app = Flask(__name__)
@app.route('/')
def index():
"""Return a friendly HTTP greeting."""
template = JINJA_ENVIRONMENT.get_template('index.html')
return template.render(year=date.today().year)
@app.route('/hello')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def application_error(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
| apache-2.0 | -4,553,135,595,806,245,000 | 24.5 | 76 | 0.69281 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/researcher_url_v30_rc1.py | 1 | 9643 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30_rc1 import CreatedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc1 import LastModifiedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.source_v30_rc1 import SourceV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.url_v30_rc1 import UrlV30Rc1 # noqa: F401,E501
class ResearcherUrlV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'created_date': 'CreatedDateV30Rc1',
'last_modified_date': 'LastModifiedDateV30Rc1',
'source': 'SourceV30Rc1',
'url_name': 'str',
'url': 'UrlV30Rc1',
'visibility': 'str',
'path': 'str',
'put_code': 'int',
'display_index': 'int'
}
attribute_map = {
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'url_name': 'url-name',
'url': 'url',
'visibility': 'visibility',
'path': 'path',
'put_code': 'put-code',
'display_index': 'display-index'
}
def __init__(self, created_date=None, last_modified_date=None, source=None, url_name=None, url=None, visibility=None, path=None, put_code=None, display_index=None): # noqa: E501
"""ResearcherUrlV30Rc1 - a model defined in Swagger""" # noqa: E501
self._created_date = None
self._last_modified_date = None
self._source = None
self._url_name = None
self._url = None
self._visibility = None
self._path = None
self._put_code = None
self._display_index = None
self.discriminator = None
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if url_name is not None:
self.url_name = url_name
if url is not None:
self.url = url
if visibility is not None:
self.visibility = visibility
if path is not None:
self.path = path
if put_code is not None:
self.put_code = put_code
if display_index is not None:
self.display_index = display_index
@property
def created_date(self):
"""Gets the created_date of this ResearcherUrlV30Rc1. # noqa: E501
:return: The created_date of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: CreatedDateV30Rc1
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this ResearcherUrlV30Rc1.
:param created_date: The created_date of this ResearcherUrlV30Rc1. # noqa: E501
:type: CreatedDateV30Rc1
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this ResearcherUrlV30Rc1. # noqa: E501
:return: The last_modified_date of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: LastModifiedDateV30Rc1
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this ResearcherUrlV30Rc1.
:param last_modified_date: The last_modified_date of this ResearcherUrlV30Rc1. # noqa: E501
:type: LastModifiedDateV30Rc1
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this ResearcherUrlV30Rc1. # noqa: E501
:return: The source of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: SourceV30Rc1
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this ResearcherUrlV30Rc1.
:param source: The source of this ResearcherUrlV30Rc1. # noqa: E501
:type: SourceV30Rc1
"""
self._source = source
@property
def url_name(self):
"""Gets the url_name of this ResearcherUrlV30Rc1. # noqa: E501
:return: The url_name of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: str
"""
return self._url_name
@url_name.setter
def url_name(self, url_name):
"""Sets the url_name of this ResearcherUrlV30Rc1.
:param url_name: The url_name of this ResearcherUrlV30Rc1. # noqa: E501
:type: str
"""
self._url_name = url_name
@property
def url(self):
"""Gets the url of this ResearcherUrlV30Rc1. # noqa: E501
:return: The url of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: UrlV30Rc1
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this ResearcherUrlV30Rc1.
:param url: The url of this ResearcherUrlV30Rc1. # noqa: E501
:type: UrlV30Rc1
"""
self._url = url
@property
def visibility(self):
"""Gets the visibility of this ResearcherUrlV30Rc1. # noqa: E501
:return: The visibility of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this ResearcherUrlV30Rc1.
:param visibility: The visibility of this ResearcherUrlV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
@property
def path(self):
"""Gets the path of this ResearcherUrlV30Rc1. # noqa: E501
:return: The path of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this ResearcherUrlV30Rc1.
:param path: The path of this ResearcherUrlV30Rc1. # noqa: E501
:type: str
"""
self._path = path
@property
def put_code(self):
"""Gets the put_code of this ResearcherUrlV30Rc1. # noqa: E501
:return: The put_code of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this ResearcherUrlV30Rc1.
:param put_code: The put_code of this ResearcherUrlV30Rc1. # noqa: E501
:type: int
"""
self._put_code = put_code
@property
def display_index(self):
"""Gets the display_index of this ResearcherUrlV30Rc1. # noqa: E501
:return: The display_index of this ResearcherUrlV30Rc1. # noqa: E501
:rtype: int
"""
return self._display_index
@display_index.setter
def display_index(self, display_index):
"""Sets the display_index of this ResearcherUrlV30Rc1.
:param display_index: The display_index of this ResearcherUrlV30Rc1. # noqa: E501
:type: int
"""
self._display_index = display_index
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResearcherUrlV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResearcherUrlV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
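# Illustrative sketch (editor's addition, not part of the generated file):
#   r = ResearcherUrlV30Rc1(url_name="Lab page", visibility="PUBLIC")
#   r.to_dict()   # -> dict keyed by attribute names, e.g. {'url_name': 'Lab page', ...}
# Assigning a value outside allowed_values, e.g. r.visibility = "SECRET",
# raises ValueError because of the check in the visibility setter.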
| mit | -3,776,811,176,968,661,000 | 28.31003 | 182 | 0.583843 | false |
pudo-attic/docstash | docstash/collection.py | 1 | 2373 | from os import path, walk, close, unlink
from tempfile import mkstemp
import shutil
from docstash import util
from docstash.document import Document
class Collection(object):
def __init__(self, stash, name):
self.stash = stash
self.name = name
@property
def path(self):
return path.join(self.stash.path, self.name)
def exists(self):
return path.isdir(self.path)
def documents(self):
if self.exists():
for (dirpath, dirnames, filenames) in walk(self.path):
if util.MANIFEST_FILE in filenames:
content_id = path.basename(dirpath)
yield self.get(content_id)
def get(self, content_id):
return Document(self, content_id)
def __iter__(self):
return self.documents()
def ingest(self, something, _move=False, **kwargs):
return util.ingest_misc(self, something, _move=_move, **kwargs)
def ingest_fileobj(self, file_obj, _move=False, **kwargs):
if 'file' not in kwargs:
kwargs['file'] = None
sysfd, path = mkstemp()
with open(path, 'wb') as fh:
fh.write(file_obj.read())
close(sysfd)
return self.ingest_file(path, _move=True, **kwargs)
def ingest_file(self, file_path, _move=False, **kwargs):
file_path = util.fullpath(file_path)
if not _move:
kwargs['source_path'] = file_path
file_name = kwargs.get('file', file_path)
kwargs['file'] = util.filename(file_name)
if 'hash' not in kwargs:
kwargs['hash'] = util.checksum(file_path)
doc = Document(self, kwargs['hash'], **kwargs)
if file_path != doc.file:
if not path.exists(doc.file):
if _move:
shutil.move(file_path, doc.file)
else:
shutil.copyfile(file_path, doc.file)
elif _move:
unlink(file_path)
doc.save()
return doc
def ingest_dir(self, dir_path, **kwargs):
for (dirpath, dirnames, file_names) in walk(dir_path):
for file_name in file_names:
file_path = path.join(dirpath, file_name)
self.ingest_file(file_path, **kwargs)
def __repr__(self):
return '<Collection(%s, %s)>' % (self.stash.path, self.name)
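# Illustrative usage sketch (editor's addition, names hypothetical):
#   coll = Collection(stash, 'invoices')        # `stash` is an existing Stash
#   doc = coll.ingest_file('/tmp/invoice.pdf')
#   for document in coll:
#       print(document)
# ingest_file() keys each Document by its content checksum, so re-ingesting
# an identical file reuses the stored copy instead of duplicating it.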
| mit | 2,439,511,797,433,378,000 | 31.067568 | 71 | 0.564686 | false |
dev-alberto/Bachelor2017 | Code/PerformanceComparison/TestScripts/addition_tests.py | 1 | 1385 | from Code.DataStructures.PrimeCurves import P192, P224, P256, P384
from Code.PerformanceComparison.addition import AdditionPerformanceTest
addTest192 = AdditionPerformanceTest(1000, P192)
addTest192J = AdditionPerformanceTest(1000, P192, jacobi=True)
addTest224 = AdditionPerformanceTest(1000, P224)
addTest224J = AdditionPerformanceTest(1000, P224, jacobi=True)
addTest256 = AdditionPerformanceTest(1000, P256)
addTest256J = AdditionPerformanceTest(1000, P256, jacobi=True)
addTest384 = AdditionPerformanceTest(1000, P384)
addTest384J = AdditionPerformanceTest(1000, P384, jacobi=True)
print("** 192 **")
print("Affine")
print(addTest192.addition_test())
print(addTest192.point_double_test())
print("Jacobi")
print(addTest192J.point_double_test())
print(addTest192J.addition_test())
print("** 224 **")
print("Affine")
print(addTest224.addition_test())
print(addTest224.point_double_test())
print("Jacobi")
print(addTest224J.addition_test())
print(addTest224J.point_double_test())
print("** 256 **")
print("Affine")
print(addTest256.addition_test())
print(addTest256.point_double_test())
print("jacobi")
print(addTest256J.addition_test())
print(addTest256J.point_double_test())
print("** 384 **")
print("Affine")
print(addTest384.addition_test())
print(addTest384.point_double_test())
print("Jacobi")
print(addTest384J.addition_test())
print(addTest384J.point_double_test())
| apache-2.0 | -2,048,588,028,924,258,300 | 27.854167 | 71 | 0.777617 | false |
Smart-Green/needle | setup.py | 1 | 1056 | #!/usr/bin/env python
"""
Setup file for pyhaystack
"""
#from pyhaystack.client.HaystackConnection import HaystackConnection
#from pyhaystack.client.NiagaraAXConnection import NiagaraAXConnection
#from pyhaystack import pyhaystack as ph
import pyhaystack.info as info
#from setuptools import setup
from distutils.core import setup
import re
import os
import requests
os.environ['COPY_EXTENDED_ATTRIBUTES_DISABLE'] = 'true'
os.environ['COPYFILE_DISABLE'] = 'true'
setup(name='pyhaystack',
version=info.__version__,
description='Python Haystack Utility',
author='Christian Tremblay',
author_email='[email protected]',
url='http://www.project-haystack.com/',
long_description = "\n".join(info.__doc__.split('\n')),
install_requires = ['requests','setuptools','pandas','numpy'],
packages=['pyhaystack', 'pyhaystack.client', 'pyhaystack.haystackIO','pyhaystack.history','pyhaystack.util','pyhaystack.server',],
entry_points={
'console_scripts': ['pyhaystack=pyhaystack:main'],
},
) | apache-2.0 | 794,013,742,930,574,100 | 31.030303 | 136 | 0.721591 | false |
marble/Toolchain_RenderDocumentation | 18-Make-and-build/42-Latex/run_45-Copy-latex-typo3-stuff.py | 1 | 5072 | #!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
from __future__ import absolute_import
import os
import re
import shutil
import stat
import sys
import tct
from os.path import exists as ospe, join as ospj
from tct import deepget
params = tct.readjson(sys.argv[1])
binabspath = sys.argv[2]
facts = tct.readjson(params['factsfile'])
milestones = tct.readjson(params['milestonesfile'])
reason = ''
resultfile = params['resultfile']
result = tct.readjson(resultfile)
loglist = result['loglist'] = result.get('loglist', [])
toolname = params['toolname']
toolname_pure = params['toolname_pure']
workdir = params['workdir']
exitcode = CONTINUE = 0
# ==================================================
# Make a copy of milestones for later inspection?
# --------------------------------------------------
if 0 or milestones.get('debug_always_make_milestones_snapshot'):
tct.make_snapshot_of_milestones(params['milestonesfile'], sys.argv[1])
# ==================================================
# Helper functions
# --------------------------------------------------
def lookup(D, *keys, **kwdargs):
result = deepget(D, *keys, **kwdargs)
loglist.append((keys, result))
return result
# ==================================================
# define
# --------------------------------------------------
copied_latex_resources = []
run_latex_make_sh_file = None
xeq_name_cnt = 0
# ==================================================
# Check params
# --------------------------------------------------
if exitcode == CONTINUE:
loglist.append('CHECK PARAMS')
make_latex = lookup(milestones, 'make_latex', default=None)
if not make_latex:
CONTINUE = -2
reason = 'Nothing to do'
if exitcode == CONTINUE:
build_latex = lookup(milestones, 'build_latex', default=None)
builder_latex_folder = lookup(milestones, 'builder_latex_folder', default=None)
latex_contrib_typo3_folder = lookup(milestones,
'latex_contrib_typo3_folder',
default=None)
if not (1
and build_latex
and builder_latex_folder
and latex_contrib_typo3_folder):
CONTINUE = -2
reason = 'Bad params or nothing to do'
if exitcode == CONTINUE:
loglist.append('PARAMS are ok')
else:
loglist.append('Bad PARAMS or nothing to do')
# ==================================================
# work
# --------------------------------------------------
if exitcode == CONTINUE:
if not os.path.isdir(latex_contrib_typo3_folder):
exitcode = 22
reason = 'Folder does not exist'
if exitcode == CONTINUE:
foldername = os.path.split(latex_contrib_typo3_folder)[1]
destpath = ospj(builder_latex_folder, foldername)
shutil.copytree(latex_contrib_typo3_folder, destpath)
if exitcode == CONTINUE:
run_latex_make_sh_file = ospj(builder_latex_folder, 'run-make.sh')
f2text = (
"#!/bin/bash\n"
"\n"
"# This is run-make.sh\n"
"\n"
'scriptdir=$( cd $(dirname "$0") ; pwd -P )'
"\n"
"# cd to this dir\n"
"pushd \"$scriptdir\" >/dev/null\n"
"\n"
"# set environment var pointing to the folder and run make\n"
"TEXINPUTS=::texmf_typo3 make\n"
"\n"
"popd >/dev/null\n"
"\n"
)
with open(run_latex_make_sh_file, 'w') as f2:
f2.write(f2text)
file_permissions = (os.stat(run_latex_make_sh_file).st_mode | stat.S_IXUSR
| stat.S_IXGRP
| stat.S_IXOTH)
os.chmod(run_latex_make_sh_file, file_permissions)
if exitcode == CONTINUE:
makefile_path = ospj(builder_latex_folder, 'Makefile')
makefile_original_path = makefile_path + '.original'
if ospe(makefile_path) and not ospe(makefile_original_path):
shutil.copy2(makefile_path, makefile_original_path)
with open(makefile_path, 'rb') as f1:
data = f1.read()
data, cnt = re.subn("LATEXMKOPTS[ ]*=[ ]*\n", "\n\n\n\nLATEXMKOPTS = -interaction=nonstopmode\n\n\n\n\n", data)
if cnt:
with open(makefile_path, 'wb') as f2:
f2.write(data)
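# Editor's note (not part of the original script): the substitution above
# rewrites an empty assignment in the Sphinx-generated Makefile, e.g.
#   LATEXMKOPTS =
# into
#   LATEXMKOPTS = -interaction=nonstopmode
# so that latexmk keeps running instead of stopping at the first LaTeX error.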
# ==================================================
# Set MILESTONE
# --------------------------------------------------
if copied_latex_resources:
result['MILESTONES'].append({'copied_latex_resources':
copied_latex_resources})
if run_latex_make_sh_file:
result['MILESTONES'].append({'run_latex_make_sh_file':
run_latex_make_sh_file})
# ==================================================
# save result
# --------------------------------------------------
tct.save_the_result(result, resultfile, params, facts, milestones, exitcode, CONTINUE, reason)
# ==================================================
# Return with proper exitcode
# --------------------------------------------------
sys.exit(exitcode)
| mit | 4,781,026,684,952,183,000 | 29.554217 | 115 | 0.51321 | false |
WarmMe/alpha1 | WarmMe/thermostat/Activator.py | 1 | 3097 | #!/usr/bin/python
# set gpio18 to high when content of file state is 'ON'
import RPi.GPIO as GPIO
import time
import MySQLdb as mdb
import sys
import time
# set up GPIO (pin 12) that commands the relay
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
GPIO.setup(12, GPIO.OUT)
def run():
# Vary
curTime = time.strftime("%H:%M:%S")
try:
global con
con = mdb.connect('localhost', 'root', 'warmme', 'warmme');
cur = con.cursor()
# Current temp
cur.execute("SELECT value from sensorMonitorLast")
curTemp = cur.fetchone()
print 'Current temperature: ' + str(curTemp[0])
# Activation type
cur.execute("SELECT type from activationTarget")
qryResult = cur.fetchone()
if qryResult[0] == 'MANUAL':
print 'Activation type: MANUAL'
# Get manual activator
cur = con.cursor()
cur.execute("SELECT tempValue from activationManual")
manualActivator = cur.fetchone()
if manualActivator is None:
print "No manual temp set, set GPIO to low"
turnOff(curTemp[0]);
else:
print 'Target temperature: ' + str(manualActivator[0])
heatMe(curTemp[0], manualActivator[0])
elif qryResult[0] == 'SCHEDULE':
print 'Activation type: SCHEDULE'
# Get schedule activator
cur = con.cursor()
qry = "SELECT tempValue from activationSchedule where (startTime <= '" + str(curTime) + "' and endTime >= '" + str(curTime) + "') or ((endTime - startTime) < 0 and (('" + str(curTime) + "' >= startTime and '" + str(curTime) + "' < '23:59:59') or ('" + str(curTime) + "' < endTime)))"
cur.execute(qry)
scheduleActivator = cur.fetchone()
if scheduleActivator is None:
print "No schedule, set GPIO to low"
turnOff(curTemp[0]);
else:
print 'Target temperature: ' + str(scheduleActivator[0])
heatMe(curTemp[0], scheduleActivator[0])
elif qryResult[0] == 'OFF':
print 'Activation type: OFF'
print "set GPIO to low"
turnOff(curTemp[0]);
except mdb.Error, e:
print "Error %d: %s" % (e.args[0],e.args[1])
sys.exit(1)
finally:
if con:
con.close()
def heatMe(curTemp, target):
cur = con.cursor()
if curTemp <= target - .3:
print 'status: HIGH'
GPIO.output(12, GPIO.HIGH)
cur.execute("INSERT into activationStatus (state,tempValue,zone_id) values ('ON',"+str(curTemp)+",1)")
con.commit()
cur = con.cursor()
cur.execute("Update activationStatusLast set state = 'ON', tempValue = "+str(curTemp))
con.commit()
elif curTemp > target + .3:
print 'status: LOW'
GPIO.output(12, GPIO.LOW)
cur = con.cursor()
cur.execute("INSERT into activationStatus (state,tempValue,zone_id) values ('OFF',"+str(curTemp)+",1)")
con.commit()
cur = con.cursor()
cur.execute("Update activationStatusLast set state = 'OFF', tempValue = "+str(curTemp))
con.commit()
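# Editor's note (not part of the original source): the +/- 0.3 degree band
# above acts as hysteresis. With a target of 20.0, heating switches ON once
# the temperature drops to 19.7 or below and OFF only above 20.3; between
# those values the relay keeps its previous state, avoiding rapid switching.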
def turnOff(curTemp):
GPIO.output(12, GPIO.LOW)
cur = con.cursor()
cur.execute("INSERT into activationStatus (state,tempValue,zone_id) values ('OFF',"+str(curTemp)+",1)")
cur.execute("Update activationStatusLast set state = 'OFF', tempValue = "+str(curTemp))
con.commit()
if __name__ == "__main__":
run()
| gpl-3.0 | -5,307,279,268,875,629,000 | 29.362745 | 286 | 0.659671 | false |
Couby/rocket-bike-challenge | hardware/rpm.py | 1 | 1834 | #!/usr/bin/python3
from gpiozero import Button
from http.server import BaseHTTPRequestHandler, HTTPServer
import time
import threading
hostName = "192.168.1.27"
hostPort = 9000
turns = 0
# HTTP server class.
class MyServer(BaseHTTPRequestHandler):
def do_GET(self):
global turns
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(bytes("<html><head><title>Rocket bike challenge</title></head>", "utf-8"))
self.wfile.write(bytes('<body>{"turns": %d}' % turns, "utf-8"))
# Thread class that permanently checks the reed sensor status.
class ThreadingExample(object):
def __init__(self):
self.button = Button(2)
#self.turns = 0
thread = threading.Thread(target=self.run, args=())
thread.daemon = True # Daemonize thread
thread.start() # Start the execution
def run(self):
global turns
while True:
if self.button.is_pressed == True:
turns = turns + 1
print("Button was pressed {} times".format(turns))
while self.button.is_pressed:
pass
# Main function, launches the thread and the HTTP server
if __name__ == '__main__':
print("Launching counter thread...")
thread = ThreadingExample()
print("Thread launched, launching HTTP server...")
myServer = HTTPServer((hostName, hostPort), MyServer)
print(time.asctime(), "Server Starts - %s:%s" % (hostName, hostPort))
try:
myServer.serve_forever()
except KeyboardInterrupt:
pass
myServer.server_close()
print(time.asctime(), "Server Stops - %s:%s" % (hostName, hostPort))
| mit | -7,046,286,909,193,763,000 | 30.75 | 99 | 0.585605 | false |
kiahosseini/django-form-validation | setup.py | 1 | 1327 | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-form-validation',
version='0.1',
packages=['form_validation'],
include_package_data=True,
license='MIT License', # example license
description="A simple Django app to handle form's client validation .",
long_description=README,
url='https://github.com/kiahosseini/django-form-validation',
author='Kiarash Hosseini',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: MIT License', # example license
'Operating System :: OS Independent',
'Programming Language :: Python',
# Replace these appropriately if you are stuck on Python 2.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| mit | 2,919,118,836,394,630,000 | 35.861111 | 78 | 0.636775 | false |
ufal/neuralmonkey | neuralmonkey/encoders/cnn_encoder.py | 1 | 13814 | """CNN for image processing."""
from typing import cast, Callable, Dict, List, Tuple, Union
from typeguard import check_argument_types
import numpy as np
import tensorflow as tf
from neuralmonkey.dataset import Dataset
from neuralmonkey.decorators import tensor
from neuralmonkey.model.feedable import FeedDict
from neuralmonkey.model.parameterized import InitializerSpecs
from neuralmonkey.model.model_part import ModelPart
from neuralmonkey.model.stateful import (SpatialStatefulWithOutput,
TemporalStatefulWithOutput)
from neuralmonkey.nn.projection import multilayer_projection
# Tuples used for configuration of the convolutional layers. See docstring of
# CNNEncoder initialization for more details.
# pylint: disable=invalid-name
ConvSpec = Tuple[str, int, int, str, int]
ResNetSpec = Tuple[str, int, int]
MaxPoolSpec = Tuple[str, int, int, str]
# pylint: enable=invalid-name
class CNNEncoder(ModelPart, SpatialStatefulWithOutput):
"""An image encoder.
It projects the input image through a series of convolutional operations. The
projected image is vertically cut and fed to stacked RNN layers which
encode the image into a single vector.
"""
# pylint: disable=too-many-arguments, too-many-locals
def __init__(self,
name: str,
data_id: str,
convolutions: List[Union[ConvSpec, ResNetSpec, MaxPoolSpec]],
image_height: int, image_width: int, pixel_dim: int,
fully_connected: List[int] = None,
batch_normalize: bool = False,
dropout_keep_prob: float = 0.5,
reuse: ModelPart = None,
save_checkpoint: str = None,
load_checkpoint: str = None,
initializers: InitializerSpecs = None) -> None:
"""Initialize a convolutional network for image processing.
The convolutional network can consist of plain convolutions,
max-pooling layers and residual block. In the configuration, they are
specified using the following tuples.
* convolution: ("C", kernel_size, stride, padding, out_channel);
* max / average pooling: ("M"/"A", kernel_size, stride, padding);
* residual block: ("R", kernel_size, out_channels).
Padding must be either "valid" or "same".
Args:
convolutions: Configuration of convolutional layers.
data_id: Identifier of the data series in the dataset.
image_height: Height of the input image in pixels.
image_width: Width of the image.
pixel_dim: Number of color channels in the input images.
dropout_keep_prob: Probability of keeping neurons active in
dropout. Dropout is done between all convolutional layers and
fully connected layer.
"""
check_argument_types()
ModelPart.__init__(
self, name, reuse, save_checkpoint, load_checkpoint, initializers)
self.data_id = data_id
self.dropout_keep_prob = dropout_keep_prob
self.image_height = image_height
self.image_width = image_width
self.pixel_dim = pixel_dim
self.convolutions = convolutions
self.fully_connected = fully_connected
self.batch_normalize = batch_normalize
# pylint: enable=too-many-arguments, too-many-locals
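# Illustrative configuration sketch (editor's addition, values hypothetical):
#   CNNEncoder(name="cnn", data_id="images",
#              image_height=224, image_width=224, pixel_dim=3,
#              convolutions=[("C", 3, 1, "same", 64),  # 3x3 conv, stride 1, 64 maps
#                            ("M", 2, 2, "same"),      # 2x2 max-pooling, stride 2
#                            ("R", 3, 64)],            # residual block; needs batch_normalize=True
#              batch_normalize=True, fully_connected=[512])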
@property
def input_types(self) -> Dict[str, tf.DType]:
return {self.data_id: tf.float32}
@property
def input_shapes(self) -> Dict[str, tf.TensorShape]:
return {self.data_id: tf.TensorShape(
[None, self.image_height, self.image_width, self.pixel_dim])}
@tensor
def image_input(self) -> tf.Tensor:
return self.dataset[self.data_id]
@tensor
def image_mask(self) -> tf.Tensor:
# the image mask is one everywhere where the image is non-zero, i.e.
# zero pixels are masked out
return tf.sign(tf.reduce_sum(self.image_input, axis=3, keepdims=True))
def batch_norm_callback(self, layer_output: tf.Tensor) -> tf.Tensor:
if self.batch_normalize:
return tf.layers.batch_normalization(
layer_output, training=self.train_mode)
return layer_output
@tensor
def image_processing_layers(self) -> List[Tuple[tf.Tensor, tf.Tensor]]:
"""Do all convolutions and return the last convolutional map.
No dropout is applied between the convolutional layers. By default, the
activation function is ReLU.
"""
last_layer = self.image_input
last_mask = self.image_mask
last_channels = self.pixel_dim
image_processing_layers = [] # type: List[Tuple[tf.Tensor, tf.Tensor]]
with tf.variable_scope("convolutions"):
for i, specification in enumerate(self.convolutions):
if specification[0] == "C":
(last_layer, last_mask,
last_channels) = plain_convolution(
last_layer, last_mask,
cast(ConvSpec, specification),
self.batch_norm_callback, i)
image_processing_layers.append((last_layer, last_mask))
elif specification[0] in ["M", "A"]:
last_layer, last_mask = pooling(
last_layer, last_mask,
cast(MaxPoolSpec, specification), i)
image_processing_layers.append((last_layer, last_mask))
elif specification[0] == "R":
if not self.batch_normalize:
raise ValueError(
"Using ResNet blocks requires batch normalization "
"to be turned on.")
(last_layer, last_mask,
last_channels) = residual_block(
last_layer, last_mask, last_channels,
cast(ResNetSpec, specification),
self.batch_norm_callback, i)
image_processing_layers.append((last_layer, last_mask))
else:
raise ValueError(
"Unknown type of convolutional layer #{}: '{}'".format(
i + 1, specification[0]))
return image_processing_layers
@tensor
def spatial_states(self):
# pylint: disable=unsubscriptable-object
return self.image_processing_layers[-1][0]
# pylint: enable=unsubscriptable-object
@tensor
def spatial_mask(self) -> tf.Tensor:
# pylint: disable=unsubscriptable-object
return self.image_processing_layers[-1][1]
# pylint: enable=unsubscriptable-object
@tensor
def output(self) -> tf.Tensor:
"""Output vector of the CNN.
If fully connected layers are specified, they are applied on top of the
last convolutional map. Dropout is applied between all layers and the
default activation function is ReLU. These are projection layers only;
no softmax is applied.
If no fully connected layer is specified, the average-pooled last
convolutional map is used as the output vector.
"""
# pylint: disable=no-member
last_height, last_width, last_n_channels = [
s.value for s in self.spatial_states.get_shape()[1:]]
# pylint: enable=no-member
if self.fully_connected is None:
# we average out by the image size -> shape is number
# channels from the last convolution
encoded = tf.reduce_mean(self.spatial_states, [1, 2])
return encoded
states_flat = tf.reshape(
self.spatial_states,
[-1, last_width * last_height * last_n_channels])
return multilayer_projection(
states_flat, self.fully_connected,
activation=tf.nn.relu,
dropout_keep_prob=self.dropout_keep_prob,
train_mode=self.train_mode)
def feed_dict(self, dataset: Dataset, train: bool = False) -> FeedDict:
fd = ModelPart.feed_dict(self, dataset, train)
# if it is from the pickled file, it is a list, not a numpy tensor,
# so convert it as as a prevention
images = np.array(list(dataset.get_series(self.data_id)))
fd[self.image_input] = images / 255.0
return fd
def plain_convolution(
prev_layer: tf.Tensor,
prev_mask: tf.Tensor,
specification: ConvSpec,
batch_norm_callback: Callable[[tf.Tensor], tf.Tensor],
layer_num: int) -> Tuple[tf.Tensor, tf.Tensor, int]:
try:
check_argument_types()
except TypeError as err:
raise ValueError((
"Specification of a convolutional layer (number {} in config) "
'needs to have 5 members: "C", kernel size, stride, '
"padding, output channels, was {}").format(
layer_num, specification)) from err
kernel_size, stride, pad, out_channels = specification[1:]
if pad not in ["same", "valid"]:
raise ValueError(
("Padding must be 'same' or 'valid', "
"was '{}' in layer {}.").format(pad, layer_num + 1))
with tf.variable_scope("layer_{}_convolution".format(layer_num)):
next_layer = tf.layers.conv2d(
prev_layer, out_channels, kernel_size,
activation=None, padding=pad)
next_layer = batch_norm_callback(next_layer)
next_layer = tf.nn.relu(next_layer)
next_mask = tf.layers.max_pooling2d(
prev_mask, kernel_size, stride, padding=pad)
return next_layer, next_mask, out_channels
def residual_block(
prev_layer: tf.Tensor,
prev_mask: tf.Tensor,
prev_channels: int,
specification: ResNetSpec,
batch_norm_callback: Callable[[tf.Tensor], tf.Tensor],
layer_num: int) -> Tuple[tf.Tensor, tf.Tensor, int]:
try:
check_argument_types()
except TypeError as err:
raise ValueError((
"Specification of a residual block (number {} in config) "
'needs to have 3 members: "R", kernel size, channels; '
"was {}").format(layer_num, specification)) from err
kernel_size, out_channels = specification[1:]
with tf.variable_scope("layer_{}_resnet_block".format(layer_num)):
if out_channels == prev_channels:
before_resnet_block = prev_layer
else:
with tf.variable_scope("project_input"):
before_resnet_block = tf.layers.conv2d(
prev_layer, out_channels, 1, 1,
"same", activation=None)
before_resnet_block = batch_norm_callback(before_resnet_block)
with tf.variable_scope("conv_a"):
after_cnn = batch_norm_callback(prev_layer)
after_cnn = tf.nn.relu(after_cnn)
after_cnn = tf.layers.conv2d(
after_cnn, out_channels, kernel_size,
padding="same", activation=None)
with tf.variable_scope("conv_b"):
after_cnn = batch_norm_callback(after_cnn)
after_cnn = tf.nn.relu(after_cnn)
after_cnn = tf.layers.conv2d(
after_cnn, out_channels, kernel_size,
padding="same", activation=None)
next_layer = after_cnn + before_resnet_block
return next_layer, prev_mask, out_channels
def pooling(
prev_layer: tf.Tensor,
prev_mask: tf.Tensor,
specification: MaxPoolSpec,
layer_num: int) -> Tuple[tf.Tensor, tf.Tensor]:
try:
check_argument_types()
except TypeError as err:
raise ValueError((
"Specification of a max-pooling layer (number {} in config) "
'needs to have 3 members: "M", pool size, stride, padding, '
"was {}").format(layer_num, specification)) from err
pool_type, pool_size, stride, pad = specification
if pool_type == "M":
pool_fn = tf.layers.max_pooling2d
elif pool_type == "A":
pool_fn = tf.layers.average_pooling2d
else:
raise ValueError(
("Unsupported type of pooling: {}, use 'M' for max-pooling or "
"'A' for average pooling.").format(pool_type))
if pad not in ["same", "valid"]:
raise ValueError(
"Padding must be 'same' or 'valid', was '{}' in layer {}."
.format(pad, layer_num + 1))
with tf.variable_scope("layer_{}_max_pool".format(layer_num)):
        next_layer = pool_fn(prev_layer, pool_size, stride, padding=pad)
        next_mask = tf.layers.max_pooling2d(prev_mask, pool_size, stride, padding=pad)
return next_layer, next_mask
class CNNTemporalView(ModelPart, TemporalStatefulWithOutput):
"""Slice the convolutional maps left to right."""
def __init__(self,
name: str,
cnn: CNNEncoder) -> None:
check_argument_types()
ModelPart.__init__(
self, name, save_checkpoint=None, load_checkpoint=None)
self._cnn = cnn
@tensor
def output(self) -> tf.Tensor:
return self._cnn.output
@tensor
def temporal_states(self):
states = tf.transpose(self._cnn.spatial_states, perm=[0, 2, 1, 3])
shape = states.get_shape()
res = tf.reshape(
states, [-1, shape[1].value, shape[2].value * shape[3].value])
return res
@tensor
def temporal_mask(self) -> tf.Tensor:
mask = tf.squeeze(self._cnn.spatial_mask, 3)
summed = tf.reduce_sum(mask, axis=1)
return tf.to_float(tf.greater(summed, 0))
@property
def dependencies(self) -> List[str]:
return super().dependencies + ["_cnn"]
| bsd-3-clause | 7,487,356,232,433,098,000 | 38.022599 | 79 | 0.597872 | false |
NeCTAR-RC/heat | heat/tests/test_fault_middleware.py | 1 | 6446 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception as heat_exc
from heat.openstack.common.rpc import common as rpc_common
from heat.tests.common import HeatTestCase
from oslo.config import cfg
import heat.api.middleware.fault as fault
class StackNotFoundChild(heat_exc.StackNotFound):
pass
class FaultMiddlewareTest(HeatTestCase):
def test_openstack_exception_with_kwargs(self):
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(heat_exc.StackNotFound(stack_name='a'))
expected = {'code': 404,
'error': {'message': 'The Stack (a) could not be found.',
'traceback': None,
'type': 'StackNotFound'},
'explanation': 'The resource could not be found.',
'title': 'Not Found'}
self.assertEqual(expected, msg)
def test_openstack_exception_without_kwargs(self):
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(heat_exc.StackResourceLimitExceeded())
expected = {'code': 500,
'error': {'message': 'Maximum resources '
'per stack exceeded.',
'traceback': None,
'type': 'StackResourceLimitExceeded'},
'explanation': 'The server has either erred or is '
'incapable of performing the requested '
'operation.',
'title': 'Internal Server Error'}
self.assertEqual(expected, msg)
def test_exception_with_non_ascii_chars(self):
# We set debug to true to test the code path for serializing traces too
cfg.CONF.set_override('debug', True)
msg = u'Error with non-ascii chars \x80'
class TestException(heat_exc.HeatException):
msg_fmt = msg
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(TestException())
expected = {'code': 500,
'error': {'message': u'Error with non-ascii chars \x80',
'traceback': 'None\n',
'type': 'TestException'},
'explanation': ('The server has either erred or is '
'incapable of performing the requested '
'operation.'),
'title': 'Internal Server Error'}
self.assertEqual(expected, msg)
def test_remote_exception(self):
# We want tracebacks
cfg.CONF.set_override('debug', True)
error = heat_exc.StackNotFound(stack_name='a')
exc_info = (type(error), error, None)
serialized = rpc_common.serialize_remote_exception(exc_info)
remote_error = rpc_common.deserialize_remote_exception(cfg.CONF,
serialized)
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(remote_error)
expected_message, expected_traceback = str(remote_error).split('\n', 1)
expected = {'code': 404,
'error': {'message': expected_message,
'traceback': expected_traceback,
'type': 'StackNotFound'},
'explanation': 'The resource could not be found.',
'title': 'Not Found'}
self.assertEqual(expected, msg)
def test_should_not_ignore_parent_classes(self):
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(StackNotFoundChild(stack_name='a'))
expected = {'code': 404,
'error': {'message': 'The Stack (a) could not be found.',
'traceback': None,
'type': 'StackNotFoundChild'},
'explanation': 'The resource could not be found.',
'title': 'Not Found'}
self.assertEqual(expected, msg)
    def test_internal_server_error_when_exception_and_parents_not_mapped(self):
wrapper = fault.FaultWrapper(None)
class NotMappedException(Exception):
pass
msg = wrapper._error(NotMappedException('A message'))
expected = {'code': 500,
'error': {'message': u'A message',
'traceback': None,
'type': 'NotMappedException'},
'explanation': ('The server has either erred or is '
'incapable of performing the requested '
'operation.'),
'title': 'Internal Server Error'}
self.assertEqual(expected, msg)
def test_should_not_ignore_parent_classes_even_for_remote_ones(self):
# We want tracebacks
cfg.CONF.set_override('debug', True)
cfg.CONF.set_override('allowed_rpc_exception_modules',
['heat.tests.test_fault_middleware'])
error = StackNotFoundChild(stack_name='a')
exc_info = (type(error), error, None)
serialized = rpc_common.serialize_remote_exception(exc_info)
remote_error = rpc_common.deserialize_remote_exception(cfg.CONF,
serialized)
wrapper = fault.FaultWrapper(None)
msg = wrapper._error(remote_error)
expected_message, expected_traceback = str(remote_error).split('\n', 1)
expected = {'code': 404,
'error': {'message': expected_message,
'traceback': expected_traceback,
'type': 'StackNotFoundChild'},
'explanation': 'The resource could not be found.',
'title': 'Not Found'}
self.assertEqual(expected, msg)
| apache-2.0 | 1,952,317,835,905,729,500 | 44.394366 | 79 | 0.547626 | false |
wrapp/txwebtest | tests.py | 1 | 2259 | from klein import Klein
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from txwebtest import TestClient
from urlparse import parse_qs
class Tests(TestCase):
def setUp(self):
self.app = TestClient(create_app().resource())
@defer.inlineCallbacks
def test_status_check(self):
yield self.app.get('/names/4', status=404)
try:
yield self.app.get('/names/4', status=200)
self.fail()
except AssertionError:
pass
@defer.inlineCallbacks
def test_post_with_body(self):
resp = yield self.app.post('/names', 'name=Ann', status=201)
new_item_path = resp.get_header('Location')
resp = yield self.app.get(new_item_path, status=200)
assert resp.text == 'Ann'
@defer.inlineCallbacks
def test_put_with_body(self):
yield self.app.put('/names/4', 'name=Ann', status=200)
resp = yield self.app.get('/names/4', status=200)
assert resp.text == 'Ann'
@defer.inlineCallbacks
def test_delete(self):
yield self.app.put('/names/4', 'name=Ann', status=200)
yield self.app.delete('/names/4', status=200)
yield self.app.get('/names/4', status=404)
def create_app():
''' A simple Klein app that associates ints with names. '''
app = Klein()
results = {}
@app.route('/names', methods=['POST'])
def post(request):
name = request.args['name'][0]
item_id = max(results.keys()) + 1 if results else 1
results[item_id] = name
request.setHeader('Location', '/names/%s' % item_id)
request.setResponseCode(201)
return name
@app.route('/names/<int:item_id>', methods=['GET'])
def get(request, item_id):
try:
return results[item_id]
except KeyError:
request.setResponseCode(404)
@app.route('/names/<int:item_id>', methods=['PUT'])
def put(request, item_id):
data = request.content.read()
args = parse_qs(data)
name = args['name'][0]
results[item_id] = name
return ''
@app.route('/names/<int:item_id>', methods=['DELETE'])
def delete(request, item_id):
results.pop(item_id, None)
return app
| mit | 813,507,418,160,723,800 | 29.12 | 68 | 0.59938 | false |
brokenseal/local | server/local/local/messages.py | 1 | 1789 | from __future__ import unicode_literals
import logging
import pymongo
from django.core import signing
from django.conf import settings
from . import models, exceptions
SECRET_KEY = "test"
routing = {}
logger = logging.getLogger(__name__)
def resolve(name):
return routing[name]
def event(message_or_func):
if callable(message_or_func):
message = ':'.join(message_or_func.__name__.split('_'))
routing[message] = message_or_func
return message_or_func
def _wrapper(func):
routing[message_or_func] = func
return func
return _wrapper
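# Example (illustrative only): the decorator derives the routing key from the handler
# name by joining its underscore-separated parts with colons, so a handler named
# `message_create` is registered under "message:create", while `@event('custom:key')`
# registers the decorated function under the explicit key "custom:key".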
def _get_messages_table():
return pymongo.MongoClient(settings.MONGO_DB_URL).mongodb.default.messages
@event
def authentication_authenticate(connection, token, salt):
try:
channel = signing.loads(
token,
key=SECRET_KEY,
salt=salt,
max_age=settings.CHANNEL_MAX_AGE,
)
except (signing.BadSignature, KeyError):
logging.debug('Authentication error: invalid token')
raise exceptions.AuthenticationError('Invalid token')
else:
connection.channel = channel
logging.debug('Authentication successful')
connection.emit(name='authentication:success')
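# Hypothetical counterpart, shown for illustration only: a token accepted by the
# handler above could be produced with the same key and salt, e.g.
#   token = signing.dumps(channel, key=SECRET_KEY, salt=salt)
# where `channel` and `salt` are whatever the client and server have agreed on.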
@event
def message_create(connection, author, text, client_id, **kwargs):
messages = _get_messages_table()
message_id = messages.insert(dict(
author=author,
text=text,
))
new_message = messages.find_one(dict(_id=message_id))
return dict(
name='message:created',
data=dict(
message=new_message,
client_id=client_id,
)
)
@event
def bootstrap(connection):
connection.emit(name="message:list", data=list(_get_messages_table().find()))
| mit | 3,372,694,716,418,899,000 | 22.539474 | 81 | 0.644494 | false |
tboyce1/home-assistant | homeassistant/components/switch/mqtt.py | 4 | 5255 | """
Support for MQTT switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.mqtt/
"""
import asyncio
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.mqtt import (
CONF_STATE_TOPIC, CONF_COMMAND_TOPIC, CONF_AVAILABILITY_TOPIC,
CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_QOS, CONF_RETAIN,
MqttAvailability)
from homeassistant.components.switch import SwitchDevice
from homeassistant.const import (
CONF_NAME, CONF_OPTIMISTIC, CONF_VALUE_TEMPLATE, CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON)
import homeassistant.components.mqtt as mqtt
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['mqtt']
DEFAULT_NAME = 'MQTT Switch'
DEFAULT_PAYLOAD_ON = 'ON'
DEFAULT_PAYLOAD_OFF = 'OFF'
DEFAULT_OPTIMISTIC = False
PLATFORM_SCHEMA = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
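# Illustrative configuration sketch (an assumption, not part of the original file):
# a YAML entry accepted by the schema above might look like
#   switch:
#     - platform: mqtt
#       name: "Bedroom Switch"
#       state_topic: "home/bedroom/switch1"
#       command_topic: "home/bedroom/switch1/set"
#       payload_on: "ON"
#       payload_off: "OFF"
#       optimistic: false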
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the MQTT switch."""
if discovery_info is not None:
config = PLATFORM_SCHEMA(discovery_info)
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
async_add_devices([MqttSwitch(
config.get(CONF_NAME),
config.get(CONF_STATE_TOPIC),
config.get(CONF_COMMAND_TOPIC),
config.get(CONF_AVAILABILITY_TOPIC),
config.get(CONF_QOS),
config.get(CONF_RETAIN),
config.get(CONF_PAYLOAD_ON),
config.get(CONF_PAYLOAD_OFF),
config.get(CONF_OPTIMISTIC),
config.get(CONF_PAYLOAD_AVAILABLE),
config.get(CONF_PAYLOAD_NOT_AVAILABLE),
value_template,
)])
class MqttSwitch(MqttAvailability, SwitchDevice):
"""Representation of a switch that can be toggled using MQTT."""
def __init__(self, name, state_topic, command_topic, availability_topic,
qos, retain, payload_on, payload_off, optimistic,
payload_available, payload_not_available, value_template):
"""Initialize the MQTT switch."""
super().__init__(availability_topic, qos, payload_available,
payload_not_available)
self._state = False
self._name = name
self._state_topic = state_topic
self._command_topic = command_topic
self._qos = qos
self._retain = retain
self._payload_on = payload_on
self._payload_off = payload_off
self._optimistic = optimistic
self._template = value_template
@asyncio.coroutine
def async_added_to_hass(self):
"""Subscribe to MQTT events."""
yield from super().async_added_to_hass()
@callback
def state_message_received(topic, payload, qos):
"""Handle new MQTT state messages."""
if self._template is not None:
payload = self._template.async_render_with_possible_json_value(
payload)
if payload == self._payload_on:
self._state = True
elif payload == self._payload_off:
self._state = False
self.async_schedule_update_ha_state()
if self._state_topic is None:
# Force into optimistic mode.
self._optimistic = True
else:
yield from mqtt.async_subscribe(
self.hass, self._state_topic, state_message_received,
self._qos)
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@asyncio.coroutine
def async_turn_on(self, **kwargs):
"""Turn the device on.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._command_topic, self._payload_on, self._qos,
self._retain)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = True
self.async_schedule_update_ha_state()
@asyncio.coroutine
def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass, self._command_topic, self._payload_off, self._qos,
self._retain)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = False
self.async_schedule_update_ha_state()
| apache-2.0 | -8,304,095,297,446,679,000 | 32.050314 | 79 | 0.631399 | false |
ganga-devs/ganga | ganga/GangaCore/Core/GangaRepository/GangaRepositoryXML.py | 1 | 53576 | # Note: Following stuff must be considered in a GangaRepository:
#
# * lazy loading
# * locking
from GangaCore.Core.GangaRepository import GangaRepository, RepositoryError, InaccessibleObjectError
from GangaCore.Utility.Plugin import PluginManagerError
import os
import os.path
import time
import errno
import copy
import threading
from GangaCore import GANGA_SWAN_INTEGRATION
from GangaCore.Core.GangaRepository.SessionLock import SessionLockManager, dry_run_unix_locks
from GangaCore.Core.GangaRepository.FixedLock import FixedLockManager
import GangaCore.Utility.logging
from GangaCore.Core.GangaRepository.PickleStreamer import to_file as pickle_to_file
from GangaCore.Core.GangaRepository.PickleStreamer import from_file as pickle_from_file
from GangaCore.Core.GangaRepository.VStreamer import to_file as xml_to_file
from GangaCore.Core.GangaRepository.VStreamer import from_file as xml_from_file
from GangaCore.Core.GangaRepository.VStreamer import XMLFileError
from GangaCore.GPIDev.Base.Objects import Node
from GangaCore.Core.GangaRepository.SubJobXMLList import SubJobXMLList
from GangaCore.GPIDev.Base.Proxy import isType, stripProxy, getName
from GangaCore.Utility.Config import getConfig
logger = GangaCore.Utility.logging.getLogger()
save_all_history = False
def check_app_hash(obj):
"""Writes a file safely, raises IOError on error
Args:
obj (GangaObject): This is an object which has a prepared application
"""
isVerifiableApp = False
isVerifiableAna = False
if hasattr(obj, 'application'):
if hasattr(obj.application, 'hash'):
if obj.application.hash is not None:
isVerifiableApp = True
elif hasattr(obj, 'analysis'):
if hasattr(obj.analysis, 'application'):
if hasattr(obj.analysis.application, 'hash'):
if obj.analysis.application.hash is not None:
isVerifiableAna = True
if isVerifiableApp is True:
hashable_app = stripProxy(obj.application)
elif isVerifiableAna is True:
hashable_app = stripProxy(obj.analysis.application)
else:
hashable_app = None
if hashable_app is not None:
if not hashable_app.calc_hash(True):
try:
logger.warning("%s" % hashable_app)
logger.warning('Protected attribute(s) of %s application (associated with %s #%s) changed!' % (getName(hashable_app), getName(obj), obj._registry_id))
except AttributeError as err:
logger.warning('Protected attribute(s) of %s application (associated with %s) changed!!!!' % (getName(hashable_app), getName(obj)))
logger.warning("%s" % err)
jobObj = stripProxy(hashable_app).getJobObject()
if jobObj is not None:
logger.warning('Job: %s is now possibly corrupt!' % jobObj.getFQID('.'))
logger.warning('If you knowingly circumvented the protection, ignore this message (and, optionally,')
logger.warning('re-prepare() the application). Otherwise, please file a bug report at:')
logger.warning('https://github.com/ganga-devs/ganga/issues/')
def safe_save(fn, _obj, to_file, ignore_subs=''):
"""Try to save the XML for this object in as safe a way as possible
Args:
fn (str): This is the name of the file we are to save the object to
_obj (GangaObject): This is the object which we want to save to the file
to_file (str): This is the method we want to use to save the to the file
ignore_subs (str): This is the name(s) of the attribute of _obj we want to ignore in writing to disk
"""
# Add a global lock to make absolutely sure we don't have multiple threads writing files
# See Github Issue 185
with safe_save.lock:
obj = stripProxy(_obj)
check_app_hash(obj)
# Create the dirs
dirname = os.path.dirname(fn)
if not os.path.exists(dirname):
os.makedirs(dirname)
# Prepare new data file
new_name = fn + '.new'
with open(new_name, "w") as tmpfile:
to_file(obj, tmpfile, ignore_subs)
# everything ready so create new data file and backup old one
if os.path.exists(new_name):
# Do we have an old one to backup?
if os.path.exists(fn):
os.rename(fn, fn + "~")
os.rename(new_name, fn)
# Global lock for above function - See issue #185
safe_save.lock = threading.Lock()
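# Illustrative usage sketch (hypothetical paths, not from the original source):
#   safe_save('/repo/6.0/jobs/1xxx/1234/data', job_obj, xml_to_file, ignore_subs='subjobs')
# writes the XML to 'data.new', rotates any existing 'data' to 'data~' and then
# renames 'data.new' into place, all under the module-level lock above.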
def rmrf(name, count=0):
"""
Safely recursively remove a file/folder from disk by first moving it then removing it
calls self and will only attempt to move/remove a file 3 times before giving up
Args:
        count (int): Number of attempts so far; this function calls itself recursively up to 3 times before giving up
"""
if count != 0:
logger.debug("Trying again to remove: %s" % name)
if count == 3:
logger.error("Tried 3 times to remove file/folder: %s" % name)
from GangaCore.Core.exceptions import GangaException
raise GangaException("Failed to remove file/folder: %s" % name)
if os.path.isdir(name):
try:
remove_name = name
if not remove_name.endswith('__to_be_deleted'):
remove_name += '_%s__to_be_deleted_' % time.time()
os.rename(name, remove_name)
#logger.debug("Move completed")
except OSError as err:
if err.errno != errno.ENOENT:
logger.debug("rmrf Err: %s" % err)
logger.debug("name: %s" % name)
raise
return
for sfn in os.listdir(remove_name):
try:
rmrf(os.path.join(remove_name, sfn), count)
except OSError as err:
if err.errno == errno.EBUSY:
logger.debug("rmrf Remove err: %s" % err)
logger.debug("name: %s" % remove_name)
## Sleep 2 sec and try again
time.sleep(2.)
rmrf(os.path.join(remove_name, sfn), count+1)
try:
os.removedirs(remove_name)
except OSError as err:
if err.errno == errno.ENOTEMPTY:
rmrf(remove_name, count+1)
elif err.errno != errno.ENOENT:
logger.debug("%s" % err)
raise
return
else:
try:
remove_name = name + "_" + str(time.time()) + '__to_be_deleted_'
os.rename(name, remove_name)
except OSError as err:
if err.errno not in [errno.ENOENT, errno.EBUSY]:
raise
logger.debug("rmrf Move err: %s" % err)
logger.debug("name: %s" % name)
if err.errno == errno.EBUSY:
rmrf(name, count+1)
return
try:
os.remove(remove_name)
except OSError as err:
if err.errno != errno.ENOENT:
logger.debug("%s" % err)
logger.debug("name: %s" % remove_name)
raise
return
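# Behavioural note (illustrative): rmrf('/repo/6.0/jobs/1xxx/5') first renames the
# target to a '<name>_<timestamp>__to_be_deleted_' path and then removes it, retrying
# up to 3 times so that transient EBUSY errors (e.g. busy NFS handles) do not abort the caller.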
class GangaRepositoryLocal(GangaRepository):
"""GangaRepository Local"""
def __init__(self, registry):
"""
Initialize a Repository from within a Registry and keep a reference to the Registry which 'owns' it
Args:
Registry (Registry): This is the registry which manages this Repo
"""
super(GangaRepositoryLocal, self).__init__(registry)
self.dataFileName = "data"
self.sub_split = "subjobs"
self.root = os.path.join(self.registry.location, "6.0", self.registry.name)
self.lockroot = os.path.join(self.registry.location, "6.0")
self.saved_paths = {}
self.saved_idxpaths = {}
self._cache_load_timestamp = {}
self.printed_explanation = False
self._fully_loaded = {}
def startup(self):
""" Starts a repository and reads in a directory structure.
Raise RepositoryError"""
self._load_timestamp = {}
# New Master index to speed up loading of many, MANY files
self._cache_load_timestamp = {}
self._cached_cat = {}
self._cached_cls = {}
self._cached_obj = {}
self._master_index_timestamp = 0
self.known_bad_ids = []
if "XML" in self.registry.type:
self.to_file = xml_to_file
self.from_file = xml_from_file
elif "Pickle" in self.registry.type:
self.to_file = pickle_to_file
self.from_file = pickle_from_file
else:
raise RepositoryError(self, "Unknown Repository type: %s" % self.registry.type)
if getConfig('Configuration')['lockingStrategy'] == "UNIX":
# First test the UNIX locks are working as expected
try:
dry_run_unix_locks(self.lockroot)
except Exception as err:
# Locking has not worked, lets raise an error
logger.error("Error: %s" % err)
msg="\n\nUnable to launch due to underlying filesystem not working with unix locks."
msg+="Please try launching again with [Configuration]lockingStrategy=FIXED to start Ganga without multiple session support."
raise RepositoryError(self, msg)
# Locks passed test so lets continue
self.sessionlock = SessionLockManager(self, self.lockroot, self.registry.name)
elif getConfig('Configuration')['lockingStrategy'] == "FIXED":
self.sessionlock = FixedLockManager(self, self.lockroot, self.registry.name)
else:
raise RepositoryError(self, "Unable to launch due to unknown file-locking Strategy: \"%s\"" % getConfig('Configuration')['lockingStrategy'])
self.sessionlock.startup()
# Load the list of files, this time be verbose and print out a summary
# of errors
self.update_index(True, True)
logger.debug("GangaRepositoryLocal Finished Startup")
def shutdown(self):
"""Shutdown the repository. Flushing is done by the Registry
Raise RepositoryError
Write an index file for all new objects in memory and master index file of indexes"""
from GangaCore.Utility.logging import getLogger
logger = getLogger()
logger.debug("Shutting Down GangaRepositoryLocal: %s" % self.registry.name)
for k in self._fully_loaded:
try:
self.index_write(k, True)
except Exception as err:
logger.error("Warning: problem writing index object with id %s" % k)
try:
self._write_master_cache(True)
except Exception as err:
logger.warning("Warning: Failed to write master index due to: %s" % err)
self.sessionlock.shutdown()
def get_fn(self, this_id):
""" Returns the file name where the data for this object id is saved
Args:
this_id (int): This is the object id we want the XML filename for
"""
if this_id not in self.saved_paths:
self.saved_paths[this_id] = os.path.join(self.root, "%ixxx" % int(this_id * 0.001), "%i" % this_id, self.dataFileName)
return self.saved_paths[this_id]
def get_idxfn(self, this_id):
""" Returns the file name where the data for this object id is saved
Args:
this_id (int): This is the object id we want the index filename for
"""
if this_id not in self.saved_idxpaths:
self.saved_idxpaths[this_id] = os.path.join(self.root, "%ixxx" % int(this_id * 0.001), "%i.index" % this_id)
return self.saved_idxpaths[this_id]
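    # Layout note (illustrative): for object id 1234 the two helpers above resolve to
    # "<root>/1xxx/1234/<dataFileName>" and "<root>/1xxx/1234.index" respectively,
    # i.e. objects are bucketed into folders of 1000 ids each.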
def index_load(self, this_id):
""" load the index file for this object if necessary
Loads if never loaded or timestamp changed. Creates object if necessary
Returns True if this object has been changed, False if not
Raise IOError on access or unpickling error
Raise OSError on stat error
Raise PluginManagerError if the class name is not found
Args:
this_id (int): This is the id for which we want to load the index file from disk
"""
#logger.debug("Loading index %s" % this_id)
fn = self.get_idxfn(this_id)
# index timestamp changed
fn_ctime = os.stat(fn).st_ctime
cache_time = self._cache_load_timestamp.get(this_id, 0)
if cache_time != fn_ctime:
logger.debug("%s != %s" % (cache_time, fn_ctime))
try:
with open(fn, 'rb') as fobj:
cat, cls, cache = pickle_from_file(fobj)[0]
except EOFError:
pass
except Exception as x:
logger.warning("index_load Exception: %s" % x)
raise IOError("Error on unpickling: %s %s" %(getName(x), x))
if this_id in self.objects:
obj = self.objects[this_id]
setattr(obj, "_registry_refresh", True)
else:
try:
obj = self._make_empty_object_(this_id, cat, cls)
except Exception as err:
raise IOError('Failed to Parse information in Index file: %s. Err: %s' % (fn, err))
this_cache = obj._index_cache
this_data = this_cache if this_cache else {}
for k, v in cache.items():
this_data[k] = v
#obj.setNodeData(this_data)
obj._index_cache = cache
self._cache_load_timestamp[this_id] = fn_ctime
self._cached_cat[this_id] = cat
self._cached_cls[this_id] = cls
self._cached_obj[this_id] = cache
return True
elif this_id not in self.objects:
self.objects[this_id] = self._make_empty_object_(this_id, self._cached_cat[this_id], self._cached_cls[this_id])
self.objects[this_id]._index_cache = self._cached_obj[this_id]
setattr(self.objects[this_id], '_registry_refresh', True)
return True
else:
logger.debug("Doubly loading of object with ID: %s" % this_id)
logger.debug("Just silently continuing")
return False
def index_write(self, this_id, shutdown=False):
""" write an index file for this object (must be locked).
Should not raise any Errors,
Args:
this_id (int): This is the index for which we want to write the index to disk
shutdown (bool): True causes this to always be written regardless of any checks"""
if this_id in self.incomplete_objects:
return
logger.debug("Writing index: %s" % this_id)
obj = self.objects[this_id]
try:
ifn = self.get_idxfn(this_id)
new_idx_cache = self.registry.getIndexCache(stripProxy(obj))
if not os.path.exists(ifn) or shutdown:
new_cache = new_idx_cache
with open(ifn, "wb") as this_file:
new_index = (obj._category, getName(obj), new_cache)
logger.debug("Writing: %s" % str(new_index))
pickle_to_file(new_index, this_file)
self._cached_obj[this_id] = new_cache
obj._index_cache = {}
self._cached_obj[this_id] = new_idx_cache
except IOError as err:
logger.error("Index saving to '%s' failed: %s %s" % (ifn, getName(err), err))
def get_index_listing(self):
"""Get dictionary of possible objects in the Repository: True means index is present,
False if not present
Raise RepositoryError"""
try:
if not os.path.exists(self.root):
os.makedirs(self.root)
obj_chunks = [d for d in os.listdir(self.root) if d.endswith("xxx") and d[:-3].isdigit()]
except OSError as err:
logger.debug("get_index_listing Exception: %s" % err)
raise RepositoryError(self, "Could not list repository '%s'!" % (self.root))
objs = {} # True means index is present, False means index not present
for c in obj_chunks:
try:
listing = os.listdir(os.path.join(self.root, c))
except OSError as err:
logger.debug("get_index_listing Exception: %s")
raise RepositoryError(self, "Could not list repository '%s'!" % (os.path.join(self.root, c)))
objs.update(dict([(int(l), False) for l in listing if l.isdigit()]))
for l in listing:
if l.endswith(".index") and l[:-6].isdigit():
this_id = int(l[:-6])
if this_id in objs:
objs[this_id] = True
else:
try:
rmrf(self.get_idxfn(this_id))
logger.warning("Deleted index file without data file: %s" % self.get_idxfn(this_id))
except OSError as err:
logger.debug("get_index_listing delete Exception: %s" % err)
return objs
def _read_master_cache(self):
"""
read in the master cache to reduce significant I/O over many indexes separately on startup
"""
try:
_master_idx = os.path.join(self.root, 'master.idx')
if os.path.isfile(_master_idx):
logger.debug("Reading Master index")
self._master_index_timestamp = os.stat(_master_idx).st_ctime
with open(_master_idx, 'rb') as input_f:
this_master_cache = pickle_from_file(input_f)[0]
for this_cache in this_master_cache:
if this_cache[1] >= 0:
this_id = this_cache[0]
self._cache_load_timestamp[this_id] = this_cache[1]
self._cached_cat[this_id] = this_cache[2]
self._cached_cls[this_id] = this_cache[3]
self._cached_obj[this_id] = this_cache[4]
else:
logger.debug("Not Reading Master Index")
except Exception as err:
GangaCore.Utility.logging.log_unknown_exception()
logger.debug("Master Index corrupt, ignoring it")
logger.debug("Exception: %s" % err)
self._clear_stored_cache()
finally:
rmrf(os.path.join(self.root, 'master.idx'))
def _clear_stored_cache(self):
"""
clear the master cache(s) which have been stored in memory
"""
        # clear() avoids mutating the dicts while iterating over them
        self._cache_load_timestamp.clear()
        self._cached_cat.clear()
        self._cached_cls.clear()
        self._cached_obj.clear()
def _write_master_cache(self, shutdown=False):
"""
write a master index cache once per 300sec
Args:
            shutdown (bool): True causes this to be written now
"""
try:
_master_idx = os.path.join(self.root, 'master.idx')
this_master_cache = []
if os.path.isfile(_master_idx) and not shutdown:
if abs(self._master_index_timestamp - os.stat(_master_idx).st_ctime) < 300:
return
items_to_save = iter(self.objects.items())
for k, v in items_to_save:
if k in self.incomplete_objects:
continue
try:
if k in self._fully_loaded:
# Check and write index first
obj = v#self.objects[k]
new_index = None
if obj is not None:
new_index = self.registry.getIndexCache(stripProxy(obj))
if new_index is not None:
#logger.debug("k: %s" % k)
arr_k = [k]
if len(self.lock(arr_k)) != 0:
self.index_write(k)
self.unlock(arr_k)
self._cached_obj[k] = new_index
except Exception as err:
logger.debug("Failed to update index: %s on startup/shutdown" % k)
logger.debug("Reason: %s" % err)
iterables = iter(self._cache_load_timestamp.items())
for k, v in iterables:
if k in self.incomplete_objects:
continue
cached_list = []
cached_list.append(k)
try:
fn = self.get_idxfn(k)
if os.path.isfile(fn):
time = os.stat(fn).st_ctime
else:
time = -1
except OSError as err:
logger.debug("_write_master_cache: %s" % err)
logger.debug("_cache_load_timestamp: %s" % self._cache_load_timestamp)
import errno
if err.errno == errno.ENOENT: # If file is not found
time = -1
else:
raise
if time > 0:
cached_list.append(time)
cached_list.append(self._cached_cat[k])
cached_list.append(self._cached_cls[k])
cached_list.append(self._cached_obj[k])
this_master_cache.append(cached_list)
try:
with open(_master_idx, 'wb') as of:
pickle_to_file(this_master_cache, of)
except IOError as err:
logger.debug("write_master: %s" % err)
try:
os.remove(os.path.join(self.root, 'master.idx'))
except OSError as x:
GangaCore.Utility.logging.log_user_exception(True)
except Exception as err:
logger.debug("write_error2: %s" % err)
GangaCore.Utility.logging.log_unknown_exception()
return
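    # Note (derived from the code above, for illustration): each record written to
    # 'master.idx' has the form [id, index_file_ctime, category, class_name, index_cache],
    # and a record is only emitted when the per-object index file exists on disk.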
def updateLocksNow(self):
"""
Trigger the session locks to all be updated now
This is useful when the SessionLock is updating either too slowly or has gone to sleep when there are multiple sessions
"""
self.sessionlock.updateNow()
def update_index(self, this_id=None, verbose=False, firstRun=False):
""" Update the list of available objects
Raise RepositoryError
TODO avoid updating objects which haven't changed as this causes un-needed I/O
Args:
this_id (int): This is the id we want to explicitly check the index on disk for
verbose (bool): Should we be verbose
            firstRun (bool): If this is the call from the Repo startup then load the master index for a performance boost
"""
# First locate and load the index files
logger.debug("updating index...")
objs = self.get_index_listing()
changed_ids = []
deleted_ids = set(self.objects.keys())
summary = []
if firstRun:
self._read_master_cache()
logger.debug("Iterating over Items")
locked_ids = self.sessionlock.locked
for this_id in objs:
deleted_ids.discard(this_id)
# Make sure we do not overwrite older jobs if someone deleted the
# count file
if this_id > self.sessionlock.count:
self.sessionlock.count = this_id + 1
# Locked IDs can be ignored
if this_id in locked_ids:
continue
# Skip corrupt IDs
if this_id in self.incomplete_objects:
continue
# Now we treat unlocked IDs
try:
# if this succeeds, all is well and we are done
if self.index_load(this_id):
changed_ids.append(this_id)
continue
except IOError as err:
logger.debug("IOError: Failed to load index %i: %s" % (this_id, err))
except OSError as err:
logger.debug("OSError: Failed to load index %i: %s" % (this_id, err))
except PluginManagerError as err:
# Probably should be DEBUG
logger.debug("PluginManagerError: Failed to load index %i: %s" % (this_id, err))
# This is a FATAL error - do not try to load the main file, it
# will fail as well
summary.append((this_id, err))
continue
# this is bad - no or corrupted index but object not loaded yet!
# Try to load it!
if not this_id in self.objects:
try:
logger.debug("Loading disk based Object: %s from %s as indexes were missing" % (this_id, self.registry.name))
self.load([this_id])
changed_ids.append(this_id)
# Write out a new index if the file can be locked
if len(self.lock([this_id])) != 0:
if this_id not in self.incomplete_objects:
# If object is loaded mark it dirty so next flush will regenerate XML,
# otherwise just go about fixing it
if not self.isObjectLoaded(self.objects[this_id]):
self.index_write(this_id)
else:
self.objects[this_id]._setDirty()
#self.unlock([this_id])
except KeyError as err:
logger.debug("update Error: %s" % err)
# deleted job
if this_id in self.objects:
self._internal_del__(this_id)
changed_ids.append(this_id)
except (InaccessibleObjectError, ) as x:
logger.debug("update_index: Failed to load id %i: %s" % (this_id, x))
summary.append((this_id, x))
logger.debug("Iterated over Items")
# Check deleted files:
for this_id in deleted_ids:
self._internal_del__(this_id)
changed_ids.append(this_id)
if len(deleted_ids) > 0:
logger.warning("Registry '%s': Job %s externally deleted." % (self.registry.name, ",".join(map(str, list(deleted_ids)))))
if len(summary) > 0:
cnt = {}
examples = {}
for this_id, x in summary:
if this_id in self.known_bad_ids:
continue
cnt[getName(x)] = cnt.get(getName(x), []) + [str(this_id)]
examples[getName(x)] = str(x)
self.known_bad_ids.append(this_id)
# add object to incomplete_objects
if not this_id in self.incomplete_objects:
logger.error("Adding: %s to Incomplete Objects to avoid loading it again in future" % this_id)
self.incomplete_objects.append(this_id)
for exc, ids in cnt.items():
logger.error("Registry '%s': Failed to load %i jobs (IDs: %s) due to '%s' (first error: %s)" % (self.registry.name, len(ids), ",".join(ids), exc, examples[exc]))
if self.printed_explanation is False:
logger.error("If you want to delete the incomplete objects, you can type:\n")
logger.error("'for i in %s.incomplete_ids(): %s(i).remove()'\n (then press 'Enter' twice)" % (self.registry.name, self.registry.name))
logger.error("WARNING!!! This will result in corrupt jobs being completely deleted!!!")
self.printed_explanation = True
logger.debug("updated index done")
if len(changed_ids) != 0:
isShutdown = not firstRun
self._write_master_cache(isShutdown)
return changed_ids
def add(self, objs, force_ids=None):
""" Add the given objects to the repository, forcing the IDs if told to.
Raise RepositoryError
Args:
objs (list): GangaObject-s which we want to add to the Repo
force_ids (list, None): IDs to assign to object, None for auto-assign
"""
logger.debug("add")
if force_ids not in [None, []]: # assume the ids are already locked by Registry
if not len(objs) == len(force_ids):
raise RepositoryError(self, "Internal Error: add with different number of objects and force_ids!")
ids = force_ids
else:
ids = self.sessionlock.make_new_ids(len(objs))
logger.debug("made ids")
for i in range(0, len(objs)):
fn = self.get_fn(ids[i])
try:
os.makedirs(os.path.dirname(fn))
except OSError as e:
if e.errno != errno.EEXIST:
raise RepositoryError( self, "OSError on mkdir: %s" % (e))
self._internal_setitem__(ids[i], objs[i])
# Set subjobs dirty - they will not be flushed if they are not.
if self.sub_split and hasattr(objs[i], self.sub_split):
try:
sj_len = len(getattr(objs[i], self.sub_split))
if sj_len > 0:
for j in range(sj_len):
getattr(objs[i], self.sub_split)[j]._dirty = True
except AttributeError as err:
logger.debug("RepoXML add Exception: %s" % err)
logger.debug("Added")
return ids
def _safe_flush_xml(self, this_id):
"""
        Flush XML to disk whilst checking for the relevant SubJobXMLList which handles subjobs now
flush for "this_id" in the self.objects list
Args:
this_id (int): This is the id of the object we want to flush to disk
"""
fn = self.get_fn(this_id)
obj = self.objects[this_id]
from GangaCore.Core.GangaRepository.VStreamer import EmptyGangaObject
if not isType(obj, EmptyGangaObject):
split_cache = None
has_children = getattr(obj, self.sub_split, False)
if has_children:
logger.debug("has_children")
if hasattr(getattr(obj, self.sub_split), 'flush'):
# I've been read from disk in the new SubJobXMLList format I know how to flush
getattr(obj, self.sub_split).flush()
else:
# I have been constructed in this session, I don't know how to flush!
if hasattr(getattr(obj, self.sub_split)[0], "_dirty"):
split_cache = getattr(obj, self.sub_split)
for i in range(len(split_cache)):
if not split_cache[i]._dirty:
continue
sfn = os.path.join(os.path.dirname(fn), str(i), self.dataFileName)
if not os.path.exists(os.path.dirname(sfn)):
logger.debug("Constructing Folder: %s" % os.path.dirname(sfn))
os.makedirs(os.path.dirname(sfn))
else:
logger.debug("Using Folder: %s" % os.path.dirname(sfn))
safe_save(sfn, split_cache[i], self.to_file)
split_cache[i]._setFlushed()
# Now generate an index file to take advantage of future non-loading goodness
tempSubJList = SubJobXMLList(os.path.dirname(fn), self.registry, self.dataFileName, False, obj)
## equivalent to for sj in job.subjobs
tempSubJList._setParent(obj)
job_dict = {}
for sj in getattr(obj, self.sub_split):
job_dict[sj.id] = stripProxy(sj)
tempSubJList._reset_cachedJobs(job_dict)
tempSubJList.flush(ignore_disk=True)
del tempSubJList
safe_save(fn, obj, self.to_file, self.sub_split)
# clean files not in subjobs anymore... (bug 64041)
for idn in os.listdir(os.path.dirname(fn)):
split_cache = getattr(obj, self.sub_split)
if idn.isdigit() and int(idn) >= len(split_cache):
rmrf(os.path.join(os.path.dirname(fn), idn))
else:
logger.debug("not has_children")
safe_save(fn, obj, self.to_file, "")
# clean files leftover from sub_split
for idn in os.listdir(os.path.dirname(fn)):
if idn.isdigit():
rmrf(os.path.join(os.path.dirname(fn), idn))
if this_id not in self.incomplete_objects:
self.index_write(this_id)
else:
raise RepositoryError(self, "Cannot flush an Empty object for ID: %s" % this_id)
if this_id not in self._fully_loaded:
self._fully_loaded[this_id] = obj
def flush(self, ids):
"""
flush the set of "ids" to disk and write the XML representing said objects in self.objects
NB: This adds the given objects corresponding to ids to the _fully_loaded dict
Args:
ids (list): List of integers, used as keys to objects in the self.objects dict
"""
logger.debug("Flushing: %s" % ids)
#import traceback
#traceback.print_stack()
for this_id in ids:
if this_id in self.incomplete_objects:
logger.debug("Should NEVER re-flush an incomplete object, it's now 'bad' respect this!")
continue
try:
logger.debug("safe_flush: %s" % this_id)
self._safe_flush_xml(this_id)
self._cache_load_timestamp[this_id] = time.time()
self._cached_cls[this_id] = getName(self.objects[this_id])
self._cached_cat[this_id] = self.objects[this_id]._category
self._cached_obj[this_id] = self.objects[this_id]._index_cache
try:
self.index_write(this_id)
except:
logger.debug("Index write failed")
pass
if this_id not in self._fully_loaded:
self._fully_loaded[this_id] = self.objects[this_id]
subobj_attr = getattr(self.objects[this_id], self.sub_split, None)
sub_attr_dirty = getattr(subobj_attr, '_dirty', False)
if sub_attr_dirty:
if hasattr(subobj_attr, 'flush'):
subobj_attr.flush()
self.objects[this_id]._setFlushed()
except (OSError, IOError, XMLFileError) as x:
raise RepositoryError(self, "Error of type: %s on flushing id '%s': %s" % (type(x), this_id, x))
def _check_index_cache(self, obj, this_id):
"""
Checks the index cache of "this_id" against the index cache generated from the "obj"ect
If there is a problem then the object is unloaded from memory but will not do anything if everything agrees here
TODO CHECK IF THIS IS VALID GIVEN WE DYNAMICALLY GENERATE INDEX FOR LOADED OBJECTS
Args:
obj (GangaObject): This is the object which we've loaded from disk
this_id (int): This is the object id which is the objects key in the objects dict
"""
new_idx_cache = self.registry.getIndexCache(stripProxy(obj))
if new_idx_cache != obj._index_cache:
logger.debug("NEW: %s" % new_idx_cache)
logger.debug("OLD: %s" % obj._index_cache)
# index is wrong! Try to get read access - then we can fix this
if len(self.lock([this_id])) != 0:
if this_id not in self.incomplete_objects:
# Mark as dirty if loaded, otherwise load and fix
if not self.isObjectLoaded(self.objects[this_id]):
self.index_write(this_id)
else:
self.objects[this_id]._setDirty()
# self.unlock([this_id])
old_idx_subset = all((k in new_idx_cache and new_idx_cache[k] == v) for k, v in obj._index_cache.items())
if not old_idx_subset:
# Old index cache isn't subset of new index cache
new_idx_subset = all((k in obj._index_cache and obj._index_cache[k] == v) for k, v in new_idx_cache.items())
else:
# Old index cache is subset of new index cache so no need to check
new_idx_subset = True
if not old_idx_subset and not new_idx_subset:
if not GANGA_SWAN_INTEGRATION:
logger.warning("Incorrect index cache of '%s' object #%s was corrected!" % (self.registry.name, this_id))
logger.debug("old cache: %s\t\tnew cache: %s" % (obj._index_cache, new_idx_cache))
self.unlock([this_id])
else:
pass
# if we cannot lock this, the inconsistency is
# most likely the result of another ganga
# process modifying the repo
def _parse_xml(self, fn, this_id, load_backup, has_children, tmpobj):
"""
If we must actually load the object from disk then we end up here.
This replaces the attrs of "objects[this_id]" with the attrs from tmpobj
If there are children then a SubJobXMLList is created to manage them.
        The fn of the job is passed to the SubJobXMLList, together with knowledge of whether we should be loading the backup
Args:
fn (str): This is the path to the data file for this object in the XML
this_id (int): This is the integer key of the object in the self.objects dict
load_backup (bool): This reflects whether we are loading the backup 'data~' or normal 'data' XML file
has_children (bool): This contains the result of the decision as to whether this object actually has children
tmpobj (GangaObject): This contains the object which has been read in from the fn file
"""
# If this_id is not in the objects add the object we got from reading the XML
need_to_copy = True
if this_id not in self.objects:
self.objects[this_id] = tmpobj
need_to_copy = False
obj = self.objects[this_id]
        # If the object was already in the objects dict (i.e. a cache object), replace the schema content whilst avoiding R/O checks and such
# The end goal is to keep the object at this_id the same object in memory but to make it closer to tmpobj.
# TODO investigate changing this to copyFrom
# The temp object is from disk so all contents have correctly passed through sanitising via setattr at least once by now so this is safe
if need_to_copy:
for key, val in tmpobj._data.items():
obj.setSchemaAttribute(key, val)
for attr_name, attr_val in obj._schema.allItems():
if attr_name not in tmpobj._data:
obj.setSchemaAttribute(attr_name, obj._schema.getDefaultValue(attr_name))
if has_children:
logger.debug("Adding children")
# NB Keep be a SetSchemaAttribute to bypass the list manipulation which will put this into a list in some cases
obj.setSchemaAttribute(self.sub_split, SubJobXMLList(os.path.dirname(fn), self.registry, self.dataFileName, load_backup, obj))
else:
if obj._schema.hasAttribute(self.sub_split):
# Infinite loop if we use setattr btw
def_val = obj._schema.getDefaultValue(self.sub_split)
if def_val == []:
from GangaCore.GPIDev.Lib.GangaList.GangaList import GangaList
def_val = GangaList()
obj.setSchemaAttribute(self.sub_split, def_val)
from GangaCore.GPIDev.Base.Objects import do_not_copy
for node_key, node_val in obj._data.items():
if isType(node_val, Node):
if node_key not in do_not_copy:
node_val._setParent(obj)
# Check if index cache; if loaded; was valid:
if obj._index_cache not in [{}]:
self._check_index_cache(obj, this_id)
obj._index_cache = {}
if this_id not in self._fully_loaded:
self._fully_loaded[this_id] = obj
def _load_xml_from_obj(self, fobj, fn, this_id, load_backup):
"""
        This is the method which loads the job from fn via the fobj handle using the self.from_file method; _parse_xml is then called to replace
        self.objects[this_id] with the correct attributes. We also preserve knowledge of whether we're being asked to load a backup or not
Args:
fobj (file handler): This is the file handler for the fn
fn (str): fn This is the name of the file which contains the XML data
this_id (int): This is the key of the object in the objects dict where the output will be stored
load_backup (bool): This reflects whether we are loading the backup 'data~' or normal 'data' XML file
"""
b4=time.time()
tmpobj, errs = self.from_file(fobj)
a4=time.time()
logger.debug("Loading XML file for ID: %s took %s sec" % (this_id, a4-b4))
if len(errs) > 0:
logger.error("#%s Error(s) Loading File: %s" % (len(errs), fobj.name))
for err in errs:
logger.error("err: %s" % err)
raise InaccessibleObjectError(self, this_id, errs[0])
logger.debug("Checking children: %s" % str(this_id))
#logger.debug("Checking in: %s" % os.path.dirname(fn))
#logger.debug("found: %s" % os.listdir(os.path.dirname(fn)))
has_children = SubJobXMLList.checkJobHasChildren(os.path.dirname(fn), self.dataFileName)
logger.debug("Found children: %s" % str(has_children))
self._parse_xml(fn, this_id, load_backup, has_children, tmpobj)
if hasattr(self.objects[this_id], self.sub_split):
sub_attr = getattr(self.objects[this_id], self.sub_split)
if sub_attr is not None and hasattr(sub_attr, '_setParent'):
sub_attr._setParent(self.objects[this_id])
self._load_timestamp[this_id] = os.fstat(fobj.fileno()).st_ctime
logger.debug("Finished Loading XML")
def _open_xml_file(self, fn, this_id, _copy_backup=False):
"""
This loads the XML for the job "this_id" in self.objects using the file "fn" and knowing whether we want the file or the backup by _copy_backup
Args:
fn (str): This is the full XML filename for the given id
this_id (int): This is the key for the object in the objects dict
_copy_backup (bool): Should we use the backup file 'data~' (True) or the 'data' file (False)
"""
fobj = None
has_loaded_backup = False
try:
if not os.path.isfile(fn) and _copy_backup:
if os.path.isfile(fn + '~'):
logger.warning("XML File: %s missing, recovering from backup, recent changes may have been lost!" % fn)
has_loaded_backup = True
try:
from shutil import copyfile
copyfile(fn+'~', fn)
except:
logger.warning("Error Recovering the backup file! loading of Job may Fail!")
fobj = open(fn, "r")
except IOError as x:
if x.errno == errno.ENOENT:
# remove index so we do not continue working with wrong information
try:
# remove internal representation
self._internal_del__(this_id)
rmrf(os.path.dirname(fn) + ".index")
except OSError as err:
logger.debug("load unlink Error: %s" % err)
pass
raise KeyError(this_id)
else:
raise RepositoryError(self, "IOError: %s" % x)
finally:
try:
if os.path.isdir(os.path.dirname(fn)):
ld = os.listdir(os.path.dirname(fn))
if len(ld) == 0:
os.rmdir(os.path.dirname(fn))
logger.warning("No job index or data found, removing empty directory: %s" % os.path.dirname(fn))
except Exception as err:
logger.debug("load error %s" % err)
pass
return fobj, has_loaded_backup
def load(self, ids, load_backup=False):
"""
Load the following "ids" from disk
If we want to load the backup files for these ids then use _copy_backup
Correctly loaded objects are dirty, Objects loaded from backups for whatever reason are marked dirty
Args:
ids (list): The object keys which we want to iterate over from the objects dict
load_backup (bool): This reflects whether we are loading the backup 'data~' or normal 'data' XML file
"""
#print("load: %s " % ids)
#import traceback
#traceback.print_stack()
#print("\n")
logger.debug("Loading Repo object(s): %s" % ids)
for this_id in ids:
if this_id in self.incomplete_objects:
raise RepositoryError(self, "Trying to re-load a corrupt repository id: %s" % this_id)
fn = self.get_fn(this_id)
if load_backup:
has_loaded_backup = True
fn = fn + "~"
else:
has_loaded_backup = False
try:
fobj, has_loaded_backup2 = self._open_xml_file(fn, this_id, True)
if has_loaded_backup2:
has_loaded_backup = has_loaded_backup2
except Exception as err:
logger.debug("XML load: Failed to load XML file: %s" % fn)
logger.debug("Error was:\n%s" % err)
logger.error("Adding id: %s to Corrupt IDs will not attempt to re-load this session" % this_id)
self.incomplete_objects.append(this_id)
raise
try:
self._load_xml_from_obj(fobj, fn, this_id, load_backup)
except RepositoryError as err:
logger.debug("Repo Exception: %s" % err)
logger.error("Adding id: %s to Corrupt IDs will not attempt to re-load this session" % this_id)
self.incomplete_objects.append(this_id)
raise
except Exception as err:
should_continue = self._handle_load_exception(err, fn, this_id, load_backup)
if should_continue is True:
has_loaded_backup = True
continue
else:
logger.error("Adding id: %s to Corrupt IDs will not attempt to re-load this session" % this_id)
self.incomplete_objects.append(this_id)
raise
finally:
fobj.close()
subobj_attr = getattr(self.objects[this_id], self.sub_split, None)
sub_attr_dirty = getattr(subobj_attr, '_dirty', False)
if has_loaded_backup:
self.objects[this_id]._setDirty()
else:
self.objects[this_id]._setFlushed()
if sub_attr_dirty:
getattr(self.objects[this_id], self.sub_split)._setDirty()
logger.debug("Finished 'load'-ing of: %s" % ids)
def _handle_load_exception(self, err, fn, this_id, load_backup):
"""
This method does a lot of the handling of an exception thrown from the load method
We will return True/False here, True if the error can be correctly caught and False if this is terminal and we couldn't load the object
Args:
err (exception): This is the original exception loading the XML data from disk
fn (str): This is the filename which was used to load the file from disk
this_id (int): This is the key of the object in the objects dict
load_backup (bool): This reflects whether we are loading the backup 'data~' or normal 'data' XML file
"""
if isType(err, XMLFileError):
logger.error("XML File failed to load for Job id: %s" % this_id)
logger.error("Actual Error was:\n%s" % err)
if load_backup:
logger.debug("Could not load backup object #%s: %s" % (this_id, err))
raise InaccessibleObjectError(self, this_id, err)
logger.debug("Could not load object #%s: %s" % (this_id, err))
# try loading backup
try:
self.load([this_id], True)
logger.warning("Object '%s' #%s loaded from backup file - recent changes may be lost." % (self.registry.name, this_id))
return True
except Exception as err2:
logger.debug("Exception when loading backup: %s" % err2 )
logger.error("XML File failed to load for Job id: %s" % this_id)
logger.error("Actual Error was:\n%s" % err)
# add object to incomplete_objects
if not this_id in self.incomplete_objects:
logger.error("Loading: %s into incomplete_objects to avoid loading it again in future" % this_id)
self.incomplete_objects.append(this_id)
# remove index so we do not continue working with wrong
# information
rmrf(os.path.dirname(fn) + ".index")
raise InaccessibleObjectError(self, this_id, err)
return False
def delete(self, ids):
"""
This is the method to 'delete' an object from disk, it's written in python and starts with the indexes first
Args:
ids (list): The object keys which we want to iterate over from the objects dict
"""
for this_id in ids:
# First remove the index, so that it is gone if we later have a
# KeyError
fn = self.get_fn(this_id)
try:
rmrf(os.path.dirname(fn) + ".index")
except OSError as err:
logger.debug("Delete Error: %s" % err)
self._internal_del__(this_id)
rmrf(os.path.dirname(fn))
if this_id in self._fully_loaded:
del self._fully_loaded[this_id]
if this_id in self.objects:
del self.objects[this_id]
def lock(self, ids):
"""
Request a session lock for the following ids
Args:
ids (list): The object keys which we want to iterate over from the objects dict
"""
return self.sessionlock.lock_ids(ids)
def unlock(self, ids):
"""
Unlock (release file locks of) the following ids
Args:
ids (list): The object keys which we want to iterate over from the objects dict
"""
released_ids = self.sessionlock.release_ids(ids)
if len(released_ids) < len(ids):
logger.error("The write locks of some objects could not be released!")
def get_lock_session(self, this_id):
"""get_lock_session(id)
Tries to determine the session that holds the lock on id for information purposes, and return an informative string.
Returns None on failure
Args:
this_id (int): Get the id of the session which has a lock on the object with this id
"""
return self.sessionlock.get_lock_session(this_id)
def get_other_sessions(self):
"""get_session_list()
Tries to determine the other sessions that are active and returns an informative string for each of them.
"""
return self.sessionlock.get_other_sessions()
def reap_locks(self):
"""reap_locks() --> True/False
Remotely clear all foreign locks from the session.
WARNING: This is not nice.
Returns True on success, False on error."""
return self.sessionlock.reap_locks()
def clean(self):
"""clean() --> True/False
Clear EVERYTHING in this repository, counter, all jobs, etc.
WARNING: This is not nice."""
self.shutdown()
try:
rmrf(self.root)
except Exception as err:
logger.error("Failed to correctly clean repository due to: %s" % err)
self.startup()
def isObjectLoaded(self, obj):
"""
This will return a true false if an object has been fully loaded into memory
Args:
obj (GangaObject): The object we want to know if it was loaded into memory
"""
try:
_id = next(id_ for id_, o in self._fully_loaded.items() if o is obj)
return True
except StopIteration:
return False
| gpl-2.0 | -6,180,378,785,507,544,000 | 43.646667 | 177 | 0.559075 | false |
google/starthinker | dags/bigquery_function_dag.py | 1 | 4467 | ###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
  1. Ensure a RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
  1. Ensure a RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
BigQuery Function
Add a custom function or table to a dataset.
- Specify the dataset, and the function or table will be added.
- Pearson Significance Test: Check if a correlation is significant.
- RGB To HSV: Convert color values for analysis.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
from starthinker.airflow.factory import DAG_Factory
INPUTS = {
'auth': 'service', # Credentials used for writing function.
'function': 'Pearson Significance Test', # Function or table to create.
'dataset': '', # Existing BigQuery dataset.
}
RECIPE = {
'tasks': [
{
'bigquery': {
'auth': {
'field': {
'name': 'auth',
'kind': 'authentication',
'order': 0,
'default': 'service',
'description': 'Credentials used for writing function.'
}
},
'function': {
'field': {
'name': 'function',
'kind': 'choice',
'order': 1,
'choices': [
'Pearson Significance Test',
'RGB To HSV'
],
'default': 'Pearson Significance Test',
'description': 'Function or table to create.'
}
},
'to': {
'dataset': {
'field': {
'name': 'dataset',
'kind': 'string',
'order': 1,
'default': '',
'description': 'Existing BigQuery dataset.'
}
}
}
}
}
]
}
dag_maker = DAG_Factory('bigquery_function', RECIPE, INPUTS)
dag = dag_maker.generate()
if __name__ == "__main__":
dag_maker.print_commandline()
| apache-2.0 | -8,183,038,106,305,137,000 | 32.088889 | 145 | 0.570405 | false |
eggsandbeer/scheduler | synergy/db/model/daemon_process_entry.py | 1 | 2920 | __author__ = 'Bohdan Mushkevych'
from odm.document import BaseDocument
from odm.fields import StringField, DictField, ListField
from synergy.scheduler.scheduler_constants import TYPE_MANAGED, TYPE_FREERUN, TYPE_GARBAGE_COLLECTOR, EXCHANGE_UTILS, \
TYPE_DAEMON
PROCESS_NAME = 'process_name'
CLASSNAME = 'classname'
MQ_QUEUE = 'mq_queue'
MQ_EXCHANGE = 'mq_exchange'
MQ_ROUTING_KEY = 'mq_routing_key'
ARGUMENTS = 'arguments'
TOKEN = 'token'
PROCESS_TYPE = 'process_type'
LOG_FILENAME = 'log_filename'
LOG_TAG = 'log_tag'
PID_FILENAME = 'pid_filename'
PRESENT_ON_BOXES = 'present_on_boxes' # list of boxes where this process is monitored by the Supervisor
class DaemonProcessEntry(BaseDocument):
""" Non-persistent model. This class presents Process Context Entry record """
process_name = StringField(PROCESS_NAME)
classname = StringField(CLASSNAME)
token = StringField(TOKEN)
mq_queue = StringField(MQ_QUEUE)
mq_exchange = StringField(MQ_EXCHANGE)
mq_routing_key = StringField(MQ_ROUTING_KEY)
arguments = DictField(ARGUMENTS)
process_type = StringField(PROCESS_TYPE, choices=[TYPE_MANAGED, TYPE_FREERUN, TYPE_DAEMON, TYPE_GARBAGE_COLLECTOR])
present_on_boxes = ListField(PRESENT_ON_BOXES)
pid_filename = StringField(PID_FILENAME)
log_filename = StringField(LOG_FILENAME)
@BaseDocument.key.getter
def key(self):
return self.process_name
@key.setter
def key(self, value):
""" :param value: name of the process """
self.process_name = value
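# Factory helper below builds a DaemonProcessEntry, deriving the MQ queue/routing names and
# pid/log file names from the token (e.g. queue_<token>_daemon, <token>_daemon.pid) when not given.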
def daemon_context_entry(process_name,
classname,
token,
exchange=EXCHANGE_UTILS,
present_on_boxes=None,
arguments=None,
queue=None,
routing=None,
process_type=TYPE_DAEMON,
pid_file=None,
log_file=None):
""" forms process context entry """
_ROUTING_PREFIX = 'routing_'
_QUEUE_PREFIX = 'queue_'
_SUFFIX = '_daemon'
if queue is None:
queue = _QUEUE_PREFIX + token + _SUFFIX
if routing is None:
routing = _ROUTING_PREFIX + token + _SUFFIX
if pid_file is None:
pid_file = token + _SUFFIX + '.pid'
if log_file is None:
log_file = token + _SUFFIX + '.log'
if arguments is None:
arguments = dict()
else:
assert isinstance(arguments, dict)
process_entry = DaemonProcessEntry(
process_name=process_name,
classname=classname,
token=token,
mq_queue=queue,
mq_routing_key=routing,
mq_exchange=exchange,
present_on_boxes=present_on_boxes,
arguments=arguments,
process_type=process_type,
log_filename=log_file,
pid_filename=pid_file)
return process_entry
| bsd-3-clause | 1,068,565,629,341,200,000 | 31.444444 | 119 | 0.621575 | false |
updownlife/multipleK | bin/reads2kmer/reads2kmer.py | 1 | 1433 | #!/usr/bin/env python
import sys
from Bio import SeqIO
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-o','--output', dest = "outputFile", help = "Name of the output file")
parser.add_option('-k','--klength', dest = "kmerLength", help = "Length of kmer")
parser.add_option('-r','--readsfile', dest = "readsFilename", help = "Name of the reads file")
(options, args) = parser.parse_args(sys.argv[1:])
kmerFilename = options.outputFile
readsFilename = options.readsFilename
kmerLength = int(options.kmerLength)
read_id = -1;
if readsFilename[-1] == 'a':
formatStr = 'fasta'
else:
formatStr = 'fastq'
kmer_file = open(kmerFilename, 'w')
kmer_list=""
buffer_size = 255
count = 0
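# Slide a window of length kmerLength across every read and emit each k-mer as a FASTA
# record ('>read_index read_id'), buffering output and flushing roughly every buffer_size records.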
for seq_record in SeqIO.parse(readsFilename,formatStr):
read_id += 1
cur = 0
cur_max = len(seq_record) - kmerLength
for cur in range(0, cur_max):
kmer_seq = str(seq_record.seq[cur:cur+kmerLength]);
kmer = '>' + str(read_id) +' '+ seq_record.id + '\n' + kmer_seq + '\n'
# kmer = SeqIO.SeqRecord(kmer_seq, id=seq_record.id, description="")
# kmer = SeqIO.SeqRecord(kmer_seq, id=seq_record.id, name=seq_record.name, description=seq_record.description)
kmer_list += kmer
count += 1;
if count > buffer_size:
kmer_file.write(kmer_list);
count = 0
kmer_list = "";
# SeqIO.write(kmer_list, kmer_file, "fasta");
if count != 0:
kmer_file.write(kmer_list)
kmer_file.close()
| gpl-2.0 | -4,149,227,004,103,901,000 | 32.325581 | 111 | 0.665736 | false |
f3at/feat | src/feat/agencies/messaging/emu.py | 1 | 6790 | # F3AT - Flumotion Asynchronous Autonomous Agent Toolkit
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# See "LICENSE.GPL" in the source distribution for more information.
# Headers in this file shall remain intact.
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
import functools
from zope.interface import implements
from feat.common import log, defer
from feat.agencies.messaging.rabbitmq import Connection, Queue
from feat.agencies.message import BaseMessage
from feat.agencies import common
from feat.agencies.messaging.interface import IMessagingClient
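# In-memory stand-ins for AMQP exchanges used by the emulated broker below:
# DirectExchange delivers only to queues bound with a matching key,
# FanoutExchange copies every published message to all bound queues.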
class DirectExchange(object):
def __init__(self, name):
self.name = name
# key -> [ list of queues ]
self._bindings = {}
def bind(self, queue, key):
assert isinstance(queue, Queue)
list_for_key = self._bindings.get(key, [])
if not queue in list_for_key:
list_for_key.append(queue)
self._bindings[key] = list_for_key
def unbind(self, queue, key):
list_for_key = self._bindings.get(key, [])
if queue in list_for_key:
list_for_key.remove(queue)
if len(list_for_key) == 0:
del(self._bindings[key])
def publish(self, message, key):
assert message is not None
list_for_key = self._bindings.get(key, [])
for queue in list_for_key:
queue.enqueue(message)
class FanoutExchange(object):
def __init__(self, name):
self.name = name
# [ list of queues ]
self._bindings = []
def bind(self, queue, key=None):
assert isinstance(queue, Queue), type(Queue)
if key is not None:
raise AttributeError("Specified key for fanout exchange. Key: %r" %
(key, ))
if queue not in self._bindings:
self._bindings.append(queue)
def unbind(self, queue, key=None):
if key is not None:
raise AttributeError("Specified key for fanout exchange. Key: %r" %
(key, ))
try:
self._bindings.remove(queue)
except ValueError:
self.error("Queue %r not bounded too exchange %r" % (queue, self))
def publish(self, message, key=None):
assert message is not None
if key is not None:
raise AttributeError("Specified key for fanout exchange. Key: %r" %
(key, ))
for queue in self._bindings:
queue.enqueue(message)
class RabbitMQ(common.ConnectionManager, log.Logger, log.LogProxy,
common.Statistics):
implements(IMessagingClient)
log_category = "emu-rabbitmq"
exchange_factories = {'fanout': FanoutExchange,
'direct': DirectExchange}
def __init__(self):
common.ConnectionManager.__init__(self)
log_keeper = log.get_default() or log.FluLogKeeper()
log.LogProxy.__init__(self, log_keeper)
log.Logger.__init__(self, self)
common.Statistics.__init__(self)
# name -> queue
self._queues = {}
# name -> exchange
self._exchanges = {}
self._on_connected()
self._enabled = True
### called by simulation driver ###
def disable(self):
self._enabled = False
def enable(self):
self._enabled = True
### IMessagingClient ###
def is_idle(self):
return all(q.is_idle() for q in self._queues.itervalues())
# is_disconnected() from common.ConnectionManager
# wait_connected() from common.ConnectionManager
def disconnect(self):
# nothing to do here
pass
def new_channel(self, sink, queue_name=None):
return Connection(self, sink, queue_name)
def connect(self):
# nothing to do here; in the future, implement timeouts and/or failures here
pass
# add_disconnected_cb() from common.ConnectionManager
# add_reconnected_cb() from common.ConnectionManager
### eoi ###
def define_exchange(self, name, exchange_type=None):
assert name is not None
factory = self.exchange_factories[exchange_type]
exchange = self._get_exchange(name)
if not exchange:
self.log("Defining exchange: %r" % name)
self.increase_stat('exchanges declared')
exchange = factory(name)
self._exchanges[name] = exchange
return exchange
def define_queue(self, name):
assert name is not None
queue = self._get_queue(name)
if not queue:
self.increase_stat('queues created')
queue = Queue(name, on_deliver=functools.partial(
self.increase_stat, 'messages delivered'))
self._queues[name] = queue
self.log("Defining queue: %r" % name)
return queue
def publish(self, key, shard, message):
assert isinstance(message, BaseMessage), str(type(message))
if not self._enabled:
self.log("RabbitMQ is disabled, message will not be really sent")
return defer.succeed(message)
exchange = self._get_exchange(shard)
if exchange:
self.increase_stat('messages published')
exchange.publish(message, key)
else:
self.error("Exchange %r not found!" % shard)
return defer.succeed(message)
def create_binding(self, exchange, queue, key=None):
ex = self._get_exchange(exchange)
if ex is None:
exchange_type = 'direct' if key is not None else 'fanout'
ex = self.define_exchange(exchange, exchange_type)
que = self._get_queue(queue)
ex.bind(que, key)
def delete_binding(self, exchange, queue, key=None):
ex = self._get_exchange(exchange)
que = self._get_queue(queue)
ex.unbind(que, key)
### private ###
def _get_exchange(self, name):
return self._exchanges.get(name, None)
def _get_queue(self, name):
return self._queues.get(name, None)
| gpl-2.0 | -3,548,642,203,115,684,400 | 29.863636 | 79 | 0.614286 | false |
michaeltchapman/flaskmon | app.py | 1 | 6975 | from flask import Flask, render_template, request, jsonify
from sqlalchemy import create_engine, Table, MetaData
from sqlalchemy.orm import sessionmaker
from rrdtool import fetch
import time
from os import listdir
graph_height = 50.0
app = Flask(__name__)
### SqlAlchemy stuff for accessing Openstack State ###
db = create_engine('postgresql://nova:[email protected]/nova')
Session = sessionmaker(bind=db)
session = Session()
metadata = MetaData(db)
sqlservices = Table('services', metadata, autoload=True)
sqlinstances = Table('instances', metadata, autoload=True)
# TODO split nodes by domain/cluster
domains = dict()
nodes = dict()
domains["openstack"] = nodes
@app.route('/')
def index():
return render_template('index.html')
green = '#3B8020'
yellow = '#bfbf00'
orange = '#f07a13'
red = '#bd3838'
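# Overview view: lists nodes from Ganglia's RRD tree and colour-codes each one by its
# 1-minute load average normalised by CPU count; nodes with no recent samples show as 'down'.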
@app.route('/fluid')
def fluid():
nodelist = listdir('/var/lib/ganglia/rrds/unspecified')
for node in nodelist:
if node != '__SummaryInfo__':
nodes[node.split('.')[0]] = dict()
nodes[node.split('.')[0]]['domain'] = node.split('.')[1]
nodes[node.split('.')[0]]['f'] = dict()
nodes[node.split('.')[0]]['s'] = dict()
#### SERVICES ####
# retrieve service statuses from nova database
# should do this from a metric
#for row in session.query(sqlservices):
# if row.host.encode("utf-8") not in nodes:
# print row.host.encode("utf-8")
# pass
# nodes[row.host.encode("utf-8")][s][row.topic.encode("utf-8") + '-disabled'] = row.disabled
# query sql server status
# do this from a local metric instead of here.
# r = session.query("Tuples Returned", "Tuples Fetched", "Transactions Committed", "Blocks Fetched", "Block Cache Hits").from_statement('select pg_stat_get_db_tuples_returned(1) as "Tuples Returned", pg_stat_get_db_tuples_fetched(1) as "Tuples Fetched", pg_stat_get_db_xact_commit(1) as "Transactions Committed", pg_stat_get_db_blocks_fetched(1) as "Blocks Fetched", pg_stat_get_db_blocks_hit(1) as "Block Cache Hits"').all()[0]
#d = dict()
#for row in r.keys():
# d[row] = r.__dict__[row]
#nodes['os-sql'] = d
#### LOAD ####
# use rrdtool to get load of each server
res = 60 # 1 minute
t = int(time.mktime(time.localtime(time.time())))
# need to move things out of 'unspecified" at some point...
# grab 10 minutes because fetch is a bit buggy
for node in nodes:
metrics = listdir('/var/lib/ganglia/rrds/unspecified/' + node + '.' + nodes[node]['domain'])
load_raw = fetch('/var/lib/ganglia/rrds/unspecified/'
+ node + '.' + nodes[node]['domain'] + '/'
+ 'load_one.rrd', 'AVERAGE', '-r ' + str(res),
'-s e-10m', '-e ' + str(t/res*res))[2]
cpus_raw = fetch('/var/lib/ganglia/rrds/unspecified/'
+ node + '.' + nodes[node]['domain'] + '/'
+ 'cpu_num.rrd', 'AVERAGE', '-r ' + str(res),
'-s e-10m', '-e ' + str(t/res*res))[2]
# If we are in the middle of a given
# minute there will be a null value
# so check back a couple of times to see
# if we hit a real value, then mark the
# host as down if that doesn't work
load = load_raw[-2:-1][0][0]
if load == None:
load = load_raw[-3:-2][0][0]
if load == None:
load = load_raw[-4:-3][0][0]
if load == None:
load = -1.0
cpus = cpus_raw[-2:-1][0][0]
if cpus == None:
cpus = cpus_raw[-3:-2][0][0]
if cpus == None:
cpus = cpus_raw[-4:-3][0][0]
if cpus == None:
cpus = -1.0;
if load > 0:
load = load / cpus
if (0 <= load < 0.25):
nodes[node.split('.')[0]]['s']['load'] = 'green'
if (0.25 < load < 0.5):
nodes[node.split('.')[0]]['s']['load'] = 'yellow'
if (0.5 <= load < 0.75):
nodes[node.split('.')[0]]['s']['load'] = 'orange'
if (load >= 0.75 <= 1.0):
nodes[node.split('.')[0]]['s']['load'] = 'red'
if (load < 0 ):
nodes[node.split('.')[0]]['s']['load'] = 'down'
return render_template('fluid.html', nodes=nodes)
# ajax route for node metric div
@app.route('/get_metric')
def get_metric():
node = request.args.get('node', 0, type=str)
# list of nodes avabilable from ganglia
nodelist = listdir('/var/lib/ganglia/rrds/unspecified')
for n in nodelist:
if n != '__SummaryInfo__':
nodes[n.split('.')[0]] = dict()
nodes[n.split('.')[0]]['domain'] = n.split('.')[1]
nodes[node.split('.')[0]]['f'] = dict()
nodes[node.split('.')[0]]['s'] = dict()
# use rrdtool to get load of server
res = 600 # 5 minutes
t = int(time.mktime(time.localtime(time.time())))
# need to move things out of 'unspecified" at some point...
metrics = listdir('/var/lib/ganglia/rrds/unspecified/' + node + '.' + nodes[node]['domain'])
for metric in metrics:
rawdata = fetch('/var/lib/ganglia/rrds/unspecified/'
+ node + '.' + nodes[node]['domain'] + '/'
+ metric, 'AVERAGE', '-r ' + str(res),
'-s e-30m', '-e ' + str(t/res*res))[2]
# find maximum
m = 0.0
for datapoint in rawdata:
if isinstance(datapoint[0], float):
if datapoint[0] > m:
m = datapoint[0]
if m == 0:
ratio = 1
else:
ratio = graph_height/m
data = list()
for i, datapoint in enumerate(rawdata):
if isinstance(datapoint[0], float) and i < 6: # Maybe remove size limit...
value = datapoint[0] * ratio
point = value
if '.' in str(value):
point = str(value).split('.')[0]# + "." + str(value).split('.')[1][:2] # round to 2 decimal places
data.append([str(point), datapoint[0]]) # append the normalised value for display plus the actual value for diagnosis
if isinstance(datapoint[0], str):
data.append(datapoint[0])
# TODO Handle string metrics here
if isinstance(rawdata[0][0], float):
nodes[node]['f'][metric.split('.')[0]] = data
if isinstance(rawdata[0][0], str):
nodes[node]['s'][metric.split('.')[0]] = data
instances = [ instance for instance in session.query(sqlinstances) if instance.deleted == False]
for instance in instances:
print instance.host
return jsonify(metrics=nodes[node])
if __name__ == '__main__':
app.run(host='0.0.0.0')
#app.run(host='172.22.1.205', debug=True)
| bsd-3-clause | 2,899,113,168,013,195,000 | 35.139896 | 435 | 0.531613 | false |
easel/gamestats | src/django/gamestats/loot/submission.py | 1 | 1871 | from xml.etree import ElementTree
from django.contrib.auth.models import User
from gamestats.loot.models import Character, Item, Loot, Attendance, Kill, LootType
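# Inferred input shape: the submission root carries a 'submitter' attribute and contains
# <loot> children (looter/item/timestamp attributes) and <kills> children (killer/killee/timestamp).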
def parse_xml(xml):
"""
Parse an XML submission
"""
root = ElementTree.fromstring(xml)
submitter = User.objects.get(username__iexact=root.get('submitter'))
elem = root.find('loot')
for child in elem.getchildren():
character, _ = Character.objects.get_or_create(name=child.get('looter'))
item, _ = Item.objects.get_or_create(name=child.get('item'))
timestamp = child.get('timestamp').replace('T', ' ')
Loot.objects.get_or_create(
submitter=submitter,
timestamp=timestamp,
character=character,
item=item,
defaults = {
'lootType': LootType.objects.get(name='Unknown')
}
)
# elem = tree.getroot().find('attendance')
# for child in elem.getchildren():
# character = Character.objects.get(name=child.get('name'))
# start_time = child.get('start_time').replace('T', ' ')
# end_time = child.get('end_time').replace('T', ' ')
# Attendance.objects.get_or_create(
# submitter = submitter,
# attendee = character,
# start_time = start_time,
# end_time = end_time
# )
# db.addAttendee(userid, characterid, start_time, end_time)
root.find('kills')
for child in elem.getchildren():
killer = Character.objects.get_or_create(name=child.get('killer'))
killee = Character.objects.get_or_create(name=child.get('killee'))
timestamp = child.get('timestamp').replace('T', ' ')
Kill.objects.add_if_new(
submitter = submitter,
killer = killer,
killee = killee,
timestamp = timestamp
)
| mit | -6,596,245,406,597,632,000 | 35.686275 | 83 | 0.590593 | false |
sharonlev/pyLoggingExtras | test/OutputSetter.py | 1 | 2060 | __author__ = 'Sharon Lev'
__email__ = '[email protected]'
__date__ = '10/25/16'
import sys
from StringIO import StringIO
from unittest import TestCase
from logging import root
from json import dumps
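# Base TestCase that swaps sys.stdout for a StringIO buffer so subclasses can capture
# log output and assert on 'LEVEL:method:message' style lines via validate_output().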
class OutputSetter(TestCase):
"""
"""
temp_stdout = None
@classmethod
def setUpClass(cls):
for handler in root.handlers[:]:
root.removeHandler(handler)
cls.temp_stdout = sys.stdout
sys.stdout = StringIO()
def setUp(self):
sys.stdout.truncate(0)
def tearDown(self):
content = sys.stdout.getvalue()
sys.stderr.writelines(content)
@classmethod
def tearDownClass(cls):
cls.tmp = sys.stdout
sys.stdout = cls.temp_stdout
print 'done!'
def get_log_lines_parts(self):
"""
:return: list of logged lines separated by separator ":"
"""
output = sys.stdout.getvalue().splitlines()
return [line.split(":") for line in output]
def validate_output(self, level, method, expected_output, *wargs, **kwargs):
"""
:param level:
:param method:
:param wargs:
:param kwargs:
:return:
"""
output = self.get_log_lines_parts()
self.assertEqual(len(output), 3, output)
for line in [output[0]] + [output[2]]:
self.assertEqual(line[0], level)
self.assertEqual(line[1], method)
self.assertIn(line[2].split()[0], ['entering', 'exiting'])
if wargs:
for arg in wargs:
self.assertIn(str(arg), output[0][2])
self.assertIn(str(arg), output[1][1])
if kwargs:
for key, value in kwargs.iteritems():
self.assertIn(str(key), ':'.join(output[0]))
self.assertIn(str(value), ':'.join(output[0]))
self.assertIn(str(value), output[1][1])
if expected_output:
self.assertIn("%s" % dumps(expected_output, ensure_ascii=False), ":".join(output[2]))
return output | gpl-3.0 | -1,023,077,614,002,122,000 | 27.625 | 97 | 0.562136 | false |
vaishaksuresh/udacity_data_analyst | P2/ProblemSets_2_to_4/P2_02.py | 1 | 1339 | import pandas
import pandasql
def max_temp_aggregate_by_fog(filename):
'''
This function should run a SQL query on a dataframe of
weather data. The SQL query should return two columns and
two rows - whether it was foggy or not (0 or 1) and the max
maxtempi for that fog value (i.e., the maximum max temperature
for both foggy and non-foggy days). The dataframe will be
titled 'weather_data'. You'll need to provide the SQL query.
You might also find that interpreting numbers as integers or floats may not
work initially. In order to get around this issue, it may be useful to cast
these numbers as integers. This can be done by writing cast(column as integer).
So for example, if we wanted to cast the maxtempi column as an integer, we would actually
write something like where cast(maxtempi as integer) = 76, as opposed to simply
where maxtempi = 76.
You can see the weather data that we are passing in below:
https://www.dropbox.com/s/7sf0yqc9ykpq3w8/weather_underground.csv
'''
weather_data = pandas.read_csv(filename)
q = """
select fog, max(cast (maxtempi as integer)) from weather_data group by fog;
"""
#Execute your SQL command against the pandas frame
rainy_days = pandasql.sqldf(q, locals())
return rainy_days
| gpl-2.0 | -4,712,227,669,270,800,000 | 38.382353 | 93 | 0.704257 | false |
nharraud/invenio-celery | invenio_celery/version.py | 1 | 1193 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Version information for Invenio-Celery.
This file is imported by ``invenio_celery.__init__``,
and parsed by ``setup.py``.
"""
from __future__ import absolute_import, print_function
__version__ = "1.0.0.dev20151008"
| gpl-2.0 | 7,263,255,283,209,021,000 | 35.151515 | 76 | 0.743504 | false |
maas/maas | src/maasserver/api/tests/test_domains.py | 1 | 9830 | # Copyright 2016 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for Domain API."""
import http.client
import json
import random
from django.conf import settings
from django.urls import reverse
from testtools.matchers import ContainsDict, Equals
from maasserver.models import GlobalDefault
from maasserver.models.dnspublication import zone_serial
from maasserver.models.domain import Domain
from maasserver.sequence import INT_MAX
from maasserver.testing.api import APITestCase
from maasserver.testing.factory import factory
from maasserver.utils.orm import reload_object
def get_domains_uri():
"""Return a Domain's URI on the API."""
return reverse("domains_handler", args=[])
def get_domain_uri(domain):
"""Return a Domain URI on the API."""
return reverse("domain_handler", args=[domain.id])
class TestDomainsAPI(APITestCase.ForUser):
def test_handler_path(self):
self.assertEqual("/MAAS/api/2.0/domains/", get_domains_uri())
def test_read(self):
for _ in range(3):
factory.make_Domain()
uri = get_domains_uri()
response = self.client.get(uri)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
expected_ids = [domain.id for domain in Domain.objects.all()]
result_ids = [
domain["id"]
for domain in json.loads(
response.content.decode(settings.DEFAULT_CHARSET)
)
]
self.assertItemsEqual(expected_ids, result_ids)
def test_create(self):
self.become_admin()
domain_name = factory.make_name("domain")
uri = get_domains_uri()
response = self.client.post(uri, {"name": domain_name})
self.assertEqual(
http.client.OK, response.status_code, response.content
)
self.assertEqual(
domain_name,
json.loads(response.content.decode(settings.DEFAULT_CHARSET))[
"name"
],
)
def test_create_admin_only(self):
domain_name = factory.make_name("domain")
uri = get_domains_uri()
response = self.client.post(uri, {"name": domain_name})
self.assertEqual(
http.client.FORBIDDEN, response.status_code, response.content
)
def test_create_requires_name(self):
self.become_admin()
uri = get_domains_uri()
response = self.client.post(uri, {})
self.assertEqual(
http.client.BAD_REQUEST, response.status_code, response.content
)
def test_can_set_serial(self):
zone_serial.create_if_not_exists()
self.become_admin()
uri = get_domains_uri()
serial = random.randint(1, INT_MAX)
response = self.client.post(
uri, {"op": "set_serial", "serial": str(serial)}
)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
# The handler forces a DNS reload by creating a new DNS publication,
# so the serial has already been incremented.
self.assertEqual(serial + 1, next(zone_serial))
def test_set_serial_rejects_serials_less_than_1(self):
zone_serial.create_if_not_exists()
self.become_admin()
uri = get_domains_uri()
# A serial of 1 is fine.
response = self.client.post(uri, {"op": "set_serial", "serial": "1"})
self.assertEqual(
http.client.OK, response.status_code, response.content
)
# A serial of 0 is rejected.
response = self.client.post(uri, {"op": "set_serial", "serial": "0"})
self.assertEqual(
http.client.BAD_REQUEST, response.status_code, response.content
)
def test_set_serial_rejects_serials_greater_than_4294967295(self):
zone_serial.create_if_not_exists()
self.become_admin()
uri = get_domains_uri()
# A serial of 4294967295 is fine.
response = self.client.post(
uri, {"op": "set_serial", "serial": "4294967295"}
)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
# A serial of 4294967296 is rejected.
response = self.client.post(
uri, {"op": "set_serial", "serial": "4294967296"}
)
self.assertEqual(
http.client.BAD_REQUEST, response.status_code, response.content
)
class TestDomainAPI(APITestCase.ForUser):
def test_handler_path(self):
domain = factory.make_Domain()
self.assertEqual(
"/MAAS/api/2.0/domains/%s/" % domain.id, get_domain_uri(domain)
)
def test_read(self):
domain = factory.make_Domain()
for _ in range(3):
factory.make_DNSData(domain=domain)
uri = get_domain_uri(domain)
response = self.client.get(uri)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
parsed_domain = json.loads(
response.content.decode(settings.DEFAULT_CHARSET)
)
self.assertThat(
parsed_domain,
ContainsDict(
{
"id": Equals(domain.id),
"name": Equals(domain.get_name()),
"resource_record_count": Equals(3),
}
),
)
def test_read_includes_default_domain(self):
defaults = GlobalDefault.objects.instance()
old_default = Domain.objects.get_default_domain()
domain = factory.make_Domain()
defaults.domain = domain
defaults.save()
uri = get_domain_uri(domain)
response = self.client.get(uri)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
parsed_domain = json.loads(
response.content.decode(settings.DEFAULT_CHARSET)
)
self.assertThat(
parsed_domain, ContainsDict({"is_default": Equals(True)})
)
uri = get_domain_uri(old_default)
response = self.client.get(uri)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
parsed_domain = json.loads(
response.content.decode(settings.DEFAULT_CHARSET)
)
self.assertThat(
parsed_domain, ContainsDict({"is_default": Equals(False)})
)
def test_read_404_when_bad_id(self):
uri = reverse("domain_handler", args=[random.randint(100, 1000)])
response = self.client.get(uri)
self.assertEqual(
http.client.NOT_FOUND, response.status_code, response.content
)
def test_update(self):
self.become_admin()
authoritative = factory.pick_bool()
domain = factory.make_Domain(authoritative=authoritative)
new_name = factory.make_name("domain")
new_ttl = random.randint(10, 1000)
new_auth = not authoritative
uri = get_domain_uri(domain)
response = self.client.put(
uri, {"name": new_name, "authoritative": new_auth, "ttl": new_ttl}
)
self.assertEqual(
http.client.OK, response.status_code, response.content
)
ret = json.loads(response.content.decode(settings.DEFAULT_CHARSET))
domain = reload_object(domain)
self.assertEqual(new_name, ret["name"])
self.assertEqual(new_name, domain.name)
self.assertEqual(new_ttl, ret["ttl"])
self.assertEqual(new_ttl, domain.ttl)
self.assertEqual(new_auth, ret["authoritative"])
self.assertEqual(new_auth, domain.authoritative)
def test_update_admin_only(self):
domain = factory.make_Domain()
new_name = factory.make_name("domain")
uri = get_domain_uri(domain)
response = self.client.put(uri, {"name": new_name})
self.assertEqual(
http.client.FORBIDDEN, response.status_code, response.content
)
def test_set_default(self):
self.become_admin()
domain = factory.make_Domain()
self.assertEqual(False, domain.is_default())
uri = get_domain_uri(domain)
response = self.client.post(uri, {"op": "set_default"})
self.assertEqual(
http.client.OK, response.status_code, response.content
)
ret = json.loads(response.content.decode(settings.DEFAULT_CHARSET))
domain = reload_object(domain)
self.assertEqual(True, ret["is_default"])
self.assertEqual(True, domain.is_default())
def test_set_default_admin_only(self):
domain = factory.make_Domain()
uri = get_domain_uri(domain)
self.client.post(uri, {"op": "set_default"})
def test_delete_deletes_domain(self):
self.become_admin()
domain = factory.make_Domain()
uri = get_domain_uri(domain)
response = self.client.delete(uri)
self.assertEqual(
http.client.NO_CONTENT, response.status_code, response.content
)
self.assertIsNone(reload_object(domain))
def test_delete_403_when_not_admin(self):
domain = factory.make_Domain()
uri = get_domain_uri(domain)
response = self.client.delete(uri)
self.assertEqual(
http.client.FORBIDDEN, response.status_code, response.content
)
self.assertIsNotNone(reload_object(domain))
def test_delete_404_when_invalid_id(self):
self.become_admin()
uri = reverse("domain_handler", args=[random.randint(100, 1000)])
response = self.client.delete(uri)
self.assertEqual(
http.client.NOT_FOUND, response.status_code, response.content
)
| agpl-3.0 | -6,530,128,466,336,639,000 | 34.232975 | 78 | 0.605595 | false |
michaupl/braincloud | braincloud/urls.py | 1 | 1388 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth.views import login, logout
from tastypie.api import Api
from brainblog.views import *
from brainblog.api import UserResource
from brainindex.api import TextThoughtAjaxResource
admin.autodiscover()
# rest api
rest_api = Api(api_name = 'v1')
rest_api.register(UserResource())
rest_api.register(TextThoughtAjaxResource())
#rest_api.register(ThoughtResource())
urlpatterns = patterns(
'',
# thoughts
url(r'^thoughts/$', list_thoughts, name="list_thoughts"),
url(r'^thoughts/(?P<tag>.+)/$', list_thoughts, name="thoughts_by_tag"),
url(r'^view_thought/(?P<id>\w+)$', view_thought, name="view_thought"),
url(r'^add/$', add, name="add_thought"),
url(r'^edit/(?P<id>\w+)$', edit, name="edit_thought"),
url(r'^delete/(?P<id>\w+)$', delete, name="delete_thought"),
# cloud
url(r'^$', cloud, name="cloud"),
# users
url(r'^accounts/login/$', login),
url(r'^accounts/logout/$', logout, {'next_page': '/'}, name="logout"),
url(r'^accounts/register/$', register, name="register"),
# admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
# search
url(r'^search_results/$', search_thoughts),
# rest api
url(r'^api/', include(rest_api.urls)),
)
| apache-2.0 | -6,159,524,557,304,039,000 | 29.173913 | 75 | 0.654179 | false |
TUBvision/hrl | misc/old/simpledisplay.py | 1 | 1532 | import hrl
import pygame as pg
from pygame.locals import *
from OpenGL.GL import *
from patterns import *
from random import shuffle,randint
import numpy as np  # assumed missing import: np.* is used below (may also be re-exported by patterns)
ntls = 4
wdth = 1024
hght = 766
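# circle(): maps a phase value (wrapping every 2 units) to pixel coordinates on a closed
# path that spans the window, leaving a margin for the patch dimensions.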
def circle(y,ptch):
y = np.mod(y,2) - 1
x = np.sin(-np.pi * y)/2 + 0.5
y = np.abs(y)
return np.round((wdth-ptch.wdth)*x),np.round((hght-ptch.hght)*y)
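# circleTileDraw(): places ntls patches evenly along that path, drawing the test patch
# at a fixed index (itst) and the standard patch at every other position.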
def circleTileDraw(pstd,ptst):
itst = 2
stp = 2.0/ntls
for n in range(ntls):
if n == itst:
ptst.draw(circle(stp*n,ptst))
else:
pstd.draw(circle(stp*n,pstd))
def main():
pg.init()
hrl.initializeOpenGL(1024,766)
dpx = hrl.initializeDPX()
done = False
im1 = hrl.Texture('data/alien.png')
im2 = hrl.Texture('data/cow.png')
#im = hrl.Texture(flatGradient(1024,766),dpx=True)
#im = hrl.Texture('data/linear_rg_gradient.bmp')
while not done:
circleTileDraw(im1,im2)
#im.draw()
#im1.draw((0,0),300,300)
#im1.draw((300,550),200,200)
#im2.draw((550,300),200,200)
#im2.draw((300,50),200,200)
#im2.draw((50,300),200,200)
pg.display.flip()
eventlist = pg.event.get()
for event in eventlist:
if event.type == QUIT \
or event.type == KEYDOWN and event.key == K_ESCAPE:
done = True
if __name__ == '__main__':
main()
### pygame.time.Clock() objects can be used to measure the amount of time between events. ###
| lgpl-2.1 | -4,229,200,050,737,147,000 | 22.709677 | 93 | 0.54765 | false |
Mellthas/quodlibet | quodlibet/quodlibet/player/xinebe/cdefs.py | 1 | 9844 | # Copyright 2006 Lukas Lalinsky
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import ctypes
from quodlibet.util import load_library
try:
_libxine, name = load_library(["libxine.so.2", "libxine.so.1"])
except OSError as e:
raise ImportError(e)
if name.endswith("2"):
_version = 2
else:
_version = 1
class xine_event_t(ctypes.Structure):
if _version == 1:
_fields_ = [
('type', ctypes.c_int),
('stream', ctypes.c_void_p),
('data', ctypes.c_void_p),
('data_length', ctypes.c_int),
]
elif _version == 2:
_fields_ = [
('stream', ctypes.c_void_p),
('data', ctypes.c_void_p),
('data_length', ctypes.c_int),
('type', ctypes.c_int),
]
class xine_ui_message_data_t(ctypes.Structure):
_fields_ = [
('compatibility_num_buttons', ctypes.c_int),
('compatibility_str_len', ctypes.c_int),
('compatibility_str', 256 * ctypes.c_char),
('type', ctypes.c_int),
('explanation', ctypes.c_int),
('num_parameters', ctypes.c_int),
('parameters', ctypes.c_void_p),
('messages', ctypes.c_char),
]
# event listener callback type
xine_event_listener_cb_t = ctypes.CFUNCTYPE(
ctypes.c_void_p, ctypes.c_void_p,
ctypes.POINTER(xine_event_t))
# event types
XINE_EVENT_UI_PLAYBACK_FINISHED = 1
XINE_EVENT_UI_CHANNELS_CHANGED = 2
XINE_EVENT_UI_SET_TITLE = 3
XINE_EVENT_UI_MESSAGE = 4
XINE_EVENT_FRAME_FORMAT_CHANGE = 5
XINE_EVENT_AUDIO_LEVEL = 6
XINE_EVENT_QUIT = 7
XINE_EVENT_PROGRESS = 8
# stream parameters
XINE_PARAM_SPEED = 1 # see below
XINE_PARAM_AV_OFFSET = 2 # unit: 1/90000 ses
XINE_PARAM_AUDIO_CHANNEL_LOGICAL = 3 # -1 => auto, -2 => off
XINE_PARAM_SPU_CHANNEL = 4
XINE_PARAM_VIDEO_CHANNEL = 5
XINE_PARAM_AUDIO_VOLUME = 6 # 0..100
XINE_PARAM_AUDIO_MUTE = 7 # 1=>mute, 0=>unmute
XINE_PARAM_AUDIO_COMPR_LEVEL = 8 # <100=>off, % compress otherw
XINE_PARAM_AUDIO_AMP_LEVEL = 9 # 0..200, 100=>100% (default)
XINE_PARAM_AUDIO_REPORT_LEVEL = 10 # 1=>send events, 0=> don't
XINE_PARAM_VERBOSITY = 11 # control console output
XINE_PARAM_SPU_OFFSET = 12 # unit: 1/90000 sec
XINE_PARAM_IGNORE_VIDEO = 13 # disable video decoding
XINE_PARAM_IGNORE_AUDIO = 14 # disable audio decoding
XINE_PARAM_IGNORE_SPU = 15 # disable spu decoding
XINE_PARAM_BROADCASTER_PORT = 16 # 0: disable, x: server port
XINE_PARAM_METRONOM_PREBUFFER = 17 # unit: 1/90000 sec
XINE_PARAM_EQ_30HZ = 18 # equalizer gains -100..100
XINE_PARAM_EQ_60HZ = 19 # equalizer gains -100..100
XINE_PARAM_EQ_125HZ = 20 # equalizer gains -100..100
XINE_PARAM_EQ_250HZ = 21 # equalizer gains -100..100
XINE_PARAM_EQ_500HZ = 22 # equalizer gains -100..100
XINE_PARAM_EQ_1000HZ = 23 # equalizer gains -100..100
XINE_PARAM_EQ_2000HZ = 24 # equalizer gains -100..100
XINE_PARAM_EQ_4000HZ = 25 # equalizer gains -100..100
XINE_PARAM_EQ_8000HZ = 26 # equalizer gains -100..100
XINE_PARAM_EQ_16000HZ = 27 # equalizer gains -100..100
XINE_PARAM_AUDIO_CLOSE_DEVICE = 28 # force closing audio device
XINE_PARAM_AUDIO_AMP_MUTE = 29 # 1=>mute, 0=>unmute
XINE_PARAM_FINE_SPEED = 30 # 1.000.000 => normal speed
XINE_PARAM_EARLY_FINISHED_EVENT = 31 # send event when demux finish
XINE_PARAM_GAPLESS_SWITCH = 32 # next stream only gapless swi
XINE_PARAM_DELAY_FINISHED_EVENT = 33 # 1/10sec,0=>disable,-1=>forev
# speeds
XINE_SPEED_PAUSE = 0
XINE_SPEED_SLOW_4 = 1
XINE_SPEED_SLOW_2 = 2
XINE_SPEED_NORMAL = 4
XINE_SPEED_FAST_2 = 8
XINE_SPEED_FAST_4 = 16
# metadata
XINE_META_INFO_TITLE = 0
XINE_META_INFO_COMMENT = 1
XINE_META_INFO_ARTIST = 2
XINE_META_INFO_GENRE = 3
XINE_META_INFO_ALBUM = 4
XINE_META_INFO_YEAR = 5
XINE_META_INFO_VIDEOCODEC = 6
XINE_META_INFO_AUDIOCODEC = 7
XINE_META_INFO_SYSTEMLAYER = 8
XINE_META_INFO_INPUT_PLUGIN = 9
# statuses
XINE_STATUS_IDLE = 0
XINE_STATUS_STOP = 1
XINE_STATUS_PLAY = 2
XINE_STATUS_QUIT = 3
XINE_MSG_NO_ERROR = 0 # (messages to UI)
XINE_MSG_GENERAL_WARNING = 1 # (warning message)
XINE_MSG_UNKNOWN_HOST = 2 # (host name)
XINE_MSG_UNKNOWN_DEVICE = 3 # (device name)
XINE_MSG_NETWORK_UNREACHABLE = 4 # none
XINE_MSG_CONNECTION_REFUSED = 5 # (host name)
XINE_MSG_FILE_NOT_FOUND = 6 # (file name or mrl)
XINE_MSG_READ_ERROR = 7 # (device/file/mrl)
XINE_MSG_LIBRARY_LOAD_ERROR = 8 # (library/decoder)
XINE_MSG_ENCRYPTED_SOURCE = 9 # none
XINE_MSG_SECURITY = 10 # (security message)
XINE_MSG_AUDIO_OUT_UNAVAILABLE = 11 # none
XINE_MSG_PERMISSION_ERROR = 12 # (file name or mrl)
XINE_MSG_FILE_EMPTY = 13 # file is empty
XINE_MSG_AUTHENTICATION_NEEDED = 14 # (mrl, likely http); added in 1.2
# xine_t *xine_new(void)
xine_new = _libxine.xine_new
xine_new.restype = ctypes.c_void_p
# void xine_init(xine_t *self)
xine_init = _libxine.xine_init
xine_init.argtypes = [ctypes.c_void_p]
# void xine_exit(xine_t *self)
xine_exit = _libxine.xine_exit
xine_exit.argtypes = [ctypes.c_void_p]
# void xine_config_load(xine_t *self, const char *cfg_filename)
xine_config_load = _libxine.xine_config_load
xine_config_load.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
# const char *xine_get_homedir(void)
xine_get_homedir = _libxine.xine_get_homedir
xine_get_homedir.restype = ctypes.c_char_p
# xine_audio_port_t *xine_open_audio_driver(xine_t *self, const char *id,
# void *data)
xine_open_audio_driver = _libxine.xine_open_audio_driver
xine_open_audio_driver.argtypes = [ctypes.c_void_p,
ctypes.c_char_p, ctypes.c_void_p]
xine_open_audio_driver.restype = ctypes.c_void_p
# void xine_close_audio_driver(xine_t *self, xine_audio_port_t *driver)
xine_close_audio_driver = _libxine.xine_close_audio_driver
xine_close_audio_driver.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
# xine_stream_t *xine_stream_new(xine_t *self,
# xine_audio_port_t *ao, xine_video_port_t *vo)
xine_stream_new = _libxine.xine_stream_new
xine_stream_new.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p]
xine_stream_new.restype = ctypes.c_void_p
# void xine_close(xine_sxine_event_create_listener_threadtream_t *stream)
xine_close = _libxine.xine_close
xine_close.argtypes = [ctypes.c_void_p]
# int xine_open (xine_stream_t *stream, const char *mrl)
xine_open = _libxine.xine_open
xine_open.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
xine_open.restype = ctypes.c_int
# int xine_play(xine_stream_t *stream, int start_pos, int start_time)
xine_play = _libxine.xine_play
xine_play.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int]
xine_play.restype = ctypes.c_int
# void xine_stop(xine_stream_t *stream)
xine_stop = _libxine.xine_stop
xine_stop.argtypes = [ctypes.c_void_p]
# void xine_dispose(xine_stream_t *stream)
xine_dispose = _libxine.xine_dispose
xine_dispose.argtypes = [ctypes.c_void_p]
# xine_event_queue_t *xine_event_new_queue(xine_stream_t *stream)
xine_event_new_queue = _libxine.xine_event_new_queue
xine_event_new_queue.argtypes = [ctypes.c_void_p]
xine_event_new_queue.restype = ctypes.c_void_p
# void xine_event_dispose_queue(xine_event_queue_t *queue)
xine_event_dispose_queue = _libxine.xine_event_dispose_queue
xine_event_dispose_queue.argtypes = [ctypes.c_void_p]
# void xine_event_create_listener_thread(xine_event_queue_t *queue,
# xine_event_listener_cb_t callback,
# void *user_data)
xine_event_create_listener_thread = _libxine.xine_event_create_listener_thread
xine_event_create_listener_thread.argtypes = [ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p]
xine_usec_sleep = _libxine.xine_usec_sleep
xine_usec_sleep.argtypes = [ctypes.c_int]
xine_set_param = _libxine.xine_set_param
xine_set_param.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int]
xine_get_param = _libxine.xine_get_param
xine_get_param.argtypes = [ctypes.c_void_p, ctypes.c_int]
xine_get_param.restype = ctypes.c_int
xine_get_meta_info = _libxine.xine_get_meta_info
xine_get_meta_info.argtypes = [ctypes.c_void_p, ctypes.c_int]
xine_get_meta_info.restype = ctypes.c_char_p
xine_get_status = _libxine.xine_get_status
xine_get_status.argtypes = [ctypes.c_void_p]
xine_get_status.restype = ctypes.c_int
xine_get_pos_length = _libxine.xine_get_pos_length
xine_get_pos_length.argtypes = [ctypes.c_void_p,
ctypes.POINTER(ctypes.c_int), ctypes.POINTER(ctypes.c_int),
ctypes.POINTER(ctypes.c_int)]
xine_get_version_string = _libxine.xine_get_version_string
xine_get_version_string.restype = ctypes.c_char_p
xine_get_file_extensions = _libxine.xine_get_file_extensions
xine_get_file_extensions.argtypes = [ctypes.c_void_p]
xine_get_file_extensions.restype = ctypes.c_char_p
xine_get_mime_types = _libxine.xine_get_mime_types
xine_get_mime_types.argtypes = [ctypes.c_void_p]
xine_get_mime_types.restype = ctypes.c_char_p
xine_list_input_plugins = _libxine.xine_list_input_plugins
xine_list_input_plugins.argtypes = [ctypes.c_void_p]
xine_list_input_plugins.restype = ctypes.POINTER(ctypes.c_char_p)
xine_check_version = _libxine.xine_check_version
xine_check_version.argtypes = [ctypes.c_int, ctypes.c_int,
ctypes.c_int]
xine_check_version.restype = ctypes.c_int
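# Keep the ctypes callback wrappers referenced at module level: xine only stores the raw
# function pointer, so the Python wrapper must stay alive for as long as events may fire.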
_callbacks = []
def xine_event_create_listener_thread(queue, callback, user_data):
cb = xine_event_listener_cb_t(callback)
_callbacks.append(cb)
_libxine.xine_event_create_listener_thread(queue, cb, user_data)
def xine_get_pos_length(stream):
_pos_stream = ctypes.c_int()
_pos_time = ctypes.c_int()
_length_time = ctypes.c_int()
result = _libxine.xine_get_pos_length(stream, ctypes.byref(_pos_stream),
ctypes.byref(_pos_time), ctypes.byref(_length_time))
if result:
return _pos_stream.value, _pos_time.value, _length_time.value
else:
return 0, 0, 0
| gpl-2.0 | 7,453,561,007,139,033,000 | 33.907801 | 78 | 0.701849 | false |
dkriegner/xrayutilities | tests/test_amorphous.py | 1 | 1314 | # This file is part of xrayutilities.
#
# xrayutilities is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2016 Dominik Kriegner <[email protected]>
import unittest
import xrayutilities as xu
class Test_maplog(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.a = xu.materials.Amorphous('Ir0.2Mn0.8', 10130)
def test_elements(self):
self.assertEqual(self.a.base[0][0], xu.materials.elements.Ir)
self.assertEqual(self.a.base[1][0], xu.materials.elements.Mn)
def test_composition(self):
self.assertAlmostEqual(self.a.base[0][1], 0.2, places=10)
self.assertAlmostEqual(self.a.base[1][1], 0.8, places=10)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -650,083,536,945,599,600 | 33.578947 | 71 | 0.716895 | false |
ctalbert/mozharness | configs/signing/android_mozilla-esr10.py | 1 | 4482 | #!/usr/bin/env python
LOCALES = ["en-US", "multi"]
# override tag for all repos
TAG = None
#AUS_SERVER = "dev-stage01.build.mozilla.org"
AUS_SERVER = "aus3-staging.mozilla.org"
#FTP_SERVER = "dev-stage01.build.mozilla.org"
FTP_SERVER = "stage.mozilla.org"
AUS_UPLOAD_BASE_DIR = "/opt/aus2/snippets/staging"
AUS_DIR_BASE_NAME = "Fennec-%(version)s-build%(buildnum)d"
FTP_UPLOAD_BASE_DIR = "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
#DOWNLOAD_BASE_URL = "http://%s%s" % (FTP_SERVER, FTP_UPLOAD_BASE_DIR)
DOWNLOAD_BASE_URL = "http://ftp.mozilla.org/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d"
APK_BASE_NAME = "fennec-%(version)s.%(locale)s.android-arm.apk"
BUILDID_BASE_URL = DOWNLOAD_BASE_URL + "/%(platform)s_info.txt"
STAGE_SSH_KEY = '~/.ssh/ffxbld_dsa'
#STAGE_SSH_KEY = '~/staging_ssh/ffxbld_dsa'
AUS_SSH_KEY = '~/.ssh/auspush'
#AUS_SSH_KEY = '~/staging_ssh/id_rsa'
RELEASE_UPDATE_URL = "http://download.mozilla.org/?product=fennec-%(version)s-complete&os=%(platform)s&lang=%(locale)s"
BETATEST_UPDATE_URL = "http://stage.mozilla.org/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d/%(apk_name)s"
SNIPPET_TEMPLATE = """version=1
type=complete
url=%(url)s
hashFunction=sha512
hashValue=%(sha512_hash)s
size=%(size)d
build=%(buildid)s
appv=%(version)s
extv=%(version)s
"""
KEYSTORE = "/home/cltsign/.android/android-release.keystore"
JAVA_HOME = "/tools/jdk-1.6.0_17"
JARSIGNER = "%s/bin/jarsigner" % JAVA_HOME
KEY_ALIAS = "release"
config = {
"log_name": "sign_android_esr10",
"work_dir": "esr10",
"locales": LOCALES,
"locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-esr10.json",
"release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-esr10.py",
"platforms": ['android',],
"platform_config": {
'android': {},
'android-xul': {
'locales': ['en-US', 'multi'],
},
'android-armv6': {
'locales': ['en-US'],
'apk_base_name': "fennec-%(version)s.%(locale)s.android-arm-armv6.apk"
},
},
"update_platforms": [],
"update_platform_map": {
'android': 'Android_arm-eabi-gcc3',
'android-xul': 'Android_arm-eabi-gcc3-xul',
'android-armv6': 'Android_arm-eabi-gcc3-armv6',
},
"update_channels": {
'release': {
'url': RELEASE_UPDATE_URL,
'template': SNIPPET_TEMPLATE,
'dir_base_name': AUS_DIR_BASE_NAME,
},
'betatest': {
'url': BETATEST_UPDATE_URL,
'template': SNIPPET_TEMPLATE,
'dir_base_name': '%s-test' % AUS_DIR_BASE_NAME,
},
'releasetest': {
'url': RELEASE_UPDATE_URL,
'template': SNIPPET_TEMPLATE,
'dir_base_name': '%s-test' % AUS_DIR_BASE_NAME,
},
},
"ftp_upload_base_dir": FTP_UPLOAD_BASE_DIR,
# These should be from release_config, but that has stage-ffxbld
# which doesn't work with dev-stage01.
"ftp_ssh_key": STAGE_SSH_KEY,
"ftp_user": "ffxbld",
"aus_ssh_key": AUS_SSH_KEY,
"aus_upload_base_dir": AUS_UPLOAD_BASE_DIR,
"apk_base_name": APK_BASE_NAME,
"unsigned_apk_base_name": APK_BASE_NAME,
"download_base_url": DOWNLOAD_BASE_URL,
"download_unsigned_base_subdir": "unsigned/%(platform)s/%(locale)s",
"download_signed_base_subdir": "%(platform)s/%(locale)s",
"buildid_base_url": BUILDID_BASE_URL,
"old_buildid_base_url": BUILDID_BASE_URL,
"actions": [
"passphrase",
"clobber",
"pull",
"download-unsigned-bits",
"sign",
"verify-signatures",
"upload-signed-bits",
],
"keystore": KEYSTORE,
"key_alias": KEY_ALIAS,
"env": {
"PATH": JAVA_HOME + "/bin:%(PATH)s",
},
"exes": {
"jarsigner": JARSIGNER,
"zipalign": "/tools/android-sdk-r8/tools/zipalign",
},
"signature_verification_script": "tools/release/signing/verify-android-signature.sh",
"user_repo_override": "build",
"tag_override": TAG,
"repos": [{
"repo": "http://hg.mozilla.org/%(user_repo_override)s/tools",
"dest": "tools",
"revision": "default",
},{
"repo": "http://hg.mozilla.org/%(user_repo_override)s/buildbot-configs",
"dest": "buildbot-configs",
"revision": "production",
}],
}
| mpl-2.0 | 8,020,089,138,990,125,000 | 33.21374 | 136 | 0.598394 | false |
deepfield/ibis | ibis/sql/mysql/compiler.py | 1 | 6887 | import pandas as pd
import sqlalchemy as sa
import sqlalchemy.dialects.mysql as mysql
from ibis.sql.alchemy import (unary, fixed_arity, infix_op,
_variance_reduction)
import ibis.common as com
import ibis.expr.types as ir
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.sql.alchemy as alch
_operation_registry = alch._operation_registry.copy()
# NOTE: window functions are available from MySQL 8 and MariaDB 10.2
_operation_registry.update(alch._window_functions)
def _substr(t, expr):
f = sa.func.substr
arg, start, length = expr.op().args
sa_arg = t.translate(arg)
sa_start = t.translate(start)
if length is None:
return f(sa_arg, sa_start + 1)
else:
sa_length = t.translate(length)
return f(sa_arg, sa_start + 1, sa_length)
def _string_find(t, expr):
arg, substr, start, _ = expr.op().args
if start is not None:
raise NotImplementedError
sa_arg = t.translate(arg)
sa_substr = t.translate(substr)
return sa.func.locate(sa_arg, sa_substr) - 1
def _capitalize(t, expr):
arg, = expr.op().args
sa_arg = t.translate(arg)
return sa.func.concat(
sa.func.ucase(
sa.func.left(sa_arg, 1)
),
sa.func.substring(sa_arg, 2)
)
def _extract(fmt):
def translator(t, expr):
arg, = expr.op().args
sa_arg = t.translate(arg)
return sa.extract(fmt, sa_arg)
return translator
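# ibis truncation units mapped to MySQL DATE_FORMAT patterns; units with no entry
# (e.g. week) make _truncate() below raise UnsupportedOperationError.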
_truncate_formats = {
's': '%Y-%m-%d %H:%i:%s',
'm': '%Y-%m-%d %H:%i:00',
'h': '%Y-%m-%d %H:00:00',
'D': '%Y-%m-%d',
# 'W': 'week',
'M': '%Y-%m-01',
'Y': '%Y-01-01'
}
def _truncate(t, expr):
arg, unit = expr.op().args
sa_arg = t.translate(arg)
try:
fmt = _truncate_formats[unit]
except KeyError:
raise com.UnsupportedOperationError(
'Unsupported truncate unit {}'.format(unit)
)
return sa.func.date_format(sa_arg, fmt)
def _cast(t, expr):
arg, typ = expr.op().args
sa_arg = t.translate(arg)
sa_type = t.get_sqla_type(typ)
# specialize going from an integer type to a timestamp
if isinstance(arg.type(), dt.Integer) and isinstance(sa_type, sa.DateTime):
return sa.func.timezone('UTC', sa.func.to_timestamp(sa_arg))
if arg.type().equals(dt.binary) and typ.equals(dt.string):
return sa.func.encode(sa_arg, 'escape')
if typ.equals(dt.binary):
# decode yields a column of memoryview which is annoying to deal with
# in pandas. CAST(expr AS BYTEA) is correct and returns byte strings.
return sa.cast(sa_arg, sa.Binary())
return sa.cast(sa_arg, sa_type)
def _log(t, expr):
arg, base = expr.op().args
sa_arg = t.translate(arg)
sa_base = t.translate(base)
return sa.func.log(sa_base, sa_arg)
def _identical_to(t, expr):
left, right = args = expr.op().args
if left.equals(right):
return True
else:
left, right = map(t.translate, args)
return left.op('<=>')(right)
def _round(t, expr):
arg, digits = expr.op().args
sa_arg = t.translate(arg)
if digits is None:
sa_digits = 0
else:
sa_digits = t.translate(digits)
return sa.func.round(sa_arg, sa_digits)
def _floor_divide(t, expr):
left, right = map(t.translate, expr.op().args)
return sa.func.floor(left / right)
def _string_join(t, expr):
sep, elements = expr.op().args
return sa.func.concat_ws(t.translate(sep), *map(t.translate, elements))
def _interval_from_integer(t, expr):
arg, unit = expr.op().args
if unit in {'ms', 'ns'}:
raise com.UnsupportedOperationError(
'MySQL does not allow operation '
'with INTERVAL offset {}'.format(unit)
)
sa_arg = t.translate(arg)
text_unit = expr.type().resolution.upper()
return sa.text('INTERVAL {} {}'.format(sa_arg, text_unit))
def _timestamp_diff(t, expr):
left, right = expr.op().args
sa_left = t.translate(left)
sa_right = t.translate(right)
return sa.func.timestampdiff(sa.text('SECOND'), sa_right, sa_left)
def _literal(t, expr):
if isinstance(expr, ir.IntervalScalar):
if expr.type().unit in {'ms', 'ns'}:
raise com.UnsupportedOperationError(
'MySQL does not allow operation '
'with INTERVAL offset {}'.format(expr.type().unit)
)
text_unit = expr.type().resolution.upper()
return sa.text('INTERVAL {} {}'.format(expr.op().value, text_unit))
elif isinstance(expr, ir.SetScalar):
return list(map(sa.literal, expr.op().value))
else:
value = expr.op().value
if isinstance(value, pd.Timestamp):
value = value.to_pydatetime()
return sa.literal(value)
_operation_registry.update({
ops.Literal: _literal,
# strings
ops.Substring: _substr,
ops.StringFind: _string_find,
ops.Capitalize: _capitalize,
ops.RegexSearch: infix_op('REGEXP'),
# math
ops.Log: _log,
ops.Log2: unary(sa.func.log2),
ops.Log10: unary(sa.func.log10),
ops.Round: _round,
# dates and times
ops.Date: unary(sa.func.date),
ops.DateAdd: infix_op('+'),
ops.DateSub: infix_op('-'),
ops.DateDiff: fixed_arity(sa.func.datediff, 2),
ops.TimestampAdd: infix_op('+'),
ops.TimestampSub: infix_op('-'),
ops.TimestampDiff: _timestamp_diff,
ops.DateTruncate: _truncate,
ops.TimestampTruncate: _truncate,
ops.IntervalFromInteger: _interval_from_integer,
ops.Strftime: fixed_arity(sa.func.date_format, 2),
ops.ExtractYear: _extract('year'),
ops.ExtractMonth: _extract('month'),
ops.ExtractDay: _extract('day'),
ops.ExtractHour: _extract('hour'),
ops.ExtractMinute: _extract('minute'),
ops.ExtractSecond: _extract('second'),
ops.ExtractMillisecond: _extract('millisecond'),
# reductions
ops.Variance: _variance_reduction('var'),
ops.StandardDev: _variance_reduction('stddev'),
ops.IdenticalTo: _identical_to,
ops.TimestampNow: fixed_arity(sa.func.now, 0),
})
def add_operation(op, translation_func):
_operation_registry[op] = translation_func
class MySQLExprTranslator(alch.AlchemyExprTranslator):
_registry = _operation_registry
_rewrites = alch.AlchemyExprTranslator._rewrites.copy()
_type_map = alch.AlchemyExprTranslator._type_map.copy()
_type_map.update({
dt.Boolean: mysql.BOOLEAN,
dt.Int8: mysql.TINYINT,
dt.Int32: mysql.INTEGER,
dt.Int64: mysql.BIGINT,
dt.Double: mysql.DOUBLE,
dt.Float: mysql.FLOAT,
dt.String: mysql.VARCHAR,
})
rewrites = MySQLExprTranslator.rewrites
compiles = MySQLExprTranslator.compiles
class MySQLDialect(alch.AlchemyDialect):
translator = MySQLExprTranslator
dialect = MySQLDialect
| apache-2.0 | -8,932,553,061,287,775,000 | 25.590734 | 79 | 0.62422 | false |
Cad/nameko-rethinkdb | test/test_nameko_rethinkdb.py | 1 | 3632 | import os
import uuid
import pytest
from weakref import WeakKeyDictionary
from nameko.testing.services import entrypoint_hook, dummy
from nameko.testing.utils import get_extension
from mock import Mock
import rethinkdb as r
from rethinkdb.errors import RqlRuntimeError
from nameko_rethinkdb import RDB, RDB_KEY
RDB_HOST = os.environ.get('NAMEKO_RETHINKDB_HOST', "localhost")
RDB_PORT = int(os.environ.get('NAMEKO_RETHINKDB_PORT', 28015))
@pytest.fixture(scope="function")
def setup_db(request):
db = uuid.uuid4().hex
connection = r.connect(host=RDB_HOST, port=RDB_PORT)
try:
r.db_create(db).run(connection)
r.db(db).table_create('test').run(connection)
def fin():
print("teardown db")
r.db_drop(db).run(connection)
connection.close()
request.addfinalizer(fin)
except RqlRuntimeError:
print('App database already exists!')
return db
def test_dependency_provider(container_factory, setup_db):
db = setup_db
class ExampleService(object):
name = "exampleservice"
rdbc = RDB(db)
@dummy
def write(self, value):
result = r.table('test').insert({'data': value}).run(self.rdbc)
return result
@dummy
def read(self, id):
return r.table('test').get(id).run(self.rdbc)
config = {
RDB_KEY: {
'exampleservice': {
'RDB_HOST': RDB_HOST,
'RDB_PORT': RDB_PORT,
}
}
}
container = container_factory(ExampleService, config)
container.start()
rdb_provider = get_extension(container, RDB, db=db)
# verify setup
assert rdb_provider.RDB_DB == db
# verify get_dependency
worker_ctx = Mock() # don't need a real worker context
rdb = rdb_provider.get_dependency(worker_ctx)
assert rdb_provider.rdb_connections[worker_ctx] is rdb
# verify multiple workers
worker_ctx_2 = Mock()
rdb_2 = rdb_provider.get_dependency(worker_ctx_2)
assert rdb_provider.rdb_connections == WeakKeyDictionary({
worker_ctx: rdb,
worker_ctx_2: rdb_2
})
# verify weakref
del worker_ctx_2
assert rdb_provider.rdb_connections == WeakKeyDictionary({
worker_ctx: rdb
})
# verify teardown
# TODO(cad): Add extra testing here
rdb_provider.worker_teardown(worker_ctx)
assert not rdb.is_open()
assert worker_ctx not in rdb_provider.rdb_connections
def test_end_to_end(container_factory, tmpdir, setup_db):
db = setup_db
class ExampleService(object):
name = "exampleservice"
rdbc = RDB(db)
@dummy
def write(self, value):
result = r.table('test').insert({'data': value}).run(self.rdbc)
return result
@dummy
def read(self, id):
return r.table('test').get(id).run(self.rdbc)
rdbc = r.connect(host=RDB_HOST, port=RDB_PORT)
config = {
RDB_KEY: {
'exampleservice': {
'RDB_HOST': RDB_HOST,
'RDB_PORT': RDB_PORT,
}
}
}
container = container_factory(ExampleService, config)
container.start()
# write through the service
with entrypoint_hook(container, "write") as write:
write("foobar")
# verify changes written to disk
entries = r.db(db).table('test').run(rdbc)
data = list(entries)[0]
assert data['data'] == u'foobar'
pk = data['id']
# read through the service
with entrypoint_hook(container, "read") as read:
assert read(pk)['data'] == "foobar"
| apache-2.0 | -1,585,711,963,583,978,000 | 24.222222 | 75 | 0.609031 | false |