repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool) |
---|---|---|---|---|---|---|---|---|---|---|
litex-hub/litespi | litespi/ids.py | 1 | 2011 | #
# This file is part of LiteSPI
#
# Copyright (c) 2020 Antmicro <www.antmicro.com>
# SPDX-License-Identifier: BSD-2-Clause
import enum
class CFIManufacturerIDs(enum.Enum):
"""Manufacturer IDs from the CFI standard.
Common Flash Interface (CFI) is a standard introduced by the Joint Electron
Device Engineering Council (JEDEC) to allow in-system or programmer reading
of flash device characteristics, which is equivalent to having data sheet
parameters located in the device.
"""
AMD = 0x0001
AMIC = 0x0037
ATMEL = 0x001F
EON = 0x001C
ESMT = 0x008C
FUJITSU = 0x0004
HYUNDAI = 0x00AD
INTEL = 0x0089
ISSI = 0x009D
MACRONIX = 0x00C2
NEC = 0x0010
PMC = 0x009D
SAMSUNG = 0x00EC
SANYO = 0x0062
SHARP = 0x00B0
SST = 0x00BF
ST = 0x0020 # STMicroelectronics
MICRON = 0x002C
THOMSON = 0x00BA
TOSHIBA = 0x0098
WINBOND = 0x00DA
class SpiNorFlashManufacturerIDs(enum.Enum):
"""Manufacturer IDs for SPI NOR flash chips.
The first byte returned from the flash after sending opcode SPINOR_OP_RDID.
Sometimes these are the same as CFI IDs, but sometimes they aren't.
"""
AMIC = CFIManufacturerIDs.AMIC.value
ATMEL = CFIManufacturerIDs.ATMEL.value
EON = CFIManufacturerIDs.EON.value
ESMT = CFIManufacturerIDs.ESMT.value
FUJITSU = CFIManufacturerIDs.FUJITSU.value
GIGADEVICE = 0xc8
INTEL = CFIManufacturerIDs.INTEL.value
ISSI = CFIManufacturerIDs.ISSI.value
ST = CFIManufacturerIDs.ST.value
MICRON = CFIManufacturerIDs.MICRON.value
MACRONIX = CFIManufacturerIDs.MACRONIX.value
SPANSION = CFIManufacturerIDs.AMD.value
SANYO = CFIManufacturerIDs.SANYO.value
SST = CFIManufacturerIDs.SST.value
THOMSON = CFIManufacturerIDs.THOMSON.value
WINBOND = 0xef # Also used by some Spansion
NONJEDEC = 0x0
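# Illustrative usage only (added; not part of the original LiteSPI file): a minimal
# sketch of mapping the first RDID byte to a vendor name with the enum above.  The
# example byte value is made up.
def _example_vendor_name(rdid_byte=0xEF):
    try:
        return SpiNorFlashManufacturerIDs(rdid_byte).name  # 0xEF -> 'WINBOND'
    except ValueError:
        return SpiNorFlashManufacturerIDs.NONJEDEC.name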
| bsd-2-clause | 9,123,095,271,623,639,000 | 30.421875 | 79 | 0.662854 | false |
akrherz/iem | cgi-bin/request/raob.py | 1 | 3216 | """
Download interface for data from RAOB network
"""
from io import StringIO
import datetime
import pytz
from paste.request import parse_formvars
from pyiem.util import get_dbconn
from pyiem.network import Table as NetworkTable
def m(val):
"""Helper"""
if val is None:
return "M"
return val
def fetcher(station, sts, ets):
"""Do fetching"""
sio = StringIO()
dbconn = get_dbconn("postgis")
cursor = dbconn.cursor("raobstreamer")
stations = [station]
if station.startswith("_"):
nt = NetworkTable("RAOB", only_online=False)
stations = nt.sts[station]["name"].split("--")[1].strip().split(",")
cursor.execute(
"""
SELECT f.valid at time zone 'UTC', p.levelcode, p.pressure, p.height,
p.tmpc, p.dwpc, p.drct, round((p.smps * 1.94384)::numeric,0),
p.bearing, p.range_miles, f.station from
raob_profile p JOIN raob_flights f on
(f.fid = p.fid) WHERE f.station in %s and valid >= %s and valid < %s
""",
(tuple(stations), sts, ets),
)
sio.write(
(
"station,validUTC,levelcode,pressure_mb,height_m,tmpc,"
"dwpc,drct,speed_kts,bearing,range_sm\n"
)
)
for row in cursor:
sio.write(
("%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n")
% (
row[10],
m(row[0]),
m(row[1]),
m(row[2]),
m(row[3]),
m(row[4]),
m(row[5]),
m(row[6]),
m(row[7]),
m(row[8]),
m(row[9]),
)
)
return sio.getvalue().encode("ascii", "ignore")
def friendly_date(form, key):
"""More forgiving date conversion"""
val = form.get(key)
try:
val = val.strip()
if len(val.split()) == 1:
dt = datetime.datetime.strptime(val, "%m/%d/%Y")
else:
dt = datetime.datetime.strptime(val, "%m/%d/%Y %H:%M")
dt = dt.replace(tzinfo=pytz.UTC)
except Exception:
return (
"Invalid %s date provided, should be '%%m/%%d/%%Y %%H:%%M'"
" in UTC timezone"
) % (key,)
return dt
def application(environ, start_response):
"""Go Main Go"""
form = parse_formvars(environ)
sts = friendly_date(form, "sts")
ets = friendly_date(form, "ets")
for val in [sts, ets]:
if not isinstance(val, datetime.datetime):
headers = [("Content-type", "text/plain")]
start_response("500 Internal Server Error", headers)
return [val.encode("ascii")]
station = form.get("station", "KOAX")[:4]
if form.get("dl", None) is not None:
headers = [
("Content-type", "application/octet-stream"),
(
"Content-Disposition",
"attachment; filename=%s_%s_%s.txt"
% (
station,
sts.strftime("%Y%m%d%H"),
ets.strftime("%Y%m%d%H"),
),
),
]
else:
headers = [("Content-type", "text/plain")]
start_response("200 OK", headers)
return [fetcher(station, sts, ets)]
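# Hedged usage sketch (added; not in the original script, and the URL path below is an
# assumption based on the repository layout): the app reads station, sts, ets and an
# optional dl flag from the query string, with dates parsed by friendly_date() as
# '%m/%d/%Y' or '%m/%d/%Y %H:%M' in UTC, e.g.
#   /cgi-bin/request/raob.py?station=KOAX&sts=01/01/2020 00:00&ets=01/02/2020 00:00&dl=1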
| mit | -8,461,958,948,684,487,000 | 27.714286 | 76 | 0.504353 | false |
asreimer/pyAMISR | docs/conf.py | 1 | 12402 | # -*- coding: utf-8 -*-
#
# backscatter documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 29 14:27:57 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from visuamisr import __version__
# sys.path.insert(0, os.path.abspath('.'))
# mock modules for c extensions
# import sys
# from mock import Mock as MagicMock
# class Mock(MagicMock):
# @classmethod
# def __getattr__(cls, name):
# return Mock()
# MOCK_MODULES = ['numpy']
# sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'AMISR'
copyright = u'2019, asreimer'
author = u'asreimer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'github_user': 'asreimer',
'github_repo': 'visuamisr',
'description': 'A basic data visualization toolkit for AMISR radar data',
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'backscatter v2016.08'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'backscatterdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'visuamisr.tex', u'visuamisr Documentation',
u'Ashton Reimer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'visuamisr', u'visuamisr Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'visuamisr', u'visuamisr Documentation',
author, 'visuamisr', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3
# Allow duplicate toc entries.
#
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#
# epub_fix_images = False
# Scale large images.
#
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'
# If false, no index is generated.
#
# epub_use_index = True
| gpl-3.0 | 1,789,520,242,572,540,200 | 26.869663 | 80 | 0.691179 | false |
QuantiModo/QuantiModo-SDK-Python | swagger_client/models/user_variables.py | 1 | 8861 | # coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class UserVariables(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
UserVariables - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'user': 'int',
'variable': 'str',
'duration_of_action': 'int',
'filling_value': 'int',
'join_with': 'str',
'maximum_allowed_value': 'float',
'minimum_allowed_value': 'float',
'name': 'str',
'onset_delay': 'int',
'unit': 'str'
}
self.attribute_map = {
'user': 'user',
'variable': 'variable',
'duration_of_action': 'durationOfAction',
'filling_value': 'fillingValue',
'join_with': 'joinWith',
'maximum_allowed_value': 'maximumAllowedValue',
'minimum_allowed_value': 'minimumAllowedValue',
'name': 'name',
'onset_delay': 'onsetDelay',
'unit': 'unit'
}
self._user = None
self._variable = None
self._duration_of_action = None
self._filling_value = None
self._join_with = None
self._maximum_allowed_value = None
self._minimum_allowed_value = None
self._name = None
self._onset_delay = None
self._unit = None
@property
def user(self):
"""
Gets the user of this UserVariables.
User ID
:return: The user of this UserVariables.
:rtype: int
"""
return self._user
@user.setter
def user(self, user):
"""
Sets the user of this UserVariables.
User ID
:param user: The user of this UserVariables.
:type: int
"""
self._user = user
@property
def variable(self):
"""
Gets the variable of this UserVariables.
Variable DISPLAY name
:return: The variable of this UserVariables.
:rtype: str
"""
return self._variable
@variable.setter
def variable(self, variable):
"""
Sets the variable of this UserVariables.
Variable DISPLAY name
:param variable: The variable of this UserVariables.
:type: str
"""
self._variable = variable
@property
def duration_of_action(self):
"""
Gets the duration_of_action of this UserVariables.
Estimated duration of time following the onset delay in which a stimulus produces a perceivable effect
:return: The duration_of_action of this UserVariables.
:rtype: int
"""
return self._duration_of_action
@duration_of_action.setter
def duration_of_action(self, duration_of_action):
"""
Sets the duration_of_action of this UserVariables.
Estimated duration of time following the onset delay in which a stimulus produces a perceivable effect
:param duration_of_action: The duration_of_action of this UserVariables.
:type: int
"""
self._duration_of_action = duration_of_action
@property
def filling_value(self):
"""
Gets the filling_value of this UserVariables.
fillingValue
:return: The filling_value of this UserVariables.
:rtype: int
"""
return self._filling_value
@filling_value.setter
def filling_value(self, filling_value):
"""
Sets the filling_value of this UserVariables.
fillingValue
:param filling_value: The filling_value of this UserVariables.
:type: int
"""
self._filling_value = filling_value
@property
def join_with(self):
"""
Gets the join_with of this UserVariables.
joinWith
:return: The join_with of this UserVariables.
:rtype: str
"""
return self._join_with
@join_with.setter
def join_with(self, join_with):
"""
Sets the join_with of this UserVariables.
joinWith
:param join_with: The join_with of this UserVariables.
:type: str
"""
self._join_with = join_with
@property
def maximum_allowed_value(self):
"""
Gets the maximum_allowed_value of this UserVariables.
maximumAllowedValue
:return: The maximum_allowed_value of this UserVariables.
:rtype: float
"""
return self._maximum_allowed_value
@maximum_allowed_value.setter
def maximum_allowed_value(self, maximum_allowed_value):
"""
Sets the maximum_allowed_value of this UserVariables.
maximumAllowedValue
:param maximum_allowed_value: The maximum_allowed_value of this UserVariables.
:type: float
"""
self._maximum_allowed_value = maximum_allowed_value
@property
def minimum_allowed_value(self):
"""
Gets the minimum_allowed_value of this UserVariables.
minimumAllowedValue
:return: The minimum_allowed_value of this UserVariables.
:rtype: float
"""
return self._minimum_allowed_value
@minimum_allowed_value.setter
def minimum_allowed_value(self, minimum_allowed_value):
"""
Sets the minimum_allowed_value of this UserVariables.
minimumAllowedValue
:param minimum_allowed_value: The minimum_allowed_value of this UserVariables.
:type: float
"""
self._minimum_allowed_value = minimum_allowed_value
@property
def name(self):
"""
Gets the name of this UserVariables.
name
:return: The name of this UserVariables.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this UserVariables.
name
:param name: The name of this UserVariables.
:type: str
"""
self._name = name
@property
def onset_delay(self):
"""
Gets the onset_delay of this UserVariables.
onsetDelay
:return: The onset_delay of this UserVariables.
:rtype: int
"""
return self._onset_delay
@onset_delay.setter
def onset_delay(self, onset_delay):
"""
Sets the onset_delay of this UserVariables.
onsetDelay
:param onset_delay: The onset_delay of this UserVariables.
:type: int
"""
self._onset_delay = onset_delay
@property
def unit(self):
"""
Gets the unit of this UserVariables.
unit
:return: The unit of this UserVariables.
:rtype: str
"""
return self._unit
@unit.setter
def unit(self, unit):
"""
Sets the unit of this UserVariables.
unit
:param unit: The unit of this UserVariables.
:type: str
"""
self._unit = unit
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
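# Illustrative usage only (added; not part of the generated SDK): a minimal sketch of
# building a model instance and serialising it.  The field values are made-up examples.
if __name__ == "__main__":
    uv = UserVariables()
    uv.user = 1
    uv.variable = "Overall Mood"
    uv.unit = "/5"
    print(uv.to_dict())  # unset attributes simply come back as None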
| gpl-2.0 | -910,932,272,836,915,700 | 26.180982 | 110 | 0.569687 | false |
saatvikshah1994/SmartMM | KeywordExtraction/unsupervised/keywordextraction_rake.py | 1 | 10166 | from __future__ import absolute_import
from __future__ import print_function
import re
import operator
import six
from six.moves import range
from utilities import load_data,cross_validate
from sklearn.pipeline import Pipeline
from keyword_metrics import keyword_prf
from nltk.stem import PorterStemmer
import gensim
class Rake_KeywordSelection(object):
def __init__(self, stop_words_path, min_char_length=1, max_words_length=5, min_keyword_frequency=1, num_keywords=10,to_stem=False):
self.__stop_words_path = stop_words_path
self.__stop_words_pattern = self.build_stop_word_regex(stop_words_path)
self.__min_char_length = min_char_length
self.__max_words_length = max_words_length
self.__min_keyword_frequency = min_keyword_frequency
self.num_keywords = num_keywords
def fit(self,X,y=None):
return self
def predict(self, X):
y_pred = []
keyword_candidates_lst = []
keyword_candidates_scores_lst = []
for text in X:
sentence_list = self.split_sentences(text)
phrase_list = self.generate_candidate_keywords(sentence_list, self.__stop_words_pattern, self.__min_char_length, self.__max_words_length)
word_scores = self.calculate_word_scores(phrase_list)
keyword_candidates = self.generate_candidate_keyword_scores(phrase_list, word_scores, self.__min_keyword_frequency)
keyword_candidates_lst.append([keyword for (keyword,score) in keyword_candidates.iteritems()])
keyword_candidates_scores_lst.append(keyword_candidates)
corpus_tfidf,dictionary = self.score_keyphrases_by_tfidf(keyword_candidates_lst)
inv_dict = {val : key for key,val in dictionary.iteritems()}
for idx,keyword_cand_score_pairs in enumerate(keyword_candidates_scores_lst):
tfidf_keyvals = {tf_id : tf_score for tf_id,tf_score in corpus_tfidf[idx]}
keywords_available = [dictionary[tf_id] for tf_id in tfidf_keyvals.keys()]
for keyword in keyword_cand_score_pairs.keys():
if keyword in keywords_available:
keyword_cand_score_pairs[keyword] *= tfidf_keyvals[inv_dict[keyword]]
else:
keyword_cand_score_pairs[keyword] *= 0
sorted_keywords = sorted(six.iteritems(keyword_cand_score_pairs), key=operator.itemgetter(1), reverse=True)[:self.num_keywords]
y_pred.append([keyword for keyword,score in sorted_keywords])
return y_pred
def score_keyphrases_by_tfidf(self, candidates):
# make gensim dictionary and corpus
dictionary = gensim.corpora.Dictionary(candidates)
corpus = [dictionary.doc2bow(candidate) for candidate in candidates]
# transform corpus with tf*idf model
tfidf = gensim.models.TfidfModel(corpus)
corpus_tfidf = tfidf[corpus]
return corpus_tfidf, dictionary
def is_number(self,s):
try:
float(s) if '.' in s else int(s)
return True
except ValueError:
return False
def load_stop_words(self,stop_word_file):
"""
Utility function to load stop words from a file and return as a list of words
@param stop_word_file Path and file name of a file containing stop words.
@return list A list of stop words.
"""
stop_words = []
for line in open(stop_word_file):
if line.strip()[0:1] != "#":
for word in line.split(): # in case more than one per line
stop_words.append(word)
return stop_words
def separate_words(self,text, min_word_return_size):
"""
Utility function to return a list of all words that have a length greater than a specified number of characters.
@param text The text that must be split into words.
@param min_word_return_size The minimum number of characters a word must have to be included.
"""
splitter = re.compile('[^a-zA-Z0-9_\\+\\-/]')
words = []
for single_word in splitter.split(text):
current_word = single_word.strip().lower()
#leave numbers in phrase, but don't count as words, since they tend to invalidate scores of their phrases
if len(current_word) > min_word_return_size and current_word != '' and not self.is_number(current_word):
words.append(current_word)
return words
def split_sentences(self,text):
"""
Utility function to return a list of sentences.
@param text The text that must be split into sentences.
"""
sentence_delimiters = re.compile(u'[\\[\\]\n.!?,;:\t\\-\\"\\(\\)\\\'\u2019\u2013]')
sentences = sentence_delimiters.split(text)
return sentences
def build_stop_word_regex(self,stop_word_file_path):
stop_word_list = self.load_stop_words(stop_word_file_path)
stop_word_regex_list = []
for word in stop_word_list:
word_regex = '\\b' + word + '\\b'
stop_word_regex_list.append(word_regex)
stop_word_pattern = re.compile('|'.join(stop_word_regex_list), re.IGNORECASE)
return stop_word_pattern
def generate_candidate_keywords(self,sentence_list, stopword_pattern, min_char_length=1, max_words_length=5):
phrase_list = []
for s in sentence_list:
tmp = re.sub(stopword_pattern, '|', s.strip())
phrases = tmp.split("|")
for phrase in phrases:
phrase = phrase.strip().lower()
if phrase != "" and self.is_acceptable(phrase, min_char_length, max_words_length):
phrase_list.append(phrase)
return phrase_list
def is_acceptable(self,phrase, min_char_length, max_words_length):
# a phrase must have a min length in characters
if len(phrase) < min_char_length:
return 0
# a phrase must have a max number of words
words = phrase.split()
if len(words) > max_words_length:
return 0
digits = 0
alpha = 0
for i in range(0, len(phrase)):
if phrase[i].isdigit():
digits += 1
elif phrase[i].isalpha():
alpha += 1
# a phrase must have at least one alpha character
if alpha == 0:
return 0
# a phrase must have more alpha than digits characters
if digits > alpha:
return 0
return 1
def calculate_word_scores(self,phraseList):
word_frequency = {}
word_degree = {}
for phrase in phraseList:
word_list = self.separate_words(phrase, 0)
word_list_length = len(word_list)
word_list_degree = word_list_length - 1
#if word_list_degree > 3: word_list_degree = 3 #exp.
for word in word_list:
word_frequency.setdefault(word, 0)
word_frequency[word] += 1
word_degree.setdefault(word, 0)
word_degree[word] += word_list_degree #orig.
#word_degree[word] += 1/(word_list_length*1.0) #exp.
for item in word_frequency:
word_degree[item] = word_degree[item] + word_frequency[item]
# Calculate word scores = deg(w)/freq(w)
word_score = {}
for item in word_frequency:
word_score.setdefault(item, 0)
word_score[item] = word_degree[item] / (word_frequency[item] * 1.0) #orig.
#word_score[item] = word_frequency[item]/(word_degree[item] * 1.0) #exp.
return word_score
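# Worked example (added for clarity; not in the original code): for a single candidate
# phrase "keyword extraction engine", every word has freq = 1 and co-occurrence degree
# 2, so deg(w) = 2 + 1 = 3 and score(w) = deg(w)/freq(w) = 3.0; the phrase score
# computed below is the sum over its words, i.e. 9.0.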
def generate_candidate_keyword_scores(self,phrase_list, word_score, min_keyword_frequency=1):
keyword_candidates = {}
for phrase in phrase_list:
if min_keyword_frequency > 1:
if phrase_list.count(phrase) < min_keyword_frequency:
continue
keyword_candidates.setdefault(phrase, 0)
word_list = self.separate_words(phrase, 0)
candidate_score = 0
for word in word_list:
candidate_score += word_score[word]
keyword_candidates[phrase] = candidate_score
return keyword_candidates
if __name__ == '__main__':
ids,docs,keywords_doc = load_data()
# ids = ids[:10]
# docs = docs[:10]
# keywords_doc = keywords_doc[:10]
to_stem=False
pipeline = Pipeline([
('keyword_selector',Rake_KeywordSelection("SmartStoplist.txt",5,3,4,num_keywords=10,to_stem=to_stem))
])
# pipeline.fit(docs)
# print(pipeline.predict(docs))
cross_validate((docs,keywords_doc),pipeline,keyword_prf,stem_y=to_stem)
# def predict(self, X):
# y_pred = []
# for text in X:
# sentence_list = self.split_sentences(text)
#
# phrase_list = self.generate_candidate_keywords(sentence_list, self.__stop_words_pattern, self.__min_char_length, self.__max_words_length)
#
# word_scores = self.calculate_word_scores(phrase_list)
#
# keyword_candidates = self.generate_candidate_keyword_scores(phrase_list, word_scores, self.__min_keyword_frequency)
#
# sorted_keywords = sorted(six.iteritems(keyword_candidates), key=operator.itemgetter(1), reverse=True)
# if to_stem:
# top_keywords = []
# stemmer = PorterStemmer()
# for keyword,score in sorted_keywords:
# if len(top_keywords) == self.num_keywords:
# y_pred.append(top_keywords)
# break
# try:
# stemmed_keyword = ' '.join([str(stemmer.stem(word)) for word in keyword.split()])
# except:
# stemmed_keyword = keyword
# if stemmed_keyword not in top_keywords:
# top_keywords.append(stemmed_keyword)
# else:
# y_pred.append([keyword for (keyword,score) in sorted_keywords[:self.num_keywords]])
# return y_pred
| mit | 2,615,882,647,518,628,000 | 41.008264 | 151 | 0.595908 | false |
community-ssu/telepathy-gabble | tests/twisted/tubes/accept-muc-dbus-tube.py | 1 | 4959 | import dbus
from servicetest import assertEquals, assertNotEquals, call_async, EventPattern
from gabbletest import exec_test, acknowledge_iq, make_muc_presence
import constants as cs
from twisted.words.xish import xpath
import ns
from mucutil import join_muc_and_check
def test(q, bus, conn, stream, access_control):
conn.Connect()
_, iq_event = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'))
acknowledge_iq(stream, iq_event.stanza)
muc = '[email protected]'
_, _, test_handle, bob_handle = \
join_muc_and_check(q, bus, conn, stream, muc)
# Bob offers a stream tube
bob_bus_name = ':2.Ym9i'
presence = make_muc_presence('owner', 'moderator', '[email protected]', 'bob')
tubes = presence.addElement((ns.TUBES, 'tubes'))
tube = tubes.addElement((None, 'tube'))
tube['type'] = 'dbus'
tube['initiator'] = '[email protected]/bob'
tube['stream-id'] = '10'
tube['id'] = '1'
tube['service'] = 'com.example.Test'
tube['dbus-name'] = bob_bus_name
parameters = tube.addElement((None, 'parameters'))
parameter = parameters.addElement((None, 'parameter'))
parameter['type'] = 'str'
parameter['name'] = 'foo'
parameter.addContent('bar')
stream.send(presence)
# tubes channel is created
event = q.expect('dbus-signal', signal='NewChannels')
channels = event.args[0]
path, props = channels[0]
# tube channel is created
event = q.expect('dbus-signal', signal='NewChannels')
channels = event.args[0]
path, props = channels[0]
assertEquals(cs.CHANNEL_TYPE_DBUS_TUBE, props[cs.CHANNEL_TYPE])
assertEquals('[email protected]/bob', props[cs.INITIATOR_ID])
bob_handle = props[cs.INITIATOR_HANDLE]
assertEquals([cs.CHANNEL_IFACE_GROUP, cs.CHANNEL_IFACE_TUBE],
props[cs.INTERFACES])
assertEquals(False, props[cs.REQUESTED])
assertEquals('[email protected]', props[cs.TARGET_ID])
assertEquals('com.example.Test', props[cs.DBUS_TUBE_SERVICE_NAME])
assertEquals({'foo': 'bar'}, props[cs.TUBE_PARAMETERS])
assertEquals([cs.SOCKET_ACCESS_CONTROL_CREDENTIALS,
cs.SOCKET_ACCESS_CONTROL_LOCALHOST],
props[cs.DBUS_TUBE_SUPPORTED_ACCESS_CONTROLS])
tube_chan = bus.get_object(conn.bus_name, path)
tube_iface = dbus.Interface(tube_chan, cs.CHANNEL_IFACE_TUBE)
dbus_tube_iface = dbus.Interface(tube_chan, cs.CHANNEL_TYPE_DBUS_TUBE)
tube_chan_iface = dbus.Interface(tube_chan, cs.CHANNEL)
# only Bob is in DBusNames
dbus_names = tube_chan.Get(cs.CHANNEL_TYPE_DBUS_TUBE, 'DBusNames', dbus_interface=cs.PROPERTIES_IFACE)
assertEquals({bob_handle: bob_bus_name}, dbus_names)
call_async(q, dbus_tube_iface, 'Accept', access_control)
return_event, names_changed1, names_changed2, presence_event = q.expect_many(
EventPattern('dbus-return', method='Accept'),
EventPattern('dbus-signal', signal='DBusNamesChanged', interface=cs.CHANNEL_TYPE_DBUS_TUBE),
EventPattern('dbus-signal', signal='DBusNamesChanged', interface=cs.CHANNEL_TYPE_DBUS_TUBE),
EventPattern('stream-presence', to='[email protected]/test'))
tube_addr = return_event.value[0]
assert len(tube_addr) > 0
# check presence stanza
tube_node = xpath.queryForNodes('/presence/tubes/tube', presence_event.stanza)[0]
assertEquals('[email protected]/bob', tube_node['initiator'])
assertEquals('com.example.Test', tube_node['service'])
assertEquals('10', tube_node['stream-id'])
assertEquals('dbus', tube_node['type'])
assertEquals('1', tube_node['id'])
self_bus_name = tube_node['dbus-name']
tubes_self_handle = tube_chan.GetSelfHandle(dbus_interface=cs.CHANNEL_IFACE_GROUP)
assertNotEquals(0, tubes_self_handle)
# both of us are in DBusNames now
dbus_names = tube_chan.Get(cs.CHANNEL_TYPE_DBUS_TUBE, 'DBusNames', dbus_interface=cs.PROPERTIES_IFACE)
assertEquals({bob_handle: bob_bus_name, tubes_self_handle: self_bus_name}, dbus_names)
added, removed = names_changed1.args
assertEquals({bob_handle: bob_bus_name}, added)
assertEquals([], removed)
added, removed = names_changed2.args
assertEquals({tubes_self_handle: self_bus_name}, added)
assertEquals([], removed)
tube_chan_iface.Close()
q.expect_many(
EventPattern('dbus-signal', signal='Closed'),
EventPattern('dbus-signal', signal='ChannelClosed'))
if __name__ == '__main__':
# We can't use t.exec_dbus_tube_test() as we can use only the muc bytestream
exec_test(lambda q, bus, conn, stream:
test(q, bus, conn, stream, cs.SOCKET_ACCESS_CONTROL_CREDENTIALS))
exec_test(lambda q, bus, conn, stream:
test(q, bus, conn, stream, cs.SOCKET_ACCESS_CONTROL_LOCALHOST))
| lgpl-2.1 | -3,745,408,645,259,888,000 | 39.647541 | 106 | 0.677959 | false |
trustcircleglobal/tcg-gae | tcg_gae/utils.py | 1 | 1098 | import arrow
import datetime
import uuid as original_uuid
import random
# string utils
def uuid():
return str(original_uuid.uuid4()).replace('-', '')
def random_code(size):
return ''.join(
str(random.randrange(0, 9))
for i in xrange(size))
# date time utils
def now():
return datetime.datetime.utcnow()
def now_delta(**kwargs):
return now() + datetime.timedelta(**kwargs)
def avoid_weekend(start_time):
weekday = start_time.weekday()
if weekday == 5: # saturday
return start_time + datetime.timedelta(days=2)
elif weekday == 6: # sunday
return start_time + datetime.timedelta(days=1)
return start_time
def convert_date_time_between_timezones(date_time, from_tz, to_tz):
return arrow.get(date_time, from_tz).to(
to_tz).datetime.replace(tzinfo=None)
def convert_from_utc(date_time, tz):
if date_time is None:
return None
return convert_date_time_between_timezones(date_time, 'UTC', tz)
def convert_to_utc(date_time, tz):
return convert_date_time_between_timezones(date_time, tz, 'UTC')
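# Illustrative usage only (added; not part of the original module): a minimal sketch of
# the timezone helpers above.  The timestamp and zone name are made-up examples.
if __name__ == '__main__':
    utc_dt = datetime.datetime(2020, 1, 1, 12, 0)
    local = convert_from_utc(utc_dt, 'US/Pacific')         # -> 2020-01-01 04:00:00
    assert convert_to_utc(local, 'US/Pacific') == utc_dt   # round-trips back to UTC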
| isc | -3,920,932,696,313,604,600 | 21.875 | 68 | 0.666667 | false |
chrisburr/lhcb-talky | run_tests.py | 1 | 23553 | #!/usr/bin/env python
import tempfile
import os
import shutil
import unittest
from io import BytesIO
from werkzeug.datastructures import MultiDict
import talky
class TalkyBaseTestCase(unittest.TestCase):
def setUp(self):
# Set up a dummy database
self.db_fd, talky.app.config['DATABASE_FILE'] = tempfile.mkstemp()
talky.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + talky.app.config['DATABASE_FILE']
talky.app.config['TESTING'] = True
# Disable CSRF tokens for unit tests
talky.app.config['WTF_CSRF_ENABLED'] = False
# Set up a dummy location for uploaded files
talky.app.config['FILE_PATH'] = tempfile.mkdtemp()
# Prepare the test client
self.client = talky.app.test_client()
# Prevent sending email
talky.mail.send = lambda msg: print(f'Skipped sending {msg}')
# Fill the dummy database
with talky.app.app_context():
from talky import create_database
create_database.build_sample_db(fast=True)
def tearDown(self):
os.close(self.db_fd)
os.unlink(talky.app.config['DATABASE_FILE'])
shutil.rmtree(talky.app.config['FILE_PATH'])
def login(self, username, password):
return self.client.post('/secure/login/', data=dict(
email=username,
password=password,
), follow_redirects=True)
def logout(self):
return self.client.get('/secure/logout', follow_redirects=True)
def get_talk(self, *, experiment=None, min_submissions=0, min_comments=0):
"""Get a talk matching the criteria passed as arguments."""
with talky.app.app_context():
for talk in talky.schema.Talk.query.all():
if experiment is not None and talk.experiment.name != experiment:
continue
if len(talk.submissions.all()) < min_submissions:
continue
if len(talk.comments) < min_comments:
continue
return talk
raise ValueError('Invalid parameters passed', experiment,
min_submissions, min_comments)
class TalkyAuthTestCase(TalkyBaseTestCase):
def test_login_logout(self):
# Valid login for admin
rv = self.login('admin', 'admin')
assert b'Admin mode' in rv.data
assert b'User mode' in rv.data
rv = self.logout()
assert b'login_user_form' in rv.data
# Valid login for user
rv = self.login('userlhcb', 'user')
assert b'LHCb - userlhcb' in rv.data
assert b'Admin mode' not in rv.data
rv = self.logout()
assert b'login_user_form' in rv.data
# Invalid logins attempts
rv = self.login('', '')
assert b'Email not provided' in rv.data
rv = self.login('invalid_user', '')
assert b'Password not provided' in rv.data
rv = self.login('invalid_user', 'wrong_password')
assert b'Invalid username/password combination' in rv.data
rv = self.login('admin', 'wrong_password')
assert b'Invalid username/password combination' in rv.data
class TalkyTalkViewTestCase(TalkyBaseTestCase):
def test_check_view_key(self):
talk = self.get_talk()
rv = self.client.get(f'/view/{talk.id}/{talk.view_key}/')
assert rv.status == '200 OK'
assert talk.title.encode('utf-8') in rv.data
assert talk.upload_key.encode('utf-8') not in rv.data
rv = self.client.get(f'/view/{talk.id}/bad_view_key/')
assert rv.status == '404 NOT FOUND'
rv = self.client.get(f'/view/{talk.id}/{talk.upload_key}/')
assert rv.status == '404 NOT FOUND'
def test_view_as_admin(self):
talk = self.get_talk()
self.login('admin', 'admin')
rv = self.client.get(f'/view/{talk.id}/{talk.view_key}/')
self.logout()
assert rv.status == '200 OK'
assert talk.title.encode('utf-8') in rv.data
assert b'Upload submission' in rv.data
assert talk.upload_key.encode('utf-8') in rv.data
def test_view_as_user(self):
talk = self.get_talk(experiment='LHCb')
self.login('userlhcb', 'user')
rv = self.client.get(f'/view/{talk.id}/{talk.view_key}/')
self.logout()
assert rv.status == '200 OK'
assert talk.title.encode('utf-8') in rv.data
assert b'Upload submission' in rv.data
assert talk.upload_key.encode('utf-8') in rv.data
self.login('userbelle', 'user')
rv = self.client.get(f'/view/{talk.id}/{talk.view_key}/')
self.logout()
assert rv.status == '200 OK'
assert talk.title.encode('utf-8') in rv.data
assert b'Upload submission' not in rv.data
assert talk.upload_key.encode('utf-8') not in rv.data
class TalkySubmissionTestCase(TalkyBaseTestCase):
def test_check_upload_key(self):
talk = self.get_talk()
rv = self.client.get(f'/upload/{talk.id}/{talk.upload_key}/')
assert rv.status == '200 OK'
assert talk.title.encode('utf-8') in rv.data
rv = self.client.get(f'/upload/{talk.id}/bad_upload_key/')
assert rv.status == '404 NOT FOUND'
rv = self.client.get(f'/upload/{talk.id}/{talk.view_key}/')
assert rv.status == '404 NOT FOUND'
def test_upload(self):
talk = self.get_talk()
with talky.app.app_context():
before = {
s.id: (s.filename, s.version)
for s in talky.schema.Talk.query.get(talk.id).submissions.all()
}
with BytesIO(b' '*1024*1024*10) as f:
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(file=(f, 'large_file.pdf')),
follow_redirects=True
)
assert rv.status == '200 OK'
with talky.app.app_context():
after = {
s.id: (s.filename, s.version)
for s in talky.schema.Talk.query.get(talk.id).submissions.all()
}
# Validate the new submission and uploaded file
new_submissions = set(after.items()) - set(before.items())
assert len(new_submissions) == 1
submission_id, (fn, version) = new_submissions.pop()
fn = f'{talky.app.config["FILE_PATH"]}/{talk.id}/{version}/{fn}'
with open(fn, 'rt') as f:
file_contents = f.read()
assert len(file_contents) == 1024*1024*10
assert set(file_contents) == set([' '])
def test_upload_invalid_request(self):
talk = self.get_talk()
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(),
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
with BytesIO(b'Example contents') as f:
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(file=(f, '')),
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'No file specified' in rv.data, rv.data
def test_upload_bad_extension(self):
talk = self.get_talk()
with BytesIO(b'Example contents') as f:
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(file=(f, 'bad_file')),
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'Invalid filename or extension' in rv.data
with BytesIO(b'Example contents') as f:
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(file=(f, 'bad_file.zip')),
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'Invalid filename or extension' in rv.data
def test_upload_large_file(self):
talk = self.get_talk()
with BytesIO(b' '*1024*1024*50) as f:
rv = self.client.post(
f'/upload/{talk.id}/{talk.upload_key}/',
data=dict(file=(f, 'large_file.pdf')),
follow_redirects=True
)
assert rv.status == '413 REQUEST ENTITY TOO LARGE'
def test_valid_delete(self):
talk = self.get_talk(experiment='LHCb', min_submissions=1)
with talky.app.app_context():
submission = talky.schema.Talk.query.get(talk.id).submissions.all()[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '200 OK'
self.login('userlhcb', 'user')
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/{submission.id}/delete/',
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
# Should redirect to the talk view page
assert talk.title.encode('utf-8') in rv.data
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '404 NOT FOUND'
def test_invalid_delete(self):
talk = self.get_talk(experiment='LHCb', min_submissions=1)
with talky.app.app_context():
submission = talky.schema.Talk.query.get(talk.id).submissions.all()[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '200 OK'
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/{submission.id}/delete/',
follow_redirects=True
)
assert rv.status == '404 NOT FOUND'
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '200 OK'
def test_invalid_delete_wrong_experiment(self):
talk = self.get_talk(experiment='Belle', min_submissions=1)
with talky.app.app_context():
submission = talky.schema.Talk.query.get(talk.id).submissions.all()[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '200 OK'
self.login('userlhcb', 'user')
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/{submission.id}/delete/',
follow_redirects=True
)
self.logout()
assert rv.status == '404 NOT FOUND'
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/submission/v{submission.version}/',
follow_redirects=True
)
assert rv.status == '200 OK'
class TalkyConferenceTestCase(TalkyBaseTestCase):
def test_create_with_url(self):
self.login('userlhcb', 'user')
rv = self.client.post(
f'/secure/user/conference/new/?url=%2Fsecure%2Fuser%2Fconference%2F',
data=dict(
name='Example name WABW',
venue='Example venue WABW',
start_date='2000-05-03 01:35:00',
url='https://home.cern/'
),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'Save and Continue Editing' not in rv.data
assert b'Example name WABW' in rv.data
assert b'Example venue WABW' in rv.data
assert b'2000-05-03' in rv.data
assert b'https://home.cern/' in rv.data
def test_create_without_url(self):
self.login('userlhcb', 'user')
rv = self.client.post(
f'/secure/user/conference/new/?url=%2Fsecure%2Fuser%2Fconference%2F',
data=dict(
name='Example name XABX',
venue='Example venue XABX',
start_date='2021-05-03 01:35:00'
),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'Save and Continue Editing' not in rv.data
assert b'Example name XABX' in rv.data
assert b'Example venue XABX' in rv.data
assert b'2021-05-03' in rv.data
class TalkyContactTestCase(TalkyBaseTestCase):
def test_create(self):
self.login('userlhcb', 'user')
rv = self.client.post(
'/secure/user/contact/new/?url=%2Fsecure%2Fuser%2Fcontact%2F',
data=dict(email='[email protected]'),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'Save and Continue Editing' not in rv.data
assert b'[email protected]' in rv.data
def test_delete(self):
# Ensure the contact with id == 1 exists
self.login('userlhcb', 'user')
rv = self.client.get(
'/secure/user/contact/',
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'<input id="id" name="id" type="hidden" value="1">' in rv.data
# Remove the contact with id == 1
self.login('userlhcb', 'user')
rv = self.client.post(
'/secure/user/contact/delete/',
data=dict(id=1, url='/secure/user/contact/'),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'<input id="id" name="id" type="hidden" value="1">' not in rv.data
class TalkyCategoryTestCase(TalkyBaseTestCase):
def test_create(self):
self.login('userlhcb', 'user')
rv = self.client.post(
'/secure/user/category/new/?url=%2Fsecure%2Fuser%2Fcategory%2F',
data=MultiDict([('name', 'Semileptonic'), ('contacts', '4'), ('contacts', '7')]),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'Save and Continue Editing' not in rv.data
assert b'Semileptonic' in rv.data
def test_delete(self):
# Ensure the category with id == 1 exists
self.login('userlhcb', 'user')
rv = self.client.get(
'/secure/user/category/',
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'<input id="id" name="id" type="hidden" value="1">' in rv.data
# Remove the category with id == 1
self.login('userlhcb', 'user')
rv = self.client.post(
'/secure/user/category/delete/',
data=dict(id=1, url='/secure/user/category/'),
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
assert b'<input id="id" name="id" type="hidden" value="1">' not in rv.data
class TalkyCommentsTestCase(TalkyBaseTestCase):
@classmethod
def format_coment(cls, comment):
formatted_comment = ''
for line in comment.splitlines():
formatted_comment += f'{line }<br \>'
return formatted_comment.encode('utf-8')
def test_get(self):
talk = self.get_talk()
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/comment/',
follow_redirects=True
)
assert rv.status == '404 NOT FOUND'
def test_top_comment(self):
talk = self.get_talk()
comment = dict(
parent_comment_id='None',
name='First 1489 Last',
email='[email protected]',
comment='Example\n\n comment\n 487'
)
# Ensure comments can't be posted without the view_key
rv = self.client.post(
f'/view/{talk.id}/bad_view_key/comment/',
data=comment,
follow_redirects=True
)
assert rv.status == '404 NOT FOUND', rv.status
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'First 1489 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert b'Example comment 487' not in rv.data
# Ensure comments can be posted
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=comment,
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'First 1489 Last' in rv.data
assert b'[email protected]' in rv.data
assert self.format_coment(comment['comment']) in rv.data, comment['comment']
# TODO Test top ness
def test_invalid_top_comment(self):
talk = self.get_talk()
comment = dict(
parent_comment_id='None',
name='First 1236 Last',
email='[email protected]',
comment='Example comment 853'
)
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(comment['comment']) not in rv.data, comment['comment']
# Ensure incomplete comments can't be posted
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data={},
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(comment['comment']) not in rv.data, comment['comment']
_comment = comment.copy()
del _comment['parent_comment_id']
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(_comment['comment']) not in rv.data, _comment['comment']
_comment = comment.copy()
_comment['parent_comment_id'] = '99999'
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(_comment['comment']) not in rv.data, _comment['comment']
_comment = comment.copy()
del _comment['name']
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(_comment['comment']) not in rv.data, _comment['comment']
_comment = comment.copy()
del _comment['email']
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(_comment['comment']) not in rv.data, _comment['comment']
_comment = comment.copy()
_comment['email'] = 'invalid.email.address'
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'invalid.email.address' not in rv.data
assert self.format_coment(_comment['comment']) not in rv.data, _comment['comment']
_comment = comment.copy()
del _comment['comment']
rv = self.client.post(
f'/view/{talk.id}/{talk.view_key}/comment/',
data=_comment,
follow_redirects=True
)
assert rv.status == '400 BAD REQUEST'
assert b'First 1236 Last' not in rv.data
assert b'[email protected]' not in rv.data
assert self.format_coment(comment['comment']) not in rv.data, comment['comment']
def test_valid_delete(self):
talk = self.get_talk(experiment='LHCb', min_comments=1)
with talky.app.app_context():
comment = talky.schema.Talk.query.get(talk.id).comments[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert self.format_coment(comment.comment) in rv.data, comment.comment
self.login('userlhcb', 'user')
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/comment/{comment.id}/delete/',
follow_redirects=True
)
self.logout()
assert rv.status == '200 OK'
# Should redirect to the talk view page
assert self.format_coment(comment.comment) not in rv.data, comment.comment
def test_invalid_delete(self):
talk = self.get_talk(experiment='LHCb', min_comments=1)
with talky.app.app_context():
comment = talky.schema.Talk.query.get(talk.id).comments[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert self.format_coment(comment.comment) in rv.data, comment.comment
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/comment/{comment.id}/delete/',
follow_redirects=True
)
assert rv.status == '404 NOT FOUND'
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert self.format_coment(comment.comment) in rv.data, comment.comment
def test_invalid_delete_wrong_experiment(self):
talk = self.get_talk(experiment='Belle', min_comments=1)
with talky.app.app_context():
comment = talky.schema.Talk.query.get(talk.id).comments[0]
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert self.format_coment(comment.comment) in rv.data, comment.comment
self.login('userlhcb', 'user')
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/comment/{comment.id}/delete/',
follow_redirects=True
)
self.logout()
assert rv.status == '404 NOT FOUND'
rv = self.client.get(
f'/view/{talk.id}/{talk.view_key}/',
follow_redirects=True
)
assert rv.status == '200 OK'
assert self.format_coment(comment.comment) in rv.data, comment.comment
# TODO Add tests for child comments
if __name__ == '__main__':
unittest.main()
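# Hedged usage note (added; not in the original file): individual suites or tests can
# also be selected through unittest's CLI, e.g.
#   python run_tests.py TalkyAuthTestCase
#   python -m unittest run_tests.TalkyCommentsTestCase.test_top_comment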
| bsd-3-clause | -4,585,854,063,573,237,000 | 35.235385 | 102 | 0.56651 | false |
amaurywalbert/twitter | JSON_to_BIN/n3/n3_egos_favorites_JSON_to_BIN_500.py | 1 | 5961 | # -*- coding: latin1 -*-
################################################################################################
# Script to collect friends from a set of alters on Twitter
#
#
import tweepy, datetime, sys, time, json, os, os.path, shutil, time, struct, random
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Version 1 - Searches the favorites list and extracts the information needed to build the set of egos of the N2 network - the set of authors of likes.
######################################################################################################################################################################
################################################################################################
# Reads back the binary files with the tweet ids and the ids of their authors
################################################################################################
def read_arq_bin(file):
with open(file, 'r') as f:
f.seek(0,2)
tamanho = f.tell()
f.seek(0)
tweets_list = []
while f.tell() < tamanho:
buffer = f.read(favorites_struct.size)
tweet, user = favorites_struct.unpack(buffer)
status = {'tweet':tweet, 'user':user}
tweets_list.append(status)
return tweets_list
######################################################################################################################################################################
######################################################################################################################################################################
#
# Método principal do programa.
# Realiza teste e coleta dos favoritos do user especificado no arquivo.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    global i # number of users with files already collected / number of files in the directory
    for file in os.listdir(favorites_collected_dir): # Goes through the list of collected egos and, for each one, converts the favorites listed in the ego's file.
ego = file.split(".json")
ego = long(ego[0])
if not dictionary.has_key(ego):
i+=1
try:
with open(data_dir+str(ego)+".dat", 'w+b') as f:
with open(favorites_collected_dir+file,'r') as favorites:
for line in favorites:
favorite = json.loads(line)
like = favorite['id']
like = long(like)
user = favorite['user']['id']
user = long(user)
                            f.write(favorites_struct.pack(like, user)) # Writes the tweet id and the author's id
print (str(i)+" - ego convertido com sucesso!")
###
                # tweets_list = read_arq_bin(data_dir+str(ego)+".dat") # Function to convert the binary back into a json-format string.
# print tweets_list
####
except Exception as e:
print e
print("######################################################################")
print
print("######################################################################")
print("Conversão finalizada!")
print("######################################################################\n")
######################################################################################################################################################################
#
# PROGRAM START
#
######################################################################################################################################################################
################################### CONFIGURE THE LINES BELOW ####################################################
######################################################################################################################
qtde_egos = 500 #10, 50, 100, 500, full
favorites_collected_dir = "/home/amaury/coleta/favorites_collect/"+str(qtde_egos)+"/json/"####### Arquivo contendo a lista dos usuários ego já coletados em formato JSON
data_dir = "/home/amaury/coleta/n3/egos/"+str(qtde_egos)+"/bin/" ############# Diretório para armazenamento dos arquivos
formato = 'll' ####################################################### Long para id do tweet e outro long para autor
favorites_struct = struct.Struct(formato) ###################################### Inicializa o objeto do tipo struct para poder armazenar o formato específico no arquivo binário
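# Each packed record is two native signed longs ('ll'): the favorite tweet id
# followed by the id of the user who posted it. Minimal read-back sketch
# (illustrative only; "12345.dat" is a made-up file name):
#   with open(data_dir + "12345.dat", "rb") as f:
#       tweet_id, author_id = favorites_struct.unpack(f.read(favorites_struct.size))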
######################################################################################################################
######################################################################################################################
######################################################################################################################
# Creates the directories for storing the files
if not os.path.exists(data_dir):
os.makedirs(data_dir)
###### Initializing the dictionary - a hash table built from the files already created.
print
print("######################################################################")
print ("Criando tabela hash...")
dictionary = {} #################################################### Tabela {chave:valor} para facilitar a consulta dos usuários já coletados
i = 0 #Conta quantos usuários já foram coletados (todos arquivos no diretório)
for file in os.listdir(data_dir):
user_id = file.split(".dat")
user_id = long(user_id[0])
dictionary[user_id] = user_id
i+=1
print ("Tabela hash criada com sucesso...")
print("######################################################################\n")
# Runs the main method
if __name__ == "__main__": main() | gpl-3.0 | 3,029,549,472,148,146,000 | 53.935185 | 176 | 0.373904 | false |
alikzao/tao1 | tao1/libs/blog/routes.py | 1 | 1401 | from libs.blog.blog import *
from core.union import route
route( 'GET' , '/u/{u}', user, 'user' )
route( 'POST', '/add/mess', add_mess, 'add_mess' )
route( 'POST', '/add/fr', add_fr, 'add_fr' )
route( 'POST', '/add/sub', add_sub, 'add_sub' )
route( 'POST', '/main_page_signup', main_page_signup, 'tw_signup' )
route( 'POST', '/main_page_login', main_page_login, 'tw_login' )
route( 'GET' , '/signup/in/{mail}/{code}', signup_in, 'signup_in' )
route( 'GET' , '/add_email', add_email, 'add_email' )
route( 'POST', '/add_email', add_email_post, 'add_email_' )
route( 'GET' , '/list/users', list_users, 'list_users' )
route( 'GET' , '/subscribe', subscribe, 'subscribe' )
route( 'POST', '/subscribe', subscribe_post, 'subscribe_post' )
route( 'POST', '/subscribe/new', subscribe_new, 'subscribe_new' )
route( 'GET' , '/subscribe/in/{mail}/{code}', subscribe_in, 'subscribe_in' )
route( 'GET' , '/subscribe/out/{mail}/{code}', subscribe_out, 'subscribe_out' )
| mit | -7,137,969,375,725,347,000 | 55.04 | 91 | 0.428979 | false |
facebookresearch/ParlAI | parlai/chat_service/tasks/chatbot/worlds.py | 1 | 4333 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# py parlai/chat_service/tasks/overworld_demo/run.py --debug --verbose
from parlai.core.worlds import World
from parlai.chat_service.services.messenger.worlds import OnboardWorld
from parlai.core.agents import create_agent_from_shared
# ---------- Chatbot demo ---------- #
class MessengerBotChatOnboardWorld(OnboardWorld):
"""
Example messenger onboarding world for Chatbot Model.
"""
@staticmethod
def generate_world(opt, agents):
return MessengerBotChatOnboardWorld(opt=opt, agent=agents[0])
def parley(self):
self.episodeDone = True
class MessengerBotChatTaskWorld(World):
"""
Example one person world that talks to a provided agent (bot).
"""
MAX_AGENTS = 1
MODEL_KEY = 'blender_90M'
def __init__(self, opt, agent, bot):
self.agent = agent
self.episodeDone = False
self.model = bot
self.first_time = True
@staticmethod
def generate_world(opt, agents):
if opt['models'] is None:
raise RuntimeError("Model must be specified")
return MessengerBotChatTaskWorld(
opt,
agents[0],
create_agent_from_shared(
opt['shared_bot_params'][MessengerBotChatTaskWorld.MODEL_KEY]
),
)
@staticmethod
def assign_roles(agents):
agents[0].disp_id = 'ChatbotAgent'
def parley(self):
if self.first_time:
self.agent.observe(
{
'id': 'World',
'text': 'Welcome to the ParlAI Chatbot demo. '
'You are now paired with a bot - feel free to send a message.'
'Type [DONE] to finish the chat, or [RESET] to reset the dialogue history.',
}
)
self.first_time = False
a = self.agent.act()
if a is not None:
if '[DONE]' in a['text']:
self.episodeDone = True
elif '[RESET]' in a['text']:
self.model.reset()
self.agent.observe({"text": "[History Cleared]", "episode_done": False})
else:
print("===act====")
print(a)
print("~~~~~~~~~~~")
self.model.observe(a)
response = self.model.act()
print("===response====")
print(response)
print("~~~~~~~~~~~")
self.agent.observe(response)
def episode_done(self):
return self.episodeDone
def shutdown(self):
self.agent.shutdown()
# ---------- Overworld -------- #
class MessengerOverworld(World):
"""
World to handle moving agents to their proper places.
"""
def __init__(self, opt, agent):
self.agent = agent
self.opt = opt
self.first_time = True
self.episodeDone = False
@staticmethod
def generate_world(opt, agents):
return MessengerOverworld(opt, agents[0])
@staticmethod
def assign_roles(agents):
for a in agents:
a.disp_id = 'Agent'
def episode_done(self):
return self.episodeDone
def parley(self):
if self.first_time:
self.agent.observe(
{
'id': 'Overworld',
'text': 'Welcome to the overworld for the ParlAI messenger '
'chatbot demo. Please type "begin" to start, or "exit" to exit',
'quick_replies': ['begin', 'exit'],
}
)
self.first_time = False
a = self.agent.act()
if a is not None and a['text'].lower() == 'exit':
            self.episodeDone = True
return 'EXIT'
if a is not None and a['text'].lower() == 'begin':
self.episodeDone = True
return 'default'
elif a is not None:
self.agent.observe(
{
'id': 'Overworld',
'text': 'Invalid option. Please type "begin".',
'quick_replies': ['begin'],
}
)
| mit | 1,519,304,251,816,974,600 | 29.300699 | 96 | 0.527348 | false |
Enerccio/SimplePython | src/math.py | 1 | 1234 | """
SimplePython - embeddable python interpret in java
Copyright (c) Peter Vanusanik, All rights reserved.
math module
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3.0 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library.
"""
__all__ = ["floor", "max", "min"]
def floor(x):
"""
Returns floor of the number provided
"""
if type(x) == real:
return real(int(x))
elif type(x) == complex:
return complex(floor(x.real), floor(x.imag))
else:
return int(x)
def max(a, b):
"""
    Returns the maximum of the values a and b provided
"""
if a > b:
return a
return b
def min(a, b):
"""
Returns minumum of values a b provided
"""
if a < b:
return a
return b | lgpl-3.0 | -8,908,702,434,100,743,000 | 24.729167 | 66 | 0.670989 | false |
xmm/fstore | api/v0/image.py | 1 | 1528 | # -*- coding: utf-8 -*-
'''
Copyright (c) 2015
@author: Marat Khayrullin <[email protected]>
'''
from flask import current_app, request
from flask.views import MethodView
from werkzeug import abort
from werkzeug.exceptions import HTTPException
from api.exceptions import StorageUnavailableError, ResourceExists
from api.resource import Resource, SaveResource, RetrieveResource
from tools.xsendfile import x_accel_redirect
from tools.auth import login_required
class ImagesView(MethodView):
@login_required
def post(self):
try:
upload = request.files['image_file']
resource = Resource(SaveResource(upload))
try:
current_app.storages.saveResource(resource)
return 'OK', 201, {'location': resource.fileName}
except StorageUnavailableError as e:
abort(503)
except ResourceExists:
return 'OK', 200, {'location': resource.fileName}
        except HTTPException:
            raise
        except Exception as e:
            abort(400)
def get(self, filename):
try:
resource = Resource(RetrieveResource(request))
try:
current_app.storages.retrieveResource(resource)
response = x_accel_redirect(resource.location, resource.size)
return response
except StorageUnavailableError as e:
abort(503)
except HTTPException as e:
# print(e)
raise
except Exception as e:
abort(500)
| bsd-3-clause | 8,955,208,477,573,750,000 | 28.384615 | 77 | 0.621728 | false |
marcusmueller/pybombs | pybombs/package_manager.py | 1 | 9047 | #!/usr/bin/env python2
#
# Copyright 2015 Free Software Foundation, Inc.
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Package Manager: Manages packages (no shit)
"""
from pybombs import pb_logging
from pybombs.pb_exception import PBException
from pybombs.config_manager import config_manager
from pybombs import recipe
from pybombs import packagers
class PackageManager(object):
"""
Meta-package manager. This will determine, according to our system
and the configuration, who takes care of managing packages and
then dispatches specific package managers. For example, this might
dispatch an apt-get backend on Ubuntu and Debian systems, or a
yum backend on Fedora systems.
"""
def __init__(self,):
# Set up logger:
self.log = pb_logging.logger.getChild("PackageManager")
self.cfg = config_manager
self.prefix_available = self.cfg.get_active_prefix().prefix_dir is not None
# Create a source package manager
if self.prefix_available:
self.src = packagers.Source()
self.prefix = self.cfg.get_active_prefix()
else:
self.log.debug("No prefix specified. Skipping source package manager.")
self.src = None
# Create sorted list of binary package managers
requested_packagers = [x.strip() for x in self.cfg.get('packagers').split(',') if x]
binary_pkgrs = []
for pkgr in requested_packagers:
self.log.debug("Attempting to add binary package manager {}".format(pkgr))
p = packagers.get_by_name(pkgr, packagers.__dict__.values())
if p is None:
self.log.warn("This binary package manager can't be instantiated: {}".format(pkgr))
continue
if p.supported():
self.log.debug("{} is supported!".format(pkgr))
binary_pkgrs.append(p)
self._packagers = []
for satisfy in self.cfg.get('satisfy_order').split(','):
satisfy = satisfy.strip()
if satisfy == 'src':
if self.prefix_available:
self._packagers += [self.src,]
elif satisfy == 'native':
self._packagers += binary_pkgrs
else:
raise PBException("Invalid satisfy_order value: {}".format(satisfy))
self.log.debug("Using packagers: {}".format([x.name for x in self._packagers]))
# Now we can use self.packagers, in order, for our commands.
def check_package_flag(self, pkgname, flag):
"""
See if package 'pkgname' has 'flag' set (return the boolean value
of that flag if yes, or None otherwise).
"""
return bool(
self.cfg.get_package_flags(
pkgname,
recipe.get_recipe(pkgname).category
).get(flag)
)
def get_packagers(self, pkgname):
"""
Return a valid list of packagers for a given package.
This will take care of cases where e.g. a source packager is
required (and then only return that).
"""
# Check if the package flags aren't forcing a source build:
if self.check_package_flag(pkgname, 'forcebuild'):
self.log.debug("Package {pkg} is requesting a source build.".format(pkg=pkgname))
if self.src is not None:
return [self.src,]
else:
return []
return self._packagers
def exists(self, name, return_pkgr_name=False):
"""
Check to see if this package is available on this platform.
Returns True or a version string if yes, False if not.
"""
self.log.debug("Checking if package {} is installable.".format(name))
if self.check_package_flag(name, 'forceinstalled'):
self.log.debug("Package {} is forced to state 'installed'.".format(name))
return ['force-installed'] if return_pkgr_name else True
r = recipe.get_recipe(name)
pkgrs = []
for pkgr in self.get_packagers(name):
pkg_version = pkgr.exists(r)
if pkg_version is None or not pkg_version:
continue
else:
if return_pkgr_name:
pkgrs.append(pkgr.name)
else:
return pkg_version
if return_pkgr_name and len(pkgrs):
return pkgrs
return False
def installed(self, name, return_pkgr_name=False):
"""
Check to see if this recipe is installed (identified by its name).
If not, return False. If yes, return value depends on return_pkgr_name
and is either a list of packager name that installed it, or a version
string (if the version string can't be determined, returns True instead).
"""
self.log.debug("Checking if package {} is installed.".format(name))
if self.check_package_flag(name, 'forceinstalled'):
self.log.debug("Package {} is forced to state 'installed'.".format(name))
# TODO maybe we can figure out a version string
return ['force-installed'] if return_pkgr_name else True
r = recipe.get_recipe(name)
pkgrs = []
for pkgr in self.get_packagers(name):
pkg_version = pkgr.installed(r)
if pkg_version is None or not pkg_version:
continue
else:
if return_pkgr_name:
pkgrs.append(pkgr.name)
else:
return pkg_version
if return_pkgr_name and len(pkgrs):
return pkgrs
return False
def install(self, name, static=False, verify=False):
"""
Install the given package. Returns True if successful, False otherwise.
"""
self.log.debug("install({}, static={})".format(name, static))
if self.check_package_flag(name, 'forceinstalled'):
self.log.debug("Package {} is assumed installed.".format(name))
# TODO maybe we can figure out a version string
return True
pkgrs = self.get_packagers(name)
if len(pkgrs) == 0:
self.log.error("Can't find any packagers to install {0}".format(name))
raise PBException("No packager available for package {0}".format(name))
if static:
self.log.debug('Package will be built statically.')
if not self.prefix_available:
self.log.error('Static builds require source builds.')
raise PBException('Static builds require source builds.')
pkgrs = [self.src,]
return self._std_package_operation(
name,
'install',
pkgrs,
verify=verify,
static=static,
)
def update(self, name, verify=False):
"""
Update the given package. Returns True if successful, False otherwise.
"""
return self._std_package_operation(
name,
'update',
self.get_packagers(name),
verify=verify,
)
def uninstall(self, name):
"""
Uninstall the given package.
Returns True if successful, False otherwise.
"""
return self._std_package_operation(
name,
'uninstall',
self.get_packagers(name),
)
def _std_package_operation(self, name, operation, pkgrs, verify=False, **kwargs):
"""
Standard package operation: Try an operation on all packagers.
"""
rec = recipe.get_recipe(name)
for pkgr in pkgrs:
self.log.debug("Using packager {}".format(pkgr.name))
try:
result = getattr(pkgr, operation)(rec, **kwargs)
if result:
if verify and not pkgr.verify(rec):
self.log.warn("Package reported successful {0}, but verification failed.".format(operation))
continue
return True
except PBException as ex:
self.log.error(
"Something went wrong while trying to {} {} using {}: {}".format(
operation, name, pkgr.name, str(ex).strip()
)
)
return False
| gpl-3.0 | 4,854,882,884,099,081,000 | 39.030973 | 116 | 0.585498 | false |
texastribune/salesforce-stripe | reconcile-email.py | 1 | 1616 | import os
from config import STRIPE_KEYS
from time import sleep
import stripe
from salesforce_bulk import SalesforceBulk
from simple_salesforce import Salesforce
stripe.api_key = STRIPE_KEYS["secret_key"]
# get Stripe emails
customers = stripe.Customer.list(limit=100)
stripe_emails = set(
(x["email"].lower() for x in customers.auto_paging_iter() if x["email"])
)
# then compare to SF
query = "SELECT All_In_One_EMail__c FROM Contact"
SALESFORCE = {
"USERNAME": os.getenv("SALESFORCE_USERNAME"),
"PASSWORD": os.getenv("SALESFORCE_PASSWORD"),
"HOST": os.getenv("SALESFORCE_HOST"),
"TOKEN": os.getenv("SALESFORCE_TOKEN"),
"CLIENT_ID": os.getenv("SALESFORCE_CLIENT_ID"),
"CLIENT_SECRET": os.getenv("SALESFORCE_CLIENT_SECRET"),
}
USER = SALESFORCE["USERNAME"]
PASS = SALESFORCE["PASSWORD"]
TOKEN = SALESFORCE["TOKEN"]
HOST = SALESFORCE["HOST"]
sf = Salesforce(username=USER, password=PASS, security_token=TOKEN)
bulk = SalesforceBulk(sessionId=sf.session_id, host=HOST)
job = bulk.create_query_job("Contact", contentType="CSV")
batch = bulk.query(job, query)
while not bulk.is_batch_done(job, batch):
sleep(3)
bulk.close_job(job)
rows = bulk.get_batch_result_iter(job, batch, parse_csv=True)
bulk_email = list(rows)
email_list = []
emails_sf = [x["All_In_One_EMail__c"] for x in bulk_email]
print("The following email addresses appear in Stripe but not Salesforce: \n")
for field in emails_sf:
for email in field.split(","):
if email != "":
email_list.append(email.strip())
diff = stripe_emails.difference(email_list)
for item in diff:
print(item)
| mit | 635,312,233,939,258,100 | 27.857143 | 78 | 0.704827 | false |
brendan-ward/rasterio | tests/test_rio_convert.py | 1 | 6836 | import sys
import os
import logging
import numpy as np
from click.testing import CliRunner
import rasterio
from rasterio.rio.main import main_group
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
TEST_BBOX = [-11850000, 4804000, -11840000, 4808000]
def bbox(*args):
return ' '.join([str(x) for x in args])
def test_clip_bounds(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds', bbox(*TEST_BBOX)])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open(output) as out:
assert out.shape == (419, 173)
def test_clip_bounds_geographic(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/RGB.byte.tif', output, '--geographic', '--bounds',
'-78.95864996545055 23.564991210854686 -76.57492370013823 25.550873767433984'])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open(output) as out:
assert out.shape == (718, 791)
def test_clip_like(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/shade.tif'])
assert result.exit_code == 0
assert os.path.exists(output)
with rasterio.open('tests/data/shade.tif') as template_ds:
with rasterio.open(output) as out:
assert out.shape == template_ds.shape
assert np.allclose(out.bounds, template_ds.bounds)
def test_clip_missing_params(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, ['clip', 'tests/data/shade.tif', output])
assert result.exit_code == 2
assert '--bounds or --like required' in result.output
def test_clip_bounds_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group,
['clip', 'tests/data/shade.tif', output, '--bounds', bbox(0, 0, 10, 10)])
assert result.exit_code == 2
assert '--bounds' in result.output
def test_clip_like_disjunct(runner, tmpdir):
output = str(tmpdir.join('test.tif'))
result = runner.invoke(
main_group, [
'clip', 'tests/data/shade.tif', output, '--like',
'tests/data/RGB.byte.tif'])
assert result.exit_code == 2
assert '--like' in result.output
# Tests: format and type conversion, --format and --dtype
def test_format(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--format', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_format_short(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_output_opt(tmpdir):
outputname = str(tmpdir.join('test.jpg'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', '-o', outputname, '-f', 'JPEG'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.driver == 'JPEG'
def test_dtype(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.dtypes == tuple(['uint16'] * 3)
def test_dtype_rescaling_uint8_full(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 255]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--scale-ratio', '1.0'])
assert result.exit_code == 0
src_stats = [
{"max": 255.0, "mean": 44.434478650699106, "min": 1.0},
{"max": 255.0, "mean": 66.02203484105824, "min": 1.0},
{"max": 255.0, "mean": 71.39316199120559, "min": 1.0}]
with rasterio.open(outputname) as src:
for band, expected in zip(src.read(masked=True), src_stats):
assert round(band.min() - expected['min'], 6) == 0.0
assert round(band.max() - expected['max'], 6) == 0.0
assert round(band.mean() - expected['mean'], 6) == 0.0
def test_dtype_rescaling_uint8_half(tmpdir):
"""Rescale uint8 [0, 255] to uint8 [0, 127]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--scale-ratio', '0.5'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 127, 6) == 0.0
def test_dtype_rescaling_uint16(tmpdir):
"""Rescale uint8 [0, 255] to uint16 [0, 4095]"""
# NB: 255 * 16 is 4080, we don't actually get to 4095.
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'uint16',
'--scale-ratio', '16'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() - 0, 6) == 0.0
assert round(band.max() - 4080, 6) == 0.0
def test_dtype_rescaling_float64(tmpdir):
"""Rescale uint8 [0, 255] to float64 [-1, 1]"""
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(main_group, [
'convert', 'tests/data/RGB.byte.tif', outputname, '--dtype', 'float64',
'--scale-ratio', str(2.0 / 255), '--scale-offset', '-1.0'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
for band in src.read():
assert round(band.min() + 1.0, 6) == 0.0
assert round(band.max() - 1.0, 6) == 0.0
def test_rgb(tmpdir):
outputname = str(tmpdir.join('test.tif'))
runner = CliRunner()
result = runner.invoke(
main_group,
['convert', 'tests/data/RGB.byte.tif', outputname, '--rgb'])
assert result.exit_code == 0
with rasterio.open(outputname) as src:
assert src.colorinterp(1) == rasterio.enums.ColorInterp.red
| bsd-3-clause | 2,933,001,808,548,263,000 | 32.509804 | 88 | 0.610591 | false |
myvoice-nigeria/myvoice | myvoice/wsgi.py | 1 | 1136 | """
WSGI config for myvoice project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myvoice.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-2-clause | 6,711,555,638,155,862,000 | 39.571429 | 79 | 0.800176 | false |
akaszynski/vtkInterface | examples/02-plot/labels.py | 1 | 2507 | """
Label Points
~~~~~~~~~~~~
Use string arrays in a point set to label points
"""
# sphinx_gallery_thumbnail_number = 3
from pyvista import examples
import pyvista as pv
import numpy as np
# Labels are not currently supported by the VTKjs conversion script
pv.rcParams["use_panel"] = False
###############################################################################
# Label String Array
# ++++++++++++++++++
#
# This example will label the nodes of a mesh with a given array of string
# labels for each of the nodes.
# Make some random points
poly = pv.PolyData(np.random.rand(10, 3))
###############################################################################
# Add string labels to the point data - this associates a label with every
# node:
poly["My Labels"] = ["Label {}".format(i) for i in range(poly.n_points)]
poly
###############################################################################
# Now plot the points with labels:
plotter = pv.Plotter()
plotter.add_point_labels(poly, "My Labels", point_size=20, font_size=36)
plotter.show()
###############################################################################
# Label Node Locations
# ++++++++++++++++++++
#
# This example will label the nodes of a mesh with their coordinate locations
# Load example beam file
grid = pv.UnstructuredGrid(examples.hexbeamfile)
###############################################################################
# Create plotting class and add the unstructured grid
plotter = pv.Plotter()
plotter.add_mesh(grid, show_edges=True, color="tan")
# Add labels to points on the yz plane (where x == 0)
points = grid.points
mask = points[:, 0] == 0
plotter.add_point_labels(
points[mask], points[mask].tolist(), point_size=20, font_size=36
)
plotter.camera_position = [
(-1.5, 1.5, 3.0),
(0.05, 0.6, 1.2),
(0.2, 0.9, -0.25)]
plotter.show()
###############################################################################
# Label Scalar Values
# +++++++++++++++++++
#
# This example will label each point with their scalar values
mesh = examples.load_uniform().slice()
###############################################################################
p = pv.Plotter()
# Add the mesh:
p.add_mesh(mesh, scalars="Spatial Point Data", show_edges=True)
# Add the points with scalar labels:
p.add_point_scalar_labels(mesh, "Spatial Point Data", point_size=20, font_size=36)
# Use a nice camera position:
p.camera_position = [(7, 4, 5), (4.4, 7.0, 7.2), (0.8, 0.5, 0.25)]
p.show()
| mit | 5,852,070,679,874,245,000 | 27.168539 | 82 | 0.524531 | false |
dhondta/tinyscript | tests/test_interact.py | 1 | 1227 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""Interaction module assets' tests.
"""
from tinyscript.interact import set_interact_items
from utils import *
args.interact = True
set_interact_items(globals())
class TestInteraction(TestCase):
def test_interact_setup(self):
g = globals().keys()
self.assertTrue(args.interact)
self.assertIn("interact", g)
self.assertIn("compile_command", g)
self.assertIn("InteractiveConsole", g)
self.assertIn("RemoteInteractiveConsole", g)
def test_local_interaction(self):
temp_stdout(self)
temp_stdin(self, "\n")
self.assertIs(interact(), None)
temp_stdin(self, "\n")
self.assertIs(interact("test"), None)
temp_stdin(self, "\n")
self.assertIs(interact(exitmsg="test"), None)
def test_local_interactive_console(self):
temp_stdout(self)
temp_stdin(self, "\n")
with InteractiveConsole("test") as console:
pass
temp_stdin(self, "\n")
with InteractiveConsole(exitmsg="test") as console:
pass
temp_stdin(self, "\n")
with InteractiveConsole() as console:
console.interact()
| agpl-3.0 | 14,017,961,871,634,440 | 27.534884 | 59 | 0.609617 | false |
chop-dbhi/django-forkit | setup.py | 1 | 3444 | from distutils.core import setup
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
BASE_PACKAGE = 'forkit'
class osx_install_data(install_data):
# On MacOS, the platform-specific lib dir is /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific fix
# for this in distutils.command.install_data#306. It fixes install_lib but not
# install_data, which is why we roll our own install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is set to the
# fixed directory, so we set the installdir to install_lib. The
# install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk(BASE_PACKAGE):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
elif dirname in ('tests', 'fixtures'):
del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Small hack for working with bdist_wininst.
# See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
if len(sys.argv) > 1 and sys.argv[1] == 'bdist_wininst':
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
version = __import__(BASE_PACKAGE).get_version()
setup(
version = version,
name = 'django-forkit',
author = 'Byron Ruth',
author_email = '[email protected]',
description = 'Utility functions for forking, resetting ' \
'and diffing model objects',
license = 'BSD',
keywords = 'fork deepcopy model abstract diff',
packages = packages,
cmdclass = cmdclasses,
data_files = data_files,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
)
| bsd-3-clause | -1,044,305,317,180,701,700 | 34.142857 | 104 | 0.660569 | false |
mbokulic/bmt_parser | bmt_parser/parse_mets.py | 1 | 8134 | '''
Functions for parsing the toplevel mets file that contains metadata on an
issue.
Use the main() function.
TO DO
- I've seen that <typeOfResource>still image</> can be <genre>Music</genre>
  I don't know if this distinction is important, or whether I should record the genre
'''
import bs4
import logging
import os
import re
from bmt_parser.MyError import MyError
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler()
logger.addHandler(stream_handler)
file_handler = logging.FileHandler('parse.log')
file_handler.setLevel(logging.WARNING)
logger.addHandler(file_handler)
KNOWN_SUBS = ['Head', 'Subhead', 'Byline', 'Copy', 'TextContent',
'Illustration',
'Music', # not sure what to do with this one
'MinorHead'] # only one example
RELEVANT_SUBS = ['Head', 'Subhead', 'Byline', 'Copy']
VALID_SECTIONS = ['advertisement', 'parent', 'subsection', 'flat', 'image']
def main(filepath):
'''returns the mets (metadata) info on an issue:
- issue date, volume, etc
- list of sections (texts, images) and their metadata
:param filepath: path to the mets file
:returns: a nested dictionary
'''
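    # Illustrative shape of the result (values are made up; keys mirror the
    # code below):
    #   {'volume': '3', 'number': '12', 'date': '1910-06-01',
    #    'sections': [{'title': ..., 'authors': ..., 'type_of_resource': ...,
    #                  'section_id': ..., 'subsections': {...}}, ...]}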
result = {}
with open(filepath, 'r') as file:
root = bs4.BeautifulSoup(file, 'xml')
filename = os.path.split(filepath)[1]
# getting data
result.update(_get_issue_metadata(root, filename))
result['sections'] = _get_issue_sections(root, filename)
return result
def _get_issue_metadata(root, filename):
'''returns metadata (title, date...) in form of a dictionary
'''
result = {}
dmdsec = _only_one(root, 'dmdSec', filename)
part = _only_one(dmdsec, 'part', filename, {'type': 'issue'})
result['volume'] = part.find('detail', type='volume').number.string
result['number'] = part.find('detail', type='number').number.string
result['date'] = dmdsec.originInfo.find('dateIssued', keyDate='yes').string
return result
def _get_issue_sections(root, filename):
'''returns section (texts, images) data as a list
'''
# dmdSec was already checked
dmdsec = _only_one(root, 'dmdSec', filename)
mods = _only_one(dmdsec, 'mods', filename)
structMap = _only_one(root, 'structMap', filename,
{'LABEL': 'Logical Structure'})
result = []
sections = mods.find_all('relatedItem')
for sec in sections:
type = _get_section_type(sec, filename)
if type in VALID_SECTIONS:
data = _parse_section(sec, type, structMap, filename)
result.append(data)
return result
def _parse_section(section, type, structMap, filename):
'''returns data on a single section as a dict
'''
result = {}
# metadata: title, author name, etc
result['title'] = ' '.join([
part.string for part in section.titleInfo.find_all(True)])
result['authors'] = _get_names(section, type)
result['type_of_resource'] = section.find('typeOfResource').string
result['section_id'] = section['ID']
# text content
result['subsections'] = {}
if type == 'image':
remaining = RELEVANT_SUBS
else:
text_cont = 'SponsoredAd' if type == 'advertisement' else 'TextContent'
alto_locs = structMap.find('div', TYPE=text_cont, DMDID=section['ID'])
if not alto_locs:
raise MyError('section {} in file {} doesnt have a div with text '
'content'.format(section['ID'], filename))
divs = alto_locs.find_all('div', recursive=False)
div_types = set([div['TYPE'] for div in divs])
unknown = div_types - set(KNOWN_SUBS)
if len(unknown) > 0:
msg = ('div of type {} in section {} of file {} not '
'known!'.format(unknown, section['ID'], filename))
# quick fix for their typo
if 'Byline ' in unknown:
for div in divs:
if div['TYPE'] == 'Byline ':
div['TYPE'] = 'Byline'
# if there are unknown divs left, raise error
if (len(unknown) - 1) > 0:
raise MyError(msg)
else:
raise MyError(msg)
divs = [div for div in divs if div['TYPE'] in RELEVANT_SUBS]
for div in divs:
if div['TYPE'] in result:
raise MyError('duplicate alto location for {}!'.
format(div['TYPE']))
result['subsections'][div['TYPE']] = _get_alto_locations(div)
remaining = set(RELEVANT_SUBS) - set(div_types)
for r in remaining:
result['subsections'][r] = None
return result
def _get_names(section, type):
names = section.find_all('name', recursive=False)
# if subsection, probably the author is in the parent section
if not names and type == 'subsection':
names = section.parent.find_all('name', recursive=False)
if names:
names_text = [name.displayForm.string for name in names
if name.role.roleTerm.string == 'cre']
names_text = [name for name in names_text if name is not None]
return '||'.join(names_text)
else:
return None
def _only_one(root, tag_name, filename, optional_attr={}):
'''checks if root contains tag and returns it. Raises errors if no tag or
more than one tag.
'''
tags = root.find_all(tag_name, attrs=optional_attr)
if len(tags) > 1:
raise MyError('more than one {tag_name} in {filename}'.format(
tag_name=tag_name, filename=filename))
elif len(tags) == 0:
raise MyError('no {tag_name} in {filename}'.format(
tag_name=tag_name, filename=filename))
return tags[0]
def _test_section(section):
'''returns True if the given section is relevant
'''
if section.get('type'):
if section['type'] == 'constituent':
return True
# due to input mistakes, some sections do not have type
elif section.get('ID'):
if re.search('c[0-9]{3}', section['ID']):
return True
return False
def _get_section_type(section, filename):
'''returns section type and None if it is an invalid section
'''
if not _test_section(section):
logger.warning('ignoring section: {} {}'
.format(section.name, section.attrs))
return None
resource_type = section.find('typeOfResource').string
genre = section.find('genre').string.lower()
title = section.titleInfo.title.string
if resource_type == 'still image':
return 'image'
elif resource_type == 'text':
# special text section types
if 'advertisement' in genre:
return 'advertisement'
elif 'inhalt' in title.lower():
return 'contents'
# valid sections
elif len(list(section.find_all('relatedItem',
type='constituent'))) > 0:
return 'parent'
elif _test_section(section.parent):
if _test_section(section.parent.parent):
raise MyError('double nesting in section {}, file {}!'
.format(section['ID'], filename))
return 'subsection'
else:
return 'flat'
else:
logger.warning('unknown section {} type in file {}. Resource type: {},'
'genre: {}'
.format(section['ID'], filename, resource_type, genre))
return 'unknown'
def _get_alto_locations(section):
'''returns alto locations as a list. These are used when parsing alto file
'''
areas = section.find_all('area')
if len(areas) == 0:
return None
return [{'file': area['FILEID'], 'loc': area['BEGIN']} for area in areas]
if __name__ == '__main__':
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('--path', '-p', dest='file_path', required=True)
args = parser.parse_args()
res = main(args.file_path)
print(json.dumps(res))
| mit | -3,810,432,262,548,088,300 | 31.798387 | 79 | 0.591345 | false |
a4fr/book_search_engine | search_engine.py | 1 | 5030 | import json
import urllib.request
from operator import itemgetter # used for sorting data
from progress.bar import Bar
"""
Find the best books that you need and return them
@author Ali Najafi ([email protected])
@source http://it-ebooks-api.info/
"""
from oneconf.utils import save_json_file_update
class search_engine:
def __init__(self):
self.tags = []
self.limit_in_pages = 0 # value <= 0 means there is no limit
self.total_result = 0
self.all_pages = 0
self.show_progressbar = False
"""
    Tags are needed to search for books in the online source
"""
def set_tag(self, tag_name):
self.tags.append(str(tag_name))
"""
@return dict
"""
def request(self, url):
r = urllib.request.urlopen(url).read().decode('utf8')
r = json.loads(r)
return r
def get_books__detail_from_source(self):
finded_books = []
tags = ''
if len(self.tags) > 0:
for tag in self.tags:
tags += " " + tag
tags = tags.strip()
else:
raise('length of "tags" is ZERO. function needs tags to search')
#request pages
START_PAGE = 1
END_PAGE = 1
CURRENT_PAGE = START_PAGE
while CURRENT_PAGE <= END_PAGE:
url = 'http://it-ebooks-api.info/v1/search/'+tags+'/page/'+str(CURRENT_PAGE)
request = self.request(url)
if CURRENT_PAGE == 1:
self.total_result = request["Total"]
self.all_pages = int(request['Total']) // 10 + 1
#prepare END_PAGE
if (self.limit_in_pages > 0) and (self.all_pages > self.limit_in_pages):
END_PAGE = self.limit_in_pages
else:
END_PAGE = self.all_pages
#append new books
finded_books.extend(request["Books"])
CURRENT_PAGE += 1
#extract other detail of books
if self.show_progressbar: progressbar = Bar('Searching ', max=len(finded_books))
for book_index in range(len(finded_books)):
url = "http://it-ebooks-api.info/v1/book/"+str(finded_books[book_index]["ID"])
other_details = self.request(url)
for detail in other_details:
if detail not in {"Error", "Time"}:
if detail in {"Year", "ISBN", "isbn", "Page"}:
#need this for sorting
finded_books[book_index][detail] = int(other_details[detail])
else:
finded_books[book_index][detail] = other_details[detail]
if self.show_progressbar: progressbar.next()
if self.show_progressbar: progressbar.finish()
#save data as json file
name = 'books-%s-[%sfrom%s].json' % (tags.replace(" ", "-"), len(finded_books), self.total_result)
save_json_file_update(name, finded_books)
print('"%s" Saved!' % name)
return finded_books
"""
    Load data from a JSON file instead of the online source
@return list
"""
def get_books_detail_from_json_file(self, PATH):
if PATH:
file = open(PATH, 'r')
return json.loads(file.read())
"""
find best books and sort them for you
@sort_with [R]elevant, [Y]ear, [A]uthor, [B]Publisher, [P]age
* it can be lower case or UPPER
* -R -Y -A -P -B "reverse-sort"
@offline_json PATH of json file
@return list
"""
def find(self, sort_with="R", offline_json=""):
sort_mod = {'r': 'Relevant',
'y': 'Year',
'a': 'Author',
'b': 'Publisher',
'p': 'Page'}
#check sort_with
sort_with = sort_with.strip()
sort_with = sort_with.lower()
reverse = False
if len(sort_with) > 2:
raise('"sort_with" Error')
elif len(sort_with) == 2 and sort_with[0] == '-':
reverse = True
sort_with = sort_with[1]
elif len(sort_with) == 1 and sort_with in sort_mod.keys():
pass
else:
raise('"sort_with" Error')
#check offline mod
if not offline_json:
data = self.get_books__detail_from_source()
else:
data = self.get_books_detail_from_json_file(offline_json)
#sorting
if sort_with == 'r':
if reverse == True:
data.reverse()
else:
data = sorted(data, key=itemgetter(sort_mod[sort_with]), reverse=reverse)
return data
"""
save json (dict) data to a file
@input data must be dict()
"""
def save_json_to_file(self, PATH, data):
file = open(str(PATH), 'w')
file.write(json.dumps(data, indent=4, separators=(',', ': ')))
file.close()
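# Hypothetical usage sketch (tag and sort order are made up; requires network
# access to it-ebooks-api.info):
#   engine = search_engine()
#   engine.set_tag('python')
#   engine.limit_in_pages = 2
#   books = engine.find(sort_with='-y')  # newest first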
| gpl-2.0 | -5,941,776,434,686,377,000 | 32.765101 | 106 | 0.514712 | false |
ProjetPP/PPP-QuestionParsing-PLYFrench | ppp_french_parser/parser.py | 1 | 10869 | import os
import itertools
import threading
import subprocess
from ply import lex, yacc
from nltk.corpus import wordnet
from collections import namedtuple, deque
from ppp_datamodel import Resource, Triple, Missing
from .config import Config
class ParserException(Exception):
pass
FORMS_ETRE = frozenset(filter(bool, '''
suis es est sommes êtes sont étais était
étions êtiez étaient
'''.split(' ')))
FORMS_AVOIR = frozenset(filter(bool, '''
ai as a avons avez ont avais avait
avions aviez avaient
'''.split(' ')))
class CoolLexToken(lex.LexToken):
"""LexToken with a constructor."""
def __init__(self, type, value, lineno, lexpos):
self.type = type
self.value = value
self.lineno = lineno
self.lexpos = lexpos
def is_etre(v):
    return v.lower() in FORMS_ETRE
def is_avoir(v):
    return v.lower() in FORMS_AVOIR
class Nom(str):
pass
class Pronom(str):
pass
class Article(str):
pass
class IntroCompl(str):
pass
class Verbe(str):
pass
class TokenList(tuple):
pass
class MotInterrogatif(str):
pass
class Hole:
pass
tokens = ('TOKENLIST',
'INTRO_COMPL',
'MOT_INTERROGATIF', 'ARTICLE', 'NOM', 'VERBE',
'GROUPE_NOMINAL', 'PRONOM',
)
t_ignore = ' \n'
def t_error(t):
raise ParserException('Illegal string `%r`' % t.value)
def t_PONCTUATION(t):
'''[^ "]*_PUNC '''
return None
def t_MOT_INTERROGATIF(t):
'''[^ ]*_(ADVWH|ADJWH|PROWH|DETWH) '''
t.value = MotInterrogatif(t.value.rsplit('_', 1)[0])
return t
def t_intro_compl_simpl(t):
'''(de|des|du)_P[ ]'''
t.type = 'INTRO_COMPL'
t.value = IntroCompl(t.value.rsplit('_', 1)[0])
return t
def t_intro_compl_apostrophe(t):
'''d['’]'''
t.type = 'INTRO_COMPL'
t.value = IntroCompl('d')
return t
def t_ARTICLE(t):
'''[^ ]*(?<!\bde)_DET '''
if t.value.startswith('l’') or t.value.startswith('l\''):
# Stupid taggger:
# * Quel_ADJWH est_V l’âge_NC de_P Obama_NPP ?_PUNC
# * Quel_ADJWH est_V l’âge_DET d’Obama_NPP ?_PUNC
t.type = 'GROUPE_NOMINAL'
t.value = GroupeNominal(Article('l'), [], Nom(t.value.rsplit('_', 1)[0][2:]))
else:
t.value = Article(t.value.rsplit('_', 1)[0])
return t
def t_PRONOM(t):
'''[^ ]*(?<! des)_P[ ]'''
t.value = Pronom(t.value.rsplit('_', 1)[0])
return t
def t_GROUPE_NOMINAL(t): # Stupid tagger
'''[^ ]*['’][^ ]*_(VINF|ADJ|NC) '''
v = t.value.rsplit('_', 1)[0]
(det, noun) = v.replace('’', '\'').split('\'', 1)
t.value = GroupeNominal(Article(det), [], Nom(noun))
return t
def t_NOM_complement(t):
'''d[’'](?P<content>[^ ]*)_(N|NC|NPP)[ ]'''
t.type = 'TOKENLIST'
t.value = TokenList([
LexToken('INTRO_COMPL', IntroCompl('d'), t.lineno, t.lexpos),
LexToken('NOM', Nom(lexer.lexmatch.group('content')), t.lineno, t.lexpos),
])
return t
def t_NOM(t):
'''[^ ]*_(N|NC|NPP)[ ]'''
assert not t.value.startswith('d’') and not t.value.startswith('d\'')
t.value = Nom(t.value.rsplit('_', 1)[0])
return t
def t_quotes(t):
'''"_PUNC (?P<content>[^"]*) "_PUNC'''
t.type = 'NOM'
c = lexer.lexmatch.group('content')
t.value = Nom(' '.join(x.rsplit('_', 1)[0] for x in c.split(' ')).strip())
return t
def t_VERBE(t):
'''[^ -]*_(V|VPP)[ ]'''
t.value = Verbe(t.value.rsplit('_', 1)[0])
return t
def t_verbe_sujet(t):
'''[^ ]*-[^ ]*_VPP '''
t.type = 'TOKENLIST'
t.value = t.value.rsplit('_', 1)[0]
(verb, noun) = t.value.split('-', 1)
t.value = TokenList([
CoolLexToken('VERBE', Verbe(verb), t.lineno, t.lexpos),
CoolLexToken('PRONOM', Nom(noun), t.lineno, t.lexpos),
])
return t
class DecomposingLexer:
def __init__(self):
self._backend = lex.lex()
self._buffer = deque()
def input(self, s):
self._backend.input(s)
def _token(self):
if self._buffer:
return self._buffer.popleft()
else:
token = self._backend.token()
if token and isinstance(token.value, TokenList):
self._buffer.extend(token.value[1:])
return token.value[0]
else:
return token
def token(self):
t = self._token()
assert not isinstance(t, TokenList), t
return t
@property
def lexmatch(self):
return self._backend.lexmatch
lexer = DecomposingLexer()
precedence = (
('right', 'INTRO_COMPL'),
)
class GroupeNominal(namedtuple('_GN', 'article qualificateurs nom')):
pass
def det_to_resource(det):
det = det.lower()
if det in ('mon', 'ma', 'mes', 'me', 'je', 'moi'):
return Resource('moi')
elif det in ('ton', 'ta', 'tes', 'te', 'tu', 'toi'):
return Resource('toi')
elif det in ('son', 'sa', 'ses', 'lui', 'elle', 'il', 'iel'):
return Resource('ellui')
else:
return None
def gn_to_subject(gn):
if gn.article:
return det_to_resource(gn.article)
else:
return None
def gn_to_triple(gn):
if gn.qualificateurs:
# TODO
return Triple(
gn_to_triple(gn.qualificateurs[0]),
Resource(gn.nom),
Missing())
elif gn_to_subject(gn):
return Triple(
gn_to_subject(gn),
Resource(gn.nom),
Missing())
else:
return Resource(gn.nom)
def noun_to_predicate(noun):
l = wordnet.synsets(noun, pos='n', lang='fra')
fr_nouns = itertools.chain.from_iterable(
x.lemma_names('fra') for x in l)
fr_nouns = list(fr_nouns)
if fr_nouns:
return Resource(fr_nouns[0]) # TODO multiple
else:
return Resource(noun)
def verb_to_predicate(verb):
l = wordnet.synsets(verb, lang='fra')
# XXX maybe add pos='v'? (note: wouldn't work for others than infinitive)
lemmas = itertools.chain.from_iterable(
x.lemmas() for x in l if x.pos() == 'v' or True)
drf = itertools.chain.from_iterable(
x.derivationally_related_forms() for x in lemmas)
nouns = (
x for x in drf
if x.synset().pos() == 'n')
fr_nouns = itertools.chain.from_iterable(
x.synset().lemma_names('fra') for x in nouns)
fr_nouns = list(fr_nouns)
if fr_nouns:
return Resource(fr_nouns[0]) # TODO multiple
else:
return Resource(verb)
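# Illustrative note: the lookup above maps a French verb to the French lemma
# names of nouns derivationally related to its synsets (so a verb may come back
# as a related noun resource); when nothing is found it falls back to the raw
# verb string. Exact results depend on the installed WordNet data.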
def p_verbe_simple(t):
'''verbe : VERBE'''
t[0] = t[1]
def p_verbe_compose(t):
'''verbe : VERBE VERBE'''
if is_etre(t[1]) or is_avoir(t[1]):
t[0] = Verbe(t[2])
else:
assert False
def p_groupe_nominal_nom(t):
'''groupe_nominal : NOM'''
t[0] = GroupeNominal(None, [], t[1])
def p_groupe_nominal_gn(t):
'''groupe_nominal : GROUPE_NOMINAL'''
t[0] = t[1]
def p_groupe_nominal_simple(t):
'''groupe_nominal_simple : ARTICLE NOM'''
t[0] = GroupeNominal(t[1], [], t[2])
def p_groupe_nominal_base(t):
'''groupe_nominal : groupe_nominal_simple'''
t[0] = t[1]
def p_groupe_nominal_det_nom_compl(t):
'''groupe_nominal : groupe_nominal INTRO_COMPL groupe_nominal'''
if t[1].nom.lower() in ('date', 'lieu') and t[3].qualificateurs:
# Compress stuff like « date de naissance »
t[0] = GroupeNominal(t[1].article, t[3].qualificateurs,
'%s de %s' % (t[1].nom, t[3].nom))
else:
t[0] = GroupeNominal(t[1].article, [t[3]], t[1].nom)
def p_question_verb_first(t):
'''question : MOT_INTERROGATIF verbe groupe_nominal'''
word = t[1].lower()
if word in ('quel', 'quelle', 'qui'):
if is_etre(t[2]):
t[0] = gn_to_triple(t[3])
else:
t[0] = Triple(
gn_to_triple(t[3]),
verb_to_predicate(t[2]),
Missing())
elif word in ('où',):
if is_etre(t[2]):
t[0] = Triple(
gn_to_triple(t[3]),
Resource('localisation'),
Missing())
else:
assert False, t[2]
else:
assert False, word
def p_question_noun_first(t):
'''question : MOT_INTERROGATIF NOM VERBE PRONOM'''
word = t[1].lower()
if word in ('quel', 'quelle', 'qui'):
if is_avoir(t[3]) or is_etre(t[3]):
t[0] = Triple(
det_to_resource(t[4]),
noun_to_predicate(t[2]),
Missing())
else:
assert False, t[3]
else:
assert False, word
def p_error(t):
if t is None:
raise ParserException('Unknown PLY error.')
else:
raise ParserException("Syntax error at '%s' (%s)" %
(t.value, t.type))
parser = yacc.yacc(start='question', write_tables=0)
interpreters = [
'/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java',
'/usr/lib/jvm/java-8-oracle/bin/java',
'/usr/local/bin/java',
'/usr/bin/java',
]
tagger_options = [
'-mx300m',
'edu.stanford.nlp.tagger.maxent.MaxentTagger',
]
class Tagger:
"""Runs an instance of a POS tagger and provides it through the 'tag'
method.
Thread-safe."""
def __init__(self):
self.lock = threading.Lock()
self.process = None
def select_interpreter(self):
for interpreter in interpreters:
if os.path.isfile(interpreter):
return [interpreter]
        else:
            return ['/usr/bin/env', 'java']
def start(self):
interpreter = self.select_interpreter()
print('Using interpreter: %s' % interpreter)
class_path = ['-classpath', Config().class_path]
model = ['-model', Config().model]
self.process = subprocess.Popen(
interpreter + class_path + tagger_options + model,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=None,
universal_newlines=True)
def tag(self, s):
with self.lock:
if not self.process:
self.start()
try:
self.process.stdin.write('')
self.process.stdin.flush()
except IOError:
self.start()
self.process.stdin.write(s + '\n')
self.process.stdin.flush()
return self.process.stdout.readline()
tagger = Tagger()
def parse(s):
s = tagger.tag(s) + ' '
"""
# Useful for debugging the lexer
lexer.input(s)
while True:
tok = lexer.token()
if not tok:
break
else:
print(tok)"""
return parser.parse(s, lexer=lexer)
| mit | -5,150,446,832,958,763,000 | 27.671958 | 85 | 0.540413 | false |
keenondrums/sovrin-node | sovrin_client/test/agent/test_anoncreds_claim_request.py | 1 | 2042 | from sovrin_client.test import waits
from stp_core.loop.eventually import eventually
from anoncreds.protocol.types import SchemaKey, ID
from sovrin_client.test.agent.messages import get_claim_request_libsovrin_msg
def test_claim_request_from_libsovrin_works(
aliceAgent,
aliceAcceptedFaber,
aliceAcceptedAcme,
acmeAgent,
emptyLooper,
faberAgent):
faberLink = aliceAgent.wallet.getConnection('Faber College')
name, version, origin = faberLink.availableClaims[0]
schemaKey = SchemaKey(name, version, origin)
timeout = waits.expectedClaimsReceived()
schema = faberAgent.issuer.wallet._schemasByKey[schemaKey]
async def create_claim_init_data_and_send_msg():
claimReq = await aliceAgent.prover.createClaimRequest(
schemaId=ID(schemaKey),
proverId='b1134a647eb818069c089e7694f63e6d',
reqNonRevoc=False)
assert claimReq
msg = get_claim_request_libsovrin_msg(claimReq, schema.seqId)
aliceAgent.signAndSendToLink(msg=msg, linkName=faberLink.name)
emptyLooper.run(eventually(
create_claim_init_data_and_send_msg, timeout=timeout))
# 2. check that claim is received from Faber
async def chkClaims():
claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey))
assert claim.primaryClaim
emptyLooper.run(eventually(chkClaims, timeout=timeout))
# 3. send proof to Acme
acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq(
"Job-Application", "Acme Corp")[0]
aliceAgent.sendProof(acme_link, acme_proof_req)
# 4. check that proof is verified by Acme
def chkProof():
internalId = acmeAgent.get_internal_id_by_nonce(
acme_link.request_nonce)
link = acmeAgent.wallet.getConnectionBy(internalId=internalId)
assert "Job-Application" in link.verifiedClaimProofs
timeout = waits.expectedClaimsReceived()
emptyLooper.run(eventually(chkProof, timeout=timeout))
| apache-2.0 | -1,131,301,748,622,422,400 | 35.464286 | 85 | 0.712537 | false |
safnuk/todone | todone/parser/tests/test_format.py | 1 | 3634 | from datetime import date
from dateutil.relativedelta import relativedelta
from unittest import TestCase
from todone.parser.format import (
ApplyFunctionFormat,
DateFormat,
PassthroughFormat,
)
class TestPassthroughFormat(TestCase):
def test_values_are_left_untouched(self):
formatter = PassthroughFormat()
value = ['a', 'b', 'C']
output = formatter.format(value)
self.assertEqual(value, output)
def test_empty_list_returns_empty_list(self):
formatter = PassthroughFormat()
output = formatter.format([])
self.assertEqual(output, [])
class TestApplyFunctionFormat(TestCase):
def test_format_function_is_applied_to_value(self):
class MockFormatFunction():
def __init__(self):
self.call_list = []
def mock_format(self, value):
self.call_list.append(value)
return value
mock_ff = MockFormatFunction()
formatter = ApplyFunctionFormat(format_function=mock_ff.mock_format)
value = ['arg1', 'arg2']
formatter.format(value)
self.assertEqual(mock_ff.call_list, [value, ])
class TestDateFormat(TestCase):
def test_no_date_offset_returns_max_date(self):
max_date = date(9999, 12, 31)
formatter = DateFormat()
match = MockDateMatch()
output = formatter.format([match, ])
self.assertEqual(output, max_date)
def test_day_offset_shifts_date_by_correct_amount(self):
offset = date.today()
offset += relativedelta(days=5)
formatter = DateFormat()
match = MockDateMatch(5, 'd')
output = formatter.format([match, ])
self.assertEqual(output, offset)
def test_week_offset_shifts_date_by_correct_amount(self):
offset = date.today()
offset += relativedelta(weeks=5)
formatter = DateFormat()
match = MockDateMatch(5, 'w')
output = formatter.format([match, ])
self.assertEqual(output, offset)
def test_month_offset_shifts_date_by_correct_amount(self):
offset = date.today()
offset += relativedelta(months=5)
formatter = DateFormat()
match = MockDateMatch(5, 'm')
output = formatter.format([match, ])
self.assertEqual(output, offset)
def test_year_offset_shifts_date_by_correct_amount(self):
offset = date.today()
offset += relativedelta(years=5)
formatter = DateFormat()
match = MockDateMatch(5, 'y')
output = formatter.format([match, ])
self.assertEqual(output, offset)
class MockDateMatch():
def __init__(self, offset=None, interval=None):
self.offset = offset
self.interval = interval
def groups(self):
if self.offset and self.interval:
return (
'due', '+',
'{}'.format(self.offset),
'{}'.format(self.interval)
)
else:
return ('due', )
def group(self, index):
if index == 0:
if self.offset and self.interval:
return 'due+{}{}'.format(self.offset, self.interval)
else:
return 'due'
if index == 1 or index == 'name':
return 'due'
if not (self.offset and self.interval):
raise IndexError
if index == 2 or index == 'sign':
return '+'
if index == 3 or index == 'offset':
return '{}'.format(self.offset)
if index == 4 or index == 'interval':
return '{}'.format(self.interval)
raise IndexError
| apache-2.0 | -5,121,249,632,408,522,000 | 30.877193 | 76 | 0.584755 | false |
AidanHolmes/gpstracker | trackergpio.py | 1 | 3793 | # Copyright 2017 Aidan Holmes
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as io
import time
from threading import Lock
class GPIOButton(object):
'Button object to represent a GPIO input'
UP = 1
DN = 0
btnbouncems = 200
def __init__(self, pin, mode = 'BCM'):
self.__lock = Lock()
self.mode = mode
# making this private as changing the pin will not work as
# expected since the pin is only setup once in __init__
self.__pin = pin
# Callback functions held for rise and fall. Although these
# can be changed when running I'm unsure if exceptions could
# be raised if set to None mid check of value. A Lock may be required
self.rise_fn = None
self.fall_fn = None
self.state = GPIOButton.UP # Track the state of the button
self.__heldtime = 0 # Time button has been last held
io.setwarnings(False)
if mode == 'BCM':
io.setmode(io.BCM)
else:
io.setmode(io.BOARD)
def __del__(self):
self.stop()
def callback_fall(self, channel):
self.__lock.acquire()
self.state = GPIOButton.DN
self.__heldtime = time.time()
self.__lock.release()
if self.fall_fn is not None: self.fall_fn(self.__pin)
def start(self):
# Do some custom setup before starting the detection
# Configure the pin
io.setup(self.__pin, io.IN, pull_up_down=io.PUD_UP)
# Detect button falls. Trying to do BOTH causes issues when also used with
# button bounce prevention. The rising edges can get missed when buttons
# are quickly pressed.
io.add_event_detect(self.__pin, io.FALLING, callback=self.callback_fall, bouncetime=GPIOButton.btnbouncems)
def stop(self):
io.remove_event_detect(self.__pin)
io.cleanup(self.__pin)
def tick(self,t):
self.__lock.acquire()
if io.input(self.__pin) == io.HIGH and self.state == GPIOButton.DN:
# Reset the heldtime button. This also indicates
# that the last state was DN so trigger the callback
self.__heldtime = 0
self.state = GPIOButton.UP
if self.rise_fn is not None: self.rise_fn(self.__pin)
self.__lock.release()
@property
def heldtime(self):
self.__lock.acquire()
if self.__heldtime == 0: val = 0
else: val = time.time() - self.__heldtime
self.__lock.release()
return val
class IndicatorButton(GPIOButton):
'Button using 2 GPIO channels to read button state and indicate state'
def __init__(self, pin, ind_pin, mode = 'BCM'):
GPIOButton.__init__(self, pin, mode)
self.__ind_pin = ind_pin
self.__indicator = True
def start(self):
GPIOButton.start(self)
io.setup(self.__ind_pin, io.OUT)
def stop(self):
GPIOButton.stop(self)
io.cleanup(self.__ind_pin)
@property
def indicator(self):
return self.__indicator
@indicator.setter
def indicator(self, ind):
self.__indicator = ind
if ind:
io.output(self.__ind_pin, io.HIGH)
else:
io.output(self.__ind_pin, io.LOW)
| apache-2.0 | -7,886,770,358,443,798,000 | 31.418803 | 115 | 0.613762 | false |
szupie/emoji-to-english | localisationExtractor.py | 1 | 1505 | # Localisation files can be found at:
# https://github.com/unicode-org/cldr/tree/master/common/annotations
import argparse
import xml.etree.ElementTree as ET
import os, json
def getFile(path):
dir = os.path.dirname(__file__)
return os.path.join(dir, path)
parser = argparse.ArgumentParser()
parser.add_argument("src", help="directory containing CLDR annotations xml files")
args = parser.parse_args()
srcDir = getFile(args.src)
langs = []
for filename in os.listdir(srcDir):
locale = os.path.splitext(filename)[0]
langs.append(locale)
tree = ET.parse(os.path.join(srcDir, filename))
annotations = tree.getroot().find('annotations')
dictionary = {}
for annotation in annotations.iter('annotation'):
character = annotation.get('cp')
typeAttr = annotation.get('type')
# Use keywords if no other annotations available
if character not in dictionary:
dictionary[character] = annotation.text
# Use short names when available
if typeAttr == 'tts':
dictionary[character] = annotation.text
filePath = getFile('./addon/_locales/{}/messages.json'.format(locale))
os.makedirs(os.path.dirname(filePath), exist_ok=True)
formattedDictionary = {
character: {
'message': dictionary[character]
} for character in dictionary
}
with open(filePath, 'w') as f:
jsonString = json.dumps(formattedDictionary, ensure_ascii=False, sort_keys=True)
f.write(jsonString)
print("Written to", filePath)
print('{} annotation files parsed: {}'.format(len(langs), ', '.join(langs))) | unlicense | -2,759,959,282,878,903,300 | 26.381818 | 82 | 0.72691 | false |
JNU-Include/CNN | Test/lab-12-2-char-seq-rnn2.py | 1 | 1109 | # Lab 12 Character Sequence RNN
from lib.rnn_core2 import RNNCore2
class XXX (RNNCore2):
def init_network(self):
self.set_placeholder(self.sequence_length) #15
hypothesis = self.rnn_lstm_cell(self.X, self.num_classes, self.hidden_size, self.batch_size)
self.set_hypothesis(hypothesis)
self.set_cost_function(self.batch_size, self.sequence_length)
self.set_optimizer(0.1)
gildong = XXX()
ms = " If you want you"
xd, yd = gildong.get_data(ms)
print(xd)
print(yd)
gildong.learn(xd, yd, 400, 20) #3000
gildong.predict(xd)
gildong.show_error()
'''
0 loss: 2.29895 Prediction: nnuffuunnuuuyuy
1 loss: 2.29675 Prediction: nnuffuunnuuuyuy
2 loss: 2.29459 Prediction: nnuffuunnuuuyuy
3 loss: 2.29247 Prediction: nnuffuunnuuuyuy
...
1413 loss: 1.3745 Prediction: if you want you
1414 loss: 1.3743 Prediction: if you want you
1415 loss: 1.3741 Prediction: if you want you
1416 loss: 1.3739 Prediction: if you want you
1417 loss: 1.3737 Prediction: if you want you
1418 loss: 1.37351 Prediction: if you want you
1419 loss: 1.37331 Prediction: if you want you
'''
| mit | -6,704,559,214,281,357,000 | 23.644444 | 100 | 0.719567 | false |
kdmurray91/khmer | tests/test_hashbits.py | 1 | 19976 | #
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: [email protected]
#
# pylint: disable=missing-docstring,protected-access
import khmer
from khmer import ReadParser
from screed.fasta import fasta_iter
import screed
import khmer_tst_utils as utils
from nose.plugins.attrib import attr
def teardown():
utils.cleanup()
def test__get_set_tag_density():
ht = khmer.new_hashbits(32, 1, 1)
orig = ht._get_tag_density()
assert orig != 2
ht._set_tag_density(2)
assert ht._get_tag_density() == 2
def test_n_occupied_1():
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 100000 # size of hashtable
N_HT = 1 # number of hashtables
# test modified c++ n_occupied code
ht1 = khmer.new_hashbits(K, HT_SIZE, N_HT)
for n, record in enumerate(fasta_iter(open(filename))):
ht1.consume(record['sequence'])
# this number calculated independently
assert ht1.n_occupied() == 3877
def test_bloom_python_1():
# test python code to count unique kmers using bloom filter
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 100000 # size of hashtable
N_HT = 3 # number of hashtables
ht2 = khmer.new_hashbits(K, HT_SIZE, N_HT)
n_unique = 0
for n, record in enumerate(fasta_iter(open(filename))):
sequence = record['sequence']
seq_len = len(sequence)
for n in range(0, seq_len + 1 - K):
kmer = sequence[n:n + K]
if (not ht2.get(kmer)):
n_unique += 1
ht2.count(kmer)
assert n_unique == 3960
assert ht2.n_occupied() == 3882
assert ht2.n_unique_kmers() == 3960 # this number equals to n_unique
def test_bloom_c_1():
# test c++ code to count unique kmers using bloom filter
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 100000 # size of hashtable
N_HT = 3 # number of hashtables
ht3 = khmer.new_hashbits(K, HT_SIZE, N_HT)
for n, record in enumerate(fasta_iter(open(filename))):
ht3.consume(record['sequence'])
assert ht3.n_occupied() == 3882
assert ht3.n_unique_kmers() == 3960
def test_n_occupied_2(): # simple one
K = 4
HT_SIZE = 10 # use 11
N_HT = 1
ht1 = khmer.new_hashbits(K, HT_SIZE, N_HT)
ht1.count('AAAA') # 00 00 00 00 = 0
assert ht1.n_occupied() == 1
ht1.count('ACTG') # 00 10 01 11 =
assert ht1.n_occupied() == 2
ht1.count('AACG') # 00 00 10 11 = 11 # collision 1
assert ht1.n_occupied() == 2
ht1.count('AGAC') # 00 11 00 10 # collision 2
assert ht1.n_occupied() == 2
def test_bloom_c_2(): # simple one
K = 4
HT_SIZE = 10 # use 11
N_HT1 = 1 # hashtable size = 11
N_HT2 = 2 # hashtable size = 11,13
# use only 1 hashtable, no bloom filter
ht1 = khmer.new_hashbits(K, HT_SIZE, N_HT1)
ht1.count('AAAA') # 00 00 00 00 = 0
ht1.count('ACTG') # 00 10 01 11 =
assert ht1.n_unique_kmers() == 2
ht1.count('AACG') # 00 00 10 11 = 11 # collision with 1st kmer
assert ht1.n_unique_kmers() == 2
ht1.count('AGAC') # 00 11 00 10 # collision with 2nd kmer
assert ht1.n_unique_kmers() == 2
# use two hashtables with 11,13
ht2 = khmer.new_hashbits(K, HT_SIZE, N_HT2)
ht2.count('AAAA') # 00 00 00 00 = 0
ht2.count('ACTG') # 00 10 01 11 = 2*16 +4 +3 = 39
assert ht2.n_unique_kmers() == 2
ht2.count('AACG') # 00 00 10 11 = 11 # collision with only 1st kmer
assert ht2.n_unique_kmers() == 3
ht2.count('AGAC') # 00 11 00 10 3*16 +2 = 50
# collision with both 2nd and 3rd kmers
assert ht2.n_unique_kmers() == 3
def test_filter_if_present():
ht = khmer.new_hashbits(32, 2, 2)
maskfile = utils.get_test_data('filter-test-A.fa')
inputfile = utils.get_test_data('filter-test-B.fa')
outfile = utils.get_temp_filename('filter')
ht.consume_fasta(maskfile)
ht.filter_if_present(inputfile, outfile)
records = list(fasta_iter(open(outfile)))
assert len(records) == 1
assert records[0]['name'] == '3'
def test_combine_pe():
inpfile = utils.get_test_data('combine_parts_1.fa')
ht = khmer.new_hashbits(32, 1, 1)
ht.consume_partitioned_fasta(inpfile)
assert ht.count_partitions() == (2, 0)
s1 = "CATGCAGAAGTTCCGCAACCATACCGTTCAGT"
pid1 = ht.get_partition_id(s1)
s2 = "CAAATGTACATGCACTTAAAATCATCCAGCCG"
pid2 = ht.get_partition_id(s2)
assert pid1 == 2
assert pid2 == 80293
ht.join_partitions(pid1, pid2)
pid1 = ht.get_partition_id(s1)
pid2 = ht.get_partition_id(s2)
assert pid1 == pid2
assert ht.count_partitions() == (1, 0)
def test_load_partitioned():
inpfile = utils.get_test_data('combine_parts_1.fa')
ht = khmer.new_hashbits(32, 1, 1)
ht.consume_partitioned_fasta(inpfile)
assert ht.count_partitions() == (2, 0)
s1 = "CATGCAGAAGTTCCGCAACCATACCGTTCAGT"
assert ht.get(s1)
s2 = "CAAATGTACATGCACTTAAAATCATCCAGCCG"
assert ht.get(s2)
s3 = "CATGCAGAAGTTCCGCAACCATACCGTTCAGTTCCTGGTGGCTA"[-32:]
assert ht.get(s3)
def test_count_within_radius_simple():
inpfile = utils.get_test_data('all-A.fa')
ht = khmer.new_hashbits(4, 2, 2)
print ht.consume_fasta(inpfile)
n = ht.count_kmers_within_radius('AAAA', 1)
assert n == 1
n = ht.count_kmers_within_radius('AAAA', 10)
assert n == 1
def test_count_within_radius_big():
inpfile = utils.get_test_data('random-20-a.fa')
ht = khmer.new_hashbits(20, 1e5, 4)
ht.consume_fasta(inpfile)
n = ht.count_kmers_within_radius('CGCAGGCTGGATTCTAGAGG', int(1e6))
assert n == 3960
ht = khmer.new_hashbits(21, 1e5, 4)
ht.consume_fasta(inpfile)
n = ht.count_kmers_within_radius('CGCAGGCTGGATTCTAGAGGC', int(1e6))
assert n == 39
def test_count_kmer_degree():
inpfile = utils.get_test_data('all-A.fa')
ht = khmer.new_hashbits(4, 2, 2)
ht.consume_fasta(inpfile)
assert ht.kmer_degree('AAAA') == 2
assert ht.kmer_degree('AAAT') == 1
assert ht.kmer_degree('AATA') == 0
assert ht.kmer_degree('TAAA') == 1
def test_save_load_tagset():
ht = khmer.new_hashbits(32, 1, 1)
outfile = utils.get_temp_filename('tagset')
ht.add_tag('A' * 32)
ht.save_tagset(outfile)
ht.add_tag('G' * 32)
ht.load_tagset(outfile) # implicitly => clear_tags=True
ht.save_tagset(outfile)
# if tags have been cleared, then the new tagfile will be larger (34 bytes)
# else smaller (26 bytes).
fp = open(outfile, 'rb')
data = fp.read()
fp.close()
assert len(data) == 26, len(data)
def test_save_load_tagset_noclear():
ht = khmer.new_hashbits(32, 1, 1)
outfile = utils.get_temp_filename('tagset')
ht.add_tag('A' * 32)
ht.save_tagset(outfile)
ht.add_tag('G' * 32)
ht.load_tagset(outfile, False) # set clear_tags => False; zero tags
ht.save_tagset(outfile)
# if tags have been cleared, then the new tagfile will be large (34 bytes);
# else small (26 bytes).
fp = open(outfile, 'rb')
data = fp.read()
fp.close()
assert len(data) == 34, len(data)
def test_stop_traverse():
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
# without tagging/joining across consume, this breaks into two partition;
# with, it is one partition.
ht.add_stop_tag('TTGCATACGTTGAGCCAGCG')
ht.consume_fasta_and_tag(filename) # DO NOT join reads across stoptags
subset = ht.do_subset_partition(0, 0, True)
ht.merge_subset(subset)
n, _ = ht.count_partitions()
assert n == 2, n
def test_tag_across_stoptraverse():
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
# without tagging/joining across consume, this breaks into two partition;
# with, it is one partition.
ht.add_stop_tag('CCGAATATATAACAGCGACG')
ht.consume_fasta_and_tag_with_stoptags(filename) # DO join reads across
subset = ht.do_subset_partition(0, 0)
n, _ = ht.count_partitions()
assert n == 99 # reads only connected by traversal...
n, _ = ht.subset_count_partitions(subset)
assert n == 2 # but need main to cross stoptags.
ht.merge_subset(subset)
n, _ = ht.count_partitions() # ta-da!
assert n == 1, n
def test_notag_across_stoptraverse():
filename = utils.get_test_data('random-20-a.fa')
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
# connecting k-mer at the beginning/end of a read: breaks up into two.
ht.add_stop_tag('TTGCATACGTTGAGCCAGCG')
ht.consume_fasta_and_tag_with_stoptags(filename)
subset = ht.do_subset_partition(0, 0)
ht.merge_subset(subset)
n, _ = ht.count_partitions()
assert n == 2, n
def test_find_stoptags():
ht = khmer.new_hashbits(5, 1, 1)
ht.add_stop_tag("AAAAA")
assert ht.identify_stoptags_by_position("AAAAA") == [0]
assert ht.identify_stoptags_by_position("AAAAAA") == [0, 1]
assert ht.identify_stoptags_by_position("TTTTT") == [0]
assert ht.identify_stoptags_by_position("TTTTTT") == [0, 1]
def test_find_stoptags2():
ht = khmer.new_hashbits(4, 1, 1)
ht.add_stop_tag("ATGC")
x = ht.identify_stoptags_by_position("ATGCATGCGCAT")
assert x == [0, 2, 4, 8], x
def test_get_ksize():
kh = khmer.new_hashbits(22, 1, 1)
assert kh.ksize() == 22
def test_get_hashsizes():
kh = khmer.new_hashbits(22, 100, 4)
assert kh.hashsizes() == [101, 103, 107, 109], kh.hashsizes()
def test_extract_unique_paths_0():
kh = khmer.new_hashbits(10, 4, 4)
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
assert x == ['ATGGAGAGACACAGATAGACAGGAGTGGCGATG']
kh.consume('ATGGAGAGACACAGATAGACAGGAGTGGCGATG')
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
assert not x
def test_extract_unique_paths_1():
kh = khmer.new_hashbits(10, 4, 4)
kh.consume('AGTGGCGATG')
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
print x
assert x == ['ATGGAGAGACACAGATAGACAGGAGTGGCGAT'] # all but the last k-mer
def test_extract_unique_paths_2():
kh = khmer.new_hashbits(10, 4, 4)
kh.consume('ATGGAGAGAC')
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
print x
assert x == ['TGGAGAGACACAGATAGACAGGAGTGGCGATG'] # all but the 1st k-mer
def test_extract_unique_paths_3():
kh = khmer.new_hashbits(10, 4, 4)
kh.consume('ATGGAGAGAC')
kh.consume('AGTGGCGATG')
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
print x
# all but the 1st/last k-mer
assert x == ['TGGAGAGACACAGATAGACAGGAGTGGCGAT']
def test_extract_unique_paths_4():
kh = khmer.new_hashbits(10, 4, 4)
kh.consume('ATGGAGAGAC')
kh.consume('AGTGGCGATG')
kh.consume('ATAGACAGGA')
x = kh.extract_unique_paths('ATGGAGAGACACAGATAGACAGGAGTGGCGATG', 10, 1)
print x
assert x == ['TGGAGAGACACAGATAGACAGG', 'TAGACAGGAGTGGCGAT']
def test_find_unpart():
filename = utils.get_test_data('random-20-a.odd.fa')
filename2 = utils.get_test_data('random-20-a.even.fa')
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
ht.consume_fasta_and_tag(filename)
subset = ht.do_subset_partition(0, 0)
ht.merge_subset(subset)
n, _ = ht.count_partitions()
assert n == 49
ht.find_unpart(filename2, True, False)
n, _ = ht.count_partitions()
assert n == 1, n # all sequences connect
def test_find_unpart_notraverse():
filename = utils.get_test_data('random-20-a.odd.fa')
filename2 = utils.get_test_data('random-20-a.even.fa')
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
ht.consume_fasta_and_tag(filename)
subset = ht.do_subset_partition(0, 0)
ht.merge_subset(subset)
n, _ = ht.count_partitions()
assert n == 49
ht.find_unpart(filename2, False, False) # <-- don't traverse
n, _ = ht.count_partitions()
assert n == 99, n # all sequences disconnected
def test_find_unpart_fail():
filename = utils.get_test_data('random-20-a.odd.fa')
filename2 = utils.get_test_data('random-20-a.odd.fa') # <- switch to odd
K = 20 # size of kmer
HT_SIZE = 1e4 # size of hashtable
N_HT = 3 # number of hashtables
ht = khmer.new_hashbits(K, HT_SIZE, N_HT)
ht.consume_fasta_and_tag(filename)
subset = ht.do_subset_partition(0, 0)
ht.merge_subset(subset)
n, _ = ht.count_partitions()
assert n == 49
ht.find_unpart(filename2, True, False)
n, _ = ht.count_partitions()
assert n == 49, n # only 49 sequences worth of tags
def test_simple_median():
hi = khmer.new_hashbits(6, 2, 2)
(median, average, stddev) = hi.get_median_count("AAAAAA")
print median, average, stddev
assert median == 0
assert average == 0.0
assert stddev == 0.0
hi.consume("AAAAAA")
(median, average, stddev) = hi.get_median_count("AAAAAA")
print median, average, stddev
assert median == 1
assert average == 1.0
assert stddev == 0.0
def test_badget():
hbts = khmer.new_hashbits(6, 1e6, 1)
dna = "AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAG"
hbts.consume(dna)
assert hbts.get("AGCTTT") == 1
assert hbts.get("GATGAG") == 0
try:
hbts.get("AGCTT")
assert 0, "this should fail"
except ValueError as err:
print str(err)
#
def test_load_notexist_should_fail():
savepath = utils.get_temp_filename('temphashbitssave0.ht')
hi = khmer.new_counting_hash(12, 2)
try:
hi.load(savepath)
assert 0, "load should fail"
except IOError:
pass
def test_load_truncated_should_fail():
inpath = utils.get_test_data('random-20-a.fa')
savepath = utils.get_temp_filename('temphashbitssave0.ct')
hi = khmer.new_counting_hash(12, 1000)
hi.consume_fasta(inpath)
hi.save(savepath)
fp = open(savepath, 'rb')
data = fp.read()
fp.close()
fp = open(savepath, 'wb')
fp.write(data[:1000])
fp.close()
hi = khmer.new_counting_hash(12, 1)
try:
hi.load(savepath)
assert 0, "load should fail"
except IOError as e:
print str(e)
def test_save_load_tagset_notexist():
ht = khmer.new_hashbits(32, 1, 1)
outfile = utils.get_temp_filename('tagset')
try:
ht.load_tagset(outfile)
assert 0, "this test should fail"
except IOError as e:
print str(e)
def test_save_load_tagset_trunc():
ht = khmer.new_hashbits(32, 1, 1)
outfile = utils.get_temp_filename('tagset')
ht.add_tag('A' * 32)
ht.add_tag('G' * 32)
ht.save_tagset(outfile)
# truncate tagset file...
fp = open(outfile, 'rb')
data = fp.read()
fp.close()
fp = open(outfile, 'wb')
fp.write(data[:26])
fp.close()
# try loading it...
try:
ht.load_tagset(outfile)
assert 0, "this test should fail"
except IOError:
pass
# to build the test files used below, add 'test' to this function
# and then look in /tmp. You will need to tweak the version info in
# khmer.hh in order to create "bad" versions, of course. -CTB
def _build_testfiles():
# hashbits file
inpath = utils.get_test_data('random-20-a.fa')
hi = khmer.new_hashbits(12, 2)
hi.consume_fasta(inpath)
hi.save('/tmp/goodversion-k12.ht')
# tagset file
ht = khmer.new_hashbits(32, 1, 1)
ht.add_tag('A' * 32)
ht.add_tag('G' * 32)
ht.save_tagset('/tmp/goodversion-k32.tagset')
# stoptags file
fakelump_fa = utils.get_test_data('fakelump.fa')
ht = khmer.new_hashbits(32, 4, 4)
ht.consume_fasta_and_tag(fakelump_fa)
subset = ht.do_subset_partition(0, 0)
ht.merge_subset(subset)
EXCURSION_DISTANCE = 40
EXCURSION_KMER_THRESHOLD = 82
EXCURSION_KMER_COUNT_THRESHOLD = 1
counting = khmer.new_counting_hash(32, 4, 4)
ht.repartition_largest_partition(None, counting,
EXCURSION_DISTANCE,
EXCURSION_KMER_THRESHOLD,
EXCURSION_KMER_COUNT_THRESHOLD)
ht.save_stop_tags('/tmp/goodversion-k32.stoptags')
def test_hashbits_file_version_check():
ht = khmer.new_hashbits(12, 1, 1)
inpath = utils.get_test_data('badversion-k12.ht')
try:
ht.load(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_hashbits_file_type_check():
kh = khmer.new_counting_hash(12, 1, 1)
savepath = utils.get_temp_filename('tempcountingsave0.ct')
kh.save(savepath)
ht = khmer.new_hashbits(12, 1, 1)
try:
ht.load(savepath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_stoptags_file_version_check():
ht = khmer.new_hashbits(32, 1, 1)
inpath = utils.get_test_data('badversion-k32.stoptags')
try:
ht.load_stop_tags(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_stoptags_ksize_check():
ht = khmer.new_hashbits(31, 1, 1)
inpath = utils.get_test_data('goodversion-k32.stoptags')
try:
ht.load_stop_tags(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_stop_tags_filetype_check():
ht = khmer.new_hashbits(31, 1, 1)
inpath = utils.get_test_data('goodversion-k32.tagset')
try:
ht.load_stop_tags(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_tagset_file_version_check():
ht = khmer.new_hashbits(32, 1, 1)
inpath = utils.get_test_data('badversion-k32.tagset')
try:
ht.load_tagset(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_tagset_ksize_check():
ht = khmer.new_hashbits(31, 1, 1)
inpath = utils.get_test_data('goodversion-k32.tagset')
try:
ht.load_tagset(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_tagset_filetype_check():
ht = khmer.new_hashbits(31, 1, 1)
inpath = utils.get_test_data('goodversion-k32.stoptags')
try:
ht.load_tagset(inpath)
assert 0, "this should fail"
except IOError as e:
print str(e)
def test_bad_primes_list():
try:
        countingtable = khmer._Hashbits(31, ["a", "b", "c"], 1)
assert 0, "Bad primes list should fail"
except TypeError as e:
print str(e)
def test_consume_absentfasta_with_reads_parser():
presencetable = khmer.new_hashbits(31, 1, 1)
try:
presencetable.consume_fasta_with_reads_parser()
assert 0, "this should fail"
except TypeError as err:
print str(err)
try:
readparser = ReadParser(utils.get_test_data('empty-file'))
presencetable.consume_fasta_with_reads_parser(readparser)
assert 0, "this should fail"
except IOError as err:
print str(err)
except ValueError as err:
print str(err)
| bsd-3-clause | 1,501,069,062,629,945,900 | 25.146597 | 79 | 0.621596 | false |
isaacyeaton/global-dyn-non-equil-gliding | Code/script_epsilon.py | 1 | 2676 | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 21 20:34:20 2016
%reset -f
%pylab
%clear
%load_ext autoreload
%autoreload 2
@author: isaac
"""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import plots
reload(plots)
from plots import bmap, rcj, tl, tickout, four_plot
# %% Get some colors
import brewer2mpl
new_bmap = brewer2mpl.get_map('Set1', 'Qualitative', 9).mpl_colors
new_bmap.pop(5)
more_colors = brewer2mpl.get_map('Set2', 'Qualitative', 8).mpl_colors
new_bmap += more_colors
# %%
d = pd.read_csv('./Data/epsilon.csv')
mass = d[u'Mass (g)'].values
eps = d[u'epsilon'].values
eps_snake = d[u'eps from c from sqrt(Ws)'].values
Ws = d[u'Wing loading (N/m^2)']
all_labels = d[u'Label']
labels = d[u'Label'].unique().tolist()
markers = ['o', 'v', '^', 'p', 's', 'd']
fig, ax = plt.subplots()
eps_const = .04597
eps_const = (1.18 * 9.81) / (2 * 10**2.4)
eps_const = (1.18 * 9.81) / (2 * np.e**2.1)
eps_const = (1.18 * 9.81) / (2 * 2.1)
ax.axhline(eps_const, color='gray', lw=1)
m_min = 120
mth = np.linspace(m_min, 1400, 10)
eth = mth**(.11)
#eth -= eth[0]
#eth += .01
offset = eth[0] - .01
eth = 10**(np.log10(eth) - offset)
mgeom = np.r_[m_min, 1400]
egeom = np.r_[.01, .01] # np.r_[eps.mean(), eps.mean()]
#ax.loglog(mth, 10**(np.log10(eth) - 2.25), c='gray')
ax.loglog(mth, eth, c='gray')
ax.loglog(mgeom, egeom, c='gray')
for i in np.arange(len(labels)):
label = labels[i]
marker = markers[i]
idx = np.where(all_labels == label)[0]
# ax.loglog(mass[idx], np.sqrt(eps[idx]), marker, label=label)
ax.loglog(mass[idx], eps[idx], marker, c=new_bmap[i], ms=10,
mew=0, mec='w', label=label)
# if label == 'Snake':
# ax.loglog(mass[idx], np.sqrt(eps_snake[idx]), marker)
ax.legend(loc='upper right', frameon=True, framealpha=.2, ncol=2)
ax.set_xlabel('mass (g)', fontsize=16)
ax.set_ylabel(r'$\epsilon$ ', fontsize=16, rotation=0)
#ax.set_aspect('equal', adjustable='box')
[ttl.set_size(16) for ttl in ax.get_xticklabels()]
[ttl.set_size(16) for ttl in ax.get_yticklabels()]
# https://stackoverflow.com/questions/21920233/matplotlib-log-scale-tick-label-number-formatting
#from matplotlib.ticker import ScalarFormatter
#for axis in [ax.xaxis, ax.yaxis]:
# axis.set_major_formatter(ScalarFormatter())
from matplotlib import ticker
ax.xaxis.set_major_formatter(ticker.FuncFormatter(lambda y,pos: ('{{:.{:1d}f}}'.format(int(np.maximum(-np.log10(y),0)))).format(y)))
ax.yaxis.set_major_formatter(ticker.FuncFormatter(lambda y,pos: ('{{:.{:1d}f}}'.format(int(np.maximum(-np.log10(y),0)))).format(y)))
rcj(ax)
tl(fig)
fig.savefig('Figures/figure8_epsilon.pdf', transparent=True)
| mit | 2,785,185,356,967,121,400 | 25.49505 | 132 | 0.646487 | false |
Lana-B/Pheno4T | madanalysis/selection/condition_connector.py | 1 | 1788 | ################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <[email protected]>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.enumeration.connector_type import ConnectorType
from madanalysis.enumeration.operator_type import OperatorType
from madanalysis.enumeration.combination_type import CombinationType
import logging
class ConditionConnector():
def __init__(self,name):
if name=="or":
self.value=ConnectorType.OR
elif name=="and":
self.value=ConnectorType.AND
def GetStringDisplay(self):
if self.value==ConnectorType.OR:
return "or"
elif self.value==ConnectorType.AND:
return "and"
def GetStringCode(self):
if self.value==ConnectorType.OR:
return "||"
elif self.value==ConnectorType.AND:
return "&&"
| gpl-3.0 | 7,740,569,640,907,614,000 | 35.489796 | 80 | 0.630872 | false |
osperlabs/logbeam | logbeam/__init__.py | 1 | 6060 | import logging
from threading import Event
import boto3
from botocore.exceptions import ClientError
from cwlogs.push import EventBatchPublisher, EventBatch, LogEvent
from cwlogs.threads import BaseThread
from six.moves import queue as Queue
logger = logging.getLogger(__name__)
class BatchedCloudWatchSink(BaseThread):
"""A sink for LogEvent objects which batches and uploads to CloudWatch logs
It relies on the LogEvent, EventBatch and EventBatchPublisher from the
    awscli-cwlogs plugin (cwlogs package). The latter two do the heavy
    lifting - all this class does is add items to batches and submit batches
to the EventBatchPublisher queue for publishing when they are full.
"""
def __init__(
self,
logs_service,
log_group_name,
log_stream_name,
buffer_duration,
batch_count,
batch_size):
super(BatchedCloudWatchSink, self).__init__(Event())
self.logs_service = logs_service
self.publisher_stop_flag = Event()
self.group_stop_flag = Event()
# Incoming LogEvents enter this queue via self.add_event()
self.event_queue = Queue.Queue()
# Completed EventBatches get put onto this queue, for the
# EventBatchPublisher to upload
self.publisher_queue = Queue.Queue()
# The publisher thread, will be started and stopped with this thread
self.publisher = EventBatchPublisher(
self.publisher_stop_flag,
self.publisher_queue,
logs_service,
log_group_name,
log_stream_name
)
self.publisher.group_stop_flag = self.group_stop_flag
# Get the nextSequenceToken for this log stream from AWS
# otherwise the first batch upload will fail (though it would succeed
# when it automatically retries)
self.publisher.sequence_token = nextSequenceToken(
logs_service,
log_group_name,
log_stream_name
)
self.buffer_duration = buffer_duration
self.batch_count = batch_count
self.batch_size = batch_size
self.event_batch = None
def shutdown(self):
logger.info('CloudWatch sink shutting down gracefully')
# Only shutdown ourselves here. The publisher thread should be shut
# down by the end of the _run(), that this flag breaks the loop of
self.stop_flag.set()
self.join()
self.publisher.join()
logger.info('CloudWatch sink shutdown complete')
def _add_event_to_batch(self, event):
if self.event_batch is None:
self.event_batch = EventBatch(
self.buffer_duration,
self.batch_count,
self.batch_size
)
return self.event_batch.add_event(event)
def _send_batch_to_publisher(self, force=False):
if self.event_batch is None:
return
if force or self.event_batch.should_batch_be_published():
self.event_batch.force_publish = (
force or self.event_batch.force_publish
)
self.publisher_queue.put(self.event_batch)
self.event_batch = None
def _run(self):
self.publisher.start()
logger.info('CloudWatch Sink thread starting')
while True:
try:
event = self.event_queue.get(False)
add_status = self._add_event_to_batch(event)
if add_status == 0:
self._send_batch_to_publisher(force=True)
self._add_event_to_batch(event)
except Queue.Empty:
if self._exit_needed():
self._send_batch_to_publisher(force=True)
break
else:
self.stop_flag.wait(2)
self._send_batch_to_publisher()
logger.info('Asking publisher thread to shut down...')
self.publisher_stop_flag.set()
def on_run_failed(self, e):
self.group_stop_flag.set()
self.publisher_stop_flag.set()
def add_event(self, event):
self.event_queue.put(event)
class CloudWatchLogsHandler(logging.Handler):
def __init__(
self,
log_group_name,
log_stream_name,
buffer_duration=10000,
batch_count=10,
batch_size=1024 * 1024,
logs_client=None,
*args, **kwargs):
super(CloudWatchLogsHandler, self).__init__(*args, **kwargs)
self.prev_event = None
if logs_client is None:
logs_client = boto3.client('logs')
self.sink = BatchedCloudWatchSink(
logs_client,
log_group_name,
log_stream_name,
buffer_duration,
batch_count,
batch_size
)
self.sink.start()
logger.info('CloudWatch Sink started...')
def logrecord_to_logevent(self, record):
return LogEvent(
timestamp=int(record.created * 1000),
message=self.format(record),
prev_event=self.prev_event,
)
def emit(self, record):
event = self.logrecord_to_logevent(record)
self.prev_event = event
self.sink.add_event(event)
def close(self):
self.sink.shutdown()
def nextSequenceToken(cwl, log_group_name, log_stream_name):
try:
res = cwl.describe_log_streams(
logGroupName=log_group_name,
logStreamNamePrefix=log_stream_name,
)
except ClientError:
return None
try:
matching_streams = res['logStreams']
        # As the search is prefix-based, we need to make sure we're looking
# at a log stream with exactly the correct name
stream, = (
x for x in matching_streams
if x['logStreamName'] == log_stream_name
)
return stream['uploadSequenceToken']
except (KeyError, IndexError, ValueError):
return None
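# Minimal usage sketch: the log group/stream and logger names below are
# placeholders, and boto3 must be able to find AWS credentials and region.
#
#     handler = CloudWatchLogsHandler('example-group', 'example-stream')
#     log = logging.getLogger('example')
#     log.addHandler(handler)
#     log.warning('hello from logbeam')
#     handler.close()  # flushes any buffered batches before exit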
| mit | -1,902,479,632,143,394,600 | 32.480663 | 79 | 0.591584 | false |
SouthPatron/GojiDNS | site/gojidns/goji/admin.py | 1 | 1033 | # GojiDNS - Developed by South Patron CC - http://www.southpatron.com/
#
# This file is part of GojiDNS.
#
# GojiDNS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GojiDNS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GojiDNS. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from goji.models import *
admin.site.register( AuthenticationCode )
admin.site.register( Profile )
admin.site.register( EmailChangeRequest )
admin.site.register( Domain )
admin.site.register( Resource )
admin.site.register( NameserverStatus )
admin.site.register( Faq )
| gpl-3.0 | 1,672,789,506,817,689,600 | 33.433333 | 70 | 0.766699 | false |
bettse/hue | light.py | 1 | 4728 | #!/usr/bin/python
import requests
from time import sleep
import json
import ConfigParser
import os
from os.path import dirname, join
CONFIGFILE = join(dirname(__file__), 'hue.cfg')
API_URL = 'https://client-eastwood-dot-hue-prod-us.appspot.com/api/nupnp'
class Light:
def __init__(self, ip=None, secret=None, lightnum=None, debug=False):
self.secret = None
self.debug = debug
        self.ip = ip or self.getIP()
#If a config is available, default to it
if os.path.isfile(CONFIGFILE):
config = ConfigParser.RawConfigParser()
config.read(CONFIGFILE)
if config.has_option('hue', 'secret'):
self.secret = config.get('hue', 'secret')
if config.has_option('hue', 'light'):
self.lightnum = config.get('hue', 'light')
else:
self.lightnum = 1
else:
            print "No config found"
            self.lightnum = 1
#Fill in if parameter was set
if(secret): self.secret = secret
if(lightnum): self.lightnum = lightnum
if(not self.secret):
self.register()
def getIP(self):
url = API_URL
r = requests.get(url)
data = json.loads(r.content)
if(len(data) > 0 and data[0].has_key('internalipaddress')):
return data[0]['internalipaddress']
else:
import ssdp
from urlparse import urlparse
url = ssdp.client()
if url:
return urlparse(url).hostname
else:
print "Can't find bridge"
return None
return None
def register(self):
secret = None
while not secret:
body = json.dumps({'username': 'bettseLight', 'devicetype': 'python'})
url = 'http://%s/api/' % (self.ip)
r = requests.post(url, data=body)
data = json.loads(r.content)[0]
if(data.has_key('success')):
secret = data['success']['username']
print "Key is %s" % secret
if(data.has_key('error')):
print "Please push the button on the Phlips Hue Hub"
sleep(0.5)
self.secret = secret
configfile = open(CONFIGFILE, 'w+')
config = ConfigParser.RawConfigParser()
if not config.has_section("hue"):
config.add_section("hue")
config.set('hue', 'secret', self.secret)
config.set('hue', 'light', self.lightnum)
config.set('hue', 'ip', self.ip)
config.write(configfile)
def setstate(self, body):
if(type(body) != str):
body = json.dumps(body)
if(self.debug):
print "Send %s to light %s" % (body, self.lightnum)
url = 'http://%s/api/%s/lights/%s/state' % (self.ip, self.secret, self.lightnum)
r = requests.put(url, data=body)
def brightness(self, i):
if(i == 'full'):
i = 254
if(int(i) > 254):
i = 254
bri = json.dumps({'bri': int(i), 'on': True})
self.setstate(bri)
def on(self):
body = json.dumps({'on': True})
self.setstate(body)
def off(self):
body = json.dumps({'on': False})
self.setstate(body)
def number(self):
return self.lightnum
def getstate(self):
url = 'http://%s/api/%s/lights/%s/' % (self.ip, self.secret, self.lightnum)
r = requests.get(url)
return json.loads(r.content)['state']
def colortemp(self, i):
body = json.dumps({'colormode': 'ct', 'ct': i})
self.setstate(body)
def concentrate(self):
body = json.dumps({u'on': True, u'hue': 13122, u'colormode': u'ct', u'bri': 219, u'sat': 211, u'ct': 233})
self.setstate(body)
def energize(self):
body = json.dumps({u'on': True, u'hue': 13122, u'colormode': u'ct', u'bri': 203, u'sat': 211, u'ct': 156})
self.setstate(body)
def reading(self):
body = json.dumps({u'on': True, u'hue': 13122, u'colormode': u'ct', u'bri': 240, u'sat': 211, u'ct': 346})
self.setstate(body)
def relax(self):
body = json.dumps({u'on': True, u'hue': 13122, u'colormode': u'ct', u'bri': 144, u'sat': 211, u'ct': 467})
self.setstate(body)
def red(self):
self.setstate({"on": True, "hue": 836, "colormode": "xy", "xy": [0.6475, 0.3316]})
def blue(self):
self.setstate({"on": True, "hue": 47103, "colormode": "xy", "xy": [0.167, 0.04]})
def green(self):
self.setstate({"on": True, "hue": 47103, "colormode": "xy", "xy": [0.3991, 0.4982]})
def white(self):
self.setstate({"on": True, "hue": 47103, "colormode": "xy", "xy": [0.3355, 0.3595]})
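# Usage sketch: the light number is a placeholder and a reachable Hue bridge
# with a paired key (or a button press to register one) is assumed.
if __name__ == '__main__':
    demo = Light(lightnum=1, debug=True)
    demo.brightness(128)
    demo.relax()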
| apache-2.0 | 666,285,074,332,368,300 | 33.26087 | 114 | 0.537648 | false |
disqus/nydus | nydus/db/base.py | 1 | 5387 | """
nydus.db.base
~~~~~~~~~~~~~
:copyright: (c) 2011-2012 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
__all__ = ('LazyConnectionHandler', 'BaseCluster')
import collections
from nydus.db.map import DistributedContextManager
from nydus.db.routers import BaseRouter, routing_params
from nydus.utils import apply_defaults
def iter_hosts(hosts):
# this can either be a dictionary (with the key acting as the numeric
# index) or it can be a sorted list.
if isinstance(hosts, collections.Mapping):
return hosts.iteritems()
return enumerate(hosts)
def create_connection(Connection, num, host_settings, defaults):
# host_settings can be an iterable or a dictionary depending on the style
# of connection (some connections share options and simply just need to
# pass a single host, or a list of hosts)
if isinstance(host_settings, collections.Mapping):
return Connection(num, **apply_defaults(host_settings, defaults or {}))
elif isinstance(host_settings, collections.Iterable):
return Connection(num, *host_settings, **defaults or {})
return Connection(num, host_settings, **defaults or {})
class BaseCluster(object):
"""
Holds a cluster of connections.
"""
class MaxRetriesExceededError(Exception):
pass
def __init__(self, hosts, backend, router=BaseRouter, max_connection_retries=20, defaults=None):
self.hosts = dict(
(conn_number, create_connection(backend, conn_number, host_settings, defaults))
for conn_number, host_settings
in iter_hosts(hosts)
)
self.max_connection_retries = max_connection_retries
self.install_router(router)
def __len__(self):
return len(self.hosts)
def __getitem__(self, name):
return self.hosts[name]
def __getattr__(self, name):
return CallProxy(self, name)
def __iter__(self):
for name in self.hosts.iterkeys():
yield name
def install_router(self, router):
self.router = router(self)
def execute(self, path, args, kwargs):
connections = self.__connections_for(path, args=args, kwargs=kwargs)
results = []
for conn in connections:
for retry in xrange(self.max_connection_retries):
func = conn
for piece in path.split('.'):
func = getattr(func, piece)
try:
results.append(func(*args, **kwargs))
except tuple(conn.retryable_exceptions), e:
if not self.router.retryable:
raise e
elif retry == self.max_connection_retries - 1:
raise self.MaxRetriesExceededError(e)
else:
conn = self.__connections_for(path, retry_for=conn.num, args=args, kwargs=kwargs)[0]
else:
break
# If we only had one db to query, we simply return that res
if len(results) == 1:
return results[0]
else:
return results
def disconnect(self):
"""Disconnects all connections in cluster"""
for connection in self.hosts.itervalues():
connection.disconnect()
def get_conn(self, *args, **kwargs):
"""
Returns a connection object from the router given ``args``.
Useful in cases where a connection cannot be automatically determined
during all steps of the process. An example of this would be
Redis pipelines.
"""
connections = self.__connections_for('get_conn', args=args, kwargs=kwargs)
        if len(connections) == 1:
return connections[0]
else:
return connections
def map(self, workers=None, **kwargs):
return DistributedContextManager(self, workers, **kwargs)
@routing_params
def __connections_for(self, attr, args, kwargs, **fkwargs):
return [self[n] for n in self.router.get_dbs(attr=attr, args=args, kwargs=kwargs, **fkwargs)]
class CallProxy(object):
"""
Handles routing function calls to the proper connection.
"""
def __init__(self, cluster, path):
self.__cluster = cluster
self.__path = path
def __call__(self, *args, **kwargs):
return self.__cluster.execute(self.__path, args, kwargs)
def __getattr__(self, name):
return CallProxy(self.__cluster, self.__path + '.' + name)
class LazyConnectionHandler(dict):
"""
Maps clusters of connections within a dictionary.
"""
def __init__(self, conf_callback):
self.conf_callback = conf_callback
self.conf_settings = {}
self.__is_ready = False
def __getitem__(self, key):
if not self.is_ready():
self.reload()
return super(LazyConnectionHandler, self).__getitem__(key)
def is_ready(self):
return self.__is_ready
def reload(self):
from nydus.db import create_cluster
for conn_alias, conn_settings in self.conf_callback().iteritems():
self[conn_alias] = create_cluster(conn_settings)
        self.__is_ready = True
def disconnect(self):
"""Disconnects all connections in cluster"""
for connection in self.itervalues():
connection.disconnect()
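# Usage sketch for building a cluster; the backend path and host settings below
# are illustrative only (clusters are normally created via nydus.db.create_cluster):
#
#     from nydus.db import create_cluster
#     redis = create_cluster({
#         'backend': 'nydus.db.backends.redis.Redis',
#         'hosts': {0: {'db': 0}, 1: {'db': 1}},
#     })
#     redis.set('key', 'value')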
| apache-2.0 | -8,090,204,037,582,475,000 | 31.648485 | 108 | 0.609616 | false |
spencerahill/aospy-obj-lib | aospy_user/calcs/__init__.py | 1 | 7250 | """My library of functions for use in aospy.
Historically, these assumed input variables in the form of numpy arrays or
masked numpy arrays. As of October 2015, I have switched to assuming
xarray.DataArrays, to coincide with the same switch within aospy. However, not
all of the functions in this module have been converted to support this new
datatype.
"""
from .tendencies import (
first_to_last_vals_dur,
time_tendency_first_to_last,
time_tendency_each_timestep,
)
from .numerics import (
latlon_deriv_prefactor,
wraparound,
d_dx_from_latlon,
d_dy_from_lat,
d_dx_at_const_p_from_eta,
d_dy_at_const_p_from_eta,
d_dp_from_p,
d_dp_from_eta
)
from .thermo import (
dse,
mse,
fmse,
kinetic_energy,
internal_energy,
energy,
total_energy,
cpt_lvq,
virt_temp,
pot_temp,
virt_pot_temp,
equiv_pot_temp,
z_from_hypso,
mse_from_hypso,
mixing_ratio_from_specific_mass,
specific_mass_dry_air,
specific_gas_constant_moist_air,
heat_capacity_moist_air_constant_volume,
specific_entropy_dry_air,
specific_entropy_water_vapor,
tdt_diab,
tdt_lw_cld,
tdt_sw_cld,
tdt_moist_diabatic,
mse_tendency,
)
from .toa_sfc_fluxes import (
albedo,
sfc_albedo,
cre_sw,
cre_lw,
cre_net,
toa_rad,
toa_rad_clr,
toa_sw,
sfc_rad,
sfc_rad_cld,
sfc_lw,
sfc_lw_cld,
sfc_sw,
sfc_sw_cld,
sfc_energy,
column_energy,
column_lw,
column_sw,
bowen_ratio,
evap_frac,
)
from .advection import (
zonal_advec,
merid_advec,
vert_advec,
horiz_advec,
total_advec,
zonal_advec_upwind,
merid_advec_upwind,
horiz_advec_upwind,
total_advec_upwind,
zonal_advec_const_p_from_eta,
merid_advec_const_p_from_eta,
horiz_advec_const_p_from_eta,
vert_advec_from_eta,
total_advec_from_eta,
horiz_advec_spharm,
)
from .mass import (
horiz_divg,
horiz_divg_spharm,
vert_divg,
divg_3d,
dp,
uv_mass_adjustment,
uv_mass_adjusted,
u_mass_adjustment,
u_mass_adjusted,
v_mass_adjustment,
v_mass_adjusted,
column_flux_divg,
column_flux_divg_adj,
mass_column,
mass_column_divg,
mass_column_divg_spharm,
mass_column_divg_adj,
mass_column_integral,
mass_column_source,
mass_column_budget_lhs,
mass_column_budget_with_adj_lhs,
mass_column_budget_residual,
mass_column_budget_adj_residual,
horiz_divg_mass_adj,
horiz_divg_mass_adj_spharm,
horiz_divg_mass_adj_from_eta,
ps_horiz_advec,
uv_dry_mass_adjusted,
dry_mass_column_tendency,
dry_mass_column_divg,
dry_mass_column_divg_adj,
dry_mass_column_budget_residual,
dry_mass_column_budget_adj_residual,
uv_mass_adjustment,
uv_mass_adjusted,
horiz_divg_mass_adj,
horiz_advec_mass_adj,
)
from .transport import (
field_horiz_flux_divg,
field_vert_flux_divg,
field_times_horiz_divg,
field_horiz_advec_divg_sum,
field_total_advec,
field_vert_int_bal,
field_times_horiz_divg_mass_adj,
field_horiz_flux_divg_mass_adj,
omega_from_divg_eta,
)
from .energy_budget import (
energy_column,
energy_column_tendency,
energy_column_tendency_each_timestep,
energy_column_source,
energy_column_divg,
energy_column_budget_residual,
uv_energy_adjustment,
uv_energy_adjusted,
uv_mass_energy_adjustment,
uv_mass_energy_adjusted,
u_energy_adjustment,
u_energy_adjusted,
u_mass_energy_adjustment,
u_mass_energy_adjusted,
v_energy_adjustment,
v_energy_adjusted,
v_mass_energy_adjustment,
v_mass_energy_adjusted,
energy_column_divg_adj,
energy_column_divg_adj_time_mean,
energy_column_divg_adj_eddy,
energy_column_budget_adj_residual,
energy_column_budget_energy_adj_residual,
energy_column_budget_mass_adj_residual,
energy_column_budget_dry_mass_adj_residual,
energy_column_divg_mass_adj,
energy_sfc_ps_advec,
energy_sfc_ps_advec_as_resid,
energy_horiz_advec_adj,
energy_zonal_advec_upwind,
energy_merid_advec_upwind,
energy_horiz_advec_upwind,
energy_horiz_advec_eta_adj,
energy_horiz_advec_eta_adj_spharm,
energy_horiz_advec_eta_adj_time_mean,
energy_horiz_advec_eta_upwind,
energy_zonal_advec_eta_upwind,
energy_merid_advec_eta_upwind,
energy_horiz_advec_eta_upwind_time_mean,
energy_horiz_advec_eta_upwind_adj_time_mean,
energy_horiz_divg_eta,
energy_column_vert_advec_as_resid_eta_time_mean,
energy_column_vert_advec_as_resid,
energy_vert_advec,
energy_vert_advec_eta,
energy_vert_advec_eta_adj,
energy_vert_advec_eta_time_mean,
energy_vert_advec_eta_adj_time_mean,
energy_vert_advec_eta_upwind,
energy_vert_advec_eta_upwind_time_mean,
energy_vert_advec_eta_upwind_adj_time_mean,
)
from .mse_budget import (
mse_horiz_flux_divg,
mse_horiz_advec,
mse_times_horiz_divg,
mse_horiz_advec_divg_sum,
mse_vert_flux_divg,
mse_vert_advec,
mse_total_advec,
mse_horiz_advec_upwind,
mse_merid_advec_upwind,
mse_zonal_advec_upwind,
mse_vert_advec_upwind,
mse_total_advec_upwind,
mse_budget_advec_residual,
)
from .mse_from_hypso_budget import(
mse_from_hypso_zonal_advec_upwind,
mse_from_hypso_merid_advec_upwind,
mse_from_hypso_horiz_advec_upwind,
mse_from_hypso_vert_advec_upwind,
cpt_lvq_zonal_deriv,
cpt_lvq_merid_deriv,
cpt_lvq_zonal_advec_upwind,
cpt_lvq_merid_advec_upwind,
cpt_lvq_horiz_advec_upwind,
)
from .fmse_budget import (
fmse_merid_deriv_eta,
fmse_zonal_deriv_eta,
fmse_horiz_advec_eta_upwind,
fmse_budget_advec_residual,
omega_change_from_fmse_budget,
)
from .dse_budget import (
dse_horiz_flux_divg,
dse_horiz_advec,
dse_times_horiz_divg,
dse_horiz_advec_divg_sum,
dse_vert_advec,
)
from .stats import (
pointwise_corr,
pointwise_lin_regr,
corr_cre_sw,
corr_cre_lw,
corr_cre_net,
corr_toa_rad_clr,
lin_regr_cre_net,
lin_regr_toa_rad_clr,
vert_centroid,
vert_avg
)
from .water import (
p_minus_e,
prec_conv_frac,
precip_large_scale,
moisture_column_source,
moisture_column_tendency,
moisture_column_divg_with_adj2,
moisture_column_budget_lhs,
moisture_column_budget_with_adj_lhs,
moisture_column_budget_with_adj2_lhs,
moisture_column_budget_residual,
)
from .gms import (
field_vert_int_max,
horiz_divg_vert_int_max,
vert_divg_vert_int_max,
gms_like_ratio,
gross_moist_strat,
gross_dry_stab,
gross_moist_stab,
gms_up_low,
gms_each_level,
dry_static_stab,
moist_static_stab,
frozen_moist_static_stab,
moist_static_stab_p,
frozen_moist_static_stab_p,
)
from .zonal_mean_circ import (
msf,
msf_max,
aht,
aht_no_snow,
oht,
tht,
gms_change_est,
gms_change_est2,
gms_h01,
gms_h01est,
gms_h01est2,
gms_moc,
gms_msf,
total_gms,
ang_mom,
hadley_bounds,
had_bounds,
had_bounds500,
thermal_equator,
itcz_pos,
itcz_loc,
prec_centroid,
precip_centroid,
trop_height,
)
| apache-2.0 | 6,250,675,430,963,108,000 | 23.328859 | 79 | 0.663172 | false |
xuru/pyvisdk | pyvisdk/do/tools_config_info_tools_last_install_info.py | 1 | 1037 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ToolsConfigInfoToolsLastInstallInfo(vim, *args, **kwargs):
'''Describes status of last tools upgrade attempt'''
obj = vim.client.factory.create('ns0:ToolsConfigInfoToolsLastInstallInfo')
# do some validation checking...
if (len(args) + len(kwargs)) < 1:
        raise IndexError('Expected at least 1 argument got: %d' % len(args))
required = [ 'counter' ]
optional = [ 'fault', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | -7,795,499,560,222,779,000 | 30.454545 | 124 | 0.605593 | false |
vlegoff/tsunami | src/primaires/scripting/commandes/editeur/__init__.py | 1 | 3073 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the 'editeur' command."""
from primaires.interpreteur.commande.commande import Commande
from primaires.scripting.commandes.editeur.creer import PrmCreer
from primaires.scripting.commandes.editeur.editer import PrmEditer
from primaires.scripting.commandes.editeur.liste import PrmListe
class CmdEditeur(Commande):
    """Command 'editeur'"""
    def __init__(self):
        """Constructor of the command"""
Commande.__init__(self, "editeur", "editor")
self.groupe = "administrateur"
self.nom_categorie = "batisseur"
self.aide_courte = "manipule les éditeurs personnalisés"
self.aide_longue = \
"Cette commande permet de créer, éditer et lister " \
"des éditeurs personnalisés. Un éditeur personnalisé " \
"est un éditeur... pour créer d'autres éditeurs. À l'instar " \
"de %dyncom% pour créer des commandes dynamiques, %editeur% " \
"permet de créer des éditeurs dynamiques, créables et " \
"scriptables par les bâtisseurs. Pour une explication " \
"détaillée, et un tutoriel pas à pas sur la création " \
"d'éditeurs, consultez http://redmine.kassie.fr/projects/" \
"documentation/wiki/EditeurPersonnalise"
    def ajouter_parametres(self):
        """Add the parameters."""
self.ajouter_parametre(PrmCreer())
self.ajouter_parametre(PrmEditer())
self.ajouter_parametre(PrmListe())
| bsd-3-clause | 7,689,151,746,744,989,000 | 47.412698 | 79 | 0.724262 | false |
lefnire/tensorforce | tensorforce/core/networks/complex_network.py | 1 | 7315 | # Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import Counter
import json
import os
import tensorflow as tf
from tensorforce import TensorForceError
from tensorforce.core.networks import Layer
from tensorforce.core.networks.network import LayerBasedNetwork
class Input(Layer):
"""
Input layer. Used by ComplexLayeredNetworks to collect named tensors
and pass them on as the output to the next layer. Allows multiple inputs
to be merged into a single input for the next layer.
"""
def __init__(
self,
inputs,
axis=1,
scope='merge_inputs',
summary_labels=()
):
"""
Input layer.
Args:
inputs: A list of strings that name the inputs to merge
axis: Axis to merge the inputs
"""
self.inputs = inputs
self.axis = axis
super(Input, self).__init__(scope=scope, summary_labels=summary_labels)
def tf_apply(self, x, update):
inputs_to_merge = list()
for name in self.inputs:
# Previous input, by name or "*", like normal network_spec
# Not using named_tensors as there could be unintended outcome
if name == "*" or name == "previous":
inputs_to_merge.append(x)
elif name in self.named_tensors:
inputs_to_merge.append(self.named_tensors[name])
else:
# Failed to find key in available inputs, print out help to user, raise error
keys = list(self.named_tensors)
raise TensorForceError(
'ComplexNetwork input "{}" doesn\'t exist, Available inputs: {}'.format(name, keys)
)
# Review data for casting to more precise format so TensorFlow doesn't throw error for mixed data
# Quick & Dirty cast only promote types: bool=0,int32=10, int64=20, float32=30, double=40
cast_type_level = 0
cast_type_dict = {
'bool': 0,
'int32': 10,
'int64': 20,
'float32': 30,
'float64': 40
}
cast_type_func_dict = {
0: tf.identity,
10: tf.to_int32,
20: tf.to_int64,
30: tf.to_float,
40: tf.to_double
}
# Scan inputs for max cast_type
for tensor in inputs_to_merge:
key = str(tensor.dtype.name)
if key in cast_type_dict:
if cast_type_dict[key] > cast_type_level:
cast_type_level = cast_type_dict[key]
else:
raise TensorForceError('Network spec input does not support dtype {}'.format(key))
# Add casting if needed
for index, tensor in enumerate(inputs_to_merge):
key = str(tensor.dtype.name)
if cast_type_dict[key] < cast_type_level:
inputs_to_merge[index] = cast_type_func_dict[cast_type_level](tensor)
input_tensor = tf.concat(inputs_to_merge, self.axis)
return input_tensor
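# Illustrative sketch (added commentary, not part of the original module):
# the promotion table above means that when named inputs of mixed dtypes are
# concatenated, every tensor is first cast up to the "largest" dtype present.
# For example, assuming three named tensors with dtypes bool, int32 and
# float32, the bool and int32 tensors are promoted with tf.to_float before
# the concat, so the merged tensor is float32:
#
#     import tensorflow as tf
#     a = tf.constant([[True, False]])           # bool    -> level 0
#     b = tf.constant([[1, 2]], dtype=tf.int32)  # int32   -> level 10
#     c = tf.constant([[0.5, 1.5]])              # float32 -> level 30
#     merged = tf.concat([tf.to_float(a), tf.to_float(b), c], axis=1)
#     # merged.dtype == tf.float32, shape (1, 6)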
class Output(Layer):
"""
Output layer. Used by ComplexLayeredNetworks to capture a tensor
under a name for use with Input layers. Acts as an input-to-output passthrough.
"""
def __init__(
self,
output,
scope='output',
summary_labels=()
):
"""
Output layer.
Args:
output: A string that names the tensor, will be added to available inputs
"""
self.output = output
super(Output, self).__init__(scope=scope, summary_labels=summary_labels)
def tf_apply(self, x, update):
self.named_tensors[self.output] = x
return x
class ComplexLayeredNetwork(LayerBasedNetwork):
"""
Complex Network consisting of a sequence of layers, which can be created from a specification dict.
"""
def __init__(self, complex_layers_spec, scope='layered-network', summary_labels=()):
"""
Complex Layered network.
Args:
complex_layers_spec: List of layer specification dicts
"""
super(ComplexLayeredNetwork, self).__init__(scope=scope, summary_labels=summary_labels)
self.complex_layers_spec = complex_layers_spec
#self.named_tensors = dict()
layer_counter = Counter()
for branch_spec in self.complex_layers_spec:
for layer_spec in branch_spec:
if isinstance(layer_spec['type'], str):
name = layer_spec['type']
else:
name = 'layer'
scope = name + str(layer_counter[name])
layer_counter[name] += 1
layer = Layer.from_spec(
spec=layer_spec,
kwargs=dict(scope=scope, summary_labels=summary_labels)
)
# Link named dictionary reference into Layer.
layer.tf_tensors(named_tensors=self.named_tensors)
self.add_layer(layer=layer)
def tf_apply(self, x, internals, update, return_internals=False):
if isinstance(x, dict):
self.named_tensors.update(x)
if len(x) == 1:
x = next(iter(x.values()))
next_internals = dict()
for layer in self.layers:
layer_internals = {name: internals['{}_{}'.format(layer.scope, name)] for name in layer.internals_spec()}
if len(layer_internals) > 0:
x, layer_internals = layer.apply(x=x, update=update, **layer_internals)
for name, internal in layer_internals.items():
next_internals['{}_{}'.format(layer.scope, name)] = internal
else:
x = layer.apply(x=x, update=update)
if return_internals:
return x, next_internals
else:
return x
@staticmethod
def from_json(filename): # TODO: NOT TESTED
"""
Creates a complex_layered_network_builder from a JSON.
Args:
filename: Path to configuration
Returns: A ComplexLayeredNetwork class with layers generated from the JSON
"""
path = os.path.join(os.getcwd(), filename)
with open(path, 'r') as fp:
config = json.load(fp=fp)
return ComplexLayeredNetwork(complex_layers_spec=config)
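# Illustrative sketch (added commentary, not part of the original module):
# a ComplexLayeredNetwork is built from a list of branches, where each branch
# is a list of layer specs. The 'input'/'output' layer types defined above let
# a later branch consume tensors that an earlier branch published under a name.
# The state names and layer sizes below are hypothetical:
#
#     complex_layers_spec = [
#         [
#             {"type": "input", "inputs": ["image"]},
#             {"type": "conv2d", "size": 32},
#             {"type": "flatten"},
#             {"type": "output", "output": "image_out"}
#         ],
#         [
#             {"type": "input", "inputs": ["image_out", "measurements"]},
#             {"type": "dense", "size": 64}
#         ]
#     ]
#     network = ComplexLayeredNetwork(complex_layers_spec=complex_layers_spec)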
| apache-2.0 | 8,334,893,506,367,331,000 | 33 | 117 | 0.563226 | false |
digibyte/digibyte | contrib/seeds/generate-seeds.py | 1 | 4385 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys
import os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
raise ValueError('Invalid onion %s' % vchAddr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
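# Illustrative sketch (added commentary, not part of the original script):
# name_to_ipv6 always returns a 16-byte buffer, wrapping the shorter address
# formats as described in the module docstring. For example:
#
#     name_to_ipv6('1.2.3.4')          # -> pchIPv4 + bytearray([1, 2, 3, 4])
#     name_to_ipv6('::1')              # -> 15 zero bytes followed by 0x01
#     name_to_ipv6('0x04030201')       # -> same as '1.2.3.4' (little-endian hex)
#     parse_spec('1.2.3.4:8444', 8333) # -> (16-byte address, 8444)
#     parse_spec('[::1]', 8333)        # -> (16-byte address, 8333)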
def parse_spec(s, defaultport):
match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
sys.exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef DIGIBYTE_CHAINPARAMSSEEDS_H\n')
g.write('#define DIGIBYTE_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the digibyte network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside an IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_main', 8333)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
process_nodes(g, f, 'pnSeed6_test', 18333)
g.write('#endif // DIGIBYTE_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| mit | 3,191,925,870,720,644,600 | 30.546763 | 99 | 0.575371 | false |
sondree/Master-thesis | Python MOEA/batch.py | 1 | 1096 |
import traceback
from main import main
from utility import load_config
def batch_vardev():
conf = load_config("VarDevConf")
try:
main(conf)
except:
print traceback.format_exc()
def batch_countdev():
c = 1
for num_steps in xrange(3+c,7):
conf = load_config("CountDevConf")
conf.NUM_STEPS = num_steps
conf.reinit()
try:
main(conf)
except:
print traceback.format_exc()
def batch_stepdev():
c = 0
for num_uavs in xrange(3,9):
for num_steps in xrange(3,5):
if c < 7:
print "Skipping (%s,%s)" % (num_uavs,num_steps)
c += 1
continue
conf = load_config("StepDevConf")
conf.NUM_UAVS = num_uavs
conf.NUM_STEPS = num_steps
conf.reinit()
try:
main(conf)
except:
print traceback.format_exc()
if __name__=="__main__":
# batch_countdev()
# batch_vardev()
batch_stepdev()
| gpl-3.0 | 3,182,776,818,449,855,500 | 20.076923 | 63 | 0.488139 | false |
rphillips/bitbake | lib/bb/server/none.py | 1 | 5721 | #
# BitBake 'dummy' Passthrough Server
#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2008 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
This module implements an xmlrpc server for BitBake.
Use this by deriving a class from BitBakeXMLRPCServer and then adding
methods which you want to "export" via XMLRPC. If the methods have the
prefix xmlrpc_, then registering those function will happen automatically,
if not, you need to call register_function.
Use register_idle_function() to add a function which the xmlrpc server
calls from within server_forever when no requests are pending. Make sure
that those functions are non-blocking or else you will introduce latency
in the server's main loop.
"""
import time
import bb
from bb.ui import uievent
import xmlrpclib
import pickle
DEBUG = False
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import inspect, select
class BitBakeServerCommands():
def __init__(self, server, cooker):
self.cooker = cooker
self.server = server
def runCommand(self, command):
"""
Run a cooker command on the server
"""
#print "Running Command %s" % command
return self.cooker.command.runCommand(command)
def terminateServer(self):
"""
Trigger the server to quit
"""
self.server.server_exit()
#print "Server (cooker) exitting"
return
def ping(self):
"""
Dummy method which can be used to check the server is still alive
"""
return True
eventQueue = []
class BBUIEventQueue:
class event:
def __init__(self, parent):
self.parent = parent
@staticmethod
def send(event):
bb.server.none.eventQueue.append(pickle.loads(event))
@staticmethod
def quit():
return
def __init__(self, BBServer):
self.eventQueue = bb.server.none.eventQueue
self.BBServer = BBServer
self.EventHandle = bb.event.register_UIHhandler(self)
def getEvent(self):
if len(self.eventQueue) == 0:
return None
return self.eventQueue.pop(0)
def waitEvent(self, delay):
event = self.getEvent()
if event:
return event
self.BBServer.idle_commands(delay)
return self.getEvent()
def queue_event(self, event):
self.eventQueue.append(event)
def system_quit( self ):
bb.event.unregister_UIHhandler(self.EventHandle)
class BitBakeServer():
# remove this when you're done with debugging
# allow_reuse_address = True
def __init__(self, cooker):
self._idlefuns = {}
self.commands = BitBakeServerCommands(self, cooker)
def register_idle_function(self, function, data):
"""Register a function to be called while the server is idle"""
assert hasattr(function, '__call__')
self._idlefuns[function] = data
def idle_commands(self, delay):
#print "Idle queue length %s" % len(self._idlefuns)
#print "Idle timeout, running idle functions"
#if len(self._idlefuns) == 0:
nextsleep = delay
for function, data in self._idlefuns.items():
try:
retval = function(self, data, False)
#print "Idle function returned %s" % (retval)
if retval is False:
del self._idlefuns[function]
elif retval is True:
nextsleep = None
elif nextsleep is None:
continue
elif retval < nextsleep:
nextsleep = retval
except SystemExit:
raise
except:
import traceback
traceback.print_exc()
self.commands.runCommand(["stateShutdown"])
pass
if nextsleep is not None:
#print "Sleeping for %s (%s)" % (nextsleep, delay)
time.sleep(nextsleep)
def server_exit(self):
# Tell idle functions we're exiting
for function, data in self._idlefuns.items():
try:
retval = function(self, data, True)
except:
pass
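# Illustrative sketch (added commentary, not part of the original module):
# idle functions registered with register_idle_function() are called from
# idle_commands() as function(server, data, abort) and control the polling
# delay through their return value: False removes the function, True asks for
# an immediate retry, and a number asks to be called back after that many
# seconds. A hypothetical caller could look like this:
#
#     def check_queue(server, data, abort):
#         if abort:
#             return False      # server is shutting down
#         if not data.empty():
#             return True       # more work pending, call back right away
#         return 5.0            # otherwise poll again in five seconds
#
#     server = BitBakeServer(cooker)
#     server.register_idle_function(check_queue, work_queue)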
class BitbakeServerInfo():
def __init__(self, server):
self.server = server
self.commands = server.commands
class BitBakeServerFork():
def __init__(self, serverinfo, command, logfile):
serverinfo.forkCommand = command
serverinfo.logfile = logfile
class BitBakeServerConnection():
def __init__(self, serverinfo):
self.server = serverinfo.server
self.connection = serverinfo.commands
self.events = bb.server.none.BBUIEventQueue(self.server)
for event in bb.event.ui_queue:
self.events.queue_event(event)
def terminate(self):
try:
self.events.system_quit()
except:
pass
try:
self.connection.terminateServer()
except:
pass
| gpl-2.0 | 1,023,466,820,555,777,000 | 30.262295 | 78 | 0.619822 | false |
spectralpython/spectral | spectral/image.py | 1 | 6861 | '''
Generic functions for handling spectral images.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import numbers
import numpy as np
from .spectral import BandInfo
class Image(object):
'''spectral.Image is the common base class for spectral image objects.'''
def __init__(self, params, metadata=None):
self.bands = BandInfo()
self.set_params(params, metadata)
def set_params(self, params, metadata):
try:
self.nbands = params.nbands
self.nrows = params.nrows
self.ncols = params.ncols
self.dtype = params.dtype
if not metadata:
self.metadata = {}
else:
self.metadata = metadata
except:
raise
def params(self):
'''Return an object containing the SpyFile parameters.'''
class P:
pass
p = P()
p.nbands = self.nbands
p.nrows = self.nrows
p.ncols = self.ncols
p.metadata = self.metadata
p.dtype = self.dtype
return p
def __repr__(self):
return self.__str__()
class ImageArray(np.ndarray, Image):
'''ImageArray is an interface to an image loaded entirely into memory.
ImageArray objects are returned by :meth:`spectral.SpyFile.load`.
This class inherits from both numpy.ndarray and Image, providing the
interfaces of both classes.
'''
format = 'f' # Use 4-byte floats for data arrays
def __new__(subclass, data, spyfile):
obj = np.asarray(data).view(subclass)
ImageArray.__init__(obj, data, spyfile)
return obj
def __init__(self, data, spyfile):
# Add param data to Image initializer
params = spyfile.params()
params.dtype = data.dtype
params.swap = 0
Image.__init__(self, params, spyfile.metadata)
self.bands = spyfile.bands
self.filename = spyfile.filename
self.interleave = 2 # bip
def __repr__(self):
lst = np.array2string(np.asarray(self), prefix="ImageArray(")
return "{}({}, dtype={})".format('ImageArray', lst, self.dtype.name)
def __getitem__(self, args):
# Duplicate the indexing behavior of SpyFile. If args is iterable
# with length greater than one, and if not all of the args are
# scalars, then the scalars need to be replaced with slices.
try:
iterator = iter(args)
except TypeError:
if isinstance(args, numbers.Number):
if args == -1:
updated_args = slice(args, None)
else:
updated_args = slice(args, args+1)
else:
updated_args = args
return self._parent_getitem(updated_args)
keep_original_args = True
updated_args = []
for arg in iterator:
if isinstance(arg, numbers.Number):
if arg == -1:
updated_args.append(slice(arg, None))
else:
updated_args.append(slice(arg, arg+1))
elif isinstance(arg, np.bool_):
updated_args.append(arg)
else:
updated_args.append(arg)
keep_original_args = False
if keep_original_args:
updated_args = args
else:
updated_args = tuple(updated_args)
return self._parent_getitem(updated_args)
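# Illustrative sketch (added commentary, not part of the original module):
# the scalar-to-slice rewriting above keeps singleton dimensions, mirroring
# SpyFile indexing rather than plain ndarray indexing. Assuming `img` is an
# ImageArray with shape (100, 100, 220):
#
#     img[3, 7].shape              # -> (1, 1, 220)  (scalars become length-1 slices)
#     img[3:4, 7:8].shape          # -> (1, 1, 220)  (equivalent explicit slicing)
#     np.asarray(img)[3, 7].shape  # -> (220,)       (plain ndarray behaviour)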
def _parent_getitem(self, args):
return np.ndarray.__getitem__(self, args)
def read_band(self, i):
'''
For compatibility with SpyFile objects. Returns arr[:,:,i].squeeze()
'''
return np.asarray(self[:, :, i].squeeze())
def read_bands(self, bands):
'''For SpyFile compatibility. Equivalent to arr.take(bands, 2)'''
return np.asarray(self.take(bands, 2))
def read_pixel(self, row, col):
'''For SpyFile compatibility. Equivalent to arr[row, col]'''
return np.asarray(self[row, col])
def read_subregion(self, row_bounds, col_bounds, bands=None):
'''
For SpyFile compatibility.
Equivalent to arr[slice(*row_bounds), slice(*col_bounds), bands],
selecting all bands if none are specified.
'''
if bands:
return np.asarray(self[slice(*row_bounds),
slice(*col_bounds),
bands])
else:
return np.asarray(self[slice(*row_bounds),
slice(*col_bounds)])
def read_subimage(self, rows, cols, bands=None):
'''
For SpyFile compatibility.
Equivalent to arr[rows][:, cols][:, :, bands], selecting all bands if
none are specified.
'''
if bands:
return np.asarray(self[rows][:, cols][:, :, bands])
else:
return np.asarray(self[rows][:, cols])
def read_datum(self, i, j, k):
'''For SpyFile compatibility. Equivalent to arr[i, j, k]'''
return np.asscalar(self[i, j, k])
def load(self):
'''For compatibility with SpyFile objects. Returns self'''
return self
def asarray(self, writable=False):
'''Returns an object with a standard numpy array interface.
The return value is the same as calling `numpy.asarray`, except
that the array is not writable by default to match the behavior
of `SpyFile.asarray`.
This function is for compatibility with SpyFile objects.
Keyword Arguments:
`writable` (bool, default False):
If `writable` is True, modifying values in the returned
array will result in corresponding modification to the
ImageArray object.
'''
arr = np.asarray(self)
if not writable:
arr.setflags(write=False)
return arr
def info(self):
s = '\t# Rows: %6d\n' % (self.nrows)
s += '\t# Samples: %6d\n' % (self.ncols)
s += '\t# Bands: %6d\n' % (self.shape[2])
s += '\tData format: %8s' % self.dtype.name
return s
def __array_wrap__(self, out_arr, context=None):
# The ndarray __array_wrap__ causes ufunc results to be of type
# ImageArray. Instead, return a plain ndarray.
return out_arr
# Some methods do not call __array_wrap__ and will return an ImageArray.
# Currently, these need to be overridden individually or with
# __getattribute__ magic.
def __getattribute__(self, name):
if ((name in np.ndarray.__dict__) and
(name not in ImageArray.__dict__)):
return getattr(np.asarray(self), name)
return super(ImageArray, self).__getattribute__(name)
| gpl-2.0 | -5,828,234,927,931,188,000 | 30.911628 | 82 | 0.563037 | false |
brechtm/rinohtype | src/rinoh/font/opentype/macglyphs.py | 1 | 6561 | # This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
# from Apple's TrueType Reference Manual (December 18, 2003)
MAC_GLYPHS = ('.notdef',
'.null',
'nonmarkingreturn',
'space',
'exclam',
'quotedbl',
'numbersign',
'dollar',
'percent',
'ampersand',
'quotesingle',
'parenleft',
'parenright',
'asterisk',
'plus',
'comma',
'hyphen',
'period',
'slash',
'zero',
'one',
'two',
'three',
'four',
'five',
'six',
'seven',
'eight',
'nine',
'colon',
'semicolon',
'less',
'equal',
'greater',
'question',
'at',
'A',
'B',
'C',
'D',
'E',
'F',
'G',
'H',
'I',
'J',
'K',
'L',
'M',
'N',
'O',
'P',
'Q',
'R',
'S',
'T',
'U',
'V',
'W',
'X',
'Y',
'Z',
'bracketleft',
'backslash',
'bracketright',
'asciicircum',
'underscore',
'grave',
'a',
'b',
'c',
'd',
'e',
'f',
'g',
'h',
'i',
'j',
'k',
'l',
'm',
'n',
'o',
'p',
'q',
'r',
's',
't',
'u',
'v',
'w',
'x',
'y',
'z',
'braceleft',
'bar',
'braceright',
'asciitilde',
'Adieresis',
'Aring',
'Ccedilla',
'Eacute',
'Ntilde',
'Odieresis',
'Udieresis',
'aacute',
'agrave',
'acircumflex',
'adieresis',
'atilde',
'aring',
'ccedilla',
'eacute',
'egrave',
'ecircumflex',
'edieresis',
'iacute',
'igrave',
'icircumflex',
'idieresis',
'ntilde',
'oacute',
'ograve',
'ocircumflex',
'odieresis',
'otilde',
'uacute',
'ugrave',
'ucircumflex',
'udieresis',
'dagger',
'degree',
'cent',
'sterling',
'section',
'bullet',
'paragraph',
'germandbls',
'registered',
'copyright',
'trademark',
'acute',
'dieresis',
'notequal',
'AE',
'Oslash',
'infinity',
'plusminus',
'lessequal',
'greaterequal',
'yen',
'mu',
'partialdiff',
'summation',
'product',
'pi',
'integral',
'ordfeminine',
'ordmasculine',
'Omega',
'ae',
'oslash',
'questiondown',
'exclamdown',
'logicalnot',
'radical',
'florin',
'approxequal',
'Delta',
'guillemotleft',
'guillemotright',
'ellipsis',
'nonbreakingspace',
'Agrave',
'Atilde',
'Otilde',
'OE',
'oe',
'endash',
'emdash',
'quotedblleft',
'quotedblright',
'quoteleft',
'quoteright',
'divide',
'lozenge',
'ydieresis',
'Ydieresis',
'fraction',
'currency',
'guilsinglleft',
'guilsinglright',
'fi',
'fl',
'daggerdbl',
'periodcentered',
'quotesinglbase',
'quotedblbase',
'perthousand',
'Acircumflex',
'Ecircumflex',
'Aacute',
'Edieresis',
'Egrave',
'Iacute',
'Icircumflex',
'Idieresis',
'Igrave',
'Oacute',
'Ocircumflex',
'apple',
'Ograve',
'Uacute',
'Ucircumflex',
'Ugrave',
'dotlessi',
'circumflex',
'tilde',
'macron',
'breve',
'dotaccent',
'ring',
'cedilla',
'hungarumlaut',
'ogonek',
'caron',
'Lslash',
'lslash',
'Scaron',
'scaron',
'Zcaron',
'zcaron',
'brokenbar',
'Eth',
'eth',
'Yacute',
'yacute',
'Thorn',
'thorn',
'minus',
'multiply',
'onesuperior',
'twosuperior',
'threesuperior',
'onehalf',
'onequarter',
'threequarters',
'franc',
'Gbreve',
'gbreve',
'Idotaccent',
'Scedilla',
'scedilla',
'Cacute',
'cacute',
'Ccaron',
'ccaron',
'dcroat')
| agpl-3.0 | -487,069,589,057,482,240 | 23.573034 | 75 | 0.280902 | false |
TeamODrKnow/doctor-know | pipeline-image/twitter-to-redis.py | 1 | 2462 | """This script uses the Twitter Streaming API, via the tweepy library,
to pull in tweets and store them in a Redis server.
"""
import os
import redis
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy.streaming import StreamListener
# Get your twitter credentials from the environment variables.
# These are set in the 'twitter-stream.json' manifest file.
consumer_key = os.environ['CONSUMERKEY']
consumer_secret = os.environ['CONSUMERSECRET']
access_token = os.environ['ACCESSTOKEN']
access_token_secret = os.environ['ACCESSTOKENSEC']
# Get info on the Redis host and port from the environment variables.
# The name of this variable comes from the redis service id, 'redismaster'.
REDIS_HOST = os.environ['REDISMASTER_SERVICE_HOST']
REDIS_PORT = os.environ['REDISMASTER_SERVICE_PORT']
REDIS_LIST = os.environ['REDISLIST']
class StdOutListener(StreamListener):
"""A listener handles tweets that are received from the stream.
This listener dumps the tweets into Redis.
"""
count = 0
twstring = ''
tweets = []
r = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=0)
total_tweets = 100000
def write_to_redis(self, tw):
try:
self.r.lpush(REDIS_LIST, tw)
except:
print 'Problem adding tweet data to Redis.'
def on_data(self, data):
"""What to do when tweet data is received."""
self.write_to_redis(data)
self.count += 1
# if we've grabbed more than total_tweets tweets, exit the script.
# If this script is being run in the context of a kubernetes
# replicationController, the pod will be restarted fresh when
# that happens.
if self.count > self.total_tweets:
return False
if (self.count % 1000) == 0:
print 'count is: %s' % self.count
return True
def on_error(self, status):
print status
if __name__ == '__main__':
print '....'
l = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
print 'stream mode is: %s' % os.environ['TWSTREAMMODE']
stream = Stream(auth, l)
# set up the streaming depending upon whether our mode is 'sample', which will
# sample the twitter public stream. If not 'sample', instead track the given
# set of keywords.
# This environment var is set in the 'twstream.json' manifest.
if os.environ['TWSTREAMMODE'] == 'sample':
stream.sample()
else:
stream.filter(
track=['Pittsburgh']
)
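# Illustrative sketch (added commentary, not part of the original script):
# a separate consumer process could drain the same Redis list with a blocking
# pop. The list name comes from the same REDISLIST environment variable used
# above; the JSON decode assumes the raw tweet payloads pushed by on_data():
#
#     import json
#     import redis
#
#     r = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, db=0)
#     while True:
#         _, raw_tweet = r.blpop(REDIS_LIST)
#         tweet = json.loads(raw_tweet)
#         print tweet.get('text')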
| mit | 2,234,303,924,202,498,000 | 30.164557 | 80 | 0.701056 | false |
inviwo/inviwo | modules/python3/scripts/ivw/regression.py | 1 | 2420 | #*********************************************************************************
#
# Inviwo - Interactive Visualization Workshop
#
# Copyright (c) 2013-2021 Inviwo Foundation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#*********************************************************************************
import time
import json
import inviwopy
def saveCanvas(canvas: inviwopy.CanvasProcessor, name=None):
if name is None: name = canvas.identifier
canvas.snapshot(inviwopy.app.getOutputPath() + "/imgtest/"+name+".png")
class Measurements:
def __init__(self):
self.m = []
def add(self, name, quantity, unit, value):
self.m.append({"name": name, 'quantity' : quantity, "unit": unit, 'value' : value})
def addCount(self, name, value):
self.add(name, "count", "", value)
def addTime(self, name, value):
self.add(name, "time", "s", value)
def addFrequency(self, name, value):
self.add(name, "frequency", "Hz", value)
def addFraction(self, name, value):
self.add(name, "fraction", "%", value)
def save(self):
with open(inviwopy.app.getOutputPath() + "/stats.json", 'w') as f:
json.dump(self.m, f, indent=4, separators=(',', ': '))
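# Illustrative sketch (added commentary, not part of the original module):
# a regression script would typically time a section of work, record a few
# metrics and write them next to the saved canvases. The metric names below
# are hypothetical:
#
#     m = Measurements()
#     start = time.perf_counter()
#     # ... run the part of the workspace being measured ...
#     m.addTime("processing-time", time.perf_counter() - start)
#     m.addCount("processed-voxels", 128 ** 3)
#     m.addFraction("cache-hits", 97.5)
#     m.save()
#     saveCanvas(canvas, "result")  # `canvas` is a CanvasProcessor from the workspace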
| bsd-2-clause | -2,159,099,499,981,238,800 | 39.333333 | 85 | 0.688017 | false |
dietrichc/streamline-ppc-reports | examples/dfp/v201403/activity_group_service/create_activity_groups.py | 1 | 2557 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new activity groups.
To determine which activity groups exist, run get_all_activity_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: ActivityGroupService.createActivityGroups
"""
__author__ = ('Vincent Tsao',
'Joseph DiLallo')
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
# Set the ID of the advertiser company this activity group is associated with.
ADVERTISER_COMPANY_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_company_id):
# Initialize appropriate service.
activity_group_service = client.GetService('ActivityGroupService',
version='v201403')
# Create a short-term activity group.
short_term_activity_group = {
'name': 'Short-term activity group #%s' % uuid.uuid4(),
'companyIds': [advertiser_company_id],
'clicksLookback': '1',
'impressionsLookback': '1'
}
# Create a long-term activity group.
long_term_activity_group = {
'name': 'Long-term activity group #%s' % uuid.uuid4(),
'companyIds': [advertiser_company_id],
'clicksLookback': '30',
'impressionsLookback': '30'
}
# Create the activity groups on the server.
activity_groups = activity_group_service.createActivityGroups([
short_term_activity_group, long_term_activity_group])
# Display results.
for activity_group in activity_groups:
print ('Activity group with ID \'%s\' and name \'%s\' was created.'
% (activity_group['id'], activity_group['name']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, ADVERTISER_COMPANY_ID)
| apache-2.0 | -894,893,596,057,951,500 | 33.554054 | 78 | 0.70747 | false |
rajpushkar83/cloudmesh | tests/test_inventory.py | 1 | 2617 | """ run with
nosetests -v --nocapture --nologcapture
nosetests -v --nocapture test_inventory.py:Test_Inventory.test_06
nosetests -v
"""
from __future__ import print_function
from datetime import datetime
from cloudmesh_base.util import HEADING
from cloudmesh.inventory import Inventory
from pprint import pprint
class Test_Inventory:
def setup(self):
self.cluster = "bravo"
self.name = "b010"
self.inventory = Inventory()
self.inventory.clear()
self.inventory.generate()
print("GENERATION COMPLETE")
def tearDown(self):
pass
def test_clear(self):
HEADING()
self.inventory.clear()
def test_find(self):
HEADING()
r = self.inventory.find({})
print(r.count())
assert r.count > 0
def test_host(self):
HEADING()
data = self.inventory.host(self.name)
pprint(data)
def test_list(self):
HEADING()
data = self.inventory.hostlist(self.cluster)
# pprint(data)
def test_combine(self):
attribute = "cm_temp"
value = "32"
print("SET ATTRIBUTE")
print(70 * '=')
data = self.inventory.set_attribute(self.name, attribute, value)
print(70 * '=')
print(data)
print("GET ATTRIBUTE")
data = self.inventory.get_attribute(self.name, attribute)
print(data)
data = self.inventory.host(self.name)
pprint(data)
def test_set(self):
HEADING()
"""
data = self.inventory.find({'cm_id': self.name})
for e in data:
pprint (e)
"""
print(70 * '=')
"""
print "BEFORE"
data = self.inventory.host(self.name)
pprint(data)
"""
attribute = "cm_temp"
value = "32"
print("SET ATTRIBUTE")
print(70 * '=')
data = self.inventory.set_attribute(self.name, attribute, value)
print(70 * '=')
print(data)
print("GET ATTRIBUTE")
data = self.inventory.get_attribute(self.name, attribute)
print(data)
def test_i066(self):
HEADING()
name = "i066"
attribute = "cm_doesnotexist"
print("GET ATTRIBUTE")
data = self.inventory.get_attribute(name, attribute)
print(data)
"""
data = self.inventory.host(self.name)
print "AFTER"
pprint(data)
def test_ipaddr(self):
HEADING()
print self.inventory.ipadr (self.name, "public")
print self.inventory.ipadr (self.name, "internal")
"""
| apache-2.0 | 4,321,092,970,322,313,700 | 21.177966 | 72 | 0.560948 | false |
gannetson/sportschooldeopenlucht | apps/tasks/migrations/0003_auto__del_skills__add_skill__add_field_task_skill.py | 1 | 12818 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'Skills'
db.delete_table(u'tasks_skills')
# Adding model 'Skill'
db.create_table(u'tasks_skill', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('name_nl', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal(u'tasks', ['Skill'])
# Adding field 'Task.skill'
db.add_column(u'tasks_task', 'skill',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tasks.Skill'], null=True),
keep_default=False)
def backwards(self, orm):
# Adding model 'Skills'
db.create_table(u'tasks_skills', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100, unique=True)),
))
db.send_create_signal(u'tasks', ['Skills'])
# Deleting model 'Skill'
db.delete_table(u'tasks_skill')
# Deleting field 'Task.skill'
db.delete_column(u'tasks_task', 'skill_id')
models = {
u'accounts.bluebottleuser': {
'Meta': {'object_name': 'BlueBottleUser'},
'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'projects.partnerorganization': {
'Meta': {'object_name': 'PartnerOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'projects.project': {
'Meta': {'ordering': "['title']", 'object_name': 'Project'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'popularity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
},
u'tasks.skill': {
'Meta': {'object_name': 'Skill'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_nl': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'tasks.task': {
'Meta': {'ordering': "['-created']", 'object_name': 'Task'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'author'", 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'deadline': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_goal': ('django.db.models.fields.TextField', [], {}),
'expertise': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'people_needed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"}),
'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tasks.Skill']", 'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'open'", 'max_length': '20'}),
'time_needed': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'tasks.taskfile': {
'Meta': {'object_name': 'TaskFile'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tasks.Task']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'tasks.taskmember': {
'Meta': {'object_name': 'TaskMember'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tasks.Task']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
}
}
complete_apps = ['tasks'] | bsd-3-clause | 5,640,817,934,617,898,000 | 71.835227 | 187 | 0.555001 | false |
anubhav929/eden | modules/s3/s3task.py | 1 | 16173 | # -*- coding: utf-8 -*-
""" Asynchronous Task Execution
- falls back to Synchronous if no workers are alive
Worker nodes won't run on Win32 yet.
To run a worker node: python web2py.py -K eden
NB
Need WEB2PY_PATH environment variable to be defined (e.g. /etc/profile)
Tasks need to be defined outside conditional model loads
Avoid passing state into the async call as state may change before the message is executed (race condition)
Old screencast: http://www.vimeo.com/27478796
@requires: U{B{I{gluon}} <http://web2py.com>}
@copyright: 2011-12 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3Task"]
import datetime
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import current, HTTP
from gluon.storage import Storage
from s3widgets import S3TimeIntervalWidget
from s3validators import IS_TIME_INTERVAL_WIDGET
from s3utils import s3_debug
# -----------------------------------------------------------------------------
class S3Task(object):
""" Asynchronous Task Execution """
TASK_TABLENAME = "scheduler_task"
# -------------------------------------------------------------------------
def __init__(self):
migrate = current.deployment_settings.get_base_migrate()
tasks = current.response.s3.tasks
# Instantiate Scheduler
try:
from gluon.scheduler import Scheduler
except:
# Warning should already have been given by eden_update_check.py
self.scheduler = None
else:
self.scheduler = Scheduler(current.db,
tasks,
migrate=migrate)
# -------------------------------------------------------------------------
def configure_tasktable_crud(self,
task=None,
function=None,
args=[],
vars={}):
"""
Configure the task table for interactive CRUD,
setting defaults, widgets and hiding unnecessary fields
@param task: the task name (will use a UUID if omitted)
@param function: the function name (won't hide if omitted)
@param args: the function position arguments
@param vars: the function named arguments
"""
T = current.T
db = current.db
tablename = self.TASK_TABLENAME
table = db[tablename]
if not task:
import uuid
task = str(uuid.uuid4())
table.task_name.default = task
table.task_name.readable = False
table.task_name.writable = False
if function:
table.function_name.default = function
table.function_name.readable = False
table.function_name.writable = False
table.args.default = json.dumps(args)
table.args.readable = False
table.args.writable = False
table.repeats.label = T("Repeat")
table.repeats.comment = T("times (0 = unlimited)")
table.repeats.default = 0
table.repeats.represent = lambda opt: opt and "%s %s" % (opt, T("times")) or \
opt == 0 and T("unlimited") or \
"-"
table.period.label = T("Run every")
table.period.widget = S3TimeIntervalWidget.widget
table.period.requires = IS_TIME_INTERVAL_WIDGET(table.period)
table.period.represent = S3TimeIntervalWidget.represent
table.period.comment = None
table.timeout.default = 600
table.timeout.represent = lambda opt: opt and "%s %s" % (opt, T("seconds")) or \
opt == 0 and T("unlimited") or \
"-"
table.vars.default = json.dumps(vars)
table.vars.readable = False
table.vars.writable = False
table.application_name.readable = False
table.application_name.writable = False
table.group_name.readable = False
table.group_name.writable = False
table.status.readable = False
table.status.writable = False
table.next_run_time.readable = False
table.next_run_time.writable = False
table.times_run.readable = False
table.times_run.writable = False
table.assigned_worker_name.readable = False
table.assigned_worker_name.writable = False
current.s3db.configure(tablename,
list_fields=["id",
"enabled",
"start_time",
"repeats",
"period",
(T("Last run"), "last_run_time"),
(T("Last status"), "status"),
(T("Next run"), "next_run_time"),
"stop_time"])
response = current.response
if response:
response.s3.crud_strings[tablename] = Storage(
title_create = T("Add Job"),
title_display = T("Scheduled Jobs"),
title_list = T("Job Schedule"),
title_update = T("Edit Job"),
title_search = T("Search for Job"),
subtitle_create = T("Add Job"),
label_list_button = T("List Jobs"),
label_create_button = T("Add Job"),
msg_record_created = T("Job added"),
msg_record_modified = T("Job updated updated"),
msg_record_deleted = T("Job deleted"),
msg_list_empty = T("No jobs configured yet"),
msg_no_match = T("No jobs configured"))
return
# -------------------------------------------------------------------------
# API Function run within the main flow of the application
# -------------------------------------------------------------------------
def async(self, task, args=[], vars={}, timeout=300):
"""
Wrapper to call an asynchronous task.
- run from the main request
@param task: The function which should be run
- async if a worker is alive
@param args: The list of unnamed args to send to the function
@param vars: The list of named vars to send to the function
@param timeout: The length of time available for the task to complete
- default 300s (5 mins)
"""
# Check that task is defined
tasks = current.response.s3.tasks
if not tasks:
return False
if task not in tasks:
return False
# Check that worker is alive
if not self._is_alive():
# Run the task synchronously
_args = []
for arg in args:
if isinstance(arg, (int, long)):
_args.append(str(arg))
elif isinstance(arg, str):
_args.append("'%s'" % str(arg))
else:
raise HTTP(501, "Unhandled arg type")
args = " ,".join(_args)
_vars = ""
for var in vars:
_vars += ", %s=%s" % (str(var),
str(vars[var]))
statement = "tasks['%s'](%s%s)" % (task, args, _vars)
exec(statement)
return None
auth = current.auth
if auth.is_logged_in():
# Add the current user to the vars
vars["user_id"] = auth.user.id
# Run the task asynchronously
db = current.db
record = db.scheduler_task.insert(task_name=task,
function_name=task,
args=json.dumps(args),
vars=json.dumps(vars),
timeout=timeout)
# Return record so that status can be polled
return record
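# Illustrative sketch (added commentary, not part of the original module):
# a typical caller passes the name of a task registered in
# current.response.s3.tasks plus its arguments; the task name and argument
# below are hypothetical:
#
#     s3task = S3Task()
#     record_id = s3task.async("process_inbox", args=[inbox_id], timeout=600)
#     # record_id is None if no worker was alive (the task already ran
#     # synchronously); otherwise it is the scheduler_task record ID,
#     # which can be polled for status.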
# -------------------------------------------------------------------------
def schedule_task(self,
task,
args=[], # args to pass to the task
vars={}, # vars to pass to the task
function_name=None,
start_time=None,
next_run_time=None,
stop_time=None,
repeats=None,
period=None,
timeout=None,
enabled=None, # None = Enabled
group_name=None,
ignore_duplicate=False):
"""
Schedule a task in web2py Scheduler
@param task: name of the function/task to be scheduled
@param args: args to be passed to the scheduled task
@param vars: vars to be passed to the scheduled task
@param function_name: function name (if different from task name)
@param start_time: start_time for the scheduled task
@param next_run_time: next_run_time for the scheduled task
@param stop_time: stop_time for the scheduled task
@param repeats: number of times the task to be repeated
@param period: time period between two consecutive runs
@param timeout: set timeout for a running task
@param enabled: enabled flag for the scheduled task
@param group_name: group_name for the scheduled task
@param ignore_duplicate: disable or enable duplicate checking
"""
kwargs = {}
if function_name is None:
function_name = task
# storing valid keyword arguments only if they are provided
if start_time:
kwargs["start_time"] = start_time
if next_run_time:
kwargs["next_run_time"] = next_run_time
elif start_time:
# default it to start_time
kwargs["next_run_time"] = start_time
if stop_time:
kwargs["stop_time"] = stop_time
elif start_time:
# default it to one day ahead of given start_time
if not isinstance(start_time, datetime.datetime):
start_time = datetime.datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
stop_time = start_time + datetime.timedelta(days=1)
if repeats is not None:
kwargs["repeats"] = repeats
if period:
kwargs["period"] = period
if timeout:
kwargs["timeout"] = timeout
if enabled != None:
# NB None => enabled
kwargs["enabled"] = enabled
if group_name:
kwargs["group_name"] = group_name
if not ignore_duplicate and self._duplicate_task_exists(task, args, vars):
# if duplicate task exists, do not insert a new one
s3_debug("Duplicate Task, Not Inserted", value=task)
return False
auth = current.auth
if auth.is_logged_in():
# Add the current user to the vars
vars["user_id"] = auth.user.id
# Add to DB for pickup by Scheduler task
db = current.db
record = db.scheduler_task.insert(task_name=task,
function_name=function_name,
args=json.dumps(args),
vars=json.dumps(vars),
**kwargs)
return record
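# Illustrative sketch (added commentary, not part of the original module):
# scheduling the same hypothetical task to run every hour, up to 24 times
# (assuming an S3Task instance `s3task`):
#
#     s3task.schedule_task("process_inbox",
#                          args=[inbox_id],
#                          start_time=datetime.datetime(2012, 6, 1, 8, 0, 0),
#                          period=3600,   # seconds between consecutive runs
#                          repeats=24,
#                          timeout=300)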
# -------------------------------------------------------------------------
def _duplicate_task_exists(self, task, args, vars):
"""
Checks if given task already exists in the Scheduler and both coincide
with their execution time
@param task: name of the task function
@param args: the job position arguments (list)
@param vars: the job named arguments (dict)
"""
db = current.db
ttable = db.scheduler_task
_args = json.dumps(args)
query = ((ttable.function_name == task) & \
(ttable.args == _args) & \
(ttable.status.belongs(["RUNNING", "QUEUED", "ALLOCATED"])))
jobs = db(query).select(ttable.vars)
for job in jobs:
job_vars = json.loads(job.vars)
if job_vars == vars:
return True
return False
# -------------------------------------------------------------------------
def _is_alive(self):
"""
Returns True if there is at least 1 active worker to run scheduled tasks
- run from the main request
NB Can't run this 1/request at the beginning since the tables
only get defined in zz_last
"""
#if self.scheduler:
# return self.scheduler.is_alive()
#else:
# return False
db = current.db
cache = current.response.s3.cache
now = datetime.datetime.now()
offset = datetime.timedelta(minutes=1)
table = db.scheduler_worker
query = (table.last_heartbeat > (now - offset))
worker_alive = db(query).select(table.id,
limitby=(0, 1),
cache=cache).first()
if worker_alive:
return True
else:
return False
# -------------------------------------------------------------------------
@staticmethod
def reset(task_id):
"""
Reset the status of a task to QUEUED after FAILED
@param task_id: the task record ID
"""
db = current.db
ttable = db.scheduler_task
query = (ttable.id == task_id) & (ttable.status == "FAILED")
task = db(query).select(limitby=(0, 1)).first()
if task:
task.update_record(status="QUEUED")
# =========================================================================
# Functions run within the Task itself
# =========================================================================
def authenticate(self, user_id):
"""
Activate the authentication passed from the caller to this new request
- run from within the task
NB This is so simple that we don't normally run via this API
- this is just kept as an example of what needs to happen within the task
"""
current.auth.s3_impersonate(user_id)
# END =========================================================================
| mit | -8,151,199,539,073,031,000 | 36.611628 | 115 | 0.506585 | false |
manhg/tokit | tokit/tasks.py | 1 | 3573 | import os
from concurrent.futures import ThreadPoolExecutor
from tornado.queues import PriorityQueue, QueueEmpty
from tornado.gen import sleep, coroutine
from tokit import Event, on, logger
from inspect import iscoroutinefunction
from email.mime.text import MIMEText
import smtplib
from email.header import Header
from tornado.gen import coroutine
from tornado.concurrent import run_on_executor
tasks_queue = PriorityQueue()
def put(name, *args, priority=0, **kwargs):
"""
Schedule a task with the given params.
Handlers registered for the event with the same name will be used to execute the task.
Example::
@on('task_xyz')
def do_something(arg1):
pass
put('task_xyz', 'val1')
"""
tasks_queue.put((priority, {'name': name, 'args': args, 'kwargs': kwargs}))
@coroutine
def tasks_consumer(app):
"""
Check for pending tasks and execute them.
A task handler can be a coroutine (run in the event loop)
or a normal function (run in a thread - can be blocking).
"""
while True:
# another way: use Postgres notfiy / listen
# http://initd.org/psycopg/docs/advanced.html#asynchronous-notifications
yield sleep(0.3)
try:
priority, task = tasks_queue.get_nowait()
handlers = Event.get(task['name']).handlers
handler = None
for handler in handlers:
if iscoroutinefunction(handler):
yield handler(
app,
*task.get('args'),
**task.get('kwargs')
)
else:
with ThreadPoolExecutor() as executor:
yield executor.submit(
handler,
app,
*task.get('args'),
**task.get('kwargs')
)
if not handler:
logger.warn('No handler for task: %s', task['name'])
except QueueEmpty:
pass
else:
tasks_queue.task_done()
def register_task_runner(app):
from tornado.ioloop import IOLoop
IOLoop.current().spawn_callback(lambda: tasks_consumer(app))
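# Illustrative sketch (not part of the original module): a hypothetical handler
# and how it would be scheduled with the helpers above; the event name and the
# arguments are invented for demonstration only.
#
#   @on('task_cleanup')
#   def handle_cleanup(app, days):
#       # plain (non-coroutine) handler: executed in a ThreadPoolExecutor, may block
#       logger.info('cleaning records older than %s days', days)
#
#   put('task_cleanup', 30, priority=1)   # queued, later picked up by tasks_consumer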
@on('send_email')
@coroutine
def send_email_consumer(app, receipt, body, subject=None):
if not subject:
# consider first line as subject
subject, body = body.split("\n", 1)
msg = MIMEText(body, 'plain', 'utf-8')
config = app.config.env['smtp']
msg['Subject'] = Header(subject, 'utf-8')
msg['From'] = config['from']
msg['To'] = receipt
with smtplib.SMTP(config['host'], config.get('port')) as mailer:
if config.getboolean('tls'):
mailer.starttls()
if config.get('user'):
mailer.login(config.get('user'), config['password'])
mailer.send_message(msg)
mailer.quit()
logger.debug("Sent email to %s", receipt)
class EmailMixin:
def send_email(self, template, receipt, **kwargs):
content = self.render_string(
os.path.join(self.application.root_path, template), **kwargs
).decode()
put('send_email', receipt, content)
@on('init')
def init_executor(app):
max_thread_worker = app.config.env['app'].getint('max_thread_worker', 16)
app._thread_executor = ThreadPoolExecutor(max_workers=max_thread_worker)
class ThreadPoolMixin:
""" Mix this and wrap blocking function with ``run_on_executor`` """
@property
def executor(self):
return self.application._thread_executor
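# Illustrative sketch (assumption, not in the original file): ThreadPoolMixin is
# meant to be mixed into a Tornado RequestHandler so blocking work can be wrapped
# with run_on_executor; the handler class and its blocking method are hypothetical.
#
#   class ReportHandler(ThreadPoolMixin, tornado.web.RequestHandler):
#
#       @run_on_executor
#       def crunch(self, n):
#           return sum(range(n))        # blocking work, runs in the shared pool
#
#       @coroutine
#       def get(self):
#           result = yield self.crunch(10 ** 6)
#           self.write(str(result))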
| mit | 3,672,451,276,787,360,000 | 29.538462 | 80 | 0.588021 | false |
infobloxopen/netmri-toolkit | Python/NetMRI_GUI_Python/Script 3 - Generate a Custom Issue.py | 1 | 1227 | # BEGIN-SCRIPT-BLOCK
#
# Script-Filter:
# true
#
# Script-Variables:
# $command word "show version"
# END-SCRIPT-BLOCK
import requests, json, re
from infoblox_netmri.easy import NetMRIEasy
# These values will be provided by NetMRI before execution
defaults = {
"api_url": api_url,
"http_username": http_username,
"http_password": http_password,
"job_id": job_id,
"device_id": device_id,
"batch_id": batch_id
}
# Create NetMRI context manager. It will close session after execution
with NetMRIEasy(**defaults) as easy:
vtpstatus = easy.send_command('show vtp status')
regexp = re.compile(r"VTP Operating Mode\s*: (.*)")
if regexp.search(vtpstatus):
#print ('matched')
status = re.search('(?<=VTP Operating Mode\s.)(.*)', vtpstatus, re.MULTILINE).group()
if re.search(r'Server', status):
issue_id = easy.generate_issue("info", "siftest",**{
"Host":device_devicename,
"IPAddress":device_deviceipdotted,
"noclue1":'test1',
"noclue2":'test2',
"device_id": device_id,
"batch_id": batch_id
})
else:
print ('no match')
| mit | 2,723,534,120,206,774,300 | 29.675 | 93 | 0.586797 | false |
simos/keyboardlayouteditor | KeycodesReader.py | 1 | 5036 | #!/usr/bin/env python
# -*- encoding: UTF-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import sys
import re
import antlr3
from KeycodesLexer import KeycodesLexer, KEYCODELISTTYPE, KEYCODEMATERIAL, \
KEYCODELISTOPTIONS, KEYCODELISTNAME, \
INCLUDE, MINIMUM, MAXIMUM, KEYCODE, \
ALIAS, INDICATOR
from KeycodesParser import KeycodesParser
from KeycodesWalker import KeycodesWalker
# Global variable, we use global so that the compare function does not have to create the dictionary on every invocation.
KEYCODEDB = {}
# Helper function to iterate through all children of a given type
def getChildrenListByType(tree, type_value):
list = []
for i in range(tree.getChildCount()):
child = tree.getChild(i)
if child.getType() == type_value:
list.append(child)
return list
def parseFile(fileandvariant = "/usr/share/X11/xkb/keycodes/xfree86|xfree86", *morefilesandvariants):
keycodedb = {}
for eachfileandvariant in (fileandvariant,) + morefilesandvariants:
filename, pipe, variant = eachfileandvariant.partition('|')
try:
file = open(filename, 'r')
        except (IOError, OSError):
            print "Could not open file ", filename, " Aborting..."
            sys.exit(-1)
        file.close()
char_stream = antlr3.ANTLRFileStream(filename)
lexer = KeycodesLexer(char_stream)
tokens = antlr3.CommonTokenStream(lexer)
parser = KeycodesParser(tokens)
result = parser.keycodedoc()
nodes = antlr3.tree.CommonTreeNodeStream(result.tree)
nodes.setTokenStream(tokens)
walker = KeycodesWalker(nodes)
# walker.keycodedoc()
keycodeidinclude = [variant]
for itemKeycodeDoc in result.tree.getChildren():
copying = False
listType = getChildrenListByType(itemKeycodeDoc, KEYCODELISTTYPE)
material = getChildrenListByType(itemKeycodeDoc, KEYCODEMATERIAL)
if len(listType) != 1:
print "Requires single node for KEYCODELISTTYPE. Found", len(listType)
sys.exit(-1)
if len(material) != 1:
print "Requires single node for KEYCODEMATERIAL. Found", len(material)
sys.exit(-1)
for listNameGroup in getChildrenListByType(listType[0], KEYCODELISTNAME):
for listName in listNameGroup.getChildren():
if listName.getText()[1:-1] == variant or listName.getText()[1:-1] in keycodeidinclude:
copying = True
if not copying:
break
for materialIncludeGroup in getChildrenListByType(material[0], INCLUDE):
for includeName in materialIncludeGroup.getChildren():
includeKeycodelist = re.findall('(\w+)\((\w+)\)', includeName.getText()[1:-1])
if includeKeycodelist[0][1] not in keycodeidinclude:
keycodeidinclude.append(includeKeycodelist[0][1])
for keycode in getChildrenListByType(material[0], KEYCODE):
keycodedb[keycode.getChild(0).getText()] = keycode.getChild(1).getText()
for alias in getChildrenListByType(material[0], ALIAS):
keycodedb[alias.getChild(0).getText()] = keycodedb[alias.getChild(1).getText()]
for indicator in getChildrenListByType(material[0], INDICATOR):
pass
return keycodedb
def compare_keycode(a, b):
global KEYCODEDB
    # Keycodes missing from KEYCODEDB sort after the known ones; only fall
    # through to the value comparison when both keycodes are known.
    if not KEYCODEDB.has_key(a):
        if not KEYCODEDB.has_key(b):
            return 0
        return 1
    if not KEYCODEDB.has_key(b):
        return -1
if KEYCODEDB[a] > KEYCODEDB[b]:
return 1
elif KEYCODEDB[a] < KEYCODEDB[b]:
return -1
else:
return 0
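# Illustrative usage (not part of the original file): the comparator is intended
# for Python 2 style sorting once initialise() has filled KEYCODEDB; the keycode
# names below are only examples.
#
#   initialise()
#   sorted(['<AE01>', '<TLDE>', '<AC01>'], cmp=compare_keycode)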
def initialise():
global KEYCODEDB
KEYCODEDB = parseFile("/usr/share/X11/xkb/keycodes/xfree86|xfree86",
"/usr/share/X11/xkb/keycodes/aliases|qwerty",
"/usr/share/X11/xkb/keycodes/evdev|evdev")
#KEYCODEDB = parseFile("evdev|evdev", "aliases|qwerty")
if __name__ == "__main__":
KEYCODEDB = parseFile("/usr/share/X11/xkb/keycodes/xfree86|xfree86", "/usr/share/X11/xkb/keycodes/aliases|qwerty")
| gpl-3.0 | 668,175,162,504,604,700 | 38.34375 | 121 | 0.620334 | false |
blackpan2/HealthNet | src/healthnet/prescription/views.py | 1 | 5883 | from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, HttpResponseRedirect
from .forms import *
from base.views import group_required
from base.models import *
# Create your views here.
@login_required
@group_required('Doctor')
def createPrescription(request, patient=None):
user = User.objects.get(username=request.user.username)
person = Person.objects.get(user=user)
doctor = Doctor.objects.get(personID=person)
print(Prescription.objects.all())
prescriptionSuccess = False
if request.method == 'POST':
preForm = PrescriptionForm(request.POST)
medForm = MedicationForm(request.POST)
prescrip = apps.get_model('base', 'Prescription')
# If both forms are valid
if (preForm.is_valid() and medForm.is_valid()):
# Grab the cleaned data from the form
# Medication form first
name = medForm.cleaned_data['name']
descrip = medForm.cleaned_data['description']
# Create a medication object
med = Medication(name=name, description=descrip)
med.save()
Logger.createLog('Created',person,str(prescrip),doctor.hospitalID)
pat = preForm.cleaned_data['patient']
amount = preForm.cleaned_data['amount']
refill = preForm.cleaned_data['refill']
Prescription.objects.create(medication=med, patientID=pat, amount=amount,
refill=refill, doctorID=doctor)
prescriptionSuccess = True
# prescription = preForm.save(commit=False)
# # prescription.doctor = doctor
# prescription.save()
# medication = medForm.save(commit=False)
# medication.save()
# Todo add system logger event
else:
preForm = PrescriptionForm()
medForm = MedicationForm()
# If a patient is passed in
if patient is not None:
# Fill the form
preForm.fields['patient'].initial = patient
# # Set the doctor field
# preForm.fields['doctor'].initial = doctor
# Hide the doctor field
# preForm.fields['doctor'].widget = forms.HiddenInput()
context = {'prescriptionForm': preForm,
'medicationForm': medForm,
'prescriptionSuccess': prescriptionSuccess}
return render(request, 'prescription/write.html', context)
@login_required
@group_required('Patient', 'Nurse', 'Doctor')
def viewPrescriptions(request):
user_model = User.objects.get_by_natural_key(request.user.username)
person_model = Person.objects.get(user=user_model)
# Create a context
context = {
'prescriptions': None
}
# If a patient is viewing
if Patient.objects.filter(personID=person_model).exists():
patient_model = Patient.objects.get(personID=person_model)
# Grab the prescriptions
prescriptions = Prescription.objects.all().filter(patientID=patient_model.id).order_by(
'patientID__personID__user__first_name')
context['prescriptions'] = prescriptions
# If a doctor is viewing
elif Doctor.objects.filter(personID=person_model).exists():
doctor_model = Doctor.objects.get(personID=person_model)
patients = gatherPatientList(user_model)
prescriptions = Prescription.objects.all().filter(doctorID=doctor_model.id).order_by(
'patientID__personID__user__first_name')
context['prescriptions'] = prescriptions
# If a nurse is viewing
elif Nurse.objects.filter(personID=person_model).exists():
nurse_model = Nurse.objects.get(personID=person_model)
patients = gatherPatientList(user_model)
prescriptions = []
for p in patients:
# Grab each patient's prescriptions
prescriptionObjects = Prescription.objects.all().filter(patientID=p.id)
# Add them to the list
prescriptions.extend(prescriptionObjects)
context['prescriptions'] = prescriptions
return render(request, 'prescription/view.html', context)
@login_required()
@group_required('Doctor')
def deletePrescription(request, **kwargs):
prescriptionID = kwargs.get('pk')
prescription_model = get_object_or_404(Prescription, id=prescriptionID)
# If a post method
if request.method == 'POST':
form = DeletePrescription(request.POST, instance=prescription_model)
if form.is_valid():
prescription_model.delete()
return HttpResponseRedirect(reverse('prescription:view'))
else:
form = DeletePrescription(instance=prescription_model)
context = {'form': form,
'prescriptionID': prescriptionID,
'prescription_model': prescription_model, }
return render(request, 'prescription/view.html', context)
def gatherPatientList(requestUser):
person_model = Person.objects.get(user=requestUser)
if Nurse.objects.filter(personID=person_model).exists():
nursePerson = Nurse.objects.all().get(personID=person_model)
# Grab the nurse's hospital ID
hospital_id = nursePerson.hospitalID
# Grab patients with that hospital ID
patients = Patient.objects.all().filter(hospitalID=hospital_id)
elif Admin.objects.filter(personID=person_model).exists():
adminPerson = Admin.objects.all().get(personID=person_model)
hospital_id = adminPerson.hospitalID
patients = Patient.objects.all().filter(hospitalID=hospital_id)
elif Doctor.objects.filter(personID=person_model).exists():
patients = Patient.objects.all()
elif Root.objects.filter(personID=person_model).exists():
patients = Patient.objects.all()
else:
patients = []
return patients
| mit | -6,837,937,245,806,848,000 | 36.711538 | 95 | 0.653578 | false |
nodejs/node-gyp | gyp/tools/pretty_vcproj.py | 3 | 10633 | #!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
__author__ = "nsylvain (Nicolas Sylvain)"
ARGUMENTS = None
REPLACEMENTS = dict()
def cmp(x, y):
return (x > y) - (x < y)
class CmpTuple:
"""Compare function between 2 tuple."""
def __call__(self, x, y):
return cmp(x[0], y[0])
class CmpNode:
"""Compare function between 2 xml nodes."""
def __call__(self, x, y):
def get_string(node):
node_string = "node"
node_string += node.nodeName
if node.nodeValue:
node_string += node.nodeValue
if node.attributes:
# We first sort by name, if present.
node_string += node.getAttribute("Name")
all_nodes = []
for (name, value) in node.attributes.items():
all_nodes.append((name, value))
all_nodes.sort(CmpTuple())
for (name, value) in all_nodes:
node_string += name
node_string += value
return node_string
return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print("{}{}".format(" " * indent, node.data.strip()))
return
if node.childNodes:
node.normalize()
# Get the number of attributes
attr_count = 0
if node.attributes:
attr_count = node.attributes.length
# Print the main tag
if attr_count == 0:
print("{}<{}>".format(" " * indent, node.nodeName))
else:
print("{}<{}".format(" " * indent, node.nodeName))
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print('{} {}="{}"'.format(" " * indent, name, value))
print("%s>" % (" " * indent))
if node.nodeValue:
print("{} {}".format(" " * indent, node.nodeValue))
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent + 2)
print("{}</{}>".format(" " * indent, node.nodeName))
def FlattenFilter(node):
"""Returns a list of all the node and sub nodes."""
node_list = []
if node.attributes and node.getAttribute("Name") == "_excluded_files":
# We don't add the "_excluded_files" filter.
return []
for current in node.childNodes:
if current.nodeName == "Filter":
node_list.extend(FlattenFilter(current))
else:
node_list.append(current)
return node_list
def FixFilenames(filenames, current_directory):
new_list = []
for filename in filenames:
if filename:
for key in REPLACEMENTS:
filename = filename.replace(key, REPLACEMENTS[key])
os.chdir(current_directory)
filename = filename.strip("\"' ")
if filename.startswith("$"):
new_list.append(filename)
else:
new_list.append(os.path.abspath(filename))
return new_list
def AbsoluteNode(node):
"""Makes all the properties we know about in this node absolute."""
if node.attributes:
for (name, value) in node.attributes.items():
if name in [
"InheritedPropertySheets",
"RelativePath",
"AdditionalIncludeDirectories",
"IntermediateDirectory",
"OutputDirectory",
"AdditionalLibraryDirectories",
]:
# We want to fix up these paths
path_list = value.split(";")
new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
node.setAttribute(name, ";".join(new_list))
if not value:
node.removeAttribute(name)
def CleanupVcproj(node):
"""For each sub node, we call recursively this function."""
for sub_node in node.childNodes:
AbsoluteNode(sub_node)
CleanupVcproj(sub_node)
# Normalize the node, and remove all extraneous whitespaces.
for sub_node in node.childNodes:
if sub_node.nodeType == Node.TEXT_NODE:
sub_node.data = sub_node.data.replace("\r", "")
sub_node.data = sub_node.data.replace("\n", "")
sub_node.data = sub_node.data.rstrip()
# Fix all the semicolon separated attributes to be sorted, and we also
# remove the dups.
if node.attributes:
for (name, value) in node.attributes.items():
sorted_list = sorted(value.split(";"))
unique_list = []
for i in sorted_list:
if not unique_list.count(i):
unique_list.append(i)
node.setAttribute(name, ";".join(unique_list))
if not value:
node.removeAttribute(name)
if node.childNodes:
node.normalize()
# For each node, take a copy, and remove it from the list.
node_array = []
while node.childNodes and node.childNodes[0]:
# Take a copy of the node and remove it from the list.
current = node.childNodes[0]
node.removeChild(current)
# If the child is a filter, we want to append all its children
# to this same list.
if current.nodeName == "Filter":
node_array.extend(FlattenFilter(current))
else:
node_array.append(current)
# Sort the list.
node_array.sort(CmpNode())
# Insert the nodes in the correct order.
for new_node in node_array:
# But don't append empty tool node.
if new_node.nodeName == "Tool":
if new_node.attributes and new_node.attributes.length == 1:
# This one was empty.
continue
if new_node.nodeName == "UserMacro":
continue
node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
# TODO(nsylvain): Find a better way to navigate the xml.
nodes = []
for node in vcproj.childNodes:
if node.nodeName == "Configurations":
for sub_node in node.childNodes:
if sub_node.nodeName == "Configuration":
nodes.append(sub_node)
return nodes
def GetChildrenVsprops(filename):
dom = parse(filename)
if dom.documentElement.attributes:
vsprops = dom.documentElement.getAttribute("InheritedPropertySheets")
return FixFilenames(vsprops.split(";"), os.path.dirname(filename))
return []
def SeekToNode(node1, child2):
# A text node does not have properties.
if child2.nodeType == Node.TEXT_NODE:
return None
# Get the name of the current node.
current_name = child2.getAttribute("Name")
if not current_name:
# There is no name. We don't know how to merge.
return None
# Look through all the nodes to find a match.
for sub_node in node1.childNodes:
if sub_node.nodeName == child2.nodeName:
name = sub_node.getAttribute("Name")
if name == current_name:
return sub_node
# No match. We give up.
return None
def MergeAttributes(node1, node2):
# No attributes to merge?
if not node2.attributes:
return
for (name, value2) in node2.attributes.items():
# Don't merge the 'Name' attribute.
if name == "Name":
continue
value1 = node1.getAttribute(name)
if value1:
# The attribute exist in the main node. If it's equal, we leave it
# untouched, otherwise we concatenate it.
if value1 != value2:
node1.setAttribute(name, ";".join([value1, value2]))
else:
# The attribute does not exist in the main node. We append this one.
node1.setAttribute(name, value2)
# If the attribute was a property sheet attributes, we remove it, since
# they are useless.
if name == "InheritedPropertySheets":
node1.removeAttribute(name)
def MergeProperties(node1, node2):
MergeAttributes(node1, node2)
for child2 in node2.childNodes:
child1 = SeekToNode(node1, child2)
if child1:
MergeProperties(child1, child2)
else:
node1.appendChild(child2.cloneNode(True))
def main(argv):
"""Main function of this vcproj prettifier."""
global ARGUMENTS
ARGUMENTS = argv
# check if we have exactly 1 parameter.
if len(argv) < 2:
print(
'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
"[key2=value2]" % argv[0]
)
return 1
# Parse the keys
for i in range(2, len(argv)):
(key, value) = argv[i].split("=")
REPLACEMENTS[key] = value
# Open the vcproj and parse the xml.
dom = parse(argv[1])
# First thing we need to do is find the Configuration Node and merge them
# with the vsprops they include.
for configuration_node in GetConfiguationNodes(dom.documentElement):
# Get the property sheets associated with this configuration.
vsprops = configuration_node.getAttribute("InheritedPropertySheets")
# Fix the filenames to be absolute.
vsprops_list = FixFilenames(
vsprops.strip().split(";"), os.path.dirname(argv[1])
)
# Extend the list of vsprops with all vsprops contained in the current
# vsprops.
for current_vsprops in vsprops_list:
vsprops_list.extend(GetChildrenVsprops(current_vsprops))
# Now that we have all the vsprops, we need to merge them.
for current_vsprops in vsprops_list:
MergeProperties(configuration_node, parse(current_vsprops).documentElement)
# Now that everything is merged, we need to cleanup the xml.
CleanupVcproj(dom.documentElement)
# Finally, we use the prett xml function to print the vcproj back to the
# user.
# print dom.toprettyxml(newl="\n")
PrettyPrintNode(dom.documentElement)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| mit | -3,412,354,126,205,803,500 | 30.365782 | 87 | 0.593341 | false |
jeffkit/wechat | wechat/official.py | 1 | 16558 | # encoding=utf-8
from hashlib import sha1
import requests
import json
import tempfile
import shutil
import os
from .crypt import WXBizMsgCrypt
from .models import WxRequest, WxResponse
from .models import WxMusic, WxArticle, WxImage, WxVoice, WxVideo, WxLink
from .models import WxTextResponse, WxImageResponse, WxVoiceResponse,\
WxVideoResponse, WxMusicResponse, WxNewsResponse, APIError, WxEmptyResponse
__all__ = ['WxRequest', 'WxResponse', 'WxMusic', 'WxArticle', 'WxImage',
'WxVoice', 'WxVideo', 'WxLink', 'WxTextResponse',
'WxImageResponse', 'WxVoiceResponse', 'WxVideoResponse',
'WxMusicResponse', 'WxNewsResponse', 'WxApplication',
'WxEmptyResponse', 'WxApi', 'APIError']
class WxApplication(object):
    UNSUPPORT_TXT = u'暂不支持此类型消息'  # "This message type is not supported yet"
    WELCOME_TXT = u'你好!感谢您的关注!'  # "Hello! Thank you for following us!"
SECRET_TOKEN = None
APP_ID = None
ENCODING_AES_KEY = None
def is_valid_params(self, params):
timestamp = params.get('timestamp', '')
nonce = params.get('nonce', '')
signature = params.get('signature', '')
echostr = params.get('echostr', '')
sign_ele = [self.token, timestamp, nonce]
sign_ele.sort()
if(signature == sha1(''.join(sign_ele)).hexdigest()):
return True, echostr
else:
return None
def process(self, params, xml=None, token=None, app_id=None, aes_key=None):
self.token = token if token else self.SECRET_TOKEN
self.app_id = app_id if app_id else self.APP_ID
self.aes_key = aes_key if aes_key else self.ENCODING_AES_KEY
assert self.token is not None
ret = self.is_valid_params(params)
if not ret:
return 'invalid request'
if not xml:
            # Verification request sent from the WeChat developer console
return ret[1]
        # Decrypt the incoming message
encrypt_type = params.get('encrypt_type', '')
if encrypt_type != '' and encrypt_type != 'raw':
msg_signature = params.get('msg_signature', '')
timestamp = params.get('timestamp', '')
nonce = params.get('nonce', '')
if encrypt_type == 'aes':
cpt = WXBizMsgCrypt(self.token,
self.aes_key, self.app_id)
err, xml = cpt.DecryptMsg(xml, msg_signature, timestamp, nonce)
if err:
return 'decrypt message error, code : %s' % err
else:
return 'unsupport encrypty type %s' % encrypt_type
req = WxRequest(xml)
self.wxreq = req
func = self.handler_map().get(req.MsgType, None)
if not func:
return WxTextResponse(self.UNSUPPORT_TXT, req)
self.pre_process()
rsp = func(req)
self.post_process(rsp)
result = rsp.as_xml().encode('UTF-8')
        # Encrypt the outgoing message
if encrypt_type != '' and encrypt_type != 'raw':
if encrypt_type == 'aes':
err, result = cpt.EncryptMsg(result, nonce)
if err:
return 'encrypt message error , code %s' % err
else:
return 'unsupport encrypty type %s' % encrypt_type
return result
def on_text(self, text):
return WxTextResponse(self.UNSUPPORT_TXT, text)
def on_link(self, link):
return WxTextResponse(self.UNSUPPORT_TXT, link)
def on_image(self, image):
return WxTextResponse(self.UNSUPPORT_TXT, image)
def on_voice(self, voice):
return WxTextResponse(self.UNSUPPORT_TXT, voice)
def on_video(self, video):
return WxTextResponse(self.UNSUPPORT_TXT, video)
def on_location(self, loc):
return WxTextResponse(self.UNSUPPORT_TXT, loc)
def event_map(self):
if getattr(self, 'event_handlers', None):
return self.event_handlers
return {
'subscribe': self.on_subscribe,
'unsubscribe': self.on_unsubscribe,
'SCAN': self.on_scan,
'LOCATION': self.on_location_update,
'CLICK': self.on_click,
'VIEW': self.on_view,
'scancode_push': self.on_scancode_push,
'scancode_waitmsg': self.on_scancode_waitmsg,
'pic_sysphoto': self.on_pic_sysphoto,
'pic_photo_or_album': self.on_pic_photo_or_album,
'pic_weixin': self.on_pic_weixin,
'location_select': self.on_location_select,
}
def on_event(self, event):
func = self.event_map().get(event.Event, None)
return func(event)
def on_subscribe(self, sub):
return WxTextResponse(self.WELCOME_TXT, sub)
def on_unsubscribe(self, unsub):
return WxEmptyResponse()
def on_click(self, click):
return WxEmptyResponse()
def on_scan(self, scan):
return WxEmptyResponse()
def on_location_update(self, location):
return WxEmptyResponse()
def on_view(self, view):
return WxEmptyResponse()
def on_scancode_push(self, event):
return WxEmptyResponse()
def on_scancode_waitmsg(self, event):
return WxEmptyResponse()
def on_pic_sysphoto(self, event):
return WxEmptyResponse()
def on_pic_photo_or_album(self, event):
return WxEmptyResponse()
def on_pic_weixin(self, event):
return WxEmptyResponse()
def on_location_select(self, event):
return WxEmptyResponse()
def handler_map(self):
if getattr(self, 'handlers', None):
return self.handlers
return {
'text': self.on_text,
'link': self.on_link,
'image': self.on_image,
'voice': self.on_voice,
'video': self.on_video,
'location': self.on_location,
'event': self.on_event,
}
def pre_process(self):
pass
def post_process(self, rsp=None):
pass
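# Illustrative sketch (not part of the original file): a minimal subclass showing
# how WxApplication is typically wired into a web request handler. The EchoApp
# class, the token/app id values, and the assumption that WxRequest exposes the
# XML fields (e.g. Content) as attributes are all hypothetical here.
#
#   class EchoApp(WxApplication):
#       SECRET_TOKEN = 'my-token'
#       APP_ID = 'wx1234567890'
#
#       def on_text(self, req):
#           return WxTextResponse(req.Content, req)
#
#   # inside an HTTP view: params = query-string arguments, body = raw request XML
#   #   reply_xml = EchoApp().process(params, xml=body)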
class WxBaseApi(object):
API_PREFIX = 'https://api.weixin.qq.com/cgi-bin/'
def __init__(self, appid, appsecret, api_entry=None):
self.appid = appid
self.appsecret = appsecret
self._access_token = None
self.api_entry = api_entry or self.API_PREFIX
@property
def access_token(self):
if not self._access_token:
token, err = self.get_access_token()
if not err:
self._access_token = token['access_token']
return self._access_token
else:
return None
return self._access_token
def set_access_token(self, token):
self._access_token = token
def _process_response(self, rsp):
if rsp.status_code != 200:
return None, APIError(rsp.status_code, 'http error')
try:
content = rsp.json()
except:
return None, APIError(99999, 'invalid rsp')
if 'errcode' in content and content['errcode'] != 0:
return None, APIError(content['errcode'], content['errmsg'])
return content, None
def _get(self, path, params=None):
if not params:
params = {}
params['access_token'] = self.access_token
rsp = requests.get(self.api_entry + path, params=params,
verify=False)
return self._process_response(rsp)
def _post(self, path, data, ctype='json'):
headers = {'Content-type': 'application/json'}
path = self.api_entry + path
if '?' in path:
path += '&access_token=' + self.access_token
else:
path += '?access_token=' + self.access_token
if ctype == 'json':
data = json.dumps(data, ensure_ascii=False).encode('utf-8')
rsp = requests.post(path, data=data, headers=headers, verify=False)
return self._process_response(rsp)
def upload_media(self, mtype, file_path=None, file_content=None,
url='media/upload', suffies=None):
path = self.api_entry + url + '?access_token=' \
+ self.access_token + '&type=' + mtype
suffies = suffies or {'image': '.jpg', 'voice': '.mp3',
'video': 'mp4', 'thumb': 'jpg'}
suffix = None
if mtype in suffies:
suffix = suffies[mtype]
if file_path:
fd, tmp_path = tempfile.mkstemp(suffix=suffix)
shutil.copy(file_path, tmp_path)
os.close(fd)
elif file_content:
fd, tmp_path = tempfile.mkstemp(suffix=suffix)
f = os.fdopen(fd, 'wb')
f.write(file_content)
f.close()
media = open(tmp_path, 'rb')
rsp = requests.post(path, files={'media': media},
verify=False)
media.close()
os.remove(tmp_path)
return self._process_response(rsp)
def download_media(self, media_id, to_path, url='media/get'):
rsp = requests.get(self.api_entry + url,
params={'media_id': media_id,
'access_token': self.access_token},
verify=False)
if rsp.status_code == 200:
save_file = open(to_path, 'wb')
save_file.write(rsp.content)
save_file.close()
return {'errcode': 0}, None
else:
return None, APIError(rsp.status_code, 'http error')
def _get_media_id(self, obj, resource, content_type):
if not obj.get(resource + '_id'):
rsp, err = None, None
if obj.get(resource + '_content'):
rsp, err = self.upload_media(
content_type,
file_content=obj.get(resource + '_content'))
if err:
return None
elif obj.get(resource + '_url'):
rs = requests.get(obj.get(resource + '_url'))
rsp, err = self.upload_media(
content_type,
file_content=rs.content)
if err:
return None
else:
return None
return rsp['media_id']
return obj.get(resource + '_id')
class WxApi(WxBaseApi):
def get_access_token(self, url=None, **kwargs):
params = {'grant_type': 'client_credential', 'appid': self.appid,
'secret': self.appsecret}
if kwargs:
params.update(kwargs)
rsp = requests.get(url or self.api_entry + 'token', params=params,
verify=False)
return self._process_response(rsp)
def user_info(self, user_id, lang='zh_CN'):
return self._get('user/info', {'openid': user_id, 'lang': lang})
def followers(self, next_id=''):
return self._get('user/get', {'next_openid': next_id})
def send_message(self, to_user, msg_type, content):
func = {'text': self.send_text,
'image': self.send_image,
'voice': self.send_voice,
'video': self.send_video,
'music': self.send_music,
'news': self.send_news}.get(msg_type, None)
if func:
return func(to_user, content)
return None, None
def send_text(self, to_user, content):
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'text',
'text': {'content': content}})
def send_image(self, to_user, media_id=None, media_url=None):
if media_id and media_id.startswith('http'):
media_url = media_id
media_id = None
mid = self._get_media_id(
{'media_id': media_id, 'media_url': media_url},
'media', 'image')
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'image',
'image': {'media_id': mid}})
def send_voice(self, to_user, media_id=None, media_url=None):
if media_id and media_id.startswith('http'):
media_url = media_id
media_id = None
mid = self._get_media_id(
{'media_id': media_id, 'media_url': media_url},
'media', 'voice')
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'voice',
'voice': {'media_id': mid}})
def send_music(self, to_user, music):
music['thumb_media_id'] = self._get_media_id(music,
'thumb_media',
'image')
if not music.get('thumb_media_id'):
return None, APIError(41006, 'missing media_id')
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'music',
'music': music})
def send_video(self, to_user, video):
video['media_id'] = self._get_media_id(video, 'media', 'video')
video['thumb_media_id'] = self._get_media_id(video,
'thumb_media', 'image')
if 'media_id' not in video or 'thumb_media_id' not in video:
return None, APIError(41006, 'missing media_id')
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'video',
'video': video})
def send_news(self, to_user, news):
if isinstance(news, dict):
news = [news]
return self._post('message/custom/send',
{'touser': to_user, 'msgtype': 'news',
'news': {'articles': news}})
def create_group(self, name):
return self._post('groups/create',
{'group': {'name': name}})
def groups(self):
return self._get('groups/get')
def update_group(self, group_id, name):
return self._post('groups/update',
{'group': {'id': group_id, 'name': name}})
def group_of_user(self, user_id):
return self._get('groups/getid', {'openid': user_id})
def move_user_to_group(self, user_id, group_id):
return self._post('groups/members/update',
{'openid': user_id, 'to_groupid': group_id})
def create_menu(self, menus):
return self._post('menu/create', menus)
def get_menu(self):
return self._get('menu/get')
def delete_menu(self):
return self._get('menu/delete')
def create_tag(self, name):
return self._post('tags/create',
{'tag': {"name":name}})
def tags(self):
return self._get('tags/get')
def update_tag(self, tag_id,name):
return self._post('tags/update',
{'tag': {'id': tag_id, 'name': name}})
def delete_tag(self, tag_id):
return self._post('tags/delete',
{'tag': {'id': tag_id}})
def tag_of_user(self, user_id):
return self._post('tags/getidlist', {'openid': user_id})
def batch_tagging(self, tag_id, users_list):
return self._post('tags/members/batchtagging',
{'openid_list': users_list, 'tagid': tag_id})
def batch_untagging(self, tag_id,users_list):
return self._post('tags/members/batchuntagging',
{'openid_list': users_list, 'tagid': tag_id})
def get_blacklist(self, user_id=""):
return self._post('tags/members/getblacklist',
{'begin_openid': user_id})
def batch_blacklist(self, users_list):
return self._post('tags/members/batchblacklist',
{'openid_list': users_list})
def batch_unblacklist(self, users_list):
return self._post('tags/members/batchunblacklist',
{'openid_list': users_list})
def update_user_remark(self, openid, remark):
return self._post('user/info/updateremark',
{'openid': openid, 'remark': remark})
def customservice_records(self, starttime, endtime, openid=None,
pagesize=100, pageindex=1):
return self._get('customservice/getrecord',
{'starttime': starttime,
'endtime': endtime,
'openid': openid,
'pagesize': pagesize,
'pageindex': pageindex})
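# Illustrative sketch (not part of the original file): typical use of WxApi for a
# custom-service text message; the appid, secret and openid values are placeholders.
#
#   api = WxApi('your-appid', 'your-appsecret')
#   content, err = api.send_text('user-openid', u'hello')
#   if err is None:
#       pass   # message accepted by the WeChat API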
| gpl-3.0 | 6,699,400,970,800,230,000 | 34.670996 | 79 | 0.53392 | false |
mastak/airflow_multi_dagrun | examples/trigger_with_multi_dagrun_sensor.py | 1 | 1147 | from airflow.models import DAG
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from airflow_multi_dagrun.operators import TriggerMultiDagRunOperator
from airflow_multi_dagrun.sensors import MultiDagRunSensor
def generate_dag_run():
return [{'timeout': i} for i in range(10)]
def after_dags_handler():
print("All target DAGs are finished")
args = {
'start_date': days_ago(1),
'owner': 'airflow',
}
dag = DAG(
dag_id='trigger_with_multi_dagrun_sensor',
max_active_runs=1,
schedule_interval='@hourly',
default_args=args,
)
gen_target_dag_run = TriggerMultiDagRunOperator(
task_id='gen_target_dag_run',
dag=dag,
trigger_dag_id='common_target',
python_callable=generate_dag_run,
)
# Wait until there is no running instance of target DAG
wait_target_dag = MultiDagRunSensor(
task_id='wait_target_dag',
dag=dag
)
wait_target_dag.set_upstream(gen_target_dag_run)
after_dags_handler_op = PythonOperator(
task_id='after_dags_handler',
python_callable=after_dags_handler,
dag=dag
)
after_dags_handler_op.set_upstream(wait_target_dag)
| apache-2.0 | -5,851,436,575,635,393,000 | 22.895833 | 69 | 0.720139 | false |
jdorvi/EsriBasemaps | esrimap/resources.py | 1 | 5317 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt4 (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x04\x0a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x17\x00\x00\x00\x18\x08\x06\x00\x00\x00\x11\x7c\x66\x75\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\
\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\
\x00\x9a\x9c\x18\x00\x00\x00\x07\x74\x49\x4d\x45\x07\xd9\x02\x15\
\x16\x11\x2c\x9d\x48\x83\xbb\x00\x00\x03\x8a\x49\x44\x41\x54\x48\
\xc7\xad\x95\x4b\x68\x5c\x55\x18\xc7\x7f\xe7\xdc\x7b\x67\xe6\xce\
\x4c\x66\x26\x49\xd3\x24\x26\xa6\xc6\xf8\x40\x21\xa5\x04\xb3\x28\
\xda\x98\x20\xa5\x0b\xad\x55\xa8\x2b\xc5\x50\x1f\xa0\x6e\x34\x2b\
\x45\x30\x14\x02\xba\x52\x69\x15\x17\x66\x63\x45\x97\x95\xa0\xad\
\x0b\xfb\xc0\x06\x25\xb6\x71\x61\x12\x41\x50\xdb\x2a\x21\xd1\xe2\
\x24\xf3\x9e\xc9\xcc\xbd\xe7\x1c\x17\x35\x43\x1e\x33\x21\xb6\xfd\
\x56\x87\xf3\x9d\xfb\xfb\x1e\xf7\xff\x9d\x23\x8c\x31\x43\x95\xf4\
\x85\x1e\x3f\x3b\x35\xac\xfd\xcc\x43\xdc\xa4\x49\x3b\xfe\x9d\x1d\
\xdb\x7b\x22\x90\x78\xf8\xb2\x28\xa7\xbe\x7d\xc1\x4b\x9d\x79\xdf\
\x18\x15\xe5\x16\x99\x10\x56\xde\x69\xdc\x3f\x22\xfd\xec\xd4\xf0\
\xad\x04\x03\x18\xa3\xa2\x7e\x76\x6a\x58\xde\x68\x2b\xb4\x36\xf8\
\xbe\xc6\x18\x53\xdb\xef\xe7\xfa\xec\xed\x67\x63\x10\x42\x00\xf0\
\xfb\xd5\x65\x2a\x15\x45\xc7\x6d\x0d\x00\xc4\xa2\xc1\xaa\x6f\x0d\
\x3e\x6c\xab\xc2\x1c\x56\xa4\x77\x4b\xb0\xf2\x35\x15\x5f\x21\x85\
\xe0\xc8\x6b\x5f\x92\x2d\x37\x33\x39\xf9\x03\x27\x8e\x1f\xa2\xf7\
\xbe\x9d\x04\x1c\x0b\x37\xe4\xac\xff\xa6\x30\x87\xbd\xba\x00\x6a\
\x06\x79\xe5\xf5\xaf\x89\xd9\x92\xc5\xcc\x0a\xd9\x7c\x19\xcf\xe9\
\xe2\xe4\xa9\x2f\x78\x7c\xff\x01\x72\x85\x0a\x2b\x65\x1f\xa5\x4c\
\xb5\xb2\x55\x16\x80\xbd\x31\xda\xda\x20\x1f\x7d\x3e\xcd\xc2\xfd\
\x59\xa6\x93\x39\x92\xd1\x22\xea\x9b\x16\xce\x9d\x3f\xce\xe0\x83\
\x03\x24\x82\x59\x3a\xdb\x7b\x88\xc7\x82\x68\x63\x58\xc9\xcc\x62\
\x8c\x21\x18\xb0\x6a\xc3\x37\x06\x49\x16\xff\x24\x6b\xa5\x49\xbb\
\x25\xbc\xa2\xa6\x21\xbb\x40\x7f\xdf\x00\x83\xbd\x01\x8e\x3c\xd5\
\x45\xd7\x8e\x6b\x9c\x9c\x98\x25\x1a\xb6\xe8\xbe\x3d\xc2\xdd\x77\
\x44\x48\xc4\x1c\x22\xe1\xeb\x58\x59\xaf\xcf\xd3\x33\x29\x2e\x34\
\x2d\x91\x93\x3e\xbe\x34\x78\x01\xc5\xe2\x61\xc5\xae\x72\x8e\x70\
\xc8\xc2\x0d\x5a\xbc\xf5\xee\x2f\x9c\xfa\x3e\x86\x69\x7a\x8e\xcf\
\x26\xe6\xf9\x63\xa1\x44\xa1\xa4\xd0\xda\x6c\x0d\x2f\x15\x7c\xb4\
\x67\x28\x59\x0a\xcf\xd6\x54\xe2\x06\x13\x87\x2b\x6f\x68\xa6\x27\
\xaf\x31\x32\x36\xc7\xb2\x7f\x17\xef\x7d\x7c\x8c\x33\x67\xcf\x12\
\x70\x24\x4a\x69\xd6\x6a\x46\xd6\xd3\x70\x72\xa9\x82\x67\x34\x45\
\xad\x28\xdb\x1a\x15\x34\x98\xff\x46\xed\xef\x37\x0d\x99\xbf\x4a\
\x3c\x30\x38\xc0\xc8\x4b\xaf\x92\x5a\x9c\xe2\xe0\x23\x6d\x74\xb4\
\xba\x84\x5d\x0b\x29\x45\x7d\xb8\x94\x82\x96\xb6\x10\xf3\xc5\x12\
\x2a\xef\x53\x11\x1a\x63\xad\x3f\x93\x19\x85\xf1\xb1\x77\x58\x5a\
\xf8\x99\x97\x9f\xe9\xa6\x75\x47\x90\xc6\xb8\x43\xd8\xb5\xb6\xce\
\xfc\xfa\xfd\x00\xfb\x3e\xf4\xc8\x05\x35\xba\x5e\xeb\x46\x21\xf9\
\xcf\x0a\xa9\x8c\x87\xe3\x48\xdc\x90\xb5\x6e\x98\x6a\xaa\x65\xf2\
\x52\x92\x43\x2f\x5e\xc2\x8c\x02\x1a\x10\xf5\x07\xac\xc3\x75\x70\
\x83\x92\x80\xb3\xf9\xd0\x26\xf8\x8f\xb3\x29\xc6\x3e\xb8\x8c\x19\
\x35\x75\x6b\x7b\x7e\x3c\xca\x45\x0c\x7e\x49\x31\xf4\x58\x3b\xf7\
\xf6\x34\x90\x88\x39\x04\x1c\x59\x1f\xfe\xdb\xd5\x3c\x5f\x9d\x4b\
\x32\xfd\x44\xb2\xba\xd7\xfa\xb6\x60\xcf\xde\x16\xdc\x90\x45\x4c\
\x4a\x2a\x9e\x62\xfe\x4e\xc5\xc8\xc1\x4e\xda\x76\x86\xe8\xe9\x0a\
\xe3\xd8\x92\x58\xd4\xc6\xb2\x44\x6d\x78\x2a\x53\xe1\xca\x7c\x99\
\x63\x5d\xbf\x56\x9d\xbd\x9f\x44\x18\x7a\xba\x95\x27\x0f\xb4\xd3\
\xdc\x18\xc0\xf3\x0d\x52\x40\xd8\xb5\xb0\xa4\x20\x14\xb2\x70\x6c\
\x81\x63\xcb\xaa\x42\xd6\xfd\xb7\xf4\xec\xa3\x06\xa0\x50\x52\xd8\
\x4e\x1b\x7e\x4a\xd3\x31\xf9\x29\xcf\xfe\xd4\x49\x7f\x5f\x13\xfb\
\xfa\x9b\x71\x43\x92\x58\xd4\x21\x18\x90\xac\xde\xb0\x42\x50\x13\
\x58\x33\xf3\x88\x6b\xa1\xfd\x65\x96\xf2\x79\xc6\x43\x7b\xd8\x75\
\x38\xcc\x3d\xdd\xd1\xaa\xcf\x71\xe4\xff\x7f\x91\x56\x33\xaf\xea\
\x37\xe7\xa1\x94\x21\x16\xb5\xd1\x06\x2c\x29\x36\xf5\x72\x9b\x96\
\x95\xc0\xc4\xda\x9d\x78\x83\x43\x53\x22\x80\x65\x09\x1c\xfb\x86\
\xc1\x00\xe7\x25\x70\x14\x48\x6f\x1e\x22\x51\xe3\x75\xd9\xb6\xa5\
\x81\xa3\x32\xb1\xfb\xf4\x0c\x30\xb8\xb1\x82\x9b\xb0\x09\x60\x30\
\xb1\xfb\xf4\xcc\xbf\xa0\xe9\x6e\xae\x5a\xdf\x4b\x81\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x07\
\x0c\xa9\x03\x40\
\x00\x65\
\x00\x73\x00\x72\x00\x69\x00\x6d\x00\x61\x00\x70\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| mit | 2,510,665,176,160,652,000 | 47.779817 | 96 | 0.723528 | false |
dropbox-dashbpard/error-detect-of-log | ed/system_tombstone.py | 1 | 1957 | # -*- coding: utf-8 -*-
import re
from utils import detect_string, gen_hashcode, detect_all
from symbols import translate_traces
from symbols import backtrace as remote_traces
IGNORE = ['/data/app-lib', '/mnt/asec/', '/data/data/', '/data/app/']
def detect_trace(contents):
for content in contents:
if "backtrace:" in content:
backtrace = detect_all(content, r'((?:/.+)+(?: \(.+\))*)')
if backtrace:
for bt in backtrace:
for ig in IGNORE:
if ig in bt:
return None, None
return backtrace, content
return None, None
def detect_issue_owner(backtrace):
if backtrace:
for bt in backtrace:
if '(' in bt:
lib = bt.split('(')[0].strip()
break
else:
lib = backtrace[0]
ret = lib.split('/')[-1] if '/' in lib else lib
return None if '<unknown>' in ret else ret
def match_version(content):
return detect_string(content[0], r'^Build:.+/(\d+\.\d+)/') == detect_string(content[1], r'^Build\s+fingerprint:.+/(\d+\.\d+)/')
def system_tombstone(logcat, headers):
content = logcat.split('\n\n')
if len(content) >= 3:
if not match_version(content):
return None, None, None
signal = detect_string(content[1], r'^(signal\s+-?\d+\s+\(\w+\),\s+code\s+-?\d+\s+\(\w+\))')
backtrace, raw_bt = detect_trace(content[2:])
issue_owner = detect_issue_owner(backtrace)
if issue_owner and signal and backtrace:
# traces = translate_traces(headers, raw_bt)
traces = remote_traces(headers.get("X-Dropbox-UA", "="), logcat)
md5 = gen_hashcode({'issue_owner': issue_owner, 'signal': signal, 'backtrace': backtrace})
return md5, {'issue_owner': issue_owner, 'signal': signal, 'backtrace': backtrace}, traces
return None, None, None
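# Illustrative usage (not part of the original file): the log text is expected to
# contain blank-line separated blocks (a "Build:" header, a "Build fingerprint"/
# signal block, then the blocks holding "backtrace:"); variable names are examples.
#
#   md5, issue, traces = system_tombstone(tombstone_text, request_headers)
#   if md5 is not None:
#       pass   # issue keyed by md5, with issue['issue_owner'], issue['signal'], ...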
| mit | -5,243,716,539,045,736,000 | 35.240741 | 131 | 0.556975 | false |
fermiPy/fermipy | fermipy/diffuse/gt_split_and_bin.py | 1 | 11315 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Prepare data for diffuse all-sky analysis
"""
from __future__ import absolute_import, division, print_function
import os
import math
import yaml
from fermipy.jobs.utils import is_null
from fermipy.jobs.link import Link
from fermipy.jobs.chain import Chain
from fermipy.jobs.scatter_gather import ScatterGather
from fermipy.jobs.slac_impl import make_nfs_path
from fermipy.diffuse.utils import create_inputlist
from fermipy.diffuse.name_policy import NameFactory
from fermipy.diffuse import defaults as diffuse_defaults
from fermipy.diffuse.binning import EVT_TYPE_DICT
from fermipy.diffuse.job_library import Gtlink_select,\
Gtlink_bin, Gtexpcube2_SG
from fermipy.diffuse.gt_coadd_split import CoaddSplit_SG
NAME_FACTORY = NameFactory()
def make_full_path(basedir, outkey, origname):
"""Make a full file path"""
return os.path.join(basedir, outkey, os.path.basename(
origname).replace('.fits', '_%s.fits' % outkey))
class SplitAndBin(Chain):
"""Small class to split and bin data according to some user-provided specification
    This chain consists of multiple `Link` objects:
select-energy-EBIN-ZCUT : `Gtlink_select`
Initial splitting by energy bin and zenith angle cut
select-type-EBIN-ZCUT-FILTER-TYPE : `Gtlink_select`
Refinement of selection from event types
bin-EBIN-ZCUT-FILTER-TYPE : `Gtlink_bin`
Final binning of the data for each event type
"""
appname = 'fermipy-split-and-bin'
linkname_default = 'split-and-bin'
usage = '%s [options]' % (appname)
description = 'Run gtselect and gtbin together'
default_options = dict(data=diffuse_defaults.diffuse['data'],
comp=diffuse_defaults.diffuse['comp'],
hpx_order_max=diffuse_defaults.diffuse['hpx_order_ccube'],
ft1file=(None, 'Input FT1 file', str),
evclass=(128, 'Event class bit mask', int),
outdir=('counts_cubes_cr', 'Base name for output files', str),
outkey=(None, 'Key for this particular output file', str),
pfiles=(None, 'Directory for .par files', str),
scratch=(None, 'Scratch area', str),
dry_run=(False, 'Print commands but do not run them', bool))
__doc__ += Link.construct_docstring(default_options)
def __init__(self, **kwargs):
"""C'tor
"""
super(SplitAndBin, self).__init__(**kwargs)
self.comp_dict = None
def _map_arguments(self, args):
"""Map from the top-level arguments to the arguments provided to
        the individual links """
comp_file = args.get('comp', None)
datafile = args.get('data', None)
if is_null(comp_file):
return
if is_null(datafile):
return
NAME_FACTORY.update_base_dict(datafile)
outdir = args.get('outdir', None)
outkey = args.get('outkey', None)
ft1file = args['ft1file']
if is_null(outdir) or is_null(outkey):
return
pfiles = os.path.join(outdir, outkey)
self.comp_dict = yaml.safe_load(open(comp_file))
coordsys = self.comp_dict.pop('coordsys')
full_out_dir = make_nfs_path(os.path.join(outdir, outkey))
for key_e, comp_e in sorted(self.comp_dict.items()):
emin = math.pow(10., comp_e['log_emin'])
emax = math.pow(10., comp_e['log_emax'])
enumbins = comp_e['enumbins']
zmax = comp_e['zmax']
zcut = "zmax%i" % comp_e['zmax']
evclassstr = NAME_FACTORY.base_dict['evclass']
kwargs_select = dict(zcut=zcut,
ebin=key_e,
psftype='ALL',
coordsys=coordsys,
mktime='none')
selectfile_energy = make_full_path(outdir, outkey, NAME_FACTORY.select(**kwargs_select))
linkname = 'select-energy-%s-%s' % (key_e, zcut)
self._set_link(linkname, Gtlink_select,
infile=ft1file,
outfile=selectfile_energy,
zmax=zmax,
emin=emin,
emax=emax,
evclass=NAME_FACTORY.evclassmask(evclassstr),
pfiles=pfiles,
logfile=os.path.join(full_out_dir, "%s.log" % linkname))
if 'evtclasses' in comp_e:
evtclasslist_vals = comp_e['evtclasses']
else:
evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]
for evtclassval in evtclasslist_vals:
for psf_type, psf_dict in sorted(comp_e['psf_types'].items()):
linkname_select = 'select-type-%s-%s-%s-%s' % (
key_e, zcut, evtclassval, psf_type)
linkname_bin = 'bin-%s-%s-%s-%s' % (key_e, zcut, evtclassval, psf_type)
hpx_order = psf_dict['hpx_order']
kwargs_bin = kwargs_select.copy()
kwargs_bin['psftype'] = psf_type
selectfile_psf = make_full_path(
outdir, outkey, NAME_FACTORY.select(**kwargs_bin))
binfile = make_full_path(outdir, outkey, NAME_FACTORY.ccube(**kwargs_bin))
self._set_link(linkname_select, Gtlink_select,
infile=selectfile_energy,
outfile=selectfile_psf,
zmax=zmax,
emin=emin,
emax=emax,
evtype=EVT_TYPE_DICT[psf_type],
evclass=NAME_FACTORY.evclassmask(evtclassval),
pfiles=pfiles,
logfile=os.path.join(full_out_dir, "%s.log" % linkname_select))
self._set_link(linkname_bin, Gtlink_bin,
coordsys=coordsys,
hpx_order=hpx_order,
evfile=selectfile_psf,
outfile=binfile,
emin=emin,
emax=emax,
enumbins=enumbins,
pfiles=pfiles,
logfile=os.path.join(full_out_dir, "%s.log" % linkname_bin))
class SplitAndBin_SG(ScatterGather):
"""Small class to generate configurations for SplitAndBin
"""
appname = 'fermipy-split-and-bin-sg'
usage = "%s [options]" % (appname)
description = "Prepare data for diffuse all-sky analysis"
clientclass = SplitAndBin
job_time = 1500
default_options = dict(comp=diffuse_defaults.diffuse['comp'],
data=diffuse_defaults.diffuse['data'],
hpx_order_max=diffuse_defaults.diffuse['hpx_order_ccube'],
ft1file=(None, 'Input FT1 file', str),
scratch=(None, 'Path to scratch area', str))
__doc__ += Link.construct_docstring(default_options)
def build_job_configs(self, args):
"""Hook to build job configurations
"""
job_configs = {}
comp_file = args.get('comp', None)
if comp_file is not None:
comp_dict = yaml.safe_load(open(comp_file))
coordsys = comp_dict.pop('coordsys')
for v in comp_dict.values():
v['coordsys'] = coordsys
else:
return job_configs
NAME_FACTORY.update_base_dict(args['data'])
inputfiles = create_inputlist(args['ft1file'])
outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')
for idx, infile in enumerate(inputfiles):
key = "%06i" % idx
output_dir = os.path.join(outdir_base, key)
try:
os.mkdir(output_dir)
except OSError:
pass
logfile = make_nfs_path(os.path.join(output_dir, 'scatter_%s.log' % key))
#job_configs[key] = args.copy()
job_configs[key] = args.copy()
job_configs[key].update(dict(ft1file=infile,
comp=args['comp'],
hpx_order_max=args['hpx_order_max'],
outdir=outdir_base,
outkey=key,
logfile=logfile,
pfiles=output_dir))
return job_configs
class SplitAndBinChain(Chain):
"""Chain to run split and bin and then make exposure cubes
This chain consists of:
split-and-bin : `SplitAndBin_SG`
Chain to make the binned counts maps for each input file
coadd-split : `CoaddSplit_SG`
        Link to co-add the binned counts map files
expcube2 : `Gtexpcube2_SG`
Link to make the corresponding binned exposure maps
"""
appname = 'fermipy-split-and-bin-chain'
linkname_default = 'split-and-bin-chain'
usage = '%s [options]' % (appname)
description = 'Run split-and-bin, coadd-split and exposure'
default_options = dict(data=diffuse_defaults.diffuse['data'],
comp=diffuse_defaults.diffuse['comp'],
ft1file=diffuse_defaults.diffuse['ft1file'],
hpx_order_ccube=diffuse_defaults.diffuse['hpx_order_ccube'],
hpx_order_expcube=diffuse_defaults.diffuse['hpx_order_expcube'],
scratch=diffuse_defaults.diffuse['scratch'],
dry_run=diffuse_defaults.diffuse['dry_run'])
__doc__ += Link.construct_docstring(default_options)
def _map_arguments(self, args):
"""Map from the top-level arguments to the arguments provided to
        the individual links """
data = args.get('data')
comp = args.get('comp')
ft1file = args.get('ft1file')
scratch = args.get('scratch', None)
dry_run = args.get('dry_run', None)
self._set_link('split-and-bin', SplitAndBin_SG,
comp=comp, data=data,
hpx_order_max=args.get('hpx_order_ccube', 9),
ft1file=ft1file,
scratch=scratch,
dry_run=dry_run)
self._set_link('coadd-split', CoaddSplit_SG,
comp=comp, data=data,
ft1file=ft1file)
self._set_link('expcube2', Gtexpcube2_SG,
comp=comp, data=data,
hpx_order_max=args.get('hpx_order_expcube', 5),
dry_run=dry_run)
def register_classes():
"""Register these classes with the `LinkFactory` """
SplitAndBin.register_class()
SplitAndBin_SG.register_class()
SplitAndBinChain.register_class()
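# Illustrative note (assumption, not from the original file): these classes are
# normally driven through their console entry points, with options named after
# the keys of default_options above, roughly like
#
#   fermipy-split-and-bin-chain --comp binning.yaml --data dataset.yaml \
#       --ft1file ft1_files.lst --hpx_order_ccube 9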
| bsd-3-clause | 7,035,053,388,917,838,000 | 39.124113 | 100 | 0.530711 | false |
Lilykos/pyphonetics | pyphonetics/phonetics/soundex.py | 1 | 1168 | import re
from unidecode import unidecode
from ..utils import translation, squeeze, check_str, check_empty
from .phonetic_algorithm import PhoneticAlgorithm
class Soundex(PhoneticAlgorithm):
"""
The Soundex algorithm.
[Reference]: https://en.wikipedia.org/wiki/Soundex
[Authors]: Robert C. Russel, Margaret King Odell
"""
def __init__(self):
super().__init__()
self.translations = translation(
'AEIOUYWHBPFVCSKGJQXZDTLMNR',
'000000DD111122222222334556'
)
self.pad = lambda code: '{}0000'.format(code)[:4]
def phonetics(self, word):
check_str(word)
check_empty(word)
word = unidecode(word).upper()
word = re.sub(r'[^A-Z]', r'', word)
first_letter = word[0]
tail = ''.join(self.translations[char] for char in word
if self.translations[char] != 'D')
# Dropping first code's letter if duplicate
if len(tail):
if tail[0] == self.translations[first_letter]:
tail = tail[1:]
code = squeeze(tail).replace('0', '')
return self.pad(first_letter + code)
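# Illustrative usage (not part of the original file): the classic Soundex pairing,
# which this implementation is expected to reproduce:
#
#   Soundex().phonetics('Robert')   # -> 'R163'
#   Soundex().phonetics('Rupert')   # -> 'R163'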
| mit | 7,323,594,689,653,898,000 | 27.487805 | 64 | 0.589897 | false |
lutraconsulting/qgis-cmaps-region-generator-plugin | qgis_plugin/CMapsRegionGenerator/cmapsregiongeneratordialog.py | 1 | 24207 | # -*- coding: utf-8 -*-
# CMAPS Region Generator - Automate merging features using a CSV file definition.
# Copyright (C) 2013 Centigon Solutions Inc.
# sales at centigonsolutions dot com
# 1333 Camino Del Rio S #300
# San Diego
# CA 92108
# USA
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from ui_cmapsregiongenerator import Ui_CMapsRegionGenerator
from tableau_writer import *
import about_dialog
import os
from unicode_csv import UnicodeReader
# create the dialog for zoom to point
class CMapsRegionGeneratorDialog(QDialog, Ui_CMapsRegionGenerator):
def __init__(self, iface):
QDialog.__init__(self)
self.iface = iface
self.setupUi(self)
self.loadedOK = True
# Used to keep track of user preferences
self.preferredStandardGeoCol = 0
self.yourRegionDefCol = 1
self.maxFeaturesToCheckForVals = 50
"""
Based on selected Shapefile:
Read all attribute names
Take an example (first non-zero-length)
Populate combo-box
"""
if self.iface.mapCanvas().currentLayer() == None or \
self.iface.mapCanvas().currentLayer().type() != QgsMapLayer.VectorLayer:
# Show QMessageBox (error) and close
QMessageBox.critical(self, 'Error', 'Please first select an input Shapefile.')
self.loadedOK = False
return
self.inputLayer = self.iface.mapCanvas().currentLayer()
# Display the CRS
self.srsCrs = self.inputLayer.crs()
crsDesc = str( self.srsCrs.description() )
self.selectedCrsLabel.setText( 'Input CRS: %s (Output will always be in WGS 84)' % (crsDesc,) )
# Fetch the attribute names
try:
for index, field in self.inputLayer.dataProvider().fields().iteritems():
# Get sample
sampleText = self.getSample(index)
if sampleText is not None:
self.mergingAttributeComboBox.addItem( '%s (e.g. %s)' % (field.name(), sampleText) )
else:
self.mergingAttributeComboBox.addItem( '%s' % (field.name(),) )
except:
i = 0
for field in self.inputLayer.dataProvider().fields():
# Get sample
sampleText = self.getSample(i)
if sampleText is not None:
self.mergingAttributeComboBox.addItem( '%s (e.g. %s)' % (field.name(), sampleText) )
else:
self.mergingAttributeComboBox.addItem( '%s' % (field.name(),) )
i += 1
def getSample(self, attributeIndex):
# Fetch the value (as a string) of the first attribute
f = QgsFeature()
for indexToTry in xrange( min(self.inputLayer.featureCount(), self.maxFeaturesToCheckForVals) ):
try:
self.inputLayer.featureAtId(indexToTry, f, False, True)
stringVal = str( f.attributeMap()[attributeIndex].toString() )
if len(stringVal) > 0:
return stringVal
except:
self.inputLayer.dataProvider().getFeatures(QgsFeatureRequest(indexToTry)).nextFeature(f)
stringVal = str( f.attributes()[attributeIndex] )
if len(stringVal) > 0:
return stringVal
return None
def updateCsvCombos(self, csvFilePath):
"""
Updates the two combo boxes with samples from the CSV file
:param csvFilePath: Path to CSV file, file may not be valid
:return:
"""
# Clear the combo boxes
self.standardGeographyColumnComboBox.blockSignals(True)
self.yourRegionDefinitionColumnComboBox.blockSignals(True)
self.standardGeographyColumnComboBox.clear()
self.yourRegionDefinitionColumnComboBox.clear()
self.standardGeographyColumnComboBox.blockSignals(False)
self.yourRegionDefinitionColumnComboBox.blockSignals(False)
# Determine if the file is valid and return early if not, leaving the combo boxes empty
try:
if csvFilePath.startswith('"') and csvFilePath.endswith('"'):
csvFilePath = csvFilePath[1:-1]
with open(csvFilePath, 'rb') as csvFile:
reader = UnicodeReader(csvFile)
firstRow = reader.next()
if len(firstRow) < 2:
# There should be at least two columns, return
return
# populate the CBs
i = 1
self.standardGeographyColumnComboBox.blockSignals(True)
self.yourRegionDefinitionColumnComboBox.blockSignals(True)
for col in firstRow:
cbText = 'Column %d (e.g. %s)' % (i, col)
self.standardGeographyColumnComboBox.addItem(cbText)
self.yourRegionDefinitionColumnComboBox.addItem(cbText)
i += 1
self.standardGeographyColumnComboBox.setCurrentIndex(self.preferredStandardGeoCol)
self.yourRegionDefinitionColumnComboBox.setCurrentIndex(self.yourRegionDefCol)
self.standardGeographyColumnComboBox.blockSignals(False)
self.yourRegionDefinitionColumnComboBox.blockSignals(False)
except IOError:
# The user could be typing and we have a partial file name
pass
def colNumFromComboText(self, comboText):
return int( comboText.split('Column ')[1].split(' ')[0] )
def onStandardGeogColChanged(self, newVal):
# User updated the combo box, record the preference
self.preferredStandardGeoCol = self.colNumFromComboText(newVal) - 1
def onYourRegionDefColChanged(self, newVal):
# User updated the combo box, record the preference
self.yourRegionDefCol = self.colNumFromComboText(newVal) - 1
def run(self):
"""
The guts of the plugin
Overview:
Determine the unique entries in the SUB_REGION column
For each unique entry:
Make a list of all the matching entries from the STATE_NAME column
Select all features whose attributes match this string
Merge the features
Add STATE_NAME as a new column
Write to output file
Optionally load the layer
"""
"""
Before starting, check the quality of the input data. The
regions that we are merging should not have any invalid
geometries as these cause major problems later on.
"""
prov = self.inputLayer.dataProvider()
allFeatures = []
errorString = ''
feat = QgsFeature()
if QGis.QGIS_VERSION_INT >= 20000:
featIt = prov.getFeatures()
while featIt.nextFeature(feat):
allFeatures.append(QgsFeature(feat)) ## Append a copy
else:
prov.select(prov.attributeIndexes())
while prov.nextFeature(feat):
allFeatures.append(QgsFeature(feat)) ## Append a copy
for feat in allFeatures:
if not feat.geometry().isGeosValid():
# Determine what's wrong with it
errorString += '\n\n'
errorString += 'Feature %d has the following error(s):\n\n' % feat.id()
for res in feat.geometry().validateGeometry():
errorString += res.what() + '\n'
if len(errorString) > 0:
QMessageBox.critical(self, 'Invalid Geometry Detected', 'The following geometry errors were detected and must be resolved before generating regions:' + errorString)
return
"""
Prepare the output file
Check first that a layer with the same file name is not
already loaded. If it is, raise an error and return early.
"""
targetSrs = QgsCoordinateReferenceSystem(4326)
crsTransform = QgsCoordinateTransform(self.srsCrs, targetSrs)
outputFolder = self.outputFolderLineEdit.text()
if outputFolder == '':
QMessageBox.critical(self, 'Error', 'No output folder specified.')
return
# Ensure the output folder exists
if not os.path.isdir(outputFolder):
try:
os.makedirs(outputFolder)
except WindowsError:
QMessageBox.critical(self, 'Error', 'Failed to make destination folder, %s' % (outputFolder))
return
outputFilePrefix = self.outputFilePrefixLineEdit.text()
if outputFilePrefix == '':
QMessageBox.critical(self, 'Error', 'No output file prefix specified.')
return
shapeFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '.shp'))
geoJsonFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '.geojson'))
geoJsonKeylessFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '_no_keys.geojson'))
csvFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '.csv'))
tableauFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '_tableau.csv'))
lr = QgsMapLayerRegistry.instance()
for of in [shapeFileName, geoJsonFileName, geoJsonKeylessFileName, csvFileName]:
shortName = os.path.basename(of).split('.')[0]
for layer in lr.mapLayers().values():
if layer.type() == QgsMapLayer.VectorLayer and os.path.normpath(str(layer.source())) == of:
# The file we are trying to write is already open
QMessageBox.critical(self, 'Error', 'The file you\'re trying to write (%s) is already loaded, please close it first.' % (layer.name()))
return
fields = None
if QGis.QGIS_VERSION_INT >= 20000:
fields = QgsFields()
fields.append( QgsField("region", QVariant.String) )
else:
fields = { 0 : QgsField("region", QVariant.String) }
emptyFields = QgsFields()
encoding = 'utf-8'
shapeWriter = QgsVectorFileWriter(shapeFileName, encoding, fields, QGis.WKBPolygon, targetSrs, 'ESRI Shapefile')
if shapeWriter.hasError() != QgsVectorFileWriter.NoError:
QMessageBox.critical(self, 'Error', 'Failed to create output file %s' % (shapeFileName))
return
geoJsonWriter = QgsVectorFileWriter(geoJsonFileName, encoding, fields, QGis.WKBPolygon, targetSrs, 'GeoJSON',
layerOptions=['COORDINATE_PRECISION=5'])
if geoJsonWriter.hasError() != QgsVectorFileWriter.NoError:
QMessageBox.critical(self, 'Error', 'Failed to create output file %s' % (geoJsonFileName))
return
geoJsonKeylessWriter = QgsVectorFileWriter(geoJsonKeylessFileName, encoding, emptyFields, QGis.WKBPolygon, targetSrs,
'GeoJSON', layerOptions=['COORDINATE_PRECISION=5'])
if geoJsonKeylessWriter.hasError() != QgsVectorFileWriter.NoError:
QMessageBox.critical(self, 'Error', 'Failed to create output file %s' % (geoJsonKeylessFileName))
return
csvWriter = QgsVectorFileWriter(csvFileName, encoding, fields, QGis.WKBPolygon, targetSrs, 'CSV',
layerOptions=['GEOMETRY=AS_WKT'])
if csvWriter.hasError() != QgsVectorFileWriter.NoError:
QMessageBox.critical(self, 'Error', 'Failed to create output file %s' % (csvFileName))
return
while True:
try:
tableauWriter = TableauWriter(tableauFileName, fields)
break
except TableauFileCreationError:
reply = QMessageBox.question(None, 'File in Use',
'%s appears to already be open in another application. Please either close '
'the file and retry or click Abort to cancel.' % tableauFileName,
QMessageBox.Retry | QMessageBox.Abort, QMessageBox.Retry)
if reply == QMessageBox.Abort:
return
# Read CSV control file
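        # Each row pairs a standard geography value with the custom region it
        # belongs to, e.g. (illustrative rows): "California,West" or "Texas,South".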
uniqueRegionIds = {} # A dict
csvFileH = open( str(self.regionDefinitionFileNameLineEdit.text()), 'rb')
reader = UnicodeReader(csvFileH)
if self.firstRowIsHeaderCheckBox.isChecked():
# Eat header
reader.next()
standardGeogColIdx = self.standardGeographyColumnComboBox.currentIndex()
yourRegionDefColIdx = self.yourRegionDefinitionColumnComboBox.currentIndex()
for row in reader:
fips = row[standardGeogColIdx]
regionId = row[yourRegionDefColIdx]
if not regionId in uniqueRegionIds.keys():
uniqueRegionIds[regionId] = { 'subregion_names': [fips],
'matched_subregion_names': [] }
else:
uniqueRegionIds[regionId]['subregion_names'].append(fips)
del reader
csvFileH.close()
# Determine index of merging attribute
attIdx = self.mergingAttributeComboBox.currentIndex()
feat = QgsFeature()
prov = self.inputLayer.dataProvider()
allAttrs = prov.attributeIndexes()
hasMismatches = False
mismatchReport = 'Custom Region,Unmatched Subregions\n' # Header
for region, subRegionInfo in uniqueRegionIds.iteritems():
subRegions = subRegionInfo['subregion_names']
# Make a selection of features
matchedFeatures = []
if QGis.QGIS_VERSION_INT >= 20000:
featIt = prov.getFeatures()
while featIt.nextFeature(feat):
subRegionName = str(feat.attributes()[attIdx])
if subRegionName in subRegions:
matchedFeatures.append(QgsFeature(feat)) ## Append a copy
subRegionInfo['matched_subregion_names'].append(subRegionName)
else:
prov.select(allAttrs)
while prov.nextFeature(feat):
subRegionName = str(feat.attributeMap()[attIdx].toString())
if subRegionName in subRegions:
matchedFeatures.append(QgsFeature(feat)) ## Append a copy
subRegionInfo['matched_subregion_names'].append(subRegionName)
# matchedFeatures should now contain all we require to merge
# if it has no entries, then no features could be found with this
# sub-region attribute so it will be skipped and not appear in the
# output.
if len(matchedFeatures) < len(subRegionInfo['subregion_names']):
# There are more subregions in the definition than have actually been matched
# so the output geometry is likely to have weird holes. Add this information to the
# mismatch report
hasMismatches = True
mismatchReport += '%s\n' % region
for subregion in subRegionInfo['subregion_names']:
if subregion not in subRegionInfo['matched_subregion_names']:
mismatchReport += ',%s\n' % subregion
if len(matchedFeatures) == 0:
continue
firstFeature = matchedFeatures.pop()
mergedGeom = QgsGeometry( firstFeature.geometry() )
for featureToMerge in matchedFeatures:
mergedGeom = mergedGeom.combine(featureToMerge.geometry())
# We now should have a merged geometry
# Transform to 4326
if mergedGeom.transform(crsTransform) != 0:
QMessageBox.critical(self, 'Error', 'Failed to perform CRS transform, quitting.')
del shapeWriter
del geoJsonWriter
del geoJsonKeylessWriter
del csvWriter
return
if QGis.QGIS_VERSION_INT >= 20000:
outFet = QgsFeature(fields)
outFet.setGeometry(mergedGeom)
outFet.setAttribute('region', region)
keylessFeat = QgsFeature(emptyFields)
keylessFeat.setGeometry(mergedGeom)
else:
outFet = QgsFeature()
outFet.setGeometry(mergedGeom)
outFet.addAttribute(0, QVariant(region))
keylessFeat = QgsFeature()
keylessFeat.setGeometry(mergedGeom)
shapeWriter.addFeature(outFet)
geoJsonWriter.addFeature(outFet)
geoJsonKeylessWriter.addFeature(keylessFeat)
csvWriter.addFeature(outFet)
tableauWriter.addFeature(outFet)
# close output file
del shapeWriter
del geoJsonWriter
del geoJsonKeylessWriter
del csvWriter
if tableauWriter.encounteredHoles():
holeFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '_holes.csv'))
while True:
try:
hFile = open(holeFileName, 'w')
break
except:
reply = QMessageBox.question(None, 'File in Use',
'%s appears to already be open in another application. Please either close '
'the file and retry or click Abort to cancel.' % holeFileName,
QMessageBox.Retry | QMessageBox.Abort, QMessageBox.Retry)
if reply == QMessageBox.Abort:
return
hFile.write(tableauWriter.getHoleSummary())
hFile.close()
if hasMismatches:
mismatchFileName = os.path.normpath(os.path.join(outputFolder, outputFilePrefix + '_mismatches.csv'))
while True:
try:
mmFile = open(mismatchFileName, 'w')
break
except:
reply = QMessageBox.question(None, 'File in Use',
'%s appears to already be open in another application. Please either close '
'the file and retry or click Abort to cancel.' % mismatchFileName,
QMessageBox.Retry | QMessageBox.Abort, QMessageBox.Retry)
if reply == QMessageBox.Abort:
return
mmFile.write(mismatchReport)
mmFile.close()
if hasMismatches or tableauWriter.encounteredHoles():
issuesMessage = 'The following issues were encountered while exporting:'
if hasMismatches:
issuesMessage += '\n\nFailed to locate matching input sub-regions for some of your custom region definitions.' \
' Please see %s for more details.' % mismatchFileName
if tableauWriter.encounteredHoles():
issuesMessage += '\n\nSome of your custom regions contained holes / inner rings which are not supported by ' \
'the tableau file format. Holes / inner rings of the affected regions have not been written ' \
'to the exported tableau file. ' \
'Please see %s for more details.' % holeFileName
QMessageBox.warning(self, 'Export Issues', issuesMessage)
del tableauWriter
# Optionally load the layer in QGIS
if self.loadWhenFinishedCheckBox.isChecked():
for of in [shapeFileName, geoJsonFileName, geoJsonKeylessFileName, csvFileName]:
shortName = os.path.basename(of)
loadedLayer = QgsVectorLayer(of, shortName, 'ogr')
if not loadedLayer.isValid():
QMessageBox.critical(self, 'Error', 'Failed to load resulting shapefile %s.' % (of))
return
QgsMapLayerRegistry.instance().addMapLayer(loadedLayer)
QMessageBox.information(self, 'Success', 'Successfully finished processing.')
def showHelp(self):
QDesktopServices.openUrl(QUrl(QString('http://centigonknowledge.com/cmaps-analytics-region-creator')))
def aboutClicked(self):
d = about_dialog.AboutDialog(self.iface)
d.show()
res = d.exec_()
def browseForRegion(self):
"""
User is browsing for a region definition:
Check that the file is a .csv file
Open it and read the column titles
"""
# Remember the last folder in which we searched
settings = QSettings()
try:
lastFolder = str(settings.value("cmapsregiongenerator/lastRegionFolder", os.sep).toString())
except:
lastFolder = str(settings.value("cmapsregiongenerator/lastRegionFolder", os.sep))
tmpFileName = str( QFileDialog.getOpenFileName(self, 'Select Region Definition', lastFolder, 'Comma Separated Variable Files (*.csv)') )
if not len(tmpFileName) > 0 or not os.path.exists(tmpFileName):
QMessageBox.critical(self, 'Error', tmpFileName + ' does not seem to exist.')
return
# Store the path we just looked in
head, tail = os.path.split(tmpFileName)
        if head != os.sep and head.lower() != 'c:\\' and head != '':
settings.setValue("cmapsregiongenerator/lastRegionFolder", head)
self.regionDefinitionFileNameLineEdit.setText( tmpFileName )
def browseForShapefile(self):
# Remember the last folder in which we searched
settings = QSettings()
try:
lastFolder = str(settings.value("cmapsregiongenerator/lastShapefileFolder", os.sep).toString())
except:
lastFolder = str(settings.value("cmapsregiongenerator/lastShapefileFolder", os.sep))
tmpFolderName = str( QFileDialog.getExistingDirectory(self, 'Select Output Folder', lastFolder) )
# Store the path we just looked in
if tmpFolderName != '':
settings.setValue("cmapsregiongenerator/lastShapefileFolder", tmpFolderName)
self.outputFolderLineEdit.setText( tmpFolderName )
| gpl-2.0 | 4,395,614,780,969,134,000 | 45.745562 | 176 | 0.577354 | false |
AlertaDengue/AlertaDengue | AlertaDengue/dados/models.py | 1 | 3178 | from django.db import models
from django.utils.translation import ugettext_lazy as _
class City(models.Model):
"""
    Municipality: geocode (primary key), name and state (uf).
"""
geocode = models.IntegerField(
db_column='geocodigo',
null=False,
primary_key=True,
help_text=_('Código do Município'),
)
name = models.CharField(
db_column='nome',
null=False,
max_length=128,
help_text=_('Nome do municipio'),
)
state = models.CharField(
db_column='uf',
null=False,
max_length=20,
help_text=_('Nome do estado'),
)
class Meta:
db_table = 'Dengue_global\".\"Municipio'
app_label = 'dados'
verbose_name_plural = "cities"
def __str__(self):
return self.name
class CID10(models.Model):
"""
    ICD-10 (CID10) disease classification: code (primary key) and name.
"""
code = models.CharField(
db_column='codigo',
null=False,
primary_key=True,
max_length=512,
help_text=_('Código do doença'),
)
name = models.CharField(
db_column='nome',
null=False,
max_length=512,
help_text=_('Nome da doença'),
)
class Meta:
db_table = 'Dengue_global\".\"CID10'
app_label = 'dados'
verbose_name_plural = "cities"
def __str__(self):
return self.name
class RegionalHealth(models.Model):
"""
codigo_estacao_wu
varcli
ucrit
tcrit
limiar_preseason
limiar_posseason
limiar_epidemico
"""
id = models.IntegerField(
db_column='id',
null=False,
primary_key=True,
help_text=_('Código da Regional Saúde'),
)
codigo_estacao_wu = models.CharField(
db_column='codigo_estacao_wu',
null=False,
max_length=16,
help_text=_('Código da Estação WU'),
)
varcli = models.CharField(
db_column='varcli',
null=False,
max_length=10,
help_text=_('Variável climática'),
)
ucrit = models.FloatField(
db_column='ucrit', null=False, help_text=_('Umidade Crítica')
)
tcrit = models.FloatField(
db_column='tcrit', null=False, help_text=_('Temperatura Crítica')
)
limiar_preseason = models.FloatField(
db_column='limiar_preseason',
null=False,
help_text=_('Limiar pré-epidêmica'),
)
limiar_posseason = models.FloatField(
db_column='limiar_posseason',
null=False,
help_text=_('Limiar pós-epidêmica'),
)
limiar_epidemico = models.FloatField(
db_column='limiar_epidemico',
null=False,
help_text=_('Limiar epidêmico'),
)
municipio_geocodigo = models.FloatField(
db_column='municipio_geocodigo',
null=False,
unique=True,
help_text=_('Código do municipio'),
)
class Meta:
db_table = 'Dengue_global\".\"regional_saude'
app_label = 'dados'
verbose_name_plural = "regionais_saude"
def __str__(self):
return self.name
| gpl-3.0 | 2,494,833,941,740,376,600 | 22.220588 | 73 | 0.563331 | false |
chris-blay/historical-rents | scraper.py | 1 | 7068 | #!/usr/bin/python3
# Copyright (C) 2017 Christopher Blay <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'Scrapes rent information about apartments from multiple configured buildings.'
from __future__ import (absolute_import, division, generators, nested_scopes,
print_function, unicode_literals, with_statement)
import argparse
import collections
import csv
import json
import re
import sys
import time
import requests
_HEADERS = {
'user-agent':
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
}
class Apartment(object):
'Information about an apartment.'
def __init__(self, unit, rent, size, beds):
self.unit = unit
self.rent = rent
self.size = size
self.beds = beds
@property
def per_sq_ft(self):
'Rent per square foot.'
return self.rent / self.size
def __str__(self):
return ('Unit {0.unit}: rent={0.rent} size={0.size} beds={0.beds}'
' per_sq_ft={0.per_sq_ft}'.format(self))
class Avalon(object):
'Scrapes apartment information for an Avalon building.'
_URL_TEMPLATE = ('https://api.avalonbay.com/json/reply/ApartmentSearch?'
'communityCode={}&_={}')
def __init__(self, name, community_code):
self.name = name
self._community_code = community_code
@property
def _url(self):
return self._URL_TEMPLATE.format(self._community_code,
int(time.time() * 1000))
@property
def apartments(self):
'Yields apartments for this building.'
info = requests.get(self._url, headers=_HEADERS).json()
for available_floor_plan_type in \
info['results']['availableFloorPlanTypes']:
beds = int(available_floor_plan_type['floorPlanTypeCode'][0])
for available_floor_plan in \
available_floor_plan_type['availableFloorPlans']:
size = available_floor_plan['estimatedSize']
for finish_package in available_floor_plan['finishPackages']:
for apartment in finish_package['apartments']:
yield Apartment(apartment['apartmentNumber'],
apartment['pricing']['effectiveRent'],
size, beds)
class Equity(object):
'Scrapes apartment information for an Equity building.'
_INFO_PATTERN = re.compile(
r'^ *[a-z0-5]+\.unitAvailability = (?P<info>\{.*\})')
_URL_TEMPLATE = 'http://www.equityapartments.com/{}'
def __init__(self, name, url_path):
self.name = name
self._url_path = url_path
@property
def _info(self):
for line in requests.get(self._url, headers=_HEADERS).text.split('\n'):
match = self._INFO_PATTERN.search(line)
if match:
return json.loads(match.group('info'))
print('Unable to get info from {}'.format(self._url))
@property
def _url(self):
return self._URL_TEMPLATE.format(self._url_path)
@property
def apartments(self):
'Yields apartments for this building.'
for bedroom_type in self._info['BedroomTypes']:
for available_unit in bedroom_type['AvailableUnits']:
yield Apartment(available_unit['UnitId'],
available_unit['BestTerm']['Price'],
available_unit['SqFt'],
bedroom_type['BedroomCount'])
BUILDINGS = [
Avalon('Avalon Mission Bay', 'CA067'),
Avalon('Avalon San Bruno', 'CA583'),
Equity('La Terraza', 'san-francisco-bay/colma/la-terrazza-apartments'),
Equity(
'South City Station',
'san-francisco-bay/south-san-francisco/south-city-station-apartments'),
]
def _check_beds(args):
if (args.min_beds is not None and args.max_beds is not None
and args.min_beds > args.max_beds):
sys.exit('Error! min_beds={} is greater than max_beds={}'.format(
args.min_beds, args.max_beds))
def _maybe_print_buildings(args):
if args.buildings:
print('# Buildings')
for building in BUILDINGS:
print(building.name)
sys.exit()
def main():
'Main method for this script.'
parser = argparse.ArgumentParser(
description='Scrapes current rental information'
' for configured buildings')
parser.add_argument('--min_beds', type=int, help='minimum number of beds')
parser.add_argument('--max_beds', type=int, help='maximum number of beds')
parser.add_argument('-b', '--buildings', action='store_true',
help='show configured buildings and exit')
parser.add_argument('--csv', action='store_const', const=csv.DictWriter(
sys.stdout, ('timestamp', 'bldg', 'unit', 'rent', 'size', 'beds')),
help='output in CSV format. omits mean rent per apt '
'size. does not apply to `--buildings`')
parser.add_argument('building', nargs='*',
help='zero or more buildings to scrape. specifying no'
' buildings scrapes all configured buildings')
args = parser.parse_args()
_maybe_print_buildings(args)
_check_beds(args)
for building in BUILDINGS:
if args.building and building.name not in args.building:
continue
if not args.csv:
print('# {}'.format(building.name))
by_size = collections.defaultdict(list)
else:
timestamp = int(time.time())
for apartment in sorted(building.apartments, key=lambda x: x.unit):
if args.min_beds is not None and args.min_beds > apartment.beds:
continue
if args.max_beds is not None and args.max_beds < apartment.beds:
continue
if args.csv:
args.csv.writerow(dict(
timestamp=timestamp, bldg=building.name, **vars(apartment)))
else:
print(apartment)
by_size[apartment.size].append(apartment.rent)
if args.csv:
continue
for size in sorted(by_size.keys()):
print('Size {}: {}'.format(
size, sum(by_size[size]) / len(by_size[size]) / size))
print()
if __name__ == '__main__':
main()
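# Illustrative invocations (flags as defined in the argparse setup above):
#   ./scraper.py --buildings            # list configured buildings and exit
#   ./scraper.py --min_beds 1 --csv     # CSV rows for units with at least 1 bed
#   ./scraper.py 'Avalon Mission Bay'   # scrape only the named building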
| gpl-3.0 | 6,003,138,055,225,850,000 | 36.595745 | 80 | 0.592813 | false |
mnahm5/django-estore | Lib/site-packages/awscli/utils.py | 1 | 4992 | # Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import csv
import signal
import datetime
import contextlib
from awscli.compat import six
def split_on_commas(value):
if not any(char in value for char in ['"', '\\', "'", ']', '[']):
# No quotes or escaping, just use a simple split.
return value.split(',')
elif not any(char in value for char in ['"', "'", '[', ']']):
# Simple escaping, let the csv module handle it.
return list(csv.reader(six.StringIO(value), escapechar='\\'))[0]
else:
# If there's quotes for the values, we have to handle this
# ourselves.
return _split_with_quotes(value)
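# Illustrative behavior (examples assumed, not taken from the original source):
#   split_on_commas('a,b,c')      -> ['a', 'b', 'c']
#   split_on_commas('a,"b,c",d')  -> ['a', 'b,c', 'd']   (quoted comma preserved)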
def _split_with_quotes(value):
try:
parts = list(csv.reader(six.StringIO(value), escapechar='\\'))[0]
except csv.Error:
raise ValueError("Bad csv value: %s" % value)
iter_parts = iter(parts)
new_parts = []
for part in iter_parts:
# Find the first quote
quote_char = _find_quote_char_in_part(part)
# Find an opening list bracket
list_start = part.find('=[')
if list_start >= 0 and value.find(']') != -1 and \
(quote_char is None or part.find(quote_char) > list_start):
# This is a list, eat all the items until the end
if ']' in part:
# Short circuit for only one item
new_chunk = part
else:
new_chunk = _eat_items(value, iter_parts, part, ']')
list_items = _split_with_quotes(new_chunk[list_start + 2:-1])
new_chunk = new_chunk[:list_start + 1] + ','.join(list_items)
new_parts.append(new_chunk)
continue
elif quote_char is None:
new_parts.append(part)
continue
elif part.count(quote_char) == 2:
# Starting and ending quote are in this part.
# While it's not needed right now, this will
# break down if we ever need to escape quotes while
# quoting a value.
new_parts.append(part.replace(quote_char, ''))
continue
# Now that we've found a starting quote char, we
# need to combine the parts until we encounter an end quote.
new_chunk = _eat_items(value, iter_parts, part, quote_char, quote_char)
new_parts.append(new_chunk)
return new_parts
def _eat_items(value, iter_parts, part, end_char, replace_char=''):
"""
Eat items from an iterator, optionally replacing characters with
a blank and stopping when the end_char has been reached.
"""
current = part
chunks = [current.replace(replace_char, '')]
while True:
try:
current = six.advance_iterator(iter_parts)
except StopIteration:
raise ValueError(value)
chunks.append(current.replace(replace_char, ''))
if current.endswith(end_char):
break
return ','.join(chunks)
def _find_quote_char_in_part(part):
if '"' not in part and "'" not in part:
return
quote_char = None
double_quote = part.find('"')
single_quote = part.find("'")
if double_quote >= 0 and single_quote == -1:
quote_char = '"'
elif single_quote >= 0 and double_quote == -1:
quote_char = "'"
elif double_quote < single_quote:
quote_char = '"'
elif single_quote < double_quote:
quote_char = "'"
return quote_char
def find_service_and_method_in_event_name(event_name):
"""
Grabs the service name and the operation name from an event name.
This is making the assumption that the event name is in the form
event.service.operation.
"""
split_event = event_name.split('.')[1:]
service_name = None
if len(split_event) > 0:
service_name = split_event[0]
operation_name = None
if len(split_event) > 1:
operation_name = split_event[1]
return service_name, operation_name
def json_encoder(obj):
"""JSON encoder that formats datetimes as ISO8601 format."""
if isinstance(obj, datetime.datetime):
return obj.isoformat()
else:
return obj
@contextlib.contextmanager
def ignore_ctrl_c():
original = signal.signal(signal.SIGINT, signal.SIG_IGN)
try:
yield
finally:
signal.signal(signal.SIGINT, original)
def emit_top_level_args_parsed_event(session, args):
session.emit(
'top-level-args-parsed', parsed_args=args, session=session)
| mit | -7,054,070,252,055,107,000 | 32.72973 | 79 | 0.614183 | false |
brainwane/zulip | zerver/tests/test_cache.py | 2 | 14168 | from typing import Any, Dict, List, Optional
from unittest.mock import Mock, patch
from django.conf import settings
from zerver.apps import flush_cache
from zerver.lib.cache import (
MEMCACHED_MAX_KEY_LENGTH,
InvalidCacheKeyException,
NotFoundInCache,
bulk_cached_fetch,
cache_delete,
cache_delete_many,
cache_get,
cache_get_many,
cache_set,
cache_set_many,
cache_with_key,
get_cache_with_key,
safe_cache_get_many,
safe_cache_set_many,
user_profile_by_email_cache_key,
validate_cache_key,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import queries_captured
from zerver.models import UserProfile, get_system_bot, get_user_profile_by_email
class AppsTest(ZulipTestCase):
def test_cache_gets_flushed(self) -> None:
with patch('zerver.apps.logging.info') as mock_logging:
with patch('zerver.apps.cache.clear') as mock:
# The argument to flush_cache doesn't matter
flush_cache(Mock())
mock.assert_called_once()
mock_logging.assert_called_once()
class CacheKeyValidationTest(ZulipTestCase):
def test_validate_cache_key(self) -> None:
validate_cache_key('nice_Ascii:string!~')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('utf8_character:ą')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('new_line_character:\n')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('control_character:\r')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('whitespace_character: ')
with self.assertRaises(InvalidCacheKeyException):
validate_cache_key('too_long:' + 'X'*MEMCACHED_MAX_KEY_LENGTH)
with self.assertRaises(InvalidCacheKeyException):
# validate_cache_key does validation on a key with the
# KEY_PREFIX appended to the start, so even though we're
# passing something "short enough" here, it becomes too
# long after appending KEY_PREFIX.
validate_cache_key('X' * (MEMCACHED_MAX_KEY_LENGTH - 2))
def test_cache_functions_raise_exception(self) -> None:
invalid_key = 'invalid_character:\n'
good_key = "good_key"
with self.assertRaises(InvalidCacheKeyException):
cache_get(invalid_key)
with self.assertRaises(InvalidCacheKeyException):
cache_set(invalid_key, 0)
with self.assertRaises(InvalidCacheKeyException):
cache_delete(invalid_key)
with self.assertRaises(InvalidCacheKeyException):
cache_get_many([good_key, invalid_key])
with self.assertRaises(InvalidCacheKeyException):
cache_set_many({good_key: 0, invalid_key: 1})
with self.assertRaises(InvalidCacheKeyException):
cache_delete_many([good_key, invalid_key])
class CacheWithKeyDecoratorTest(ZulipTestCase):
def test_cache_with_key_invalid_character(self) -> None:
def invalid_characters_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:invalid_character:ą:{user_id}'
@cache_with_key(invalid_characters_cache_key_function, timeout=1000)
def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.cache_set') as mock_set, \
patch('zerver.lib.cache.logger.warning') as mock_warn:
with queries_captured() as queries:
result = get_user_function_with_bad_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
mock_set.assert_not_called()
mock_warn.assert_called_once()
def test_cache_with_key_key_too_long(self) -> None:
def too_long_cache_key_function(user_id: int) -> str:
return 'CacheWithKeyDecoratorTest:very_long_key:{}:{}'.format('a'*250, user_id)
@cache_with_key(too_long_cache_key_function, timeout=1000)
def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.cache_set') as mock_set, \
patch('zerver.lib.cache.logger.warning') as mock_warn:
with queries_captured() as queries:
result = get_user_function_with_bad_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
mock_set.assert_not_called()
mock_warn.assert_called_once()
def test_cache_with_key_good_key(self) -> None:
def good_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:good_cache_key:{user_id}'
@cache_with_key(good_cache_key_function, timeout=1000)
def get_user_function_with_good_cache_keys(user_id: int) -> UserProfile:
return UserProfile.objects.get(id=user_id)
hamlet = self.example_user('hamlet')
with queries_captured() as queries:
result = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
self.assert_length(queries, 1)
# The previous function call should have cached the result correctly, so now
# no database queries should happen:
with queries_captured() as queries_two:
result_two = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result_two, hamlet)
self.assert_length(queries_two, 0)
def test_cache_with_key_none_values(self) -> None:
def cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:test_cache_with_key_none_values:{user_id}'
@cache_with_key(cache_key_function, timeout=1000)
def get_user_function_can_return_none(user_id: int) -> Optional[UserProfile]:
try:
return UserProfile.objects.get(id=user_id)
except UserProfile.DoesNotExist:
return None
last_user_id = UserProfile.objects.last().id
with queries_captured() as queries:
result = get_user_function_can_return_none(last_user_id + 1)
self.assertEqual(result, None)
self.assert_length(queries, 1)
with queries_captured() as queries:
result_two = get_user_function_can_return_none(last_user_id + 1)
self.assertEqual(result_two, None)
self.assert_length(queries, 0)
class GetCacheWithKeyDecoratorTest(ZulipTestCase):
def test_get_cache_with_good_key(self) -> None:
# Test with a good cache key function, but a get_user function
# that always returns None just to make it convenient to tell
# whether the cache was used (whatever we put in the cache) or
# we got the result from calling the function (None)
def good_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:good_cache_key:{user_id}'
@get_cache_with_key(good_cache_key_function)
def get_user_function_with_good_cache_keys(user_id: int) -> Any: # nocoverage
return
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.logger.warning') as mock_warn:
with self.assertRaises(NotFoundInCache):
get_user_function_with_good_cache_keys(hamlet.id)
mock_warn.assert_not_called()
cache_set(good_cache_key_function(hamlet.id), hamlet)
result = get_user_function_with_good_cache_keys(hamlet.id)
self.assertEqual(result, hamlet)
def test_get_cache_with_bad_key(self) -> None:
def bad_cache_key_function(user_id: int) -> str:
return f'CacheWithKeyDecoratorTest:invalid_character:ą:{user_id}'
@get_cache_with_key(bad_cache_key_function)
def get_user_function_with_bad_cache_keys(user_id: int) -> Any: # nocoverage
return
hamlet = self.example_user('hamlet')
with patch('zerver.lib.cache.logger.warning') as mock_warn:
with self.assertRaises(NotFoundInCache):
get_user_function_with_bad_cache_keys(hamlet.id)
mock_warn.assert_called_once()
class SafeCacheFunctionsTest(ZulipTestCase):
def test_safe_cache_functions_with_all_good_keys(self) -> None:
items = {"SafeFunctionsTest:key1": 1, "SafeFunctionsTest:key2": 2, "SafeFunctionsTest:key3": 3}
safe_cache_set_many(items)
result = safe_cache_get_many(list(items.keys()))
for key, value in result.items():
self.assertEqual(value, items[key])
def test_safe_cache_functions_with_all_bad_keys(self) -> None:
items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
with patch('zerver.lib.cache.logger.warning') as mock_warn:
safe_cache_set_many(items)
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
with patch('zerver.lib.cache.logger.warning') as mock_warn:
result = safe_cache_get_many(list(items.keys()))
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
self.assertEqual(result, {})
def test_safe_cache_functions_with_good_and_bad_keys(self) -> None:
bad_items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
good_items = {"SafeFunctionsTest:goodkey1": 3, "SafeFunctionsTest:goodkey2": 4}
items = {**good_items, **bad_items}
with patch('zerver.lib.cache.logger.warning') as mock_warn:
safe_cache_set_many(items)
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
with patch('zerver.lib.cache.logger.warning') as mock_warn:
result = safe_cache_get_many(list(items.keys()))
mock_warn.assert_called_once()
self.assertEqual(
mock_warn.call_args[0][1],
['SafeFunctionsTest:\nbadkey1', 'SafeFunctionsTest:\nbadkey2'],
)
self.assertEqual(result, good_items)
class BotCacheKeyTest(ZulipTestCase):
def test_bot_profile_key_deleted_on_save(self) -> None:
# Get the profile cached on both cache keys:
user_profile = get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT)
bot_profile = get_system_bot(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(user_profile, bot_profile)
# Flip the setting and save:
flipped_setting = not bot_profile.is_api_super_user
bot_profile.is_api_super_user = flipped_setting
bot_profile.save()
# The .save() should have deleted cache keys, so if we fetch again,
# the returned objects should have is_api_super_user set correctly.
bot_profile2 = get_system_bot(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(bot_profile2.is_api_super_user, flipped_setting)
user_profile2 = get_user_profile_by_email(settings.EMAIL_GATEWAY_BOT)
self.assertEqual(user_profile2.is_api_super_user, flipped_setting)
def get_user_email(user: UserProfile) -> str:
return user.email # nocoverage
class GenericBulkCachedFetchTest(ZulipTestCase):
def test_query_function_called_only_if_needed(self) -> None:
# Get the user cached:
hamlet = get_user_profile_by_email(self.example_email("hamlet"))
class CustomException(Exception):
pass
def query_function(emails: List[str]) -> List[UserProfile]:
raise CustomException("The query function was called")
# query_function shouldn't be called, because the only requested object
# is already cached:
result: Dict[str, UserProfile] = bulk_cached_fetch(
cache_key_function=user_profile_by_email_cache_key,
query_function=query_function,
object_ids=[self.example_email("hamlet")],
id_fetcher=get_user_email,
)
self.assertEqual(result, {hamlet.delivery_email: hamlet})
with self.assertLogs(level='INFO') as info_log:
flush_cache(Mock())
self.assertEqual(info_log.output, [
'INFO:root:Clearing memcached cache after migrations'
])
# With the cache flushed, the query_function should get called:
with self.assertRaises(CustomException):
result = bulk_cached_fetch(
cache_key_function=user_profile_by_email_cache_key,
query_function=query_function,
object_ids=[self.example_email("hamlet")],
id_fetcher=get_user_email,
)
def test_empty_object_ids_list(self) -> None:
class CustomException(Exception):
pass
def cache_key_function(email: str) -> str: # nocoverage -- this is just here to make sure it's not called
raise CustomException("The cache key function was called")
def query_function(emails: List[str]) -> List[UserProfile]: # nocoverage -- this is just here to make sure it's not called
raise CustomException("The query function was called")
# query_function and cache_key_function shouldn't be called, because
# objects_ids is empty, so there's nothing to do.
result: Dict[str, UserProfile] = bulk_cached_fetch(
cache_key_function=cache_key_function,
query_function=query_function,
object_ids=[],
id_fetcher=get_user_email,
)
self.assertEqual(result, {})
| apache-2.0 | -7,180,724,032,288,269,000 | 41.794562 | 131 | 0.63911 | false |
ibelikov/jimmy | jimmy/tests/base.py | 1 | 1400 | # -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
jimmy_dir = os.path.dirname(os.path.dirname(__file__))
class TestCase(object):
"""Test case base class for all unit tests."""
def setup(self):
with open(os.path.join(jimmy_dir, 'lib', 'schema.yaml'), 'r') as f:
self.jimmy_schema = f.read()
with open(os.path.join(jimmy_dir, 'jimmy.yaml'), 'r') as f:
self.jimmy_yaml = f.read()
self.mock_jimmy_yaml = self.jimmy_yaml.replace(
"jenkins_cli_path: /var/cache/jenkins/war/WEB-INF/jenkins-cli.jar",
"jenkins_cli_path: << path to jenkins-cli.jar >>")
| apache-2.0 | -2,660,098,153,472,267,000 | 40.176471 | 83 | 0.662143 | false |
alexkychen/PythonForBiologists | 04_SeqAlignment.py | 1 | 2605 | #!/usr/bin/env python
#Python for biologists
#03_Sequence Alignment
#Align two DNA fragments
seq1 = "ATCGTGCTAGCTGCATC"
seq2 = "ATCATGCTAGCTGCTTC"
n = 0
seq1L = []
seq2L = []
bondL = []
seq1len = len(seq1)
seq2len = len(seq2)
print(seq1len, seq2len)
#Under condition of one nucleotide mutation (SNP), equal sequence length
while True:
if n < seq1len and n < seq2len:
nuc1 = seq1[n]
nuc2 = seq2[n]
#when identical
if nuc1 == nuc2:
seq1L.append(nuc1)
seq2L.append(nuc2)
bondL.append("|")
n = n + 1
#when SNP
elif nuc1 != nuc2 and seq1[n+1] == seq2[n+1]:
seq1L.append(nuc1)
seq2L.append(nuc2)
bondL.append(" ")
n = n + 1
else:
break
else:
break
seq1N = "".join(seq1L)
seq2N = "".join(seq2L)
bondLN = "".join(bondL)
print(seq1N+"\n"+bondLN+"\n"+seq2N)
##Under condition of one mutation or indel, lengths of the sequences may not be equal
seq1 = "TCGTGCTAGCTGCATCTGT"
seq2 = "ATCATGTAGACTGCTT"
n = 0
m = 0
seq1L = []
seq2L = []
bondL = []
print(len(seq1), len(seq2))
#claim some stats variables
NoMatch=0
NoSNP=0
NoIndel=0
while True:
if n < len(seq1) and m < len(seq2):
nuc1 = seq1[n]
nuc2 = seq2[m]
#when identical
if nuc1 == nuc2:
seq1L.append(nuc1)
seq2L.append(nuc2)
bondL.append("|")
n = n + 1
m = m + 1
NoMatch = NoMatch + 1
#when SNP
elif nuc1 != nuc2 and seq1[n+1] == seq2[m+1]:
seq1L.append(nuc1)
seq2L.append(nuc2)
bondL.append(" ")
n = n + 1
m = m + 1
NoSNP = NoSNP + 1
#when insertion in seq2
elif nuc1 != nuc2 and nuc1 == seq2[m+1]:
seq1L.append("-")
seq2L.append(nuc2)
bondL.append(" ")
m = m + 1
NoIndel = NoIndel + 1
#when deletion in seq2
elif nuc1 != nuc2 and seq1[n+1] == nuc2:
seq1L.append(nuc1)
seq2L.append("-")
bondL.append(" ")
n = n + 1
NoIndel = NoIndel + 1
else:
break
else:#when at least one of the sequences ends
#when seq2 ends, but seq1 not ends
if n < len(seq1):
nuc1 = seq1[n]
seq1L.append(nuc1)
seq2L.append(" ")
bondL.append(" ")
n = n + 1
#when seq1 ends, but seq2 not ends
elif m < len(seq2):
nuc2 = seq2[m]
seq1L.append(" ")
seq2L.append(nuc2)
bondL.append(" ")
m = m + 1
#when both ends
else:
break
seq1N = "".join(seq1L)
seq2N = "".join(seq2L)
bondLN = "".join(bondL)
print(seq1N+"\n"+bondLN+"\n"+seq2N)
print("Stats: "+str(NoMatch)+ " matches; "+str(NoSNP)+" SNPs; "+str(NoIndel)+" Indels.")
| mit | -7,424,942,195,998,097,000 | 21.264957 | 88 | 0.572361 | false |
ActiveState/code | recipes/Python/577413_Topological_Sort/recipe-577413.py | 1 | 1466 | try:
from functools import reduce
except:
pass
data = {
'des_system_lib': set('std synopsys std_cell_lib des_system_lib dw02 dw01 ramlib ieee'.split()),
'dw01': set('ieee dw01 dware gtech'.split()),
'dw02': set('ieee dw02 dware'.split()),
'dw03': set('std synopsys dware dw03 dw02 dw01 ieee gtech'.split()),
'dw04': set('dw04 ieee dw01 dware gtech'.split()),
'dw05': set('dw05 ieee dware'.split()),
'dw06': set('dw06 ieee dware'.split()),
'dw07': set('ieee dware'.split()),
'dware': set('ieee dware'.split()),
'gtech': set('ieee gtech'.split()),
'ramlib': set('std ieee'.split()),
'std_cell_lib': set('ieee std_cell_lib'.split()),
'synopsys': set(),
}
def toposort2(data):
for k, v in data.items():
v.discard(k) # Ignore self dependencies
extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
data.update({item:set() for item in extra_items_in_deps})
while True:
ordered = set(item for item,dep in data.items() if not dep)
if not ordered:
break
yield ' '.join(sorted(ordered))
data = {item: (dep - ordered) for item,dep in data.items()
if item not in ordered}
assert not data, "A cyclic dependency exists amongst %r" % data
print ('\n'.join( toposort2(data) ))
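# Expected output (one line per dependency "level"; worked out by hand from the
# data above, so treat as illustrative):
#   ieee std synopsys
#   dware gtech ramlib std_cell_lib
#   dw01 dw02 dw05 dw06 dw07
#   des_system_lib dw03 dw04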
| mit | 9,077,598,396,550,940,000 | 39.722222 | 102 | 0.549113 | false |
KayvanMazaheri/IMH-Summer-School-2015 | Libs/EquationSolve.py | 1 | 1320 | def swapRow (givenList , answer , j , k):
for i in range(len(givenList)):
temp = givenList[j][i]
givenList[j][i] = givenList[k][i]
givenList[k][i] = temp
    # answer is a flat list (one value per row), so swap its entries once,
    # outside the per-column loop
    temp = answer[j]
    answer[j] = answer[k]
    answer[k] = temp
    return
def correct(givenList , answer):
for i in range(len(givenList)):
if (givenList[i][i] == 0):
temp_i = i
toSwap = -1
for j in range (i , len(givenList)):
if (givenList[j][i] != 0):
toSwap = j
break
if (toSwap != -1):
swapRow(givenList ,answer , temp_i , toSwap)
break
return
def solve (givenList , answer):
for i in range(len(givenList)):
if (givenList[i][i] == 0):
correct(givenList , answer)
pivot = givenList[i][i]
for j in range(len(givenList)):
if (j == i):
continue
coef = (givenList[j][i] / givenList[i][i]) * (-1)
for k in range(len(givenList)):
givenList[j][k] += givenList[i][k] * coef
answer[j] += answer[i] * coef
for i in range (len(givenList)):
div = givenList[i][i]
answer[i] /= div
givenList[i][i] /= div
return answer
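# Example usage (illustrative): solve 2x + y = 3 and x + 3y = 5.
# Note that solve() modifies both arguments in place.
#   A = [[2.0, 1.0], [1.0, 3.0]]
#   b = [3.0, 5.0]
#   solve(A, b)   # -> [0.8, 1.4], i.e. x = 0.8, y = 1.4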
| gpl-2.0 | -230,294,811,296,953,380 | 25.938776 | 61 | 0.474242 | false |
att-comdev/armada | hapi/chart/template_pb2.py | 1 | 2451 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: hapi/chart/template.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='hapi/chart/template.proto',
package='hapi.chart',
syntax='proto3',
serialized_pb=_b('\n\x19hapi/chart/template.proto\x12\nhapi.chart\"&\n\x08Template\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x42\x07Z\x05\x63hartb\x06proto3')
)
_TEMPLATE = _descriptor.Descriptor(
name='Template',
full_name='hapi.chart.Template',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='hapi.chart.Template.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='hapi.chart.Template.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=41,
serialized_end=79,
)
DESCRIPTOR.message_types_by_name['Template'] = _TEMPLATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Template = _reflection.GeneratedProtocolMessageType('Template', (_message.Message,), dict(
DESCRIPTOR = _TEMPLATE,
__module__ = 'hapi.chart.template_pb2'
# @@protoc_insertion_point(class_scope:hapi.chart.Template)
))
_sym_db.RegisterMessage(Template)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z\005chart'))
# @@protoc_insertion_point(module_scope)
| apache-2.0 | 842,592,657,378,128,800 | 30.423077 | 194 | 0.71481 | false |
zapcoop/vertex | vertex_api/service/models/_reply_template.py | 1 | 1918 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from hvad.models import TranslatableModel, TranslatedFields
from vertex.models import AbstractDatedModel
__author__ = 'Jonathan Senecal <[email protected]>'
class ReplyTemplate(AbstractDatedModel, TranslatableModel):
"""
We can allow the admin to define a number of pre-set replies, used to
simplify the sending of updates and resolutions. These are basically Django
    templates with a limited context - however if you wanted to get crafty it would
be easy to write a reply that displays ALL updates in hierarchical order etc
with use of for loops over {{ ticket.followup_set.all }} and friends.
When replying to a ticket, the user can select any reply set for the current
department, and the body text is fetched via AJAX.
"""
departments = models.ManyToManyField(
'service.department',
blank=True,
help_text=_('Leave blank to allow this reply to be used for all '
'departments, or select those departments you wish to limit this reply to.'),
)
translations = TranslatedFields(
name=models.CharField(
_('Name'),
max_length=100,
help_text=_('Only used to assist users with selecting a reply - not '
'shown to the user.'),
),
body=models.TextField(
_('Body'),
help_text=_('Context available: {{ ticket }} - ticket object (eg '
'{{ ticket.title }}); {{ department }} - The department; and {{ user }} '
'- the current user.'),
)
)
class Meta:
ordering = ['translations__name', ]
verbose_name = _('Pre-set reply')
verbose_name_plural = _('Pre-set replies')
app_label = 'service'
def __str__(self):
return u'%s' % self.name | agpl-3.0 | -5,038,954,966,465,524,000 | 35.903846 | 97 | 0.620438 | false |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Ops/PyScripts/wrappers/packetredirect.py | 1 | 1053 |
import dsz
import ops.data
import ops.parseargs
dsz.control.echo.Off()
dsz.ui.Background()
parser = ops.parseargs.ArgumentParser()
parser.add_argument('command_id', type=int, help='Command ID to monitor.')
args = parser.parse_args()
pr = ops.data.getDszObject(args.command_id)
lasterror = None
while True:
dsz.script.CheckStop()
dsz.Sleep(5000)
pr.update()
if (not pr.commandmetadata.isrunning):
break
errors = pr.commandmetadata.friendlyerrors
if (not len(errors)):
continue
if ((lasterror is None) or (lasterror < errors[(-1)].timestamp)):
lasterror = errors[(-1)].timestamp
msg = ('packetredirect failed to send!\n Command %d: %s\n' % (pr.commandmetadata.id, pr.commandmetadata.fullcommand))
for i in errors[(-1)]:
if ((i.type == 'OsError') and (i.text == 'The system cannot find the file specified.')):
msg += (" - %s Do you need '-driver', '-raw' or to toggle FLAV?" % i)
else:
msg += (' - %s\n' % i)
ops.alert(msg) | unlicense | -5,557,397,730,484,920,000 | 35.344828 | 125 | 0.618234 | false |
simirimia/webarok | src/Actions/PlayerStatus.py | 1 | 1109 | """
The current status (player, stopped, paused)
(c) webarok project
http://sourceforge.net/projects/webarok/
This file is part of Webarok.
Webarok is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Webarok is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Webarok. If not, see <http://www.gnu.org/licenses/>.
"""
from Actions.ActionBase import PlayerActionBase
from json import JSONEncoder
class PlayerStatus( PlayerActionBase ):
def do( self, param ):
status = PlayerActionBase.player.getStatus()
encoder = JSONEncoder()
if status == False:
return False
self.out = encoder.encode( status.getDictionary() )
return True
| gpl-3.0 | 5,018,001,448,671,837,000 | 32.606061 | 68 | 0.74211 | false |
wo3kie/pchGenerator | recursive_filter.py | 1 | 1081 | from topological_sorter import\
TopologicalSorter
#
# RecursiveFilter
#
class RecursiveFilter:
def __init__( self, tSorter, predicate, options ):
self._filteredNodes = []
self._tSorter = tSorter
self._options = options
for node in self._tSorter.getNodes():
self.__filter( node, predicate )
self.__cleanUp()
def __filter( self, node, predicate ):
if predicate( node ) == False:
return
if node.isIncluded():
if self._options.watch_header == node.getData():
node.setFailingReason( "Header already included by other header" )
return
self._filteredNodes.append( node )
node.setIncluded( True )
for child in node.getChildren():
child.setIncludedRecursively( True )
def __cleanUp( self ):
for node in self._filteredNodes:
node.setIncludedRecursively( False )
def getNodes( self ):
return self._filteredNodes
def getRoot( self ):
return self._tSorter.getRoot()
| mit | -2,822,735,406,377,043,000 | 24.139535 | 82 | 0.592044 | false |
BackupTheBerlios/fbcallnotify | callmonitor.py | 1 | 3691 | # -*- coding:utf-8 -*-
#########################################
# Name: FBCallNotify
# Copyright (C): 2010 Maximilian Köhl
# License: GPLv3
#########################################
import socket
import notifyer
class Callmonitor():
def __init__(self, multiprocessmanager, numberconfig):
self.processmanager = multiprocessmanager
self.numberconfig = numberconfig
self.connections = {}
self.events = {'RING' : self.on_ring,\
'CALL' : self.on_call,\
'CONNECT' : self.on_connect,\
'DISCONNECT' : self.on_disconnect}
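        # The FRITZ!Box call monitor (TCP port 1012) sends semicolon-separated
        # lines, roughly "<date>;<EVENT>;<connection id>;..."; the per-event
        # fields are unpacked in the handlers below (format inferred from this code).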
def start(self, host, port):
try:
self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.connection.connect((host, port))
tmpdata = ''
while True:
data = self.connection.recv(1024)
tmpdata += data
if data:
if tmpdata.endswith('\n'):
for infostr in (tmpdata.split('\n')):
info = infostr.split(';')
if info[0]:
if info[1] in self.events:
self.events[info[1]](info)
else:
                                    print('Protocol Error')
print(info)
tmpdata = ''
else:
                    print('Connection closed...')
break
self.connection.close()
except:
            print('Can\'t create socket...')
def on_ring(self, info):
self.connections[info[2]] = {'type':'in',\
'from':info[3],\
'to':info[4],\
'overext':info[5],\
'status':'off',\
'number':info[4]}
if self.connections[info[2]]['number'] in self.numberconfig:
self.processmanager.addProcess(notifyer.notifyer, args=[self.connections[info[2]], self.numberconfig[self.connections[info[2]]['number']], 'ring'])
def on_call(self, info):
self.connections[info[2]] = {'type' : 'out',\
'from' : info[4],\
'to' : info[5],\
'overint' : info[3],\
'status' : 'off',\
'number' : info[4]}
if self.connections[info[2]]['number'] in self.numberconfig:
self.processmanager.addProcess(notifyer.notifyer, args=[self.connections[info[2]], self.numberconfig[self.connections[info[2]]['number']], 'call'])
def on_connect(self, info):
self.connections[info[2]]['status'] = 'on'
self.connections[info[2]]['overint'] = info[3]
if self.connections[info[2]]['number'] in self.numberconfig:
self.processmanager.addProcess(notifyer.notifyer, args=[self.connections[info[2]], self.numberconfig[self.connections[info[2]]['number']], 'connect'])
def on_disconnect(self, info):
self.connections[info[2]]['status'] = 'off'
self.connections[info[2]]['time'] = info[3]
if self.connections[info[2]]['number'] in self.numberconfig:
self.processmanager.addProcess(notifyer.notifyer, args=[self.connections[info[2]], self.numberconfig[self.connections[info[2]]['number']], 'disconnect'])
| gpl-3.0 | 2,132,932,810,372,888,000 | 45.708861 | 195 | 0.458266 | false |
Thraxis/pymedusa | sickbeard/event_queue.py | 1 | 1607 | # coding=utf-8
import threading
import traceback
from six.moves.queue import Queue, Empty
from sickbeard import logger
from sickrage.helper.exceptions import ex
class Event(object):
def __init__(self, event_type):
self._type = event_type
@property
def event_type(self):
"""
Returns the type of the event
"""
return self._type
class Events(threading.Thread):
def __init__(self, callback):
super(Events, self).__init__()
self.queue = Queue()
self.daemon = True
self.callback = callback
self.name = "EVENT-QUEUE"
self.stop = threading.Event()
def put(self, event_type):
self.queue.put(event_type)
def run(self):
"""
Actually runs the thread to process events
"""
try:
while not self.stop.is_set():
try:
# get event type
event_type = self.queue.get(True, 1)
# perform callback if we got a event type
self.callback(event_type)
# event completed
self.queue.task_done()
except Empty:
event_type = None
# exiting thread
self.stop.clear()
except Exception as e:
logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG)
# System Events
class SystemEvent(Event):
RESTART = "RESTART"
SHUTDOWN = "SHUTDOWN"
| gpl-3.0 | 6,323,000,063,821,660,000 | 25.344262 | 98 | 0.539515 | false |
monovertex/ygorganizer | ygo_core/urls.py | 1 | 1039 | from django.conf.urls import patterns, url, include
from django.contrib import admin
from .views import (CollectionPageView, BrowsePageView, AboutPageView,
IndexPageView, DonationsPageView)
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^api/', include('ygo_api.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^auth/', include('registration.backends.default.urls')),
url(r'^import/', include('ygo_import.urls')),
url(r'^$', IndexPageView.as_view(), name='index'),
url(r'^about/$', AboutPageView.as_view(), name='about'),
url(r'^donate/$', DonationsPageView.as_view(), name='donations'),
url(r'^collection/(.+?/)?$', CollectionPageView.as_view(),
name='collection'),
url(r'^browse/(.+?/)?$', BrowsePageView.as_view(), name='browse'),
# url(r'^wishlist/$', BrowsePageView.as_view(), name='wishlist'),
# url(r'^deck-list/$', DeckListPageView.as_view(), name='decks'),
# url(r'^deck/([0-9]+/)$', BrowsePageView.as_view(), name='deck'),
)
| mit | -8,705,648,786,299,676,000 | 36.107143 | 70 | 0.632339 | false |
ric2b/Vivaldi-browser | chromium/third_party/blink/renderer/bindings/scripts/web_idl/enumeration.py | 1 | 2285 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from .code_generator_info import CodeGeneratorInfo
from .composition_parts import WithCodeGeneratorInfo
from .composition_parts import WithComponent
from .composition_parts import WithDebugInfo
from .composition_parts import WithExtendedAttributes
from .ir_map import IRMap
from .make_copy import make_copy
from .user_defined_type import UserDefinedType
class Enumeration(UserDefinedType, WithExtendedAttributes,
WithCodeGeneratorInfo, WithComponent, WithDebugInfo):
"""https://heycam.github.io/webidl/#idl-enums"""
class IR(IRMap.IR, WithExtendedAttributes, WithCodeGeneratorInfo,
WithComponent, WithDebugInfo):
def __init__(self,
identifier,
values,
extended_attributes=None,
code_generator_info=None,
component=None,
debug_info=None):
assert isinstance(values, (list, tuple))
assert all(isinstance(value, str) for value in values)
IRMap.IR.__init__(
self, identifier=identifier, kind=IRMap.IR.Kind.ENUMERATION)
WithExtendedAttributes.__init__(self, extended_attributes)
WithCodeGeneratorInfo.__init__(self, code_generator_info)
WithComponent.__init__(self, component)
WithDebugInfo.__init__(self, debug_info)
self.values = list(values)
def __init__(self, ir):
assert isinstance(ir, Enumeration.IR)
ir = make_copy(ir)
UserDefinedType.__init__(self, ir.identifier)
WithExtendedAttributes.__init__(self, ir.extended_attributes)
WithCodeGeneratorInfo.__init__(
self, CodeGeneratorInfo(ir.code_generator_info))
WithComponent.__init__(self, components=ir.components)
WithDebugInfo.__init__(self, ir.debug_info)
self._values = tuple(ir.values)
@property
def values(self):
"""Returns the list of enum values."""
return self._values
# UserDefinedType overrides
@property
def is_enumeration(self):
return True
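# Rough construction sketch (identifier, values and component are made-up
# placeholders, shown only as commented-out example code):
#
#     ir = Enumeration.IR(identifier=some_identifier,   # hypothetical Identifier
#                         values=['a', 'b', 'c'],
#                         component=some_component)     # hypothetical component
#     enum = Enumeration(ir)
#     # enum.is_enumeration is True and enum.values == ('a', 'b', 'c')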
| bsd-3-clause | -6,105,392,281,440,092,000 | 36.459016 | 76 | 0.646827 | false |
wackerly/faucet | tests/fakeoftable.py | 1 | 17306 | # Copyright (C) 2015 Research and Innovation Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2018 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bitstring import Bits
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
from ryu.lib import addrconv
class FakeOFTableException(Exception):
pass
class FakeOFTable(object):
"""Fake OFTable is a virtual openflow pipeline used for testing openflow controllers.
The tables are populated using apply_ofmsgs and can be queried with
is_ouput.
"""
def __init__(self, num_tables):
self.tables = [[] for _ in range(0, num_tables)]
self.groups = {}
def _apply_groupmod(self, ofmsg):
"""Maintain group table."""
def _del(_ofmsg, group_id):
if group_id == ofp.OFPG_ALL:
self.groups = {}
return
if group_id in self.groups:
del self.groups[group_id]
def _add(ofmsg, group_id):
if group_id in self.groups:
raise FakeOFTableException('group already in group table: %s' % ofmsg)
self.groups[group_id] = ofmsg
def _modify(ofmsg, group_id):
if group_id not in self.groups:
raise FakeOFTableException('group not in group table: %s' % ofmsg)
self.groups[group_id] = ofmsg
_groupmod_handlers = {
ofp.OFPGC_DELETE: _del,
ofp.OFPGC_ADD: _add,
ofp.OFPGC_MODIFY: _modify,
}
_groupmod_handlers[ofmsg.command](ofmsg, ofmsg.group_id)
def _apply_flowmod(self, ofmsg):
"""Adds, Deletes and modify flow modification messages are applied
according to section 6.4 of the OpenFlow 1.3 specification."""
def _add(table, flowmod):
# From the 1.3 spec, section 6.4:
# For add requests (OFPFC_ADD) with the
# OFPFF_CHECK_OVERLAP flag set, the switch must first
# check for any overlapping flow entries in the
# requested table. Two flow entries overlap if a
# single packet may match both, and both flow entries
# have the same priority, but the two flow entries
# don't have the exact same match. If an overlap
# conflict exists between an existing flow entry and
# the add request, the switch must refuse the addition
# and respond with an ofp_error_msg with
# OFPET_FLOW_MOD_FAILED type and OFPFMFC_OVERLAP code.
#
# Without the check overlap flag it seems like it is
# possible that we can have overlapping flow table
# entries which will cause ambiguous behaviour. This is
            # obviously unacceptable so we will assume this is
# always set
add = True
for fte in table:
if flowmod.fte_matches(fte, strict=True):
table.remove(fte)
break
elif flowmod.overlaps(fte):
add = False
break
if add:
table.append(flowmod)
def _del(table, flowmod):
removals = [fte for fte in table if flowmod.fte_matches(fte)]
for fte in removals:
table.remove(fte)
def _del_strict(table, flowmod):
for fte in table:
if flowmod.fte_matches(fte, strict=True):
table.remove(fte)
break
def _modify(table, flowmod):
for fte in table:
if flowmod.fte_matches(fte):
fte.instructions = flowmod.instructions
def _modify_strict(table, flowmod):
for fte in table:
if flowmod.fte_matches(fte, strict=True):
fte.instructions = flowmod.instructions
break
_flowmod_handlers = {
ofp.OFPFC_ADD: _add,
ofp.OFPFC_DELETE: _del,
ofp.OFPFC_DELETE_STRICT: _del_strict,
ofp.OFPFC_MODIFY: _modify,
ofp.OFPFC_MODIFY_STRICT: _modify_strict,
}
table_id = ofmsg.table_id
if table_id == ofp.OFPTT_ALL or table_id is None:
tables = self.tables
else:
tables = [self.tables[table_id]]
flowmod = FlowMod(ofmsg)
for table in tables:
_flowmod_handlers[ofmsg.command](table, flowmod)
def apply_ofmsgs(self, ofmsgs):
"""Update state of test flow tables."""
for ofmsg in ofmsgs:
if isinstance(ofmsg, parser.OFPBarrierRequest):
continue
if isinstance(ofmsg, parser.OFPPacketOut):
continue
if isinstance(ofmsg, parser.OFPSetConfig):
continue
if isinstance(ofmsg, parser.OFPSetAsync):
continue
if isinstance(ofmsg, parser.OFPDescStatsRequest):
continue
if isinstance(ofmsg, parser.OFPTableFeaturesStatsRequest):
# TODO: validate TFM
continue
if isinstance(ofmsg, parser.OFPMeterMod):
# TODO: handle OFPMeterMod
continue
if isinstance(ofmsg, parser.OFPGroupMod):
self._apply_groupmod(ofmsg)
continue
if isinstance(ofmsg, parser.OFPFlowMod):
self._apply_flowmod(ofmsg)
self.sort_tables()
continue
raise FakeOFTableException('Unsupported flow %s' % str(ofmsg))
def lookup(self, match):
"""Return the entries from flowmods that matches match.
Searches each table in the pipeline for the entries that will be
applied to the packet with fields represented by match.
Arguments:
match: a dictionary keyed by header field names with values.
header fields not provided in match must be wildcarded for the
entry to be considered matching.
Returns: a list of the flowmods that will be applied to the packet
represented by match
"""
packet_dict = match.copy() # Packet headers may be modified
instructions = []
table_id = 0
goto_table = True
while goto_table:
goto_table = False
table = self.tables[table_id]
matching_fte = None
# find a matching flowmod
for fte in table:
if fte.pkt_matches(packet_dict):
matching_fte = fte
break
# if a flowmod is found, make modifications to the match values and
# determine if another lookup is necessary
if matching_fte:
for instruction in matching_fte.instructions:
instructions.append(instruction)
if instruction.type == ofp.OFPIT_GOTO_TABLE:
if table_id < instruction.table_id:
table_id = instruction.table_id
goto_table = True
elif instruction.type == ofp.OFPIT_APPLY_ACTIONS:
for action in instruction.actions:
if action.type == ofp.OFPAT_SET_FIELD:
packet_dict[action.key] = action.value
return instructions
def is_output(self, match, port=None, vid=None):
"""Return true if packets with match fields is output to port with
correct vlan.
If port is none it will return true if output to any port (including
special ports) regardless of vlan tag.
If vid is none it will return true if output to specified port
regardless of vlan tag.
        To specify that the packet should be output without a vlan tag, set the
        OFPVID_PRESENT bit in vid to 0.
Arguments:
Match: a dictionary keyed by header field names with values.
"""
def _output_result(action, vid_stack, port, vid):
if port is None:
return True
if action.port == port:
if vid is None:
return True
if vid & ofp.OFPVID_PRESENT == 0:
return not vid_stack
return vid_stack and vid == vid_stack[-1]
return None
def _process_vid_stack(action, vid_stack):
if action.type == ofp.OFPAT_PUSH_VLAN:
vid_stack.append(ofp.OFPVID_PRESENT)
elif action.type == ofp.OFPAT_POP_VLAN:
vid_stack.pop()
elif action.type == ofp.OFPAT_SET_FIELD:
if action.key == 'vlan_vid':
vid_stack[-1] = action.value
return vid_stack
# vid_stack represents the packet's vlan stack, innermost label listed
# first
match_vid = match.get('vlan_vid', 0)
vid_stack = []
if match_vid & ofp.OFPVID_PRESENT != 0:
vid_stack.append(match_vid)
instructions = self.lookup(match)
for instruction in instructions:
if instruction.type != ofp.OFPIT_APPLY_ACTIONS:
continue
for action in instruction.actions:
vid_stack = _process_vid_stack(action, vid_stack)
if action.type == ofp.OFPAT_OUTPUT:
output_result = _output_result(action, vid_stack, port, vid)
if output_result is not None:
return output_result
elif action.type == ofp.OFPAT_GROUP:
if action.group_id not in self.groups:
raise FakeOFTableException(
'output group not in group table: %s' % action)
buckets = self.groups[action.group_id].buckets
for bucket in buckets:
bucket_vid_stack = vid_stack
for bucket_action in bucket.actions:
bucket_vid_stack = _process_vid_stack(
bucket_action, bucket_vid_stack)
if bucket_action.type == ofp.OFPAT_OUTPUT:
output_result = _output_result(
bucket_action, vid_stack, port, vid)
if output_result is not None:
return output_result
return False
def __str__(self):
string = ''
for table_id, table in enumerate(self.tables):
string += '----- Table %u -----\n' % (table_id)
string += '\n'.join([str(flowmod) for flowmod in table])
return string
def sort_tables(self):
"""Sort flows in tables by priority order."""
self.tables = [sorted(table, reverse=True) for table in self.tables]
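# Typical test usage sketch (commented-out example only; the match fields and
# port numbers are arbitrary):
#
#     table = FakeOFTable(num_tables=2)
#     flowmod = parser.OFPFlowMod(
#         None, table_id=0, command=ofp.OFPFC_ADD, priority=1,
#         match=parser.OFPMatch(in_port=1),
#         instructions=[parser.OFPInstructionActions(
#             ofp.OFPIT_APPLY_ACTIONS, [parser.OFPActionOutput(2)])])
#     table.apply_ofmsgs([flowmod])
#     assert table.is_output({'in_port': 1}, port=2)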
class FlowMod(object):
"""Represents a flow modification message and its corresponding entry in
the flow table.
"""
MAC_MATCH_FIELDS = (
'eth_src', 'eth_dst', 'arp_sha', 'arp_tha', 'ipv6_nd_sll',
'ipv6_nd_tll'
)
IPV4_MATCH_FIELDS = ('ipv4_src', 'ipv4_dst', 'arp_spa', 'arp_tpa')
IPV6_MATCH_FIELDS = ('ipv6_src', 'ipv6_dst', 'ipv6_nd_target')
def __init__(self, flowmod):
"""flowmod is a ryu flow modification message object"""
self.priority = flowmod.priority
self.instructions = flowmod.instructions
self.match_values = {}
self.match_masks = {}
self.out_port = None
if ((flowmod.command == ofp.OFPFC_DELETE or flowmod.command == ofp.OFPFC_DELETE_STRICT) and
flowmod.out_port != ofp.OFPP_ANY):
self.out_port = flowmod.out_port
for key, val in flowmod.match.items():
if isinstance(val, tuple):
val, mask = val
else:
mask = -1
mask = self.match_to_bits(key, mask)
val = self.match_to_bits(key, val) & mask
self.match_values[key] = val
self.match_masks[key] = mask
def out_port_matches(self, other):
"""returns True if other has an output action to this flowmods
output_port"""
if self.out_port is None or self.out_port == ofp.OFPP_ANY:
return True
for instruction in other.instructions:
if instruction.type == ofp.OFPIT_APPLY_ACTIONS:
for action in instruction.actions:
if action.type == ofp.OFPAT_OUTPUT:
if action.port == self.out_port:
return True
return False
def pkt_matches(self, pkt_dict):
"""returns True if pkt_dict matches this flow table entry.
args:
pkt_dict - a dictionary keyed by flow table match fields with
values
if an element is included in the flow table entry match fields but not
in the pkt_dict that is assumed to indicate a failed match
"""
# TODO: add cookie and out_group
for key, val in self.match_values.items():
if key not in pkt_dict:
return False
else:
val_bits = self.match_to_bits(key, pkt_dict[key])
if val_bits != (val & self.match_masks[key]):
return False
return True
def _matches_match(self, other):
return (self.priority == other.priority and
self.match_values == other.match_values and
self.match_masks == other.match_masks)
def fte_matches(self, other, strict=False):
"""returns True if the flow table entry other matches this flowmod.
used for finding existing flow table entries that match with this
flowmod.
args:
other - a flowmod object
strict (bool) - whether to use strict matching (as defined in
of1.3 specification section 6.4)
"""
if not self.out_port_matches(other):
return False
if strict:
return self._matches_match(other)
for key, val in self.match_values.items():
if key not in other.match_values:
return False
else:
if other.match_values[key] & self.match_masks[key] != val:
return False
return True
def overlaps(self, other):
""" returns True if any packet can match both self and other."""
# This is different from the matches method as matches assumes an
# undefined field is a failed match. In this case an undefined field is
# potentially an overlap and therefore is considered success
if other.priority != self.priority:
return False
for key, val in self.match_values.items():
if key in other.match_values:
if val & other.match_masks[key] != other.match_values[key]:
return False
if other.match_values[key] & self.match_masks[key] != val:
return False
return True
def match_to_bits(self, key, val):
"""convert match fields and masks to bits objects.
this allows for masked matching. Converting all match fields to the
same object simplifies things (eg __str__).
"""
if isinstance(val, Bits):
return val
def _val_to_bits(conv, val, length):
            if val == -1:
return Bits(int=-1, length=length)
return Bits(bytes=conv(val), length=length)
if key in self.MAC_MATCH_FIELDS:
return _val_to_bits(addrconv.mac.text_to_bin, val, 48)
elif key in self.IPV4_MATCH_FIELDS:
return _val_to_bits(addrconv.ipv4.text_to_bin, val, 32)
elif key in self.IPV6_MATCH_FIELDS:
return _val_to_bits(addrconv.ipv6.text_to_bin, val, 128)
return Bits(int=int(val), length=64)
def __lt__(self, other):
return self.priority < other.priority
def __eq__(self, other):
return (self._matches_match(other) and
self.out_port == other.out_port and
self.instructions == other.instructions)
def __str__(self):
string = 'priority: {0}'.format(self.priority)
for key, val in self.match_values.items():
mask = self.match_masks[key]
string += ' {0}: {1}'.format(key, val)
if mask.int != -1: # pytype: disable=attribute-error
string += '/{0}'.format(mask)
string += ' Instructions: {0}'.format(str(self.instructions))
return string
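# Note on the Bits representation used above (example values are arbitrary and
# shown only as commented-out example code; `fm` is any FlowMod instance):
#
#     fm.match_to_bits('in_port', 1)                    # 64-bit Bits for an integer field
#     fm.match_to_bits('eth_dst', '0e:00:00:00:00:01')  # 48-bit Bits for a MAC address
#     fm.match_to_bits('ipv4_src', '10.0.0.1')          # 32-bit Bits for an IPv4 address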
| apache-2.0 | 5,955,598,394,282,099,000 | 38.331818 | 99 | 0.558535 | false |
DongjunLee/kino-bot | kino/dialog/presence.py | 1 | 3092 |
import arrow
from .dialog_manager import DialogManager
from .dnd import DoNotDisturbManager
from ..functions import Functions
from ..skills.predictor import Predictor
from ..skills.trello import TrelloManager
from ..skills.weather import Weather
from ..slack.resource import MsgResource
from ..slack.slackbot import SlackerAdapter
from ..utils.arrow import ArrowUtil
from ..utils.data_handler import DataHandler
from ..utils.state import State
class PreseneManager(object):
def __init__(self):
self.state = State()
self.slackbot = SlackerAdapter()
self.data_handler = DataHandler()
self.dialog_manager = DialogManager()
def check_wake_up(self, presence):
record = self.data_handler.read_record()
if "wake_up" in record.get("activity", {}):
return
state = State()
state.check()
presence_log = state.current[state.SLEEP]
if (
ArrowUtil.is_between((6, 0), (13, 0))
and presence_log["presence"] == "away"
and presence == "active"
):
self.slackbot.send_message(text=MsgResource.GOOD_MORNING)
dnd_manager = DoNotDisturbManager()
            is_holiday = not ArrowUtil.is_weekday()
dnd_manager.call_is_holiday(holiday=is_holiday)
activity = record.get("activity", {})
go_to_bed_time = arrow.get(activity.get("go_to_bed", None))
wake_up_time = arrow.now()
self.data_handler.edit_record_with_category(
"activity", ("wake_up", str(wake_up_time))
)
sleep_time = (wake_up_time - go_to_bed_time).seconds / 60 / 60
sleep_time = round(sleep_time * 100) / 100
self.data_handler.edit_record(("Sleep", str(sleep_time)))
self.slackbot.send_message(
text=MsgResource.SLEEP_TIME(
bed_time=go_to_bed_time.format("HH:mm"),
wakeup_time=wake_up_time.format("HH:mm"),
diff_h=str(sleep_time),
)
)
weather = Weather()
weather.forecast(timely="daily")
weather.air_quality()
functions = Functions(slackbot=self.slackbot)
trello = TrelloManager()
trello.clean_board(except_list_name=functions.IDEA_LIST)
functions.kanban_sync()
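    # The sleep duration computed above is just the gap between the recorded
    # "go_to_bed" and "wake_up" timestamps in hours, rounded to two decimals,
    # e.g. (example timestamps only):
    #
    #     go_to_bed_time = arrow.get('2024-01-01T23:30:00')
    #     wake_up_time = arrow.get('2024-01-02T07:15:00')
    #     sleep_time = (wake_up_time - go_to_bed_time).seconds / 60 / 60
    #     round(sleep_time * 100) / 100   # -> 7.75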
def check_flow(self, presence):
if presence == "active":
flow = self.dialog_manager.get_flow(is_raw=True)
if flow.get("class", None) == "HappyQuestion":
self.slackbot.send_message(text=MsgResource.FLOW_HAPPY)
elif flow.get("class", None) == "AttentionQuestion":
self.slackbot.send_message(text=MsgResource.FLOW_ATTENTION)
def check_predictor(self, presence):
flow = self.dialog_manager.get_flow(is_raw=True)
flow_class = flow.get("class", None)
if presence == "active" and flow_class is None:
predictor = Predictor()
predictor.predict_skill()
| mit | 7,462,080,519,424,263,000 | 32.247312 | 75 | 0.591527 | false |
marios-zindilis/musicbrainz-django-models | musicbrainz_django_models/models/l_area_work.py | 1 | 2078 | """
.. module:: l_area_work
The **L Area Work** Model.
PostgreSQL Definition
---------------------
The :code:`l_area_work` table is defined in the MusicBrainz Server as:
.. code-block:: sql
CREATE TABLE l_area_work ( -- replicate
id SERIAL,
link INTEGER NOT NULL, -- references link.id
entity0 INTEGER NOT NULL, -- references area.id
entity1 INTEGER NOT NULL, -- references work.id
edits_pending INTEGER NOT NULL DEFAULT 0 CHECK (edits_pending >= 0),
last_updated TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
link_order INTEGER NOT NULL DEFAULT 0 CHECK (link_order >= 0),
entity0_credit TEXT NOT NULL DEFAULT '',
entity1_credit TEXT NOT NULL DEFAULT ''
);
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class l_area_work(models.Model):
"""
Not all parameters are listed here, only those that present some interest
in their Django implementation.
:param int edits_pending: the MusicBrainz Server uses a PostgreSQL `check`
to validate that the value is a positive integer. In Django, this is
done with `models.PositiveIntegerField()`.
:param int link_order: the MusicBrainz Server uses a PostgreSQL `check`
to validate that the value is a positive integer. In Django, this is
done with `models.PositiveIntegerField()`.
"""
id = models.AutoField(primary_key=True)
link = models.ForeignKey('link')
entity0 = models.ForeignKey('area', related_name='links_to_work')
entity1 = models.ForeignKey('work')
edits_pending = models.PositiveIntegerField(default=0)
last_updated = models.DateTimeField(auto_now=True)
link_order = models.PositiveIntegerField(default=0)
    entity0_credit = models.TextField(default='')
    entity1_credit = models.TextField(default='')
def __str__(self):
return 'L Area Work'
class Meta:
db_table = 'l_area_work'
| gpl-2.0 | 2,971,579,159,202,513,400 | 34.220339 | 82 | 0.645332 | false |
levilucio/SyVOLT | mbeddr2C_MM/transformation_from_eclipse/Hlayer3rule4.py | 1 | 8838 | from core.himesis import Himesis
import uuid
class Hlayer3rule4(Himesis):
def __init__(self):
"""
Creates the himesis graph representing the DSLTrans rule layer3rule4.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(Hlayer3rule4, self).__init__(name='Hlayer3rule4', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = ['HimesisMM']
self["name"] = """layer3rule4"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'layer3rule4')
# match model. We only support one match model
self.add_node()
self.vs[0]["mm__"] = """MatchModel"""
# apply model node
self.add_node()
self.vs[1]["mm__"] = """ApplyModel"""
# paired with relation between match and apply models
self.add_node()
self.vs[2]["mm__"] = """paired_with"""
# match class ImplementationModule(layer3rule4class0) node
self.add_node()
self.vs[3]["mm__"] = """ImplementationModule"""
self.vs[3]["attr1"] = """+"""
# match_contains node for class ImplementationModule(layer3rule4class0)
self.add_node()
self.vs[4]["mm__"] = """match_contains"""
# match class InstanceConfiguration(layer3rule4class1) node
self.add_node()
self.vs[5]["mm__"] = """InstanceConfiguration"""
self.vs[5]["attr1"] = """+"""
# match_contains node for class InstanceConfiguration(layer3rule4class1)
self.add_node()
self.vs[6]["mm__"] = """match_contains"""
# match class ComponentInstance(layer3rule4class2) node
self.add_node()
self.vs[7]["mm__"] = """ComponentInstance"""
self.vs[7]["attr1"] = """+"""
# match_contains node for class ComponentInstance(layer3rule4class2)
self.add_node()
self.vs[8]["mm__"] = """match_contains"""
# match class AtomicComponent(layer3rule4class3) node
self.add_node()
self.vs[9]["mm__"] = """AtomicComponent"""
self.vs[9]["attr1"] = """+"""
# match_contains node for class AtomicComponent(layer3rule4class3)
self.add_node()
self.vs[10]["mm__"] = """match_contains"""
# apply class ImplementationModule(layer3rule4class4) node
self.add_node()
self.vs[11]["mm__"] = """ImplementationModule"""
self.vs[11]["attr1"] = """1"""
# apply_contains node for class ImplementationModule(layer3rule4class4)
self.add_node()
self.vs[12]["mm__"] = """apply_contains"""
# apply class GlobalVariableDeclaration(layer3rule4class5) node
self.add_node()
self.vs[13]["mm__"] = """GlobalVariableDeclaration"""
self.vs[13]["attr1"] = """1"""
# apply_contains node for class GlobalVariableDeclaration(layer3rule4class5)
self.add_node()
self.vs[14]["mm__"] = """apply_contains"""
# apply class TypeDef(layer3rule4class6) node
self.add_node()
self.vs[15]["mm__"] = """TypeDef"""
self.vs[15]["attr1"] = """1"""
# apply_contains node for class TypeDef(layer3rule4class6)
self.add_node()
self.vs[16]["mm__"] = """apply_contains"""
# apply class TypeDefType(layer3rule4class7) node
self.add_node()
self.vs[17]["mm__"] = """TypeDefType"""
self.vs[17]["attr1"] = """1"""
# apply_contains node for class TypeDefType(layer3rule4class7)
self.add_node()
self.vs[18]["mm__"] = """apply_contains"""
# match association ImplementationModule--contents-->InstanceConfiguration node
self.add_node()
self.vs[19]["attr1"] = """contents"""
self.vs[19]["mm__"] = """directLink_S"""
# match association InstanceConfiguration--contents-->ComponentInstance node
self.add_node()
self.vs[20]["attr1"] = """contents"""
self.vs[20]["mm__"] = """directLink_S"""
# match association ComponentInstance--component-->AtomicComponent node
self.add_node()
self.vs[21]["attr1"] = """component"""
self.vs[21]["mm__"] = """directLink_S"""
# apply association ImplementationModule--contents-->GlobalVariableDeclaration node
self.add_node()
self.vs[22]["attr1"] = """contents"""
self.vs[22]["mm__"] = """directLink_T"""
# apply association GlobalVariableDeclaration--type-->TypeDefType node
self.add_node()
self.vs[23]["attr1"] = """type"""
self.vs[23]["mm__"] = """directLink_T"""
# apply association TypeDefType--typeDef-->TypeDef node
self.add_node()
self.vs[24]["attr1"] = """typeDef"""
self.vs[24]["mm__"] = """directLink_T"""
# backward association ImplementationModule---->ImplementationModule node
self.add_node()
self.vs[25]["mm__"] = """backward_link"""
# backward association AtomicComponent---->TypeDef node
self.add_node()
self.vs[26]["mm__"] = """backward_link"""
# Add the edges
self.add_edges([
(0,4), # matchmodel -> match_contains
(4,3), # match_contains -> match_class ImplementationModule(layer3rule4class0)
(0,6), # matchmodel -> match_contains
(6,5), # match_contains -> match_class InstanceConfiguration(layer3rule4class1)
(0,8), # matchmodel -> match_contains
(8,7), # match_contains -> match_class ComponentInstance(layer3rule4class2)
(0,10), # matchmodel -> match_contains
(10,9), # match_contains -> match_class AtomicComponent(layer3rule4class3)
(1,12), # applymodel -> apply_contains
(12,11), # apply_contains -> apply_class ImplementationModule(layer3rule4class4)
(1,14), # applymodel -> apply_contains
(14,13), # apply_contains -> apply_class GlobalVariableDeclaration(layer3rule4class5)
(1,16), # applymodel -> apply_contains
(16,15), # apply_contains -> apply_class TypeDef(layer3rule4class6)
(1,18), # applymodel -> apply_contains
(18,17), # apply_contains -> apply_class TypeDefType(layer3rule4class7)
(3,19), # match_class ImplementationModule(layer3rule4class0) -> association contents
(19,5), # association contents -> match_class InstanceConfiguration(layer3rule4class1)
(5,20), # match_class InstanceConfiguration(layer3rule4class1) -> association contents
(20,7), # association contents -> match_class ComponentInstance(layer3rule4class2)
(7,21), # match_class ComponentInstance(layer3rule4class2) -> association component
(21,9), # association component -> match_class AtomicComponent(layer3rule4class3)
(11,22), # apply_class ImplementationModule(layer3rule4class4) -> association contents
(22,13), # association contents -> apply_class GlobalVariableDeclaration(layer3rule4class5)
(13,23), # apply_class GlobalVariableDeclaration(layer3rule4class5) -> association type
(23,17), # association type -> apply_class TypeDefType(layer3rule4class7)
(17,24), # apply_class TypeDefType(layer3rule4class7) -> association typeDef
(24,15), # association typeDef -> apply_class TypeDef(layer3rule4class6)
(11,25), # apply_class ImplementationModule(layer3rule4class4) -> backward_association
(25,3), # backward_association -> apply_class ImplementationModule(layer3rule4class0)
(15,26), # apply_class TypeDef(layer3rule4class6) -> backward_association
(26,9), # backward_association -> apply_class AtomicComponent(layer3rule4class3)
(0,2), # matchmodel -> pairedwith
(2,1) # pairedwith -> applyModel
])
# Add the attribute equations
self["equations"] = [((11,'__ApplyAttribute'),('constant','ImplementationModule')), ((13,'name'),('concat',((3,'name'),('concat',(('constant','_'),('concat',((5,'name'),('concat',(('constant','_'),('concat',((7,'name'),('constant','__instance')))))))))))), ((13,'__ApplyAttribute'),('constant','GlobalComponentInstanceDeclaration')), ((15,'__ApplyAttribute'),('constant','TypeDefComponentStruct')), ]
| mit | -5,395,848,567,690,402,000 | 45.516129 | 408 | 0.564155 | false |
nullpixel/litecord | litecord/objects/guild.py | 1 | 12297 | import datetime
import logging
import collections
from .base import LitecordObject
from .member import Member
from ..snowflake import snowflake_time
from ..utils import dt_to_json
from ..enums import ChannelType
log = logging.getLogger(__name__)
BareGuild = collections.namedtuple('BareGuild', 'id')
class Guild(LitecordObject):
"""A general guild.
Parameters
----------
server: :class:`LitecordServer`
Server instance.
raw: dict
Raw gulid data.
Attributes
----------
_raw: dict
Raw guild data.
id: int
The guild's snowflake ID.
name: str
Guild's name.
icons: dict
Contains two keys: ``"icon"`` and ``"splash"``.
created_at: datetime.datetime
Guild's creation date.
owner_id: int
Guild owner's ID.
owner: :class:`User`
        User instance of the guild owner; can be ``None``.
region: str
Guild's voice region.
features: list[str]
Features this guild has.
channel_ids: list[int]
Snowflake IDs that reference channels of the guild.
channels: dict[int, :class:`Channel`]
Channels this guild has.
role_ids: list[int]
Snowflake IDs that reference roles of the guild.
roles: dict[int, :class:`Role`]
Roles of the guild.
member_ids: list[int]
Guild member snowflake IDs.
members: dict[int, :class:`Member`]
Members this guild has.
member_count: int
Amount of members in this guild.
banned_ids: list[str]
User IDs that are banned in this guild.
_viewers: list[int]
List of user IDs that are viewers of this guild and will have specific
guild events dispatched to them.
"""
__slots__ = ('_raw', 'id', 'owner_id','created_at', 'members', 'roles', 'channels', 'icons',
'emojis', '_viewers', 'region', 'features', 'channel_ids', 'role_ids', 'member_ids',
'banned_ids', 'member_count', 'owner')
def __init__(self, server, raw):
super().__init__(server)
self._raw = raw
self.id = int(raw['guild_id'])
self.created_at = self.to_timestamp(self.id)
self._needs_update = False
self.members = {}
self.roles = {}
self.channels = {}
self.icons = {'splash': None}
# one day... one day.
self.emojis = []
self._viewers = []
self._update(raw)
def _update_caches(self, raw):
for channel_id in list(self.channel_ids):
channel = self.guild_man.get_channel(channel_id)
if channel is None:
log.info('Channel %d not found, requesting update', channel_id)
self.channel_ids.remove(channel_id)
self._needs_update = True
continue
channel.guild = self
self.channels[channel.id] = channel
for member_id in self.member_ids:
user = self.server.get_user(member_id)
if user is None:
log.warning('user %d not found', member_id)
continue
raw_member = self.guild_man.raw_members[self.id].get(user.id)
if raw_member is None and (user.id in self.members):
del self.members[user.id]
member = Member(self.server, self, user, raw_member)
self.members[member.id] = member
for role_id in list(self.role_ids):
role = self.guild_man.get_role(role_id)
if role is None:
log.info('Role %d not found, requesting update', role_id)
self.role_ids.remove(role_id)
self._needs_update = True
continue
role.guild = self
self.roles[role.id] = role
def _update(self, raw):
self.name = raw['name']
self.icons['icon'] = raw['icon']
self.owner_id = int(raw['owner_id'])
self.region = raw['region']
self.features = raw['features']
self.channel_ids = raw['channel_ids']
self.member_ids = raw['member_ids']
self.role_ids = raw['role_ids']
self.banned_ids = raw.get('bans', [])
self._update_caches(raw)
self.member_count = len(self.members)
self.owner = self.members.get(self.owner_id)
if self.owner is None:
log.error('Guild %d without owner(%d)!', self.id, self.owner_id)
def __repr__(self):
return f'<Guild id={self.id} name={self.name!r} region={self.region} ' \
f'member_count={self.member_count}>'
def __eq__(self, other):
return isinstance(other, Guild) and other.id == self.id
def mark_watcher(self, user_id):
"""Mark a user ID as a viewer in that guild, meaning it will receive
        events from that guild using :py:meth:`Guild.dispatch`.
"""
user_id = int(user_id)
try:
self._viewers.index(user_id)
        except ValueError:
self._viewers.append(user_id)
log.debug(f'Marked {user_id} as watcher of {self!r}')
def unmark_watcher(self, user_id):
"""Unmark user from being a viewer in this guild."""
user_id = int(user_id)
try:
self._viewers.remove(user_id)
log.debug(f'Unmarked {user_id} as watcher of {self!r}')
        except ValueError:
pass
def all_channels(self):
"""Yield all channels from a guild"""
for channel in self.channels.values():
yield channel
@property
def voice_channels(self):
"""Yield all voice channels from a guild."""
for channel in self.all_channels():
if channel.type == ChannelType.GUILD_VOICE:
yield channel
@property
def text_channels(self):
for channel in self.all_channels():
if channel.type == ChannelType.GUILD_TEXT:
yield channel
def all_members(self):
"""Yield all members from a guild"""
for member in self.members.values():
yield member
@property
def viewers(self):
"""Yield all members that are viewers of this guild.
Keep in mind that :attr:`Guild.viewers` is different from :py:meth:`Guild.online_members`.
Members are viewers automatically, but if they are Atomic-Discord clients,
        they only *are* viewers if they send an OP 12 Guild Sync (:py:meth:`Connection.guild_sync_handler`)
to the gateway.
"""
for member in self.members.values():
try:
self._viewers.index(member.id)
yield member
            except ValueError:
pass
@property
def online_members(self):
"""Yield all members that have an identified connection"""
for member in self.members.values():
if member.user.online:
yield member
@property
def presences(self):
"""Returns a list of :class:`Presence` objects for all online members."""
return [self.server.presence.get_presence(self.id, member.id) \
for member in self.online_members]
async def _dispatch(self, evt_name, evt_data) -> int:
"""Dispatch an event to all guild viewers.
Parameters
----------
evt_name: str
Event name.
evt_data: dict
Event data.
Returns
-------
int:
Total number of members that this event was dispatched to.
"""
total, dispatched = 0, 0
for member in self.viewers:
success = await member.dispatch(evt_name, evt_data)
if not success:
self.unmark_watcher(member.id)
else:
dispatched += 1
total += 1
        log.debug(f'Dispatched {evt_name} to {dispatched}/{total} guild viewers')
return dispatched
async def dispatch(self, evt_name, evt_data):
return self.server.loop.create_task(self._dispatch(evt_name, evt_data))
async def add_member(self, user):
"""Add a :class:`User` to a guild.
Returns
-------
:class:`Member`.
"""
return (await self.guild_man.add_member(self, user))
async def ban(self, user, delete_days=None):
"""Ban a user from the guild.
Raises
------
Exception on failure.
"""
await self.guild_man.ban_user(self, user, delete_days)
async def unban(self, user):
"""Unban a user from the guild.
Raises
------
Exception on failure.
"""
await self.guild_man.unban_user(self, user)
async def edit(self, edit_payload):
"""Edit a guild.
Returns
-------
:class:`Guild`
The edited guild as a object.
"""
return await self.guild_man.edit_guild(self, edit_payload)
async def create_channel(self, chan_create_payload):
"""Create a channel in a guild.
Returns
-------
:class:`Channel`
New channel.
"""
return await self.guild_man.create_channel(self, chan_create_payload)
async def delete(self):
"""Delete a guild."""
return await self.guild_man.delete_guild(self)
@property
def as_json(self):
d = {
'id': str(self.id),
'name': self.name,
'icon': self.icons['icon'],
'splash': self.icons['splash'],
'owner_id': str(self.owner_id),
'region': self.region,
# TODO: call afk_channel_id from guild settings
# object(also todo: make guild settings object)
'afk_channel_id': '00',
# 5 minutes default
'afk_timeout': 300000,
# TODO: how are these supposed to even work?
'embed_enabled': None,
'embed_channel_id': None,
'verification_level': 0,
'explicit_content_filter': 2,
'default_message_notifications': 0,
'roles': self.iter_json(self.roles),
'emojis': self.emojis,
'features': self.features,
'mfa_level': 0,
# those fields are only in the GUILD_CREATE event
# but we can send them anyways :')
# usually clients ignore this, so we don't need to worry
'joined_at': dt_to_json(self.created_at),
# TODO: make as_json custom per user
# so we can implement this as we can.
'large': self.member_count > 250,
'unavailable': False,
# TODO: maybe make member_count a property we
# can get async-y?
# guild_man.member_coll.count({'guild_id': str(self.id)})
# will help a ton on this.
'member_count': self.member_count,
'voice_states': [],
#'voice_states': self.voice_states,
# arrays of stuff
'members': self.iter_json(self.members),
'channels': self.iter_json(self.channels),
'presences': [p.as_json for p in self.presences],
}
if self.owner.user.bot:
d['application_id'] = self.owner.id
return d
@property
def default_settings(self):
return {
'guild_id': str(self.id),
'suppress_everyone': False,
'muted': False,
'mobile_push': False,
'message_notifications': 1,
'channel_overrides': [],
#'channel_overrides': [
# {
# 'channel_id': str(c.id),
# 'muted': False,
# 'message_notifications': 3,
# } for c in self.text_channels
#],
}
@property
def as_invite(self):
return {
'id': str(self.id),
'name': self.name,
'icon': self.icons['icon'],
'splash': self.icons['splash'],
}
def user_guild(self, uid):
return {
'id': str(self.id),
'name': self.name,
'icon': self.icons['icon'],
'owner': self.owner_id == uid,
'permissions': self.members.get(uid).perm,
}
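# Rough dispatch-flow sketch (hypothetical guild/member objects, shown only as
# commented-out example code inside an async context):
#
#     guild.mark_watcher(member.id)                 # member now receives guild events
#     await guild.dispatch('GUILD_UPDATE', guild.as_json)
#     guild.unmark_watcher(member.id)               # stop sending events to that member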
| mit | 4,421,614,363,433,700,400 | 28.992683 | 105 | 0.540132 | false |
kg-bot/SupyBot | plugins/Gateway/gwplain.py | 1 | 2628 | ###
# Copyright (c) 2005, Ali Afshar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from twisted.protocols import basic
from twisted.internet import protocol, reactor
from gwbase import BasePlugin
class SBPlainProtocol(basic.LineReceiver):
def connectionMade(self):
self.cb.authorised(self.user)
self.cb._connections.append(self.user)
def lineReceived(self, line):
if len(line):
self.factory.cb.cb.receivedCommand(line, self.user)
class PlainUser:
def __init__(self, cb, p, addr):
self.cb = cb
self.p = p
self.hostmask = self.cb.cb.buildAnonymousHostmask(self.cb.PROTOCOL,
addr.host)
def sendReply(self, reply, inreply):
self.p.transport.write('%s\r\n' % reply)
def close(self):
self.p.transport.loseConnection()
self.cb._connections.remove(self)
class PlainGW(BasePlugin):
PROTOCOL = "plain"
USESSL = False
PORTALISE = False
    DEFAULT_PORT = 9021
CONFIG_EXTRA = []
class FactoryClass(protocol.ServerFactory):
protocol = SBPlainProtocol
def buildProtocol(self, addr):
p = protocol.ServerFactory.buildProtocol(self, addr)
p.user = PlainUser(self.cb, p, addr)
return p
| gpl-3.0 | -8,137,286,768,114,357,000 | 34.04 | 79 | 0.753044 | false |
m46kruk/openavb-pentests | scapy/ptp_fuzz.py | 1 | 1454 | #!/usr/bin/env python
# ptp_fuzz.py - Fuzz a PTP implementation
# Copyright (C) 2016 Matthias Kruk
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
def usage(argv0):
print("Usage: %s iface dest\n\nArguments:\n iface Outgoing network interface\n dest Destination MAC address\n" % (argv0, ))
if __name__ == "__main__":
if len(sys.argv) < 3:
usage(sys.argv[0])
exit(1)
for arg in sys.argv:
if arg == "-h" or arg == "--help":
usage(sys.argv[0])
exit(1)
from scapy.all import *
from proto.ptp import *
import netifaces
eth_addr = netifaces.ifaddresses(sys.argv[1])[netifaces.AF_LINK][0]["addr"]
sendp(Ether(src=eth_addr,dst=sys.argv[2])/fuzz(PTP()), loop=1, iface=sys.argv[1])
exit(0)
| gpl-2.0 | -4,596,063,875,462,127,600 | 32.813953 | 136 | 0.678129 | false |
mfrey/baltimore | experiment/experimentmanager.py | 1 | 5964 | #!/usr/bin/env python2.7
import os
from . import runner
import logging
import itertools
#from queue import Empty
from Queue import Empty
from multiprocessing import Queue, Pool
from .experimentmanagerworker import ExperimentManagerWorker
from parser.omnetconfigurationfileparser import OMNeTConfigurationFileParser
class ExperimentManager:
def __init__(self, baltimore_revision, libara_revision):
self.logger = logging.getLogger('baltimore.experiment.ExperimentManager')
self.logger.debug('creating an instance of ExperimentManager')
self.baltimore_revision = baltimore_revision
self.libara_revision = libara_revision
def check_result_files(self, directory, scenarios):
result = self._check_result_directory_for_results(directory, scenarios)
non_existing_scenarios = [scenario[0] for scenario in result if scenario[1] == False]
for scenario in non_existing_scenarios:
self.logger.error("There is no scenario " + scenario + " to analyze!")
# return a list of the remaining scenarios
return list(set(scenarios) - set(non_existing_scenarios))
def run_simulations(self, configuration):
ned_path_raw = configuration['ned_path']
omnetpp_ini_raw = configuration['omnetpp_ini']
self.pool = Pool(configuration['cpu_cores'])
for experiment in configuration['experiments']:
scenarios = experiment[0]
self.logger.debug("scenarios " + str(scenarios))
location = experiment[1]
configuration['repetitions'] = experiment[2]
configuration['ned_path'] = ned_path_raw + configuration['ara_home'] + '/simulations/' + location
configuration['omnetpp_ini'] = omnetpp_ini_raw + location + '/omnetpp.ini'
# build up a tuple consisting of scenarios and repetitions
argument = itertools.product(scenarios, list(range(experiment[2])), [configuration], [location])
# run the simulations
self.pool.map(runner.run_simulation, argument)
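    # itertools.product above yields one work item per (scenario, repetition)
    # pair, so runner.run_simulation receives tuples shaped roughly like
    # (example names only):
    #
    #     ('scenario_a', 0, configuration, 'some/location')
    #     ('scenario_a', 1, configuration, 'some/location')
    #     ('scenario_b', 0, configuration, 'some/location')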
def process(self, configuration, experiment, scenarios, arguments):
is_verbose = arguments.verbose
directory = configuration['cwd'] + '/' + experiment
if os.path.exists(directory + '/omnetpp.ini'):
self.read_omnetppini(directory + '/omnetpp.ini', is_verbose)
if is_verbose:
self._print_general_settings(self.omnetpp_ini.get_section('General'))
queue = Queue()
jobs = []
# single scenario to handle
if len(scenarios) == 1 and scenarios[0] != '':
process = ExperimentManagerWorker(configuration, experiment, scenarios[0], queue, arguments)
jobs.append(process)
process.start()
# multiple scenarios in a directory
else:
if len(scenarios) == 1 and scenarios[0] == '':
scenarios = self._get_scenarios(directory + '/results')
for scenario in scenarios:
process = ExperimentManagerWorker(configuration, experiment, scenario, queue, arguments)
jobs.append(process)
process.start()
# storing the results in an class attribute
for job in jobs:
job.join()
try:
result = queue.get(True)
self.logger.debug("The process " + job.scenario_name + " returned with the status " + result)
except Empty:
self.logger.error("Could not retrieve result data for scenario " + job.scenario_name + " (might have failed earlier)")
def read_omnetppini(self, file_path, is_verbose):
# TODO throw error if verbose = True
self.omnetpp_configuration = OMNeTConfigurationFileParser(file_path)
self.omnetpp_ini = self.omnetpp_configuration.get_section("General")
self.omnetpp_ini_checksum = self.omnetpp_configuration.omnetpp_ini_hash
self.standard_ini_checksum = self.omnetpp_configuration.standard_ini_hash
def _get_scenarios(self, directory):
scenarios = []
for file_name in os.listdir(directory):
if file_name.endswith('sca'):
scenario = file_name.split('-')[0]
if scenario not in scenarios:
scenarios.append(scenario)
return scenarios
def _check_result_directory_for_results(self, directory, scenarios):
existing_scenarios = self._get_scenarios(directory)
return [(scenario, scenario in existing_scenarios) for scenario in scenarios]
def check_result_directory(self, directory, scenarios):
existing_scenarios = self._get_scenarios(directory)
for scenario in scenarios:
if scenario in existing_scenarios:
print("There seems already to be a scenario ", scenario, " in the results directory")
reply = input("Shall the existing scenario be removed? [Y/n] ").lower()
if reply.startswith("y"):
self._remove_scenario(directory, scenario)
def _remove_scenario(self, directory, scenario):
files = [f for f in os.listdir(directory) if f.startswith(scenario + "-")]
for f in files:
os.remove(directory + '/' + f)
def result_dir_exists(self, directory):
if not os.path.exists(directory + '/results'):
os.makedirs(directory + '/results')
return False
return True
def _print_general_settings(self, general_settings):
self._print_tuple(general_settings)
def _print_scenario_settings(self, scenario_settings):
self._print_tuple(scenario_settings)
def _print_tuple(self, settings):
for setting in settings:
print(setting[0], ' = ', setting[1])
def __getstate__(self):
d = dict(self.__dict__)
del d['logger']
return d
def __setstate__(self, d):
self.__dict__.update(d)
| gpl-3.0 | 3,060,395,181,642,958,000 | 40.131034 | 134 | 0.63397 | false |
diegocortassa/TACTIC | src/bin/startup_dev.py | 1 | 5317 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
from __future__ import print_function
import os, sys
# set up environment
os.environ['TACTIC_APP_SERVER'] = "cherrypy"
os.environ['TACTIC_MODE'] = "development"
os.environ['TACTIC_CLEANUP'] = "true"
import tacticenv
from pyasm.common import Environment, Config
tactic_install_dir = tacticenv.get_install_dir()
tactic_site_dir = tacticenv.get_site_dir()
sys.path.insert(0, "%s/src" % tactic_install_dir)
sys.path.insert(0, "%s/tactic_sites" % tactic_install_dir)
sys.path.insert(0, tactic_site_dir)
def startup(port, server=""):
from tactic.startup import FirstRunInit
cmd = FirstRunInit()
cmd.execute()
log_dir = "%s/log" % Environment.get_tmp_dir()
if not os.path.exists(log_dir):
os.makedirs(log_dir)
try:
file = open("%s/pid.%s" % (log_dir,port), "w")
pid = os.getpid()
file.write(str(pid))
file.close()
except IOError as e:
if e.errno == 13:
            print()
            print("Permission error opening the file [%s/pid.%s]." % (log_dir,port))
            print()
if os.name=='nt':
print("You may need to run this shell as the Administrator.")
else:
print("The file should be owned by the same user that runs this startup_dev.py process.")
sys.exit(2)
if os.name != 'nt' and os.getuid() == 0:
        print()
        print("You should not run this as root. Run it as the Web server process's user. e.g. apache")
        print()
sys.exit(0)
import cherrypy
cherrypy_major_version = int(cherrypy.__version__.split('.')[0])
if cherrypy_major_version >= 3:
from pyasm.web.cherrypy30_startup import CherryPyStartup
startup = CherryPyStartup(port)
startup.set_config('global', 'server.socket_port', port)
startup.set_config('global', 'server.socket_queue_size', 100)
startup.set_config('global', 'server.thread_pool', 50)
#startup.set_config('global', 'server.socket_host', server)
startup.set_config('global', 'log.screen', False)
startup.set_config('global', 'request.show_tracebacks', True)
startup.set_config('global', 'server.log_unhandled_tracebacks', True)
startup.set_config('global', 'engine.autoreload_on', True)
"""
access_log = cherrypy.log.access_log
for handler in tuple(access_log.handlers):
access_log.removeHandler(handler)
cherrypy.log.error_log.propagate = False
cherrypy.log.access_log.propagate = False
"""
else:
from pyasm.web.cherrypy_startup import CherryPyStartup
startup = CherryPyStartup(port)
startup.set_config('global', 'server.environment', 'development')
startup.set_config('global', 'server.socket_port', port)
startup.set_config('global', 'server.log_to_screen', True)
startup.set_config('global', 'server.socket_queue_size', 100)
startup.set_config('global', 'server.thread_pool', 100)
#startup.set_config('global', 'server.socket_host', server)
startup.set_config('global', 'server.log_tracebacks', True)
startup.set_config('global', 'server.log_unhandled_tracebacks', True)
hostname = None
server_default = '127.0.0.1'
if not server:
hostname = Config.get_value("install", "hostname")
if hostname == 'localhost':
# swap it to IP to suppress CherryPy Warning
hostname = server_default
if hostname:
# special host name for IIS which can't load balance across many
# ports with the same service
hostname = hostname.replace("{port}", str(port))
server = hostname
else:
server = server_default
startup.set_config('global', 'server.socket_host', server)
startup.execute()
if __name__ == '__main__':
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-p", "--port", dest="port", help="Port to run TACTIC server on", default=8081)
parser.add_option("-s", "--server", dest="server", help="Host name TACTIC server will run under")
parser.add_option("-o", "--data_dir", dest="data_dir", help="Override location of TACTIC_DATA_DIR")
parser.add_option("-d", "--python_path", dest="python_path", help="Host name TACTIC server will run under", default="")
(options, args) = parser.parse_args()
# add the optional path to the python path
if options.python_path:
paths = options.python_path.split("|")
paths.reverse()
for path in paths:
sys.path.insert(0, path)
if len(args) == 1:
port = int(args[0])
else:
port = int(options.port)
data_dir = options.data_dir
if data_dir:
if not os.path.exists(data_dir):
os.makedirs(data_dir)
os.environ["TACTIC_DATA_DIR"] = data_dir
startup(port, options.server)
| epl-1.0 | -5,019,779,552,770,964,000 | 30.276471 | 123 | 0.610307 | false |
YuvaAthur/jkernel-yuva | wrapper.py | 1 | 3704 | from __future__ import print_function
from ctypes import *
import sys
import os
# CUSTOMIZE HERE
# J binary directory (the one with all the binaries)
j_bin_path = os.path.expanduser("/Applications/j64-804/bin")
def get_libj(binpath):
if os.name == "nt":
lib_path = binpath + "/j.dll" # Windows
elif sys.platform == "darwin":
lib_path = binpath + "/libj.dylib" # OSX
else:
lib_path = binpath + "/libj.so" # Linux
libj = cdll.LoadLibrary(lib_path)
libj.JInit.restype = c_void_p
libj.JSM.argtypes = [c_void_p, c_void_p]
libj.JDo.argtypes = [c_void_p, c_char_p]
libj.JDo.restype = c_int
libj.JFree.restype = c_int
libj.JFree.argtypes = [c_void_p]
return libj
class JWrapper:
def __init__(self):
#print("JWrapper called")
binpath = j_bin_path
self.libj = get_libj(binpath)
self.j = self.libj.JInit()
# buffer for multiline input,
# for normal line input and J explicit definitions.
self.input_buffer = []
OUTPUT_CALLBACK = CFUNCTYPE(None, c_void_p, c_int, c_char_p)
INPUT_CALLBACK = CFUNCTYPE(c_char_p, c_void_p, c_char_p)
def output_callback(j, output_type, result):
output_types = [None, "output", "error", "output log", "assert", "EXIT", "1!:2[2 (wat)"]
self.output_type = output_types[output_type]
self.output = result.decode('utf-8', 'replace')
def input_callback(j, prompt):
if not self.input_buffer:
return b")"
line = self.input_buffer.pop(0)
return line.encode()
callbacks_t = c_void_p*5
callbacks = callbacks_t(
cast(OUTPUT_CALLBACK(output_callback), c_void_p),
0,
cast(INPUT_CALLBACK(input_callback), c_void_p),
0,
c_void_p(3) # defines "console" frontend (for some reason, see jconsole.c, line 128)
)
self.libj.JSM(self.j, callbacks)
self.sendline("ARGV_z_=:''")
self.sendline("BINPATH_z_=:'{}'".format(binpath))
self.sendline("1!:44'{}'".format(binpath))
self.sendline("0!:0 <'profile.ijs'")
self.sendline("(9!:7) 16 17 18 19 20 21 22 23 24 25 26 { a.") # pretty boxes
def close(self):
self.libj.JFree(self.j)
def sendline(self, line):
self.output = None
self.libj.JDo(self.j, c_char_p(line.encode())) #(2) JDo unicode exec error?
# (2) simple line encode return works
# (2) does not work with done the way above :(!)
# (2) self.output = line.encode()
if not self.output:
return ""
return self.output
def sendlines(self, lines):
output = lines
'''
self.input_buffer = eval(lines) # (2) hoping to convert string to array
output = ""
while self.input_buffer:
line = self.input_buffer.pop(0)
output += self.sendline(line)
'''
return output
if __name__ == "__main__":
#def main():
print ("hello")
j = JWrapper()
j.sendline("load 'viewmat'")
j.sendline("load 'bmp'")
#j.sendline("VISIBLE_jviewmat_ =: 0")
#j.sendline("viewmat i. 5 5")
'''
#(2) works
line = '1+1'
print(line)
j.libj.JDo(j.j,line)
print (j.output)
#(2) works with encoding too
lines=[u'1+1','2+2']
output = None
while lines:
line = lines.pop(0)
print ('input is ' + line)
j.libj.JDo(j.j,c_char_p(line.encode()))
print (j.output)
'''
#(2) testing on method call - works
lines=[u'1+1','2+2']
print (j.sendlines(lines))
j.close()
| gpl-3.0 | 543,247,820,746,448,600 | 28.870968 | 100 | 0.554266 | false |
openstack/oslo.service | oslo_service/tests/test_loopingcall.py | 1 | 16173 | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from unittest import mock
import eventlet
from eventlet.green import threading as greenthreading
from oslotest import base as test_base
from oslo_service import fixture
from oslo_service import loopingcall
class LoopingCallTestCase(test_base.BaseTestCase):
def setUp(self):
super(LoopingCallTestCase, self).setUp()
self.num_runs = 0
def test_return_true(self):
def _raise_it():
raise loopingcall.LoopingCallDone(True)
timer = loopingcall.FixedIntervalLoopingCall(_raise_it)
self.assertTrue(timer.start(interval=0.5).wait())
def test_monotonic_timer(self):
def _raise_it():
clock = eventlet.hubs.get_hub().clock
ok = (clock == time.monotonic)
raise loopingcall.LoopingCallDone(ok)
timer = loopingcall.FixedIntervalLoopingCall(_raise_it)
self.assertTrue(timer.start(interval=0.5).wait())
def test_eventlet_clock(self):
# Make sure that by default the oslo_service.service_hub() kicks in,
# test in the main thread
hub = eventlet.hubs.get_hub()
self.assertEqual(time.monotonic,
hub.clock)
def test_return_false(self):
def _raise_it():
raise loopingcall.LoopingCallDone(False)
timer = loopingcall.FixedIntervalLoopingCall(_raise_it)
self.assertFalse(timer.start(interval=0.5).wait())
def test_terminate_on_exception(self):
def _raise_it():
raise RuntimeError()
timer = loopingcall.FixedIntervalLoopingCall(_raise_it)
self.assertRaises(RuntimeError, timer.start(interval=0.5).wait)
def _raise_and_then_done(self):
if self.num_runs == 0:
raise loopingcall.LoopingCallDone(False)
else:
self.num_runs = self.num_runs - 1
raise RuntimeError()
def test_do_not_stop_on_exception(self):
self.useFixture(fixture.SleepFixture())
self.num_runs = 2
timer = loopingcall.FixedIntervalLoopingCall(self._raise_and_then_done)
res = timer.start(interval=0.5, stop_on_exception=False).wait()
self.assertFalse(res)
def _wait_for_zero(self):
"""Called at an interval until num_runs == 0."""
if self.num_runs == 0:
raise loopingcall.LoopingCallDone(False)
else:
self.num_runs = self.num_runs - 1
def test_no_double_start(self):
wait_ev = greenthreading.Event()
def _run_forever_until_set():
if wait_ev.is_set():
raise loopingcall.LoopingCallDone(True)
timer = loopingcall.FixedIntervalLoopingCall(_run_forever_until_set)
timer.start(interval=0.01)
self.assertRaises(RuntimeError, timer.start, interval=0.01)
wait_ev.set()
timer.wait()
def test_no_double_stop(self):
def _raise_it():
raise loopingcall.LoopingCallDone(False)
timer = loopingcall.FixedIntervalLoopingCall(_raise_it)
timer.start(interval=0.5)
timer.stop()
timer.stop()
def test_repeat(self):
self.useFixture(fixture.SleepFixture())
self.num_runs = 2
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_zero)
self.assertFalse(timer.start(interval=0.5).wait())
def assertAlmostEqual(self, expected, actual, precision=7, message=None):
self.assertEqual(0, round(actual - expected, precision), message)
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._elapsed')
def test_interval_adjustment(self, elapsed_mock, sleep_mock):
"""Ensure the interval is adjusted to account for task duration."""
self.num_runs = 3
second = 1
smidgen = 0.01
elapsed_mock.side_effect = [second - smidgen,
second + second,
second + smidgen,
]
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_zero)
timer.start(interval=1.01).wait()
expected_calls = [0.02, 0.00, 0.00]
for i, call in enumerate(sleep_mock.call_args_list):
expected = expected_calls[i]
args, kwargs = call
actual = args[0]
message = ('Call #%d, expected: %s, actual: %s' %
(i, expected, actual))
self.assertAlmostEqual(expected, actual, message=message)
def test_looping_call_timed_out(self):
def _fake_task():
pass
timer = loopingcall.FixedIntervalWithTimeoutLoopingCall(_fake_task)
self.assertRaises(loopingcall.LoopingCallTimeOut,
timer.start(interval=0.1, timeout=0.3).wait)
class DynamicLoopingCallTestCase(test_base.BaseTestCase):
def setUp(self):
super(DynamicLoopingCallTestCase, self).setUp()
self.num_runs = 0
def test_return_true(self):
def _raise_it():
raise loopingcall.LoopingCallDone(True)
timer = loopingcall.DynamicLoopingCall(_raise_it)
self.assertTrue(timer.start().wait())
def test_monotonic_timer(self):
def _raise_it():
clock = eventlet.hubs.get_hub().clock
ok = (clock == time.monotonic)
raise loopingcall.LoopingCallDone(ok)
timer = loopingcall.DynamicLoopingCall(_raise_it)
self.assertTrue(timer.start().wait())
def test_no_double_start(self):
wait_ev = greenthreading.Event()
def _run_forever_until_set():
if wait_ev.is_set():
raise loopingcall.LoopingCallDone(True)
else:
return 0.01
timer = loopingcall.DynamicLoopingCall(_run_forever_until_set)
timer.start()
self.assertRaises(RuntimeError, timer.start)
wait_ev.set()
timer.wait()
def test_return_false(self):
def _raise_it():
raise loopingcall.LoopingCallDone(False)
timer = loopingcall.DynamicLoopingCall(_raise_it)
self.assertFalse(timer.start().wait())
def test_terminate_on_exception(self):
def _raise_it():
raise RuntimeError()
timer = loopingcall.DynamicLoopingCall(_raise_it)
self.assertRaises(RuntimeError, timer.start().wait)
def _raise_and_then_done(self):
if self.num_runs == 0:
raise loopingcall.LoopingCallDone(False)
else:
self.num_runs = self.num_runs - 1
raise RuntimeError()
def test_do_not_stop_on_exception(self):
self.useFixture(fixture.SleepFixture())
self.num_runs = 2
timer = loopingcall.DynamicLoopingCall(self._raise_and_then_done)
timer.start(stop_on_exception=False).wait()
def _wait_for_zero(self):
"""Called at an interval until num_runs == 0."""
if self.num_runs == 0:
raise loopingcall.LoopingCallDone(False)
else:
self.num_runs = self.num_runs - 1
sleep_for = self.num_runs * 10 + 1 # dynamic duration
return sleep_for
def test_repeat(self):
self.useFixture(fixture.SleepFixture())
self.num_runs = 2
timer = loopingcall.DynamicLoopingCall(self._wait_for_zero)
self.assertFalse(timer.start().wait())
def _timeout_task_without_any_return(self):
pass
def test_timeout_task_without_return_and_max_periodic(self):
timer = loopingcall.DynamicLoopingCall(
self._timeout_task_without_any_return
)
self.assertRaises(RuntimeError, timer.start().wait)
def _timeout_task_without_return_but_with_done(self):
if self.num_runs == 0:
raise loopingcall.LoopingCallDone(False)
else:
self.num_runs = self.num_runs - 1
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_timeout_task_without_return(self, sleep_mock):
self.num_runs = 1
timer = loopingcall.DynamicLoopingCall(
self._timeout_task_without_return_but_with_done
)
timer.start(periodic_interval_max=5).wait()
sleep_mock.assert_has_calls([mock.call(5)])
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_interval_adjustment(self, sleep_mock):
self.num_runs = 2
timer = loopingcall.DynamicLoopingCall(self._wait_for_zero)
timer.start(periodic_interval_max=5).wait()
sleep_mock.assert_has_calls([mock.call(5), mock.call(1)])
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_initial_delay(self, sleep_mock):
self.num_runs = 1
timer = loopingcall.DynamicLoopingCall(self._wait_for_zero)
timer.start(initial_delay=3).wait()
sleep_mock.assert_has_calls([mock.call(3), mock.call(1)])
class TestBackOffLoopingCall(test_base.BaseTestCase):
@mock.patch('random.SystemRandom.gauss')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_exponential_backoff(self, sleep_mock, random_mock):
def false():
return False
random_mock.return_value = .8
self.assertRaises(loopingcall.LoopingCallTimeOut,
loopingcall.BackOffLoopingCall(false).start()
.wait)
expected_times = [mock.call(1.6),
mock.call(2.4000000000000004),
mock.call(3.6),
mock.call(5.4),
mock.call(8.1),
mock.call(12.15),
mock.call(18.225),
mock.call(27.337500000000002),
mock.call(41.00625),
mock.call(61.509375000000006),
mock.call(92.26406250000001)]
self.assertEqual(expected_times, sleep_mock.call_args_list)
@mock.patch('random.SystemRandom.gauss')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_exponential_backoff_negative_value(self, sleep_mock, random_mock):
def false():
return False
# random.gauss() can return negative values
random_mock.return_value = -.8
self.assertRaises(loopingcall.LoopingCallTimeOut,
loopingcall.BackOffLoopingCall(false).start()
.wait)
expected_times = [mock.call(1.6),
mock.call(2.4000000000000004),
mock.call(3.6),
mock.call(5.4),
mock.call(8.1),
mock.call(12.15),
mock.call(18.225),
mock.call(27.337500000000002),
mock.call(41.00625),
mock.call(61.509375000000006),
mock.call(92.26406250000001)]
self.assertEqual(expected_times, sleep_mock.call_args_list)
@mock.patch('random.SystemRandom.gauss')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_no_backoff(self, sleep_mock, random_mock):
random_mock.return_value = 1
func = mock.Mock()
        # Succeed three times, then finish the looping call with a return value
func.side_effect = [True, True, True, loopingcall.LoopingCallDone(
retvalue='return value')]
retvalue = loopingcall.BackOffLoopingCall(func).start().wait()
expected_times = [mock.call(1), mock.call(1), mock.call(1)]
self.assertEqual(expected_times, sleep_mock.call_args_list)
        self.assertEqual('return value', retvalue)
@mock.patch('random.SystemRandom.gauss')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_no_sleep(self, sleep_mock, random_mock):
# Any call that executes properly the first time shouldn't sleep
random_mock.return_value = 1
func = mock.Mock()
        # Finish immediately on the first call
func.side_effect = loopingcall.LoopingCallDone(retvalue='return value')
retvalue = loopingcall.BackOffLoopingCall(func).start().wait()
self.assertFalse(sleep_mock.called)
        self.assertEqual('return value', retvalue)
@mock.patch('random.SystemRandom.gauss')
@mock.patch('oslo_service.loopingcall.LoopingCallBase._sleep')
def test_max_interval(self, sleep_mock, random_mock):
def false():
return False
random_mock.return_value = .8
self.assertRaises(loopingcall.LoopingCallTimeOut,
loopingcall.BackOffLoopingCall(false).start(
max_interval=60)
.wait)
expected_times = [mock.call(1.6),
mock.call(2.4000000000000004),
mock.call(3.6),
mock.call(5.4),
mock.call(8.1),
mock.call(12.15),
mock.call(18.225),
mock.call(27.337500000000002),
mock.call(41.00625),
mock.call(60),
mock.call(60),
mock.call(60)]
self.assertEqual(expected_times, sleep_mock.call_args_list)
class AnException(Exception):
pass
class UnknownException(Exception):
pass
class RetryDecoratorTest(test_base.BaseTestCase):
"""Tests for retry decorator class."""
def test_retry(self):
result = "RESULT"
@loopingcall.RetryDecorator()
def func(*args, **kwargs):
return result
self.assertEqual(result, func())
def func2(*args, **kwargs):
return result
retry = loopingcall.RetryDecorator()
self.assertEqual(result, retry(func2)())
self.assertTrue(retry._retry_count == 0)
def test_retry_with_expected_exceptions(self):
result = "RESULT"
responses = [AnException(None),
AnException(None),
result]
def func(*args, **kwargs):
response = responses.pop(0)
if isinstance(response, Exception):
raise response
return response
sleep_time_incr = 0.01
retry_count = 2
retry = loopingcall.RetryDecorator(10, sleep_time_incr, 10,
(AnException,))
self.assertEqual(result, retry(func)())
self.assertTrue(retry._retry_count == retry_count)
self.assertEqual(retry_count * sleep_time_incr, retry._sleep_time)
def test_retry_with_max_retries(self):
responses = [AnException(None),
AnException(None),
AnException(None)]
def func(*args, **kwargs):
response = responses.pop(0)
if isinstance(response, Exception):
raise response
return response
retry = loopingcall.RetryDecorator(2, 0, 0,
(AnException,))
self.assertRaises(AnException, retry(func))
self.assertTrue(retry._retry_count == 2)
def test_retry_with_unexpected_exception(self):
def func(*args, **kwargs):
raise UnknownException(None)
retry = loopingcall.RetryDecorator()
self.assertRaises(UnknownException, retry(func))
self.assertTrue(retry._retry_count == 0)
| apache-2.0 | -2,603,906,600,743,247,000 | 33.930886 | 79 | 0.593273 | false |
ephes/trivago2015 | config/urls.py | 1 | 1388 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='pages/index.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("trivago2015.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
url(r'^events/', include("trivago2015.events.urls", namespace="events")),
url(r'^chats/', include("trivago2015.chats.urls", namespace="chats")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development; just visit
    # these URLs in a browser to see what the error pages look like.
urlpatterns += [
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
]
| bsd-3-clause | 2,559,333,641,438,452,000 | 36.513514 | 91 | 0.680836 | false |
mrpau/kolibri | kolibri/core/tasks/utils.py | 1 | 5577 | import importlib
import logging
import os
import time
import uuid
try:
from thread import get_ident
except ImportError:
from threading import get_ident
from kolibri.core.tasks import compat
from kolibri.core.utils.cache import process_cache
# An object on which to store data about the current job
# So far the only use is to track the job, but other metadata
# could be added.
current_state_tracker = compat.local()
def get_current_job():
return getattr(current_state_tracker, "job", None)
def stringify_func(func):
if not callable(func):
raise TypeError(
"function {} passed to stringify_func isn't a function!".format(func)
)
fqn = "{module}.{funcname}".format(module=func.__module__, funcname=func.__name__)
return fqn
def import_stringified_func(funcstring):
"""
Import a string that represents a module and function, e.g. {module}.{funcname}.
Given a function f, import_stringified_func(stringify_func(f)) will return the same function.
:param funcstring: String to try to import
:return: callable
"""
if not isinstance(funcstring, str):
raise TypeError("Argument must be a string")
modulestring, funcname = funcstring.rsplit(".", 1)
mod = importlib.import_module(modulestring)
func = getattr(mod, funcname)
return func
class InfiniteLoopThread(compat.Thread):
"""A class that runs a given function an infinite number of times, until told to shut down."""
DEFAULT_TIMEOUT_SECONDS = 0.2
def __init__(self, func, thread_name, wait_between_runs=1, *args, **kwargs):
"""
Run the given func continuously until either shutdown_event is set, or the python interpreter exits.
:param func: the function to run. This should accept no arguments.
:param thread_name: the name of the thread to use during logging and debugging
:param wait_between_runs: how many seconds to wait in between func calls.
"""
self.shutdown_event = compat.Event()
self.thread_name = thread_name
self.thread_id = uuid.uuid4().hex
self.logger = logging.getLogger(
"{module}".format(module=__name__.split(".")[0])
)
self.full_thread_name = "{thread_name}-{thread_id}".format(
thread_name=self.thread_name, thread_id=self.thread_id
)
super(InfiniteLoopThread, self).__init__(
name=self.full_thread_name, *args, **kwargs
)
self.func = func
self.wait = wait_between_runs
def run(self):
self.logger.debug(
"Started new {name} thread ID#{id}".format(
name=self.thread_name, id=self.thread_id
)
)
while True:
if self.shutdown_event.wait(self.DEFAULT_TIMEOUT_SECONDS):
self.logger.debug(
"{name} shut down event received; closing.".format(
name=self.thread_name
)
)
break
else:
self.main_loop()
continue
def main_loop(self):
"""
        The main loop of a thread. Run one iteration of func if no shutdown event was received within
        the last timeout seconds. Normally this is used to read from a queue; func may return how long
        its execution took, which is used to shorten the wait before the next run - useful if you want
        the function to run at a fixed interval.
:return: None
"""
try:
corrected_time = self.func()
except Exception as e:
self.logger.warning(
"Got an exception running {func}: {e}".format(func=self.func, e=str(e))
)
corrected_time = 0
wait = self.wait - (corrected_time if corrected_time is not None else 0)
if wait > 0:
time.sleep(wait)
def stop(self):
self.shutdown_event.set()
def shutdown(self):
self.stop()
class DiskCacheRLock(object):
"""
Vendored from
https://github.com/grantjenks/python-diskcache/blob/2d1f43ea2be4c82a430d245de6260c3e18059ba1/diskcache/recipes.py
"""
def __init__(self, cache, key, expire=None):
self._cache = cache
self._key = key
self._expire = expire
def acquire(self):
"Acquire lock by incrementing count using spin-lock algorithm."
pid = os.getpid()
tid = get_ident()
pid_tid = "{}-{}".format(pid, tid)
while True:
value, count = self._cache.get(self._key, (None, 0))
if pid_tid == value or count == 0:
self._cache.set(self._key, (pid_tid, count + 1), self._expire)
return
time.sleep(0.001)
def release(self):
"Release lock by decrementing count."
pid = os.getpid()
tid = get_ident()
pid_tid = "{}-{}".format(pid, tid)
value, count = self._cache.get(self._key, default=(None, 0))
is_owned = pid_tid == value and count > 0
assert is_owned, "cannot release un-acquired lock"
self._cache.set(self._key, (value, count - 1), self._expire)
# RLOCK leaves the db connection open after releasing the lock
# Let's ensure it's correctly closed
self._cache.close()
def __enter__(self):
self.acquire()
def __exit__(self, *exc_info):
self.release()
db_task_write_lock = DiskCacheRLock(process_cache, "db_task_write_lock")
| mit | -7,861,053,696,255,299,000 | 30.868571 | 117 | 0.604088 | false |
numansiddique/contrail-controller | src/config/common/zkclient.py | 1 | 14418 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import os
import gevent
import logging
import kazoo.client
import kazoo.exceptions
import kazoo.handlers.gevent
import kazoo.recipe.election
from kazoo.client import KazooState
from kazoo.retry import KazooRetry
from bitarray import bitarray
from cfgm_common.exceptions import ResourceExhaustionError, ResourceExistsError
from gevent.coros import BoundedSemaphore
import uuid
LOG_DIR = '/var/log/contrail/'
class IndexAllocator(object):
def __init__(self, zookeeper_client, path, size=0, start_idx=0,
reverse=False,alloc_list=None, max_alloc=0):
self._size = size
self._start_idx = start_idx
if alloc_list is None:
self._alloc_list = [{'start':start_idx, 'end':start_idx+size}]
else:
sorted_alloc_list = sorted(alloc_list, key=lambda k: k['start'])
self._alloc_list = sorted_alloc_list
alloc_count = len(self._alloc_list)
total_size = 0
size = 0
#check for overlap in alloc_list --TODO
for alloc_idx in range (0, alloc_count -1):
idx_start_addr = self._alloc_list[alloc_idx]['start']
idx_end_addr = self._alloc_list[alloc_idx]['end']
next_start_addr = self._alloc_list[alloc_idx+1]['start']
if next_start_addr <= idx_end_addr:
raise Exception(
'Allocation Lists Overlapping: %s' %(alloc_list))
size += idx_end_addr - idx_start_addr + 1
size += self._alloc_list[alloc_count-1]['end'] - self._alloc_list[alloc_count-1]['start'] + 1
if max_alloc == 0:
self._max_alloc = size
else:
self._max_alloc = max_alloc
self._zookeeper_client = zookeeper_client
self._path = path
self._in_use = bitarray('0')
self._reverse = reverse
for idx in self._zookeeper_client.get_children(path):
idx_int = self._get_bit_from_zk_index(int(idx))
if idx_int >= 0:
self._set_in_use(idx_int)
# end for idx
# end __init__
def _get_zk_index_from_bit(self, idx):
size = idx
if self._reverse:
for alloc in reversed(self._alloc_list):
size -= alloc['end'] - alloc['start'] + 1
if size < 0:
return alloc['start']-size - 1
else:
for alloc in self._alloc_list:
size -= alloc['end'] - alloc['start'] + 1
if size < 0:
return alloc['end']+size + 1
raise ResourceExhaustionError(
'Cannot get zk index from bit %s' %(idx))
# end _get_zk_index
def _get_bit_from_zk_index(self, idx):
size = 0
if self._reverse:
for alloc in reversed(self._alloc_list):
if alloc['start'] <= idx <= alloc['end']:
return alloc['end'] - idx + size
size += alloc['end'] - alloc['start'] + 1
pass
else:
for alloc in self._alloc_list:
if alloc['start'] <= idx <= alloc['end']:
return idx - alloc['start'] + size
size += alloc['end'] - alloc['start'] + 1
return -1
# end _get_bit_from_zk_index
def _set_in_use(self, bitnum):
# if the index is higher than _max_alloc, do not use the bitarray, in
# order to reduce the size of the bitarray. Otherwise, set the bit
# corresponding to idx to 1 and extend the _in_use bitarray if needed
if bitnum > self._max_alloc:
return
if bitnum >= self._in_use.length():
temp = bitarray(bitnum - self._in_use.length())
temp.setall(0)
temp.append('1')
self._in_use.extend(temp)
else:
self._in_use[bitnum] = 1
# end _set_in_use
def _reset_in_use(self, bitnum):
        # if the index is higher than _max_alloc or beyond the current bitarray, do nothing.
        # Otherwise, clear the bit corresponding to idx to mark it as free again.
if bitnum > self._max_alloc:
return
if bitnum >= self._in_use.length():
return
else:
self._in_use[bitnum] = 0
# end _reset_in_use
def set_in_use(self, idx):
bit_idx = self._get_bit_from_zk_index(idx)
if bit_idx < 0:
return
self._set_in_use(bit_idx)
# end set_in_use
def reset_in_use(self, idx):
bit_idx = self._get_bit_from_zk_index(idx)
if bit_idx < 0:
return
self._reset_in_use(bit_idx)
# end reset_in_use
def get_alloc_count(self):
return self._in_use.count()
# end get_alloc_count
def alloc(self, value=None):
# Allocates a index from the allocation list
if self._in_use.all():
idx = self._in_use.length()
if idx > self._max_alloc:
raise ResourceExhaustionError()
self._in_use.append(1)
else:
idx = self._in_use.index(0)
self._in_use[idx] = 1
idx = self._get_zk_index_from_bit(idx)
try:
# Create a node at path and return its integer value
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.create_node(self._path + id_str, value)
return idx
except ResourceExistsError:
return self.alloc(value)
# end alloc
def reserve(self, idx, value=None):
# Reserves the requested index if available
if not self._start_idx <= idx < self._start_idx + self._size:
return None
try:
# Create a node at path and return its integer value
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.create_node(self._path + id_str, value)
self.set_in_use(idx)
return idx
except ResourceExistsError:
self.set_in_use(idx)
existing_value = self.read(idx)
if (value == existing_value or
existing_value is None): # upgrade case
# idempotent reserve
return idx
msg = 'For index %s reserve conflicts with existing value %s.' \
%(idx, existing_value)
self._zookeeper_client.syslog(msg, level='notice')
return None
# end reserve
def delete(self, idx):
id_str = "%(#)010d" % {'#': idx}
self._zookeeper_client.delete_node(self._path + id_str)
bit_idx = self._get_bit_from_zk_index(idx)
if 0 <= bit_idx < self._in_use.length():
self._in_use[bit_idx] = 0
# end delete
def read(self, idx):
id_str = "%(#)010d" % {'#': idx}
id_val = self._zookeeper_client.read_node(self._path+id_str)
if id_val is not None:
bit_idx = self._get_bit_from_zk_index(idx)
if bit_idx >= 0:
self._set_in_use(bit_idx)
return id_val
# end read
def empty(self):
return not self._in_use.any()
# end empty
@classmethod
def delete_all(cls, zookeeper_client, path):
try:
zookeeper_client.delete_node(path, recursive=True)
except kazoo.exceptions.NotEmptyError:
#TODO: Add retries for NotEmptyError
zookeeper_client.syslog("NotEmptyError while deleting %s" % path)
# end delete_all
#end class IndexAllocator
class ZookeeperClient(object):
def __init__(self, module, server_list, logging_fn=None):
# logging
logger = logging.getLogger(module)
logger.setLevel(logging.INFO)
try:
handler = logging.handlers.RotatingFileHandler(LOG_DIR + module + '-zk.log', maxBytes=10*1024*1024, backupCount=5)
except IOError:
print "Cannot open log file in %s" %(LOG_DIR)
else:
log_format = logging.Formatter('%(asctime)s [%(name)s]: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
handler.setFormatter(log_format)
logger.addHandler(handler)
if logging_fn:
self.log = logging_fn
else:
self.log = self.syslog
self._zk_client = \
kazoo.client.KazooClient(
server_list,
timeout=400,
handler=kazoo.handlers.gevent.SequentialGeventHandler(),
logger=logger)
self._zk_client.add_listener(self._zk_listener)
self._logger = logger
self._election = None
self._server_list = server_list
# KazooRetry to retry keeper CRUD operations
self._retry = KazooRetry(max_tries=None, max_delay=300,
sleep_func=gevent.sleep)
self._conn_state = None
self._sandesh_connection_info_update(status='INIT', message='')
self._lost_cb = None
self.connect()
# end __init__
# start
def connect(self):
while True:
try:
self._zk_client.start()
break
except gevent.event.Timeout as e:
# Update connection info
self._sandesh_connection_info_update(status='DOWN',
message=str(e))
gevent.sleep(1)
# Zookeeper is also throwing exception due to delay in master election
except Exception as e:
# Update connection info
self._sandesh_connection_info_update(status='DOWN',
message=str(e))
gevent.sleep(1)
# Update connection info
self._sandesh_connection_info_update(status='UP', message='')
# end
def is_connected(self):
return self._zk_client.state == KazooState.CONNECTED
# end is_connected
def syslog(self, msg, *args, **kwargs):
if not self._logger:
return
level = kwargs.get('level', 'info')
if isinstance(level, int):
from pysandesh.sandesh_logger import SandeshLogger
level = SandeshLogger.get_py_logger_level(level)
log_method = getattr(self._logger, level, self._logger.info)
log_method(msg)
# end syslog
def set_lost_cb(self, lost_cb=None):
# set a callback to be called when kazoo state is lost
# set to None for default action
self._lost_cb = lost_cb
# end set_lost_cb
def _zk_listener(self, state):
if state == KazooState.CONNECTED:
if self._election:
self._election.cancel()
# Update connection info
self._sandesh_connection_info_update(status='UP', message='')
elif state == KazooState.LOST:
# Lost the session with ZooKeeper Server
# Best of option we have is to exit the process and restart all
# over again
self._sandesh_connection_info_update(status='DOWN',
message='Connection to Zookeeper lost')
if self._lost_cb:
self._lost_cb()
else:
os._exit(2)
elif state == KazooState.SUSPENDED:
# Update connection info
self._sandesh_connection_info_update(status='INIT',
message = 'Connection to zookeeper lost. Retrying')
# end
def _zk_election_callback(self, func, *args, **kwargs):
func(*args, **kwargs)
# Exit if running master encounters error or exception
exit(1)
# end
def master_election(self, path, identifier, func, *args, **kwargs):
while True:
self._election = self._zk_client.Election(path, identifier)
self._election.run(self._zk_election_callback, func, *args, **kwargs)
# end master_election
def create_node(self, path, value=None):
try:
if value is None:
value = uuid.uuid4()
retry = self._retry.copy()
retry(self._zk_client.create, path, str(value), makepath=True)
except kazoo.exceptions.NodeExistsError:
current_value = self.read_node(path)
if current_value == value:
return True;
raise ResourceExistsError(path, str(current_value))
# end create_node
def delete_node(self, path, recursive=False):
try:
retry = self._retry.copy()
retry(self._zk_client.delete, path, recursive=recursive)
except kazoo.exceptions.NoNodeError:
pass
except Exception as e:
raise e
# end delete_node
def read_node(self, path):
try:
retry = self._retry.copy()
value = retry(self._zk_client.get, path)
return value[0]
except Exception:
return None
# end read_node
def get_children(self, path):
try:
retry = self._retry.copy()
return retry(self._zk_client.get_children, path)
except Exception:
return []
    # end get_children
def _sandesh_connection_info_update(self, status, message):
from pysandesh.connection_info import ConnectionState
from pysandesh.gen_py.process_info.ttypes import ConnectionStatus, \
ConnectionType
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
new_conn_state = getattr(ConnectionStatus, status)
ConnectionState.update(conn_type = ConnectionType.ZOOKEEPER,
name = 'Zookeeper', status = new_conn_state,
message = message,
server_addrs = self._server_list.split(','))
if (self._conn_state and self._conn_state != ConnectionStatus.DOWN and
new_conn_state == ConnectionStatus.DOWN):
msg = 'Connection to Zookeeper down: %s' %(message)
self.log(msg, level=SandeshLevel.SYS_ERR)
if (self._conn_state and self._conn_state != new_conn_state and
new_conn_state == ConnectionStatus.UP):
msg = 'Connection to Zookeeper ESTABLISHED'
self.log(msg, level=SandeshLevel.SYS_NOTICE)
self._conn_state = new_conn_state
# end _sandesh_connection_info_update
# end class ZookeeperClient
| apache-2.0 | -6,376,591,169,113,161,000 | 34.425061 | 126 | 0.556596 | false |
nagyistoce/geokey | geokey/contributions/migrations/0010_auto_20150511_1132.py | 1 | 2387 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.db import migrations
def clean_list(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
return json.loads(val)
return val
def clean_int(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
return int(val)
return val
def clean_number(val):
if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
try: # it's an int
return int(val)
except ValueError:
pass
try: # it's a float
return float(val)
except ValueError:
pass
# cannot convert to number, returns string or None
return val
def clean_values(apps, schema_editor):
Observation = apps.get_model("contributions", "Observation")
NumericField = apps.get_model("categories", "NumericField")
LookupField = apps.get_model("categories", "LookupField")
MultipleLookupField = apps.get_model("categories", "MultipleLookupField")
for field in NumericField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_number(value)
observation.save()
for field in LookupField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_int(value)
observation.save()
for field in MultipleLookupField.objects.all():
for observation in Observation.objects.filter(category=field.category):
if observation.properties:
value = observation.properties.get(field.key)
if value:
observation.properties[field.key] = clean_list(value)
observation.save()
class Migration(migrations.Migration):
dependencies = [
('contributions', '0009_auto_20150420_1549'),
]
operations = [
migrations.RunPython(clean_values),
]
| apache-2.0 | 9,169,566,666,281,336,000 | 30 | 79 | 0.620863 | false |
dougthor42/PyBank | pybank/_logging.py | 1 | 6390 | # -*- coding: utf-8 -*-
"""
Logging setup and handlers for PyBank
Created on Wed Jan 13 13:55:56 2016
"""
# ---------------------------------------------------------------------------
### Imports
# ---------------------------------------------------------------------------
# Standard Library
import datetime
import os.path
import logging
from logging.handlers import TimedRotatingFileHandler as TRFHandler
# Third Party
# Package / Application
# ---------------------------------------------------------------------------
### Module Constants
# ---------------------------------------------------------------------------
LOG_LEVEL_BASE = logging.DEBUG
LOG_LEVEL_FILE = LOG_LEVEL_BASE
LOG_LEVEL_CONSOLE = logging.INFO
LOG_LEVEL_GUI = LOG_LEVEL_BASE
# ---------------------------------------------------------------------------
### Classes
# ---------------------------------------------------------------------------
class CustomLoggingFormatter(logging.Formatter):
"""
Custom logging formatter. Overrides funcName and module if a value
for name_override or module_override exists.
"""
def format(self, record):
if hasattr(record, 'name_override'):
record.funcName = record.name_override
if hasattr(record, 'module_override'):
record.module = record.module_override
return super(CustomLoggingFormatter, self).format(record)
# ---------------------------------------------------------------------------
### Functions
# ---------------------------------------------------------------------------
def _setup_logging():
"""
Set up logging for the entire package.
Log strings are sent to both the console and a log file.
The file is a TimedRotatingFileHandler set up to create a new log file
every Sunday at midnight. This should keep the log files small-ish
while also keeping the number of log files down. All log files are
kept (none are automatically deleted by backupCount parameter).
Log lines look like so::
2015-06-23 17:04:10.409 [DEBUG ] [gui ] [_color_dolla] msg...
|--------| |----------| |--------| |--------| |------------| |----)
^ ^ ^ ^ ^ ^
| | | | | |
Date + | | | | |
Time -----------+ | | | |
Level Name (8 char) ---------+ | | |
Module Name (8 char) -------------------+ | |
Function Name (12 char) -----------------------------+ |
Message --------------------------------------------------------+
All dates and times are ISO 8601 format, local time.
Parameters:
-----------
None
Returns:
--------
None
Notes:
------
1. Since I cannot get ms in the date format, I use the logger `msecs`
attribute in the log format string.
2. cx_freeze puts .py files in library.zip, so this means that I have
to go up one additional level in the directory tree to find the
`log` folder.
References:
-----------
Logging different levels to different places:
https://aykutakin.wordpress.com/2013/08/06/
logging-to-console-and-file-in-python/
Adding milliseconds to log string:
http://stackoverflow.com/a/7517430/1354930
TimedRotatingFileHandler:
https://docs.python.org/3.4/library/
logging.handlers.html#logging.handlers.TimedRotatingFileHandler
TimedRotatingFileHandler:
http://www.blog.pythonlibrary.org/2014/02/11/
python-how-to-create-rotating-logs/
"""
logfmt = ("%(asctime)s.%(msecs)03d"
" [%(levelname)-8.8s]"
" [%(module)-8.8s]" # Note implicit string concatenation.
" [%(funcName)-16.16s]"
" %(message)s"
)
datefmt = "%Y-%m-%d %H:%M:%S"
# Create the logger
logger = logging.getLogger()
logger.setLevel(LOG_LEVEL_BASE)
### Console Handler #####################################################
handler = logging.StreamHandler()
handler.setLevel(LOG_LEVEL_CONSOLE)
# formatter = logging.Formatter(logfmt, datefmt)
formatter = CustomLoggingFormatter(logfmt, datefmt)
handler.setFormatter(formatter)
handler.set_name("Console Handler")
logger.addHandler(handler)
logging.info("Console logging initialized")
### File Handler ########################################################
# Build the logfile path.
dirname = os.path.dirname(os.path.abspath(__file__))
rootpath = os.path.split(dirname)[0]
logpath = os.path.join(rootpath, "log")
logfile = os.path.join(logpath, "PyBank.log")
# see Note #2
if not os.path.isdir(logpath):
rootpath = os.path.split(rootpath)[0] # up one level
logpath = os.path.join(rootpath, "log")
logfile = os.path.join(logpath, "PyBank.log")
# create log directory if it doesn't exist.
try:
os.makedirs(logpath)
except OSError:
if not os.path.isdir(logpath):
raise
# create the log file if it doesn't exist.
if not os.path.isfile(logfile):
open(logfile, 'a').close()
rollover_time = datetime.time.min # midnight
handler = TRFHandler(logfile,
when="W6", # Sunday
#interval=7, # when=Sunday -> not needed
#backupCount=5,
atTime=rollover_time,
#delay=True,
)
handler.setLevel(LOG_LEVEL_FILE)
# formatter = logging.Formatter(logfmt, datefmt)
formatter = CustomLoggingFormatter(logfmt, datefmt)
handler.setFormatter(formatter)
handler.set_name("File Handler")
logger.addHandler(handler)
logging.info("File logging initialized")
### Module Executions #######################################################
if __name__ == "__main__":
print("This module is not intended to be run as a stand-alone.")
| gpl-2.0 | -5,926,981,340,757,622,000 | 34.101695 | 77 | 0.478091 | false |
RadishLLC/ballast | ballast/discovery/ns.py | 1 | 3780 | import abc
import logging
import socket
from ballast.compat import unicode
from ballast.exception import BallastException, BallastConfigurationException
from ballast.discovery import ServerList, Server
try:
from dns import resolver, rdatatype, exception
from dns.rdtypes.IN.A import A
from dns.rdtypes.ANY.CNAME import CNAME
except ImportError:
raise BallastException(
"Please install optional DNS dependencies "
"in order to use this feature: \n\n"
"$ pip install ballast[dns] or \n"
"$ pip install ballast[all]"
)
class DnsRecordList(ServerList):
__metaclass__ = abc.ABCMeta
_RESOLVER_CACHE = resolver.LRUCache()
def __init__(self, dns_qname, dns_host=None, dns_port=None):
self._dns_qname = dns_qname
self._logger = logging.getLogger(self.__module__)
# create a DNS resolver that caches results
self._dns_resolver = resolver.Resolver()
self._dns_resolver.cache = DnsRecordList._RESOLVER_CACHE
if dns_port is not None:
self._dns_resolver.port = dns_port
if dns_host is not None:
try:
self._dns_resolver.nameservers = [
socket.gethostbyname(dns_host)
]
except Exception as e:
raise BallastConfigurationException(
'Name resolution failed for DNS host: %s' % dns_host,
e
)
@abc.abstractmethod
def get_servers(self):
pass
class DnsARecordList(DnsRecordList):
def __init__(self, dns_qname, dns_host=None, dns_port=None, server_port=80):
super(DnsARecordList, self).__init__(
dns_qname,
dns_host,
dns_port
)
self.server_port = server_port
def get_servers(self):
try:
# query SRV records for our service name
answer = self._dns_resolver.query(self._dns_qname, rdatatype.A)
# iterate the results, generate server objects
for i, srv in enumerate(answer):
ttl = answer.response.answer[0].ttl
s = Server(
srv.address,
self.server_port,
ttl=ttl
)
self._logger.debug("Created server from DNS A record: %s", s)
yield s
except (exception.DNSException, BallastException):
return
class DnsServiceRecordList(DnsRecordList):
def __init__(self, dns_qname, dns_host=None, dns_port=None):
super(DnsServiceRecordList, self).__init__(
dns_qname,
dns_host,
dns_port
)
def get_servers(self):
try:
# query SRV records for our service name
answer = self._dns_resolver.query(self._dns_qname, rdatatype.SRV)
# iterate the results, generate server objects
for i, srv in enumerate(answer):
rdata = answer.response.additional[0].items[i]
if isinstance(rdata, A):
address = rdata.address
elif isinstance(rdata, CNAME):
address = unicode(rdata.target).rstrip('.')
else:
raise BallastException('Unexpected DNS record: %s' % rdata)
ttl = answer.response.additional[0].ttl
s = Server(
address,
srv.port,
srv.weight,
srv.priority,
ttl
)
self._logger.debug("Created server from DNS SRV record: %s", s)
yield s
except (exception.DNSException, BallastException):
return
| apache-2.0 | 1,116,718,246,703,858,000 | 28.76378 | 80 | 0.551323 | false |
aerospike/aerospike-client-python | test/new_tests/test_nested_cdt_ctx.py | 1 | 150657 | # -*- coding: utf-8 -*-
import sys
import random
import unittest
from datetime import datetime
import pytest
from aerospike import exception as e
from aerospike_helpers.operations import bitwise_operations
from aerospike_helpers import cdt_ctx
from aerospike_helpers.operations import list_operations
from aerospike_helpers.operations import map_operations
list_index = "list_index"
list_rank = "list_rank"
list_value = "list_value"
map_index = "map_index"
map_key = "map_key"
map_rank = "map_rank"
map_value = "map_value"
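# Map each ctx type label used in the test parametrization to its cdt_ctx constructor.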
ctx_ops = {
list_index: cdt_ctx.cdt_ctx_list_index,
list_rank: cdt_ctx.cdt_ctx_list_rank,
list_value: cdt_ctx.cdt_ctx_list_value,
map_index: cdt_ctx.cdt_ctx_map_index,
map_key: cdt_ctx.cdt_ctx_map_key,
map_rank: cdt_ctx.cdt_ctx_map_rank,
map_value: cdt_ctx.cdt_ctx_map_value,
}
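# Build a single cdt_ctx entry of the requested type for the given index, key, rank, or value.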
def add_ctx_op(ctx_type, value):
ctx_func = ctx_ops[ctx_type]
return ctx_func(value)
aerospike = pytest.importorskip("aerospike")
try:
import aerospike
except ImportError:
print("Please install aerospike python client.")
sys.exit(1)
class TestCTXOperations(object):
@pytest.fixture(autouse=True)
def setup(self, request, as_connection):
"""
Setup Method
"""
self.keys = []
self.test_key = 'test', 'demo', 'nested_cdt_ops'
self.nested_list = [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]
random.seed(datetime.now())
self.nested_list_order = [[4, 2, 5],[1, 4, 2, 3],[[2,2,2]]]
self.nested_map = { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird']}, 'two': []}}
self.layered_map = {'first': {'one': {1: {'g': 'layer', 'l': 'done'} } }, 'second': {'two': {2: {'g': 'layer', 'l': 'bye'} } } }
self.num_map = {1: {1: 'v1', 2: 'v2', 3: 'v3'}, 2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {11: 'v11'}}}
self.layered_map_bin = 'layered_map'
self.nested_list_bin = 'nested_list'
self.nested_list_order_bin = 'nested_order'
self.nested_map_bin = 'nested_map'
self.num_map_bin = 'num_map'
ctx_sort_nested_map1 = [
cdt_ctx.cdt_ctx_map_key('first')
]
sort_map_ops = [
map_operations.map_set_policy('nested_map', {'map_order': aerospike.MAP_KEY_ORDERED}, ctx_sort_nested_map1),
]
self.as_connection.put(
self.test_key,
{
self.nested_list_bin: self.nested_list,
self.nested_list_order_bin: self.nested_list_order,
self.nested_map_bin: self.nested_map,
self.layered_map_bin: self.layered_map,
self.num_map_bin: self.num_map
}
)
self.keys.append(self.test_key)
        # Apply a KEY_ORDERED map policy to the 'first' sub-map so ctx operations also run against an ordered map
_, _, _ = self.as_connection.operate(self.test_key, sort_map_ops)
yield
for key in self.keys:
try:
self.as_connection.remove(key)
except e.AerospikeError:
pass
@pytest.mark.parametrize("ctx_types, value, list_indexes, expected", [
([list_index], 'toast', [1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy', 'toast']]),
([list_index], 'jam', [0], [['hi', 'friend', ['how', 'are',
['you']], 'jam'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
([list_index, list_index, list_index], 4, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3, 4]], 'home boy']]),
([list_index, list_index], '?', [0,2], [['hi', 'friend', ['how', 'are',
['you'], '?']], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
([list_index, list_index, list_rank], '?', [1,1,-1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3, '?']], 'home boy']]),
([list_index, list_value], '?', [1, ['numbers', [1, 2, 3]]], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3], '?'], 'home boy']]),
])
def test_ctx_list_append(self, ctx_types, value, list_indexes, expected):
"""
        Invoke list_append() to append a value to a list.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
list_operations.list_append(self.nested_list_bin, value, None, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("value, list_indexes, expected", [
('toast', [2], e.OpNotApplicable),
('?', 'cat', e.ParamError)
])
def test_ctx_list_append_negative(self, value, list_indexes, expected):
"""
        Invoke list_append() to append a value to a list with expected failures.
"""
ctx = []
for index in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(index))
ops = [
list_operations.list_append(self.nested_list_bin, value, None, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, values, list_indexes, expected", [
([list_rank], ['toast'], [0], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy', 'toast']]),
([list_index], ['jam', 'butter', 2], [0], [['hi', 'friend', ['how', 'are',
['you']], 'jam', 'butter', 2], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
([list_rank, list_index, list_value], [4, 5, 6], [0,1,[1 ,2 ,3]], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3, 4, 5, 6]], 'home boy']]),
([list_rank, list_index], ['?', '!'], [1,2], [['hi', 'friend', ['how', 'are',
['you'], '?', '!']], ['hey', ['numbers', [1, 2, 3]], 'home boy']])
])
def test_ctx_list_append_items(self, ctx_types, values, list_indexes, expected):
"""
Invoke list_append_items() to append values to a list.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
list_operations.list_append_items(self.nested_list_bin, values, None, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("values, list_indexes, expected", [
(['toast'], [2], e.OpNotApplicable),
(['?'], 'cat', e.ParamError),
('toast', [2], e.ParamError)
])
def test_ctx_list_append_items_negative(self, values, list_indexes, expected):
"""
Invoke list_append_items() to append values to a list with expected failures.
"""
ctx = []
for index in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(index))
ops = [
list_operations.list_append_items(self.nested_list_bin, values, None, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, value, list_indexes, expected", [
(0, 'toast', [1], [['hi', 'friend', ['how', 'are',
['you']]], ['toast', 'hey', ['numbers', [1, 2, 3]], 'home boy']]),
(2, 'jam', [0], [['hi', 'friend', 'jam', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(1, 4, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 4, 2, 3]], 'home boy']]),
(2, '?', [0,2], [['hi', 'friend', ['how', 'are',
'?', ['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']])
])
def test_ctx_list_insert(self, index, value, list_indexes, expected):
"""
Invoke list_insert() to insert a value into a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_insert(self.nested_list_bin, index, value, None, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, value, list_indexes, expected", [
(1, 'toast', [2], e.OpNotApplicable),
(0, '?', 'cat', e.ParamError),
])
def test_ctx_list_insert_negative(self, index, value, list_indexes, expected):
"""
Invoke list_insert() to insert a value into a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_insert(self.nested_list_bin, index, value, None, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, value, list_indexes, expected", [
(0, 2, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [3, 2, 3]], 'home boy']]),
(0, 0, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(2, 300, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 303]], 'home boy']]),
])
def test_ctx_list_increment(self, index, value, list_indexes, expected):
"""
Invoke list_increment() to increment a value in a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_increment(self.nested_list_bin, index, value, None, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, value, policy, list_indexes, expected", [
(0, 1, None, [1], e.InvalidRequest),
(0, 'cat', None, [1], e.InvalidRequest),
(0, 1, None, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_increment_negative(self, index, value, policy, list_indexes, expected):
"""
Invoke list_increment() to increment a value in a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_increment(self.nested_list_bin, index, value, policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, list_indexes, expected", [
(0, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [2, 3]], 'home boy']]),
(2, [1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]]]]),
(2, [0], [['hi', 'friend'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(1, [0,2], [['hi', 'friend', ['how',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_pop(self, index, list_indexes, expected):
"""
Invoke list_pop() to pop a value off a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_pop(self.nested_list_bin, index, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, list_indexes, expected", [
(3, [1,1,1], e.OpNotApplicable),
(2, [1,1,1,1], e.InvalidRequest),
('cat', [0], e.ParamError),
])
def test_ctx_list_pop_negative(self, index, list_indexes, expected):
"""
Invoke list_pop() to pop a value off a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_pop(self.nested_list_bin, index, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, list_indexes, count, expected", [
(0, [1,1,1], 3, [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', []], 'home boy']]),
(2, [1], 1, [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]]]]),
(1, [0], 2, [['hi'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(0, [0,2], 3, [['hi', 'friend', []], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_pop_range(self, index, list_indexes, count, expected):
"""
Invoke list_pop_range() to pop values off a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_pop_range(self.nested_list_bin, index, count, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, list_indexes, count, expected", [
#(4, [1,1,1], 1, e.OpNotApplicable),
(2, [1,1,1,1], 1, e.InvalidRequest),
('cat', [0], 1, e.ParamError),
#(0, [1,1,1], 20, e.OpNotApplicable),
])
def test_ctx_list_pop_range_negative(self, index, list_indexes, count, expected):
"""
Invoke list_pop_range() to pop values off a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_pop_range(self.nested_list_bin, index, count, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, list_indexes, expected", [
(2, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2]], 'home boy']]),
(1, [0,2], [['hi', 'friend', ['how',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(0, [1], [['hi', 'friend', ['how', 'are',
['you']]], [['numbers', [1, 2, 3]], 'home boy']]),
(1, [1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers'], 'home boy']]),
])
def test_ctx_list_remove(self, index, list_indexes, expected):
"""
Invoke list_remove() to remove a value from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove(self.nested_list_bin, index, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, list_indexes, expected", [
(0, 'cat', e.ParamError),
(40, [1], e.OpNotApplicable),
(0, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_remove_negative(self, index, list_indexes, expected):
"""
Invoke list_remove() to remove a value from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove(self.nested_list_bin, index, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("count, index, list_indexes, expected", [
(3, 0, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', []], 'home boy']]),
(1, 1, [0,2], [['hi', 'friend', ['how',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(2, 1, [1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey']]),
(1, 1, [1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers'], 'home boy']]),
])
def test_ctx_list_remove_range(self, count, index, list_indexes, expected):
"""
Invoke list_remove_range() to remove values from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_range(self.nested_list_bin, index, count, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("count, index, list_indexes, expected", [
(1, 0, 'cat', e.ParamError),
(1, 0, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_remove_range_negative(self, count, index, list_indexes, expected):
"""
Invoke list_remove_range() to remove values from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_range(self.nested_list_bin, index, count, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("list_indexes, expected", [
([1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', []], 'home boy']]),
([0,2], [['hi', 'friend', []], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
([1], [['hi', 'friend', ['how', 'are',
['you']]], []]),
([1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', [], 'home boy']]),
])
def test_ctx_list_clear(self, list_indexes, expected):
"""
Invoke list_clear() to empty a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_clear(self.nested_list_bin, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("list_indexes, expected", [
('cat', e.ParamError),
([1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_clear_negative(self, list_indexes, expected):
"""
Invoke list_clear() to empty a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_clear(self.nested_list_bin, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, value, list_indexes, expected", [
(0, 'toast', [1], [['hi', 'friend', ['how', 'are',
['you']]], ['toast', ['numbers', [1, 2, 3]], 'home boy']]),
(2, 'jam', [0], [['hi', 'friend', 'jam'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(1, 'honey', [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 'honey', 3]], 'home boy']]),
(2, 6, [0,2], [['hi', 'friend', ['how', 'are',
6]], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(5, 'toast', [1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy', None, None, 'toast']])
])
def test_ctx_list_set(self, index, value, list_indexes, expected):
"""
Invoke list_set() to set a value in a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_set(self.nested_list_bin, index, value, None, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, value, list_indexes, expected", [
(1, 'toast', [2], e.OpNotApplicable),
(0, '?', 'cat', e.ParamError),
])
def test_ctx_list_set_negative(self, index, value, list_indexes, expected):
"""
Invoke list_set() to set a value in a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_set(self.nested_list_bin, index, value, None, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, list_indexes, expected", [
(0, [1,1,1], 1),
(2, [1,1,1], 3),
(1, [1,1], [1, 2, 3]),
(2, [0,2], ['you']),
])
def test_ctx_list_get(self, index, list_indexes, expected):
"""
Invoke list_get() to retrieve a value from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get(self.nested_list_bin, index, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, list_indexes, expected", [
(1, [2], e.OpNotApplicable),
(4, [1,1,1], e.OpNotApplicable),
('cat', [1], e.ParamError),
])
def test_ctx_list_get_negative(self, index, list_indexes, expected):
"""
Invoke list_get() to retrieve a value from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get(self.nested_list_bin, index, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, count, list_indexes, expected", [
(0, 3, [1,1,1], [1,2,3]),
(2, 1, [1,1,1], [3]),
(1, 5, [1], [['numbers', [1, 2, 3]], 'home boy']),
(1, 2, [0,2], ['are', ['you']]),
(4, 1, [1,1,1], []),
])
def test_ctx_list_get_range(self, index, count, list_indexes, expected):
"""
Invoke list_get_range() to retrieve values from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_range(self.nested_list_bin, index, count, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, count, list_indexes, expected", [
(1, 1, [2], e.OpNotApplicable),
('cat', 1, [1], e.ParamError),
])
def test_ctx_list_get_range_negative(self, index, count, list_indexes, expected):
"""
Invoke list_get_range() to retrieve values from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_range(self.nested_list_bin, index, count, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, count, list_indexes, expected", [
(0, 1, [0], [['hi'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(0, 0, [1], [['hi', 'friend', ['how', 'are', ['you']]], []]),
(0, 2, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1, 2]], 'home boy']]),
(1, 3, [0,2], [['hi', 'friend', ['are',
['you']]], ['hey', ['numbers', [1, 2, 3]], 'home boy']])
])
def test_ctx_list_trim(self, index, count, list_indexes, expected):
"""
Invoke list_trim() to remove list elements outside the given range.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_trim(self.nested_list_bin, index, count, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, count, list_indexes, expected", [
(3, 1, [2], e.OpNotApplicable),
(0, 2, 'cat', e.ParamError),
(1, 'dog', [2], e.ParamError),
('lizard', 1, [2], e.OpNotApplicable),
])
def test_ctx_list_trim_negative(self, index, count, list_indexes, expected):
"""
Invoke list_trim() to remove list elements outside the given range with expected failures.
"""
ctx = []
for index in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(index))
ops = [
list_operations.list_trim(self.nested_list_bin, index, count, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("list_indexes, expected", [
([1,1,1], 3),
([1], 3),
        ([1,1], 2),
([0,2], 3),
([0,2,2], 1),
])
def test_ctx_list_size(self, list_indexes, expected):
"""
Invoke list_size() to get the size of a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_size(self.nested_list_bin, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("list_indexes, expected", [
([4], e.OpNotApplicable),
([1,1,1,1], e.BinIncompatibleType),
(['cat'], e.ParamError),
])
    def test_ctx_list_size_negative(self, list_indexes, expected):
"""
Invoke list_size() to get the size of a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_size(self.nested_list_bin, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
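    # For the *_value_rank_range_relative operations exercised next, offset and
    # count select a rank window relative to the rank the given value holds in
    # the targeted list; inverted=True complements that selection.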
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected_bin, expected_val", [
(2, 0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers', [1]], 'home boy']], [2,3]),
('hi', 1, aerospike.LIST_RETURN_INDEX, 2, False, [0], [['hi', 'friend'], ['hey', ['numbers', [1,2,3]], 'home boy']], [2]),
('numbers', 0, aerospike.LIST_RETURN_VALUE, 1, True, [1,1], [['hi', 'friend', ['how', 'are',
['you']]], ['hey', ['numbers'], 'home boy']], [[1,2,3]]),
])
def test_ctx_list_remove_by_value_rank_range(self, value, offset, return_type, count,
inverted, list_indexes, expected_bin, expected_val):
"""
        Invoke list_remove_by_value_rank_range_relative() to remove elements in a range by rank
        relative to the element specified by the given value.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_rank_range_relative(self.nested_list_bin, value, offset,
return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
assert res[self.nested_list_bin] == expected_val
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected", [
(2, 0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_remove_by_value_rank_range_negative(self, value, offset, return_type, count,
inverted, list_indexes, expected):
"""
        Invoke list_remove_by_value_rank_range_relative() to remove elements in a range by rank
        relative to the element specified by the given value with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_rank_range_relative(self.nested_list_bin, value, offset,
return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected", [
(2, 0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1,1], [2,3]),
('hi', 0, aerospike.LIST_RETURN_INDEX, None, False, [0], [0,2]),
('numbers', 0, aerospike.LIST_RETURN_VALUE, 1, True, [1,1], [[1,2,3]]),
])
def test_ctx_list_get_by_value_rank_range_relative(self, value, offset, return_type, count,
inverted, list_indexes, expected):
"""
        Invoke list_get_by_value_rank_range_relative() to get elements in a range by rank
        relative to the element specified by the given value.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_rank_range_relative(self.nested_list_bin, value, offset,
return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected", [
(2, 0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1,1,6], e.OpNotApplicable),
(2, 0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1,1,1], e.BinIncompatibleType),
])
def test_ctx_list_get_by_value_rank_range_relative_negative(self, value, offset, return_type, count,
inverted, list_indexes, expected):
"""
        Invoke list_get_by_value_rank_range_relative() to get elements in a range by rank
        relative to the element specified by the given value with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_rank_range_relative(self.nested_list_bin, value, offset,
return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, return_type, list_indexes, expected", [
(0, aerospike.LIST_RETURN_COUNT, [1,1,1], 1),
(2, aerospike.LIST_RETURN_VALUE, [1,1,1], 3),
(1, aerospike.LIST_RETURN_VALUE, [1,1], [1, 2, 3]),
(2, aerospike.LIST_RETURN_RANK, [0,2], 2),
])
def test_ctx_list_get_by_index(self, index, return_type, list_indexes, expected):
"""
Invoke list_get_by_index() to get the value at index from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index(self.nested_list_bin, index, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, return_type, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, [2], e.OpNotApplicable),
(4, aerospike.LIST_RETURN_VALUE, [1,1,1], e.OpNotApplicable),
('cat', aerospike.LIST_RETURN_VALUE, [1], e.ParamError),
])
def test_ctx_list_get_by_index_negative(self, index, return_type, list_indexes, expected):
"""
Invoke list_get_by_index() to get the value at index from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index(self.nested_list_bin, index, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, return_type, count, inverted, list_indexes, expected", [
(0, aerospike.LIST_RETURN_COUNT, 3, False, [1], 3),
(2, aerospike.LIST_RETURN_VALUE, 1, True, [1,1,1], [1,2]),
(4, aerospike.LIST_RETURN_VALUE, 1, False, [1,1,1], []),
(0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1], ['numbers', [1, 2, 3]]),
(1, aerospike.LIST_RETURN_RANK, 3, False, [0,2], [0, 2]),
])
def test_ctx_list_get_by_index_range(self, index, return_type, count, inverted, list_indexes, expected):
"""
        Invoke list_get_by_index_range() to get the values starting at index for count from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index_range(self.nested_list_bin, index, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("index, return_type, count, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, 3, False, [2], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, 3, False, ['dog'], e.ParamError),
(1, 42, 1, False, [1], e.OpNotApplicable),
#(1, aerospike.LIST_RETURN_VALUE, 1, 'dog', [1], e.ParamError), why does this pass with bad bool?
])
def test_ctx_list_get_by_index_range_negative(self, index, return_type, count, inverted, list_indexes, expected):
"""
        Invoke list_get_by_index_range() to get the values starting at index for count from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index_range(self.nested_list_bin, index, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("rank, return_type, list_indexes, expected", [
(0, aerospike.LIST_RETURN_COUNT, [1,1,1], 1),
(2, aerospike.LIST_RETURN_VALUE, [1,1,1], 3),
(1, aerospike.LIST_RETURN_VALUE, [1,1], [1, 2, 3]),
(2, aerospike.LIST_RETURN_VALUE, [0,2], ['you']),
])
def test_ctx_list_get_by_rank(self, rank, return_type, list_indexes, expected):
"""
Invoke list_get_by_rank() to get an entry of the given rank from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_rank(self.nested_list_bin, rank, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("rank, return_type, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, [2], e.OpNotApplicable),
(3, aerospike.LIST_RETURN_VALUE, [1,1,1], e.OpNotApplicable),
('cat', aerospike.LIST_RETURN_VALUE, [1], e.ParamError),
])
def test_ctx_list_get_by_rank_negative(self, rank, return_type, list_indexes, expected):
"""
Invoke list_get_by_rank() to get an entry of the given rank from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_rank(self.nested_list_bin, rank, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("rank, return_type, count, inverted, list_indexes, expected", [
(0, aerospike.LIST_RETURN_COUNT, 3, False, [1], 3),
(2, aerospike.LIST_RETURN_VALUE, 1, True, [1,1,1], [1,2]),
(4, aerospike.LIST_RETURN_VALUE, 1, False, [1,1,1], []),
(0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1], ['numbers', [1, 2, 3]]),
(1, aerospike.LIST_RETURN_RANK, 3, False, [0,2], [0, 2]),
(20, aerospike.LIST_RETURN_VALUE, 3, False, [0], []),
])
def test_ctx_list_get_by_rank_range(self, rank, return_type, count, inverted, list_indexes, expected):
"""
        Invoke list_get_by_rank_range() to get elements starting at rank for count from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index_range(self.nested_list_bin, rank, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("rank, return_type, count, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, 3, False, [2], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, 3, False, ['dog'], e.ParamError),
(1, 42, 1, False, [1], e.OpNotApplicable),
#(1, aerospike.LIST_RETURN_VALUE, 1, 'dog', [1], e.ParamError), why does this pass with bad bool?
])
def test_ctx_list_get_by_rank_range_negative(self, rank, return_type, count, inverted, list_indexes, expected):
"""
        Invoke list_get_by_rank_range() to get elements starting at rank for count from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_index_range(self.nested_list_bin, rank, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
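    # The get_by_value* tests below exercise the LIST_RETURN_* result types
    # (VALUE, INDEX, RANK, COUNT); passing inverted=True selects every element
    # that does NOT match the given value(s).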
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected", [
(2, aerospike.LIST_RETURN_COUNT, False, [1,1,1], 1),
([1,2,3], aerospike.LIST_RETURN_RANK, False, [1,1], [1]),
('home boy', aerospike.LIST_RETURN_INDEX, False, [1], [2]),
('how', aerospike.LIST_RETURN_VALUE, True, [0,2], ['are', ['you']]),
])
def test_ctx_list_get_by_value(self, value, return_type, inverted, list_indexes, expected):
"""
Invoke list_get_by_value() to get the given value from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value(self.nested_list_bin, value, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, False, [2], e.OpNotApplicable),
(2, aerospike.LIST_RETURN_VALUE, False, [1,1,1,1], e.BinIncompatibleType),
(1, 'bad_return_type', False, [1], e.ParamError),
])
def test_ctx_list_get_by_value_negative(self, value, return_type, inverted, list_indexes, expected):
"""
Invoke list_get_by_value() to get the given value from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value(self.nested_list_bin, value, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected", [
([2,3], aerospike.LIST_RETURN_COUNT, False, [1,1,1], 2),
([[1,2,3], 'numbers'], aerospike.LIST_RETURN_RANK, False, [1,1], [1,0]),
(['hi', ['how', 'are', ['you']]], aerospike.LIST_RETURN_INDEX, False, [0], [0,2]),
(['how'], aerospike.LIST_RETURN_VALUE, True, [0,2], ['are', ['you']]),
])
def test_ctx_list_get_by_value_list(self, values, return_type, inverted, list_indexes, expected):
"""
Invoke list_get_by_value_list() to get the given values from a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_list(self.nested_list_bin, values, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected", [
([1], aerospike.LIST_RETURN_VALUE, False, [2], e.OpNotApplicable),
([2], aerospike.LIST_RETURN_VALUE, False, [1,1,1,1], e.BinIncompatibleType),
([1], 'bad_return_type', False, [1], e.ParamError),
])
def test_ctx_list_get_by_value_list_negative(self, values, return_type, inverted, list_indexes, expected):
"""
Invoke list_get_by_value_list() to get the given values from a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_list(self.nested_list_bin, values, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("return_type, value_begin, value_end, inverted, list_indexes, expected", [
(aerospike.LIST_RETURN_COUNT, 0, 2, False, [1,1,1], 1),
(aerospike.LIST_RETURN_RANK, None, None, False, [1,1], [0,1]),
(aerospike.LIST_RETURN_INDEX, 2, 3, False, [1,1,1], [1]),
(aerospike.LIST_RETURN_VALUE, 'a', 'c', True, [0,2], ['how', ['you']]),
])
def test_ctx_list_get_by_value_range(self, return_type, value_begin, value_end, inverted, list_indexes, expected):
"""
        Invoke list_get_by_value_range() to get elements with values between value_begin and value_end.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_range(self.nested_list_bin, return_type, value_begin, value_end, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected
@pytest.mark.parametrize("return_type, value_begin, value_end, inverted, list_indexes, expected", [
(aerospike.LIST_RETURN_VALUE, 0, 1, False, [2], e.OpNotApplicable),
(aerospike.LIST_RETURN_VALUE, 0, 1, False, [1,1,1,1], e.BinIncompatibleType),
('bad_return_type', 0, 1, False, [1], e.ParamError),
])
def test_ctx_list_get_by_value_range_negative(self, return_type, value_begin, value_end, inverted, list_indexes, expected):
"""
        Invoke list_get_by_value_range() to get elements with values between value_begin and value_end with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_get_by_value_range(self.nested_list_bin, return_type, value_begin, value_end, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
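    # Each remove_by_* test below asserts two things: the operation's return
    # value (expected_res) and the bin contents left after the removal
    # (expected_bin), read back with a separate get().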
@pytest.mark.parametrize("index, return_type, list_indexes, expected_res, expected_bin", [
(0, aerospike.LIST_RETURN_COUNT, [1,1,1], 1, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [2, 3]], 'home boy']]),
(2, aerospike.LIST_RETURN_VALUE, [1,1,1], 3, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [1, 2]], 'home boy']]),
(1, aerospike.LIST_RETURN_VALUE, [1,1], [1, 2, 3], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers'], 'home boy']]),
(2, aerospike.LIST_RETURN_RANK, [0,2], 2, [['hi', 'friend', ['how', 'are']],
['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_index(self, index, return_type, list_indexes, expected_res, expected_bin):
"""
Invoke list_remove_by_index() to remove the element at index in a list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_index(self.nested_list_bin, index, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("index, return_type, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, [2], e.OpNotApplicable),
(4, aerospike.LIST_RETURN_VALUE, [1,1,1], e.OpNotApplicable),
('cat', aerospike.LIST_RETURN_VALUE, [1], e.ParamError),
(0, aerospike.LIST_RETURN_VALUE, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_remove_by_index_negative(self, index, return_type, list_indexes, expected):
"""
Invoke list_remove_by_index() to remove the element at index in a list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_index(self.nested_list_bin, index, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, return_type, count, inverted, list_indexes, expected_res, expected_bin", [
(0, aerospike.LIST_RETURN_COUNT, 1, False, [1,1,1], 1, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [2, 3]], 'home boy']]),
(1, aerospike.LIST_RETURN_VALUE, 3, False, [1,1,1], [2,3], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [1]], 'home boy']]),
(0, aerospike.LIST_RETURN_VALUE, 2, False, [1,1], ['numbers',[1, 2, 3]], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', [], 'home boy']]),
(2, aerospike.LIST_RETURN_RANK, 1, True, [0,2], [1,0], [['hi', 'friend', [['you']]],
['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_index_range(self, index, return_type, count, inverted, list_indexes, expected_res, expected_bin):
"""
        Invoke list_remove_by_index_range() to remove elements starting at index for count.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_index_range(self.nested_list_bin, index, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("index, return_type, count, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, 3, False, [2], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, 1, False, ['dog'], e.ParamError),
(1, 42, 1, False, [1], e.OpNotApplicable),
(0, aerospike.LIST_RETURN_INDEX, 'dog', False, [1,1,1], e.ParamError),
(0, aerospike.LIST_RETURN_VALUE, 3, False, [1,1,1,1], e.InvalidRequest),
#(4, aerospike.LIST_RETURN_VALUE, 3, False, [1], e.OpNotApplicable), why does this silently fail?
])
def test_ctx_list_remove_by_index_range_negative(self, index, return_type, count, inverted, list_indexes, expected):
"""
        Invoke list_remove_by_index_range() to remove elements starting at index for count with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_index_range(self.nested_list_bin, index, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("rank, return_type, list_indexes, expected_res, expected_bin", [
(0, aerospike.LIST_RETURN_COUNT, [1,1,1], 1, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [2, 3]], 'home boy']]),
(2, aerospike.LIST_RETURN_VALUE, [1,1,1], 3, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [1, 2]], 'home boy']]),
(1, aerospike.LIST_RETURN_VALUE, [1,1], [1, 2, 3], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers'], 'home boy']]),
(2, aerospike.LIST_RETURN_VALUE, [0,2], ['you'], [['hi', 'friend', ['how', 'are']],
['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_rank(self, rank, return_type, list_indexes, expected_res, expected_bin):
"""
        Invoke list_remove_by_rank() to remove the element with the given rank.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_rank(self.nested_list_bin, rank, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("rank, return_type, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, [2], e.OpNotApplicable),
(3, aerospike.LIST_RETURN_VALUE, [1,1,1], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, [1,1,1,1], e.InvalidRequest),
('cat', aerospike.LIST_RETURN_VALUE, [1], e.ParamError),
])
def test_ctx_list_remove_by_rank_negative(self, rank, return_type, list_indexes, expected):
"""
        Invoke list_remove_by_rank() to remove the element with the given rank with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_rank(self.nested_list_bin, rank, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("rank, return_type, count, inverted, list_indexes, expected_res, expected_bin", [
(0, aerospike.LIST_RETURN_COUNT, 1, False, [1,1,1], 1, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [2, 3]], 'home boy']]),
(1, aerospike.LIST_RETURN_VALUE, 3, False, [1,1,1], [2,3], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [1]], 'home boy']]),
(0, aerospike.LIST_RETURN_VALUE, 1, False, [1,1], ['numbers'], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', [[1,2,3]], 'home boy']]),
(2, aerospike.LIST_RETURN_RANK, 2, True, [0,2], [0,1], [['hi', 'friend', [['you']]],
['hey', ['numbers', [1, 2, 3]], 'home boy']])
])
def test_ctx_list_remove_by_rank_range(self, rank, return_type, count, inverted, list_indexes, expected_res, expected_bin):
"""
        Invoke list_remove_by_rank_range() to remove elements starting at the given rank for count.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_rank_range(self.nested_list_bin, rank, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("rank, return_type, count, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, 3, False, [2], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, 1, False, ['dog'], e.ParamError),
(1, 42, 1, False, [1], e.OpNotApplicable),
(0, aerospike.LIST_RETURN_INDEX, 'dog', False, [1,1,1], e.ParamError),
(0, aerospike.LIST_RETURN_VALUE, 3, False, [1,1,1,1], e.InvalidRequest),
('dog', aerospike.LIST_RETURN_VALUE, 3, False, [1,1,1], e.ParamError),
])
def test_ctx_list_remove_by_rank_range_negative(self, rank, return_type, count, inverted, list_indexes, expected):
"""
Invoke list_remove_by_rank_range() to remove elements starting with rank for count with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_rank_range(self.nested_list_bin, rank, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected_res, expected_bin", [
(1, aerospike.LIST_RETURN_COUNT, False, [1,1,1], 1, [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [2, 3]], 'home boy']]),
(3, aerospike.LIST_RETURN_VALUE, False, [1,1,1], [3], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers', [1, 2]], 'home boy']]),
([1,2,3], aerospike.LIST_RETURN_RANK, False, [1,1], [1], [['hi', 'friend', ['how', 'are', ['you']]],
['hey', ['numbers'], 'home boy']]),
(['you'], aerospike.LIST_RETURN_INDEX, True, [0,2], [0,1], [['hi', 'friend', [['you']]],
['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_value(self, value, return_type, inverted, list_indexes, expected_res, expected_bin):
"""
Invoke list_remove_by_value() to remove the element with the given value.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value(self.nested_list_bin, value, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected", [
(1, aerospike.LIST_RETURN_VALUE, False, [2], e.OpNotApplicable),
(1, aerospike.LIST_RETURN_VALUE, False, [1,1,1,1], e.InvalidRequest),
])
def test_ctx_list_remove_by_value_negative(self, value, return_type, inverted, list_indexes, expected):
"""
Invoke list_remove_by_value() to remove the element with the given value with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value(self.nested_list_bin, value, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected_res, expected_bin", [
([2,3], aerospike.LIST_RETURN_COUNT, False, [1,1,1], 2,
[['hi', 'friend', ['how', 'are', ['you']]], ['hey', ['numbers', [1]], 'home boy']]),
([[1,2,3], 'numbers'], aerospike.LIST_RETURN_RANK, False, [1,1], [1,0],
[['hi', 'friend', ['how', 'are', ['you']]], ['hey', [], 'home boy']]),
(['hi', ['how', 'are', ['you']]], aerospike.LIST_RETURN_INDEX, False, [0], [0,2],
[['friend'], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
(['how'], aerospike.LIST_RETURN_VALUE, True, [0,2], ['are', ['you']],
[['hi', 'friend', ['how']], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_value_list(self, values, return_type, inverted, list_indexes, expected_res, expected_bin):
"""
Invoke list_remove_by_value_list() to remove elements with the given values.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_list(self.nested_list_bin, values, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected", [
([1], aerospike.LIST_RETURN_VALUE, False, [2], e.OpNotApplicable),
([2], aerospike.LIST_RETURN_VALUE, False, [1,1,1,1], e.InvalidRequest),
([1], 'bad_return_type', False, [1], e.ParamError),
])
def test_ctx_list_remove_by_value_list_negative(self, values, return_type, inverted, list_indexes, expected):
"""
Invoke list_remove_by_value_list() to remove elements with the given values with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_list(self.nested_list_bin, values, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("return_type, value_begin, value_end, inverted, list_indexes, expected_res, expected_bin", [
(aerospike.LIST_RETURN_COUNT, 0, 2, False, [1,1,1], 1,
[['hi', 'friend', ['how', 'are', ['you']]], ['hey', ['numbers', [2, 3]], 'home boy']]),
(aerospike.LIST_RETURN_RANK, None, None, False, [1,1], [0,1],
[['hi', 'friend', ['how', 'are', ['you']]], ['hey', [], 'home boy']]),
(aerospike.LIST_RETURN_INDEX, 2, 3, False, [1,1,1], [1],
[['hi', 'friend', ['how', 'are', ['you']]], ['hey', ['numbers', [1,3]], 'home boy']]),
(aerospike.LIST_RETURN_VALUE, 'a', 'c', True, [0,2], ['how', ['you']],
[['hi', 'friend', ['are']], ['hey', ['numbers', [1, 2, 3]], 'home boy']]),
])
def test_ctx_list_remove_by_value_range(self, return_type, value_begin, value_end, inverted, list_indexes, expected_res, expected_bin):
"""
Invoke list_remove_by_value_range() to remove elements between value_begin and value_end.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_range(self.nested_list_bin, return_type, value_begin, value_end, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_list_bin] == expected_res
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_bin] == expected_bin
@pytest.mark.parametrize("return_type, value_begin, value_end, inverted, list_indexes, expected", [
(aerospike.LIST_RETURN_VALUE, 0, 1, False, [2], e.OpNotApplicable),
(aerospike.LIST_RETURN_VALUE, 0, 1, False, [1,1,1,1], e.InvalidRequest),
('bad_return_type', 0, 1, False, [1], e.ParamError),
])
def test_ctx_list_remove_by_value_range_negative(self, return_type, value_begin, value_end, inverted, list_indexes, expected):
"""
Invoke list_remove_by_value_range() to remove elements between value_begin and value_end with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_remove_by_value_range(self.nested_list_bin, return_type, value_begin, value_end, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
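    # The ordering tests below use self.nested_list_order_bin which, inferred
    # from the expected values, presumably starts out as
    #   [[4, 2, 5], [1, 4, 2, 3], [[2, 2, 2]]]
    # so ordering or sorting sub-list 0 yields [2, 4, 5] and sub-list 1 yields
    # [1, 2, 3, 4].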
@pytest.mark.parametrize("list_order, list_indexes, expected", [
(aerospike.LIST_ORDERED, [0], [[2,4,5],[1,4,2,3],[[2,2,2]]]),
(aerospike.LIST_ORDERED, [1], [[4,2,5],[1,2,3,4],[[2,2,2]]]),
(aerospike.LIST_UNORDERED, [0], [[4,2,5],[1,4,2,3],[[2,2,2]]]),
])
def test_ctx_list_set_order(self, list_order, list_indexes, expected):
"""
Invoke list_set_order() to set the order of the list.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_set_order(self.nested_list_order_bin, list_order, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_order_bin] == expected
@pytest.mark.parametrize("list_order, list_indexes, expected", [
(aerospike.LIST_ORDERED, [0,1], e.InvalidRequest),
('bad_list_order_type', [1], e.ParamError),
])
def test_ctx_list_set_order_negative(self, list_order, list_indexes, expected):
"""
Invoke list_set_order() to set the order of the list with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_set_order(self.nested_list_order_bin, list_order, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("sort_flags, list_indexes, expected", [
(aerospike.LIST_SORT_DEFAULT, [0], [[2,4,5],[1,4,2,3],[[2,2,2]]]),
(aerospike.LIST_SORT_DROP_DUPLICATES, [2,0], [[4,2,5],[1,4,2,3],[[2]]]),
(aerospike.LIST_SORT_DEFAULT, [1], [[4,2,5],[1,2,3,4],[[2,2,2]]]),
])
def test_ctx_list_sort(self, sort_flags, list_indexes, expected):
"""
        Invoke list_sort() to sort the list with the given sort flags.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_sort(self.nested_list_order_bin, sort_flags, ctx)
]
self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_list_order_bin] == expected
@pytest.mark.parametrize("sort_flags, list_indexes, expected", [
(aerospike.LIST_SORT_DEFAULT, [0,1], e.InvalidRequest),
(aerospike.LIST_SORT_DROP_DUPLICATES, [0,1], e.InvalidRequest),
('bad_sort_flags_type', [1], e.ParamError),
])
def test_ctx_list_sort_negative(self, sort_flags, list_indexes, expected):
"""
        Invoke list_sort() to sort the list with the given sort flags, with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_list_index(place))
ops = [
list_operations.list_sort(self.nested_list_order_bin, sort_flags, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
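    # The map tests below build their ctx chains through add_ctx_op(), which
    # presumably dispatches the ctx type markers used in the parametrize lists
    # (map_index, map_key, map_rank, map_value) to the matching cdt_ctx
    # constructors. Based on the bins asserted in the map_decrement tests,
    # self.nested_map_bin presumably starts out as
    #   {'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
    #    'second': {'nested': {4, 5, 6}, 'hundred': 100},
    #    'third': {'one': {'cat': 'dog',
    #                      'barn': {'horse': 'shoe', 'fish': 'pond'},
    #                      'cage': ['bird']},
    #              'two': []}}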
@pytest.mark.parametrize("ctx_types, key, return_type, list_indexes, expected", [
([map_index], 'greet', aerospike.MAP_RETURN_VALUE, [0], 'hi'),
([map_index], 3, aerospike.MAP_RETURN_VALUE, [0], 'hello'),
([map_index], 'nested', aerospike.MAP_RETURN_VALUE, [1], {4,5,6}),
([map_index], 'dog', aerospike.MAP_RETURN_VALUE, [1], None),
([map_index, map_index, map_index], 'fish', aerospike.MAP_RETURN_VALUE, [2,0,0], 'pond'), # why does this fail?
([map_key], 'nested', aerospike.MAP_RETURN_INDEX, ['second'], 1)
])
def test_ctx_map_get_by_key(self, ctx_types, key, return_type, list_indexes, expected):
"""
Invoke map_get_by_key() to get the value at key in the map.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_key(self.nested_map_bin, key, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("key, return_type, list_indexes, expected", [
('greet', aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
('nested', aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
('greet', aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
])
def test_ctx_map_get_by_key_negative(self, key, return_type, list_indexes, expected):
"""
Invoke map_get_by_key() to get the value at key in the map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_key(self.nested_map_bin, key, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
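    # map_get_by_key_range() treats key_stop as exclusive: the [7, 9) case
    # below returns only the values stored under keys 7 and 8.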
@pytest.mark.parametrize("ctx_types, key_start, key_stop, return_type, inverted, list_indexes, expected", [
([map_index], 0, 4, aerospike.MAP_RETURN_VALUE, False, [0], ['v3', 'v1', 'v2']),
([map_index], 7, 9, aerospike.MAP_RETURN_VALUE, False, [2], ['v8', 'v7']),
([map_key, map_key], 11, 12, aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11']),
([map_index], 7, 9, aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}]),
])
def test_ctx_map_get_by_key_range(self, ctx_types, key_start, key_stop, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_key_range() to get the values starting at key_start up to key_stop.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_key_range(self.num_map_bin, key_start, key_stop, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.num_map_bin] == expected
@pytest.mark.parametrize("ctx_types, key_start, key_stop, return_type, inverted, list_indexes, expected", [
([map_index], 0, 4, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
([map_key, map_key, map_index], 11, 12, aerospike.MAP_RETURN_VALUE, False, [3, 10, 0], e.OpNotApplicable),
])
def test_ctx_map_get_by_key_range_negative(self, ctx_types, key_start, key_stop, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_key_range() to get the values starting at key_start up to key_stop with expected failures.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_key_range(self.nested_map_bin, key_start, key_stop, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, return_type, inverted, list_indexes, expected", [
([map_index], ['greet'], aerospike.MAP_RETURN_VALUE, False, [0], ['hi']),
([map_index], ['numbers', 3], aerospike.MAP_RETURN_VALUE, False, [0], ['hello', [3,1,2]]),
([map_index], ['nested', 'hundred'], aerospike.MAP_RETURN_VALUE, False, [1], [100, {4,5,6}]),
([map_index], ['dog'], aerospike.MAP_RETURN_VALUE, False, [1], []),
([map_index, map_index, map_index], ['horse', 'fish'], aerospike.MAP_RETURN_VALUE, False, [2,0,0], ['pond', 'shoe']),
([map_key], ['nested'], aerospike.MAP_RETURN_INDEX, True, ['second'], [0])
])
def test_ctx_map_get_by_key_list(self, ctx_types, key, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_key_list() to get the values at the supplied keys.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_key_list(self.nested_map_bin, key, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected
@pytest.mark.parametrize("key, return_type, inverted, list_indexes, expected", [
(['greet'], aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(['nested'], aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(['greet'], aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_get_by_key_list_negative(self, key, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_key_list() to get the values at the supplied keys with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_key_list(self.nested_map_bin, key, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, index, return_type, list_indexes, expected", [
([map_index], 1, aerospike.MAP_RETURN_VALUE, [2], []),
([map_key], 0, aerospike.MAP_RETURN_VALUE, ['first'], 'hello'),
([map_index, map_key, map_index], 1, aerospike.MAP_RETURN_VALUE, [2,'one',0], 'shoe'),
])
def test_ctx_map_get_by_index(self, ctx_types, index, return_type, list_indexes, expected):
"""
Invoke map_get_by_index() to get the value at index.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_index(self.nested_map_bin, index, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("index, return_type, list_indexes, expected", [
(0, aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
(0, aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
(0, aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
(200, aerospike.MAP_RETURN_VALUE, [0], e.OpNotApplicable),
])
def test_ctx_map_get_by_index_negative(self, index, return_type, list_indexes, expected):
"""
Invoke map_get_by_index() to get the value at index with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_index(self.nested_map_bin, index, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, index, rmv_count, return_type, inverted, list_indexes, expected", [
([map_index], 1, 1, aerospike.MAP_RETURN_VALUE, False, [0], ['hi']),
([map_index], 0, 3, aerospike.MAP_RETURN_VALUE, False, [0], ['hello', 'hi', [3,1,2]]),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [1], [100, {4,5,6}]),
([map_index, map_index, map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [2,0,0], ['pond', 'shoe']),
([map_key], 1, 2, aerospike.MAP_RETURN_INDEX, True, ['second'], [0]),
([map_rank, map_value], 0, 3, aerospike.MAP_RETURN_INDEX, False, [1, {'cat': 'dog',
'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird']}], [0,1,2])
])
def test_ctx_map_get_by_index_range(self, ctx_types, index, rmv_count, return_type, inverted, list_indexes, expected):
"""
        Invoke map_get_by_index_range() to get the values starting at index for rmv_count.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_index_range(self.nested_map_bin, index, rmv_count, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected
@pytest.mark.parametrize("index, rmv_count, return_type, inverted, list_indexes, expected", [
(1, 1, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
(1, 'bad_rmv_count', aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
])
def test_ctx_map_get_by_index_range_negative(self, index, rmv_count, return_type, inverted, list_indexes, expected):
"""
        Invoke map_get_by_index_range() to get the values starting at index for rmv_count with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_index_range(self.nested_map_bin, index, rmv_count, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
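    # Several of the following tests target self.layered_map_bin which, judging
    # from the expected bins in the map_put tests, presumably starts out as
    #   {'first': {'one': {1: {'g': 'layer', 'l': 'done'}}},
    #    'second': {'two': {2: {'g': 'layer', 'l': 'bye'}}}}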
@pytest.mark.parametrize("ctx_types, value, return_type, inverted, list_indexes, expected", [
([map_index, map_rank, map_key], 'done', aerospike.MAP_RETURN_VALUE, False, [0,0,1], ['done']),
([map_index, map_rank, map_index], 'bye', aerospike.MAP_RETURN_VALUE, False, [1,0,0], ['bye']),
([map_index, map_rank, map_index], {'g': 'layer', 'l': 'done'}, aerospike.MAP_RETURN_VALUE, False, [0,0], [{'g': 'layer', 'l': 'done'}]),
])
def test_ctx_map_get_by_value(self, ctx_types, value, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value() to get the value in the map.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_value(self.layered_map_bin, value, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.layered_map_bin] == expected
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected", [
(0, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(0, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(0, aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_get_by_value_negative(self, value, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value() to get the value in the map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_value(self.nested_map_bin, value, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, value_start, value_end, return_type, inverted, list_indexes, expected", [
([map_index], 'v1', 'v4', aerospike.MAP_RETURN_VALUE, False, [0], ['v3', 'v1', 'v2']),
([map_index], 'v5', 'v9', aerospike.MAP_RETURN_VALUE, False, [2], ['v8', 'v7']),
([map_key, map_key], 'v11', 'v12', aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11']),
([map_index], 'v5', 'v9', aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}]),
])
def test_ctx_map_get_by_value_range(self, ctx_types, value_start, value_end, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_range to get the elements between value_start and value_end.
"""
ctx = []
for x in range(0, len(list_indexes)):
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_value_range(self.num_map_bin, value_start, value_end, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.num_map_bin] == expected
@pytest.mark.parametrize("value_start, value_end, return_type, inverted, list_indexes, expected", [
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, 'bad_cdt_types', e.ParamError),
])
def test_ctx_map_get_by_value_range_negative(self, value_start, value_end, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_range to get the elements between value_start and value_end with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_value_range(self.nested_map_bin, value_start, value_end, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, values, return_type, inverted, list_indexes, expected", [
([map_index], ['hi', 'hello'], aerospike.MAP_RETURN_VALUE, False, [0], ['hello', 'hi']),
([map_index], ['hello'], aerospike.MAP_RETURN_VALUE, False, [0], ['hello']),
([map_value], [{4,5,6}, 100], aerospike.MAP_RETURN_VALUE, False, [{'nested': {4,5,6,}, 'hundred': 100}], [100, {4,5,6}]),
([map_index], ['dog'], aerospike.MAP_RETURN_VALUE, False, [1], []),
([map_index, map_key], ['dog', ['bird']], aerospike.MAP_RETURN_VALUE, True, [2,'one'], [{'horse': 'shoe', 'fish': 'pond'}]),
])
def test_ctx_map_get_by_value_list(self, ctx_types, values, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_list to get the provided values from a map.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_value_list(self.nested_map_bin, values, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected", [
('greet', aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
('nested', aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
('greet', aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_get_by_value_list_negative(self, values, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_list to get the provided values from a map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_value_list(self.nested_map_bin, values, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, rank, return_type, list_indexes, expected", [
([map_index], 1, aerospike.MAP_RETURN_VALUE, [0], 'hi'),
([map_index], 0, aerospike.MAP_RETURN_VALUE, [0], 'hello'),
([map_index], 1, aerospike.MAP_RETURN_VALUE, [1], {4,5,6}),
([map_index, map_index, map_index], 0, aerospike.MAP_RETURN_VALUE, [2,0,0], 'pond'),
([map_key], 1, aerospike.MAP_RETURN_INDEX, ['second'], 1)
])
def test_ctx_map_get_by_rank(self, ctx_types, rank, return_type, list_indexes, expected):
"""
Invoke map_get_by_rank to get the entry with the given rank.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_rank(self.nested_map_bin, rank, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("rank, return_type, list_indexes, expected", [
(1, aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
(6, aerospike.MAP_RETURN_VALUE, [1], e.OpNotApplicable),
])
def test_ctx_map_get_by_rank_negative(self, rank, return_type, list_indexes, expected):
"""
Invoke map_get_by_rank to get the entry with the given rank with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_rank(place))
ops = [
map_operations.map_get_by_rank(self.nested_map_bin, rank, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, rank, rmv_count, return_type, inverted, list_indexes, expected", [
([map_index], 0, 4, aerospike.MAP_RETURN_VALUE, False, [0], ['v1', 'v2', 'v3']),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [2], ['v7', 'v8']),
([map_key, map_key], 0, 1, aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11']),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}]),
])
def test_ctx_map_get_by_rank_range(self, ctx_types, rank, rmv_count, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_rank_range to get values starting at rank for rmv_count.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_rank_range(self.num_map_bin, rank, rmv_count, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.num_map_bin] == expected
@pytest.mark.parametrize("rank, rmv_count, return_type, inverted, list_indexes, expected", [
(1, 1, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
(1, 'bad_rmv_count', aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
(['bad_rank'], 1, aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
])
def test_ctx_map_get_by_rank_range_negative(self, rank, rmv_count, return_type, inverted, list_indexes, expected):
"""
Invoke map_get_by_rank_range to get values starting at rank for rmv_count with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_rank(place))
ops = [
map_operations.map_get_by_rank_range(self.nested_map_bin, rank, rmv_count, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, policy, map_indexes, expected", [
([map_key], {'map_order': aerospike.MAP_KEY_VALUE_ORDERED}, ['first'],
{3: 'hello', 'greet': 'hi', 'numbers': [3,1,2]}),
])
def test_ctx_map_set_policy(self, ctx_types, policy, map_indexes, expected):
"""
Invoke map_set_policy() to apply a map policy to a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_set_policy(self.nested_map_bin, policy, ctx),
map_operations.map_get_by_key(self.nested_map_bin, 'first', aerospike.MAP_RETURN_VALUE)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("ctx_types, policy, map_indexes, expected", [
(0, {'map_order': aerospike.MAP_UNORDERED}, [3], e.OpNotApplicable),
(0, {'map_order': aerospike.MAP_UNORDERED}, [1,0,0], e.OpNotApplicable),
(0, {'map_order': aerospike.MAP_UNORDERED}, 'teddy', e.ParamError),
(0, 'bad_policy', [0], e.ParamError),
])
def test_ctx_map_set_policy_negative(self, ctx_types, policy, map_indexes, expected):
"""
Invoke map_set_policy() to apply a map policy to a nested map with expected failures.
"""
ctx = []
for place in map_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_set_policy(self.nested_map_bin, policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
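    # The write-path tests below (map_put, map_put_items, map_increment,
    # map_decrement) write through a nested ctx path and then verify the
    # resulting bin contents; the negative cases exercise map_policy write
    # flags such as MAP_WRITE_FLAGS_UPDATE_ONLY.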
@pytest.mark.parametrize("ctx_types, key, value, map_policy, map_indexes, expected_val, expected_bin", [
([map_index, map_rank, map_key], 3, 4, None, [0,0,1], 3, {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {3: 4, 'l': 'done', 'g': 'layer'}}}}),
([map_index, map_rank, map_key], 'here', 'place', None, [1,0,2], 3, {'second': {'two': {2: {'here': 'place', 'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}}}}),
([map_index, map_rank], 'here', 'place', None, [1,0], 2, {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}, 'here': 'place'}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}}}}),
])
def test_ctx_map_put(self, ctx_types, key, value, map_policy, map_indexes, expected_val, expected_bin):
"""
Invoke map_put() to place a value at key in a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_put(self.layered_map_bin, key, value, map_policy, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.layered_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.layered_map_bin] == expected_bin
@pytest.mark.parametrize("ctx_types, key, value, map_policy, map_indexes, expected", [
([map_index, map_rank, map_key], 3, 4, None, [0,0,3], e.OpNotApplicable),
([map_index], 3, 4, {'map_write_flags': aerospike.MAP_WRITE_FLAGS_UPDATE_ONLY}, [0], e.ElementNotFoundError),
])
def test_ctx_map_put_negative(self, ctx_types, key, value, map_policy, map_indexes, expected):
"""
Invoke map_put() to place a value at key in a nested map with expected failures.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_put(self.layered_map_bin, key, value, map_policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, values, map_policy, map_indexes, expected_val, expected_bin", [
([map_index, map_rank, map_key], {3: 4}, None, [0,0,1], 3, {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {3: 4, 'l': 'done', 'g': 'layer'}}}}),
([map_index, map_rank, map_key], {3: 4, 'here': 'place'}, None, [1,0,2], 4, {'second': {'two': {2: {3: 4, 'here': 'place', 'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}}}}),
([map_index, map_rank], {'here': 'place', 1: 2}, None, [1,0], 3, {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}, 'here': 'place', 1: 2}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}}}}),
])
def test_ctx_map_put_items(self, ctx_types, values, map_policy, map_indexes, expected_val, expected_bin):
"""
        Invoke map_put_items() to put multiple items into a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_put_items(self.layered_map_bin, values, map_policy, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.layered_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.layered_map_bin] == expected_bin
@pytest.mark.parametrize("ctx_types, values, map_policy, map_indexes, expected", [
([map_index, map_rank, map_key], {3: 4}, None, [0,0,3], e.OpNotApplicable),
([map_index], {3: 4}, {'map_write_flags': aerospike.MAP_WRITE_FLAGS_UPDATE_ONLY}, [0], e.ElementNotFoundError),
])
def test_ctx_map_put_items_negative(self, ctx_types, values, map_policy, map_indexes, expected):
"""
        Invoke map_put_items() on nested maps with expected failure.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_put_items(self.layered_map_bin, values, map_policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, amount, map_policy, map_indexes, expected_bin", [
([map_index, map_rank, map_key], 1, 27, None, [0,0,1], {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {1: 27, 'l': 'done', 'g': 'layer'}}}}),
([map_index, map_rank], 56, 211, None, [0,0], {'second': {'two': {2: {'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}, 56: 211}}}),
([map_index], 40, 2, None, [1], {'second': {40: 2, 'two': {2: {'l': 'bye', 'g': 'layer'}}},
'first': {'one': {1: {'l': 'done', 'g': 'layer'}}}}),
])
def test_ctx_map_increment(self, ctx_types, key, amount, map_policy, map_indexes, expected_bin):
"""
Invoke map_increment to increment an element in a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_increment(self.layered_map_bin, key, amount, map_policy, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.layered_map_bin] == expected_bin
@pytest.mark.parametrize("ctx_types, key, amount, map_policy, map_indexes, expected", [
([map_index, map_rank, map_key], 'l', 27, None, [0,0,1], e.InvalidRequest),
([map_key], 'one', 27, None, ['first'], e.InvalidRequest),
([map_key], 20, 27, {'map_write_flags': aerospike.MAP_WRITE_FLAGS_UPDATE_ONLY}, ['first'], e.ElementNotFoundError), #why does this fail?
])
def test_ctx_map_increment_negative(self, ctx_types, key, amount, map_policy, map_indexes, expected):
"""
Invoke map_increment on nested maps with expected failure.
"""
if map_policy is not None:
pytest.xfail("map_increment does not support map_write_flags see: PROD-806")
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_increment(self.layered_map_bin, key, amount, map_policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, amount, map_policy, map_indexes, expected_bin", [
([map_index], 'hundred', 27, None, [1], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 73},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird']}, 'two': []}}),
([map_index, map_rank, map_key], 'new', 10, None, [2,1,'barn'], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond', 'new': -10}, 'cage': ['bird']}, 'two': []}}),
([map_index, map_key], 2, 50, None, [2,'one'], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird'], 2: -50}, 'two': []}}),
])
def test_ctx_map_decrement(self, ctx_types, key, amount, map_policy, map_indexes, expected_bin):
"""
Invoke map_decrement to decrement an element in a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_decrement(self.nested_map_bin, key, amount, map_policy, ctx)
]
_, _, _ = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("ctx_types, key, amount, map_policy, map_indexes, expected", [
([map_index, map_rank, map_key], 'l', 27, None, [0,0,1], e.InvalidRequest),
([map_key], 'one', 27, None, ['first'], e.InvalidRequest),
        ([map_key], 20, 27, {'map_write_flags': aerospike.MAP_WRITE_FLAGS_UPDATE_ONLY}, ['first'], e.ElementNotFoundError), # xfailed in the test body: map_decrement does not support map_write_flags (see PROD-806)
])
def test_ctx_map_decrement_negative(self, ctx_types, key, amount, map_policy, map_indexes, expected):
"""
Invoke map_decrement on nested maps with expected failure.
"""
if map_policy is not None:
pytest.xfail("map_decrement does not support map_write_flags see: PROD-806")
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_decrement(self.layered_map_bin, key, amount, map_policy, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, map_indexes, expected", [
([map_index], [0], 1),
([map_index, map_rank, map_value], [0,0, {'g': 'layer', 'l': 'done'}], 2),
([map_index, map_index], [1,0], 1),
])
def test_ctx_map_size(self, ctx_types, map_indexes, expected):
"""
Invoke map_size() to get the size of a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_size(self.layered_map_bin, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.layered_map_bin] == expected
@pytest.mark.parametrize("ctx_types, map_indexes, expected", [
([map_index], [3], e.OpNotApplicable),
([map_index, map_rank, map_value], [0,0, {'dog': 'cat'}], e.OpNotApplicable),
([map_index, map_index, map_index, map_index], [1,0,0,0], e.InvalidRequest),
])
def test_ctx_map_size_negative(self, ctx_types, map_indexes, expected):
"""
Invoke map_size() on a nested map with expected failure.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_size(self.layered_map_bin, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, map_indexes, expected", [
([map_index], [1], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird']}, 'two': []}}),
([map_index, map_key], [2,'one'], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {}, 'two': []}}),
([map_index, map_key, map_value], [2,'one', {'horse': 'shoe', 'fish': 'pond'}],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {}, 'cage': ['bird']}, 'two': []}}),
])
def test_ctx_map_clear(self, ctx_types, map_indexes, expected):
"""
Invoke map_clear to empty a nested map.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_clear(self.nested_map_bin, ctx)
]
_, _, _ = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected
@pytest.mark.parametrize("ctx_types, map_indexes, expected", [
([map_index], [3], e.OpNotApplicable),
([map_index, map_key], [2, 'bad_val'], e.OpNotApplicable),
([map_index, map_key, map_value], [2,'one', {'horse': 'shoe', 'fish': 'john'}], e.OpNotApplicable)
])
def test_ctx_map_clear_negative(self, ctx_types, map_indexes, expected):
"""
Invoke map_clear on nested maps with expected failure.
"""
ctx = []
for x in range(0, len(map_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], map_indexes[x]))
ops = [
map_operations.map_clear(self.nested_map_bin, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, return_type, list_indexes, expected_val, expected_bin", [
([map_index], 'greet', aerospike.MAP_RETURN_VALUE, [0], 'hi', { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 3, aerospike.MAP_RETURN_VALUE, [0], 'hello', { 'first': {'greet': 'hi', 'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'nested', aerospike.MAP_RETURN_VALUE, [1], {4,5,6}, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'dog', aerospike.MAP_RETURN_VALUE, [1], None, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], 'fish', aerospike.MAP_RETURN_VALUE, [2,0,0], 'pond',
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe'}, 'cage': ['bird']}, 'two': []}}),
([map_key], 'nested', aerospike.MAP_RETURN_INDEX, ['second'], 1, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': { 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}})
])
def test_ctx_map_remove_by_key(self, ctx_types, key, return_type, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_key() to remove an element at key.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_key(self.nested_map_bin, key, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("key, return_type, list_indexes, expected", [
('greet', aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
('nested', aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
('greet', aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
])
def test_ctx_map_remove_by_key_negative(self, key, return_type, list_indexes, expected):
"""
Invoke map_remove_by_key() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_key(self.nested_map_bin, key, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
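    # The 'inverted' flag used by the *_list and *_range variants below flips the
    # selection: e.g. map_remove_by_key_list(..., ['nested'], aerospike.MAP_RETURN_INDEX,
    # True, ctx) removes every key except 'nested' from the addressed sub-map, as the
    # last parametrize case of the next test shows (illustrative restatement of the
    # test data, not new API).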
@pytest.mark.parametrize("ctx_types, key, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], ['greet'], aerospike.MAP_RETURN_VALUE, False, [0], ['hi'], { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], ['numbers', 3], aerospike.MAP_RETURN_VALUE, False, [0], ['hello', [3,1,2]], { 'first': {'greet': 'hi',},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], ['nested', 'hundred'], aerospike.MAP_RETURN_VALUE, False, [1], [100, {4,5,6}], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], ['dog'], aerospike.MAP_RETURN_VALUE, False, [1], [], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], ['horse', 'fish'], aerospike.MAP_RETURN_VALUE, False, [2,0,0], ['pond', 'shoe'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {}, 'cage': ['bird']}, 'two': []}}),
([map_key], ['nested'], aerospike.MAP_RETURN_INDEX, True, ['second'], [0], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': { 'nested': {4,5,6,}}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}})
])
def test_ctx_map_remove_by_key_list(self, ctx_types, key, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_key_list() to remove the elements at the provided keys.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_key_list(self.nested_map_bin, key, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("key, return_type, inverted, list_indexes, expected", [
(['greet'], aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(['nested'], aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(['greet'], aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_remove_key_list_negative(self, key, return_type, inverted, list_indexes, expected):
"""
        Invoke map_remove_by_key_list() on nested maps with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_key_list(self.nested_map_bin, key, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key_start, key_end, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 0, 4, aerospike.MAP_RETURN_VALUE, False, [0], ['v3', 'v1', 'v2'], {1: {},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {11: 'v11'}}}),
([map_index], 5, 9, aerospike.MAP_RETURN_VALUE, False, [2], ['v8', 'v7'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {9: 'v9', 10: {11: 'v11'}}}),
([map_key, map_key], 11, 12, aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {}}}),
([map_index], 5, 9, aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8'}}),
])
def test_ctx_map_remove_by_key_range(self, ctx_types, key_start, key_end, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_key_range() to remove elements between key_start and key_end.
"""
ctx = []
for x in range(0, len(list_indexes)):
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_key_range(self.num_map_bin, key_start, key_end, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert res[self.num_map_bin] == expected_val
assert bins[self.num_map_bin] == expected_bin
@pytest.mark.parametrize("key_start, key_end, return_type, inverted, list_indexes, expected", [
(0, 4, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(0, 4, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(0, 4, aerospike.MAP_RETURN_VALUE, False, 'bad_cdt_types', e.ParamError),
])
def test_ctx_map_remove_by_key_range_negative(self, key_start, key_end, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_key_range() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_key_range(self.nested_map_bin, key_start, key_end, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, value, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 'hi', aerospike.MAP_RETURN_VALUE, False, [0], ['hi'], { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'hello', aerospike.MAP_RETURN_VALUE, False, [0], ['hello'], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], {4,5,6}, aerospike.MAP_RETURN_VALUE, False, [1], [{4,5,6}], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'dog', aerospike.MAP_RETURN_VALUE, False, [1], [], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], 'pond', aerospike.MAP_RETURN_VALUE, True, [2,0,0], ['shoe'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'fish': 'pond'}, 'cage': ['bird']}, 'two': []}}),
([map_key], 100, aerospike.MAP_RETURN_INDEX, False, ['second'], [0], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6}}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}})
])
def test_ctx_map_remove_by_value(self, ctx_types, value, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_value to remove the element with the given value.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_value(self.nested_map_bin, value, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("value, return_type, inverted, list_indexes, expected", [
('greet', aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
('nested', aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
('greet', aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_remove_by_value_negative(self, value, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_value() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_value(self.nested_map_bin, value, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, values, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], ['hi', 'hello'], aerospike.MAP_RETURN_VALUE, False, [0], ['hello', 'hi'], { 'first': {'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], ['hello'], aerospike.MAP_RETURN_VALUE, False, [0], ['hello'], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_value], [{4,5,6}, 100], aerospike.MAP_RETURN_VALUE, False, [{'nested': {4,5,6,}, 'hundred': 100}], [100, {4,5,6}], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], ['dog'], aerospike.MAP_RETURN_VALUE, False, [1], [], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_key], ['dog', ['bird']], aerospike.MAP_RETURN_VALUE, True, [2,'one'], [{'horse': 'shoe', 'fish': 'pond'}],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'cage': ['bird']}, 'two': []}}),
])
def test_ctx_map_remove_by_value_list(self, ctx_types, values, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_value_list() to remove elements with the given values.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_value_list(self.nested_map_bin, values, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("values, return_type, inverted, list_indexes, expected", [
('greet', aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
('nested', aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
('greet', aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
])
def test_ctx_map_remove_by_value_list_negative(self, values, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_value_list() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_value_list(self.nested_map_bin, values, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, value_start, value_end, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 'v1', 'v4', aerospike.MAP_RETURN_VALUE, False, [0], ['v3', 'v1', 'v2'], {1: {},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {11: 'v11'}}}),
([map_index], 'v5', 'v9', aerospike.MAP_RETURN_VALUE, False, [2], ['v8', 'v7'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {9: 'v9', 10: {11: 'v11'}}}),
([map_key, map_key], 'v11', 'v12', aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {}}}),
([map_index], 'v5', 'v9', aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8'}}),
])
def test_ctx_map_remove_by_value_range(self, ctx_types, value_start, value_end, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_value_range to remove elements with values between value_start and value_end.
"""
ctx = []
for x in range(0, len(list_indexes)):
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_value_range(self.num_map_bin, value_start, value_end, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
_, _, bins = self.as_connection.get(self.test_key)
assert res[self.num_map_bin] == expected_val
assert bins[self.num_map_bin] == expected_bin
@pytest.mark.parametrize("value_start, value_end, return_type, inverted, list_indexes, expected", [
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
('v0', 'v4', aerospike.MAP_RETURN_VALUE, False, 'bad_cdt_types', e.ParamError),
])
def test_ctx_map_remove_by_value_range_negative(self, value_start, value_end, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_value_range on a nested map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_value_range(self.nested_map_bin, value_start, value_end, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, index, return_type, list_indexes, expected_val, expected_bin", [
([map_index], 1, aerospike.MAP_RETURN_VALUE, [0], 'hi', { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 0, aerospike.MAP_RETURN_VALUE, [0], 'hello', { 'first': {'greet': 'hi', 'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 1, aerospike.MAP_RETURN_VALUE, [1], {4,5,6}, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], 0, aerospike.MAP_RETURN_VALUE, [2,0,0], 'pond',
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe'}, 'cage': ['bird']}, 'two': []}}),
([map_key], 1, aerospike.MAP_RETURN_INDEX, ['second'], 1, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': { 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}})
])
def test_ctx_map_remove_by_index(self, ctx_types, index, return_type, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_index() to remove the element at index.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_index(self.nested_map_bin, index, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("index, return_type, list_indexes, expected", [
(1, aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
(6, aerospike.MAP_RETURN_VALUE, [1], e.OpNotApplicable),
])
def test_ctx_map_remove_by_index_negative(self, index, return_type, list_indexes, expected):
"""
Invoke map_remove_by_index() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_index(self.nested_map_bin, index, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, index, rmv_count, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 1, 1, aerospike.MAP_RETURN_VALUE, False, [0], ['hi'], { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 0, 3, aerospike.MAP_RETURN_VALUE, False, [0], ['hello', 'hi', [3,1,2]], { 'first': {},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [1], [100, {4,5,6}], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [2,0,0], ['pond', 'shoe'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {}, 'cage': ['bird']}, 'two': []}}),
([map_key], 1, 2, aerospike.MAP_RETURN_INDEX, True, ['second'], [0], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6}}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_key, map_value], 0, 3, aerospike.MAP_RETURN_INDEX, False, ['third', {'cat': 'dog', 'barn': {'fish': 'pond', 'horse': 'shoe'}, 'cage': ['bird']}],
[0,1,2], { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {}, 'two': []}})
])
def test_ctx_map_remove_by_index_range(self, ctx_types, index, rmv_count, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_index_range() to remove elements starting at index for rmv_count.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_index_range(self.nested_map_bin, index, rmv_count, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
res = res[self.nested_map_bin]
assert res == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("index, rmv_count, return_type, inverted, list_indexes, expected", [
(1, 1, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
(1, 'bad_rmv_count', aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
])
def test_ctx_map_remove_by_index_range_negative(self, index, rmv_count, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_index_range() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_index_range(self.nested_map_bin, index, rmv_count, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, rank, return_type, list_indexes, expected_val, expected_bin", [
([map_index], 1, aerospike.MAP_RETURN_VALUE, [0], 'hi', { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 0, aerospike.MAP_RETURN_VALUE, [0], 'hello', { 'first': {'greet': 'hi', 'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 1, aerospike.MAP_RETURN_VALUE, [1], {4,5,6}, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': {'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index, map_index, map_index], 0, aerospike.MAP_RETURN_VALUE, [2,0,0], 'pond',
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6,}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe'}, 'cage': ['bird']}, 'two': []}}),
([map_key], 1, aerospike.MAP_RETURN_INDEX, ['second'], 1, { 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'},
'second': { 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}})
])
def test_ctx_map_remove_by_rank(self, ctx_types, rank, return_type, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_rank() to remove the element with the given rank.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_rank(self.nested_map_bin, rank, return_type, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("rank, return_type, list_indexes, expected", [
(1, aerospike.MAP_RETURN_VALUE, [3], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, [1,0,0], e.OpNotApplicable),
(1, aerospike.MAP_RETURN_VALUE, 'teddy', e.ParamError),
(6, aerospike.MAP_RETURN_VALUE, [1], e.OpNotApplicable),
])
def test_ctx_map_remove_by_rank_negative(self, rank, return_type, list_indexes, expected):
"""
Invoke map_remove_by_rank() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_rank(place))
ops = [
map_operations.map_remove_by_rank(self.nested_map_bin, rank, return_type, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, rank, rmv_count, return_type, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 0, 4, aerospike.MAP_RETURN_VALUE, False, [0], ['v1', 'v2', 'v3'], {1: {},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {11: 'v11'}}}),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, False, [2], ['v7', 'v8'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {9: 'v9', 10: {11: 'v11'}}}),
([map_key, map_key], 0, 1, aerospike.MAP_RETURN_VALUE, False, [3, 10], ['v11'], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8', 9: 'v9', 10: {}}}),
([map_index], 0, 2, aerospike.MAP_RETURN_VALUE, True, [2], ['v9', {11: 'v11'}], {1: {1: 'v1', 2: 'v2', 3: 'v3'},
2: {4: 'v4', 5: 'v5', 6: 'v6'}, 3: {7: 'v7', 8: 'v8'}}),
])
def test_ctx_map_remove_by_rank_range(self, ctx_types, rank, rmv_count, return_type, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_rank_range() to remove the elements starting with the given rank for rmv_count.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_rank_range(self.num_map_bin, rank, rmv_count, return_type, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.num_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.num_map_bin] == expected_bin
@pytest.mark.parametrize("rank, rmv_count, return_type, inverted, list_indexes, expected", [
(1, 1, aerospike.MAP_RETURN_VALUE, False, [3], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, [1,0,0], e.OpNotApplicable),
(1, 1, aerospike.MAP_RETURN_VALUE, False, 'teddy', e.ParamError),
(1, 'bad_rmv_count', aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
(['bad_rank'], 1, aerospike.MAP_RETURN_VALUE, False, [1], e.ParamError),
])
def test_ctx_map_remove_by_rank_range_negative(self, rank, rmv_count, return_type, inverted, list_indexes, expected):
"""
Invoke map_remove_by_rank_range() with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_rank(place))
ops = [
map_operations.map_remove_by_rank_range(self.nested_map_bin, rank, rmv_count, return_type, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
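    # Relative-rank addressing (illustrative): the anchor value plus 'offset' picks a
    # starting rank among the sub-map's values, and 'count' bounds the window.  In the
    # first case below, anchor 'hi' with offset 0 and count 1 selects only 'hi'; with
    # offset 1 and inverted=True the window is skipped and everything else is removed.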
@pytest.mark.parametrize("ctx_types, value, offset, return_type, count, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 'hi', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0], ['hi'], { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'hi', 1, aerospike.MAP_RETURN_VALUE, 3, True, [0], ['hello', 'hi'], { 'first': {'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_key, map_index, map_value], 'pond', 0, aerospike.MAP_RETURN_VALUE, 2, False, ['third',0,{'horse': 'shoe', 'fish': 'pond'}], ['pond', 'shoe'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {}, 'cage': ['bird']}, 'two': []}}),
([map_key, map_rank], {'horse': 'shoe', 'fish': 'pond'}, 0, aerospike.MAP_RETURN_VALUE, 2, True, ['third',1], ['dog',['bird']],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6}, 'hundred': 100},
'third': {'one': {'barn': {'horse': 'shoe', 'fish': 'pond'}}, 'two': []}}),
])
def test_ctx_map_remove_by_value_rank_range_relative(self, ctx_types, value, offset, return_type, count, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_value_rank_range_relative() to remove elements starting with value for count by relative rank.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_value_rank_range_relative(self.nested_map_bin, value, offset, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected", [
('greet', 'bad_offset', aerospike.MAP_RETURN_VALUE, 1, False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [3], e.OpNotApplicable),
('greet', 0, aerospike.MAP_RETURN_VALUE, 'bad_count', False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0,0,0,0], e.OpNotApplicable),
])
def test_ctx_map_remove_by_value_rank_range_relative_negative(self, value, offset, return_type, count, inverted, list_indexes, expected):
"""
        Invoke map_remove_by_value_rank_range_relative() on a nested map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_value_rank_range_relative(self.nested_map_bin, value, offset, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, value, offset, return_type, count, inverted, list_indexes, expected", [
([map_index], 'hi', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0], ['hi']),
([map_index], 'hi', 1, aerospike.MAP_RETURN_VALUE, 3, True, [0], ['hello', 'hi']),
([map_key, map_index, map_value], 'horse', 0, aerospike.MAP_RETURN_VALUE, 2, False,
['third',0,{'horse': 'shoe', 'fish': 'pond'}], ['pond', 'shoe'],),
([map_key, map_rank], {'horse': 'shoe', 'fish': 'pond'}, 0, aerospike.MAP_RETURN_VALUE, 2, True,
['third',1], ['dog',['bird']]),
])
def test_ctx_map_get_by_value_rank_range_relative(self, ctx_types, value, offset, return_type, count, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_rank_range_relative() to get elements starting with value for count by relative rank.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_value_rank_range_relative(self.nested_map_bin, value, offset, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("value, offset, return_type, count, inverted, list_indexes, expected", [
('greet', 'bad_offset', aerospike.MAP_RETURN_VALUE, 1, False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [3], e.OpNotApplicable),
('greet', 0, aerospike.MAP_RETURN_VALUE, 'bad_count', False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0,0,0,0], e.OpNotApplicable),
])
def test_ctx_map_get_by_value_rank_range_relative_negative(self, value, offset, return_type, count, inverted, list_indexes, expected):
"""
Invoke map_get_by_value_rank_range_relative() on a nested map with expected failures
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_value_rank_range_relative(self.nested_map_bin, value, offset, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, offset, return_type, count, inverted, list_indexes, expected_val, expected_bin", [
([map_index], 'greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0], ['hi'], { 'first': {'numbers': [3, 1, 2], 3: 'hello'},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_index], 'greet', 1, aerospike.MAP_RETURN_VALUE, 3, True, [0], ['hello', 'hi'], { 'first': {'numbers': [3, 1, 2]},
'second': {'nested': {4,5,6,}, 'hundred': 100}, 'third': {'one': {'cat': 'dog', 'barn': {'horse': 'shoe', 'fish': 'pond'},
'cage': ['bird']}, 'two': []}}),
([map_key, map_index, map_value], 'fish', 0, aerospike.MAP_RETURN_VALUE, 2, False, ['third',0,{'horse': 'shoe', 'fish': 'pond'}], ['pond', 'shoe'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6}, 'hundred': 100},
'third': {'one': {'cat': 'dog', 'barn': {}, 'cage': ['bird']}, 'two': []}}),
([map_key, map_rank], 'barn', 0, aerospike.MAP_RETURN_VALUE, 2, True, ['third',1], ['dog'],
{ 'first': {'greet': 'hi', 'numbers': [3, 1, 2], 3: 'hello'}, 'second': {'nested': {4,5,6}, 'hundred': 100},
'third': {'one': {'barn': {'horse': 'shoe', 'fish': 'pond'}, 'cage': ['bird']}, 'two': []}}),
])
def test_ctx_map_remove_by_key_index_range_relative(self, ctx_types, key, offset, return_type, count, inverted, list_indexes, expected_val, expected_bin):
"""
Invoke map_remove_by_key_index_range_relative() to remove elements starting at key for count by relative index.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_remove_by_key_index_range_relative(self.nested_map_bin, key, offset, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected_val
_, _, bins = self.as_connection.get(self.test_key)
assert bins[self.nested_map_bin] == expected_bin
@pytest.mark.parametrize("key, offset, return_type, count, inverted, list_indexes, expected", [
('greet', 'bad_offset', aerospike.MAP_RETURN_VALUE, 1, False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [3], e.OpNotApplicable),
('greet', 0, aerospike.MAP_RETURN_VALUE, 'bad_count', False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0,0,0,0], e.OpNotApplicable),
])
def test_ctx_map_remove_by_key_index_range_relative_negative(self, key, offset, return_type, count, inverted, list_indexes, expected):
"""
Invoke map_remove_by_key_index_range_relative() on a nested map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_remove_by_key_index_range_relative(self.nested_map_bin, key, offset, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("ctx_types, key, offset, return_type, count, inverted, list_indexes, expected", [
([map_index], 'greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0], ['hi']),
([map_index], 'greet', 1, aerospike.MAP_RETURN_VALUE, 3, True, [0], ['hello', 'hi']),
([map_key, map_index, map_value], 'fish', 0, aerospike.MAP_RETURN_VALUE, 2, False, ['third',0,
{'horse': 'shoe', 'fish': 'pond'}], ['pond', 'shoe']),
([map_key, map_rank], 'barn', 0, aerospike.MAP_RETURN_VALUE, 2, True, ['third',1], ['dog']),
])
def test_ctx_map_get_by_key_index_range_relative(self, ctx_types, key, offset, return_type, count, inverted, list_indexes, expected):
"""
        Invoke map_get_by_key_index_range_relative() to get elements starting at key for count by relative index.
"""
ctx = []
for x in range(0, len(list_indexes)) :
ctx.append(add_ctx_op(ctx_types[x], list_indexes[x]))
ops = [
map_operations.map_get_by_key_index_range_relative(self.nested_map_bin, key, offset, return_type, count, inverted, ctx)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert res[self.nested_map_bin] == expected
@pytest.mark.parametrize("key, offset, return_type, count, inverted, list_indexes, expected", [
('greet', 'bad_offset', aerospike.MAP_RETURN_VALUE, 1, False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [3], e.OpNotApplicable),
('greet', 0, aerospike.MAP_RETURN_VALUE, 'bad_count', False, [3], e.ParamError),
('greet', 0, aerospike.MAP_RETURN_VALUE, 1, False, [0,0,0,0], e.OpNotApplicable),
])
def test_ctx_map_get_by_key_index_range_relative_negative(self, key, offset, return_type, count, inverted, list_indexes, expected):
"""
        Invoke map_get_by_key_index_range_relative() on a nested map with expected failures.
"""
ctx = []
for place in list_indexes:
ctx.append(cdt_ctx.cdt_ctx_map_index(place))
ops = [
map_operations.map_get_by_key_index_range_relative(self.nested_map_bin, key, offset, return_type, count, inverted, ctx)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
def test_non_list_ctx(self):
"""
        Test that passing a single ctx object instead of a list raises ParamError.
"""
ctx = [cdt_ctx.cdt_ctx_map_key(1)]
ops = [
map_operations.map_get_by_key(self.nested_map_bin, 'greet', aerospike.MAP_RETURN_VALUE, ctx[0])
]
for i in range(10):
with pytest.raises(e.ParamError):
self.as_connection.operate(self.test_key, ops)
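    # cdt_ctx_map_key_create differs from cdt_ctx_map_key in that the context step may
    # create the intermediate map when the key is missing, using the supplied map order
    # flag (hedged summary based on the positive test below, which puts under a
    # brand-new key and reads the value back).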
@pytest.mark.parametrize("key, val", [
('new_key', 'val1'),
(230, 'val1')
])
def test_cdt_ctx_map_key_create_pos(self, key, val):
"""
Test the map_key_create cdt_ctx type.
"""
ctx = [cdt_ctx.cdt_ctx_map_key_create(key, aerospike.MAP_KEY_ORDERED)]
ops = [
map_operations.map_put(self.nested_map_bin, 'key1', val, None, ctx),
map_operations.map_get_by_key(self.nested_map_bin, key, aerospike.MAP_RETURN_VALUE)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert(res[self.nested_map_bin] == {'key1': val})
@pytest.mark.parametrize("key, val, flags, expected", [
('new_key', 'val1', ['bad_order'], e.ParamError),
('new_key', 'val1', None, e.ParamError)
])
def test_cdt_ctx_map_key_create_neg(self, key, val, flags, expected):
"""
Test the map_key_create cdt_ctx type.
"""
ctx = [cdt_ctx.cdt_ctx_map_key_create(key, flags)]
ops = [
map_operations.map_put(self.nested_map_bin, 'key1', val, None, ctx),
map_operations.map_get_by_key(self.nested_map_bin, key, aerospike.MAP_RETURN_VALUE)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops)
@pytest.mark.parametrize("index, val, pad", [
(10, 'val1', True),
(2, 'val1', False)
])
def test_cdt_ctx_list_index_create_pos(self, index, val, pad):
"""
Test the list_index_create cdt_ctx type.
"""
ctx = [cdt_ctx.cdt_ctx_list_index_create(index, aerospike.LIST_UNORDERED, pad)]
ops = [
list_operations.list_append(self.nested_list_bin, 'val1', None, ctx),
list_operations.list_get_by_index(self.nested_list_bin, index, aerospike.LIST_RETURN_VALUE)
]
_, _, res = self.as_connection.operate(self.test_key, ops)
assert(res[self.nested_list_bin] == ['val1'])
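    # In the positive case above, index 10 with pad=True is accepted because an
    # unordered list may be padded out to the requested index before the append; the
    # negative cases below drop the padding or use an ordered list and are expected to
    # fail (sketch of the intent as encoded in the parametrize data).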
@pytest.mark.parametrize("index, val, pad, flags, expected", [
(10, 'val1', False, aerospike.LIST_ORDERED, e.OpNotApplicable),
(None, 'val1', False, aerospike.LIST_ORDERED, e.ParamError),
(2, 'val1', "bad_pad", aerospike.LIST_ORDERED, e.ParamError),
(2, 'val1', "bad_pad", ["bad_flags"], e.ParamError)
])
def test_cdt_ctx_list_index_create_neg(self, index, val, pad, flags, expected):
"""
Test the list_index_create cdt_ctx type.
"""
ctx = [cdt_ctx.cdt_ctx_list_index_create(index, aerospike.LIST_ORDERED, pad)]
ops = [
list_operations.list_append(self.nested_list_bin, 'val1', None, ctx),
list_operations.list_get_by_index(self.nested_list_bin, index, aerospike.LIST_RETURN_VALUE)
]
with pytest.raises(expected):
self.as_connection.operate(self.test_key, ops) | apache-2.0 | -7,527,856,688,997,616,000 | 46.99554 | 190 | 0.559363 | false |
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Components/FileList.py | 1 | 16437 | import os
import re
from MenuList import MenuList
from Components.Harddisk import harddiskmanager
from Tools.Directories import SCOPE_ACTIVE_SKIN, resolveFilename, fileExists, pathExists
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, \
eServiceReference, eServiceCenter, gFont, getDesktop
from Tools.LoadPixmap import LoadPixmap
EXTENSIONS = {
"m4a": "music",
"mp2": "music",
"mp3": "music",
"wav": "music",
"ogg": "music",
"wma": "music",
"flac": "music",
"jpg": "picture",
"jpeg": "picture",
"png": "picture",
"bmp": "picture",
"ts": "movie",
"avi": "movie",
"divx": "movie",
"m4v": "movie",
"mpg": "movie",
"mpeg": "movie",
"mkv": "movie",
"mp4": "movie",
"mov": "movie",
"m2ts": "movie",
"3gp": "movie",
"3g2": "movie",
"asf": "movie",
"wmv": "movie",
"webm": "movie",
}
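# Illustrative example: a file named "clip.mkv" resolves to EXTENSIONS["mkv"] == "movie",
# so the entry builders below load "extensions/movie.png" from the active skin; unknown
# extensions simply get no icon.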
def FileEntryComponent(name, absolute = None, isDir = False):
if getDesktop(0).size().width() == 1920:
res = [(absolute, isDir), (eListboxPythonMultiContent.TYPE_TEXT, 35, 1, 900, 40, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
if EXTENSIONS.has_key(extension):
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 7, 20, 20, png))
return res
if getDesktop(0).size().width() == 1280:
res = [(absolute, isDir), (eListboxPythonMultiContent.TYPE_TEXT, 35, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
if EXTENSIONS.has_key(extension):
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 2, 20, 20, png))
return res
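# Usage sketch (hypothetical values): FileEntryComponent(name="rec.ts",
# absolute="/media/hdd/rec.ts", isDir=False) returns a MultiContent entry whose first
# element is the tuple (absolute, isDir); FileList.getSelection() later hands that
# tuple back to callers.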
class FileList(MenuList):
def __init__(self, directory, showDirectories = True, showFiles = True, showMountpoints = True, matchingPattern = None, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.additional_extensions = additionalExtensions
self.mountpoints = []
self.current_directory = None
self.current_mountpoint = None
self.useServiceRef = useServiceRef
self.showDirectories = showDirectories
self.showMountpoints = showMountpoints
self.showFiles = showFiles
self.isTop = isTop
# example: matching .nfi and .ts files: "^.*\.(nfi|ts)"
if matchingPattern:
self.matchingPattern = re.compile(matchingPattern)
else:
self.matchingPattern = None
self.inhibitDirs = inhibitDirs or []
self.inhibitMounts = inhibitMounts or []
self.refreshMountpoints()
self.changeDir(directory)
if getDesktop(0).size().width() == 1920:
self.l.setFont(0, gFont("Regular", 26))
self.l.setItemHeight(40)
else:
self.l.setFont(0, gFont("Regular", 18))
self.l.setItemHeight(23)
self.serviceHandler = eServiceCenter.getInstance()
def refreshMountpoints(self):
self.mountpoints = [os.path.join(p.mountpoint, "") for p in harddiskmanager.getMountedPartitions()]
self.mountpoints.sort(reverse = True)
def getMountpoint(self, file):
file = os.path.join(os.path.realpath(file), "")
for m in self.mountpoints:
if file.startswith(m):
return m
return False
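	# Illustrative: with a partition mounted at /media/hdd, getMountpoint("/media/hdd/movie/rec.ts")
	# returns "/media/hdd/" because mountpoints are stored with a trailing slash and the
	# reverse-sorted list is matched longest prefix first.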
def getMountpointLink(self, file):
if os.path.realpath(file) == file:
return self.getMountpoint(file)
else:
if file[-1] == "/":
file = file[:-1]
mp = self.getMountpoint(file)
last = file
file = os.path.dirname(file)
while last != "/" and mp == self.getMountpoint(file):
last = file
file = os.path.dirname(file)
return os.path.join(last, "")
def getSelection(self):
if self.l.getCurrentSelection() is None:
return None
return self.l.getCurrentSelection()[0]
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
if not l or l[0][1] == True:
return None
else:
return self.serviceHandler.info(l[0][0]).getEvent(l[0][0])
def getFileList(self):
return self.list
def inParentDirs(self, dir, parents):
dir = os.path.realpath(dir)
for p in parents:
if dir.startswith(p):
return True
return False
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(FileEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
# we should not use the 'eServiceReference(string)' constructor, because it doesn't allow ':' in the directoryname
root = eServiceReference(2, 0, directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = os.listdir(directory)
except:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(FileEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(FileEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
self.list.append(FileEntryComponent(name = name, absolute = x, isDir = True))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
self.list.append(FileEntryComponent(name = name, absolute = x , isDir = False))
if self.showMountpoints and len(self.list) == 0:
self.list.append(FileEntryComponent(name = _("nothing connected"), absolute = None, isDir = False))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
def getCurrentDirectory(self):
return self.current_directory
def canDescent(self):
if self.getSelection() is None:
return False
return self.getSelection()[1]
def descent(self):
if self.getSelection() is None:
return
self.changeDir(self.getSelection()[0], select = self.current_directory)
def getFilename(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
x = x.getPath()
return x
def getServiceRef(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
return x
return None
def execBegin(self):
harddiskmanager.on_partition_list_change.append(self.partitionListChanged)
def execEnd(self):
harddiskmanager.on_partition_list_change.remove(self.partitionListChanged)
def refresh(self):
self.changeDir(self.current_directory, self.getFilename())
def partitionListChanged(self, action, device):
self.refreshMountpoints()
if self.current_directory is None:
self.refresh()
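# MultiFileSelectEntryComponent() is the multi-select counterpart of
# FileEntryComponent(): it carries a 'selected' flag in the entry tuple and adds
# a lock icon reflecting that state, with larger metrics for 1920-pixel skins.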
def MultiFileSelectEntryComponent(name, absolute = None, isDir = False, selected = False):
if getDesktop(0).size().width() == 1920:
res = [(absolute, isDir, selected, name), (eListboxPythonMultiContent.TYPE_TEXT, 55, 1, 470, 40, 0, RT_HALIGN_LEFT, name)]
else:
res = [(absolute, isDir, selected, name), (eListboxPythonMultiContent.TYPE_TEXT, 55, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
if extension in EXTENSIONS:
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 30, 2, 20, 20, png))
if not name.startswith('<'):
if selected:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_on.png"))
else:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_off.png"))
if getDesktop(0).size().width() == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 2, 5, 25, 25, icon))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 2, 0, 25, 25, icon))
return res
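# MultiFileSelectList extends FileList with multi-selection: chosen paths are
# accumulated in self.selectedFiles and restored when directories are re-listed.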
class MultiFileSelectList(FileList):
def __init__(self, preselectedFiles, directory, showMountpoints = False, matchingPattern = None, showDirectories = True, showFiles = True, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
if preselectedFiles is None:
self.selectedFiles = []
else:
if getDesktop(0).size().width() == 1920:
self.selectedFiles = preselectedFiles
FileList.__init__(self, directory, showMountpoints = showMountpoints, matchingPattern = matchingPattern, showDirectories = showDirectories, showFiles = showFiles, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self.changeDir(directory)
self.l.setItemHeight(40)
self.l.setFont(0, gFont("Regular", 26))
self.onSelectionChanged = [ ]
else:
self.selectedFiles = preselectedFiles
FileList.__init__(self, directory, showMountpoints = showMountpoints, matchingPattern = matchingPattern, showDirectories = showDirectories, showFiles = showFiles, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self.changeDir(directory)
self.l.setItemHeight(25)
self.l.setFont(0, gFont("Regular", 20))
self.onSelectionChanged = [ ]
def selectionChanged(self):
for f in self.onSelectionChanged:
f()
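# changeSelectionState() toggles the selection of the highlighted entry and keeps
# self.selectedFiles in sync with the rebuilt list row.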
def changeSelectionState(self):
if len(self.list):
idx = self.l.getCurrentSelectionIndex()
newList = self.list[:]
x = self.list[idx]
if not x[0][3].startswith('<'):
if x[0][1] is True:
realPathname = x[0][0]
else:
realPathname = self.current_directory + x[0][0]
if x[0][2]:
SelectState = False
try:
self.selectedFiles.remove(realPathname)
except:
try:
self.selectedFiles.remove(os.path.normpath(realPathname))
except:
print "Couldn't remove:", realPathname
else:
SelectState = True
if (realPathname not in self.selectedFiles) and (os.path.normpath(realPathname) not in self.selectedFiles):
self.selectedFiles.append(realPathname)
newList[idx] = MultiFileSelectEntryComponent(name = x[0][3], absolute = x[0][0], isDir = x[0][1], selected = SelectState)
self.list = newList
self.l.setList(self.list)
def getSelectedList(self):
selectedFilesExist = []
for x in self.selectedFiles:
if pathExists(x):
selectedFilesExist.append(x)
return selectedFilesExist
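# changeDir() is overridden so every entry is rebuilt with its 'selected' flag
# taken from self.selectedFiles.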
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(MultiFileSelectEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
root = eServiceReference("2:0:1:0:0:0:0:0:0:0:" + directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = os.listdir(directory)
except OSError:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
alreadySelected = (x in self.selectedFiles) or (os.path.normpath(x) in self.selectedFiles)
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x, isDir = True, selected = alreadySelected))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
alreadySelected = False
for entry in self.selectedFiles:
#if os.path.basename(entry) == x:
if entry == path:
alreadySelected = True
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x , isDir = False, selected = alreadySelected))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
| gpl-2.0 | -5,400,781,138,365,447,000 | 33.824153 | 367 | 0.684249 | false |
mrunge/horizon | openstack_dashboard/dashboards/project/routers/tests.py | 1 | 35064 | # Copyright 2012, Nachi Ueno, NTT MCL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.routers.extensions.routerrules\
import rulemanager
from openstack_dashboard.dashboards.project.routers import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
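# The test classes below cover the project routers panel: index/detail rendering,
# create/update actions (including DVR and L3 HA options), the router-rules
# extension and quota-driven table state.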
class RouterTests(test.TestCase):
DASHBOARD = 'project'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
DETAIL_PATH = 'horizon:%s:routers:detail' % DASHBOARD
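# Shared mox helpers: stub the neutron network lookups used by the index and
# detail views.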
def _mock_external_network_list(self, alter_ids=False):
search_opts = {'router:external': True}
ext_nets = [n for n in self.networks.list() if n['router:external']]
if alter_ids:
for ext_net in ext_nets:
ext_net.id += 'some extra garbage'
api.neutron.network_list(
IsA(http.HttpRequest),
**search_opts).AndReturn(ext_nets)
def _mock_external_network_get(self, router):
ext_net_id = router.external_gateway_info['network_id']
ext_net = self.networks.list()[2]
api.neutron.network_get(IsA(http.HttpRequest), ext_net_id,
expand_subnet=False).AndReturn(ext_net)
@test.create_stubs({api.neutron: ('router_list', 'network_list'),
quotas: ('tenant_quota_usages',)})
def test_index(self):
quota_data = self.quota_usages.first()
quota_data['routers']['available'] = 5
api.neutron.router_list(
IsA(http.HttpRequest),
tenant_id=self.tenant.id,
search_opts=None).AndReturn(self.routers.list())
quotas.tenant_quota_usages(
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(quota_data)
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, '%s/routers/index.html' % self.DASHBOARD)
routers = res.context['table'].data
self.assertItemsEqual(routers, self.routers.list())
@test.create_stubs({api.neutron: ('router_list', 'network_list'),
quotas: ('tenant_quota_usages',)})
def test_index_router_list_exception(self):
quota_data = self.quota_usages.first()
quota_data['routers']['available'] = 5
api.neutron.router_list(
IsA(http.HttpRequest),
tenant_id=self.tenant.id,
search_opts=None).MultipleTimes().AndRaise(self.exceptions.neutron)
quotas.tenant_quota_usages(
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(quota_data)
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, '%s/routers/index.html' % self.DASHBOARD)
self.assertEqual(len(res.context['table'].data), 0)
self.assertMessageCount(res, error=1)
@test.create_stubs({api.neutron: ('router_list', 'network_list'),
quotas: ('tenant_quota_usages',)})
def test_set_external_network_empty(self):
router = self.routers.first()
quota_data = self.quota_usages.first()
quota_data['routers']['available'] = 5
api.neutron.router_list(
IsA(http.HttpRequest),
tenant_id=self.tenant.id,
search_opts=None).MultipleTimes().AndReturn([router])
quotas.tenant_quota_usages(
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(quota_data)
self._mock_external_network_list(alter_ids=True)
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
table_data = res.context['table'].data
self.assertEqual(len(table_data), 1)
self.assertIn('(Not Found)',
table_data[0]['external_gateway_info']['network'])
self.assertTemplateUsed(res, '%s/routers/index.html' % self.DASHBOARD)
self.assertMessageCount(res, error=1)
@test.create_stubs({api.neutron: ('router_get', 'port_list',
'network_get')})
def test_router_detail(self):
router = self.routers.first()
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(self.routers.first())
api.neutron.port_list(IsA(http.HttpRequest),
device_id=router.id)\
.AndReturn([self.ports.first()])
self._mock_external_network_get(router)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:%s'
':routers:detail' % self.DASHBOARD,
args=[router.id]))
self.assertTemplateUsed(res, '%s/routers/detail.html' % self.DASHBOARD)
ports = res.context['interfaces_table'].data
self.assertItemsEqual(ports, [self.ports.first()])
@test.create_stubs({api.neutron: ('router_get',)})
def test_router_detail_exception(self):
router = self.routers.first()
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:%s'
':routers:detail' % self.DASHBOARD,
args=[router.id]))
self.assertRedirectsNoFollow(res, self.INDEX_URL)
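# RouterActionTests exercise the create/update forms; the 'mode' (distributed)
# and 'ha' fields are only offered when the corresponding neutron feature
# permissions report the dvr / l3-ha extensions as available.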
class RouterActionTests(test.TestCase):
DASHBOARD = 'project'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
DETAIL_PATH = 'horizon:%s:routers:detail' % DASHBOARD
@test.create_stubs({api.neutron: ('router_create',
'get_feature_permission',)})
def test_router_create_post(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "create")\
.AndReturn(False)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "create")\
.AndReturn(False)
api.neutron.router_create(IsA(http.HttpRequest), name=router.name)\
.AndReturn(router)
self.mox.ReplayAll()
form_data = {'name': router.name}
url = reverse('horizon:%s:routers:create' % self.DASHBOARD)
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
@test.create_stubs({api.neutron: ('router_create',
'get_feature_permission',)})
def test_router_create_post_mode_server_default(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "create")\
.AndReturn(True)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "create")\
.AndReturn(True)
api.neutron.router_create(IsA(http.HttpRequest), name=router.name)\
.AndReturn(router)
self.mox.ReplayAll()
form_data = {'name': router.name,
'mode': 'server_default',
'ha': 'server_default'}
url = reverse('horizon:%s:routers:create' % self.DASHBOARD)
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
@test.create_stubs({api.neutron: ('router_create',
'get_feature_permission',)})
def test_dvr_ha_router_create_post(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "create")\
.MultipleTimes().AndReturn(True)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "create")\
.MultipleTimes().AndReturn(True)
param = {'name': router.name,
'distributed': True,
'ha': True}
api.neutron.router_create(IsA(http.HttpRequest), **param)\
.AndReturn(router)
self.mox.ReplayAll()
form_data = {'name': router.name,
'mode': 'distributed',
'ha': 'enabled'}
url = reverse('horizon:%s:routers:create' % self.DASHBOARD)
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
@test.create_stubs({api.neutron: ('router_create',
'get_feature_permission',)})
def test_router_create_post_exception_error_case_409(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "create")\
.MultipleTimes().AndReturn(False)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "create")\
.AndReturn(False)
self.exceptions.neutron.status_code = 409
api.neutron.router_create(IsA(http.HttpRequest), name=router.name)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'name': router.name}
url = reverse('horizon:%s:routers:create' % self.DASHBOARD)
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
@test.create_stubs({api.neutron: ('router_create',
'get_feature_permission',)})
def test_router_create_post_exception_error_case_non_409(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "create")\
.MultipleTimes().AndReturn(False)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "create")\
.MultipleTimes().AndReturn(False)
self.exceptions.neutron.status_code = 999
api.neutron.router_create(IsA(http.HttpRequest), name=router.name)\
.AndRaise(self.exceptions.neutron)
self.mox.ReplayAll()
form_data = {'name': router.name}
url = reverse('horizon:%s:routers:create' % self.DASHBOARD)
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
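# The update-form tests check which Router Type widgets are rendered depending on
# the dvr/l3-ha permissions and the router's current (centralized or distributed) mode.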
@test.create_stubs({api.neutron: ('router_get',
'get_feature_permission')})
def _test_router_update_get(self, dvr_enabled=False,
current_dvr=False,
ha_enabled=False):
router = [r for r in self.routers.list()
if r.distributed == current_dvr][0]
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(router)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "update")\
.AndReturn(dvr_enabled)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "update")\
.AndReturn(ha_enabled)
self.mox.ReplayAll()
url = reverse('horizon:%s:routers:update' % self.DASHBOARD,
args=[router.id])
return self.client.get(url)
def test_router_update_get_dvr_disabled(self):
res = self._test_router_update_get(dvr_enabled=False)
self.assertTemplateUsed(res, 'project/routers/update.html')
self.assertNotContains(res, 'Router Type')
self.assertNotContains(res, 'id="id_mode"')
def test_router_update_get_dvr_enabled_mode_centralized(self):
res = self._test_router_update_get(dvr_enabled=True, current_dvr=False)
self.assertTemplateUsed(res, 'project/routers/update.html')
self.assertContains(res, 'Router Type')
# Check both menu are displayed.
self.assertContains(
res,
'<option value="centralized" selected="selected">'
'Centralized</option>',
html=True)
self.assertContains(
res,
'<option value="distributed">Distributed</option>',
html=True)
def test_router_update_get_dvr_enabled_mode_distributed(self):
res = self._test_router_update_get(dvr_enabled=True, current_dvr=True)
self.assertTemplateUsed(res, 'project/routers/update.html')
self.assertContains(res, 'Router Type')
self.assertContains(
res,
'<input class="form-control" id="id_mode" name="mode" '
'readonly="readonly" type="text" value="distributed" />',
html=True)
self.assertNotContains(res, 'centralized')
@test.create_stubs({api.neutron: ('router_get',
'router_update',
'get_feature_permission')})
def test_router_update_post_dvr_ha_disabled(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "update")\
.AndReturn(False)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "update")\
.AndReturn(False)
api.neutron.router_update(IsA(http.HttpRequest), router.id,
name=router.name,
admin_state_up=router.admin_state_up)\
.AndReturn(router)
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(router)
self.mox.ReplayAll()
form_data = {'router_id': router.id,
'name': router.name,
'admin_state': router.admin_state_up}
url = reverse('horizon:%s:routers:update' % self.DASHBOARD,
args=[router.id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
@test.create_stubs({api.neutron: ('router_get',
'router_update',
'get_feature_permission')})
def test_router_update_post_dvr_ha_enabled(self):
router = self.routers.first()
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"dvr", "update")\
.AndReturn(True)
api.neutron.get_feature_permission(IsA(http.HttpRequest),
"l3-ha", "update")\
.AndReturn(True)
api.neutron.router_update(IsA(http.HttpRequest), router.id,
name=router.name,
admin_state_up=router.admin_state_up,
distributed=True,
ha=True)\
.AndReturn(router)
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(router)
self.mox.ReplayAll()
form_data = {'router_id': router.id,
'name': router.name,
'admin_state': router.admin_state_up,
'mode': 'distributed',
'ha': True}
url = reverse('horizon:%s:routers:update' % self.DASHBOARD,
args=[router.id])
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, self.INDEX_URL)
def _mock_network_list(self, tenant_id):
api.neutron.network_list(
IsA(http.HttpRequest),
shared=False,
tenant_id=tenant_id).AndReturn(self.networks.list())
api.neutron.network_list(
IsA(http.HttpRequest),
shared=True).AndReturn([])
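# Add-interface scenarios: plain subnet attach, API failure, and the variant where
# an explicit IP address first creates a port and then attaches it to the router.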
def _test_router_addinterface(self, raise_error=False):
router = self.routers.first()
subnet = self.subnets.first()
port = self.ports.first()
add_interface = api.neutron.router_add_interface(
IsA(http.HttpRequest), router.id, subnet_id=subnet.id)
if raise_error:
add_interface.AndRaise(self.exceptions.neutron)
else:
add_interface.AndReturn({'subnet_id': subnet.id,
'port_id': port.id})
api.neutron.port_get(IsA(http.HttpRequest), port.id)\
.AndReturn(port)
self._check_router_addinterface(router, subnet)
def _check_router_addinterface(self, router, subnet, ip_address=''):
# mock APIs used to show router detail
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(router)
self._mock_network_list(router['tenant_id'])
self.mox.ReplayAll()
form_data = {'router_id': router.id,
'router_name': router.name,
'subnet_id': subnet.id,
'ip_address': ip_address}
url = reverse('horizon:%s:routers:addinterface' % self.DASHBOARD,
args=[router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
detail_url = reverse(self.DETAIL_PATH, args=[router.id])
self.assertRedirectsNoFollow(res, detail_url)
@test.create_stubs({api.neutron: ('router_get',
'router_add_interface',
'port_get',
'network_list')})
def test_router_addinterface(self):
self._test_router_addinterface()
@test.create_stubs({api.neutron: ('router_get',
'router_add_interface',
'network_list')})
def test_router_addinterface_exception(self):
self._test_router_addinterface(raise_error=True)
def _test_router_addinterface_ip_addr(self, errors=[]):
router = self.routers.first()
subnet = self.subnets.first()
port = self.ports.first()
ip_addr = port['fixed_ips'][0]['ip_address']
self._setup_mock_addinterface_ip_addr(router, subnet, port,
ip_addr, errors)
self._check_router_addinterface(router, subnet, ip_addr)
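# Mock recording stops after the first induced failure (plus the port cleanup
# expected after a failed attach), so each error scenario replays only the calls
# that should actually happen.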
def _setup_mock_addinterface_ip_addr(self, router, subnet, port,
ip_addr, errors=[]):
subnet_get = api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)
if 'subnet_get' in errors:
subnet_get.AndRaise(self.exceptions.neutron)
return
subnet_get.AndReturn(subnet)
params = {'network_id': subnet.network_id,
'fixed_ips': [{'subnet_id': subnet.id,
'ip_address': ip_addr}]}
port_create = api.neutron.port_create(IsA(http.HttpRequest), **params)
if 'port_create' in errors:
port_create.AndRaise(self.exceptions.neutron)
return
port_create.AndReturn(port)
add_inf = api.neutron.router_add_interface(
IsA(http.HttpRequest), router.id, port_id=port.id)
if 'add_interface' not in errors:
return
add_inf.AndRaise(self.exceptions.neutron)
port_delete = api.neutron.port_delete(IsA(http.HttpRequest), port.id)
if 'port_delete' in errors:
port_delete.AndRaise(self.exceptions.neutron)
@test.create_stubs({api.neutron: ('router_add_interface', 'subnet_get',
'port_create',
'router_get', 'network_list')})
def test_router_addinterface_ip_addr(self):
self._test_router_addinterface_ip_addr()
@test.create_stubs({api.neutron: ('subnet_get',
'router_get', 'network_list')})
def test_router_addinterface_ip_addr_exception_subnet_get(self):
self._test_router_addinterface_ip_addr(errors=['subnet_get'])
@test.create_stubs({api.neutron: ('subnet_get', 'port_create',
'router_get', 'network_list')})
def test_router_addinterface_ip_addr_exception_port_create(self):
self._test_router_addinterface_ip_addr(errors=['port_create'])
@test.create_stubs({api.neutron: ('router_add_interface', 'subnet_get',
'port_create', 'port_delete',
'router_get', 'network_list')})
def test_router_addinterface_ip_addr_exception_add_interface(self):
self._test_router_addinterface_ip_addr(errors=['add_interface'])
@test.create_stubs({api.neutron: ('router_add_interface', 'subnet_get',
'port_create', 'port_delete',
'router_get', 'network_list')})
def test_router_addinterface_ip_addr_exception_port_delete(self):
self._test_router_addinterface_ip_addr(errors=['add_interface',
'port_delete'])
@test.create_stubs({api.neutron: ('router_get',
'router_add_gateway',
'network_list')})
def test_router_add_gateway(self):
router = self.routers.first()
network = self.networks.first()
api.neutron.router_add_gateway(
IsA(http.HttpRequest),
router.id,
network.id).AndReturn(None)
api.neutron.router_get(
IsA(http.HttpRequest), router.id).AndReturn(router)
search_opts = {'router:external': True}
api.neutron.network_list(
IsA(http.HttpRequest), **search_opts).AndReturn([network])
self.mox.ReplayAll()
form_data = {'router_id': router.id,
'router_name': router.name,
'network_id': network.id}
url = reverse('horizon:%s:routers:setgateway' % self.DASHBOARD,
args=[router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
detail_url = self.INDEX_URL
self.assertRedirectsNoFollow(res, detail_url)
@test.create_stubs({api.neutron: ('router_get',
'router_add_gateway',
'network_list')})
def test_router_add_gateway_exception(self):
router = self.routers.first()
network = self.networks.first()
api.neutron.router_add_gateway(
IsA(http.HttpRequest),
router.id,
network.id).AndRaise(self.exceptions.neutron)
api.neutron.router_get(
IsA(http.HttpRequest), router.id).AndReturn(router)
search_opts = {'router:external': True}
api.neutron.network_list(
IsA(http.HttpRequest), **search_opts).AndReturn([network])
self.mox.ReplayAll()
form_data = {'router_id': router.id,
'router_name': router.name,
'network_id': network.id}
url = reverse('horizon:%s:routers:setgateway' % self.DASHBOARD,
args=[router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
detail_url = self.INDEX_URL
self.assertRedirectsNoFollow(res, detail_url)
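# RouterRuleTests cover the optional router-rules extension; the rules grid
# template is only rendered for routers that actually carry router_rules.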
class RouterRuleTests(test.TestCase):
DASHBOARD = 'project'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
DETAIL_PATH = 'horizon:%s:routers:detail' % DASHBOARD
def _mock_external_network_get(self, router):
ext_net_id = router.external_gateway_info['network_id']
ext_net = self.networks.list()[2]
api.neutron.network_get(IsA(http.HttpRequest), ext_net_id,
expand_subnet=False).AndReturn(ext_net)
def _mock_network_list(self, tenant_id):
api.neutron.network_list(
IsA(http.HttpRequest),
shared=False,
tenant_id=tenant_id).AndReturn(self.networks.list())
api.neutron.network_list(
IsA(http.HttpRequest),
shared=True).AndReturn([])
@test.create_stubs({api.neutron: ('router_get', 'port_list',
'network_get')})
def test_extension_hides_without_rules(self):
router = self.routers.first()
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(self.routers.first())
api.neutron.port_list(IsA(http.HttpRequest),
device_id=router.id)\
.AndReturn([self.ports.first()])
self._mock_external_network_get(router)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:%s'
':routers:detail' % self.DASHBOARD,
args=[router.id]))
self.assertTemplateUsed(res, '%s/routers/detail.html' % self.DASHBOARD)
self.assertTemplateNotUsed(res,
'%s/routers/extensions/routerrules/grid.html' % self.DASHBOARD)
@test.create_stubs({api.neutron: ('router_get', 'port_list',
'network_get', 'network_list')})
def test_routerrule_detail(self):
router = self.routers_with_rules.first()
api.neutron.router_get(IsA(http.HttpRequest), router.id)\
.AndReturn(self.routers_with_rules.first())
api.neutron.port_list(IsA(http.HttpRequest),
device_id=router.id)\
.AndReturn([self.ports.first()])
self._mock_external_network_get(router)
if self.DASHBOARD == 'project':
api.neutron.network_list(
IsA(http.HttpRequest),
shared=False,
tenant_id=router['tenant_id']).AndReturn(self.networks.list())
api.neutron.network_list(
IsA(http.HttpRequest),
shared=True).AndReturn([])
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:%s'
':routers:detail' % self.DASHBOARD,
args=[router.id]))
self.assertTemplateUsed(res, '%s/routers/detail.html' % self.DASHBOARD)
if self.DASHBOARD == 'project':
self.assertTemplateUsed(res,
'%s/routers/extensions/routerrules/grid.html' % self.DASHBOARD)
rules = res.context['routerrules_table'].data
self.assertItemsEqual(rules, router['router_rules'])
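# Adding, removing and resetting rules always goes through router_update with the
# full rule set serialized by rulemanager.format_for_api().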
def _test_router_addrouterrule(self, raise_error=False):
pre_router = self.routers_with_rules.first()
post_router = copy.deepcopy(pre_router)
rule = {'source': '1.2.3.4/32', 'destination': '4.3.2.1/32', 'id': 99,
'action': 'permit', 'nexthops': ['1.1.1.1', '2.2.2.2']}
post_router['router_rules'].insert(0, rule)
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(pre_router)
params = {}
params['router_rules'] = rulemanager.format_for_api(
post_router['router_rules'])
router_update = api.neutron.router_update(IsA(http.HttpRequest),
pre_router.id, **params)
if raise_error:
router_update.AndRaise(self.exceptions.neutron)
else:
router_update.AndReturn({'router': post_router})
self.mox.ReplayAll()
form_data = {'router_id': pre_router.id,
'source': rule['source'],
'destination': rule['destination'],
'action': rule['action'],
'nexthops': ','.join(rule['nexthops'])}
url = reverse('horizon:%s:routers:addrouterrule' % self.DASHBOARD,
args=[pre_router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
detail_url = reverse(self.DETAIL_PATH, args=[pre_router.id])
self.assertRedirectsNoFollow(res, detail_url)
@test.create_stubs({api.neutron: ('router_get',
'router_update')})
def test_router_addrouterrule(self):
self._test_router_addrouterrule()
@test.create_stubs({api.neutron: ('router_get',
'router_update')})
def test_router_addrouterrule_exception(self):
self._test_router_addrouterrule(raise_error=True)
@test.create_stubs({api.neutron: ('router_get', 'router_update',
'port_list', 'network_get')})
def test_router_removerouterrule(self):
pre_router = self.routers_with_rules.first()
post_router = copy.deepcopy(pre_router)
rule = post_router['router_rules'].pop()
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(pre_router)
params = {}
params['router_rules'] = rulemanager.format_for_api(
post_router['router_rules'])
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(pre_router)
router_update = api.neutron.router_update(IsA(http.HttpRequest),
pre_router.id, **params)
router_update.AndReturn({'router': post_router})
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(pre_router)
api.neutron.port_list(IsA(http.HttpRequest),
device_id=pre_router.id)\
.AndReturn([self.ports.first()])
self._mock_external_network_get(pre_router)
self.mox.ReplayAll()
form_rule_id = rule['source'] + rule['destination']
form_data = {'router_id': pre_router.id,
'action': 'routerrules__delete__%s' % form_rule_id}
url = reverse(self.DETAIL_PATH, args=[pre_router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
@test.create_stubs({api.neutron: ('router_get', 'router_update',
'network_list', 'port_list',
'network_get')})
def test_router_resetrouterrules(self):
pre_router = self.routers_with_rules.first()
post_router = copy.deepcopy(pre_router)
default_rules = [{'source': 'any', 'destination': 'any',
'action': 'permit', 'nexthops': [], 'id': '2'}]
del post_router['router_rules'][:]
post_router['router_rules'].extend(default_rules)
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(post_router)
params = {}
params['router_rules'] = rulemanager.format_for_api(
post_router['router_rules'])
router_update = api.neutron.router_update(IsA(http.HttpRequest),
pre_router.id, **params)
router_update.AndReturn({'router': post_router})
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(post_router)
api.neutron.port_list(IsA(http.HttpRequest),
device_id=pre_router.id)\
.AndReturn([self.ports.first()])
self._mock_external_network_get(pre_router)
self._mock_network_list(pre_router['tenant_id'])
api.neutron.router_get(IsA(http.HttpRequest),
pre_router.id).AndReturn(post_router)
self.mox.ReplayAll()
form_data = {'router_id': pre_router.id,
'action': 'routerrules__resetrules'}
url = reverse(self.DETAIL_PATH, args=[pre_router.id])
res = self.client.post(url, form_data)
self.assertNoFormErrors(res)
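# RouterViewTests check table-level UI state, e.g. that the Create Router button
# is rendered as disabled once the router quota is exhausted.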
class RouterViewTests(test.TestCase):
DASHBOARD = 'project'
INDEX_URL = reverse('horizon:%s:routers:index' % DASHBOARD)
def _mock_external_network_list(self, alter_ids=False):
search_opts = {'router:external': True}
ext_nets = [n for n in self.networks.list() if n['router:external']]
if alter_ids:
for ext_net in ext_nets:
ext_net.id += 'some extra garbage'
api.neutron.network_list(
IsA(http.HttpRequest),
**search_opts).AndReturn(ext_nets)
@test.create_stubs({api.neutron: ('router_list', 'network_list'),
quotas: ('tenant_quota_usages',)})
def test_create_button_disabled_when_quota_exceeded(self):
quota_data = self.quota_usages.first()
quota_data['routers']['available'] = 0
api.neutron.router_list(
IsA(http.HttpRequest),
tenant_id=self.tenant.id,
search_opts=None).AndReturn(self.routers.list())
quotas.tenant_quota_usages(
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(quota_data)
self._mock_external_network_list()
self.mox.ReplayAll()
res = self.client.get(self.INDEX_URL)
self.assertTemplateUsed(res, 'project/routers/index.html')
routers = res.context['Routers_table'].data
self.assertItemsEqual(routers, self.routers.list())
create_link = tables.CreateRouter()
url = create_link.get_link_url()
classes = (list(create_link.get_default_classes())
+ list(create_link.classes))
link_name = "%s (%s)" % (unicode(create_link.verbose_name),
"Quota exceeded")
expected_string = "<a href='%s' title='%s' class='%s disabled' "\
"id='Routers__action_create'>" \
"<span class='glyphicon glyphicon-plus'></span>%s</a>" \
% (url, link_name, " ".join(classes), link_name)
self.assertContains(res, expected_string, html=True,
msg_prefix="The create button is not disabled")
| apache-2.0 | 46,520,934,601,362,296 | 43.441065 | 79 | 0.560746 | false |
ikalnytskyi/sphinxcontrib-openapi | tests/conftest.py | 1 | 1259 | import textwrap
import pytest
from sphinx.application import Sphinx
def _format_option_raw(key, val):
if isinstance(val, bool) and val:
return ':%s:' % key
return ':%s: %s' % (key, val)
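# run_sphinx builds a throw-away Sphinx project around a single 'openapi'
# directive and runs a full HTML build; tests call it with a spec path and an
# optional dict of directive options.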
@pytest.fixture(scope='function')
def run_sphinx(tmpdir):
src = tmpdir.ensure('src', dir=True)
out = tmpdir.ensure('out', dir=True)
def run(spec, options={}):
options_raw = '\n'.join([
' %s' % _format_option_raw(key, val)
for key, val in options.items()])
src.join('conf.py').write_text(
textwrap.dedent('''
import os
project = 'sphinxcontrib-openapi-test'
copyright = '2017, Ihor Kalnytskyi'
extensions = ['sphinxcontrib.openapi']
source_suffix = '.rst'
master_doc = 'index'
'''),
encoding='utf-8')
src.join('index.rst').write_text(
'.. openapi:: %s\n%s' % (spec, options_raw),
encoding='utf-8')
Sphinx(
srcdir=src.strpath,
confdir=src.strpath,
outdir=out.strpath,
doctreedir=out.join('.doctrees').strpath,
buildername='html'
).build()
yield run
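# Illustrative usage only (not part of the original file); the spec name and the
# option shown are placeholders:
#     def test_basic(run_sphinx):
#         run_sphinx('petstore.yaml', options={'examples': True})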
| bsd-2-clause | 7,175,091,320,421,709,000 | 25.229167 | 56 | 0.517077 | false |